merged 3 files

Merge branch 'develop' of ariadne.geophysik.ruhr-uni-bochum.de:/data/git/pylot into develop

Conflicts:
	pylot/core/active/activeSeismoPick.py
	pylot/core/active/seismicshot.py
	pylot/core/active/surveyPlotTools.py
Marcel Paffrath 2015-10-19 13:15:28 +02:00
commit 195352a7ca
31 changed files with 454 additions and 289 deletions

View File

@ -8,11 +8,11 @@ import glob
import matplotlib.pyplot as plt
from obspy.core import read
from pylot.core.util import _getVersionString
from pylot.core.read.data import Data
from pylot.core.read.inputs import AutoPickParameter
from pylot.core.util.structure import DATASTRUCTURE
from pylot.core.pick.autopick import autopickevent
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()

View File

@ -1,10 +1,10 @@
<RCC>
<qresource>
<file>icons/pylot.ico</file>
<file>icons/pylot.png</file>
<file>icons/pylot.ico</file>
<file>icons/pylot.png</file>
<file>icons/printer.png</file>
<file>icons/delete.png</file>
<file>icons/key_E.png</file>
<file>icons/key_E.png</file>
<file>icons/key_N.png</file>
<file>icons/key_P.png</file>
<file>icons/key_Q.png</file>
@ -14,7 +14,7 @@
<file>icons/key_U.png</file>
<file>icons/key_V.png</file>
<file>icons/key_W.png</file>
<file>icons/key_Z.png</file>
<file>icons/key_Z.png</file>
<file>icons/filter.png</file>
<file>icons/sync.png</file>
<file>icons/zoom_0.png</file>

View File

@ -0,0 +1 @@
# -*- coding: utf-8 -*-

View File

@ -1 +1,2 @@
# -*- coding: utf-8 -*-
__author__ = 'sebastianw'

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import sys
import numpy as np
from pylot.core.active import seismicshot
@ -14,7 +15,7 @@ class Survey(object):
self._sourcefile = sourcefile
self._obsdir = path
self._generateSurvey()
if useDefaultParas == True:
if useDefaultParas == True:
self.setParametersForShots()
self._removeAllEmptyTraces()
self._updateShots()
@ -72,7 +73,7 @@ class Survey(object):
if removed is not None:
if count == 0: outfile = open(filename, 'w')
count += 1
outfile.writelines('shot: %s, removed empty traces: %s\n'
outfile.writelines('shot: %s, removed empty traces: %s\n'
%(shot.getShotnumber(), removed))
print ("\nremoveEmptyTraces: Finished! Removed %d traces" %count)
if count > 0:
@ -90,7 +91,7 @@ class Survey(object):
count += 1
countTraces += len(del_traceIDs)
outfile.writelines("shot: %s, removed traceID(s) %s because "
"they were not found in the corresponding stream\n"
"they were not found in the corresponding stream\n"
%(shot.getShotnumber(), del_traceIDs))
print ("\nupdateShots: Finished! Updated %d shots and removed "
@ -166,8 +167,8 @@ class Survey(object):
def countAllTraces(self):
numtraces = 0
for line in self.getShotlist():
for line in self.getReceiverlist():
for shot in self.getShotlist():
for rec in self.getReceiverlist():
numtraces += 1
return numtraces
@ -202,7 +203,7 @@ class Survey(object):
def getReceiverfile(self):
return self._recfile
def getPath(self):
return self._obsdir
@ -250,7 +251,7 @@ class Survey(object):
(x, y, z) = shot.getSrcLoc() # getSrcLoc returns (x, y, z)
srcfile.writelines('%10s %10s %10s\n' %(getAngle(y), getAngle(x), (-1)*z)) # lat, lon, depth
LatAll.append(getAngle(y)); LonAll.append(getAngle(x)); DepthAll.append((-1)*z)
srcfile.writelines('%10s\n' %1) #
srcfile.writelines('%10s\n' %1) #
srcfile.writelines('%10s %10s %10s\n' %(1, 1, ttfilename))
ttfile = open(directory + '/' + ttfilename, 'w')
traceIDlist = shot.getTraceIDlist()
@ -265,7 +266,7 @@ class Survey(object):
LatAll.append(getAngle(y)); LonAll.append(getAngle(x)); DepthAll.append((-1)*z)
count += 1
ttfile.close()
srcfile.close()
srcfile.close()
print 'Wrote output for %s traces' %count
print 'WARNING: output generated for FMTOMO-obsdata. Obsdata seems to take Lat, Lon, Depth and creates output for FMTOMO as Depth, Lat, Lon'
print 'Dimensions of the seismic Array, transformed for FMTOMO, are Depth(%s, %s), Lat(%s, %s), Lon(%s, %s)'%(
@ -410,7 +411,7 @@ class Survey(object):
return ax
def _update_progress(self, shotname, tend, progress):
sys.stdout.write("Working on shot %s. ETC is %02d:%02d:%02d [%2.2f %%]\r"
sys.stdout.write("Working on shot %s. ETC is %02d:%02d:%02d [%2.2f %%]\r"
%(shotname, tend.hour, tend.minute, tend.second, progress))
sys.stdout.flush()
@ -420,8 +421,8 @@ class Survey(object):
cPickle.dump(self, outfile, -1)
print('saved Survey to file %s'%(filename))
@staticmethod
@staticmethod
def from_pickle(filename):
import cPickle
infile = open(filename, 'rb')
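For context, the @staticmethod placement fixed above matters because from_pickle is meant to be called on the class itself. A minimal round trip with cPickle, mirroring the save/from_pickle pair in this file (a sketch only; function and file names are illustrative, not taken from the commit):

import cPickle  # Python 2, as used throughout this commit

def save_survey(survey, filename='survey.pickle'):
    # serialize the whole Survey object with the highest pickle protocol (-1)
    outfile = open(filename, 'wb')
    cPickle.dump(survey, outfile, -1)
    outfile.close()

def load_survey(filename='survey.pickle'):
    # counterpart of Survey.from_pickle: restore the object from disk
    infile = open(filename, 'rb')
    survey = cPickle.load(infile)
    infile.close()
    return survey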

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import numpy as np
def vgrids2VTK(inputfile = 'vgrids.in', outputfile = 'vgrids.vtk'):
@ -73,7 +74,7 @@ def vgrids2VTK(inputfile = 'vgrids.in', outputfile = 'vgrids.vtk'):
dX = getDistance(np.rad2deg(dPhi))
dY = getDistance(np.rad2deg(dTheta))
nPoints = nX * nY * nZ
dZ = dR
@ -114,7 +115,7 @@ def rays2VTK(fnin, fdirout = './vtk_files/', nthPoint = 50):
R = 6371.
distance = angle / 180 * (PI * R)
return distance
infile = open(fnin, 'r')
R = 6371
rays = {}
@ -124,7 +125,7 @@ def rays2VTK(fnin, fdirout = './vtk_files/', nthPoint = 50):
### NOTE: rays.dat seems to be in km and radians
while True:
raynumber += 1
raynumber += 1
firstline = infile.readline()
if firstline == '': break # break at EOF
raynumber = int(firstline.split()[0])
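Both helpers above convert between arc angle and distance on a sphere of radius R = 6371 km, i.e. distance = angle / 180 * (pi * R); as a sanity check, one degree of arc corresponds to roughly 111.2 km. A standalone illustration (not code from the commit):

import numpy as np

R = 6371.  # Earth radius [km], as in the functions above

def distance_from_angle(angle_deg):
    # arc length on the sphere for an angle given in degrees
    return angle_deg / 180. * (np.pi * R)

def angle_from_distance(distance_km):
    # inverse relation, used when exporting FMTOMO sources and receivers
    return distance_km * 180. / (np.pi * R)

print(distance_from_angle(1.))      # ~111.19 km
print(angle_from_distance(111.19))  # ~1.0 degree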

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import sys
from obspy import read
from obspy import Stream
@ -28,7 +29,7 @@ if rockeskyll == True:
obsdir = "/rscratch/minos22/marcel/flachseismik/rockeskyll_200615_270615/"
filename = 'survey_rockes.pickle'
else:
receiverfile = "Geophone_interpoliert_GZB"
receiverfile = "Geophone_interpoliert_GZB"
sourcefile = "Schusspunkte_GZB"
obsdir = "/rscratch/minos22/marcel/flachseismik/GZB_26_06_15_01/"
filename = 'survey_GZB.pickle'
@ -85,9 +86,9 @@ for shot in survey.data.values():
shot.setPickwindow(traceID, pickwin_used)
shot.pickTraces(traceID, windowsize, folm, HosAic) # picker
#shot.setManualPicks(traceID, picklist) # set manual picks if given (yet used on 2D only)
# ++ TEST: set and check SNR before adding to distance bin ############################
shot.setSNR(traceID)
shot.setSNR(traceID)
#if shot.getSNR(traceID)[0] < snrthreshold:
if shot.getSNR(traceID)[0] < shot.getSNRthreshold(traceID):
shot.removePick(traceID)

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import sys
import numpy as np
from scipy.interpolate import griddata
@ -28,12 +29,12 @@ class SeisArray(object):
def _generateReceiverlines(self):
'''
Connects the traceIDs to the lineIDs
Connects the traceIDs to the lineIDs
for each receiverline in a dictionary.
'''
for receiver in self._receiverlist:
traceID = int(receiver.split()[0])
lineID = int(receiver.split()[1])
traceID = int(receiver.split()[0])
lineID = int(receiver.split()[1])
if not lineID in self._receiverlines.keys():
self._receiverlines[lineID] = []
self._receiverlines[lineID].append(traceID)
@ -43,16 +44,16 @@ class SeisArray(object):
Fills the three x, y, z dictionaries with measured coordinates
'''
for line in self._getReceiverlist():
traceID = int(line.split()[0])
x = float(line.split()[3])
y = float(line.split()[4])
z = float(line.split()[5])
traceID = int(line.split()[0])
x = float(line.split()[3])
y = float(line.split()[4])
z = float(line.split()[5])
self._receiverCoords[traceID] = (x, y, z)
self._measuredReceivers[traceID] = (x, y, z)
def _setGeophoneNumbers(self):
for line in self._getReceiverlist():
traceID = int(line.split()[0])
traceID = int(line.split()[0])
gphoneNum = float(line.split()[2])
self._geophoneNumbers[traceID] = gphoneNum
@ -93,7 +94,7 @@ class SeisArray(object):
return self._geophoneNumbers[traceID]
def getMeasuredReceivers(self):
return self._measuredReceivers
return self._measuredReceivers
def getMeasuredTopo(self):
return self._measuredTopo
@ -139,11 +140,11 @@ class SeisArray(object):
if self._getReceiverValue(traceID1, coordinate) < self._getReceiverValue(traceID2, coordinate):
direction = +1
return direction
if self._getReceiverValue(traceID1, coordinate) > self._getReceiverValue(traceID2, coordinate):
if self._getReceiverValue(traceID1, coordinate) > self._getReceiverValue(traceID2, coordinate):
direction = -1
return direction
print "Error: Same Value for traceID1 = %s and traceID2 = %s" %(traceID1, traceID2)
def _interpolateMeanDistances(self, traceID1, traceID2, coordinate):
'''
Returns the mean distance between two traceID's depending on the number of geophones in between
@ -186,7 +187,7 @@ class SeisArray(object):
x = float(line[1])
y = float(line[2])
z = float(line[3])
self._measuredTopo[pointID] = (x, y, z)
self._measuredTopo[pointID] = (x, y, z)
def addSourceLocations(self, filename):
'''
@ -202,7 +203,7 @@ class SeisArray(object):
x = float(line[1])
y = float(line[2])
z = float(line[3])
self._sourceLocs[pointID] = (x, y, z)
self._sourceLocs[pointID] = (x, y, z)
def interpZcoords4rec(self, method = 'linear'):
'''
@ -239,9 +240,9 @@ class SeisArray(object):
'''
x = []; y = []; z = []
for traceID in self.getMeasuredReceivers().keys():
x.append(self.getMeasuredReceivers()[traceID][0])
x.append(self.getMeasuredReceivers()[traceID][0])
y.append(self.getMeasuredReceivers()[traceID][1])
z.append(self.getMeasuredReceivers()[traceID][2])
z.append(self.getMeasuredReceivers()[traceID][2])
return x, y, z
def getMeasuredTopoLists(self):
@ -250,9 +251,9 @@ class SeisArray(object):
'''
x = []; y = []; z = []
for pointID in self.getMeasuredTopo().keys():
x.append(self.getMeasuredTopo()[pointID][0])
x.append(self.getMeasuredTopo()[pointID][0])
y.append(self.getMeasuredTopo()[pointID][1])
z.append(self.getMeasuredTopo()[pointID][2])
z.append(self.getMeasuredTopo()[pointID][2])
return x, y, z
def getSourceLocsLists(self):
@ -261,9 +262,9 @@ class SeisArray(object):
'''
x = []; y = []; z = []
for pointID in self.getSourceLocations().keys():
x.append(self.getSourceLocations()[pointID][0])
x.append(self.getSourceLocations()[pointID][0])
y.append(self.getSourceLocations()[pointID][1])
z.append(self.getSourceLocations()[pointID][2])
z.append(self.getSourceLocations()[pointID][2])
return x, y, z
def getAllMeasuredPointsLists(self):
@ -289,7 +290,7 @@ class SeisArray(object):
y.append(self.getReceiverCoordinates()[traceID][1])
z.append(self.getReceiverCoordinates()[traceID][2])
return x, y, z
def _interpolateXY4rec(self):
'''
Interpolates the X and Y coordinates for all receivers.
@ -317,7 +318,7 @@ class SeisArray(object):
:param: phiWE (W, E) extensions of the model in degree
type: tuple
'''
'''
surface = []
elevation = 0.25 # elevate topography so that no source lies above the surface
@ -356,9 +357,9 @@ class SeisArray(object):
progress = float(count) / float(nTotal) * 100
self._update_progress(progress)
if filename is not None:
if filename is not None:
outfile.writelines('%10s\n'%(z + elevation))
return surface
def generateVgrid(self, nTheta = 80, nPhi = 80, nR = 120,
@ -415,7 +416,7 @@ class SeisArray(object):
thetaDelta = abs(thetaN - thetaS) / (nTheta - 1)
phiDelta = abs(phiE - phiW) / (nPhi - 1)
rDelta = abs(rbot - rtop) / (nR - 1)
# create a regular grid including +2 cushion nodes in every direction
thetaGrid = np.linspace(thetaS - thetaDelta, thetaN + thetaDelta, num = nTheta + 2) # +2 cushion nodes
phiGrid = np.linspace(phiW - phiDelta, phiE + phiDelta, num = nPhi + 2) # +2 cushion nodes
@ -455,7 +456,7 @@ class SeisArray(object):
progress = float(count) / float(nTotal) * 100
self._update_progress(progress)
outfile.close()
def exportAll(self, filename = 'interpolated_receivers.out'):
@ -463,7 +464,7 @@ class SeisArray(object):
count = 0
for traceID in self.getReceiverCoordinates().keys():
count += 1
x, y, z = self.getReceiverCoordinates()[traceID]
x, y, z = self.getReceiverCoordinates()[traceID]
recfile_out.writelines('%5s %15s %15s %15s\n' %(traceID, x, y, z))
print "Exported coordinates for %s traces to file > %s" %(count, filename)
recfile_out.close()
@ -472,15 +473,15 @@ class SeisArray(object):
import matplotlib.pyplot as plt
plt.interactive(True)
plt.figure()
xmt, ymt, zmt = self.getMeasuredTopoLists()
xmt, ymt, zmt = self.getMeasuredTopoLists()
xsc, ysc, zsc = self.getSourceLocsLists()
xmr, ymr, zmr = self.getMeasuredReceiverLists()
xrc, yrc, zrc = self.getReceiverLists()
xmr, ymr, zmr = self.getMeasuredReceiverLists()
xrc, yrc, zrc = self.getReceiverLists()
plt.plot(xrc, yrc, 'k.', markersize = 10, label = 'all receivers')
plt.plot(xsc, ysc, 'b*', markersize = 10, label = 'shot locations')
if plot_topo == True:
if plot_topo == True:
plt.plot(xmt, ymt, 'b', markersize = 10, label = 'measured topo points')
if highlight_measured == True:
plt.plot(xmr, ymr, 'ro', label = 'measured receivers')
@ -505,9 +506,9 @@ class SeisArray(object):
fig = plt.figure()
ax = plt.axes(projection = '3d')
xmt, ymt, zmt = self.getMeasuredTopoLists()
xmr, ymr, zmr = self.getMeasuredReceiverLists()
xin, yin, zin = self.getReceiverLists()
xmt, ymt, zmt = self.getMeasuredTopoLists()
xmr, ymr, zmr = self.getMeasuredReceiverLists()
xin, yin, zin = self.getReceiverLists()
ax.plot(xmt, ymt, zmt, 'b*', markersize = 10, label = 'measured topo points')
ax.plot(xin, yin, zin, 'k.', markersize = 10, label = 'interpolated receivers')
@ -516,8 +517,8 @@ class SeisArray(object):
ax.legend()
return ax
def plotSurface3D(self, ax = None, step = 0.5, method = 'linear'):
from matplotlib import cm
import matplotlib.pyplot as plt
@ -661,8 +662,8 @@ class SeisArray(object):
cPickle.dump(self, outfile, -1)
print('saved SeisArray to file %s'%(filename))
@staticmethod
@staticmethod
def from_pickle(filename):
import cPickle
infile = open(filename, 'rb')

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import sys
import numpy as np
from scipy.interpolate import griddata
@ -28,12 +29,12 @@ class SeisArray(object):
def _generateReceiverlines(self):
'''
Connects the traceIDs to the lineIDs
Connects the traceIDs to the lineIDs
for each receiverline in a dictionary.
'''
for receiver in self._receiverlist:
traceID = int(receiver.split()[0])
lineID = int(receiver.split()[1])
traceID = int(receiver.split()[0])
lineID = int(receiver.split()[1])
if not lineID in self._receiverlines.keys():
self._receiverlines[lineID] = []
self._receiverlines[lineID].append(traceID)
@ -43,16 +44,16 @@ class SeisArray(object):
Fills the three x, y, z dictionaries with measured coordinates
'''
for line in self._getReceiverlist():
traceID = int(line.split()[0])
x = float(line.split()[3])
y = float(line.split()[4])
z = float(line.split()[5])
traceID = int(line.split()[0])
x = float(line.split()[3])
y = float(line.split()[4])
z = float(line.split()[5])
self._receiverCoords[traceID] = (x, y, z)
self._measuredReceivers[traceID] = (x, y, z)
def _setGeophoneNumbers(self):
for line in self._getReceiverlist():
traceID = int(line.split()[0])
traceID = int(line.split()[0])
gphoneNum = float(line.split()[2])
self._geophoneNumbers[traceID] = gphoneNum
@ -93,7 +94,7 @@ class SeisArray(object):
return self._geophoneNumbers[traceID]
def getMeasuredReceivers(self):
return self._measuredReceivers
return self._measuredReceivers
def getMeasuredTopo(self):
return self._measuredTopo
@ -139,11 +140,11 @@ class SeisArray(object):
if self._getReceiverValue(traceID1, coordinate) < self._getReceiverValue(traceID2, coordinate):
direction = +1
return direction
if self._getReceiverValue(traceID1, coordinate) > self._getReceiverValue(traceID2, coordinate):
if self._getReceiverValue(traceID1, coordinate) > self._getReceiverValue(traceID2, coordinate):
direction = -1
return direction
print "Error: Same Value for traceID1 = %s and traceID2 = %s" %(traceID1, traceID2)
def _interpolateMeanDistances(self, traceID1, traceID2, coordinate):
'''
Returns the mean distance between two traceID's depending on the number of geophones in between
@ -186,7 +187,7 @@ class SeisArray(object):
x = float(line[1])
y = float(line[2])
z = float(line[3])
self._measuredTopo[pointID] = (x, y, z)
self._measuredTopo[pointID] = (x, y, z)
def addSourceLocations(self, filename):
'''
@ -202,7 +203,7 @@ class SeisArray(object):
x = float(line[1])
y = float(line[2])
z = float(line[3])
self._sourceLocs[pointID] = (x, y, z)
self._sourceLocs[pointID] = (x, y, z)
def interpZcoords4rec(self, method = 'linear'):
'''
@ -239,9 +240,9 @@ class SeisArray(object):
'''
x = []; y = []; z = []
for traceID in self.getMeasuredReceivers().keys():
x.append(self.getMeasuredReceivers()[traceID][0])
x.append(self.getMeasuredReceivers()[traceID][0])
y.append(self.getMeasuredReceivers()[traceID][1])
z.append(self.getMeasuredReceivers()[traceID][2])
z.append(self.getMeasuredReceivers()[traceID][2])
return x, y, z
def getMeasuredTopoLists(self):
@ -250,9 +251,9 @@ class SeisArray(object):
'''
x = []; y = []; z = []
for pointID in self.getMeasuredTopo().keys():
x.append(self.getMeasuredTopo()[pointID][0])
x.append(self.getMeasuredTopo()[pointID][0])
y.append(self.getMeasuredTopo()[pointID][1])
z.append(self.getMeasuredTopo()[pointID][2])
z.append(self.getMeasuredTopo()[pointID][2])
return x, y, z
def getSourceLocsLists(self):
@ -261,9 +262,9 @@ class SeisArray(object):
'''
x = []; y = []; z = []
for pointID in self.getSourceLocations().keys():
x.append(self.getSourceLocations()[pointID][0])
x.append(self.getSourceLocations()[pointID][0])
y.append(self.getSourceLocations()[pointID][1])
z.append(self.getSourceLocations()[pointID][2])
z.append(self.getSourceLocations()[pointID][2])
return x, y, z
def getAllMeasuredPointsLists(self):
@ -289,7 +290,7 @@ class SeisArray(object):
y.append(self.getReceiverCoordinates()[traceID][1])
z.append(self.getReceiverCoordinates()[traceID][2])
return x, y, z
def _interpolateXY4rec(self):
'''
Interpolates the X and Y coordinates for all receivers.
@ -317,7 +318,7 @@ class SeisArray(object):
:param: phiWE (W, E) extensions of the model in degree
type: tuple
'''
'''
surface = []
elevation = 0.25 # elevate topography so that no source lies above the surface
@ -356,9 +357,9 @@ class SeisArray(object):
progress = float(count) / float(nTotal) * 100
self._update_progress(progress)
if filename is not None:
if filename is not None:
outfile.writelines('%10s\n'%(z + elevation))
return surface
def generateVgrid(self, nTheta = 80, nPhi = 80, nR = 120,
@ -415,7 +416,7 @@ class SeisArray(object):
thetaDelta = abs(thetaN - thetaS) / (nTheta - 1)
phiDelta = abs(phiE - phiW) / (nPhi - 1)
rDelta = abs(rbot - rtop) / (nR - 1)
# create a regular grid including +2 cushion nodes in every direction
thetaGrid = np.linspace(thetaS - thetaDelta, thetaN + thetaDelta, num = nTheta + 2) # +2 cushion nodes
phiGrid = np.linspace(phiW - phiDelta, phiE + phiDelta, num = nPhi + 2) # +2 cushion nodes
@ -455,7 +456,7 @@ class SeisArray(object):
progress = float(count) / float(nTotal) * 100
self._update_progress(progress)
outfile.close()
def exportAll(self, filename = 'interpolated_receivers.out'):
@ -463,7 +464,7 @@ class SeisArray(object):
count = 0
for traceID in self.getReceiverCoordinates().keys():
count += 1
x, y, z = self.getReceiverCoordinates()[traceID]
x, y, z = self.getReceiverCoordinates()[traceID]
recfile_out.writelines('%5s %15s %15s %15s\n' %(traceID, x, y, z))
print "Exported coordinates for %s traces to file > %s" %(count, filename)
recfile_out.close()
@ -472,15 +473,15 @@ class SeisArray(object):
import matplotlib.pyplot as plt
plt.interactive(True)
plt.figure()
xmt, ymt, zmt = self.getMeasuredTopoLists()
xmt, ymt, zmt = self.getMeasuredTopoLists()
xsc, ysc, zsc = self.getSourceLocsLists()
xmr, ymr, zmr = self.getMeasuredReceiverLists()
xrc, yrc, zrc = self.getReceiverLists()
xmr, ymr, zmr = self.getMeasuredReceiverLists()
xrc, yrc, zrc = self.getReceiverLists()
plt.plot(xrc, yrc, 'k.', markersize = 10, label = 'all receivers')
plt.plot(xsc, ysc, 'b*', markersize = 10, label = 'shot locations')
if plot_topo == True:
if plot_topo == True:
plt.plot(xmt, ymt, 'b', markersize = 10, label = 'measured topo points')
if highlight_measured == True:
plt.plot(xmr, ymr, 'ro', label = 'measured receivers')
@ -501,9 +502,9 @@ class SeisArray(object):
fig = plt.figure()
ax = plt.axes(projection = '3d')
xmt, ymt, zmt = self.getMeasuredTopoLists()
xmr, ymr, zmr = self.getMeasuredReceiverLists()
xin, yin, zin = self.getReceiverLists()
xmt, ymt, zmt = self.getMeasuredTopoLists()
xmr, ymr, zmr = self.getMeasuredReceiverLists()
xin, yin, zin = self.getReceiverLists()
ax.plot(xmt, ymt, zmt, 'b*', markersize = 10, label = 'measured topo points')
ax.plot(xin, yin, zin, 'k.', markersize = 10, label = 'interpolated receivers')
@ -512,8 +513,8 @@ class SeisArray(object):
ax.legend()
return ax
def plotSurface3D(self, ax = None, step = 0.5, method = 'linear'):
from matplotlib import cm
import matplotlib.pyplot as plt
@ -657,8 +658,8 @@ class SeisArray(object):
cPickle.dump(self, outfile, -1)
print('saved SeisArray to file %s'%(filename))
@staticmethod
@staticmethod
def from_pickle(filename):
import cPickle
infile = open(filename, 'rb')

View File

@ -1,3 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import numpy as np
from obspy.core import read
@ -31,8 +34,7 @@ class SeismicShot(object):
self.snr = {}
self.snrthreshold = {}
self.timeArray = {}
self.paras = {}
self.paras['shotname'] = obsfile
self.paras = {'shotname': obsfile}
def removeEmptyTraces(self):
traceIDs = []
@ -40,14 +42,14 @@ class SeismicShot(object):
removed = []
for i in range(0, len(coordlist)):
traceIDs.append(int(coordlist[i].split()[0]))
for trace in self.traces:
try:
traceIDs.index(int(trace.stats.channel))
except:
self.traces.remove(trace)
self.traces.remove(trace)
removed.append(int(trace.stats.channel))
if len(removed) > 0:
return removed
@ -55,7 +57,7 @@ class SeismicShot(object):
for trace in self.traces:
if traceID == trace.stats.channel:
self.traces.remove(trace)
# for traceID in TraceIDs:
# traces = [trace for trace in self.traces if int(trace.stats.channel) == traceID]
# if len(traces) is not 1:
@ -153,10 +155,10 @@ class SeismicShot(object):
def getPickError(self, traceID):
pickerror = abs(self.getEarliest(traceID) - self.getLatest(traceID))
if np.isnan(pickerror) == True:
print "SPE is NaN for shot %s, traceID %s"%(self.getShotnumber(), traceID)
return pickerror
if np.isnan(pickerror) == True:
print("SPE is NaN for shot %s, traceID %s"%(self.getShotnumber(), traceID))
return pickerror
def getStreamTraceIDs(self):
traceIDs = []
for trace in self.traces:
@ -178,15 +180,15 @@ class SeismicShot(object):
def getPickwindow(self, traceID):
try:
self.pickwindow[traceID]
except KeyError, e:
self.pickwindow[traceID]
except KeyError as e:
print('no pickwindow for trace %s, set to %s' % (traceID, self.getCut()))
self.setPickwindow(traceID, self.getCut())
return self.pickwindow[traceID]
def getSNR(self, traceID):
return self.snr[traceID]
def getSNRthreshold(self, traceID):
return self.snrthreshold[traceID]
@ -262,17 +264,21 @@ class SeismicShot(object):
return Stream(traces)
else:
self.setPick(traceID, None)
print 'Warning: ambigious or empty traceID: %s' % traceID
print('Warning: ambigious or empty traceID: %s' % traceID)
#raise ValueError('ambigious or empty traceID: %s' % traceID)
def pickTraces(self, traceID, windowsize, folm = 0.6, HosAic = 'hos'): ########## input variables ##########
def pickTraces(self, traceID, pickmethod, windowsize, folm = 0.6, HosAic = 'hos'): ########## input variables ##########
# LOCALMAX NOT IMPLEMENTED!
'''
Intitiate picking for a trace.
:param: traceID
:type: int
:param: pickmethod, use either 'threshold' or 'localmax' method. (localmax not yet implemented 04_08_15)
:type: string
:param: cutwindow (equals HOScf 'cut' variable)
:type: tuple
@ -296,7 +302,13 @@ class SeismicShot(object):
self.timeArray[traceID] = hoscf.getTimeArray()
aiccftime, hoscftime = self.threshold(hoscf, aiccf, windowsize, self.getPickwindow(traceID), folm)
if pickmethod == 'threshold':
aiccftime, hoscftime = self.threshold(hoscf, aiccf, windowsize, self.getPickwindow(traceID), folm)
#setpick = {'threshold':self.threshold,
# 'localmax':self.localmax}
#aiccftime, hoscftime = setpick[pickmethod](hoscf, aiccf, windowsize, pickwindow)
setHosAic = {'hos': hoscftime,
'aic': aiccftime}
@ -316,9 +328,9 @@ class SeismicShot(object):
def threshold(self, hoscf, aiccf, windowsize, pickwindow, folm = 0.6):
'''
Threshold picker, using the local maximum in a pickwindow to find the time at
Threshold picker, using the local maximum in a pickwindow to find the time at
which a fraction of the local maximum is reached for the first time.
:param: hoscf, Higher Order Statistics Characteristic Function
:type: 'Characteristic Function'
@ -344,34 +356,34 @@ class SeismicShot(object):
threshold = folm * max(hoscflist[leftb : rightb]) # combination of local maximum and threshold
m = leftb
while hoscflist[m] < threshold:
m += 1
hoscftime = list(hoscf.getTimeArray())[m]
lb = max(0, m - windowsize[0]) # if window exceeds t = 0
aiccfcut = list(aiccf.getCF())[lb : m + windowsize[1]]
n = aiccfcut.index(min(aiccfcut))
m = lb + n
aiccftime = list(hoscf.getTimeArray())[m]
return aiccftime, hoscftime
def getDistance(self, traceID):
'''
Returns the distance of the receiver with the ID == traceID to the source location (shot location).
Uses getSrcLoc and getRecLoc.
:param: traceID
:type: int
'''
shotX, shotY, shotZ = self.getSrcLoc()
recX, recY, recZ = self.getRecLoc(traceID)
dist = np.sqrt((shotX-recX)**2 + (shotY-recY)**2 + (shotZ-recZ)**2)
if np.isnan(dist) == True:
raise ValueError("Distance is NaN for traceID %s" %traceID)
@ -382,7 +394,7 @@ class SeismicShot(object):
'''
Returns the location (x, y, z) of the receiver with the ID == traceID.
RECEIVEIVER FILE MUST BE SET FIRST, TO BE IMPROVED.
:param: traceID
:type: int
'''
@ -396,7 +408,7 @@ class SeismicShot(object):
y = coordlist[i].split()[2]
z = coordlist[i].split()[3]
return float(x), float(y), float(z)
#print "WARNING: traceID %s not found" % traceID
raise ValueError("traceID %s not found" % traceID)
#return float(self.getSingleStream(traceID)[0].stats.seg2['RECEIVER_LOCATION'])
@ -419,7 +431,7 @@ class SeismicShot(object):
'''
Returns the traceID(s) for a certain distance between source and receiver.
Used for 2D Tomography. TO BE IMPROVED.
:param: distance
:type: real
@ -444,7 +456,7 @@ class SeismicShot(object):
def setManualPicks(self, traceID, picklist): ########## picklist momentan nicht allgemein, nur testweise benutzt ##########
'''
Sets the manual picks for a receiver with the ID == traceID for comparison.
:param: traceID
:type: int
@ -459,8 +471,8 @@ class SeismicShot(object):
if not self.manualpicks.has_key(traceID):
self.manualpicks[traceID] = (mostlikely, earliest, latest)
#else:
# raise KeyError('MANUAL pick to be set more than once for traceID %s' % traceID)
# raise KeyError('MANUAL pick to be set more than once for traceID %s' % traceID)
def setPick(self, traceID, pick): ########## siehe Kommentar ##########
if not traceID in self.pick.keys():
self.pick[traceID] = {}
@ -470,7 +482,7 @@ class SeismicShot(object):
# if not self.pick.has_key(traceID):
# self.getPick(traceID) = picks
# else:
# raise KeyError('pick to be set more than once for traceID %s' % traceID)
# raise KeyError('pick to be set more than once for traceID %s' % traceID)
# def readParameter(self, parfile):
# parlist = open(parfile,'r').readlines()
@ -491,12 +503,13 @@ class SeismicShot(object):
def setSNR(self, traceID): ########## FORCED HOS PICK ##########
'''
Gets the SNR using pylot and then sets the SNR for the traceID.
:param: traceID
:type: int
:param: (tnoise, tgap, tsignal), as used in pylot SNR
'''
from pylot.core.pick.utils import getSNR
tgap = self.getTgap()
@ -526,7 +539,7 @@ class SeismicShot(object):
# def plot2dttc(self, dist_med = 0): ########## 2D ##########
# '''
# Function to plot the traveltime curve for automated picks (AIC & HOS) of a shot. 2d only!
# :param: dist_med (optional)
# :type: 'dictionary'
# '''
@ -534,7 +547,7 @@ class SeismicShot(object):
# plt.interactive('True')
# aictimearray = []
# hostimearray = []
# if dist_med is not 0:
# if dist_med is not 0:
# dist_medarray = []
# i = 1
@ -625,7 +638,7 @@ class SeismicShot(object):
stream = self.getSingleStream(traceID)
stime = getGlobalTimes(stream)[0]
timeaxis = prepTimeAxis(stime, stream[0])
timeaxis = prepTimeAxis(stime, stream[0])
timeaxis -= stime
ax = self.traces4plot[traceID]['ax1']
@ -707,7 +720,7 @@ class SeismicShot(object):
if ax == None:
fig = plt.figure()
ax = plt.axes(projection = '3d')
xsrc, ysrc, zsrc = self.getSrcLoc()
if contour == True:
@ -719,10 +732,10 @@ class SeismicShot(object):
if plotpicks == True:
ax.plot(x, y, z, 'k.')
def plotttc(self, method, *args):
plotmethod = {'2d': self.plot2dttc, '3d': self.plot3dttc}
plotmethod[method](*args)
def matshow(self, ax = None, step = 0.5, method = 'linear', plotRec = True, annotations = True, colorbar = True):

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import math
import numpy as np

View File

@ -1,16 +1,69 @@
import numpy as np
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pylab import *
startpos = []
endpos = []
def generateSurvey(obsdir, shotlist):
from obspy.core import read
from pylot.core.active import seismicshot
shot_dict = {}
for shotnumber in shotlist: # loop over data files
# generate filenames and read manual picks to a list
obsfile = obsdir + str(shotnumber) + '_pickle.dat'
#obsfile = obsdir + str(shotnumber) + '.dat'
if not obsfile in shot_dict.keys():
shot_dict[shotnumber] = []
shot_dict[shotnumber] = seismicshot.SeismicShot(obsfile)
shot_dict[shotnumber].setParameters('shotnumber', shotnumber)
return shot_dict
def setParametersForShots(cutwindow, tmovwind, tsignal, tgap, receiverfile, sourcefile, shot_dict):
for shot in shot_dict.values():
shot.setCut(cutwindow)
shot.setTmovwind(tmovwind)
shot.setTsignal(tsignal)
shot.setTgap(tgap)
shot.setRecfile(receiverfile)
shot.setSourcefile(sourcefile)
shot.setOrder(order = 4)
def removeEmptyTraces(shot_dict):
filename = 'removeEmptyTraces.out'
filename2 = 'updateTraces.out'
outfile = open(filename, 'w')
outfile2 = open(filename2, 'w')
for shot in shot_dict.values():
del_traceIDs = shot.updateTraceList()
removed = shot.removeEmptyTraces()
if removed is not None:
outfile.writelines('shot: %s, removed empty traces: %s\n' %(shot.getShotnumber(), removed))
outfile2.writelines('shot: %s, removed traceID(s) %s because they were not found in the corresponding stream\n' %(shot.getShotnumber(), del_traceIDs))
print '\nremoveEmptyTraces, updateTraces: Finished! See %s and %s for more information of removed traces.\n' %(filename, filename2)
outfile.close()
outfile2.close()
def readParameters(parfile, parameter):
from ConfigParser import ConfigParser
parameterConfig = ConfigParser()
parameterConfig.read('parfile')
value = parameterConfig.get('vars', parameter).split('#')[0]
value = value.replace(" ", "")
value = parameterConfig.get('vars', parameter).split('\t')[0]
return value
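readParameters above reads one entry from a [vars] section of an INI-style parameter file and strips an inline comment. A hypothetical parfile and call showing the expected layout (file name and keys are made up for illustration):

from ConfigParser import ConfigParser  # Python 2 module, as imported above

# hypothetical parfile 'parameters.in':
# [vars]
# cutwindow = (0, 0.2)    # cut times [s]
# tmovwind = 0.3          # moving window length [s]

config = ConfigParser()
config.read('parameters.in')
value = config.get('vars', 'tmovwind').split('#')[0].strip()
print(value)  # '0.3'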
def setArtificialPick(shot_dict, traceID, pick):
for shot in shot_dict.values():
shot.setPick(traceID, pick)
shot.setPickwindow(traceID, shot.getCut())
def fitSNR4dist(shot_dict, shiftdist = 5):
import numpy as np
dists = []
picks = []
snrs = []
@ -31,6 +84,7 @@ def fitSNR4dist(shot_dict, shiftdist = 5):
plotFittedSNR(dists, snrthresholds, snrs)
return fit_fn #### ZU VERBESSERN, sollte fertige funktion wiedergeben
def plotFittedSNR(dists, snrthresholds, snrs):
import matplotlib.pyplot as plt
plt.interactive(True)
@ -42,25 +96,98 @@ def plotFittedSNR(dists, snrthresholds, snrs):
plt.legend()
def setFittedSNR(shot_dict, shiftdist = 5, p1 = 0.004, p2 = -0.004):
import numpy as np
#fit_fn = fitSNR4dist(shot_dict)
fit_fn = np.poly1d([p1, p2])
for shot in shot_dict.values():
for traceID in shot.getTraceIDlist(): ### IMPROVE
shot.setSNRthreshold(traceID, 1/(fit_fn(shot.getDistance(traceID) + shiftdist)**2)) ### s.o.
print "\nsetFittedSNR: Finished setting of fitted SNR-threshold"
print "setFittedSNR: Finished setting of fitted SNR-threshold"
#def linearInterp(dist_med, dist_start
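setFittedSNR above assigns each trace a distance-dependent SNR threshold of 1 / fit_fn(d + shiftdist)^2, where fit_fn is a first-order polynomial in the source-receiver distance d. With the default coefficients, the threshold at a 50 m offset evaluates as follows (numbers are purely illustrative):

import numpy as np

p1, p2 = 0.004, -0.004           # default polynomial coefficients from the diff
shiftdist = 5                    # default distance shift
fit_fn = np.poly1d([p1, p2])

dist = 50.0                      # hypothetical source-receiver distance
snr_threshold = 1 / (fit_fn(dist + shiftdist) ** 2)
print(snr_threshold)             # ~21.4; picks with lower SNR are removed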
def exportFMTOMO(shot_dict, directory = 'FMTOMO_export', sourcefile = 'input_sf.in', ttFileExtension = '.tt'):
count = 0
fmtomo_factor = 1000 # transforming [m/s] -> [km/s]
LatAll = []; LonAll = []; DepthAll = []
srcfile = open(directory + '/' + sourcefile, 'w')
srcfile.writelines('%10s\n' %len(shot_dict)) # number of sources
for shotnumber in getShotlist(shot_dict):
shot = getShotForShotnumber(shot_dict, shotnumber)
ttfilename = str(shotnumber) + ttFileExtension
(x, y, z) = shot.getSrcLoc() # getSrcLoc returns (x, y, z)
srcfile.writelines('%10s %10s %10s\n' %(getAngle(y), getAngle(x), (-1)*z)) # lat, lon, depth
LatAll.append(getAngle(y)); LonAll.append(getAngle(x)); DepthAll.append((-1)*z)
srcfile.writelines('%10s\n' %1) #
srcfile.writelines('%10s %10s %10s\n' %(1, 1, ttfilename))
ttfile = open(directory + '/' + ttfilename, 'w')
traceIDlist = shot.getTraceIDlist()
traceIDlist.sort()
ttfile.writelines(str(countPickedTraces(shot)) + '\n')
for traceID in traceIDlist:
if shot.getPick(traceID) is not None:
pick = shot.getPick(traceID) * fmtomo_factor
delta = shot.getPickError(traceID) * fmtomo_factor
(x, y, z) = shot.getRecLoc(traceID)
ttfile.writelines('%20s %20s %20s %10s %10s\n' %(getAngle(y), getAngle(x), (-1)*z, pick, delta))
LatAll.append(getAngle(y)); LonAll.append(getAngle(x)); DepthAll.append((-1)*z)
count += 1
ttfile.close()
srcfile.close()
print 'Wrote output for %s traces' %count
print 'WARNING: output generated for FMTOMO-obsdata. Obsdata seems to take Lat, Lon, Depth and creates output for FMTOMO as Depth, Lat, Lon'
print 'Dimensions of the seismic Array, transformed for FMTOMO, are Depth(%s, %s), Lat(%s, %s), Lon(%s, %s)'%(
min(DepthAll), max(DepthAll), min(LatAll), max(LatAll), min(LonAll), max(LonAll))
def getShotlist(shot_dict):
shotlist = []
for shot in shot_dict.values():
shotlist.append(shot.getShotnumber())
shotlist.sort()
return shotlist
def getShotForShotnumber(shot_dict, shotnumber):
for shot in shot_dict.values():
if shot.getShotnumber() == shotnumber:
return shot
def getAngle(distance):
'''
Function returns the angle on a Sphere of the radius R = 6371 [km] for a distance [km].
'''
import numpy as np
PI = np.pi
R = 6371.
angle = distance * 180 / (PI * R)
return angle
def countPickedTraces(shot):
numtraces = 0
for traceID in shot.getTraceIDlist():
if shot.getPick(traceID) is not None:
numtraces += 1
print "countPickedTraces: Found %s picked traces in shot number %s" %(numtraces, shot.getShotnumber())
return numtraces
def countAllPickedTraces(shot_dict):
traces = 0
for shot in shot_dict.values():
traces += countPickedTraces(shot)
return traces
def findTracesInRanges(shot_dict, distancebin, pickbin):
'''
Returns traces corresponding to a certain area in a plot with all picks over the distances.
:param: shot_dict, dictionary containing all shots that are used
:type: dictionary
:param: distancebin
:type: tuple, (dist1[m], dist2[m])
:param: pickbin
:type: tuple, (t1[s], t2[s])
'''
shots_found = {}
for shot in shot_dict.values():
@ -72,3 +199,6 @@ def findTracesInRanges(shot_dict, distancebin, pickbin):
shots_found[shot.getShotnumber()].append(traceID)
return shots_found

View File

@ -0,0 +1 @@
# -*- coding: utf-8 -*-

View File

@ -6,7 +6,7 @@ from obspy.signal.trigger import coincidenceTrigger
class CoincidenceTimes():
class CoincidenceTimes(object):
def __init__(self, st, comp='Z', coinum=4, sta=1., lta=10., on=5., off=1.):
_type = 'recstalta'
@ -54,4 +54,4 @@ def main():
if __name__ == '__main__':
main()
main()

View File

@ -1,3 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created August/September 2015.
@ -34,7 +35,7 @@ class Magnitude(object):
:type: integer
'''
assert isinstance(wfstream, Stream), "%s is not a stream object" % str(wfstream)
self.setwfstream(wfstream)
@ -62,7 +63,7 @@ class Magnitude(object):
def setpwin(self, pwin):
self.pwin = pwin
def getiplot(self):
return self.iplot
@ -71,7 +72,7 @@ class Magnitude(object):
def getwapp(self):
return self.wapp
def getw0(self):
return self.w0
@ -103,7 +104,7 @@ class WApp(Magnitude):
'poles': [5.6089 - 5.4978j, -5.6089 - 5.4978j],
'zeros': [0j, 0j],
'gain': 2080,
'sensitivity': 1}
'sensitivity': 1}
stream.simulate(paz_remove=None, paz_simulate=paz_wa)
@ -133,19 +134,19 @@ class WApp(Magnitude):
raw_input()
plt.close(f)
class DCfc(Magnitude):
'''
Method to calculate the source spectrum and to derive from that the plateau
(so-called DC-value) and the corner frequency assuming Aki's omega-square
Method to calculate the source spectrum and to derive from that the plateau
(so-called DC-value) and the corner frequency assuming Aki's omega-square
source model. Has to be derived from instrument corrected displacement traces!
'''
def calcsourcespec(self):
print ("Calculating source spectrum ....")
print ("Calculating source spectrum ....")
self.w0 = None # DC-value
self.fc = None # corner frequency
self.fc = None # corner frequency
stream = self.getwfstream()
tr = stream[0]
@ -155,7 +156,7 @@ class DCfc(Magnitude):
iwin = getsignalwin(t, self.getTo(), self.getpwin())
xdat = tr.data[iwin]
# fft
# fft
fny = tr.stats.sampling_rate / 2
l = len(xdat) / tr.stats.sampling_rate
n = tr.stats.sampling_rate * l # number of fft bins after Bath
@ -167,7 +168,7 @@ class DCfc(Magnitude):
L = (N - 1) / tr.stats.sampling_rate
f = np.arange(0, fny, 1/L)
# remove zero-frequency and frequencies above
# remove zero-frequency and frequencies above
# corner frequency of seismometer (assumed
# to be 100 Hz)
fi = np.where((f >= 1) & (f < 100))
@ -185,15 +186,15 @@ class DCfc(Magnitude):
self.w0 = optspecfit[0]
self.fc = optspecfit[1]
print ("DCfc: Determined DC-value: %e m/Hz, \n" \
"Determined corner frequency: %f Hz" % (self.w0, self.fc))
"Determined corner frequency: %f Hz" % (self.w0, self.fc))
if self.getiplot() > 1:
f1 = plt.figure()
plt.subplot(2,1,1)
# show displacement in mm
plt.plot(t, np.multiply(tr, 1000), 'k')
plt.plot(t[iwin], np.multiply(xdat, 1000), 'g')
plt.plot(t, np.multiply(tr, 1000), 'k')
plt.plot(t[iwin], np.multiply(xdat, 1000), 'g')
plt.title('Seismogram and P pulse, station %s' % tr.stats.station)
plt.xlabel('Time since %s' % tr.stats.starttime)
plt.ylabel('Displacement [mm]')
@ -214,8 +215,8 @@ class DCfc(Magnitude):
def synthsourcespec(f, omega0, fcorner):
'''
Calculates synthetic source spectrum from given plateau and corner
frequency assuming Akis omega-square model.
Calculates synthetic source spectrum from given plateau and corner
frequency assuming Akis omega-square model.
:param: f, frequencies
:type: array
@ -226,7 +227,7 @@ def synthsourcespec(f, omega0, fcorner):
:param: fcorner, corner frequency of source spectrum
:type: float
'''
#ssp = omega0 / (pow(2, (1 + f / fcorner)))
ssp = omega0 / (1 + pow(2, (f / fcorner)))
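synthsourcespec evaluates the model spectrum for a given plateau omega0 and corner frequency fcorner; DCfc fits this curve to the observed displacement spectrum to obtain w0 and fc. Evaluating the formula as written in the hunk above (illustrative numbers only):

import numpy as np

def synthsourcespec(f, omega0, fcorner):
    # model spectrum exactly as in the line above
    return omega0 / (1 + np.power(2, (f / fcorner)))

f = np.array([0.5, 2.0, 10.0, 40.0])        # Hz, illustrative
print(synthsourcespec(f, omega0=1e-4, fcorner=5.0))
# the level is set by omega0, the decay above fcorner by the 2**(f/fcorner) term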

View File

@ -1,3 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created Oct/Nov 2014
@ -120,7 +121,7 @@ class CharacteristicFunction(object):
def getTimeArray(self):
incr = self.getIncrement()
self.TimeArray = np.arange(0, len(self.getCF()) * incr, incr) + self.getCut()[0]
self.TimeArray = np.arange(0, len(self.getCF()) * incr, incr) + self.getCut()[0]
return self.TimeArray
def getFnoise(self):
@ -176,7 +177,7 @@ class CharacteristicFunction(object):
h2 = hh[1].copy()
hh[0].data = h1.data[int(start):int(stop)]
hh[1].data = h2.data[int(start):int(stop)]
data = hh
data = hh
return data
elif len(self.orig_data) == 3:
if self.cut[0] == 0 and self.cut[1] == 0:
@ -197,12 +198,12 @@ class CharacteristicFunction(object):
hh[0].data = h1.data[int(start):int(stop)]
hh[1].data = h2.data[int(start):int(stop)]
hh[2].data = h3.data[int(start):int(stop)]
data = hh
data = hh
return data
else:
data = self.orig_data.copy()
return data
def calcCF(self, data=None):
self.cf = data
@ -289,7 +290,7 @@ class HOScf(CharacteristicFunction):
LTA[j] = lta / np.power(lta1, 1.5)
elif self.getOrder() == 4:
LTA[j] = lta / np.power(lta1, 2)
nn = np.isnan(LTA)
if len(nn) > 1:
LTA[nn] = 0
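The normalisation above is what turns the running moment sums into a higher-order-statistics CF: assuming lta and lta1 are the running fourth and second moments, the order-4 branch computes a kurtosis-like ratio m4 / m2^2 (order 3 analogously gives m3 / m2^1.5). A direct, non-recursive version of the order-4 value for a single window (sketch only):

import numpy as np

def kurtosis_cf_value(window):
    # fourth moment over squared second moment, cf. the order-4 branch above
    x = np.asarray(window, dtype=float)
    m2 = np.mean(x ** 2)
    m4 = np.mean(x ** 4)
    return m4 / m2 ** 2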
@ -319,7 +320,7 @@ class ARZcf(CharacteristicFunction):
cf = np.zeros(len(xnp))
loopstep = self.getARdetStep()
arcalci = ldet + self.getOrder() #AR-calculation index
for i in range(ldet + self.getOrder(), tend - lpred - 1):
for i in range(ldet + self.getOrder(), tend - lpred - 1):
if i == arcalci:
#determination of AR coefficients
#to speed up calculation, AR-coefficients are calculated only every i+loopstep[1]!
@ -366,7 +367,7 @@ class ARZcf(CharacteristicFunction):
rhs = np.zeros(self.getOrder())
for k in range(0, self.getOrder()):
for i in range(rind, ldet+1):
ki = k + 1
ki = k + 1
rhs[k] = rhs[k] + data[i] * data[i - ki]
#recursive calculation of data array (second sum at left part of eq. 6.5 in Kueperkoch et al. 2012)
@ -386,7 +387,7 @@ class ARZcf(CharacteristicFunction):
def arPredZ(self, data, arpara, rind, lpred):
'''
Function to predict waveform, assuming an autoregressive process of order
p (=size(arpara)), with AR parameters arpara calculated in arDet. After
p (=size(arpara)), with AR parameters arpara calculated in arDet. After
Thomas Meier (CAU), published in Kueperkoch et al. (2012).
:param: data, time series to be predicted
:type: array
@ -404,9 +405,9 @@ class ARZcf(CharacteristicFunction):
'''
#be sure of the summation indeces
if rind < len(arpara):
rind = len(arpara)
rind = len(arpara)
if rind > len(data) - lpred :
rind = len(data) - lpred
rind = len(data) - lpred
if lpred < 1:
lpred = 1
if lpred > len(data) - 2:
@ -426,7 +427,7 @@ class ARHcf(CharacteristicFunction):
def calcCF(self, data):
print 'Calculating AR-prediction error from both horizontal traces ...'
xnp = self.getDataArray(self.getCut())
n0 = np.isnan(xnp[0].data)
if len(n0) > 1:
@ -434,7 +435,7 @@ class ARHcf(CharacteristicFunction):
n1 = np.isnan(xnp[1].data)
if len(n1) > 1:
xnp[1].data[n1] = 0
#some parameters needed
#add noise to time series
xenoise = xnp[0].data + np.random.normal(0.0, 1.0, len(xnp[0].data)) * self.getFnoise() * max(abs(xnp[0].data))
@ -445,7 +446,7 @@ class ARHcf(CharacteristicFunction):
#Time2: length of AR-prediction window [sec]
ldet = int(round(self.getTime1() / self.getIncrement())) #length of AR-determination window [samples]
lpred = int(np.ceil(self.getTime2() / self.getIncrement())) #length of AR-prediction window [samples]
cf = np.zeros(len(xenoise))
loopstep = self.getARdetStep()
arcalci = lpred + self.getOrder() - 1 #AR-calculation index
@ -519,7 +520,7 @@ class ARHcf(CharacteristicFunction):
def arPredH(self, data, arpara, rind, lpred):
'''
Function to predict waveform, assuming an autoregressive process of order
p (=size(arpara)), with AR parameters arpara calculated in arDet. After
p (=size(arpara)), with AR parameters arpara calculated in arDet. After
Thomas Meier (CAU), published in Kueperkoch et al. (2012).
:param: data, horizontal component seismograms to be predicted
:type: structured array
@ -562,7 +563,7 @@ class AR3Ccf(CharacteristicFunction):
def calcCF(self, data):
print 'Calculating AR-prediction error from all 3 components ...'
xnp = self.getDataArray(self.getCut())
n0 = np.isnan(xnp[0].data)
if len(n0) > 1:
@ -573,7 +574,7 @@ class AR3Ccf(CharacteristicFunction):
n2 = np.isnan(xnp[2].data)
if len(n2) > 1:
xnp[2].data[n2] = 0
#some parameters needed
#add noise to time series
xenoise = xnp[0].data + np.random.normal(0.0, 1.0, len(xnp[0].data)) * self.getFnoise() * max(abs(xnp[0].data))
@ -585,7 +586,7 @@ class AR3Ccf(CharacteristicFunction):
#Time2: length of AR-prediction window [sec]
ldet = int(round(self.getTime1() / self.getIncrement())) #length of AR-determination window [samples]
lpred = int(np.ceil(self.getTime2() / self.getIncrement())) #length of AR-prediction window [samples]
cf = np.zeros(len(xenoise))
loopstep = self.getARdetStep()
arcalci = ldet + self.getOrder() - 1 #AR-calculation index
@ -620,7 +621,7 @@ class AR3Ccf(CharacteristicFunction):
Function to calculate AR parameters arpara after Thomas Meier (CAU), published
in Kueperkoch et al. (2012). This function solves SLE using the Moore-
Penrose inverse, i.e. the least-squares approach. "data" is a structured array.
AR parameters are calculated based on both horizontal components and vertical
AR parameters are calculated based on both horizontal components and vertical
componant.
:param: data, horizontal component seismograms to calculate AR parameters from
:type: structured array
@ -662,7 +663,7 @@ class AR3Ccf(CharacteristicFunction):
def arPred3C(self, data, arpara, rind, lpred):
'''
Function to predict waveform, assuming an autoregressive process of order
p (=size(arpara)), with AR parameters arpara calculated in arDet3C. After
p (=size(arpara)), with AR parameters arpara calculated in arDet3C. After
Thomas Meier (CAU), published in Kueperkoch et al. (2012).
:param: data, horizontal and vertical component seismograms to be predicted
:type: structured array

View File

@ -312,7 +312,7 @@ class PragPicker(AutoPicking):
else:
for i in range(1, len(self.cf)):
if i > ismooth:
ii1 = i - ismooth;
ii1 = i - ismooth
cfsmooth[i] = cfsmooth[i - 1] + (self.cf[i] - self.cf[ii1]) / ismooth
else:
cfsmooth[i] = np.mean(self.cf[1 : i])

View File

@ -1 +1,2 @@
#
# -*- coding: utf-8 -*-
#

View File

@ -317,29 +317,29 @@ def autopickstation(wfstream, pickparam):
data = Data()
[corzdat, restflag] = data.restituteWFData(invdir, zdat)
if restflag == 1:
# integrate to displacement
corintzdat = integrate.cumtrapz(corzdat[0], None, corzdat[0].stats.delta)
# class needs stream object => build it
z_copy = zdat.copy()
z_copy[0].data = corintzdat
# largest detectable period == window length
# after P pulse for calculating source spectrum
winzc = (1 / bpz2[0]) * z_copy[0].stats.sampling_rate
impickP = mpickP * z_copy[0].stats.sampling_rate
wfzc = z_copy[0].data[impickP : impickP + winzc]
# calculate spectrum using only first cycles of
# waveform after P onset!
zc = crossings_nonzero_all(wfzc)
if np.size(zc) == 0:
print ("Something is wrong with the waveform, " \
"no zero crossings derived!")
print ("Cannot calculate source spectrum!")
else:
calcwin = (zc[3] - zc[0]) * z_copy[0].stats.delta
# calculate source spectrum and get w0 and fc
specpara = DCfc(z_copy, mpickP, calcwin, iplot)
w0 = specpara.getw0()
fc = specpara.getfc()
# integrate to displacement
corintzdat = integrate.cumtrapz(corzdat[0], None, corzdat[0].stats.delta)
# class needs stream object => build it
z_copy = zdat.copy()
z_copy[0].data = corintzdat
# largest detectable period == window length
# after P pulse for calculating source spectrum
winzc = (1 / bpz2[0]) * z_copy[0].stats.sampling_rate
impickP = mpickP * z_copy[0].stats.sampling_rate
wfzc = z_copy[0].data[impickP : impickP + winzc]
# calculate spectrum using only first cycles of
# waveform after P onset!
zc = crossings_nonzero_all(wfzc)
if np.size(zc) == 0:
print ("Something is wrong with the waveform, " \
"no zero crossings derived!")
print ("Cannot calculate source spectrum!")
else:
calcwin = (zc[3] - zc[0]) * z_copy[0].stats.delta
# calculate source spectrum and get w0 and fc
specpara = DCfc(z_copy, mpickP, calcwin, iplot)
w0 = specpara.getw0()
fc = specpara.getfc()
print ("autopickstation: P-weight: %d, SNR: %f, SNR[dB]: %f, " \
"Polarity: %s" % (Pweight, SNRP, SNRPdB, FM))
@ -560,7 +560,7 @@ def autopickstation(wfstream, pickparam):
hdat += ndat
h_copy = hdat.copy()
[cordat, restflag] = data.restituteWFData(invdir, h_copy)
# calculate WA-peak-to-peak amplitude
# calculate WA-peak-to-peak amplitude
# using subclass WApp of superclass Magnitude
if restflag == 1:
if Sweight < 4:
@ -591,7 +591,7 @@ def autopickstation(wfstream, pickparam):
h_copy = hdat.copy()
[cordat, restflag] = data.restituteWFData(invdir, h_copy)
if restflag == 1:
# calculate WA-peak-to-peak amplitude
# calculate WA-peak-to-peak amplitude
# using subclass WApp of superclass Magnitude
wapp = WApp(cordat, mpickP, mpickP + sstop + (0.5 * (mpickP \
+ sstop)), iplot)

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# -*- coding: utf-8 -*-
"""
@ -93,7 +94,7 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=None, stealthMode = False):
T0 = np.mean(np.diff(zc)) * X[0].stats.delta # this is half wave length
# T0/4 is assumed as time difference between most likely and earliest possible pick!
EPick = Pick1 - T0 / 2
# get symmetric pick error as mean from earliest and latest possible pick
# by weighting latest possible pick two times earliest possible pick
@ -495,9 +496,9 @@ def wadaticheck(pickdic, dttolerance, iplot):
if len(SPtimes) >= 3:
# calculate slope
p1 = np.polyfit(Ppicks, SPtimes, 1)
wdfit = np.polyval(p1, Ppicks)
# calculate slope
p1 = np.polyfit(Ppicks, SPtimes, 1)
wdfit = np.polyval(p1, Ppicks)
wfitflag = 0
# calculate vp/vs ratio before check
@ -534,40 +535,40 @@ def wadaticheck(pickdic, dttolerance, iplot):
pickdic[key]['S']['marked'] = marker
if len(checkedPpicks) >= 3:
# calculate new slope
p2 = np.polyfit(checkedPpicks, checkedSPtimes, 1)
wdfit2 = np.polyval(p2, checkedPpicks)
# calculate new slope
p2 = np.polyfit(checkedPpicks, checkedSPtimes, 1)
wdfit2 = np.polyval(p2, checkedPpicks)
# calculate vp/vs ratio after check
cvpvsr = p2[0] + 1
print ("wadaticheck: Average Vp/Vs ratio after check: %f" % cvpvsr)
print ("wadatacheck: Skipped %d S pick(s)" % ibad)
# calculate vp/vs ratio after check
cvpvsr = p2[0] + 1
print ("wadaticheck: Average Vp/Vs ratio after check: %f" % cvpvsr)
print ("wadatacheck: Skipped %d S pick(s)" % ibad)
else:
print ("###############################################")
print ("wadatacheck: Not enough checked S-P times available!")
print ("Skip Wadati check!")
print ("###############################################")
print ("wadatacheck: Not enough checked S-P times available!")
print ("Skip Wadati check!")
checkedonsets = pickdic
else:
print ("wadaticheck: Not enough S-P times available for reliable regression!")
print ("wadaticheck: Not enough S-P times available for reliable regression!")
print ("Skip wadati check!")
wfitflag = 1
# plot results
if iplot > 1:
plt.figure(iplot)
f1, = plt.plot(Ppicks, SPtimes, 'ro')
plt.figure(iplot)
f1, = plt.plot(Ppicks, SPtimes, 'ro')
if wfitflag == 0:
f2, = plt.plot(Ppicks, wdfit, 'k')
f3, = plt.plot(checkedPpicks, checkedSPtimes, 'ko')
f4, = plt.plot(checkedPpicks, wdfit2, 'g')
plt.title('Wadati-Diagram, %d S-P Times, Vp/Vs(raw)=%5.2f,' \
'Vp/Vs(checked)=%5.2f' % (len(SPtimes), vpvsr, cvpvsr))
plt.legend([f1, f2, f3, f4], ['Skipped S-Picks', 'Wadati 1', \
'Reliable S-Picks', 'Wadati 2'], loc='best')
f2, = plt.plot(Ppicks, wdfit, 'k')
f3, = plt.plot(checkedPpicks, checkedSPtimes, 'ko')
f4, = plt.plot(checkedPpicks, wdfit2, 'g')
plt.title('Wadati-Diagram, %d S-P Times, Vp/Vs(raw)=%5.2f,' \
'Vp/Vs(checked)=%5.2f' % (len(SPtimes), vpvsr, cvpvsr))
plt.legend([f1, f2, f3, f4], ['Skipped S-Picks', 'Wadati 1', \
'Reliable S-Picks', 'Wadati 2'], loc='best')
else:
plt.title('Wadati-Diagram, %d S-P Times' % len(SPtimes))
plt.title('Wadati-Diagram, %d S-P Times' % len(SPtimes))
plt.ylabel('S-P Times [s]')
plt.xlabel('P Times [s]')
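The Wadati check above regresses S-P times against the P onset times; for a common origin time t0, tS - tP = (Vp/Vs - 1) * (tP - t0), so the fitted slope plus one is the average Vp/Vs ratio, which is exactly what vpvsr = p1[0] + 1 and cvpvsr = p2[0] + 1 compute. A toy version of that relation (synthetic times, not data from the commit):

import numpy as np

vpvs_true = 1.73
tp = np.array([3.0, 5.0, 8.0, 12.0])       # hypothetical P onset times [s], t0 = 0
sp = (vpvs_true - 1.0) * tp                # corresponding S-P times [s]

slope = np.polyfit(tp, sp, 1)[0]
print(slope + 1.0)                         # recovers ~1.73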
@ -581,8 +582,8 @@ def wadaticheck(pickdic, dttolerance, iplot):
def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
'''
Function to detect spuriously picked noise peaks.
Uses RMS trace of all 3 components (if available) to determine,
how many samples [per cent] after P onset are below certain
Uses RMS trace of all 3 components (if available) to determine,
how many samples [per cent] after P onset are below certain
threshold, calculated from noise level times noise factor.
: param: X, time series (seismogram)
@ -614,7 +615,7 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
print ("Checking signal length ...")
if len(X) > 1:
# all three components available
# all three components available
# make sure, all components have equal lengths
ilen = min([len(X[0].data), len(X[1].data), len(X[2].data)])
x1 = X[0][0:ilen]
@ -641,7 +642,7 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
numoverthr = len(np.where(rms[isignal] >= minsiglevel)[0])
if numoverthr >= minnum:
print ("checksignallength: Signal reached required length.")
print ("checksignallength: Signal reached required length.")
returnflag = 1
else:
print ("checksignallength: Signal shorter than required minimum signal length!")
@ -651,7 +652,7 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
if iplot == 2:
plt.figure(iplot)
p1, = plt.plot(t,rms, 'k')
p1, = plt.plot(t,rms, 'k')
p2, = plt.plot(t[inoise], rms[inoise], 'c')
p3, = plt.plot(t[isignal],rms[isignal], 'r')
p4, = plt.plot([t[isignal[0]], t[isignal[len(isignal)-1]]], \
@ -731,27 +732,27 @@ def checkPonsets(pickdic, dttolerance, iplot):
badjkmarker = 'badjkcheck'
for i in range(0, len(goodstations)):
# mark P onset as checked and keep P weight
pickdic[goodstations[i]]['P']['marked'] = goodmarker
pickdic[goodstations[i]]['P']['marked'] = goodmarker
for i in range(0, len(badstations)):
# mark P onset and downgrade P weight to 9
# (not used anymore)
pickdic[badstations[i]]['P']['marked'] = badmarker
pickdic[badstations[i]]['P']['weight'] = 9
# mark P onset and downgrade P weight to 9
# (not used anymore)
pickdic[badstations[i]]['P']['marked'] = badmarker
pickdic[badstations[i]]['P']['weight'] = 9
for i in range(0, len(badjkstations)):
# mark P onset and downgrade P weight to 9
# (not used anymore)
pickdic[badjkstations[i]]['P']['marked'] = badjkmarker
pickdic[badjkstations[i]]['P']['weight'] = 9
# mark P onset and downgrade P weight to 9
# (not used anymore)
pickdic[badjkstations[i]]['P']['marked'] = badjkmarker
pickdic[badjkstations[i]]['P']['weight'] = 9
checkedonsets = pickdic
if iplot > 1:
p1, = plt.plot(np.arange(0, len(Ppicks)), Ppicks, 'r+', markersize=14)
p1, = plt.plot(np.arange(0, len(Ppicks)), Ppicks, 'r+', markersize=14)
p2, = plt.plot(igood, np.array(Ppicks)[igood], 'g*', markersize=14)
p3, = plt.plot([0, len(Ppicks) - 1], [pmedian, pmedian], 'g', \
linewidth=2)
for i in range(0, len(Ppicks)):
plt.text(i, Ppicks[i] + 0.2, stations[i])
plt.text(i, Ppicks[i] + 0.2, stations[i])
plt.xlabel('Number of P Picks')
plt.ylabel('Onset Time [s] from 1.1.1970')
@ -791,37 +792,37 @@ def jackknife(X, phi, h):
g = len(X) / h
if type(g) is not int:
print ("jackknife: Cannot divide quantity X in equal sized subgroups!")
print ("jackknife: Cannot divide quantity X in equal sized subgroups!")
print ("Choose another size for subgroups!")
return PHI_jack, PHI_pseudo, PHI_sub
else:
# estimator of undisturbed spot check
if phi == 'MEA':
phi_sc = np.mean(X)
# estimator of undisturbed spot check
if phi == 'MEA':
phi_sc = np.mean(X)
elif phi == 'VAR':
phi_sc = np.var(X)
phi_sc = np.var(X)
elif phi == 'MED':
phi_sc = np.median(X)
phi_sc = np.median(X)
# estimators of subgroups
# estimators of subgroups
PHI_pseudo = []
PHI_sub = []
for i in range(0, g - 1):
# subgroup i, remove i-th sample
xx = X[:]
del xx[i]
# calculate estimators of disturbed spot check
if phi == 'MEA':
phi_sub = np.mean(xx)
elif phi == 'VAR':
phi_sub = np.var(xx)
elif phi == 'MED':
phi_sub = np.median(xx)
# subgroup i, remove i-th sample
xx = X[:]
del xx[i]
# calculate estimators of disturbed spot check
if phi == 'MEA':
phi_sub = np.mean(xx)
elif phi == 'VAR':
phi_sub = np.var(xx)
elif phi == 'MED':
phi_sub = np.median(xx)
PHI_sub.append(phi_sub)
# pseudo values
phi_pseudo = g * phi_sc - ((g - 1) * phi_sub)
PHI_pseudo.append(phi_pseudo)
PHI_sub.append(phi_sub)
# pseudo values
phi_pseudo = g * phi_sc - ((g - 1) * phi_sub)
PHI_pseudo.append(phi_pseudo)
# jackknife estimator
PHI_jack = np.mean(PHI_pseudo)
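The block above forms pseudo-values phi_pseudo = g * phi_sc - (g - 1) * phi_sub from the full-sample estimator phi_sc and the leave-one-out estimators phi_sub, then averages them into the jackknife estimate PHI_jack. A compact numeric illustration for the mean (sample values made up):

import numpy as np

X = [2.0, 4.0, 6.0, 8.0]            # hypothetical sample, g = 4 subgroups of size 1
g = len(X)
phi_sc = np.mean(X)                 # estimator on the undisturbed sample: 5.0

PHI_pseudo = []
for i in range(g):
    xx = X[:i] + X[i + 1:]          # remove the i-th observation
    phi_sub = np.mean(xx)           # leave-one-out estimator
    PHI_pseudo.append(g * phi_sc - (g - 1) * phi_sub)

print(np.mean(PHI_pseudo))          # jackknife estimate of the mean: 5.0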
@ -901,17 +902,17 @@ def checkZ4S(X, pick, zfac, checkwin, iplot):
# vertical P-coda level must exceed horizontal P-coda level
# zfac times encodalevel
if zcodalevel < minsiglevel:
print ("checkZ4S: Maybe S onset? Skip this P pick!")
print ("checkZ4S: Maybe S onset? Skip this P pick!")
else:
print ("checkZ4S: P onset passes checkZ4S test!")
returnflag = 1
if iplot > 1:
te = np.arange(0, edat[0].stats.npts / edat[0].stats.sampling_rate,
te = np.arange(0, edat[0].stats.npts / edat[0].stats.sampling_rate,
edat[0].stats.delta)
tn = np.arange(0, ndat[0].stats.npts / ndat[0].stats.sampling_rate,
tn = np.arange(0, ndat[0].stats.npts / ndat[0].stats.sampling_rate,
ndat[0].stats.delta)
plt.plot(tz, z / max(z), 'k')
plt.plot(tz, z / max(z), 'k')
plt.plot(tz[isignal], z[isignal] / max(z), 'r')
plt.plot(te, edat[0].data / max(edat[0].data) + 1, 'k')
plt.plot(te[isignal], edat[0].data[isignal] / max(edat[0].data) + 1, 'r')

View File

@ -1 +1 @@
# -*- coding: utf-8 -*-

View File

@ -208,8 +208,7 @@ class FilterOptions(object):
def parseFilterOptions(self):
if self.getFilterType():
robject = {'type':self.getFilterType()}
robject['corners'] = self.getOrder()
robject = {'type': self.getFilterType(), 'corners': self.getOrder()}
if len(self.getFreq()) > 1:
robject['freqmin'] = self.getFreq()[0]
robject['freqmax'] = self.getFreq()[1]
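The condensed dictionary above is meant to be expanded straight into an ObsPy filter call, since its keys match the Stream.filter keyword arguments; roughly (a sketch, assuming a bandpass configuration):

from obspy import read

st = read()                                    # ObsPy example stream
robject = {'type': 'bandpass', 'corners': 4,
           'freqmin': 1.0, 'freqmax': 10.0}    # as assembled by parseFilterOptions
st.filter(**robject)                           # 'type' plus the remaining kwargs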

View File

@ -1 +1,2 @@
# -*- coding: utf-8 -*-
from pylot.core.util.version import get_git_version as _getVersionString

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
'''
Created on 10.11.2014
@ -23,4 +24,4 @@ class Test(unittest.TestCase):
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
unittest.main()

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
'''
Created on 10.11.2014
@ -15,4 +16,4 @@ class Test(unittest.TestCase):
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
unittest.main()

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import sys
from PySide.QtCore import QThread, Signal

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# -*- coding: utf-8 -*-

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, time
from PySide.QtGui import QApplication

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import matplotlib

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, time
from PySide.QtGui import QApplication
@ -8,4 +9,4 @@ app = QApplication(sys.argv)
win = PropertiesDlg()
win.show()
app.exec_()
app.exec_()

View File

@ -1,4 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, time
from PySide.QtGui import QApplication
@ -9,7 +11,7 @@ dialogs = [FilterOptionsDialog, PropertiesDlg, HelpForm]
app = QApplication(sys.argv)
for dlg in dialogs:
win = dlg()
win.show()
time.sleep(1)
win.destroy()
win = dlg()
win.show()
time.sleep(1)
win.destroy()