general code clean-up

Sebastian Wehling-Benatelli 2015-10-19 05:32:10 +02:00
parent b49206407a
commit 0fa701a878
30 changed files with 270 additions and 249 deletions

View File

@ -8,11 +8,11 @@ import glob
import matplotlib.pyplot as plt
from obspy.core import read
from pylot.core.util import _getVersionString
from pylot.core.read.data import Data
from pylot.core.read.inputs import AutoPickParameter
from pylot.core.util.structure import DATASTRUCTURE
from pylot.core.pick.autopick import autopickevent
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()

View File

@ -1,10 +1,10 @@
<RCC>
<qresource>
<file>icons/pylot.ico</file>
<file>icons/pylot.png</file>
<file>icons/printer.png</file>
<file>icons/delete.png</file>
<file>icons/key_E.png</file>
<file>icons/key_N.png</file>
<file>icons/key_P.png</file>
<file>icons/key_Q.png</file>
@ -14,7 +14,7 @@
<file>icons/key_U.png</file>
<file>icons/key_V.png</file>
<file>icons/key_W.png</file>
<file>icons/key_Z.png</file>
<file>icons/filter.png</file>
<file>icons/sync.png</file>
<file>icons/zoom_0.png</file>

View File

@ -0,0 +1 @@
# -*- coding: utf-8 -*-

View File

@ -1 +1,2 @@
# -*- coding: utf-8 -*-
__author__ = 'sebastianw'

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import sys
import numpy as np
from pylot.core.active import seismicshot
@ -14,11 +15,11 @@ class Survey(object):
self._sourcefile = sourcefile
self._obsdir = path
self._generateSurvey()
if useDefaultParas == True:
self.setParametersForShots()
self._removeAllEmptyTraces()
self._updateShots()
def _generateSurvey(self):
from obspy.core import read
@ -65,7 +66,7 @@ class Survey(object):
if removed is not None:
if count == 0: outfile = open(filename, 'w')
count += 1
outfile.writelines('shot: %s, removed empty traces: %s\n'
%(shot.getShotnumber(), removed))
print ("\nremoveEmptyTraces: Finished! Removed %d traces" %count)
if count > 0:
@ -83,7 +84,7 @@ class Survey(object):
count += 1
countTraces += len(del_traceIDs)
outfile.writelines("shot: %s, removed traceID(s) %s because "
"they were not found in the corresponding stream\n"
"they were not found in the corresponding stream\n"
%(shot.getShotnumber(), del_traceIDs))
print ("\nupdateShots: Finished! Updated %d shots and removed "
@ -121,7 +122,7 @@ class Survey(object):
shot.pickTraces(traceID, windowsize, folm, HosAic) # picker
# ++ TEST: set and check SNR before adding to distance bin ############################
shot.setSNR(traceID)
#if shot.getSNR(traceID)[0] < snrthreshold:
if shot.getSNR(traceID)[0] < shot.getSNRthreshold(traceID):
shot.removePick(traceID)
@ -144,8 +145,8 @@ class Survey(object):
def countAllTraces(self):
numtraces = 0
for line in self.getShotlist():
for line in self.getReceiverlist():
for shot in self.getShotlist():
for rec in self.getReceiverlist():
numtraces += 1
return numtraces
@ -180,7 +181,7 @@ class Survey(object):
def getReceiverfile(self):
return self._recfile
def getPath(self):
return self._obsdir
@ -228,7 +229,7 @@ class Survey(object):
(x, y, z) = shot.getSrcLoc() # getSrcLoc returns (x, y, z)
srcfile.writelines('%10s %10s %10s\n' %(getAngle(y), getAngle(x), (-1)*z)) # lat, lon, depth
LatAll.append(getAngle(y)); LonAll.append(getAngle(x)); DepthAll.append((-1)*z)
srcfile.writelines('%10s\n' %1) #
srcfile.writelines('%10s %10s %10s\n' %(1, 1, ttfilename))
ttfile = open(directory + '/' + ttfilename, 'w')
traceIDlist = shot.getTraceIDlist()
@ -243,7 +244,7 @@ class Survey(object):
LatAll.append(getAngle(y)); LonAll.append(getAngle(x)); DepthAll.append((-1)*z)
count += 1
ttfile.close()
srcfile.close()
print 'Wrote output for %s traces' %count
print 'WARNING: output generated for FMTOMO-obsdata. Obsdata seems to take Lat, Lon, Depth and creates output for FMTOMO as Depth, Lat, Lon'
print 'Dimensions of the seismic Array, transformed for FMTOMO, are Depth(%s, %s), Lat(%s, %s), Lon(%s, %s)'%(
@ -271,7 +272,7 @@ class Survey(object):
for shot in self.data.values():
for traceID in shot.getTraceIDlist():
if plotDeleted == False:
if shot.getPick(traceID) is not None:
dist.append(shot.getDistance(traceID))
pick.append(shot.getPick(traceID))
snrloglist.append(math.log10(shot.getSNR(traceID)[0]))
@ -303,7 +304,7 @@ class Survey(object):
return ax
def _update_progress(self, shotname, tend, progress):
sys.stdout.write("Working on shot %s. ETC is %02d:%02d:%02d [%2.2f %%]\r"
%(shotname, tend.hour, tend.minute, tend.second, progress))
sys.stdout.flush()
@ -313,8 +314,8 @@ class Survey(object):
cPickle.dump(self, outfile, -1)
print('saved Survey to file %s'%(filename))
@staticmethod
def from_pickle(filename):
import cPickle
infile = open(filename, 'rb')
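
The pickle/from_pickle pair shown in the last hunk persists the whole Survey object to disk and restores it. A minimal sketch of that round trip, assuming a hypothetical SimpleSurvey class and Python 3's pickle in place of the cPickle module used here:

import pickle

class SimpleSurvey(object):
    """Toy stand-in for Survey, only to show the pickle round trip."""
    def __init__(self, shots):
        self.shots = shots

    def pickle(self, filename):
        # dump the whole object with the highest protocol (-1), as in Survey.pickle
        with open(filename, 'wb') as outfile:
            pickle.dump(self, outfile, -1)
        print('saved SimpleSurvey to file %s' % filename)

    @staticmethod
    def from_pickle(filename):
        # restore the object from disk; returns a SimpleSurvey instance
        with open(filename, 'rb') as infile:
            return pickle.load(infile)

survey = SimpleSurvey(shots={'shot1': [1, 2, 3]})
survey.pickle('survey_demo.pickle')
restored = SimpleSurvey.from_pickle('survey_demo.pickle')
print(restored.shots)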

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import numpy as np
def vgrids2VTK(inputfile = 'vgrids.in', outputfile = 'vgrids.vtk'):
@ -73,7 +74,7 @@ def vgrids2VTK(inputfile = 'vgrids.in', outputfile = 'vgrids.vtk'):
dX = getDistance(np.rad2deg(dPhi))
dY = getDistance(np.rad2deg(dTheta))
nPoints = nX * nY * nZ
dZ = dR
@ -114,7 +115,7 @@ def rays2VTK(fnin, fdirout = './vtk_files/', nthPoint = 50):
R = 6371.
distance = angle / 180 * (PI * R)
return distance
infile = open(fnin, 'r')
R = 6371
rays = {}
@ -124,7 +125,7 @@ def rays2VTK(fnin, fdirout = './vtk_files/', nthPoint = 50):
### NOTE: rays.dat seems to be in km and radians
while True:
raynumber += 1
firstline = infile.readline()
if firstline == '': break # break at EOF
shotnumber = int(firstline.split()[1])

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import sys
from obspy import read
from obspy import Stream
@ -28,7 +29,7 @@ if rockeskyll == True:
obsdir = "/rscratch/minos22/marcel/flachseismik/rockeskyll_200615_270615/"
filename = 'survey_rockes.pickle'
else:
receiverfile = "Geophone_interpoliert_GZB"
sourcefile = "Schusspunkte_GZB"
obsdir = "/rscratch/minos22/marcel/flachseismik/GZB_26_06_15_01/"
filename = 'survey_GZB.pickle'
@ -85,9 +86,9 @@ for shot in survey.data.values():
shot.setPickwindow(traceID, pickwin_used)
shot.pickTraces(traceID, windowsize, folm, HosAic) # picker
#shot.setManualPicks(traceID, picklist) # set manual picks if given (yet used on 2D only)
# ++ TEST: set and check SNR before adding to distance bin ############################
shot.setSNR(traceID)
#if shot.getSNR(traceID)[0] < snrthreshold:
if shot.getSNR(traceID)[0] < shot.getSNRthreshold(traceID):
shot.removePick(traceID)

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import sys
import numpy as np
from scipy.interpolate import griddata
@ -28,12 +29,12 @@ class SeisArray(object):
def _generateReceiverlines(self):
'''
Connects the traceIDs to the lineIDs
for each receiverline in a dictionary.
'''
for receiver in self._receiverlist:
traceID = int(receiver.split()[0])
lineID = int(receiver.split()[1])
if not lineID in self._receiverlines.keys():
self._receiverlines[lineID] = []
self._receiverlines[lineID].append(traceID)
@ -43,16 +44,16 @@ class SeisArray(object):
Fills the three x, y, z dictionaries with measured coordinates
'''
for line in self._getReceiverlist():
traceID = int(line.split()[0])
x = float(line.split()[3])
y = float(line.split()[4])
z = float(line.split()[5])
self._receiverCoords[traceID] = (x, y, z)
self._measuredReceivers[traceID] = (x, y, z)
def _setGeophoneNumbers(self):
for line in self._getReceiverlist():
traceID = int(line.split()[0])
gphoneNum = float(line.split()[2])
self._geophoneNumbers[traceID] = gphoneNum
@ -93,7 +94,7 @@ class SeisArray(object):
return self._geophoneNumbers[traceID]
def getMeasuredReceivers(self):
return self._measuredReceivers
def getMeasuredTopo(self):
return self._measuredTopo
@ -139,11 +140,11 @@ class SeisArray(object):
if self._getReceiverValue(traceID1, coordinate) < self._getReceiverValue(traceID2, coordinate):
direction = +1
return direction
if self._getReceiverValue(traceID1, coordinate) > self._getReceiverValue(traceID2, coordinate):
direction = -1
return direction
print "Error: Same Value for traceID1 = %s and traceID2 = %s" %(traceID1, traceID2)
def _interpolateMeanDistances(self, traceID1, traceID2, coordinate):
'''
Returns the mean distance between two traceID's depending on the number of geophones in between
@ -186,7 +187,7 @@ class SeisArray(object):
x = float(line[1])
y = float(line[2])
z = float(line[3])
self._measuredTopo[pointID] = (x, y, z)
def addSourceLocations(self, filename):
'''
@ -202,7 +203,7 @@ class SeisArray(object):
x = float(line[1])
y = float(line[2])
z = float(line[3])
self._sourceLocs[pointID] = (x, y, z)
def interpZcoords4rec(self, method = 'linear'):
'''
@ -239,9 +240,9 @@ class SeisArray(object):
'''
x = []; y = []; z = []
for traceID in self.getMeasuredReceivers().keys():
x.append(self.getMeasuredReceivers()[traceID][0])
y.append(self.getMeasuredReceivers()[traceID][1])
z.append(self.getMeasuredReceivers()[traceID][2])
return x, y, z
def getMeasuredTopoLists(self):
@ -250,9 +251,9 @@ class SeisArray(object):
'''
x = []; y = []; z = []
for pointID in self.getMeasuredTopo().keys():
x.append(self.getMeasuredTopo()[pointID][0])
y.append(self.getMeasuredTopo()[pointID][1])
z.append(self.getMeasuredTopo()[pointID][2])
return x, y, z
def getSourceLocsLists(self):
@ -261,9 +262,9 @@ class SeisArray(object):
'''
x = []; y = []; z = []
for pointID in self.getSourceLocations().keys():
x.append(self.getSourceLocations()[pointID][0])
y.append(self.getSourceLocations()[pointID][1])
z.append(self.getSourceLocations()[pointID][2])
return x, y, z
def getAllMeasuredPointsLists(self):
@ -289,7 +290,7 @@ class SeisArray(object):
y.append(self.getReceiverCoordinates()[traceID][1])
z.append(self.getReceiverCoordinates()[traceID][2])
return x, y, z
def _interpolateXY4rec(self):
'''
Interpolates the X and Y coordinates for all receivers.
@ -317,7 +318,7 @@ class SeisArray(object):
:param: phiWE (W, E) extensions of the model in degree
type: tuple
'''
surface = []
elevation = 0.25 # elevate topography so that no source lies above the surface
@ -356,9 +357,9 @@ class SeisArray(object):
progress = float(count) / float(nTotal) * 100
self._update_progress(progress)
if filename is not None:
outfile.writelines('%10s\n'%(z + elevation))
return surface
def generateVgrid(self, nTheta = 80, nPhi = 80, nR = 120,
@ -415,7 +416,7 @@ class SeisArray(object):
thetaDelta = abs(thetaN - thetaS) / (nTheta - 1)
phiDelta = abs(phiE - phiW) / (nPhi - 1)
rDelta = abs(rbot - rtop) / (nR - 1)
# create a regular grid including +2 cushion nodes in every direction
thetaGrid = np.linspace(thetaS - thetaDelta, thetaN + thetaDelta, num = nTheta + 2) # +2 cushion nodes
phiGrid = np.linspace(phiW - phiDelta, phiE + phiDelta, num = nPhi + 2) # +2 cushion nodes
@ -455,7 +456,7 @@ class SeisArray(object):
progress = float(count) / float(nTotal) * 100
self._update_progress(progress)
outfile.close()
def exportAll(self, filename = 'interpolated_receivers.out'):
@ -463,7 +464,7 @@ class SeisArray(object):
count = 0
for traceID in self.getReceiverCoordinates().keys():
count += 1
x, y, z = self.getReceiverCoordinates()[traceID]
recfile_out.writelines('%5s %15s %15s %15s\n' %(traceID, x, y, z))
print "Exported coordinates for %s traces to file > %s" %(count, filename)
recfile_out.close()
@ -472,15 +473,15 @@ class SeisArray(object):
import matplotlib.pyplot as plt
plt.interactive(True)
plt.figure()
xmt, ymt, zmt = self.getMeasuredTopoLists()
xsc, ysc, zsc = self.getSourceLocsLists()
xmr, ymr, zmr = self.getMeasuredReceiverLists()
xrc, yrc, zrc = self.getReceiverLists()
plt.plot(xrc, yrc, 'k.', markersize = 10, label = 'all receivers')
plt.plot(xsc, ysc, 'b*', markersize = 10, label = 'shot locations')
if plot_topo == True:
plt.plot(xmt, ymt, 'b', markersize = 10, label = 'measured topo points')
if highlight_measured == True:
plt.plot(xmr, ymr, 'ro', label = 'measured receivers')
@ -501,9 +502,9 @@ class SeisArray(object):
fig = plt.figure()
ax = plt.axes(projection = '3d')
xmt, ymt, zmt = self.getMeasuredTopoLists()
xmr, ymr, zmr = self.getMeasuredReceiverLists()
xin, yin, zin = self.getReceiverLists()
ax.plot(xmt, ymt, zmt, 'b*', markersize = 10, label = 'measured topo points')
ax.plot(xin, yin, zin, 'k.', markersize = 10, label = 'interpolated receivers')
@ -512,8 +513,8 @@ class SeisArray(object):
ax.legend()
return ax
def plotSurface3D(self, ax = None, step = 0.5, method = 'linear'):
from matplotlib import cm
import matplotlib.pyplot as plt
@ -657,8 +658,8 @@ class SeisArray(object):
cPickle.dump(self, outfile, -1)
print('saved SeisArray to file %s'%(filename))
@staticmethod
def from_pickle(filename):
import cPickle
infile = open(filename, 'rb')

View File

@ -35,8 +35,7 @@ class SeismicShot(object):
self.snr = {}
self.snrthreshold = {}
self.timeArray = {}
self.paras = {}
self.paras['shotname'] = obsfile
self.paras = {'shotname': obsfile}
def removeEmptyTraces(self):
traceIDs = []
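
The constructor hunk above replaces a two-step initialization of self.paras with a single dict literal. The same idiom in isolation, with a made-up value:

# before: build the parameter dict in two statements
paras = {}
paras['shotname'] = 'shot_0001.dat'

# after: one dict literal says the same thing more directly
paras = {'shotname': 'shot_0001.dat'}

assert paras == {'shotname': 'shot_0001.dat'}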

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
plt.interactive(True)
@ -13,9 +14,9 @@ class regions(object):
self.shots_for_deletion = {}
def _onselect(self, eclick, erelease):
'eclick and erelease are matplotlib events at press and release' #print ' startposition : (%f, %f)' % (eclick.xdata, eclick.ydata)
#print ' endposition : (%f, %f)' % (erelease.xdata, erelease.ydata)
print 'region selected x0, y0 = (%3s, %3s), x1, y1 = (%3s, %3s)'%(eclick.xdata, eclick.ydata, erelease.xdata, erelease.ydata)
x0 = min(eclick.xdata, erelease.xdata)
x1 = max(eclick.xdata, erelease.xdata)
y0 = min(eclick.ydata, erelease.ydata)
@ -51,7 +52,7 @@ class regions(object):
return self.shots_for_deletion
def findTracesInShotDict(self, picks = 'normal'):
'''
Returns traces corresponding to a certain area in a plot with all picks over the distances.
'''
print "findTracesInShotDict: Searching for marked traces in the shot dictionary... "
@ -116,7 +117,7 @@ class regions(object):
shot.plot_traces(traceID)
else:
print 'No picks yet defined in the regions x = (%s, %s), y = (%s, %s)' %(self._x0, self._x1, self._y0, self._y1)
def setCurrentRegionsForDeletion(self):
# if len(self.shots_found) == 0:

View File

@ -0,0 +1 @@
# -*- coding: utf-8 -*-

View File

@ -6,7 +6,7 @@ from obspy.signal.trigger import coincidenceTrigger
class CoincidenceTimes():
class CoincidenceTimes(object):
def __init__(self, st, comp='Z', coinum=4, sta=1., lta=10., on=5., off=1.):
_type = 'recstalta'
@ -54,4 +54,4 @@ def main():
if __name__ == '__main__':
main()
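
The class-definition hunk above switches CoincidenceTimes to a new-style class by inheriting from object, which under Python 2 is what gives a class uniform type semantics, super(), and descriptors. A tiny illustration of the difference (only visible under Python 2; under Python 3 both forms are new-style):

class OldStyle():          # old-style under Python 2: type(instance) is <type 'instance'>
    pass

class NewStyle(object):    # new-style: type(instance) is the class itself
    pass

print(type(OldStyle()), type(NewStyle()))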

View File

@ -1,3 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created August/September 2015.
@ -34,7 +35,7 @@ class Magnitude(object):
:type: integer
'''
assert isinstance(wfstream, Stream), "%s is not a stream object" % str(wfstream)
self.setwfstream(wfstream)
@ -62,7 +63,7 @@ class Magnitude(object):
def setpwin(self, pwin):
self.pwin = pwin
def getiplot(self):
return self.iplot
@ -71,7 +72,7 @@ class Magnitude(object):
def getwapp(self):
return self.wapp
def getw0(self):
return self.w0
@ -103,7 +104,7 @@ class WApp(Magnitude):
'poles': [5.6089 - 5.4978j, -5.6089 - 5.4978j],
'zeros': [0j, 0j],
'gain': 2080,
'sensitivity': 1}
stream.simulate(paz_remove=None, paz_simulate=paz_wa)
@ -133,19 +134,19 @@ class WApp(Magnitude):
raw_input()
plt.close(f)
class DCfc(Magnitude):
'''
Method to calculate the source spectrum and to derive from that the plateau
(so-called DC-value) and the corner frequency assuming Aki's omega-square
source model. Has to be derived from instrument corrected displacement traces!
'''
def calcsourcespec(self):
print ("Calculating source spectrum ....")
print ("Calculating source spectrum ....")
self.w0 = None # DC-value
self.fc = None # corner frequency
stream = self.getwfstream()
tr = stream[0]
@ -155,7 +156,7 @@ class DCfc(Magnitude):
iwin = getsignalwin(t, self.getTo(), self.getpwin())
xdat = tr.data[iwin]
# fft
fny = tr.stats.sampling_rate / 2
l = len(xdat) / tr.stats.sampling_rate
n = tr.stats.sampling_rate * l # number of fft bins after Bath
@ -167,7 +168,7 @@ class DCfc(Magnitude):
L = (N - 1) / tr.stats.sampling_rate
f = np.arange(0, fny, 1/L)
# remove zero-frequency and frequencies above
# corner frequency of seismometer (assumed
# to be 100 Hz)
fi = np.where((f >= 1) & (f < 100))
@ -185,15 +186,15 @@ class DCfc(Magnitude):
self.w0 = optspecfit[0]
self.fc = optspecfit[1]
print ("DCfc: Determined DC-value: %e m/Hz, \n" \
"Determined corner frequency: %f Hz" % (self.w0, self.fc))
"Determined corner frequency: %f Hz" % (self.w0, self.fc))
if self.getiplot() > 1:
f1 = plt.figure()
plt.subplot(2,1,1)
# show displacement in mm
plt.plot(t, np.multiply(tr, 1000), 'k')
plt.plot(t[iwin], np.multiply(xdat, 1000), 'g')
plt.title('Seismogram and P pulse, station %s' % tr.stats.station)
plt.xlabel('Time since %s' % tr.stats.starttime)
plt.ylabel('Displacement [mm]')
@ -214,8 +215,8 @@ class DCfc(Magnitude):
def synthsourcespec(f, omega0, fcorner):
'''
Calculates synthetic source spectrum from given plateau and corner
frequency assuming Aki's omega-square model.
:param: f, frequencies
:type: array
@ -226,7 +227,7 @@ def synthsourcespec(f, omega0, fcorner):
:param: fcorner, corner frequency of source spectrum
:type: float
'''
#ssp = omega0 / (pow(2, (1 + f / fcorner)))
ssp = omega0 / (1 + pow(2, (f / fcorner)))
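
synthsourcespec, shown at the end of this diff, evaluates the omega-square source model that DCfc fits: a flat plateau w0 below the corner frequency fc, decaying above it. A self-contained sketch of that forward model using the same formula; the parameter values are made up for illustration:

import numpy as np

def synthsourcespec(f, omega0, fcorner):
    # model spectrum as in the last line above: plateau omega0 below fcorner
    return omega0 / (1 + pow(2, (f / fcorner)))

f = np.arange(1.0, 100.0, 0.5)       # Hz, roughly the band kept by the fi mask above
w0, fc = 1e-4, 8.0                   # made-up plateau [m/Hz] and corner frequency [Hz]
spec = synthsourcespec(f, w0, fc)
print(spec[:3])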

View File

@ -1,3 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created Oct/Nov 2014
@ -119,7 +120,7 @@ class CharacteristicFunction(object):
def getTimeArray(self):
incr = self.getIncrement()
self.TimeArray = np.arange(0, len(self.getCF()) * incr, incr) + self.getCut()[0]
return self.TimeArray
def getFnoise(self):
@ -175,7 +176,7 @@ class CharacteristicFunction(object):
h2 = hh[1].copy()
hh[0].data = h1.data[int(start):int(stop)]
hh[1].data = h2.data[int(start):int(stop)]
data = hh
return data
elif len(self.orig_data) == 3:
if self.cut[0] == 0 and self.cut[1] == 0:
@ -196,12 +197,12 @@ class CharacteristicFunction(object):
hh[0].data = h1.data[int(start):int(stop)]
hh[1].data = h2.data[int(start):int(stop)]
hh[2].data = h3.data[int(start):int(stop)]
data = hh
return data
else:
data = self.orig_data.copy()
return data
def calcCF(self, data=None):
self.cf = data
@ -285,7 +286,7 @@ class HOScf(CharacteristicFunction):
LTA[j] = lta / np.power(lta1, 1.5)
elif self.getOrder() == 4:
LTA[j] = lta / np.power(lta1, 2)
nn = np.isnan(LTA)
if len(nn) > 1:
LTA[nn] = 0
@ -315,7 +316,7 @@ class ARZcf(CharacteristicFunction):
cf = np.zeros(len(xnp))
loopstep = self.getARdetStep()
arcalci = ldet + self.getOrder() #AR-calculation index
for i in range(ldet + self.getOrder(), tend - lpred - 1):
if i == arcalci:
#determination of AR coefficients
#to speed up calculation, AR-coefficients are calculated only every i+loopstep[1]!
@ -362,7 +363,7 @@ class ARZcf(CharacteristicFunction):
rhs = np.zeros(self.getOrder())
for k in range(0, self.getOrder()):
for i in range(rind, ldet+1):
ki = k + 1
rhs[k] = rhs[k] + data[i] * data[i - ki]
#recursive calculation of data array (second sum at left part of eq. 6.5 in Kueperkoch et al. 2012)
@ -382,7 +383,7 @@ class ARZcf(CharacteristicFunction):
def arPredZ(self, data, arpara, rind, lpred):
'''
Function to predict waveform, assuming an autoregressive process of order
p (=size(arpara)), with AR parameters arpara calculated in arDet. After
Thomas Meier (CAU), published in Kueperkoch et al. (2012).
:param: data, time series to be predicted
:type: array
@ -400,9 +401,9 @@ class ARZcf(CharacteristicFunction):
'''
# be sure of the summation indices
if rind < len(arpara):
rind = len(arpara)
if rind > len(data) - lpred :
rind = len(data) - lpred
if lpred < 1:
lpred = 1
if lpred > len(data) - 2:
@ -422,7 +423,7 @@ class ARHcf(CharacteristicFunction):
def calcCF(self, data):
print 'Calculating AR-prediction error from both horizontal traces ...'
xnp = self.getDataArray(self.getCut())
n0 = np.isnan(xnp[0].data)
if len(n0) > 1:
@ -430,7 +431,7 @@ class ARHcf(CharacteristicFunction):
n1 = np.isnan(xnp[1].data)
if len(n1) > 1:
xnp[1].data[n1] = 0
#some parameters needed
#add noise to time series
xenoise = xnp[0].data + np.random.normal(0.0, 1.0, len(xnp[0].data)) * self.getFnoise() * max(abs(xnp[0].data))
@ -441,7 +442,7 @@ class ARHcf(CharacteristicFunction):
#Time2: length of AR-prediction window [sec]
ldet = int(round(self.getTime1() / self.getIncrement())) #length of AR-determination window [samples]
lpred = int(np.ceil(self.getTime2() / self.getIncrement())) #length of AR-prediction window [samples]
cf = np.zeros(len(xenoise))
loopstep = self.getARdetStep()
arcalci = lpred + self.getOrder() - 1 #AR-calculation index
@ -515,7 +516,7 @@ class ARHcf(CharacteristicFunction):
def arPredH(self, data, arpara, rind, lpred):
'''
Function to predict waveform, assuming an autoregressive process of order
p (=size(arpara)), with AR parameters arpara calculated in arDet. After
Thomas Meier (CAU), published in Kueperkoch et al. (2012).
:param: data, horizontal component seismograms to be predicted
:type: structured array
@ -558,7 +559,7 @@ class AR3Ccf(CharacteristicFunction):
def calcCF(self, data):
print 'Calculating AR-prediction error from all 3 components ...'
xnp = self.getDataArray(self.getCut())
n0 = np.isnan(xnp[0].data)
if len(n0) > 1:
@ -569,7 +570,7 @@ class AR3Ccf(CharacteristicFunction):
n2 = np.isnan(xnp[2].data)
if len(n2) > 1:
xnp[2].data[n2] = 0
#some parameters needed
#add noise to time series
xenoise = xnp[0].data + np.random.normal(0.0, 1.0, len(xnp[0].data)) * self.getFnoise() * max(abs(xnp[0].data))
@ -581,7 +582,7 @@ class AR3Ccf(CharacteristicFunction):
#Time2: length of AR-prediction window [sec]
ldet = int(round(self.getTime1() / self.getIncrement())) #length of AR-determination window [samples]
lpred = int(np.ceil(self.getTime2() / self.getIncrement())) #length of AR-prediction window [samples]
cf = np.zeros(len(xenoise))
loopstep = self.getARdetStep()
arcalci = ldet + self.getOrder() - 1 #AR-calculation index
@ -616,7 +617,7 @@ class AR3Ccf(CharacteristicFunction):
Function to calculate AR parameters arpara after Thomas Meier (CAU), published
in Kueperkoch et al. (2012). This function solves SLE using the Moore-
Penrose inverse, i.e. the least-squares approach. "data" is a structured array.
AR parameters are calculated based on both horizontal components and vertical
component.
:param: data, horizontal component seismograms to calculate AR parameters from
:type: structured array
@ -658,7 +659,7 @@ class AR3Ccf(CharacteristicFunction):
def arPred3C(self, data, arpara, rind, lpred):
'''
Function to predict waveform, assuming an autoregressive process of order
p (=size(arpara)), with AR parameters arpara calculated in arDet3C. After
Thomas Meier (CAU), published in Kueperkoch et al. (2012).
:param: data, horizontal and vertical component seismograms to be predicted
:type: structured array

View File

@ -312,7 +312,7 @@ class PragPicker(AutoPicking):
else:
for i in range(1, len(self.cf)):
if i > ismooth:
ii1 = i - ismooth;
ii1 = i - ismooth
cfsmooth[i] = cfsmooth[i - 1] + (self.cf[i] - self.cf[ii1]) / ismooth
else:
cfsmooth[i] = np.mean(self.cf[1 : i])
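
The only change in this hunk removes a stray semicolon, but the loop it sits in is the CF smoothing step of PragPicker: a running mean over ismooth samples, updated recursively. A standalone sketch of that smoothing (the start-up branch is slightly regularized here to avoid an empty slice; the input array is made up):

import numpy as np

def smooth_cf(cf, ismooth):
    # recursive running mean over ismooth samples, mirroring the loop above
    cfsmooth = np.zeros(len(cf))
    for i in range(1, len(cf)):
        if i > ismooth:
            ii1 = i - ismooth
            cfsmooth[i] = cfsmooth[i - 1] + (cf[i] - cf[ii1]) / ismooth
        else:
            # growing window while fewer than ismooth samples are available
            cfsmooth[i] = np.mean(cf[1:i + 1])
    return cfsmooth

cf = np.abs(np.random.randn(200))   # made-up characteristic function
print(smooth_cf(cf, ismooth=10)[:5])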

View File

@ -1 +1,2 @@
#
# -*- coding: utf-8 -*-
#

View File

@ -317,29 +317,29 @@ def autopickstation(wfstream, pickparam):
data = Data()
[corzdat, restflag] = data.restituteWFData(invdir, zdat)
if restflag == 1:
# integrate to displacement
corintzdat = integrate.cumtrapz(corzdat[0], None, corzdat[0].stats.delta)
# class needs stream object => build it
z_copy = zdat.copy()
z_copy[0].data = corintzdat
# largest detectable period == window length
# after P pulse for calculating source spectrum
winzc = (1 / bpz2[0]) * z_copy[0].stats.sampling_rate
impickP = mpickP * z_copy[0].stats.sampling_rate
wfzc = z_copy[0].data[impickP : impickP + winzc]
# calculate spectrum using only first cycles of
# waveform after P onset!
zc = crossings_nonzero_all(wfzc)
if np.size(zc) == 0:
print ("Something is wrong with the waveform, " \
"no zero crossings derived!")
print ("Cannot calculate source spectrum!")
else:
calcwin = (zc[3] - zc[0]) * z_copy[0].stats.delta
# calculate source spectrum and get w0 and fc
specpara = DCfc(z_copy, mpickP, calcwin, iplot)
w0 = specpara.getw0()
fc = specpara.getfc()
print ("autopickstation: P-weight: %d, SNR: %f, SNR[dB]: %f, " \
"Polarity: %s" % (Pweight, SNRP, SNRPdB, FM))
@ -560,7 +560,7 @@ def autopickstation(wfstream, pickparam):
hdat += ndat
h_copy = hdat.copy()
[cordat, restflag] = data.restituteWFData(invdir, h_copy)
# calculate WA-peak-to-peak amplitude
# using subclass WApp of superclass Magnitude
if restflag == 1:
if Sweight < 4:
@ -591,7 +591,7 @@ def autopickstation(wfstream, pickparam):
h_copy = hdat.copy()
[cordat, restflag] = data.restituteWFData(invdir, h_copy)
if restflag == 1:
# calculate WA-peak-to-peak amplitude
# using subclass WApp of superclass Magnitude
wapp = WApp(cordat, mpickP, mpickP + sstop + (0.5 * (mpickP \
+ sstop)), iplot)

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# -*- coding: utf-8 -*-
"""
@ -91,7 +92,7 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=None):
T0 = np.mean(np.diff(zc)) * X[0].stats.delta # this is half wave length
# T0/4 is assumed as time difference between most likely and earliest possible pick!
EPick = Pick1 - T0 / 2
# get symmetric pick error as mean from earliest and latest possible pick
# by weighting latest possible pick two times earliest possible pick
@ -493,9 +494,9 @@ def wadaticheck(pickdic, dttolerance, iplot):
if len(SPtimes) >= 3:
# calculate slope
p1 = np.polyfit(Ppicks, SPtimes, 1)
wdfit = np.polyval(p1, Ppicks)
wfitflag = 0
# calculate vp/vs ratio before check
@ -532,40 +533,40 @@ def wadaticheck(pickdic, dttolerance, iplot):
pickdic[key]['S']['marked'] = marker
if len(checkedPpicks) >= 3:
# calculate new slope
p2 = np.polyfit(checkedPpicks, checkedSPtimes, 1)
wdfit2 = np.polyval(p2, checkedPpicks)
# calculate vp/vs ratio after check
cvpvsr = p2[0] + 1
print ("wadaticheck: Average Vp/Vs ratio after check: %f" % cvpvsr)
print ("wadatacheck: Skipped %d S pick(s)" % ibad)
else:
print ("###############################################")
print ("wadatacheck: Not enough checked S-P times available!")
print ("Skip Wadati check!")
print ("###############################################")
print ("wadatacheck: Not enough checked S-P times available!")
print ("Skip Wadati check!")
checkedonsets = pickdic
else:
print ("wadaticheck: Not enough S-P times available for reliable regression!")
print ("wadaticheck: Not enough S-P times available for reliable regression!")
print ("Skip wadati check!")
wfitflag = 1
# plot results
if iplot > 1:
plt.figure(iplot)
f1, = plt.plot(Ppicks, SPtimes, 'ro')
if wfitflag == 0:
f2, = plt.plot(Ppicks, wdfit, 'k')
f3, = plt.plot(checkedPpicks, checkedSPtimes, 'ko')
f4, = plt.plot(checkedPpicks, wdfit2, 'g')
plt.title('Wadati-Diagram, %d S-P Times, Vp/Vs(raw)=%5.2f,' \
'Vp/Vs(checked)=%5.2f' % (len(SPtimes), vpvsr, cvpvsr))
plt.legend([f1, f2, f3, f4], ['Skipped S-Picks', 'Wadati 1', \
'Reliable S-Picks', 'Wadati 2'], loc='best')
else:
plt.title('Wadati-Diagram, %d S-P Times' % len(SPtimes))
plt.ylabel('S-P Times [s]')
plt.xlabel('P Times [s]')
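
The wadaticheck hunks above touch only formatting, but the logic they pass through is the core of the Wadati check: fit a straight line to S-P times versus P times and read the Vp/Vs ratio off the slope plus one, exactly as in cvpvsr = p2[0] + 1 above. A compact sketch of that regression with made-up picks:

import numpy as np

# made-up absolute P onsets [s] and corresponding S-P times [s]
Ppicks = np.array([12.3, 14.1, 15.8, 18.0, 21.5])
SPtimes = np.array([1.9, 2.2, 2.5, 2.8, 3.4])

p = np.polyfit(Ppicks, SPtimes, 1)    # slope and intercept of the Wadati line
vpvs = p[0] + 1                       # Vp/Vs ratio from the slope
wdfit = np.polyval(p, Ppicks)         # fitted S-P times, used for plotting above
print('Vp/Vs = %.2f' % vpvs)
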
@ -579,8 +580,8 @@ def wadaticheck(pickdic, dttolerance, iplot):
def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
'''
Function to detect spuriously picked noise peaks.
Uses RMS trace of all 3 components (if available) to determine,
how many samples [per cent] after P onset are below certain
threshold, calculated from noise level times noise factor.
: param: X, time series (seismogram)
@ -612,7 +613,7 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
print ("Checking signal length ...")
if len(X) > 1:
# all three components available
# make sure, all components have equal lengths
ilen = min([len(X[0].data), len(X[1].data), len(X[2].data)])
x1 = X[0][0:ilen]
@ -639,7 +640,7 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
numoverthr = len(np.where(rms[isignal] >= minsiglevel)[0])
if numoverthr >= minnum:
print ("checksignallength: Signal reached required length.")
print ("checksignallength: Signal reached required length.")
returnflag = 1
else:
print ("checksignallength: Signal shorter than required minimum signal length!")
@ -649,7 +650,7 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
if iplot == 2:
plt.figure(iplot)
p1, = plt.plot(t,rms, 'k')
p2, = plt.plot(t[inoise], rms[inoise], 'c')
p3, = plt.plot(t[isignal],rms[isignal], 'r')
p4, = plt.plot([t[isignal[0]], t[isignal[len(isignal)-1]]], \
@ -729,27 +730,27 @@ def checkPonsets(pickdic, dttolerance, iplot):
badjkmarker = 'badjkcheck'
for i in range(0, len(goodstations)):
# mark P onset as checked and keep P weight
pickdic[goodstations[i]]['P']['marked'] = goodmarker
for i in range(0, len(badstations)):
# mark P onset and downgrade P weight to 9
# (not used anymore)
pickdic[badstations[i]]['P']['marked'] = badmarker
pickdic[badstations[i]]['P']['weight'] = 9
for i in range(0, len(badjkstations)):
# mark P onset and downgrade P weight to 9
# (not used anymore)
pickdic[badjkstations[i]]['P']['marked'] = badjkmarker
pickdic[badjkstations[i]]['P']['weight'] = 9
checkedonsets = pickdic
if iplot > 1:
p1, = plt.plot(np.arange(0, len(Ppicks)), Ppicks, 'r+', markersize=14)
p2, = plt.plot(igood, np.array(Ppicks)[igood], 'g*', markersize=14)
p3, = plt.plot([0, len(Ppicks) - 1], [pmedian, pmedian], 'g', \
linewidth=2)
for i in range(0, len(Ppicks)):
plt.text(i, Ppicks[i] + 0.2, stations[i])
plt.xlabel('Number of P Picks')
plt.ylabel('Onset Time [s] from 1.1.1970')
@ -789,37 +790,37 @@ def jackknife(X, phi, h):
g = len(X) / h
if type(g) is not int:
print ("jackknife: Cannot divide quantity X in equal sized subgroups!")
print ("jackknife: Cannot divide quantity X in equal sized subgroups!")
print ("Choose another size for subgroups!")
return PHI_jack, PHI_pseudo, PHI_sub
else:
# estimator of undisturbed spot check
if phi == 'MEA':
phi_sc = np.mean(X)
elif phi == 'VAR':
phi_sc = np.var(X)
elif phi == 'MED':
phi_sc = np.median(X)
# estimators of subgroups
PHI_pseudo = []
PHI_sub = []
for i in range(0, g - 1):
# subgroup i, remove i-th sample
xx = X[:]
del xx[i]
# calculate estimators of disturbed spot check
if phi == 'MEA':
phi_sub = np.mean(xx)
elif phi == 'VAR':
phi_sub = np.var(xx)
elif phi == 'MED':
phi_sub = np.median(xx)
PHI_sub.append(phi_sub)
# pseudo values
phi_pseudo = g * phi_sc - ((g - 1) * phi_sub)
PHI_pseudo.append(phi_pseudo)
# jackknife estimator
PHI_jack = np.mean(PHI_pseudo)
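
The jackknife block is only re-indented by this commit; the estimator itself is the classic jackknife: compute the statistic on the full sample, recompute it with each subgroup removed, form pseudo-values g * phi_sc - (g - 1) * phi_sub, and average them. A small self-contained sketch for the simplest delete-one case (subgroup size h = 1) with the mean as the estimator phi:

import numpy as np

def jackknife_mean(X):
    g = len(X)
    phi_sc = np.mean(X)                        # estimator on the undisturbed sample
    PHI_pseudo = []
    for i in range(g):
        xx = list(X)
        del xx[i]                              # delete-one subgroup
        phi_sub = np.mean(xx)                  # estimator on the disturbed sample
        PHI_pseudo.append(g * phi_sc - (g - 1) * phi_sub)
    return np.mean(PHI_pseudo)                 # jackknife estimator

print(jackknife_mean([2.0, 3.0, 5.0, 7.0, 11.0]))
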
@ -899,17 +900,17 @@ def checkZ4S(X, pick, zfac, checkwin, iplot):
# vertical P-coda level must exceed horizontal P-coda level
# zfac times encodalevel
if zcodalevel < minsiglevel:
print ("checkZ4S: Maybe S onset? Skip this P pick!")
print ("checkZ4S: Maybe S onset? Skip this P pick!")
else:
print ("checkZ4S: P onset passes checkZ4S test!")
returnflag = 1
if iplot > 1:
te = np.arange(0, edat[0].stats.npts / edat[0].stats.sampling_rate,
edat[0].stats.delta)
tn = np.arange(0, ndat[0].stats.npts / ndat[0].stats.sampling_rate,
ndat[0].stats.delta)
plt.plot(tz, z / max(z), 'k')
plt.plot(tz[isignal], z[isignal] / max(z), 'r')
plt.plot(te, edat[0].data / max(edat[0].data) + 1, 'k')
plt.plot(te[isignal], edat[0].data[isignal] / max(edat[0].data) + 1, 'r')

View File

@ -1 +1 @@
# -*- coding: utf-8 -*-

View File

@ -208,8 +208,7 @@ class FilterOptions(object):
def parseFilterOptions(self):
if self.getFilterType():
robject = {'type':self.getFilterType()}
robject['corners'] = self.getOrder()
robject = {'type': self.getFilterType(), 'corners': self.getOrder()}
if len(self.getFreq()) > 1:
robject['freqmin'] = self.getFreq()[0]
robject['freqmax'] = self.getFreq()[1]
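
parseFilterOptions now builds its options dict in a single literal. A simplified, hypothetical stand-alone version of the same idea (not the actual FilterOptions class); the resulting dict is shaped like keyword arguments for an obspy bandpass filter call:

def parse_filter_options(ftype, freq, order):
    # build the keyword arguments in one dict literal, as in the refactored method
    robject = {'type': ftype, 'corners': order}
    if len(freq) > 1:
        robject['freqmin'] = freq[0]
        robject['freqmax'] = freq[1]
    return robject

# e.g. the dict can be splatted into a filter call such as stream.filter(**options)
options = parse_filter_options('bandpass', [0.5, 5.0], 4)
print(options)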

View File

@ -1 +1,2 @@
# -*- coding: utf-8 -*-
from pylot.core.util.version import get_git_version as _getVersionString

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
'''
Created on 10.11.2014
@ -23,4 +24,4 @@ class Test(unittest.TestCase):
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
'''
Created on 10.11.2014
@ -15,4 +16,4 @@ class Test(unittest.TestCase):
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()

View File

@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import sys
from PySide.QtCore import QThread, Signal

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# -*- coding: utf-8 -*-

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, time
from PySide.QtGui import QApplication

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import matplotlib

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, time
from PySide.QtGui import QApplication
@ -8,4 +9,4 @@ app = QApplication(sys.argv)
win = PropertiesDlg()
win.show()
app.exec_()

View File

@ -1,4 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, time
from PySide.QtGui import QApplication
@ -9,7 +11,7 @@ dialogs = [FilterOptionsDialog, PropertiesDlg, HelpForm]
app = QApplication(sys.argv)
for dlg in dialogs:
win = dlg()
win.show()
time.sleep(1)
win.destroy()