Merge branch 'develop' of ariadne.geophysik.ruhr-uni-bochum.de:/data/git/pylot into develop
commit 5083c5a876
@@ -24,6 +24,7 @@ https://www.iconfinder.com/iconsets/flavour
"""

import os, sys
from os.path import expanduser
import matplotlib

matplotlib.use('Qt4Agg')
@@ -670,7 +671,8 @@ class MainWindow(QMainWindow):
self.logDockWidget.setWidget(self.listWidget)
self.addDockWidget(Qt.LeftDockWidgetArea, self.logDockWidget)
self.addListItem('loading default values for local data ...')
autopick_parameter = AutoPickParameter('autoPyLoT_local.in')
home = expanduser("~")
autopick_parameter = AutoPickParameter('%s/.pylot/autoPyLoT_local.in' % home)
self.addListItem(str(autopick_parameter))

# Create the worker thread and run it
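The hunk above replaces the hard-coded working-directory file name with a per-user default under ~/.pylot/. A minimal stand-alone sketch of that lookup (the isfile guard is illustrative only and not part of the commit):

    import os
    from os.path import expanduser
    from pylot.core.read.inputs import AutoPickParameter

    home = expanduser("~")                           # e.g. /home/user
    infile = '%s/.pylot/autoPyLoT_local.in' % home   # new default location
    if os.path.isfile(infile):                       # guard added for illustration only
        autopick_parameter = AutoPickParameter(infile)
    else:
        print("no default parameter file found at %s" % infile)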
autoPyLoT.py (39 changed lines)
@@ -6,6 +6,7 @@ import argparse
import glob
import subprocess
import string
import numpy as np
from obspy.core import read, UTCDateTime
from pylot.core.read.data import Data
from pylot.core.read.inputs import AutoPickParameter
@@ -141,7 +142,8 @@ def autoPyLoT(inputfile):
# calculating seismic moment Mo and moment magnitude Mw
finalpicks = M0Mw(wfdat, None, None, parameter.getParam('iplot'), \
nllocfile, picks, parameter.getParam('rho'), \
parameter.getParam('vp'), parameter.getParam('invdir'))
parameter.getParam('vp'), parameter.getParam('Qp'), \
parameter.getParam('invdir'))
else:
print("autoPyLoT: No NLLoc-location file available!")
print("No source parameter estimation possible!")
@@ -161,6 +163,8 @@ def autoPyLoT(inputfile):
picks = iteratepicker(wfdat, nllocfile, picks, badpicks, parameter)
# write phases to NLLoc-phase file
picksExport(picks, 'NLLoc', phasefile)
# remove actual NLLoc-location file to keep only the last
os.remove(nllocfile)
# locate the event
locate(nlloccall, ctrfile)
print("autoPyLoT: Iteration No. %d finished." % nlloccounter)
@@ -180,14 +184,25 @@ def autoPyLoT(inputfile):
# calculating seismic moment Mo and moment magnitude Mw
finalpicks = M0Mw(wfdat, None, None, parameter.getParam('iplot'), \
nllocfile, picks, parameter.getParam('rho'), \
parameter.getParam('vp'), parameter.getParam('invdir'))
parameter.getParam('vp'), parameter.getParam('Qp'), \
parameter.getParam('invdir'))
# get network moment magnitude
netMw = []
for key in finalpicks.getpicdic():
if finalpicks.getpicdic()[key]['P']['Mw'] is not None:
netMw.append(finalpicks.getpicdic()[key]['P']['Mw'])
netMw = np.median(netMw)
print("Network moment magnitude: %4.1f" % netMw)
else:
print("autoPyLoT: No NLLoc-location file available! Stop iteration!")
##########################################################
# write phase files for various location routines
# HYPO71
hypo71file = '%s/%s/autoPyLoT_HYPO71.pha' % (datapath, evID)
hypo71file = '%s/autoPyLoT_HYPO71.pha' % event
if finalpicks.getpicdic() is not None:
writephases(finalpicks.getpicdic(), 'HYPO71', hypo71file)
else:
writephases(picks, 'HYPO71', hypo71file)

endsplash = '''------------------------------------------\n'
-----Finished event %s!-----\n'
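The network magnitude introduced in the hunk above is simply the median of all station Mw values that come with a usable P pick. A self-contained sketch of that aggregation (the pick-dictionary layout is taken from the diff):

    import numpy as np

    def network_mw(picdic):
        # picdic: {station: {'P': {'Mw': float or None, ...}, ...}, ...}
        station_mw = [picdic[key]['P']['Mw'] for key in picdic
                      if picdic[key]['P']['Mw'] is not None]
        if not station_mw:
            return None
        return np.median(station_mw)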
@@ -240,7 +255,8 @@ def autoPyLoT(inputfile):
# calculating seismic moment Mo and moment magnitude Mw
finalpicks = M0Mw(wfdat, None, None, parameter.getParam('iplot'), \
nllocfile, picks, parameter.getParam('rho'), \
parameter.getParam('vp'), parameter.getParam('invdir'))
parameter.getParam('vp'), parameter.getParam('Qp'), \
parameter.getParam('invdir'))
else:
print("autoPyLoT: No NLLoc-location file available!")
print("No source parameter estimation possible!")
@@ -260,6 +276,8 @@ def autoPyLoT(inputfile):
picks = iteratepicker(wfdat, nllocfile, picks, badpicks, parameter)
# write phases to NLLoc-phase file
picksExport(picks, 'NLLoc', phasefile)
# remove actual NLLoc-location file to keep only the last
os.remove(nllocfile)
# locate the event
locate(nlloccall, ctrfile)
print("autoPyLoT: Iteration No. %d finished." % nlloccounter)
@@ -279,14 +297,25 @@ def autoPyLoT(inputfile):
# calculating seismic moment Mo and moment magnitude Mw
finalpicks = M0Mw(wfdat, None, None, parameter.getParam('iplot'), \
nllocfile, picks, parameter.getParam('rho'), \
parameter.getParam('vp'), parameter.getParam('invdir'))
parameter.getParam('vp'), parameter.getParam('Qp'), \
parameter.getParam('invdir'))
# get network moment magnitude
netMw = []
for key in finalpicks.getpicdic():
if finalpicks.getpicdic()[key]['P']['Mw'] is not None:
netMw.append(finalpicks.getpicdic()[key]['P']['Mw'])
netMw = np.median(netMw)
print("Network moment magnitude: %4.1f" % netMw)
else:
print("autoPyLoT: No NLLoc-location file available! Stop iteration!")
##########################################################
# write phase files for various location routines
# HYPO71
hypo71file = '%s/%s/autoPyLoT_HYPO71.pha' % (datapath, parameter.getParam('eventID'))
if finalpicks.getpicdic() is not None:
writephases(finalpicks.getpicdic(), 'HYPO71', hypo71file)
else:
writephases(picks, 'HYPO71', hypo71file)

endsplash = '''------------------------------------------\n'
-----Finished event %s!-----\n'
@@ -25,16 +25,15 @@ AUTOLOC_nlloc #outpatter# %pattern of NLLoc-output file
%(returns 'eventID_outpatter')
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#parameters for seismic moment estimation#
3000 #vp# %average P-wave velocity
2600 #rho# %rock density [kg/m^3]
300 #Qp# %quality factor for P waves
3530 #vp# %average P-wave velocity
2500 #rho# %average rock density [kg/m^3]
300f**0.8 #Qp# %quality factor for P waves (Qp*f^a)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
AUTOFOCMEC_AIC_HOS4_ARH.in #focmecin# %name of focmec input file containing polarities
6 #pmin# %minimum required P picks for location
4 #p0min# %minimum required P picks for location if at least
%3 excellent P picks are found
2 #smin# %minimum required S picks for location
/home/ludger/bin/run_HYPOSAT4autoPILOT.csh #cshellp# %path and name of c-shell script to run location routine
7.6 8.5 #blon# %longitude bounding for location map
49 49.4 #blat# %latitude bounding for location map
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
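The Qp entry changes from a constant value to a frequency-dependent quality factor: the string '300f**0.8' encodes Q(f) = Q0 * f**a with Q0 = 300 and a = 0.8. A small sketch of how such an entry is split, mirroring the Qp.split('f**') parsing in calcsourcespec further down:

    qp_entry = '300f**0.8'           # value of the #Qp# parameter
    q0, a = qp_entry.split('f**')
    Q0 = int(q0)                     # constant part of the quality factor (300)
    a = float(a)                     # power of frequency (0.8)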
@@ -12,7 +12,7 @@ from obspy.core import Stream, UTCDateTime
from pylot.core.pick.utils import getsignalwin, crossings_nonzero_all
from pylot.core.util.utils import getPatternLine
from scipy.optimize import curve_fit
from scipy import integrate
from scipy import integrate, signal
from pylot.core.read.data import Data

class Magnitude(object):
@@ -23,7 +23,7 @@ class Magnitude(object):
'''

def __init__(self, wfstream, To, pwin, iplot, NLLocfile=None, \
picks=None, rho=None, vp=None, invdir=None):
picks=None, rho=None, vp=None, Qp=None, invdir=None):
'''
:param: wfstream
:type: `~obspy.core.stream.Stream
@@ -66,6 +66,7 @@ class Magnitude(object):
self.setrho(rho)
self.setpicks(picks)
self.setvp(vp)
self.setQp(Qp)
self.setinvdir(invdir)
self.calcwapp()
self.calcsourcespec()
@@ -114,6 +115,12 @@ class Magnitude(object):
def getvp(self):
return self.vp

def setQp(self, Qp):
self.Qp = Qp

def getQp(self):
return self.Qp

def setpicks(self, picks):
self.picks = picks

@@ -200,7 +207,7 @@ class WApp(Magnitude):
class M0Mw(Magnitude):
'''
Method to calculate seismic moment Mo and moment magnitude Mw.
Requires results of class w0fc for calculating plateau w0
Requires results of class calcsourcespec for calculating plateau w0
and corner frequency fc of source spectrum, respectively. Uses
subfunction calcMoMw.py. Returns modified dictionary of picks including
Dc-value, corner frequency fc, seismic moment Mo and
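The M0Mw docstring above describes the w0/fc -> Mo -> Mw chain carried out by calcMoMw. The constants calcMoMw actually uses are not visible in this diff; a hedged sketch based on the standard relations (Brune-type moment from the spectral plateau, Hanks and Kanamori moment magnitude) could look like this:

    import numpy as np

    def moment_magnitude(w0, rho, vp, delta, R=0.55):
        # w0: spectral plateau [m/Hz], rho: density [kg/m^3], vp: P velocity [m/s],
        # delta: hypocentral distance [m], R: average P radiation coefficient (assumed)
        M0 = 4 * np.pi * rho * vp ** 3 * delta * w0 / R   # seismic moment [N*m]
        Mw = 2.0 / 3.0 * np.log10(M0) - 6.07              # Hanks & Kanamori relation
        return M0, Mw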
@@ -212,17 +219,12 @@ class M0Mw(Magnitude):
picks = self.getpicks()
nllocfile = self.getNLLocfile()
wfdat = self.getwfstream()
# get vertical component data only
zdat = wfdat.select(component="Z")
if len(zdat) == 0: # check for other components
zdat = wfdat.select(component="3")
self.picdic = None

for key in picks:
if picks[key]['P']['weight'] < 4:
# select waveform
selwf = zdat.select(station=key)
# get hypocentral distance of station
# from NLLoc-location file
selwf = wfdat.select(station=key)
if len(key) > 4:
Ppattern = '%s ? ? ? P' % key
elif len(key) == 4:
@@ -230,15 +232,23 @@ class M0Mw(Magnitude):
elif len(key) < 4:
Ppattern = '%s ? ? ? P' % key
nllocline = getPatternLine(nllocfile, Ppattern)
# get hypocentral distance, station azimuth and
# angle of incidence from NLLoc-location file
delta = float(nllocline.split(None)[21])
az = float(nllocline.split(None)[22])
inc = float(nllocline.split(None)[24])
# call subfunction to estimate source spectrum
# and to derive w0 and fc
[w0, fc] = calcsourcespec(selwf, picks[key]['P']['mpp'], \
self.getiplot(), self.getinvdir())
self.getinvdir(), az, inc, self.getQp(), \
self.getiplot())

if w0 is not None:
# call subfunction to calculate Mo and Mw
[Mo, Mw] = calcMoMw(selwf, w0, self.getrho(), self.getvp(), \
zdat = selwf.select(component="Z")
if len(zdat) == 0: # check for other components
zdat = selwf.select(component="3")
[Mo, Mw] = calcMoMw(zdat, w0, self.getrho(), self.getvp(), \
delta, self.getinvdir())
else:
Mo = None
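The new code reads the station geometry straight from the matching NLLoc phase line. A stand-alone sketch of that extraction, using the same whitespace-split column indices (21, 22, 24) as the diff and assuming the NLLoc phase-line layout PyLoT relies on:

    def station_geometry(nllocline):
        fields = nllocline.split(None)
        delta = float(fields[21])   # hypocentral distance
        az = float(fields[22])      # station azimuth
        inc = float(fields[24])     # angle of incidence at the station
        return delta, az, inc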
@@ -276,70 +286,119 @@ def calcMoMw(wfstream, w0, rho, vp, delta, inv):


def calcsourcespec(wfstream, onset, iplot, inventory):
def calcsourcespec(wfstream, onset, inventory, azimuth, incidence, Qp, iplot):
'''
Subfunction to calculate the source spectrum and to derive from that the plateau
(usually called omega0) and the corner frequency assuming Aki's omega-square
source model. Has to be derived from instrument corrected displacement traces,
thus restitution and integration necessary!
thus restitution and integration necessary! Integrated traces have to be rotated
into ray-coordinate system ZNE => LQT!
'''
print ("Calculating source spectrum ....")

# get Q value
qu = Qp.split('f**')
# constant Q
Q = int(qu[0])
# A, i.e. power of frequency
A = float(qu[1])

fc = None
w0 = None
data = Data()
z_copy = wfstream.copy()

[corzdat, restflag] = data.restituteWFData(inventory, z_copy)
wf_copy = wfstream.copy()

[cordat, restflag] = data.restituteWFData(inventory, wf_copy)
if restflag == 1:
# integrate to displacement
corintzdat = integrate.cumtrapz(corzdat[0], None, corzdat[0].stats.delta)
z_copy[0].data = corintzdat
tr = z_copy[0]
zdat = cordat.select(component="Z")
if len(zdat) == 0:
zdat = cordat.select(component="3")
cordat_copy = cordat.copy()
# get equal time stamps and lengths of traces
# necessary for rotation of traces
tr0start = cordat_copy[0].stats.starttime
tr0start = tr0start.timestamp
tr0end = cordat_copy[0].stats.endtime
tr0end = tr0end.timestamp
tr1start = cordat_copy[1].stats.starttime
tr1start = tr1start.timestamp
tr1end = cordat_copy[1].stats.endtime
tr1end = tr1end.timestamp
tr2start = cordat_copy[2].stats.starttime
tr2start = tr2start.timestamp
tr2end = cordat_copy[0].stats.endtime
tr2end = tr2end.timestamp
trstart = UTCDateTime(max([tr0start, tr1start, tr2start]))
trend = UTCDateTime(min([tr0end, tr1end, tr2end]))
cordat_copy.trim(trstart, trend)
minlen = min([len(cordat_copy[0]), len(cordat_copy[1]), len(cordat_copy[2])])
cordat_copy[0].data = cordat_copy[0].data[0:minlen]
cordat_copy[1].data = cordat_copy[1].data[0:minlen]
cordat_copy[2].data = cordat_copy[2].data[0:minlen]
try:
# rotate into LQT (ray-coordinate) system using ObsPy's rotate
# L: P-wave direction
# Q: SV-wave direction
# T: SH-wave direction
LQT = cordat_copy.rotate('ZNE->LQT', azimuth, incidence)
ldat = LQT.select(component="L")
if len(ldat) == 0:
# yet ObsPy's rotate can not handle channels 3/2/1
ldat = LQT.select(component="Z")

# integrate to displacement
# unrotated vertical component (for comparison)
inttrz = signal.detrend(integrate.cumtrapz(zdat[0].data, None, \
zdat[0].stats.delta))
# rotated component Z => L
Ldat = signal.detrend(integrate.cumtrapz(ldat[0].data, None, \
ldat[0].stats.delta))

# get window after P pulse for
# calculating source spectrum
if tr.stats.sampling_rate <= 100:
winzc = tr.stats.sampling_rate
elif tr.stats.sampling_rate > 100 and \
tr.stats.sampling_rate <= 200:
winzc = 0.5 * tr.stats.sampling_rate
elif tr.stats.sampling_rate > 200 and \
tr.stats.sampling_rate <= 400:
winzc = 0.2 * tr.stats.sampling_rate
elif tr.stats.sampling_rate > 400:
winzc = tr.stats.sampling_rate
tstart = UTCDateTime(tr.stats.starttime)
if zdat[0].stats.sampling_rate <= 100:
winzc = zdat[0].stats.sampling_rate
elif zdat[0].stats.sampling_rate > 100 and \
zdat[0].stats.sampling_rate <= 200:
winzc = 0.5 * zdat[0].stats.sampling_rate
elif zdat[0].stats.sampling_rate > 200 and \
zdat[0].stats.sampling_rate <= 400:
winzc = 0.2 * zdat[0].stats.sampling_rate
elif zdat[0].stats.sampling_rate > 400:
winzc = zdat[0].stats.sampling_rate
tstart = UTCDateTime(zdat[0].stats.starttime)
tonset = onset.timestamp - tstart.timestamp
impickP = tonset * tr.stats.sampling_rate
wfzc = tr.data[impickP : impickP + winzc]
impickP = tonset * zdat[0].stats.sampling_rate
wfzc = Ldat[impickP : impickP + winzc]
# get time array
t = np.arange(0, len(tr) * tr.stats.delta, tr.stats.delta)
t = np.arange(0, len(inttrz) * zdat[0].stats.delta, \
zdat[0].stats.delta)
# calculate spectrum using only first cycles of
# waveform after P onset!
zc = crossings_nonzero_all(wfzc)
if np.size(zc) == 0 or len(zc) <= 3:
print ("Something is wrong with the waveform, "
print ("calcsourcespec: Something is wrong with the waveform, "
"no zero crossings derived!")
print ("No calculation of source spectrum possible!")
plotflag = 0
else:
plotflag = 1
index = min([3, len(zc) - 1])
calcwin = (zc[index] - zc[0]) * z_copy[0].stats.delta
calcwin = (zc[index] - zc[0]) * zdat[0].stats.delta
iwin = getsignalwin(t, tonset, calcwin)
xdat = tr.data[iwin]
xdat = Ldat[iwin]

# fft
fny = tr.stats.sampling_rate / 2
l = len(xdat) / tr.stats.sampling_rate
n = tr.stats.sampling_rate * l # number of fft bins after Bath
fny = zdat[0].stats.sampling_rate / 2
l = len(xdat) / zdat[0].stats.sampling_rate
# number of fft bins after Bath
n = zdat[0].stats.sampling_rate * l
# find next power of 2 of data length
m = pow(2, np.ceil(np.log(len(xdat)) / np.log(2)))
N = int(np.power(m, 2))
y = tr.stats.delta * np.fft.fft(xdat, N)
y = zdat[0].stats.delta * np.fft.fft(xdat, N)
Y = abs(y[: N/2])
L = (N - 1) / tr.stats.sampling_rate
L = (N - 1) / zdat[0].stats.sampling_rate
f = np.arange(0, fny, 1/L)

# remove zero-frequency and frequencies above
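Before the ZNE => LQT rotation the three restituted traces are forced onto a common time window and equal length (the tr0/tr1/tr2 bookkeeping above). A compact sketch of the same preparation, using only ObsPy stream and trace basics:

    from obspy.core import UTCDateTime

    def trim_to_common_window(st):
        # st: three-component ObsPy Stream; trims in place and equalises lengths
        starts = [tr.stats.starttime.timestamp for tr in st]
        ends = [tr.stats.endtime.timestamp for tr in st]
        st.trim(UTCDateTime(max(starts)), UTCDateTime(min(ends)))
        minlen = min(len(tr) for tr in st)
        for tr in st:
            tr.data = tr.data[0:minlen]
        return st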
@@ -348,51 +407,65 @@ def calcsourcespec(wfstream, onset, iplot, inventory):
fi = np.where((f >= 1) & (f < 100))
F = f[fi]
YY = Y[fi]
# correction for attenuation
YYcor = YY.real*Q**A
# get plateau (DC value) and corner frequency
# initial guess of plateau
w0in = np.mean(YY[0:100])
w0in = np.mean(YYcor[0:100])
# initial guess of corner frequency
# where spectral level reached 50% of flat level
iin = np.where(YY >= 0.5 * w0in)
iin = np.where(YYcor >= 0.5 * w0in)
Fcin = F[iin[0][np.size(iin) - 1]]

# use of implicit scipy optimization function
fit = synthsourcespec(F, w0in, Fcin)
[optspecfit, pcov] = curve_fit(synthsourcespec, F, YY.real, [w0in, Fcin])
[optspecfit, pcov] = curve_fit(synthsourcespec, F, YYcor, [w0in, Fcin])
w01 = optspecfit[0]
fc1 = optspecfit[1]
print ("w0fc: Determined w0-value: %e m/Hz, \n"
print ("calcsourcespec: Determined w0-value: %e m/Hz, \n"
"Determined corner frequency: %f Hz" % (w01, fc1))

# use of conventional fitting
[w02, fc2] = fitSourceModel(F, YY.real, Fcin, iplot)
[w02, fc2] = fitSourceModel(F, YYcor, Fcin, iplot)

# get w0 and fc as median
w0 = np.median([w01, w02])
fc = np.median([fc1, fc2])
print("w0fc: Using w0-value = %e m/Hz and fc = %f Hz" % (w0, fc))
print("calcsourcespec: Using w0-value = %e m/Hz and fc = %f Hz" % (w0, fc))

except TypeError as er:
raise TypeError('''{0}'''.format(er))

if iplot > 1:
f1 = plt.figure()
tLdat = np.arange(0, len(Ldat) * zdat[0].stats.delta, \
zdat[0].stats.delta)
plt.subplot(2,1,1)
# show displacement in mm
plt.plot(t, np.multiply(tr, 1000), 'k')
p1, = plt.plot(t, np.multiply(inttrz, 1000), 'k')
p2, = plt.plot(tLdat, np.multiply(Ldat, 1000))
plt.legend([p1, p2], ['Displacement', 'Rotated Displacement'])
if plotflag == 1:
plt.plot(t[iwin], np.multiply(xdat, 1000), 'g')
plt.title('Seismogram and P Pulse, Station %s-%s' \
% (tr.stats.station, tr.stats.channel))
% (zdat[0].stats.station, zdat[0].stats.channel))
else:
plt.title('Seismogram, Station %s-%s' \
% (tr.stats.station, tr.stats.channel))
plt.xlabel('Time since %s' % tr.stats.starttime)
% (zdat[0].stats.station, zdat[0].stats.channel))
plt.xlabel('Time since %s' % zdat[0].stats.starttime)
plt.ylabel('Displacement [mm]')

if plotflag == 1:
plt.subplot(2,1,2)
plt.loglog(f, Y.real, 'k')
plt.loglog(F, YY.real)
plt.loglog(F, fit, 'g')
p1, = plt.loglog(f, Y.real, 'k')
p2, = plt.loglog(F, YY.real)
p3, = plt.loglog(F, YYcor, 'r')
p4, = plt.loglog(F, fit, 'g')
plt.loglog([fc, fc], [w0/100, w0], 'g')
plt.legend([p1, p2, p3, p4], ['Raw Spectrum', \
'Used Raw Spectrum', \
'Q-Corrected Spectrum', \
'Fit to Spectrum'])
plt.title('Source Spectrum from P Pulse, w0=%e m/Hz, fc=%6.2f Hz' \
% (w0, fc))
plt.xlabel('Frequency [Hz]')
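Both fitting paths above (scipy's curve_fit and the alternative fitSourceModel) fit the Q-corrected spectrum to an omega-square source model, as named in the docstring. synthsourcespec itself is not shown in this diff; a hedged sketch of such a model function:

    import numpy as np
    from scipy.optimize import curve_fit

    def omega_square_spectrum(f, w0, fc):
        # flat plateau w0 below the corner frequency fc, roughly f**-2 decay above it
        return w0 / (1.0 + (f / fc) ** 2)

    # fitted the same way as in the diff, e.g.:
    # optspecfit, pcov = curve_fit(omega_square_spectrum, F, YYcor, [w0in, Fcin])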
@@ -474,8 +547,13 @@ def fitSourceModel(f, S, fc0, iplot):
STD.append(stddc + stdFC)

# get best found w0 and fc from minimum
if len(STD) > 0:
fc = fc[np.argmin(STD)]
w0 = w0[np.argmin(STD)]
elif len(STD) == 0:
fc = fc0
w0 = max(S)

print("fitSourceModel: best fc: %fHz, best w0: %e m/Hz" \
% (fc, w0))

@@ -204,7 +204,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
z_copy[0].data = tr_filt.data
zne = z_copy
if len(ndat) == 0 or len(edat) == 0:
msg = 'One or more horizontal components missing!\nSignal ' \
msg = 'One or more horizontal component(s) missing!\nSignal ' \
'length only checked on vertical component!\n' \
'Decreasing minsiglengh from {0} to ' \
'{1}'.format(minsiglength, minsiglength / 2)
@@ -335,15 +335,15 @@ def autopickstation(wfstream, pickparam, verbose=False):
Sflag = 0

else:
print("autopickstation: No vertical component data available!, "
"Skipping station!")
print('autopickstation: No vertical component data available!, '
'Skipping station!')

if edat is not None and ndat is not None and len(edat) > 0 and len(
ndat) > 0 and Pweight < 4:
msg = "Go on picking S onset ...\n" \
"##################################################\n" \
"Working on S onset of station {0}\nFiltering horizontal " \
"traces ...".format(edat[0].stats.station)
msg = 'Go on picking S onset ...\n' \
'##################################################\n' \
'Working on S onset of station {0}\nFiltering horizontal ' \
'traces ...'.format(edat[0].stats.station)
if verbose: print(msg)
# determine time window for calculating CF after P onset
cuttimesh = [round(max([mpickP + sstart, 0])),
@@ -424,6 +424,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
'SNR: {1}\nGo on with refined picking ...\n' \
'autopickstation: re-filtering horizontal traces ' \
'...'.format(aicarhpick.getSlope(), aicarhpick.getSNR())
if verbose: print(msg)
# re-calculate CF from re-filtered trace in vicinity of initial
# onset
cuttimesh2 = [round(aicarhpick.getpick() - Srecalcwin),
@@ -774,7 +775,8 @@ def autopickstation(wfstream, pickparam, verbose=False):
# for P phase
phase = 'P'
phasepick = {'lpp': lpickP, 'epp': epickP, 'mpp': mpickP, 'spe': Perror,
'snr': SNRP, 'snrdb': SNRPdB, 'weight': Pweight, 'fm': FM}
'snr': SNRP, 'snrdb': SNRPdB, 'weight': Pweight, 'fm': FM,
'w0': None, 'fc': None, 'Mo': None, 'Mw': None}
picks = {phase: phasepick}
# add P marker
picks[phase]['marked'] = Pmarker
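The last hunk widens the per-phase pick dictionary so that source parameters determined later by M0Mw have a place to live. A sketch of the resulting structure for a P pick (placeholder values are illustrative only):

    phasepick = {'lpp': None, 'epp': None, 'mpp': None, 'spe': None,
                 'snr': None, 'snrdb': None, 'weight': 4, 'fm': None,
                 'w0': None, 'fc': None, 'Mo': None, 'Mw': None}
    picks = {'P': phasepick}
    # magnitude estimation later fills e.g. picks['P']['Mw'] per station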
@@ -10,165 +10,60 @@ import numpy as np
from obspy.core import UTCDateTime
import obspy.core.event as ope

def runProgram(cmd, parameter=None):
"""
run an external program specified by cmd with parameters input returning the
stdout output

:param cmd: name of the command to run
:type cmd: str
:param parameter: filename of parameter file or parameter string
:type parameter: str
:return: stdout output
:rtype: str
"""

if parameter:
cmd.strip()
cmd += ' %s 2>&1' % parameter

output = subprocess.check_output('{} | tee /dev/stderr'.format(cmd),
shell = True)

def isSorted(iterable):
return sorted(iterable) == iterable

def fnConstructor(s):
if type(s) is str:
s = s.split(':')[-1]
else:
s = getHash(UTCDateTime())

badchars = re.compile(r'[^A-Za-z0-9_. ]+|^\.|\.$|^ | $|^$')
badsuffix = re.compile(r'(aux|com[1-9]|con|lpt[1-9]|prn)(\.|$)')

fn = badchars.sub('_', s)

if badsuffix.match(fn):
fn = '_' + fn
return fn

def getLogin():
return pwd.getpwuid(os.getuid())[0]

def getHash(time):
def createAmplitude(pickID, amp, unit, category, cinfo):
'''
:param time: time object for which a hash should be calculated
:type time: :class: `~obspy.core.utcdatetime.UTCDateTime` object
:return: str

:param pickID:
:param amp:
:param unit:
:param category:
:param cinfo:
:return:
'''
hg = hashlib.sha1()
hg.update(time.strftime('%Y-%m-%d %H:%M:%S.%f'))
return hg.hexdigest()
amplitude = ope.Amplitude()
amplitude.creation_info = cinfo
amplitude.generic_amplitude = amp
amplitude.unit = ope.AmplitudeUnit(unit)
amplitude.type = ope.AmplitudeCategory(category)
amplitude.pick_id = pickID
return amplitude

def createArrival(pickresID, cinfo, phase, azimuth=None, dist=None):
'''
createArrival - function to create an Obspy Arrival

def getOwner(fn):
return pwd.getpwuid(os.stat(fn).st_uid).pw_name

def getPatternLine(fn, pattern):
"""
Takes a file name and a pattern string to search for in the file and
returns the first line which contains the pattern string otherwise None.

:param fn: file name
:type fn: str
:param pattern: pattern string to search for
:type pattern: str
:return: the complete line containing pattern or None

>>> getPatternLine('utils.py', 'python')
'#!/usr/bin/env python\\n'
>>> print(getPatternLine('version.py', 'palindrome'))
None
"""
fobj = open(fn, 'r')
for line in fobj.readlines():
if pattern in line:
fobj.close()
return line

return None

def prepTimeAxis(stime, trace):
nsamp = trace.stats.npts
srate = trace.stats.sampling_rate
tincr = trace.stats.delta
etime = stime + nsamp / srate
time_ax = np.arange(stime, etime, tincr)
if len(time_ax) < nsamp:
print 'elongate time axes by one datum'
time_ax = np.arange(stime, etime + tincr, tincr)
elif len(time_ax) > nsamp:
print 'shorten time axes by one datum'
time_ax = np.arange(stime, etime - tincr, tincr)
if len(time_ax) != nsamp:
raise ValueError('{0} samples of data \n '
'{1} length of time vector \n'
'delta: {2}'.format(nsamp, len(time_ax), tincr))
return time_ax

def scaleWFData(data, factor=None, components='all'):
"""
produce scaled waveforms from given waveform data and a scaling factor,
waveform may be selected by their components name
:param data: waveform data to be scaled
:type data: `~obspy.core.stream.Stream` object
:param factor: scaling factor
:type factor: float
:param components: components labels for the traces in data to be scaled by
the scaling factor (optional, default: 'all')
:type components: tuple
:return: scaled waveform data
:rtype: `~obspy.core.stream.Stream` object
"""
if components is not 'all':
for comp in components:
if factor is None:
max_val = np.max(np.abs(data.select(component=comp)[0].data))
data.select(component=comp)[0].data /= 2 * max_val
:param pickresID: Resource identifier of the created pick
:type pickresID: :class: `~obspy.core.event.ResourceIdentifier` object
:param cinfo: An ObsPy :class: `~obspy.core.event.CreationInfo` object
holding information on the creation of the returned object
:type cinfo: :class: `~obspy.core.event.CreationInfo` object
:param phase: name of the arrivals seismic phase
:type phase: str
:param azimuth: azimuth between source and receiver
:type azimuth: float or int, optional
:param dist: distance between source and receiver
:type dist: float or int, optional
:return: An ObsPy :class: `~obspy.core.event.Arrival` object
'''
arrival = ope.Arrival()
arrival.creation_info = cinfo
arrival.pick_id = pickresID
arrival.phase = phase
if azimuth is not None:
arrival.azimuth = float(azimuth) if azimuth > -180 else azimuth + 360.
else:
data.select(component=comp)[0].data /= 2 * factor
else:
for tr in data:
if factor is None:
max_val = float(np.max(np.abs(tr.data)))
tr.data /= 2 * max_val
else:
tr.data /= 2 * factor

return data

def demeanTrace(trace, window):
"""
returns the DATA where each trace is demean by the average value within
WINDOW
:param trace: waveform trace object
:type trace: `~obspy.core.stream.Trace`
:param inoise: range of indices of DATA within the WINDOW
:type window: tuple
:return: trace
:rtype: `~obspy.core.stream.Trace`
"""
trace.data -= trace.data[window].mean()
return trace

def getGlobalTimes(stream):
min_start = UTCDateTime()
max_end = None
for trace in stream:
if trace.stats.starttime < min_start:
min_start = trace.stats.starttime
if max_end is None or trace.stats.endtime > max_end:
max_end = trace.stats.endtime
return min_start, max_end

arrival.azimuth = azimuth
arrival.distance = dist
return arrival

def createCreationInfo(agency_id=None, creation_time=None, author=None):
'''

:param agency_id:
:param creation_time:
:param author:
:return:
'''
if author is None:
author = getLogin()
if creation_time is None:
@@ -176,57 +71,6 @@ def createCreationInfo(agency_id=None, creation_time=None, author=None):
return ope.CreationInfo(agency_id=agency_id, author=author,
creation_time=creation_time)

def createResourceID(timetohash, restype, authority_id=None, hrstr=None):
'''

:param timetohash:
:param restype: type of the resource, e.g. 'orig', 'earthquake' ...
:type restype: str
:param authority_id: name of the institution carrying out the processing
:type authority_id: str, optional
:return:
'''
assert isinstance(timetohash, UTCDateTime), "'timetohash' is not an ObsPy" \
"UTCDateTime object"
hid = getHash(timetohash)
if hrstr is None:
resID = ope.ResourceIdentifier(restype + '/' + hid[0:6])
else:
resID = ope.ResourceIdentifier(restype + '/' + hrstr + '_' + hid[0:6])
if authority_id is not None:
resID.convertIDToQuakeMLURI(authority_id=authority_id)
return resID

def createOrigin(origintime, cinfo, latitude, longitude, depth):
'''
createOrigin - function to create an ObsPy Origin
:param origintime: the origins time of occurrence
:type origintime: :class: `~obspy.core.utcdatetime.UTCDateTime` object
:param latitude: latitude in decimal degree of the origins location
:type latitude: float
:param longitude: longitude in decimal degree of the origins location
:type longitude: float
:param depth: hypocentral depth of the origin
:type depth: float
:return: An ObsPy :class: `~obspy.core.event.Origin` object
'''

assert isinstance(origintime, UTCDateTime), "origintime has to be " \
"a UTCDateTime object, but " \
"actually is of type " \
"'%s'" % type(origintime)

origin = ope.Origin()
origin.time = origintime
origin.creation_info = cinfo
origin.latitude = latitude
origin.longitude = longitude
origin.depth = depth
return origin

def createEvent(origintime, cinfo, originloc=None, etype=None, resID=None,
authority_id=None):
'''
@@ -271,14 +115,60 @@ def createEvent(origintime, cinfo, originloc=None, etype=None, resID=None,
event.origins = [o]
return event

def createMagnitude(originID, cinfo):
'''
createMagnitude - function to create an ObsPy Magnitude object
:param originID:
:type originID:
:param cinfo:
:type cinfo:
:return:
'''
magnitude = ope.Magnitude()
magnitude.creation_info = cinfo
magnitude.origin_id = originID
return magnitude

def createOrigin(origintime, cinfo, latitude, longitude, depth):
'''
createOrigin - function to create an ObsPy Origin
:param origintime: the origins time of occurrence
:type origintime: :class: `~obspy.core.utcdatetime.UTCDateTime` object
:param cinfo:
:type cinfo:
:param latitude: latitude in decimal degree of the origins location
:type latitude: float
:param longitude: longitude in decimal degree of the origins location
:type longitude: float
:param depth: hypocentral depth of the origin
:type depth: float
:return: An ObsPy :class: `~obspy.core.event.Origin` object
'''

assert isinstance(origintime, UTCDateTime), "origintime has to be " \
"a UTCDateTime object, but " \
"actually is of type " \
"'%s'" % type(origintime)

origin = ope.Origin()
origin.time = origintime
origin.creation_info = cinfo
origin.latitude = latitude
origin.longitude = longitude
origin.depth = depth
return origin

def createPick(origintime, picknum, picktime, eventnum, cinfo, phase, station,
wfseedstr, authority_id):
'''
createPick - function to create an ObsPy Pick

:param origintime:
:type origintime:
:param picknum: number of the created pick
:type picknum: int
:param picktime:
:type picktime:
:param eventnum: human-readable event identifier
:type eventnum: str
:param cinfo: An ObsPy :class: `~obspy.core.event.CreationInfo` object
@@ -306,64 +196,43 @@ def createPick(origintime, picknum, picktime, eventnum, cinfo, phase, station,
pick.waveform_id = ope.ResourceIdentifier(id=wfseedstr, prefix='file:/')
return pick

def createArrival(pickresID, cinfo, phase, azimuth=None, dist=None):
def createResourceID(timetohash, restype, authority_id=None, hrstr=None):
'''
createArrival - function to create an Obspy Arrival
:param pickresID: Resource identifier of the created pick
:type pickresID: :class: `~obspy.core.event.ResourceIdentifier` object
:param eventnum: human-readable event identifier
:type eventnum: str
:param cinfo: An ObsPy :class: `~obspy.core.event.CreationInfo` object
holding information on the creation of the returned object
:type cinfo: :class: `~obspy.core.event.CreationInfo` object
:param phase: name of the arrivals seismic phase
:type phase: str
:param station: name of the station at which the seismic phase has been
picked
:type station: str

:param timetohash:
:type timetohash
:param restype: type of the resource, e.g. 'orig', 'earthquake' ...
:type restype: str
:param authority_id: name of the institution carrying out the processing
:type authority_id: str
:param azimuth: azimuth between source and receiver
:type azimuth: float or int, optional
:param dist: distance between source and receiver
:type dist: float or int, optional
:return: An ObsPy :class: `~obspy.core.event.Arrival` object
'''
arrival = ope.Arrival()
arrival.creation_info = cinfo
arrival.pick_id = pickresID
arrival.phase = phase
if azimuth is not None:
arrival.azimuth = float(azimuth) if azimuth > -180 else azimuth + 360.
else:
arrival.azimuth = azimuth
arrival.distance = dist
return arrival

def createMagnitude(originID, cinfo):
'''
createMagnitude - function to create an ObsPy Magnitude object
:param originID:
:param cinfo:
:param authority_id:
:type authority_id: str, optional
:param hrstr:
:type hrstr:
:return:
'''
magnitude = ope.Magnitude()
magnitude.creation_info = cinfo
magnitude.origin_id = originID
return magnitude
assert isinstance(timetohash, UTCDateTime), "'timetohash' is not an ObsPy" \
"UTCDateTime object"
hid = getHash(timetohash)
if hrstr is None:
resID = ope.ResourceIdentifier(restype + '/' + hid[0:6])
else:
resID = ope.ResourceIdentifier(restype + '/' + hrstr + '_' + hid[0:6])
if authority_id is not None:
resID.convertIDToQuakeMLURI(authority_id=authority_id)
return resID

def createAmplitude(pickID, amp, unit, category, cinfo):
amplitude = ope.Amplitude()
amplitude.creation_info = cinfo
amplitude.generic_amplitude = amp
amplitude.unit = ope.AmplitudeUnit(unit)
amplitude.type = ope.AmplitudeCategory(category)
amplitude.pick_id = pickID
return amplitude
def demeanTrace(trace, window):
"""
returns the DATA where each trace is demean by the average value within
WINDOW
:param trace: waveform trace object
:type trace: `~obspy.core.stream.Trace`
:param window:
:type window: tuple
:return: trace
:rtype: `~obspy.core.stream.Trace`
"""
trace.data -= trace.data[window].mean()
return trace

def findComboBoxIndex(combo_box, val):
"""
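demeanTrace above removes the mean of a given index window from a trace in place. An assumed usage example (values purely illustrative, demeanTrace imported from pylot.core.util.utils):

    import numpy as np
    from obspy import Trace

    tr = Trace(data=np.arange(10, dtype=float))
    noise_window = np.arange(0, 5)           # indices of the noise window
    tr = demeanTrace(tr, noise_window)       # subtracts the mean of samples 0..4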
@@ -372,11 +241,184 @@ def findComboBoxIndex(combo_box, val):
:param combo_box: Combo box object.
:type combo_box: QComboBox
:param val: Name of a combo box to search for.
:type val:
:return: index value of item with name val or 0
"""

return combo_box.findText(val) if combo_box.findText(val) is not -1 else 0

def fnConstructor(s):
'''

:param s:
:type s:
:return:
'''
if type(s) is str:
s = s.split(':')[-1]
else:
s = getHash(UTCDateTime())

badchars = re.compile(r'[^A-Za-z0-9_. ]+|^\.|\.$|^ | $|^$')
badsuffix = re.compile(r'(aux|com[1-9]|con|lpt[1-9]|prn)(\.|$)')

fn = badchars.sub('_', s)

if badsuffix.match(fn):
fn = '_' + fn
return fn

def getGlobalTimes(stream):
'''

:param stream:
:type stream
:return:
'''
min_start = UTCDateTime()
max_end = None
for trace in stream:
if trace.stats.starttime < min_start:
min_start = trace.stats.starttime
if max_end is None or trace.stats.endtime > max_end:
max_end = trace.stats.endtime
return min_start, max_end

def getHash(time):
'''
:param time: time object for which a hash should be calculated
:type time: :class: `~obspy.core.utcdatetime.UTCDateTime` object
:return: str
'''
hg = hashlib.sha1()
hg.update(time.strftime('%Y-%m-%d %H:%M:%S.%f'))
return hg.hexdigest()

def getLogin():
'''

:return:
'''
return pwd.getpwuid(os.getuid())[0]

def getOwner(fn):
'''

:param fn:
:type fn:
:return:
'''
return pwd.getpwuid(os.stat(fn).st_uid).pw_name

def getPatternLine(fn, pattern):
"""
Takes a file name and a pattern string to search for in the file and
returns the first line which contains the pattern string otherwise None.

:param fn: file name
:type fn: str
:param pattern: pattern string to search for
:type pattern: str
:return: the complete line containing pattern or None

>>> getPatternLine('utils.py', 'python')
'#!/usr/bin/env python\\n'
>>> print(getPatternLine('version.py', 'palindrome'))
None
"""
fobj = open(fn, 'r')
for line in fobj.readlines():
if pattern in line:
fobj.close()
return line

return None

def isSorted(iterable):
'''

:param iterable:
:type iterable:
:return:
'''
return sorted(iterable) == iterable

def prepTimeAxis(stime, trace):
'''

:param stime:
:type stime:
:param trace:
:type trace:
:return:
'''
nsamp = trace.stats.npts
srate = trace.stats.sampling_rate
tincr = trace.stats.delta
etime = stime + nsamp / srate
time_ax = np.arange(stime, etime, tincr)
if len(time_ax) < nsamp:
print 'elongate time axes by one datum'
time_ax = np.arange(stime, etime + tincr, tincr)
elif len(time_ax) > nsamp:
print 'shorten time axes by one datum'
time_ax = np.arange(stime, etime - tincr, tincr)
if len(time_ax) != nsamp:
raise ValueError('{0} samples of data \n '
'{1} length of time vector \n'
'delta: {2}'.format(nsamp, len(time_ax), tincr))
return time_ax

def scaleWFData(data, factor=None, components='all'):
"""
produce scaled waveforms from given waveform data and a scaling factor,
waveform may be selected by their components name
:param data: waveform data to be scaled
:type data: `~obspy.core.stream.Stream` object
:param factor: scaling factor
:type factor: float
:param components: components labels for the traces in data to be scaled by
the scaling factor (optional, default: 'all')
:type components: tuple
:return: scaled waveform data
:rtype: `~obspy.core.stream.Stream` object
"""
if components is not 'all':
for comp in components:
if factor is None:
max_val = np.max(np.abs(data.select(component=comp)[0].data))
data.select(component=comp)[0].data /= 2 * max_val
else:
data.select(component=comp)[0].data /= 2 * factor
else:
for tr in data:
if factor is None:
max_val = float(np.max(np.abs(tr.data)))
tr.data /= 2 * max_val
else:
tr.data /= 2 * factor

return data

def runProgram(cmd, parameter=None):
"""
run an external program specified by cmd with parameters input returning the
stdout output

:param cmd: name of the command to run
:type cmd: str
:param parameter: filename of parameter file or parameter string
:type parameter: str
:return: stdout output
:rtype: str
"""

if parameter:
cmd.strip()
cmd += ' %s 2>&1' % parameter

output = subprocess.check_output('{} | tee /dev/stderr'.format(cmd),
shell = True)

if __name__ == "__main__":
import doctest
doctest.testmod()
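getGlobalTimes above returns the earliest start and latest end time over all traces of a stream. An assumed usage example (getGlobalTimes imported from pylot.core.util.utils; read() without arguments returns ObsPy's bundled example stream):

    from obspy import read

    st = read()                       # ObsPy example stream
    tmin, tmax = getGlobalTimes(st)   # common time span of all traces
    print(tmin, tmax)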