[remove] moved correlation code from PyLoT to the seisobs utils scope
This commit is contained in:
parent 45184fd888
commit ae0cc5e160

README (35)

@@ -1,35 +0,0 @@

PyLoT

version: 0.1

The Python picking and Localisation Tool

This Python library contains a graphical user interface for picking
seismic phases. This software needs ObsPy (http://github.com/obspy/obspy/wiki)
and the PySide Qt4 bindings for Python to be installed first.

PILOT was originally developed in MathWorks' MATLAB. In order to
distribute PILOT without facing portability problems, it was decided
to redevelop the software package in Python. The great work of the ObsPy
group allows easy handling of large amounts of seismic data, and PyLoT will
benefit a lot compared to the former MATLAB version.

The development of PyLoT is part of the joint research project MAGS2.

staff:
======

original author(s): L. Kueperkoch, S. Wehling-Benatelli, M. Bischoff (PILOT)

developer(s): S. Wehling-Benatelli, L. Kueperkoch, K. Olbert, M. Bischoff,
              C. Wollin, M. Rische

others: A. Bruestle, T. Meier, W. Friederich

release notes:
==============

October 2013

@@ -22,14 +22,19 @@ staff:

 original author(s): L. Kueperkoch, S. Wehling-Benatelli, M. Bischoff (PILOT)

 developer(s): S. Wehling-Benatelli, L. Kueperkoch, K. Olbert, M. Bischoff,
-              C. Wollin, M. Rische
+              C. Wollin, M. Rische, M. Paffrath

 others: A. Bruestle, T. Meier, W. Friederich

 release notes:
 ==============

+### Features
+
+- consistent manual phase picking through:
+  1. predefined SNR-dependent zoom level
+  2.
+
 October 2016

@@ -1,55 +0,0 @@

#!/usr/bin/env python
# -*- coding: utf-8 -*-

from obspy.core import read
from obspy.signal.trigger import coincidenceTrigger


class CoincidenceTimes(object):
    def __init__(self, st, comp='Z', coinum=4, sta=1., lta=10., on=5., off=1.):
        _type = 'recstalta'
        self.coinclist = self.createCoincTriggerlist(data=st, trigcomp=comp,
                                                     coinum=coinum, sta=sta,
                                                     lta=lta, trigon=on,
                                                     trigoff=off, type=_type)

    def __str__(self):
        n = 1
        out = ''
        for time in self.getCoincTimes():
            out += 'event no. {0}: starttime is {1}\n'.format(n, time)
            n += 1
        return out

    def getCoincTimes(self):
        timelist = []
        for info in self.getCoincList():
            timelist.append(info['time'])

        return timelist

    def getCoincList(self):
        return self.coinclist

    def createCoincTriggerlist(self, data, trigcomp, coinum, sta, lta,
                               trigon, trigoff, type):
        '''
        uses a coincidence trigger to detect all events in the given
        dataset
        '''
        triggerlist = coincidenceTrigger(type, trigon, trigoff,
                                         data.select(component=trigcomp),
                                         coinum, sta=sta, lta=lta)
        return triggerlist


def main():
    data = read('/data/SDS/2014/1A/ZV??/?H?.D/*.365')
    data.filter(type='bandpass', freqmin=5., freqmax=30.)
    coincs = CoincidenceTimes(data)
    print(coincs)


if __name__ == '__main__':
    main()
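
The removed class targets the legacy camelCase ObsPy name coincidenceTrigger. For reference, a minimal sketch of the same detection with the current snake_case API, coincidence_trigger, reusing the parameters from main(); the SDS wildcard path is the site-specific one from the removed file, and this sketch is not part of the commit itself:

from obspy import read
from obspy.signal.trigger import coincidence_trigger

# read and band-pass filter the day files (site-specific path taken from main() above)
st = read('/data/SDS/2014/1A/ZV??/?H?.D/*.365')
st.filter('bandpass', freqmin=5., freqmax=30.)

# recursive STA/LTA (1 s / 10 s windows), trigger on/off at 5/1, at least 4 coincident stations
events = coincidence_trigger('recstalta', 5., 1., st.select(component='Z'), 4,
                             sta=1., lta=10.)
for n, event in enumerate(events, start=1):
    print('event no. {0}: starttime is {1}'.format(n, event['time']))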

@@ -1,107 +0,0 @@

#!/usr/bin/env python
# -*- coding: utf-8 -*-

import numpy as np


def crosscorrsingle(wf1, wf2, taumax):
    '''
    Calculates the cross-correlation between two waveforms up to a defined
    maximum time difference.

    :param wf1: first waveform data
    :type wf1: list
    :param wf2: second waveform data
    :type wf2: list
    :param taumax: maximum lag (in samples) between the two waveforms
    :type taumax: positive integer
    :return: the cross-correlation function 'c' and the lag vector 'l'
    :rtype: c and l are `numpy.ndarray`
    '''
    N = len(wf1)
    c = np.zeros(2 * taumax - 1)
    l = np.zeros(2 * taumax - 1)
    for tau in range(taumax):
        Cxyplus = 0
        Cxyminus = 0
        for n in range(N - tau):
            Cxy1plus = wf1[n] * wf2[n + tau]
            Cxy1minus = wf1[n + tau] * wf2[n]
            Cxyplus = Cxyplus + Cxy1plus
            Cxyminus = Cxyminus + Cxy1minus

        c[(taumax - 1) - tau] = Cxyminus
        c[(taumax - 1) + tau] = Cxyplus
        l[(taumax - 1) - tau] = -tau
        l[(taumax - 1) + tau] = tau
    return c, l
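
A quick sanity check for crosscorrsingle, assuming it can be imported from this module: for lags |tau| < taumax its output matches the central part of NumPy's full cross-correlation with the argument order swapped. The toy arrays below are made up for illustration.

import numpy as np

# toy data; any equal-length sequences with taumax <= len(wf1) will do
wf1 = np.array([1., 2., 3., 0., -1.])
wf2 = np.array([0., 1., 0., 2., 1.])
taumax = 3

c, l = crosscorrsingle(wf1, wf2, taumax)

# np.correlate(wf2, wf1, 'full')[(N-1)+tau] equals sum_n wf1[n] * wf2[n+tau],
# i.e. the value crosscorrsingle stores at lag +tau
N = len(wf1)
full = np.correlate(wf2, wf1, mode='full')
reference = full[(N - 1) - (taumax - 1):(N - 1) + taumax]

assert np.allclose(c, reference)
print(l)  # [-2. -1.  0.  1.  2.]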

def crosscorrnormcalc(weights, wfs):
    '''
    crosscorrnormcalc - calculates the normalization for the
    cross-correlation carried out by 'wfscrosscorr'

    :param weights: weighting factors for the single components
    :type weights: tuple
    :param wfs: tuple of `~numpy.array` objects containing waveform data
    :type wfs: tuple
    :return: a floating point number yielding the energy of the waveforms
        in 'wfs', weighted by 'weights'
    :rtype: float
    '''

    # check if the parameters are of the right type
    if not isinstance(weights, tuple):
        raise TypeError("type of 'weights' should be 'tuple', but is {0}".format(
            type(weights)))
    if not isinstance(wfs, tuple):
        raise TypeError(
            "type of parameter 'wfs' should be 'tuple', but is {0}".format(
                type(wfs)))
    sqrsumwfs = 0.
    for n, wf in enumerate(wfs):
        sqrsumwf = np.sum(weights[n] ** 2. * wf ** 2.)
        sqrsumwfs += sqrsumwf
    return np.sqrt(sqrsumwfs)
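
A small worked example of the normalization, assuming crosscorrnormcalc is importable from this module; the component data and weights are made up for illustration.

import numpy as np

# two components with weights 1 and 2 (illustrative values)
wfs = (np.array([1., 2., 3.]), np.array([0., 1., 0.]))
weights = (1., 2.)

norm = crosscorrnormcalc(weights, wfs)
# expected: sqrt(1**2 * (1 + 4 + 9) + 2**2 * (0 + 1 + 0)) = sqrt(18) ~= 4.243
print(norm)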

def wfscrosscorr(weights, wfs, taumax):
    '''
    wfscrosscorr - calculates successive cross-correlations from a set of
    waveforms stored in a matrix

    The base formula is:

        C(i) = SUM[p=1:nComponent] eP(p) * SUM[n=1:N] APp(x,n) * APp(y,n+i)
               / ( SQRT(SUM[p=1:nComponent] eP(p)^2 * SUM[n=1:N] APp(x,n)^2)
                 * SQRT(SUM[p=1:nComponent] eP(p)^2 * SUM[n=1:N] APp(y,n)^2) )

    where
        nComponent is the number of components,
        N is the number of samples,
        i is the lag index.

    input:
        APp  row vectors containing the waveforms of each component p for
             which the cross-correlation is calculated
        tPp  row vectors containing times
        eP   vector containing the weighting factors for the components
             (maximum size = [1x3])

    output:
        C    cross-correlation function
        L    lag vector

    author(s):
        SWB 26.01.2010 as arranged with Thomas Meier and Monika Bischoff

    :param weights: weighting factors for the single components
    :type weights: tuple
    :param wfs: tuple of `~numpy.array` objects containing waveform data
    :type wfs: tuple
    :param taumax: maximum time difference
    :type taumax: positive integer
    :return: the cross-correlation function, normalized by the waveform energy
    '''

    ccnorm = crosscorrnormcalc(weights, wfs[0])
    ccnorm *= crosscorrnormcalc(weights, wfs[1])

    c = 0.
    # iterate over the components of both waveform sets (not over the two sets themselves)
    for n in range(len(wfs[0])):
        cc, l = crosscorrsingle(wfs[0][n], wfs[1][n], taumax)
        # weight each component contribution with eP(p), as in the base formula above
        c += weights[n] * cc
    return c / ccnorm, l
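
The nested-tuple layout that wfscrosscorr expects is easy to get wrong, so a short usage sketch follows: wfs is a pair (x, y), where x and y are themselves tuples holding one array per component. All data below are placeholders, not values from the removed module.

import numpy as np

# two waveforms x and y, each with two components (placeholder samples)
x = (np.array([0., 1., 0., -1., 0.]), np.array([1., 0., -1., 0., 1.]))
y = (np.array([0., 0., 1., 0., -1.]), np.array([0., 1., 0., -1., 0.]))

C, L = wfscrosscorr(weights=(1., 1.), wfs=(x, y), taumax=3)
print(L)                # lags from -2 to 2
print(L[np.argmax(C)])  # lag with the best waveform match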

@@ -1,50 +0,0 @@

#!/usr/bin/env python
# -*- coding: utf-8 -*-

from obspy.signal.trigger import recursive_sta_lta, trigger_onset


def createSingleTriggerlist(st, station='ZV01', trigcomp='Z', stalta=(1, 10),
                            trigonoff=(6, 1)):
    '''
    uses a single-station trigger to create a trigger list for this station

    :param st: seismic data
    :type st: `~obspy.core.stream.Stream`
    :param station: station name to get triggers for (optional, default = ZV01)
    :type station: str
    :param trigcomp: component used for triggering (optional, default = Z)
    :type trigcomp: str
    :param stalta: STA and LTA window lengths in seconds (optional, default = (1, 10))
    :type stalta: tuple
    :param trigonoff: trigger on/off thresholds (optional, default = (6, 1))
    :type trigonoff: tuple
    :return: list of trigger times
    :rtype: list
    '''
    tr = st.copy().select(component=trigcomp, station=station)[0]
    df = tr.stats.sampling_rate

    cft = recursive_sta_lta(tr.data, int(stalta[0] * df), int(stalta[1] * df))
    triggers = trigger_onset(cft, trigonoff[0], trigonoff[1])
    trigg = []
    for time in triggers:
        trigg.append(tr.stats.starttime + time[0] / df)
    return trigg
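
A usage sketch for createSingleTriggerlist against ObsPy's bundled example stream (station RJOB); the thresholds here are illustrative, not tuned PyLoT values, and may need adjustment to produce onsets.

from obspy import read

st = read()  # ObsPy's three-component example data, station RJOB
onsets = createSingleTriggerlist(st, station='RJOB', trigcomp='Z',
                                 stalta=(1, 10), trigonoff=(3.5, 0.5))
for t in onsets:
    print(t)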

def createSubCoincTriggerlist(trig, station='ZV01'):
    '''
    makes a trigger list of the events that are triggered by the
    coincidence trigger and are also seen at the requested station

    :param trig: list containing triggers from the coincidence trigger
    :type trig: list
    :param station: station name to get triggers for (optional, default = ZV01)
    :type station: str
    :return: list of trigger times
    :rtype: list
    '''
    trigg = []
    for tri in trig:
        if station in tri['stations']:
            trigg.append(tri['time'])
    return trigg
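
createSubCoincTriggerlist expects the list of event dictionaries returned by ObsPy's coincidence trigger, each carrying 'time' and 'stations' keys. A minimal sketch, assuming a multi-station stream read from a placeholder path:

from obspy import read
from obspy.signal.trigger import coincidence_trigger

st = read('/path/to/waveforms/*')  # placeholder path, not a real archive
st.filter('bandpass', freqmin=5., freqmax=30.)
events = coincidence_trigger('recstalta', 6., 1., st, 3, sta=1., lta=10.)

# keep only the coincidence-triggered events that were also seen at ZV01
zv01_times = createSubCoincTriggerlist(events, station='ZV01')
print(zv01_times)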