Compare commits
25 commits: ae6c4966a9 ... 41c9183be3
Commits in this range (SHA1; author and date not shown):
41c9183be3, 28f75cedcb, 424d42aa1c, 2cea10088d, 466f19eb2e, 5d90904838, 29107ee40c, db11e125c0, b59232d77b, 176e93d833, 759e7bb848, 61c3f40063, 67f34cc871, f4f48a930f, a068bb8457, 452f2a2e18, c3a2ef5022, 8e7bd87711, d5817adc46, 14f01ec46d, 1b074d14ff, ce71c549ca, c4220b389e, 0f29d0e20d, cdcd226c87
PyLoT.py (4 changed lines)
@@ -2196,7 +2196,8 @@ class MainWindow(QMainWindow):
         if event.pylot_autopicks:
             self.drawPicks(picktype='auto')
         if event.pylot_picks or event.pylot_autopicks:
-            self.locateEventAction.setEnabled(True)
+            if not self._inputs.get('extent') == 'global':
+                self.locateEventAction.setEnabled(True)
             self.qualities_action.setEnabled(True)
             self.eventlist_xml_action.setEnabled(True)
 
@@ -2631,7 +2632,6 @@ class MainWindow(QMainWindow):
                            picks=self.getPicksOnStation(station, 'manual'),
                            autopicks=self.getPicksOnStation(station, 'auto'),
                            metadata=self.metadata, event=event,
-                           model=self.inputs.get('taup_model'),
                            filteroptions=self.filteroptions, wftype=wftype,
                            show_comp_data=self.dataPlot.comp_checkbox.isChecked())
         if self.filterActionP.isChecked():
@@ -9,7 +9,7 @@ PyLoT - the Python picking and Localization Tool
 
    This python library contains a graphical user interfaces for picking
    seismic phases. This software needs ObsPy (http://github.com/obspy/obspy/wiki)
-   and the Qt4 libraries to be installed first.
+   and the Qt libraries to be installed first.
 
    PILOT has been developed in Mathworks' MatLab. In order to distribute
    PILOT without facing portability problems, it has been decided to re-
@@ -501,7 +501,7 @@ defaults = {'datapath': {'type': str,
 
             'taup_model': {'type': str,
                            'tooltip': 'Define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6',
-                           'value': None,
+                           'value': 'iasp91',
                            'namestring': 'TauPy model'},
 
             'taup_phases': {'type': str,
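With the default changed from None to 'iasp91', traveltime estimation works out of the box. For orientation, a minimal sketch of how such a model name is typically used with ObsPy's taup module (illustration only, not PyLoT's own call; depth, distance and phases are arbitrary example values):

```python
# Sketch: computing theoretical arrivals from a TauPy model name such as 'iasp91'.
from obspy.taup import TauPyModel

model = TauPyModel(model='iasp91')
arrivals = model.get_travel_times(source_depth_in_km=10.0,
                                  distance_in_degree=45.0,
                                  phase_list=['P', 'S'])
for arrival in arrivals:
    print(arrival.name, arrival.time)
```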
@@ -21,6 +21,7 @@ try:
     from scipy.signal import tukey
 except ImportError:
     from scipy.signal.windows import tukey
 
 from obspy.core import Stream
 
 from pylot.core.pick.utils import PickingFailedException
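The widened import keeps the code working across SciPy releases, where the window functions live in scipy.signal.windows. A standalone sketch of the same shim together with a typical use of the window (the length and alpha below are arbitrary example values):

```python
# Compatibility shim for the Tukey window, mirroring the hunk above.
try:
    from scipy.signal import tukey            # older SciPy exposes the window at top level
except ImportError:
    from scipy.signal.windows import tukey    # newer SciPy keeps it in scipy.signal.windows

taper = tukey(1000, alpha=0.2)                # cosine taper over 10 % of the samples at each end
```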
@@ -521,7 +521,6 @@ class Array_map(QtWidgets.QWidget):
                                picks=self._parent.get_current_event().getPick(station),
                                autopicks=self._parent.get_current_event().getAutopick(station),
                                filteroptions=self._parent.filteroptions, metadata=self.metadata,
-                               model=self.parameter.get('taup_model'),
                                event=pyl_mw.get_current_event())
         except Exception as e:
             message = 'Could not generate Plot for station {st}.\n {er}'.format(st=station, er=e)
@@ -1871,13 +1871,14 @@ class PickDlg(QDialog):
 
     def __init__(self, parent=None, data=None, data_compare=None, station=None, network=None, location=None, picks=None,
                  autopicks=None, rotate=False, parameter=None, embedded=False, metadata=None, show_comp_data=False,
-                 event=None, filteroptions=None, model=None, wftype=None):
+                 event=None, filteroptions=None, wftype=None):
         super(PickDlg, self).__init__(parent, Qt.Window)
         self.orig_parent = parent
         self.setAttribute(Qt.WA_DeleteOnClose)
 
         # initialize attributes
         self.parameter = parameter
+        model = self.parameter.get('taup_model')
         self._embedded = embedded
         self.showCompData = show_comp_data
         self.station = station
@@ -2270,8 +2271,8 @@ class PickDlg(QDialog):
         arrivals = func[plot](source_origin.depth,
                               source_origin.latitude,
                               source_origin.longitude,
-                              station_coords['latitude'],
-                              station_coords['longitude'],
+                              station_coords.get('latitude'),
+                              station_coords.get('longitude'),
                               phases)
         self.arrivals = arrivals
 
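Switching from item access to dict.get makes the arrival computation tolerant of incomplete station metadata: a missing key now yields None instead of raising a KeyError. A minimal illustration (the station_coords dict here is made up):

```python
# dict.get vs. item access on possibly incomplete station metadata (illustration only).
station_coords = {'latitude': 48.3}      # no 'longitude' entry

lat = station_coords.get('latitude')     # 48.3
lon = station_coords.get('longitude')    # None instead of a KeyError
# station_coords['longitude'] would raise KeyError at this point
print(lat, lon)
```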
pylot/correlation/__init__.py (new file, 2 lines)
@@ -0,0 +1,2 @@
+# -*- coding: utf-8 -*-
+#
pylot/correlation/parameters_adriaarray.yaml (new file, 90 lines)
@@ -0,0 +1,90 @@
+############################# correlation parameters #####################################
+# min_corr_stacking: minimum correlation coefficient for building beam trace
+# min_corr_export: minimum correlation coefficient for pick export
+# min_stack: minimum number of stations for building beam trace
+# t_before: correlation window before pick
+# t_after: correlation window after pick#
+# cc_maxlag: maximum shift for initial correlation
+# cc_maxlag2: maximum shift for second (final) correlation (also for calculating pick uncertainty)
+# initial_pick_outlier_threshold: (hopefully) threshold for excluding large outliers of initial (AIC) picks
+# export_threshold: automatically exclude all onsets which deviate more than this threshold from corrected taup onsets
+# min_picks_export: minimum number of correlated picks for export
+# min_picks_autopylot: minimum number of reference autopicks picks to continue with event
+# check_RMS: do RMS check to search for restitution errors (very experimental)
+# use_taupy_onsets: use taupy onsets as reference picks instead of external picks
+# station_list: use the following stations as reference for stacking
+# use_stacked_trace: use existing stacked trace if found (spare re-computation)
+# data_dir: obspyDMT data subdirectory (e.g. 'raw', 'processed')
+# pickfile_extension: use quakeML files (PyLoT output) with the following extension, e.g. '_autopylot' for pickfiles
+# such as 'PyLoT_20170501_141822_autopylot.xml'
+
+logging: info
+pick_phases: ['P', 'S']
+
+# P-phase
+P:
+  min_corr_stacking: 0.8
+  min_corr_export: 0.6
+  min_stack: 20
+  t_before: 30.
+  t_after: 50.
+  cc_maxlag: 50.
+  cc_maxlag2: 5.
+  initial_pick_outlier_threshold: 30.
+  export_threshold: 2.5
+  min_picks_export: 100
+  min_picks_autopylot: 50
+  check_RMS: True
+  use_taupy_onsets: False
+  station_list: ['HU.MORH', 'HU.TIH', 'OX.FUSE', 'OX.BAD']
+  use_stacked_trace: False
+  data_dir: 'processed'
+  pickfile_extension: '_autopylot'
+  dt_stacking: [250, 250]
+
+  # filter for first correlation (rough)
+  filter_options:
+    freqmax: 0.5
+    freqmin: 0.03
+  # filter for second correlation (fine)
+  filter_options_final:
+    freqmax: 0.5
+    freqmin: 0.03
+
+  filter_type: bandpass
+  sampfreq: 20.0
+
+# S-phase
+S:
+  min_corr_stacking: 0.7
+  min_corr_export: 0.6
+  min_stack: 20
+  t_before: 60.
+  t_after: 60.
+  cc_maxlag: 100.
+  cc_maxlag2: 25.
+  initial_pick_outlier_threshold: 30.
+  export_threshold: 5.0
+  min_picks_export: 200
+  min_picks_autopylot: 50
+  check_RMS: True
+  use_taupy_onsets: False
+  station_list: ['HU.MORH','HU.TIH', 'OX.FUSE', 'OX.BAD']
+  use_stacked_trace: False
+  data_dir: 'processed'
+  pickfile_extension: '_autopylot'
+  dt_stacking: [250, 250]
+
+  # filter for first correlation (rough)
+  filter_options:
+    freqmax: 0.1
+    freqmin: 0.01
+
+  # filter for second correlation (fine)
+  filter_options_final:
+    freqmax: 0.2
+    freqmin: 0.01
+
+  filter_type: bandpass
+  sampfreq: 20.0
+
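The per-phase blocks are plain YAML mappings. The correlation script's own loading code is not visible here (its diff is suppressed below), but a hedged sketch of reading such a parameter file with PyYAML could look like this (assumes the file sits in the working directory):

```python
# Sketch: reading the per-phase correlation parameters with PyYAML.
import yaml

with open('parameters_adriaarray.yaml') as fid:
    params = yaml.safe_load(fid)

print(params['logging'], params['pick_phases'])
p_params = params['P']                        # parameters for the P phase
print(p_params['min_corr_stacking'], p_params['filter_options']['freqmin'])
```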
pylot/correlation/pick_correlation_correction.py (new file, 1987 lines)
File diff suppressed because it is too large.
pylot/correlation/submit_pick_corr_correction.sh (new executable file, 40 lines)
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+#ulimit -s 8192
+#ulimit -v $(ulimit -v | awk '{printf("%d",$1*0.95)}')
+#ulimit -v
+
+#655360
+
+source /opt/anaconda3/etc/profile.d/conda.sh
+conda activate pylot_311
+NSLOTS=20
+
+#qsub -l low -cwd -l "os=*stretch" -pe smp 40 submit_pick_corr_correction.sh
+#$ -l low
+#$ -l h_vmem=6G
+#$ -cwd
+#$ -pe smp 20
+#$ -N corr_pick
+
+
+export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot_tools/"
+export PYTHONPATH="$PYTHONPATH:/home/marcel/git/"
+export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot/"
+
+#export MKL_NUM_THREADS=${NSLOTS:=1}
+#export NUMEXPR_NUM_THREADS=${NSLOTS:=1}
+#export OMP_NUM_THREADS=${NSLOTS:=1}
+
+#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 0 -istop 100
+#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
+#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M6.0-6.5' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 0 -istop 100
+#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
+#python pick_correlation_correction.py 'H:\sciebo\dmt_database' 'H:\Sciebo\dmt_database\pylot_alparray_mantle_corr_S_0.01-0.2.in' -pd -n 4 -t
+
+pylot_infile='/home/marcel/.pylot/pylot_alparray_syn_fwi_mk6_it3.in'
+#pylot_infile='/home/marcel/.pylot/pylot_adriaarray_corr_P_and_S.in'
+
+# THIS SCRIPT SHOLD BE CALLED BY "submit_to_grid_engine.py" using the following line:
+python pick_correlation_correction.py $1 $pylot_infile -pd -n ${NSLOTS:=1} -istart $2 --params 'parameters_fwi_mk6_it3.yaml'
+#--event_blacklist eventlist.txt
pylot/correlation/submit_to_grid_engine.py (new executable file, 23 lines)
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+import subprocess
+
+fnames = [
+    ('/data/AlpArray_Data/dmt_database_synth_model_mk6_it3_no_rotation', 0),
+]
+
+#fnames = [('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_SKS-phase', 0),
+#          ('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_S-phase', 0),]
+
+####
+script_location = '/home/marcel/VersionCtrl/git/code_base/correlation_picker/submit_pick_corr_correction.sh'
+####
+
+for fnin, istart in fnames:
+    input_cmds = f'qsub -q low.q@minos15,low.q@minos14,low.q@minos13,low.q@minos12,low.q@minos11 {script_location} {fnin} {istart}'
+
+    print(input_cmds)
+    print(subprocess.check_output(input_cmds.split()))
+
+
+
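The submitter prints the raw qsub command and its output; note that subprocess.check_output raises CalledProcessError on a non-zero exit. A hedged variant (not part of the commit, paths and queue name are placeholders) that catches that case:

```python
#!/usr/bin/env python
# Sketch: the same submission loop with basic error handling (hypothetical variant).
import subprocess

script_location = './submit_pick_corr_correction.sh'   # placeholder path
fnames = [('/path/to/dmt_database', 0)]                 # placeholder database and start index

for fnin, istart in fnames:
    cmd = ['qsub', '-q', 'low.q', script_location, fnin, str(istart)]
    try:
        out = subprocess.check_output(cmd)
        print(out.decode())
    except subprocess.CalledProcessError as err:
        print(f'qsub failed with exit code {err.returncode}: {err.output}')
```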
pylot/correlation/utils.py (new file, 61 lines)
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import os
+import glob
+import json
+
+from obspy import read_events
+
+from pylot.core.util.dataprocessing import Metadata
+from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT
+
+
+def get_event_obspy_dmt(eventdir):
+    event_pkl_file = os.path.join(eventdir, 'info', 'event.pkl')
+    if not os.path.exists(event_pkl_file):
+        raise IOError('Could not find event path for event: {}'.format(eventdir))
+    event = qml_from_obspyDMT(event_pkl_file)
+    return event
+
+
+def get_event_pylot(eventdir, extension=''):
+    event_id = get_event_id(eventdir)
+    filename = os.path.join(eventdir, 'PyLoT_{}{}.xml'.format(event_id, extension))
+    if not os.path.isfile(filename):
+        return
+    cat = read_events(filename)
+    return cat[0]
+
+
+def get_event_id(eventdir):
+    event_id = os.path.split(eventdir)[-1]
+    return event_id
+
+
+def get_picks(eventdir, extension=''):
+    event_id = get_event_id(eventdir)
+    filename = 'PyLoT_{}{}.xml'
+    filename = filename.format(event_id, extension)
+    fpath = os.path.join(eventdir, filename)
+    fpaths = glob.glob(fpath)
+    if len(fpaths) == 1:
+        cat = read_events(fpaths[0])
+        picks = cat[0].picks
+        return picks
+    elif len(fpaths) == 0:
+        print('get_picks: File not found: {}'.format(fpath))
+        return
+    print(f'WARNING: Ambiguous pick file specification. Found the following pick files {fpaths}\nFilemask: {fpath}')
+    return
+
+
+def write_json(object, fname):
+    with open(fname, 'w') as outfile:
+        json.dump(object, outfile, sort_keys=True, indent=4)
+
+
+def get_metadata(eventdir):
+    metadata_path = os.path.join(eventdir, 'resp')
+    metadata = Metadata(inventory=metadata_path, verbosity=0)
+    return metadata
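Taken together, these helpers cover the per-event lookups the correlation workflow needs for an obspyDMT event directory. A hedged usage sketch (the directory path is a placeholder; requires PyLoT and its dependencies on the PYTHONPATH):

```python
# Sketch: typical use of the helpers above for one obspyDMT event directory.
from pylot.correlation.utils import (get_event_obspy_dmt, get_picks,
                                     get_metadata)

eventdir = '/data/dmt_database/20170501_141822.a'     # hypothetical event directory

event = get_event_obspy_dmt(eventdir)                 # event read from info/event.pkl
picks = get_picks(eventdir, extension='_autopylot')   # picks from PyLoT_<event_id>_autopylot.xml
metadata = get_metadata(eventdir)                     # inventory read from the 'resp' subdirectory

print(event, len(picks) if picks else 0, metadata)
```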
New test file (76 lines)
@@ -0,0 +1,76 @@
+import pytest
+from obspy import read, Trace, UTCDateTime
+
+from pylot.correlation.pick_correlation_correction import XCorrPickCorrection
+
+
+class TestXCorrPickCorrection():
+    def setup(self):
+        self.make_test_traces()
+        self.make_test_picks()
+        self.t_before = 2.
+        self.t_after = 2.
+        self.cc_maxlag = 0.5
+
+    def make_test_traces(self):
+        # take first trace of test Stream from obspy
+        tr1 = read()[0]
+        # filter trace
+        tr1.filter('bandpass', freqmin=1, freqmax=20)
+        # make a copy and shift the copy by 0.1 s
+        tr2 = tr1.copy()
+        tr2.stats.starttime += 0.1
+
+        self.trace1 = tr1
+        self.trace2 = tr2
+
+    def make_test_picks(self):
+        # create an artificial reference pick on reference trace (trace1) and another one on the 0.1 s shifted trace
+        self.tpick1 = UTCDateTime('2009-08-24T00:20:07.7')
+        # shift the second pick by 0.2 s, the correction should be around 0.1 s now
+        self.tpick2 = self.tpick1 + 0.2
+
+    def test_slice_trace_okay(self):
+
+        self.setup()
+        xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
+                                   t_before=self.t_before, t_after=self.t_after, cc_maxlag=self.cc_maxlag)
+
+        test_trace = self.trace1
+        pick_time = self.tpick2
+
+        sliced_trace = xcpc.slice_trace(test_trace, pick_time)
+        assert ((sliced_trace.stats.starttime == pick_time - self.t_before - self.cc_maxlag / 2)
+                and (sliced_trace.stats.endtime == pick_time + self.t_after + self.cc_maxlag / 2))
+
+    def test_slice_trace_fails(self):
+        self.setup()
+
+        test_trace = self.trace1
+        pick_time = self.tpick1
+
+        with pytest.raises(ValueError):
+            xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
+                                       t_before=self.t_before + 20, t_after=self.t_after, cc_maxlag=self.cc_maxlag)
+            xcpc.slice_trace(test_trace, pick_time)
+
+        with pytest.raises(ValueError):
+            xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
+                                       t_before=self.t_before, t_after=self.t_after + 50, cc_maxlag=self.cc_maxlag)
+            xcpc.slice_trace(test_trace, pick_time)
+
+    def test_cross_correlation(self):
+        self.setup()
+
+        # create XCorrPickCorrection object
+        xcpc = XCorrPickCorrection(self.tpick1, self.trace1, self.tpick2, self.trace2, t_before=self.t_before,
+                                   t_after=self.t_after, cc_maxlag=self.cc_maxlag)
+
+        # execute correlation
+        correction, cc_max, uncert, fwfm = xcpc.cross_correlation(False, '', '')
+
+        # define awaited test result
+        test_result = (-0.09983091718314982, 0.9578431835689154, 0.0015285160561610929, 0.03625786256084631)
+
+        # check results
+        assert pytest.approx(test_result, rel=1e-6) == (correction, cc_max, uncert, fwfm)
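The final assertion relies on pytest.approx accepting whole sequences of numbers and comparing them element-wise within the given relative tolerance. A tiny standalone illustration of that comparison style (the values below are made up):

```python
# pytest.approx compares tuples of floats element-wise within a relative tolerance.
import pytest

expected = (-0.0998309172, 0.9578431836)   # made-up reference values
measured = (-0.0998309171, 0.9578431835)   # within rel=1e-6 of the reference
assert pytest.approx(expected, rel=1e-6) == measured
```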