Compare commits
97 Commits
38-simplif
...
develop
Author | SHA1 | Date | |
---|---|---|---|
4bd2e78259 | |||
468a7721c8 | |||
555fb8a719 | |||
5a2a1fe990 | |||
64b719fd54 | |||
71d4269a4f | |||
81e34875b9 | |||
d7ee820de3 | |||
621cbbfbda | |||
050b9fb0c4 | |||
eb3cd713c6 | |||
18c37dfdd0 | |||
9333ebf7f3 | |||
8c46b1ed18 | |||
c743813446 | |||
41c9183be3 | |||
ae6c4966a9 | |||
e8a516d16b | |||
f78315dec4 | |||
28f75cedcb | |||
e02b62696d | |||
e4217f0e30 | |||
8f154e70d7 | |||
6542b6cc4f | |||
5ab6c494c5 | |||
3da47c6f6b | |||
cc7716a2b7 | |||
03947d2363 | |||
e1b0d48527 | |||
431dbe8924 | |||
63810730e5 | |||
f2159c47f9 | |||
d0fbb91ffe | |||
424d42aa1c | |||
2cea10088d | |||
5971508cab | |||
c765e7c66b | |||
466f19eb2e | |||
e6a4ba7ee2 | |||
5d90904838 | |||
7a13288c85 | |||
3f97097bf6 | |||
29107ee40c | |||
fa310461d0 | |||
42a7d12292 | |||
2e49813292 | |||
5d6f4619cc | |||
db11e125c0 | |||
b59232d77b | |||
176e93d833 | |||
759e7bb848 | |||
61c3f40063 | |||
213819c702 | |||
67f34cc871 | |||
f4f48a930f | |||
b41e2b2de6 | |||
a068bb8457 | |||
452f2a2e18 | |||
c3a2ef5022 | |||
8e7bd87711 | |||
d5817adc46 | |||
14f01ec46d | |||
1b074d14ff | |||
ce71c549ca | |||
c4220b389e | |||
0f29d0e20d | |||
e1e0913e3a | |||
cdcd226c87 | |||
5f53cc5365 | |||
6cce05b035 | |||
7326f061e5 | |||
1a18401fe3 | |||
ec930dbc12 | |||
b991f771af | |||
2c3b1876ab | |||
0acd23d4d0 | |||
f349c8bc7e | |||
6688ef845d | |||
5b18e9ab71 | |||
31ca0d7a85 | |||
c7f9ad4c6f | |||
65dbaad446 | |||
5b97d51517 | |||
b3fdbc811e | |||
9fce4998d3 | |||
c468bfbe84 | |||
4861d33e9a | |||
f5f4635c3d | |||
b12d92eebb | |||
e9da81376e | |||
e68fc849f0 | |||
efb117177c | |||
710ea57503 | |||
8aaad643ec | |||
bc808b66c2 | |||
472e5b3b9e | |||
|
503ea419c4 |
1
.gitignore
vendored
1
.gitignore
vendored
@ -2,3 +2,4 @@
|
|||||||
*~
|
*~
|
||||||
.idea
|
.idea
|
||||||
pylot/RELEASE-VERSION
|
pylot/RELEASE-VERSION
|
||||||
|
/tests/test_autopicker/dmt_database_test/
|
||||||
|
39
.mailmap
Normal file
39
.mailmap
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
Darius Arnold <Darius.Arnold@ruhr-uni-bochum.de> <Darius_A@web.de>
|
||||||
|
Darius Arnold <Darius.Arnold@ruhr-uni-bochum.de> <darius.arnold@rub.de>
|
||||||
|
Darius Arnold <Darius.Arnold@ruhr-uni-bochum.de> <darius.arnold@ruhr-uni-bochum.de>
|
||||||
|
Darius Arnold <Darius.Arnold@ruhr-uni-bochum.de> <mail@dariusarnold.de>
|
||||||
|
|
||||||
|
Dennis Wlecklik <dennisw@minos02.geophysik.ruhr-uni-bochum.de>
|
||||||
|
|
||||||
|
Jeldrik Gaal <jeldrikgaal@gmail.com>
|
||||||
|
|
||||||
|
Kaan Coekerim <kaan.coekerim@ruhr-uni-bochum.de>
|
||||||
|
Kaan Coekerim <kaan.coekerim@ruhr-uni-bochum.de> <kaan.coekerim@rub.de>
|
||||||
|
|
||||||
|
Ludger Kueperkoch <kueperkoch@igem-energie.de> <kueperkoch@bestec-for-nature.com>
|
||||||
|
Ludger Kueperkoch <kueperkoch@igem-energie.de> <ludger@quake2.(none)>
|
||||||
|
Ludger Kueperkoch <kueperkoch@igem-energie.de> <ludger@sauron.bestec-for-nature>
|
||||||
|
|
||||||
|
Marc S. Boxberg <marc.boxberg@rub.de>
|
||||||
|
|
||||||
|
Marcel Paffrath <marcel.paffrath@ruhr-uni-bochum.de> <marcel.paffrath@rub.de>
|
||||||
|
Marcel Paffrath <marcel.paffrath@ruhr-uni-bochum.de> <marcel@minos01.geophysik.ruhr-uni-bochum.de>
|
||||||
|
Marcel Paffrath <marcel.paffrath@ruhr-uni-bochum.de> <marcel@minos02.geophysik.ruhr-uni-bochum.de>
|
||||||
|
Marcel Paffrath <marcel.paffrath@ruhr-uni-bochum.de> <marcel@minos25.geophysik.ruhr-uni-bochum.de>
|
||||||
|
Marcel Paffrath <marcel.paffrath@ruhr-uni-bochum.de> <marcel@email.com>
|
||||||
|
|
||||||
|
Sally Zimmermann <sally.zimmermann@ruhr-uni-bochum.de>
|
||||||
|
|
||||||
|
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <sebastianw@minos01.geophysik.ruhr-uni-bochum.de>
|
||||||
|
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <sebastianw@minos02.geophysik.ruhr-uni-bochum.de>
|
||||||
|
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <sebastianw@minos22.geophysik.ruhr-uni-bochum.de>
|
||||||
|
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <sebastian.wehling-benatelli@scisys.de>
|
||||||
|
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <sebastian.wehling@rub.de>
|
||||||
|
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <sebastian.wehling@rub.de>
|
||||||
|
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <DarkBeQst@users.noreply.github.com>
|
||||||
|
|
||||||
|
Thomas Moeller <thomas.moeller@rub.de>
|
||||||
|
|
||||||
|
Ann-Christin Koch <ann-christin.koch@ruhr-uni-bochum.de> <Ann-Christin.Koch@ruhr-uni-bochum.de>
|
||||||
|
|
||||||
|
Sebastian Priebe <sebastian.priebe@rub.de>
|
210
PyLoT.py
210
PyLoT.py
@ -25,6 +25,7 @@ https://www.iconfinder.com/iconsets/flavour
|
|||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import json
|
import json
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import platform
|
import platform
|
||||||
import shutil
|
import shutil
|
||||||
@ -60,7 +61,7 @@ except ImportError:
|
|||||||
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
|
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
|
||||||
from matplotlib.figure import Figure
|
from matplotlib.figure import Figure
|
||||||
|
|
||||||
from pylot.core.analysis.magnitude import LocalMagnitude, MomentMagnitude, calcsourcespec
|
from pylot.core.analysis.magnitude import LocalMagnitude, MomentMagnitude
|
||||||
from pylot.core.io.data import Data
|
from pylot.core.io.data import Data
|
||||||
from pylot.core.io.inputs import FilterOptions, PylotParameter
|
from pylot.core.io.inputs import FilterOptions, PylotParameter
|
||||||
from autoPyLoT import autoPyLoT
|
from autoPyLoT import autoPyLoT
|
||||||
@ -72,19 +73,19 @@ from pylot.core.util.errors import DatastructureError, \
|
|||||||
OverwriteError
|
OverwriteError
|
||||||
from pylot.core.util.connection import checkurl
|
from pylot.core.util.connection import checkurl
|
||||||
from pylot.core.util.dataprocessing import Metadata, restitute_data
|
from pylot.core.util.dataprocessing import Metadata, restitute_data
|
||||||
from pylot.core.util.utils import fnConstructor, getLogin, \
|
from pylot.core.util.utils import fnConstructor, get_login, \
|
||||||
full_range, readFilterInformation, pick_color_plt, \
|
full_range, readFilterInformation, pick_color_plt, \
|
||||||
pick_linestyle_plt, identifyPhaseID, excludeQualityClasses, \
|
pick_linestyle_plt, identifyPhaseID, excludeQualityClasses, \
|
||||||
transform_colors_mpl, transform_colors_mpl_str, getAutoFilteroptions, check_all_obspy, \
|
transform_colors_mpl, transform_colors_mpl_str, getAutoFilteroptions, check_all_obspy, \
|
||||||
check_all_pylot, get_bool, get_None, get_pylot_eventfile_with_extension
|
check_all_pylot, get_bool, get_none
|
||||||
from pylot.core.util.gui import make_pen
|
from pylot.core.util.gui import make_pen
|
||||||
from pylot.core.util.event import Event
|
from pylot.core.util.event import Event
|
||||||
from pylot.core.io.location import create_creation_info, create_event
|
from pylot.core.io.location import create_creation_info, create_event
|
||||||
from pylot.core.util.widgets import FilterOptionsDialog, NewEventDlg, \
|
from pylot.core.util.widgets import FilterOptionsDialog, NewEventDlg, \
|
||||||
PylotCanvas, WaveformWidgetPG, PropertiesDlg, HelpForm, createAction, PickDlg, \
|
PylotCanvas, WaveformWidgetPG, PropertiesDlg, HelpForm, createAction, PickDlg, \
|
||||||
ComparisonWidget, TuneAutopicker, PylotParaBox, AutoPickDlg, CanvasWidget, AutoPickWidget, \
|
ComparisonWidget, TuneAutopicker, PylotParameterWidget, AutoPickDlg, CanvasWidget, AutoPickWidget, \
|
||||||
CompareEventsWidget, ProgressBarWidget, AddMetadataWidget, SingleTextLineDialog, LogWidget, PickQualitiesFromXml, \
|
CompareEventsWidget, ProgressBarWidget, AddMetadataWidget, SingleTextLineDialog, LogWidget, PickQualitiesFromXml, \
|
||||||
SourceSpecWindow, ChooseWaveFormWindow, SpectrogramTab, SearchFileByExtensionDialog
|
SpectrogramTab, SearchFileByExtensionDialog
|
||||||
from pylot.core.util.array_map import Array_map
|
from pylot.core.util.array_map import Array_map
|
||||||
from pylot.core.util.structure import DATASTRUCTURE
|
from pylot.core.util.structure import DATASTRUCTURE
|
||||||
from pylot.core.util.thread import Thread, Worker
|
from pylot.core.util.thread import Thread, Worker
|
||||||
@ -113,11 +114,7 @@ class MainWindow(QMainWindow):
|
|||||||
def __init__(self, parent=None, infile=None, reset_qsettings=False):
|
def __init__(self, parent=None, infile=None, reset_qsettings=False):
|
||||||
super(MainWindow, self).__init__(parent)
|
super(MainWindow, self).__init__(parent)
|
||||||
|
|
||||||
# check for default pylot.in-file
|
if infile and os.path.isfile(infile) is False:
|
||||||
if not infile:
|
|
||||||
infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
|
|
||||||
print('Using default input file {}'.format(infile))
|
|
||||||
if os.path.isfile(infile) is False:
|
|
||||||
infile = QFileDialog().getOpenFileName(caption='Choose PyLoT-input file')[0]
|
infile = QFileDialog().getOpenFileName(caption='Choose PyLoT-input file')[0]
|
||||||
|
|
||||||
if not os.path.exists(infile):
|
if not os.path.exists(infile):
|
||||||
@ -139,7 +136,7 @@ class MainWindow(QMainWindow):
|
|||||||
self.project.parameter = self._inputs
|
self.project.parameter = self._inputs
|
||||||
self.tap = None
|
self.tap = None
|
||||||
self.apw = None
|
self.apw = None
|
||||||
self.paraBox = None
|
self.parameterWidget = None
|
||||||
self.array_map = None
|
self.array_map = None
|
||||||
self._metadata = Metadata(verbosity=0)
|
self._metadata = Metadata(verbosity=0)
|
||||||
self._eventChanged = [False, False]
|
self._eventChanged = [False, False]
|
||||||
@ -181,6 +178,7 @@ class MainWindow(QMainWindow):
|
|||||||
self.autodata = Data(self)
|
self.autodata = Data(self)
|
||||||
|
|
||||||
self.fnames = None
|
self.fnames = None
|
||||||
|
self.fnames_comp = None
|
||||||
self._stime = None
|
self._stime = None
|
||||||
|
|
||||||
# track deleted picks for logging
|
# track deleted picks for logging
|
||||||
@ -190,13 +188,12 @@ class MainWindow(QMainWindow):
|
|||||||
self.table_headers = ['', 'Event', 'Time', 'Lat', 'Lon', 'Depth', 'Ml', 'Mw', '[N] MP', '[N] AP', 'Tuning Set',
|
self.table_headers = ['', 'Event', 'Time', 'Lat', 'Lon', 'Depth', 'Ml', 'Mw', '[N] MP', '[N] AP', 'Tuning Set',
|
||||||
'Test Set', 'Notes']
|
'Test Set', 'Notes']
|
||||||
|
|
||||||
# TODO: refactor rootpath to datapath
|
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
if settings.value("user/FullName", None) is None:
|
if settings.value("user/FullName", None) is None:
|
||||||
fulluser = QInputDialog.getText(self, "Enter Name:", "Full name")
|
fulluser = QInputDialog.getText(self, "Enter Name:", "Full name")
|
||||||
settings.setValue("user/FullName", fulluser)
|
settings.setValue("user/FullName", fulluser)
|
||||||
settings.setValue("user/Login", getLogin())
|
settings.setValue("user/Login", get_login())
|
||||||
if settings.value("agency_id", None) is None:
|
if settings.value("agency_id", None) is None:
|
||||||
agency = QInputDialog.getText(self,
|
agency = QInputDialog.getText(self,
|
||||||
"Enter authority/institution name:",
|
"Enter authority/institution name:",
|
||||||
@ -253,7 +250,7 @@ class MainWindow(QMainWindow):
|
|||||||
self._inputs.reset_defaults()
|
self._inputs.reset_defaults()
|
||||||
# check for default pylot.in-file
|
# check for default pylot.in-file
|
||||||
infile = os.path.join(pylot_config_dir, '.pylot.in')
|
infile = os.path.join(pylot_config_dir, '.pylot.in')
|
||||||
print('Using default input file {}'.format(infile))
|
logging.warning('Using default input file {}'.format(infile))
|
||||||
self._inputs.export2File(infile)
|
self._inputs.export2File(infile)
|
||||||
self.infile = infile
|
self.infile = infile
|
||||||
|
|
||||||
@ -688,10 +685,9 @@ class MainWindow(QMainWindow):
|
|||||||
# add scroll area used in case number of traces gets too high
|
# add scroll area used in case number of traces gets too high
|
||||||
self.wf_scroll_area = QtWidgets.QScrollArea(self)
|
self.wf_scroll_area = QtWidgets.QScrollArea(self)
|
||||||
self.wf_scroll_area.setVisible(False)
|
self.wf_scroll_area.setVisible(False)
|
||||||
self.no_data_label = QLabel('No Data')
|
self.no_data_label = QLabel('No Data. If data were already loaded, try to select the event again in the eventbox.')
|
||||||
self.no_data_label.setStyleSheet('color: red')
|
self.no_data_label.setStyleSheet('color: red')
|
||||||
self.no_data_label.setAlignment(Qt.AlignCenter)
|
self.no_data_label.setAlignment(Qt.AlignCenter)
|
||||||
|
|
||||||
# create central matplotlib figure canvas widget
|
# create central matplotlib figure canvas widget
|
||||||
self.init_wfWidget()
|
self.init_wfWidget()
|
||||||
|
|
||||||
@ -720,14 +716,14 @@ class MainWindow(QMainWindow):
|
|||||||
self.tabs.addTab(wf_tab, 'Waveform Plot')
|
self.tabs.addTab(wf_tab, 'Waveform Plot')
|
||||||
self.tabs.addTab(array_tab, 'Array Map')
|
self.tabs.addTab(array_tab, 'Array Map')
|
||||||
self.tabs.addTab(events_tab, 'Eventlist')
|
self.tabs.addTab(events_tab, 'Eventlist')
|
||||||
self.tabs.addTab(spectro_tab, 'Spectro')
|
#self.tabs.addTab(spectro_tab, 'Spectro')
|
||||||
|
|
||||||
self.wf_layout.addWidget(self.no_data_label)
|
self.wf_layout.addWidget(self.no_data_label)
|
||||||
self.wf_layout.addWidget(self.wf_scroll_area)
|
self.wf_layout.addWidget(self.wf_scroll_area)
|
||||||
self.wf_scroll_area.setWidgetResizable(True)
|
self.wf_scroll_area.setWidgetResizable(True)
|
||||||
self.init_array_tab()
|
self.init_array_tab()
|
||||||
self.init_event_table()
|
self.init_event_table()
|
||||||
self.init_spectro_tab()
|
#self.init_spectro_tab()
|
||||||
self.tabs.setCurrentIndex(0)
|
self.tabs.setCurrentIndex(0)
|
||||||
|
|
||||||
self.eventLabel = QLabel()
|
self.eventLabel = QLabel()
|
||||||
@ -1010,12 +1006,12 @@ class MainWindow(QMainWindow):
|
|||||||
events=events)
|
events=events)
|
||||||
if not sld.exec_():
|
if not sld.exec_():
|
||||||
return
|
return
|
||||||
fext = sld.comboBox.currentText()
|
|
||||||
# fext = '.xml'
|
filenames = sld.getChecked()
|
||||||
for event in events:
|
for event in events:
|
||||||
filename = get_pylot_eventfile_with_extension(event, fext)
|
for filename in filenames:
|
||||||
if filename:
|
if os.path.isfile(filename) and event.pylot_id in filename:
|
||||||
self.load_data(filename, draw=False, event=event, overwrite=True)
|
self.load_data(filename, draw=False, event=event, ask_user=False, merge_strategy=sld.merge_strategy)
|
||||||
refresh = True
|
refresh = True
|
||||||
if not refresh:
|
if not refresh:
|
||||||
return
|
return
|
||||||
@ -1024,8 +1020,8 @@ class MainWindow(QMainWindow):
|
|||||||
self.fill_eventbox()
|
self.fill_eventbox()
|
||||||
self.setDirty(True)
|
self.setDirty(True)
|
||||||
|
|
||||||
def load_data(self, fname=None, loc=False, draw=True, event=None, overwrite=False):
|
def load_data(self, fname=None, loc=False, draw=True, event=None, ask_user=True, merge_strategy='Overwrite',):
|
||||||
if not overwrite:
|
if ask_user:
|
||||||
if not self.okToContinue():
|
if not self.okToContinue():
|
||||||
return
|
return
|
||||||
if fname is None:
|
if fname is None:
|
||||||
@ -1034,14 +1030,33 @@ class MainWindow(QMainWindow):
|
|||||||
fname = self.filename_from_action(action)
|
fname = self.filename_from_action(action)
|
||||||
if not fname:
|
if not fname:
|
||||||
return
|
return
|
||||||
|
|
||||||
if not event:
|
if not event:
|
||||||
event = self.get_current_event()
|
event = self.get_current_event()
|
||||||
|
|
||||||
|
if event.picks and ask_user:
|
||||||
|
qmb = QMessageBox(self, icon=QMessageBox.Question,
|
||||||
|
text='Do you want to overwrite the data?',)
|
||||||
|
overwrite_button = qmb.addButton('Overwrite', QMessageBox.YesRole)
|
||||||
|
merge_button = qmb.addButton('Merge', QMessageBox.NoRole)
|
||||||
|
qmb.exec_()
|
||||||
|
|
||||||
|
if qmb.clickedButton() == overwrite_button:
|
||||||
|
merge_strategy = 'Overwrite'
|
||||||
|
elif qmb.clickedButton() == merge_button:
|
||||||
|
merge_strategy = 'Merge'
|
||||||
|
else:
|
||||||
|
return
|
||||||
|
|
||||||
data = Data(self, event)
|
data = Data(self, event)
|
||||||
try:
|
try:
|
||||||
data_new = Data(self, evtdata=str(fname))
|
data_new = Data(self, evtdata=str(fname))
|
||||||
# MP MP commented because adding several picks might cause inconsistencies
|
if merge_strategy == 'Overwrite':
|
||||||
data = data_new
|
data = data_new
|
||||||
# data += data_new
|
elif merge_strategy == 'Merge':
|
||||||
|
data += data_new
|
||||||
|
else:
|
||||||
|
raise NotImplementedError(f'Unknown merge strategy: {merge_strategy}')
|
||||||
except ValueError:
|
except ValueError:
|
||||||
qmb = QMessageBox(self, icon=QMessageBox.Question,
|
qmb = QMessageBox(self, icon=QMessageBox.Question,
|
||||||
text='Warning: Missmatch in event identifiers {} and {}. Continue?'.format(
|
text='Warning: Missmatch in event identifiers {} and {}. Continue?'.format(
|
||||||
@ -1129,16 +1144,19 @@ class MainWindow(QMainWindow):
|
|||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
def getWFFnames_from_eventbox(self, eventbox=None):
|
def getWFFnames_from_eventbox(self, eventbox: str = None, subpath: str = None) -> list:
|
||||||
'''
|
'''
|
||||||
Return waveform filenames from event in eventbox.
|
Return waveform filenames from event in eventbox.
|
||||||
'''
|
'''
|
||||||
# TODO: add dataStructure class for obspyDMT here, this is just a workaround!
|
# TODO: add dataStructure class for obspyDMT here, this is just a workaround!
|
||||||
eventpath = self.get_current_event_path(eventbox)
|
eventpath = self.get_current_event_path(eventbox)
|
||||||
basepath = eventpath.split(os.path.basename(eventpath))[0]
|
if subpath:
|
||||||
|
eventpath = os.path.join(eventpath, subpath)
|
||||||
|
if not os.path.isdir(eventpath):
|
||||||
|
return []
|
||||||
if self.dataStructure:
|
if self.dataStructure:
|
||||||
if not eventpath:
|
if not eventpath:
|
||||||
return
|
return []
|
||||||
fnames = [os.path.join(eventpath, f) for f in os.listdir(eventpath)]
|
fnames = [os.path.join(eventpath, f) for f in os.listdir(eventpath)]
|
||||||
else:
|
else:
|
||||||
raise DatastructureError('not specified')
|
raise DatastructureError('not specified')
|
||||||
@ -1192,7 +1210,7 @@ class MainWindow(QMainWindow):
|
|||||||
with open(eventlist_file, 'r') as infile:
|
with open(eventlist_file, 'r') as infile:
|
||||||
eventlist_subset = [os.path.join(basepath, filename.split('\n')[0]) for filename in
|
eventlist_subset = [os.path.join(basepath, filename.split('\n')[0]) for filename in
|
||||||
infile.readlines()]
|
infile.readlines()]
|
||||||
msg = 'Found file "eventlist.txt" in database path. WILL ONLY USE SELECTED EVENTS out of {} events ' \
|
msg = 'Found file "eventlist.txt" in datapath. WILL ONLY USE SELECTED EVENTS out of {} events ' \
|
||||||
'contained in this subset'
|
'contained in this subset'
|
||||||
print(msg.format(len(eventlist_subset)))
|
print(msg.format(len(eventlist_subset)))
|
||||||
eventlist = [eventname for eventname in eventlist if eventname in eventlist_subset]
|
eventlist = [eventname for eventname in eventlist if eventname in eventlist_subset]
|
||||||
@ -1217,49 +1235,34 @@ class MainWindow(QMainWindow):
|
|||||||
# get path from first event in list and split them
|
# get path from first event in list and split them
|
||||||
path = eventlist[0]
|
path = eventlist[0]
|
||||||
try:
|
try:
|
||||||
system_name = platform.system()
|
datapath = os.path.split(path)[0]
|
||||||
if system_name in ["Linux", "Darwin"]:
|
|
||||||
dirs = {
|
dirs = {
|
||||||
'database': path.split('/')[-2],
|
'datapath': datapath,
|
||||||
'datapath': os.path.split(path)[0], # path.split('/')[-3],
|
|
||||||
'rootpath': '/' + os.path.join(*path.split('/')[:-3])
|
|
||||||
}
|
|
||||||
elif system_name == "Windows":
|
|
||||||
rootpath = path.split('/')[:-3]
|
|
||||||
rootpath[0] += '/'
|
|
||||||
dirs = {
|
|
||||||
# TODO: Arrange path to meet Win standards
|
|
||||||
'database': path.split('/')[-2],
|
|
||||||
'datapath': path.split('/')[-3],
|
|
||||||
'rootpath': os.path.join(*rootpath)
|
|
||||||
}
|
}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
dirs = {
|
dirs = {
|
||||||
'database': '',
|
|
||||||
'datapath': '',
|
'datapath': '',
|
||||||
'rootpath': ''
|
|
||||||
}
|
}
|
||||||
print('Warning: Could not automatically init folder structure. ({})'.format(e))
|
print('Warning: Could not automatically init folder structure. ({})'.format(e))
|
||||||
|
|
||||||
settings = QSettings()
|
settings = QSettings()
|
||||||
settings.setValue("data/dataRoot", dirs['datapath']) # d irs['rootpath'])
|
settings.setValue("data/dataRoot", dirs['datapath'])
|
||||||
settings.sync()
|
settings.sync()
|
||||||
|
|
||||||
if not self.project.eventlist:
|
if not self.project.eventlist:
|
||||||
# init parameter object
|
# init parameter object
|
||||||
self.setParameter(show=False)
|
self.setParameter(show=False)
|
||||||
# hide all parameter (show all needed parameter later)
|
# hide all parameter (show all needed parameter later)
|
||||||
self.paraBox.hide_parameter()
|
self.parameterWidget.hide_parameter()
|
||||||
for directory in dirs.keys():
|
for directory in dirs.keys():
|
||||||
# set parameter
|
# set parameter
|
||||||
box = self.paraBox.boxes[directory]
|
box = self.parameterWidget.boxes[directory]
|
||||||
self.paraBox.setValue(box, dirs[directory])
|
self.parameterWidget.setValue(box, dirs[directory])
|
||||||
# show needed parameter in box
|
# show needed parameter in box
|
||||||
self.paraBox.show_parameter(directory)
|
self.parameterWidget.show_parameter(directory)
|
||||||
dirs_box = self.paraBox.get_groupbox_dialog('Directories')
|
dirs_box = self.parameterWidget.get_groupbox_dialog('Directories')
|
||||||
if not dirs_box.exec_():
|
if not dirs_box.exec_():
|
||||||
return
|
return
|
||||||
self.project.rootpath = dirs['rootpath']
|
|
||||||
self.project.datapath = dirs['datapath']
|
self.project.datapath = dirs['datapath']
|
||||||
else:
|
else:
|
||||||
if hasattr(self.project, 'datapath'):
|
if hasattr(self.project, 'datapath'):
|
||||||
@ -1268,7 +1271,6 @@ class MainWindow(QMainWindow):
|
|||||||
'Datapath missmatch to current project!')
|
'Datapath missmatch to current project!')
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
self.project.rootpath = dirs['rootpath']
|
|
||||||
self.project.datapath = dirs['datapath']
|
self.project.datapath = dirs['datapath']
|
||||||
|
|
||||||
self.project.add_eventlist(eventlist)
|
self.project.add_eventlist(eventlist)
|
||||||
@ -1356,11 +1358,10 @@ class MainWindow(QMainWindow):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
def modify_project_path(self, new_rootpath):
|
def modify_project_path(self, new_rootpath):
|
||||||
# TODO: change root to datapath
|
self.project.datapath = new_rootpath
|
||||||
self.project.rootpath = new_rootpath
|
|
||||||
for event in self.project.eventlist:
|
for event in self.project.eventlist:
|
||||||
event.rootpath = new_rootpath
|
event.datapath = new_rootpath
|
||||||
event.path = os.path.join(event.rootpath, event.datapath, event.database, event.pylot_id)
|
event.path = os.path.join(event.datapath, event.pylot_id)
|
||||||
event.path = event.path.replace('\\', '/')
|
event.path = event.path.replace('\\', '/')
|
||||||
event.path = event.path.replace('//', '/')
|
event.path = event.path.replace('//', '/')
|
||||||
|
|
||||||
@ -1554,7 +1555,7 @@ class MainWindow(QMainWindow):
|
|||||||
self.set_fname(self.get_data().getEventFileName(), type)
|
self.set_fname(self.get_data().getEventFileName(), type)
|
||||||
return self.get_fnames(type)
|
return self.get_fnames(type)
|
||||||
|
|
||||||
def saveData(self, event=None, directory=None, outformats=['.xml', '.cnv', '.obs', '_focmec.in', '.pha']):
|
def saveData(self, event=None, directory=None, outformats=None):
|
||||||
'''
|
'''
|
||||||
Save event data to directory with specified output formats.
|
Save event data to directory with specified output formats.
|
||||||
:param event: PyLoT Event, if not set current event will be used
|
:param event: PyLoT Event, if not set current event will be used
|
||||||
@ -1562,6 +1563,8 @@ class MainWindow(QMainWindow):
|
|||||||
:param outformats: str/list of output formats
|
:param outformats: str/list of output formats
|
||||||
:return:
|
:return:
|
||||||
'''
|
'''
|
||||||
|
if outformats is None:
|
||||||
|
outformats = ['.xml', '.cnv', '.obs', '_focmec.in', '.pha']
|
||||||
if not event:
|
if not event:
|
||||||
event = self.get_current_event()
|
event = self.get_current_event()
|
||||||
if not type(outformats) == list:
|
if not type(outformats) == list:
|
||||||
@ -1697,7 +1700,7 @@ class MainWindow(QMainWindow):
|
|||||||
|
|
||||||
# WIP JG
|
# WIP JG
|
||||||
def eventlistXml(self):
|
def eventlistXml(self):
|
||||||
path = self._inputs['rootpath'] + '/' + self._inputs['datapath'] + '/' + self._inputs['database']
|
path = self._inputs['datapath']
|
||||||
outpath = self.project.location[:self.project.location.rfind('/')]
|
outpath = self.project.location[:self.project.location.rfind('/')]
|
||||||
geteventlistfromxml(path, outpath)
|
geteventlistfromxml(path, outpath)
|
||||||
return
|
return
|
||||||
@ -1960,7 +1963,13 @@ class MainWindow(QMainWindow):
|
|||||||
|
|
||||||
def prepareLoadWaveformData(self):
|
def prepareLoadWaveformData(self):
|
||||||
self.fnames = self.getWFFnames_from_eventbox()
|
self.fnames = self.getWFFnames_from_eventbox()
|
||||||
self.fnames_syn = []
|
self.fnames_comp = []
|
||||||
|
fnames_comp = self.getWFFnames_from_eventbox(subpath='compare')
|
||||||
|
self.dataPlot.activateCompareOptions(bool(fnames_comp))
|
||||||
|
if fnames_comp:
|
||||||
|
if self.dataPlot.comp_checkbox.isChecked():
|
||||||
|
self.fnames_comp = fnames_comp
|
||||||
|
|
||||||
eventpath = self.get_current_event_path()
|
eventpath = self.get_current_event_path()
|
||||||
basepath = eventpath.split(os.path.basename(eventpath))[0]
|
basepath = eventpath.split(os.path.basename(eventpath))[0]
|
||||||
self.obspy_dmt = check_obspydmt_structure(basepath)
|
self.obspy_dmt = check_obspydmt_structure(basepath)
|
||||||
@ -1990,8 +1999,8 @@ class MainWindow(QMainWindow):
|
|||||||
|
|
||||||
if len(curr_event.origins) > 0:
|
if len(curr_event.origins) > 0:
|
||||||
origin_time = curr_event.origins[0].time
|
origin_time = curr_event.origins[0].time
|
||||||
tstart = settings.value('tstart') if get_None(settings.value('tstart')) else 0
|
tstart = settings.value('tstart') if get_none(settings.value('tstart')) else 0
|
||||||
tstop = settings.value('tstop') if get_None(settings.value('tstop')) else 0
|
tstop = settings.value('tstop') if get_none(settings.value('tstop')) else 0
|
||||||
tstart = origin_time + float(tstart)
|
tstart = origin_time + float(tstart)
|
||||||
tstop = origin_time + float(tstop)
|
tstop = origin_time + float(tstop)
|
||||||
else:
|
else:
|
||||||
@ -1999,7 +2008,7 @@ class MainWindow(QMainWindow):
|
|||||||
tstop = None
|
tstop = None
|
||||||
|
|
||||||
self.data.setWFData(self.fnames,
|
self.data.setWFData(self.fnames,
|
||||||
self.fnames_syn,
|
self.fnames_comp,
|
||||||
checkRotated=True,
|
checkRotated=True,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
tstart=tstart,
|
tstart=tstart,
|
||||||
@ -2007,7 +2016,7 @@ class MainWindow(QMainWindow):
|
|||||||
|
|
||||||
def prepareObspyDMT_data(self, eventpath):
|
def prepareObspyDMT_data(self, eventpath):
|
||||||
qcbox_processed = self.dataPlot.qcombo_processed
|
qcbox_processed = self.dataPlot.qcombo_processed
|
||||||
qcheckb_syn = self.dataPlot.syn_checkbox
|
qcheckb_syn = self.dataPlot.comp_checkbox
|
||||||
qcbox_processed.setEnabled(False)
|
qcbox_processed.setEnabled(False)
|
||||||
qcheckb_syn.setEnabled(False)
|
qcheckb_syn.setEnabled(False)
|
||||||
for fpath in os.listdir(eventpath):
|
for fpath in os.listdir(eventpath):
|
||||||
@ -2015,8 +2024,8 @@ class MainWindow(QMainWindow):
|
|||||||
if 'syngine' in fpath:
|
if 'syngine' in fpath:
|
||||||
eventpath_syn = os.path.join(eventpath, fpath)
|
eventpath_syn = os.path.join(eventpath, fpath)
|
||||||
qcheckb_syn.setEnabled(True)
|
qcheckb_syn.setEnabled(True)
|
||||||
if self.dataPlot.syn_checkbox.isChecked():
|
if self.dataPlot.comp_checkbox.isChecked():
|
||||||
self.fnames_syn = [os.path.join(eventpath_syn, filename) for filename in os.listdir(eventpath_syn)]
|
self.fnames_comp = [os.path.join(eventpath_syn, filename) for filename in os.listdir(eventpath_syn)]
|
||||||
if 'processed' in fpath:
|
if 'processed' in fpath:
|
||||||
qcbox_processed.setEnabled(True)
|
qcbox_processed.setEnabled(True)
|
||||||
if qcbox_processed.isEnabled():
|
if qcbox_processed.isEnabled():
|
||||||
@ -2143,10 +2152,11 @@ class MainWindow(QMainWindow):
|
|||||||
self.wf_scroll_area.setVisible(len(plots) > 0)
|
self.wf_scroll_area.setVisible(len(plots) > 0)
|
||||||
self.no_data_label.setVisible(not len(plots) > 0)
|
self.no_data_label.setVisible(not len(plots) > 0)
|
||||||
for times, data, times_syn, data_syn in plots:
|
for times, data, times_syn, data_syn in plots:
|
||||||
self.dataPlot.plotWidget.getPlotItem().plot(times, data,
|
self.dataPlot.plotWidget.getPlotItem().plot(np.array(times), np.array(data),
|
||||||
pen=self.dataPlot.pen_linecolor)
|
pen=self.dataPlot.pen_linecolor,
|
||||||
|
skipFiniteCheck=True)
|
||||||
if len(data_syn) > 0:
|
if len(data_syn) > 0:
|
||||||
self.dataPlot.plotWidget.getPlotItem().plot(times_syn, data_syn,
|
self.dataPlot.plotWidget.getPlotItem().plot(np.array(times_syn), np.array(data_syn),
|
||||||
pen=self.dataPlot.pen_linecolor_syn)
|
pen=self.dataPlot.pen_linecolor_syn)
|
||||||
self.dataPlot.reinitMoveProxy()
|
self.dataPlot.reinitMoveProxy()
|
||||||
self.highlight_stations()
|
self.highlight_stations()
|
||||||
@ -2186,6 +2196,7 @@ class MainWindow(QMainWindow):
|
|||||||
if event.pylot_autopicks:
|
if event.pylot_autopicks:
|
||||||
self.drawPicks(picktype='auto')
|
self.drawPicks(picktype='auto')
|
||||||
if event.pylot_picks or event.pylot_autopicks:
|
if event.pylot_picks or event.pylot_autopicks:
|
||||||
|
if not self._inputs.get('extent') == 'global':
|
||||||
self.locateEventAction.setEnabled(True)
|
self.locateEventAction.setEnabled(True)
|
||||||
self.qualities_action.setEnabled(True)
|
self.qualities_action.setEnabled(True)
|
||||||
self.eventlist_xml_action.setEnabled(True)
|
self.eventlist_xml_action.setEnabled(True)
|
||||||
@ -2297,7 +2308,7 @@ class MainWindow(QMainWindow):
|
|||||||
comp = self.getComponent()
|
comp = self.getComponent()
|
||||||
title = 'section: {0} components'.format(zne_text[comp])
|
title = 'section: {0} components'.format(zne_text[comp])
|
||||||
wfst = self.get_data().getWFData()
|
wfst = self.get_data().getWFData()
|
||||||
wfsyn = self.get_data().getSynWFData()
|
wfsyn = self.get_data().getAltWFdata()
|
||||||
if self.filterActionP.isChecked() and filter:
|
if self.filterActionP.isChecked() and filter:
|
||||||
self.filterWaveformData(plot=False, phase='P')
|
self.filterWaveformData(plot=False, phase='P')
|
||||||
elif self.filterActionS.isChecked() and filter:
|
elif self.filterActionS.isChecked() and filter:
|
||||||
@ -2306,7 +2317,7 @@ class MainWindow(QMainWindow):
|
|||||||
# wfst += self.get_data().getWFData().select(component=alter_comp)
|
# wfst += self.get_data().getWFData().select(component=alter_comp)
|
||||||
plotWidget = self.getPlotWidget()
|
plotWidget = self.getPlotWidget()
|
||||||
self.adjustPlotHeight()
|
self.adjustPlotHeight()
|
||||||
if get_bool(settings.value('large_dataset')):
|
if get_bool(settings.value('large_dataset')) == True:
|
||||||
self.plot_method = 'fast'
|
self.plot_method = 'fast'
|
||||||
else:
|
else:
|
||||||
self.plot_method = 'normal'
|
self.plot_method = 'normal'
|
||||||
@ -2416,7 +2427,7 @@ class MainWindow(QMainWindow):
|
|||||||
filterS = filteroptions['S']
|
filterS = filteroptions['S']
|
||||||
minP, maxP = filterP.getFreq()
|
minP, maxP = filterP.getFreq()
|
||||||
minS, maxS = filterS.getFreq()
|
minS, maxS = filterS.getFreq()
|
||||||
self.paraBox.params_to_gui()
|
self.parameterWidget.params_to_gui()
|
||||||
|
|
||||||
def getFilterOptions(self):
|
def getFilterOptions(self):
|
||||||
return self.filteroptions
|
return self.filteroptions
|
||||||
@ -2609,18 +2620,20 @@ class MainWindow(QMainWindow):
|
|||||||
print("Warning! No network, station, and location info available!")
|
print("Warning! No network, station, and location info available!")
|
||||||
return
|
return
|
||||||
self.update_status('picking on station {0}'.format(station))
|
self.update_status('picking on station {0}'.format(station))
|
||||||
data = self.get_data().getOriginalWFData().copy()
|
wfdata = self.get_data().getOriginalWFData().copy()
|
||||||
|
wfdata_comp = self.get_data().getAltWFdata().copy()
|
||||||
event = self.get_current_event()
|
event = self.get_current_event()
|
||||||
wftype = self.dataPlot.qcombo_processed.currentText() if self.obspy_dmt else None
|
wftype = self.dataPlot.qcombo_processed.currentText() if self.obspy_dmt else None
|
||||||
pickDlg = PickDlg(self, parameter=self._inputs,
|
pickDlg = PickDlg(self, parameter=self._inputs,
|
||||||
data=data.select(station=station),
|
data=wfdata.select(station=station),
|
||||||
|
data_compare=wfdata_comp.select(station=station),
|
||||||
station=station, network=network,
|
station=station, network=network,
|
||||||
location=location,
|
location=location,
|
||||||
picks=self.getPicksOnStation(station, 'manual'),
|
picks=self.getPicksOnStation(station, 'manual'),
|
||||||
autopicks=self.getPicksOnStation(station, 'auto'),
|
autopicks=self.getPicksOnStation(station, 'auto'),
|
||||||
metadata=self.metadata, event=event,
|
metadata=self.metadata, event=event,
|
||||||
model=self.inputs.get('taup_model'),
|
filteroptions=self.filteroptions, wftype=wftype,
|
||||||
filteroptions=self.filteroptions, wftype=wftype)
|
show_comp_data=self.dataPlot.comp_checkbox.isChecked())
|
||||||
if self.filterActionP.isChecked():
|
if self.filterActionP.isChecked():
|
||||||
pickDlg.currentPhase = "P"
|
pickDlg.currentPhase = "P"
|
||||||
pickDlg.filterWFData()
|
pickDlg.filterWFData()
|
||||||
@ -2999,10 +3012,16 @@ class MainWindow(QMainWindow):
|
|||||||
event = self.get_current_event()
|
event = self.get_current_event()
|
||||||
event.pylot_picks = {}
|
event.pylot_picks = {}
|
||||||
event.pylot_autopicks = {}
|
event.pylot_autopicks = {}
|
||||||
picksdict = picksdict_from_picks(evt=self.get_data().get_evt_data())
|
picksdict = picksdict_from_picks(evt=self.get_data().get_evt_data(), parameter=self.getParameter())
|
||||||
event.addPicks(picksdict['manual'])
|
event.addPicks(picksdict['manual'])
|
||||||
event.addAutopicks(picksdict['auto'])
|
event.addAutopicks(picksdict['auto'])
|
||||||
|
|
||||||
|
def getParameter(self):
|
||||||
|
if hasattr(self.project, 'parameter') and isinstance(self.project.parameter, PylotParameter):
|
||||||
|
return self.project.parameter
|
||||||
|
else:
|
||||||
|
return self._inputs
|
||||||
|
|
||||||
def drawPicks(self, station=None, picktype=None, stime=None):
|
def drawPicks(self, station=None, picktype=None, stime=None):
|
||||||
# if picktype not specified, draw both
|
# if picktype not specified, draw both
|
||||||
if not stime:
|
if not stime:
|
||||||
@ -3077,7 +3096,7 @@ class MainWindow(QMainWindow):
|
|||||||
|
|
||||||
if self.pg:
|
if self.pg:
|
||||||
if spe:
|
if spe:
|
||||||
if picks['epp'] and picks['lpp']:
|
if not self.plot_method == 'fast' and picks['epp'] and picks['lpp']:
|
||||||
pen = make_pen(picktype, phaseID, 'epp', quality)
|
pen = make_pen(picktype, phaseID, 'epp', quality)
|
||||||
self.drawnPicks[picktype][station].append(pw.plot([epp, epp], ylims,
|
self.drawnPicks[picktype][station].append(pw.plot([epp, epp], ylims,
|
||||||
alpha=.25, pen=pen, name='EPP'))
|
alpha=.25, pen=pen, name='EPP'))
|
||||||
@ -3156,8 +3175,8 @@ class MainWindow(QMainWindow):
|
|||||||
ttt = parameter['ttpatter']
|
ttt = parameter['ttpatter']
|
||||||
outfile = parameter['outpatter']
|
outfile = parameter['outpatter']
|
||||||
eventname = self.get_current_event_name()
|
eventname = self.get_current_event_name()
|
||||||
obsdir = os.path.join(self._inputs['rootpath'], self._inputs['datapath'], self._inputs['database'], eventname)
|
obsdir = os.path.join(self._inputs['datapath'], eventname)
|
||||||
self.saveData(event=self.get_current_event(), directory=obsdir, outformats='.obs')
|
self.saveData(event=self.get_current_event(), directory=obsdir, outformats=['.obs'])
|
||||||
filename = 'PyLoT_' + eventname
|
filename = 'PyLoT_' + eventname
|
||||||
locpath = os.path.join(locroot, 'loc', filename)
|
locpath = os.path.join(locroot, 'loc', filename)
|
||||||
phasefile = os.path.join(obsdir, filename + '.obs')
|
phasefile = os.path.join(obsdir, filename + '.obs')
|
||||||
@ -3571,7 +3590,7 @@ class MainWindow(QMainWindow):
|
|||||||
def calc_magnitude(self):
|
def calc_magnitude(self):
|
||||||
self.init_metadata()
|
self.init_metadata()
|
||||||
if not self.metadata:
|
if not self.metadata:
|
||||||
return None
|
return []
|
||||||
|
|
||||||
wf_copy = self.get_data().getWFData().copy()
|
wf_copy = self.get_data().getWFData().copy()
|
||||||
|
|
||||||
@ -3580,6 +3599,10 @@ class MainWindow(QMainWindow):
|
|||||||
for station in np.unique(list(self.getPicks('manual').keys()) + list(self.getPicks('auto').keys())):
|
for station in np.unique(list(self.getPicks('manual').keys()) + list(self.getPicks('auto').keys())):
|
||||||
wf_select += wf_copy.select(station=station)
|
wf_select += wf_copy.select(station=station)
|
||||||
|
|
||||||
|
if not wf_select:
|
||||||
|
logging.warning('Empty Stream in calc_magnitude. Return.')
|
||||||
|
return []
|
||||||
|
|
||||||
corr_wf = restitute_data(wf_select, self.metadata)
|
corr_wf = restitute_data(wf_select, self.metadata)
|
||||||
# calculate moment magnitude
|
# calculate moment magnitude
|
||||||
moment_mag = MomentMagnitude(corr_wf, self.get_data().get_evt_data(), self.inputs.get('vp'),
|
moment_mag = MomentMagnitude(corr_wf, self.get_data().get_evt_data(), self.inputs.get('vp'),
|
||||||
@ -3714,6 +3737,7 @@ class MainWindow(QMainWindow):
|
|||||||
if self.project.parameter:
|
if self.project.parameter:
|
||||||
# do this step to update default parameter on older PyLoT projects
|
# do this step to update default parameter on older PyLoT projects
|
||||||
self.project.parameter.reinit_default_parameters()
|
self.project.parameter.reinit_default_parameters()
|
||||||
|
PylotParameter.check_deprecated_parameters(self.project.parameter)
|
||||||
|
|
||||||
self._inputs = self.project.parameter
|
self._inputs = self.project.parameter
|
||||||
self.updateFilteroptions()
|
self.updateFilteroptions()
|
||||||
@ -3831,13 +3855,13 @@ class MainWindow(QMainWindow):
|
|||||||
|
|
||||||
def setParameter(self, checked=0, show=True):
|
def setParameter(self, checked=0, show=True):
|
||||||
if checked: pass # dummy argument to receive trigger signal (checked) if called by QAction
|
if checked: pass # dummy argument to receive trigger signal (checked) if called by QAction
|
||||||
if not self.paraBox:
|
if not self.parameterWidget:
|
||||||
self.paraBox = PylotParaBox(self._inputs, parent=self, windowflag=Qt.Window)
|
self.parameterWidget = PylotParameterWidget(self._inputs, parent=self, windowflag=Qt.Window)
|
||||||
self.paraBox.accepted.connect(self._setDirty)
|
self.parameterWidget.accepted.connect(self._setDirty)
|
||||||
self.paraBox.accepted.connect(self.filterOptionsFromParameter)
|
self.parameterWidget.accepted.connect(self.filterOptionsFromParameter)
|
||||||
if show:
|
if show:
|
||||||
self.paraBox.params_to_gui()
|
self.parameterWidget.params_to_gui()
|
||||||
self.paraBox.show()
|
self.parameterWidget.show()
|
||||||
|
|
||||||
def deleteAllAutopicks(self):
|
def deleteAllAutopicks(self):
|
||||||
qmb = QMessageBox(self, icon=QMessageBox.Question,
|
qmb = QMessageBox(self, icon=QMessageBox.Question,
|
||||||
@ -3884,16 +3908,18 @@ class Project(object):
|
|||||||
Pickable class containing information of a PyLoT project, like event lists and file locations.
|
Pickable class containing information of a PyLoT project, like event lists and file locations.
|
||||||
'''
|
'''
|
||||||
|
|
||||||
# TODO: remove rootpath
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.eventlist = []
|
self.eventlist = []
|
||||||
self.location = None
|
self.location = None
|
||||||
self.rootpath = None
|
|
||||||
self.datapath = None
|
self.datapath = None
|
||||||
self.dirty = False
|
self.dirty = False
|
||||||
self.parameter = None
|
self.parameter = None
|
||||||
self._table = None
|
self._table = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def rootpath(self):
|
||||||
|
return self.datapath
|
||||||
|
|
||||||
def add_eventlist(self, eventlist):
|
def add_eventlist(self, eventlist):
|
||||||
'''
|
'''
|
||||||
Add events from an eventlist containing paths to event directories.
|
Add events from an eventlist containing paths to event directories.
|
||||||
@ -3903,8 +3929,6 @@ class Project(object):
|
|||||||
return
|
return
|
||||||
for item in eventlist:
|
for item in eventlist:
|
||||||
event = Event(item)
|
event = Event(item)
|
||||||
event.rootpath = self.parameter['rootpath']
|
|
||||||
event.database = self.parameter['database']
|
|
||||||
event.datapath = self.parameter['datapath']
|
event.datapath = self.parameter['datapath']
|
||||||
if not event.path in self.getPaths():
|
if not event.path in self.getPaths():
|
||||||
self.eventlist.append(event)
|
self.eventlist.append(event)
|
||||||
|
49
README.md
49
README.md
@ -11,7 +11,7 @@ PILOT has originally been developed in Mathworks' MatLab. In order to distribute
|
|||||||
problems, it has been decided to redevelop the software package in Python. The great work of the ObsPy group allows easy
|
problems, it has been decided to redevelop the software package in Python. The great work of the ObsPy group allows easy
|
||||||
handling of a bunch of seismic data and PyLoT will benefit a lot compared to the former MatLab version.
|
handling of a bunch of seismic data and PyLoT will benefit a lot compared to the former MatLab version.
|
||||||
|
|
||||||
The development of PyLoT is part of the joint research project MAGS2 and AlpArray.
|
The development of PyLoT is part of the joint research project MAGS2, AlpArray and AdriaArray.
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
@ -27,58 +27,44 @@ Afterwards run (from the PyLoT main directory where the files *requirements.txt*
|
|||||||
conda env create -f pylot.yml
|
conda env create -f pylot.yml
|
||||||
or
|
or
|
||||||
|
|
||||||
conda create --name pylot_38 --file requirements.txt
|
conda create -c conda-forge --name pylot_311 python=3.11 --file requirements.txt
|
||||||
|
|
||||||
to create a new Anaconda environment called "pylot_38".
|
to create a new Anaconda environment called *pylot_311*.
|
||||||
|
|
||||||
Afterwards activate the environment by typing
|
Afterwards activate the environment by typing
|
||||||
|
|
||||||
conda activate pylot_38
|
conda activate pylot_311
|
||||||
|
|
||||||
#### Prerequisites:
|
#### Prerequisites:
|
||||||
|
|
||||||
In order to run PyLoT you need to install:
|
In order to run PyLoT you need to install:
|
||||||
|
|
||||||
- Python 3
|
- Python 3
|
||||||
- obspy
|
|
||||||
- pyside2
|
|
||||||
- pyqtgraph
|
|
||||||
- cartopy
|
- cartopy
|
||||||
|
- joblib
|
||||||
|
- obspy
|
||||||
|
- pyaml
|
||||||
|
- pyqtgraph
|
||||||
|
- pyside2
|
||||||
|
|
||||||
(the following are already dependencies of the above packages):
|
(the following are already dependencies of the above packages):
|
||||||
- scipy
|
- scipy
|
||||||
- numpy
|
- numpy
|
||||||
- matplotlib <= 3.3.x
|
- matplotlib
|
||||||
|
|
||||||
#### Some handwork:
|
#### Some handwork:
|
||||||
|
|
||||||
PyLoT needs a properties folder on your system to work. It should be situated in your home directory
|
Some extra information on error estimates (just needed for reading old PILOT data) and the Richter magnitude scaling
|
||||||
(on Windows usually C:/Users/*username*):
|
|
||||||
|
|
||||||
mkdir ~/.pylot
|
|
||||||
|
|
||||||
In the next step you have to copy some files to this directory:
|
|
||||||
|
|
||||||
*for local distance seismicity*
|
|
||||||
|
|
||||||
cp path-to-pylot/inputs/pylot_local.in ~/.pylot/pylot.in
|
|
||||||
|
|
||||||
*for regional distance seismicity*
|
|
||||||
|
|
||||||
cp path-to-pylot/inputs/pylot_regional.in ~/.pylot/pylot.in
|
|
||||||
|
|
||||||
*for global distance seismicity*
|
|
||||||
|
|
||||||
cp path-to-pylot/inputs/pylot_global.in ~/.pylot/pylot.in
|
|
||||||
|
|
||||||
and some extra information on error estimates (just needed for reading old PILOT data) and the Richter magnitude scaling
|
|
||||||
relation
|
relation
|
||||||
|
|
||||||
cp path-to-pylot/inputs/PILOT_TimeErrors.in path-to-pylot/inputs/richter_scaling.data ~/.pylot/
|
cp path-to-pylot/inputs/PILOT_TimeErrors.in path-to-pylot/inputs/richter_scaling.data ~/.pylot/
|
||||||
|
|
||||||
You may need to do some modifications to these files. Especially folder names should be reviewed.
|
You may need to do some modifications to these files. Especially folder names should be reviewed.
|
||||||
|
|
||||||
PyLoT has been tested on Mac OSX (10.11), Debian Linux 8 and on Windows 10.
|
PyLoT has been tested on Mac OSX (10.11), Debian Linux 8 and on Windows 10/11.
|
||||||
|
|
||||||
|
## Example Dataset
|
||||||
|
An example dataset with waveform data, metadata and automatic picks in the obspy-dmt dataset format for testing the teleseismic picking can be found at https://zenodo.org/doi/10.5281/zenodo.13759803
|
||||||
|
|
||||||
## Release notes
|
## Release notes
|
||||||
|
|
||||||
@ -87,6 +73,7 @@ PyLoT has been tested on Mac OSX (10.11), Debian Linux 8 and on Windows 10.
|
|||||||
- event organisation in project files and waveform visualisation
|
- event organisation in project files and waveform visualisation
|
||||||
- consistent manual phase picking through predefined SNR dependant zoom level
|
- consistent manual phase picking through predefined SNR dependant zoom level
|
||||||
- consistent automatic phase picking routines using Higher Order Statistics, AIC and Autoregression
|
- consistent automatic phase picking routines using Higher Order Statistics, AIC and Autoregression
|
||||||
|
- pick correlation correction for teleseismic waveforms
|
||||||
- interactive tuning of auto-pick parameters
|
- interactive tuning of auto-pick parameters
|
||||||
- uniform uncertainty estimation from waveform's properties for automatic and manual picks
|
- uniform uncertainty estimation from waveform's properties for automatic and manual picks
|
||||||
- pdf representation and comparison of picks taking the uncertainty intrinsically into account
|
- pdf representation and comparison of picks taking the uncertainty intrinsically into account
|
||||||
@ -95,7 +82,7 @@ PyLoT has been tested on Mac OSX (10.11), Debian Linux 8 and on Windows 10.
|
|||||||
|
|
||||||
#### Known issues:
|
#### Known issues:
|
||||||
|
|
||||||
We hope to solve these with the next release.
|
Current release is still in development progress and has several issues. We are currently lacking manpower, but hope to assess many of the issues in the near future.
|
||||||
|
|
||||||
## Staff
|
## Staff
|
||||||
|
|
||||||
@ -108,4 +95,4 @@ Others: A. Bruestle, T. Meier, W. Friederich
|
|||||||
|
|
||||||
[ObsPy]: http://github.com/obspy/obspy/wiki
|
[ObsPy]: http://github.com/obspy/obspy/wiki
|
||||||
|
|
||||||
April 2022
|
September 2024
|
41
autoPyLoT.py
41
autoPyLoT.py
@ -28,7 +28,7 @@ from pylot.core.util.dataprocessing import restitute_data, Metadata
|
|||||||
from pylot.core.util.defaults import SEPARATOR
|
from pylot.core.util.defaults import SEPARATOR
|
||||||
from pylot.core.util.event import Event
|
from pylot.core.util.event import Event
|
||||||
from pylot.core.util.structure import DATASTRUCTURE
|
from pylot.core.util.structure import DATASTRUCTURE
|
||||||
from pylot.core.util.utils import get_None, trim_station_components, check4gapsAndRemove, check4doubled, \
|
from pylot.core.util.utils import get_none, trim_station_components, check4gapsAndRemove, check4doubled, \
|
||||||
check4rotated
|
check4rotated
|
||||||
from pylot.core.util.version import get_git_version as _getVersionString
|
from pylot.core.util.version import get_git_version as _getVersionString
|
||||||
|
|
||||||
@ -91,9 +91,9 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
|
|||||||
sp=sp_info)
|
sp=sp_info)
|
||||||
print(splash)
|
print(splash)
|
||||||
|
|
||||||
parameter = get_None(parameter)
|
parameter = get_none(parameter)
|
||||||
inputfile = get_None(inputfile)
|
inputfile = get_none(inputfile)
|
||||||
eventid = get_None(eventid)
|
eventid = get_none(eventid)
|
||||||
|
|
||||||
fig_dict = None
|
fig_dict = None
|
||||||
fig_dict_wadatijack = None
|
fig_dict_wadatijack = None
|
||||||
@ -119,13 +119,9 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
|
|||||||
obspyDMT_wfpath = input_dict['obspyDMT_wfpath']
|
obspyDMT_wfpath = input_dict['obspyDMT_wfpath']
|
||||||
|
|
||||||
if not parameter:
|
if not parameter:
|
||||||
if inputfile:
|
if not inputfile:
|
||||||
|
print('Using default input parameter')
|
||||||
parameter = PylotParameter(inputfile)
|
parameter = PylotParameter(inputfile)
|
||||||
# iplot = parameter['iplot']
|
|
||||||
else:
|
|
||||||
infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
|
|
||||||
print('Using default input file {}'.format(infile))
|
|
||||||
parameter = PylotParameter(infile)
|
|
||||||
else:
|
else:
|
||||||
if not type(parameter) == PylotParameter:
|
if not type(parameter) == PylotParameter:
|
||||||
print('Wrong input type for parameter: {}'.format(type(parameter)))
|
print('Wrong input type for parameter: {}'.format(type(parameter)))
|
||||||
@ -140,11 +136,9 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
|
|||||||
if parameter.hasParam('datastructure'):
|
if parameter.hasParam('datastructure'):
|
||||||
# getting information on data structure
|
# getting information on data structure
|
||||||
datastructure = DATASTRUCTURE[parameter.get('datastructure')]()
|
datastructure = DATASTRUCTURE[parameter.get('datastructure')]()
|
||||||
dsfields = {'root': parameter.get('rootpath'),
|
dsfields = {'dpath': parameter.get('datapath'),}
|
||||||
'dpath': parameter.get('datapath'),
|
|
||||||
'dbase': parameter.get('database')}
|
|
||||||
|
|
||||||
exf = ['root', 'dpath', 'dbase']
|
exf = ['dpath']
|
||||||
|
|
||||||
if parameter['eventID'] != '*' and fnames == 'None':
|
if parameter['eventID'] != '*' and fnames == 'None':
|
||||||
dsfields['eventID'] = parameter['eventID']
|
dsfields['eventID'] = parameter['eventID']
|
||||||
@ -154,7 +148,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
|
|||||||
datastructure.setExpandFields(exf)
|
datastructure.setExpandFields(exf)
|
||||||
|
|
||||||
# check if default location routine NLLoc is available and all stations are used
|
# check if default location routine NLLoc is available and all stations are used
|
||||||
if get_None(parameter['nllocbin']) and station == 'all':
|
if get_none(parameter['nllocbin']) and station == 'all':
|
||||||
locflag = 1
|
locflag = 1
|
||||||
# get NLLoc-root path
|
# get NLLoc-root path
|
||||||
nllocroot = parameter.get('nllocroot')
|
nllocroot = parameter.get('nllocroot')
|
||||||
@ -190,15 +184,15 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
|
|||||||
if not input_dict:
|
if not input_dict:
|
||||||
# started in production mode
|
# started in production mode
|
||||||
datapath = datastructure.expandDataPath()
|
datapath = datastructure.expandDataPath()
|
||||||
if fnames == 'None' and parameter['eventID'] == '*':
|
if fnames in [None, 'None'] and parameter['eventID'] == '*':
|
||||||
# multiple event processing
|
# multiple event processing
|
||||||
# read each event in database
|
# read each event in database
|
||||||
events = [event for event in glob.glob(os.path.join(datapath, '*')) if
|
events = [event for event in glob.glob(os.path.join(datapath, '*')) if
|
||||||
(os.path.isdir(event) and not event.endswith('EVENTS-INFO'))]
|
(os.path.isdir(event) and not event.endswith('EVENTS-INFO'))]
|
||||||
elif fnames == 'None' and parameter['eventID'] != '*' and not type(parameter['eventID']) == list:
|
elif fnames in [None, 'None'] and parameter['eventID'] != '*' and not type(parameter['eventID']) == list:
|
||||||
# single event processing
|
# single event processing
|
||||||
events = glob.glob(os.path.join(datapath, parameter['eventID']))
|
events = glob.glob(os.path.join(datapath, parameter['eventID']))
|
||||||
elif fnames == 'None' and type(parameter['eventID']) == list:
|
elif fnames in [None, 'None'] and type(parameter['eventID']) == list:
|
||||||
# multiple event processing
|
# multiple event processing
|
||||||
events = []
|
events = []
|
||||||
for eventID in parameter['eventID']:
|
for eventID in parameter['eventID']:
|
||||||
@ -210,12 +204,10 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
|
|||||||
locflag = 2
|
locflag = 2
|
||||||
else:
|
else:
|
||||||
# started in tune or interactive mode
|
# started in tune or interactive mode
|
||||||
datapath = os.path.join(parameter['rootpath'],
|
datapath = parameter['datapath']
|
||||||
parameter['datapath'])
|
|
||||||
events = []
|
events = []
|
||||||
for eventID in eventid:
|
for eventID in eventid:
|
||||||
events.append(os.path.join(datapath,
|
events.append(os.path.join(datapath,
|
||||||
parameter['database'],
|
|
||||||
eventID))
|
eventID))
|
||||||
|
|
||||||
if not events:
|
if not events:
|
||||||
@ -242,12 +234,15 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
|
|||||||
data.get_evt_data().path = eventpath
|
data.get_evt_data().path = eventpath
|
||||||
print('Reading event data from filename {}...'.format(filename))
|
print('Reading event data from filename {}...'.format(filename))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
if type(e) == FileNotFoundError:
|
||||||
|
print('Creating new event file.')
|
||||||
|
else:
|
||||||
print('Could not read event from file {}: {}'.format(filename, e))
|
print('Could not read event from file {}: {}'.format(filename, e))
|
||||||
data = Data()
|
data = Data()
|
||||||
pylot_event = Event(eventpath) # event should be path to event directory
|
pylot_event = Event(eventpath) # event should be path to event directory
|
||||||
data.setEvtData(pylot_event)
|
data.setEvtData(pylot_event)
|
||||||
if fnames == 'None':
|
if fnames in [None, 'None']:
|
||||||
data.setWFData(glob.glob(os.path.join(datapath, event_datapath, '*')))
|
data.setWFData(glob.glob(os.path.join(event_datapath, '*')))
|
||||||
# the following is necessary because within
|
# the following is necessary because within
|
||||||
# multiple event processing no event ID is provided
|
# multiple event processing no event ID is provided
|
||||||
# in autopylot.in
|
# in autopylot.in
|
||||||
|
77
docs/correlation.md
Normal file
77
docs/correlation.md
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
# Pick-Correlation Correction
|
||||||
|
|
||||||
|
## Introduction
|
||||||
|
Currently, the pick-correlation correction algorithm is not accessible from they PyLoT GUI. The main file *pick_correlation_correction.py* is located in the directory *pylot\correlation*.
|
||||||
|
The program only works for an obspy dmt database structure.
|
||||||
|
|
||||||
|
The basic workflow of the algorithm is shown in the following diagram. The first step **(1)** is the normal (automatic) picking procedure in PyLoT. Everything from step **(2)** to **(5)** is part of the correlation correction algorithm.
|
||||||
|
|
||||||
|
*Note: The first step is not required in case theoretical onsets are used instead of external picks when the parameter use_taupy_onsets is set to True. However, an existing event quakeML (.xml) file generated by PyLoT might be required for each event in case not external picks are used.*
|
||||||
|
|
||||||
|
![images/workflow_stacking.png](images/workflow_stacking.png)
|
||||||
|
|
||||||
|
A detailed description of the algorithm can be found in the corresponding publication:
|
||||||
|
|
||||||
|
*Paffrath, M., Friederich, W., and the AlpArray and AlpArray-SWATH D Working Groups: Teleseismic P waves at the AlpArray seismic network: wave fronts, absolute travel times and travel-time residuals, Solid Earth, 12, 1635–1660, https://doi.org/10.5194/se-12-1635-2021, 2021.*
|
||||||
|
|
||||||
|
## How to use
|
||||||
|
To use the program you have to call the main program providing two mandatory arguments: a path to the obspy dmt database folder *dmt_database_path* and the path to the PyLoT infile *pylot.in* for picking of the beam trace:
|
||||||
|
|
||||||
|
```python pick_correlation_correction.py dmt_database_path pylot.in```
|
||||||
|
|
||||||
|
By default, the parameter file *parameters.yaml* is used. You can use the command line option *--params* to specify a different parameter file and other optional arguments such as *-pd* for plotting detailed information or *-n 4* to use 4 cores for parallel processing:
|
||||||
|
|
||||||
|
```python pick_correlation_correction.py dmt_database_path pylot.in --params parameters_adriaarray.yaml -pd -n 4```
|
||||||
|
|
||||||
|
## Cross-Correlation Parameters
|
||||||
|
|
||||||
|
The program uses the parameters in the file *parameters.yaml* by default. You can use the command line option *--params* to specify a different parameter file. An example of the parameter file is provided in the *correlation\parameters.yaml* file.
|
||||||
|
|
||||||
|
In the top level of the parameter file the logging level *logging* can be set, as well as a list of pick phases *pick_phases* (e.g. ['P', 'S']).
|
||||||
|
|
||||||
|
For each pick phase the different parameters can be set in the first sub-level of the parameter file, e.g.:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
logging: info
|
||||||
|
pick_phases: ['P', 'S']
|
||||||
|
|
||||||
|
P:
|
||||||
|
min_corr_stacking: 0.8
|
||||||
|
min_corr_export: 0.6
|
||||||
|
[...]
|
||||||
|
|
||||||
|
S:
|
||||||
|
min_corr_stacking: 0.7
|
||||||
|
[...]
|
||||||
|
```
|
||||||
|
|
||||||
|
The following parameters are available:
|
||||||
|
|
||||||
|
|
||||||
|
| Parameter Name | Description | Parameter Type |
|
||||||
|
|--------------------------------|----------------------------------------------------------------------------------------------------|----------------|
|
||||||
|
| min_corr_stacking | Minimum correlation coefficient for building beam trace | float |
|
||||||
|
| min_corr_export | Minimum correlation coefficient for pick export | float |
|
||||||
|
| min_stack | Minimum number of stations for building beam trace | int |
|
||||||
|
| t_before | Correlation window before reference pick | float |
|
||||||
|
| t_after | Correlation window after reference pick | float |
|
||||||
|
| cc_maxlag | Maximum shift for initial correlation | float |
|
||||||
|
| cc_maxlag2 | Maximum shift for second (final) correlation (also for calculating pick uncertainty) | float |
|
||||||
|
| initial_pick_outlier_threshold | Threshold for excluding large outliers of initial (AIC) picks | float |
|
||||||
|
| export_threshold | Automatically exclude all onsets which deviate more than this threshold from corrected taup onsets | float |
|
||||||
|
| min_picks_export | Minimum number of correlated picks for export | int |
|
||||||
|
| min_picks_autopylot | Minimum number of reference auto picks to continue with event | int |
|
||||||
|
| check_RMS | Do RMS check to search for restitution errors (very experimental) | bool |
|
||||||
|
| use_taupy_onsets | Use taupy onsets as reference picks instead of external picks | bool |
|
||||||
|
| station_list | Use the following stations as reference for stacking | list[str] |
|
||||||
|
| use_stacked_trace | Use existing stacked trace if found (spare re-computation) | bool |
|
||||||
|
| data_dir | obspyDMT data subdirectory (e.g. 'raw', 'processed') | str |
|
||||||
|
| pickfile_extension | Use quakeML files (PyLoT output) with the following extension | str |
|
||||||
|
| dt_stacking | Time difference for stacking window (in seconds) | list[float] |
|
||||||
|
| filter_options | Filter for first correlation (rough) | dict |
|
||||||
|
| filter_options_final | Filter for second correlation (fine) | dict |
|
||||||
|
| filter_type | Filter type (e.g. bandpass) | str |
|
||||||
|
| sampfreq | Sampling frequency (in Hz) | float |
|
||||||
|
|
||||||
|
## Example Dataset
|
||||||
|
An example dataset with waveform data, metadata and automatic picks in the obspy-dmt dataset format for testing can be found at https://zenodo.org/doi/10.5281/zenodo.13759803
|
@ -203,8 +203,6 @@ The meaning of the header entries is:
|
|||||||
PyLoT GUI starts with an empty project. To add events, use the add event data button. Select one or multiple folders
|
PyLoT GUI starts with an empty project. To add events, use the add event data button. Select one or multiple folders
|
||||||
containing events.
|
containing events.
|
||||||
|
|
||||||
[//]: <> (TODO: explain _Directories: Root path, Data path, Database path_)
|
|
||||||
|
|
||||||
### Saving projects
|
### Saving projects
|
||||||
|
|
||||||
Save the current project from the menu with File->Save project or File->Save project as. PyLoT uses ``.plp`` files to
|
Save the current project from the menu with File->Save project or File->Save project as. PyLoT uses ``.plp`` files to
|
||||||
|
BIN
docs/images/workflow_stacking.png
Normal file
BIN
docs/images/workflow_stacking.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 64 KiB |
185
docs/tuning.md
185
docs/tuning.md
@ -7,121 +7,122 @@ A description of the parameters used for determining automatic picks.
|
|||||||
Parameters applied to the traces before picking algorithm starts.
|
Parameters applied to the traces before picking algorithm starts.
|
||||||
|
|
||||||
| Name | Description |
|
| Name | Description |
|
||||||
|---------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
|---------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||||
| *P Start*, *P
|
| *P Start*, *P | |
|
||||||
Stop* | Define time interval relative to trace start time for CF calculation on vertical trace. Value is relative to theoretical onset time if 'Use TauPy' option is enabled in main settings of 'Tune Autopicker' dialogue. |
|
| Stop* | Define time interval relative to trace start time for CF calculation on vertical trace. Value is relative to theoretical onset time if 'Use TauPy' option is enabled in main settings of 'Tune Autopicker' dialogue. |
|
||||||
| *S Start*, *S
|
| *S Start*, *S | |
|
||||||
Stop* | Define time interval relative to trace start time for CF calculation on horizontal traces. Value is relative to theoretical onset time if 'Use TauPy' option is enabled in main settings of 'Tune Autopicker' dialogue. |
|
| Stop* | Define time interval relative to trace start time for CF calculation on horizontal traces. Value is relative to theoretical onset time if 'Use TauPy' option is enabled in main settings of 'Tune Autopicker' dialogue. |
|
||||||
| *Bandpass
|
| *Bandpass | |
|
||||||
Z1* | Filter settings for Butterworth bandpass applied to vertical trace for calculation of initial P pick. |
|
| Z1* | Filter settings for Butterworth bandpass applied to vertical trace for calculation of initial P pick. |
|
||||||
| *Bandpass
|
| *Bandpass | |
|
||||||
Z2* | Filter settings for Butterworth bandpass applied to vertical trace for calculation of precise P pick. |
|
| Z2* | Filter settings for Butterworth bandpass applied to vertical trace for calculation of precise P pick. |
|
||||||
| *Bandpass
|
| *Bandpass | |
|
||||||
H1* | Filter settings for Butterworth bandpass applied to horizontal traces for calculation of initial S pick. |
|
| H1* | Filter settings for Butterworth bandpass applied to horizontal traces for calculation of initial S pick. |
|
||||||
| *Bandpass
|
| *Bandpass | |
|
||||||
H2* | Filter settings for Butterworth bandpass applied to horizontal traces for calculation of precise S pick. |
|
| H2* | Filter settings for Butterworth bandpass applied to horizontal traces for calculation of precise S pick. |
|
||||||
|
|
||||||
## Inital P pick
|
## Inital P pick
|
||||||
|
|
||||||
Parameters used for determination of initial P pick.
|
Parameters used for determination of initial P pick.
|
||||||
|
|
||||||
| Name | Description |
|
| Name | Description |
|
||||||
|--------------|------------------------------------------------------------------------------------------------------------------------------|
|
|-------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|
|
||||||
| *
|
| * | |
|
||||||
tLTA* | Size of gliding LTA window in seconds used for calculation of HOS-CF. |
|
| tLTA* | Size of gliding LTA window in seconds used for calculation of HOS-CF. |
|
||||||
| *pickwin
|
| *pickwin | |
|
||||||
P* | Size of time window in seconds in which the minimum of the AIC-CF in front of the maximum of the HOS-CF is determined. |
|
| P* | Size of time window in seconds in which the minimum of the AIC-CF in front of the maximum of the HOS-CF is determined. |
|
||||||
| *
|
| * | |
|
||||||
AICtsmooth* | Average of samples in this time window will be used for smoothing of the AIC-CF. |
|
| AICtsmooth* | Average of samples in this time window will be used for smoothing of the AIC-CF. |
|
||||||
| *
|
| * | |
|
||||||
checkwinP* | Time in front of the global maximum of the HOS-CF in which to search for a second local extrema. |
|
| checkwinP* | Time in front of the global maximum of the HOS-CF in which to search for a second local extrema. |
|
||||||
| *minfactorP* | Used with *
|
| *minfactorP* | Used with * |
|
||||||
checkwinP*. If a second local maximum is found, it has to be at least as big as the first maximum * *minfactorP*. |
|
| checkwinP*. If a second local maximum is found, it has to be at least as big as the first maximum * *minfactorP*. | |
|
||||||
| *
|
| * | |
|
||||||
tsignal* | Time window in seconds after the initial P pick used for determining signal amplitude. |
|
| tsignal* | Time window in seconds after the initial P pick used for determining signal amplitude. |
|
||||||
| *
|
| * | |
|
||||||
tnoise* | Time window in seconds in front of initial P pick used for determining noise amplitude. |
|
| tnoise* | Time window in seconds in front of initial P pick used for determining noise amplitude. |
|
||||||
| *tsafetey* | Time in seconds between *tsignal* and *
|
| *tsafetey* | Time in seconds between *tsignal* and * |
|
||||||
tnoise*. |
|
| tnoise*. | |
|
||||||
| *
|
| * | |
|
||||||
tslope* | Time window in seconds after initial P pick in which the slope of the onset is calculated. |
|
| tslope* | Time window in seconds after initial P pick in which the slope of the onset is calculated. |
|
||||||
|
|
||||||
## Inital S pick
|
## Inital S pick
|
||||||
|
|
||||||
Parameters used for determination of initial S pick
|
Parameters used for determination of initial S pick
|
||||||
|
|
||||||
| Name | Description |
|
| Name | Description |
|
||||||
|---------------|------------------------------------------------------------------------------------------------------------------------------|
|
|-------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------|
|
||||||
| *
|
| * | |
|
||||||
tdet1h* | Length of time window in seconds in which AR params of the waveform are determined. |
|
| tdet1h* | Length of time window in seconds in which AR params of the waveform are determined. |
|
||||||
| *
|
| * | |
|
||||||
tpred1h* | Length of time window in seconds in which the waveform is predicted using the AR model. |
|
| tpred1h* | Length of time window in seconds in which the waveform is predicted using the AR model. |
|
||||||
| *
|
| * | |
|
||||||
AICtsmoothS* | Average of samples in this time window is used for smoothing the AIC-CF. |
|
| AICtsmoothS* | Average of samples in this time window is used for smoothing the AIC-CF. |
|
||||||
| *
|
| * | |
|
||||||
pickwinS* | Time window in which the minimum in the AIC-CF in front of the maximum in the ARH-CF is determined. |
|
| pickwinS* | Time window in which the minimum in the AIC-CF in front of the maximum in the ARH-CF is determined. |
|
||||||
| *
|
| * | |
|
||||||
checkwinS* | Time in front of the global maximum of the ARH-CF in which to search for a second local extrema. |
|
| checkwinS* | Time in front of the global maximum of the ARH-CF in which to search for a second local extrema. |
|
||||||
| *minfactorP* | Used with *
|
| *minfactorP* | Used with * |
|
||||||
checkwinS*. If a second local maximum is found, it has to be at least as big as the first maximum * *minfactorS*. |
|
| checkwinS*. If a second local maximum is found, it has to be at least as big as the first maximum * *minfactorS*. | |
|
||||||
| *
|
| * | |
|
||||||
tsignal* | Time window in seconds after the initial P pick used for determining signal amplitude. |
|
| tsignal* | Time window in seconds after the initial P pick used for determining signal amplitude. |
|
||||||
| *
|
| * | |
|
||||||
tnoise* | Time window in seconds in front of initial P pick used for determining noise amplitude. |
|
| tnoise* | Time window in seconds in front of initial P pick used for determining noise amplitude. |
|
||||||
| *tsafetey* | Time in seconds between *tsignal* and *
|
| *tsafetey* | Time in seconds between *tsignal* and * |
|
||||||
tnoise*. |
|
| tnoise*. | |
|
||||||
| *
|
| * | |
|
||||||
tslope* | Time window in seconds after initial P pick in which the slope of the onset is calculated. |
|
| tslope* | Time window in seconds after initial P pick in which the slope of the onset is calculated. |
|
||||||
|
|
||||||
## Precise P pick
|
## Precise P pick
|
||||||
|
|
||||||
Parameters used for determination of precise P pick.
|
Parameters used for determination of precise P pick.
|
||||||
|
|
||||||
| Name | Description |
|
| Name | Description |
|
||||||
|--------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
|-------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||||
| *Precalcwin* | Time window in seconds for recalculation of the HOS-CF. The new CF will be two times the size of *
|
| *Precalcwin* | Time window in seconds for recalculation of the HOS-CF. The new CF will be two times the size of * |
|
||||||
Precalcwin*, since it will be calculated from the initial pick to +/- *Precalcwin*. |
|
| Precalcwin*, since it will be calculated from the initial pick to +/- *Precalcwin*. | |
|
||||||
| *
|
| * | |
|
||||||
tsmoothP* | Average of samples in this time window will be used for smoothing the second HOS-CF. |
|
| tsmoothP* | Average of samples in this time window will be used for smoothing the second HOS-CF. |
|
||||||
| *
|
| * | |
|
||||||
ausP* | Controls artificial uplift of samples during precise picking. A common local minimum of the smoothed and unsmoothed HOS-CF is found when the previous sample is larger or equal to the current sample times (1+*
|
| ausP* | Controls artificial uplift of samples during precise picking. A common local minimum of the smoothed and unsmoothed HOS-CF is found when the previous sample is larger or equal to the current sample times (1+* |
|
||||||
ausP*). |
|
| ausP*). | |
|
||||||
|
|
||||||
## Precise S pick
|
## Precise S pick
|
||||||
|
|
||||||
Parameters used for determination of precise S pick.
|
Parameters used for determination of precise S pick.
|
||||||
|
|
||||||
| Name | Description |
|
| Name | Description |
|
||||||
|--------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
|--------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||||
| *
|
| * | |
|
||||||
tdet2h* | Time window for determination of AR coefficients. |
|
| tdet2h* | Time window for determination of AR coefficients. |
|
||||||
| *
|
| * | |
|
||||||
tpred2h* | Time window in which the waveform is predicted using the determined AR parameters. |
|
| tpred2h* | Time window in which the waveform is predicted using the determined AR parameters. |
|
||||||
| *Srecalcwin* | Time window for recalculation of ARH-CF. New CF will be calculated from initial pick +/- *
|
| *Srecalcwin* | Time window for recalculation of ARH-CF. New CF will be calculated from initial pick +/- * |
|
||||||
Srecalcwin*. |
|
| Srecalcwin*. | |
|
||||||
| *
|
| * | |
|
||||||
tsmoothS* | Average of samples in this time window will be used for smoothing the second ARH-CF. |
|
| tsmoothS* | Average of samples in this time window will be used for smoothing the second ARH-CF. |
|
||||||
| *
|
| * | |
|
||||||
ausS* | Controls artificial uplift of samples during precise picking. A common local minimum of the smoothed and unsmoothed ARH-CF is found when the previous sample is larger or equal to the current sample times (1+*
|
| ausS* | Controls artificial uplift of samples during precise picking. A common local minimum of the smoothed and unsmoothed ARH-CF is found when the previous sample is larger or equal to the current sample times (1+* |
|
||||||
ausS*). |
|
| ausS*). | |
|
||||||
| *
|
| * | |
|
||||||
pickwinS* | Time window around initial pick in which to look for a precise pick. |
|
| pickwinS* | Time window around initial pick in which to look for a precise pick. |
|
||||||
|
|
||||||
## Pick quality control
|
## Pick quality control
|
||||||
|
|
||||||
Parameters used for checking quality and integrity of automatic picks.
|
Parameters used for checking quality and integrity of automatic picks.
|
||||||
|
|
||||||
| Name | Description |
|
| Name | Description |
|
||||||
|--------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
|--------------------------------------------|-----------------------------------------------------------------------|
|
||||||
| *
|
| * | |
|
||||||
minAICPslope* | Initial P picks with a slope lower than this value will be discared. |
|
| minAICPslope* | Initial P picks with a slope lower than this value will be discared. |
|
||||||
| *
|
| * | |
|
||||||
minAICPSNR* | Initial P picks with a SNR below this value will be discarded. |
|
| minAICPSNR* | Initial P picks with a SNR below this value will be discarded. |
|
||||||
| *
|
| * | |
|
||||||
minAICSslope* | Initial S picks with a slope lower than this value will be discarded. |
|
| minAICSslope* | Initial S picks with a slope lower than this value will be discarded. |
|
||||||
| *
|
| * | |
|
||||||
minAICSSNR* | Initial S picks with a SNR below this value will be discarded. |
|
| minAICSSNR* | Initial S picks with a SNR below this value will be discarded. |
|
||||||
| *minsiglength*, *noisefacor*. *minpercent* | Parameters for checking signal length. In the time window of size *
|
| *minsiglength*, *noisefacor*. *minpercent* | Parameters for checking signal length. In the time window of size * |
|
||||||
|
|
||||||
minsiglength* after the initial P pick *
|
minsiglength* after the initial P pick *
|
||||||
minpercent* of samples have to be larger than the RMS value. |
|
minpercent* of samples have to be larger than the RMS value. |
|
||||||
| *
|
| *
|
||||||
@ -140,11 +141,11 @@ wdttolerance* | Maximum allowed deviation of S onset
|
|||||||
Parameters for discrete quality classes.
|
Parameters for discrete quality classes.
|
||||||
|
|
||||||
| Name | Description |
|
| Name | Description |
|
||||||
|------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
|--------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------|
|
||||||
| *
|
| * | |
|
||||||
timeerrorsP* | Width of the time windows in seconds between earliest and latest possible pick which represent the quality classes 0, 1, 2, 3 for P onsets. |
|
| timeerrorsP* | Width of the time windows in seconds between earliest and latest possible pick which represent the quality classes 0, 1, 2, 3 for P onsets. |
|
||||||
| *
|
| * | |
|
||||||
timeerrorsS* | Width of the time windows in seconds between earliest and latest possible pick which represent the quality classes 0, 1, 2, 3 for S onsets. |
|
| timeerrorsS* | Width of the time windows in seconds between earliest and latest possible pick which represent the quality classes 0, 1, 2, 3 for S onsets. |
|
||||||
| *nfacP*, *nfacS* | For determination of latest possible onset time. The time when the signal reaches an amplitude of *
|
| *nfacP*, *nfacS* | For determination of latest possible onset time. The time when the signal reaches an amplitude of * |
|
||||||
nfac* * mean value of the RMS amplitude in the time window *tnoise* corresponds to the latest possible onset time. |
|
| nfac* * mean value of the RMS amplitude in the time window *tnoise* corresponds to the latest possible onset time. | |
|
||||||
|
|
||||||
|
@ -4,10 +4,8 @@
|
|||||||
%Parameters are optimized for %extent data sets!
|
%Parameters are optimized for %extent data sets!
|
||||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
#main settings#
|
#main settings#
|
||||||
#rootpath# %project path
|
|
||||||
#datapath# %data path
|
#datapath# %data path
|
||||||
#database# %name of data base
|
#eventID# %event ID for single event processing (* for all events found in datapath)
|
||||||
#eventID# %event ID for single event processing (* for all events found in database)
|
|
||||||
#invdir# %full path to inventory or dataless-seed file
|
#invdir# %full path to inventory or dataless-seed file
|
||||||
PILOT #datastructure# %choose data structure
|
PILOT #datastructure# %choose data structure
|
||||||
True #apverbose# %choose 'True' or 'False' for terminal output
|
True #apverbose# %choose 'True' or 'False' for terminal output
|
||||||
@ -43,6 +41,7 @@ global #extent# %extent of a
|
|||||||
1150.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
1150.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
||||||
True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
||||||
iasp91 #taup_model# %define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
|
iasp91 #taup_model# %define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
|
||||||
|
P,Pdiff #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
|
||||||
0.05 0.5 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
0.05 0.5 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
||||||
0.001 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
0.001 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
||||||
0.05 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
0.05 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
||||||
|
@ -4,10 +4,8 @@
|
|||||||
%Parameters are optimized for %extent data sets!
|
%Parameters are optimized for %extent data sets!
|
||||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
#main settings#
|
#main settings#
|
||||||
/DATA/Insheim #rootpath# %project path
|
/DATA/Insheim/EVENT_DATA/LOCAL/2018.02_Insheim #datapath# %data path
|
||||||
EVENT_DATA/LOCAL #datapath# %data path
|
e0006.038.18 #eventID# %event ID for single event processing (* for all events found in datapath)
|
||||||
2018.02_Insheim #database# %name of data base
|
|
||||||
e0006.038.18 #eventID# %event ID for single event processing (* for all events found in database)
|
|
||||||
/DATA/Insheim/STAT_INFO #invdir# %full path to inventory or dataless-seed file
|
/DATA/Insheim/STAT_INFO #invdir# %full path to inventory or dataless-seed file
|
||||||
PILOT #datastructure# %choose data structure
|
PILOT #datastructure# %choose data structure
|
||||||
True #apverbose# %choose 'True' or 'False' for terminal output
|
True #apverbose# %choose 'True' or 'False' for terminal output
|
||||||
@ -43,6 +41,7 @@ local #extent# %extent of a
|
|||||||
10.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
10.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
||||||
False #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
False #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
||||||
iasp91 #taup_model# %define TauPy model for traveltime estimation
|
iasp91 #taup_model# %define TauPy model for traveltime estimation
|
||||||
|
P #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
|
||||||
2.0 20.0 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
2.0 20.0 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
||||||
2.0 30.0 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
2.0 30.0 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
||||||
2.0 10.0 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
2.0 10.0 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
||||||
|
@ -4,10 +4,8 @@
|
|||||||
%Parameters are optimized for %extent data sets!
|
%Parameters are optimized for %extent data sets!
|
||||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
#main settings#
|
#main settings#
|
||||||
#rootpath# %project path
|
|
||||||
#datapath# %data path
|
#datapath# %data path
|
||||||
#database# %name of data base
|
#eventID# %event ID for single event processing (* for all events found in datapath)
|
||||||
#eventID# %event ID for single event processing (* for all events found in database)
|
|
||||||
#invdir# %full path to inventory or dataless-seed file
|
#invdir# %full path to inventory or dataless-seed file
|
||||||
PILOT #datastructure# %choose data structure
|
PILOT #datastructure# %choose data structure
|
||||||
True #apverbose# %choose 'True' or 'False' for terminal output
|
True #apverbose# %choose 'True' or 'False' for terminal output
|
||||||
@ -43,6 +41,7 @@ local #extent# %extent of a
|
|||||||
10.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
10.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
||||||
True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
||||||
iasp91 #taup_model# %define TauPy model for traveltime estimation
|
iasp91 #taup_model# %define TauPy model for traveltime estimation
|
||||||
|
P #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
|
||||||
2.0 10.0 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
2.0 10.0 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
||||||
2.0 12.0 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
2.0 12.0 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
||||||
2.0 8.0 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
2.0 8.0 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
||||||
|
18
pylot.yml
18
pylot.yml
@ -1,14 +1,12 @@
|
|||||||
name: pylot_38
|
name: pylot_311
|
||||||
channels:
|
channels:
|
||||||
- conda-forge
|
- conda-forge
|
||||||
- defaults
|
- defaults
|
||||||
dependencies:
|
dependencies:
|
||||||
- cartopy=0.20.2
|
- cartopy=0.23.0=py311hcf9f919_1
|
||||||
- matplotlib-base=3.3.4
|
- joblib=1.4.2=pyhd8ed1ab_0
|
||||||
- numpy=1.22.3
|
- obspy=1.4.1=py311he736701_3
|
||||||
- obspy=1.3.0
|
- pyaml=24.7.0=pyhd8ed1ab_0
|
||||||
- pyqtgraph=0.12.4
|
- pyqtgraph=0.13.7=pyhd8ed1ab_0
|
||||||
- pyside2>=5.13.2
|
- pyside2=5.15.8=py311h3d699ce_4
|
||||||
- python=3.8.12
|
- pytest=8.3.2=pyhd8ed1ab_0
|
||||||
- qt>=5.12.9
|
|
||||||
- scipy=1.8.0
|
|
@ -9,7 +9,7 @@ PyLoT - the Python picking and Localization Tool
|
|||||||
|
|
||||||
This python library contains a graphical user interfaces for picking
|
This python library contains a graphical user interfaces for picking
|
||||||
seismic phases. This software needs ObsPy (http://github.com/obspy/obspy/wiki)
|
seismic phases. This software needs ObsPy (http://github.com/obspy/obspy/wiki)
|
||||||
and the Qt4 libraries to be installed first.
|
and the Qt libraries to be installed first.
|
||||||
|
|
||||||
PILOT has been developed in Mathworks' MatLab. In order to distribute
|
PILOT has been developed in Mathworks' MatLab. In order to distribute
|
||||||
PILOT without facing portability problems, it has been decided to re-
|
PILOT without facing portability problems, it has been decided to re-
|
||||||
|
@ -2,6 +2,7 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import copy
|
import copy
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from PySide2.QtWidgets import QMessageBox
|
from PySide2.QtWidgets import QMessageBox
|
||||||
@ -35,8 +36,17 @@ class Data(object):
|
|||||||
loaded event. Container object holding, e.g. phase arrivals, etc.
|
loaded event. Container object holding, e.g. phase arrivals, etc.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, parent=None, evtdata=None):
|
def __init__(self, parent=None, evtdata=None, picking_parameter=None):
|
||||||
self._parent = parent
|
self._parent = parent
|
||||||
|
|
||||||
|
if not picking_parameter:
|
||||||
|
if hasattr(parent, '_inputs'):
|
||||||
|
picking_parameter = parent._inputs
|
||||||
|
else:
|
||||||
|
logging.warning('No picking parameters found! Using default input parameters!!!')
|
||||||
|
picking_parameter = PylotParameter()
|
||||||
|
self.picking_parameter = picking_parameter
|
||||||
|
|
||||||
if self.getParent():
|
if self.getParent():
|
||||||
self.comp = parent.getComponent()
|
self.comp = parent.getComponent()
|
||||||
else:
|
else:
|
||||||
@ -402,25 +412,19 @@ class Data(object):
|
|||||||
not implemented: {1}'''.format(evtformat, e))
|
not implemented: {1}'''.format(evtformat, e))
|
||||||
if fnext == '.cnv':
|
if fnext == '.cnv':
|
||||||
try:
|
try:
|
||||||
velest.export(picks_copy, fnout + fnext, eventinfo=self.get_evt_data())
|
velest.export(picks_copy, fnout + fnext, self.picking_parameter, eventinfo=self.get_evt_data())
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
raise KeyError('''{0} export format
|
raise KeyError('''{0} export format
|
||||||
not implemented: {1}'''.format(evtformat, e))
|
not implemented: {1}'''.format(evtformat, e))
|
||||||
if fnext == '_focmec.in':
|
if fnext == '_focmec.in':
|
||||||
try:
|
try:
|
||||||
infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
|
focmec.export(picks_copy, fnout + fnext, self.picking_parameter, eventinfo=self.get_evt_data())
|
||||||
print('Using default input file {}'.format(infile))
|
|
||||||
parameter = PylotParameter(infile)
|
|
||||||
focmec.export(picks_copy, fnout + fnext, parameter, eventinfo=self.get_evt_data())
|
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
raise KeyError('''{0} export format
|
raise KeyError('''{0} export format
|
||||||
not implemented: {1}'''.format(evtformat, e))
|
not implemented: {1}'''.format(evtformat, e))
|
||||||
if fnext == '.pha':
|
if fnext == '.pha':
|
||||||
try:
|
try:
|
||||||
infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
|
hypodd.export(picks_copy, fnout + fnext, self.picking_parameter, eventinfo=self.get_evt_data())
|
||||||
print('Using default input file {}'.format(infile))
|
|
||||||
parameter = PylotParameter(infile)
|
|
||||||
hypodd.export(picks_copy, fnout + fnext, parameter, eventinfo=self.get_evt_data())
|
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
raise KeyError('''{0} export format
|
raise KeyError('''{0} export format
|
||||||
not implemented: {1}'''.format(evtformat, e))
|
not implemented: {1}'''.format(evtformat, e))
|
||||||
@ -451,10 +455,11 @@ class Data(object):
|
|||||||
data.filter(**kwargs)
|
data.filter(**kwargs)
|
||||||
self.dirty = True
|
self.dirty = True
|
||||||
|
|
||||||
def setWFData(self, fnames, fnames_syn=None, checkRotated=False, metadata=None, tstart=0, tstop=0):
|
def setWFData(self, fnames, fnames_alt=None, checkRotated=False, metadata=None, tstart=0, tstop=0):
|
||||||
"""
|
"""
|
||||||
Clear current waveform data and set given waveform data
|
Clear current waveform data and set given waveform data
|
||||||
:param fnames: waveform data names to append
|
:param fnames: waveform data names to append
|
||||||
|
:param fnames_alt: alternative data to show (e.g. synthetic/processed)
|
||||||
:type fnames: list
|
:type fnames: list
|
||||||
"""
|
"""
|
||||||
def check_fname_exists(filenames: list) -> list:
|
def check_fname_exists(filenames: list) -> list:
|
||||||
@ -464,14 +469,16 @@ class Data(object):
|
|||||||
|
|
||||||
self.wfdata = Stream()
|
self.wfdata = Stream()
|
||||||
self.wforiginal = None
|
self.wforiginal = None
|
||||||
self.wfsyn = Stream()
|
self.wf_alt = Stream()
|
||||||
if tstart == tstop:
|
if tstart == tstop:
|
||||||
tstart = tstop = None
|
tstart = tstop = None
|
||||||
self.tstart = tstart
|
self.tstart = tstart
|
||||||
self.tstop = tstop
|
self.tstop = tstop
|
||||||
|
|
||||||
|
# remove directories
|
||||||
fnames = check_fname_exists(fnames)
|
fnames = check_fname_exists(fnames)
|
||||||
fnames_syn = check_fname_exists(fnames_syn)
|
fnames_alt = check_fname_exists(fnames_alt)
|
||||||
|
|
||||||
# if obspy_dmt:
|
# if obspy_dmt:
|
||||||
# wfdir = 'raw'
|
# wfdir = 'raw'
|
||||||
# self.processed = False
|
# self.processed = False
|
||||||
@ -489,8 +496,8 @@ class Data(object):
|
|||||||
# wffnames = fnames
|
# wffnames = fnames
|
||||||
if fnames is not None:
|
if fnames is not None:
|
||||||
self.appendWFData(fnames)
|
self.appendWFData(fnames)
|
||||||
if fnames_syn is not None:
|
if fnames_alt is not None:
|
||||||
self.appendWFData(fnames_syn, synthetic=True)
|
self.appendWFData(fnames_alt, alternative=True)
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -512,7 +519,7 @@ class Data(object):
|
|||||||
self.dirty = False
|
self.dirty = False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def appendWFData(self, fnames, synthetic=False):
|
def appendWFData(self, fnames, alternative=False):
|
||||||
"""
|
"""
|
||||||
Read waveform data from fnames and append it to current wf data
|
Read waveform data from fnames and append it to current wf data
|
||||||
:param fnames: waveform data to append
|
:param fnames: waveform data to append
|
||||||
@ -525,19 +532,19 @@ class Data(object):
|
|||||||
if self.dirty:
|
if self.dirty:
|
||||||
self.resetWFData()
|
self.resetWFData()
|
||||||
|
|
||||||
real_or_syn_data = {True: self.wfsyn,
|
orig_or_alternative_data = {True: self.wf_alt,
|
||||||
False: self.wfdata}
|
False: self.wfdata}
|
||||||
|
|
||||||
warnmsg = ''
|
warnmsg = ''
|
||||||
for fname in set(fnames):
|
for fname in set(fnames):
|
||||||
try:
|
try:
|
||||||
real_or_syn_data[synthetic] += read(fname, starttime=self.tstart, endtime=self.tstop)
|
orig_or_alternative_data[alternative] += read(fname, starttime=self.tstart, endtime=self.tstop)
|
||||||
except TypeError:
|
except TypeError:
|
||||||
try:
|
try:
|
||||||
real_or_syn_data[synthetic] += read(fname, format='GSE2', starttime=self.tstart, endtime=self.tstop)
|
orig_or_alternative_data[alternative] += read(fname, format='GSE2', starttime=self.tstart, endtime=self.tstop)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
try:
|
try:
|
||||||
real_or_syn_data[synthetic] += read(fname, format='SEGY', starttime=self.tstart,
|
orig_or_alternative_data[alternative] += read(fname, format='SEGY', starttime=self.tstart,
|
||||||
endtime=self.tstop)
|
endtime=self.tstop)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
warnmsg += '{0}\n{1}\n'.format(fname, e)
|
warnmsg += '{0}\n{1}\n'.format(fname, e)
|
||||||
@ -553,8 +560,8 @@ class Data(object):
|
|||||||
def getOriginalWFData(self):
|
def getOriginalWFData(self):
|
||||||
return self.wforiginal
|
return self.wforiginal
|
||||||
|
|
||||||
def getSynWFData(self):
|
def getAltWFdata(self):
|
||||||
return self.wfsyn
|
return self.wf_alt
|
||||||
|
|
||||||
def resetWFData(self):
|
def resetWFData(self):
|
||||||
"""
|
"""
|
||||||
|
@ -6,24 +6,14 @@ import numpy as np
|
|||||||
Default parameters used for picking
|
Default parameters used for picking
|
||||||
"""
|
"""
|
||||||
|
|
||||||
defaults = {'rootpath': {'type': str,
|
defaults = {'datapath': {'type': str,
|
||||||
'tooltip': 'project path',
|
'tooltip': 'path to eventfolders',
|
||||||
'value': '',
|
|
||||||
'namestring': 'Root path'},
|
|
||||||
|
|
||||||
'datapath': {'type': str,
|
|
||||||
'tooltip': 'data path',
|
|
||||||
'value': '',
|
'value': '',
|
||||||
'namestring': 'Data path'},
|
'namestring': 'Data path'},
|
||||||
|
|
||||||
'database': {'type': str,
|
|
||||||
'tooltip': 'name of data base',
|
|
||||||
'value': '',
|
|
||||||
'namestring': 'Database path'},
|
|
||||||
|
|
||||||
'eventID': {'type': str,
|
'eventID': {'type': str,
|
||||||
'tooltip': 'event ID for single event processing (* for all events found in database)',
|
'tooltip': 'event ID for single event processing (* for all events found in datapath)',
|
||||||
'value': '',
|
'value': '*',
|
||||||
'namestring': 'Event ID'},
|
'namestring': 'Event ID'},
|
||||||
|
|
||||||
'extent': {'type': str,
|
'extent': {'type': str,
|
||||||
@ -511,7 +501,7 @@ defaults = {'rootpath': {'type': str,
|
|||||||
|
|
||||||
'taup_model': {'type': str,
|
'taup_model': {'type': str,
|
||||||
'tooltip': 'Define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6',
|
'tooltip': 'Define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6',
|
||||||
'value': None,
|
'value': 'iasp91',
|
||||||
'namestring': 'TauPy model'},
|
'namestring': 'TauPy model'},
|
||||||
|
|
||||||
'taup_phases': {'type': str,
|
'taup_phases': {'type': str,
|
||||||
@ -522,9 +512,7 @@ defaults = {'rootpath': {'type': str,
|
|||||||
|
|
||||||
settings_main = {
|
settings_main = {
|
||||||
'dirs': [
|
'dirs': [
|
||||||
'rootpath',
|
|
||||||
'datapath',
|
'datapath',
|
||||||
'database',
|
|
||||||
'eventID',
|
'eventID',
|
||||||
'invdir',
|
'invdir',
|
||||||
'datastructure',
|
'datastructure',
|
||||||
|
@ -1,5 +1,7 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
|
||||||
from pylot.core.io import default_parameters
|
from pylot.core.io import default_parameters
|
||||||
from pylot.core.util.errors import ParameterError
|
from pylot.core.util.errors import ParameterError
|
||||||
@ -51,10 +53,16 @@ class PylotParameter(object):
|
|||||||
self.__parameter = {}
|
self.__parameter = {}
|
||||||
self._verbosity = verbosity
|
self._verbosity = verbosity
|
||||||
self._parFileCont = {}
|
self._parFileCont = {}
|
||||||
|
|
||||||
# io from parsed arguments alternatively
|
# io from parsed arguments alternatively
|
||||||
for key, val in kwargs.items():
|
for key, val in kwargs.items():
|
||||||
self._parFileCont[key] = val
|
self._parFileCont[key] = val
|
||||||
self.from_file()
|
self.from_file()
|
||||||
|
|
||||||
|
# if no filename or kwargs given, use default values
|
||||||
|
if not fnin and not kwargs:
|
||||||
|
self.reset_defaults()
|
||||||
|
|
||||||
if fnout:
|
if fnout:
|
||||||
self.export2File(fnout)
|
self.export2File(fnout)
|
||||||
|
|
||||||
@ -88,10 +96,10 @@ class PylotParameter(object):
|
|||||||
return bool(self.__parameter)
|
return bool(self.__parameter)
|
||||||
|
|
||||||
def __getitem__(self, key):
|
def __getitem__(self, key):
|
||||||
try:
|
if key in self.__parameter:
|
||||||
return self.__parameter[key]
|
return self.__parameter[key]
|
||||||
except:
|
else:
|
||||||
return None
|
logging.warning(f'{key} not found in PylotParameter')
|
||||||
|
|
||||||
def __setitem__(self, key, value):
|
def __setitem__(self, key, value):
|
||||||
try:
|
try:
|
||||||
@ -418,6 +426,28 @@ class PylotParameter(object):
|
|||||||
line = value + name + ttip
|
line = value + name + ttip
|
||||||
fid.write(line)
|
fid.write(line)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def check_deprecated_parameters(parameters):
|
||||||
|
if parameters.hasParam('database') and parameters.hasParam('rootpath'):
|
||||||
|
parameters['datapath'] = os.path.join(parameters['rootpath'], parameters['datapath'],
|
||||||
|
parameters['database'])
|
||||||
|
logging.warning(
|
||||||
|
f'Parameters database and rootpath are deprecated. '
|
||||||
|
f'Tried to merge them to now path: {parameters["datapath"]}.'
|
||||||
|
)
|
||||||
|
|
||||||
|
remove_keys = []
|
||||||
|
for key in parameters:
|
||||||
|
if not key in default_parameters.defaults.keys():
|
||||||
|
remove_keys.append(key)
|
||||||
|
logging.warning(f'Removing deprecated parameter: {key}')
|
||||||
|
|
||||||
|
for key in remove_keys:
|
||||||
|
del parameters[key]
|
||||||
|
|
||||||
|
parameters._settings_main = default_parameters.settings_main
|
||||||
|
parameters._settings_special_pick = default_parameters.settings_special_pick
|
||||||
|
|
||||||
|
|
||||||
class FilterOptions(object):
|
class FilterOptions(object):
|
||||||
'''
|
'''
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
from obspy import UTCDateTime
|
from obspy import UTCDateTime
|
||||||
from obspy.core import event as ope
|
from obspy.core import event as ope
|
||||||
|
|
||||||
from pylot.core.util.utils import getLogin, getHash
|
from pylot.core.util.utils import get_login, get_hash
|
||||||
|
|
||||||
|
|
||||||
def create_amplitude(pickID, amp, unit, category, cinfo):
|
def create_amplitude(pickID, amp, unit, category, cinfo):
|
||||||
@ -61,7 +61,7 @@ def create_creation_info(agency_id=None, creation_time=None, author=None):
|
|||||||
:return:
|
:return:
|
||||||
'''
|
'''
|
||||||
if author is None:
|
if author is None:
|
||||||
author = getLogin()
|
author = get_login()
|
||||||
if creation_time is None:
|
if creation_time is None:
|
||||||
creation_time = UTCDateTime()
|
creation_time = UTCDateTime()
|
||||||
return ope.CreationInfo(agency_id=agency_id, author=author,
|
return ope.CreationInfo(agency_id=agency_id, author=author,
|
||||||
@ -210,7 +210,7 @@ def create_resourceID(timetohash, restype, authority_id=None, hrstr=None):
|
|||||||
'''
|
'''
|
||||||
assert isinstance(timetohash, UTCDateTime), "'timetohash' is not an ObsPy" \
|
assert isinstance(timetohash, UTCDateTime), "'timetohash' is not an ObsPy" \
|
||||||
"UTCDateTime object"
|
"UTCDateTime object"
|
||||||
hid = getHash(timetohash)
|
hid = get_hash(timetohash)
|
||||||
if hrstr is None:
|
if hrstr is None:
|
||||||
resID = ope.ResourceIdentifier(restype + '/' + hid[0:6])
|
resID = ope.ResourceIdentifier(restype + '/' + hid[0:6])
|
||||||
else:
|
else:
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
import glob
|
import glob
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
@ -16,7 +17,7 @@ from pylot.core.io.inputs import PylotParameter
|
|||||||
from pylot.core.io.location import create_event, \
|
from pylot.core.io.location import create_event, \
|
||||||
create_magnitude
|
create_magnitude
|
||||||
from pylot.core.pick.utils import select_for_phase, get_quality_class
|
from pylot.core.pick.utils import select_for_phase, get_quality_class
|
||||||
from pylot.core.util.utils import getOwner, full_range, four_digits, transformFilterString4Export, \
|
from pylot.core.util.utils import get_owner, full_range, four_digits, transformFilterString4Export, \
|
||||||
backtransformFilterString, loopIdentifyPhase, identifyPhase
|
backtransformFilterString, loopIdentifyPhase, identifyPhase
|
||||||
|
|
||||||
|
|
||||||
@ -58,7 +59,7 @@ def readPILOTEvent(phasfn=None, locfn=None, authority_id='RUB', **kwargs):
|
|||||||
if phasfn is not None and os.path.isfile(phasfn):
|
if phasfn is not None and os.path.isfile(phasfn):
|
||||||
phases = sio.loadmat(phasfn)
|
phases = sio.loadmat(phasfn)
|
||||||
phasctime = UTCDateTime(os.path.getmtime(phasfn))
|
phasctime = UTCDateTime(os.path.getmtime(phasfn))
|
||||||
phasauthor = getOwner(phasfn)
|
phasauthor = get_owner(phasfn)
|
||||||
else:
|
else:
|
||||||
phases = None
|
phases = None
|
||||||
phasctime = None
|
phasctime = None
|
||||||
@ -66,7 +67,7 @@ def readPILOTEvent(phasfn=None, locfn=None, authority_id='RUB', **kwargs):
|
|||||||
if locfn is not None and os.path.isfile(locfn):
|
if locfn is not None and os.path.isfile(locfn):
|
||||||
loc = sio.loadmat(locfn)
|
loc = sio.loadmat(locfn)
|
||||||
locctime = UTCDateTime(os.path.getmtime(locfn))
|
locctime = UTCDateTime(os.path.getmtime(locfn))
|
||||||
locauthor = getOwner(locfn)
|
locauthor = get_owner(locfn)
|
||||||
else:
|
else:
|
||||||
loc = None
|
loc = None
|
||||||
locctime = None
|
locctime = None
|
||||||
@ -217,7 +218,7 @@ def picksdict_from_obs(fn):
|
|||||||
return picks
|
return picks
|
||||||
|
|
||||||
|
|
||||||
def picksdict_from_picks(evt):
|
def picksdict_from_picks(evt, parameter=None):
|
||||||
"""
|
"""
|
||||||
Takes an Event object and return the pick dictionary commonly used within
|
Takes an Event object and return the pick dictionary commonly used within
|
||||||
PyLoT
|
PyLoT
|
||||||
@ -230,6 +231,7 @@ def picksdict_from_picks(evt):
|
|||||||
'auto': {}
|
'auto': {}
|
||||||
}
|
}
|
||||||
for pick in evt.picks:
|
for pick in evt.picks:
|
||||||
|
errors = None
|
||||||
phase = {}
|
phase = {}
|
||||||
station = pick.waveform_id.station_code
|
station = pick.waveform_id.station_code
|
||||||
if pick.waveform_id.channel_code is None:
|
if pick.waveform_id.channel_code is None:
|
||||||
@ -273,33 +275,28 @@ def picksdict_from_picks(evt):
|
|||||||
phase['epp'] = epp
|
phase['epp'] = epp
|
||||||
phase['lpp'] = lpp
|
phase['lpp'] = lpp
|
||||||
phase['spe'] = spe
|
phase['spe'] = spe
|
||||||
try:
|
weight = phase.get('weight')
|
||||||
phase['weight'] = weight
|
if not weight:
|
||||||
except:
|
if not parameter:
|
||||||
# get onset weight from uncertainty
|
logging.warning('Using default input parameter')
|
||||||
infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
|
parameter = PylotParameter()
|
||||||
print('Using default input file {}'.format(infile))
|
|
||||||
parameter = PylotParameter(infile)
|
|
||||||
pick.phase_hint = identifyPhase(pick.phase_hint)
|
pick.phase_hint = identifyPhase(pick.phase_hint)
|
||||||
if pick.phase_hint == 'P':
|
if pick.phase_hint == 'P':
|
||||||
errors = parameter['timeerrorsP']
|
errors = parameter['timeerrorsP']
|
||||||
elif pick.phase_hint == 'S':
|
elif pick.phase_hint == 'S':
|
||||||
errors = parameter['timeerrorsS']
|
errors = parameter['timeerrorsS']
|
||||||
|
if errors:
|
||||||
weight = get_quality_class(spe, errors)
|
weight = get_quality_class(spe, errors)
|
||||||
phase['weight'] = weight
|
phase['weight'] = weight
|
||||||
phase['channel'] = channel
|
phase['channel'] = channel
|
||||||
phase['network'] = network
|
phase['network'] = network
|
||||||
phase['picker'] = pick_method
|
phase['picker'] = pick_method
|
||||||
try:
|
|
||||||
if pick.polarity == 'positive':
|
if pick.polarity == 'positive':
|
||||||
phase['fm'] = 'U'
|
phase['fm'] = 'U'
|
||||||
elif pick.polarity == 'negative':
|
elif pick.polarity == 'negative':
|
||||||
phase['fm'] = 'D'
|
phase['fm'] = 'D'
|
||||||
else:
|
else:
|
||||||
phase['fm'] = 'N'
|
phase['fm'] = 'N'
|
||||||
except:
|
|
||||||
print("No FM info available!")
|
|
||||||
phase['fm'] = 'N'
|
|
||||||
phase['filter_id'] = filter_id if filter_id is not None else ''
|
phase['filter_id'] = filter_id if filter_id is not None else ''
|
||||||
|
|
||||||
onsets[pick.phase_hint] = phase.copy()
|
onsets[pick.phase_hint] = phase.copy()
|
||||||
@ -514,8 +511,8 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
|
|||||||
fid = open("%s" % filename, 'w')
|
fid = open("%s" % filename, 'w')
|
||||||
# write header
|
# write header
|
||||||
fid.write('# EQEVENT: %s Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' %
|
fid.write('# EQEVENT: %s Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' %
|
||||||
(parameter.get('database'), parameter.get('eventID')))
|
(parameter.get('datapath'), parameter.get('eventID')))
|
||||||
arrivals = chooseArrivals(arrivals) # MP MP what is chooseArrivals? It is not defined anywhere
|
arrivals = chooseArrivals(arrivals)
|
||||||
for key in arrivals:
|
for key in arrivals:
|
||||||
# P onsets
|
# P onsets
|
||||||
if 'P' in arrivals[key]:
|
if 'P' in arrivals[key]:
|
||||||
@ -667,8 +664,8 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
|
|||||||
print("Writing phases to %s for HYPOSAT" % filename)
|
print("Writing phases to %s for HYPOSAT" % filename)
|
||||||
fid = open("%s" % filename, 'w')
|
fid = open("%s" % filename, 'w')
|
||||||
# write header
|
# write header
|
||||||
fid.write('%s, event %s \n' % (parameter.get('database'), parameter.get('eventID')))
|
fid.write('%s, event %s \n' % (parameter.get('datapath'), parameter.get('eventID')))
|
||||||
arrivals = chooseArrivals(arrivals) # MP MP what is chooseArrivals? It is not defined anywhere
|
arrivals = chooseArrivals(arrivals)
|
||||||
for key in arrivals:
|
for key in arrivals:
|
||||||
# P onsets
|
# P onsets
|
||||||
if 'P' in arrivals[key] and arrivals[key]['P']['mpp'] is not None:
|
if 'P' in arrivals[key] and arrivals[key]['P']['mpp'] is not None:
|
||||||
@ -759,11 +756,11 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
|
|||||||
cns, eventsource['longitude'], cew, eventsource['depth'], eventinfo.magnitudes[0]['mag'], ifx))
|
cns, eventsource['longitude'], cew, eventsource['depth'], eventinfo.magnitudes[0]['mag'], ifx))
|
||||||
n = 0
|
n = 0
|
||||||
# check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
|
# check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
|
||||||
if isinstance(arrivals, dict) == False:
|
if isinstance(arrivals, dict) is False:
|
||||||
# convert pick object (PyLoT) into dictionary
|
# convert pick object (PyLoT) into dictionary
|
||||||
evt = ope.Event(resource_id=eventinfo['resource_id'])
|
evt = ope.Event(resource_id=eventinfo['resource_id'])
|
||||||
evt.picks = arrivals
|
evt.picks = arrivals
|
||||||
arrivals = picksdict_from_picks(evt)
|
arrivals = picksdict_from_picks(evt, parameter=parameter)
|
||||||
# check for automatic and manual picks
|
# check for automatic and manual picks
|
||||||
# prefer manual picks
|
# prefer manual picks
|
||||||
usedarrivals = chooseArrivals(arrivals)
|
usedarrivals = chooseArrivals(arrivals)
|
||||||
@ -824,7 +821,7 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
|
|||||||
# convert pick object (PyLoT) into dictionary
|
# convert pick object (PyLoT) into dictionary
|
||||||
evt = ope.Event(resource_id=eventinfo['resource_id'])
|
evt = ope.Event(resource_id=eventinfo['resource_id'])
|
||||||
evt.picks = arrivals
|
evt.picks = arrivals
|
||||||
arrivals = picksdict_from_picks(evt)
|
arrivals = picksdict_from_picks(evt, parameter=parameter)
|
||||||
# check for automatic and manual picks
|
# check for automatic and manual picks
|
||||||
# prefer manual picks
|
# prefer manual picks
|
||||||
usedarrivals = chooseArrivals(arrivals)
|
usedarrivals = chooseArrivals(arrivals)
|
||||||
@ -875,7 +872,7 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
|
|||||||
# convert pick object (PyLoT) into dictionary
|
# convert pick object (PyLoT) into dictionary
|
||||||
evt = ope.Event(resource_id=eventinfo['resource_id'])
|
evt = ope.Event(resource_id=eventinfo['resource_id'])
|
||||||
evt.picks = arrivals
|
evt.picks = arrivals
|
||||||
arrivals = picksdict_from_picks(evt)
|
arrivals = picksdict_from_picks(evt, parameter=parameter)
|
||||||
# check for automatic and manual picks
|
# check for automatic and manual picks
|
||||||
# prefer manual picks
|
# prefer manual picks
|
||||||
usedarrivals = chooseArrivals(arrivals)
|
usedarrivals = chooseArrivals(arrivals)
|
||||||
|
@ -22,7 +22,7 @@ from pylot.core.pick.picker import AICPicker, PragPicker
|
|||||||
from pylot.core.pick.utils import checksignallength, checkZ4S, earllatepicker, \
|
from pylot.core.pick.utils import checksignallength, checkZ4S, earllatepicker, \
|
||||||
getSNR, fmpicker, checkPonsets, wadaticheck, get_quality_class, PickingFailedException, MissingTraceException
|
getSNR, fmpicker, checkPonsets, wadaticheck, get_quality_class, PickingFailedException, MissingTraceException
|
||||||
from pylot.core.util.utils import getPatternLine, gen_Pool, \
|
from pylot.core.util.utils import getPatternLine, gen_Pool, \
|
||||||
get_bool, identifyPhaseID, get_None, correct_iplot
|
get_bool, identifyPhaseID, get_none, correct_iplot
|
||||||
|
|
||||||
|
|
||||||
def autopickevent(data, param, iplot=0, fig_dict=None, fig_dict_wadatijack=None, ncores=0, metadata=None, origin=None):
|
def autopickevent(data, param, iplot=0, fig_dict=None, fig_dict_wadatijack=None, ncores=0, metadata=None, origin=None):
|
||||||
@ -258,10 +258,14 @@ class AutopickStation(object):
|
|||||||
self.pickparams = copy.deepcopy(pickparam)
|
self.pickparams = copy.deepcopy(pickparam)
|
||||||
self.verbose = verbose
|
self.verbose = verbose
|
||||||
self.iplot = correct_iplot(iplot)
|
self.iplot = correct_iplot(iplot)
|
||||||
self.fig_dict = get_None(fig_dict)
|
self.fig_dict = get_none(fig_dict)
|
||||||
self.metadata = metadata
|
self.metadata = metadata
|
||||||
self.origin = origin
|
self.origin = origin
|
||||||
|
|
||||||
|
# initialize TauPy pick estimates
|
||||||
|
self.estFirstP = None
|
||||||
|
self.estFirstS = None
|
||||||
|
|
||||||
# initialize picking results
|
# initialize picking results
|
||||||
self.p_results = PickingResults()
|
self.p_results = PickingResults()
|
||||||
self.s_results = PickingResults()
|
self.s_results = PickingResults()
|
||||||
@ -443,15 +447,15 @@ class AutopickStation(object):
|
|||||||
for arr in arrivals:
|
for arr in arrivals:
|
||||||
phases[identifyPhaseID(arr.phase.name)].append(arr)
|
phases[identifyPhaseID(arr.phase.name)].append(arr)
|
||||||
# get first P and S onsets from arrivals list
|
# get first P and S onsets from arrivals list
|
||||||
estFirstP = 0
|
arrival_time_p = 0
|
||||||
estFirstS = 0
|
arrival_time_s = 0
|
||||||
if len(phases['P']) > 0:
|
if len(phases['P']) > 0:
|
||||||
arrP, estFirstP = min([(arr, arr.time) for arr in phases['P']], key=lambda t: t[1])
|
arrP, arrival_time_p = min([(arr, arr.time) for arr in phases['P']], key=lambda t: t[1])
|
||||||
if len(phases['S']) > 0:
|
if len(phases['S']) > 0:
|
||||||
arrS, estFirstS = min([(arr, arr.time) for arr in phases['S']], key=lambda t: t[1])
|
arrS, arrival_time_s = min([(arr, arr.time) for arr in phases['S']], key=lambda t: t[1])
|
||||||
print('autopick: estimated first arrivals for P: {} s, S:{} s after event'
|
print('autopick: estimated first arrivals for P: {} s, S:{} s after event'
|
||||||
' origin time using TauPy'.format(estFirstP, estFirstS))
|
' origin time using TauPy'.format(arrival_time_p, arrival_time_s))
|
||||||
return estFirstP, estFirstS
|
return arrival_time_p, arrival_time_s
|
||||||
|
|
||||||
def exit_taupy():
|
def exit_taupy():
|
||||||
"""If taupy failed to calculate theoretical starttimes, picking continues.
|
"""If taupy failed to calculate theoretical starttimes, picking continues.
|
||||||
@ -477,10 +481,13 @@ class AutopickStation(object):
|
|||||||
raise AttributeError('No source origins given!')
|
raise AttributeError('No source origins given!')
|
||||||
|
|
||||||
arrivals = create_arrivals(self.metadata, self.origin, self.pickparams["taup_model"])
|
arrivals = create_arrivals(self.metadata, self.origin, self.pickparams["taup_model"])
|
||||||
estFirstP, estFirstS = first_PS_onsets(arrivals)
|
arrival_P, arrival_S = first_PS_onsets(arrivals)
|
||||||
|
|
||||||
|
self.estFirstP = (self.origin[0].time + arrival_P) - self.ztrace.stats.starttime
|
||||||
|
|
||||||
# modifiy pstart and pstop relative to estimated first P arrival (relative to station time axis)
|
# modifiy pstart and pstop relative to estimated first P arrival (relative to station time axis)
|
||||||
self.pickparams["pstart"] += (self.origin[0].time + estFirstP) - self.ztrace.stats.starttime
|
self.pickparams["pstart"] += self.estFirstP
|
||||||
self.pickparams["pstop"] += (self.origin[0].time + estFirstP) - self.ztrace.stats.starttime
|
self.pickparams["pstop"] += self.estFirstP
|
||||||
print('autopick: CF calculation times respectively:'
|
print('autopick: CF calculation times respectively:'
|
||||||
' pstart: {} s, pstop: {} s'.format(self.pickparams["pstart"], self.pickparams["pstop"]))
|
' pstart: {} s, pstop: {} s'.format(self.pickparams["pstart"], self.pickparams["pstop"]))
|
||||||
# make sure pstart and pstop are inside the starttime/endtime of vertical trace
|
# make sure pstart and pstop are inside the starttime/endtime of vertical trace
|
||||||
@ -491,9 +498,10 @@ class AutopickStation(object):
|
|||||||
# for the two horizontal components take earliest and latest time to make sure that the s onset is not clipped
|
# for the two horizontal components take earliest and latest time to make sure that the s onset is not clipped
|
||||||
# if start and endtime of horizontal traces differ, the s windowsize will automatically increase
|
# if start and endtime of horizontal traces differ, the s windowsize will automatically increase
|
||||||
trace_s_start = min([self.etrace.stats.starttime, self.ntrace.stats.starttime])
|
trace_s_start = min([self.etrace.stats.starttime, self.ntrace.stats.starttime])
|
||||||
|
self.estFirstS = (self.origin[0].time + arrival_S) - trace_s_start
|
||||||
# modifiy sstart and sstop relative to estimated first S arrival (relative to station time axis)
|
# modifiy sstart and sstop relative to estimated first S arrival (relative to station time axis)
|
||||||
self.pickparams["sstart"] += (self.origin[0].time + estFirstS) - trace_s_start
|
self.pickparams["sstart"] += self.estFirstS
|
||||||
self.pickparams["sstop"] += (self.origin[0].time + estFirstS) - trace_s_start
|
self.pickparams["sstop"] += self.estFirstS
|
||||||
print('autopick: CF calculation times respectively:'
|
print('autopick: CF calculation times respectively:'
|
||||||
' sstart: {} s, sstop: {} s'.format(self.pickparams["sstart"], self.pickparams["sstop"]))
|
' sstart: {} s, sstop: {} s'.format(self.pickparams["sstart"], self.pickparams["sstop"]))
|
||||||
# make sure pstart and pstop are inside the starttime/endtime of horizontal traces
|
# make sure pstart and pstop are inside the starttime/endtime of horizontal traces
|
||||||
@ -609,6 +617,12 @@ class AutopickStation(object):
|
|||||||
# plot tapered trace filtered with bpz2 filter settings
|
# plot tapered trace filtered with bpz2 filter settings
|
||||||
ax1.plot(tdata, self.tr_filt_z_bpz2.data / max(self.tr_filt_z_bpz2.data), color=linecolor, linewidth=0.7,
|
ax1.plot(tdata, self.tr_filt_z_bpz2.data / max(self.tr_filt_z_bpz2.data), color=linecolor, linewidth=0.7,
|
||||||
label='Data')
|
label='Data')
|
||||||
|
# plot pickwindows for P
|
||||||
|
pstart, pstop = self.pickparams['pstart'], self.pickparams['pstop']
|
||||||
|
if pstart is not None and pstop is not None:
|
||||||
|
ax1.axvspan(pstart, pstop, color='r', alpha=0.1, zorder=0, label='P window')
|
||||||
|
if self.estFirstP is not None:
|
||||||
|
ax1.axvline(self.estFirstP, ls='dashed', color='r', alpha=0.4, label='TauPy estimate')
|
||||||
if self.p_results.weight < 4:
|
if self.p_results.weight < 4:
|
||||||
# plot CF of initial onset (HOScf or ARZcf)
|
# plot CF of initial onset (HOScf or ARZcf)
|
||||||
ax1.plot(self.cf1.getTimeArray(), self.cf1.getCF() / max(self.cf1.getCF()), 'b', label='CF1')
|
ax1.plot(self.cf1.getTimeArray(), self.cf1.getCF() / max(self.cf1.getCF()), 'b', label='CF1')
|
||||||
@ -713,6 +727,15 @@ class AutopickStation(object):
|
|||||||
ax3.plot([refSpick.getpick() - 0.5, refSpick.getpick() + 0.5], [-1.3, -1.3], 'g', linewidth=2)
|
ax3.plot([refSpick.getpick() - 0.5, refSpick.getpick() + 0.5], [-1.3, -1.3], 'g', linewidth=2)
|
||||||
ax3.plot([self.s_results.lpp, self.s_results.lpp], [-1.1, 1.1], 'g--', label='lpp')
|
ax3.plot([self.s_results.lpp, self.s_results.lpp], [-1.1, 1.1], 'g--', label='lpp')
|
||||||
ax3.plot([self.s_results.epp, self.s_results.epp], [-1.1, 1.1], 'g--', label='epp')
|
ax3.plot([self.s_results.epp, self.s_results.epp], [-1.1, 1.1], 'g--', label='epp')
|
||||||
|
|
||||||
|
# plot pickwindows for S
|
||||||
|
sstart, sstop = self.pickparams['sstart'], self.pickparams['sstop']
|
||||||
|
if sstart is not None and sstop is not None:
|
||||||
|
for axis in [ax2, ax3]:
|
||||||
|
axis.axvspan(sstart, sstop, color='b', alpha=0.1, zorder=0, label='S window')
|
||||||
|
if self.estFirstS is not None:
|
||||||
|
axis.axvline(self.estFirstS, ls='dashed', color='b', alpha=0.4, label='TauPy estimate')
|
||||||
|
|
||||||
ax3.legend(loc=1)
|
ax3.legend(loc=1)
|
||||||
ax3.set_yticks([])
|
ax3.set_yticks([])
|
||||||
ax3.set_ylim([-1.5, 1.5])
|
ax3.set_ylim([-1.5, 1.5])
|
||||||
@ -835,14 +858,21 @@ class AutopickStation(object):
|
|||||||
self.cf1 = None
|
self.cf1 = None
|
||||||
assert isinstance(self.cf1, CharacteristicFunction), 'cf1 is not set correctly: maybe the algorithm name ({})' \
|
assert isinstance(self.cf1, CharacteristicFunction), 'cf1 is not set correctly: maybe the algorithm name ({})' \
|
||||||
' is corrupted'.format(self.pickparams["algoP"])
|
' is corrupted'.format(self.pickparams["algoP"])
|
||||||
|
# get the original waveform stream from first CF class cut to identical length as CF for plotting
|
||||||
|
cut_ogstream = self.cf1.getDataArray(self.cf1.getCut())
|
||||||
|
|
||||||
|
# MP: Rename to cf_stream for further use of z_copy and to prevent chaos when z_copy suddenly becomes a cf
|
||||||
|
# stream and later again a waveform stream
|
||||||
|
cf_stream = z_copy.copy()
|
||||||
|
cf_stream[0].data = self.cf1.getCF()
|
||||||
|
|
||||||
# calculate AIC cf from first cf (either HOS or ARZ)
|
# calculate AIC cf from first cf (either HOS or ARZ)
|
||||||
z_copy[0].data = self.cf1.getCF()
|
aiccf = AICcf(cf_stream, cuttimes)
|
||||||
aiccf = AICcf(z_copy, cuttimes)
|
|
||||||
# get preliminary onset time from AIC-CF
|
# get preliminary onset time from AIC-CF
|
||||||
self.set_current_figure('aicFig')
|
self.set_current_figure('aicFig')
|
||||||
aicpick = AICPicker(aiccf, self.pickparams["tsnrz"], self.pickparams["pickwinP"], self.iplot,
|
aicpick = AICPicker(aiccf, self.pickparams["tsnrz"], self.pickparams["pickwinP"], self.iplot,
|
||||||
Tsmooth=self.pickparams["aictsmooth"], fig=self.current_figure,
|
Tsmooth=self.pickparams["aictsmooth"], fig=self.current_figure,
|
||||||
linecolor=self.current_linecolor)
|
linecolor=self.current_linecolor, ogstream=cut_ogstream)
|
||||||
# save aicpick for plotting later
|
# save aicpick for plotting later
|
||||||
self.p_data.aicpick = aicpick
|
self.p_data.aicpick = aicpick
|
||||||
# add pstart and pstop to aic plot
|
# add pstart and pstop to aic plot
|
||||||
@ -855,7 +885,7 @@ class AutopickStation(object):
|
|||||||
label='P stop')
|
label='P stop')
|
||||||
ax.legend(loc=1)
|
ax.legend(loc=1)
|
||||||
|
|
||||||
Pflag = self._pick_p_quality_control(aicpick, z_copy, tr_filt)
|
Pflag = self._pick_p_quality_control(aicpick, cf_stream, tr_filt)
|
||||||
# go on with processing if AIC onset passes quality control
|
# go on with processing if AIC onset passes quality control
|
||||||
slope = aicpick.getSlope()
|
slope = aicpick.getSlope()
|
||||||
if not slope: slope = 0
|
if not slope: slope = 0
|
||||||
@ -894,7 +924,7 @@ class AutopickStation(object):
|
|||||||
refPpick = PragPicker(self.cf2, self.pickparams["tsnrz"], self.pickparams["pickwinP"], self.iplot,
|
refPpick = PragPicker(self.cf2, self.pickparams["tsnrz"], self.pickparams["pickwinP"], self.iplot,
|
||||||
self.pickparams["ausP"],
|
self.pickparams["ausP"],
|
||||||
self.pickparams["tsmoothP"], aicpick.getpick(), self.current_figure,
|
self.pickparams["tsmoothP"], aicpick.getpick(), self.current_figure,
|
||||||
self.current_linecolor)
|
self.current_linecolor, ogstream=cut_ogstream)
|
||||||
# save PragPicker result for plotting
|
# save PragPicker result for plotting
|
||||||
self.p_data.refPpick = refPpick
|
self.p_data.refPpick = refPpick
|
||||||
self.p_results.mpp = refPpick.getpick()
|
self.p_results.mpp = refPpick.getpick()
|
||||||
@ -1146,11 +1176,14 @@ class AutopickStation(object):
|
|||||||
# calculate AIC cf
|
# calculate AIC cf
|
||||||
haiccf = self._calculate_aic_cf_s_pick(cuttimesh)
|
haiccf = self._calculate_aic_cf_s_pick(cuttimesh)
|
||||||
|
|
||||||
|
# get the original waveform stream cut to identical length as CF for plotting
|
||||||
|
ogstream = haiccf.getDataArray(haiccf.getCut())
|
||||||
|
|
||||||
# get preliminary onset time from AIC cf
|
# get preliminary onset time from AIC cf
|
||||||
self.set_current_figure('aicARHfig')
|
self.set_current_figure('aicARHfig')
|
||||||
aicarhpick = AICPicker(haiccf, self.pickparams["tsnrh"], self.pickparams["pickwinS"], self.iplot,
|
aicarhpick = AICPicker(haiccf, self.pickparams["tsnrh"], self.pickparams["pickwinS"], self.iplot,
|
||||||
Tsmooth=self.pickparams["aictsmoothS"], fig=self.current_figure,
|
Tsmooth=self.pickparams["aictsmoothS"], fig=self.current_figure,
|
||||||
linecolor=self.current_linecolor)
|
linecolor=self.current_linecolor, ogstream=ogstream)
|
||||||
# save pick for later plotting
|
# save pick for later plotting
|
||||||
self.aicarhpick = aicarhpick
|
self.aicarhpick = aicarhpick
|
||||||
|
|
||||||
|
@ -17,7 +17,11 @@ autoregressive prediction: application ot local and regional distances, Geophys.
|
|||||||
:author: MAGS2 EP3 working group
|
:author: MAGS2 EP3 working group
|
||||||
"""
|
"""
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from scipy import signal
|
try:
|
||||||
|
from scipy.signal import tukey
|
||||||
|
except ImportError:
|
||||||
|
from scipy.signal.windows import tukey
|
||||||
|
|
||||||
from obspy.core import Stream
|
from obspy.core import Stream
|
||||||
|
|
||||||
from pylot.core.pick.utils import PickingFailedException
|
from pylot.core.pick.utils import PickingFailedException
|
||||||
@ -56,7 +60,7 @@ class CharacteristicFunction(object):
|
|||||||
self.setOrder(order)
|
self.setOrder(order)
|
||||||
self.setFnoise(fnoise)
|
self.setFnoise(fnoise)
|
||||||
self.setARdetStep(t2)
|
self.setARdetStep(t2)
|
||||||
self.calcCF(self.getDataArray())
|
self.calcCF()
|
||||||
self.arpara = np.array([])
|
self.arpara = np.array([])
|
||||||
self.xpred = np.array([])
|
self.xpred = np.array([])
|
||||||
|
|
||||||
@ -208,17 +212,15 @@ class CharacteristicFunction(object):
|
|||||||
data = self.orig_data.copy()
|
data = self.orig_data.copy()
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def calcCF(self, data=None):
|
def calcCF(self):
|
||||||
self.cf = data
|
pass
|
||||||
|
|
||||||
|
|
||||||
class AICcf(CharacteristicFunction):
|
class AICcf(CharacteristicFunction):
|
||||||
|
|
||||||
def calcCF(self, data):
|
def calcCF(self):
|
||||||
"""
|
"""
|
||||||
Function to calculate the Akaike Information Criterion (AIC) after Maeda (1985).
|
Function to calculate the Akaike Information Criterion (AIC) after Maeda (1985).
|
||||||
:param data: data, time series (whether seismogram or CF)
|
|
||||||
:type data: tuple
|
|
||||||
:return: AIC function
|
:return: AIC function
|
||||||
:rtype:
|
:rtype:
|
||||||
"""
|
"""
|
||||||
@ -227,7 +229,7 @@ class AICcf(CharacteristicFunction):
|
|||||||
ind = np.where(~np.isnan(xnp))[0]
|
ind = np.where(~np.isnan(xnp))[0]
|
||||||
if ind.size:
|
if ind.size:
|
||||||
xnp[:ind[0]] = xnp[ind[0]]
|
xnp[:ind[0]] = xnp[ind[0]]
|
||||||
xnp = signal.tukey(len(xnp), alpha=0.05) * xnp
|
xnp = tukey(len(xnp), alpha=0.05) * xnp
|
||||||
xnp = xnp - np.mean(xnp)
|
xnp = xnp - np.mean(xnp)
|
||||||
datlen = len(xnp)
|
datlen = len(xnp)
|
||||||
k = np.arange(1, datlen)
|
k = np.arange(1, datlen)
|
||||||
@ -256,13 +258,11 @@ class HOScf(CharacteristicFunction):
|
|||||||
"""
|
"""
|
||||||
super(HOScf, self).__init__(data, cut, pickparams["tlta"], pickparams["hosorder"])
|
super(HOScf, self).__init__(data, cut, pickparams["tlta"], pickparams["hosorder"])
|
||||||
|
|
||||||
def calcCF(self, data):
|
def calcCF(self):
|
||||||
"""
|
"""
|
||||||
Function to calculate skewness (statistics of order 3) or kurtosis
|
Function to calculate skewness (statistics of order 3) or kurtosis
|
||||||
(statistics of order 4), using one long moving window, as published
|
(statistics of order 4), using one long moving window, as published
|
||||||
in Kueperkoch et al. (2010), or order 2, i.e. STA/LTA.
|
in Kueperkoch et al. (2010), or order 2, i.e. STA/LTA.
|
||||||
:param data: data, time series (whether seismogram or CF)
|
|
||||||
:type data: tuple
|
|
||||||
:return: HOS cf
|
:return: HOS cf
|
||||||
:rtype:
|
:rtype:
|
||||||
"""
|
"""
|
||||||
@ -277,47 +277,28 @@ class HOScf(CharacteristicFunction):
|
|||||||
elif self.getOrder() == 4: # this is kurtosis
|
elif self.getOrder() == 4: # this is kurtosis
|
||||||
y = np.power(xnp, 4)
|
y = np.power(xnp, 4)
|
||||||
y1 = np.power(xnp, 2)
|
y1 = np.power(xnp, 2)
|
||||||
elif self.getOrder() == 2: # this is variance, used for STA/LTA processing
|
|
||||||
y = np.power(xnp, 2)
|
|
||||||
y1 = np.power(xnp, 2)
|
|
||||||
|
|
||||||
# Initialisation
|
# Initialisation
|
||||||
# t2: long term moving window
|
# t2: long term moving window
|
||||||
ilta = int(round(self.getTime2() / self.getIncrement()))
|
ilta = int(round(self.getTime2() / self.getIncrement()))
|
||||||
ista = int(round((self.getTime2() / 10) / self.getIncrement())) # TODO: still hard coded!!
|
|
||||||
lta = y[0]
|
lta = y[0]
|
||||||
lta1 = y1[0]
|
lta1 = y1[0]
|
||||||
sta = y[0]
|
|
||||||
# moving windows
|
# moving windows
|
||||||
LTA = np.zeros(len(xnp))
|
LTA = np.zeros(len(xnp))
|
||||||
STA = np.zeros(len(xnp))
|
|
||||||
for j in range(0, len(xnp)):
|
for j in range(0, len(xnp)):
|
||||||
if j < 4:
|
if j < 4:
|
||||||
LTA[j] = 0
|
LTA[j] = 0
|
||||||
STA[j] = 0
|
|
||||||
elif j <= ista and self.getOrder() == 2:
|
|
||||||
lta = (y[j] + lta * (j - 1)) / j
|
|
||||||
if self.getOrder() == 2:
|
|
||||||
sta = (y[j] + sta * (j - 1)) / j
|
|
||||||
# elif j < 4:
|
|
||||||
elif j <= ilta:
|
elif j <= ilta:
|
||||||
lta = (y[j] + lta * (j - 1)) / j
|
lta = (y[j] + lta * (j - 1)) / j
|
||||||
lta1 = (y1[j] + lta1 * (j - 1)) / j
|
lta1 = (y1[j] + lta1 * (j - 1)) / j
|
||||||
if self.getOrder() == 2:
|
|
||||||
sta = (y[j] - y[j - ista]) / ista + sta
|
|
||||||
else:
|
else:
|
||||||
lta = (y[j] - y[j - ilta]) / ilta + lta
|
lta = (y[j] - y[j - ilta]) / ilta + lta
|
||||||
lta1 = (y1[j] - y1[j - ilta]) / ilta + lta1
|
lta1 = (y1[j] - y1[j - ilta]) / ilta + lta1
|
||||||
if self.getOrder() == 2:
|
|
||||||
sta = (y[j] - y[j - ista]) / ista + sta
|
|
||||||
# define LTA
|
# define LTA
|
||||||
if self.getOrder() == 3:
|
if self.getOrder() == 3:
|
||||||
LTA[j] = lta / np.power(lta1, 1.5)
|
LTA[j] = lta / np.power(lta1, 1.5)
|
||||||
elif self.getOrder() == 4:
|
elif self.getOrder() == 4:
|
||||||
LTA[j] = lta / np.power(lta1, 2)
|
LTA[j] = lta / np.power(lta1, 2)
|
||||||
else:
|
|
||||||
LTA[j] = lta
|
|
||||||
STA[j] = sta
|
|
||||||
|
|
||||||
# remove NaN's with first not-NaN-value,
|
# remove NaN's with first not-NaN-value,
|
||||||
# so autopicker doesnt pick discontinuity at start of the trace
|
# so autopicker doesnt pick discontinuity at start of the trace
|
||||||
@ -326,10 +307,7 @@ class HOScf(CharacteristicFunction):
|
|||||||
first = ind[0]
|
first = ind[0]
|
||||||
LTA[:first] = LTA[first]
|
LTA[:first] = LTA[first]
|
||||||
|
|
||||||
if self.getOrder() > 2:
|
|
||||||
self.cf = LTA
|
self.cf = LTA
|
||||||
else: # order 2 means STA/LTA!
|
|
||||||
self.cf = STA / LTA
|
|
||||||
self.xcf = x
|
self.xcf = x
|
||||||
|
|
||||||
|
|
||||||
@ -339,12 +317,10 @@ class ARZcf(CharacteristicFunction):
|
|||||||
super(ARZcf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Parorder"],
|
super(ARZcf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Parorder"],
|
||||||
fnoise=pickparams["addnoise"])
|
fnoise=pickparams["addnoise"])
|
||||||
|
|
||||||
def calcCF(self, data):
|
def calcCF(self):
|
||||||
"""
|
"""
|
||||||
function used to calculate the AR prediction error from a single vertical trace. Can be used to pick
|
function used to calculate the AR prediction error from a single vertical trace. Can be used to pick
|
||||||
P onsets.
|
P onsets.
|
||||||
:param data:
|
|
||||||
:type data: ~obspy.core.stream.Stream
|
|
||||||
:return: ARZ cf
|
:return: ARZ cf
|
||||||
:rtype:
|
:rtype:
|
||||||
"""
|
"""
|
||||||
@ -475,14 +451,12 @@ class ARHcf(CharacteristicFunction):
|
|||||||
super(ARHcf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Sarorder"],
|
super(ARHcf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Sarorder"],
|
||||||
fnoise=pickparams["addnoise"])
|
fnoise=pickparams["addnoise"])
|
||||||
|
|
||||||
def calcCF(self, data):
|
def calcCF(self):
|
||||||
"""
|
"""
|
||||||
Function to calculate a characteristic function using autoregressive modelling of the waveform of
|
Function to calculate a characteristic function using autoregressive modelling of the waveform of
|
||||||
both horizontal traces.
|
both horizontal traces.
|
||||||
The waveform is predicted in a moving time window using the calculated AR parameters. The difference
|
The waveform is predicted in a moving time window using the calculated AR parameters. The difference
|
||||||
between the predicted and the actual waveform servers as a characteristic function.
|
between the predicted and the actual waveform servers as a characteristic function.
|
||||||
:param data: wavefor stream
|
|
||||||
:type data: ~obspy.core.stream.Stream
|
|
||||||
:return: ARH cf
|
:return: ARH cf
|
||||||
:rtype:
|
:rtype:
|
||||||
"""
|
"""
|
||||||
@ -631,14 +605,12 @@ class AR3Ccf(CharacteristicFunction):
|
|||||||
super(AR3Ccf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Sarorder"],
|
super(AR3Ccf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Sarorder"],
|
||||||
fnoise=pickparams["addnoise"])
|
fnoise=pickparams["addnoise"])
|
||||||
|
|
||||||
def calcCF(self, data):
|
def calcCF(self):
|
||||||
"""
|
"""
|
||||||
Function to calculate a characteristic function using autoregressive modelling of the waveform of
|
Function to calculate a characteristic function using autoregressive modelling of the waveform of
|
||||||
all three traces.
|
all three traces.
|
||||||
The waveform is predicted in a moving time window using the calculated AR parameters. The difference
|
The waveform is predicted in a moving time window using the calculated AR parameters. The difference
|
||||||
between the predicted and the actual waveform servers as a characteristic function
|
between the predicted and the actual waveform servers as a characteristic function
|
||||||
:param data: stream holding all three traces
|
|
||||||
:type data: ~obspy.core.stream.Stream
|
|
||||||
:return: AR3C cf
|
:return: AR3C cf
|
||||||
:rtype:
|
:rtype:
|
||||||
"""
|
"""
|
||||||
|
@ -37,7 +37,8 @@ class AutoPicker(object):
|
|||||||
|
|
||||||
warnings.simplefilter('ignore')
|
warnings.simplefilter('ignore')
|
||||||
|
|
||||||
def __init__(self, cf, TSNR, PickWindow, iplot=0, aus=None, Tsmooth=None, Pick1=None, fig=None, linecolor='k'):
|
def __init__(self, cf, TSNR, PickWindow, iplot=0, aus=None, Tsmooth=None, Pick1=None,
|
||||||
|
fig=None, linecolor='k', ogstream=None):
|
||||||
"""
|
"""
|
||||||
Create AutoPicker object
|
Create AutoPicker object
|
||||||
:param cf: characteristic function, on which the picking algorithm is applied
|
:param cf: characteristic function, on which the picking algorithm is applied
|
||||||
@ -59,12 +60,15 @@ class AutoPicker(object):
|
|||||||
:type fig: `~matplotlib.figure.Figure`
|
:type fig: `~matplotlib.figure.Figure`
|
||||||
:param linecolor: matplotlib line color string
|
:param linecolor: matplotlib line color string
|
||||||
:type linecolor: str
|
:type linecolor: str
|
||||||
|
:param ogstream: original stream (waveform), e.g. for plotting purposes
|
||||||
|
:type ogstream: `~obspy.core.stream.Stream`
|
||||||
"""
|
"""
|
||||||
|
|
||||||
assert isinstance(cf, CharacteristicFunction), "%s is not a CharacteristicFunction object" % str(cf)
|
assert isinstance(cf, CharacteristicFunction), "%s is not a CharacteristicFunction object" % str(cf)
|
||||||
self._linecolor = linecolor
|
self._linecolor = linecolor
|
||||||
self._pickcolor_p = 'b'
|
self._pickcolor_p = 'b'
|
||||||
self.cf = cf.getCF()
|
self.cf = cf.getCF()
|
||||||
|
self.ogstream = ogstream
|
||||||
self.Tcf = cf.getTimeArray()
|
self.Tcf = cf.getTimeArray()
|
||||||
self.Data = cf.getXCF()
|
self.Data = cf.getXCF()
|
||||||
self.dt = cf.getIncrement()
|
self.dt = cf.getIncrement()
|
||||||
@ -173,7 +177,7 @@ class AICPicker(AutoPicker):
|
|||||||
nn = np.isnan(self.cf)
|
nn = np.isnan(self.cf)
|
||||||
if len(nn) > 1:
|
if len(nn) > 1:
|
||||||
self.cf[nn] = 0
|
self.cf[nn] = 0
|
||||||
# taper AIC-CF to get rid off side maxima
|
# taper AIC-CF to get rid of side maxima
|
||||||
tap = np.hanning(len(self.cf))
|
tap = np.hanning(len(self.cf))
|
||||||
aic = tap * self.cf + max(abs(self.cf))
|
aic = tap * self.cf + max(abs(self.cf))
|
||||||
# smooth AIC-CF
|
# smooth AIC-CF
|
||||||
@ -316,16 +320,7 @@ class AICPicker(AutoPicker):
|
|||||||
plt.close(fig)
|
plt.close(fig)
|
||||||
return
|
return
|
||||||
iislope = islope[0][0:imax + 1]
|
iislope = islope[0][0:imax + 1]
|
||||||
# MP MP change slope calculation
|
dataslope = self.Data[0].data[iislope]
|
||||||
# get all maxima of aicsmooth
|
|
||||||
iaicmaxima = argrelmax(aicsmooth)[0]
|
|
||||||
# get first index of maximum after pickindex (indices saved in iaicmaxima)
|
|
||||||
aicmax = iaicmaxima[np.where(iaicmaxima > pickindex)[0]]
|
|
||||||
if len(aicmax) > 0:
|
|
||||||
iaicmax = aicmax[0]
|
|
||||||
else:
|
|
||||||
iaicmax = -1
|
|
||||||
dataslope = aicsmooth[pickindex: iaicmax]
|
|
||||||
# calculate slope as polynomal fit of order 1
|
# calculate slope as polynomal fit of order 1
|
||||||
xslope = np.arange(0, len(dataslope), 1)
|
xslope = np.arange(0, len(dataslope), 1)
|
||||||
try:
|
try:
|
||||||
@ -336,7 +331,7 @@ class AICPicker(AutoPicker):
|
|||||||
else:
|
else:
|
||||||
self.slope = 1 / (len(dataslope) * self.Data[0].stats.delta) * (datafit[-1] - datafit[0])
|
self.slope = 1 / (len(dataslope) * self.Data[0].stats.delta) * (datafit[-1] - datafit[0])
|
||||||
# normalize slope to maximum of cf to make it unit independent
|
# normalize slope to maximum of cf to make it unit independent
|
||||||
self.slope /= aicsmooth[iaicmax]
|
self.slope /= self.Data[0].data[icfmax]
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("AICPicker: Problems with data fitting! {}".format(e))
|
print("AICPicker: Problems with data fitting! {}".format(e))
|
||||||
|
|
||||||
@ -356,6 +351,12 @@ class AICPicker(AutoPicker):
|
|||||||
self.Tcf = self.Tcf[0:len(self.Tcf) - 1]
|
self.Tcf = self.Tcf[0:len(self.Tcf) - 1]
|
||||||
ax1.plot(self.Tcf, cf / max(cf), color=self._linecolor, linewidth=0.7, label='(HOS-/AR-) Data')
|
ax1.plot(self.Tcf, cf / max(cf), color=self._linecolor, linewidth=0.7, label='(HOS-/AR-) Data')
|
||||||
ax1.plot(self.Tcf, aicsmooth / max(aicsmooth), 'r', label='Smoothed AIC-CF')
|
ax1.plot(self.Tcf, aicsmooth / max(aicsmooth), 'r', label='Smoothed AIC-CF')
|
||||||
|
# plot the original waveform also for evaluation of the CF and pick
|
||||||
|
if self.ogstream:
|
||||||
|
data = self.ogstream[0].data
|
||||||
|
if len(data) == len(self.Tcf):
|
||||||
|
ax1.plot(self.Tcf, 0.5 * data / max(data), 'k', label='Seismogram', alpha=0.3, zorder=0,
|
||||||
|
lw=0.5)
|
||||||
if self.Pick is not None:
|
if self.Pick is not None:
|
||||||
ax1.plot([self.Pick, self.Pick], [-0.1, 0.5], 'b', linewidth=2, label='AIC-Pick')
|
ax1.plot([self.Pick, self.Pick], [-0.1, 0.5], 'b', linewidth=2, label='AIC-Pick')
|
||||||
ax1.set_xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
|
ax1.set_xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
|
||||||
@ -376,7 +377,7 @@ class AICPicker(AutoPicker):
|
|||||||
label='Signal Window')
|
label='Signal Window')
|
||||||
ax2.axvspan(self.Tcf[iislope[0]], self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0,
|
ax2.axvspan(self.Tcf[iislope[0]], self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0,
|
||||||
label='Slope Window')
|
label='Slope Window')
|
||||||
ax2.plot(self.Tcf[pickindex: iaicmax], datafit, 'g', linewidth=2,
|
ax2.plot(self.Tcf[iislope], datafit, 'g', linewidth=2,
|
||||||
label='Slope') # MP MP changed temporarily!
|
label='Slope') # MP MP changed temporarily!
|
||||||
|
|
||||||
if self.slope is not None:
|
if self.slope is not None:
|
||||||
|
@ -15,7 +15,7 @@ import numpy as np
|
|||||||
from obspy.core import Stream, UTCDateTime
|
from obspy.core import Stream, UTCDateTime
|
||||||
from scipy.signal import argrelmax
|
from scipy.signal import argrelmax
|
||||||
|
|
||||||
from pylot.core.util.utils import get_bool, get_None, SetChannelComponents
|
from pylot.core.util.utils import get_bool, get_none, SetChannelComponents, common_range
|
||||||
|
|
||||||
|
|
||||||
def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None, linecolor='k'):
|
def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None, linecolor='k'):
|
||||||
@ -136,7 +136,7 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None, linecol
|
|||||||
PickError = symmetrize_error(diffti_te, diffti_tl)
|
PickError = symmetrize_error(diffti_te, diffti_tl)
|
||||||
|
|
||||||
if iplot > 1:
|
if iplot > 1:
|
||||||
if get_None(fig) is None:
|
if get_none(fig) is None:
|
||||||
fig = plt.figure() # iplot)
|
fig = plt.figure() # iplot)
|
||||||
plt_flag = 1
|
plt_flag = 1
|
||||||
fig._tight = True
|
fig._tight = True
|
||||||
@ -275,7 +275,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None, linecolor='k'):
|
|||||||
try:
|
try:
|
||||||
P1 = np.polyfit(xslope1, xraw[islope1], 1)
|
P1 = np.polyfit(xslope1, xraw[islope1], 1)
|
||||||
datafit1 = np.polyval(P1, xslope1)
|
datafit1 = np.polyval(P1, xslope1)
|
||||||
except Exception as e:
|
except ValueError as e:
|
||||||
print("fmpicker: Problems with data fit! {}".format(e))
|
print("fmpicker: Problems with data fit! {}".format(e))
|
||||||
print("Skip first motion determination!")
|
print("Skip first motion determination!")
|
||||||
return FM
|
return FM
|
||||||
@ -321,7 +321,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None, linecolor='k'):
|
|||||||
try:
|
try:
|
||||||
P2 = np.polyfit(xslope2, xfilt[islope2], 1)
|
P2 = np.polyfit(xslope2, xfilt[islope2], 1)
|
||||||
datafit2 = np.polyval(P2, xslope2)
|
datafit2 = np.polyval(P2, xslope2)
|
||||||
except Exception as e:
|
except ValueError as e:
|
||||||
emsg = 'fmpicker: polyfit failed: {}'.format(e)
|
emsg = 'fmpicker: polyfit failed: {}'.format(e)
|
||||||
print(emsg)
|
print(emsg)
|
||||||
return FM
|
return FM
|
||||||
@ -344,7 +344,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None, linecolor='k'):
|
|||||||
print("fmpicker: Found polarity %s" % FM)
|
print("fmpicker: Found polarity %s" % FM)
|
||||||
|
|
||||||
if iplot > 1:
|
if iplot > 1:
|
||||||
if get_None(fig) is None:
|
if get_none(fig) is None:
|
||||||
fig = plt.figure() # iplot)
|
fig = plt.figure() # iplot)
|
||||||
plt_flag = 1
|
plt_flag = 1
|
||||||
fig._tight = True
|
fig._tight = True
|
||||||
@ -828,14 +828,22 @@ def checksignallength(X, pick, minsiglength, pickparams, iplot=0, fig=None, line
|
|||||||
if len(X) > 1:
|
if len(X) > 1:
|
||||||
# all three components available
|
# all three components available
|
||||||
# make sure, all components have equal lengths
|
# make sure, all components have equal lengths
|
||||||
ilen = min([len(X[0].data), len(X[1].data), len(X[2].data)])
|
earliest_starttime = min(tr.stats.starttime for tr in X)
|
||||||
x1 = X[0][0:ilen]
|
cuttimes = common_range(X)
|
||||||
x2 = X[1][0:ilen]
|
X = X.slice(cuttimes[0], cuttimes[1])
|
||||||
x3 = X[2][0:ilen]
|
x1, x2, x3 = X[:3]
|
||||||
|
|
||||||
|
if not (len(x1) == len(x2) == len(x3)):
|
||||||
|
raise PickingFailedException('checksignallength: unequal lengths of components!')
|
||||||
|
|
||||||
# get RMS trace
|
# get RMS trace
|
||||||
rms = np.sqrt((np.power(x1, 2) + np.power(x2, 2) + np.power(x3, 2)) / 3)
|
rms = np.sqrt((np.power(x1, 2) + np.power(x2, 2) + np.power(x3, 2)) / 3)
|
||||||
|
ilen = len(rms)
|
||||||
|
dt = earliest_starttime - X[0].stats.starttime
|
||||||
|
pick -= dt
|
||||||
else:
|
else:
|
||||||
x1 = X[0].data
|
x1 = X[0].data
|
||||||
|
x2 = x3 = None
|
||||||
ilen = len(x1)
|
ilen = len(x1)
|
||||||
rms = abs(x1)
|
rms = abs(x1)
|
||||||
|
|
||||||
@ -868,12 +876,16 @@ def checksignallength(X, pick, minsiglength, pickparams, iplot=0, fig=None, line
|
|||||||
returnflag = 0
|
returnflag = 0
|
||||||
|
|
||||||
if iplot > 1:
|
if iplot > 1:
|
||||||
if get_None(fig) is None:
|
if get_none(fig) is None:
|
||||||
fig = plt.figure() # iplot)
|
fig = plt.figure() # iplot)
|
||||||
plt_flag = 1
|
plt_flag = 1
|
||||||
fig._tight = True
|
fig._tight = True
|
||||||
ax = fig.add_subplot(111)
|
ax = fig.add_subplot(111)
|
||||||
ax.plot(t, rms, color=linecolor, linewidth=0.7, label='RMS Data')
|
ax.plot(t, rms, color=linecolor, linewidth=0.7, label='RMS Data')
|
||||||
|
ax.plot(t, x1, 'k', alpha=0.3, lw=0.3, zorder=0)
|
||||||
|
if x2 is not None and x3 is not None:
|
||||||
|
ax.plot(t, x2, 'r', alpha=0.3, lw=0.3, zorder=0)
|
||||||
|
ax.plot(t, x3, 'g', alpha=0.3, lw=0.3, zorder=0)
|
||||||
ax.axvspan(t[inoise[0]], t[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
|
ax.axvspan(t[inoise[0]], t[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
|
||||||
ax.axvspan(t[isignal[0]], t[isignal[-1]], color='b', alpha=0.2, lw=0, label='Signal Window')
|
ax.axvspan(t[isignal[0]], t[isignal[-1]], color='b', alpha=0.2, lw=0, label='Signal Window')
|
||||||
ax.plot([t[isignal[0]], t[isignal[len(isignal) - 1]]],
|
ax.plot([t[isignal[0]], t[isignal[len(isignal) - 1]]],
|
||||||
@ -883,6 +895,7 @@ def checksignallength(X, pick, minsiglength, pickparams, iplot=0, fig=None, line
|
|||||||
ax.set_xlabel('Time [s] since %s' % X[0].stats.starttime)
|
ax.set_xlabel('Time [s] since %s' % X[0].stats.starttime)
|
||||||
ax.set_ylabel('Counts')
|
ax.set_ylabel('Counts')
|
||||||
ax.set_title('Check for Signal Length, Station %s' % X[0].stats.station)
|
ax.set_title('Check for Signal Length, Station %s' % X[0].stats.station)
|
||||||
|
ax.set_xlim(pickparams["pstart"], pickparams["pstop"])
|
||||||
ax.set_yticks([])
|
ax.set_yticks([])
|
||||||
if plt_flag == 1:
|
if plt_flag == 1:
|
||||||
fig.show()
|
fig.show()
|
||||||
@ -1213,14 +1226,14 @@ def checkZ4S(X, pick, pickparams, iplot, fig=None, linecolor='k'):
|
|||||||
t = np.linspace(diff_dict[key], trace.stats.endtime - trace.stats.starttime + diff_dict[key],
|
t = np.linspace(diff_dict[key], trace.stats.endtime - trace.stats.starttime + diff_dict[key],
|
||||||
trace.stats.npts)
|
trace.stats.npts)
|
||||||
if i == 0:
|
if i == 0:
|
||||||
if get_None(fig) is None:
|
if get_none(fig) is None:
|
||||||
fig = plt.figure() # self.iplot) ### WHY? MP MP
|
fig = plt.figure() # self.iplot) ### WHY? MP MP
|
||||||
plt_flag = 1
|
plt_flag = 1
|
||||||
ax1 = fig.add_subplot(3, 1, i + 1)
|
ax1 = fig.add_subplot(3, 1, i + 1)
|
||||||
ax = ax1
|
ax = ax1
|
||||||
ax.set_title('CheckZ4S, Station %s' % zdat[0].stats.station)
|
ax.set_title('CheckZ4S, Station %s' % zdat[0].stats.station)
|
||||||
else:
|
else:
|
||||||
if get_None(fig) is None:
|
if get_none(fig) is None:
|
||||||
fig = plt.figure() # self.iplot) ### WHY? MP MP
|
fig = plt.figure() # self.iplot) ### WHY? MP MP
|
||||||
plt_flag = 1
|
plt_flag = 1
|
||||||
ax = fig.add_subplot(3, 1, i + 1, sharex=ax1)
|
ax = fig.add_subplot(3, 1, i + 1, sharex=ax1)
|
||||||
@ -1494,7 +1507,7 @@ def getQualityFromUncertainty(uncertainty, Errors):
|
|||||||
# set initial quality to 4 (worst) and change only if one condition is hit
|
# set initial quality to 4 (worst) and change only if one condition is hit
|
||||||
quality = 4
|
quality = 4
|
||||||
|
|
||||||
if get_None(uncertainty) is None:
|
if get_none(uncertainty) is None:
|
||||||
return quality
|
return quality
|
||||||
|
|
||||||
if uncertainty <= Errors[0]:
|
if uncertainty <= Errors[0]:
|
||||||
|
@ -5,14 +5,17 @@ import traceback
|
|||||||
|
|
||||||
import cartopy.crs as ccrs
|
import cartopy.crs as ccrs
|
||||||
import cartopy.feature as cf
|
import cartopy.feature as cf
|
||||||
|
from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
|
||||||
import matplotlib
|
import matplotlib
|
||||||
import matplotlib.patheffects as PathEffects
|
import matplotlib.patheffects as PathEffects
|
||||||
import matplotlib.pyplot as plt
|
import matplotlib.pyplot as plt
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import obspy
|
import obspy
|
||||||
from PySide2 import QtWidgets
|
from PySide2 import QtWidgets, QtGui
|
||||||
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
|
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
|
||||||
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
|
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
|
||||||
|
from obspy import UTCDateTime
|
||||||
|
|
||||||
from pylot.core.util.utils import identifyPhaseID
|
from pylot.core.util.utils import identifyPhaseID
|
||||||
from scipy.interpolate import griddata
|
from scipy.interpolate import griddata
|
||||||
|
|
||||||
@ -24,10 +27,10 @@ matplotlib.use('Qt5Agg')
|
|||||||
|
|
||||||
class MplCanvas(FigureCanvas):
|
class MplCanvas(FigureCanvas):
|
||||||
|
|
||||||
def __init__(self, parent=None, extern_axes=None, width=5, height=4, dpi=100):
|
def __init__(self, extern_axes=None, projection=None, width=15, height=5, dpi=100):
|
||||||
if extern_axes is None:
|
if extern_axes is None:
|
||||||
self.fig = plt.figure(figsize=(width, height), dpi=dpi)
|
self.fig = plt.figure(figsize=(width, height), dpi=dpi)
|
||||||
self.axes = self.fig.add_subplot(111)
|
self.axes = self.fig.add_subplot(111, projection=projection)
|
||||||
else:
|
else:
|
||||||
self.fig = extern_axes.figure
|
self.fig = extern_axes.figure
|
||||||
self.axes = extern_axes
|
self.axes = extern_axes
|
||||||
@ -59,24 +62,30 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
self.parameter = parameter if parameter else parent._inputs
|
self.parameter = parameter if parameter else parent._inputs
|
||||||
|
|
||||||
self.picks_rel = {}
|
self.picks_rel = {}
|
||||||
|
self.picks_rel_mean_corrected = {}
|
||||||
self.marked_stations = []
|
self.marked_stations = []
|
||||||
self.highlighted_stations = []
|
self.highlighted_stations = []
|
||||||
|
|
||||||
# call functions to draw everything
|
# call functions to draw everything
|
||||||
|
self.projection = ccrs.PlateCarree()
|
||||||
self.init_graphics()
|
self.init_graphics()
|
||||||
|
self.ax = self.canvas.axes
|
||||||
|
self.ax.set_adjustable('datalim')
|
||||||
|
|
||||||
self.init_stations()
|
self.init_stations()
|
||||||
self.init_crtpyMap()
|
self.init_crtpyMap()
|
||||||
self.init_map()
|
self.init_map()
|
||||||
|
|
||||||
# set original map limits to fall back on when home button is pressed
|
# set original map limits to fall back on when home button is pressed
|
||||||
self.org_xlim = self.canvas.axes.get_xlim()
|
self.org_xlim = self.ax.get_xlim()
|
||||||
self.org_ylim = self.canvas.axes.get_ylim()
|
self.org_ylim = self.ax.get_ylim()
|
||||||
|
|
||||||
# initial map without event
|
# initial map without event
|
||||||
self.canvas.axes.set_xlim(self.org_xlim[0], self.org_xlim[1])
|
self.ax.set_xlim(self.org_xlim[0], self.org_xlim[1])
|
||||||
self.canvas.axes.set_ylim(self.org_ylim[0], self.org_ylim[1])
|
self.ax.set_ylim(self.org_ylim[0], self.org_ylim[1])
|
||||||
|
|
||||||
self._style = None if not hasattr(parent, '_style') else parent._style
|
self._style = None if not hasattr(parent, '_style') else parent._style
|
||||||
|
|
||||||
|
|
||||||
def init_map(self):
|
def init_map(self):
|
||||||
self.init_colormap()
|
self.init_colormap()
|
||||||
self.connectSignals()
|
self.connectSignals()
|
||||||
@ -89,23 +98,24 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
# initialize figure elements
|
# initialize figure elements
|
||||||
|
|
||||||
if self.extern_plot_axes is None:
|
if self.extern_plot_axes is None:
|
||||||
self.canvas = MplCanvas(self)
|
self.canvas = MplCanvas(projection=self.projection)
|
||||||
self.plotWidget = FigureCanvas(self.canvas.fig)
|
|
||||||
else:
|
else:
|
||||||
self.canvas = MplCanvas(self, extern_axes=self.extern_plot_axes)
|
self.canvas = MplCanvas(extern_axes=self.extern_plot_axes)
|
||||||
self.plotWidget = FigureCanvas(self.canvas.fig)
|
|
||||||
|
self.plotWidget = self.canvas
|
||||||
|
|
||||||
# initialize GUI elements
|
# initialize GUI elements
|
||||||
self.status_label = QtWidgets.QLabel()
|
self.status_label = QtWidgets.QLabel()
|
||||||
self.map_reset_button = QtWidgets.QPushButton('Reset Map View')
|
self.map_reset_button = QtWidgets.QPushButton('Reset Map View')
|
||||||
self.save_map_button = QtWidgets.QPushButton('Save Map')
|
self.save_map_button = QtWidgets.QPushButton('Save Map')
|
||||||
self.go2eq_button = QtWidgets.QPushButton('Go to Event Location')
|
self.go2eq_button = QtWidgets.QPushButton('Go to Event Location')
|
||||||
|
self.subtract_mean_cb = QtWidgets.QCheckBox('Subtract mean')
|
||||||
|
|
||||||
self.main_box = QtWidgets.QVBoxLayout()
|
self.main_box = QtWidgets.QVBoxLayout()
|
||||||
self.setLayout(self.main_box)
|
self.setLayout(self.main_box)
|
||||||
|
|
||||||
self.top_row = QtWidgets.QHBoxLayout()
|
self.top_row = QtWidgets.QHBoxLayout()
|
||||||
self.main_box.addLayout(self.top_row, 1)
|
self.main_box.addLayout(self.top_row, 0)
|
||||||
|
|
||||||
self.comboBox_phase = QtWidgets.QComboBox()
|
self.comboBox_phase = QtWidgets.QComboBox()
|
||||||
self.comboBox_phase.insertItem(0, 'P')
|
self.comboBox_phase.insertItem(0, 'P')
|
||||||
@ -124,8 +134,8 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
self.cmaps_box = QtWidgets.QComboBox()
|
self.cmaps_box = QtWidgets.QComboBox()
|
||||||
self.cmaps_box.setMaxVisibleItems(20)
|
self.cmaps_box.setMaxVisibleItems(20)
|
||||||
[self.cmaps_box.addItem(map_name) for map_name in sorted(plt.colormaps())]
|
[self.cmaps_box.addItem(map_name) for map_name in sorted(plt.colormaps())]
|
||||||
# try to set to hsv as default
|
# try to set to plasma as default
|
||||||
self.cmaps_box.setCurrentIndex(self.cmaps_box.findText('hsv'))
|
self.cmaps_box.setCurrentIndex(self.cmaps_box.findText('plasma'))
|
||||||
|
|
||||||
self.top_row.addWidget(QtWidgets.QLabel('Select a phase: '))
|
self.top_row.addWidget(QtWidgets.QLabel('Select a phase: '))
|
||||||
self.top_row.addWidget(self.comboBox_phase)
|
self.top_row.addWidget(self.comboBox_phase)
|
||||||
@ -138,14 +148,15 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
self.top_row.addWidget(self.auto_refresh_box)
|
self.top_row.addWidget(self.auto_refresh_box)
|
||||||
self.top_row.addWidget(self.refresh_button)
|
self.top_row.addWidget(self.refresh_button)
|
||||||
|
|
||||||
self.main_box.addWidget(self.plotWidget, 1)
|
self.main_box.addWidget(self.plotWidget, 10)
|
||||||
|
|
||||||
self.bot_row = QtWidgets.QHBoxLayout()
|
self.bot_row = QtWidgets.QHBoxLayout()
|
||||||
self.main_box.addLayout(self.bot_row, 0.3)
|
self.main_box.addLayout(self.bot_row, 0)
|
||||||
self.bot_row.addWidget(QtWidgets.QLabel(''), 5)
|
self.bot_row.addWidget(QtWidgets.QLabel(''), 5)
|
||||||
self.bot_row.addWidget(self.map_reset_button, 2)
|
self.bot_row.addWidget(self.map_reset_button, 2)
|
||||||
self.bot_row.addWidget(self.go2eq_button, 2)
|
self.bot_row.addWidget(self.go2eq_button, 2)
|
||||||
self.bot_row.addWidget(self.save_map_button, 2)
|
self.bot_row.addWidget(self.save_map_button, 2)
|
||||||
|
self.bot_row.addWidget(self.subtract_mean_cb, 0)
|
||||||
self.bot_row.addWidget(self.status_label, 5)
|
self.bot_row.addWidget(self.status_label, 5)
|
||||||
|
|
||||||
def init_colormap(self):
|
def init_colormap(self):
|
||||||
@ -153,14 +164,12 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
self.init_lat_lon_grid()
|
self.init_lat_lon_grid()
|
||||||
|
|
||||||
def init_crtpyMap(self):
|
def init_crtpyMap(self):
|
||||||
self.canvas.axes.cla()
|
self.ax.add_feature(cf.LAND)
|
||||||
self.canvas.axes = plt.axes(projection=ccrs.PlateCarree())
|
self.ax.add_feature(cf.OCEAN)
|
||||||
self.canvas.axes.add_feature(cf.LAND)
|
self.ax.add_feature(cf.COASTLINE, linewidth=1, edgecolor='gray')
|
||||||
self.canvas.axes.add_feature(cf.OCEAN)
|
self.ax.add_feature(cf.BORDERS, alpha=0.7)
|
||||||
self.canvas.axes.add_feature(cf.COASTLINE, linewidth=1, edgecolor='gray')
|
self.ax.add_feature(cf.LAKES, alpha=0.7)
|
||||||
self.canvas.axes.add_feature(cf.BORDERS, alpha=0.7)
|
self.ax.add_feature(cf.RIVERS, linewidth=1)
|
||||||
self.canvas.axes.add_feature(cf.LAKES, alpha=0.7)
|
|
||||||
self.canvas.axes.add_feature(cf.RIVERS, linewidth=1)
|
|
||||||
|
|
||||||
# parallels and meridians
|
# parallels and meridians
|
||||||
self.add_merid_paral()
|
self.add_merid_paral()
|
||||||
@ -168,12 +177,8 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
self.canvas.fig.tight_layout()
|
self.canvas.fig.tight_layout()
|
||||||
|
|
||||||
def add_merid_paral(self):
|
def add_merid_paral(self):
|
||||||
self.gridlines = self.canvas.axes.gridlines(draw_labels=False, alpha=0.6, color='gray',
|
self.gridlines = self.ax.gridlines(draw_labels=False, alpha=0.6, color='gray',
|
||||||
linewidth=self.linewidth / 2, zorder=7)
|
linewidth=self.linewidth / 2, zorder=7, crs=ccrs.PlateCarree())
|
||||||
# TODO: current cartopy version does not support label removal. Devs are working on it.
|
|
||||||
# Should be fixed in coming cartopy versions
|
|
||||||
# self.gridlines.xformatter = LONGITUDE_FORMATTER
|
|
||||||
# self.gridlines.yformatter = LATITUDE_FORMATTER
|
|
||||||
|
|
||||||
def remove_merid_paral(self):
|
def remove_merid_paral(self):
|
||||||
if len(self.gridlines.xline_artists):
|
if len(self.gridlines.xline_artists):
|
||||||
@ -181,24 +186,24 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
self.gridlines.yline_artists[0].remove()
|
self.gridlines.yline_artists[0].remove()
|
||||||
|
|
||||||
def org_map_view(self):
|
def org_map_view(self):
|
||||||
self.canvas.axes.set_xlim(self.org_xlim[0], self.org_xlim[1])
|
self.ax.set_xlim(self.org_xlim[0], self.org_xlim[1])
|
||||||
self.canvas.axes.set_ylim(self.org_ylim[0], self.org_ylim[1])
|
self.ax.set_ylim(self.org_ylim[0], self.org_ylim[1])
|
||||||
# parallels and meridians
|
# parallels and meridians
|
||||||
self.remove_merid_paral()
|
#self.remove_merid_paral()
|
||||||
self.add_merid_paral()
|
#self.add_merid_paral()
|
||||||
|
|
||||||
self.canvas.axes.figure.canvas.draw_idle()
|
self.canvas.draw_idle()
|
||||||
|
|
||||||
def go2eq(self):
|
def go2eq(self):
|
||||||
if self.eventLoc:
|
if self.eventLoc:
|
||||||
lats, lons = self.eventLoc
|
lats, lons = self.eventLoc
|
||||||
self.canvas.axes.set_xlim(lons - 10, lons + 10)
|
self.ax.set_xlim(lons - 10, lons + 10)
|
||||||
self.canvas.axes.set_ylim(lats - 5, lats + 5)
|
self.ax.set_ylim(lats - 5, lats + 5)
|
||||||
# parallels and meridians
|
# parallels and meridians
|
||||||
self.remove_merid_paral()
|
#self.remove_merid_paral()
|
||||||
self.add_merid_paral()
|
#self.add_merid_paral()
|
||||||
|
|
||||||
self.canvas.axes.figure.canvas.draw_idle()
|
self.canvas.draw_idle()
|
||||||
|
|
||||||
else:
|
else:
|
||||||
self.status_label.setText('No event information available')
|
self.status_label.setText('No event information available')
|
||||||
@ -212,6 +217,7 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
self.map_reset_button.clicked.connect(self.org_map_view)
|
self.map_reset_button.clicked.connect(self.org_map_view)
|
||||||
self.go2eq_button.clicked.connect(self.go2eq)
|
self.go2eq_button.clicked.connect(self.go2eq)
|
||||||
self.save_map_button.clicked.connect(self.saveFigure)
|
self.save_map_button.clicked.connect(self.saveFigure)
|
||||||
|
self.subtract_mean_cb.stateChanged.connect(self.toggle_subtract_mean)
|
||||||
|
|
||||||
self.plotWidget.mpl_connect('motion_notify_event', self.mouse_moved)
|
self.plotWidget.mpl_connect('motion_notify_event', self.mouse_moved)
|
||||||
self.plotWidget.mpl_connect('scroll_event', self.mouse_scroll)
|
self.plotWidget.mpl_connect('scroll_event', self.mouse_scroll)
|
||||||
@ -220,21 +226,32 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
|
|
||||||
# set mouse events -----------------------------------------------------
|
# set mouse events -----------------------------------------------------
|
||||||
def mouse_moved(self, event):
|
def mouse_moved(self, event):
|
||||||
if not event.inaxes == self.canvas.axes:
|
if not event.inaxes == self.ax:
|
||||||
return
|
return
|
||||||
|
else:
|
||||||
|
cont, inds = self.sc.contains(event)
|
||||||
lat = event.ydata
|
lat = event.ydata
|
||||||
lon = event.xdata
|
lon = event.xdata
|
||||||
self.status_label.setText('Latitude: {:3.5f}, Longitude: {:3.5f}'.format(lat, lon))
|
text = f'Longitude: {lon:3.3f}, Latitude: {lat:3.3f}'
|
||||||
|
|
||||||
|
if cont:
|
||||||
|
indices = inds['ind']
|
||||||
|
text += ' | Station: ' if len(indices) == 1 else ' | Stations: '
|
||||||
|
text += ' - '.join([self._station_onpick_ids[index] for index in indices[:5]])
|
||||||
|
if len(indices) > 5:
|
||||||
|
text += '...'
|
||||||
|
|
||||||
|
self.status_label.setText(text)
|
||||||
|
|
||||||
def mouse_scroll(self, event):
|
def mouse_scroll(self, event):
|
||||||
if not event.inaxes == self.canvas.axes:
|
if not event.inaxes == self.ax:
|
||||||
return
|
return
|
||||||
|
|
||||||
zoom = {'up': 1. / 2., 'down': 2.}
|
zoom = {'up': 1. / 2., 'down': 2.}
|
||||||
|
|
||||||
if event.button in zoom:
|
if event.button in zoom:
|
||||||
xlim = self.canvas.axes.get_xlim()
|
xlim = self.ax.get_xlim()
|
||||||
ylim = self.canvas.axes.get_ylim()
|
ylim = self.ax.get_ylim()
|
||||||
|
|
||||||
x, y = event.xdata, event.ydata
|
x, y = event.xdata, event.ydata
|
||||||
|
|
||||||
@ -246,24 +263,24 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
yb = y - 0.5 * ydiff
|
yb = y - 0.5 * ydiff
|
||||||
yt = y + 0.5 * ydiff
|
yt = y + 0.5 * ydiff
|
||||||
|
|
||||||
self.canvas.axes.set_xlim(xl, xr)
|
self.ax.set_xlim(xl, xr)
|
||||||
self.canvas.axes.set_ylim(yb, yt)
|
self.ax.set_ylim(yb, yt)
|
||||||
# parallels and meridians
|
# parallels and meridians
|
||||||
self.remove_merid_paral()
|
#self.remove_merid_paral()
|
||||||
self.add_merid_paral()
|
#self.add_merid_paral()
|
||||||
|
|
||||||
self.canvas.axes.figure.canvas.draw_idle()
|
self.ax.figure.canvas.draw_idle()
|
||||||
|
|
||||||
def mouseLeftPress(self, event):
|
def mouseLeftPress(self, event):
|
||||||
if not event.inaxes == self.canvas.axes:
|
if not event.inaxes == self.ax:
|
||||||
return
|
return
|
||||||
self.map_x = event.xdata
|
self.map_x = event.xdata
|
||||||
self.map_y = event.ydata
|
self.map_y = event.ydata
|
||||||
self.map_xlim = self.canvas.axes.get_xlim()
|
self.map_xlim = self.ax.get_xlim()
|
||||||
self.map_ylim = self.canvas.axes.get_ylim()
|
self.map_ylim = self.ax.get_ylim()
|
||||||
|
|
||||||
def mouseLeftRelease(self, event):
|
def mouseLeftRelease(self, event):
|
||||||
if not event.inaxes == self.canvas.axes:
|
if not event.inaxes == self.ax:
|
||||||
return
|
return
|
||||||
new_x = event.xdata
|
new_x = event.xdata
|
||||||
new_y = event.ydata
|
new_y = event.ydata
|
||||||
@ -271,13 +288,13 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
dx = new_x - self.map_x
|
dx = new_x - self.map_x
|
||||||
dy = new_y - self.map_y
|
dy = new_y - self.map_y
|
||||||
|
|
||||||
self.canvas.axes.set_xlim((self.map_xlim[0] - dx, self.map_xlim[1] - dx))
|
self.ax.set_xlim((self.map_xlim[0] - dx, self.map_xlim[1] - dx))
|
||||||
self.canvas.axes.set_ylim(self.map_ylim[0] - dy, self.map_ylim[1] - dy)
|
self.ax.set_ylim(self.map_ylim[0] - dy, self.map_ylim[1] - dy)
|
||||||
# parallels and meridians
|
# parallels and meridians
|
||||||
self.remove_merid_paral()
|
#self.remove_merid_paral()
|
||||||
self.add_merid_paral()
|
#self.add_merid_paral()
|
||||||
|
|
||||||
self.canvas.axes.figure.canvas.draw_idle()
|
self.ax.figure.canvas.draw_idle()
|
||||||
|
|
||||||
def onpick(self, event):
|
def onpick(self, event):
|
||||||
btn_msg = {1: ' in selection. Aborted', 2: ' to delete a pick on. Aborted', 3: ' to display info.'}
|
btn_msg = {1: ' in selection. Aborted', 2: ' to delete a pick on. Aborted', 3: ' to display info.'}
|
||||||
@ -357,12 +374,6 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
def get_max_from_stations(self, key):
|
def get_max_from_stations(self, key):
|
||||||
return self._from_dict(max, key)
|
return self._from_dict(max, key)
|
||||||
|
|
||||||
def get_min_from_picks(self):
|
|
||||||
return min(self.picks_rel.values())
|
|
||||||
|
|
||||||
def get_max_from_picks(self):
|
|
||||||
return max(self.picks_rel.values())
|
|
||||||
|
|
||||||
def current_picks_dict(self):
|
def current_picks_dict(self):
|
||||||
picktype = self.comboBox_am.currentText().split(' ')[0]
|
picktype = self.comboBox_am.currentText().split(' ')[0]
|
||||||
auto_manu = {'auto': self.autopicks_dict,
|
auto_manu = {'auto': self.autopicks_dict,
|
||||||
@ -407,22 +418,34 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
print('Cannot display pick for station {}. Reason: {}'.format(station_name, e))
|
print('Cannot display pick for station {}. Reason: {}'.format(station_name, e))
|
||||||
return picks, uncertainties
|
return picks, uncertainties
|
||||||
|
|
||||||
def get_picks_rel(picks):
|
def get_picks_rel(picks, func=min):
|
||||||
picks_rel = {}
|
picks_rel = {}
|
||||||
picks_utc = []
|
picks_utc = []
|
||||||
for pick in picks.values():
|
for pick in picks.values():
|
||||||
if type(pick) is obspy.core.utcdatetime.UTCDateTime:
|
if type(pick) is UTCDateTime:
|
||||||
picks_utc.append(pick)
|
picks_utc.append(pick.timestamp)
|
||||||
if picks_utc:
|
if picks_utc:
|
||||||
self._earliest_picktime = min(picks_utc)
|
self._reference_picktime = UTCDateTime(func(picks_utc))
|
||||||
for st_id, pick in picks.items():
|
for st_id, pick in picks.items():
|
||||||
if type(pick) is obspy.core.utcdatetime.UTCDateTime:
|
if type(pick) is UTCDateTime:
|
||||||
pick -= self._earliest_picktime
|
pick -= self._reference_picktime
|
||||||
picks_rel[st_id] = pick
|
picks_rel[st_id] = pick
|
||||||
return picks_rel
|
return picks_rel
|
||||||
|
|
||||||
|
def get_picks_rel_mean_corr(picks):
|
||||||
|
return get_picks_rel(picks, func=np.nanmean)
|
||||||
|
|
||||||
self.picks, self.uncertainties = get_picks(self.stations_dict)
|
self.picks, self.uncertainties = get_picks(self.stations_dict)
|
||||||
self.picks_rel = get_picks_rel(self.picks)
|
self.picks_rel = get_picks_rel(self.picks)
|
||||||
|
self.picks_rel_mean_corrected = get_picks_rel_mean_corr(self.picks)
|
||||||
|
|
||||||
|
def toggle_subtract_mean(self):
|
||||||
|
if self.subtract_mean_cb.isChecked():
|
||||||
|
cmap = 'seismic'
|
||||||
|
else:
|
||||||
|
cmap = 'viridis'
|
||||||
|
self.cmaps_box.setCurrentIndex(self.cmaps_box.findText(cmap))
|
||||||
|
self._refresh_drawings()
|
||||||
|
|
||||||
def init_lat_lon_dimensions(self):
|
def init_lat_lon_dimensions(self):
|
||||||
# init minimum and maximum lon and lat dimensions
|
# init minimum and maximum lon and lat dimensions
|
||||||
@ -453,11 +476,12 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
return stations, latitudes, longitudes
|
return stations, latitudes, longitudes
|
||||||
|
|
||||||
def get_picks_lat_lon(self):
|
def get_picks_lat_lon(self):
|
||||||
|
picks_rel = self.picks_rel_mean_corrected if self.subtract_mean_cb.isChecked() else self.picks_rel
|
||||||
picks = []
|
picks = []
|
||||||
uncertainties = []
|
uncertainties = []
|
||||||
latitudes = []
|
latitudes = []
|
||||||
longitudes = []
|
longitudes = []
|
||||||
for st_id, pick in self.picks_rel.items():
|
for st_id, pick in picks_rel.items():
|
||||||
picks.append(pick)
|
picks.append(pick)
|
||||||
uncertainties.append(self.uncertainties.get(st_id))
|
uncertainties.append(self.uncertainties.get(st_id))
|
||||||
latitudes.append(self.stations_dict[st_id]['latitude'])
|
latitudes.append(self.stations_dict[st_id]['latitude'])
|
||||||
@ -469,26 +493,34 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
stat_dict = self.stations_dict['{}.{}'.format(network, station)]
|
stat_dict = self.stations_dict['{}.{}'.format(network, station)]
|
||||||
lat = stat_dict['latitude']
|
lat = stat_dict['latitude']
|
||||||
lon = stat_dict['longitude']
|
lon = stat_dict['longitude']
|
||||||
self.highlighted_stations.append(self.canvas.axes.scatter(lon, lat, s=self.pointsize, edgecolors=color,
|
self.highlighted_stations.append(self.ax.scatter(lon, lat, s=self.pointsize, edgecolors=color,
|
||||||
facecolors='none', zorder=12,
|
facecolors='none', zorder=12,
|
||||||
transform=ccrs.PlateCarree(), label='deleted'))
|
transform=ccrs.PlateCarree(), label='deleted'))
|
||||||
|
|
||||||
def openPickDlg(self, ind):
|
def openPickDlg(self, ind):
|
||||||
data = self._parent.get_data().getWFData()
|
try:
|
||||||
|
wfdata = self._parent.get_data().getWFData()
|
||||||
|
except AttributeError:
|
||||||
|
QtWidgets.QMessageBox.warning(
|
||||||
|
self, "PyLoT Warning",
|
||||||
|
"No waveform data found. Check if they were already loaded in Waveform plot tab."
|
||||||
|
)
|
||||||
|
return
|
||||||
|
wfdata_comp = self._parent.get_data().getAltWFdata()
|
||||||
for index in ind:
|
for index in ind:
|
||||||
network, station = self._station_onpick_ids[index].split('.')[:2]
|
network, station = self._station_onpick_ids[index].split('.')[:2]
|
||||||
pyl_mw = self._parent
|
pyl_mw = self._parent
|
||||||
try:
|
try:
|
||||||
data = data.select(station=station)
|
wfdata = wfdata.select(station=station)
|
||||||
if not data:
|
wfdata_comp = wfdata_comp.select(station=station)
|
||||||
|
if not wfdata:
|
||||||
self._warn('No data for station {}'.format(station))
|
self._warn('No data for station {}'.format(station))
|
||||||
return
|
return
|
||||||
pickDlg = PickDlg(self._parent, parameter=self.parameter,
|
pickDlg = PickDlg(self._parent, parameter=self.parameter,
|
||||||
data=data, network=network, station=station,
|
data=wfdata.copy(), data_compare=wfdata_comp.copy(), network=network, station=station,
|
||||||
picks=self._parent.get_current_event().getPick(station),
|
picks=self._parent.get_current_event().getPick(station),
|
||||||
autopicks=self._parent.get_current_event().getAutopick(station),
|
autopicks=self._parent.get_current_event().getAutopick(station),
|
||||||
filteroptions=self._parent.filteroptions, metadata=self.metadata,
|
filteroptions=self._parent.filteroptions, metadata=self.metadata,
|
||||||
model=self.parameter.get('taup_model'),
|
|
||||||
event=pyl_mw.get_current_event())
|
event=pyl_mw.get_current_event())
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
message = 'Could not generate Plot for station {st}.\n {er}'.format(st=station, er=e)
|
message = 'Could not generate Plot for station {st}.\n {er}'.format(st=station, er=e)
|
||||||
@ -516,20 +548,27 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
print(message, e)
|
print(message, e)
|
||||||
print(traceback.format_exc())
|
print(traceback.format_exc())
|
||||||
|
|
||||||
def draw_contour_filled(self, nlevel=50):
|
def draw_contour_filled(self, nlevel=51):
|
||||||
levels = np.linspace(self.get_min_from_picks(), self.get_max_from_picks(), nlevel)
|
if self.subtract_mean_cb.isChecked():
|
||||||
|
abs_max = self.get_residuals_absmax()
|
||||||
|
levels = np.linspace(-abs_max, abs_max, nlevel)
|
||||||
|
else:
|
||||||
|
levels = np.linspace(min(self.picks_rel.values()), max(self.picks_rel.values()), nlevel)
|
||||||
|
|
||||||
self.contourf = self.canvas.axes.contourf(self.longrid, self.latgrid, self.picksgrid_active, levels,
|
self.contourf = self.ax.contourf(self.longrid, self.latgrid, self.picksgrid_active, levels,
|
||||||
linewidths=self.linewidth * 5, transform=ccrs.PlateCarree(),
|
linewidths=self.linewidth * 5, transform=ccrs.PlateCarree(),
|
||||||
alpha=0.4, zorder=8, cmap=self.get_colormap())
|
alpha=0.4, zorder=8, cmap=self.get_colormap())
|
||||||
|
|
||||||
|
def get_residuals_absmax(self):
|
||||||
|
return np.max(np.absolute(list(self.picks_rel_mean_corrected.values())))
|
||||||
|
|
||||||
def get_colormap(self):
|
def get_colormap(self):
|
||||||
return plt.get_cmap(self.cmaps_box.currentText())
|
return plt.get_cmap(self.cmaps_box.currentText())
|
||||||
|
|
||||||
def scatter_all_stations(self):
|
def scatter_all_stations(self):
|
||||||
stations, lats, lons = self.get_st_lat_lon_for_plot()
|
stations, lats, lons = self.get_st_lat_lon_for_plot()
|
||||||
|
|
||||||
self.sc = self.canvas.axes.scatter(lons, lats, s=self.pointsize * 3, facecolor='none', marker='.',
|
self.sc = self.ax.scatter(lons, lats, s=self.pointsize * 3, facecolor='none', marker='.',
|
||||||
zorder=10, picker=True, edgecolor='0.5', label='Not Picked',
|
zorder=10, picker=True, edgecolor='0.5', label='Not Picked',
|
||||||
transform=ccrs.PlateCarree())
|
transform=ccrs.PlateCarree())
|
||||||
|
|
||||||
@ -537,7 +576,7 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
self._station_onpick_ids = stations
|
self._station_onpick_ids = stations
|
||||||
if self.eventLoc:
|
if self.eventLoc:
|
||||||
lats, lons = self.eventLoc
|
lats, lons = self.eventLoc
|
||||||
self.sc_event = self.canvas.axes.scatter(lons, lats, s=5 * self.pointsize, facecolor='red', zorder=11,
|
self.sc_event = self.ax.scatter(lons, lats, s=5 * self.pointsize, facecolor='red', zorder=11,
|
||||||
label='Event (might be outside map region)', marker='*',
|
label='Event (might be outside map region)', marker='*',
|
||||||
edgecolors='black',
|
edgecolors='black',
|
||||||
transform=ccrs.PlateCarree())
|
transform=ccrs.PlateCarree())
|
||||||
@ -553,7 +592,12 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
for uncertainty in uncertainties])
|
for uncertainty in uncertainties])
|
||||||
|
|
||||||
cmap = self.get_colormap()
|
cmap = self.get_colormap()
|
||||||
self.sc_picked = self.canvas.axes.scatter(lons, lats, s=sizes, edgecolors='white', cmap=cmap,
|
|
||||||
|
vmin = vmax = None
|
||||||
|
if self.subtract_mean_cb.isChecked():
|
||||||
|
vmin, vmax = -self.get_residuals_absmax(), self.get_residuals_absmax()
|
||||||
|
|
||||||
|
self.sc_picked = self.ax.scatter(lons, lats, s=sizes, edgecolors='white', cmap=cmap, vmin=vmin, vmax=vmax,
|
||||||
c=picks, zorder=11, label='Picked', transform=ccrs.PlateCarree())
|
c=picks, zorder=11, label='Picked', transform=ccrs.PlateCarree())
|
||||||
|
|
||||||
def annotate_ax(self):
|
def annotate_ax(self):
|
||||||
@ -572,20 +616,20 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
if st in self.marked_stations:
|
if st in self.marked_stations:
|
||||||
color = 'red'
|
color = 'red'
|
||||||
self.annotations.append(
|
self.annotations.append(
|
||||||
self.canvas.axes.annotate(' %s' % st, xy=(x + 0.003, y + 0.003), fontsize=self.pointsize / 4.,
|
self.ax.annotate(f'{st}', xy=(x + 0.003, y + 0.003), fontsize=self.pointsize / 4.,
|
||||||
fontweight='semibold', color=color, alpha=0.8,
|
fontweight='semibold', color=color, alpha=0.8,
|
||||||
transform=ccrs.PlateCarree(), zorder=14,
|
transform=ccrs.PlateCarree(), zorder=14,
|
||||||
path_effects=[PathEffects.withStroke(
|
path_effects=[PathEffects.withStroke(
|
||||||
linewidth=self.pointsize / 15., foreground='k')]))
|
linewidth=self.pointsize / 15., foreground='k')]))
|
||||||
|
|
||||||
self.legend = self.canvas.axes.legend(loc=1, framealpha=1)
|
self.legend = self.ax.legend(loc=1, framealpha=1)
|
||||||
self.legend.set_zorder(100)
|
self.legend.set_zorder(100)
|
||||||
self.legend.get_frame().set_facecolor((1, 1, 1, 0.95))
|
self.legend.get_frame().set_facecolor((1, 1, 1, 0.95))
|
||||||
|
|
||||||
def add_cbar(self, label):
|
def add_cbar(self, label):
|
||||||
self.cbax_bg = inset_axes(self.canvas.axes, width="6%", height="75%", loc=5)
|
self.cbax_bg = inset_axes(self.ax, width="6%", height="75%", loc=5)
|
||||||
cbax = inset_axes(self.canvas.axes, width='2%', height='70%', loc=5)
|
cbax = inset_axes(self.ax, width='2%', height='70%', loc=5)
|
||||||
cbar = self.canvas.axes.figure.colorbar(self.sc_picked, cax=cbax)
|
cbar = self.ax.figure.colorbar(self.sc_picked, cax=cbax)
|
||||||
cbar.set_label(label)
|
cbar.set_label(label)
|
||||||
cbax.yaxis.tick_left()
|
cbax.yaxis.tick_left()
|
||||||
cbax.yaxis.set_label_position('left')
|
cbax.yaxis.set_label_position('left')
|
||||||
@ -630,7 +674,9 @@ class Array_map(QtWidgets.QWidget):
|
|||||||
if picks_available:
|
if picks_available:
|
||||||
self.scatter_picked_stations()
|
self.scatter_picked_stations()
|
||||||
if hasattr(self, 'sc_picked'):
|
if hasattr(self, 'sc_picked'):
|
||||||
self.cbar = self.add_cbar(label='Time relative to first onset ({}) [s]'.format(self._earliest_picktime))
|
self.cbar = self.add_cbar(
|
||||||
|
label='Time relative to reference onset ({}) [s]'.format(self._reference_picktime)
|
||||||
|
)
|
||||||
self.comboBox_phase.setEnabled(True)
|
self.comboBox_phase.setEnabled(True)
|
||||||
else:
|
else:
|
||||||
self.comboBox_phase.setEnabled(False)
|
self.comboBox_phase.setEnabled(False)
|
||||||
|
@ -27,6 +27,10 @@ class Metadata(object):
|
|||||||
# saves which metadata files are from obspy dmt
|
# saves which metadata files are from obspy dmt
|
||||||
self.obspy_dmt_invs = []
|
self.obspy_dmt_invs = []
|
||||||
if inventory:
|
if inventory:
|
||||||
|
# make sure that no accidental backslashes mess up the path
|
||||||
|
if isinstance(inventory, str):
|
||||||
|
inventory = inventory.replace('\\', '/')
|
||||||
|
inventory = os.path.abspath(inventory)
|
||||||
if os.path.isdir(inventory):
|
if os.path.isdir(inventory):
|
||||||
self.add_inventory(inventory)
|
self.add_inventory(inventory)
|
||||||
if os.path.isfile(inventory):
|
if os.path.isfile(inventory):
|
||||||
@ -55,6 +59,8 @@ class Metadata(object):
|
|||||||
:type path_to_inventory: str
|
:type path_to_inventory: str
|
||||||
:return: None
|
:return: None
|
||||||
"""
|
"""
|
||||||
|
path_to_inventory = path_to_inventory.replace('\\', '/')
|
||||||
|
path_to_inventory = os.path.abspath(path_to_inventory)
|
||||||
assert (os.path.isdir(path_to_inventory)), '{} is no directory'.format(path_to_inventory)
|
assert (os.path.isdir(path_to_inventory)), '{} is no directory'.format(path_to_inventory)
|
||||||
if path_to_inventory not in self.inventories:
|
if path_to_inventory not in self.inventories:
|
||||||
self.inventories.append(path_to_inventory)
|
self.inventories.append(path_to_inventory)
|
||||||
@ -262,9 +268,6 @@ class Metadata(object):
|
|||||||
if not fnames:
|
if not fnames:
|
||||||
# search for station name in filename
|
# search for station name in filename
|
||||||
fnames = glob.glob(os.path.join(path_to_inventory, '*' + station + '*'))
|
fnames = glob.glob(os.path.join(path_to_inventory, '*' + station + '*'))
|
||||||
if not fnames:
|
|
||||||
# search for network name in filename
|
|
||||||
fnames = glob.glob(os.path.join(path_to_inventory, '*' + network + '*'))
|
|
||||||
if not fnames:
|
if not fnames:
|
||||||
if self.verbosity:
|
if self.verbosity:
|
||||||
print('Could not find filenames matching station name, network name or seed id')
|
print('Could not find filenames matching station name, network name or seed id')
|
||||||
@ -276,7 +279,7 @@ class Metadata(object):
|
|||||||
continue
|
continue
|
||||||
invtype, robj = self._read_metadata_file(os.path.join(path_to_inventory, fname))
|
invtype, robj = self._read_metadata_file(os.path.join(path_to_inventory, fname))
|
||||||
try:
|
try:
|
||||||
# robj.get_coordinates(station_seed_id) # TODO: Commented out, failed with Parser, is this needed?
|
robj.get_coordinates(station_seed_id)
|
||||||
self.inventory_files[fname] = {'invtype': invtype,
|
self.inventory_files[fname] = {'invtype': invtype,
|
||||||
'data': robj}
|
'data': robj}
|
||||||
if station_seed_id in self.seed_ids.keys():
|
if station_seed_id in self.seed_ids.keys():
|
||||||
@ -284,6 +287,7 @@ class Metadata(object):
|
|||||||
self.seed_ids[station_seed_id] = fname
|
self.seed_ids[station_seed_id] = fname
|
||||||
return True
|
return True
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
logging.warning(e)
|
||||||
continue
|
continue
|
||||||
print('Could not find metadata for station_seed_id {} in path {}'.format(station_seed_id, path_to_inventory))
|
print('Could not find metadata for station_seed_id {} in path {}'.format(station_seed_id, path_to_inventory))
|
||||||
|
|
||||||
@ -648,6 +652,8 @@ def restitute_data(data, metadata, unit='VEL', force=False, ncores=0):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
# data = remove_underscores(data)
|
# data = remove_underscores(data)
|
||||||
|
if not data:
|
||||||
|
return
|
||||||
|
|
||||||
# loop over traces
|
# loop over traces
|
||||||
input_tuples = []
|
input_tuples = []
|
||||||
@ -655,6 +661,11 @@ def restitute_data(data, metadata, unit='VEL', force=False, ncores=0):
|
|||||||
input_tuples.append((tr, metadata, unit, force))
|
input_tuples.append((tr, metadata, unit, force))
|
||||||
data.remove(tr)
|
data.remove(tr)
|
||||||
|
|
||||||
|
if ncores == 0:
|
||||||
|
result = []
|
||||||
|
for input_tuple in input_tuples:
|
||||||
|
result.append(restitute_trace(input_tuple))
|
||||||
|
else:
|
||||||
pool = gen_Pool(ncores)
|
pool = gen_Pool(ncores)
|
||||||
result = pool.imap_unordered(restitute_trace, input_tuples)
|
result = pool.imap_unordered(restitute_trace, input_tuples)
|
||||||
pool.close()
|
pool.close()
|
||||||
|
@ -22,14 +22,11 @@ class Event(ObsPyEvent):
|
|||||||
:param path: path to event directory
|
:param path: path to event directory
|
||||||
:type path: str
|
:type path: str
|
||||||
"""
|
"""
|
||||||
# TODO: remove rootpath and database
|
|
||||||
self.pylot_id = path.split('/')[-1]
|
self.pylot_id = path.split('/')[-1]
|
||||||
# initialize super class
|
# initialize super class
|
||||||
super(Event, self).__init__(resource_id=ResourceIdentifier('smi:local/' + self.pylot_id))
|
super(Event, self).__init__(resource_id=ResourceIdentifier('smi:local/' + self.pylot_id))
|
||||||
self.path = path
|
self.path = path
|
||||||
self.database = path.split('/')[-2]
|
|
||||||
self.datapath = os.path.split(path)[0] # path.split('/')[-3]
|
self.datapath = os.path.split(path)[0] # path.split('/')[-3]
|
||||||
self.rootpath = '/' + os.path.join(*path.split('/')[:-3])
|
|
||||||
self.pylot_autopicks = {}
|
self.pylot_autopicks = {}
|
||||||
self.pylot_picks = {}
|
self.pylot_picks = {}
|
||||||
self.notes = ''
|
self.notes = ''
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
import os
|
import os
|
||||||
|
from functools import lru_cache
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import pyqtgraph as pg
|
import pyqtgraph as pg
|
||||||
@ -25,14 +26,14 @@ def pick_linestyle_pg(picktype, key):
|
|||||||
:return: Qt line style parameters
|
:return: Qt line style parameters
|
||||||
:rtype:
|
:rtype:
|
||||||
"""
|
"""
|
||||||
linestyles_manu = {'mpp': (QtCore.Qt.SolidLine, 2.),
|
linestyles_manu = {'mpp': (QtCore.Qt.SolidLine, 2),
|
||||||
'epp': (QtCore.Qt.DashLine, 1.),
|
'epp': (QtCore.Qt.DashLine, 1),
|
||||||
'lpp': (QtCore.Qt.DashLine, 1.),
|
'lpp': (QtCore.Qt.DashLine, 1),
|
||||||
'spe': (QtCore.Qt.DashLine, 1.)}
|
'spe': (QtCore.Qt.DashLine, 1)}
|
||||||
linestyles_auto = {'mpp': (QtCore.Qt.DotLine, 2.),
|
linestyles_auto = {'mpp': (QtCore.Qt.DotLine, 2),
|
||||||
'epp': (QtCore.Qt.DashDotLine, 1.),
|
'epp': (QtCore.Qt.DashDotLine, 1),
|
||||||
'lpp': (QtCore.Qt.DashDotLine, 1.),
|
'lpp': (QtCore.Qt.DashDotLine, 1),
|
||||||
'spe': (QtCore.Qt.DashDotLine, 1.)}
|
'spe': (QtCore.Qt.DashDotLine, 1)}
|
||||||
linestyles = {'manual': linestyles_manu,
|
linestyles = {'manual': linestyles_manu,
|
||||||
'auto': linestyles_auto}
|
'auto': linestyles_auto}
|
||||||
return linestyles[picktype][key]
|
return linestyles[picktype][key]
|
||||||
@ -80,6 +81,7 @@ def which(program, parameter):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache(maxsize=128)
|
||||||
def make_pen(picktype, phase, key, quality):
|
def make_pen(picktype, phase, key, quality):
|
||||||
"""
|
"""
|
||||||
Make PyQtGraph.QPen
|
Make PyQtGraph.QPen
|
||||||
|
@ -8,6 +8,7 @@ import platform
|
|||||||
import re
|
import re
|
||||||
import subprocess
|
import subprocess
|
||||||
import warnings
|
import warnings
|
||||||
|
from typing import Literal, Tuple, Type
|
||||||
from functools import lru_cache
|
from functools import lru_cache
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
@ -20,6 +21,10 @@ from pylot.core.io.inputs import PylotParameter, FilterOptions
|
|||||||
from pylot.core.util.obspyDMT_interface import check_obspydmt_eventfolder
|
from pylot.core.util.obspyDMT_interface import check_obspydmt_eventfolder
|
||||||
from pylot.styles import style_settings
|
from pylot.styles import style_settings
|
||||||
|
|
||||||
|
Rgba: Type[tuple] = Tuple[int, int, int, int]
|
||||||
|
Mplrgba: Type[tuple] = Tuple[float, float, float, float]
|
||||||
|
Mplrgbastr: Type[tuple] = Tuple[str, str, str, str]
|
||||||
|
|
||||||
|
|
||||||
def _pickle_method(m):
|
def _pickle_method(m):
|
||||||
if m.im_self is None:
|
if m.im_self is None:
|
||||||
@ -46,7 +51,6 @@ def readDefaultFilterInformation():
|
|||||||
:rtype: dict
|
:rtype: dict
|
||||||
"""
|
"""
|
||||||
pparam = PylotParameter()
|
pparam = PylotParameter()
|
||||||
pparam.reset_defaults()
|
|
||||||
return readFilterInformation(pparam)
|
return readFilterInformation(pparam)
|
||||||
|
|
||||||
|
|
||||||
@ -83,25 +87,6 @@ def fit_curve(x, y):
|
|||||||
return splev, splrep(x, y)
|
return splev, splrep(x, y)
|
||||||
|
|
||||||
|
|
||||||
def getindexbounds(f, eta):
|
|
||||||
"""
|
|
||||||
Get indices of values closest below and above maximum value in an array
|
|
||||||
:param f: array
|
|
||||||
:type f: `~numpy.ndarray`
|
|
||||||
:param eta: look for value in array that is closes to max_value * eta
|
|
||||||
:type eta: float
|
|
||||||
:return: tuple containing index of max value, index of value closest below max value,
|
|
||||||
index of value closest above max value
|
|
||||||
:rtype: (int, int, int)
|
|
||||||
"""
|
|
||||||
mi = f.argmax() # get indices of max values
|
|
||||||
m = max(f) # get maximum value
|
|
||||||
b = m * eta #
|
|
||||||
l = find_nearest(f[:mi], b) # find closest value below max value
|
|
||||||
u = find_nearest(f[mi:], b) + mi # find closest value above max value
|
|
||||||
return mi, l, u
|
|
||||||
|
|
||||||
|
|
||||||
def gen_Pool(ncores=0):
|
def gen_Pool(ncores=0):
|
||||||
"""
|
"""
|
||||||
Generate mulitprocessing pool object utilizing ncores amount of cores
|
Generate mulitprocessing pool object utilizing ncores amount of cores
|
||||||
@ -167,11 +152,11 @@ def clims(lim1, lim2):
|
|||||||
"""
|
"""
|
||||||
takes two pairs of limits and returns one pair of common limts
|
takes two pairs of limits and returns one pair of common limts
|
||||||
:param lim1: limit 1
|
:param lim1: limit 1
|
||||||
:type lim1: int
|
:type lim1: List[int]
|
||||||
:param lim2: limit 2
|
:param lim2: limit 2
|
||||||
:type lim2: int
|
:type lim2: List[int]
|
||||||
:return: new upper and lower limit common to both given limits
|
:return: new upper and lower limit common to both given limits
|
||||||
:rtype: [int, int]
|
:rtype: List[int]
|
||||||
|
|
||||||
>>> clims([0, 4], [1, 3])
|
>>> clims([0, 4], [1, 3])
|
||||||
[0, 4]
|
[0, 4]
|
||||||
@ -303,7 +288,7 @@ def fnConstructor(s):
|
|||||||
if type(s) is str:
|
if type(s) is str:
|
||||||
s = s.split(':')[-1]
|
s = s.split(':')[-1]
|
||||||
else:
|
else:
|
||||||
s = getHash(UTCDateTime())
|
s = get_hash(UTCDateTime())
|
||||||
|
|
||||||
badchars = re.compile(r'[^A-Za-z0-9_. ]+|^\.|\.$|^ | $|^$')
|
badchars = re.compile(r'[^A-Za-z0-9_. ]+|^\.|\.$|^ | $|^$')
|
||||||
badsuffix = re.compile(r'(aux|com[1-9]|con|lpt[1-9]|prn)(\.|$)')
|
badsuffix = re.compile(r'(aux|com[1-9]|con|lpt[1-9]|prn)(\.|$)')
|
||||||
@ -315,15 +300,32 @@ def fnConstructor(s):
|
|||||||
return fn
|
return fn
|
||||||
|
|
||||||
|
|
||||||
def get_None(value):
|
def get_none(value):
|
||||||
"""
|
"""
|
||||||
Convert "None" to None
|
Convert "None" to None
|
||||||
:param value:
|
:param value:
|
||||||
:type value: str, bool
|
:type value: str, NoneType
|
||||||
:return:
|
:return:
|
||||||
:rtype: bool
|
:rtype: type(value) or NoneType
|
||||||
|
|
||||||
|
>>> st = read()
|
||||||
|
>>> print(get_none(st))
|
||||||
|
3 Trace(s) in Stream:
|
||||||
|
BW.RJOB..EHZ | 2009-08-24T00:20:03.000000Z - 2009-08-24T00:20:32.990000Z | 100.0 Hz, 3000 samples
|
||||||
|
BW.RJOB..EHN | 2009-08-24T00:20:03.000000Z - 2009-08-24T00:20:32.990000Z | 100.0 Hz, 3000 samples
|
||||||
|
BW.RJOB..EHE | 2009-08-24T00:20:03.000000Z - 2009-08-24T00:20:32.990000Z | 100.0 Hz, 3000 samples
|
||||||
|
>>> get_none('Stream')
|
||||||
|
'Stream'
|
||||||
|
>>> get_none(0)
|
||||||
|
0
|
||||||
|
>>> get_none(0.)
|
||||||
|
0.0
|
||||||
|
>>> print(get_none('None'))
|
||||||
|
None
|
||||||
|
>>> print(get_none(None))
|
||||||
|
None
|
||||||
"""
|
"""
|
||||||
if value == 'None':
|
if value is None or (type(value) is str and value == 'None'):
|
||||||
return None
|
return None
|
||||||
else:
|
else:
|
||||||
return value
|
return value
|
||||||
@ -331,11 +333,30 @@ def get_None(value):
|
|||||||
|
|
||||||
def get_bool(value):
|
def get_bool(value):
|
||||||
"""
|
"""
|
||||||
Convert string representations of bools to their true boolean value
|
Convert string representations of bools to their true boolean value. Return value if it cannot be identified as bool.
|
||||||
:param value:
|
:param value:
|
||||||
:type value: str, bool
|
:type value: str, bool, int, float
|
||||||
:return: true boolean value
|
:return: true boolean value
|
||||||
:rtype: bool
|
:rtype: bool
|
||||||
|
|
||||||
|
>>> get_bool(True)
|
||||||
|
True
|
||||||
|
>>> get_bool(False)
|
||||||
|
False
|
||||||
|
>>> get_bool(0)
|
||||||
|
False
|
||||||
|
>>> get_bool(0.)
|
||||||
|
False
|
||||||
|
>>> get_bool(0.1)
|
||||||
|
True
|
||||||
|
>>> get_bool(2)
|
||||||
|
True
|
||||||
|
>>> get_bool(-1)
|
||||||
|
False
|
||||||
|
>>> get_bool(-0.3)
|
||||||
|
False
|
||||||
|
>>> get_bool(None)
|
||||||
|
None
|
||||||
"""
|
"""
|
||||||
if type(value) is bool:
|
if type(value) is bool:
|
||||||
return value
|
return value
|
||||||
@ -343,8 +364,14 @@ def get_bool(value):
|
|||||||
return True
|
return True
|
||||||
elif value in ['False', 'false']:
|
elif value in ['False', 'false']:
|
||||||
return False
|
return False
|
||||||
|
elif isinstance(value, float) or isinstance(value, int):
|
||||||
|
if value > 0. or value > 0:
|
||||||
|
return True
|
||||||
else:
|
else:
|
||||||
return bool(value)
|
return False
|
||||||
|
else:
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
def four_digits(year):
|
def four_digits(year):
|
||||||
"""
|
"""
|
||||||
@ -355,8 +382,8 @@ def four_digits(year):
|
|||||||
:return: four digit year correspondent
|
:return: four digit year correspondent
|
||||||
:rtype: int
|
:rtype: int
|
||||||
|
|
||||||
>>> four_digits(20)
|
>>> four_digits(75)
|
||||||
1920
|
1975
|
||||||
>>> four_digits(16)
|
>>> four_digits(16)
|
||||||
2016
|
2016
|
||||||
>>> four_digits(00)
|
>>> four_digits(00)
|
||||||
@ -438,36 +465,53 @@ def backtransformFilterString(st):
|
|||||||
return st
|
return st
|
||||||
|
|
||||||
|
|
||||||
def getHash(time):
|
def get_hash(time):
|
||||||
"""
|
"""
|
||||||
takes a time object and returns the corresponding SHA1 hash of the formatted date string
|
takes a time object and returns the corresponding SHA1 hash of the formatted date string
|
||||||
:param time: time object for which a hash should be calculated
|
:param time: time object for which a hash should be calculated
|
||||||
:type time: `~obspy.core.utcdatetime.UTCDateTime`
|
:type time: `~obspy.core.utcdatetime.UTCDateTime`
|
||||||
:return: SHA1 hash
|
:return: SHA1 hash
|
||||||
:rtype: str
|
:rtype: str
|
||||||
|
|
||||||
|
>>> time = UTCDateTime(0)
|
||||||
|
>>> get_hash(time)
|
||||||
|
'7627cce3b1b58dd21b005dac008b34d18317dd15'
|
||||||
|
>>> get_hash(0)
|
||||||
|
Traceback (most recent call last):
|
||||||
|
...
|
||||||
|
AssertionError: 'time' is not an ObsPy UTCDateTime object
|
||||||
"""
|
"""
|
||||||
|
assert isinstance(time, UTCDateTime), '\'time\' is not an ObsPy UTCDateTime object'
|
||||||
hg = hashlib.sha1()
|
hg = hashlib.sha1()
|
||||||
hg.update(time.strftime('%Y-%m-%d %H:%M:%S.%f'))
|
hg.update(time.strftime('%Y-%m-%d %H:%M:%S.%f').encode('utf-8'))
|
||||||
return hg.hexdigest()
|
return hg.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
def getLogin():
|
def get_login():
|
||||||
"""
|
"""
|
||||||
returns the actual user's login ID
|
returns the actual user's name
|
||||||
:return: login ID
|
:return: login name
|
||||||
:rtype: str
|
:rtype: str
|
||||||
"""
|
"""
|
||||||
import getpass
|
import getpass
|
||||||
return getpass.getuser()
|
return getpass.getuser()
|
||||||
|
|
||||||
|
|
||||||
def getOwner(fn):
|
def get_owner(fn):
|
||||||
"""
|
"""
|
||||||
takes a filename and return the login ID of the actual owner of the file
|
takes a filename and return the login ID of the actual owner of the file
|
||||||
:param fn: filename of the file tested
|
:param fn: filename of the file tested
|
||||||
:type fn: str
|
:type fn: str
|
||||||
:return: login ID of the file's owner
|
:return: login ID of the file's owner
|
||||||
:rtype: str
|
:rtype: str
|
||||||
|
|
||||||
|
>>> import tempfile
|
||||||
|
>>> with tempfile.NamedTemporaryFile() as tmpfile:
|
||||||
|
... tmpfile.write(b'') and True
|
||||||
|
... tmpfile.flush()
|
||||||
|
... get_owner(tmpfile.name) == os.path.expanduser('~').split('/')[-1]
|
||||||
|
0
|
||||||
|
True
|
||||||
"""
|
"""
|
||||||
system_name = platform.system()
|
system_name = platform.system()
|
||||||
if system_name in ["Linux", "Darwin"]:
|
if system_name in ["Linux", "Darwin"]:
|
||||||
@ -513,6 +557,11 @@ def is_executable(fn):
|
|||||||
:param fn: path to the file to be tested
|
:param fn: path to the file to be tested
|
||||||
:return: True or False
|
:return: True or False
|
||||||
:rtype: bool
|
:rtype: bool
|
||||||
|
|
||||||
|
>>> is_executable('/bin/ls')
|
||||||
|
True
|
||||||
|
>>> is_executable('/var/log/system.log')
|
||||||
|
False
|
||||||
"""
|
"""
|
||||||
return os.path.isfile(fn) and os.access(fn, os.X_OK)
|
return os.path.isfile(fn) and os.access(fn, os.X_OK)
|
||||||
|
|
||||||
@ -539,24 +588,36 @@ def isSorted(iterable):
|
|||||||
>>> isSorted([2,3,1,4])
|
>>> isSorted([2,3,1,4])
|
||||||
False
|
False
|
||||||
"""
|
"""
|
||||||
assert isIterable(iterable), 'object is not iterable; object: {' \
|
assert is_iterable(iterable), "object is not iterable; object: {}".format(iterable)
|
||||||
'}'.format(iterable)
|
|
||||||
if type(iterable) is str:
|
if type(iterable) is str:
|
||||||
iterable = [s for s in iterable]
|
iterable = [s for s in iterable]
|
||||||
return sorted(iterable) == iterable
|
return sorted(iterable) == iterable
|
||||||
|
|
||||||
|
|
||||||
def isIterable(obj):
|
def is_iterable(obj):
|
||||||
"""
|
"""
|
||||||
takes a python object and returns True is the object is iterable and
|
takes a python object and returns True is the object is iterable and
|
||||||
False otherwise
|
False otherwise
|
||||||
:param obj: a python object
|
:param obj: a python object
|
||||||
:type obj: object
|
:type obj: obj
|
||||||
:return: True of False
|
:return: True of False
|
||||||
:rtype: bool
|
:rtype: bool
|
||||||
|
|
||||||
|
>>> is_iterable(1)
|
||||||
|
False
|
||||||
|
>>> is_iterable(True)
|
||||||
|
False
|
||||||
|
>>> is_iterable(0.)
|
||||||
|
False
|
||||||
|
>>> is_iterable((0,1,3,4))
|
||||||
|
True
|
||||||
|
>>> is_iterable([1])
|
||||||
|
True
|
||||||
|
>>> is_iterable('a')
|
||||||
|
True
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
iterator = iter(obj)
|
iter(obj)
|
||||||
except TypeError as te:
|
except TypeError as te:
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
@ -565,13 +626,19 @@ def isIterable(obj):
|
|||||||
def key_for_set_value(d):
|
def key_for_set_value(d):
|
||||||
"""
|
"""
|
||||||
takes a dictionary and returns the first key for which's value the
|
takes a dictionary and returns the first key for which's value the
|
||||||
boolean is True
|
boolean representation is True
|
||||||
:param d: dictionary containing values
|
:param d: dictionary containing values
|
||||||
:type d: dict
|
:type d: dict
|
||||||
:return: key to the first non-False value found; None if no value's
|
:return: key to the first non-False value found; None if no value's
|
||||||
boolean equals True
|
boolean equals True
|
||||||
:rtype:
|
:rtype: bool or NoneType
|
||||||
|
|
||||||
|
>>> key_for_set_value({'one': 0, 'two': 1})
|
||||||
|
'two'
|
||||||
|
>>> print(key_for_set_value({1: 0, 2: False}))
|
||||||
|
None
|
||||||
"""
|
"""
|
||||||
|
assert type(d) is dict, "Function only defined for inputs of type 'dict'."
|
||||||
r = None
|
r = None
|
||||||
for k, v in d.items():
|
for k, v in d.items():
|
||||||
if v:
|
if v:
|
||||||
@ -579,32 +646,53 @@ def key_for_set_value(d):
|
|||||||
return r
|
return r
|
||||||
|
|
||||||
|
|
||||||
def prepTimeAxis(stime, trace, verbosity=0):
|
def prep_time_axis(offset, trace, verbosity=0):
|
||||||
"""
|
"""
|
||||||
takes a starttime and a trace object and returns a valid time axis for
|
takes an offset and a trace object and returns a valid time axis for
|
||||||
plotting
|
plotting
|
||||||
:param stime: start time of the actual seismogram as UTCDateTime
|
:param offset: offset of the actual seismogram on plotting axis
|
||||||
:type stime: `~obspy.core.utcdatetime.UTCDateTime`
|
:type offset: float or int
|
||||||
:param trace: seismic trace object
|
:param trace: seismic trace object
|
||||||
:type trace: `~obspy.core.trace.Trace`
|
:type trace: `~obspy.core.trace.Trace`
|
||||||
:param verbosity: if != 0, debug output will be written to console
|
:param verbosity: if != 0, debug output will be written to console
|
||||||
:type verbosity: int
|
:type verbosity: int
|
||||||
:return: valid numpy array with time stamps for plotting
|
:return: valid numpy array with time stamps for plotting
|
||||||
:rtype: `~numpy.ndarray`
|
:rtype: `~numpy.ndarray`
|
||||||
|
|
||||||
|
>>> tr = read()[0]
|
||||||
|
>>> prep_time_axis(0., tr)
|
||||||
|
array([0.00000000e+00, 1.00033344e-02, 2.00066689e-02, ...,
|
||||||
|
2.99799933e+01, 2.99899967e+01, 3.00000000e+01])
|
||||||
|
>>> prep_time_axis(22.5, tr)
|
||||||
|
array([22.5 , 22.51000333, 22.52000667, ..., 52.47999333,
|
||||||
|
52.48999667, 52.5 ])
|
||||||
|
>>> prep_time_axis(tr.stats.starttime, tr)
|
||||||
|
Traceback (most recent call last):
|
||||||
|
...
|
||||||
|
AssertionError: 'offset' is not of type 'float' or 'int'; type: <class 'obspy.core.utcdatetime.UTCDateTime'>
|
||||||
|
>>> tr.stats.npts -= 1
|
||||||
|
>>> prep_time_axis(0, tr)
|
||||||
|
array([0.00000000e+00, 1.00033356e-02, 2.00066711e-02, ...,
|
||||||
|
2.99699933e+01, 2.99799967e+01, 2.99900000e+01])
|
||||||
|
>>> tr.stats.npts += 2
|
||||||
|
>>> prep_time_axis(0, tr)
|
||||||
|
array([0.00000000e+00, 1.00033333e-02, 2.00066667e-02, ...,
|
||||||
|
2.99899933e+01, 2.99999967e+01, 3.00100000e+01])
|
||||||
"""
|
"""
|
||||||
|
assert isinstance(offset, (float, int)), "'offset' is not of type 'float' or 'int'; type: {}".format(type(offset))
|
||||||
nsamp = trace.stats.npts
|
nsamp = trace.stats.npts
|
||||||
srate = trace.stats.sampling_rate
|
srate = trace.stats.sampling_rate
|
||||||
tincr = trace.stats.delta
|
tincr = trace.stats.delta
|
||||||
etime = stime + nsamp / srate
|
etime = offset + nsamp / srate
|
||||||
time_ax = np.linspace(stime, etime, nsamp)
|
time_ax = np.linspace(offset, etime, nsamp)
|
||||||
if len(time_ax) < nsamp:
|
if len(time_ax) < nsamp:
|
||||||
if verbosity:
|
if verbosity:
|
||||||
print('elongate time axes by one datum')
|
print('elongate time axes by one datum')
|
||||||
time_ax = np.arange(stime, etime + tincr, tincr)
|
time_ax = np.arange(offset, etime + tincr, tincr)
|
||||||
elif len(time_ax) > nsamp:
|
elif len(time_ax) > nsamp:
|
||||||
if verbosity:
|
if verbosity:
|
||||||
print('shorten time axes by one datum')
|
print('shorten time axes by one datum')
|
||||||
time_ax = np.arange(stime, etime - tincr, tincr)
|
time_ax = np.arange(offset, etime - tincr, tincr)
|
||||||
if len(time_ax) != nsamp:
|
if len(time_ax) != nsamp:
|
||||||
print('Station {0}, {1} samples of data \n '
|
print('Station {0}, {1} samples of data \n '
|
||||||
'{2} length of time vector \n'
|
'{2} length of time vector \n'
|
||||||
@ -620,13 +708,13 @@ def find_horizontals(data):
|
|||||||
:param data: waveform data
|
:param data: waveform data
|
||||||
:type data: `obspy.core.stream.Stream`
|
:type data: `obspy.core.stream.Stream`
|
||||||
:return: components list
|
:return: components list
|
||||||
:rtype: list
|
:rtype: List(str)
|
||||||
|
|
||||||
..example::
|
..example::
|
||||||
|
|
||||||
>>> st = read()
|
>>> st = read()
|
||||||
>>> find_horizontals(st)
|
>>> find_horizontals(st)
|
||||||
[u'N', u'E']
|
['N', 'E']
|
||||||
"""
|
"""
|
||||||
rval = []
|
rval = []
|
||||||
for tr in data:
|
for tr in data:
|
||||||
@ -637,7 +725,7 @@ def find_horizontals(data):
|
|||||||
return rval
|
return rval
|
||||||
|
|
||||||
|
|
||||||
def pick_color(picktype, phase, quality=0):
|
def pick_color(picktype: Literal['manual', 'automatic'], phase: Literal['P', 'S'], quality: int = 0) -> Rgba:
|
||||||
"""
|
"""
|
||||||
Create pick color by modifying the base color by the quality.
|
Create pick color by modifying the base color by the quality.
|
||||||
|
|
||||||
@ -650,7 +738,7 @@ def pick_color(picktype, phase, quality=0):
|
|||||||
:param quality: quality of pick. Decides the new intensity of the modifier color
|
:param quality: quality of pick. Decides the new intensity of the modifier color
|
||||||
:type quality: int
|
:type quality: int
|
||||||
:return: tuple containing modified rgba color values
|
:return: tuple containing modified rgba color values
|
||||||
:rtype: (int, int, int, int)
|
:rtype: Rgba
|
||||||
"""
|
"""
|
||||||
min_quality = 3
|
min_quality = 3
|
||||||
bpc = base_phase_colors(picktype, phase) # returns dict like {'modifier': 'g', 'rgba': (0, 0, 255, 255)}
|
bpc = base_phase_colors(picktype, phase) # returns dict like {'modifier': 'g', 'rgba': (0, 0, 255, 255)}
|
||||||
@ -706,17 +794,17 @@ def pick_linestyle_plt(picktype, key):
|
|||||||
return linestyles[picktype][key]
|
return linestyles[picktype][key]
|
||||||
|
|
||||||
|
|
||||||
def modify_rgba(rgba, modifier, intensity):
|
def modify_rgba(rgba: Rgba, modifier: Literal['r', 'g', 'b'], intensity: float) -> Rgba:
|
||||||
"""
|
"""
|
||||||
Modify rgba color by adding the given intensity to the modifier color
|
Modify rgba color by adding the given intensity to the modifier color
|
||||||
:param rgba: tuple containing rgba values
|
:param rgba: tuple containing rgba values
|
||||||
:type rgba: (int, int, int, int)
|
:type rgba: Rgba
|
||||||
:param modifier: which color should be modified, eg. 'r', 'g', 'b'
|
:param modifier: which color should be modified; options: 'r', 'g', 'b'
|
||||||
:type modifier: str
|
:type modifier: Literal['r', 'g', 'b']
|
||||||
:param intensity: intensity to be added to selected color
|
:param intensity: intensity to be added to selected color
|
||||||
:type intensity: float
|
:type intensity: float
|
||||||
:return: tuple containing rgba values
|
:return: tuple containing rgba values
|
||||||
:rtype: (int, int, int, int)
|
:rtype: Rgba
|
||||||
"""
|
"""
|
||||||
rgba = list(rgba)
|
rgba = list(rgba)
|
||||||
index = {'r': 0,
|
index = {'r': 0,
|
||||||
@ -750,18 +838,20 @@ def transform_colors_mpl_str(colors, no_alpha=False):
|
|||||||
Transforms rgba color values to a matplotlib string of color values with a range of [0, 1]
|
Transforms rgba color values to a matplotlib string of color values with a range of [0, 1]
|
||||||
:param colors: tuple of rgba color values ranging from [0, 255]
|
:param colors: tuple of rgba color values ranging from [0, 255]
|
||||||
:type colors: (float, float, float, float)
|
:type colors: (float, float, float, float)
|
||||||
:param no_alpha: Wether to return a alpha value in the matplotlib color string
|
:param no_alpha: Whether to return an alpha value in the matplotlib color string
|
||||||
:type no_alpha: bool
|
:type no_alpha: bool
|
||||||
:return: String containing r, g, b values and alpha value if no_alpha is False (default)
|
:return: String containing r, g, b values and alpha value if no_alpha is False (default)
|
||||||
:rtype: str
|
:rtype: str
|
||||||
|
|
||||||
|
>>> transform_colors_mpl_str((255., 255., 255., 255.), True)
|
||||||
|
'(1.0, 1.0, 1.0)'
|
||||||
|
>>> transform_colors_mpl_str((255., 255., 255., 255.))
|
||||||
|
'(1.0, 1.0, 1.0, 1.0)'
|
||||||
"""
|
"""
|
||||||
colors = list(colors)
|
|
||||||
colors_mpl = tuple([color / 255. for color in colors])
|
|
||||||
if no_alpha:
|
if no_alpha:
|
||||||
colors_mpl = '({}, {}, {})'.format(*colors_mpl)
|
return '({}, {}, {})'.format(*transform_colors_mpl(colors))
|
||||||
else:
|
else:
|
||||||
colors_mpl = '({}, {}, {}, {})'.format(*colors_mpl)
|
return '({}, {}, {}, {})'.format(*transform_colors_mpl(colors))
|
||||||
return colors_mpl
|
|
||||||
|
|
||||||
|
|
||||||
def transform_colors_mpl(colors):
|
def transform_colors_mpl(colors):
|
||||||
@ -771,27 +861,16 @@ def transform_colors_mpl(colors):
|
|||||||
:type colors: (float, float, float, float)
|
:type colors: (float, float, float, float)
|
||||||
:return: tuple of rgba color values ranging from [0, 1]
|
:return: tuple of rgba color values ranging from [0, 1]
|
||||||
:rtype: (float, float, float, float)
|
:rtype: (float, float, float, float)
|
||||||
|
|
||||||
|
>>> transform_colors_mpl((127.5, 0., 63.75, 255.))
|
||||||
|
(0.5, 0.0, 0.25, 1.0)
|
||||||
|
>>> transform_colors_mpl(())
|
||||||
"""
|
"""
|
||||||
colors = list(colors)
|
colors = list(colors)
|
||||||
colors_mpl = tuple([color / 255. for color in colors])
|
colors_mpl = tuple([color / 255. for color in colors])
|
||||||
return colors_mpl
|
return colors_mpl
|
||||||
|
|
||||||
|
|
||||||
def remove_underscores(data):
|
|
||||||
"""
|
|
||||||
takes a `obspy.core.stream.Stream` object and removes all underscores
|
|
||||||
from station names
|
|
||||||
:param data: stream of seismic data
|
|
||||||
:type data: `~obspy.core.stream.Stream`
|
|
||||||
:return: data stream
|
|
||||||
:rtype: `~obspy.core.stream.Stream`
|
|
||||||
"""
|
|
||||||
# for tr in data:
|
|
||||||
# # remove underscores
|
|
||||||
# tr.stats.station = tr.stats.station.strip('_')
|
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
def trim_station_components(data, trim_start=True, trim_end=True):
|
def trim_station_components(data, trim_start=True, trim_end=True):
|
||||||
"""
|
"""
|
||||||
cut a stream so only the part common to all three traces is kept to avoid dealing with offsets
|
cut a stream so only the part common to all three traces is kept to avoid dealing with offsets
|
||||||
@ -928,11 +1007,11 @@ def get_possible_pylot_eventfile_extensions(event, fext):
|
|||||||
|
|
||||||
def get_stations(data):
|
def get_stations(data):
|
||||||
"""
|
"""
|
||||||
Get list of all station names in data stream
|
Get list of all station names in data-stream
|
||||||
:param data: stream containing seismic traces
|
:param data: stream containing seismic traces
|
||||||
:type data: `~obspy.core.stream.Stream`
|
:type data: `~obspy.core.stream.Stream`
|
||||||
:return: list of all station names in data, no duplicates
|
:return: list of all station names in data, no duplicates
|
||||||
:rtype: list of str
|
:rtype: List(str)
|
||||||
"""
|
"""
|
||||||
stations = []
|
stations = []
|
||||||
for tr in data:
|
for tr in data:
|
||||||
@ -959,66 +1038,88 @@ def check4rotated(data, metadata=None, verbosity=1):
|
|||||||
:rtype: `~obspy.core.stream.Stream`
|
:rtype: `~obspy.core.stream.Stream`
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def rotate_components(wfstream, metadata=None):
|
def rotation_required(trace_ids):
|
||||||
|
"""
|
||||||
|
Derive if any rotation is required from the orientation code of the input.
|
||||||
|
|
||||||
|
:param trace_ids: string identifier of waveform data trace
|
||||||
|
:type trace_ids: List(str)
|
||||||
|
:return: boolean representing if rotation is necessary for any of the traces
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
orientations = [trace_id[-1] for trace_id in trace_ids]
|
||||||
|
return any([orientation.isnumeric() for orientation in orientations])
|
||||||
|
|
||||||
|
def rotate_components(wfs_in, metadata=None):
|
||||||
"""
|
"""
|
||||||
Rotate components if orientation code is numeric (= non traditional orientation).
|
Rotate components if orientation code is numeric (= non traditional orientation).
|
||||||
|
|
||||||
Azimut and dip are fetched from metadata. To be rotated, traces of a station have to be cut to the same length.
|
Azimut and dip are fetched from metadata. To be rotated, traces of a station have to be cut to the same length.
|
||||||
Returns unrotated traces of no metadata is provided
|
Returns unrotated traces of no metadata is provided
|
||||||
:param wfstream: stream containing seismic traces of a station
|
:param wfs_in: stream containing seismic traces of a station
|
||||||
:type wfstream: `~obspy.core.stream.Stream`
|
:type wfs_in: `~obspy.core.stream.Stream`
|
||||||
:param metadata: tuple containing metadata type string and metadata parser object
|
:param metadata: tuple containing metadata type string and metadata parser object
|
||||||
:type metadata: (str, `~obspy.io.xseed.parser.Parser`)
|
:type metadata: (str, `~obspy.io.xseed.parser.Parser`)
|
||||||
:return: stream object with traditionally oriented traces (ZNE)
|
:return: stream object with traditionally oriented traces (ZNE)
|
||||||
:rtype: `~obspy.core.stream.Stream`
|
:rtype: `~obspy.core.stream.Stream`
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
if len(wfs_in) < 3:
|
||||||
|
print(f"Stream {wfs_in=}, has not enough components to rotate.")
|
||||||
|
return wfs_in
|
||||||
|
|
||||||
# check if any traces in this station need to be rotated
|
# check if any traces in this station need to be rotated
|
||||||
trace_ids = [trace.id for trace in wfstream]
|
trace_ids = [trace.id for trace in wfs_in]
|
||||||
orientations = [trace_id[-1] for trace_id in trace_ids]
|
if not rotation_required(trace_ids):
|
||||||
rotation_required = [orientation.isnumeric() for orientation in orientations]
|
logging.debug(f"Stream does not need any rotation: Traces are {trace_ids=}")
|
||||||
if any(rotation_required):
|
return wfs_in
|
||||||
t_start = full_range(wfstream)
|
|
||||||
|
# check metadata quality
|
||||||
|
t_start = full_range(wfs_in)[0]
|
||||||
try:
|
try:
|
||||||
azimuts = []
|
azimuths = []
|
||||||
dips = []
|
dips = []
|
||||||
for tr_id in trace_ids:
|
for tr_id in trace_ids:
|
||||||
azimuts.append(metadata.get_coordinates(tr_id, t_start)['azimuth'])
|
azimuths.append(metadata.get_coordinates(tr_id, t_start)['azimuth'])
|
||||||
dips.append(metadata.get_coordinates(tr_id, t_start)['dip'])
|
dips.append(metadata.get_coordinates(tr_id, t_start)['dip'])
|
||||||
except (KeyError, TypeError) as e:
|
except (KeyError, TypeError) as err:
|
||||||
print('Failed to rotate trace {}, no azimuth or dip available in metadata'.format(tr_id))
|
logging.warning(f"Rotating not possible, not all azimuth and dip information "
|
||||||
return wfstream
|
f"available in metadata. Stream remains unchanged.")
|
||||||
if len(wfstream) < 3:
|
logging.debug(f"Rotating not possible, {err=}, {type(err)=}")
|
||||||
print('Failed to rotate Stream {}, not enough components available.'.format(wfstream))
|
return wfs_in
|
||||||
return wfstream
|
except Exception as err:
|
||||||
|
print(f"Unexpected {err=}, {type(err)=}")
|
||||||
|
raise
|
||||||
|
|
||||||
# to rotate all traces must have same length, so trim them
|
# to rotate all traces must have same length, so trim them
|
||||||
wfstream = trim_station_components(wfstream, trim_start=True, trim_end=True)
|
wfs_out = trim_station_components(wfs_in, trim_start=True, trim_end=True)
|
||||||
try:
|
try:
|
||||||
z, n, e = rotate2zne(wfstream[0], azimuts[0], dips[0],
|
z, n, e = rotate2zne(wfs_out[0], azimuths[0], dips[0],
|
||||||
wfstream[1], azimuts[1], dips[1],
|
wfs_out[1], azimuths[1], dips[1],
|
||||||
wfstream[2], azimuts[2], dips[2])
|
wfs_out[2], azimuths[2], dips[2])
|
||||||
print('check4rotated: rotated trace {} to ZNE'.format(trace_ids))
|
print('check4rotated: rotated trace {} to ZNE'.format(trace_ids))
|
||||||
# replace old data with rotated data, change the channel code to ZNE
|
# replace old data with rotated data, change the channel code to ZNE
|
||||||
z_index = dips.index(min(
|
z_index = dips.index(min(
|
||||||
dips)) # get z-trace index, z has minimum dip of -90 (dip is measured from 0 to -90, with -90 being vertical)
|
dips)) # get z-trace index, z has minimum dip of -90 (dip is measured from 0 to -90, with -90
|
||||||
wfstream[z_index].data = z
|
# being vertical)
|
||||||
wfstream[z_index].stats.channel = wfstream[z_index].stats.channel[0:-1] + 'Z'
|
wfs_out[z_index].data = z
|
||||||
|
wfs_out[z_index].stats.channel = wfs_out[z_index].stats.channel[0:-1] + 'Z'
|
||||||
del trace_ids[z_index]
|
del trace_ids[z_index]
|
||||||
for trace_id in trace_ids:
|
for trace_id in trace_ids:
|
||||||
coordinates = metadata.get_coordinates(trace_id, t_start)
|
coordinates = metadata.get_coordinates(trace_id, t_start)
|
||||||
dip, az = coordinates['dip'], coordinates['azimuth']
|
dip, az = coordinates['dip'], coordinates['azimuth']
|
||||||
trace = wfstream.select(id=trace_id)[0]
|
trace = wfs_out.select(id=trace_id)[0]
|
||||||
if az > 315 or az <= 45 or az > 135 and az <= 225:
|
if az > 315 or az <= 45 or 135 < az <= 225:
|
||||||
trace.data = n
|
trace.data = n
|
||||||
trace.stats.channel = trace.stats.channel[0:-1] + 'N'
|
trace.stats.channel = trace.stats.channel[0:-1] + 'N'
|
||||||
elif az > 45 and az <= 135 or az > 225 and az <= 315:
|
elif 45 < az <= 135 or 225 < az <= 315:
|
||||||
trace.data = e
|
trace.data = e
|
||||||
trace.stats.channel = trace.stats.channel[0:-1] + 'E'
|
trace.stats.channel = trace.stats.channel[0:-1] + 'E'
|
||||||
except (ValueError) as e:
|
except ValueError as err:
|
||||||
print(e)
|
print(f"{err=} Rotation failed. Stream remains unchanged.")
|
||||||
return wfstream
|
return wfs_in
|
||||||
|
|
||||||
return wfstream
|
return wfs_out
|
||||||
|
|
||||||
if metadata is None:
|
if metadata is None:
|
||||||
if verbosity:
|
if verbosity:
|
||||||
@ -1032,38 +1133,6 @@ def check4rotated(data, metadata=None, verbosity=1):
|
|||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
def scaleWFData(data, factor=None, components='all'):
|
|
||||||
"""
|
|
||||||
produce scaled waveforms from given waveform data and a scaling factor,
|
|
||||||
waveform may be selected by their components name
|
|
||||||
:param data: waveform data to be scaled
|
|
||||||
:type data: `~obspy.core.stream.Stream` object
|
|
||||||
:param factor: scaling factor
|
|
||||||
:type factor: float
|
|
||||||
:param components: components labels for the traces in data to be scaled by
|
|
||||||
the scaling factor (optional, default: 'all')
|
|
||||||
:type components: tuple
|
|
||||||
:return: scaled waveform data
|
|
||||||
:rtype: `~obspy.core.stream.Stream` object
|
|
||||||
"""
|
|
||||||
if components != 'all':
|
|
||||||
for comp in components:
|
|
||||||
if factor is None:
|
|
||||||
max_val = np.max(np.abs(data.select(component=comp)[0].data))
|
|
||||||
data.select(component=comp)[0].data /= 2 * max_val
|
|
||||||
else:
|
|
||||||
data.select(component=comp)[0].data /= 2 * factor
|
|
||||||
else:
|
|
||||||
for tr in data:
|
|
||||||
if factor is None:
|
|
||||||
max_val = float(np.max(np.abs(tr.data)))
|
|
||||||
tr.data /= 2 * max_val
|
|
||||||
else:
|
|
||||||
tr.data /= 2 * factor
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
def runProgram(cmd, parameter=None):
|
def runProgram(cmd, parameter=None):
|
||||||
"""
|
"""
|
||||||
run an external program specified by cmd with parameters input returning the
|
run an external program specified by cmd with parameters input returning the
|
||||||
|
@ -7,6 +7,8 @@ Created on Wed Mar 19 11:27:35 2014
|
|||||||
import copy
|
import copy
|
||||||
import datetime
|
import datetime
|
||||||
import getpass
|
import getpass
|
||||||
|
import glob
|
||||||
|
import logging
|
||||||
import multiprocessing
|
import multiprocessing
|
||||||
import os
|
import os
|
||||||
import subprocess
|
import subprocess
|
||||||
@ -16,6 +18,7 @@ import traceback
|
|||||||
|
|
||||||
import matplotlib
|
import matplotlib
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
from pylot.core.io.phases import getQualitiesfromxml
|
||||||
|
|
||||||
matplotlib.use('QT5Agg')
|
matplotlib.use('QT5Agg')
|
||||||
|
|
||||||
@ -49,11 +52,11 @@ from pylot.core.pick.utils import getSNR, earllatepicker, getnoisewin, \
|
|||||||
from pylot.core.pick.compare import Comparison
|
from pylot.core.pick.compare import Comparison
|
||||||
from pylot.core.pick.autopick import fmpicker
|
from pylot.core.pick.autopick import fmpicker
|
||||||
from pylot.core.util.defaults import OUTPUTFORMATS, FILTERDEFAULTS
|
from pylot.core.util.defaults import OUTPUTFORMATS, FILTERDEFAULTS
|
||||||
from pylot.core.util.utils import prepTimeAxis, full_range, demeanTrace, isSorted, findComboBoxIndex, clims, \
|
from pylot.core.util.utils import prep_time_axis, full_range, demeanTrace, isSorted, findComboBoxIndex, clims, \
|
||||||
pick_linestyle_plt, pick_color_plt, \
|
pick_linestyle_plt, pick_color_plt, \
|
||||||
check4rotated, check4doubled, check_for_gaps_and_merge, check_for_nan, identifyPhase, \
|
check4rotated, check4doubled, check_for_gaps_and_merge, check_for_nan, identifyPhase, \
|
||||||
loopIdentifyPhase, trim_station_components, transformFilteroptions2String, \
|
loopIdentifyPhase, trim_station_components, transformFilteroptions2String, \
|
||||||
identifyPhaseID, get_bool, get_None, pick_color, getAutoFilteroptions, SetChannelComponents, \
|
identifyPhaseID, get_bool, get_none, pick_color, getAutoFilteroptions, SetChannelComponents, \
|
||||||
station_id_remove_channel, get_pylot_eventfile_with_extension, get_possible_pylot_eventfile_extensions
|
station_id_remove_channel, get_pylot_eventfile_with_extension, get_possible_pylot_eventfile_extensions
|
||||||
from autoPyLoT import autoPyLoT
|
from autoPyLoT import autoPyLoT
|
||||||
from pylot.core.util.thread import Thread
|
from pylot.core.util.thread import Thread
|
||||||
@ -793,7 +796,7 @@ class WaveformWidgetPG(QtWidgets.QWidget):
|
|||||||
|
|
||||||
def connect_signals(self):
|
def connect_signals(self):
|
||||||
self.qcombo_processed.activated.connect(self.parent().newWF)
|
self.qcombo_processed.activated.connect(self.parent().newWF)
|
||||||
self.syn_checkbox.clicked.connect(self.parent().newWF)
|
self.comp_checkbox.clicked.connect(self.parent().newWF)
|
||||||
|
|
||||||
def init_labels(self):
|
def init_labels(self):
|
||||||
self.label_layout.addWidget(self.status_label)
|
self.label_layout.addWidget(self.status_label)
|
||||||
@ -804,13 +807,13 @@ class WaveformWidgetPG(QtWidgets.QWidget):
|
|||||||
# use widgets as placeholder, so that child widgets keep position when others are hidden
|
# use widgets as placeholder, so that child widgets keep position when others are hidden
|
||||||
mid_layout = QHBoxLayout()
|
mid_layout = QHBoxLayout()
|
||||||
right_layout = QHBoxLayout()
|
right_layout = QHBoxLayout()
|
||||||
mid_layout.addWidget(self.syn_checkbox)
|
mid_layout.addWidget(self.comp_checkbox)
|
||||||
right_layout.addWidget(self.qcombo_processed)
|
right_layout.addWidget(self.qcombo_processed)
|
||||||
mid_widget.setLayout(mid_layout)
|
mid_widget.setLayout(mid_layout)
|
||||||
right_widget.setLayout(right_layout)
|
right_widget.setLayout(right_layout)
|
||||||
self.label_layout.addWidget(mid_widget)
|
self.label_layout.addWidget(mid_widget)
|
||||||
self.label_layout.addWidget(right_widget)
|
self.label_layout.addWidget(right_widget)
|
||||||
self.syn_checkbox.setLayoutDirection(Qt.RightToLeft)
|
self.comp_checkbox.setLayoutDirection(Qt.RightToLeft)
|
||||||
self.label_layout.setStretch(0, 4)
|
self.label_layout.setStretch(0, 4)
|
||||||
self.label_layout.setStretch(1, 0)
|
self.label_layout.setStretch(1, 0)
|
||||||
self.label_layout.setStretch(2, 0)
|
self.label_layout.setStretch(2, 0)
|
||||||
@ -825,7 +828,7 @@ class WaveformWidgetPG(QtWidgets.QWidget):
|
|||||||
label = QtWidgets.QLabel()
|
label = QtWidgets.QLabel()
|
||||||
self.perm_labels.append(label)
|
self.perm_labels.append(label)
|
||||||
self.qcombo_processed = QtWidgets.QComboBox()
|
self.qcombo_processed = QtWidgets.QComboBox()
|
||||||
self.syn_checkbox = QtWidgets.QCheckBox('synthetics')
|
self.comp_checkbox = QtWidgets.QCheckBox('Load comparison data')
|
||||||
self.addQCboxItem('processed', 'green')
|
self.addQCboxItem('processed', 'green')
|
||||||
self.addQCboxItem('raw', 'black')
|
self.addQCboxItem('raw', 'black')
|
||||||
# self.perm_qcbox_right.setAlignment(2)
|
# self.perm_qcbox_right.setAlignment(2)
|
||||||
@ -834,9 +837,11 @@ class WaveformWidgetPG(QtWidgets.QWidget):
|
|||||||
def getPlotDict(self):
|
def getPlotDict(self):
|
||||||
return self.plotdict
|
return self.plotdict
|
||||||
|
|
||||||
def activateObspyDMToptions(self, activate):
|
def activateObspyDMToptions(self, activate: bool) -> None:
|
||||||
self.syn_checkbox.setVisible(activate)
|
self.qcombo_processed.setEnabled(activate)
|
||||||
self.qcombo_processed.setVisible(activate)
|
|
||||||
|
def activateCompareOptions(self, activate: bool) -> None:
|
||||||
|
self.comp_checkbox.setEnabled(activate)
|
||||||
|
|
||||||
def setPermText(self, number, text=None, color='black'):
|
def setPermText(self, number, text=None, color='black'):
|
||||||
if not 0 <= number < len(self.perm_labels):
|
if not 0 <= number < len(self.perm_labels):
|
||||||
@ -936,10 +941,10 @@ class WaveformWidgetPG(QtWidgets.QWidget):
|
|||||||
msg = 'plotting %s channel of station %s' % (channel, station)
|
msg = 'plotting %s channel of station %s' % (channel, station)
|
||||||
print(msg)
|
print(msg)
|
||||||
stime = trace.stats.starttime - self.wfstart
|
stime = trace.stats.starttime - self.wfstart
|
||||||
time_ax = prepTimeAxis(stime, trace)
|
time_ax = prep_time_axis(stime, trace)
|
||||||
if st_syn:
|
if st_syn:
|
||||||
stime_syn = trace_syn.stats.starttime - self.wfstart
|
stime_syn = trace_syn.stats.starttime - self.wfstart
|
||||||
time_ax_syn = prepTimeAxis(stime_syn, trace_syn)
|
time_ax_syn = prep_time_axis(stime_syn, trace_syn)
|
||||||
|
|
||||||
if method == 'fast':
|
if method == 'fast':
|
||||||
trace.data, time_ax = self.minMax(trace, time_ax)
|
trace.data, time_ax = self.minMax(trace, time_ax)
|
||||||
@ -959,7 +964,7 @@ class WaveformWidgetPG(QtWidgets.QWidget):
|
|||||||
[time for index, time in enumerate(time_ax_syn) if not index % nth_sample] if st_syn else [])
|
[time for index, time in enumerate(time_ax_syn) if not index % nth_sample] if st_syn else [])
|
||||||
trace.data = np.array(
|
trace.data = np.array(
|
||||||
[datum * gain + n for index, datum in enumerate(trace.data) if not index % nth_sample])
|
[datum * gain + n for index, datum in enumerate(trace.data) if not index % nth_sample])
|
||||||
trace_syn.data = np.array([datum + n for index, datum in enumerate(trace_syn.data)
|
trace_syn.data = np.array([datum + n + shift_syn for index, datum in enumerate(trace_syn.data)
|
||||||
if not index % nth_sample] if st_syn else [])
|
if not index % nth_sample] if st_syn else [])
|
||||||
plots.append((times, trace.data,
|
plots.append((times, trace.data,
|
||||||
times_syn, trace_syn.data))
|
times_syn, trace_syn.data))
|
||||||
@ -1148,12 +1153,12 @@ class PylotCanvas(FigureCanvas):
|
|||||||
ax.set_xlim(self.cur_xlim)
|
ax.set_xlim(self.cur_xlim)
|
||||||
ax.set_ylim(self.cur_ylim)
|
ax.set_ylim(self.cur_ylim)
|
||||||
self.refreshPickDlgText()
|
self.refreshPickDlgText()
|
||||||
ax.figure.canvas.draw()
|
ax.figure.canvas.draw_idle()
|
||||||
|
|
||||||
def panRelease(self, gui_event):
|
def panRelease(self, gui_event):
|
||||||
self.press = None
|
self.press = None
|
||||||
self.press_rel = None
|
self.press_rel = None
|
||||||
self.figure.canvas.draw()
|
self.figure.canvas.draw_idle()
|
||||||
|
|
||||||
def panZoom(self, gui_event, threshold=2., factor=1.1):
|
def panZoom(self, gui_event, threshold=2., factor=1.1):
|
||||||
if not gui_event.x and not gui_event.y:
|
if not gui_event.x and not gui_event.y:
|
||||||
@ -1371,11 +1376,15 @@ class PylotCanvas(FigureCanvas):
|
|||||||
plot_positions[channel] = plot_pos
|
plot_positions[channel] = plot_pos
|
||||||
return plot_positions
|
return plot_positions
|
||||||
|
|
||||||
def plotWFData(self, wfdata, title=None, zoomx=None, zoomy=None,
|
def plotWFData(self, wfdata, wfdata_compare=None, title=None, zoomx=None, zoomy=None,
|
||||||
noiselevel=None, scaleddata=False, mapping=True,
|
noiselevel=None, scaleddata=False, mapping=True,
|
||||||
component='*', nth_sample=1, iniPick=None, verbosity=0,
|
component='*', nth_sample=1, iniPick=None, verbosity=0,
|
||||||
plot_additional=False, additional_channel=None, scaleToChannel=None,
|
plot_additional=False, additional_channel=None, scaleToChannel=None,
|
||||||
snr=None):
|
snr=None):
|
||||||
|
def get_wf_dict(data: Stream = Stream(), linecolor = 'k', offset: float = 0., **plot_kwargs):
|
||||||
|
return dict(data=data, linecolor=linecolor, offset=offset, plot_kwargs=plot_kwargs)
|
||||||
|
|
||||||
|
|
||||||
ax = self.axes[0]
|
ax = self.axes[0]
|
||||||
ax.cla()
|
ax.cla()
|
||||||
|
|
||||||
@ -1386,21 +1395,33 @@ class PylotCanvas(FigureCanvas):
|
|||||||
settings = QSettings()
|
settings = QSettings()
|
||||||
compclass = SetChannelComponents.from_qsettings(settings)
|
compclass = SetChannelComponents.from_qsettings(settings)
|
||||||
|
|
||||||
|
linecolor = (0., 0., 0., 1.) if not self.style else self.style['linecolor']['rgba_mpl']
|
||||||
|
|
||||||
|
plot_streams = dict(wfdata=get_wf_dict(linecolor=linecolor, linewidth=0.7),
|
||||||
|
wfdata_comp=get_wf_dict(offset=0.1, linecolor='b', alpha=0.7, linewidth=0.5))
|
||||||
|
|
||||||
if not component == '*':
|
if not component == '*':
|
||||||
alter_comp = compclass.getCompPosition(component)
|
alter_comp = compclass.getCompPosition(component)
|
||||||
# alter_comp = str(alter_comp[0])
|
# alter_comp = str(alter_comp[0])
|
||||||
|
|
||||||
st_select = wfdata.select(component=component)
|
plot_streams['wfdata']['data'] = wfdata.select(component=component)
|
||||||
st_select += wfdata.select(component=alter_comp)
|
plot_streams['wfdata']['data'] += wfdata.select(component=alter_comp)
|
||||||
|
if wfdata_compare:
|
||||||
|
plot_streams['wfdata_comp']['data'] = wfdata_compare.select(component=component)
|
||||||
|
plot_streams['wfdata_comp']['data'] += wfdata_compare.select(component=alter_comp)
|
||||||
else:
|
else:
|
||||||
st_select = wfdata
|
plot_streams['wfdata']['data'] = wfdata
|
||||||
|
if wfdata_compare:
|
||||||
|
plot_streams['wfdata_comp']['data'] = wfdata_compare
|
||||||
|
|
||||||
|
st_main = plot_streams['wfdata']['data']
|
||||||
|
|
||||||
if mapping:
|
if mapping:
|
||||||
plot_positions = self.calcPlotPositions(st_select, compclass)
|
plot_positions = self.calcPlotPositions(st_main, compclass)
|
||||||
|
|
||||||
# list containing tuples of network, station, channel and plot position (for sorting)
|
# list containing tuples of network, station, channel and plot position (for sorting)
|
||||||
nslc = []
|
nslc = []
|
||||||
for plot_pos, trace in enumerate(st_select):
|
for plot_pos, trace in enumerate(st_main):
|
||||||
if not trace.stats.channel[-1] in ['Z', 'N', 'E', '1', '2', '3']:
|
if not trace.stats.channel[-1] in ['Z', 'N', 'E', '1', '2', '3']:
|
||||||
print('Warning: Unrecognized channel {}'.format(trace.stats.channel))
|
print('Warning: Unrecognized channel {}'.format(trace.stats.channel))
|
||||||
continue
|
continue
|
||||||
@ -1408,10 +1429,12 @@ class PylotCanvas(FigureCanvas):
|
|||||||
nslc.sort()
|
nslc.sort()
|
||||||
nslc.reverse()
|
nslc.reverse()
|
||||||
|
|
||||||
linecolor = (0., 0., 0., 1.) if not self.style else self.style['linecolor']['rgba_mpl']
|
|
||||||
|
|
||||||
for n, seed_id in enumerate(nslc):
|
for n, seed_id in enumerate(nslc):
|
||||||
network, station, location, channel = seed_id.split('.')
|
network, station, location, channel = seed_id.split('.')
|
||||||
|
for wf_name, wf_dict in plot_streams.items():
|
||||||
|
st_select = wf_dict.get('data')
|
||||||
|
if not st_select:
|
||||||
|
continue
|
||||||
st = st_select.select(id=seed_id)
|
st = st_select.select(id=seed_id)
|
||||||
trace = st[0].copy()
|
trace = st[0].copy()
|
||||||
if mapping:
|
if mapping:
|
||||||
@ -1422,7 +1445,7 @@ class PylotCanvas(FigureCanvas):
|
|||||||
msg = 'plotting %s channel of station %s' % (channel, station)
|
msg = 'plotting %s channel of station %s' % (channel, station)
|
||||||
print(msg)
|
print(msg)
|
||||||
stime = trace.stats.starttime - wfstart
|
stime = trace.stats.starttime - wfstart
|
||||||
time_ax = prepTimeAxis(stime, trace)
|
time_ax = prep_time_axis(stime, trace)
|
||||||
if time_ax is not None:
|
if time_ax is not None:
|
||||||
if scaleToChannel:
|
if scaleToChannel:
|
||||||
st_scale = wfdata.select(channel=scaleToChannel)
|
st_scale = wfdata.select(channel=scaleToChannel)
|
||||||
@ -1435,15 +1458,17 @@ class PylotCanvas(FigureCanvas):
|
|||||||
trace.detrend('constant')
|
trace.detrend('constant')
|
||||||
trace.normalize(np.max(np.abs(trace.data)) * 2)
|
trace.normalize(np.max(np.abs(trace.data)) * 2)
|
||||||
|
|
||||||
|
offset = wf_dict.get('offset')
|
||||||
|
|
||||||
times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
|
times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
|
||||||
data = [datum + n for index, datum in enumerate(trace.data) if not index % nth_sample]
|
data = [datum + n + offset for index, datum in enumerate(trace.data) if not index % nth_sample]
|
||||||
ax.axhline(n, color="0.5", lw=0.5)
|
ax.axhline(n, color="0.5", lw=0.5)
|
||||||
ax.plot(times, data, color=linecolor, linewidth=0.7)
|
ax.plot(times, data, color=wf_dict.get('linecolor'), **wf_dict.get('plot_kwargs'))
|
||||||
if noiselevel is not None:
|
if noiselevel is not None:
|
||||||
for level in [-noiselevel[channel], noiselevel[channel]]:
|
for level in [-noiselevel[channel], noiselevel[channel]]:
|
||||||
ax.plot([time_ax[0], time_ax[-1]],
|
ax.plot([time_ax[0], time_ax[-1]],
|
||||||
[n + level, n + level],
|
[n + level, n + level],
|
||||||
color=linecolor,
|
color=wf_dict.get('linecolor'),
|
||||||
linestyle='dashed')
|
linestyle='dashed')
|
||||||
self.setPlotDict(n, seed_id)
|
self.setPlotDict(n, seed_id)
|
||||||
if plot_additional and additional_channel:
|
if plot_additional and additional_channel:
|
||||||
@ -1460,7 +1485,7 @@ class PylotCanvas(FigureCanvas):
|
|||||||
if not scaleddata:
|
if not scaleddata:
|
||||||
trace.detrend('constant')
|
trace.detrend('constant')
|
||||||
trace.normalize(np.max(np.abs(trace.data)) * 2)
|
trace.normalize(np.max(np.abs(trace.data)) * 2)
|
||||||
time_ax = prepTimeAxis(stime, trace)
|
time_ax = prep_time_axis(stime, trace)
|
||||||
times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
|
times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
|
||||||
p_data = compare_stream[0].data
|
p_data = compare_stream[0].data
|
||||||
# #normalize
|
# #normalize
|
||||||
@ -1574,6 +1599,8 @@ class SearchFileByExtensionDialog(QtWidgets.QDialog):
|
|||||||
self.events = events
|
self.events = events
|
||||||
self.filepaths = []
|
self.filepaths = []
|
||||||
self.file_extensions = []
|
self.file_extensions = []
|
||||||
|
self.check_all_state = True
|
||||||
|
self.merge_strategy = None
|
||||||
self.default_text = default_text
|
self.default_text = default_text
|
||||||
self.label = label
|
self.label = label
|
||||||
self.setButtons()
|
self.setButtons()
|
||||||
@ -1581,16 +1608,17 @@ class SearchFileByExtensionDialog(QtWidgets.QDialog):
|
|||||||
self.connectSignals()
|
self.connectSignals()
|
||||||
self.showPaths()
|
self.showPaths()
|
||||||
self.refreshSelectionBox()
|
self.refreshSelectionBox()
|
||||||
self.refresh_timer = QTimer(self)
|
# self.refresh_timer = QTimer(self)
|
||||||
self.refresh_timer.timeout.connect(self.showPaths)
|
# self.refresh_timer.timeout.connect(self.showPaths)
|
||||||
self.refresh_timer.start(10000)
|
# self.refresh_timer.start(10000)
|
||||||
|
|
||||||
self.resize(800, 450)
|
self.resize(800, 450)
|
||||||
|
|
||||||
|
|
||||||
def setupUi(self):
|
def setupUi(self):
|
||||||
|
ncol = 4
|
||||||
self.main_layout = QtWidgets.QVBoxLayout()
|
self.main_layout = QtWidgets.QVBoxLayout()
|
||||||
self.header_layout = QtWidgets.QHBoxLayout()
|
self.header_layout = QtWidgets.QHBoxLayout()
|
||||||
|
self.footer_layout = QtWidgets.QHBoxLayout()
|
||||||
#
|
#
|
||||||
self.setLayout(self.main_layout)
|
self.setLayout(self.main_layout)
|
||||||
|
|
||||||
@ -1604,11 +1632,24 @@ class SearchFileByExtensionDialog(QtWidgets.QDialog):
|
|||||||
self.searchButton = QtWidgets.QPushButton('Search')
|
self.searchButton = QtWidgets.QPushButton('Search')
|
||||||
self.searchButton.setVisible(False)
|
self.searchButton.setVisible(False)
|
||||||
|
|
||||||
|
# check/uncheck button for table
|
||||||
|
self.checkAllButton = QtWidgets.QPushButton('Check/Uncheck all')
|
||||||
|
|
||||||
|
# radiobutton for merge selection
|
||||||
|
self.mergeRadioButtonGroup = QtWidgets.QButtonGroup()
|
||||||
|
self.merge_button = QtWidgets.QRadioButton('Merge')
|
||||||
|
self.overwrite_button = QtWidgets.QRadioButton('Overwrite')
|
||||||
|
self.mergeRadioButtonGroup.addButton(self.merge_button)
|
||||||
|
self.mergeRadioButtonGroup.addButton(self.overwrite_button)
|
||||||
|
self.merge_button.setChecked(True)
|
||||||
|
self.merge_strategy = self.merge_button.text()
|
||||||
|
|
||||||
|
# table
|
||||||
self.tableWidget = QtWidgets.QTableWidget()
|
self.tableWidget = QtWidgets.QTableWidget()
|
||||||
tableWidget = self.tableWidget
|
tableWidget = self.tableWidget
|
||||||
tableWidget.setColumnCount(3)
|
tableWidget.setColumnCount(ncol)
|
||||||
tableWidget.setRowCount(len(self.events))
|
tableWidget.setRowCount(len(self.events))
|
||||||
tableWidget.setHorizontalHeaderLabels(('Event ID', 'Filename', 'Last modified'))
|
tableWidget.setHorizontalHeaderLabels(('', 'Event ID', 'Filename', 'Last modified'))
|
||||||
tableWidget.setEditTriggers(tableWidget.NoEditTriggers)
|
tableWidget.setEditTriggers(tableWidget.NoEditTriggers)
|
||||||
tableWidget.setSortingEnabled(True)
|
tableWidget.setSortingEnabled(True)
|
||||||
header = tableWidget.horizontalHeader()
|
header = tableWidget.horizontalHeader()
|
||||||
@ -1621,9 +1662,17 @@ class SearchFileByExtensionDialog(QtWidgets.QDialog):
|
|||||||
self.header_layout.addWidget(self.comboBox)
|
self.header_layout.addWidget(self.comboBox)
|
||||||
self.header_layout.addWidget(self.searchButton)
|
self.header_layout.addWidget(self.searchButton)
|
||||||
|
|
||||||
|
self.footer_layout.addWidget(self.checkAllButton)
|
||||||
|
self.footer_layout.addWidget(self.statusText)
|
||||||
|
self.footer_layout.addWidget(self.merge_button)
|
||||||
|
self.footer_layout.addWidget(self.overwrite_button)
|
||||||
|
|
||||||
|
self.footer_layout.setStretch(0, 0)
|
||||||
|
self.footer_layout.setStretch(1, 1)
|
||||||
|
|
||||||
self.main_layout.addLayout(self.header_layout)
|
self.main_layout.addLayout(self.header_layout)
|
||||||
self.main_layout.addWidget(self.tableWidget)
|
self.main_layout.addWidget(self.tableWidget)
|
||||||
self.main_layout.addWidget(self.statusText)
|
self.main_layout.addLayout(self.footer_layout)
|
||||||
self.main_layout.addWidget(self._buttonbox)
|
self.main_layout.addWidget(self._buttonbox)
|
||||||
|
|
||||||
def showPaths(self):
|
def showPaths(self):
|
||||||
@ -1632,23 +1681,23 @@ class SearchFileByExtensionDialog(QtWidgets.QDialog):
|
|||||||
self.tableWidget.clearContents()
|
self.tableWidget.clearContents()
|
||||||
for index, event in enumerate(self.events):
|
for index, event in enumerate(self.events):
|
||||||
filename = get_pylot_eventfile_with_extension(event, fext)
|
filename = get_pylot_eventfile_with_extension(event, fext)
|
||||||
self.tableWidget.setItem(index, 0, QtWidgets.QTableWidgetItem(f'{event.pylot_id}'))
|
pf_selected_item = QtWidgets.QTableWidgetItem()
|
||||||
|
check_state = QtCore.Qt.Checked if filename else QtCore.Qt.Unchecked
|
||||||
|
pf_selected_item.setCheckState(check_state)
|
||||||
|
self.tableWidget.setItem(index, 0, pf_selected_item)
|
||||||
|
self.tableWidget.setItem(index, 1, QtWidgets.QTableWidgetItem(f'{event.pylot_id}'))
|
||||||
if filename:
|
if filename:
|
||||||
self.filepaths.append(filename)
|
self.filepaths.append(filename)
|
||||||
ts = int(os.path.getmtime(filename))
|
ts = int(os.path.getmtime(filename))
|
||||||
|
|
||||||
# create QTableWidgetItems of filepath and last modification time
|
# create QTableWidgetItems of filepath and last modification time
|
||||||
fname_item = QtWidgets.QTableWidgetItem(f'{os.path.split(filename)[-1]}')
|
fname_item = QtWidgets.QTableWidgetItem(f'{os.path.split(filename)[-1]}')
|
||||||
|
fname_item.setData(3, filename)
|
||||||
ts_item = QtWidgets.QTableWidgetItem(f'{datetime.datetime.fromtimestamp(ts)}')
|
ts_item = QtWidgets.QTableWidgetItem(f'{datetime.datetime.fromtimestamp(ts)}')
|
||||||
self.tableWidget.setItem(index, 1, fname_item)
|
self.tableWidget.setItem(index, 2, fname_item)
|
||||||
self.tableWidget.setItem(index, 2, ts_item)
|
self.tableWidget.setItem(index, 3, ts_item)
|
||||||
|
|
||||||
# TODO: Idea -> only refresh if table contents changed. Use selection to load only a subset of files
|
self.update_status()
|
||||||
if len(self.filepaths) > 0:
|
|
||||||
status_text = f'Found {len(self.filepaths)} eventfiles. Do you want to load them?'
|
|
||||||
else:
|
|
||||||
status_text = 'Did not find any files for specified file mask.'
|
|
||||||
self.statusText.setText(status_text)
|
|
||||||
|
|
||||||
def refreshSelectionBox(self):
|
def refreshSelectionBox(self):
|
||||||
fext = self.comboBox.currentText()
|
fext = self.comboBox.currentText()
|
||||||
@ -1668,12 +1717,52 @@ class SearchFileByExtensionDialog(QtWidgets.QDialog):
|
|||||||
self._buttonbox = QDialogButtonBox(QDialogButtonBox.Ok |
|
self._buttonbox = QDialogButtonBox(QDialogButtonBox.Ok |
|
||||||
QDialogButtonBox.Cancel)
|
QDialogButtonBox.Cancel)
|
||||||
|
|
||||||
|
def toggleCheckAll(self):
|
||||||
|
self.check_all_state = not self.check_all_state
|
||||||
|
self.checkAll(self.check_all_state)
|
||||||
|
|
||||||
|
def checkAll(self, state):
|
||||||
|
state = QtCore.Qt.Checked if state else QtCore.Qt.Unchecked
|
||||||
|
for row_ind in range(self.tableWidget.rowCount()):
|
||||||
|
item = self.tableWidget.item(row_ind, 0)
|
||||||
|
item.setCheckState(state)
|
||||||
|
|
||||||
|
def getChecked(self):
|
||||||
|
filepaths = []
|
||||||
|
for row_ind in range(self.tableWidget.rowCount()):
|
||||||
|
item_check = self.tableWidget.item(row_ind, 0)
|
||||||
|
if item_check.checkState() == QtCore.Qt.Checked:
|
||||||
|
item_fname = self.tableWidget.item(row_ind, 2)
|
||||||
|
if item_fname:
|
||||||
|
filepath = item_fname.data(3)
|
||||||
|
filepaths.append(filepath)
|
||||||
|
return filepaths
|
||||||
|
|
||||||
|
def update_status(self, row=None, col=None):
|
||||||
|
if col is not None and col != 0:
|
||||||
|
return
|
||||||
|
filepaths = self.getChecked()
|
||||||
|
if len(filepaths) > 0:
|
||||||
|
status_text = f"Found {len(filepaths)} eventfile{'s' if len(filepaths) > 1 else ''}. Do you want to load them?"
|
||||||
|
else:
|
||||||
|
status_text = 'Did not find/select any files for specified file mask.'
|
||||||
|
self.statusText.setText(status_text)
|
||||||
|
|
||||||
|
def update_merge_strategy(self):
|
||||||
|
for button in (self.merge_button, self.overwrite_button):
|
||||||
|
if button.isChecked():
|
||||||
|
self.merge_strategy = button.text()
|
||||||
|
|
||||||
def connectSignals(self):
|
def connectSignals(self):
|
||||||
self._buttonbox.accepted.connect(self.accept)
|
self._buttonbox.accepted.connect(self.accept)
|
||||||
self._buttonbox.rejected.connect(self.reject)
|
self._buttonbox.rejected.connect(self.reject)
|
||||||
self.comboBox.editTextChanged.connect(self.showPaths)
|
self.comboBox.editTextChanged.connect(self.showPaths)
|
||||||
self.searchButton.clicked.connect(self.showPaths)
|
self.searchButton.clicked.connect(self.showPaths)
|
||||||
|
self.checkAllButton.clicked.connect(self.toggleCheckAll)
|
||||||
|
self.checkAllButton.clicked.connect(self.update_status)
|
||||||
|
self.tableWidget.cellClicked.connect(self.update_status)
|
||||||
|
self.merge_button.clicked.connect(self.update_merge_strategy)
|
||||||
|
self.overwrite_button.clicked.connect(self.update_merge_strategy)
|
||||||
|
|
||||||
|
|
||||||
class SingleTextLineDialog(QtWidgets.QDialog):
|
class SingleTextLineDialog(QtWidgets.QDialog):
|
||||||
@ -1780,16 +1869,18 @@ class PhaseDefaults(QtWidgets.QDialog):
|
|||||||
class PickDlg(QDialog):
|
class PickDlg(QDialog):
|
||||||
update_picks = QtCore.Signal(dict)
|
update_picks = QtCore.Signal(dict)
|
||||||
|
|
||||||
def __init__(self, parent=None, data=None, station=None, network=None, location=None, picks=None,
|
def __init__(self, parent=None, data=None, data_compare=None, station=None, network=None, location=None, picks=None,
|
||||||
autopicks=None, rotate=False, parameter=None, embedded=False, metadata=None,
|
autopicks=None, rotate=False, parameter=None, embedded=False, metadata=None, show_comp_data=False,
|
||||||
event=None, filteroptions=None, model=None, wftype=None):
|
event=None, filteroptions=None, wftype=None):
|
||||||
super(PickDlg, self).__init__(parent, Qt.Window)
|
super(PickDlg, self).__init__(parent, Qt.Window)
|
||||||
self.orig_parent = parent
|
self.orig_parent = parent
|
||||||
self.setAttribute(Qt.WA_DeleteOnClose)
|
self.setAttribute(Qt.WA_DeleteOnClose)
|
||||||
|
|
||||||
# initialize attributes
|
# initialize attributes
|
||||||
self.parameter = parameter
|
self.parameter = parameter
|
||||||
|
model = self.parameter.get('taup_model')
|
||||||
self._embedded = embedded
|
self._embedded = embedded
|
||||||
|
self.showCompData = show_comp_data
|
||||||
self.station = station
|
self.station = station
|
||||||
self.network = network
|
self.network = network
|
||||||
self.location = location
|
self.location = location
|
||||||
@ -1828,22 +1919,6 @@ class PickDlg(QDialog):
|
|||||||
else:
|
else:
|
||||||
self.filteroptions = FILTERDEFAULTS
|
self.filteroptions = FILTERDEFAULTS
|
||||||
self.pick_block = False
|
self.pick_block = False
|
||||||
self.nextStation = QtWidgets.QCheckBox('Continue with next station ')
|
|
||||||
|
|
||||||
# comparison channel
|
|
||||||
self.compareChannel = QtWidgets.QComboBox()
|
|
||||||
self.compareChannel.activated.connect(self.resetPlot)
|
|
||||||
|
|
||||||
# scale channel
|
|
||||||
self.scaleChannel = QtWidgets.QComboBox()
|
|
||||||
self.scaleChannel.activated.connect(self.resetPlot)
|
|
||||||
|
|
||||||
# initialize panning attributes
|
|
||||||
self.press = None
|
|
||||||
self.xpress = None
|
|
||||||
self.ypress = None
|
|
||||||
self.cur_xlim = None
|
|
||||||
self.cur_ylim = None
|
|
||||||
|
|
||||||
# set attribute holding data
|
# set attribute holding data
|
||||||
if data is None or not data:
|
if data is None or not data:
|
||||||
@ -1856,6 +1931,31 @@ class PickDlg(QDialog):
|
|||||||
raise Exception(errmsg)
|
raise Exception(errmsg)
|
||||||
else:
|
else:
|
||||||
self.data = data
|
self.data = data
|
||||||
|
self.data_compare = data_compare
|
||||||
|
|
||||||
|
self.nextStation = QtWidgets.QCheckBox('Continue with next station ')
|
||||||
|
|
||||||
|
# comparison channel
|
||||||
|
self.referenceChannel = QtWidgets.QComboBox()
|
||||||
|
self.referenceChannel.activated.connect(self.resetPlot)
|
||||||
|
|
||||||
|
# comparison channel
|
||||||
|
self.compareCB = QtWidgets.QCheckBox()
|
||||||
|
self.compareCB.setChecked(self.showCompData)
|
||||||
|
self.compareCB.clicked.connect(self.switchCompData)
|
||||||
|
self.compareCB.clicked.connect(self.resetPlot)
|
||||||
|
self.compareCB.setVisible(bool(self.data_compare))
|
||||||
|
|
||||||
|
# scale channel
|
||||||
|
self.scaleChannel = QtWidgets.QComboBox()
|
||||||
|
self.scaleChannel.activated.connect(self.resetPlot)
|
||||||
|
|
||||||
|
# initialize panning attributes
|
||||||
|
self.press = None
|
||||||
|
self.xpress = None
|
||||||
|
self.ypress = None
|
||||||
|
self.cur_xlim = None
|
||||||
|
self.cur_ylim = None
|
||||||
|
|
||||||
self.stime, self.etime = full_range(self.getWFData())
|
self.stime, self.etime = full_range(self.getWFData())
|
||||||
|
|
||||||
@ -1868,12 +1968,12 @@ class PickDlg(QDialog):
|
|||||||
self.setupUi()
|
self.setupUi()
|
||||||
|
|
||||||
# fill compare and scale channels
|
# fill compare and scale channels
|
||||||
self.compareChannel.addItem('-', None)
|
self.referenceChannel.addItem('-', None)
|
||||||
self.scaleChannel.addItem('individual', None)
|
self.scaleChannel.addItem('individual', None)
|
||||||
|
|
||||||
for trace in self.getWFData():
|
for trace in self.getWFData():
|
||||||
channel = trace.stats.channel
|
channel = trace.stats.channel
|
||||||
self.compareChannel.addItem(channel, trace)
|
self.referenceChannel.addItem(channel, trace)
|
||||||
if not channel[-1] in ['Z', 'N', 'E', '1', '2', '3']:
|
if not channel[-1] in ['Z', 'N', 'E', '1', '2', '3']:
|
||||||
print('Skipping unknown channel for scaling: {}'.format(channel))
|
print('Skipping unknown channel for scaling: {}'.format(channel))
|
||||||
continue
|
continue
|
||||||
@ -1890,7 +1990,7 @@ class PickDlg(QDialog):
|
|||||||
if self.wftype is not None:
|
if self.wftype is not None:
|
||||||
title += ' | ({})'.format(self.wftype)
|
title += ' | ({})'.format(self.wftype)
|
||||||
|
|
||||||
self.multicompfig.plotWFData(wfdata=self.getWFData(),
|
self.multicompfig.plotWFData(wfdata=self.getWFData(), wfdata_compare=self.getWFDataComp(),
|
||||||
title=title)
|
title=title)
|
||||||
|
|
||||||
self.multicompfig.setZoomBorders2content()
|
self.multicompfig.setZoomBorders2content()
|
||||||
@ -2066,8 +2166,11 @@ class PickDlg(QDialog):
|
|||||||
_dialtoolbar.addWidget(est_label)
|
_dialtoolbar.addWidget(est_label)
|
||||||
_dialtoolbar.addWidget(self.plot_arrivals_button)
|
_dialtoolbar.addWidget(self.plot_arrivals_button)
|
||||||
_dialtoolbar.addSeparator()
|
_dialtoolbar.addSeparator()
|
||||||
_dialtoolbar.addWidget(QtWidgets.QLabel('Compare to channel: '))
|
_dialtoolbar.addWidget(QtWidgets.QLabel('Plot reference channel: '))
|
||||||
_dialtoolbar.addWidget(self.compareChannel)
|
_dialtoolbar.addWidget(self.referenceChannel)
|
||||||
|
_dialtoolbar.addSeparator()
|
||||||
|
_dialtoolbar.addWidget(QtWidgets.QLabel('Compare: '))
|
||||||
|
_dialtoolbar.addWidget(self.compareCB)
|
||||||
_dialtoolbar.addSeparator()
|
_dialtoolbar.addSeparator()
|
||||||
_dialtoolbar.addWidget(QtWidgets.QLabel('Scaling: '))
|
_dialtoolbar.addWidget(QtWidgets.QLabel('Scaling: '))
|
||||||
_dialtoolbar.addWidget(self.scaleChannel)
|
_dialtoolbar.addWidget(self.scaleChannel)
|
||||||
@ -2152,10 +2255,12 @@ class PickDlg(QDialog):
|
|||||||
station_id = trace.get_id()
|
station_id = trace.get_id()
|
||||||
starttime = trace.stats.starttime
|
starttime = trace.stats.starttime
|
||||||
station_coords = self.metadata.get_coordinates(station_id, starttime)
|
station_coords = self.metadata.get_coordinates(station_id, starttime)
|
||||||
|
if not station_coords:
|
||||||
|
print('get_arrivals: No station coordinates found. Return!')
|
||||||
|
return
|
||||||
origins = self.pylot_event.origins
|
origins = self.pylot_event.origins
|
||||||
if phases == ['None', 'None']:
|
if phases == ['None', 'None']:
|
||||||
print("get_arrivals: Creation info (manual or auto) not available!")
|
print("get_arrivals: Creation info (manual or auto) not available! Return!")
|
||||||
print("Return!")
|
|
||||||
return
|
return
|
||||||
if origins:
|
if origins:
|
||||||
source_origin = origins[0]
|
source_origin = origins[0]
|
||||||
@ -2166,8 +2271,8 @@ class PickDlg(QDialog):
|
|||||||
arrivals = func[plot](source_origin.depth,
|
arrivals = func[plot](source_origin.depth,
|
||||||
source_origin.latitude,
|
source_origin.latitude,
|
||||||
source_origin.longitude,
|
source_origin.longitude,
|
||||||
station_coords['latitude'],
|
station_coords.get('latitude'),
|
||||||
station_coords['longitude'],
|
station_coords.get('longitude'),
|
||||||
phases)
|
phases)
|
||||||
self.arrivals = arrivals
|
self.arrivals = arrivals
|
||||||
|
|
||||||
@ -2267,7 +2372,7 @@ class PickDlg(QDialog):
|
|||||||
|
|
||||||
# create action and add to menu
|
# create action and add to menu
|
||||||
# phase name transferred using lambda function
|
# phase name transferred using lambda function
|
||||||
slot = lambda phase=phase, phaseID=phaseID: phaseSelect[phaseID](phase)
|
slot = lambda ph=phase, phID=phaseID: phaseSelect[phID](ph)
|
||||||
picksAction = createAction(parent=self, text=phase,
|
picksAction = createAction(parent=self, text=phase,
|
||||||
slot=slot,
|
slot=slot,
|
||||||
shortcut=shortcut)
|
shortcut=shortcut)
|
||||||
@ -2402,7 +2507,7 @@ class PickDlg(QDialog):
|
|||||||
def activatePicking(self):
|
def activatePicking(self):
|
||||||
self.leave_rename_phase()
|
self.leave_rename_phase()
|
||||||
self.renamePhaseAction.setEnabled(False)
|
self.renamePhaseAction.setEnabled(False)
|
||||||
self.compareChannel.setEnabled(False)
|
self.referenceChannel.setEnabled(False)
|
||||||
self.scaleChannel.setEnabled(False)
|
self.scaleChannel.setEnabled(False)
|
||||||
phase = self.currentPhase
|
phase = self.currentPhase
|
||||||
phaseID = self.getPhaseID(phase)
|
phaseID = self.getPhaseID(phase)
|
||||||
@ -2434,7 +2539,7 @@ class PickDlg(QDialog):
|
|||||||
self.disconnectPressEvent()
|
self.disconnectPressEvent()
|
||||||
self.multicompfig.connectEvents()
|
self.multicompfig.connectEvents()
|
||||||
self.renamePhaseAction.setEnabled(True)
|
self.renamePhaseAction.setEnabled(True)
|
||||||
self.compareChannel.setEnabled(True)
|
self.referenceChannel.setEnabled(True)
|
||||||
self.scaleChannel.setEnabled(True)
|
self.scaleChannel.setEnabled(True)
|
||||||
self.connect_pick_delete()
|
self.connect_pick_delete()
|
||||||
self.draw()
|
self.draw()
|
||||||
@ -2507,6 +2612,12 @@ class PickDlg(QDialog):
|
|||||||
def getWFData(self):
|
def getWFData(self):
|
||||||
return self.data
|
return self.data
|
||||||
|
|
||||||
|
def getWFDataComp(self):
|
||||||
|
if self.showCompData:
|
||||||
|
return self.data_compare
|
||||||
|
else:
|
||||||
|
return Stream()
|
||||||
|
|
||||||
def selectWFData(self, channel):
|
def selectWFData(self, channel):
|
||||||
component = channel[-1].upper()
|
component = channel[-1].upper()
|
||||||
wfdata = Stream()
|
wfdata = Stream()
|
||||||
@ -2628,11 +2739,16 @@ class PickDlg(QDialog):
|
|||||||
|
|
||||||
stime = self.getStartTime()
|
stime = self.getStartTime()
|
||||||
|
|
||||||
# copy data for plotting
|
# copy wfdata for plotting
|
||||||
data = self.getWFData().copy()
|
wfdata = self.getWFData().copy()
|
||||||
data = self.getPickPhases(data, phase)
|
wfdata_comp = self.getWFDataComp().copy()
|
||||||
data.normalize()
|
wfdata = self.getPickPhases(wfdata, phase)
|
||||||
if not data:
|
wfdata_comp = self.getPickPhases(wfdata_comp, phase)
|
||||||
|
for wfd in [wfdata, wfdata_comp]:
|
||||||
|
if wfd:
|
||||||
|
wfd.normalize()
|
||||||
|
|
||||||
|
if not wfdata:
|
||||||
QtWidgets.QMessageBox.warning(self, 'No channel to plot',
|
QtWidgets.QMessageBox.warning(self, 'No channel to plot',
|
||||||
'No channel to plot for phase: {}. '
|
'No channel to plot for phase: {}. '
|
||||||
'Make sure to select the correct channels for P and S '
|
'Make sure to select the correct channels for P and S '
|
||||||
@ -2640,14 +2756,16 @@ class PickDlg(QDialog):
|
|||||||
self.leave_picking_mode()
|
self.leave_picking_mode()
|
||||||
return
|
return
|
||||||
|
|
||||||
# filter data and trace on which is picked prior to determination of SNR
|
# filter wfdata and trace on which is picked prior to determination of SNR
|
||||||
filterphase = self.currentFilterPhase()
|
filterphase = self.currentFilterPhase()
|
||||||
if filterphase:
|
if filterphase:
|
||||||
filteroptions = self.getFilterOptions(filterphase).parseFilterOptions()
|
filteroptions = self.getFilterOptions(filterphase).parseFilterOptions()
|
||||||
try:
|
try:
|
||||||
data.detrend('linear')
|
for wfd in [wfdata, wfdata_comp]:
|
||||||
data.filter(**filteroptions)
|
if wfd:
|
||||||
# wfdata.filter(**filteroptions)# MP MP removed filtering of original data
|
wfd.detrend('linear')
|
||||||
|
wfd.filter(**filteroptions)
|
||||||
|
# wfdata.filter(**filteroptions)# MP MP removed filtering of original wfdata
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
self.qmb = QtWidgets.QMessageBox(QtWidgets.QMessageBox.Icon.Information,
|
self.qmb = QtWidgets.QMessageBox(QtWidgets.QMessageBox.Icon.Information,
|
||||||
'Denied',
|
'Denied',
|
||||||
@ -2657,8 +2775,8 @@ class PickDlg(QDialog):
|
|||||||
snr = []
|
snr = []
|
||||||
noiselevels = {}
|
noiselevels = {}
|
||||||
# determine SNR and noiselevel
|
# determine SNR and noiselevel
|
||||||
for trace in data.traces:
|
for trace in wfdata.traces:
|
||||||
st = data.select(channel=trace.stats.channel)
|
st = wfdata.select(channel=trace.stats.channel)
|
||||||
stime_diff = trace.stats.starttime - stime
|
stime_diff = trace.stats.starttime - stime
|
||||||
result = getSNR(st, (noise_win, gap_win, signal_win), ini_pick - stime_diff)
|
result = getSNR(st, (noise_win, gap_win, signal_win), ini_pick - stime_diff)
|
||||||
snr.append(result[0])
|
snr.append(result[0])
|
||||||
@ -2669,12 +2787,14 @@ class PickDlg(QDialog):
|
|||||||
noiselevel = nfac
|
noiselevel = nfac
|
||||||
noiselevels[trace.stats.channel] = noiselevel
|
noiselevels[trace.stats.channel] = noiselevel
|
||||||
|
|
||||||
# prepare plotting of data
|
# prepare plotting of wfdata
|
||||||
for trace in data:
|
for wfd in [wfdata, wfdata_comp]:
|
||||||
t = prepTimeAxis(trace.stats.starttime - stime, trace)
|
if wfd:
|
||||||
|
for trace in wfd:
|
||||||
|
t = prep_time_axis(trace.stats.starttime - stime, trace)
|
||||||
inoise = getnoisewin(t, ini_pick, noise_win, gap_win)
|
inoise = getnoisewin(t, ini_pick, noise_win, gap_win)
|
||||||
trace = demeanTrace(trace, inoise)
|
trace = demeanTrace(trace, inoise)
|
||||||
# upscale trace data in a way that each trace is vertically zoomed to noiselevel*factor
|
# upscale trace wfdata in a way that each trace is vertically zoomed to noiselevel*factor
|
||||||
channel = trace.stats.channel
|
channel = trace.stats.channel
|
||||||
noiselevel = noiselevels[channel]
|
noiselevel = noiselevels[channel]
|
||||||
noiseScaleFactor = self.calcNoiseScaleFactor(noiselevel, zoomfactor=5.)
|
noiseScaleFactor = self.calcNoiseScaleFactor(noiselevel, zoomfactor=5.)
|
||||||
@ -2685,7 +2805,7 @@ class PickDlg(QDialog):
|
|||||||
x_res = getResolutionWindow(mean_snr, parameter.get('extent'))
|
x_res = getResolutionWindow(mean_snr, parameter.get('extent'))
|
||||||
|
|
||||||
xlims = [ini_pick - x_res, ini_pick + x_res]
|
xlims = [ini_pick - x_res, ini_pick + x_res]
|
||||||
ylims = list(np.array([-.5, .5]) + [0, len(data) - 1])
|
ylims = list(np.array([-.5, .5]) + [0, len(wfdata) - 1])
|
||||||
|
|
||||||
title = self.getStation() + ' picking mode'
|
title = self.getStation() + ' picking mode'
|
||||||
title += ' | SNR: {}'.format(mean_snr)
|
title += ' | SNR: {}'.format(mean_snr)
|
||||||
@ -2693,9 +2813,10 @@ class PickDlg(QDialog):
|
|||||||
filtops_str = transformFilteroptions2String(filteroptions)
|
filtops_str = transformFilteroptions2String(filteroptions)
|
||||||
title += ' | Filteroptions: {}'.format(filtops_str)
|
title += ' | Filteroptions: {}'.format(filtops_str)
|
||||||
|
|
||||||
plot_additional = bool(self.compareChannel.currentText())
|
plot_additional = bool(self.referenceChannel.currentText())
|
||||||
additional_channel = self.compareChannel.currentText()
|
additional_channel = self.referenceChannel.currentText()
|
||||||
self.multicompfig.plotWFData(wfdata=data,
|
self.multicompfig.plotWFData(wfdata=wfdata,
|
||||||
|
wfdata_compare=wfdata_comp,
|
||||||
title=title,
|
title=title,
|
||||||
zoomx=xlims,
|
zoomx=xlims,
|
||||||
zoomy=ylims,
|
zoomy=ylims,
|
||||||
@ -3086,7 +3207,8 @@ class PickDlg(QDialog):
|
|||||||
self.cur_xlim = self.multicompfig.axes[0].get_xlim()
|
self.cur_xlim = self.multicompfig.axes[0].get_xlim()
|
||||||
self.cur_ylim = self.multicompfig.axes[0].get_ylim()
|
self.cur_ylim = self.multicompfig.axes[0].get_ylim()
|
||||||
# self.multicompfig.updateCurrentLimits()
|
# self.multicompfig.updateCurrentLimits()
|
||||||
data = self.getWFData().copy()
|
wfdata = self.getWFData().copy()
|
||||||
|
wfdata_comp = self.getWFDataComp().copy()
|
||||||
title = self.getStation()
|
title = self.getStation()
|
||||||
if filter:
|
if filter:
|
||||||
filtoptions = None
|
filtoptions = None
|
||||||
@ -3094,19 +3216,22 @@ class PickDlg(QDialog):
|
|||||||
filtoptions = self.getFilterOptions(self.getPhaseID(phase), gui_filter=True).parseFilterOptions()
|
filtoptions = self.getFilterOptions(self.getPhaseID(phase), gui_filter=True).parseFilterOptions()
|
||||||
|
|
||||||
if filtoptions is not None:
|
if filtoptions is not None:
|
||||||
data.detrend('linear')
|
for wfd in [wfdata, wfdata_comp]:
|
||||||
data.taper(0.02, type='cosine')
|
if wfd:
|
||||||
data.filter(**filtoptions)
|
wfd.detrend('linear')
|
||||||
|
wfd.taper(0.02, type='cosine')
|
||||||
|
wfd.filter(**filtoptions)
|
||||||
filtops_str = transformFilteroptions2String(filtoptions)
|
filtops_str = transformFilteroptions2String(filtoptions)
|
||||||
title += ' | Filteroptions: {}'.format(filtops_str)
|
title += ' | Filteroptions: {}'.format(filtops_str)
|
||||||
|
|
||||||
if self.wftype is not None:
|
if self.wftype is not None:
|
||||||
title += ' | ({})'.format(self.wftype)
|
title += ' | ({})'.format(self.wftype)
|
||||||
|
|
||||||
plot_additional = bool(self.compareChannel.currentText())
|
plot_additional = bool(self.referenceChannel.currentText())
|
||||||
additional_channel = self.compareChannel.currentText()
|
additional_channel = self.referenceChannel.currentText()
|
||||||
scale_channel = self.scaleChannel.currentText()
|
scale_channel = self.scaleChannel.currentText()
|
||||||
self.multicompfig.plotWFData(wfdata=data, title=title,
|
self.multicompfig.plotWFData(wfdata=wfdata, wfdata_compare=wfdata_comp,
|
||||||
|
title=title,
|
||||||
zoomx=self.getXLims(),
|
zoomx=self.getXLims(),
|
||||||
zoomy=self.getYLims(),
|
zoomy=self.getYLims(),
|
||||||
plot_additional=plot_additional,
|
plot_additional=plot_additional,
|
||||||
@ -3179,6 +3304,9 @@ class PickDlg(QDialog):
|
|||||||
self.resetZoom()
|
self.resetZoom()
|
||||||
self.refreshPlot()
|
self.refreshPlot()
|
||||||
|
|
||||||
|
def switchCompData(self):
|
||||||
|
self.showCompData = self.compareCB.isChecked()
|
||||||
|
|
||||||
def refreshPlot(self):
|
def refreshPlot(self):
|
||||||
if self.autoFilterAction.isChecked():
|
if self.autoFilterAction.isChecked():
|
||||||
self.filterActionP.setChecked(False)
|
self.filterActionP.setChecked(False)
|
||||||
@ -3709,7 +3837,7 @@ class TuneAutopicker(QWidget):
|
|||||||
self.stb_names = ['aicARHfig', 'refSpick', 'el_S1pick', 'el_S2pick']
|
self.stb_names = ['aicARHfig', 'refSpick', 'el_S1pick', 'el_S2pick']
|
||||||
|
|
||||||
def add_parameters(self):
|
def add_parameters(self):
|
||||||
self.paraBox = PylotParaBox(self.parameter, parent=self, windowflag=Qt.Widget)
|
self.paraBox = PylotParameterWidget(self.parameter, parent=self, windowflag=Qt.Widget)
|
||||||
self.paraBox.set_tune_mode(True)
|
self.paraBox.set_tune_mode(True)
|
||||||
self.update_eventID()
|
self.update_eventID()
|
||||||
self.parameter_layout.addWidget(self.paraBox)
|
self.parameter_layout.addWidget(self.paraBox)
|
||||||
@ -3790,11 +3918,13 @@ class TuneAutopicker(QWidget):
|
|||||||
location = None
|
location = None
|
||||||
|
|
||||||
wfdata = self.data.getWFData()
|
wfdata = self.data.getWFData()
|
||||||
|
wfdata_comp = self.data.getWFDataComp()
|
||||||
metadata = self.parent().metadata
|
metadata = self.parent().metadata
|
||||||
event = self.get_current_event()
|
event = self.get_current_event()
|
||||||
filteroptions = self.parent().filteroptions
|
filteroptions = self.parent().filteroptions
|
||||||
wftype = self.wftype if self.obspy_dmt else ''
|
wftype = self.wftype if self.obspy_dmt else ''
|
||||||
self.pickDlg = PickDlg(self.parent(), data=wfdata.select(station=station).copy(),
|
self.pickDlg = PickDlg(self.parent(), data=wfdata.select(station=station).copy(),
|
||||||
|
data_comp=wfdata_comp.select(station=station).copy(),
|
||||||
station=station, network=network,
|
station=station, network=network,
|
||||||
location=location, parameter=self.parameter,
|
location=location, parameter=self.parameter,
|
||||||
picks=self.get_current_event_picks(station),
|
picks=self.get_current_event_picks(station),
|
||||||
@ -3879,7 +4009,7 @@ class TuneAutopicker(QWidget):
|
|||||||
self.plot_manual_pick_to_ax(ax=ax, picks=picks, phase='S',
|
self.plot_manual_pick_to_ax(ax=ax, picks=picks, phase='S',
|
||||||
starttime=starttime, quality=qualitySpick)
|
starttime=starttime, quality=qualitySpick)
|
||||||
for canvas in self.parent().canvas_dict.values():
|
for canvas in self.parent().canvas_dict.values():
|
||||||
canvas.draw()
|
canvas.draw_idle()
|
||||||
|
|
||||||
def plot_manual_pick_to_ax(self, ax, picks, phase, starttime, quality):
|
def plot_manual_pick_to_ax(self, ax, picks, phase, starttime, quality):
|
||||||
mpp = picks[phase]['mpp'] - starttime
|
mpp = picks[phase]['mpp'] - starttime
|
||||||
@ -4074,7 +4204,7 @@ class TuneAutopicker(QWidget):
|
|||||||
self.qmb.show()
|
self.qmb.show()
|
||||||
|
|
||||||
|
|
||||||
class PylotParaBox(QtWidgets.QWidget):
|
class PylotParameterWidget(QtWidgets.QWidget):
|
||||||
accepted = QtCore.Signal(str)
|
accepted = QtCore.Signal(str)
|
||||||
rejected = QtCore.Signal(str)
|
rejected = QtCore.Signal(str)
|
||||||
|
|
||||||
@ -4188,6 +4318,11 @@ class PylotParaBox(QtWidgets.QWidget):
|
|||||||
grid = QtWidgets.QGridLayout()
|
grid = QtWidgets.QGridLayout()
|
||||||
|
|
||||||
for index1, name in enumerate(parameter_names):
|
for index1, name in enumerate(parameter_names):
|
||||||
|
if name in ['rootpath', 'database']:
|
||||||
|
logging.warning(
|
||||||
|
f'Deprecated parameter loaded: {name}. Check if datapath is still correct in parameter widget.'
|
||||||
|
)
|
||||||
|
continue
|
||||||
default_item = self.parameter.get_defaults()[name]
|
default_item = self.parameter.get_defaults()[name]
|
||||||
tooltip = default_item['tooltip']
|
tooltip = default_item['tooltip']
|
||||||
tooltip += ' | type: {}'.format(default_item['type'])
|
tooltip += ' | type: {}'.format(default_item['type'])
|
||||||
@ -4757,7 +4892,7 @@ class PropTab(QWidget):
|
|||||||
def getValues(self):
|
def getValues(self):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def resetValues(self, infile=None):
|
def resetValues(self, infile):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
@ -4794,8 +4929,8 @@ class InputsTab(PropTab):
|
|||||||
self.tstopBox = QSpinBox()
|
self.tstopBox = QSpinBox()
|
||||||
for spinbox in [self.tstartBox, self.tstopBox]:
|
for spinbox in [self.tstartBox, self.tstopBox]:
|
||||||
spinbox.setRange(-99999, 99999)
|
spinbox.setRange(-99999, 99999)
|
||||||
self.tstartBox.setValue(float(settings.value('tstart')) if get_None(settings.value('tstart')) else 0)
|
self.tstartBox.setValue(float(settings.value('tstart')) if get_none(settings.value('tstart')) else 0)
|
||||||
self.tstopBox.setValue(float(settings.value('tstop')) if get_None(settings.value('tstop')) else 1e6)
|
self.tstopBox.setValue(float(settings.value('tstop')) if get_none(settings.value('tstop')) else 1e6)
|
||||||
self.cuttimesLayout.addWidget(self.tstartBox, 10)
|
self.cuttimesLayout.addWidget(self.tstartBox, 10)
|
||||||
self.cuttimesLayout.addWidget(QLabel('[s] and'), 0)
|
self.cuttimesLayout.addWidget(QLabel('[s] and'), 0)
|
||||||
self.cuttimesLayout.addWidget(self.tstopBox, 10)
|
self.cuttimesLayout.addWidget(self.tstopBox, 10)
|
||||||
@ -4854,12 +4989,7 @@ class InputsTab(PropTab):
|
|||||||
else:
|
else:
|
||||||
index = 2
|
index = 2
|
||||||
datapath = para.get('datapath') if not para.get('datapath') is None else ''
|
datapath = para.get('datapath') if not para.get('datapath') is None else ''
|
||||||
rootpath = para.get('rootpath') if not para.get('rootpath') is None else ''
|
values = {"data/dataRoot": self.dataDirEdit.setText("%s" % datapath),
|
||||||
database = para.get('database') if not para.get('database') is None else ''
|
|
||||||
if isinstance(database, int):
|
|
||||||
database = str(database)
|
|
||||||
path = os.path.join(os.path.expanduser('~'), rootpath, datapath, database)
|
|
||||||
values = {"data/dataRoot": self.dataDirEdit.setText("%s" % path),
|
|
||||||
"user/FullName": self.fullNameEdit.text(),
|
"user/FullName": self.fullNameEdit.text(),
|
||||||
"data/Structure": self.structureSelect.setCurrentIndex(index),
|
"data/Structure": self.structureSelect.setCurrentIndex(index),
|
||||||
"tstart": self.tstartBox.setValue(0),
|
"tstart": self.tstartBox.setValue(0),
|
||||||
@ -5791,7 +5921,7 @@ class ChooseWaveFormWindow(QWidget):
|
|||||||
#self.currentSpectro = self.traces[
|
#self.currentSpectro = self.traces[
|
||||||
# self.chooseBoxTraces.currentText()[3:]][self.chooseBoxComponent.currentText()].spectrogram(show=False, title=t)
|
# self.chooseBoxTraces.currentText()[3:]][self.chooseBoxComponent.currentText()].spectrogram(show=False, title=t)
|
||||||
#self.currentSpectro.show()
|
#self.currentSpectro.show()
|
||||||
applyFFT()
|
self.applyFFT()
|
||||||
|
|
||||||
def applyFFT(self, trace):
|
def applyFFT(self, trace):
|
||||||
tra = self.traces[self.chooseBoxTraces.currentText()[3:]]['Z']
|
tra = self.traces[self.chooseBoxTraces.currentText()[3:]]['Z']
|
||||||
|
2
pylot/correlation/__init__.py
Normal file
2
pylot/correlation/__init__.py
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
95
pylot/correlation/parameters_adriaarray.yaml
Normal file
95
pylot/correlation/parameters_adriaarray.yaml
Normal file
@ -0,0 +1,95 @@
|
|||||||
|
############################# correlation parameters #####################################
|
||||||
|
# min_corr_stacking: minimum correlation coefficient for building beam trace
|
||||||
|
# min_corr_export: minimum correlation coefficient for pick export
|
||||||
|
# min_stack: minimum number of stations for building beam trace
|
||||||
|
# t_before: correlation window before pick
|
||||||
|
# t_after: correlation window after pick#
|
||||||
|
# cc_maxlag: maximum shift for initial correlation
|
||||||
|
# cc_maxlag2: maximum shift for second (final) correlation (also for calculating pick uncertainty)
|
||||||
|
# initial_pick_outlier_threshold: (hopefully) threshold for excluding large outliers of initial (AIC) picks
|
||||||
|
# export_threshold: automatically exclude all onsets which deviate more than this threshold from corrected taup onsets
|
||||||
|
# min_picks_export: minimum number of correlated picks for export
|
||||||
|
# min_picks_autopylot: minimum number of reference auto picks to continue with event
|
||||||
|
# check_RMS: do RMS check to search for restitution errors (very experimental)
|
||||||
|
# use_taupy_onsets: use taupy onsets as reference picks instead of external picks
|
||||||
|
# station_list: use the following stations as reference for stacking
|
||||||
|
# use_stacked_trace: use existing stacked trace if found (spare re-computation)
|
||||||
|
# data_dir: obspyDMT data subdirectory (e.g. 'raw', 'processed')
|
||||||
|
# pickfile_extension: use quakeML files (PyLoT output) with the following extension, e.g. '_autopylot' for pickfiles
|
||||||
|
# such as 'PyLoT_20170501_141822_autopylot.xml'
|
||||||
|
# dt_stacking: time shift for stacking (e.g. [0, 250] for 0 and 250 seconds shift)
|
||||||
|
# filter_options: filter for first correlation (rough)
|
||||||
|
# filter_options_final: filter for second correlation (fine)
|
||||||
|
# filter_type: e.g. 'bandpass'
|
||||||
|
# sampfreq: sampling frequency of the data
|
||||||
|
|
||||||
|
logging: info
|
||||||
|
pick_phases: ['P', 'S']
|
||||||
|
|
||||||
|
# P-phase
|
||||||
|
P:
|
||||||
|
min_corr_stacking: 0.8
|
||||||
|
min_corr_export: 0.6
|
||||||
|
min_stack: 20
|
||||||
|
t_before: 30.
|
||||||
|
t_after: 50.
|
||||||
|
cc_maxlag: 50.
|
||||||
|
cc_maxlag2: 5.
|
||||||
|
initial_pick_outlier_threshold: 30.
|
||||||
|
export_threshold: 2.5
|
||||||
|
min_picks_export: 100
|
||||||
|
min_picks_autopylot: 50
|
||||||
|
check_RMS: True
|
||||||
|
use_taupy_onsets: False
|
||||||
|
station_list: ['HU.MORH', 'HU.TIH', 'OX.FUSE', 'OX.BAD']
|
||||||
|
use_stacked_trace: False
|
||||||
|
data_dir: 'processed'
|
||||||
|
pickfile_extension: '_autopylot'
|
||||||
|
dt_stacking: [250, 250]
|
||||||
|
|
||||||
|
# filter for first correlation (rough)
|
||||||
|
filter_options:
|
||||||
|
freqmax: 0.5
|
||||||
|
freqmin: 0.03
|
||||||
|
# filter for second correlation (fine)
|
||||||
|
filter_options_final:
|
||||||
|
freqmax: 0.5
|
||||||
|
freqmin: 0.03
|
||||||
|
|
||||||
|
filter_type: bandpass
|
||||||
|
sampfreq: 20.0
|
||||||
|
|
||||||
|
# S-phase
|
||||||
|
S:
|
||||||
|
min_corr_stacking: 0.7
|
||||||
|
min_corr_export: 0.6
|
||||||
|
min_stack: 20
|
||||||
|
t_before: 60.
|
||||||
|
t_after: 60.
|
||||||
|
cc_maxlag: 100.
|
||||||
|
cc_maxlag2: 25.
|
||||||
|
initial_pick_outlier_threshold: 30.
|
||||||
|
export_threshold: 5.0
|
||||||
|
min_picks_export: 200
|
||||||
|
min_picks_autopylot: 50
|
||||||
|
check_RMS: True
|
||||||
|
use_taupy_onsets: False
|
||||||
|
station_list: ['HU.MORH','HU.TIH', 'OX.FUSE', 'OX.BAD']
|
||||||
|
use_stacked_trace: False
|
||||||
|
data_dir: 'processed'
|
||||||
|
pickfile_extension: '_autopylot'
|
||||||
|
dt_stacking: [250, 250]
|
||||||
|
|
||||||
|
# filter for first correlation (rough)
|
||||||
|
filter_options:
|
||||||
|
freqmax: 0.1
|
||||||
|
freqmin: 0.01
|
||||||
|
|
||||||
|
# filter for second correlation (fine)
|
||||||
|
filter_options_final:
|
||||||
|
freqmax: 0.2
|
||||||
|
freqmin: 0.01
|
||||||
|
|
||||||
|
filter_type: bandpass
|
||||||
|
sampfreq: 20.0
|
||||||
|
|
1991
pylot/correlation/pick_correlation_correction.py
Normal file
1991
pylot/correlation/pick_correlation_correction.py
Normal file
File diff suppressed because it is too large
Load Diff
40
pylot/correlation/submit_pick_corr_correction.sh
Executable file
40
pylot/correlation/submit_pick_corr_correction.sh
Executable file
@ -0,0 +1,40 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
#ulimit -s 8192
|
||||||
|
#ulimit -v $(ulimit -v | awk '{printf("%d",$1*0.95)}')
|
||||||
|
#ulimit -v
|
||||||
|
|
||||||
|
#655360
|
||||||
|
|
||||||
|
source /opt/anaconda3/etc/profile.d/conda.sh
|
||||||
|
conda activate pylot_311
|
||||||
|
NSLOTS=20
|
||||||
|
|
||||||
|
#qsub -l low -cwd -l "os=*stretch" -pe smp 40 submit_pick_corr_correction.sh
|
||||||
|
#$ -l low
|
||||||
|
#$ -l h_vmem=6G
|
||||||
|
#$ -cwd
|
||||||
|
#$ -pe smp 20
|
||||||
|
#$ -N corr_pick
|
||||||
|
|
||||||
|
|
||||||
|
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot_tools/"
|
||||||
|
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/"
|
||||||
|
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot/"
|
||||||
|
|
||||||
|
#export MKL_NUM_THREADS=${NSLOTS:=1}
|
||||||
|
#export NUMEXPR_NUM_THREADS=${NSLOTS:=1}
|
||||||
|
#export OMP_NUM_THREADS=${NSLOTS:=1}
|
||||||
|
|
||||||
|
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 0 -istop 100
|
||||||
|
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
|
||||||
|
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M6.0-6.5' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 0 -istop 100
|
||||||
|
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
|
||||||
|
#python pick_correlation_correction.py 'H:\sciebo\dmt_database' 'H:\Sciebo\dmt_database\pylot_alparray_mantle_corr_S_0.01-0.2.in' -pd -n 4 -t
|
||||||
|
|
||||||
|
pylot_infile='/home/marcel/.pylot/pylot_alparray_syn_fwi_mk6_it3.in'
|
||||||
|
#pylot_infile='/home/marcel/.pylot/pylot_adriaarray_corr_P_and_S.in'
|
||||||
|
|
||||||
|
# THIS SCRIPT SHOLD BE CALLED BY "submit_to_grid_engine.py" using the following line:
|
||||||
|
python pick_correlation_correction.py $1 $pylot_infile -pd -n ${NSLOTS:=1} -istart $2 --params 'parameters_fwi_mk6_it3.yaml'
|
||||||
|
#--event_blacklist eventlist.txt
|
23
pylot/correlation/submit_to_grid_engine.py
Executable file
23
pylot/correlation/submit_to_grid_engine.py
Executable file
@ -0,0 +1,23 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
fnames = [
|
||||||
|
('/data/AlpArray_Data/dmt_database_synth_model_mk6_it3_no_rotation', 0),
|
||||||
|
]
|
||||||
|
|
||||||
|
#fnames = [('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_SKS-phase', 0),
|
||||||
|
# ('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_S-phase', 0),]
|
||||||
|
|
||||||
|
####
|
||||||
|
script_location = '/home/marcel/VersionCtrl/git/code_base/correlation_picker/submit_pick_corr_correction.sh'
|
||||||
|
####
|
||||||
|
|
||||||
|
for fnin, istart in fnames:
|
||||||
|
input_cmds = f'qsub -q low.q@minos15,low.q@minos14,low.q@minos13,low.q@minos12,low.q@minos11 {script_location} {fnin} {istart}'
|
||||||
|
|
||||||
|
print(input_cmds)
|
||||||
|
print(subprocess.check_output(input_cmds.split()))
|
||||||
|
|
||||||
|
|
||||||
|
|
61
pylot/correlation/utils.py
Normal file
61
pylot/correlation/utils.py
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import os
|
||||||
|
import glob
|
||||||
|
import json
|
||||||
|
|
||||||
|
from obspy import read_events
|
||||||
|
|
||||||
|
from pylot.core.util.dataprocessing import Metadata
|
||||||
|
from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT
|
||||||
|
|
||||||
|
|
||||||
|
def get_event_obspy_dmt(eventdir):
|
||||||
|
event_pkl_file = os.path.join(eventdir, 'info', 'event.pkl')
|
||||||
|
if not os.path.exists(event_pkl_file):
|
||||||
|
raise IOError('Could not find event path for event: {}'.format(eventdir))
|
||||||
|
event = qml_from_obspyDMT(event_pkl_file)
|
||||||
|
return event
|
||||||
|
|
||||||
|
|
||||||
|
def get_event_pylot(eventdir, extension=''):
|
||||||
|
event_id = get_event_id(eventdir)
|
||||||
|
filename = os.path.join(eventdir, 'PyLoT_{}{}.xml'.format(event_id, extension))
|
||||||
|
if not os.path.isfile(filename):
|
||||||
|
return
|
||||||
|
cat = read_events(filename)
|
||||||
|
return cat[0]
|
||||||
|
|
||||||
|
|
||||||
|
def get_event_id(eventdir):
|
||||||
|
event_id = os.path.split(eventdir)[-1]
|
||||||
|
return event_id
|
||||||
|
|
||||||
|
|
||||||
|
def get_picks(eventdir, extension=''):
|
||||||
|
event_id = get_event_id(eventdir)
|
||||||
|
filename = 'PyLoT_{}{}.xml'
|
||||||
|
filename = filename.format(event_id, extension)
|
||||||
|
fpath = os.path.join(eventdir, filename)
|
||||||
|
fpaths = glob.glob(fpath)
|
||||||
|
if len(fpaths) == 1:
|
||||||
|
cat = read_events(fpaths[0])
|
||||||
|
picks = cat[0].picks
|
||||||
|
return picks
|
||||||
|
elif len(fpaths) == 0:
|
||||||
|
print('get_picks: File not found: {}'.format(fpath))
|
||||||
|
return
|
||||||
|
print(f'WARNING: Ambiguous pick file specification. Found the following pick files {fpaths}\nFilemask: {fpath}')
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
def write_json(object, fname):
|
||||||
|
with open(fname, 'w') as outfile:
|
||||||
|
json.dump(object, outfile, sort_keys=True, indent=4)
|
||||||
|
|
||||||
|
|
||||||
|
def get_metadata(eventdir):
|
||||||
|
metadata_path = os.path.join(eventdir, 'resp')
|
||||||
|
metadata = Metadata(inventory=metadata_path, verbosity=0)
|
||||||
|
return metadata
|
@ -1,12 +1,7 @@
|
|||||||
# This file may be used to create an environment using:
|
Cartopy==0.23.0
|
||||||
# $ conda create --name <env> --file <this file>
|
joblib==1.4.2
|
||||||
# platform: win-64
|
obspy==1.4.1
|
||||||
cartopy=0.20.2
|
pyaml==24.7.0
|
||||||
matplotlib-base=3.3.4
|
pyqtgraph==0.13.7
|
||||||
numpy=1.22.3
|
PySide2==5.15.8
|
||||||
obspy=1.3.0
|
pytest==8.3.2
|
||||||
pyqtgraph=0.12.4
|
|
||||||
pyside2=5.13.2
|
|
||||||
python=3.8.12
|
|
||||||
qt=5.12.9
|
|
||||||
scipy=1.8.0
|
|
18634
tests/test_autopicker/PyLoT_20171010_063224.a_autopylot.xml
Normal file
18634
tests/test_autopicker/PyLoT_20171010_063224.a_autopylot.xml
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
18634
tests/test_autopicker/PyLoT_20171010_063224.a_saved_from_GUI.xml
Normal file
18634
tests/test_autopicker/PyLoT_20171010_063224.a_saved_from_GUI.xml
Normal file
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,99 @@
|
|||||||
|
%This is a parameter input file for PyLoT/autoPyLoT.
|
||||||
|
%All main and special settings regarding data handling
|
||||||
|
%and picking are to be set here!
|
||||||
|
%Parameters are optimized for %extent data sets!
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#main settings#
|
||||||
|
dmt_database_test #datapath# %data path
|
||||||
|
20171010_063224.a #eventID# %event ID for single event processing (* for all events found in database)
|
||||||
|
#invdir# %full path to inventory or dataless-seed file
|
||||||
|
PILOT #datastructure# %choose data structure
|
||||||
|
True #apverbose# %choose 'True' or 'False' for terminal output
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#NLLoc settings#
|
||||||
|
None #nllocbin# %path to NLLoc executable
|
||||||
|
None #nllocroot# %root of NLLoc-processing directory
|
||||||
|
None #phasefile# %name of autoPyLoT-output phase file for NLLoc
|
||||||
|
None #ctrfile# %name of autoPyLoT-output control file for NLLoc
|
||||||
|
ttime #ttpatter# %pattern of NLLoc ttimes from grid
|
||||||
|
AUTOLOC_nlloc #outpatter# %pattern of NLLoc-output file
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#parameters for seismic moment estimation#
|
||||||
|
3530.0 #vp# %average P-wave velocity
|
||||||
|
2500.0 #rho# %average rock density [kg/m^3]
|
||||||
|
300.0 0.8 #Qp# %quality factor for P waves (Qp*f^a); list(Qp, a)
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#settings local magnitude#
|
||||||
|
1.0 1.0 1.0 #WAscaling# %Scaling relation (log(Ao)+Alog(r)+Br+C) of Wood-Anderson amplitude Ao [nm] If zeros are set, original Richter magnitude is calculated!
|
||||||
|
1.0 1.0 #magscaling# %Scaling relation for derived local magnitude [a*Ml+b]. If zeros are set, no scaling of network magnitude is applied!
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#filter settings#
|
||||||
|
0.03 0.03 #minfreq# %Lower filter frequency [P, S]
|
||||||
|
0.5 0.5 #maxfreq# %Upper filter frequency [P, S]
|
||||||
|
4 4 #filter_order# %filter order [P, S]
|
||||||
|
bandpass bandpass #filter_type# %filter type (bandpass, bandstop, lowpass, highpass) [P, S]
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#common settings picker#
|
||||||
|
global #extent# %extent of array ("local", "regional" or "global")
|
||||||
|
-100.0 #pstart# %start time [s] for calculating CF for P-picking (if TauPy: seconds relative to estimated onset)
|
||||||
|
50.0 #pstop# %end time [s] for calculating CF for P-picking (if TauPy: seconds relative to estimated onset)
|
||||||
|
-50.0 #sstart# %start time [s] relative to P-onset for calculating CF for S-picking
|
||||||
|
50.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
||||||
|
True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
||||||
|
ak135 #taup_model# %Define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
|
||||||
|
P,Pdiff,S,SKS #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
|
||||||
|
0.03 0.5 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
||||||
|
0.01 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
||||||
|
0.03 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
||||||
|
0.01 0.5 #bph2# %lower/upper corner freq. of second band pass filter z-comp. [Hz]
|
||||||
|
#special settings for calculating CF#
|
||||||
|
%!!Edit the following only if you know what you are doing!!%
|
||||||
|
#Z-component#
|
||||||
|
HOS #algoP# %choose algorithm for P-onset determination (HOS, ARZ, or AR3)
|
||||||
|
300.0 #tlta# %for HOS-/AR-AIC-picker, length of LTA window [s]
|
||||||
|
4 #hosorder# %for HOS-picker, order of Higher Order Statistics
|
||||||
|
2 #Parorder# %for AR-picker, order of AR process of Z-component
|
||||||
|
16.0 #tdet1z# %for AR-picker, length of AR determination window [s] for Z-component, 1st pick
|
||||||
|
10.0 #tpred1z# %for AR-picker, length of AR prediction window [s] for Z-component, 1st pick
|
||||||
|
12.0 #tdet2z# %for AR-picker, length of AR determination window [s] for Z-component, 2nd pick
|
||||||
|
6.0 #tpred2z# %for AR-picker, length of AR prediction window [s] for Z-component, 2nd pick
|
||||||
|
0.001 #addnoise# %add noise to seismogram for stable AR prediction
|
||||||
|
60.0 5.0 20.0 12.0 #tsnrz# %for HOS/AR, window lengths for SNR-and slope estimation [tnoise, tsafetey, tsignal, tslope] [s]
|
||||||
|
50.0 #pickwinP# %for initial AIC pick, length of P-pick window [s]
|
||||||
|
30.0 #Precalcwin# %for HOS/AR, window length [s] for recalculation of CF (relative to 1st pick)
|
||||||
|
2.0 #aictsmooth# %for HOS/AR, take average of samples for smoothing of AIC-function [s]
|
||||||
|
2.0 #tsmoothP# %for HOS/AR, take average of samples in this time window for smoothing CF [s]
|
||||||
|
0.006 #ausP# %for HOS/AR, artificial uplift of samples (aus) of CF (P)
|
||||||
|
2.0 #nfacP# %for HOS/AR, noise factor for noise level determination (P)
|
||||||
|
#H-components#
|
||||||
|
ARH #algoS# %choose algorithm for S-onset determination (ARH or AR3)
|
||||||
|
12.0 #tdet1h# %for HOS/AR, length of AR-determination window [s], H-components, 1st pick
|
||||||
|
6.0 #tpred1h# %for HOS/AR, length of AR-prediction window [s], H-components, 1st pick
|
||||||
|
8.0 #tdet2h# %for HOS/AR, length of AR-determinaton window [s], H-components, 2nd pick
|
||||||
|
4.0 #tpred2h# %for HOS/AR, length of AR-prediction window [s], H-components, 2nd pick
|
||||||
|
4 #Sarorder# %for AR-picker, order of AR process of H-components
|
||||||
|
100.0 #Srecalcwin# %for AR-picker, window length [s] for recalculation of CF (2nd pick) (H)
|
||||||
|
195.0 #pickwinS# %for initial AIC pick, length of S-pick window [s]
|
||||||
|
60.0 10.0 30.0 12.0 #tsnrh# %for ARH/AR3, window lengths for SNR-and slope estimation [tnoise, tsafetey, tsignal, tslope] [s]
|
||||||
|
22.0 #aictsmoothS# %for AIC-picker, take average of samples in this time window for smoothing of AIC-function [s]
|
||||||
|
20.0 #tsmoothS# %for AR-picker, take average of samples for smoothing CF [s] (S)
|
||||||
|
0.001 #ausS# %for HOS/AR, artificial uplift of samples (aus) of CF (S)
|
||||||
|
2.0 #nfacS# %for AR-picker, noise factor for noise level determination (S)
|
||||||
|
#first-motion picker#
|
||||||
|
1 #minfmweight# %minimum required P weight for first-motion determination
|
||||||
|
3.0 #minFMSNR# %miniumum required SNR for first-motion determination
|
||||||
|
10.0 #fmpickwin# %pick window [s] around P onset for calculating zero crossings
|
||||||
|
#quality assessment#
|
||||||
|
0.1 0.2 0.4 0.8 #timeerrorsP# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for P
|
||||||
|
4.0 8.0 16.0 32.0 #timeerrorsS# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for S
|
||||||
|
0.005 #minAICPslope# %below this slope [counts/s] the initial P pick is rejected
|
||||||
|
1.1 #minAICPSNR# %below this SNR the initial P pick is rejected
|
||||||
|
0.002 #minAICSslope# %below this slope [counts/s] the initial S pick is rejected
|
||||||
|
1.3 #minAICSSNR# %below this SNR the initial S pick is rejected
|
||||||
|
20.0 #minsiglength# %length of signal part for which amplitudes must exceed noiselevel [s]
|
||||||
|
1.0 #noisefactor# %noiselevel*noisefactor=threshold
|
||||||
|
10.0 #minpercent# %required percentage of amplitudes exceeding threshold
|
||||||
|
0.1 #zfac# %P-amplitude must exceed at least zfac times RMS-S amplitude
|
||||||
|
100.0 #mdttolerance# %maximum allowed deviation of P picks from median [s]
|
||||||
|
50.0 #wdttolerance# %maximum allowed deviation from Wadati-diagram
|
||||||
|
25.0 #jackfactor# %pick is removed if the variance of the subgroup with the pick removed is larger than the mean variance of all subgroups times safety factor
|
67
tests/test_autopicker/test_autopylot.py
Normal file
67
tests/test_autopicker/test_autopylot.py
Normal file
@ -0,0 +1,67 @@
|
|||||||
|
import os
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from obspy import read_events
|
||||||
|
|
||||||
|
from autoPyLoT import autoPyLoT
|
||||||
|
|
||||||
|
|
||||||
|
class TestAutopickerGlobal():
    """End-to-end test of the automatic picker on a downloaded obspyDMT test event."""

    def init(self):
        """Set up file locations and report whether all required inputs exist.

        Returns True when both the parameter file and the test event directory
        are present, False otherwise (with a hint printed to stdout).
        """
        self.params_infile = 'pylot_alparray_mantle_corr_stack_0.03-0.5.in'
        self.test_event_dir = 'dmt_database_test'
        self.fname_outfile_xml = os.path.join(
            self.test_event_dir, '20171010_063224.a', 'PyLoT_20171010_063224.a_autopylot.xml'
        )

        # check if the input files exist
        if not os.path.isfile(self.params_infile):
            print(f'Test input file {os.path.abspath(self.params_infile)} not found.')
            return False

        if not os.path.exists(self.test_event_dir):
            print(
                f'Test event directory not found at location "{os.path.abspath(self.test_event_dir)}". '
                f'Make sure to load it from the website first.'
            )
            return False

        return True

    def test_autopicker(self):
        """Run autoPyLoT on the test event and compare its picks against references."""
        assert self.init(), 'Initialization failed due to missing input files.'

        # remove any stale output from a previous run so the picker writes fresh results
        if os.path.isfile(self.fname_outfile_xml):
            os.remove(self.fname_outfile_xml)

        autoPyLoT(inputfile=self.params_infile, eventid='20171010_063224.a', obspyDMT_wfpath='processed')

        # known reference pickfiles and whether they should match the new output
        reference_files = [
            ('PyLoT_20171010_063224.a_autopylot.xml', True),
            ('PyLoT_20171010_063224.a_saved_from_GUI.xml', True),
            ('PyLoT_20171010_063224.a_corrected_taup_times_0.03-0.5_P.xml', False),
        ]
        for reference_file, expect_equal in reference_files:
            compare_pickfiles(self.fname_outfile_xml, reference_file, expect_equal)
|
||||||
|
|
||||||
|
|
||||||
|
def compare_pickfiles(pickfile1: str, pickfile2: str, samefile: bool = True) -> None:
    """
    Compare the pick times and time errors stored in two pick files.

    Parameters:
    pickfile1 (str): The path to the first pick file.
    pickfile2 (str): The path to the second pick file.
    samefile (bool): A flag indicating whether the two files are expected to be the same. Defaults to True.

    Returns:
    None
    """
    def picks_sorted_by_station(path):
        # read the event catalog and return its picks in a deterministic order
        catalog = read_events(path)
        return sorted(catalog[0].picks, key=lambda p: str(p.waveform_id))

    picks1 = picks_sorted_by_station(pickfile1)
    picks2 = picks_sorted_by_station(pickfile2)

    times_equal = [p.time for p in picks1] == [p.time for p in picks2]
    terrs_equal = [p.time_errors for p in picks1] == [p.time_errors for p in picks2]

    # equality must match the caller's expectation in both directions
    assert times_equal is samefile, 'Pick times error'
    assert terrs_equal is samefile, 'Pick time errors errors'
|
@ -4,10 +4,8 @@
|
|||||||
%Parameters are optimized for %extent data sets!
|
%Parameters are optimized for %extent data sets!
|
||||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
#main settings#
|
#main settings#
|
||||||
/home/darius #rootpath# %project path
|
/home/darius/alparray/waveforms_used #datapath# %data path
|
||||||
alparray #datapath# %data path
|
e0093.173.16 #eventID# %event ID for single event processing (* for all events found in datapath)
|
||||||
waveforms_used #database# %name of data base
|
|
||||||
e0093.173.16 #eventID# %event ID for single event processing (* for all events found in database)
|
|
||||||
/home/darius/alparray/metadata #invdir# %full path to inventory or dataless-seed file
|
/home/darius/alparray/metadata #invdir# %full path to inventory or dataless-seed file
|
||||||
PILOT #datastructure# %choose data structure
|
PILOT #datastructure# %choose data structure
|
||||||
True #apverbose# %choose 'True' or 'False' for terminal output
|
True #apverbose# %choose 'True' or 'False' for terminal output
|
||||||
@ -43,6 +41,7 @@ global #extent# %extent of a
|
|||||||
875.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
875.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
||||||
False #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
False #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
||||||
IASP91 #taup_model# %define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
|
IASP91 #taup_model# %define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
|
||||||
|
P,Pdiff,S,Sdiff #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
|
||||||
0.01 0.1 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
0.01 0.1 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
||||||
0.001 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
0.001 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
||||||
0.01 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
0.01 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
||||||
|
@ -4,10 +4,8 @@
|
|||||||
%Parameters are optimized for %extent data sets!
|
%Parameters are optimized for %extent data sets!
|
||||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
#main settings#
|
#main settings#
|
||||||
/home/darius #rootpath# %project path
|
/home/darius/alparray/waveforms_used #datapath# %data path
|
||||||
alparray #datapath# %data path
|
e0093.173.16 #eventID# %event ID for single event processing (* for all events found in datapath)
|
||||||
waveforms_used #database# %name of data base
|
|
||||||
e0093.173.16 #eventID# %event ID for single event processing (* for all events found in database)
|
|
||||||
/home/darius/alparray/metadata #invdir# %full path to inventory or dataless-seed file
|
/home/darius/alparray/metadata #invdir# %full path to inventory or dataless-seed file
|
||||||
PILOT #datastructure# %choose data structure
|
PILOT #datastructure# %choose data structure
|
||||||
True #apverbose# %choose 'True' or 'False' for terminal output
|
True #apverbose# %choose 'True' or 'False' for terminal output
|
||||||
@ -43,6 +41,7 @@ global #extent# %extent of a
|
|||||||
875.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
875.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
||||||
True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
||||||
IASP91 #taup_model# %define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
|
IASP91 #taup_model# %define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
|
||||||
|
P,Pdiff,S,Sdiff #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
|
||||||
0.01 0.1 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
0.01 0.1 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
||||||
0.001 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
0.001 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
||||||
0.01 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
0.01 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import unittest
|
import unittest
|
||||||
|
import pytest
|
||||||
|
|
||||||
import obspy
|
import obspy
|
||||||
from obspy import UTCDateTime
|
from obspy import UTCDateTime
|
||||||
@ -105,7 +106,6 @@ class TestAutopickStation(unittest.TestCase):
|
|||||||
# show complete diff when difference in results dictionaries are found
|
# show complete diff when difference in results dictionaries are found
|
||||||
self.maxDiff = None
|
self.maxDiff = None
|
||||||
|
|
||||||
# @skip("Works")
|
|
||||||
def test_autopickstation_taupy_disabled_gra1(self):
|
def test_autopickstation_taupy_disabled_gra1(self):
|
||||||
expected = {
|
expected = {
|
||||||
'P': {'picker': 'auto', 'snrdb': 15.405649120980094, 'weight': 0, 'Mo': None, 'marked': [], 'Mw': None,
|
'P': {'picker': 'auto', 'snrdb': 15.405649120980094, 'weight': 0, 'Mo': None, 'marked': [], 'Mw': None,
|
||||||
@ -121,8 +121,8 @@ class TestAutopickStation(unittest.TestCase):
|
|||||||
with HidePrints():
|
with HidePrints():
|
||||||
result, station = autopickstation(wfstream=self.gra1, pickparam=self.pickparam_taupy_disabled,
|
result, station = autopickstation(wfstream=self.gra1, pickparam=self.pickparam_taupy_disabled,
|
||||||
metadata=(None, None))
|
metadata=(None, None))
|
||||||
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
|
compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
|
||||||
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
|
compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
|
||||||
self.assertEqual('GRA1', station)
|
self.assertEqual('GRA1', station)
|
||||||
|
|
||||||
def test_autopickstation_taupy_enabled_gra1(self):
|
def test_autopickstation_taupy_enabled_gra1(self):
|
||||||
@ -140,8 +140,8 @@ class TestAutopickStation(unittest.TestCase):
|
|||||||
with HidePrints():
|
with HidePrints():
|
||||||
result, station = autopickstation(wfstream=self.gra1, pickparam=self.pickparam_taupy_enabled,
|
result, station = autopickstation(wfstream=self.gra1, pickparam=self.pickparam_taupy_enabled,
|
||||||
metadata=self.metadata, origin=self.origin)
|
metadata=self.metadata, origin=self.origin)
|
||||||
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
|
compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
|
||||||
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
|
compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
|
||||||
self.assertEqual('GRA1', station)
|
self.assertEqual('GRA1', station)
|
||||||
|
|
||||||
def test_autopickstation_taupy_disabled_gra2(self):
|
def test_autopickstation_taupy_disabled_gra2(self):
|
||||||
@ -157,8 +157,8 @@ class TestAutopickStation(unittest.TestCase):
|
|||||||
with HidePrints():
|
with HidePrints():
|
||||||
result, station = autopickstation(wfstream=self.gra2, pickparam=self.pickparam_taupy_disabled,
|
result, station = autopickstation(wfstream=self.gra2, pickparam=self.pickparam_taupy_disabled,
|
||||||
metadata=(None, None))
|
metadata=(None, None))
|
||||||
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
|
compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
|
||||||
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
|
compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
|
||||||
self.assertEqual('GRA2', station)
|
self.assertEqual('GRA2', station)
|
||||||
|
|
||||||
def test_autopickstation_taupy_enabled_gra2(self):
|
def test_autopickstation_taupy_enabled_gra2(self):
|
||||||
@ -175,8 +175,8 @@ class TestAutopickStation(unittest.TestCase):
|
|||||||
with HidePrints():
|
with HidePrints():
|
||||||
result, station = autopickstation(wfstream=self.gra2, pickparam=self.pickparam_taupy_enabled,
|
result, station = autopickstation(wfstream=self.gra2, pickparam=self.pickparam_taupy_enabled,
|
||||||
metadata=self.metadata, origin=self.origin)
|
metadata=self.metadata, origin=self.origin)
|
||||||
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
|
compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
|
||||||
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
|
compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
|
||||||
self.assertEqual('GRA2', station)
|
self.assertEqual('GRA2', station)
|
||||||
|
|
||||||
def test_autopickstation_taupy_disabled_ech(self):
|
def test_autopickstation_taupy_disabled_ech(self):
|
||||||
@ -190,8 +190,8 @@ class TestAutopickStation(unittest.TestCase):
|
|||||||
'fm': None, 'spe': None, 'channel': u'LHE'}}
|
'fm': None, 'spe': None, 'channel': u'LHE'}}
|
||||||
with HidePrints():
|
with HidePrints():
|
||||||
result, station = autopickstation(wfstream=self.ech, pickparam=self.pickparam_taupy_disabled)
|
result, station = autopickstation(wfstream=self.ech, pickparam=self.pickparam_taupy_disabled)
|
||||||
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
|
compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
|
||||||
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
|
compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
|
||||||
self.assertEqual('ECH', station)
|
self.assertEqual('ECH', station)
|
||||||
|
|
||||||
def test_autopickstation_taupy_enabled_ech(self):
|
def test_autopickstation_taupy_enabled_ech(self):
|
||||||
@ -208,8 +208,8 @@ class TestAutopickStation(unittest.TestCase):
|
|||||||
with HidePrints():
|
with HidePrints():
|
||||||
result, station = autopickstation(wfstream=self.ech, pickparam=self.pickparam_taupy_enabled,
|
result, station = autopickstation(wfstream=self.ech, pickparam=self.pickparam_taupy_enabled,
|
||||||
metadata=self.metadata, origin=self.origin)
|
metadata=self.metadata, origin=self.origin)
|
||||||
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
|
compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
|
||||||
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
|
compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
|
||||||
self.assertEqual('ECH', station)
|
self.assertEqual('ECH', station)
|
||||||
|
|
||||||
def test_autopickstation_taupy_disabled_fiesa(self):
|
def test_autopickstation_taupy_disabled_fiesa(self):
|
||||||
@ -224,8 +224,8 @@ class TestAutopickStation(unittest.TestCase):
|
|||||||
'fm': None, 'spe': None, 'channel': u'LHE'}}
|
'fm': None, 'spe': None, 'channel': u'LHE'}}
|
||||||
with HidePrints():
|
with HidePrints():
|
||||||
result, station = autopickstation(wfstream=self.fiesa, pickparam=self.pickparam_taupy_disabled)
|
result, station = autopickstation(wfstream=self.fiesa, pickparam=self.pickparam_taupy_disabled)
|
||||||
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
|
compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
|
||||||
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
|
compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
|
||||||
self.assertEqual('FIESA', station)
|
self.assertEqual('FIESA', station)
|
||||||
|
|
||||||
def test_autopickstation_taupy_enabled_fiesa(self):
|
def test_autopickstation_taupy_enabled_fiesa(self):
|
||||||
@ -242,8 +242,8 @@ class TestAutopickStation(unittest.TestCase):
|
|||||||
with HidePrints():
|
with HidePrints():
|
||||||
result, station = autopickstation(wfstream=self.fiesa, pickparam=self.pickparam_taupy_enabled,
|
result, station = autopickstation(wfstream=self.fiesa, pickparam=self.pickparam_taupy_enabled,
|
||||||
metadata=self.metadata, origin=self.origin)
|
metadata=self.metadata, origin=self.origin)
|
||||||
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
|
compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
|
||||||
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
|
compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
|
||||||
self.assertEqual('FIESA', station)
|
self.assertEqual('FIESA', station)
|
||||||
|
|
||||||
def test_autopickstation_gra1_z_comp_missing(self):
|
def test_autopickstation_gra1_z_comp_missing(self):
|
||||||
@ -272,7 +272,8 @@ class TestAutopickStation(unittest.TestCase):
|
|||||||
with HidePrints():
|
with HidePrints():
|
||||||
result, station = autopickstation(wfstream=wfstream, pickparam=self.pickparam_taupy_disabled,
|
result, station = autopickstation(wfstream=wfstream, pickparam=self.pickparam_taupy_disabled,
|
||||||
metadata=(None, None))
|
metadata=(None, None))
|
||||||
self.assertEqual(expected, result)
|
compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
|
||||||
|
compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
|
||||||
self.assertEqual('GRA1', station)
|
self.assertEqual('GRA1', station)
|
||||||
|
|
||||||
def test_autopickstation_a106_taupy_enabled(self):
|
def test_autopickstation_a106_taupy_enabled(self):
|
||||||
@ -290,7 +291,9 @@ class TestAutopickStation(unittest.TestCase):
|
|||||||
with HidePrints():
|
with HidePrints():
|
||||||
result, station = autopickstation(wfstream=self.a106, pickparam=self.pickparam_taupy_enabled,
|
result, station = autopickstation(wfstream=self.a106, pickparam=self.pickparam_taupy_enabled,
|
||||||
metadata=self.metadata, origin=self.origin)
|
metadata=self.metadata, origin=self.origin)
|
||||||
self.assertEqual(expected, result)
|
compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
|
||||||
|
compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
|
||||||
|
|
||||||
|
|
||||||
def test_autopickstation_station_missing_in_metadata(self):
|
def test_autopickstation_station_missing_in_metadata(self):
|
||||||
"""This station is not in the metadata, but Taupy is enabled. Taupy should exit cleanly and modify the starttime
|
"""This station is not in the metadata, but Taupy is enabled. Taupy should exit cleanly and modify the starttime
|
||||||
@ -311,8 +314,37 @@ class TestAutopickStation(unittest.TestCase):
|
|||||||
with HidePrints():
|
with HidePrints():
|
||||||
result, station = autopickstation(wfstream=self.a005a, pickparam=self.pickparam_taupy_enabled,
|
result, station = autopickstation(wfstream=self.a005a, pickparam=self.pickparam_taupy_enabled,
|
||||||
metadata=self.metadata, origin=self.origin)
|
metadata=self.metadata, origin=self.origin)
|
||||||
self.assertEqual(expected, result)
|
compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
|
||||||
|
compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
|
||||||
|
|
||||||
|
|
||||||
|
def run_dict_comparison(result, expected):
    """Recursively assert that every entry in ``expected`` is matched in ``result``.

    Nested dicts are compared key by key; UTCDateTime pairs are compared via
    their POSIX timestamps so pytest.approx can handle tiny float differences.
    """
    for key in expected:
        want = expected[key]
        if isinstance(want, dict):
            # descend into nested result dictionaries
            run_dict_comparison(result[key], want)
            continue
        got = result[key]
        if isinstance(got, UTCDateTime) and isinstance(want, UTCDateTime):
            # compare times numerically instead of as objects
            got = got.timestamp
            want = want.timestamp
        assert want == pytest.approx(got), f'{key}: {want} != {got}'
|
||||||
|
|
||||||
|
|
||||||
|
def compare_dicts(result, expected, hint=''):
    """
    Compare two (possibly nested) pick-result dictionaries and raise a readable
    error showing both full dictionaries when they differ.

    Parameters:
    result (dict): The dictionary produced by the code under test.
    expected (dict): The dictionary with the expected values.
    hint (str): Prefix for the error message (e.g. 'P-' or 'S-'). Defaults to ''.

    Raises:
    AssertionError: If any entry differs. The key-level assertion from
        run_dict_comparison is attached as the explicit cause so the
        offending key stays visible in the traceback.
    """
    try:
        run_dict_comparison(result, expected)
    except AssertionError as err:
        # chain explicitly (raise ... from err) instead of implicitly, so the
        # traceback marks the key-level failure as the direct cause
        raise AssertionError(f'{hint}Dictionaries not equal.'
                             f'\n\n<<Expected>>\n{pretty_print_dict(expected)}'
                             f'\n\n<<Result>>\n{pretty_print_dict(result)}') from err
|
||||||
|
|
||||||
|
|
||||||
|
def pretty_print_dict(dct):
    """Render ``dct`` as one ``key : value`` line per entry, sorted by key."""
    ordered = sorted(dct.items(), key=lambda item: item[0])
    return ''.join(f"{key} : {value}\n" for key, value in ordered)
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
@ -0,0 +1,76 @@
|
|||||||
|
import pytest
|
||||||
|
from obspy import read, Trace, UTCDateTime
|
||||||
|
|
||||||
|
from pylot.correlation.pick_correlation_correction import XCorrPickCorrection
|
||||||
|
|
||||||
|
|
||||||
|
class TestXCorrPickCorrection():
    """Tests for XCorrPickCorrection using the obspy example stream."""

    def setup(self):
        """Prepare test traces, test picks and the shared correlation window parameters."""
        self.make_test_traces()
        self.make_test_picks()
        self.t_before = 2.
        self.t_after = 2.
        self.cc_maxlag = 0.5

    def make_test_traces(self):
        """Build a filtered reference trace and an identical copy delayed by 0.1 s."""
        reference = read()[0]
        reference.filter('bandpass', freqmin=1, freqmax=20)
        delayed = reference.copy()
        delayed.stats.starttime += 0.1

        self.trace1 = reference
        self.trace2 = delayed

    def make_test_picks(self):
        """Create a reference pick and a second pick offset by 0.2 s."""
        self.tpick1 = UTCDateTime('2009-08-24T00:20:07.7')
        # the trace copy is shifted by 0.1 s, so with a 0.2 s pick offset the
        # expected correlation correction is around 0.1 s
        self.tpick2 = self.tpick1 + 0.2

    def test_slice_trace_okay(self):
        """slice_trace must cut [pick - t_before - maxlag/2, pick + t_after + maxlag/2]."""
        self.setup()
        xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
                                   t_before=self.t_before, t_after=self.t_after, cc_maxlag=self.cc_maxlag)

        window = xcpc.slice_trace(self.trace1, self.tpick2)
        expected_start = self.tpick2 - self.t_before - self.cc_maxlag / 2
        expected_end = self.tpick2 + self.t_after + self.cc_maxlag / 2
        assert ((window.stats.starttime == expected_start)
                and (window.stats.endtime == expected_end))

    def test_slice_trace_fails(self):
        """slice_trace must raise ValueError when the window leaves the trace."""
        self.setup()

        # window would start before the trace begins
        with pytest.raises(ValueError):
            too_early = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
                                            t_before=self.t_before + 20, t_after=self.t_after,
                                            cc_maxlag=self.cc_maxlag)
            too_early.slice_trace(self.trace1, self.tpick1)

        # window would end after the trace ends
        with pytest.raises(ValueError):
            too_late = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
                                           t_before=self.t_before, t_after=self.t_after + 50,
                                           cc_maxlag=self.cc_maxlag)
            too_late.slice_trace(self.trace1, self.tpick1)

    def test_cross_correlation(self):
        """cross_correlation should recover roughly the 0.1 s trace shift."""
        self.setup()

        xcpc = XCorrPickCorrection(self.tpick1, self.trace1, self.tpick2, self.trace2, t_before=self.t_before,
                                   t_after=self.t_after, cc_maxlag=self.cc_maxlag)

        correction, cc_max, uncert, fwfm = xcpc.cross_correlation(False, '', '')

        # reference values obtained from a known-good run
        expected = (-0.09983091718314982, 0.9578431835689154, 0.0015285160561610929, 0.03625786256084631)
        assert pytest.approx(expected, rel=1e-6) == (correction, cc_max, uncert, fwfm)
|
Loading…
Reference in New Issue
Block a user