Compare commits
31 Commits
29107ee40c ... 38-simplif
| SHA1 |
|---|
| e434bda993 |
| c0c3cbbd7b |
| 76d4ec290c |
| 63dac0fff6 |
| b15cfe2e1d |
| 29a1e4ebe6 |
| 50cabb0038 |
| 8dd5789b0c |
| c4aeab0d89 |
| ef69fc429f |
| c8f9c1c33a |
| 59f2c4b46f |
| 8eb958c91b |
| 6b7f297d7a |
| 70d5c2d621 |
| 7393201b90 |
| d02f74ab10 |
| 0a5f5f0817 |
| db976e5ea9 |
| c021ca19d7 |
| 356988e71d |
| ad62284e0e |
| 0fb0b0f11c |
| e3dd4a4e28 |
| 2bbb84190c |
| fb32d5e0c5 |
| f043401cc0 |
| eb15382b5f |
| 7fbc3bc5ae |
| 221743fe20 |
| 554afc5a81 |
README.md (18 lines changed)
@@ -11,7 +11,7 @@ PILOT has originally been developed in Mathworks' MatLab. In order to distribute
 problems, it has been decided to redevelop the software package in Python. The great work of the ObsPy group allows easy
 handling of a bunch of seismic data and PyLoT will benefit a lot compared to the former MatLab version.
 
-The development of PyLoT is part of the joint research project MAGS2, AlpArray and AdriaArray.
+The development of PyLoT is part of the joint research project MAGS2 and AlpArray.
 
 ## Installation
 
@@ -27,30 +27,28 @@ Afterwards run (from the PyLoT main directory where the files *requirements.txt*
 conda env create -f pylot.yml
 or
 
-conda create -c conda-forge --name pylot_311 python=3.11 --file requirements.txt
+conda create --name pylot_38 --file requirements.txt
 
-to create a new Anaconda environment called *pylot_311*.
+to create a new Anaconda environment called "pylot_38".
 
 Afterwards activate the environment by typing
 
-conda activate pylot_311
+conda activate pylot_38
 
 #### Prerequisites:
 
 In order to run PyLoT you need to install:
 
 - Python 3
-- cartopy
-- joblib
 - obspy
-- pyaml
-- pyqtgraph
 - pyside2
+- pyqtgraph
+- cartopy
 
 (the following are already dependencies of the above packages):
 - scipy
 - numpy
-- matplotlib
+- matplotlib <= 3.3.x
 
 #### Some handwork:
 
@@ -110,4 +108,4 @@ Others: A. Bruestle, T. Meier, W. Friederich
 
 [ObsPy]: http://github.com/obspy/obspy/wiki
 
-August 2024
+April 2022
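A quick way to sanity-check an environment built from either requirements set is an import probe over the prerequisites listed in the README above. This is a minimal sketch, not part of the diff; the module names follow the usual conventions (the `pyside2` package provides the `PySide2` module):

```python
# Minimal sketch: verify that the PyLoT prerequisites listed above import cleanly.
# Assumption: standard import names (pyside2 installs the PySide2 module).
import importlib

for module in ("obspy", "PySide2", "pyqtgraph", "cartopy", "scipy", "numpy", "matplotlib"):
    try:
        importlib.import_module(module)
        print(f"{module}: OK")
    except ImportError as err:
        print(f"{module}: MISSING ({err})")
```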
@@ -243,7 +243,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 pylot_event = Event(eventpath) # event should be path to event directory
 data.setEvtData(pylot_event)
 if fnames == 'None':
-data.setWFData(glob.glob(os.path.join(datapath, event_datapath, '*')))
+data.set_wf_data(glob.glob(os.path.join(datapath, event_datapath, '*')))
 # the following is necessary because within
 # multiple event processing no event ID is provided
 # in autopylot.in
@@ -258,7 +258,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 now.minute)
 parameter.setParam(eventID=eventID)
 else:
-data.setWFData(fnames)
+data.set_wf_data(fnames)
 
 eventpath = events[0]
 # now = datetime.datetime.now()
@@ -268,7 +268,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 # now.hour,
 # now.minute)
 parameter.setParam(eventID=eventid)
-wfdat = data.getWFData() # all available streams
+wfdat = data.get_wf_data() # all available streams
 if not station == 'all':
 wfdat = wfdat.select(station=station)
 if not wfdat:
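The renames above (setWFData to set_wf_data, getWFData to get_wf_data) are the visible surface of the refactor: waveform handling now lives on a WaveformData component instead of on Data itself. A minimal sketch of the new call pattern, assuming the refactored Data class from this branch; the event directory below is an illustrative placeholder:

```python
# Sketch of the renamed waveform API, assuming the refactored Data class
# shown later in this diff; the paths are illustrative placeholders.
import glob
import os

from pylot.core.io.data import Data

data = Data()                             # empty event, new-style container
fnames = glob.glob(os.path.join("events", "e0001.015.23", "*"))
data.set_wf_data(fnames)                  # was: data.setWFData(fnames)
stream = data.get_wf_data()               # was: data.getWFData()
print(stream)
```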
pylot.yml (18 lines changed)
@@ -1,12 +1,14 @@
-name: pylot_311
+name: pylot_38
 channels:
   - conda-forge
   - defaults
 dependencies:
-  - cartopy=0.23.0=py311hcf9f919_1
-  - joblib=1.4.2=pyhd8ed1ab_0
-  - obspy=1.4.1=py311he736701_3
-  - pyaml=24.7.0=pyhd8ed1ab_0
-  - pyqtgraph=0.13.7=pyhd8ed1ab_0
-  - pyside2=5.15.8=py311h3d699ce_4
-  - pytest=8.3.2=pyhd8ed1ab_0
+  - cartopy=0.20.2
+  - matplotlib-base=3.3.4
+  - numpy=1.22.3
+  - obspy=1.3.0
+  - pyqtgraph=0.12.4
+  - pyside2>=5.13.2
+  - python=3.8.12
+  - qt>=5.12.9
+  - scipy=1.8.0
@@ -9,7 +9,7 @@ PyLoT - the Python picking and Localization Tool
 
 This python library contains a graphical user interfaces for picking
 seismic phases. This software needs ObsPy (http://github.com/obspy/obspy/wiki)
-and the Qt libraries to be installed first.
+and the Qt4 libraries to be installed first.
 
 PILOT has been developed in Mathworks' MatLab. In order to distribute
 PILOT without facing portability problems, it has been decided to re-
@@ -1,660 +1,69 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import copy
import logging
import os
from dataclasses import dataclass, field
from typing import List, Union

from PySide2.QtWidgets import QMessageBox
from obspy import read_events
from obspy.core import read, Stream, UTCDateTime
from obspy.core.event import Event as ObsPyEvent
from obspy.io.sac import SacIOError
from obspy import UTCDateTime

import pylot.core.loc.focmec as focmec
import pylot.core.loc.hypodd as hypodd
import pylot.core.loc.velest as velest
from pylot.core.io.phases import readPILOTEvent, picks_from_picksdict, \
picksdict_from_pilot, merge_picks, PylotParameter
from pylot.core.util.errors import FormatError, OverwriteError
from pylot.core.util.event import Event
from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT
from pylot.core.util.utils import fnConstructor, full_range, check4rotated, \
check_for_gaps_and_merge, trim_station_components, check_for_nan
from pylot.core.io.event import EventData
from pylot.core.io.waveformdata import WaveformData
from pylot.core.util.dataprocessing import Metadata


class Data(object):
"""
Data container with attributes wfdata holding ~obspy.core.stream.

:type parent: PySide2.QtWidgets.QWidget object, optional
:param parent: A PySide2.QtWidgets.QWidget object utilized when
called by a GUI to display a PySide2.QtWidgets.QMessageBox instead of printing
to standard out.
:type evtdata: ~obspy.core.event.Event object, optional
:param evtdata ~obspy.core.event.Event object containing all derived or
loaded event. Container object holding, e.g. phase arrivals, etc.
"""
@dataclass
class Data:
event_data: EventData = field(default_factory=EventData)
waveform_data: WaveformData = field(default_factory=WaveformData)
metadata: Metadata = field(default_factory=Metadata)
_parent: Union[None, 'QtWidgets.QWidget'] = None

def __init__(self, parent=None, evtdata=None):
self._parent = parent
if self.getParent():
self.comp = parent.getComponent()
else:
self.comp = 'Z'
self.wfdata = Stream()
self._new = False
if isinstance(evtdata, ObsPyEvent) or isinstance(evtdata, Event):
pass
elif isinstance(evtdata, dict):
evt = readPILOTEvent(**evtdata)
evtdata = evt
elif isinstance(evtdata, str):
try:
cat = read_events(evtdata)
if len(cat) != 1:
raise ValueError('ambiguous event information for file: '
'{file}'.format(file=evtdata))
evtdata = cat[0]
except TypeError as e:
if 'Unknown format for file' in e.message:
if 'PHASES' in evtdata:
picks = picksdict_from_pilot(evtdata)
evtdata = ObsPyEvent()
evtdata.picks = picks_from_picksdict(picks)
elif 'LOC' in evtdata:
raise NotImplementedError('PILOT location information '
'read support not yet '
'implemented.')
elif 'event.pkl' in evtdata:
evtdata = qml_from_obspyDMT(evtdata)
else:
raise e
else:
raise e
else:  # create an empty Event object
self.setNew()
evtdata = ObsPyEvent()
evtdata.picks = []
self.evtdata = evtdata
self.wforiginal = None
self.cuttimes = None
self.dirty = False
self.processed = None
self.event_data = EventData(evtdata)
self.waveform_data = WaveformData()

def __str__(self):
return str(self.wfdata)
return str(self.waveform_data.wfdata)
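The diff above replaces the monolithic Data class with a dataclass that composes EventData, WaveformData and Metadata via `field(default_factory=...)`. A minimal self-contained sketch of what that declaration buys, using stand-in stubs rather than the real PyLoT classes:

```python
# Minimal sketch of the composition pattern in the refactored Data class.
# EventDataStub/WaveformDataStub are stand-ins for the real PyLoT classes.
from dataclasses import dataclass, field

@dataclass
class EventDataStub:          # stand-in for pylot.core.io.event.EventData
    _new: bool = False

@dataclass
class WaveformDataStub:       # stand-in for pylot.core.io.waveformdata.WaveformData
    dirty: bool = False

@dataclass
class DataStub:
    event_data: EventDataStub = field(default_factory=EventDataStub)
    waveform_data: WaveformDataStub = field(default_factory=WaveformDataStub)

a, b = DataStub(), DataStub()
assert a.event_data is not b.event_data   # default_factory: no shared mutable state
```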
def __add__(self, other):
assert isinstance(other, Data), "operands must be of same type 'Data'"
rs_id = self.get_evt_data().get('resource_id')
rs_id_other = other.get_evt_data().get('resource_id')
if other.isNew() and not self.isNew():
picks_to_add = other.get_evt_data().picks
old_picks = self.get_evt_data().picks
wf_ids_old = [pick.waveform_id for pick in old_picks]
for new_pick in picks_to_add:
wf_id = new_pick.waveform_id
if wf_id in wf_ids_old:
for old_pick in old_picks:
comparison = [old_pick.waveform_id == new_pick.waveform_id,
old_pick.phase_hint == new_pick.phase_hint,
old_pick.method_id == new_pick.method_id]
if all(comparison):
del (old_pick)
old_picks.append(new_pick)
elif not other.isNew() and self.isNew():
new = other + self
self.evtdata = new.get_evt_data()
elif self.isNew() and other.isNew():
if not isinstance(other, Data):
raise TypeError("Operands must be of type 'Data'")
if self.event_data.is_new() and other.event_data.is_new():
pass
elif rs_id == rs_id_other:
other.setNew()
elif other.event_data.is_new():
new_picks = other.event_data.evtdata.picks
old_picks = self.event_data.evtdata.picks
old_picks.extend([pick for pick in new_picks if pick not in old_picks])
elif self.event_data.is_new():
return other + self
elif self.event_data.get_id() == other.event_data.get_id():
other.event_data.set_new()
return self + other
else:
raise ValueError("both Data objects have differing "
"unique Event identifiers")
raise ValueError("Both Data objects have differing unique Event identifiers")
return self

def getPicksStr(self):
"""
Return picks in event data
:return: picks seperated by newlines
:rtype: str
"""
picks_str = ''
for pick in self.get_evt_data().picks:
picks_str += str(pick) + '\n'
return picks_str

def getParent(self):
"""
Get PySide.QtGui.QWidget parent object
"""
def get_parent(self):
return self._parent

def isNew(self):
return self._new
def filter_wf_data(self, **kwargs):
self.waveform_data.wfdata.detrend('linear')
self.waveform_data.wfdata.taper(0.02, type='cosine')
self.waveform_data.wfdata.filter(**kwargs)
self.waveform_data.dirty = True

def setNew(self):
self._new = True
def set_wf_data(self, fnames: List[str], fnames_alt: List[str] = None, check_rotated=False, metadata=None, tstart=0, tstop=0):
return self.waveform_data.load_waveforms(fnames, fnames_alt, check_rotated, metadata, tstart, tstop)

def getCutTimes(self):
"""
Returns earliest start and latest end of all waveform data
:return: minimum start time and maximum end time as a tuple
:rtype: (UTCDateTime, UTCDateTime)
"""
if self.cuttimes is None:
self.updateCutTimes()
return self.cuttimes
def reset_wf_data(self):
self.waveform_data.reset()

def updateCutTimes(self):
"""
Update cuttimes to contain earliest start and latest end time
of all waveform data
:rtype: None
"""
self.cuttimes = full_range(self.getWFData())
def get_wf_data(self):
return self.waveform_data.wfdata

def getEventFileName(self):
ID = self.getID()
# handle forbidden filenames especially on windows systems
return fnConstructor(str(ID))

def checkEvent(self, event, fcheck, forceOverwrite=False):
"""
Check information in supplied event and own event and replace with own
information if no other information are given or forced by forceOverwrite
:param event: Event that supplies information for comparison
:type event: pylot.core.util.event.Event
:param fcheck: check and delete existing information
can be a str or a list of strings of ['manual', 'auto', 'origin', 'magnitude']
:type fcheck: str, [str]
:param forceOverwrite: Set to true to force overwrite own information. If false,
supplied information from event is only used if there is no own information in that
category (given in fcheck: manual, auto, origin, magnitude)
:type forceOverwrite: bool
:return:
:rtype: None
"""
if 'origin' in fcheck:
self.replaceOrigin(event, forceOverwrite)
if 'magnitude' in fcheck:
self.replaceMagnitude(event, forceOverwrite)
if 'auto' in fcheck:
self.replacePicks(event, 'auto')
if 'manual' in fcheck:
self.replacePicks(event, 'manual')

def replaceOrigin(self, event, forceOverwrite=False):
"""
Replace own origin with the one supplied in event if own origin is not
existing or forced by forceOverwrite = True
:param event: Event that supplies information for comparison
:type event: pylot.core.util.event.Event
:param forceOverwrite: always replace own information with supplied one if true
:type forceOverwrite: bool
:return:
:rtype: None
"""
if self.get_evt_data().origins or forceOverwrite:
if event.origins:
print("Found origin, replace it by new origin.")
event.origins = self.get_evt_data().origins

def replaceMagnitude(self, event, forceOverwrite=False):
"""
Replace own magnitude with the one supplied in event if own magnitude is not
existing or forced by forceOverwrite = True
:param event: Event that supplies information for comparison
:type event: pylot.core.util.event.Event
:param forceOverwrite: always replace own information with supplied one if true
:type forceOverwrite: bool
:return:
:rtype: None
"""
if self.get_evt_data().magnitudes or forceOverwrite:
if event.magnitudes:
print("Found magnitude, replace it by new magnitude")
event.magnitudes = self.get_evt_data().magnitudes

def replacePicks(self, event, picktype):
"""
Replace picks in event with own picks
:param event: Event that supplies information for comparison
:type event: pylot.core.util.event.Event
:param picktype: 'auto' or 'manual' picks
:type picktype: str
:return:
:rtype: None
"""
checkflag = 1
picks = event.picks
# remove existing picks
for j, pick in reversed(list(enumerate(picks))):
try:
if picktype in str(pick.method_id.id):
picks.pop(j)
checkflag = 2
except AttributeError as e:
msg = '{}'.format(e)
print(e)
checkflag = 0
if checkflag > 0:
if checkflag == 1:
print("Write new %s picks to catalog." % picktype)
if checkflag == 2:
print("Found %s pick(s), remove them and append new picks to catalog." % picktype)

# append new picks
for pick in self.get_evt_data().picks:
if picktype in str(pick.method_id.id):
picks.append(pick)

def exportEvent(self, fnout, fnext='.xml', fcheck='auto', upperErrors=None):
"""
Export event to file
:param fnout: basename of file
:param fnext: file extensions xml, cnv, obs, focmec, or/and pha
:param fcheck: check and delete existing information
can be a str or a list of strings of ['manual', 'auto', 'origin', 'magnitude']
"""
from pylot.core.util.defaults import OUTPUTFORMATS
if not type(fcheck) == list:
fcheck = [fcheck]

try:
evtformat = OUTPUTFORMATS[fnext]
except KeyError as e:
errmsg = '{0}; selected file extension {1} not ' \
'supported'.format(e, fnext)
raise FormatError(errmsg)

if hasattr(self.get_evt_data(), 'notes'):
try:
with open(os.path.join(os.path.dirname(fnout), 'notes.txt'), 'w') as notes_file:
notes_file.write(self.get_evt_data().notes)
except Exception as e:
print('Warning: Could not save notes.txt: ', str(e))

# check for already existing xml-file
if fnext == '.xml':
if os.path.isfile(fnout + fnext):
print("xml-file already exists! Check content ...")
cat = read_events(fnout + fnext)
if len(cat) > 1:
raise IOError('Ambigious event information in file {}'.format(fnout + fnext))
if len(cat) < 1:
raise IOError('No event information in file {}'.format(fnout + fnext))
event = cat[0]
if not event.resource_id == self.get_evt_data().resource_id:
QMessageBox.warning(self, 'Warning', 'Different resource IDs!')
return
self.checkEvent(event, fcheck)
self.setEvtData(event)

self.get_evt_data().write(fnout + fnext, format=evtformat)

# try exporting event
else:
evtdata_org = self.get_evt_data()
picks = evtdata_org.picks
eventpath = evtdata_org.path
picks_copy = copy.deepcopy(picks)
evtdata_copy = Event(eventpath)
evtdata_copy.picks = picks_copy

# check for stations picked automatically as well as manually
# Prefer manual picks!
for i in range(len(picks)):
if picks[i].method_id == 'manual':
mstation = picks[i].waveform_id.station_code
mstation_ext = mstation + '_'
for k in range(len(picks_copy)):
if ((picks_copy[k].waveform_id.station_code == mstation) or
(picks_copy[k].waveform_id.station_code == mstation_ext)) and \
(picks_copy[k].method_id == 'auto'):
del picks_copy[k]
break
lendiff = len(picks) - len(picks_copy)
if lendiff != 0:
print("Manual as well as automatic picks available. Prefered the {} manual ones!".format(lendiff))


no_uncertainties_p = []
no_uncertainties_s = []
if upperErrors:
# check for pick uncertainties exceeding adjusted upper errors
# Picks with larger uncertainties will not be saved in output file!
for j in range(len(picks)):
for i in range(len(picks_copy)):
if picks_copy[i].phase_hint[0] == 'P':
# Skipping pick if no upper_uncertainty is found and warning user
if picks_copy[i].time_errors['upper_uncertainty'] is None:
#print("{1} P-Pick of station {0} does not have upper_uncertainty and cant be checked".format(
# picks_copy[i].waveform_id.station_code,
# picks_copy[i].method_id))
if not picks_copy[i].waveform_id.station_code in no_uncertainties_p:
no_uncertainties_p.append(picks_copy[i].waveform_id.station_code)
continue

#print ("checking for upper_uncertainty")
if (picks_copy[i].time_errors['uncertainty'] is None) or \
(picks_copy[i].time_errors['upper_uncertainty'] >= upperErrors[0]):
print("Uncertainty exceeds or equal adjusted upper time error!")
print("Adjusted uncertainty: {}".format(upperErrors[0]))
print("Pick uncertainty: {}".format(picks_copy[i].time_errors['uncertainty']))
print("{1} P-Pick of station {0} will not be saved in outputfile".format(
picks_copy[i].waveform_id.station_code,
picks_copy[i].method_id))
del picks_copy[i]
break
if picks_copy[i].phase_hint[0] == 'S':

# Skipping pick if no upper_uncertainty is found and warning user
if picks_copy[i].time_errors['upper_uncertainty'] is None:
#print("{1} S-Pick of station {0} does not have upper_uncertainty and cant be checked".format(
#picks_copy[i].waveform_id.station_code,
#picks_copy[i].method_id))
if not picks_copy[i].waveform_id.station_code in no_uncertainties_s:
no_uncertainties_s.append(picks_copy[i].waveform_id.station_code)
continue


if (picks_copy[i].time_errors['uncertainty'] is None) or \
(picks_copy[i].time_errors['upper_uncertainty'] >= upperErrors[1]):
print("Uncertainty exceeds or equal adjusted upper time error!")
print("Adjusted uncertainty: {}".format(upperErrors[1]))
print("Pick uncertainty: {}".format(picks_copy[i].time_errors['uncertainty']))
print("{1} S-Pick of station {0} will not be saved in outputfile".format(
picks_copy[i].waveform_id.station_code,
picks_copy[i].method_id))
del picks_copy[i]
break
for s in no_uncertainties_p:
print("P-Pick of station {0} does not have upper_uncertainty and cant be checked".format(s))
for s in no_uncertainties_s:
print("S-Pick of station {0} does not have upper_uncertainty and cant be checked".format(s))

if fnext == '.obs':
try:
evtdata_copy.write(fnout + fnext, format=evtformat)
# write header afterwards
evid = str(evtdata_org.resource_id).split('/')[1]
header = '# EQEVENT: Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' % evid
nllocfile = open(fnout + fnext)
l = nllocfile.readlines()
# Adding A0/Generic Amplitude to .obs file
# l2 = []
# for li in l:
# for amp in evtdata_org.amplitudes:
# if amp.waveform_id.station_code == li[0:5].strip():
# li = li[0:64] + '{:0.2e}'.format(amp.generic_amplitude) + li[73:-1] + '\n'
# l2.append(li)
# l = l2
nllocfile.close()
l.insert(0, header)
nllocfile = open(fnout + fnext, 'w')
nllocfile.write("".join(l))
nllocfile.close()
except KeyError as e:
raise KeyError('''{0} export format
not implemented: {1}'''.format(evtformat, e))
if fnext == '.cnv':
try:
velest.export(picks_copy, fnout + fnext, eventinfo=self.get_evt_data())
except KeyError as e:
raise KeyError('''{0} export format
not implemented: {1}'''.format(evtformat, e))
if fnext == '_focmec.in':
try:
parameter = PylotParameter()
logging.warning('Using default input parameter')
focmec.export(picks_copy, fnout + fnext, parameter, eventinfo=self.get_evt_data())
except KeyError as e:
raise KeyError('''{0} export format
not implemented: {1}'''.format(evtformat, e))
if fnext == '.pha':
try:
parameter = PylotParameter()
logging.warning('Using default input parameter')
hypodd.export(picks_copy, fnout + fnext, parameter, eventinfo=self.get_evt_data())
except KeyError as e:
raise KeyError('''{0} export format
not implemented: {1}'''.format(evtformat, e))

def getComp(self):
"""
Get component (ZNE)
"""
return self.comp

def getID(self):
"""
Get unique resource id
"""
try:
return self.evtdata.get('resource_id').id
except:
return None

def filterWFData(self, kwargs):
"""
Filter waveform data
:param kwargs: arguments to pass through to filter function
"""
data = self.getWFData()
data.detrend('linear')
data.taper(0.02, type='cosine')
data.filter(**kwargs)
self.dirty = True

def setWFData(self, fnames, fnames_alt=None, checkRotated=False, metadata=None, tstart=0, tstop=0):
"""
Clear current waveform data and set given waveform data
:param fnames: waveform data names to append
:param fnames_alt: alternative data to show (e.g. synthetic/processed)
:type fnames: list
"""
def check_fname_exists(filenames: list) -> list:
if filenames:
filenames = [fn for fn in filenames if os.path.isfile(fn)]
return filenames

self.wfdata = Stream()
self.wforiginal = None
self.wf_alt = Stream()
if tstart == tstop:
tstart = tstop = None
self.tstart = tstart
self.tstop = tstop

# remove directories
fnames = check_fname_exists(fnames)
fnames_alt = check_fname_exists(fnames_alt)

# if obspy_dmt:
# wfdir = 'raw'
# self.processed = False
# for fname in fnames:
# if fname.endswith('processed'):
# wfdir = 'processed'
# self.processed = True
# break
# for fpath in fnames:
# if fpath.endswith(wfdir):
# wffnames = [os.path.join(fpath, fname) for fname in os.listdir(fpath)]
# if 'syngine' in fpath.split('/')[-1]:
# wffnames_syn = [os.path.join(fpath, fname) for fname in os.listdir(fpath)]
# else:
# wffnames = fnames
if fnames is not None:
self.appendWFData(fnames)
if fnames_alt is not None:
self.appendWFData(fnames_alt, alternative=True)
else:
return False

# various pre-processing steps:
# remove possible underscores in station names
# self.wfdata = remove_underscores(self.wfdata)
# check for gaps and merge
self.wfdata, _ = check_for_gaps_and_merge(self.wfdata)
# check for nans
check_for_nan(self.wfdata)
# check for stations with rotated components
if checkRotated and metadata is not None:
self.wfdata = check4rotated(self.wfdata, metadata, verbosity=0)
# trim station components to same start value
trim_station_components(self.wfdata, trim_start=True, trim_end=False)

# make a copy of original data
self.wforiginal = self.getWFData().copy()
self.dirty = False
return True

def appendWFData(self, fnames, alternative=False):
"""
Read waveform data from fnames and append it to current wf data
:param fnames: waveform data to append
:type fnames: list
"""
assert isinstance(fnames, list), "input parameter 'fnames' is " \
"supposed to be of type 'list' " \
"but is actually" \
" {0}".format(type(fnames))
if self.dirty:
self.resetWFData()

orig_or_alternative_data = {True: self.wf_alt,
False: self.wfdata}

warnmsg = ''
for fname in set(fnames):
try:
orig_or_alternative_data[alternative] += read(fname, starttime=self.tstart, endtime=self.tstop)
except TypeError:
try:
orig_or_alternative_data[alternative] += read(fname, format='GSE2', starttime=self.tstart, endtime=self.tstop)
except Exception as e:
try:
orig_or_alternative_data[alternative] += read(fname, format='SEGY', starttime=self.tstart,
endtime=self.tstop)
except Exception as e:
warnmsg += '{0}\n{1}\n'.format(fname, e)
except SacIOError as se:
warnmsg += '{0}\n{1}\n'.format(fname, se)
if warnmsg:
warnmsg = 'WARNING in appendWFData: unable to read waveform data\n' + warnmsg
print(warnmsg)

def getWFData(self):
return self.wfdata

def getOriginalWFData(self):
return self.wforiginal

def getAltWFdata(self):
return self.wf_alt

def resetWFData(self):
"""
Set waveform data to original waveform data
"""
if self.getOriginalWFData():
self.wfdata = self.getOriginalWFData().copy()
else:
self.wfdata = Stream()
self.dirty = False

def resetPicks(self):
"""
Clear all picks from event
"""
self.get_evt_data().picks = []

def get_evt_data(self):
return self.evtdata

def setEvtData(self, event):
self.evtdata = event

def applyEVTData(self, data, typ='pick'):
"""
Either takes an `obspy.core.event.Event` object and applies all new
information on the event to the actual data if typ is 'event or
creates ObsPy pick objects and append it to the picks list from the
PyLoT dictionary contain all picks if type is pick
:param data: data to apply, either picks or complete event
:type data:
:param typ: which event data to apply, 'pick' or 'event'
:type typ: str
:param authority_id: (currently unused)
:type: str
:raise OverwriteError:
"""

def applyPicks(picks):
"""
Creates ObsPy pick objects and append it to the picks list from the
PyLoT dictionary contain all picks.
:param picks:
:raise OverwriteError: raises an OverwriteError if the picks list is
not empty. The GUI will then ask for a decision.
"""
# firstonset = find_firstonset(picks)
# check for automatic picks
print("Writing phases to ObsPy-quakeml file")
for key in picks:
if not picks[key].get('P'):
continue
if picks[key]['P']['picker'] == 'auto':
print("Existing auto-picks will be overwritten in pick-dictionary!")
picks = picks_from_picksdict(picks)
break
else:
if self.get_evt_data().picks:
raise OverwriteError('Existing picks would be overwritten!')
else:
picks = picks_from_picksdict(picks)
break
self.get_evt_data().picks = picks
# if 'smi:local' in self.getID() and firstonset:
# fonset_str = firstonset.strftime('%Y_%m_%d_%H_%M_%S')
# ID = ResourceIdentifier('event/' + fonset_str)
# ID.convertIDToQuakeMLURI(authority_id=authority_id)
# self.get_evt_data().resource_id = ID

def applyEvent(event):
"""
takes an `obspy.core.event.Event` object and applies all new
information on the event to the actual data
:param event:
"""
if event is None:
print("applyEvent: Received None")
return
if self.isNew():
self.setEvtData(event)
else:
# prevent overwriting original pick information
event_old = self.get_evt_data()
if not event_old.resource_id == event.resource_id:
print("WARNING: Missmatch in event resource id's: {} and {}".format(
event_old.resource_id,
event.resource_id))
else:
picks = copy.deepcopy(event_old.picks)
event = merge_picks(event, picks)
# apply event information from location
event_old.update(event)

applydata = {'pick': applyPicks,
'event': applyEvent}

applydata[typ](data)
self._new = False
def rotate_wf_data(self):
self.waveform_data.rotate_zne(self.metadata)

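The new filter_wf_data and rotate_wf_data methods show the delegation pattern used throughout the refactor: Data forwards to the Stream held by WaveformData. For reference, these are the standard ObsPy Stream calls the filtering path delegates to (a sketch on ObsPy's bundled example data; the bandpass corner frequencies are illustrative):

```python
# Sketch: the ObsPy Stream calls that filter_wf_data delegates to.
# Standard obspy API; corner frequencies are illustrative values.
from obspy import read

stream = read()                               # ObsPy example data
stream.detrend('linear')                      # remove linear trend
stream.taper(0.02, type='cosine')             # 2% cosine taper at the edges
stream.filter('bandpass', freqmin=0.5, freqmax=10.0)
print(stream)
```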
class GenericDataStructure(object):
@@ -839,22 +248,6 @@ class PilotDataStructure(GenericDataStructure):
self.setExpandFields(['root', 'database'])


class ObspyDMTdataStructure(GenericDataStructure):
"""
Object containing the data access information for the old PILOT data
structure.
"""

def __init__(self, **fields):
if not fields:
fields = {'database': '',
'root': ''}

GenericDataStructure.__init__(self, **fields)

self.setExpandFields(['root', 'database'])


class SeiscompDataStructure(GenericDataStructure):
"""
Dictionary containing the data access information for an SDS data archive:

@@ -511,7 +511,7 @@ defaults = {'rootpath': {'type': str,
 
 'taup_model': {'type': str,
 'tooltip': 'Define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6',
-'value': 'iasp91',
+'value': None,
 'namestring': 'TauPy model'},
 
 'taup_phases': {'type': str,
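The taup_model parameter feeds ObsPy's TauPy machinery downstream. For context, this is what a value such as 'iasp91' is used for; the call below is the standard obspy.taup API, independent of PyLoT:

```python
# Sketch: what a taup_model value like 'iasp91' drives downstream.
# Standard obspy.taup API; depth and distance are illustrative values.
from obspy.taup import TauPyModel

model = TauPyModel(model="iasp91")
arrivals = model.get_travel_times(source_depth_in_km=10.0,
                                  distance_in_degree=45.0,
                                  phase_list=["P", "S"])
for arrival in arrivals:
    print(arrival.name, arrival.time)
```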
pylot/core/io/event.py (new file, 106 lines)
@@ -0,0 +1,106 @@
import copy
from dataclasses import dataclass
from typing import Union

from obspy import read_events
from obspy.core.event import Event as ObsPyEvent


@dataclass
class EventData:
    evtdata: Union[ObsPyEvent, None] = None
    _new: bool = False

    def __init__(self, evtdata=None):
        self.set_event_data(evtdata)

    def set_event_data(self, evtdata):
        if isinstance(evtdata, ObsPyEvent):
            self.evtdata = evtdata
        elif isinstance(evtdata, dict):
            self.evtdata = self.read_pilot_event(**evtdata)
        elif isinstance(evtdata, str):
            self.evtdata = self.read_event_file(evtdata)
        else:
            self.set_new()
            self.evtdata = ObsPyEvent(picks=[])

    def read_event_file(self, evtdata: str) -> ObsPyEvent:
        try:
            cat = read_events(evtdata)
            if len(cat) != 1:
                raise ValueError(f'ambiguous event information for file: {evtdata}')
            return cat[0]
        except TypeError as e:
            self.handle_event_file_error(e, evtdata)

    def handle_event_file_error(self, e: TypeError, evtdata: str):
        if 'Unknown format for file' in str(e):
            if 'PHASES' in evtdata:
                picks = self.picksdict_from_pilot(evtdata)
                evtdata = ObsPyEvent(picks=self.picks_from_picksdict(picks))
            elif 'LOC' in evtdata:
                raise NotImplementedError('PILOT location information read support not yet implemented.')
            elif 'event.pkl' in evtdata:
                evtdata = self.qml_from_obspy_dmt(evtdata)
            else:
                raise e
        else:
            raise e

    def set_new(self):
        self._new = True

    def is_new(self) -> bool:
        return self._new

    def get_picks_str(self) -> str:
        return '\n'.join(str(pick) for pick in self.evtdata.picks)

    def replace_origin(self, event: ObsPyEvent, force_overwrite: bool = False):
        if self.evtdata.origins or force_overwrite:
            event.origins = self.evtdata.origins

    def replace_magnitude(self, event: ObsPyEvent, force_overwrite: bool = False):
        if self.evtdata.magnitudes or force_overwrite:
            event.magnitudes = self.evtdata.magnitudes

    def replace_picks(self, event: ObsPyEvent, picktype: str):
        checkflag = 1
        picks = event.picks
        for j, pick in reversed(list(enumerate(picks))):
            if picktype in str(pick.method_id.id):
                picks.pop(j)
                checkflag = 2

        if checkflag > 0:
            for pick in self.evtdata.picks:
                if picktype in str(pick.method_id.id):
                    picks.append(pick)

    def get_id(self) -> Union[str, None]:
        try:
            return self.evtdata.resource_id.id
        except:
            return None

    def apply_event_data(self, data, typ='pick'):
        if typ == 'pick':
            self.apply_picks(data)
        elif typ == 'event':
            self.apply_event(data)

    def apply_picks(self, picks):
        self.evtdata.picks = picks

    def apply_event(self, event: ObsPyEvent):
        if self.is_new():
            self.evtdata = event
        else:
            old_event = self.evtdata
            if old_event.resource_id == event.resource_id:
                picks = copy.deepcopy(old_event.picks)
                event = self.merge_picks(event, picks)
                old_event.update(event)
            else:
                print(f"WARNING: Mismatch in event resource id's: {old_event.resource_id} and {event.resource_id}")
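EventData now owns the event-side dispatch that used to live in Data.__init__: an ObsPy Event is wrapped as-is, a dict is routed through the PILOT reader, a string is treated as a file path, and anything else yields a fresh empty event flagged as new. A small usage sketch, assuming the class as shown in this new file (the file path is an illustrative placeholder):

```python
# Sketch of the new EventData container, assuming the class shown above.
from obspy.core.event import Event as ObsPyEvent

from pylot.core.io.event import EventData

empty = EventData()                      # no input -> fresh, empty ObsPy event
assert empty.is_new() and empty.evtdata.picks == []

wrapped = EventData(ObsPyEvent())        # wrap an existing ObsPy event as-is
assert not wrapped.is_new()

from_file = EventData("event.xml")       # illustrative path -> read_event_file()
print(from_file.get_id())
```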
@@ -21,25 +21,6 @@ from pylot.core.util.utils import get_owner, full_range, four_digits, transformF
 backtransformFilterString, loopIdentifyPhase, identifyPhase
 
 
-def add_amplitudes(event, amplitudes):
-amplitude_list = []
-for pick in event.picks:
-try:
-a0 = amplitudes[pick.waveform_id.station_code]
-amplitude = ope.Amplitude(generic_amplitude=a0 * 1e-3)
-amplitude.unit = 'm'
-amplitude.category = 'point'
-amplitude.waveform_id = pick.waveform_id
-amplitude.magnitude_hint = 'ML'
-amplitude.pick_id = pick.resource_id
-amplitude.type = 'AML'
-amplitude_list.append(amplitude)
-except KeyError:
-continue
-event.amplitudes = amplitude_list
-return event
-
-
 def readPILOTEvent(phasfn=None, locfn=None, authority_id='RUB', **kwargs):
 """
 readPILOTEvent - function
@@ -193,31 +174,6 @@ def convert_pilot_times(time_array):
 return UTCDateTime(*times)
 
 
-def picksdict_from_obs(fn):
-"""
-create pick dictionary from obs file
-:param fn: filename
-:type fn:
-:return:
-:rtype:
-"""
-picks = dict()
-station_name = str()
-for line in open(fn, 'r'):
-if line.startswith('#'):
-continue
-else:
-phase_line = line.split()
-if not station_name == phase_line[0]:
-phase = dict()
-station_name = phase_line[0]
-phase_name = phase_line[4].upper()
-pick = UTCDateTime(phase_line[6] + phase_line[7] + phase_line[8])
-phase[phase_name] = dict(mpp=pick, fm=phase_line[5])
-picks[station_name] = phase
-return picks
-
-
 def picksdict_from_picks(evt, parameter=None):
 """
 Takes an Event object and return the pick dictionary commonly used within
@@ -373,636 +329,228 @@ def picks_from_picksdict(picks, creation_info=None):
return picks_list


def reassess_pilot_db(root_dir, db_dir, out_dir=None, fn_param=None, verbosity=0):
# TODO: change root to datapath
db_root = os.path.join(root_dir, db_dir)
evt_list = glob.glob1(db_root, 'e????.???.??')

for evt in evt_list:
if verbosity > 0:
print('Reassessing event {0}'.format(evt))
reassess_pilot_event(root_dir, db_dir, evt, out_dir, fn_param, verbosity)


def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None, verbosity=0):
from obspy import read

from pylot.core.io.inputs import PylotParameter
from pylot.core.pick.utils import earllatepicker
# TODO: change root to datapath

default = PylotParameter(fn_param, verbosity)

search_base = os.path.join(root_dir, db_dir, event_id)
phases_file = glob.glob(os.path.join(search_base, 'PHASES.mat'))
if not phases_file:
return
if verbosity > 1:
print('Opening PILOT phases file: {fn}'.format(fn=phases_file[0]))
picks_dict = picksdict_from_pilot(phases_file[0])
if verbosity > 0:
print('Dictionary read from PHASES.mat:\n{0}'.format(picks_dict))
datacheck = list()
info = None
for station in picks_dict.keys():
fn_pattern = os.path.join(search_base, '{0}*'.format(station))
try:
st = read(fn_pattern)
except TypeError as e:
if 'Unknown format for file' in e.message:
try:
st = read(fn_pattern, format='GSE2')
except ValueError as e:
if e.message == 'second must be in 0..59':
info = 'A known Error was raised. Please find the list of corrupted files and double-check these files.'
datacheck.append(fn_pattern + ' (time info)\n')
continue
else:
raise ValueError(e.message)
except Exception as e:
if 'No file matching file pattern:' in e.message:
if verbosity > 0:
warnings.warn('no waveform data found for station {station}'.format(station=station),
RuntimeWarning)
datacheck.append(fn_pattern + ' (no data)\n')
continue
else:
raise e
else:
raise e
for phase in picks_dict[station].keys():
try:
mpp = picks_dict[station][phase]['mpp']
except KeyError as e:
print(e.message, station)
continue
sel_st = select_for_phase(st, phase)
if not sel_st:
msg = 'no waveform data found for station {station}'.format(station=station)
warnings.warn(msg, RuntimeWarning)
continue
stime, etime = full_range(sel_st)
rel_pick = mpp - stime
epp, lpp, spe = earllatepicker(sel_st,
default.get('nfac{0}'.format(phase)),
default.get('tsnrz' if phase == 'P' else 'tsnrh'),
Pick1=rel_pick,
iplot=0,
verbosity=0)
if epp is None or lpp is None:
continue
epp = stime + epp
lpp = stime + lpp
min_diff = 3 * st[0].stats.delta
if lpp - mpp < min_diff:
lpp = mpp + min_diff
if mpp - epp < min_diff:
epp = mpp - min_diff
picks_dict[station][phase] = dict(epp=epp, mpp=mpp, lpp=lpp, spe=spe)
if datacheck:
if info:
if verbosity > 0:
print(info + ': {0}'.format(search_base))
fncheck = open(os.path.join(search_base, 'datacheck_list'), 'w')
fncheck.writelines(datacheck)
fncheck.close()
del datacheck
# create Event object for export
evt = ope.Event(resource_id=event_id)
evt.picks = picks_from_picksdict(picks_dict)
# write phase information to file
if not out_dir:
fnout_prefix = os.path.join(root_dir, db_dir, event_id, 'PyLoT_{0}.'.format(event_id))
else:
out_dir = os.path.join(out_dir, db_dir)
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
fnout_prefix = os.path.join(out_dir, 'PyLoT_{0}.'.format(event_id))
evt.write(fnout_prefix + 'xml', format='QUAKEML')


def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
def write_phases(arrivals, fformat, filename, parameter=None, eventinfo=None):
"""
Function of methods to write phases to the following standard file
formats used for locating earthquakes:
Writes earthquake phase data to different file formats.

HYPO71, NLLoc, VELEST, HYPOSAT, FOCMEC, and hypoDD

:param arrivals:dictionary containing all phase information including
station ID, phase, first motion, weight (uncertainty), ...
:param arrivals: Dictionary containing phase information (station ID, phase, first motion, weight, etc.)
:type arrivals: dict

:param fformat: chosen file format (location routine),
choose between NLLoc, HYPO71, HYPOSAT, VELEST,
HYPOINVERSE, FOCMEC, and hypoDD
:param fformat: File format to write to (e.g., 'NLLoc', 'HYPO71', 'HYPOSAT', 'VELEST', 'HYPODD', 'FOCMEC')
:type fformat: str

:param filename: full path and name of phase file
:type filename: string

:param parameter: all input information
:type parameter: object

:param eventinfo: optional, needed for VELEST-cnv file
and FOCMEC- and HASH-input files
:type eventinfo: `obspy.core.event.Event` object
:param filename: Path and name of the output phase file
:type filename: str
:param parameter: Additional parameters for writing the phase data
:type parameter: object
:param eventinfo: Event information needed for specific formats like VELEST, FOCMEC, and HASH
:type eventinfo: obspy.core.event.Event
"""
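A sketch of calling the renamed writer with the arrivals layout the docstring describes (station to phase to pick attributes). The station name, times, weights and output path below are illustrative values; the default PylotParameter() is used the same way the exporters in data.py use it:

```python
# Sketch: calling the renamed writer with the arrivals layout described above.
# Station name, pick times, weights and the output path are illustrative.
from obspy import UTCDateTime

from pylot.core.io.inputs import PylotParameter
from pylot.core.io.phases import write_phases

arrivals = {
    "ST01": {
        "P": {"mpp": UTCDateTime(2024, 8, 1, 12, 0, 1.25), "fm": "U", "weight": 1},
        "S": {"mpp": UTCDateTime(2024, 8, 1, 12, 0, 3.80), "weight": 2, "Ao": 1.5},
    },
}
parameter = PylotParameter()  # defaults, as the exporters in data.py do
write_phases(arrivals, fformat="NLLoc", filename="loc/PyLoT_e0001.obs",
             parameter=parameter, eventinfo=None)
```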
if fformat == 'NLLoc':
|
||||
print("Writing phases to %s for NLLoc" % filename)
|
||||
fid = open("%s" % filename, 'w')
|
||||
# write header
|
||||
fid.write('# EQEVENT: %s Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' %
|
||||
(parameter.get('database'), parameter.get('eventID')))
|
||||
arrivals = chooseArrivals(arrivals) # MP MP what is chooseArrivals? It is not defined anywhere
|
||||
for key in arrivals:
|
||||
# P onsets
|
||||
if 'P' in arrivals[key]:
|
||||
try:
|
||||
fm = arrivals[key]['P']['fm']
|
||||
except KeyError as e:
|
||||
print(e)
|
||||
fm = None
|
||||
if fm is None:
|
||||
fm = '?'
|
||||
onset = arrivals[key]['P']['mpp']
|
||||
year = onset.year
|
||||
month = onset.month
|
||||
day = onset.day
|
||||
hh = onset.hour
|
||||
mm = onset.minute
|
||||
ss = onset.second
|
||||
ms = onset.microsecond
|
||||
ss_ms = ss + ms / 1000000.0
|
||||
pweight = 1 # use pick
|
||||
try:
|
||||
if arrivals[key]['P']['weight'] >= 4:
|
||||
pweight = 0 # do not use pick
|
||||
print("Station {}: Uncertain pick, do not use it!".format(key))
|
||||
except KeyError as e:
|
||||
print(e.message + '; no weight set during processing')
|
||||
fid.write('%s ? ? ? P %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
|
||||
fm,
|
||||
year,
|
||||
month,
|
||||
day,
|
||||
hh,
|
||||
mm,
|
||||
ss_ms,
|
||||
pweight))
|
||||
# S onsets
|
||||
if 'S' in arrivals[key] and arrivals[key]['S']['mpp'] is not None:
|
||||
fm = '?'
|
||||
onset = arrivals[key]['S']['mpp']
|
||||
year = onset.year
|
||||
month = onset.month
|
||||
day = onset.day
|
||||
hh = onset.hour
|
||||
mm = onset.minute
|
||||
ss = onset.second
|
||||
ms = onset.microsecond
|
||||
ss_ms = ss + ms / 1000000.0
|
||||
sweight = 1 # use pick
|
||||
try:
|
||||
if arrivals[key]['S']['weight'] >= 4:
|
||||
sweight = 0 # do not use pick
|
||||
except KeyError as e:
|
||||
print(str(e) + '; no weight set during processing')
|
||||
Ao = arrivals[key]['S']['Ao'] # peak-to-peak amplitude
|
||||
if Ao == None:
|
||||
Ao = 0.0
|
||||
# fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
|
||||
fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 %9.2f 0 0 %d \n' % (key,
|
||||
fm,
|
||||
year,
|
||||
month,
|
||||
day,
|
||||
hh,
|
||||
mm,
|
||||
ss_ms,
|
||||
Ao,
|
||||
sweight))
|
||||
|
||||
fid.close()
|
||||
elif fformat == 'HYPO71':
|
||||
print("Writing phases to %s for HYPO71" % filename)
|
||||
fid = open("%s" % filename, 'w')
|
||||
# write header
|
||||
fid.write(' %s\n' %
|
||||
parameter.get('eventID'))
|
||||
arrivals = chooseArrivals(arrivals) # MP MP what is chooseArrivals? It is not defined anywhere
|
||||
for key in arrivals:
|
||||
if arrivals[key]['P']['weight'] < 4:
|
||||
stat = key
|
||||
if len(stat) > 4: # HYPO71 handles only 4-string station IDs
|
||||
stat = stat[1:5]
|
||||
Ponset = arrivals[key]['P']['mpp']
|
||||
Sonset = arrivals[key]['S']['mpp']
|
||||
pweight = arrivals[key]['P']['weight']
|
||||
sweight = arrivals[key]['S']['weight']
|
||||
fm = arrivals[key]['P']['fm']
|
||||
if fm is None:
|
||||
fm = '-'
|
||||
Ao = arrivals[key]['S']['Ao']
|
||||
if Ao is None:
|
||||
Ao = ''
|
||||
else:
|
||||
Ao = str('%7.2f' % Ao)
|
||||
year = Ponset.year
|
||||
if year >= 2000:
|
||||
year = year - 2000
|
||||
else:
|
||||
year = year - 1900
|
||||
month = Ponset.month
|
||||
day = Ponset.day
|
||||
hh = Ponset.hour
|
||||
mm = Ponset.minute
|
||||
ss = Ponset.second
|
||||
ms = Ponset.microsecond
|
||||
ss_ms = ss + ms / 1000000.0
|
||||
if pweight < 2:
|
||||
pstr = 'I'
|
||||
elif pweight >= 2:
|
||||
pstr = 'E'
|
||||
if arrivals[key]['S']['weight'] < 4:
|
||||
Sss = Sonset.second
|
||||
Sms = Sonset.microsecond
|
||||
Sss_ms = Sss + Sms / 1000000.0
|
||||
Sss_ms = str('%5.02f' % Sss_ms)
|
||||
if sweight < 2:
|
||||
sstr = 'I'
|
||||
elif sweight >= 2:
|
||||
sstr = 'E'
|
||||
fid.write('%-4s%sP%s%d %02d%02d%02d%02d%02d%5.2f %s%sS %d %s\n' % (stat,
|
||||
pstr,
|
||||
fm,
|
||||
pweight,
|
||||
year,
|
||||
month,
|
||||
day,
|
||||
hh,
|
||||
mm,
|
||||
ss_ms,
|
||||
Sss_ms,
|
||||
sstr,
|
||||
sweight,
|
||||
Ao))
|
||||
else:
|
||||
fid.write('%-4s%sP%s%d %02d%02d%02d%02d%02d%5.2f %s\n' % (stat,
|
||||
pstr,
|
||||
fm,
|
||||
pweight,
|
||||
year,
|
||||
month,
|
||||
day,
|
||||
hh,
|
||||
mm,
|
||||
ss_ms,
|
||||
Ao))
|
||||
def write_nlloc():
|
||||
with open(filename, 'w') as fid:
|
||||
fid.write('# EQEVENT: {} Label: EQ{} Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n'.format(
|
||||
parameter.get('database'), parameter.get('eventID')))
|
||||
for key, value in arrivals.items():
|
||||
for phase in ['P', 'S']:
|
||||
if phase in value:
|
||||
fm = value[phase].get('fm', '?')
|
||||
onset = value[phase]['mpp']
|
||||
ss_ms = onset.second + onset.microsecond / 1000000.0
|
||||
weight = 1 if value[phase].get('weight', 0) < 4 else 0
|
||||
amp = value[phase].get('Ao', 0.0) if phase == 'S' else ''
|
||||
fid.write('{} ? ? ? {} {}{}{} {}{} {:7.4f} GAU 0 {} 0 0 {}\n'.format(
|
||||
key, phase, fm, onset.year, onset.month, onset.day, onset.hour, onset.minute, ss_ms, amp,
|
||||
weight))
|
||||
|
||||
fid.close()
|
||||
def write_hypo71():
|
||||
with open(filename, 'w') as fid:
|
||||
fid.write(
|
||||
' {}\n'.format(parameter.get('eventID')))
|
||||
for key, value in arrivals.items():
|
||||
if value['P'].get('weight', 0) < 4:
|
||||
stat = key[:4]
|
||||
Ponset = value['P']['mpp']
|
||||
Sonset = value.get('S', {}).get('mpp')
|
||||
pweight = value['P'].get('weight', 0)
|
||||
sweight = value.get('S', {}).get('weight', 0)
|
||||
fm = value['P'].get('fm', '-')
|
||||
Ao = value.get('S', {}).get('Ao', '')
|
||||
year = Ponset.year - 2000 if Ponset.year >= 2000 else Ponset.year - 1900
|
||||
ss_ms = Ponset.second + Ponset.microsecond / 1000000.0
|
||||
if Sonset:
|
||||
Sss_ms = Sonset.second + Sonset.microsecond / 1000000.0
|
||||
fid.write('{}P{}{}{} {}{}{}{}{} {:5.2f} {}{}S {} {}\n'.format(
|
||||
stat, 'I' if pweight < 2 else 'E', fm, pweight, year, Ponset.month, Ponset.day,
|
||||
Ponset.hour, Ponset.minute, ss_ms, Sss_ms, 'I' if sweight < 2 else 'E', sweight, Ao))
|
||||
else:
|
||||
fid.write('{}P{}{}{} {}{}{}{}{} {:5.2f} {}\n'.format(
|
||||
stat, 'I' if pweight < 2 else 'E', fm, pweight, year, Ponset.month, Ponset.day,
|
||||
Ponset.hour, Ponset.minute, ss_ms, Ao))
|
||||
|
||||
elif fformat == 'HYPOSAT':
|
||||
print("Writing phases to %s for HYPOSAT" % filename)
|
||||
fid = open("%s" % filename, 'w')
|
||||
# write header
|
||||
fid.write('%s, event %s \n' % (parameter.get('database'), parameter.get('eventID')))
|
||||
arrivals = chooseArrivals(arrivals) # MP MP what is chooseArrivals? It is not defined anywhere
|
||||
for key in arrivals:
|
||||
# P onsets
|
||||
if 'P' in arrivals[key] and arrivals[key]['P']['mpp'] is not None:
|
||||
if arrivals[key]['P']['weight'] < 4:
|
||||
Ponset = arrivals[key]['P']['mpp']
|
||||
pyear = Ponset.year
|
||||
pmonth = Ponset.month
|
||||
pday = Ponset.day
|
||||
phh = Ponset.hour
|
||||
pmm = Ponset.minute
|
||||
pss = Ponset.second
|
||||
pms = Ponset.microsecond
|
||||
Pss = pss + pms / 1000000.0
|
||||
# use symmetrized picking error as std
|
||||
# (read the HYPOSAT manual)
|
||||
pstd = arrivals[key]['P']['spe']
|
||||
if pstd is None:
|
||||
errorsP = parameter.get('timeerrorsP')
|
||||
if arrivals[key]['P']['weight'] == 0:
|
||||
pstd = errorsP[0]
|
||||
elif arrivals[key]['P']['weight'] == 1:
|
||||
pstd = errorsP[1]
|
||||
elif arrivals[key]['P']['weight'] == 2:
|
||||
pstd = errorsP[2]
|
||||
elif arrivals[key]['P']['weight'] == 3:
|
||||
psrd = errorsP[3]
|
||||
else:
|
||||
pstd = errorsP[4]
|
||||
fid.write('%-5s P1 %4.0f %02d %02d %02d %02d %05.02f %5.3f -999. 0.00 -999. 0.00\n'
|
||||
% (key, pyear, pmonth, pday, phh, pmm, Pss, pstd))
|
||||
# S onsets
|
||||
if 'S' in arrivals[key] and arrivals[key]['S']['mpp'] is not None:
|
||||
if arrivals[key]['S']['weight'] < 4:
|
||||
Sonset = arrivals[key]['S']['mpp']
|
||||
syear = Sonset.year
|
||||
smonth = Sonset.month
|
||||
sday = Sonset.day
|
||||
shh = Sonset.hour
|
||||
smm = Sonset.minute
|
||||
sss = Sonset.second
|
||||
sms = Sonset.microsecond
|
||||
Sss = sss + sms / 1000000.0
|
||||
sstd = arrivals[key]['S']['spe']
|
||||
if pstd is None:
|
||||
errorsS = parameter.get('timeerrorsS')
|
||||
if arrivals[key]['S']['weight'] == 0:
|
||||
pstd = errorsS[0]
|
||||
elif arrivals[key]['S']['weight'] == 1:
|
||||
pstd = errorsS[1]
|
||||
elif arrivals[key]['S']['weight'] == 2:
|
||||
pstd = errorsS[2]
|
||||
elif arrivals[key]['S']['weight'] == 3:
|
||||
psrd = errorsS[3]
|
||||
else:
|
||||
pstd = errorsP[4]
|
||||
fid.write('%-5s S1 %4.0f %02d %02d %02d %02d %05.02f %5.3f -999. 0.00 -999. 0.00\n'
|
||||
% (key, syear, smonth, sday, shh, smm, Sss, sstd))
|
||||
fid.close()
|
||||
def write_hyposat():
|
||||
with open(filename, 'w') as fid:
|
||||
fid.write('{}, event {} \n'.format(parameter.get('database'), parameter.get('eventID')))
|
||||
for key, value in arrivals.items():
|
||||
for phase in ['P', 'S']:
|
||||
if phase in value and value[phase].get('weight', 0) < 4:
|
||||
onset = value[phase]['mpp']
|
||||
ss_ms = onset.second + onset.microsecond / 1000000.0
|
||||
std = value[phase].get('spe', parameter.get('timeerrorsP')[value[phase].get('weight', 0)])
|
||||
fid.write(
|
||||
'{:<5} {}1 {:4} {:02} {:02} {:02} {:02} {:05.02f} {:5.3f} -999. 0.00 -999. 0.00\n'.format(
|
||||
key, phase, onset.year, onset.month, onset.day, onset.hour, onset.minute, ss_ms, std))
|
||||
|
||||
elif fformat == 'VELEST':
    print("Writing phases to %s for VELEST" % filename)
    fid = open("%s" % filename, 'w')
    # get information needed in cnv-file
    # check whether latitude is N or S and longitude is E or W
    try:
        eventsource = eventinfo.origins[0]
    except:

def write_velest():
    if not eventinfo:
        print("No source origin calculated yet, thus no cnv-file creation possible!")
        return
    if eventsource['latitude'] < 0:
        cns = 'S'
    else:
        cns = 'N'
    if eventsource['longitude'] < 0:
        cew = 'W'
    else:
        cew = 'E'
    # get last two integers of origin year
    stime = eventsource['time']
    if stime.year - 2000 >= 0:
        syear = stime.year - 2000
    else:
        syear = stime.year - 1900
    ifx = 0  # default value, see VELEST manual, pp. 22-23
    # write header
    fid.write('%s%02d%02d %02d%02d %05.2f %7.4f%c %8.4f%c %7.2f %6.2f %02.0f 0.0 0.03 1.0 1.0\n' % (
        syear, stime.month, stime.day, stime.hour, stime.minute, stime.second, eventsource['latitude'],
        cns, eventsource['longitude'], cew, eventsource['depth'], eventinfo.magnitudes[0]['mag'], ifx))
    n = 0
    # check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
    if isinstance(arrivals, dict) == False:
        # convert pick object (PyLoT) into dictionary
        evt = ope.Event(resource_id=eventinfo['resource_id'])
        evt.picks = arrivals
        arrivals = picksdict_from_picks(evt)
    # check for automatic and manual picks; prefer manual picks
    usedarrivals = chooseArrivals(arrivals)
    for key in usedarrivals:
        # P onsets
        if 'P' in usedarrivals[key]:
            if usedarrivals[key]['P']['weight'] < 4:
                n += 1
                stat = key
                if len(stat) > 4:  # VELEST handles only 4-string station IDs
                    stat = stat[1:5]
                Ponset = usedarrivals[key]['P']['mpp']
                Pweight = usedarrivals[key]['P']['weight']
                Prt = Ponset - stime  # onset time relative to source time
                if n % 6 != 0:
                    fid.write('%-4sP%d%6.2f' % (stat, Pweight, Prt))
                else:
                    fid.write('%-4sP%d%6.2f\n' % (stat, Pweight, Prt))
        # S onsets
        if 'S' in usedarrivals[key]:
            if usedarrivals[key]['S']['weight'] < 4:
                n += 1
                stat = key
                if len(stat) > 4:  # VELEST handles only 4-string station IDs
                    stat = stat[1:5]
                Sonset = usedarrivals[key]['S']['mpp']
                Sweight = usedarrivals[key]['S']['weight']
                Srt = Sonset - stime  # onset time relative to source time
                if n % 6 != 0:
                    fid.write('%-4sS%d%6.2f' % (stat, Sweight, Srt))
                else:
                    fid.write('%-4sS%d%6.2f\n' % (stat, Sweight, Srt))
    fid.close()

    with open(filename, 'w') as fid:
        origin = eventinfo.origins[0]
        lat_dir = 'S' if origin.latitude < 0 else 'N'
        lon_dir = 'W' if origin.longitude < 0 else 'E'
        year = origin.time.year - 2000 if origin.time.year >= 2000 else origin.time.year - 1900
        fid.write(
            '{}{}{} {}{} {} {:05.2f} {:7.4f}{} {:8.4f}{} {:7.2f} {:6.2f} {:02.0f} 0.0 0.03 1.0 1.0\n'.format(
                year, origin.time.month, origin.time.day, origin.time.hour, origin.time.minute, origin.time.second,
                origin.latitude, lat_dir, origin.longitude, lon_dir, origin.depth, eventinfo.magnitudes[0].mag, 0))
        for key, value in arrivals.items():
            for phase in ['P', 'S']:
                if phase in value and value[phase].get('weight', 0) < 4:
                    onset = value[phase]['mpp']
                    rt = (onset - origin.time).total_seconds()
                    fid.write('{:<4}{}{}{:6.2f}\n'.format(key[:4], phase, value[phase].get('weight', 0), rt))

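A note on the relative onset times above: ObsPy's UTCDateTime subtraction already yields seconds as a float, which is what the phase lines are built from. A minimal sketch with hypothetical values:

    from obspy import UTCDateTime

    origin_time = UTCDateTime('2016-01-01T12:00:00')  # hypothetical origin time
    onset = UTCDateTime('2016-01-01T12:00:03.25')     # hypothetical P onset ('mpp')
    rt = onset - origin_time                          # float seconds: 3.25
    print('{:<4}{}{}{:6.2f}'.format('STA1', 'P', 0, rt))  # -> 'STA1P0  3.25'
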
elif fformat == 'HYPODD':
    print("Writing phases to %s for hypoDD" % filename)
    fid = open("%s" % filename, 'w')
    # get event information needed for hypoDD-phase file
    try:
        eventsource = eventinfo.origins[0]
    except:

def write_hypodd():
    if not eventinfo:
        print("No source origin calculated yet, thus no hypoDD-infile creation possible!")
        return
    stime = eventsource['time']
    try:
        event = eventinfo['pylot_id']
        hddID = event.split('.')[0][1:5]
    except:
        print("Error: could not determine hypoDD event ID!")
        hddID = "00000"
    # write header
    fid.write('# %d %d %d %d %d %5.2f %7.4f +%6.4f %7.4f %4.2f 0.1 0.5 %4.2f %s\n' % (
        stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
        eventsource['latitude'], eventsource['longitude'], eventsource['depth'] / 1000,
        eventinfo.magnitudes[0]['mag'], eventsource['quality']['standard_error'], hddID))
    # check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
    if isinstance(arrivals, dict) == False:
        # convert pick object (PyLoT) into dictionary
        evt = ope.Event(resource_id=eventinfo['resource_id'])
        evt.picks = arrivals
        arrivals = picksdict_from_picks(evt)
    # check for automatic and manual picks; prefer manual picks
    usedarrivals = chooseArrivals(arrivals)
    for key in usedarrivals:
        if 'P' in usedarrivals[key]:
            # P onsets
            if usedarrivals[key]['P']['weight'] < 4:
                Ponset = usedarrivals[key]['P']['mpp']
                Prt = Ponset - stime  # onset time relative to source time
                fid.write('%s %6.3f 1 P\n' % (key, Prt))
        if 'S' in usedarrivals[key]:
            # S onsets
            if usedarrivals[key]['S']['weight'] < 4:
                Sonset = usedarrivals[key]['S']['mpp']
                Srt = Sonset - stime  # onset time relative to source time
                fid.write('%-5s %6.3f 1 S\n' % (key, Srt))

    with open(filename, 'w') as fid:
        origin = eventinfo.origins[0]
        stime = origin.time
        fid.write('# {} {} {} {} {} {} {:7.4f} +{:6.4f} {:7.4f} {:4.2f} 0.1 0.5 {:4.2f} {}\n'.format(
            stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
            origin.latitude, origin.longitude, origin.depth / 1000, eventinfo.magnitudes[0].mag,
            origin.quality.standard_error, "00000"))
        for key, value in arrivals.items():
            for phase in ['P', 'S']:
                if phase in value and value[phase].get('weight', 0) < 4:
                    onset = value[phase]['mpp']
                    rt = (onset - stime).total_seconds()
                    fid.write('{} {:6.3f} 1 {}\n'.format(key, rt, phase))

    fid.close()

elif fformat == 'FOCMEC':
    print("Writing phases to %s for FOCMEC" % filename)
    fid = open("%s" % filename, 'w')
    # get event information needed for FOCMEC-input file
    try:
        eventsource = eventinfo.origins[0]
    except:

def write_focmec():
    if not eventinfo:
        print("No source origin calculated yet, thus no FOCMEC-infile creation possible!")
        return
    stime = eventsource['time']

    # avoid printing '*' in focmec-input file
    if parameter.get('eventid') == '*' or parameter.get('eventid') is None:
        evID = 'e0000'
    else:
        evID = parameter.get('eventid')

    # write header line including event information
    fid.write('%s %d%02d%02d%02d%02d%02.0f %7.4f %6.4f %3.1f %3.1f\n' % (
        evID, stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
        eventsource['latitude'], eventsource['longitude'], eventsource['depth'] / 1000,
        eventinfo.magnitudes[0]['mag']))
    picks = eventinfo.picks
    # check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
    if isinstance(arrivals, dict) == False:
        # convert pick object (PyLoT) into dictionary
        evt = ope.Event(resource_id=eventinfo['resource_id'])
        evt.picks = arrivals
        arrivals = picksdict_from_picks(evt)
    # check for automatic and manual picks; prefer manual picks
    usedarrivals = chooseArrivals(arrivals)
    for key in usedarrivals:
        if 'P' in usedarrivals[key]:
            if usedarrivals[key]['P']['weight'] < 4 and usedarrivals[key]['P']['fm'] is not None:
                stat = key
                for i in range(len(picks)):
                    station = picks[i].waveform_id.station_code
                    if station == stat:
                        # get resource ID
                        resid_picks = picks[i].get('resource_id')
                        # find the same ID in eventinfo; there it is the pick_id!
                        for j in range(len(eventinfo.origins[0].arrivals)):
                            resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
                            if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
                                if len(stat) > 4:  # FOCMEC handles only 4-string station IDs
                                    stat = stat[1:5]
                                az = eventinfo.origins[0].arrivals[j].get('azimuth')
                                inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
                                fid.write('%-4s %6.2f %6.2f%s \n' % (stat, az, inz,
                                                                     usedarrivals[key]['P']['fm']))

    with open(filename, 'w') as fid:
        origin = eventinfo.origins[0]
        stime = origin.time
        fid.write('{} {}{:02d}{:02d}{:02d}{:02d}{:02.0f} {:7.4f} {:6.4f} {:3.1f} {:3.1f}\n'.format(
            parameter.get('eventid', 'e0000'), stime.year, stime.month, stime.day, stime.hour, stime.minute,
            stime.second, origin.latitude, origin.longitude, origin.depth / 1000, eventinfo.magnitudes[0].mag))
        for key, value in arrivals.items():
            if 'P' in value and value['P'].get('weight', 0) < 4 and value['P'].get('fm'):
                for pick in eventinfo.picks:
                    if pick.waveform_id.station_code == key:
                        for arrival in origin.arrivals:
                            if arrival.pick_id == pick.resource_id and arrival.phase == 'P':
                                stat = key[:4]
                                az = arrival.azimuth
                                inz = arrival.takeoff_angle
                                fid.write('{:<4} {:6.2f} {:6.2f}{}\n'.format(stat, az, inz, value['P']['fm']))
                                break

    fid.close()

def write_hash():
    # Define filenames for HASH driver 1 and 2
    filename1 = f"{filename}drv1.phase"
    filename2 = f"{filename}drv2.phase"

    print(f"Writing phases to {filename1} for HASH-driver 1")
    print(f"Writing phases to {filename2} for HASH-driver 2")

    # Open files for writing
    with open(filename1, 'w') as fid1, open(filename2, 'w') as fid2:
        # Get event information needed for HASH-input file
        try:
            eventsource = eventinfo.origins[0]
        except IndexError:
            print("No source origin calculated yet, thus no HASH-infile creation possible!")
            return

        event = parameter.get('eventID')
        hashID = event.split('.')[0][1:5]
        latdeg = eventsource['latitude']
        latmin = (eventsource['latitude'] * 60) / 10000
        londeg = eventsource['longitude']
        lonmin = (eventsource['longitude'] * 60) / 10000

        erh = (eventsource.origin_uncertainty['min_horizontal_uncertainty'] +
               eventsource.origin_uncertainty['max_horizontal_uncertainty']) / 2000
        erz = eventsource.depth_errors['uncertainty']

        stime = eventsource['time']
        syear = stime.year % 100  # calculate two-digit year

        picks = eventinfo.picks

        # Write header line including event information for HASH-driver 1
        fid1.write(f"{syear:02d}{stime.month:02d}{stime.day:02d}{stime.hour:02d}{stime.minute:02d}"
                   f"{stime.second:05.2f}{latdeg:2d}N{latmin:05.2f}{londeg:3d}E{lonmin:05.2f}"
                   f"{eventsource['depth']:6.2f}{eventinfo.magnitudes[0]['mag']:4.2f}{erh:5.2f}{erz:5.2f}{hashID}\n")

        # Write header line including event information for HASH-driver 2
        fid2.write(f"{syear:02d}{stime.month:02d}{stime.day:02d}{stime.hour:02d}{stime.minute:02d}"
                   f"{stime.second:05.2f}{latdeg}N{latmin:05.2f}{londeg}E{lonmin:6.2f}{eventsource['depth']:5.2f}"
                   f"{eventsource['quality']['used_phase_count']:3d}{erh:5.2f}{erz:5.2f}"
                   f"{eventinfo.magnitudes[0]['mag']:4.2f}{hashID}\n")

        # Write phase lines
        for key, arrival in arrivals.items():
            if 'P' in arrival and arrival['P']['weight'] < 4 and arrival['P']['fm'] is not None:
                stat = key
                ccode = arrival['P']['channel']
                ncode = arrival['P']['network']
                Pqual = 'I' if arrival['P']['weight'] < 2 else 'E'

                for pick in picks:
                    if pick.waveform_id.station_code == stat:
                        resid_picks = pick.get('resource_id')
                        for origin_arrival in eventinfo.origins[0].arrivals:
                            if (origin_arrival.get('pick_id') == resid_picks and
                                    origin_arrival.phase == 'P'):
                                if len(stat) > 4:  # HASH handles only 4-character station IDs
                                    stat = stat[1:5]

                                az = origin_arrival.get('azimuth')
                                inz = origin_arrival.get('takeoff_angle')
                                dist = origin_arrival.get('distance')

                                # Write phase line for HASH-driver 1
                                fid1.write(f"{stat:<4}{Pqual}P{arrival['P']['fm']}{arrival['P']['weight']:d}"
                                           f"{dist:3.1f}{inz:03d}{az:03d}{ccode}\n")

                                # Write phase line for HASH-driver 2
                                fid2.write(f"{stat:<4} {ncode} {ccode} {Pqual} {arrival['P']['fm']}\n")
                                break

        fid1.write(f"{'':<36}{hashID}")

# Prefer manual picks over automatic ones if possible (chooseArrivals is defined below)
arrivals = chooseArrivals(arrivals)

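The two-digit year in both HASH headers is simply the origin year modulo 100 (`syear = stime.year % 100`); for example:

    for year in (1998, 2016):
        print(f'{year % 100:02d}')  # -> '98', '16'
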
if fformat == 'NLLoc':
    write_nlloc()
elif fformat == 'HYPO71':
    write_hypo71()
elif fformat == 'HYPOSAT':
    write_hyposat()
elif fformat == 'VELEST':
    write_velest()
elif fformat == 'HYPODD':
    write_hypodd()
elif fformat == 'FOCMEC':
    write_focmec()
elif fformat == 'HASH':
    # two different input files for
    # HASH-driver 1 and 2 (see HASH manual!)
    filename1 = filename + 'drv1' + '.phase'
    filename2 = filename + 'drv2' + '.phase'
    print("Writing phases to %s for HASH-driver 1" % filename1)
    fid1 = open("%s" % filename1, 'w')
    print("Writing phases to %s for HASH-driver 2" % filename2)
    fid2 = open("%s" % filename2, 'w')
    # get event information needed for HASH-input file
    try:
        eventsource = eventinfo.origins[0]
    except:
        print("No source origin calculated yet, thus no HASH-infile creation possible!")
        return
    eventsource = eventinfo.origins[0]
    event = parameter.get('eventID')
    hashID = event.split('.')[0][1:5]
    latdeg = eventsource['latitude']
    latmin = eventsource['latitude'] * 60 / 10000
    londeg = eventsource['longitude']
    lonmin = eventsource['longitude'] * 60 / 10000
    erh = 1 / 2 * (eventsource.origin_uncertainty['min_horizontal_uncertainty'] +
                   eventsource.origin_uncertainty['max_horizontal_uncertainty']) / 1000
    erz = eventsource.depth_errors['uncertainty']
    stime = eventsource['time']
    if stime.year - 2000 >= 0:
        syear = stime.year - 2000
    else:
        syear = stime.year - 1900
    picks = eventinfo.picks
    # write header line including event information
    # for HASH-driver 1
    fid1.write('%s%02d%02d%02d%02d%5.2f%2dN%5.2f%3dE%5.2f%6.3f%4.2f%5.2f%5.2f%s\n' % (
        syear, stime.month, stime.day, stime.hour, stime.minute, stime.second,
        latdeg, latmin, londeg, lonmin, eventsource['depth'],
        eventinfo.magnitudes[0]['mag'], erh, erz, hashID))
    # write header line including event information
    # for HASH-driver 2
    fid2.write('%d%02d%02d%02d%02d%5.2f%dN%5.2f%3dE%6.2f%5.2f %d %5.2f %5.2f %4.2f %s \n' % (
        syear, stime.month, stime.day, stime.hour, stime.minute, stime.second,
        latdeg, latmin, londeg, lonmin, eventsource['depth'],
        eventsource['quality']['used_phase_count'], erh, erz, eventinfo.magnitudes[0]['mag'], hashID))
    # prefer manual picks over automatic ones if possible (chooseArrivals is defined below)
    arrivals = chooseArrivals(arrivals)
    # write phase lines
    for key in arrivals:
        if 'P' in arrivals[key]:
            if arrivals[key]['P']['weight'] < 4 and arrivals[key]['P']['fm'] is not None:
                stat = key
                ccode = arrivals[key]['P']['channel']
                ncode = arrivals[key]['P']['network']

                if arrivals[key]['P']['weight'] < 2:
                    Pqual = 'I'
                else:
                    Pqual = 'E'

                for i in range(len(picks)):
                    station = picks[i].waveform_id.station_code
                    if station == stat:
                        # get resource ID
                        resid_picks = picks[i].get('resource_id')
                        # find the same ID in eventinfo; there it is the pick_id!
                        for j in range(len(eventinfo.origins[0].arrivals)):
                            resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
                            if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
                                if len(stat) > 4:  # HASH handles only 4-string station IDs
                                    stat = stat[1:5]
                                az = eventinfo.origins[0].arrivals[j].get('azimuth')
                                inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
                                dist = eventinfo.origins[0].arrivals[j].get('distance')
                                # write phase line for HASH-driver 1
                                fid1.write('%-4s%sP%s%d 0 %3.1f %03d %03d 2 1 %s\n' % (
                                    stat, Pqual, arrivals[key]['P']['fm'], arrivals[key]['P']['weight'],
                                    dist, inz, az, ccode))
                                # write phase line for HASH-driver 2
                                fid2.write('%-4s %s %s %s %s \n' % (
                                    stat, ncode, ccode, Pqual, arrivals[key]['P']['fm']))
                                break

    fid1.write(' %s' % hashID)
    fid1.close()
    fid2.close()
    write_hash()

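The if/elif chain above dispatches on the format string; an equivalent table-driven sketch (illustrative only, not part of the commit) would be:

    writers = {'NLLoc': write_nlloc, 'HYPO71': write_hypo71, 'HYPOSAT': write_hyposat,
               'VELEST': write_velest, 'HYPODD': write_hypodd, 'FOCMEC': write_focmec,
               'HASH': write_hash}
    writer = writers.get(fformat)
    if writer is None:
        raise ValueError('unknown phase-file format: {}'.format(fformat))
    writer()
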
def chooseArrivals(arrivals):

@@ -1122,7 +670,7 @@ def getQualitiesfromxml(path, errorsP, errorsS, plotflag=1, figure=None, verbosi
            mstation = pick.waveform_id.station_code
            mstation_ext = mstation + '_'
            for mpick in arrivals_copy:
                phase = identifyPhase(loopIdentifyPhase(pick.phase_hint))  # MP MP catch if this fails?
                if ((mpick.waveform_id.station_code == mstation) or
                        (mpick.waveform_id.station_code == mstation_ext)) and \
                        (mpick.method_id.id.split('/')[1] == 'auto') and \

177
pylot/core/io/project.py
Normal file
@@ -0,0 +1,177 @@
import os

from obspy import UTCDateTime
from obspy.core.event import Magnitude, Origin

from pylot.core.util.event import Event


class Project(object):
    '''
    Pickable class containing information of a PyLoT project, like event lists and file locations.
    '''

    # TODO: remove rootpath
    def __init__(self):
        self.eventlist = []
        self.location = None
        self.rootpath = None
        self.datapath = None
        self.dirty = False
        self.parameter = None
        self._table = None

    def add_eventlist(self, eventlist):
        '''
        Add events from an eventlist containing paths to event directories.
        Will skip existing paths.
        '''
        if len(eventlist) == 0:
            return
        for item in eventlist:
            event = Event(item)
            event.rootpath = self.parameter['rootpath']
            event.database = self.parameter['database']
            event.datapath = self.parameter['datapath']
            if not event.path in self.getPaths():
                self.eventlist.append(event)
                self.setDirty()
            else:
                print('Skipping event with path {}. Already part of project.'.format(event.path))
        self.eventlist.sort(key=lambda x: x.pylot_id)
        self.search_eventfile_info()

    def remove_event(self, event):
        self.eventlist.remove(event)

    def remove_event_by_id(self, eventID):
        for event in self.eventlist:
            if eventID in str(event.resource_id):
                self.remove_event(event)
                break

    def read_eventfile_info(self, filename, separator=','):
        '''
        Try to read event information from file (:param:filename) comparing specific event datetimes.
        File structure (each row): event, date, time, magnitude, latitude, longitude, depth,
        separated by :param:separator each.
        '''
        with open(filename, 'r') as infile:
            for line in infile.readlines():
                eventID, date, time, mag, lat, lon, depth = line.split(separator)[:7]
                # skip first line
                try:
                    day, month, year = date.split('/')
                except:
                    continue
                year = int(year)
                # hardcoded, if year only consists of 2 digits (e.g. 16 instead of 2016)
                if year < 100:
                    year += 2000
                datetime = '{}-{}-{}T{}'.format(year, month, day, time)
                try:
                    datetime = UTCDateTime(datetime)
                except Exception as e:
                    print(e, datetime, filename)
                    continue
                for event in self.eventlist:
                    if eventID in str(event.resource_id) or eventID in event.origins:
                        if event.origins:
                            origin = event.origins[0]  # should have only one origin
                            if origin.time == datetime:
                                origin.latitude = float(lat)
                                origin.longitude = float(lon)
                                origin.depth = float(depth)
                            else:
                                continue
                        elif not event.origins:
                            origin = Origin(resource_id=event.resource_id,
                                            time=datetime, latitude=float(lat),
                                            longitude=float(lon), depth=float(depth))
                            event.origins.append(origin)
                        event.magnitudes.append(Magnitude(resource_id=event.resource_id,
                                                          mag=float(mag),
                                                          mag_type='M'))
                        break

    def search_eventfile_info(self):
        '''
        Search all datapaths in rootpath for filenames with given file extension fext
        and try to read event info from them.
        '''
        datapaths = []
        fext = '.csv'
        for event in self.eventlist:
            if not event.datapath in datapaths:
                datapaths.append(event.datapath)
        for datapath in datapaths:
            # datapath = os.path.join(self.rootpath, datapath)
            if os.path.isdir(datapath):
                for filename in os.listdir(datapath):
                    filename = os.path.join(datapath, filename)
                    if os.path.isfile(filename) and filename.endswith(fext):
                        try:
                            self.read_eventfile_info(filename)
                        except Exception as e:
                            print('Failed on reading eventfile info from file {}: {}'.format(filename, e))
            else:
                print("Directory %s does not exist!" % datapath)

    def getPaths(self):
        '''
        Returns paths (eventlist) of all events saved in the project.
        '''
        paths = []
        for event in self.eventlist:
            paths.append(event.path)
        return paths

    def setDirty(self, value=True):
        self.dirty = value

    def getEventFromPath(self, path):
        '''
        Search for an event in the project by event path.
        '''
        for event in self.eventlist:
            if event.path == path:
                return event

    def save(self, filename=None):
        '''
        Save PyLoT Project to a file.
        Can be loaded by using project.load(filename).
        '''
        try:
            import pickle
        except ImportError:
            import _pickle as pickle

        if filename:
            self.location = filename
        else:
            filename = self.location

        table = self._table  # MP: see below
        try:
            outfile = open(filename, 'wb')
            self._table = []  # MP: Workaround as long as table cannot be saved as part of project
            pickle.dump(self, outfile, protocol=pickle.HIGHEST_PROTOCOL)
            self.setDirty(False)
            self._table = table  # MP: see above
            return True
        except Exception as e:
            print('Could not pickle PyLoT project. Reason: {}'.format(e))
            self.setDirty()
            self._table = table  # MP: see above
            return False

    @staticmethod
    def load(filename):
        '''
        Load project from filename.
        '''
        import pickle
        infile = open(filename, 'rb')
        project = pickle.load(infile)
        infile.close()
        project.location = filename
        print('Loaded %s' % filename)
        return project
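A minimal usage sketch for the new Project class (file and event paths are hypothetical):

    from pylot.core.io.project import Project

    project = Project.load('my_project.plp')            # unpickle an existing project
    print(project.getPaths())                           # event directories known to the project
    project.add_eventlist(['/data/events/e0001.2016'])  # skips paths already in the project
    project.save()                                      # re-pickle to self.location; True on success
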
13
pylot/core/io/utils.py
Normal file
@@ -0,0 +1,13 @@
import os
from typing import List


def validate_filenames(filenames: List[str]) -> List[str]:
    """
    Validate a list of filenames for file existence.
    :param filenames: list of possible filenames
    :type filenames: List[str]
    :return: list of valid filenames
    :rtype: List[str]
    """
    return [fn for fn in filenames if os.path.isfile(fn)]
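Usage sketch (paths hypothetical): validate_filenames silently drops anything that is not a regular file:

    from pylot.core.io.utils import validate_filenames

    candidates = ['/data/e0001/trace1.mseed', '/data/e0001/missing.mseed']
    existing = validate_filenames(candidates)  # only the files that exist survive
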
123
pylot/core/io/waveformdata.py
Normal file
@@ -0,0 +1,123 @@
import logging
from dataclasses import dataclass, field
from typing import Union, List

from obspy import Stream, read
from obspy.io.sac import SacIOError

from pylot.core.io.utils import validate_filenames
from pylot.core.util.dataprocessing import Metadata
from pylot.core.util.utils import get_stations, check_for_nan, check4rotated


@dataclass
class WaveformData:
    wfdata: Stream = field(default_factory=Stream)
    wforiginal: Union[Stream, None] = None
    wf_alt: Stream = field(default_factory=Stream)
    dirty: bool = False

    def load_waveforms(self, fnames: List[str], fnames_alt: List[str] = None, check_rotated=False,
                       metadata=None, tstart=0, tstop=0):
        fn_list = validate_filenames(fnames)
        if not fn_list:
            logging.warning('No valid filenames given for loading waveforms')
        else:
            self.clear()
            self.add_waveforms(fn_list)

        # initialize to avoid a NameError below when no alternative filenames are given
        alt_fn_list = []
        if fnames_alt is not None:
            alt_fn_list = validate_filenames(fnames_alt)
            if not alt_fn_list:
                logging.warning('No valid alternative filenames given for loading waveforms')
            else:
                self.add_waveforms(alt_fn_list, alternative=True)

        if not fn_list and not alt_fn_list:
            logging.error('No filenames or alternative filenames given for loading waveforms')
            return False

        self.merge()
        self.replace_nan()
        if check_rotated and metadata:
            self.rotate_zne(metadata)  # pass the metadata on so check4rotated can use it
        self.trim_station_traces()
        self.wforiginal = self.wfdata.copy()
        self.dirty = False
        return True

    def add_waveforms(self, fnames: List[str], alternative: bool = False):
        data_stream = self.wf_alt if alternative else self.wfdata
        warnmsg = ''
        for fname in set(fnames):
            try:
                data_stream += read(fname)
            except TypeError:
                try:
                    data_stream += read(fname, format='GSE2')
                except Exception:
                    try:
                        data_stream += read(fname, format='SEGY')
                    except Exception as e:
                        warnmsg += f'{fname}\n{e}\n'
            except SacIOError as se:
                warnmsg += f'{fname}\n{se}\n'

        if warnmsg:
            print(f'WARNING in add_waveforms: unable to read waveform data\n{warnmsg}')

    def clear(self):
        self.wfdata = Stream()
        self.wforiginal = None
        self.wf_alt = Stream()

    def reset(self):
        """
        Reset the waveform data to its original state.
        """
        if self.wforiginal:
            self.wfdata = self.wforiginal.copy()
        else:
            self.wfdata = Stream()
        self.dirty = False

    def merge(self):
        """
        Check for gaps in Stream and merge if gaps are found.
        """
        gaps = self.wfdata.get_gaps()
        if gaps:
            merged = ['{}.{}.{}.{}'.format(*gap[:4]) for gap in gaps]
            self.wfdata.merge(method=1)
            logging.info('Merged the following stations because of gaps:')
            for station in merged:
                logging.info(station)

    def replace_nan(self):
        """
        Replace all NaNs in data with 0. (in place)
        """
        self.wfdata = check_for_nan(self.wfdata)

    def rotate_zne(self, metadata: Metadata = None):
        """
        Check all traces in stream for rotation. If a trace is not in ZNE rotation (last symbol of channel
        code is numeric) and the trace is in the metadata with azimuth and dip, rotate it to classical ZNE
        orientation. Rotating the traces requires them to be of the same length, so all traces will be
        trimmed to a common length as a side effect.
        """
        self.wfdata = check4rotated(self.wfdata, metadata)

    def trim_station_traces(self):
        """
        Trim data stream to a common time window.
        """
        for station in get_stations(self.wfdata):
            station_traces = self.wfdata.select(station=station)
            station_traces.trim(starttime=max([trace.stats.starttime for trace in station_traces]),
                                endtime=min([trace.stats.endtime for trace in station_traces]))
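A usage sketch for WaveformData (filename hypothetical): load_waveforms validates the input paths, reads and merges the traces, replaces NaNs, optionally rotates to ZNE, trims to a common window, and keeps an unmodified copy in wforiginal:

    from pylot.core.io.waveformdata import WaveformData

    wfd = WaveformData()
    if wfd.load_waveforms(fnames=['/data/e0001/trace1.mseed']):
        wfd.wfdata.filter('bandpass', freqmin=1., freqmax=10.)
        wfd.reset()  # restore the unfiltered copy kept in wforiginal
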
@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()
@@ -25,4 +25,4 @@ def export(picks, fnout, parameter, eventinfo):
    :type eventinfo: list object
    '''
    # write phases to FOCMEC-phase file
    writephases(picks, 'FOCMEC', fnout, parameter, eventinfo)
    write_phases(picks, 'FOCMEC', fnout, parameter, eventinfo)

@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()
@@ -25,4 +25,4 @@ def export(picks, fnout, parameter, eventinfo):
    :type eventinfo: list object
    '''
    # write phases to HASH-phase file
    writephases(picks, 'HASH', fnout, parameter, eventinfo)
    write_phases(picks, 'HASH', fnout, parameter, eventinfo)

@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()
@@ -22,4 +22,4 @@ def export(picks, fnout, parameter):
    :type parameter: object
    '''
    # write phases to HYPO71-phase file
    writephases(picks, 'HYPO71', fnout, parameter)
    write_phases(picks, 'HYPO71', fnout, parameter)

@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()
@@ -25,4 +25,4 @@ def export(picks, fnout, parameter, eventinfo):
    :type eventinfo: list object
    '''
    # write phases to hypoDD-phase file
    writephases(picks, 'HYPODD', fnout, parameter, eventinfo)
    write_phases(picks, 'HYPODD', fnout, parameter, eventinfo)

@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()
@@ -22,4 +22,4 @@ def export(picks, fnout, parameter):
    :type parameter: object
    '''
    # write phases to HYPOSAT-phase file
    writephases(picks, 'HYPOSAT', fnout, parameter)
    write_phases(picks, 'HYPOSAT', fnout, parameter)

@@ -7,7 +7,7 @@ import subprocess

from obspy import read_events

from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.gui import which
from pylot.core.util.utils import getPatternLine, runProgram
from pylot.core.util.version import get_git_version as _getVersionString
@@ -34,7 +34,7 @@ def export(picks, fnout, parameter):
    :type parameter: object
    '''
    # write phases to NLLoc-phase file
    writephases(picks, 'NLLoc', fnout, parameter)
    write_phases(picks, 'NLLoc', fnout, parameter)


def modify_inputs(ctrfn, root, nllocoutn, phasefn, tttn):

@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()
@@ -25,4 +25,4 @@ def export(picks, fnout, eventinfo, parameter=None):
    :type parameter: object
    '''
    # write phases to VELEST-phase file
    writephases(picks, 'VELEST', fnout, parameter, eventinfo)
    write_phases(picks, 'VELEST', fnout, parameter, eventinfo)

@@ -474,8 +474,8 @@ class Array_map(QtWidgets.QWidget):
                                transform=ccrs.PlateCarree(), label='deleted'))

    def openPickDlg(self, ind):
        wfdata = self._parent.get_data().getWFData()
        wfdata_comp = self._parent.get_data().getAltWFdata()
        wfdata = self._parent.get_data().get_wf_data()
        wfdata_comp = self._parent.get_data().get_wf_dataComp()
        for index in ind:
            network, station = self._station_onpick_ids[index].split('.')[:2]
            pyl_mw = self._parent
@@ -490,6 +490,7 @@ class Array_map(QtWidgets.QWidget):
                              picks=self._parent.get_current_event().getPick(station),
                              autopicks=self._parent.get_current_event().getAutopick(station),
                              filteroptions=self._parent.filteroptions, metadata=self.metadata,
                              model=self.parameter.get('taup_model'),
                              event=pyl_mw.get_current_event())
            except Exception as e:
                message = 'Could not generate Plot for station {st}.\n {er}'.format(st=station, er=e)

@@ -338,19 +338,6 @@ class Metadata(object):
        return inv, exc


def time_from_header(header):
    """
    Function takes in the second line from a .gse file and takes out the date and time from that line.
    :param header: second line from .gse file
    :type header: string
    :return: a list of integers of form [year, month, day, hour, minute, second, microsecond]
    """
    timeline = header.split(' ')
    time = timeline[1].split('/') + timeline[2].split(':')
    time = time[:-1] + time[-1].split('.')
    return [int(t) for t in time]


def check_time(datetime):
    """
    Function takes in date and time as a list and validates its values by trying to make a UTCDateTime object from it
@@ -388,167 +375,6 @@ def check_time(datetime):
    return False


# TODO: change root to datapath
def get_file_list(root_dir):
    """
    Function uses a directory to get all the *.gse files from it.
    :param root_dir: a directory leading to the .gse files
    :type root_dir: string
    :return: returns a list of filenames (without path to them)
    """
    file_list = glob.glob1(root_dir, '*.gse')
    return file_list


def checks_station_second(datetime, file):
    """
    Function uses the given list to check if the parameter 'second' is set to 60 by mistake
    and sets the time correctly if so. Can only correct time if no date change would be necessary.
    :param datetime: [year, month, day, hour, minute, second, microsecond]
    :return: returns the input with the correct value for second
    """
    if datetime[5] == 60:
        if datetime[4] == 59:
            if datetime[3] == 23:
                err_msg = 'Date should be next day. ' \
                          'File not changed: {0}'.format(file)
                raise ValueError(err_msg)
            else:
                datetime[3] += 1
                datetime[4] = 0
                datetime[5] = 0
        else:
            datetime[4] += 1
            datetime[5] = 0
    return datetime


def make_time_line(line, datetime):
    """
    Function takes in the original line from a .gse file and a list of date and
    time values to make a new line with corrected date and time.
    :param line: second line from .gse file.
    :type line: string
    :param datetime: list of integers [year, month, day, hour, minute, second, microsecond]
    :type datetime: list
    :return: returns a string to write it into a file.
    """
    ins_form = '{0:02d}:{1:02d}:{2:02d}.{3:03d}'
    insertion = ins_form.format(int(datetime[3]),
                                int(datetime[4]),
                                int(datetime[5]),
                                int(datetime[6] * 1e-3))
    newline = line[:16] + insertion + line[28:]
    return newline


def evt_head_check(root_dir, out_dir=None):
    """
    A function to make sure that an arbitrary number of .gse files have correct values in their header.
    :param root_dir: a directory leading to the .gse files.
    :type root_dir: string
    :param out_dir: a directory to store the new files somewhere else.
    :return: returns nothing
    """
    if not out_dir:
        print('WARNING files are going to be overwritten!')
        inp = str(input('Continue? [y/N]'))
        if not inp == 'y':
            sys.exit()
    filelist = get_file_list(root_dir)
    nfiles = 0
    for file in filelist:
        infile = open(os.path.join(root_dir, file), 'r')
        lines = infile.readlines()
        infile.close()
        datetime = time_from_header(lines[1])
        if check_time(datetime):
            continue
        else:
            nfiles += 1
            datetime = checks_station_second(datetime, file)
            print('writing ' + file)
            # write file
            lines[1] = make_time_line(lines[1], datetime)
            if not out_dir:
                out = open(os.path.join(root_dir, file), 'w')
                out.writelines(lines)
                out.close()
            else:
                out = open(os.path.join(out_dir, file), 'w')
                out.writelines(lines)
                out.close()
    print(nfiles)


def read_metadata(path_to_inventory):
    """
    Take path_to_inventory and return either the corresponding list of files
    found or the Parser object for a network dataless seed volume to prevent
    read overhead for large dataless seed volumes.
    :param path_to_inventory:
    :return: tuple containing either a list of files or an `obspy.io.xseed.Parser`
        object and the inventory type found
    :rtype: tuple
    """
    dlfile = list()
    invfile = list()
    respfile = list()
    # possible file extensions specified here:
    inv = dict(dless=dlfile, xml=invfile, resp=respfile, dseed=dlfile[:])
    if os.path.isfile(path_to_inventory):
        ext = os.path.splitext(path_to_inventory)[1].split('.')[1]
        inv[ext] += [path_to_inventory]
    else:
        for ext in inv.keys():
            inv[ext] += glob.glob1(path_to_inventory, '*.{0}'.format(ext))

    invtype = key_for_set_value(inv)

    if invtype is None:
        print("Neither dataless-SEED file, inventory-xml file nor "
              "RESP-file found!")
        print("!!WRONG CALCULATION OF SOURCE PARAMETERS!!")
        robj = None
    elif invtype == 'dless':  # prevent multiple read of large dlsv
        print("Reading metadata information from dataless-SEED file ...")
        if len(inv[invtype]) == 1:
            fullpath_inv = os.path.join(path_to_inventory, inv[invtype][0])
            robj = Parser(fullpath_inv)
        else:
            robj = inv[invtype]
    else:
        print("Reading metadata information from inventory-xml file ...")
        robj = read_inventory(inv[invtype])
    return invtype, robj


# idea to optimize read_metadata
# def read_metadata_new(path_to_inventory):
#     metadata_objects = []
#     # read multiple files from directory
#     if os.path.isdir(path_to_inventory):
#         fnames = os.listdir(path_to_inventory)
#     # read single file
#     elif os.path.isfile(path_to_inventory):
#         fnames = [path_to_inventory]
#     else:
#         print("Neither dataless-SEED file, inventory-xml file nor "
#               "RESP-file found!")
#         print("!!WRONG CALCULATION OF SOURCE PARAMETERS!!")
#         fnames = []
#
#     for fname in fnames:
#         path_to_inventory_filename = os.path.join(path_to_inventory, fname)
#         try:
#             ftype, robj = read_metadata_file(path_to_inventory_filename)
#             metadata_objects.append((ftype, robj))
#         except Exception as e:
#             print('Could not read metadata file {} '
#                   'because of the following Exception: {}'.format(path_to_inventory_filename, e))
#     return metadata_objects


def restitute_trace(input_tuple):
    def no_metadata(tr, seed_id):
        print('no metadata file found '
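A worked example of the removed GSE header helpers (header string hypothetical and single-space separated, which is the layout time_from_header expects):

    header = 'WID2 2016/01/01 23:59:60.250 BHZ'
    dt = time_from_header(header)       # -> [2016, 1, 1, 23, 59, 60, 250]
    check_time(dt)                      # False: second == 60 is not a valid UTCDateTime
    checks_station_second(dt, 'a.gse')  # raises ValueError: date would roll over to the next day
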
@@ -20,7 +20,7 @@ import matplotlib

matplotlib.use('Qt5Agg')
sys.path.append(os.path.join('/'.join(sys.argv[0].split('/')[:-1]), '../../..'))

from PyLoT import Project
from pylot.core.io.project import Project
from pylot.core.util.dataprocessing import Metadata
from pylot.core.util.array_map import Array_map

@@ -6,7 +6,7 @@ Created on Wed Jan 26 17:47:25 2015
@author: sebastianw
"""

from pylot.core.io.data import SeiscompDataStructure, PilotDataStructure, ObspyDMTdataStructure
from pylot.core.io.data import SeiscompDataStructure, PilotDataStructure

DATASTRUCTURE = {'PILOT': PilotDataStructure, 'SeisComP': SeiscompDataStructure,
                 'obspyDMT': ObspyDMTdataStructure, None: PilotDataStructure}
                 'obspyDMT': PilotDataStructure, None: PilotDataStructure}

@@ -8,7 +8,6 @@ import platform
import re
import subprocess
import warnings
from typing import Literal, Tuple, Type
from functools import lru_cache

import numpy as np
@@ -107,7 +106,7 @@ def gen_Pool(ncores=0):

    print('gen_Pool: Generated multiprocessing Pool with {} cores\n'.format(ncores))

    pool = multiprocessing.Pool(ncores, maxtasksperchild=100)
    pool = multiprocessing.Pool(ncores)
    return pool

@@ -358,8 +357,10 @@ def get_bool(value):
    False
    >>> get_bool(None)
    None
    >>> get_bool('Stream')
    'Stream'
    """
    if type(value) is bool:
    if type(value) == bool:
        return value
    elif value in ['True', 'true']:
        return True
@@ -900,6 +901,19 @@ def trim_station_components(data, trim_start=True, trim_end=True):
    return data


def merge_stream(stream):
    gaps = stream.get_gaps()
    if gaps:
        # list of merged stations (seed_ids)
        merged = ['{}.{}.{}.{}'.format(*gap[:4]) for gap in gaps]
        stream.merge(method=1)
        print('Merged the following stations because of gaps:')
        for merged_station in merged:
            print(merged_station)

    return stream, gaps


def check4gapsAndRemove(data):
    """
    check for gaps in Stream and remove them
@@ -920,12 +934,12 @@ def check4gapsAndRemove(data):
    return data


def check_for_gaps_and_merge(data):
def check4gapsAndMerge(data):
    """
    check for gaps in Stream and merge if gaps are found
    :param data: stream of seismic data
    :type data: `~obspy.core.stream.Stream`
    :return: data stream, gaps returned from obspy get_gaps
    :return: data stream
    :rtype: `~obspy.core.stream.Stream`
    """
    gaps = data.get_gaps()
@@ -936,7 +950,7 @@ def check_for_gaps_and_merge(data):
        for merged_station in merged:
            print(merged_station)

    return data, gaps
    return data

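Usage sketch for the renamed gap handling (the ObsPy demo stream is gap-free, so it passes through unchanged):

    from obspy import read

    stream = read()                      # ObsPy example stream
    stream = check4gapsAndMerge(stream)  # gappy traces would be merged with method=1
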
def check4doubled(data):

@@ -1204,7 +1218,6 @@ def identifyPhase(phase):
    return False


@lru_cache
def identifyPhaseID(phase):
    """
    Returns phase id (capital P or S)

@@ -140,18 +140,6 @@ class LogWidget(QtWidgets.QWidget):
        self.stderr.append(60 * '#' + '\n\n')


def getDataType(parent):
    type = QInputDialog().getItem(parent, "Select phases type", "Type:",
                                  ["manual", "automatic"])

    if type[0].startswith('auto'):
        type = 'auto'
    else:
        type = type[0]

    return type


def plot_pdf(_axes, x, y, annotation, bbox_props, xlabel=None, ylabel=None,
             title=None):
    # try method or data
@@ -1009,15 +997,6 @@ class WaveformWidgetPG(QtWidgets.QWidget):
            time_ax = np.linspace(time_ax[0], time_ax[-1], num=len(data))
        return data, time_ax

    # def getAxes(self):
    #     return self.axes

    # def getXLims(self):
    #     return self.getAxes().get_xlim()

    # def getYLims(self):
    #     return self.getAxes().get_ylim()

    def setXLims(self, lims):
        vb = self.plotWidget.getPlotItem().getViewBox()
        vb.setXRange(float(lims[0]), float(lims[1]), padding=0)
@@ -1171,8 +1150,6 @@ class PylotCanvas(FigureCanvas):
                break
        if not ax_check: return

        # self.updateCurrentLimits()  # maybe put this down to else:

        # calculate delta (relative values in axis)
        old_x, old_y = self.press_rel
        xdiff = gui_event.x - old_x
@@ -1380,125 +1357,140 @@ class PylotCanvas(FigureCanvas):
                   component='*', nth_sample=1, iniPick=None, verbosity=0,
                   plot_additional=False, additional_channel=None, scaleToChannel=None,
                   snr=None):
        def get_wf_dict(data: Stream = Stream(), linecolor='k', offset: float = 0., **plot_kwargs):
            return dict(data=data, linecolor=linecolor, offset=offset, plot_kwargs=plot_kwargs)
        ax = self.prepare_plot()
        self.clearPlotDict()

        wfstart, wfend = self.get_wf_range(wfdata)
        compclass = self.get_comp_class()
        plot_streams = self.get_plot_streams(wfdata, wfdata_compare, component, compclass)

        st_main = plot_streams['wfdata']['data']
        if mapping:
            plot_positions = self.calcPlotPositions(st_main, compclass)

        nslc = self.get_sorted_nslc(st_main)
        nmax = self.plot_traces(ax, plot_streams, nslc, wfstart, mapping, plot_positions,
                                scaleToChannel, noiselevel, scaleddata, nth_sample, verbosity)

        if plot_additional and additional_channel:
            self.plot_additional_trace(ax, wfdata, additional_channel, scaleToChannel,
                                       scaleddata, nth_sample, wfstart)

        self.finalize_plot(ax, wfstart, wfend, nmax, zoomx, zoomy, iniPick, title, snr)

    def prepare_plot(self):
        ax = self.axes[0]
        ax.cla()
        return ax

        self.clearPlotDict()
        wfstart, wfend = full_range(wfdata)
        nmax = 0
    def get_wf_range(self, wfdata):
        return full_range(wfdata)

    def get_comp_class(self):
        settings = QSettings()
        compclass = SetChannelComponents.from_qsettings(settings)
        return SetChannelComponents.from_qsettings(settings)

    def get_plot_streams(self, wfdata, wfdata_compare, component, compclass):
        def get_wf_dict(data=Stream(), linecolor='k', offset=0., **plot_kwargs):
            return dict(data=data, linecolor=linecolor, offset=offset, plot_kwargs=plot_kwargs)

        linecolor = (0., 0., 0., 1.) if not self.style else self.style['linecolor']['rgba_mpl']
        plot_streams = {
            'wfdata': get_wf_dict(linecolor=linecolor, linewidth=0.7),
            'wfdata_comp': get_wf_dict(offset=0.1, linecolor='b', alpha=0.7, linewidth=0.5)
        }

        plot_streams = dict(wfdata=get_wf_dict(linecolor=linecolor, linewidth=0.7),
                            wfdata_comp=get_wf_dict(offset=0.1, linecolor='b', alpha=0.7, linewidth=0.5))

        if not component == '*':
        if component != '*':
            alter_comp = compclass.getCompPosition(component)
            # alter_comp = str(alter_comp[0])

            plot_streams['wfdata']['data'] = wfdata.select(component=component)
            plot_streams['wfdata']['data'] += wfdata.select(component=alter_comp)
            plot_streams['wfdata']['data'] = wfdata.select(component=component) + wfdata.select(component=alter_comp)
            if wfdata_compare:
                plot_streams['wfdata_comp']['data'] = wfdata_compare.select(component=component)
                plot_streams['wfdata_comp']['data'] += wfdata_compare.select(component=alter_comp)
                plot_streams['wfdata_comp']['data'] = wfdata_compare.select(
                    component=component) + wfdata_compare.select(component=alter_comp)
        else:
            plot_streams['wfdata']['data'] = wfdata
            if wfdata_compare:
                plot_streams['wfdata_comp']['data'] = wfdata_compare

        st_main = plot_streams['wfdata']['data']
        return plot_streams

        if mapping:
            plot_positions = self.calcPlotPositions(st_main, compclass)

        # list containing tuples of network, station, channel and plot position (for sorting)
        nslc = []
        for plot_pos, trace in enumerate(st_main):
            if not trace.stats.channel[-1] in ['Z', 'N', 'E', '1', '2', '3']:
                print('Warning: Unrecognized channel {}'.format(trace.stats.channel))
                continue
            nslc.append(trace.get_id())
        nslc.sort()
        nslc.reverse()
    def get_sorted_nslc(self, st_main):
        nslc = [trace.get_id() for trace in st_main if trace.stats.channel[-1] in ['Z', 'N', 'E', '1', '2', '3']]
        nslc.sort(reverse=True)
        return nslc

    def plot_traces(self, ax, plot_streams, nslc, wfstart, mapping, plot_positions, scaleToChannel, noiselevel,
                    scaleddata, nth_sample, verbosity):
        nmax = 0
        for n, seed_id in enumerate(nslc):
            network, station, location, channel = seed_id.split('.')
            for wf_name, wf_dict in plot_streams.items():
                st_select = wf_dict.get('data')
                if not st_select:
                    continue
                st = st_select.select(id=seed_id)
                trace = st[0].copy()
                trace = st_select.select(id=seed_id)[0].copy()
                if mapping:
                    n = plot_positions[trace.stats.channel]
                if n > nmax:
                    nmax = n
                if verbosity:
                    msg = 'plotting %s channel of station %s' % (channel, station)
                    print(msg)
                stime = trace.stats.starttime - wfstart
                time_ax = prep_time_axis(stime, trace)
                if time_ax is not None:
                    if scaleToChannel:
                        st_scale = wfdata.select(channel=scaleToChannel)
                        if st_scale:
                            tr = st_scale[0]
                            trace.detrend('constant')
                            trace.normalize(np.max(np.abs(tr.data)) * 2)
                            scaleddata = True
                    if not scaleddata:
                        trace.detrend('constant')
                        trace.normalize(np.max(np.abs(trace.data)) * 2)
                    print(f'plotting {channel} channel of station {station}')
                time_ax = prep_time_axis(trace.stats.starttime - wfstart, trace)
                self.plot_trace(ax, trace, time_ax, wf_dict, n, scaleToChannel, noiselevel, scaleddata, nth_sample)
            self.setPlotDict(n, seed_id)
        return nmax

                    offset = wf_dict.get('offset')
    def plot_trace(self, ax, trace, time_ax, wf_dict, n, scaleToChannel, noiselevel, scaleddata, nth_sample):
        if time_ax is not None:
            if scaleToChannel:
                self.scale_trace(trace, scaleToChannel)
                scaleddata = True
            if not scaleddata:
                trace.detrend('constant')
                trace.normalize(np.max(np.abs(trace.data)) * 2)
            offset = wf_dict.get('offset')

            times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
            data = [datum + n + offset for index, datum in enumerate(trace.data) if not index % nth_sample]
            ax.axhline(n, color="0.5", lw=0.5)
            ax.plot(times, data, color=wf_dict.get('linecolor'), **wf_dict.get('plot_kwargs'))
            if noiselevel is not None:
                for level in [-noiselevel[channel], noiselevel[channel]]:
                    ax.plot([time_ax[0], time_ax[-1]],
                            [n + level, n + level],
                            color=wf_dict.get('linecolor'),
                            linestyle='dashed')
            self.setPlotDict(n, seed_id)
        if plot_additional and additional_channel:
            compare_stream = wfdata.select(channel=additional_channel)
            if compare_stream:
                trace = compare_stream[0]
                if scaleToChannel:
                    st_scale = wfdata.select(channel=scaleToChannel)
                    if st_scale:
                        tr = st_scale[0]
                        trace.detrend('constant')
                        trace.normalize(np.max(np.abs(tr.data)) * 2)
                        scaleddata = True
                if not scaleddata:
                    trace.detrend('constant')
                    trace.normalize(np.max(np.abs(trace.data)) * 2)
                time_ax = prep_time_axis(stime, trace)
                times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
                p_data = compare_stream[0].data
                # #normalize
                # p_max = max(abs(p_data))
                # p_data /= p_max
                for index in range(3):
                    ax.plot(times, p_data, color='red', alpha=0.5, linewidth=0.7)
                    p_data += 1
            times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
            data = [datum + n + offset for index, datum in enumerate(trace.data) if not index % nth_sample]
            ax.axhline(n, color="0.5", lw=0.5)
            ax.plot(times, data, color=wf_dict.get('linecolor'), **wf_dict.get('plot_kwargs'))
            if noiselevel is not None:
                self.plot_noise_level(ax, time_ax, noiselevel, channel, n, wf_dict.get('linecolor'))

    def scale_trace(self, trace, scaleToChannel):
        st_scale = wfdata.select(channel=scaleToChannel)
        if st_scale:
            tr = st_scale[0]
            trace.detrend('constant')
            trace.normalize(np.max(np.abs(tr.data)) * 2)

    def plot_noise_level(self, ax, time_ax, noiselevel, channel, n, linecolor):
        for level in [-noiselevel[channel], noiselevel[channel]]:
            ax.plot([time_ax[0], time_ax[-1]], [n + level, n + level], color=linecolor, linestyle='dashed')

    def plot_additional_trace(self, ax, wfdata, additional_channel, scaleToChannel, scaleddata, nth_sample, wfstart):
        compare_stream = wfdata.select(channel=additional_channel)
        if compare_stream:
            trace = compare_stream[0]
            if scaleToChannel:
                self.scale_trace(trace, scaleToChannel)
                scaleddata = True
            if not scaleddata:
                trace.detrend('constant')
                trace.normalize(np.max(np.abs(trace.data)) * 2)
            time_ax = prep_time_axis(trace.stats.starttime - wfstart, trace)
            self.plot_additional_data(ax, trace, time_ax, nth_sample)

    def plot_additional_data(self, ax, trace, time_ax, nth_sample):
        times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
        p_data = trace.data
        for index in range(3):
            ax.plot(times, p_data, color='red', alpha=0.5, linewidth=0.7)
            p_data += 1

    def finalize_plot(self, ax, wfstart, wfend, nmax, zoomx, zoomy, iniPick, title, snr):
        if iniPick:
            ax.vlines(iniPick, ax.get_ylim()[0], ax.get_ylim()[1],
                      colors='m', linestyles='dashed',
                      linewidth=2)
        xlabel = 'seconds since {0}'.format(wfstart)
            ax.vlines(iniPick, ax.get_ylim()[0], ax.get_ylim()[1], colors='m', linestyles='dashed', linewidth=2)
        xlabel = f'seconds since {wfstart}'
        ylabel = ''
        self.updateWidget(xlabel, ylabel, title)
        self.setXLims(ax, [0, wfend - wfstart])
@@ -1508,15 +1500,14 @@ class PylotCanvas(FigureCanvas):
        if zoomy is not None:
            self.setYLims(ax, zoomy)
        if snr is not None:
            if snr < 2:
                warning = 'LOW SNR'
                if snr < 1.5:
                    warning = 'VERY LOW SNR'
                ax.text(0.1, 0.9, 'WARNING - {}'.format(warning), ha='center', va='center', transform=ax.transAxes,
                        color='red')

            self.plot_snr_warning(ax, snr)
        self.draw()

    def plot_snr_warning(self, ax, snr):
        if snr < 2:
            warning = 'LOW SNR' if snr >= 1.5 else 'VERY LOW SNR'
            ax.text(0.1, 0.9, f'WARNING - {warning}', ha='center', va='center', transform=ax.transAxes, color='red')

    @staticmethod
    def getXLims(ax):
        return ax.get_xlim()
@@ -1870,14 +1861,13 @@ class PickDlg(QDialog):
|
||||
|
||||
def __init__(self, parent=None, data=None, data_compare=None, station=None, network=None, location=None, picks=None,
|
||||
autopicks=None, rotate=False, parameter=None, embedded=False, metadata=None, show_comp_data=False,
|
||||
event=None, filteroptions=None, wftype=None):
|
||||
event=None, filteroptions=None, model=None, wftype=None):
|
||||
super(PickDlg, self).__init__(parent, Qt.Window)
|
||||
self.orig_parent = parent
|
||||
self.setAttribute(Qt.WA_DeleteOnClose)
|
||||
|
||||
# initialize attributes
|
||||
self.parameter = parameter
|
||||
model = self.parameter.get('taup_model')
|
||||
self._embedded = embedded
|
||||
self.showCompData = show_comp_data
|
||||
self.station = station
|
||||
@@ -1922,7 +1912,7 @@ class PickDlg(QDialog):
|
||||
# set attribute holding data
|
||||
if data is None or not data:
|
||||
try:
|
||||
data = parent.get_data().getWFData().copy()
|
||||
data = parent.get_data().get_wf_data().copy()
|
||||
self.data = data.select(station=station)
|
||||
except AttributeError as e:
|
||||
errmsg = 'You either have to put in a data or an appropriate ' \
|
||||
@@ -1956,7 +1946,7 @@ class PickDlg(QDialog):
|
||||
self.cur_xlim = None
|
||||
self.cur_ylim = None
|
||||
|
||||
self.stime, self.etime = full_range(self.getWFData())
|
||||
self.stime, self.etime = full_range(self.get_wf_data())
|
||||
|
||||
# initialize plotting widget
|
||||
self.multicompfig = PylotCanvas(parent=self, multicursor=True)
|
||||
@@ -1970,7 +1960,7 @@ class PickDlg(QDialog):
|
||||
self.referenceChannel.addItem('-', None)
|
||||
self.scaleChannel.addItem('individual', None)
|
||||
|
||||
for trace in self.getWFData():
|
||||
for trace in self.get_wf_data():
|
||||
channel = trace.stats.channel
|
||||
self.referenceChannel.addItem(channel, trace)
|
||||
if not channel[-1] in ['Z', 'N', 'E', '1', '2', '3']:
|
||||
@@ -1989,7 +1979,7 @@ class PickDlg(QDialog):
|
||||
if self.wftype is not None:
|
||||
title += ' | ({})'.format(self.wftype)
|
||||
|
||||
self.multicompfig.plotWFData(wfdata=self.getWFData(), wfdata_compare=self.getWFDataComp(),
|
||||
self.multicompfig.plotWFData(wfdata=self.get_wf_data(), wfdata_compare=self.get_wf_dataComp(),
|
||||
title=title)
|
||||
|
||||
self.multicompfig.setZoomBorders2content()
|
||||
@@ -2270,8 +2260,8 @@ class PickDlg(QDialog):
|
||||
arrivals = func[plot](source_origin.depth,
|
||||
source_origin.latitude,
|
||||
source_origin.longitude,
|
||||
station_coords.get('latitude'),
|
||||
station_coords.get('longitude'),
|
||||
station_coords['latitude'],
|
||||
station_coords['longitude'],
|
||||
phases)
|
||||
self.arrivals = arrivals
|
||||
|
||||
@@ -2524,7 +2514,7 @@ class PickDlg(QDialog):
|
||||
if self.autoFilterAction.isChecked():
|
||||
for filteraction in [self.filterActionP, self.filterActionS]:
|
||||
filteraction.setChecked(False)
|
||||
self.filterWFData()
|
||||
self.filter_wf_data()
|
||||
self.draw()
|
||||
else:
|
||||
self.draw()
|
||||
@@ -2608,10 +2598,10 @@ class PickDlg(QDialog):
    def getGlobalLimits(self, ax, axis):
        return self.multicompfig.getGlobalLimits(ax, axis)

    def getWFData(self):
    def get_wf_data(self):
        return self.data

    def getWFDataComp(self):
    def get_wf_dataComp(self):
        if self.showCompData:
            return self.data_compare
        else:
@@ -2626,17 +2616,17 @@ class PickDlg(QDialog):
            return tr

        if component == 'E' or component == 'N':
            for trace in self.getWFData():
            for trace in self.get_wf_data():
                trace = selectTrace(trace, 'NE')
                if trace:
                    wfdata.append(trace)
        elif component == '1' or component == '2':
            for trace in self.getWFData():
            for trace in self.get_wf_data():
                trace = selectTrace(trace, '12')
                if trace:
                    wfdata.append(trace)
        else:
            wfdata = self.getWFData().select(component=component)
            wfdata = self.get_wf_data().select(component=component)
        return wfdata
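A minimal standalone sketch of the `Stream.select` fallback used in the `else` branch above, run on ObsPy's bundled example data; for 'E'/'N' and '1'/'2' components the method instead walks traces individually via `selectTrace`:

    from obspy import read

    st = read()  # ObsPy example stream (BW.RJOB, channels EHZ/EHN/EHE)
    for component in ('Z', 'N', 'E'):
        print(component, [tr.stats.channel for tr in st.select(component=component)])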

    def getPicks(self, picktype='manual'):

@@ -2739,8 +2729,8 @@ class PickDlg(QDialog):
        stime = self.getStartTime()

        # copy wfdata for plotting
        wfdata = self.getWFData().copy()
        wfdata_comp = self.getWFDataComp().copy()
        wfdata = self.get_wf_data().copy()
        wfdata_comp = self.get_wf_dataComp().copy()
        wfdata = self.getPickPhases(wfdata, phase)
        wfdata_comp = self.getPickPhases(wfdata_comp, phase)
        for wfd in [wfdata, wfdata_comp]:
@@ -2849,7 +2839,7 @@ class PickDlg(QDialog):
            filteroptions = None

        # copy and filter data for earliest and latest possible picks
        wfdata = self.getWFData().copy().select(channel=channel)
        wfdata = self.get_wf_data().copy().select(channel=channel)
        if filteroptions:
            try:
                wfdata.detrend('linear')
@@ -2896,7 +2886,7 @@ class PickDlg(QDialog):
        minFMSNR = parameter.get('minFMSNR')
        quality = get_quality_class(spe, parameter.get('timeerrorsP'))
        if quality <= minFMweight and snr >= minFMSNR:
            FM = fmpicker(self.getWFData().select(channel=channel).copy(), wfdata.copy(), parameter.get('fmpickwin'),
            FM = fmpicker(self.get_wf_data().select(channel=channel).copy(), wfdata.copy(), parameter.get('fmpickwin'),
                          pick - stime_diff)

        # save pick times for actual phase
@@ -3188,7 +3178,7 @@ class PickDlg(QDialog):
    def togglePickBlocker(self):
        return not self.pick_block

    def filterWFData(self, phase=None):
    def filter_wf_data(self, phase=None):
        if not phase:
            phase = self.currentPhase
        if self.getPhaseID(phase) == 'P':
@@ -3206,8 +3196,8 @@ class PickDlg(QDialog):
        self.cur_xlim = self.multicompfig.axes[0].get_xlim()
        self.cur_ylim = self.multicompfig.axes[0].get_ylim()
        # self.multicompfig.updateCurrentLimits()
        wfdata = self.getWFData().copy()
        wfdata_comp = self.getWFDataComp().copy()
        wfdata = self.get_wf_data().copy()
        wfdata_comp = self.get_wf_dataComp().copy()
        title = self.getStation()
        if filter:
            filtoptions = None
@@ -3244,14 +3234,14 @@ class PickDlg(QDialog):
    def filterP(self):
        self.filterActionS.setChecked(False)
        if self.filterActionP.isChecked():
            self.filterWFData('P')
            self.filter_wf_data('P')
        else:
            self.refreshPlot()

    def filterS(self):
        self.filterActionP.setChecked(False)
        if self.filterActionS.isChecked():
            self.filterWFData('S')
            self.filter_wf_data('S')
        else:
            self.refreshPlot()

@@ -3310,7 +3300,7 @@ class PickDlg(QDialog):
        if self.autoFilterAction.isChecked():
            self.filterActionP.setChecked(False)
            self.filterActionS.setChecked(False)
        # data = self.getWFData().copy()
        # data = self.get_wf_data().copy()
        # title = self.getStation()
        filter = False
        phase = None
@@ -3810,8 +3800,8 @@ class TuneAutopicker(QWidget):
        fnames = self.station_ids[self.get_current_station_id()]
        if not fnames == self.fnames:
            self.fnames = fnames
            self.data.setWFData(fnames)
            wfdat = self.data.getWFData()  # all available streams
            self.data.set_wf_data(fnames)
            wfdat = self.data.get_wf_data()  # all available streams
            # remove possible underscores in station names
            # wfdat = remove_underscores(wfdat)
            # rotate misaligned stations to ZNE
@@ -3916,8 +3906,8 @@ class TuneAutopicker(QWidget):
        network = None
        location = None

        wfdata = self.data.getWFData()
        wfdata_comp = self.data.getWFDataComp()
        wfdata = self.data.get_wf_data()
        wfdata_comp = self.data.get_wf_dataComp()
        metadata = self.parent().metadata
        event = self.get_current_event()
        filteroptions = self.parent().filteroptions
@@ -3972,7 +3962,7 @@ class TuneAutopicker(QWidget):
        for plotitem in self._manual_pick_plots:
            self.clear_plotitem(plotitem)
        self._manual_pick_plots = []
        st = self.data.getWFData()
        st = self.data.get_wf_data()
        tr = st.select(station=self.get_current_station())[0]
        starttime = tr.stats.starttime
        # create two lists with figure names and subindices (for subplots) to get the correct axes

@@ -1,2 +0,0 @@
# -*- coding: utf-8 -*-
#
@@ -1,90 +0,0 @@
############################# correlation parameters #####################################
# min_corr_stacking: minimum correlation coefficient for building beam trace
# min_corr_export: minimum correlation coefficient for pick export
# min_stack: minimum number of stations for building beam trace
# t_before: correlation window before pick
# t_after: correlation window after pick
# cc_maxlag: maximum shift for initial correlation
# cc_maxlag2: maximum shift for second (final) correlation (also for calculating pick uncertainty)
# initial_pick_outlier_threshold: threshold for excluding large outliers of initial (AIC) picks
# export_threshold: automatically exclude all onsets which deviate more than this threshold from corrected taup onsets
# min_picks_export: minimum number of correlated picks for export
# min_picks_autopylot: minimum number of reference autopicks to continue with event
# check_RMS: do RMS check to search for restitution errors (very experimental)
# use_taupy_onsets: use taupy onsets as reference picks instead of external picks
# station_list: use the following stations as reference for stacking
# use_stacked_trace: use existing stacked trace if found (spares re-computation)
# data_dir: obspyDMT data subdirectory (e.g. 'raw', 'processed')
# pickfile_extension: use quakeML files (PyLoT output) with the following extension, e.g. '_autopylot' for pickfiles
# such as 'PyLoT_20170501_141822_autopylot.xml'

logging: info
pick_phases: ['P', 'S']

# P-phase
P:
  min_corr_stacking: 0.8
  min_corr_export: 0.6
  min_stack: 20
  t_before: 30.
  t_after: 50.
  cc_maxlag: 50.
  cc_maxlag2: 5.
  initial_pick_outlier_threshold: 30.
  export_threshold: 2.5
  min_picks_export: 100
  min_picks_autopylot: 50
  check_RMS: True
  use_taupy_onsets: False
  station_list: ['HU.MORH', 'HU.TIH', 'OX.FUSE', 'OX.BAD']
  use_stacked_trace: False
  data_dir: 'processed'
  pickfile_extension: '_autopylot'
  dt_stacking: [250, 250]

  # filter for first correlation (rough)
  filter_options:
    freqmax: 0.5
    freqmin: 0.03
  # filter for second correlation (fine)
  filter_options_final:
    freqmax: 0.5
    freqmin: 0.03

  filter_type: bandpass
  sampfreq: 20.0

# S-phase
S:
  min_corr_stacking: 0.7
  min_corr_export: 0.6
  min_stack: 20
  t_before: 60.
  t_after: 60.
  cc_maxlag: 100.
  cc_maxlag2: 25.
  initial_pick_outlier_threshold: 30.
  export_threshold: 5.0
  min_picks_export: 200
  min_picks_autopylot: 50
  check_RMS: True
  use_taupy_onsets: False
  station_list: ['HU.MORH', 'HU.TIH', 'OX.FUSE', 'OX.BAD']
  use_stacked_trace: False
  data_dir: 'processed'
  pickfile_extension: '_autopylot'
  dt_stacking: [250, 250]

  # filter for first correlation (rough)
  filter_options:
    freqmax: 0.1
    freqmin: 0.01

  # filter for second correlation (fine)
  filter_options_final:
    freqmax: 0.2
    freqmin: 0.01

  filter_type: bandpass
  sampfreq: 20.0
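A minimal sketch of loading such a parameter file (assuming PyYAML; the filename matches the `--params` argument in the submit script further below, and this is not necessarily how pick_correlation_correction.py itself parses it):

    import yaml

    with open('parameters_fwi_mk6_it3.yaml') as fid:
        params = yaml.safe_load(fid)

    for phase in params['pick_phases']:  # ['P', 'S']
        settings = params[phase]
        print(phase, settings['min_corr_stacking'], settings['filter_options'])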
File diff suppressed because it is too large
@@ -1,40 +0,0 @@
#!/bin/bash

#ulimit -s 8192
#ulimit -v $(ulimit -v | awk '{printf("%d",$1*0.95)}')
#ulimit -v

#655360

source /opt/anaconda3/etc/profile.d/conda.sh
conda activate pylot_311
NSLOTS=20

#qsub -l low -cwd -l "os=*stretch" -pe smp 40 submit_pick_corr_correction.sh
#$ -l low
#$ -l h_vmem=6G
#$ -cwd
#$ -pe smp 20
#$ -N corr_pick


export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot_tools/"
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/"
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot/"

#export MKL_NUM_THREADS=${NSLOTS:=1}
#export NUMEXPR_NUM_THREADS=${NSLOTS:=1}
#export OMP_NUM_THREADS=${NSLOTS:=1}

#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 0 -istop 100
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M6.0-6.5' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 0 -istop 100
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
#python pick_correlation_correction.py 'H:\sciebo\dmt_database' 'H:\Sciebo\dmt_database\pylot_alparray_mantle_corr_S_0.01-0.2.in' -pd -n 4 -t

pylot_infile='/home/marcel/.pylot/pylot_alparray_syn_fwi_mk6_it3.in'
#pylot_infile='/home/marcel/.pylot/pylot_adriaarray_corr_P_and_S.in'

# THIS SCRIPT SHOULD BE CALLED BY "submit_to_grid_engine.py" using the following line:
python pick_correlation_correction.py $1 $pylot_infile -pd -n ${NSLOTS:=1} -istart $2 --params 'parameters_fwi_mk6_it3.yaml'
#--event_blacklist eventlist.txt
@@ -1,23 +0,0 @@
#!/usr/bin/env python

import subprocess

fnames = [
    ('/data/AlpArray_Data/dmt_database_synth_model_mk6_it3_no_rotation', 0),
]

#fnames = [('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_SKS-phase', 0),
#          ('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_S-phase', 0),]

####
script_location = '/home/marcel/VersionCtrl/git/code_base/correlation_picker/submit_pick_corr_correction.sh'
####

for fnin, istart in fnames:
    input_cmds = f'qsub -q low.q@minos15,low.q@minos14,low.q@minos13,low.q@minos12,low.q@minos11 {script_location} {fnin} {istart}'

    print(input_cmds)
    print(subprocess.check_output(input_cmds.split()))
@@ -1,61 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import glob
import json

from obspy import read_events

from pylot.core.util.dataprocessing import Metadata
from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT


def get_event_obspy_dmt(eventdir):
    event_pkl_file = os.path.join(eventdir, 'info', 'event.pkl')
    if not os.path.exists(event_pkl_file):
        raise IOError('Could not find event path for event: {}'.format(eventdir))
    event = qml_from_obspyDMT(event_pkl_file)
    return event


def get_event_pylot(eventdir, extension=''):
    event_id = get_event_id(eventdir)
    filename = os.path.join(eventdir, 'PyLoT_{}{}.xml'.format(event_id, extension))
    if not os.path.isfile(filename):
        return
    cat = read_events(filename)
    return cat[0]


def get_event_id(eventdir):
    event_id = os.path.split(eventdir)[-1]
    return event_id


def get_picks(eventdir, extension=''):
    event_id = get_event_id(eventdir)
    filename = 'PyLoT_{}{}.xml'
    filename = filename.format(event_id, extension)
    fpath = os.path.join(eventdir, filename)
    fpaths = glob.glob(fpath)
    if len(fpaths) == 1:
        cat = read_events(fpaths[0])
        picks = cat[0].picks
        return picks
    elif len(fpaths) == 0:
        print('get_picks: File not found: {}'.format(fpath))
        return
    print(f'WARNING: Ambiguous pick file specification. Found the following pick files {fpaths}\nFilemask: {fpath}')
    return


def write_json(object, fname):
    with open(fname, 'w') as outfile:
        json.dump(object, outfile, sort_keys=True, indent=4)


def get_metadata(eventdir):
    metadata_path = os.path.join(eventdir, 'resp')
    metadata = Metadata(inventory=metadata_path, verbosity=0)
    return metadata
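A hedged usage sketch of the helpers above (the event directory path is hypothetical; an obspyDMT-style layout with 'info/event.pkl' and a 'resp' subfolder is assumed):

    eventdir = '/data/dmt_database/20171010_063224.a'    # hypothetical event directory

    event = get_event_obspy_dmt(eventdir)                # event from obspyDMT's event.pkl
    picks = get_picks(eventdir, extension='_autopylot')  # picks from PyLoT_<id>_autopylot.xml
    metadata = get_metadata(eventdir)                    # station metadata from <eventdir>/resp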
@@ -1,7 +1,9 @@
Cartopy==0.23.0
joblib==1.4.2
obspy==1.4.1
pyaml==24.7.0
pyqtgraph==0.13.7
PySide2==5.15.8
pytest==8.3.2
# This file may be used to create an environment using:
# $ conda create --name <env> --file <this file>
# platform: win-64
cartopy>=0.20.2
numpy<2
obspy>=1.3.0
pyqtgraph>=0.12.4
pyside2>=5.13.2
scipy>=1.8.0
@@ -1,101 +0,0 @@
%This is a parameter input file for PyLoT/autoPyLoT.
%All main and special settings regarding data handling
%and picking are to be set here!
%Parameters are optimized for %extent data sets!
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#main settings#
#rootpath# %project path
#datapath# %data path
#database# %name of data base
20171010_063224.a #eventID# %event ID for single event processing (* for all events found in database)
#invdir# %full path to inventory or dataless-seed file
PILOT #datastructure# %choose data structure
True #apverbose# %choose 'True' or 'False' for terminal output
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#NLLoc settings#
None #nllocbin# %path to NLLoc executable
None #nllocroot# %root of NLLoc-processing directory
None #phasefile# %name of autoPyLoT-output phase file for NLLoc
None #ctrfile# %name of autoPyLoT-output control file for NLLoc
ttime #ttpatter# %pattern of NLLoc ttimes from grid
AUTOLOC_nlloc #outpatter# %pattern of NLLoc-output file
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#parameters for seismic moment estimation#
3530.0 #vp# %average P-wave velocity
2500.0 #rho# %average rock density [kg/m^3]
300.0 0.8 #Qp# %quality factor for P waves (Qp*f^a); list(Qp, a)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#settings local magnitude#
1.0 1.0 1.0 #WAscaling# %Scaling relation (log(Ao)+Alog(r)+Br+C) of Wood-Anderson amplitude Ao [nm] If zeros are set, original Richter magnitude is calculated!
1.0 1.0 #magscaling# %Scaling relation for derived local magnitude [a*Ml+b]. If zeros are set, no scaling of network magnitude is applied!
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#filter settings#
0.03 0.03 #minfreq# %Lower filter frequency [P, S]
0.5 0.5 #maxfreq# %Upper filter frequency [P, S]
4 4 #filter_order# %filter order [P, S]
bandpass bandpass #filter_type# %filter type (bandpass, bandstop, lowpass, highpass) [P, S]
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#common settings picker#
global #extent# %extent of array ("local", "regional" or "global")
-100.0 #pstart# %start time [s] for calculating CF for P-picking (if TauPy: seconds relative to estimated onset)
50.0 #pstop# %end time [s] for calculating CF for P-picking (if TauPy: seconds relative to estimated onset)
-50.0 #sstart# %start time [s] relative to P-onset for calculating CF for S-picking
50.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
ak135 #taup_model# %Define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
P,Pdiff,S,SKS #taup_phases# %Specify possible phases for TauPy (comma separated). See ObsPy TauPy documentation for possible values.
0.03 0.5 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
0.01 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
0.03 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
0.01 0.5 #bph2# %lower/upper corner freq. of second band pass filter H-comp. [Hz]
#special settings for calculating CF#
%!!Edit the following only if you know what you are doing!!%
#Z-component#
HOS #algoP# %choose algorithm for P-onset determination (HOS, ARZ, or AR3)
300.0 #tlta# %for HOS-/AR-AIC-picker, length of LTA window [s]
4 #hosorder# %for HOS-picker, order of Higher Order Statistics
2 #Parorder# %for AR-picker, order of AR process of Z-component
16.0 #tdet1z# %for AR-picker, length of AR determination window [s] for Z-component, 1st pick
10.0 #tpred1z# %for AR-picker, length of AR prediction window [s] for Z-component, 1st pick
12.0 #tdet2z# %for AR-picker, length of AR determination window [s] for Z-component, 2nd pick
6.0 #tpred2z# %for AR-picker, length of AR prediction window [s] for Z-component, 2nd pick
0.001 #addnoise# %add noise to seismogram for stable AR prediction
60.0 5.0 20.0 12.0 #tsnrz# %for HOS/AR, window lengths for SNR and slope estimation [tnoise, tsafety, tsignal, tslope] [s]
50.0 #pickwinP# %for initial AIC pick, length of P-pick window [s]
30.0 #Precalcwin# %for HOS/AR, window length [s] for recalculation of CF (relative to 1st pick)
2.0 #aictsmooth# %for HOS/AR, take average of samples for smoothing of AIC-function [s]
2.0 #tsmoothP# %for HOS/AR, take average of samples in this time window for smoothing CF [s]
0.006 #ausP# %for HOS/AR, artificial uplift of samples (aus) of CF (P)
2.0 #nfacP# %for HOS/AR, noise factor for noise level determination (P)
#H-components#
ARH #algoS# %choose algorithm for S-onset determination (ARH or AR3)
12.0 #tdet1h# %for HOS/AR, length of AR-determination window [s], H-components, 1st pick
6.0 #tpred1h# %for HOS/AR, length of AR-prediction window [s], H-components, 1st pick
8.0 #tdet2h# %for HOS/AR, length of AR-determination window [s], H-components, 2nd pick
4.0 #tpred2h# %for HOS/AR, length of AR-prediction window [s], H-components, 2nd pick
4 #Sarorder# %for AR-picker, order of AR process of H-components
100.0 #Srecalcwin# %for AR-picker, window length [s] for recalculation of CF (2nd pick) (H)
195.0 #pickwinS# %for initial AIC pick, length of S-pick window [s]
60.0 10.0 30.0 12.0 #tsnrh# %for ARH/AR3, window lengths for SNR and slope estimation [tnoise, tsafety, tsignal, tslope] [s]
22.0 #aictsmoothS# %for AIC-picker, take average of samples in this time window for smoothing of AIC-function [s]
20.0 #tsmoothS# %for AR-picker, take average of samples for smoothing CF [s] (S)
0.001 #ausS# %for HOS/AR, artificial uplift of samples (aus) of CF (S)
2.0 #nfacS# %for AR-picker, noise factor for noise level determination (S)
#first-motion picker#
1 #minfmweight# %minimum required P weight for first-motion determination
3.0 #minFMSNR# %minimum required SNR for first-motion determination
10.0 #fmpickwin# %pick window [s] around P onset for calculating zero crossings
#quality assessment#
0.1 0.2 0.4 0.8 #timeerrorsP# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for P
4.0 8.0 16.0 32.0 #timeerrorsS# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for S
0.005 #minAICPslope# %below this slope [counts/s] the initial P pick is rejected
1.1 #minAICPSNR# %below this SNR the initial P pick is rejected
0.002 #minAICSslope# %below this slope [counts/s] the initial S pick is rejected
1.3 #minAICSSNR# %below this SNR the initial S pick is rejected
20.0 #minsiglength# %length of signal part for which amplitudes must exceed noiselevel [s]
1.0 #noisefactor# %noiselevel*noisefactor=threshold
10.0 #minpercent# %required percentage of amplitudes exceeding threshold
0.1 #zfac# %P-amplitude must exceed at least zfac times RMS-S amplitude
100.0 #mdttolerance# %maximum allowed deviation of P picks from median [s]
50.0 #wdttolerance# %maximum allowed deviation from Wadati-diagram
25.0 #jackfactor# %pick is removed if the variance of the subgroup with the pick removed is larger than the mean variance of all subgroups times safety factor
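A hedged sketch of reading such an input file programmatically; `PylotParameter` lives in `pylot.core.io.inputs` in the PyLoT source tree, but treat the import path and constructor usage as assumptions:

    from pylot.core.io.inputs import PylotParameter

    # filename as referenced by the test file below
    parameter = PylotParameter('pylot_alparray_mantle_corr_stack_0.03-0.5.in')
    print(parameter.get('taup_model'))   # 'ak135' in the file above
    print(parameter.get('timeerrorsP'))  # discrete time errors mapped to P pick weights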
@@ -1,27 +0,0 @@
import os
import pytest

from autoPyLoT import autoPyLoT


class TestAutopickerGlobal():
    def init(self):
        self.params_infile = 'pylot_alparray_mantle_corr_stack_0.03-0.5.in'
        self.test_event_dir = 'dmt_database_test'

        if not os.path.isfile(self.params_infile):
            print(f'Test input file {os.path.abspath(self.params_infile)} not found.')
            return False

        if not os.path.exists(self.test_event_dir):
            print(
                f'Test event directory not found at location "{os.path.abspath(self.test_event_dir)}". '
                f'Make sure to load it from the website first.'
            )
            return False

        return True

    def test_autopicker(self):
        assert self.init(), 'Initialization failed due to missing input files.'
        #autoPyLoT(inputfile=self.params_infile, eventid='20171010_063224.a')
@@ -1,76 +0,0 @@
import pytest
from obspy import read, Trace, UTCDateTime

from pylot.correlation.pick_correlation_correction import XCorrPickCorrection


class TestXCorrPickCorrection():
    def setup(self):
        self.make_test_traces()
        self.make_test_picks()
        self.t_before = 2.
        self.t_after = 2.
        self.cc_maxlag = 0.5

    def make_test_traces(self):
        # take first trace of test Stream from obspy
        tr1 = read()[0]
        # filter trace
        tr1.filter('bandpass', freqmin=1, freqmax=20)
        # make a copy and shift the copy by 0.1 s
        tr2 = tr1.copy()
        tr2.stats.starttime += 0.1

        self.trace1 = tr1
        self.trace2 = tr2

    def make_test_picks(self):
        # create an artificial reference pick on reference trace (trace1) and another one on the 0.1 s shifted trace
        self.tpick1 = UTCDateTime('2009-08-24T00:20:07.7')
        # shift the second pick by 0.2 s, the correction should be around 0.1 s now
        self.tpick2 = self.tpick1 + 0.2

    def test_slice_trace_okay(self):

        self.setup()
        xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
                                   t_before=self.t_before, t_after=self.t_after, cc_maxlag=self.cc_maxlag)

        test_trace = self.trace1
        pick_time = self.tpick2

        sliced_trace = xcpc.slice_trace(test_trace, pick_time)
        assert ((sliced_trace.stats.starttime == pick_time - self.t_before - self.cc_maxlag / 2)
                and (sliced_trace.stats.endtime == pick_time + self.t_after + self.cc_maxlag / 2))

    def test_slice_trace_fails(self):
        self.setup()

        test_trace = self.trace1
        pick_time = self.tpick1

        with pytest.raises(ValueError):
            xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
                                       t_before=self.t_before + 20, t_after=self.t_after, cc_maxlag=self.cc_maxlag)
            xcpc.slice_trace(test_trace, pick_time)

        with pytest.raises(ValueError):
            xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
                                       t_before=self.t_before, t_after=self.t_after + 50, cc_maxlag=self.cc_maxlag)
            xcpc.slice_trace(test_trace, pick_time)

    def test_cross_correlation(self):
        self.setup()

        # create XCorrPickCorrection object
        xcpc = XCorrPickCorrection(self.tpick1, self.trace1, self.tpick2, self.trace2, t_before=self.t_before,
                                   t_after=self.t_after, cc_maxlag=self.cc_maxlag)

        # execute correlation
        correction, cc_max, uncert, fwfm = xcpc.cross_correlation(False, '', '')

        # define awaited test result
        test_result = (-0.09983091718314982, 0.9578431835689154, 0.0015285160561610929, 0.03625786256084631)

        # check results
        assert pytest.approx(test_result, rel=1e-6) == (correction, cc_max, uncert, fwfm)