Compare commits

...

31 Commits

Author SHA1 Message Date
Wehling-Benatelli
e434bda993 [change] corrected for required versions 2024-08-30 15:01:28 +02:00
c0c3cbbd7b refactor: remove unused methods 2024-08-30 15:01:28 +02:00
76d4ec290c refactor: restructure data objects 2024-08-30 15:01:18 +02:00
63dac0fff6 refactor: restructure data objects 2024-08-30 15:00:44 +02:00
b15cfe2e1d bugfix: corrected for ValueError 2024-08-30 15:00:44 +02:00
29a1e4ebe6 refactor: move Project definition to individual file 2024-08-30 14:59:28 +02:00
50cabb0038 refactor: remove unused code; rewrite too complex functions 2024-08-30 14:59:28 +02:00
8dd5789b0c refactor: remove unnecessary additional declaration 2024-08-30 14:59:28 +02:00
c4aeab0d89 suggestion: add new dataclasses; remove unused code 2024-08-30 14:59:19 +02:00
ef69fc429f refactor: removed unused code 2024-08-30 14:58:53 +02:00
c8f9c1c33a refactor: rename writephases; add write hash to write_phases 2024-08-30 14:58:53 +02:00
59f2c4b46f refactor: remove unused code; restructure writephases 2024-08-30 14:58:53 +02:00
8eb958c91b change: add docu and test case 2024-08-30 14:58:52 +02:00
6b7f297d7a [update] adding possibility to display other waveform data (e.g. denoised/synthetic) together with genuine data for comparison 2024-08-30 14:58:38 +02:00
70d5c2d621 [minor] mpl.figure.canvas.draw -> draw_idle 2024-08-30 12:03:14 +02:00
7393201b90 [minor] inform if station coordinates were not found in metadata 2024-08-30 12:03:14 +02:00
d02f74ab10 [bugfix] added missing Parameter object in call for picksdict_from_picks 2024-08-30 12:03:14 +02:00
0a5f5f0817 [minor] removed unneeded imports 2024-08-30 12:03:05 +02:00
db976e5ea9 [bugfix] removing redundancy and wrong bullsh.. try-except code 2024-08-30 12:02:22 +02:00
c021ca19d7 [minor] switch default cmap for array_map to 'viridis' 2024-08-30 12:02:22 +02:00
356988e71d [update] further improved Pickfile selection dialog, now providing methods "overwrite" or "merge" 2024-08-30 12:02:22 +02:00
ad62284e0e [update] improve pickfile selection, give the ability to select only specific files 2024-08-30 12:02:06 +02:00
0fb0b0f11c [bugfix] re-implement ability of get_bool to return unidentifiable input 2024-08-30 11:59:50 +02:00
e3dd4a4e28 bugfix: remove unused functions; correct for wrong formatting (PEP) 2024-08-30 11:59:49 +02:00
2bbb84190c feat: add type hints and tests for plot utils 2024-08-30 11:56:59 +02:00
fb32d5e0c5 bugfix: add tests to key_for_set_value 2024-08-30 11:55:14 +02:00
f043401cc0 bugfix: rename is_iterable and add doc tests 2024-08-30 11:55:14 +02:00
eb15382b5f bugfix: refactor get_owner and get_hash; add tests 2024-08-30 11:55:13 +02:00
7fbc3bc5ae bugfix: add new tests and refactor get_none 2024-08-30 11:55:01 +02:00
221743fe20 bugfix: correct erroneous and add new doctests 2024-08-30 11:49:22 +02:00
554afc5a81 bugfix: update check4rotate 2024-08-29 16:12:19 +02:00
25 changed files with 1608 additions and 2532 deletions

848 PyLoT.py

File diff suppressed because it is too large

View File

@@ -28,7 +28,7 @@ from pylot.core.util.dataprocessing import restitute_data, Metadata
from pylot.core.util.defaults import SEPARATOR
from pylot.core.util.event import Event
from pylot.core.util.structure import DATASTRUCTURE
from pylot.core.util.utils import get_None, trim_station_components, check4gapsAndRemove, check4doubled, \
from pylot.core.util.utils import get_none, trim_station_components, check4gapsAndRemove, check4doubled, \
check4rotated
from pylot.core.util.version import get_git_version as _getVersionString
@@ -91,9 +91,9 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
sp=sp_info)
print(splash)
parameter = get_None(parameter)
inputfile = get_None(inputfile)
eventid = get_None(eventid)
parameter = get_none(parameter)
inputfile = get_none(inputfile)
eventid = get_none(eventid)
fig_dict = None
fig_dict_wadatijack = None
@@ -119,13 +119,9 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
obspyDMT_wfpath = input_dict['obspyDMT_wfpath']
if not parameter:
if inputfile:
parameter = PylotParameter(inputfile)
# iplot = parameter['iplot']
else:
infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
print('Using default input file {}'.format(infile))
parameter = PylotParameter(infile)
if not inputfile:
print('Using default input parameter')
parameter = PylotParameter(inputfile)
else:
if not type(parameter) == PylotParameter:
print('Wrong input type for parameter: {}'.format(type(parameter)))
@@ -154,7 +150,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
datastructure.setExpandFields(exf)
# check if default location routine NLLoc is available and all stations are used
if get_None(parameter['nllocbin']) and station == 'all':
if get_none(parameter['nllocbin']) and station == 'all':
locflag = 1
# get NLLoc-root path
nllocroot = parameter.get('nllocroot')
@@ -247,7 +243,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
pylot_event = Event(eventpath) # event should be path to event directory
data.setEvtData(pylot_event)
if fnames == 'None':
data.setWFData(glob.glob(os.path.join(datapath, event_datapath, '*')))
data.set_wf_data(glob.glob(os.path.join(datapath, event_datapath, '*')))
# the following is necessary because within
# multiple event processing no event ID is provided
# in autopylot.in
@@ -262,7 +258,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
now.minute)
parameter.setParam(eventID=eventID)
else:
data.setWFData(fnames)
data.set_wf_data(fnames)
eventpath = events[0]
# now = datetime.datetime.now()
@@ -272,7 +268,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
# now.hour,
# now.minute)
parameter.setParam(eventID=eventid)
wfdat = data.getWFData() # all available streams
wfdat = data.get_wf_data() # all available streams
if not station == 'all':
wfdat = wfdat.select(station=station)
if not wfdat:
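
The hunks above rename PyLoT's camelCase helpers to snake_case (get_None -> get_none) and move waveform access to Data.set_wf_data/get_wf_data. A minimal, hedged usage sketch of the renamed calls; the event directory is hypothetical, and the behaviour of get_none (passing a value through unless it is None-like) is inferred from the calls shown above:

import glob
import os

from pylot.core.io.data import Data
from pylot.core.util.utils import get_none

eventid = get_none('None')  # inferred: None-like input is normalized to None, other values pass through
data = Data()
data.set_wf_data(glob.glob(os.path.join('/data/EVENT_A', '*')))  # hypothetical event directory, mirroring autoPyLoT
wfdat = data.get_wf_data()  # all available streams
print(eventid, len(wfdat))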

View File

@@ -1,658 +1,69 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import copy
import os
from dataclasses import dataclass, field
from typing import List, Union
from PySide2.QtWidgets import QMessageBox
from obspy import read_events
from obspy.core import read, Stream, UTCDateTime
from obspy.core.event import Event as ObsPyEvent
from obspy.io.sac import SacIOError
from obspy import UTCDateTime
import pylot.core.loc.focmec as focmec
import pylot.core.loc.hypodd as hypodd
import pylot.core.loc.velest as velest
from pylot.core.io.phases import readPILOTEvent, picks_from_picksdict, \
picksdict_from_pilot, merge_picks, PylotParameter
from pylot.core.util.errors import FormatError, OverwriteError
from pylot.core.util.event import Event
from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT
from pylot.core.util.utils import fnConstructor, full_range, check4rotated, \
check_for_gaps_and_merge, trim_station_components, check_for_nan
from pylot.core.io.event import EventData
from pylot.core.io.waveformdata import WaveformData
from pylot.core.util.dataprocessing import Metadata
class Data(object):
"""
Data container with attributes wfdata holding ~obspy.core.stream.
:type parent: PySide2.QtWidgets.QWidget object, optional
:param parent: A PySide2.QtWidgets.QWidget object utilized when
called by a GUI to display a PySide2.QtWidgets.QMessageBox instead of printing
to standard out.
:type evtdata: ~obspy.core.event.Event object, optional
:param evtdata ~obspy.core.event.Event object containing all derived or
loaded event. Container object holding, e.g. phase arrivals, etc.
"""
@dataclass
class Data:
event_data: EventData = field(default_factory=EventData)
waveform_data: WaveformData = field(default_factory=WaveformData)
metadata: Metadata = field(default_factory=Metadata)
_parent: Union[None, 'QtWidgets.QWidget'] = None
def __init__(self, parent=None, evtdata=None):
self._parent = parent
if self.getParent():
self.comp = parent.getComponent()
else:
self.comp = 'Z'
self.wfdata = Stream()
self._new = False
if isinstance(evtdata, ObsPyEvent) or isinstance(evtdata, Event):
pass
elif isinstance(evtdata, dict):
evt = readPILOTEvent(**evtdata)
evtdata = evt
elif isinstance(evtdata, str):
try:
cat = read_events(evtdata)
if len(cat) != 1:
raise ValueError('ambiguous event information for file: '
'{file}'.format(file=evtdata))
evtdata = cat[0]
except TypeError as e:
if 'Unknown format for file' in e.message:
if 'PHASES' in evtdata:
picks = picksdict_from_pilot(evtdata)
evtdata = ObsPyEvent()
evtdata.picks = picks_from_picksdict(picks)
elif 'LOC' in evtdata:
raise NotImplementedError('PILOT location information '
'read support not yet '
'implemented.')
elif 'event.pkl' in evtdata:
evtdata = qml_from_obspyDMT(evtdata)
else:
raise e
else:
raise e
else: # create an empty Event object
self.setNew()
evtdata = ObsPyEvent()
evtdata.picks = []
self.evtdata = evtdata
self.wforiginal = None
self.cuttimes = None
self.dirty = False
self.processed = None
self.event_data = EventData(evtdata)
self.waveform_data = WaveformData()
def __str__(self):
return str(self.wfdata)
return str(self.waveform_data.wfdata)
def __add__(self, other):
assert isinstance(other, Data), "operands must be of same type 'Data'"
rs_id = self.get_evt_data().get('resource_id')
rs_id_other = other.get_evt_data().get('resource_id')
if other.isNew() and not self.isNew():
picks_to_add = other.get_evt_data().picks
old_picks = self.get_evt_data().picks
wf_ids_old = [pick.waveform_id for pick in old_picks]
for new_pick in picks_to_add:
wf_id = new_pick.waveform_id
if wf_id in wf_ids_old:
for old_pick in old_picks:
comparison = [old_pick.waveform_id == new_pick.waveform_id,
old_pick.phase_hint == new_pick.phase_hint,
old_pick.method_id == new_pick.method_id]
if all(comparison):
del (old_pick)
old_picks.append(new_pick)
elif not other.isNew() and self.isNew():
new = other + self
self.evtdata = new.get_evt_data()
elif self.isNew() and other.isNew():
if not isinstance(other, Data):
raise TypeError("Operands must be of type 'Data'")
if self.event_data.is_new() and other.event_data.is_new():
pass
elif rs_id == rs_id_other:
other.setNew()
elif other.event_data.is_new():
new_picks = other.event_data.evtdata.picks
old_picks = self.event_data.evtdata.picks
old_picks.extend([pick for pick in new_picks if pick not in old_picks])
elif self.event_data.is_new():
return other + self
elif self.event_data.get_id() == other.event_data.get_id():
other.event_data.set_new()
return self + other
else:
raise ValueError("both Data objects have differing "
"unique Event identifiers")
raise ValueError("Both Data objects have differing unique Event identifiers")
return self
def getPicksStr(self):
"""
Return picks in event data
:return: picks seperated by newlines
:rtype: str
"""
picks_str = ''
for pick in self.get_evt_data().picks:
picks_str += str(pick) + '\n'
return picks_str
def getParent(self):
"""
Get PySide.QtGui.QWidget parent object
"""
def get_parent(self):
return self._parent
def isNew(self):
return self._new
def filter_wf_data(self, **kwargs):
self.waveform_data.wfdata.detrend('linear')
self.waveform_data.wfdata.taper(0.02, type='cosine')
self.waveform_data.wfdata.filter(**kwargs)
self.waveform_data.dirty = True
def setNew(self):
self._new = True
def set_wf_data(self, fnames: List[str], fnames_alt: List[str] = None, check_rotated=False, metadata=None, tstart=0, tstop=0):
return self.waveform_data.load_waveforms(fnames, fnames_alt, check_rotated, metadata, tstart, tstop)
def getCutTimes(self):
"""
Returns earliest start and latest end of all waveform data
:return: minimum start time and maximum end time as a tuple
:rtype: (UTCDateTime, UTCDateTime)
"""
if self.cuttimes is None:
self.updateCutTimes()
return self.cuttimes
def reset_wf_data(self):
self.waveform_data.reset()
def updateCutTimes(self):
"""
Update cuttimes to contain earliest start and latest end time
of all waveform data
:rtype: None
"""
self.cuttimes = full_range(self.getWFData())
def get_wf_data(self):
return self.waveform_data.wfdata
def getEventFileName(self):
ID = self.getID()
# handle forbidden filenames especially on windows systems
return fnConstructor(str(ID))
def checkEvent(self, event, fcheck, forceOverwrite=False):
"""
Check information in supplied event and own event and replace with own
information if no other information are given or forced by forceOverwrite
:param event: Event that supplies information for comparison
:type event: pylot.core.util.event.Event
:param fcheck: check and delete existing information
can be a str or a list of strings of ['manual', 'auto', 'origin', 'magnitude']
:type fcheck: str, [str]
:param forceOverwrite: Set to true to force overwrite own information. If false,
supplied information from event is only used if there is no own information in that
category (given in fcheck: manual, auto, origin, magnitude)
:type forceOverwrite: bool
:return:
:rtype: None
"""
if 'origin' in fcheck:
self.replaceOrigin(event, forceOverwrite)
if 'magnitude' in fcheck:
self.replaceMagnitude(event, forceOverwrite)
if 'auto' in fcheck:
self.replacePicks(event, 'auto')
if 'manual' in fcheck:
self.replacePicks(event, 'manual')
def replaceOrigin(self, event, forceOverwrite=False):
"""
Replace own origin with the one supplied in event if own origin is not
existing or forced by forceOverwrite = True
:param event: Event that supplies information for comparison
:type event: pylot.core.util.event.Event
:param forceOverwrite: always replace own information with supplied one if true
:type forceOverwrite: bool
:return:
:rtype: None
"""
if self.get_evt_data().origins or forceOverwrite:
if event.origins:
print("Found origin, replace it by new origin.")
event.origins = self.get_evt_data().origins
def replaceMagnitude(self, event, forceOverwrite=False):
"""
Replace own magnitude with the one supplied in event if own magnitude is not
existing or forced by forceOverwrite = True
:param event: Event that supplies information for comparison
:type event: pylot.core.util.event.Event
:param forceOverwrite: always replace own information with supplied one if true
:type forceOverwrite: bool
:return:
:rtype: None
"""
if self.get_evt_data().magnitudes or forceOverwrite:
if event.magnitudes:
print("Found magnitude, replace it by new magnitude")
event.magnitudes = self.get_evt_data().magnitudes
def replacePicks(self, event, picktype):
"""
Replace picks in event with own picks
:param event: Event that supplies information for comparison
:type event: pylot.core.util.event.Event
:param picktype: 'auto' or 'manual' picks
:type picktype: str
:return:
:rtype: None
"""
checkflag = 1
picks = event.picks
# remove existing picks
for j, pick in reversed(list(enumerate(picks))):
try:
if picktype in str(pick.method_id.id):
picks.pop(j)
checkflag = 2
except AttributeError as e:
msg = '{}'.format(e)
print(e)
checkflag = 0
if checkflag > 0:
if checkflag == 1:
print("Write new %s picks to catalog." % picktype)
if checkflag == 2:
print("Found %s pick(s), remove them and append new picks to catalog." % picktype)
# append new picks
for pick in self.get_evt_data().picks:
if picktype in str(pick.method_id.id):
picks.append(pick)
def exportEvent(self, fnout, fnext='.xml', fcheck='auto', upperErrors=None):
"""
Export event to file
:param fnout: basename of file
:param fnext: file extensions xml, cnv, obs, focmec, or/and pha
:param fcheck: check and delete existing information
can be a str or a list of strings of ['manual', 'auto', 'origin', 'magnitude']
"""
from pylot.core.util.defaults import OUTPUTFORMATS
if not type(fcheck) == list:
fcheck = [fcheck]
try:
evtformat = OUTPUTFORMATS[fnext]
except KeyError as e:
errmsg = '{0}; selected file extension {1} not ' \
'supported'.format(e, fnext)
raise FormatError(errmsg)
if hasattr(self.get_evt_data(), 'notes'):
try:
with open(os.path.join(os.path.dirname(fnout), 'notes.txt'), 'w') as notes_file:
notes_file.write(self.get_evt_data().notes)
except Exception as e:
print('Warning: Could not save notes.txt: ', str(e))
# check for already existing xml-file
if fnext == '.xml':
if os.path.isfile(fnout + fnext):
print("xml-file already exists! Check content ...")
cat = read_events(fnout + fnext)
if len(cat) > 1:
raise IOError('Ambigious event information in file {}'.format(fnout + fnext))
if len(cat) < 1:
raise IOError('No event information in file {}'.format(fnout + fnext))
event = cat[0]
if not event.resource_id == self.get_evt_data().resource_id:
QMessageBox.warning(self, 'Warning', 'Different resource IDs!')
return
self.checkEvent(event, fcheck)
self.setEvtData(event)
self.get_evt_data().write(fnout + fnext, format=evtformat)
# try exporting event
else:
evtdata_org = self.get_evt_data()
picks = evtdata_org.picks
eventpath = evtdata_org.path
picks_copy = copy.deepcopy(picks)
evtdata_copy = Event(eventpath)
evtdata_copy.picks = picks_copy
# check for stations picked automatically as well as manually
# Prefer manual picks!
for i in range(len(picks)):
if picks[i].method_id == 'manual':
mstation = picks[i].waveform_id.station_code
mstation_ext = mstation + '_'
for k in range(len(picks_copy)):
if ((picks_copy[k].waveform_id.station_code == mstation) or
(picks_copy[k].waveform_id.station_code == mstation_ext)) and \
(picks_copy[k].method_id == 'auto'):
del picks_copy[k]
break
lendiff = len(picks) - len(picks_copy)
if lendiff != 0:
print("Manual as well as automatic picks available. Prefered the {} manual ones!".format(lendiff))
no_uncertainties_p = []
no_uncertainties_s = []
if upperErrors:
# check for pick uncertainties exceeding adjusted upper errors
# Picks with larger uncertainties will not be saved in output file!
for j in range(len(picks)):
for i in range(len(picks_copy)):
if picks_copy[i].phase_hint[0] == 'P':
# Skipping pick if no upper_uncertainty is found and warning user
if picks_copy[i].time_errors['upper_uncertainty'] is None:
#print("{1} P-Pick of station {0} does not have upper_uncertainty and cant be checked".format(
# picks_copy[i].waveform_id.station_code,
# picks_copy[i].method_id))
if not picks_copy[i].waveform_id.station_code in no_uncertainties_p:
no_uncertainties_p.append(picks_copy[i].waveform_id.station_code)
continue
#print ("checking for upper_uncertainty")
if (picks_copy[i].time_errors['uncertainty'] is None) or \
(picks_copy[i].time_errors['upper_uncertainty'] >= upperErrors[0]):
print("Uncertainty exceeds or equal adjusted upper time error!")
print("Adjusted uncertainty: {}".format(upperErrors[0]))
print("Pick uncertainty: {}".format(picks_copy[i].time_errors['uncertainty']))
print("{1} P-Pick of station {0} will not be saved in outputfile".format(
picks_copy[i].waveform_id.station_code,
picks_copy[i].method_id))
del picks_copy[i]
break
if picks_copy[i].phase_hint[0] == 'S':
# Skipping pick if no upper_uncertainty is found and warning user
if picks_copy[i].time_errors['upper_uncertainty'] is None:
#print("{1} S-Pick of station {0} does not have upper_uncertainty and cant be checked".format(
#picks_copy[i].waveform_id.station_code,
#picks_copy[i].method_id))
if not picks_copy[i].waveform_id.station_code in no_uncertainties_s:
no_uncertainties_s.append(picks_copy[i].waveform_id.station_code)
continue
if (picks_copy[i].time_errors['uncertainty'] is None) or \
(picks_copy[i].time_errors['upper_uncertainty'] >= upperErrors[1]):
print("Uncertainty exceeds or equal adjusted upper time error!")
print("Adjusted uncertainty: {}".format(upperErrors[1]))
print("Pick uncertainty: {}".format(picks_copy[i].time_errors['uncertainty']))
print("{1} S-Pick of station {0} will not be saved in outputfile".format(
picks_copy[i].waveform_id.station_code,
picks_copy[i].method_id))
del picks_copy[i]
break
for s in no_uncertainties_p:
print("P-Pick of station {0} does not have upper_uncertainty and cant be checked".format(s))
for s in no_uncertainties_s:
print("S-Pick of station {0} does not have upper_uncertainty and cant be checked".format(s))
if fnext == '.obs':
try:
evtdata_copy.write(fnout + fnext, format=evtformat)
# write header afterwards
evid = str(evtdata_org.resource_id).split('/')[1]
header = '# EQEVENT: Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' % evid
nllocfile = open(fnout + fnext)
l = nllocfile.readlines()
# Adding A0/Generic Amplitude to .obs file
# l2 = []
# for li in l:
# for amp in evtdata_org.amplitudes:
# if amp.waveform_id.station_code == li[0:5].strip():
# li = li[0:64] + '{:0.2e}'.format(amp.generic_amplitude) + li[73:-1] + '\n'
# l2.append(li)
# l = l2
nllocfile.close()
l.insert(0, header)
nllocfile = open(fnout + fnext, 'w')
nllocfile.write("".join(l))
nllocfile.close()
except KeyError as e:
raise KeyError('''{0} export format
not implemented: {1}'''.format(evtformat, e))
if fnext == '.cnv':
try:
velest.export(picks_copy, fnout + fnext, eventinfo=self.get_evt_data())
except KeyError as e:
raise KeyError('''{0} export format
not implemented: {1}'''.format(evtformat, e))
if fnext == '_focmec.in':
try:
infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
print('Using default input file {}'.format(infile))
parameter = PylotParameter(infile)
focmec.export(picks_copy, fnout + fnext, parameter, eventinfo=self.get_evt_data())
except KeyError as e:
raise KeyError('''{0} export format
not implemented: {1}'''.format(evtformat, e))
if fnext == '.pha':
try:
infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
print('Using default input file {}'.format(infile))
parameter = PylotParameter(infile)
hypodd.export(picks_copy, fnout + fnext, parameter, eventinfo=self.get_evt_data())
except KeyError as e:
raise KeyError('''{0} export format
not implemented: {1}'''.format(evtformat, e))
def getComp(self):
"""
Get component (ZNE)
"""
return self.comp
def getID(self):
"""
Get unique resource id
"""
try:
return self.evtdata.get('resource_id').id
except:
return None
def filterWFData(self, kwargs):
"""
Filter waveform data
:param kwargs: arguments to pass through to filter function
"""
data = self.getWFData()
data.detrend('linear')
data.taper(0.02, type='cosine')
data.filter(**kwargs)
self.dirty = True
def setWFData(self, fnames, fnames_syn=None, checkRotated=False, metadata=None, tstart=0, tstop=0):
"""
Clear current waveform data and set given waveform data
:param fnames: waveform data names to append
:type fnames: list
"""
def check_fname_exists(filenames: list) -> list:
if filenames:
filenames = [fn for fn in filenames if os.path.isfile(fn)]
return filenames
self.wfdata = Stream()
self.wforiginal = None
self.wfsyn = Stream()
if tstart == tstop:
tstart = tstop = None
self.tstart = tstart
self.tstop = tstop
fnames = check_fname_exists(fnames)
fnames_syn = check_fname_exists(fnames_syn)
# if obspy_dmt:
# wfdir = 'raw'
# self.processed = False
# for fname in fnames:
# if fname.endswith('processed'):
# wfdir = 'processed'
# self.processed = True
# break
# for fpath in fnames:
# if fpath.endswith(wfdir):
# wffnames = [os.path.join(fpath, fname) for fname in os.listdir(fpath)]
# if 'syngine' in fpath.split('/')[-1]:
# wffnames_syn = [os.path.join(fpath, fname) for fname in os.listdir(fpath)]
# else:
# wffnames = fnames
if fnames is not None:
self.appendWFData(fnames)
if fnames_syn is not None:
self.appendWFData(fnames_syn, synthetic=True)
else:
return False
# various pre-processing steps:
# remove possible underscores in station names
# self.wfdata = remove_underscores(self.wfdata)
# check for gaps and merge
self.wfdata, _ = check_for_gaps_and_merge(self.wfdata)
# check for nans
check_for_nan(self.wfdata)
# check for stations with rotated components
if checkRotated and metadata is not None:
self.wfdata = check4rotated(self.wfdata, metadata, verbosity=0)
# trim station components to same start value
trim_station_components(self.wfdata, trim_start=True, trim_end=False)
# make a copy of original data
self.wforiginal = self.getWFData().copy()
self.dirty = False
return True
def appendWFData(self, fnames, synthetic=False):
"""
Read waveform data from fnames and append it to current wf data
:param fnames: waveform data to append
:type fnames: list
"""
assert isinstance(fnames, list), "input parameter 'fnames' is " \
"supposed to be of type 'list' " \
"but is actually" \
" {0}".format(type(fnames))
if self.dirty:
self.resetWFData()
real_or_syn_data = {True: self.wfsyn,
False: self.wfdata}
warnmsg = ''
for fname in set(fnames):
try:
real_or_syn_data[synthetic] += read(fname, starttime=self.tstart, endtime=self.tstop)
except TypeError:
try:
real_or_syn_data[synthetic] += read(fname, format='GSE2', starttime=self.tstart, endtime=self.tstop)
except Exception as e:
try:
real_or_syn_data[synthetic] += read(fname, format='SEGY', starttime=self.tstart,
endtime=self.tstop)
except Exception as e:
warnmsg += '{0}\n{1}\n'.format(fname, e)
except SacIOError as se:
warnmsg += '{0}\n{1}\n'.format(fname, se)
if warnmsg:
warnmsg = 'WARNING in appendWFData: unable to read waveform data\n' + warnmsg
print(warnmsg)
def getWFData(self):
return self.wfdata
def getOriginalWFData(self):
return self.wforiginal
def getSynWFData(self):
return self.wfsyn
def resetWFData(self):
"""
Set waveform data to original waveform data
"""
if self.getOriginalWFData():
self.wfdata = self.getOriginalWFData().copy()
else:
self.wfdata = Stream()
self.dirty = False
def resetPicks(self):
"""
Clear all picks from event
"""
self.get_evt_data().picks = []
def get_evt_data(self):
return self.evtdata
def setEvtData(self, event):
self.evtdata = event
def applyEVTData(self, data, typ='pick'):
"""
Either takes an `obspy.core.event.Event` object and applies all new
information on the event to the actual data if typ is 'event or
creates ObsPy pick objects and append it to the picks list from the
PyLoT dictionary contain all picks if type is pick
:param data: data to apply, either picks or complete event
:type data:
:param typ: which event data to apply, 'pick' or 'event'
:type typ: str
:param authority_id: (currently unused)
:type: str
:raise OverwriteError:
"""
def applyPicks(picks):
"""
Creates ObsPy pick objects and append it to the picks list from the
PyLoT dictionary contain all picks.
:param picks:
:raise OverwriteError: raises an OverwriteError if the picks list is
not empty. The GUI will then ask for a decision.
"""
# firstonset = find_firstonset(picks)
# check for automatic picks
print("Writing phases to ObsPy-quakeml file")
for key in picks:
if not picks[key].get('P'):
continue
if picks[key]['P']['picker'] == 'auto':
print("Existing auto-picks will be overwritten in pick-dictionary!")
picks = picks_from_picksdict(picks)
break
else:
if self.get_evt_data().picks:
raise OverwriteError('Existing picks would be overwritten!')
else:
picks = picks_from_picksdict(picks)
break
self.get_evt_data().picks = picks
# if 'smi:local' in self.getID() and firstonset:
# fonset_str = firstonset.strftime('%Y_%m_%d_%H_%M_%S')
# ID = ResourceIdentifier('event/' + fonset_str)
# ID.convertIDToQuakeMLURI(authority_id=authority_id)
# self.get_evt_data().resource_id = ID
def applyEvent(event):
"""
takes an `obspy.core.event.Event` object and applies all new
information on the event to the actual data
:param event:
"""
if event is None:
print("applyEvent: Received None")
return
if self.isNew():
self.setEvtData(event)
else:
# prevent overwriting original pick information
event_old = self.get_evt_data()
if not event_old.resource_id == event.resource_id:
print("WARNING: Missmatch in event resource id's: {} and {}".format(
event_old.resource_id,
event.resource_id))
else:
picks = copy.deepcopy(event_old.picks)
event = merge_picks(event, picks)
# apply event information from location
event_old.update(event)
applydata = {'pick': applyPicks,
'event': applyEvent}
applydata[typ](data)
self._new = False
def rotate_wf_data(self):
self.waveform_data.rotate_zne(self.metadata)
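
The restructured Data class ending here delegates event bookkeeping to EventData and waveform handling to WaveformData. A hedged sketch of the new call surface, assuming the listed file exists and that WaveformData.load_waveforms keeps the old True/False return of setWFData:

from pylot.core.io.data import Data

data = Data()  # empty event, empty waveform container
if data.set_wf_data(['/data/EVENT_A/trace1.mseed']):  # hypothetical waveform file
    data.filter_wf_data(type='bandpass', freqmin=1.0, freqmax=10.0)  # detrend, taper, then obspy Stream.filter
    print(data.get_wf_data())
    data.reset_wf_data()  # discard the filtered copy again
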
class GenericDataStructure(object):
@@ -837,22 +248,6 @@ class PilotDataStructure(GenericDataStructure):
self.setExpandFields(['root', 'database'])
class ObspyDMTdataStructure(GenericDataStructure):
"""
Object containing the data access information for the old PILOT data
structure.
"""
def __init__(self, **fields):
if not fields:
fields = {'database': '',
'root': ''}
GenericDataStructure.__init__(self, **fields)
self.setExpandFields(['root', 'database'])
class SeiscompDataStructure(GenericDataStructure):
"""
Dictionary containing the data access information for an SDS data archive:

106 pylot/core/io/event.py Normal file
View File

@@ -0,0 +1,106 @@
import copy
from dataclasses import dataclass
from typing import Union
from obspy import read_events
from obspy.core.event import Event as ObsPyEvent
@dataclass
class EventData:
evtdata: Union[ObsPyEvent, None] = None
_new: bool = False
def __init__(self, evtdata=None):
self.set_event_data(evtdata)
def set_event_data(self, evtdata):
if isinstance(evtdata, ObsPyEvent):
self.evtdata = evtdata
elif isinstance(evtdata, dict):
self.evtdata = self.read_pilot_event(**evtdata)
elif isinstance(evtdata, str):
self.evtdata = self.read_event_file(evtdata)
else:
self.set_new()
self.evtdata = ObsPyEvent(picks=[])
def read_event_file(self, evtdata: str) -> ObsPyEvent:
try:
cat = read_events(evtdata)
if len(cat) != 1:
raise ValueError(f'ambiguous event information for file: {evtdata}')
return cat[0]
except TypeError as e:
self.handle_event_file_error(e, evtdata)
def handle_event_file_error(self, e: TypeError, evtdata: str):
if 'Unknown format for file' in str(e):
if 'PHASES' in evtdata:
picks = self.picksdict_from_pilot(evtdata)
evtdata = ObsPyEvent(picks=self.picks_from_picksdict(picks))
elif 'LOC' in evtdata:
raise NotImplementedError('PILOT location information read support not yet implemented.')
elif 'event.pkl' in evtdata:
evtdata = self.qml_from_obspy_dmt(evtdata)
else:
raise e
else:
raise e
def set_new(self):
self._new = True
def is_new(self) -> bool:
return self._new
def get_picks_str(self) -> str:
return '\n'.join(str(pick) for pick in self.evtdata.picks)
def replace_origin(self, event: ObsPyEvent, force_overwrite: bool = False):
if self.evtdata.origins or force_overwrite:
event.origins = self.evtdata.origins
def replace_magnitude(self, event: ObsPyEvent, force_overwrite: bool = False):
if self.evtdata.magnitudes or force_overwrite:
event.magnitudes = self.evtdata.magnitudes
def replace_picks(self, event: ObsPyEvent, picktype: str):
checkflag = 1
picks = event.picks
for j, pick in reversed(list(enumerate(picks))):
if picktype in str(pick.method_id.id):
picks.pop(j)
checkflag = 2
if checkflag > 0:
for pick in self.evtdata.picks:
if picktype in str(pick.method_id.id):
picks.append(pick)
def get_id(self) -> Union[str, None]:
try:
return self.evtdata.resource_id.id
except:
return None
def apply_event_data(self, data, typ='pick'):
if typ == 'pick':
self.apply_picks(data)
elif typ == 'event':
self.apply_event(data)
def apply_picks(self, picks):
self.evtdata.picks = picks
def apply_event(self, event: ObsPyEvent):
if self.is_new():
self.evtdata = event
else:
old_event = self.evtdata
if old_event.resource_id == event.resource_id:
picks = copy.deepcopy(old_event.picks)
event = self.merge_picks(event, picks)
old_event.update(event)
else:
print(f"WARNING: Mismatch in event resource id's: {old_event.resource_id} and {event.resource_id}")

View File

@@ -1,7 +1,7 @@
from obspy import UTCDateTime
from obspy.core import event as ope
from pylot.core.util.utils import getLogin, getHash
from pylot.core.util.utils import get_login, get_hash
def create_amplitude(pickID, amp, unit, category, cinfo):
@@ -61,7 +61,7 @@ def create_creation_info(agency_id=None, creation_time=None, author=None):
:return:
'''
if author is None:
author = getLogin()
author = get_login()
if creation_time is None:
creation_time = UTCDateTime()
return ope.CreationInfo(agency_id=agency_id, author=author,
@@ -210,7 +210,7 @@ def create_resourceID(timetohash, restype, authority_id=None, hrstr=None):
'''
assert isinstance(timetohash, UTCDateTime), "'timetohash' is not an ObsPy" \
"UTCDateTime object"
hid = getHash(timetohash)
hid = get_hash(timetohash)
if hrstr is None:
resID = ope.ResourceIdentifier(restype + '/' + hid[0:6])
else:

View File

@@ -1,6 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import glob
import logging
import os
import warnings
@@ -16,29 +17,10 @@ from pylot.core.io.inputs import PylotParameter
from pylot.core.io.location import create_event, \
create_magnitude
from pylot.core.pick.utils import select_for_phase, get_quality_class
from pylot.core.util.utils import getOwner, full_range, four_digits, transformFilterString4Export, \
from pylot.core.util.utils import get_owner, full_range, four_digits, transformFilterString4Export, \
backtransformFilterString, loopIdentifyPhase, identifyPhase
def add_amplitudes(event, amplitudes):
amplitude_list = []
for pick in event.picks:
try:
a0 = amplitudes[pick.waveform_id.station_code]
amplitude = ope.Amplitude(generic_amplitude=a0 * 1e-3)
amplitude.unit = 'm'
amplitude.category = 'point'
amplitude.waveform_id = pick.waveform_id
amplitude.magnitude_hint = 'ML'
amplitude.pick_id = pick.resource_id
amplitude.type = 'AML'
amplitude_list.append(amplitude)
except KeyError:
continue
event.amplitudes = amplitude_list
return event
def readPILOTEvent(phasfn=None, locfn=None, authority_id='RUB', **kwargs):
"""
readPILOTEvent - function
@@ -58,7 +40,7 @@ def readPILOTEvent(phasfn=None, locfn=None, authority_id='RUB', **kwargs):
if phasfn is not None and os.path.isfile(phasfn):
phases = sio.loadmat(phasfn)
phasctime = UTCDateTime(os.path.getmtime(phasfn))
phasauthor = getOwner(phasfn)
phasauthor = get_owner(phasfn)
else:
phases = None
phasctime = None
@@ -66,7 +48,7 @@ def readPILOTEvent(phasfn=None, locfn=None, authority_id='RUB', **kwargs):
if locfn is not None and os.path.isfile(locfn):
loc = sio.loadmat(locfn)
locctime = UTCDateTime(os.path.getmtime(locfn))
locauthor = getOwner(locfn)
locauthor = get_owner(locfn)
else:
loc = None
locctime = None
@@ -192,32 +174,7 @@ def convert_pilot_times(time_array):
return UTCDateTime(*times)
def picksdict_from_obs(fn):
"""
create pick dictionary from obs file
:param fn: filename
:type fn:
:return:
:rtype:
"""
picks = dict()
station_name = str()
for line in open(fn, 'r'):
if line.startswith('#'):
continue
else:
phase_line = line.split()
if not station_name == phase_line[0]:
phase = dict()
station_name = phase_line[0]
phase_name = phase_line[4].upper()
pick = UTCDateTime(phase_line[6] + phase_line[7] + phase_line[8])
phase[phase_name] = dict(mpp=pick, fm=phase_line[5])
picks[station_name] = phase
return picks
def picksdict_from_picks(evt):
def picksdict_from_picks(evt, parameter=None):
"""
Takes an Event object and return the pick dictionary commonly used within
PyLoT
@@ -230,6 +187,7 @@ def picksdict_from_picks(evt):
'auto': {}
}
for pick in evt.picks:
errors = None
phase = {}
station = pick.waveform_id.station_code
if pick.waveform_id.channel_code is None:
@@ -273,32 +231,28 @@ def picksdict_from_picks(evt):
phase['epp'] = epp
phase['lpp'] = lpp
phase['spe'] = spe
try:
phase['weight'] = weight
except:
# get onset weight from uncertainty
infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
print('Using default input file {}'.format(infile))
parameter = PylotParameter(infile)
weight = phase.get('weight')
if not weight:
if not parameter:
logging.warning('Using ')
logging.warning('Using default input parameter')
parameter = PylotParameter()
pick.phase_hint = identifyPhase(pick.phase_hint)
if pick.phase_hint == 'P':
errors = parameter['timeerrorsP']
elif pick.phase_hint == 'S':
errors = parameter['timeerrorsS']
weight = get_quality_class(spe, errors)
phase['weight'] = weight
if errors:
weight = get_quality_class(spe, errors)
phase['weight'] = weight
phase['channel'] = channel
phase['network'] = network
phase['picker'] = pick_method
try:
if pick.polarity == 'positive':
phase['fm'] = 'U'
elif pick.polarity == 'negative':
phase['fm'] = 'D'
else:
phase['fm'] = 'N'
except:
print("No FM info available!")
if pick.polarity == 'positive':
phase['fm'] = 'U'
elif pick.polarity == 'negative':
phase['fm'] = 'D'
else:
phase['fm'] = 'N'
phase['filter_id'] = filter_id if filter_id is not None else ''
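
The hunk above threads an optional PylotParameter into picksdict_from_picks so that onset weights can be derived from the configured time errors instead of always falling back to ~/.pylot/pylot.in. A hedged call sketch; 'event.xml' is a hypothetical QuakeML file:

from obspy import read_events

from pylot.core.io.inputs import PylotParameter
from pylot.core.io.phases import picksdict_from_picks

event = read_events('event.xml')[0]
picks = picksdict_from_picks(event, parameter=PylotParameter())  # defaults fill in where no weight is stored
print(sorted(picks['auto'].keys()))  # station codes with automatic picks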
@@ -375,636 +329,228 @@ def picks_from_picksdict(picks, creation_info=None):
return picks_list
def reassess_pilot_db(root_dir, db_dir, out_dir=None, fn_param=None, verbosity=0):
# TODO: change root to datapath
db_root = os.path.join(root_dir, db_dir)
evt_list = glob.glob1(db_root, 'e????.???.??')
for evt in evt_list:
if verbosity > 0:
print('Reassessing event {0}'.format(evt))
reassess_pilot_event(root_dir, db_dir, evt, out_dir, fn_param, verbosity)
def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None, verbosity=0):
from obspy import read
from pylot.core.io.inputs import PylotParameter
from pylot.core.pick.utils import earllatepicker
# TODO: change root to datapath
default = PylotParameter(fn_param, verbosity)
search_base = os.path.join(root_dir, db_dir, event_id)
phases_file = glob.glob(os.path.join(search_base, 'PHASES.mat'))
if not phases_file:
return
if verbosity > 1:
print('Opening PILOT phases file: {fn}'.format(fn=phases_file[0]))
picks_dict = picksdict_from_pilot(phases_file[0])
if verbosity > 0:
print('Dictionary read from PHASES.mat:\n{0}'.format(picks_dict))
datacheck = list()
info = None
for station in picks_dict.keys():
fn_pattern = os.path.join(search_base, '{0}*'.format(station))
try:
st = read(fn_pattern)
except TypeError as e:
if 'Unknown format for file' in e.message:
try:
st = read(fn_pattern, format='GSE2')
except ValueError as e:
if e.message == 'second must be in 0..59':
info = 'A known Error was raised. Please find the list of corrupted files and double-check these files.'
datacheck.append(fn_pattern + ' (time info)\n')
continue
else:
raise ValueError(e.message)
except Exception as e:
if 'No file matching file pattern:' in e.message:
if verbosity > 0:
warnings.warn('no waveform data found for station {station}'.format(station=station),
RuntimeWarning)
datacheck.append(fn_pattern + ' (no data)\n')
continue
else:
raise e
else:
raise e
for phase in picks_dict[station].keys():
try:
mpp = picks_dict[station][phase]['mpp']
except KeyError as e:
print(e.message, station)
continue
sel_st = select_for_phase(st, phase)
if not sel_st:
msg = 'no waveform data found for station {station}'.format(station=station)
warnings.warn(msg, RuntimeWarning)
continue
stime, etime = full_range(sel_st)
rel_pick = mpp - stime
epp, lpp, spe = earllatepicker(sel_st,
default.get('nfac{0}'.format(phase)),
default.get('tsnrz' if phase == 'P' else 'tsnrh'),
Pick1=rel_pick,
iplot=0,
verbosity=0)
if epp is None or lpp is None:
continue
epp = stime + epp
lpp = stime + lpp
min_diff = 3 * st[0].stats.delta
if lpp - mpp < min_diff:
lpp = mpp + min_diff
if mpp - epp < min_diff:
epp = mpp - min_diff
picks_dict[station][phase] = dict(epp=epp, mpp=mpp, lpp=lpp, spe=spe)
if datacheck:
if info:
if verbosity > 0:
print(info + ': {0}'.format(search_base))
fncheck = open(os.path.join(search_base, 'datacheck_list'), 'w')
fncheck.writelines(datacheck)
fncheck.close()
del datacheck
# create Event object for export
evt = ope.Event(resource_id=event_id)
evt.picks = picks_from_picksdict(picks_dict)
# write phase information to file
if not out_dir:
fnout_prefix = os.path.join(root_dir, db_dir, event_id, 'PyLoT_{0}.'.format(event_id))
else:
out_dir = os.path.join(out_dir, db_dir)
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
fnout_prefix = os.path.join(out_dir, 'PyLoT_{0}.'.format(event_id))
evt.write(fnout_prefix + 'xml', format='QUAKEML')
def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
def write_phases(arrivals, fformat, filename, parameter=None, eventinfo=None):
"""
Function of methods to write phases to the following standard file
formats used for locating earthquakes:
Writes earthquake phase data to different file formats.
HYPO71, NLLoc, VELEST, HYPOSAT, FOCMEC, and hypoDD
:param arrivals:dictionary containing all phase information including
station ID, phase, first motion, weight (uncertainty), ...
:param arrivals: Dictionary containing phase information (station ID, phase, first motion, weight, etc.)
:type arrivals: dict
:param fformat: chosen file format (location routine),
choose between NLLoc, HYPO71, HYPOSAT, VELEST,
HYPOINVERSE, FOCMEC, and hypoDD
:param fformat: File format to write to (e.g., 'NLLoc', 'HYPO71', 'HYPOSAT', 'VELEST', 'HYPODD', 'FOCMEC')
:type fformat: str
:param filename: full path and name of phase file
:type filename: string
:param parameter: all input information
:type parameter: object
:param eventinfo: optional, needed for VELEST-cnv file
and FOCMEC- and HASH-input files
:type eventinfo: `obspy.core.event.Event` object
:param filename: Path and name of the output phase file
:type filename: str
:param parameter: Additional parameters for writing the phase data
:type parameter: object
:param eventinfo: Event information needed for specific formats like VELEST, FOCMEC, and HASH
:type eventinfo: obspy.core.event.Event
"""
if fformat == 'NLLoc':
print("Writing phases to %s for NLLoc" % filename)
fid = open("%s" % filename, 'w')
# write header
fid.write('# EQEVENT: %s Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' %
(parameter.get('database'), parameter.get('eventID')))
arrivals = chooseArrivals(arrivals) # MP MP what is chooseArrivals? It is not defined anywhere
for key in arrivals:
# P onsets
if 'P' in arrivals[key]:
try:
fm = arrivals[key]['P']['fm']
except KeyError as e:
print(e)
fm = None
if fm is None:
fm = '?'
onset = arrivals[key]['P']['mpp']
year = onset.year
month = onset.month
day = onset.day
hh = onset.hour
mm = onset.minute
ss = onset.second
ms = onset.microsecond
ss_ms = ss + ms / 1000000.0
pweight = 1 # use pick
try:
if arrivals[key]['P']['weight'] >= 4:
pweight = 0 # do not use pick
print("Station {}: Uncertain pick, do not use it!".format(key))
except KeyError as e:
print(e.message + '; no weight set during processing')
fid.write('%s ? ? ? P %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
fm,
year,
month,
day,
hh,
mm,
ss_ms,
pweight))
# S onsets
if 'S' in arrivals[key] and arrivals[key]['S']['mpp'] is not None:
fm = '?'
onset = arrivals[key]['S']['mpp']
year = onset.year
month = onset.month
day = onset.day
hh = onset.hour
mm = onset.minute
ss = onset.second
ms = onset.microsecond
ss_ms = ss + ms / 1000000.0
sweight = 1 # use pick
try:
if arrivals[key]['S']['weight'] >= 4:
sweight = 0 # do not use pick
except KeyError as e:
print(str(e) + '; no weight set during processing')
Ao = arrivals[key]['S']['Ao'] # peak-to-peak amplitude
if Ao == None:
Ao = 0.0
# fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 %9.2f 0 0 %d \n' % (key,
fm,
year,
month,
day,
hh,
mm,
ss_ms,
Ao,
sweight))
fid.close()
elif fformat == 'HYPO71':
print("Writing phases to %s for HYPO71" % filename)
fid = open("%s" % filename, 'w')
# write header
fid.write(' %s\n' %
parameter.get('eventID'))
arrivals = chooseArrivals(arrivals) # MP MP what is chooseArrivals? It is not defined anywhere
for key in arrivals:
if arrivals[key]['P']['weight'] < 4:
stat = key
if len(stat) > 4: # HYPO71 handles only 4-string station IDs
stat = stat[1:5]
Ponset = arrivals[key]['P']['mpp']
Sonset = arrivals[key]['S']['mpp']
pweight = arrivals[key]['P']['weight']
sweight = arrivals[key]['S']['weight']
fm = arrivals[key]['P']['fm']
if fm is None:
fm = '-'
Ao = arrivals[key]['S']['Ao']
if Ao is None:
Ao = ''
else:
Ao = str('%7.2f' % Ao)
year = Ponset.year
if year >= 2000:
year = year - 2000
else:
year = year - 1900
month = Ponset.month
day = Ponset.day
hh = Ponset.hour
mm = Ponset.minute
ss = Ponset.second
ms = Ponset.microsecond
ss_ms = ss + ms / 1000000.0
if pweight < 2:
pstr = 'I'
elif pweight >= 2:
pstr = 'E'
if arrivals[key]['S']['weight'] < 4:
Sss = Sonset.second
Sms = Sonset.microsecond
Sss_ms = Sss + Sms / 1000000.0
Sss_ms = str('%5.02f' % Sss_ms)
if sweight < 2:
sstr = 'I'
elif sweight >= 2:
sstr = 'E'
fid.write('%-4s%sP%s%d %02d%02d%02d%02d%02d%5.2f %s%sS %d %s\n' % (stat,
pstr,
fm,
pweight,
year,
month,
day,
hh,
mm,
ss_ms,
Sss_ms,
sstr,
sweight,
Ao))
else:
fid.write('%-4s%sP%s%d %02d%02d%02d%02d%02d%5.2f %s\n' % (stat,
pstr,
fm,
pweight,
year,
month,
day,
hh,
mm,
ss_ms,
Ao))
def write_nlloc():
with open(filename, 'w') as fid:
fid.write('# EQEVENT: {} Label: EQ{} Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n'.format(
parameter.get('database'), parameter.get('eventID')))
for key, value in arrivals.items():
for phase in ['P', 'S']:
if phase in value:
fm = value[phase].get('fm', '?')
onset = value[phase]['mpp']
ss_ms = onset.second + onset.microsecond / 1000000.0
weight = 1 if value[phase].get('weight', 0) < 4 else 0
amp = value[phase].get('Ao', 0.0) if phase == 'S' else ''
fid.write('{} ? ? ? {} {}{}{} {}{} {:7.4f} GAU 0 {} 0 0 {}\n'.format(
key, phase, fm, onset.year, onset.month, onset.day, onset.hour, onset.minute, ss_ms, amp,
weight))
fid.close()
def write_hypo71():
with open(filename, 'w') as fid:
fid.write(
' {}\n'.format(parameter.get('eventID')))
for key, value in arrivals.items():
if value['P'].get('weight', 0) < 4:
stat = key[:4]
Ponset = value['P']['mpp']
Sonset = value.get('S', {}).get('mpp')
pweight = value['P'].get('weight', 0)
sweight = value.get('S', {}).get('weight', 0)
fm = value['P'].get('fm', '-')
Ao = value.get('S', {}).get('Ao', '')
year = Ponset.year - 2000 if Ponset.year >= 2000 else Ponset.year - 1900
ss_ms = Ponset.second + Ponset.microsecond / 1000000.0
if Sonset:
Sss_ms = Sonset.second + Sonset.microsecond / 1000000.0
fid.write('{}P{}{}{} {}{}{}{}{} {:5.2f} {}{}S {} {}\n'.format(
stat, 'I' if pweight < 2 else 'E', fm, pweight, year, Ponset.month, Ponset.day,
Ponset.hour, Ponset.minute, ss_ms, Sss_ms, 'I' if sweight < 2 else 'E', sweight, Ao))
else:
fid.write('{}P{}{}{} {}{}{}{}{} {:5.2f} {}\n'.format(
stat, 'I' if pweight < 2 else 'E', fm, pweight, year, Ponset.month, Ponset.day,
Ponset.hour, Ponset.minute, ss_ms, Ao))
elif fformat == 'HYPOSAT':
print("Writing phases to %s for HYPOSAT" % filename)
fid = open("%s" % filename, 'w')
# write header
fid.write('%s, event %s \n' % (parameter.get('database'), parameter.get('eventID')))
arrivals = chooseArrivals(arrivals) # MP MP what is chooseArrivals? It is not defined anywhere
for key in arrivals:
# P onsets
if 'P' in arrivals[key] and arrivals[key]['P']['mpp'] is not None:
if arrivals[key]['P']['weight'] < 4:
Ponset = arrivals[key]['P']['mpp']
pyear = Ponset.year
pmonth = Ponset.month
pday = Ponset.day
phh = Ponset.hour
pmm = Ponset.minute
pss = Ponset.second
pms = Ponset.microsecond
Pss = pss + pms / 1000000.0
# use symmetrized picking error as std
# (read the HYPOSAT manual)
pstd = arrivals[key]['P']['spe']
if pstd is None:
errorsP = parameter.get('timeerrorsP')
if arrivals[key]['P']['weight'] == 0:
pstd = errorsP[0]
elif arrivals[key]['P']['weight'] == 1:
pstd = errorsP[1]
elif arrivals[key]['P']['weight'] == 2:
pstd = errorsP[2]
elif arrivals[key]['P']['weight'] == 3:
psrd = errorsP[3]
else:
pstd = errorsP[4]
fid.write('%-5s P1 %4.0f %02d %02d %02d %02d %05.02f %5.3f -999. 0.00 -999. 0.00\n'
% (key, pyear, pmonth, pday, phh, pmm, Pss, pstd))
# S onsets
if 'S' in arrivals[key] and arrivals[key]['S']['mpp'] is not None:
if arrivals[key]['S']['weight'] < 4:
Sonset = arrivals[key]['S']['mpp']
syear = Sonset.year
smonth = Sonset.month
sday = Sonset.day
shh = Sonset.hour
smm = Sonset.minute
sss = Sonset.second
sms = Sonset.microsecond
Sss = sss + sms / 1000000.0
sstd = arrivals[key]['S']['spe']
if pstd is None:
errorsS = parameter.get('timeerrorsS')
if arrivals[key]['S']['weight'] == 0:
pstd = errorsS[0]
elif arrivals[key]['S']['weight'] == 1:
pstd = errorsS[1]
elif arrivals[key]['S']['weight'] == 2:
pstd = errorsS[2]
elif arrivals[key]['S']['weight'] == 3:
psrd = errorsS[3]
else:
pstd = errorsP[4]
fid.write('%-5s S1 %4.0f %02d %02d %02d %02d %05.02f %5.3f -999. 0.00 -999. 0.00\n'
% (key, syear, smonth, sday, shh, smm, Sss, sstd))
fid.close()
def write_hyposat():
with open(filename, 'w') as fid:
fid.write('{}, event {} \n'.format(parameter.get('database'), parameter.get('eventID')))
for key, value in arrivals.items():
for phase in ['P', 'S']:
if phase in value and value[phase].get('weight', 0) < 4:
onset = value[phase]['mpp']
ss_ms = onset.second + onset.microsecond / 1000000.0
std = value[phase].get('spe', parameter.get('timeerrorsP')[value[phase].get('weight', 0)])
fid.write(
'{:<5} {}1 {:4} {:02} {:02} {:02} {:02} {:05.02f} {:5.3f} -999. 0.00 -999. 0.00\n'.format(
key, phase, onset.year, onset.month, onset.day, onset.hour, onset.minute, ss_ms, std))
elif fformat == 'VELEST':
print("Writing phases to %s for VELEST" % filename)
fid = open("%s" % filename, 'w')
# get informations needed in cnv-file
# check, whether latitude is N or S and longitude is E or W
try:
eventsource = eventinfo.origins[0]
except:
def write_velest():
if not eventinfo:
print("No source origin calculated yet, thus no cnv-file creation possible!")
return
if eventsource['latitude'] < 0:
cns = 'S'
else:
cns = 'N'
if eventsource['longitude'] < 0:
cew = 'W'
else:
cew = 'E'
# get last two integers of origin year
stime = eventsource['time']
if stime.year - 2000 >= 0:
syear = stime.year - 2000
else:
syear = stime.year - 1900
ifx = 0 # default value, see VELEST manual, pp. 22-23
# write header
fid.write('%s%02d%02d %02d%02d %05.2f %7.4f%c %8.4f%c %7.2f %6.2f %02.0f 0.0 0.03 1.0 1.0\n' % (
syear, stime.month, stime.day, stime.hour, stime.minute, stime.second, eventsource['latitude'],
cns, eventsource['longitude'], cew, eventsource['depth'], eventinfo.magnitudes[0]['mag'], ifx))
n = 0
# check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
if isinstance(arrivals, dict) == False:
# convert pick object (PyLoT) into dictionary
evt = ope.Event(resource_id=eventinfo['resource_id'])
evt.picks = arrivals
arrivals = picksdict_from_picks(evt)
# check for automatic and manual picks
# prefer manual picks
usedarrivals = chooseArrivals(arrivals)
for key in usedarrivals:
# P onsets
if 'P' in usedarrivals[key]:
if usedarrivals[key]['P']['weight'] < 4:
n += 1
stat = key
if len(stat) > 4: # VELEST handles only 4-string station IDs
stat = stat[1:5]
Ponset = usedarrivals[key]['P']['mpp']
Pweight = usedarrivals[key]['P']['weight']
Prt = Ponset - stime # onset time relative to source time
if n % 6 != 0:
fid.write('%-4sP%d%6.2f' % (stat, Pweight, Prt))
else:
fid.write('%-4sP%d%6.2f\n' % (stat, Pweight, Prt))
# S onsets
if 'S' in usedarrivals[key]:
if usedarrivals[key]['S']['weight'] < 4:
n += 1
stat = key
if len(stat) > 4: # VELEST handles only 4-string station IDs
stat = stat[1:5]
Sonset = usedarrivals[key]['S']['mpp']
Sweight = usedarrivals[key]['S']['weight']
Srt = Ponset - stime # onset time relative to source time
if n % 6 != 0:
fid.write('%-4sS%d%6.2f' % (stat, Sweight, Srt))
else:
fid.write('%-4sS%d%6.2f\n' % (stat, Sweight, Srt))
fid.close()
with open(filename, 'w') as fid:
origin = eventinfo.origins[0]
lat_dir = 'S' if origin.latitude < 0 else 'N'
lon_dir = 'W' if origin.longitude < 0 else 'E'
year = origin.time.year - 2000 if origin.time.year >= 2000 else origin.time.year - 1900
fid.write(
'{}{}{} {}{} {} {:05.2f} {:7.4f}{} {:8.4f}{} {:7.2f} {:6.2f} {:02.0f} 0.0 0.03 1.0 1.0\n'.format(
year, origin.time.month, origin.time.day, origin.time.hour, origin.time.minute, origin.time.second,
origin.latitude, lat_dir, origin.longitude, lon_dir, origin.depth, eventinfo.magnitudes[0].mag, 0))
for key, value in arrivals.items():
for phase in ['P', 'S']:
if phase in value and value[phase].get('weight', 0) < 4:
onset = value[phase]['mpp']
rt = (onset - origin.time).total_seconds()
fid.write('{:<4}{}{}{:6.2f}\n'.format(key[:4], phase, value[phase].get('weight', 0), rt))
elif fformat == 'HYPODD':
print("Writing phases to %s for hypoDD" % filename)
fid = open("%s" % filename, 'w')
# get event information needed for hypoDD-phase file
try:
eventsource = eventinfo.origins[0]
except:
def write_hypodd():
if not eventinfo:
print("No source origin calculated yet, thus no hypoDD-infile creation possible!")
return
stime = eventsource['time']
try:
event = eventinfo['pylot_id']
hddID = event.split('.')[0][1:5]
except:
print("Error 1111111!")
hddID = "00000"
# write header
fid.write('# %d %d %d %d %d %5.2f %7.4f +%6.4f %7.4f %4.2f 0.1 0.5 %4.2f %s\n' % (
stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
eventsource['latitude'], eventsource['longitude'], eventsource['depth'] / 1000,
eventinfo.magnitudes[0]['mag'], eventsource['quality']['standard_error'], hddID))
# check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
if isinstance(arrivals, dict) == False:
# convert pick object (PyLoT) into dictionary
evt = ope.Event(resource_id=eventinfo['resource_id'])
evt.picks = arrivals
arrivals = picksdict_from_picks(evt)
# check for automatic and manual picks
# prefer manual picks
usedarrivals = chooseArrivals(arrivals)
for key in usedarrivals:
if 'P' in usedarrivals[key]:
# P onsets
if usedarrivals[key]['P']['weight'] < 4:
Ponset = usedarrivals[key]['P']['mpp']
Prt = Ponset - stime # onset time relative to source time
fid.write('%s %6.3f 1 P\n' % (key, Prt))
if 'S' in usedarrivals[key]:
# S onsets
if usedarrivals[key]['S']['weight'] < 4:
Sonset = usedarrivals[key]['S']['mpp']
Srt = Sonset - stime # onset time relative to source time
fid.write('%-5s %6.3f 1 S\n' % (key, Srt))
with open(filename, 'w') as fid:
origin = eventinfo.origins[0]
stime = origin.time
fid.write('# {} {} {} {} {} {} {:7.4f} +{:6.4f} {:7.4f} {:4.2f} 0.1 0.5 {:4.2f} {}\n'.format(
stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
origin.latitude, origin.longitude, origin.depth / 1000, eventinfo.magnitudes[0].mag,
origin.quality.standard_error, "00000"))
for key, value in arrivals.items():
for phase in ['P', 'S']:
if phase in value and value[phase].get('weight', 0) < 4:
onset = value[phase]['mpp']
rt = (onset - stime).total_seconds()
fid.write('{} {:6.3f} 1 {}\n'.format(key, rt, phase))
fid.close()
elif fformat == 'FOCMEC':
print("Writing phases to %s for FOCMEC" % filename)
fid = open("%s" % filename, 'w')
# get event information needed for FOCMEC-input file
try:
eventsource = eventinfo.origins[0]
except:
def write_focmec():
if not eventinfo:
print("No source origin calculated yet, thus no FOCMEC-infile creation possible!")
return
stime = eventsource['time']
# avoid printing '*' in focmec-input file
if parameter.get('eventid') == '*' or parameter.get('eventid') is None:
evID = 'e0000'
else:
evID = parameter.get('eventid')
# write header line including event information
fid.write('%s %d%02d%02d%02d%02d%02.0f %7.4f %6.4f %3.1f %3.1f\n' % (evID,
stime.year, stime.month, stime.day,
stime.hour, stime.minute, stime.second,
eventsource['latitude'],
eventsource['longitude'],
eventsource['depth'] / 1000,
eventinfo.magnitudes[0]['mag']))
picks = eventinfo.picks
# check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
if isinstance(arrivals, dict) == False:
# convert pick object (PyLoT) into dictionary
evt = ope.Event(resource_id=eventinfo['resource_id'])
evt.picks = arrivals
arrivals = picksdict_from_picks(evt)
# check for automatic and manual picks
# prefer manual picks
usedarrivals = chooseArrivals(arrivals)
for key in usedarrivals:
if 'P' in usedarrivals[key]:
if usedarrivals[key]['P']['weight'] < 4 and usedarrivals[key]['P']['fm'] is not None:
stat = key
for i in range(len(picks)):
station = picks[i].waveform_id.station_code
if station == stat:
# get resource ID
resid_picks = picks[i].get('resource_id')
# find same ID in eventinfo
# there it is the pick_id!!
for j in range(len(eventinfo.origins[0].arrivals)):
resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
if len(stat) > 4: # FOCMEC handles only 4-string station IDs
stat = stat[1:5]
az = eventinfo.origins[0].arrivals[j].get('azimuth')
inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
fid.write('%-4s %6.2f %6.2f%s \n' % (stat,
az,
inz,
usedarrivals[key]['P']['fm']))
with open(filename, 'w') as fid:
origin = eventinfo.origins[0]
stime = origin.time
fid.write('{} {}{:02d}{:02d}{:02d}{:02d}{:02.0f} {:7.4f} {:6.4f} {:3.1f} {:3.1f}\n'.format(
parameter.get('eventid', 'e0000'), stime.year, stime.month, stime.day, stime.hour, stime.minute,
stime.second, origin.latitude, origin.longitude, origin.depth / 1000, eventinfo.magnitudes[0].mag))
for key, value in arrivals.items():
if 'P' in value and value['P'].get('weight', 0) < 4 and value['P'].get('fm'):
for pick in eventinfo.picks:
if pick.waveform_id.station_code == key:
for arrival in origin.arrivals:
if arrival.pick_id == pick.resource_id and arrival.phase == 'P':
stat = key[:4]
az = arrival.azimuth
inz = arrival.takeoff_angle
fid.write('{:<4} {:6.2f} {:6.2f}{}\n'.format(stat, az, inz, value['P']['fm']))
break
fid.close()
def write_hash():
# Define filenames for HASH driver 1 and 2
filename1 = f"{filename}drv1.phase"
filename2 = f"{filename}drv2.phase"
print(f"Writing phases to {filename1} for HASH-driver 1")
print(f"Writing phases to {filename2} for HASH-driver 2")
# Open files for writing
with open(filename1, 'w') as fid1, open(filename2, 'w') as fid2:
# Get event information needed for HASH-input file
try:
eventsource = eventinfo.origins[0]
except IndexError:
print("No source origin calculated yet, thus no cnv-file creation possible!")
return
event = parameter.get('eventID')
hashID = event.split('.')[0][1:5]
latdeg = eventsource['latitude']
latmin = (eventsource['latitude'] * 60) / 10000
londeg = eventsource['longitude']
lonmin = (eventsource['longitude'] * 60) / 10000
erh = (eventsource.origin_uncertainty['min_horizontal_uncertainty'] +
eventsource.origin_uncertainty['max_horizontal_uncertainty']) / 2000
erz = eventsource.depth_errors['uncertainty']
stime = eventsource['time']
syear = stime.year % 100 # Calculate two-digit year
picks = eventinfo.picks
# Write header line including event information for HASH-driver 1
fid1.write(f"{syear:02d}{stime.month:02d}{stime.day:02d}{stime.hour:02d}{stime.minute:02d}"
f"{stime.second:05.2f}{latdeg:2d}N{latmin:05.2f}{londeg:3d}E{lonmin:05.2f}"
f"{eventsource['depth']:6.2f}{eventinfo.magnitudes[0]['mag']:4.2f}{erh:5.2f}{erz:5.2f}{hashID}\n")
# Write header line including event information for HASH-driver 2
fid2.write(f"{syear:02d}{stime.month:02d}{stime.day:02d}{stime.hour:02d}{stime.minute:02d}"
f"{stime.second:05.2f}{latdeg}N{latmin:05.2f}{londeg}E{lonmin:6.2f}{eventsource['depth']:5.2f}"
f"{eventsource['quality']['used_phase_count']:3d}{erh:5.2f}{erz:5.2f}"
f"{eventinfo.magnitudes[0]['mag']:4.2f}{hashID}\n")
# Write phase lines
for key, arrival in arrivals.items():
if 'P' in arrival and arrival['P']['weight'] < 4 and arrival['P']['fm'] is not None:
stat = key
ccode = arrival['P']['channel']
ncode = arrival['P']['network']
Pqual = 'I' if arrival['P']['weight'] < 2 else 'E'
for pick in picks:
if pick.waveform_id.station_code == stat:
resid_picks = pick.get('resource_id')
for origin_arrival in eventinfo.origins[0].arrivals:
if (origin_arrival.get('pick_id') == resid_picks and
origin_arrival.phase == 'P'):
if len(stat) > 4: # HASH handles only 4-character station IDs
stat = stat[1:5]
az = origin_arrival.get('azimuth')
inz = origin_arrival.get('takeoff_angle')
dist = origin_arrival.get('distance')
# Write phase line for HASH-driver 1
fid1.write(f"{stat:<4}{Pqual}P{arrival['P']['fm']}{arrival['P']['weight']:d}"
f"{dist:3.1f}{inz:03d}{az:03d}{ccode}\n")
# Write phase line for HASH-driver 2
fid2.write(f"{stat:<4} {ncode} {ccode} {Pqual} {arrival['P']['fm']}\n")
break
fid1.write(f"{'':<36}{hashID}")
# Prefer manual picks over automatic ones if possible
arrivals = chooseArrivals(arrivals)  # chooseArrivals is defined further below in this module
if fformat == 'NLLoc':
write_nlloc()
elif fformat == 'HYPO71':
write_hypo71()
elif fformat == 'HYPOSAT':
write_hyposat()
elif fformat == 'VELEST':
write_velest()
elif fformat == 'HYPODD':
write_hypodd()
elif fformat == 'FOCMEC':
write_focmec()
elif fformat == 'HASH':
# two different input files for
# HASH-driver 1 and 2 (see HASH manual!)
filename1 = filename + 'drv1' + '.phase'
filename2 = filename + 'drv2' + '.phase'
print("Writing phases to %s for HASH for HASH-driver 1" % filename1)
fid1 = open("%s" % filename1, 'w')
print("Writing phases to %s for HASH for HASH-driver 2" % filename2)
fid2 = open("%s" % filename2, 'w')
# get event information needed for HASH-input file
try:
eventsource = eventinfo.origins[0]
except:
print("No source origin calculated yet, thus no cnv-file creation possible!")
return
eventsource = eventinfo.origins[0]
event = parameter.get('eventID')
hashID = event.split('.')[0][1:5]
latdeg = eventsource['latitude']
latmin = eventsource['latitude'] * 60 / 10000
londeg = eventsource['longitude']
lonmin = eventsource['longitude'] * 60 / 10000
erh = 1 / 2 * (eventsource.origin_uncertainty['min_horizontal_uncertainty'] +
eventsource.origin_uncertainty['max_horizontal_uncertainty']) / 1000
erz = eventsource.depth_errors['uncertainty']
stime = eventsource['time']
if stime.year - 2000 >= 0:
syear = stime.year - 2000
else:
syear = stime.year - 1900
picks = eventinfo.picks
# write header line including event information
# for HASH-driver 1
fid1.write('%s%02d%02d%02d%02d%5.2f%2dN%5.2f%3dE%5.2f%6.3f%4.2f%5.2f%5.2f%s\n' % (syear,
stime.month, stime.day,
stime.hour, stime.minute,
stime.second,
latdeg, latmin, londeg,
lonmin, eventsource['depth'],
eventinfo.magnitudes[0][
'mag'], erh, erz,
hashID))
# write header line including event information
# for HASH-driver 2
fid2.write(
'%d%02d%02d%02d%02d%5.2f%dN%5.2f%3dE%6.2f%5.2f %d %5.2f %5.2f %4.2f %s \n' % (
syear, stime.month, stime.day,
stime.hour, stime.minute, stime.second,
latdeg, latmin, londeg, lonmin,
eventsource['depth'],
eventsource['quality']['used_phase_count'],
erh, erz, eventinfo.magnitudes[0]['mag'],
hashID))
# Prefer Manual Picks over automatic ones if possible
arrivals = chooseArrivals(arrivals) # MP MP what is chooseArrivals? It is not defined anywhere
# write phase lines
for key in arrivals:
if 'P' in arrivals[key]:
if arrivals[key]['P']['weight'] < 4 and arrivals[key]['P']['fm'] is not None:
stat = key
ccode = arrivals[key]['P']['channel']
ncode = arrivals[key]['P']['network']
if arrivals[key]['P']['weight'] < 2:
Pqual = 'I'
else:
Pqual = 'E'
for i in range(len(picks)):
station = picks[i].waveform_id.station_code
if station == stat:
# get resource ID
resid_picks = picks[i].get('resource_id')
# find same ID in eventinfo
# there it is the pick_id!!
for j in range(len(eventinfo.origins[0].arrivals)):
resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
if len(stat) > 4: # HASH handles only 4-string station IDs
stat = stat[1:5]
az = eventinfo.origins[0].arrivals[j].get('azimuth')
inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
dist = eventinfo.origins[0].arrivals[j].get('distance')
# write phase line for HASH-driver 1
fid1.write(
'%-4s%sP%s%d 0 %3.1f %03d %03d 2 1 %s\n' % (
stat, Pqual, arrivals[key]['P']['fm'], arrivals[key]['P']['weight'],
dist, inz, az, ccode))
# write phase line for HASH-driver 2
fid2.write('%-4s %s %s %s %s \n' % (
stat,
ncode,
ccode,
Pqual,
arrivals[key]['P']['fm']))
break
fid1.write(' %s' % hashID)
fid1.close()
fid2.close()
write_hash()
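For orientation, a hedged sketch of how the dispatcher above might be driven for the HASH case; the picks, parameter and eventinfo objects are assumed to come from a PyLoT workflow and the output path is a placeholder:
# Sketch only: 'picks', 'parameter' and 'eventinfo' are assumed to exist in a PyLoT session
write_phases(picks, 'HASH', '/tmp/event_', parameter, eventinfo)
# write_hash() then creates /tmp/event_drv1.phase and /tmp/event_drv2.phase,
# one input file per HASH driver (see the HASH manual)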
def chooseArrivals(arrivals):
@ -1124,7 +670,7 @@ def getQualitiesfromxml(path, errorsP, errorsS, plotflag=1, figure=None, verbosi
mstation = pick.waveform_id.station_code
mstation_ext = mstation + '_'
for mpick in arrivals_copy:
phase = identifyPhase(loopIdentifyPhase(pick.phase_hint)) # MP MP catch if this fails?
phase = identifyPhase(loopIdentifyPhase(pick.phase_hint)) # MP MP catch if this fails?
if ((mpick.waveform_id.station_code == mstation) or
(mpick.waveform_id.station_code == mstation_ext)) and \
(mpick.method_id.id.split('/')[1] == 'auto') and \

pylot/core/io/project.py Normal file

@ -0,0 +1,177 @@
import os
from obspy import UTCDateTime
from obspy.core.event import Origin, Magnitude
from pylot.core.util.event import Event
class Project(object):
'''
Pickable class containing information of a PyLoT project, like event lists and file locations.
'''
# TODO: remove rootpath
def __init__(self):
self.eventlist = []
self.location = None
self.rootpath = None
self.datapath = None
self.dirty = False
self.parameter = None
self._table = None
def add_eventlist(self, eventlist):
'''
Add events from an eventlist containing paths to event directories.
Will skip existing paths.
'''
if len(eventlist) == 0:
return
for item in eventlist:
event = Event(item)
event.rootpath = self.parameter['rootpath']
event.database = self.parameter['database']
event.datapath = self.parameter['datapath']
if not event.path in self.getPaths():
self.eventlist.append(event)
self.setDirty()
else:
print('Skipping event with path {}. Already part of project.'.format(event.path))
self.eventlist.sort(key=lambda x: x.pylot_id)
self.search_eventfile_info()
def remove_event(self, event):
self.eventlist.remove(event)
def remove_event_by_id(self, eventID):
for event in self.eventlist:
if eventID in str(event.resource_id):
self.remove_event(event)
break
def read_eventfile_info(self, filename, separator=','):
'''
Try to read event information from file (:param:filename) comparing specific event datetimes.
File structure (each row): event, date, time, magnitude, latitude, longitude, depth
separated by :param:separator each.
'''
with open(filename, 'r') as infile:
for line in infile.readlines():
eventID, date, time, mag, lat, lon, depth = line.split(separator)[:7]
# skip header or otherwise malformed lines
try:
day, month, year = date.split('/')
except ValueError:
continue
year = int(year)
# hardcoded, if year only consists of 2 digits (e.g. 16 instead of 2016)
if year < 100:
year += 2000
datetime = '{}-{}-{}T{}'.format(year, month, day, time)
try:
datetime = UTCDateTime(datetime)
except Exception as e:
print(e, datetime, filename)
continue
for event in self.eventlist:
if eventID in str(event.resource_id) or eventID in event.origins:
if event.origins:
origin = event.origins[0] # should have only one origin
if origin.time == datetime:
origin.latitude = float(lat)
origin.longitude = float(lon)
origin.depth = float(depth)
else:
continue
elif not event.origins:
origin = Origin(resource_id=event.resource_id,
time=datetime, latitude=float(lat),
longitude=float(lon), depth=float(depth))
event.origins.append(origin)
event.magnitudes.append(Magnitude(resource_id=event.resource_id,
mag=float(mag),
mag_type='M'))
break
def search_eventfile_info(self):
'''
Search all datapaths in rootpath for filenames with the given file extension fext
and try to read event info from them
'''
datapaths = []
fext = '.csv'
for event in self.eventlist:
if not event.datapath in datapaths:
datapaths.append(event.datapath)
for datapath in datapaths:
# datapath = os.path.join(self.rootpath, datapath)
if os.path.isdir(datapath):
for filename in os.listdir(datapath):
filename = os.path.join(datapath, filename)
if os.path.isfile(filename) and filename.endswith(fext):
try:
self.read_eventfile_info(filename)
except Exception as e:
print('Failed on reading eventfile info from file {}: {}'.format(filename, e))
else:
print("Directory %s does not exist!" % datapath)
def getPaths(self):
'''
Returns paths (eventlist) of all events saved in the project.
'''
paths = []
for event in self.eventlist:
paths.append(event.path)
return paths
def setDirty(self, value=True):
self.dirty = value
def getEventFromPath(self, path):
'''
Search for an event in the project by event path.
'''
for event in self.eventlist:
if event.path == path:
return event
def save(self, filename=None):
'''
Save PyLoT Project to a file.
Can be loaded by using project.load(filename).
'''
try:
import pickle
except ImportError:
import _pickle as pickle
if filename:
self.location = filename
else:
filename = self.location
table = self._table # MP: see below
try:
outfile = open(filename, 'wb')
self._table = [] # MP: Workaround as long as table cannot be saved as part of project
pickle.dump(self, outfile, protocol=pickle.HIGHEST_PROTOCOL)
self.setDirty(False)
self._table = table # MP: see above
return True
except Exception as e:
print('Could not pickle PyLoT project. Reason: {}'.format(e))
self.setDirty()
self._table = table # MP: see above
return False
@staticmethod
def load(filename):
'''
Load project from filename.
'''
import pickle
infile = open(filename, 'rb')
project = pickle.load(infile)
infile.close()
project.location = filename
print('Loaded %s' % filename)
return project
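A hedged usage sketch of the save/load round trip; the event directory paths, the parameter dictionary contents and the project file name are placeholders, not values from this repository:
# Sketch only: paths and parameter values are placeholders
project = Project()
project.parameter = {'rootpath': '', 'database': '', 'datapath': '/data/events'}
project.add_eventlist(['/data/events/e0001.015.21', '/data/events/e0002.015.21'])
if project.save('/tmp/example.plp'):             # pickles the project to disk
    reloaded = Project.load('/tmp/example.plp')  # restores event list and file locations
    print(reloaded.getPaths())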

pylot/core/io/utils.py Normal file

@ -0,0 +1,13 @@
import os
from typing import List
def validate_filenames(filenames: List[str]) -> List[str]:
"""
validate a list of filenames for file abundance
:param filenames: list of possible filenames
:type filenames: List[str]
:return: list of valid filenames
:rtype: List[str]
"""
return [fn for fn in filenames if os.path.isfile(fn)]
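As a small, hedged illustration (both file names below are placeholders):
# Illustrative only: both paths are placeholders
existing = validate_filenames(['/data/STAT1.mseed', '/no/such/file.mseed'])
# 'existing' keeps only the entries that point to files present on disk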

View File

@ -0,0 +1,123 @@
import logging
from dataclasses import dataclass, field
from typing import Union, List
from obspy import Stream, read
from obspy.io.sac import SacIOError
from pylot.core.io.utils import validate_filenames
from pylot.core.util.dataprocessing import Metadata
from pylot.core.util.utils import get_stations, check_for_nan, check4rotated
@dataclass
class WaveformData:
wfdata: Stream = field(default_factory=Stream)
wforiginal: Union[Stream, None] = None
wf_alt: Stream = field(default_factory=Stream)
dirty: bool = False
def load_waveforms(self, fnames: List[str], fnames_alt: List[str] = None, check_rotated=False, metadata=None, tstart=0, tstop=0):
fn_list = validate_filenames(fnames)
if not fn_list:
logging.warning('No valid filenames given for loading waveforms')
else:
self.clear()
self.add_waveforms(fn_list)
alt_fn_list = []
if fnames_alt is not None:
alt_fn_list = validate_filenames(fnames_alt)
if not alt_fn_list:
logging.warning('No valid alternative filenames given for loading waveforms')
else:
self.add_waveforms(alt_fn_list, alternative=True)
if not fn_list and not alt_fn_list:
logging.error('No filenames or alternative filenames given for loading waveforms')
return False
self.merge()
self.replace_nan()
if check_rotated and metadata:
self.rotate_zne(metadata)
self.trim_station_traces()
self.wforiginal = self.wfdata.copy()
self.dirty = False
return True
def add_waveforms(self, fnames: List[str], alternative: bool = False):
data_stream = self.wf_alt if alternative else self.wfdata
warnmsg = ''
for fname in set(fnames):
try:
data_stream += read(fname)
except TypeError:
try:
data_stream += read(fname, format='GSE2')
except Exception as e:
try:
data_stream += read(fname, format='SEGY')
except Exception as e:
warnmsg += f'{fname}\n{e}\n'
except SacIOError as se:
warnmsg += f'{fname}\n{se}\n'
if warnmsg:
print(f'WARNING in add_waveforms: unable to read waveform data\n{warnmsg}')
def clear(self):
self.wfdata = Stream()
self.wforiginal = None
self.wf_alt = Stream()
def reset(self):
"""
Resets the waveform data to its original state.
"""
if self.wforiginal:
self.wfdata = self.wforiginal.copy()
else:
self.wfdata = Stream()
self.dirty = False
def merge(self):
"""
check for gaps in Stream and merge if gaps are found
"""
gaps = self.wfdata.get_gaps()
if gaps:
merged = ['{}.{}.{}.{}'.format(*gap[:4]) for gap in gaps]
self.wfdata.merge(method=1)
logging.info('Merged the following stations because of gaps:')
for station in merged:
logging.info(station)
def replace_nan(self):
"""
Replace all NaNs in data with 0. (in place)
"""
self.wfdata = check_for_nan(self.wfdata)
def rotate_zne(self, metadata: Metadata = None):
"""
Check all traces in stream for rotation. If a trace is not in ZNE rotation (last symbol of channel code is numeric) and the trace
is in the metadata with azimuth and dip, rotate it to classical ZNE orientation.
Rotating the traces requires them to be of the same length, so all traces will be trimmed to a common length as a
side effect.
"""
self.wfdata = check4rotated(self.wfdata, metadata)
def trim_station_traces(self):
"""
trim data stream to common time window
"""
for station in get_stations(self.wfdata):
station_traces = self.wfdata.select(station=station)
station_traces.trim(starttime=max([trace.stats.starttime for trace in station_traces]),
endtime=min([trace.stats.endtime for trace in station_traces]))
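A hedged sketch of how WaveformData might be used from calling code; the waveform file names and the inventory path handed to Metadata are placeholders, and it is assumed here that Metadata accepts an inventory path on construction:
# Sketch only: waveform and inventory paths are placeholders
wf = WaveformData()
ok = wf.load_waveforms(fnames=['STAT1.mseed', 'STAT2.mseed'],
                       fnames_alt=['STAT1_denoised.mseed'],  # optional comparison data
                       check_rotated=True,
                       metadata=Metadata('inventory.xml'))   # assumes Metadata takes an inventory path
if ok:
    print(wf.wfdata)  # merged, NaN-free stream trimmed to common station time windows
    wf.reset()        # restore the untouched copy kept in wforiginal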

View File

@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
@ -25,4 +25,4 @@ def export(picks, fnout, parameter, eventinfo):
:type eventinfo: list object
'''
# write phases to FOCMEC-phase file
writephases(picks, 'FOCMEC', fnout, parameter, eventinfo)
write_phases(picks, 'FOCMEC', fnout, parameter, eventinfo)

View File

@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
@ -25,4 +25,4 @@ def export(picks, fnout, parameter, eventinfo):
:type eventinfo: list object
'''
# write phases to HASH-phase file
writephases(picks, 'HASH', fnout, parameter, eventinfo)
write_phases(picks, 'HASH', fnout, parameter, eventinfo)

View File

@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
@ -22,4 +22,4 @@ def export(picks, fnout, parameter):
:type parameter: object
'''
# write phases to HYPO71-phase file
writephases(picks, 'HYPO71', fnout, parameter)
write_phases(picks, 'HYPO71', fnout, parameter)

View File

@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
@ -25,4 +25,4 @@ def export(picks, fnout, parameter, eventinfo):
:type eventinfo: list object
'''
# write phases to hypoDD-phase file
writephases(picks, 'HYPODD', fnout, parameter, eventinfo)
write_phases(picks, 'HYPODD', fnout, parameter, eventinfo)

View File

@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
@ -22,4 +22,4 @@ def export(picks, fnout, parameter):
:type parameter: object
'''
# write phases to HYPOSAT-phase file
writephases(picks, 'HYPOSAT', fnout, parameter)
write_phases(picks, 'HYPOSAT', fnout, parameter)

View File

@ -7,7 +7,7 @@ import subprocess
from obspy import read_events
from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.gui import which
from pylot.core.util.utils import getPatternLine, runProgram
from pylot.core.util.version import get_git_version as _getVersionString
@ -34,7 +34,7 @@ def export(picks, fnout, parameter):
:type parameter: object
'''
# write phases to NLLoc-phase file
writephases(picks, 'NLLoc', fnout, parameter)
write_phases(picks, 'NLLoc', fnout, parameter)
def modify_inputs(ctrfn, root, nllocoutn, phasefn, tttn):

View File

@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pylot.core.io.phases import writephases
from pylot.core.io.phases import write_phases
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
@ -25,4 +25,4 @@ def export(picks, fnout, eventinfo, parameter=None):
:type parameter: object
'''
# write phases to VELEST-phase file
writephases(picks, 'VELEST', fnout, parameter, eventinfo)
write_phases(picks, 'VELEST', fnout, parameter, eventinfo)

View File

@ -22,7 +22,7 @@ from pylot.core.pick.picker import AICPicker, PragPicker
from pylot.core.pick.utils import checksignallength, checkZ4S, earllatepicker, \
getSNR, fmpicker, checkPonsets, wadaticheck, get_quality_class, PickingFailedException, MissingTraceException
from pylot.core.util.utils import getPatternLine, gen_Pool, \
get_bool, identifyPhaseID, get_None, correct_iplot
get_bool, identifyPhaseID, get_none, correct_iplot
def autopickevent(data, param, iplot=0, fig_dict=None, fig_dict_wadatijack=None, ncores=0, metadata=None, origin=None):
@ -258,7 +258,7 @@ class AutopickStation(object):
self.pickparams = copy.deepcopy(pickparam)
self.verbose = verbose
self.iplot = correct_iplot(iplot)
self.fig_dict = get_None(fig_dict)
self.fig_dict = get_none(fig_dict)
self.metadata = metadata
self.origin = origin

View File

@ -15,7 +15,7 @@ import numpy as np
from obspy.core import Stream, UTCDateTime
from scipy.signal import argrelmax
from pylot.core.util.utils import get_bool, get_None, SetChannelComponents
from pylot.core.util.utils import get_bool, get_none, SetChannelComponents
def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None, linecolor='k'):
@ -136,7 +136,7 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None, linecol
PickError = symmetrize_error(diffti_te, diffti_tl)
if iplot > 1:
if get_None(fig) is None:
if get_none(fig) is None:
fig = plt.figure() # iplot)
plt_flag = 1
fig._tight = True
@ -275,7 +275,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None, linecolor='k'):
try:
P1 = np.polyfit(xslope1, xraw[islope1], 1)
datafit1 = np.polyval(P1, xslope1)
except Exception as e:
except ValueError as e:
print("fmpicker: Problems with data fit! {}".format(e))
print("Skip first motion determination!")
return FM
@ -321,7 +321,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None, linecolor='k'):
try:
P2 = np.polyfit(xslope2, xfilt[islope2], 1)
datafit2 = np.polyval(P2, xslope2)
except Exception as e:
except ValueError as e:
emsg = 'fmpicker: polyfit failed: {}'.format(e)
print(emsg)
return FM
@ -344,7 +344,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None, linecolor='k'):
print("fmpicker: Found polarity %s" % FM)
if iplot > 1:
if get_None(fig) is None:
if get_none(fig) is None:
fig = plt.figure() # iplot)
plt_flag = 1
fig._tight = True
@ -868,7 +868,7 @@ def checksignallength(X, pick, minsiglength, pickparams, iplot=0, fig=None, line
returnflag = 0
if iplot > 1:
if get_None(fig) is None:
if get_none(fig) is None:
fig = plt.figure() # iplot)
plt_flag = 1
fig._tight = True
@ -1213,14 +1213,14 @@ def checkZ4S(X, pick, pickparams, iplot, fig=None, linecolor='k'):
t = np.linspace(diff_dict[key], trace.stats.endtime - trace.stats.starttime + diff_dict[key],
trace.stats.npts)
if i == 0:
if get_None(fig) is None:
if get_none(fig) is None:
fig = plt.figure() # self.iplot) ### WHY? MP MP
plt_flag = 1
ax1 = fig.add_subplot(3, 1, i + 1)
ax = ax1
ax.set_title('CheckZ4S, Station %s' % zdat[0].stats.station)
else:
if get_None(fig) is None:
if get_none(fig) is None:
fig = plt.figure() # self.iplot) ### WHY? MP MP
plt_flag = 1
ax = fig.add_subplot(3, 1, i + 1, sharex=ax1)
@ -1494,7 +1494,7 @@ def getQualityFromUncertainty(uncertainty, Errors):
# set initial quality to 4 (worst) and change only if one condition is hit
quality = 4
if get_None(uncertainty) is None:
if get_none(uncertainty) is None:
return quality
if uncertainty <= Errors[0]:

View File

@ -124,8 +124,8 @@ class Array_map(QtWidgets.QWidget):
self.cmaps_box = QtWidgets.QComboBox()
self.cmaps_box.setMaxVisibleItems(20)
[self.cmaps_box.addItem(map_name) for map_name in sorted(plt.colormaps())]
# try to set to hsv as default
self.cmaps_box.setCurrentIndex(self.cmaps_box.findText('hsv'))
# try to set to viridis as default
self.cmaps_box.setCurrentIndex(self.cmaps_box.findText('viridis'))
self.top_row.addWidget(QtWidgets.QLabel('Select a phase: '))
self.top_row.addWidget(self.comboBox_phase)
@ -474,17 +474,19 @@ class Array_map(QtWidgets.QWidget):
transform=ccrs.PlateCarree(), label='deleted'))
def openPickDlg(self, ind):
data = self._parent.get_data().getWFData()
wfdata = self._parent.get_data().get_wf_data()
wfdata_comp = self._parent.get_data().get_wf_dataComp()
for index in ind:
network, station = self._station_onpick_ids[index].split('.')[:2]
pyl_mw = self._parent
try:
data = data.select(station=station)
if not data:
wfdata = wfdata.select(station=station)
wfdata_comp = wfdata_comp.select(station=station)
if not wfdata:
self._warn('No data for station {}'.format(station))
return
pickDlg = PickDlg(self._parent, parameter=self.parameter,
data=data, network=network, station=station,
data=wfdata.copy(), data_compare=wfdata_comp.copy(), network=network, station=station,
picks=self._parent.get_current_event().getPick(station),
autopicks=self._parent.get_current_event().getAutopick(station),
filteroptions=self._parent.filteroptions, metadata=self.metadata,

View File

@ -338,19 +338,6 @@ class Metadata(object):
return inv, exc
def time_from_header(header):
"""
Function takes in the second line from a .gse file and takes out the date and time from that line.
:param header: second line from .gse file
:type header: string
:return: a list of integers of form [year, month, day, hour, minute, second, microsecond]
"""
timeline = header.split(' ')
time = timeline[1].split('/') + timeline[2].split(':')
time = time[:-1] + time[-1].split('.')
return [int(t) for t in time]
def check_time(datetime):
"""
Function takes in a date and time as a list and validates its values by trying to create a UTCDateTime object from it
@ -388,167 +375,6 @@ def check_time(datetime):
return False
# TODO: change root to datapath
def get_file_list(root_dir):
"""
Function uses a directory to get all the *.gse files from it.
:param root_dir: a directory containing the .gse files
:type root_dir: string
:return: returns a list of filenames (without path to them)
"""
file_list = glob.glob1(root_dir, '*.gse')
return file_list
def checks_station_second(datetime, file):
"""
Function uses the given list to check whether the parameter 'second' is set to 60 by mistake
and corrects the time if so. Can only correct the time if no date change would be necessary.
:param datetime: [year, month, day, hour, minute, second, microsecond]
:return: returns the input with the correct value for second
"""
if datetime[5] == 60:
if datetime[4] == 59:
if datetime[3] == 23:
err_msg = 'Date should be next day. ' \
'File not changed: {0}'.format(file)
raise ValueError(err_msg)
else:
datetime[3] += 1
datetime[4] = 0
datetime[5] = 0
else:
datetime[4] += 1
datetime[5] = 0
return datetime
def make_time_line(line, datetime):
"""
Function takes in the original line from a .gse file and a list of date and
time values to make a new line with corrected date and time.
:param line: second line from .gse file.
:type line: string
:param datetime: list of integers [year, month, day, hour, minute, second, microsecond]
:type datetime: list
:return: returns a string to write it into a file.
"""
ins_form = '{0:02d}:{1:02d}:{2:02d}.{3:03d}'
insertion = ins_form.format(int(datetime[3]),
int(datetime[4]),
int(datetime[5]),
int(datetime[6] * 1e-3))
newline = line[:16] + insertion + line[28:]
return newline
def evt_head_check(root_dir, out_dir=None):
"""
A function to make sure that an arbitrary number of .gse files have correct values in their header.
:param root_dir: a directory leading to the .gse files.
:type root_dir: string
:param out_dir: a directory to store the new files somewhere else.
:return: returns nothing
"""
if not out_dir:
print('WARNING files are going to be overwritten!')
inp = str(input('Continue? [y/N]'))
if not inp == 'y':
sys.exit()
filelist = get_file_list(root_dir)
nfiles = 0
for file in filelist:
infile = open(os.path.join(root_dir, file), 'r')
lines = infile.readlines()
infile.close()
datetime = time_from_header(lines[1])
if check_time(datetime):
continue
else:
nfiles += 1
datetime = checks_station_second(datetime, file)
print('writing ' + file)
# write File
lines[1] = make_time_line(lines[1], datetime)
if not out_dir:
out = open(os.path.join(root_dir, file), 'w')
out.writelines(lines)
out.close()
else:
out = open(os.path.join(out_dir, file), 'w')
out.writelines(lines)
out.close()
print(nfiles)
def read_metadata(path_to_inventory):
"""
take path_to_inventory and return either the corresponding list of files
found or the Parser object for a network dataless seed volume to prevent
read overhead for large dataless seed volumes
:param path_to_inventory:
:return: tuple containing a either list of files or `obspy.io.xseed.Parser`
object and the inventory type found
:rtype: tuple
"""
dlfile = list()
invfile = list()
respfile = list()
# possible file extensions specified here:
inv = dict(dless=dlfile, xml=invfile, resp=respfile, dseed=dlfile[:])
if os.path.isfile(path_to_inventory):
ext = os.path.splitext(path_to_inventory)[1].split('.')[1]
inv[ext] += [path_to_inventory]
else:
for ext in inv.keys():
inv[ext] += glob.glob1(path_to_inventory, '*.{0}'.format(ext))
invtype = key_for_set_value(inv)
if invtype is None:
print("Neither dataless-SEED file, inventory-xml file nor "
"RESP-file found!")
print("!!WRONG CALCULATION OF SOURCE PARAMETERS!!")
robj = None,
elif invtype == 'dless': # prevent multiple read of large dlsv
print("Reading metadata information from dataless-SEED file ...")
if len(inv[invtype]) == 1:
fullpath_inv = os.path.join(path_to_inventory, inv[invtype][0])
robj = Parser(fullpath_inv)
else:
robj = inv[invtype]
else:
print("Reading metadata information from inventory-xml file ...")
robj = read_inventory(inv[invtype])
return invtype, robj
# idea to optimize read_metadata
# def read_metadata_new(path_to_inventory):
# metadata_objects = []
# # read multiple files from directory
# if os.path.isdir(path_to_inventory):
# fnames = os.listdir(path_to_inventory)
# # read single file
# elif os.path.isfile(path_to_inventory):
# fnames = [path_to_inventory]
# else:
# print("Neither dataless-SEED file, inventory-xml file nor "
# "RESP-file found!")
# print("!!WRONG CALCULATION OF SOURCE PARAMETERS!!")
# fnames = []
#
# for fname in fnames:
# path_to_inventory_filename = os.path.join(path_to_inventory, fname)
# try:
# ftype, robj = read_metadata_file(path_to_inventory_filename)
# metadata_objects.append((ftype, robj))
# except Exception as e:
# print('Could not read metadata file {} '
# 'because of the following Exception: {}'.format(path_to_inventory_filename, e))
# return metadata_objects
def restitute_trace(input_tuple):
def no_metadata(tr, seed_id):
print('no metadata file found '

View File

@ -20,7 +20,7 @@ import matplotlib
matplotlib.use('Qt5Agg')
sys.path.append(os.path.join('/'.join(sys.argv[0].split('/')[:-1]), '../../..'))
from PyLoT import Project
from pylot.core.io.project import Project
from pylot.core.util.dataprocessing import Metadata
from pylot.core.util.array_map import Array_map

View File

@ -6,7 +6,7 @@ Created on Wed Jan 26 17:47:25 2015
@author: sebastianw
"""
from pylot.core.io.data import SeiscompDataStructure, PilotDataStructure, ObspyDMTdataStructure
from pylot.core.io.data import SeiscompDataStructure, PilotDataStructure
DATASTRUCTURE = {'PILOT': PilotDataStructure, 'SeisComP': SeiscompDataStructure,
'obspyDMT': ObspyDMTdataStructure, None: PilotDataStructure}
'obspyDMT': PilotDataStructure, None: PilotDataStructure}

View File

@ -20,6 +20,10 @@ from pylot.core.io.inputs import PylotParameter, FilterOptions
from pylot.core.util.obspyDMT_interface import check_obspydmt_eventfolder
from pylot.styles import style_settings
Rgba: Type[tuple] = Tuple[int, int, int, int]
Mplrgba: Type[tuple] = Tuple[float, float, float, float]
Mplrgbastr: Type[tuple] = Tuple[str, str, str, str]
def _pickle_method(m):
if m.im_self is None:
@ -83,25 +87,6 @@ def fit_curve(x, y):
return splev, splrep(x, y)
def getindexbounds(f, eta):
"""
Get indices of values closest below and above maximum value in an array
:param f: array
:type f: `~numpy.ndarray`
:param eta: look for the value in the array that is closest to max_value * eta
:type eta: float
:return: tuple containing index of max value, index of value closest below max value,
index of value closest above max value
:rtype: (int, int, int)
"""
mi = f.argmax() # get indices of max values
m = max(f) # get maximum value
b = m * eta #
l = find_nearest(f[:mi], b) # find closest value below max value
u = find_nearest(f[mi:], b) + mi # find closest value above max value
return mi, l, u
def gen_Pool(ncores=0):
"""
Generate a multiprocessing pool object utilizing ncores cores
@ -121,7 +106,7 @@ def gen_Pool(ncores=0):
print('gen_Pool: Generated multiprocessing Pool with {} cores\n'.format(ncores))
pool = multiprocessing.Pool(ncores, maxtasksperchild=100)
pool = multiprocessing.Pool(ncores)
return pool
@ -167,11 +152,11 @@ def clims(lim1, lim2):
"""
takes two pairs of limits and returns one pair of common limits
:param lim1: limit 1
:type lim1: int
:type lim1: List[int]
:param lim2: limit 2
:type lim2: int
:type lim2: List[int]
:return: new upper and lower limit common to both given limits
:rtype: [int, int]
:rtype: List[int]
>>> clims([0, 4], [1, 3])
[0, 4]
@ -303,7 +288,7 @@ def fnConstructor(s):
if type(s) is str:
s = s.split(':')[-1]
else:
s = getHash(UTCDateTime())
s = get_hash(UTCDateTime())
badchars = re.compile(r'[^A-Za-z0-9_. ]+|^\.|\.$|^ | $|^$')
badsuffix = re.compile(r'(aux|com[1-9]|con|lpt[1-9]|prn)(\.|$)')
@ -315,15 +300,32 @@ def fnConstructor(s):
return fn
def get_None(value):
def get_none(value):
"""
Convert "None" to None
:param value:
:type value: str, bool
:type value: str, NoneType
:return:
:rtype: bool
:rtype: type(value) or NoneType
>>> st = read()
>>> print(get_none(st))
3 Trace(s) in Stream:
BW.RJOB..EHZ | 2009-08-24T00:20:03.000000Z - 2009-08-24T00:20:32.990000Z | 100.0 Hz, 3000 samples
BW.RJOB..EHN | 2009-08-24T00:20:03.000000Z - 2009-08-24T00:20:32.990000Z | 100.0 Hz, 3000 samples
BW.RJOB..EHE | 2009-08-24T00:20:03.000000Z - 2009-08-24T00:20:32.990000Z | 100.0 Hz, 3000 samples
>>> get_none('Stream')
'Stream'
>>> get_none(0)
0
>>> get_none(0.)
0.0
>>> print(get_none('None'))
None
>>> print(get_none(None))
None
"""
if value == 'None':
if value is None or (type(value) is str and value == 'None'):
return None
else:
return value
@ -331,20 +333,47 @@ def get_None(value):
def get_bool(value):
"""
Convert string representations of bools to their true boolean value
Convert string representations of bools to their true boolean value. Return value if it cannot be identified as bool.
:param value:
:type value: str, bool
:type value: str, bool, int, float
:return: true boolean value
:rtype: bool
>>> get_bool(True)
True
>>> get_bool(False)
False
>>> get_bool(0)
False
>>> get_bool(0.)
False
>>> get_bool(0.1)
True
>>> get_bool(2)
True
>>> get_bool(-1)
False
>>> get_bool(-0.3)
False
>>> print(get_bool(None))
None
>>> get_bool('Stream')
'Stream'
"""
if type(value) is bool:
if type(value) == bool:
return value
elif value in ['True', 'true']:
return True
elif value in ['False', 'false']:
return False
elif isinstance(value, float) or isinstance(value, int):
if value > 0. or value > 0:
return True
else:
return False
else:
return bool(value)
return value
def four_digits(year):
"""
@ -355,8 +384,8 @@ def four_digits(year):
:return: four digit year correspondent
:rtype: int
>>> four_digits(20)
1920
>>> four_digits(75)
1975
>>> four_digits(16)
2016
>>> four_digits(00)
@ -438,36 +467,53 @@ def backtransformFilterString(st):
return st
def getHash(time):
def get_hash(time):
"""
takes a time object and returns the corresponding SHA1 hash of the formatted date string
:param time: time object for which a hash should be calculated
:type time: `~obspy.core.utcdatetime.UTCDateTime`
:return: SHA1 hash
:rtype: str
>>> time = UTCDateTime(0)
>>> get_hash(time)
'7627cce3b1b58dd21b005dac008b34d18317dd15'
>>> get_hash(0)
Traceback (most recent call last):
...
AssertionError: 'time' is not an ObsPy UTCDateTime object
"""
assert isinstance(time, UTCDateTime), '\'time\' is not an ObsPy UTCDateTime object'
hg = hashlib.sha1()
hg.update(time.strftime('%Y-%m-%d %H:%M:%S.%f'))
hg.update(time.strftime('%Y-%m-%d %H:%M:%S.%f').encode('utf-8'))
return hg.hexdigest()
def getLogin():
def get_login():
"""
returns the actual user's login ID
:return: login ID
returns the actual user's name
:return: login name
:rtype: str
"""
import getpass
return getpass.getuser()
def getOwner(fn):
def get_owner(fn):
"""
takes a filename and returns the login ID of the actual owner of the file
:param fn: filename of the file tested
:type fn: str
:return: login ID of the file's owner
:rtype: str
>>> import tempfile
>>> with tempfile.NamedTemporaryFile() as tmpfile:
... tmpfile.write(b'') and True
... tmpfile.flush()
... get_owner(tmpfile.name) == os.path.expanduser('~').split('/')[-1]
0
True
"""
system_name = platform.system()
if system_name in ["Linux", "Darwin"]:
@ -513,6 +559,11 @@ def is_executable(fn):
:param fn: path to the file to be tested
:return: True or False
:rtype: bool
>>> is_executable('/bin/ls')
True
>>> is_executable('/var/log/system.log')
False
"""
return os.path.isfile(fn) and os.access(fn, os.X_OK)
@ -539,24 +590,36 @@ def isSorted(iterable):
>>> isSorted([2,3,1,4])
False
"""
assert isIterable(iterable), 'object is not iterable; object: {' \
'}'.format(iterable)
assert is_iterable(iterable), "object is not iterable; object: {}".format(iterable)
if type(iterable) is str:
iterable = [s for s in iterable]
return sorted(iterable) == iterable
def isIterable(obj):
def is_iterable(obj):
"""
takes a python object and returns True if the object is iterable and
False otherwise
:param obj: a python object
:type obj: object
:type obj: obj
:return: True of False
:rtype: bool
>>> is_iterable(1)
False
>>> is_iterable(True)
False
>>> is_iterable(0.)
False
>>> is_iterable((0,1,3,4))
True
>>> is_iterable([1])
True
>>> is_iterable('a')
True
"""
try:
iterator = iter(obj)
iter(obj)
except TypeError as te:
return False
return True
@ -565,13 +628,19 @@ def isIterable(obj):
def key_for_set_value(d):
"""
takes a dictionary and returns the first key for which's value the
boolean is True
boolean representation is True
:param d: dictionary containing values
:type d: dict
:return: key to the first non-False value found; None if no value's
boolean equals True
:rtype:
:rtype: key type or NoneType
>>> key_for_set_value({'one': 0, 'two': 1})
'two'
>>> print(key_for_set_value({1: 0, 2: False}))
None
"""
assert type(d) is dict, "Function only defined for inputs of type 'dict'."
r = None
for k, v in d.items():
if v:
@ -579,32 +648,53 @@ def key_for_set_value(d):
return r
def prepTimeAxis(stime, trace, verbosity=0):
def prep_time_axis(offset, trace, verbosity=0):
"""
takes a starttime and a trace object and returns a valid time axis for
takes an offset and a trace object and returns a valid time axis for
plotting
:param stime: start time of the actual seismogram as UTCDateTime
:type stime: `~obspy.core.utcdatetime.UTCDateTime`
:param offset: offset of the actual seismogram on plotting axis
:type offset: float or int
:param trace: seismic trace object
:type trace: `~obspy.core.trace.Trace`
:param verbosity: if != 0, debug output will be written to console
:type verbosity: int
:return: valid numpy array with time stamps for plotting
:rtype: `~numpy.ndarray`
>>> tr = read()[0]
>>> prep_time_axis(0., tr)
array([0.00000000e+00, 1.00033344e-02, 2.00066689e-02, ...,
2.99799933e+01, 2.99899967e+01, 3.00000000e+01])
>>> prep_time_axis(22.5, tr)
array([22.5 , 22.51000333, 22.52000667, ..., 52.47999333,
52.48999667, 52.5 ])
>>> prep_time_axis(tr.stats.starttime, tr)
Traceback (most recent call last):
...
AssertionError: 'offset' is not of type 'float' or 'int'; type: <class 'obspy.core.utcdatetime.UTCDateTime'>
>>> tr.stats.npts -= 1
>>> prep_time_axis(0, tr)
array([0.00000000e+00, 1.00033356e-02, 2.00066711e-02, ...,
2.99699933e+01, 2.99799967e+01, 2.99900000e+01])
>>> tr.stats.npts += 2
>>> prep_time_axis(0, tr)
array([0.00000000e+00, 1.00033333e-02, 2.00066667e-02, ...,
2.99899933e+01, 2.99999967e+01, 3.00100000e+01])
"""
assert isinstance(offset, (float, int)), "'offset' is not of type 'float' or 'int'; type: {}".format(type(offset))
nsamp = trace.stats.npts
srate = trace.stats.sampling_rate
tincr = trace.stats.delta
etime = stime + nsamp / srate
time_ax = np.linspace(stime, etime, nsamp)
etime = offset + nsamp / srate
time_ax = np.linspace(offset, etime, nsamp)
if len(time_ax) < nsamp:
if verbosity:
print('elongate time axes by one datum')
time_ax = np.arange(stime, etime + tincr, tincr)
time_ax = np.arange(offset, etime + tincr, tincr)
elif len(time_ax) > nsamp:
if verbosity:
print('shorten time axes by one datum')
time_ax = np.arange(stime, etime - tincr, tincr)
time_ax = np.arange(offset, etime - tincr, tincr)
if len(time_ax) != nsamp:
print('Station {0}, {1} samples of data \n '
'{2} length of time vector \n'
@ -620,13 +710,13 @@ def find_horizontals(data):
:param data: waveform data
:type data: `obspy.core.stream.Stream`
:return: components list
:rtype: list
:rtype: List(str)
..example::
>>> st = read()
>>> find_horizontals(st)
[u'N', u'E']
['N', 'E']
"""
rval = []
for tr in data:
@ -637,7 +727,7 @@ def find_horizontals(data):
return rval
def pick_color(picktype, phase, quality=0):
def pick_color(picktype: Literal['manual', 'automatic'], phase: Literal['P', 'S'], quality: int = 0) -> Rgba:
"""
Create pick color by modifying the base color by the quality.
@ -650,7 +740,7 @@ def pick_color(picktype, phase, quality=0):
:param quality: quality of pick. Decides the new intensity of the modifier color
:type quality: int
:return: tuple containing modified rgba color values
:rtype: (int, int, int, int)
:rtype: Rgba
"""
min_quality = 3
bpc = base_phase_colors(picktype, phase) # returns dict like {'modifier': 'g', 'rgba': (0, 0, 255, 255)}
@ -706,17 +796,17 @@ def pick_linestyle_plt(picktype, key):
return linestyles[picktype][key]
def modify_rgba(rgba, modifier, intensity):
def modify_rgba(rgba: Rgba, modifier: Literal['r', 'g', 'b'], intensity: float) -> Rgba:
"""
Modify rgba color by adding the given intensity to the modifier color
:param rgba: tuple containing rgba values
:type rgba: (int, int, int, int)
:param modifier: which color should be modified, eg. 'r', 'g', 'b'
:type modifier: str
:type rgba: Rgba
:param modifier: which color should be modified; options: 'r', 'g', 'b'
:type modifier: Literal['r', 'g', 'b']
:param intensity: intensity to be added to selected color
:type intensity: float
:return: tuple containing rgba values
:rtype: (int, int, int, int)
:rtype: Rgba
"""
rgba = list(rgba)
index = {'r': 0,
@ -750,18 +840,20 @@ def transform_colors_mpl_str(colors, no_alpha=False):
Transforms rgba color values to a matplotlib string of color values with a range of [0, 1]
:param colors: tuple of rgba color values ranging from [0, 255]
:type colors: (float, float, float, float)
:param no_alpha: Wether to return a alpha value in the matplotlib color string
:param no_alpha: Whether to return an alpha value in the matplotlib color string
:type no_alpha: bool
:return: String containing r, g, b values and alpha value if no_alpha is False (default)
:rtype: str
>>> transform_colors_mpl_str((255., 255., 255., 255.), True)
'(1.0, 1.0, 1.0)'
>>> transform_colors_mpl_str((255., 255., 255., 255.))
'(1.0, 1.0, 1.0, 1.0)'
"""
colors = list(colors)
colors_mpl = tuple([color / 255. for color in colors])
if no_alpha:
colors_mpl = '({}, {}, {})'.format(*colors_mpl)
return '({}, {}, {})'.format(*transform_colors_mpl(colors))
else:
colors_mpl = '({}, {}, {}, {})'.format(*colors_mpl)
return colors_mpl
return '({}, {}, {}, {})'.format(*transform_colors_mpl(colors))
def transform_colors_mpl(colors):
@ -771,27 +863,16 @@ def transform_colors_mpl(colors):
:type colors: (float, float, float, float)
:return: tuple of rgba color values ranging from [0, 1]
:rtype: (float, float, float, float)
>>> transform_colors_mpl((127.5, 0., 63.75, 255.))
(0.5, 0.0, 0.25, 1.0)
>>> transform_colors_mpl(())
()
"""
colors = list(colors)
colors_mpl = tuple([color / 255. for color in colors])
return colors_mpl
def remove_underscores(data):
"""
takes a `obspy.core.stream.Stream` object and removes all underscores
from station names
:param data: stream of seismic data
:type data: `~obspy.core.stream.Stream`
:return: data stream
:rtype: `~obspy.core.stream.Stream`
"""
# for tr in data:
# # remove underscores
# tr.stats.station = tr.stats.station.strip('_')
return data
def trim_station_components(data, trim_start=True, trim_end=True):
"""
cut a stream so only the part common to all three traces is kept to avoid dealing with offsets
@ -820,6 +901,19 @@ def trim_station_components(data, trim_start=True, trim_end=True):
return data
def merge_stream(stream):
"""
check for gaps in Stream and merge them (in place) if any are found
:param stream: stream of seismic data
:type stream: `~obspy.core.stream.Stream`
:return: merged stream and the list of gaps reported by obspy's get_gaps
:rtype: (`~obspy.core.stream.Stream`, list)
"""
gaps = stream.get_gaps()
if gaps:
# list of merged stations (seed_ids)
merged = ['{}.{}.{}.{}'.format(*gap[:4]) for gap in gaps]
stream.merge(method=1)
print('Merged the following stations because of gaps:')
for merged_station in merged:
print(merged_station)
return stream, gaps
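A brief, hedged example using ObsPy's bundled example stream, which contains no gaps, so the call is effectively a no-op:
from obspy import read

st = read()                  # ObsPy example stream, gap-free
st, gaps = merge_stream(st)
print(len(gaps))             # 0 -> nothing had to be merged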
def check4gapsAndRemove(data):
"""
check for gaps in Stream and remove them
@ -840,12 +934,12 @@ def check4gapsAndRemove(data):
return data
def check_for_gaps_and_merge(data):
def check4gapsAndMerge(data):
"""
check for gaps in Stream and merge if gaps are found
:param data: stream of seismic data
:type data: `~obspy.core.stream.Stream`
:return: data stream, gaps returned from obspy get_gaps
:return: data stream
:rtype: `~obspy.core.stream.Stream`
"""
gaps = data.get_gaps()
@ -856,7 +950,7 @@ def check_for_gaps_and_merge(data):
for merged_station in merged:
print(merged_station)
return data, gaps
return data
def check4doubled(data):
@ -928,11 +1022,11 @@ def get_possible_pylot_eventfile_extensions(event, fext):
def get_stations(data):
"""
Get list of all station names in data stream
Get list of all station names in data-stream
:param data: stream containing seismic traces
:type data: `~obspy.core.stream.Stream`
:return: list of all station names in data, no duplicates
:rtype: list of str
:rtype: List(str)
"""
stations = []
for tr in data:
@ -959,66 +1053,87 @@ def check4rotated(data, metadata=None, verbosity=1):
:rtype: `~obspy.core.stream.Stream`
"""
def rotate_components(wfstream, metadata=None):
def rotation_required(trace_ids):
"""
Derive if any rotation is required from the orientation code of the input.
:param trace_ids: list of string identifiers of the waveform traces
:type trace_ids: List(str)
:return: boolean representing if rotation is necessary for any of the traces
:rtype: bool
"""
orientations = [trace_id[-1] for trace_id in trace_ids]
return any([orientation.isnumeric() for orientation in orientations])
def rotate_components(wfs_in, metadata=None):
"""
Rotate components if the orientation code is numeric (i.e. a non-traditional orientation).
Azimuth and dip are fetched from metadata. To be rotated, traces of a station have to be cut to the same length.
Returns unrotated traces if no metadata is provided.
:param wfstream: stream containing seismic traces of a station
:type wfstream: `~obspy.core.stream.Stream`
:param wfs_in: stream containing seismic traces of a station
:type wfs_in: `~obspy.core.stream.Stream`
:param metadata: tuple containing metadata type string and metadata parser object
:type metadata: (str, `~obspy.io.xseed.parser.Parser`)
:return: stream object with traditionally oriented traces (ZNE)
:rtype: `~obspy.core.stream.Stream`
"""
# check if any traces in this station need to be rotated
trace_ids = [trace.id for trace in wfstream]
orientations = [trace_id[-1] for trace_id in trace_ids]
rotation_required = [orientation.isnumeric() for orientation in orientations]
if any(rotation_required):
t_start = full_range(wfstream)
try:
azimuts = []
dips = []
for tr_id in trace_ids:
azimuts.append(metadata.get_coordinates(tr_id, t_start)['azimuth'])
dips.append(metadata.get_coordinates(tr_id, t_start)['dip'])
except (KeyError, TypeError) as e:
print('Failed to rotate trace {}, no azimuth or dip available in metadata'.format(tr_id))
return wfstream
if len(wfstream) < 3:
print('Failed to rotate Stream {}, not enough components available.'.format(wfstream))
return wfstream
# to rotate all traces must have same length, so trim them
wfstream = trim_station_components(wfstream, trim_start=True, trim_end=True)
try:
z, n, e = rotate2zne(wfstream[0], azimuts[0], dips[0],
wfstream[1], azimuts[1], dips[1],
wfstream[2], azimuts[2], dips[2])
print('check4rotated: rotated trace {} to ZNE'.format(trace_ids))
# replace old data with rotated data, change the channel code to ZNE
z_index = dips.index(min(
dips)) # get z-trace index, z has minimum dip of -90 (dip is measured from 0 to -90, with -90 being vertical)
wfstream[z_index].data = z
wfstream[z_index].stats.channel = wfstream[z_index].stats.channel[0:-1] + 'Z'
del trace_ids[z_index]
for trace_id in trace_ids:
coordinates = metadata.get_coordinates(trace_id, t_start)
dip, az = coordinates['dip'], coordinates['azimuth']
trace = wfstream.select(id=trace_id)[0]
if az > 315 or az <= 45 or az > 135 and az <= 225:
trace.data = n
trace.stats.channel = trace.stats.channel[0:-1] + 'N'
elif az > 45 and az <= 135 or az > 225 and az <= 315:
trace.data = e
trace.stats.channel = trace.stats.channel[0:-1] + 'E'
except (ValueError) as e:
print(e)
return wfstream
if len(wfs_in) < 3:
print(f"Stream {wfs_in=}, has not enough components to rotate.")
return wfs_in
return wfstream
# check if any traces in this station need to be rotated
trace_ids = [trace.id for trace in wfs_in]
if not rotation_required(trace_ids):
logging.debug(f"Stream does not need any rotation: Traces are {trace_ids=}")
return wfs_in
# check metadata quality
t_start = full_range(wfs_in)
try:
azimuths = []
dips = []
for tr_id in trace_ids:
azimuths.append(metadata.get_coordinates(tr_id, t_start)['azimuth'])
dips.append(metadata.get_coordinates(tr_id, t_start)['dip'])
except (KeyError, TypeError) as err:
logging.error(f"{type(err)=} occurred: {err=} Rotating not possible, not all azimuth and dip information "
f"available in metadata. Stream remains unchanged.")
return wfs_in
except Exception as err:
print(f"Unexpected {err=}, {type(err)=}")
raise
# to rotate all traces must have same length, so trim them
wfs_out = trim_station_components(wfs_in, trim_start=True, trim_end=True)
try:
z, n, e = rotate2zne(wfs_out[0], azimuths[0], dips[0],
wfs_out[1], azimuths[1], dips[1],
wfs_out[2], azimuths[2], dips[2])
print('check4rotated: rotated trace {} to ZNE'.format(trace_ids))
# replace old data with rotated data, change the channel code to ZNE
z_index = dips.index(min(
dips)) # get z-trace index, z has minimum dip of -90 (dip is measured from 0 to -90, with -90
# being vertical)
wfs_out[z_index].data = z
wfs_out[z_index].stats.channel = wfs_out[z_index].stats.channel[0:-1] + 'Z'
del trace_ids[z_index]
for trace_id in trace_ids:
coordinates = metadata.get_coordinates(trace_id, t_start)
dip, az = coordinates['dip'], coordinates['azimuth']
trace = wfs_out.select(id=trace_id)[0]
if az > 315 or az <= 45 or 135 < az <= 225:
trace.data = n
trace.stats.channel = trace.stats.channel[0:-1] + 'N'
elif 45 < az <= 135 or 225 < az <= 315:
trace.data = e
trace.stats.channel = trace.stats.channel[0:-1] + 'E'
except ValueError as err:
print(f"{err=} Rotation failed. Stream remains unchanged.")
return wfs_in
return wfs_out
if metadata is None:
if verbosity:
@ -1032,38 +1147,6 @@ def check4rotated(data, metadata=None, verbosity=1):
return data
def scaleWFData(data, factor=None, components='all'):
"""
produce scaled waveforms from given waveform data and a scaling factor,
waveform may be selected by their components name
:param data: waveform data to be scaled
:type data: `~obspy.core.stream.Stream` object
:param factor: scaling factor
:type factor: float
:param components: components labels for the traces in data to be scaled by
the scaling factor (optional, default: 'all')
:type components: tuple
:return: scaled waveform data
:rtype: `~obspy.core.stream.Stream` object
"""
if components != 'all':
for comp in components:
if factor is None:
max_val = np.max(np.abs(data.select(component=comp)[0].data))
data.select(component=comp)[0].data /= 2 * max_val
else:
data.select(component=comp)[0].data /= 2 * factor
else:
for tr in data:
if factor is None:
max_val = float(np.max(np.abs(tr.data)))
tr.data /= 2 * max_val
else:
tr.data /= 2 * factor
return data
def runProgram(cmd, parameter=None):
"""
run an external program specified by cmd with parameters input returning the
@ -1135,7 +1218,6 @@ def identifyPhase(phase):
return False
@lru_cache
def identifyPhaseID(phase):
"""
Returns phase id (capital P or S)

View File

@ -7,6 +7,7 @@ Created on Wed Mar 19 11:27:35 2014
import copy
import datetime
import getpass
import glob
import multiprocessing
import os
import subprocess
@ -16,6 +17,7 @@ import traceback
import matplotlib
import numpy as np
from pylot.core.io.phases import getQualitiesfromxml
matplotlib.use('QT5Agg')
@ -49,11 +51,11 @@ from pylot.core.pick.utils import getSNR, earllatepicker, getnoisewin, \
from pylot.core.pick.compare import Comparison
from pylot.core.pick.autopick import fmpicker
from pylot.core.util.defaults import OUTPUTFORMATS, FILTERDEFAULTS
from pylot.core.util.utils import prepTimeAxis, full_range, demeanTrace, isSorted, findComboBoxIndex, clims, \
from pylot.core.util.utils import prep_time_axis, full_range, demeanTrace, isSorted, findComboBoxIndex, clims, \
pick_linestyle_plt, pick_color_plt, \
check4rotated, check4doubled, check_for_gaps_and_merge, check_for_nan, identifyPhase, \
loopIdentifyPhase, trim_station_components, transformFilteroptions2String, \
identifyPhaseID, get_bool, get_None, pick_color, getAutoFilteroptions, SetChannelComponents, \
identifyPhaseID, get_bool, get_none, pick_color, getAutoFilteroptions, SetChannelComponents, \
station_id_remove_channel, get_pylot_eventfile_with_extension, get_possible_pylot_eventfile_extensions
from autoPyLoT import autoPyLoT
from pylot.core.util.thread import Thread
@ -138,18 +140,6 @@ class LogWidget(QtWidgets.QWidget):
self.stderr.append(60 * '#' + '\n\n')
def getDataType(parent):
type = QInputDialog().getItem(parent, "Select phases type", "Type:",
["manual", "automatic"])
if type[0].startswith('auto'):
type = 'auto'
else:
type = type[0]
return type
def plot_pdf(_axes, x, y, annotation, bbox_props, xlabel=None, ylabel=None,
title=None):
# try method or data
@ -793,7 +783,7 @@ class WaveformWidgetPG(QtWidgets.QWidget):
def connect_signals(self):
self.qcombo_processed.activated.connect(self.parent().newWF)
self.syn_checkbox.clicked.connect(self.parent().newWF)
self.comp_checkbox.clicked.connect(self.parent().newWF)
def init_labels(self):
self.label_layout.addWidget(self.status_label)
@ -804,13 +794,13 @@ class WaveformWidgetPG(QtWidgets.QWidget):
# use widgets as placeholder, so that child widgets keep position when others are hidden
mid_layout = QHBoxLayout()
right_layout = QHBoxLayout()
mid_layout.addWidget(self.syn_checkbox)
mid_layout.addWidget(self.comp_checkbox)
right_layout.addWidget(self.qcombo_processed)
mid_widget.setLayout(mid_layout)
right_widget.setLayout(right_layout)
self.label_layout.addWidget(mid_widget)
self.label_layout.addWidget(right_widget)
self.syn_checkbox.setLayoutDirection(Qt.RightToLeft)
self.comp_checkbox.setLayoutDirection(Qt.RightToLeft)
self.label_layout.setStretch(0, 4)
self.label_layout.setStretch(1, 0)
self.label_layout.setStretch(2, 0)
@ -825,7 +815,7 @@ class WaveformWidgetPG(QtWidgets.QWidget):
label = QtWidgets.QLabel()
self.perm_labels.append(label)
self.qcombo_processed = QtWidgets.QComboBox()
self.syn_checkbox = QtWidgets.QCheckBox('synthetics')
self.comp_checkbox = QtWidgets.QCheckBox('Load comparison data')
self.addQCboxItem('processed', 'green')
self.addQCboxItem('raw', 'black')
# self.perm_qcbox_right.setAlignment(2)
@ -834,9 +824,11 @@ class WaveformWidgetPG(QtWidgets.QWidget):
def getPlotDict(self):
return self.plotdict
def activateObspyDMToptions(self, activate):
self.syn_checkbox.setVisible(activate)
self.qcombo_processed.setVisible(activate)
def activateObspyDMToptions(self, activate: bool) -> None:
self.qcombo_processed.setEnabled(activate)
def activateCompareOptions(self, activate: bool) -> None:
self.comp_checkbox.setEnabled(activate)
def setPermText(self, number, text=None, color='black'):
if not 0 <= number < len(self.perm_labels):
@ -936,10 +928,10 @@ class WaveformWidgetPG(QtWidgets.QWidget):
msg = 'plotting %s channel of station %s' % (channel, station)
print(msg)
stime = trace.stats.starttime - self.wfstart
time_ax = prepTimeAxis(stime, trace)
time_ax = prep_time_axis(stime, trace)
if st_syn:
stime_syn = trace_syn.stats.starttime - self.wfstart
time_ax_syn = prepTimeAxis(stime_syn, trace_syn)
time_ax_syn = prep_time_axis(stime_syn, trace_syn)
if method == 'fast':
trace.data, time_ax = self.minMax(trace, time_ax)
@ -959,7 +951,7 @@ class WaveformWidgetPG(QtWidgets.QWidget):
[time for index, time in enumerate(time_ax_syn) if not index % nth_sample] if st_syn else [])
trace.data = np.array(
[datum * gain + n for index, datum in enumerate(trace.data) if not index % nth_sample])
trace_syn.data = np.array([datum + n for index, datum in enumerate(trace_syn.data)
trace_syn.data = np.array([datum + n + shift_syn for index, datum in enumerate(trace_syn.data)
if not index % nth_sample] if st_syn else [])
plots.append((times, trace.data,
times_syn, trace_syn.data))
@ -1005,15 +997,6 @@ class WaveformWidgetPG(QtWidgets.QWidget):
time_ax = np.linspace(time_ax[0], time_ax[-1], num=len(data))
return data, time_ax
# def getAxes(self):
# return self.axes
# def getXLims(self):
# return self.getAxes().get_xlim()
# def getYLims(self):
# return self.getAxes().get_ylim()
def setXLims(self, lims):
vb = self.plotWidget.getPlotItem().getViewBox()
vb.setXRange(float(lims[0]), float(lims[1]), padding=0)
@ -1148,12 +1131,12 @@ class PylotCanvas(FigureCanvas):
ax.set_xlim(self.cur_xlim)
ax.set_ylim(self.cur_ylim)
self.refreshPickDlgText()
ax.figure.canvas.draw()
ax.figure.canvas.draw_idle()
def panRelease(self, gui_event):
self.press = None
self.press_rel = None
self.figure.canvas.draw()
self.figure.canvas.draw_idle()
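draw_idle() is standard Matplotlib canvas API: unlike draw(), it only schedules a repaint on the next event-loop iteration and coalesces repeated requests, which keeps interactive panning responsive. A runnable comparison (Agg backend used so the sketch works headless):

import matplotlib
matplotlib.use('Agg')  # headless backend, just for this sketch
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
fig.canvas.draw()       # synchronous: renders immediately and blocks
fig.canvas.draw_idle()  # deferred: schedules a single repaint, cheap to call from event handlers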
def panZoom(self, gui_event, threshold=2., factor=1.1):
if not gui_event.x and not gui_event.y:
@ -1167,8 +1150,6 @@ class PylotCanvas(FigureCanvas):
break
if not ax_check: return
# self.updateCurrentLimits() #maybe put this down to else:
# calculate delta (relative values in axis)
old_x, old_y = self.press_rel
xdiff = gui_event.x - old_x
@ -1371,110 +1352,145 @@ class PylotCanvas(FigureCanvas):
plot_positions[channel] = plot_pos
return plot_positions
def plotWFData(self, wfdata, title=None, zoomx=None, zoomy=None,
def plotWFData(self, wfdata, wfdata_compare=None, title=None, zoomx=None, zoomy=None,
noiselevel=None, scaleddata=False, mapping=True,
component='*', nth_sample=1, iniPick=None, verbosity=0,
plot_additional=False, additional_channel=None, scaleToChannel=None,
snr=None):
ax = self.prepare_plot()
self.clearPlotDict()
wfstart, wfend = self.get_wf_range(wfdata)
compclass = self.get_comp_class()
plot_streams = self.get_plot_streams(wfdata, wfdata_compare, component, compclass)
st_main = plot_streams['wfdata']['data']
if mapping:
plot_positions = self.calcPlotPositions(st_main, compclass)
nslc = self.get_sorted_nslc(st_main)
nmax = self.plot_traces(ax, plot_streams, nslc, wfstart, mapping, plot_positions,
scaleToChannel, noiselevel, scaleddata, nth_sample, verbosity)
if plot_additional and additional_channel:
self.plot_additional_trace(ax, wfdata, additional_channel, scaleToChannel,
scaleddata, nth_sample, wfstart)
self.finalize_plot(ax, wfstart, wfend, nmax, zoomx, zoomy, iniPick, title, snr)
def prepare_plot(self):
ax = self.axes[0]
ax.cla()
return ax
self.clearPlotDict()
wfstart, wfend = full_range(wfdata)
nmax = 0
def get_wf_range(self, wfdata):
return full_range(wfdata)
def get_comp_class(self):
settings = QSettings()
compclass = SetChannelComponents.from_qsettings(settings)
return SetChannelComponents.from_qsettings(settings)
if not component == '*':
alter_comp = compclass.getCompPosition(component)
# alter_comp = str(alter_comp[0])
st_select = wfdata.select(component=component)
st_select += wfdata.select(component=alter_comp)
else:
st_select = wfdata
if mapping:
plot_positions = self.calcPlotPositions(st_select, compclass)
# list containing tuples of network, station, channel and plot position (for sorting)
nslc = []
for plot_pos, trace in enumerate(st_select):
if not trace.stats.channel[-1] in ['Z', 'N', 'E', '1', '2', '3']:
print('Warning: Unrecognized channel {}'.format(trace.stats.channel))
continue
nslc.append(trace.get_id())
nslc.sort()
nslc.reverse()
def get_plot_streams(self, wfdata, wfdata_compare, component, compclass):
def get_wf_dict(data=Stream(), linecolor='k', offset=0., **plot_kwargs):
return dict(data=data, linecolor=linecolor, offset=offset, plot_kwargs=plot_kwargs)
linecolor = (0., 0., 0., 1.) if not self.style else self.style['linecolor']['rgba_mpl']
plot_streams = {
'wfdata': get_wf_dict(linecolor=linecolor, linewidth=0.7),
'wfdata_comp': get_wf_dict(offset=0.1, linecolor='b', alpha=0.7, linewidth=0.5)
}
if component != '*':
alter_comp = compclass.getCompPosition(component)
plot_streams['wfdata']['data'] = wfdata.select(component=component) + wfdata.select(component=alter_comp)
if wfdata_compare:
plot_streams['wfdata_comp']['data'] = wfdata_compare.select(
component=component) + wfdata_compare.select(component=alter_comp)
else:
plot_streams['wfdata']['data'] = wfdata
if wfdata_compare:
plot_streams['wfdata_comp']['data'] = wfdata_compare
return plot_streams
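get_plot_streams() bundles the genuine and the comparison stream into one dict that carries the per-stream line colour, vertical offset and extra Matplotlib keyword arguments, so plot_traces() can loop over both uniformly. A minimal sketch of how such a structure is consumed (empty ObsPy streams stand in for real data; not part of the diff):

from obspy import Stream

st_main, st_comp = Stream(), Stream()   # placeholders for genuine and comparison waveforms

plot_streams = {
    'wfdata':      dict(data=st_main, linecolor='k', offset=0.0, plot_kwargs=dict(linewidth=0.7)),
    'wfdata_comp': dict(data=st_comp, linecolor='b', offset=0.1, plot_kwargs=dict(alpha=0.7, linewidth=0.5)),
}
for name, wf_dict in plot_streams.items():
    if not wf_dict['data']:
        continue  # empty Stream evaluates to False, so missing comparison data is skipped
    # each trace would be drawn with wf_dict['linecolor'], lifted by wf_dict['offset']
    # and passed **wf_dict['plot_kwargs']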
def get_sorted_nslc(self, st_main):
nslc = [trace.get_id() for trace in st_main if trace.stats.channel[-1] in ['Z', 'N', 'E', '1', '2', '3']]
nslc.sort(reverse=True)
return nslc
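Because the SEED ids of one station only differ in their last character, the reverse sort in get_sorted_nslc() puts the Z component first in the list (and 3 before 2 and 1). For example:

nslc = ['GR.GRA1..BHE', 'GR.GRA1..BHN', 'GR.GRA1..BHZ']   # hypothetical station
nslc.sort(reverse=True)
print(nslc)   # ['GR.GRA1..BHZ', 'GR.GRA1..BHN', 'GR.GRA1..BHE']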
def plot_traces(self, ax, plot_streams, nslc, wfstart, mapping, plot_positions, scaleToChannel, noiselevel,
scaleddata, nth_sample, verbosity):
nmax = 0
for n, seed_id in enumerate(nslc):
network, station, location, channel = seed_id.split('.')
st = st_select.select(id=seed_id)
trace = st[0].copy()
if mapping:
n = plot_positions[trace.stats.channel]
if n > nmax:
nmax = n
if verbosity:
msg = 'plotting %s channel of station %s' % (channel, station)
print(msg)
stime = trace.stats.starttime - wfstart
time_ax = prepTimeAxis(stime, trace)
if time_ax is not None:
if scaleToChannel:
st_scale = wfdata.select(channel=scaleToChannel)
if st_scale:
tr = st_scale[0]
trace.detrend('constant')
trace.normalize(np.max(np.abs(tr.data)) * 2)
scaleddata = True
if not scaleddata:
trace.detrend('constant')
trace.normalize(np.max(np.abs(trace.data)) * 2)
times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
data = [datum + n for index, datum in enumerate(trace.data) if not index % nth_sample]
ax.axhline(n, color="0.5", lw=0.5)
ax.plot(times, data, color=linecolor, linewidth=0.7)
if noiselevel is not None:
for level in [-noiselevel[channel], noiselevel[channel]]:
ax.plot([time_ax[0], time_ax[-1]],
[n + level, n + level],
color=linecolor,
linestyle='dashed')
for wf_name, wf_dict in plot_streams.items():
st_select = wf_dict.get('data')
if not st_select:
continue
trace = st_select.select(id=seed_id)[0].copy()
if mapping:
n = plot_positions[trace.stats.channel]
if n > nmax:
nmax = n
if verbosity:
print(f'plotting {channel} channel of station {station}')
time_ax = prep_time_axis(trace.stats.starttime - wfstart, trace)
self.plot_trace(ax, trace, time_ax, wf_dict, n, scaleToChannel, noiselevel, scaleddata, nth_sample, stream=st_select)
self.setPlotDict(n, seed_id)
if plot_additional and additional_channel:
compare_stream = wfdata.select(channel=additional_channel)
if compare_stream:
trace = compare_stream[0]
if scaleToChannel:
st_scale = wfdata.select(channel=scaleToChannel)
if st_scale:
tr = st_scale[0]
trace.detrend('constant')
trace.normalize(np.max(np.abs(tr.data)) * 2)
scaleddata = True
if not scaleddata:
trace.detrend('constant')
trace.normalize(np.max(np.abs(trace.data)) * 2)
time_ax = prepTimeAxis(stime, trace)
times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
p_data = compare_stream[0].data
# #normalize
# p_max = max(abs(p_data))
# p_data /= p_max
for index in range(3):
ax.plot(times, p_data, color='red', alpha=0.5, linewidth=0.7)
p_data += 1
return nmax
def plot_trace(self, ax, trace, time_ax, wf_dict, n, scaleToChannel, noiselevel, scaleddata, nth_sample, stream=None):
if time_ax is not None:
if scaleToChannel:
self.scale_trace(trace, scaleToChannel, stream)
scaleddata = True
if not scaleddata:
trace.detrend('constant')
trace.normalize(np.max(np.abs(trace.data)) * 2)
offset = wf_dict.get('offset')
times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
data = [datum + n + offset for index, datum in enumerate(trace.data) if not index % nth_sample]
ax.axhline(n, color="0.5", lw=0.5)
ax.plot(times, data, color=wf_dict.get('linecolor'), **wf_dict.get('plot_kwargs'))
if noiselevel is not None:
self.plot_noise_level(ax, time_ax, noiselevel, trace.stats.channel, n, wf_dict.get('linecolor'))
def scale_trace(self, trace, scaleToChannel, wfdata):
st_scale = wfdata.select(channel=scaleToChannel)
if st_scale:
tr = st_scale[0]
trace.detrend('constant')
trace.normalize(np.max(np.abs(tr.data)) * 2)
def plot_noise_level(self, ax, time_ax, noiselevel, channel, n, linecolor):
for level in [-noiselevel[channel], noiselevel[channel]]:
ax.plot([time_ax[0], time_ax[-1]], [n + level, n + level], color=linecolor, linestyle='dashed')
def plot_additional_trace(self, ax, wfdata, additional_channel, scaleToChannel, scaleddata, nth_sample, wfstart):
compare_stream = wfdata.select(channel=additional_channel)
if compare_stream:
trace = compare_stream[0]
if scaleToChannel:
self.scale_trace(trace, scaleToChannel, wfdata)
scaleddata = True
if not scaleddata:
trace.detrend('constant')
trace.normalize(np.max(np.abs(trace.data)) * 2)
time_ax = prep_time_axis(trace.stats.starttime - wfstart, trace)
self.plot_additional_data(ax, trace, time_ax, nth_sample)
def plot_additional_data(self, ax, trace, time_ax, nth_sample):
times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
p_data = trace.data
for index in range(3):
ax.plot(times, p_data, color='red', alpha=0.5, linewidth=0.7)
p_data += 1
def finalize_plot(self, ax, wfstart, wfend, nmax, zoomx, zoomy, iniPick, title, snr):
if iniPick:
ax.vlines(iniPick, ax.get_ylim()[0], ax.get_ylim()[1],
colors='m', linestyles='dashed',
linewidth=2)
xlabel = 'seconds since {0}'.format(wfstart)
ax.vlines(iniPick, ax.get_ylim()[0], ax.get_ylim()[1], colors='m', linestyles='dashed', linewidth=2)
xlabel = f'seconds since {wfstart}'
ylabel = ''
self.updateWidget(xlabel, ylabel, title)
self.setXLims(ax, [0, wfend - wfstart])
@ -1484,15 +1500,14 @@ class PylotCanvas(FigureCanvas):
if zoomy is not None:
self.setYLims(ax, zoomy)
if snr is not None:
if snr < 2:
warning = 'LOW SNR'
if snr < 1.5:
warning = 'VERY LOW SNR'
ax.text(0.1, 0.9, 'WARNING - {}'.format(warning), ha='center', va='center', transform=ax.transAxes,
color='red')
self.plot_snr_warning(ax, snr)
self.draw()
def plot_snr_warning(self, ax, snr):
if snr < 2:
warning = 'LOW SNR' if snr >= 1.5 else 'VERY LOW SNR'
ax.text(0.1, 0.9, f'WARNING - {warning}', ha='center', va='center', transform=ax.transAxes, color='red')
@staticmethod
def getXLims(ax):
return ax.get_xlim()
@ -1574,6 +1589,8 @@ class SearchFileByExtensionDialog(QtWidgets.QDialog):
self.events = events
self.filepaths = []
self.file_extensions = []
self.check_all_state = True
self.merge_strategy = None
self.default_text = default_text
self.label = label
self.setButtons()
@ -1581,16 +1598,17 @@ class SearchFileByExtensionDialog(QtWidgets.QDialog):
self.connectSignals()
self.showPaths()
self.refreshSelectionBox()
self.refresh_timer = QTimer(self)
self.refresh_timer.timeout.connect(self.showPaths)
self.refresh_timer.start(10000)
# self.refresh_timer = QTimer(self)
# self.refresh_timer.timeout.connect(self.showPaths)
# self.refresh_timer.start(10000)
self.resize(800, 450)
def setupUi(self):
ncol = 4
self.main_layout = QtWidgets.QVBoxLayout()
self.header_layout = QtWidgets.QHBoxLayout()
self.footer_layout = QtWidgets.QHBoxLayout()
#
self.setLayout(self.main_layout)
@ -1604,11 +1622,24 @@ class SearchFileByExtensionDialog(QtWidgets.QDialog):
self.searchButton = QtWidgets.QPushButton('Search')
self.searchButton.setVisible(False)
# check/uncheck button for table
self.checkAllButton = QtWidgets.QPushButton('Check/Uncheck all')
# radiobutton for merge selection
self.mergeRadioButtonGroup = QtWidgets.QButtonGroup()
self.merge_button = QtWidgets.QRadioButton('Merge')
self.overwrite_button = QtWidgets.QRadioButton('Overwrite')
self.mergeRadioButtonGroup.addButton(self.merge_button)
self.mergeRadioButtonGroup.addButton(self.overwrite_button)
self.merge_button.setChecked(True)
self.merge_strategy = self.merge_button.text()
# table
self.tableWidget = QtWidgets.QTableWidget()
tableWidget = self.tableWidget
tableWidget.setColumnCount(3)
tableWidget.setColumnCount(ncol)
tableWidget.setRowCount(len(self.events))
tableWidget.setHorizontalHeaderLabels(('Event ID', 'Filename', 'Last modified'))
tableWidget.setHorizontalHeaderLabels(('', 'Event ID', 'Filename', 'Last modified'))
tableWidget.setEditTriggers(tableWidget.NoEditTriggers)
tableWidget.setSortingEnabled(True)
header = tableWidget.horizontalHeader()
@ -1621,9 +1652,17 @@ class SearchFileByExtensionDialog(QtWidgets.QDialog):
self.header_layout.addWidget(self.comboBox)
self.header_layout.addWidget(self.searchButton)
self.footer_layout.addWidget(self.checkAllButton)
self.footer_layout.addWidget(self.statusText)
self.footer_layout.addWidget(self.merge_button)
self.footer_layout.addWidget(self.overwrite_button)
self.footer_layout.setStretch(0, 0)
self.footer_layout.setStretch(1, 1)
self.main_layout.addLayout(self.header_layout)
self.main_layout.addWidget(self.tableWidget)
self.main_layout.addWidget(self.statusText)
self.main_layout.addLayout(self.footer_layout)
self.main_layout.addWidget(self._buttonbox)
def showPaths(self):
@ -1632,23 +1671,23 @@ class SearchFileByExtensionDialog(QtWidgets.QDialog):
self.tableWidget.clearContents()
for index, event in enumerate(self.events):
filename = get_pylot_eventfile_with_extension(event, fext)
self.tableWidget.setItem(index, 0, QtWidgets.QTableWidgetItem(f'{event.pylot_id}'))
pf_selected_item = QtWidgets.QTableWidgetItem()
check_state = QtCore.Qt.Checked if filename else QtCore.Qt.Unchecked
pf_selected_item.setCheckState(check_state)
self.tableWidget.setItem(index, 0, pf_selected_item)
self.tableWidget.setItem(index, 1, QtWidgets.QTableWidgetItem(f'{event.pylot_id}'))
if filename:
self.filepaths.append(filename)
ts = int(os.path.getmtime(filename))
# create QTableWidgetItems of filepath and last modification time
fname_item = QtWidgets.QTableWidgetItem(f'{os.path.split(filename)[-1]}')
fname_item.setData(3, filename)
ts_item = QtWidgets.QTableWidgetItem(f'{datetime.datetime.fromtimestamp(ts)}')
self.tableWidget.setItem(index, 1, fname_item)
self.tableWidget.setItem(index, 2, ts_item)
self.tableWidget.setItem(index, 2, fname_item)
self.tableWidget.setItem(index, 3, ts_item)
# TODO: Idea -> only refresh if table contents changed. Use selection to load only a subset of files
if len(self.filepaths) > 0:
status_text = f'Found {len(self.filepaths)} eventfiles. Do you want to load them?'
else:
status_text = 'Did not find any files for specified file mask.'
self.statusText.setText(status_text)
self.update_status()
def refreshSelectionBox(self):
fext = self.comboBox.currentText()
@ -1668,12 +1707,52 @@ class SearchFileByExtensionDialog(QtWidgets.QDialog):
self._buttonbox = QDialogButtonBox(QDialogButtonBox.Ok |
QDialogButtonBox.Cancel)
def toggleCheckAll(self):
self.check_all_state = not self.check_all_state
self.checkAll(self.check_all_state)
def checkAll(self, state):
state = QtCore.Qt.Checked if state else QtCore.Qt.Unchecked
for row_ind in range(self.tableWidget.rowCount()):
item = self.tableWidget.item(row_ind, 0)
item.setCheckState(state)
def getChecked(self):
filepaths = []
for row_ind in range(self.tableWidget.rowCount()):
item_check = self.tableWidget.item(row_ind, 0)
if item_check.checkState() == QtCore.Qt.Checked:
item_fname = self.tableWidget.item(row_ind, 2)
if item_fname:
filepath = item_fname.data(3)
filepaths.append(filepath)
return filepaths
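The new first column holds the check state while the filename item keeps the full path under data role 3 (Qt's ToolTipRole), which getChecked() reads back for every checked row. A self-contained sketch of the same pattern (PySide2 imported directly here; PyLoT resolves its Qt binding elsewhere, and the filenames are hypothetical):

from PySide2 import QtCore, QtWidgets

app = QtWidgets.QApplication([])
table = QtWidgets.QTableWidget(2, 4)
for row, (fname, path) in enumerate([('e1_picks.xml', '/data/e1_picks.xml'),
                                     ('e2_picks.xml', '/data/e2_picks.xml')]):
    check_item = QtWidgets.QTableWidgetItem()
    check_item.setCheckState(QtCore.Qt.Checked)
    table.setItem(row, 0, check_item)
    fname_item = QtWidgets.QTableWidgetItem(fname)
    fname_item.setData(3, path)          # role 3 == QtCore.Qt.ToolTipRole carries the full path
    table.setItem(row, 2, fname_item)

checked = [table.item(r, 2).data(3) for r in range(table.rowCount())
           if table.item(r, 0).checkState() == QtCore.Qt.Checked]
print(checked)   # ['/data/e1_picks.xml', '/data/e2_picks.xml']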
def update_status(self, row=None, col=None):
if col is not None and col != 0:
return
filepaths = self.getChecked()
if len(filepaths) > 0:
status_text = f"Found {len(filepaths)} eventfile{'s' if len(filepaths) > 1 else ''}. Do you want to load them?"
else:
status_text = 'Did not find/select any files for specified file mask.'
self.statusText.setText(status_text)
def update_merge_strategy(self):
for button in (self.merge_button, self.overwrite_button):
if button.isChecked():
self.merge_strategy = button.text()
def connectSignals(self):
self._buttonbox.accepted.connect(self.accept)
self._buttonbox.rejected.connect(self.reject)
self.comboBox.editTextChanged.connect(self.showPaths)
self.searchButton.clicked.connect(self.showPaths)
self.checkAllButton.clicked.connect(self.toggleCheckAll)
self.checkAllButton.clicked.connect(self.update_status)
self.tableWidget.cellClicked.connect(self.update_status)
self.merge_button.clicked.connect(self.update_merge_strategy)
self.overwrite_button.clicked.connect(self.update_merge_strategy)
class SingleTextLineDialog(QtWidgets.QDialog):
@ -1780,8 +1859,8 @@ class PhaseDefaults(QtWidgets.QDialog):
class PickDlg(QDialog):
update_picks = QtCore.Signal(dict)
def __init__(self, parent=None, data=None, station=None, network=None, location=None, picks=None,
autopicks=None, rotate=False, parameter=None, embedded=False, metadata=None,
def __init__(self, parent=None, data=None, data_compare=None, station=None, network=None, location=None, picks=None,
autopicks=None, rotate=False, parameter=None, embedded=False, metadata=None, show_comp_data=False,
event=None, filteroptions=None, model=None, wftype=None):
super(PickDlg, self).__init__(parent, Qt.Window)
self.orig_parent = parent
@ -1790,6 +1869,7 @@ class PickDlg(QDialog):
# initialize attributes
self.parameter = parameter
self._embedded = embedded
self.showCompData = show_comp_data
self.station = station
self.network = network
self.location = location
@ -1828,11 +1908,32 @@ class PickDlg(QDialog):
else:
self.filteroptions = FILTERDEFAULTS
self.pick_block = False
# set attribute holding data
if data is None or not data:
try:
data = parent.get_data().get_wf_data().copy()
self.data = data.select(station=station)
except AttributeError as e:
errmsg = 'You have to pass in either data or an appropriate ' \
'parent (PyLoT MainWindow) object: {0}'.format(e)
raise Exception(errmsg)
else:
self.data = data
self.data_compare = data_compare
self.nextStation = QtWidgets.QCheckBox('Continue with next station ')
# comparison channel
self.compareChannel = QtWidgets.QComboBox()
self.compareChannel.activated.connect(self.resetPlot)
self.referenceChannel = QtWidgets.QComboBox()
self.referenceChannel.activated.connect(self.resetPlot)
# comparison channel
self.compareCB = QtWidgets.QCheckBox()
self.compareCB.setChecked(self.showCompData)
self.compareCB.clicked.connect(self.switchCompData)
self.compareCB.clicked.connect(self.resetPlot)
self.compareCB.setVisible(bool(self.data_compare))
# scale channel
self.scaleChannel = QtWidgets.QComboBox()
@ -1845,19 +1946,7 @@ class PickDlg(QDialog):
self.cur_xlim = None
self.cur_ylim = None
# set attribute holding data
if data is None or not data:
try:
data = parent.get_data().getWFData().copy()
self.data = data.select(station=station)
except AttributeError as e:
errmsg = 'You either have to put in a data or an appropriate ' \
'parent (PyLoT MainWindow) object: {0}'.format(e)
raise Exception(errmsg)
else:
self.data = data
self.stime, self.etime = full_range(self.getWFData())
self.stime, self.etime = full_range(self.get_wf_data())
# initialize plotting widget
self.multicompfig = PylotCanvas(parent=self, multicursor=True)
@ -1868,12 +1957,12 @@ class PickDlg(QDialog):
self.setupUi()
# fill compare and scale channels
self.compareChannel.addItem('-', None)
self.referenceChannel.addItem('-', None)
self.scaleChannel.addItem('individual', None)
for trace in self.getWFData():
for trace in self.get_wf_data():
channel = trace.stats.channel
self.compareChannel.addItem(channel, trace)
self.referenceChannel.addItem(channel, trace)
if not channel[-1] in ['Z', 'N', 'E', '1', '2', '3']:
print('Skipping unknown channel for scaling: {}'.format(channel))
continue
@ -1890,7 +1979,7 @@ class PickDlg(QDialog):
if self.wftype is not None:
title += ' | ({})'.format(self.wftype)
self.multicompfig.plotWFData(wfdata=self.getWFData(),
self.multicompfig.plotWFData(wfdata=self.get_wf_data(), wfdata_compare=self.get_wf_dataComp(),
title=title)
self.multicompfig.setZoomBorders2content()
@ -2066,8 +2155,11 @@ class PickDlg(QDialog):
_dialtoolbar.addWidget(est_label)
_dialtoolbar.addWidget(self.plot_arrivals_button)
_dialtoolbar.addSeparator()
_dialtoolbar.addWidget(QtWidgets.QLabel('Compare to channel: '))
_dialtoolbar.addWidget(self.compareChannel)
_dialtoolbar.addWidget(QtWidgets.QLabel('Plot reference channel: '))
_dialtoolbar.addWidget(self.referenceChannel)
_dialtoolbar.addSeparator()
_dialtoolbar.addWidget(QtWidgets.QLabel('Compare: '))
_dialtoolbar.addWidget(self.compareCB)
_dialtoolbar.addSeparator()
_dialtoolbar.addWidget(QtWidgets.QLabel('Scaling: '))
_dialtoolbar.addWidget(self.scaleChannel)
@ -2152,10 +2244,12 @@ class PickDlg(QDialog):
station_id = trace.get_id()
starttime = trace.stats.starttime
station_coords = self.metadata.get_coordinates(station_id, starttime)
if not station_coords:
print('get_arrivals: No station coordinates found. Return!')
return
origins = self.pylot_event.origins
if phases == ['None', 'None']:
print("get_arrivals: Creation info (manual or auto) not available!")
print("Return!")
print("get_arrivals: Creation info (manual or auto) not available! Return!")
return
if origins:
source_origin = origins[0]
@ -2267,7 +2361,7 @@ class PickDlg(QDialog):
# create action and add to menu
# phase name transferred using lambda function
slot = lambda phase=phase, phaseID=phaseID: phaseSelect[phaseID](phase)
slot = lambda ph=phase, phID=phaseID: phaseSelect[phID](ph)
picksAction = createAction(parent=self, text=phase,
slot=slot,
shortcut=shortcut)
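Binding phase and phaseID as default arguments of the lambda freezes their current values at each loop iteration; a plain lambda would look the loop variables up at call time, so every menu action would trigger the handler of the last phase. The renamed parameters above make that intent explicit. A short illustration of the difference:

late  = [lambda: i for i in range(3)]       # all three close over the same name 'i'
bound = [lambda i=i: i for i in range(3)]   # default argument captures the current value

print([f() for f in late])    # [2, 2, 2]
print([f() for f in bound])   # [0, 1, 2]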
@ -2402,7 +2496,7 @@ class PickDlg(QDialog):
def activatePicking(self):
self.leave_rename_phase()
self.renamePhaseAction.setEnabled(False)
self.compareChannel.setEnabled(False)
self.referenceChannel.setEnabled(False)
self.scaleChannel.setEnabled(False)
phase = self.currentPhase
phaseID = self.getPhaseID(phase)
@ -2420,7 +2514,7 @@ class PickDlg(QDialog):
if self.autoFilterAction.isChecked():
for filteraction in [self.filterActionP, self.filterActionS]:
filteraction.setChecked(False)
self.filterWFData()
self.filter_wf_data()
self.draw()
else:
self.draw()
@ -2434,7 +2528,7 @@ class PickDlg(QDialog):
self.disconnectPressEvent()
self.multicompfig.connectEvents()
self.renamePhaseAction.setEnabled(True)
self.compareChannel.setEnabled(True)
self.referenceChannel.setEnabled(True)
self.scaleChannel.setEnabled(True)
self.connect_pick_delete()
self.draw()
@ -2504,9 +2598,15 @@ class PickDlg(QDialog):
def getGlobalLimits(self, ax, axis):
return self.multicompfig.getGlobalLimits(ax, axis)
def getWFData(self):
def get_wf_data(self):
return self.data
def get_wf_dataComp(self):
if self.showCompData:
return self.data_compare
else:
return Stream()
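Returning an empty Stream instead of None when the comparison checkbox is unchecked lets all downstream code use the same truthiness checks (if wfd: ...), because an ObsPy Stream without traces evaluates to False:

import numpy as np
from obspy import Stream, Trace

print(bool(Stream()))                            # False -> skipped by 'if wfd:' checks
print(bool(Stream([Trace(data=np.zeros(10))])))  # True  -> gets filtered/plotted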
def selectWFData(self, channel):
component = channel[-1].upper()
wfdata = Stream()
@ -2516,17 +2616,17 @@ class PickDlg(QDialog):
return tr
if component == 'E' or component == 'N':
for trace in self.getWFData():
for trace in self.get_wf_data():
trace = selectTrace(trace, 'NE')
if trace:
wfdata.append(trace)
elif component == '1' or component == '2':
for trace in self.getWFData():
for trace in self.get_wf_data():
trace = selectTrace(trace, '12')
if trace:
wfdata.append(trace)
else:
wfdata = self.getWFData().select(component=component)
wfdata = self.get_wf_data().select(component=component)
return wfdata
def getPicks(self, picktype='manual'):
@ -2628,11 +2728,16 @@ class PickDlg(QDialog):
stime = self.getStartTime()
# copy data for plotting
data = self.getWFData().copy()
data = self.getPickPhases(data, phase)
data.normalize()
if not data:
# copy wfdata for plotting
wfdata = self.get_wf_data().copy()
wfdata_comp = self.get_wf_dataComp().copy()
wfdata = self.getPickPhases(wfdata, phase)
wfdata_comp = self.getPickPhases(wfdata_comp, phase)
for wfd in [wfdata, wfdata_comp]:
if wfd:
wfd.normalize()
if not wfdata:
QtWidgets.QMessageBox.warning(self, 'No channel to plot',
'No channel to plot for phase: {}. '
'Make sure to select the correct channels for P and S '
@ -2640,14 +2745,16 @@ class PickDlg(QDialog):
self.leave_picking_mode()
return
# filter data and trace on which is picked prior to determination of SNR
# filter wfdata and trace on which is picked prior to determination of SNR
filterphase = self.currentFilterPhase()
if filterphase:
filteroptions = self.getFilterOptions(filterphase).parseFilterOptions()
try:
data.detrend('linear')
data.filter(**filteroptions)
# wfdata.filter(**filteroptions)# MP MP removed filtering of original data
for wfd in [wfdata, wfdata_comp]:
if wfd:
wfd.detrend('linear')
wfd.filter(**filteroptions)
# wfdata.filter(**filteroptions)# MP MP removed filtering of original wfdata
except ValueError as e:
self.qmb = QtWidgets.QMessageBox(QtWidgets.QMessageBox.Icon.Information,
'Denied',
@ -2657,8 +2764,8 @@ class PickDlg(QDialog):
snr = []
noiselevels = {}
# determine SNR and noiselevel
for trace in data.traces:
st = data.select(channel=trace.stats.channel)
for trace in wfdata.traces:
st = wfdata.select(channel=trace.stats.channel)
stime_diff = trace.stats.starttime - stime
result = getSNR(st, (noise_win, gap_win, signal_win), ini_pick - stime_diff)
snr.append(result[0])
@ -2669,23 +2776,25 @@ class PickDlg(QDialog):
noiselevel = nfac
noiselevels[trace.stats.channel] = noiselevel
# prepare plotting of data
for trace in data:
t = prepTimeAxis(trace.stats.starttime - stime, trace)
inoise = getnoisewin(t, ini_pick, noise_win, gap_win)
trace = demeanTrace(trace, inoise)
# upscale trace data in a way that each trace is vertically zoomed to noiselevel*factor
channel = trace.stats.channel
noiselevel = noiselevels[channel]
noiseScaleFactor = self.calcNoiseScaleFactor(noiselevel, zoomfactor=5.)
trace.data *= noiseScaleFactor
noiselevels[channel] *= noiseScaleFactor
# prepare plotting of wfdata
for wfd in [wfdata, wfdata_comp]:
if wfd:
for trace in wfd:
t = prep_time_axis(trace.stats.starttime - stime, trace)
inoise = getnoisewin(t, ini_pick, noise_win, gap_win)
trace = demeanTrace(trace, inoise)
# upscale trace wfdata in a way that each trace is vertically zoomed to noiselevel*factor
channel = trace.stats.channel
noiselevel = noiselevels[channel]
noiseScaleFactor = self.calcNoiseScaleFactor(noiselevel, zoomfactor=5.)
trace.data *= noiseScaleFactor
noiselevels[channel] *= noiseScaleFactor
mean_snr = np.mean(snr)
x_res = getResolutionWindow(mean_snr, parameter.get('extent'))
xlims = [ini_pick - x_res, ini_pick + x_res]
ylims = list(np.array([-.5, .5]) + [0, len(data) - 1])
ylims = list(np.array([-.5, .5]) + [0, len(wfdata) - 1])
title = self.getStation() + ' picking mode'
title += ' | SNR: {}'.format(mean_snr)
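In the loop above each trace is demeaned over the pre-pick noise window and rescaled so the noise band occupies a fixed fraction of its plot row. A hedged sketch of the demeaning step, assuming getnoisewin() returns an index mask over the time axis and demeanTrace() subtracts the mean of the masked samples (both inferred from the calls above, neither shown in this diff):

import numpy as np

def demean_over_noise_window(data, noise_mask):
    # subtract the mean computed only from the pre-pick noise samples
    return data - data[noise_mask].mean()

t = np.arange(0., 60., 0.01)                  # time axis in seconds relative to stream start
noise_mask = (t >= 10.) & (t < 25.)           # hypothetical noise window before the pick
trace_data = np.random.randn(t.size) + 5.
demeaned = demean_over_noise_window(trace_data, noise_mask)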
@ -2693,9 +2802,10 @@ class PickDlg(QDialog):
filtops_str = transformFilteroptions2String(filteroptions)
title += ' | Filteroptions: {}'.format(filtops_str)
plot_additional = bool(self.compareChannel.currentText())
additional_channel = self.compareChannel.currentText()
self.multicompfig.plotWFData(wfdata=data,
plot_additional = bool(self.referenceChannel.currentText())
additional_channel = self.referenceChannel.currentText()
self.multicompfig.plotWFData(wfdata=wfdata,
wfdata_compare=wfdata_comp,
title=title,
zoomx=xlims,
zoomy=ylims,
@ -2729,7 +2839,7 @@ class PickDlg(QDialog):
filteroptions = None
# copy and filter data for earliest and latest possible picks
wfdata = self.getWFData().copy().select(channel=channel)
wfdata = self.get_wf_data().copy().select(channel=channel)
if filteroptions:
try:
wfdata.detrend('linear')
@ -2776,7 +2886,7 @@ class PickDlg(QDialog):
minFMSNR = parameter.get('minFMSNR')
quality = get_quality_class(spe, parameter.get('timeerrorsP'))
if quality <= minFMweight and snr >= minFMSNR:
FM = fmpicker(self.getWFData().select(channel=channel).copy(), wfdata.copy(), parameter.get('fmpickwin'),
FM = fmpicker(self.get_wf_data().select(channel=channel).copy(), wfdata.copy(), parameter.get('fmpickwin'),
pick - stime_diff)
# save pick times for actual phase
@ -3068,7 +3178,7 @@ class PickDlg(QDialog):
def togglePickBlocker(self):
return not self.pick_block
def filterWFData(self, phase=None):
def filter_wf_data(self, phase=None):
if not phase:
phase = self.currentPhase
if self.getPhaseID(phase) == 'P':
@ -3086,7 +3196,8 @@ class PickDlg(QDialog):
self.cur_xlim = self.multicompfig.axes[0].get_xlim()
self.cur_ylim = self.multicompfig.axes[0].get_ylim()
# self.multicompfig.updateCurrentLimits()
data = self.getWFData().copy()
wfdata = self.get_wf_data().copy()
wfdata_comp = self.get_wf_dataComp().copy()
title = self.getStation()
if filter:
filtoptions = None
@ -3094,19 +3205,22 @@ class PickDlg(QDialog):
filtoptions = self.getFilterOptions(self.getPhaseID(phase), gui_filter=True).parseFilterOptions()
if filtoptions is not None:
data.detrend('linear')
data.taper(0.02, type='cosine')
data.filter(**filtoptions)
for wfd in [wfdata, wfdata_comp]:
if wfd:
wfd.detrend('linear')
wfd.taper(0.02, type='cosine')
wfd.filter(**filtoptions)
filtops_str = transformFilteroptions2String(filtoptions)
title += ' | Filteroptions: {}'.format(filtops_str)
if self.wftype is not None:
title += ' | ({})'.format(self.wftype)
plot_additional = bool(self.compareChannel.currentText())
additional_channel = self.compareChannel.currentText()
plot_additional = bool(self.referenceChannel.currentText())
additional_channel = self.referenceChannel.currentText()
scale_channel = self.scaleChannel.currentText()
self.multicompfig.plotWFData(wfdata=data, title=title,
self.multicompfig.plotWFData(wfdata=wfdata, wfdata_compare=wfdata_comp,
title=title,
zoomx=self.getXLims(),
zoomy=self.getYLims(),
plot_additional=plot_additional,
@ -3120,14 +3234,14 @@ class PickDlg(QDialog):
def filterP(self):
self.filterActionS.setChecked(False)
if self.filterActionP.isChecked():
self.filterWFData('P')
self.filter_wf_data('P')
else:
self.refreshPlot()
def filterS(self):
self.filterActionP.setChecked(False)
if self.filterActionS.isChecked():
self.filterWFData('S')
self.filter_wf_data('S')
else:
self.refreshPlot()
@ -3179,11 +3293,14 @@ class PickDlg(QDialog):
self.resetZoom()
self.refreshPlot()
def switchCompData(self):
self.showCompData = self.compareCB.isChecked()
def refreshPlot(self):
if self.autoFilterAction.isChecked():
self.filterActionP.setChecked(False)
self.filterActionS.setChecked(False)
# data = self.getWFData().copy()
# data = self.get_wf_data().copy()
# title = self.getStation()
filter = False
phase = None
@ -3683,8 +3800,8 @@ class TuneAutopicker(QWidget):
fnames = self.station_ids[self.get_current_station_id()]
if not fnames == self.fnames:
self.fnames = fnames
self.data.setWFData(fnames)
wfdat = self.data.getWFData() # all available streams
self.data.set_wf_data(fnames)
wfdat = self.data.get_wf_data() # all available streams
# remove possible underscores in station names
# wfdat = remove_underscores(wfdat)
# rotate misaligned stations to ZNE
@ -3789,12 +3906,14 @@ class TuneAutopicker(QWidget):
network = None
location = None
wfdata = self.data.getWFData()
wfdata = self.data.get_wf_data()
wfdata_comp = self.data.get_wf_dataComp()
metadata = self.parent().metadata
event = self.get_current_event()
filteroptions = self.parent().filteroptions
wftype = self.wftype if self.obspy_dmt else ''
self.pickDlg = PickDlg(self.parent(), data=wfdata.select(station=station).copy(),
data_compare=wfdata_comp.select(station=station).copy(),
station=station, network=network,
location=location, parameter=self.parameter,
picks=self.get_current_event_picks(station),
@ -3843,7 +3962,7 @@ class TuneAutopicker(QWidget):
for plotitem in self._manual_pick_plots:
self.clear_plotitem(plotitem)
self._manual_pick_plots = []
st = self.data.getWFData()
st = self.data.get_wf_data()
tr = st.select(station=self.get_current_station())[0]
starttime = tr.stats.starttime
# create two lists with figure names and subindices (for subplots) to get the correct axes
@ -3879,7 +3998,7 @@ class TuneAutopicker(QWidget):
self.plot_manual_pick_to_ax(ax=ax, picks=picks, phase='S',
starttime=starttime, quality=qualitySpick)
for canvas in self.parent().canvas_dict.values():
canvas.draw()
canvas.draw_idle()
def plot_manual_pick_to_ax(self, ax, picks, phase, starttime, quality):
mpp = picks[phase]['mpp'] - starttime
@ -4794,8 +4913,8 @@ class InputsTab(PropTab):
self.tstopBox = QSpinBox()
for spinbox in [self.tstartBox, self.tstopBox]:
spinbox.setRange(-99999, 99999)
self.tstartBox.setValue(float(settings.value('tstart')) if get_None(settings.value('tstart')) else 0)
self.tstopBox.setValue(float(settings.value('tstop')) if get_None(settings.value('tstop')) else 1e6)
self.tstartBox.setValue(float(settings.value('tstart')) if get_none(settings.value('tstart')) else 0)
self.tstopBox.setValue(float(settings.value('tstop')) if get_none(settings.value('tstop')) else 1e6)
self.cuttimesLayout.addWidget(self.tstartBox, 10)
self.cuttimesLayout.addWidget(QLabel('[s] and'), 0)
self.cuttimesLayout.addWidget(self.tstopBox, 10)
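get_none() replaces get_None here and is used to fall back to the defaults when the stored QSettings value is missing or unusable. Its implementation is not part of this diff; the sketch below only illustrates the assumed behaviour (treating empty values and the literal string 'None' as None):

def get_none(value):
    # assumed behaviour: map missing values and the string 'None' to None,
    # pass everything else through unchanged
    if value is None or (isinstance(value, str) and value.strip() in ('', 'None')):
        return None
    return value

print(float(get_none('12.5') or 0))   # 12.5
print(float(get_none('None') or 0))   # 0.0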
@ -5791,7 +5910,7 @@ class ChooseWaveFormWindow(QWidget):
#self.currentSpectro = self.traces[
# self.chooseBoxTraces.currentText()[3:]][self.chooseBoxComponent.currentText()].spectrogram(show=False, title=t)
#self.currentSpectro.show()
applyFFT()
self.applyFFT()
def applyFFT(self, trace=None):
tra = self.traces[self.chooseBoxTraces.currentText()[3:]]['Z']

View File

@ -1,12 +1,9 @@
# This file may be used to create an environment using:
# $ conda create --name <env> --file <this file>
# platform: win-64
cartopy=0.20.2
matplotlib-base=3.3.4
numpy=1.22.3
obspy=1.3.0
pyqtgraph=0.12.4
pyside2=5.13.2
python=3.8.12
qt=5.12.9
scipy=1.8.0
cartopy>=0.20.2
numpy<2
obspy>=1.3.0
pyqtgraph>=0.12.4
pyside2>=5.13.2
scipy>=1.8.0