27 Commits

Author SHA1 Message Date
e5c7404bb6 refactor: remove unnecessary additional declaration 2024-07-17 14:07:05 +02:00
1a148a8a72 suggestion: add new dataclasses; remove unused code 2024-07-17 14:06:13 +02:00
8623cc3dd3 refactor: removed unused code 2024-07-17 14:05:04 +02:00
cb457fc7ec refactor: rename writephases; add write hash to write_phases 2024-07-17 13:29:10 +02:00
eb077e4bd6 refactor: remove unused code; restructure writephases 2024-07-16 14:49:32 +02:00
2b01e8207e change: add docu and test case 2024-07-16 12:08:00 +02:00
6cce05b035 Merge branch 'feature/dae' into develop
# Conflicts:
#	pylot/core/io/data.py
#	pylot/core/util/widgets.py
2024-06-12 16:19:21 +02:00
7326f061e5 [minor] inform if station coordinates were not found in metadata 2024-06-12 16:11:53 +02:00
1a18401fe3 [bugfix] added missing Parameter object in call for picksdict_from_picks 2024-06-12 13:44:02 +02:00
ec930dbc12 [minor] removed unneeded imports 2024-06-07 15:56:05 +02:00
b991f771af [bugfix] removing redundancy and wrong bullsh.. try-except code 2024-06-07 15:04:16 +02:00
2c3b1876ab [minor] switch default cmap for array_map to 'viridis' 2024-06-07 14:34:27 +02:00
0acd23d4d0 [update] further improved Pickfile selection dialog, now providing methods "overwrite" or "merge" 2024-06-07 14:32:57 +02:00
f349c8bc7e [update] improve pickfile selection, give the ability to select only specific files 2024-06-07 13:09:34 +02:00
6688ef845d [bugfix] re-implement ability of get_bool to return unidentifiable input 2024-06-07 13:08:51 +02:00
5b18e9ab71 [merge] merge branch 'improve-util-utils' of pull request #35 into develop 2024-06-07 10:29:39 +02:00
65dbaad446 [update] adding possibility to display other waveform data (e.g. denoised/synthetic) together with genuine data for comparison 2024-03-22 17:12:04 +01:00
5b97d51517 [minor] mpl.figure.canvas.draw -> draw_idle 2024-03-22 17:12:04 +01:00
b3fdbc811e Merge branch 'develop' into improve-util-utils 2023-06-23 09:37:54 +02:00
9fce4998d3 bugfix: remove unused functions; correct for wrong formatting (PEP) 2023-04-23 22:05:11 +02:00
c468bfbe84 feat: add type hints and tests for plot utils 2023-04-23 21:37:20 +02:00
4861d33e9a bugfix: add tests to key_for_set_value 2023-04-16 09:58:51 +02:00
f5f4635c3d bugfix: rename is_iterable and add doc tests 2023-04-16 09:50:42 +02:00
b12d92eebb bugfix: refactor get_owner and get_hash; add tests 2023-04-12 21:22:58 +02:00
e9da81376e bugfix: add new tests and refactor get_none 2023-04-12 20:32:44 +02:00
e68fc849f0 bugfix: correct erroneous and add new doctests 2023-04-10 19:14:23 +02:00
efb117177c bugfix: update check4rotate 2023-04-10 18:35:58 +02:00
13 changed files with 1459 additions and 847 deletions

725
PyLoT.py

File diff suppressed because it is too large Load Diff

View File

@@ -243,7 +243,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
pylot_event = Event(eventpath) # event should be path to event directory pylot_event = Event(eventpath) # event should be path to event directory
data.setEvtData(pylot_event) data.setEvtData(pylot_event)
if fnames == 'None': if fnames == 'None':
data.set_wf_data(glob.glob(os.path.join(datapath, event_datapath, '*'))) data.setWFData(glob.glob(os.path.join(datapath, event_datapath, '*')))
# the following is necessary because within # the following is necessary because within
# multiple event processing no event ID is provided # multiple event processing no event ID is provided
# in autopylot.in # in autopylot.in
@@ -258,7 +258,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
now.minute) now.minute)
parameter.setParam(eventID=eventID) parameter.setParam(eventID=eventID)
else: else:
data.set_wf_data(fnames) data.setWFData(fnames)
eventpath = events[0] eventpath = events[0]
# now = datetime.datetime.now() # now = datetime.datetime.now()
@@ -268,7 +268,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
# now.hour, # now.hour,
# now.minute) # now.minute)
parameter.setParam(eventID=eventid) parameter.setParam(eventID=eventid)
wfdat = data.get_wf_data() # all available streams wfdat = data.getWFData() # all available streams
if not station == 'all': if not station == 'all':
wfdat = wfdat.select(station=station) wfdat = wfdat.select(station=station)
if not wfdat: if not wfdat:

View File

@@ -1,69 +1,604 @@
#!/usr/bin/env python #!/usr/bin/env python
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import copy
import logging
import os import os
import fnmatch
from dataclasses import dataclass, field from dataclasses import dataclass, field
from typing import List, Union from typing import List
from obspy import UTCDateTime from PySide2.QtWidgets import QMessageBox
from obspy import read, read_events, Stream, Catalog, UTCDateTime
from obspy.core.event import Event as ObsPyEvent
from obspy.io.sac import SacIOError
from pylot.core.io.event import EventData
from pylot.core.io.waveformdata import WaveformData
from pylot.core.util.dataprocessing import Metadata
@dataclass import pylot.core.loc.focmec as focmec
class Data: import pylot.core.loc.hypodd as hypodd
event_data: EventData = field(default_factory=EventData) import pylot.core.loc.velest as velest
waveform_data: WaveformData = field(default_factory=WaveformData) from pylot.core.io.phases import readPILOTEvent, picks_from_picksdict, \
metadata: Metadata = field(default_factory=Metadata) picksdict_from_pilot, merge_picks, PylotParameter
_parent: Union[None, 'QtWidgets.QWidget'] = None from pylot.core.util.errors import FormatError, OverwriteError
from pylot.core.util.event import Event
from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT
from pylot.core.util.utils import fnConstructor, full_range, check4rotated, \
check_for_gaps_and_merge, trim_station_components, check_for_nan
class Data(object):
"""
Data container with attributes wfdata holding ~obspy.core.stream.
:type parent: PySide2.QtWidgets.QWidget object, optional
:param parent: A PySide2.QtWidgets.QWidget object utilized when
called by a GUI to display a PySide2.QtWidgets.QMessageBox instead of printing
to standard out.
:type evtdata: ~obspy.core.event.Event object, optional
:param evtdata ~obspy.core.event.Event object containing all derived or
loaded event. Container object holding, e.g. phase arrivals, etc.
"""
def __init__(self, parent=None, evtdata=None): def __init__(self, parent=None, evtdata=None):
self._parent = parent self._parent = parent
self.event_data = EventData(evtdata) if self.getParent():
self.waveform_data = WaveformData() self.comp = parent.getComponent()
else:
self.comp = 'Z'
self.wfdata = Stream()
self._new = False
if isinstance(evtdata, ObsPyEvent) or isinstance(evtdata, Event):
pass
elif isinstance(evtdata, dict):
evt = readPILOTEvent(**evtdata)
evtdata = evt
elif isinstance(evtdata, str):
try:
cat = read_events(evtdata)
if len(cat) != 1:
raise ValueError('ambiguous event information for file: '
'{file}'.format(file=evtdata))
evtdata = cat[0]
except TypeError as e:
if 'Unknown format for file' in e.message:
if 'PHASES' in evtdata:
picks = picksdict_from_pilot(evtdata)
evtdata = ObsPyEvent()
evtdata.picks = picks_from_picksdict(picks)
elif 'LOC' in evtdata:
raise NotImplementedError('PILOT location information '
'read support not yet '
'implemented.')
elif 'event.pkl' in evtdata:
evtdata = qml_from_obspyDMT(evtdata)
else:
raise e
else:
raise e
else: # create an empty Event object
self.setNew()
evtdata = ObsPyEvent()
evtdata.picks = []
self.evtdata = evtdata
self.wforiginal = None
self.cuttimes = None
self.dirty = False
self.processed = None
def __str__(self): def __str__(self):
return str(self.waveform_data.wfdata) return str(self.wfdata)
def __add__(self, other): def __add__(self, other):
if not isinstance(other, Data): assert isinstance(other, Data), "operands must be of same type 'Data'"
raise TypeError("Operands must be of type 'Data'") rs_id = self.get_evt_data().get('resource_id')
if self.event_data.is_new() and other.event_data.is_new(): rs_id_other = other.get_evt_data().get('resource_id')
if other.isNew() and not self.isNew():
picks_to_add = other.get_evt_data().picks
old_picks = self.get_evt_data().picks
wf_ids_old = [pick.waveform_id for pick in old_picks]
for new_pick in picks_to_add:
wf_id = new_pick.waveform_id
if wf_id in wf_ids_old:
for old_pick in old_picks:
comparison = [old_pick.waveform_id == new_pick.waveform_id,
old_pick.phase_hint == new_pick.phase_hint,
old_pick.method_id == new_pick.method_id]
if all(comparison):
del (old_pick)
old_picks.append(new_pick)
elif not other.isNew() and self.isNew():
new = other + self
self.evtdata = new.get_evt_data()
elif self.isNew() and other.isNew():
pass pass
elif other.event_data.is_new(): elif rs_id == rs_id_other:
new_picks = other.event_data.evtdata.picks other.setNew()
old_picks = self.event_data.evtdata.picks
old_picks.extend([pick for pick in new_picks if pick not in old_picks])
elif self.event_data.is_new():
return other + self
elif self.event_data.get_id() == other.event_data.get_id():
other.event_data.set_new()
return self + other return self + other
else: else:
raise ValueError("Both Data objects have differing unique Event identifiers") raise ValueError("both Data objects have differing "
"unique Event identifiers")
return self return self
def get_parent(self): def getPicksStr(self):
"""
Return picks in event data
:return: picks seperated by newlines
:rtype: str
"""
picks_str = ''
for pick in self.get_evt_data().picks:
picks_str += str(pick) + '\n'
return picks_str
def getParent(self):
"""
Get PySide.QtGui.QWidget parent object
"""
return self._parent return self._parent
def filter_wf_data(self, **kwargs): def isNew(self):
self.waveform_data.wfdata.detrend('linear') return self._new
self.waveform_data.wfdata.taper(0.02, type='cosine')
self.waveform_data.wfdata.filter(**kwargs)
self.waveform_data.dirty = True
def set_wf_data(self, fnames: List[str], fnames_alt: List[str] = None, check_rotated=False, metadata=None, tstart=0, tstop=0): def setNew(self):
return self.waveform_data.load_waveforms(fnames, fnames_alt, check_rotated, metadata, tstart, tstop) self._new = True
def reset_wf_data(self): def checkEvent(self, event, fcheck, forceOverwrite=False):
self.waveform_data.reset() """
Check information in supplied event and own event and replace with own
information if no other information are given or forced by forceOverwrite
:param event: Event that supplies information for comparison
:type event: pylot.core.util.event.Event
:param fcheck: check and delete existing information
can be a str or a list of strings of ['manual', 'auto', 'origin', 'magnitude']
:type fcheck: str, [str]
:param forceOverwrite: Set to true to force overwrite own information. If false,
supplied information from event is only used if there is no own information in that
category (given in fcheck: manual, auto, origin, magnitude)
:type forceOverwrite: bool
:return:
:rtype: None
"""
if 'origin' in fcheck:
self.replaceOrigin(event, forceOverwrite)
if 'magnitude' in fcheck:
self.replaceMagnitude(event, forceOverwrite)
if 'auto' in fcheck:
self.replacePicks(event, 'auto')
if 'manual' in fcheck:
self.replacePicks(event, 'manual')
def get_wf_data(self): def replaceOrigin(self, event, forceOverwrite=False):
return self.waveform_data.wfdata """
Replace own origin with the one supplied in event if own origin is not
existing or forced by forceOverwrite = True
:param event: Event that supplies information for comparison
:type event: pylot.core.util.event.Event
:param forceOverwrite: always replace own information with supplied one if true
:type forceOverwrite: bool
:return:
:rtype: None
"""
if self.get_evt_data().origins or forceOverwrite:
if event.origins:
print("Found origin, replace it by new origin.")
event.origins = self.get_evt_data().origins
def rotate_wf_data(self): def replaceMagnitude(self, event, forceOverwrite=False):
self.waveform_data.rotate_zne(self.metadata) """
Replace own magnitude with the one supplied in event if own magnitude is not
existing or forced by forceOverwrite = True
:param event: Event that supplies information for comparison
:type event: pylot.core.util.event.Event
:param forceOverwrite: always replace own information with supplied one if true
:type forceOverwrite: bool
:return:
:rtype: None
"""
if self.get_evt_data().magnitudes or forceOverwrite:
if event.magnitudes:
print("Found magnitude, replace it by new magnitude")
event.magnitudes = self.get_evt_data().magnitudes
def replacePicks(self, event, picktype):
"""
Replace picks in event with own picks
:param event: Event that supplies information for comparison
:type event: pylot.core.util.event.Event
:param picktype: 'auto' or 'manual' picks
:type picktype: str
:return:
:rtype: None
"""
checkflag = 1
picks = event.picks
# remove existing picks
for j, pick in reversed(list(enumerate(picks))):
try:
if picktype in str(pick.method_id.id):
picks.pop(j)
checkflag = 2
except AttributeError as e:
msg = '{}'.format(e)
print(e)
checkflag = 0
if checkflag > 0:
if checkflag == 1:
print("Write new %s picks to catalog." % picktype)
if checkflag == 2:
print("Found %s pick(s), remove them and append new picks to catalog." % picktype)
# append new picks
for pick in self.get_evt_data().picks:
if picktype in str(pick.method_id.id):
picks.append(pick)
def getID(self):
"""
Get unique resource id
"""
try:
return self.evtdata.get('resource_id').id
except:
return None
def filterWFData(self, kwargs):
"""
Filter waveform data
:param kwargs: arguments to pass through to filter function
"""
data = self.getWFData()
data.detrend('linear')
data.taper(0.02, type='cosine')
data.filter(**kwargs)
self.dirty = True
def setWFData(self, fnames, fnames_alt=None, checkRotated=False, metadata=None, tstart=0, tstop=0):
"""
Clear current waveform data and set given waveform data
:param fnames: waveform data names to append
:param fnames_alt: alternative data to show (e.g. synthetic/processed)
:type fnames: list
"""
def check_fname_exists(filenames: list) -> list:
if filenames:
filenames = [fn for fn in filenames if os.path.isfile(fn)]
return filenames
self.wfdata = Stream()
self.wforiginal = None
self.wf_alt = Stream()
if tstart == tstop:
tstart = tstop = None
self.tstart = tstart
self.tstop = tstop
# remove directories
fnames = check_fname_exists(fnames)
fnames_alt = check_fname_exists(fnames_alt)
if fnames is not None:
self.appendWFData(fnames)
if fnames_alt is not None:
self.appendWFData(fnames_alt, alternative=True)
else:
return False
# check for gaps and merge
self.wfdata, _ = check_for_gaps_and_merge(self.wfdata)
# check for nans
check_for_nan(self.wfdata)
# check for stations with rotated components
if checkRotated and metadata is not None:
self.wfdata = check4rotated(self.wfdata, metadata, verbosity=0)
# trim station components to same start value
trim_station_components(self.wfdata, trim_start=True, trim_end=False)
# make a copy of original data
self.wforiginal = self.getWFData().copy()
self.dirty = False
return True
def appendWFData(self, fnames, alternative=False):
"""
Read waveform data from fnames and append it to current wf data
:param fnames: waveform data to append
:type fnames: list
"""
assert isinstance(fnames, list), "input parameter 'fnames' is " \
"supposed to be of type 'list' " \
"but is actually" \
" {0}".format(type(fnames))
if self.dirty:
self.resetWFData()
orig_or_alternative_data = {True: self.wf_alt,
False: self.wfdata}
warnmsg = ''
for fname in set(fnames):
try:
orig_or_alternative_data[alternative] += read(fname, starttime=self.tstart, endtime=self.tstop)
except TypeError:
try:
orig_or_alternative_data[alternative] += read(fname, format='GSE2', starttime=self.tstart, endtime=self.tstop)
except Exception as e:
try:
orig_or_alternative_data[alternative] += read(fname, format='SEGY', starttime=self.tstart,
endtime=self.tstop)
except Exception as e:
warnmsg += '{0}\n{1}\n'.format(fname, e)
except SacIOError as se:
warnmsg += '{0}\n{1}\n'.format(fname, se)
if warnmsg:
warnmsg = 'WARNING in appendWFData: unable to read waveform data\n' + warnmsg
print(warnmsg)
def getWFData(self):
return self.wfdata
def getOriginalWFData(self):
return self.wforiginal
def getAltWFdata(self):
return self.wf_alt
def resetWFData(self):
"""
Set waveform data to original waveform data
"""
if self.getOriginalWFData():
self.wfdata = self.getOriginalWFData().copy()
else:
self.wfdata = Stream()
self.dirty = False
def resetPicks(self):
"""
Clear all picks from event
"""
self.get_evt_data().picks = []
def get_evt_data(self):
return self.evtdata
def setEvtData(self, event):
self.evtdata = event
def applyEVTData(self, data, typ='pick'):
"""
Either takes an `obspy.core.event.Event` object and applies all new
information on the event to the actual data if typ is 'event or
creates ObsPy pick objects and append it to the picks list from the
PyLoT dictionary contain all picks if type is pick
:param data: data to apply, either picks or complete event
:type data:
:param typ: which event data to apply, 'pick' or 'event'
:type typ: str
:param authority_id: (currently unused)
:type: str
:raise OverwriteError:
"""
def applyPicks(picks):
"""
Creates ObsPy pick objects and append it to the picks list from the
PyLoT dictionary contain all picks.
:param picks:
:raise OverwriteError: raises an OverwriteError if the picks list is
not empty. The GUI will then ask for a decision.
"""
# firstonset = find_firstonset(picks)
# check for automatic picks
print("Writing phases to ObsPy-quakeml file")
for key in picks:
if not picks[key].get('P'):
continue
if picks[key]['P']['picker'] == 'auto':
print("Existing auto-picks will be overwritten in pick-dictionary!")
picks = picks_from_picksdict(picks)
break
else:
if self.get_evt_data().picks:
raise OverwriteError('Existing picks would be overwritten!')
else:
picks = picks_from_picksdict(picks)
break
self.get_evt_data().picks = picks
def applyEvent(event):
"""
takes an `obspy.core.event.Event` object and applies all new
information on the event to the actual data
:param event:
"""
if event is None:
print("applyEvent: Received None")
return
if self.isNew():
self.setEvtData(event)
else:
# prevent overwriting original pick information
event_old = self.get_evt_data()
if not event_old.resource_id == event.resource_id:
print("WARNING: Missmatch in event resource id's: {} and {}".format(
event_old.resource_id,
event.resource_id))
else:
picks = copy.deepcopy(event_old.picks)
event = merge_picks(event, picks)
# apply event information from location
event_old.update(event)
applydata = {'pick': applyPicks,
'event': applyEvent}
applydata[typ](data)
self._new = False
@dataclass
class SeismicEventData:
event_id: str = ""
catalog: Catalog = field(default_factory=Catalog)
def find_event_files(self, directory: str, extensions: List[str]) -> List[str]:
"""
Browse the directory to find event files with specified extensions.
Parameters:
directory (str): The directory path to search for event files.
extensions (List[str]): List of file extensions to search for.
Returns:
List[str]: List of file paths that match the given extensions.
Example:
>>> sed = SeismicEventData()
>>> sed.find_event_files('test_directory', ['.xml', '.quakeml']) # doctest: +SKIP
['test_directory/event1.xml', 'test_directory/event2.quakeml']
"""
matches = []
for root, _, files in os.walk(directory):
for ext in extensions:
for filename in fnmatch.filter(files, f'*{ext}'):
matches.append(os.path.join(root, filename))
return matches
def read_event_from_directory(self, directory: str, extensions: List[str], format: str) -> None:
"""
Read a seismic event from the first found file in the directory with specified format.
Parameters:
directory (str): The directory path to search for event files.
extensions (List[str]): List of file extensions to search for.
format (str): The format to read the event file.
Example:
>>> sed = SeismicEventData()
>>> sed.read_event_from_directory('test_directory', ['.xml', '.quakeml'], 'QUAKEML') # doctest: +SKIP
"""
event_files = self.find_event_files(directory, extensions)
if event_files:
self.read_event(event_files[0], format)
else:
raise FileNotFoundError(f"No event files found in directory {directory} with extensions {extensions}.")
def read_event(self, file_path: str, format: str) -> None:
"""
Read a seismic event from a file with specified format.
Parameters:
file_path (str): The path to the event file.
format (str): The format to read the event file.
Example:
>>> sed = SeismicEventData()
>>> sed.read_event('test_directory/event1.xml', 'QUAKEML') # doctest: +SKIP
"""
if os.path.exists(file_path):
self.catalog = read_events(file_path, format=format)
self.event_id = self.catalog[0].resource_id.id.split('/')[-1] if self.catalog else ""
else:
raise FileNotFoundError(f"File {file_path} does not exist.")
def write_event(self, file_path: str, format: str) -> None:
"""
Write the seismic event to a file with specified format.
Parameters:
file_path (str): The path to the output file.
format (str): The format to write the event file.
Example:
>>> sed = SeismicEventData(event_id='12345')
>>> sed.write_event('output_directory/event1.xml', 'QUAKEML') # doctest: +SKIP
"""
self.catalog.write(file_path, format=format)
@dataclass
class WaveformData:
stream: Stream = field(default_factory=Stream)
def find_waveform_files(self, directory: str, extensions: List[str]) -> List[str]:
"""
Browse the directory to find waveform files with specified extensions.
Parameters:
directory (str): The directory path to search for waveform files.
extensions (List[str]): List of file extensions to search for.
Returns:
List[str]: List of file paths that match the given extensions.
Example:
>>> wd = WaveformData()
>>> wd.find_waveform_files('test_directory', ['.mseed']) # doctest: +SKIP
['test_directory/waveform1.mseed']
"""
matches = []
for root, _, files in os.walk(directory):
for ext in extensions:
for filename in fnmatch.filter(files, f'*{ext}'):
matches.append(os.path.join(root, filename))
return matches
def read_waveform_from_directory(self, directory: str, extensions: List[str], format: str) -> None:
"""
Read waveform data from the first found file in the directory with specified format.
Parameters:
directory (str): The directory path to search for waveform files.
extensions (List[str]): List of file extensions to search for.
format (str): The format to read the waveform file.
Example:
>>> wd = WaveformData()
>>> wd.read_waveform_from_directory('test_directory', ['.mseed'], 'MSEED') # doctest: +SKIP
"""
waveform_files = self.find_waveform_files(directory, extensions)
if waveform_files:
self.read_waveform(waveform_files[0], format)
else:
raise FileNotFoundError(f"No waveform files found in directory {directory} with extensions {extensions}.")
def read_waveform(self, file_path: str, format: str) -> None:
"""
Read waveform data from a file with specified format.
Parameters:
file_path (str): The path to the waveform file.
format (str): The format to read the waveform file.
Example:
>>> wd = WaveformData()
>>> wd.read_waveform('test_directory/waveform1.mseed', 'MSEED') # doctest: +SKIP
"""
if os.path.exists(file_path):
self.stream = read(file_path, format=format)
else:
raise FileNotFoundError(f"File {file_path} does not exist.")
def write_waveform(self, file_path: str, format: str) -> None:
"""
Write the waveform data to a file with specified format.
Parameters:
file_path (str): The path to the output file.
format (str): The format to write the waveform file.
Example:
>>> wd = WaveformData()
>>> wd.write_waveform('output_directory/waveform1.mseed', 'MSEED') # doctest: +SKIP
"""
self.stream.write(file_path, format=format)
# Example usage:
# seismic_event = SeismicEventData()
# seismic_event.read_event_from_directory("path_to_directory", extensions=[".xml", ".quakeml"], format="QUAKEML")
# seismic_event.write_event("output_event_file.xml", format="QUAKEML")
# waveform_data = WaveformData()
# waveform_data.read_waveform_from_directory("path_to_directory", extensions=[".mseed"], format="MSEED")
# waveform_data.write_waveform("output_waveform_file.mseed", format="MSEED")
class GenericDataStructure(object): class GenericDataStructure(object):

View File

@@ -1,106 +0,0 @@
import copy
from dataclasses import dataclass
from typing import Union
from obspy import read_events
from obspy.core.event import Event as ObsPyEvent
@dataclass
class EventData:
    """
    Container for a single ObsPy event plus PyLoT bookkeeping.

    The ``@dataclass`` decorator supplies ``__repr__``/``__eq__``; the custom
    ``__init__`` below takes precedence over the generated one so that the
    heterogeneous ``evtdata`` argument (event object, dict or file path) can
    be normalised on construction.
    """
    # the wrapped ObsPy event (set by __init__, never left unset)
    evtdata: Union[ObsPyEvent, None] = None
    # True when the event was freshly created rather than read from a source
    _new: bool = False

    def __init__(self, evtdata=None):
        self.set_event_data(evtdata)

    def set_event_data(self, evtdata):
        """
        Normalise *evtdata* into an ObsPy event.

        :param evtdata: an ObsPy event, a dict of PILOT reader kwargs,
            a path to an event file, or None (creates an empty event and
            marks this container as new)
        """
        if isinstance(evtdata, ObsPyEvent):
            self.evtdata = evtdata
        elif isinstance(evtdata, dict):
            # NOTE(review): read_pilot_event is not defined on this class --
            # presumably pylot.core.io.phases.readPILOTEvent; confirm import
            self.evtdata = self.read_pilot_event(**evtdata)
        elif isinstance(evtdata, str):
            self.evtdata = self.read_event_file(evtdata)
        else:
            self.set_new()
            self.evtdata = ObsPyEvent(picks=[])

    def read_event_file(self, evtdata: str) -> ObsPyEvent:
        """
        Read a single event from file, falling back to PILOT-style handling
        for formats unknown to obspy.

        :param evtdata: path to the event file
        :raise ValueError: if the file contains more than one event
        """
        try:
            cat = read_events(evtdata)
            if len(cat) != 1:
                raise ValueError(f'ambiguous event information for file: {evtdata}')
            return cat[0]
        except TypeError as e:
            # Propagate the fallback result: previously the return value was
            # dropped here, so unknown formats silently yielded None.
            return self.handle_event_file_error(e, evtdata)

    def handle_event_file_error(self, e: TypeError, evtdata: str) -> ObsPyEvent:
        """
        Fallback reader for files obspy cannot identify.

        :param e: the TypeError raised by read_events
        :param evtdata: path to the event file
        :return: the event constructed by the fallback
        :raise NotImplementedError: for PILOT 'LOC' files
        """
        if 'Unknown format for file' not in str(e):
            raise e
        if 'PHASES' in evtdata:
            # TODO(review): picksdict_from_pilot / picks_from_picksdict are
            # not defined on this class -- they live in pylot.core.io.phases;
            # confirm the intended call site
            picks = self.picksdict_from_pilot(evtdata)
            return ObsPyEvent(picks=self.picks_from_picksdict(picks))
        if 'LOC' in evtdata:
            raise NotImplementedError('PILOT location information read support not yet implemented.')
        if 'event.pkl' in evtdata:
            # TODO(review): qml_from_obspy_dmt is not defined on this class --
            # presumably pylot.core.util.obspyDMT_interface.qml_from_obspyDMT
            return self.qml_from_obspy_dmt(evtdata)
        raise e

    def set_new(self):
        """Mark this event as freshly created (not read from a source)."""
        self._new = True

    def is_new(self) -> bool:
        """Return True if the event was freshly created."""
        return self._new

    def get_picks_str(self) -> str:
        """Return all picks of the event, one per line."""
        return '\n'.join(str(pick) for pick in self.evtdata.picks)

    def replace_origin(self, event: ObsPyEvent, force_overwrite: bool = False):
        """Copy own origins onto *event* if own origins exist or forced."""
        if self.evtdata.origins or force_overwrite:
            event.origins = self.evtdata.origins

    def replace_magnitude(self, event: ObsPyEvent, force_overwrite: bool = False):
        """Copy own magnitudes onto *event* if own magnitudes exist or forced."""
        if self.evtdata.magnitudes or force_overwrite:
            event.magnitudes = self.evtdata.magnitudes

    def replace_picks(self, event: ObsPyEvent, picktype: str):
        """
        Remove picks of *picktype* ('auto' or 'manual') from *event* and
        append own picks of that type instead.
        """
        checkflag = 1
        picks = event.picks
        # iterate in reverse so pop(j) does not shift pending indices
        for j, pick in reversed(list(enumerate(picks))):
            if picktype in str(pick.method_id.id):
                picks.pop(j)
                checkflag = 2
        if checkflag > 0:
            for pick in self.evtdata.picks:
                if picktype in str(pick.method_id.id):
                    picks.append(pick)

    def get_id(self) -> Union[str, None]:
        """Return the event's unique resource id, or None if unset."""
        try:
            return self.evtdata.resource_id.id
        except AttributeError:
            # resource_id (or its .id) missing -- no identifier available
            return None

    def apply_event_data(self, data, typ='pick'):
        """
        Apply external data to the event.

        :param data: picks (typ='pick') or a complete event (typ='event')
        :param typ: 'pick' or 'event'
        """
        if typ == 'pick':
            self.apply_picks(data)
        elif typ == 'event':
            self.apply_event(data)

    def apply_picks(self, picks):
        """Replace the event's picks with *picks*."""
        self.evtdata.picks = picks

    def apply_event(self, event: ObsPyEvent):
        """
        Merge information from *event* into the own event; own picks are
        preserved when the resource ids match.
        """
        if self.is_new():
            self.evtdata = event
        else:
            old_event = self.evtdata
            if old_event.resource_id == event.resource_id:
                picks = copy.deepcopy(old_event.picks)
                # TODO(review): merge_picks is not defined on this class --
                # presumably pylot.core.io.phases.merge_picks
                event = self.merge_picks(event, picks)
                old_event.update(event)
            else:
                print(f"WARNING: Mismatch in event resource id's: {old_event.resource_id} and {event.resource_id}")

View File

@@ -1,177 +0,0 @@
import os
from pylot.core.util.event import Event
class Project(object):
    """
    Pickable container for a PyLoT project: event list, file locations,
    processing parameters and dirty-state bookkeeping.
    """
    # TODO: remove rootpath

    def __init__(self):
        self.eventlist = []       # list of pylot.core.util.event.Event
        self.location = None      # path the project was saved to / loaded from
        self.rootpath = None
        self.datapath = None
        self.dirty = False        # True when there are unsaved changes
        self.parameter = None     # PylotParameter-like mapping
        self._table = None        # GUI table; excluded from pickling (see save)

    def add_eventlist(self, eventlist):
        '''
        Add events from an eventlist containing paths to event directories.
        Will skip existing paths.
        '''
        if len(eventlist) == 0:
            return
        for item in eventlist:
            event = Event(item)
            event.rootpath = self.parameter['rootpath']
            event.database = self.parameter['database']
            event.datapath = self.parameter['datapath']
            if not event.path in self.getPaths():
                self.eventlist.append(event)
                self.setDirty()
            else:
                print('Skipping event with path {}. Already part of project.'.format(event.path))
        self.eventlist.sort(key=lambda x: x.pylot_id)
        self.search_eventfile_info()

    def remove_event(self, event):
        '''Remove a single event object from the project.'''
        self.eventlist.remove(event)

    def remove_event_by_id(self, eventID):
        '''Remove the first event whose resource id contains eventID.'''
        for event in self.eventlist:
            if eventID in str(event.resource_id):
                self.remove_event(event)
                break

    def read_eventfile_info(self, filename, separator=','):
        '''
        Try to read event information from file (:param:filename) comparing specific event datetimes.
        File structure (each row): event, date, time, magnitude, latitude, longitude, depth
        separated by :param:separator each.
        '''
        with open(filename, 'r') as infile:
            for line in infile.readlines():
                eventID, date, time, mag, lat, lon, depth = line.split(separator)[:7]
                # skip header or malformed date rows (need exactly day/month/year)
                try:
                    day, month, year = date.split('/')
                except ValueError:
                    continue
                year = int(year)
                # hardcoded, if year only consists of 2 digits (e.g. 16 instead of 2016)
                if year < 100:
                    year += 2000
                timestring = '{}-{}-{}T{}'.format(year, month, day, time)
                try:
                    # NOTE(review): UTCDateTime is not imported in this module --
                    # a NameError here is swallowed by this except; confirm import
                    timestring = UTCDateTime(timestring)
                except Exception as e:
                    print(e, timestring, filename)
                    continue
                for event in self.eventlist:
                    if eventID in str(event.resource_id) or eventID in event.origins:
                        if event.origins:
                            origin = event.origins[0]  # should have only one origin
                            if origin.time == timestring:
                                origin.latitude = float(lat)
                                origin.longitude = float(lon)
                                origin.depth = float(depth)
                            else:
                                continue
                        elif not event.origins:
                            # NOTE(review): Origin/Magnitude are not imported in
                            # this module -- confirm obspy.core.event imports
                            origin = Origin(resource_id=event.resource_id,
                                            time=timestring, latitude=float(lat),
                                            longitude=float(lon), depth=float(depth))
                            event.origins.append(origin)
                        event.magnitudes.append(Magnitude(resource_id=event.resource_id,
                                                          mag=float(mag),
                                                          mag_type='M'))
                        break

    def search_eventfile_info(self):
        '''
        Search all datapaths in rootpath for filenames with given file extension fext
        and try to read event info from it
        '''
        datapaths = []
        fext = '.csv'
        for event in self.eventlist:
            if not event.datapath in datapaths:
                datapaths.append(event.datapath)
        for datapath in datapaths:
            # datapath = os.path.join(self.rootpath, datapath)
            if os.path.isdir(datapath):
                for filename in os.listdir(datapath):
                    filename = os.path.join(datapath, filename)
                    if os.path.isfile(filename) and filename.endswith(fext):
                        try:
                            self.read_eventfile_info(filename)
                        except Exception as e:
                            print('Failed on reading eventfile info from file {}: {}'.format(filename, e))
            else:
                print("Directory %s does not exist!" % datapath)

    def getPaths(self):
        '''
        Returns paths (eventlist) of all events saved in the project.
        '''
        paths = []
        for event in self.eventlist:
            paths.append(event.path)
        return paths

    def setDirty(self, value=True):
        '''Flag the project as having (or not having) unsaved changes.'''
        self.dirty = value

    def getEventFromPath(self, path):
        '''
        Search for an event in the project by event path.
        '''
        for event in self.eventlist:
            if event.path == path:
                return event

    def save(self, filename=None):
        '''
        Save PyLoT Project to a file.
        Can be loaded by using project.load(filename).

        :param filename: target path; defaults to the project's last location
        :return: True on success, False otherwise
        '''
        try:
            import pickle
        except ImportError:
            import _pickle as pickle

        if filename:
            self.location = filename
        else:
            filename = self.location

        table = self._table  # workaround: the GUI table cannot be pickled
        try:
            # 'with' guarantees the file handle is closed even on failure
            # (the previous implementation leaked the handle on success)
            with open(filename, 'wb') as outfile:
                self._table = []  # strip unpicklable table before dumping
                pickle.dump(self, outfile, protocol=pickle.HIGHEST_PROTOCOL)
            self.setDirty(False)
            return True
        except Exception as e:
            print('Could not pickle PyLoT project. Reason: {}'.format(e))
            self.setDirty()
            return False
        finally:
            self._table = table  # always restore the table reference

    @staticmethod
    def load(filename):
        '''
        Load project from filename.
        '''
        import pickle
        with open(filename, 'rb') as infile:
            project = pickle.load(infile)
        project.location = filename
        print('Loaded %s' % filename)
        return project

View File

@@ -1,13 +0,0 @@
import os
from typing import List
def validate_filenames(filenames: List[str]) -> List[str]:
    """
    Filter a list of candidate filenames down to those that exist on disk.

    :param filenames: candidate file names
    :type filenames: List[str]
    :return: subset of *filenames* pointing at existing regular files
    :rtype: List[str]
    """
    return list(filter(os.path.isfile, filenames))

View File

@@ -1,123 +0,0 @@
import logging
from dataclasses import dataclass, field
from typing import Union, List
from obspy import Stream, read
from obspy.io.sac import SacIOError
from pylot.core.io.utils import validate_filenames
from pylot.core.util.dataprocessing import Metadata
from pylot.core.util.utils import get_stations, check_for_nan, check4rotated
@dataclass
class WaveformData:
    # wfdata:     working stream (may be merged/rotated/modified)
    # wforiginal: untouched copy of wfdata as loaded, None until loaded
    # wf_alt:     alternative stream (e.g. denoised/synthetic) for comparison
    # dirty:      True when wfdata deviates from wforiginal
    wfdata: Stream = field(default_factory=Stream)
    wforiginal: Union[Stream, None] = None
    wf_alt: Stream = field(default_factory=Stream)
    dirty: bool = False

    def load_waveforms(self, fnames: List[str], fnames_alt: List[str] = None, check_rotated=False, metadata=None, tstart=0, tstop=0):
        """
        Load waveform files into wfdata (and, optionally, wf_alt).

        :param fnames: filenames of the genuine waveform data
        :param fnames_alt: optional filenames of alternative comparison data
        :param check_rotated: rotate non-ZNE traces to ZNE (requires metadata)
        :param metadata: station metadata used for rotation
        :param tstart: currently unused -- TODO confirm intended use (trim window?)
        :param tstop: currently unused -- TODO confirm intended use (trim window?)
        :return: True on success, False if no valid filenames were supplied
        """
        fn_list = validate_filenames(fnames)
        # bugfix: alt_fn_list was referenced below without being bound when
        # fnames_alt was None, causing a NameError if fn_list was empty too
        alt_fn_list = []
        if not fn_list:
            logging.warning('No valid filenames given for loading waveforms')
        else:
            self.clear()
            self.add_waveforms(fn_list)
        if fnames_alt is not None:
            alt_fn_list = validate_filenames(fnames_alt)
            if not alt_fn_list:
                logging.warning('No valid alternative filenames given for loading waveforms')
            else:
                self.add_waveforms(alt_fn_list, alternative=True)
        if not fn_list and not alt_fn_list:
            logging.error('No filenames or alternative filenames given for loading waveforms')
            return False
        self.merge()
        self.replace_nan()
        if check_rotated and metadata:
            # bugfix: metadata was previously not forwarded, so rotation
            # always ran with metadata=None
            self.rotate_zne(metadata)
        self.trim_station_traces()
        self.wforiginal = self.wfdata.copy()
        self.dirty = False
        return True

    def add_waveforms(self, fnames: List[str], alternative: bool = False):
        """
        Read every file in fnames and append its traces to wf_alt
        (alternative=True) or wfdata. Unreadable files are reported in a
        single warning instead of raising.
        """
        data_stream = self.wf_alt if alternative else self.wfdata
        warnmsg = ''
        for fname in set(fnames):
            try:
                data_stream += read(fname)
            except TypeError:
                # format could not be auto-detected: retry with explicit formats
                try:
                    data_stream += read(fname, format='GSE2')
                except Exception:
                    try:
                        data_stream += read(fname, format='SEGY')
                    except Exception as e:
                        warnmsg += f'{fname}\n{e}\n'
            except SacIOError as se:
                warnmsg += f'{fname}\n{se}\n'
        if warnmsg:
            print(f'WARNING in add_waveforms: unable to read waveform data\n{warnmsg}')

    def clear(self):
        """Drop all loaded waveform data (working, original and alternative)."""
        self.wfdata = Stream()
        self.wforiginal = None
        self.wf_alt = Stream()

    def reset(self):
        """
        Resets the waveform data to its original state.
        """
        if self.wforiginal:
            self.wfdata = self.wforiginal.copy()
        else:
            self.wfdata = Stream()
        self.dirty = False

    def merge(self):
        """
        check for gaps in Stream and merge if gaps are found
        """
        gaps = self.wfdata.get_gaps()
        if gaps:
            merged = ['{}.{}.{}.{}'.format(*gap[:4]) for gap in gaps]
            self.wfdata.merge(method=1)
            logging.info('Merged the following stations because of gaps:')
            for station in merged:
                logging.info(station)

    def replace_nan(self):
        """
        Replace all NaNs in data with 0. (in place)
        """
        self.wfdata = check_for_nan(self.wfdata)

    def rotate_zne(self, metadata: Metadata = None):
        """
        Check all traces in stream for rotation. If a trace is not in ZNE rotation (last symbol of channel code is numeric) and the trace
        is in the metadata with azimuth and dip, rotate it to classical ZNE orientation.
        Rotating the traces requires them to be of the same length, so, all traces will be trimmed to a common length as a
        side effect.
        """
        self.wfdata = check4rotated(self.wfdata, metadata)

    def trim_station_traces(self):
        """
        trim data stream to common time window
        """
        for station in get_stations(self.wfdata):
            station_traces = self.wfdata.select(station=station)
            station_traces.trim(starttime=max([trace.stats.starttime for trace in station_traces]),
                                endtime=min([trace.stats.endtime for trace in station_traces]))

View File

@@ -474,8 +474,8 @@ class Array_map(QtWidgets.QWidget):
transform=ccrs.PlateCarree(), label='deleted')) transform=ccrs.PlateCarree(), label='deleted'))
def openPickDlg(self, ind): def openPickDlg(self, ind):
wfdata = self._parent.get_data().get_wf_data() wfdata = self._parent.get_data().getWFData()
wfdata_comp = self._parent.get_data().get_wf_dataComp() wfdata_comp = self._parent.get_data().getWFDataComp()
for index in ind: for index in ind:
network, station = self._station_onpick_ids[index].split('.')[:2] network, station = self._station_onpick_ids[index].split('.')[:2]
pyl_mw = self._parent pyl_mw = self._parent

View File

@@ -338,6 +338,19 @@ class Metadata(object):
return inv, exc return inv, exc
def time_from_header(header):
    """
    Function takes in the second line from a .gse file and takes out the date and time from that line.
    :param header: second line from .gse file
    :type header: string
    :return: a list of integers of form [year, month, day, hour, minute, second, microsecond]
    """
    fields = header.split(' ')
    # fields[1] is the date (Y/M/D), fields[2] the time (H:M:S.micro)
    parts = fields[1].split('/') + fields[2].split(':')
    # split the seconds field into whole seconds and the fractional part
    parts = parts[:-1] + parts[-1].split('.')
    return list(map(int, parts))
def check_time(datetime): def check_time(datetime):
""" """
Function takes in date and time as list and validates it's values by trying to make an UTCDateTime object from it Function takes in date and time as list and validates it's values by trying to make an UTCDateTime object from it
@@ -375,6 +388,167 @@ def check_time(datetime):
return False return False
# TODO: change root to datapath
def get_file_list(root_dir):
    """
    Collect all *.gse filenames located directly in root_dir.
    :param root_dir: directory containing the .gse files
    :type root_dir: string
    :return: list of filenames (without path to them)
    """
    return glob.glob1(root_dir, '*.gse')
def checks_station_second(datetime, file):
    """
    Function uses the given list to check if the parameter 'second' is set to 60 by mistake
    and sets the time correctly if so. Can only correct time if no date change would be necessary.
    :param datetime: [year, month, day, hour, minute, second, microsecond]
    :return: returns the input with the correct value for second
    """
    # nothing to fix unless the seconds field overflowed to 60
    if datetime[5] != 60:
        return datetime
    if datetime[4] != 59:
        # simple case: roll over into the next minute
        datetime[4] += 1
        datetime[5] = 0
        return datetime
    if datetime[3] == 23:
        # rolling over would change the date, which this function cannot do
        err_msg = 'Date should be next day. ' \
                  'File not changed: {0}'.format(file)
        raise ValueError(err_msg)
    # roll over into the next hour
    datetime[3] += 1
    datetime[4] = 0
    datetime[5] = 0
    return datetime
def make_time_line(line, datetime):
    """
    Function takes in the original line from a .gse file and a list of date and
    time values to make a new line with corrected date and time.
    :param line: second line from .gse file.
    :type line: string
    :param datetime: list of integers [year, month, day, hour, minute, second, microsecond]
    :type datetime: list
    :return: returns a string to write it into a file.
    """
    hours, minutes, seconds, micro = datetime[3:7]
    # microseconds are written as milliseconds (3 digits), hence * 1e-3
    insertion = '{0:02d}:{1:02d}:{2:02d}.{3:03d}'.format(int(hours),
                                                         int(minutes),
                                                         int(seconds),
                                                         int(micro * 1e-3))
    # the time field occupies columns 16-27 of the header line
    return line[:16] + insertion + line[28:]
def evt_head_check(root_dir, out_dir=None):
    """
    A function to make sure that an arbitrary number of .gse files have correct values in their header.
    :param root_dir: a directory leading to the .gse files.
    :type root_dir: string
    :param out_dir: a directory to store the new files somewhere else.
    :return: returns nothing
    """
    if not out_dir:
        # without an output directory the originals will be modified
        print('WARNING files are going to be overwritten!')
        inp = str(input('Continue? [y/N]'))
        if not inp == 'y':
            sys.exit()
    nfiles = 0
    for file in get_file_list(root_dir):
        with open(os.path.join(root_dir, file), 'r') as infile:
            lines = infile.readlines()
        datetime = time_from_header(lines[1])
        # skip files whose header time is already valid
        if check_time(datetime):
            continue
        nfiles += 1
        datetime = checks_station_second(datetime, file)
        print('writing ' + file)
        # write File
        lines[1] = make_time_line(lines[1], datetime)
        target_dir = out_dir if out_dir else root_dir
        with open(os.path.join(target_dir, file), 'w') as out:
            out.writelines(lines)
    print(nfiles)
def read_metadata(path_to_inventory):
    """
    take path_to_inventory and return either the corresponding list of files
    found or the Parser object for a network dataless seed volume to prevent
    read overhead for large dataless seed volumes
    :param path_to_inventory: file or directory holding the metadata
    :return: tuple containing the inventory type found and either a list of
        files or an `obspy.io.xseed.Parser` object (None if nothing was found)
    :rtype: tuple
    """
    dlfile = list()
    invfile = list()
    respfile = list()
    # possible file extensions specified here:
    inv = dict(dless=dlfile, xml=invfile, resp=respfile, dseed=dlfile[:])
    if os.path.isfile(path_to_inventory):
        ext = os.path.splitext(path_to_inventory)[1].split('.')[1]
        inv[ext] += [path_to_inventory]
    else:
        for ext in inv.keys():
            inv[ext] += glob.glob1(path_to_inventory, '*.{0}'.format(ext))
    invtype = key_for_set_value(inv)
    if invtype is None:
        print("Neither dataless-SEED file, inventory-xml file nor "
              "RESP-file found!")
        print("!!WRONG CALCULATION OF SOURCE PARAMETERS!!")
        # bugfix: a trailing comma previously made robj the tuple (None,)
        robj = None
    elif invtype == 'dless':  # prevent multiple read of large dlsv
        print("Reading metadata information from dataless-SEED file ...")
        if len(inv[invtype]) == 1:
            fullpath_inv = os.path.join(path_to_inventory, inv[invtype][0])
            robj = Parser(fullpath_inv)
        else:
            robj = inv[invtype]
    else:
        print("Reading metadata information from inventory-xml file ...")
        robj = read_inventory(inv[invtype])
    return invtype, robj
# idea to optimize read_metadata
# def read_metadata_new(path_to_inventory):
# metadata_objects = []
# # read multiple files from directory
# if os.path.isdir(path_to_inventory):
# fnames = os.listdir(path_to_inventory)
# # read single file
# elif os.path.isfile(path_to_inventory):
# fnames = [path_to_inventory]
# else:
# print("Neither dataless-SEED file, inventory-xml file nor "
# "RESP-file found!")
# print("!!WRONG CALCULATION OF SOURCE PARAMETERS!!")
# fnames = []
#
# for fname in fnames:
# path_to_inventory_filename = os.path.join(path_to_inventory, fname)
# try:
# ftype, robj = read_metadata_file(path_to_inventory_filename)
# metadata_objects.append((ftype, robj))
# except Exception as e:
# print('Could not read metadata file {} '
# 'because of the following Exception: {}'.format(path_to_inventory_filename, e))
# return metadata_objects
def restitute_trace(input_tuple): def restitute_trace(input_tuple):
def no_metadata(tr, seed_id): def no_metadata(tr, seed_id):
print('no metadata file found ' print('no metadata file found '

View File

@@ -20,7 +20,7 @@ import matplotlib
matplotlib.use('Qt5Agg') matplotlib.use('Qt5Agg')
sys.path.append(os.path.join('/'.join(sys.argv[0].split('/')[:-1]), '../../..')) sys.path.append(os.path.join('/'.join(sys.argv[0].split('/')[:-1]), '../../..'))
from pylot.core.io.project import Project from PyLoT import Project
from pylot.core.util.dataprocessing import Metadata from pylot.core.util.dataprocessing import Metadata
from pylot.core.util.array_map import Array_map from pylot.core.util.array_map import Array_map

View File

@@ -8,6 +8,7 @@ import platform
import re import re
import subprocess import subprocess
import warnings import warnings
from typing import Literal, Tuple, Type
from functools import lru_cache from functools import lru_cache
import numpy as np import numpy as np
@@ -106,7 +107,7 @@ def gen_Pool(ncores=0):
print('gen_Pool: Generated multiprocessing Pool with {} cores\n'.format(ncores)) print('gen_Pool: Generated multiprocessing Pool with {} cores\n'.format(ncores))
pool = multiprocessing.Pool(ncores) pool = multiprocessing.Pool(ncores, maxtasksperchild=100)
return pool return pool
@@ -360,7 +361,7 @@ def get_bool(value):
>>> get_bool('Stream') >>> get_bool('Stream')
'Stream' 'Stream'
""" """
if type(value) == bool: if type(value) is bool:
return value return value
elif value in ['True', 'true']: elif value in ['True', 'true']:
return True return True
@@ -901,19 +902,6 @@ def trim_station_components(data, trim_start=True, trim_end=True):
return data return data
def merge_stream(stream):
    """
    Merge the traces of a stream in place when gaps are present.
    :param stream: stream of seismic data
    :return: tuple of the (possibly merged) stream and the gaps reported
        by stream.get_gaps()
    """
    gaps = stream.get_gaps()
    if gaps:
        stream.merge(method=1)
        print('Merged the following stations because of gaps:')
        # report the seed ids (network.station.location.channel) of each gap
        for gap in gaps:
            print('{}.{}.{}.{}'.format(*gap[:4]))
    return stream, gaps
def check4gapsAndRemove(data): def check4gapsAndRemove(data):
""" """
check for gaps in Stream and remove them check for gaps in Stream and remove them
@@ -934,12 +922,12 @@ def check4gapsAndRemove(data):
return data return data
def check4gapsAndMerge(data): def check_for_gaps_and_merge(data):
""" """
check for gaps in Stream and merge if gaps are found check for gaps in Stream and merge if gaps are found
:param data: stream of seismic data :param data: stream of seismic data
:type data: `~obspy.core.stream.Stream` :type data: `~obspy.core.stream.Stream`
:return: data stream :return: data stream, gaps returned from obspy get_gaps
:rtype: `~obspy.core.stream.Stream` :rtype: `~obspy.core.stream.Stream`
""" """
gaps = data.get_gaps() gaps = data.get_gaps()
@@ -950,7 +938,7 @@ def check4gapsAndMerge(data):
for merged_station in merged: for merged_station in merged:
print(merged_station) print(merged_station)
return data return data, gaps
def check4doubled(data): def check4doubled(data):
@@ -1218,6 +1206,7 @@ def identifyPhase(phase):
return False return False
@lru_cache
def identifyPhaseID(phase): def identifyPhaseID(phase):
""" """
Returns phase id (capital P or S) Returns phase id (capital P or S)

View File

@@ -140,6 +140,18 @@ class LogWidget(QtWidgets.QWidget):
self.stderr.append(60 * '#' + '\n\n') self.stderr.append(60 * '#' + '\n\n')
def getDataType(parent):
    """
    Ask the user to choose the phases type via a dialog.
    :param parent: parent widget for the dialog
    :return: 'manual' or 'auto' (shorthand for the 'automatic' choice)
    """
    # fix: local variable previously shadowed the builtin `type`
    selection = QInputDialog().getItem(parent, "Select phases type", "Type:",
                                       ["manual", "automatic"])
    if selection[0].startswith('auto'):
        return 'auto'
    return selection[0]
def plot_pdf(_axes, x, y, annotation, bbox_props, xlabel=None, ylabel=None, def plot_pdf(_axes, x, y, annotation, bbox_props, xlabel=None, ylabel=None,
title=None): title=None):
# try method or data # try method or data
@@ -997,6 +1009,15 @@ class WaveformWidgetPG(QtWidgets.QWidget):
time_ax = np.linspace(time_ax[0], time_ax[-1], num=len(data)) time_ax = np.linspace(time_ax[0], time_ax[-1], num=len(data))
return data, time_ax return data, time_ax
# def getAxes(self):
# return self.axes
# def getXLims(self):
# return self.getAxes().get_xlim()
# def getYLims(self):
# return self.getAxes().get_ylim()
def setXLims(self, lims): def setXLims(self, lims):
vb = self.plotWidget.getPlotItem().getViewBox() vb = self.plotWidget.getPlotItem().getViewBox()
vb.setXRange(float(lims[0]), float(lims[1]), padding=0) vb.setXRange(float(lims[0]), float(lims[1]), padding=0)
@@ -1150,6 +1171,8 @@ class PylotCanvas(FigureCanvas):
break break
if not ax_check: return if not ax_check: return
# self.updateCurrentLimits() #maybe put this down to else:
# calculate delta (relative values in axis) # calculate delta (relative values in axis)
old_x, old_y = self.press_rel old_x, old_y = self.press_rel
xdiff = gui_event.x - old_x xdiff = gui_event.x - old_x
@@ -1357,96 +1380,83 @@ class PylotCanvas(FigureCanvas):
component='*', nth_sample=1, iniPick=None, verbosity=0, component='*', nth_sample=1, iniPick=None, verbosity=0,
plot_additional=False, additional_channel=None, scaleToChannel=None, plot_additional=False, additional_channel=None, scaleToChannel=None,
snr=None): snr=None):
ax = self.prepare_plot() def get_wf_dict(data: Stream = Stream(), linecolor = 'k', offset: float = 0., **plot_kwargs):
self.clearPlotDict()
wfstart, wfend = self.get_wf_range(wfdata)
compclass = self.get_comp_class()
plot_streams = self.get_plot_streams(wfdata, wfdata_compare, component, compclass)
st_main = plot_streams['wfdata']['data']
if mapping:
plot_positions = self.calcPlotPositions(st_main, compclass)
nslc = self.get_sorted_nslc(st_main)
nmax = self.plot_traces(ax, plot_streams, nslc, wfstart, mapping, plot_positions,
scaleToChannel, noiselevel, scaleddata, nth_sample, verbosity)
if plot_additional and additional_channel:
self.plot_additional_trace(ax, wfdata, additional_channel, scaleToChannel,
scaleddata, nth_sample, wfstart)
self.finalize_plot(ax, wfstart, wfend, nmax, zoomx, zoomy, iniPick, title, snr)
def prepare_plot(self):
ax = self.axes[0]
ax.cla()
return ax
def get_wf_range(self, wfdata):
return full_range(wfdata)
def get_comp_class(self):
settings = QSettings()
return SetChannelComponents.from_qsettings(settings)
def get_plot_streams(self, wfdata, wfdata_compare, component, compclass):
def get_wf_dict(data=Stream(), linecolor='k', offset=0., **plot_kwargs):
return dict(data=data, linecolor=linecolor, offset=offset, plot_kwargs=plot_kwargs) return dict(data=data, linecolor=linecolor, offset=offset, plot_kwargs=plot_kwargs)
linecolor = (0., 0., 0., 1.) if not self.style else self.style['linecolor']['rgba_mpl']
plot_streams = {
'wfdata': get_wf_dict(linecolor=linecolor, linewidth=0.7),
'wfdata_comp': get_wf_dict(offset=0.1, linecolor='b', alpha=0.7, linewidth=0.5)
}
if component != '*': ax = self.axes[0]
ax.cla()
self.clearPlotDict()
wfstart, wfend = full_range(wfdata)
nmax = 0
settings = QSettings()
compclass = SetChannelComponents.from_qsettings(settings)
linecolor = (0., 0., 0., 1.) if not self.style else self.style['linecolor']['rgba_mpl']
plot_streams = dict(wfdata=get_wf_dict(linecolor=linecolor, linewidth=0.7),
wfdata_comp=get_wf_dict(offset=0.1, linecolor='b', alpha=0.7, linewidth=0.5))
if not component == '*':
alter_comp = compclass.getCompPosition(component) alter_comp = compclass.getCompPosition(component)
plot_streams['wfdata']['data'] = wfdata.select(component=component) + wfdata.select(component=alter_comp) # alter_comp = str(alter_comp[0])
plot_streams['wfdata']['data'] = wfdata.select(component=component)
plot_streams['wfdata']['data'] += wfdata.select(component=alter_comp)
if wfdata_compare: if wfdata_compare:
plot_streams['wfdata_comp']['data'] = wfdata_compare.select( plot_streams['wfdata_comp']['data'] = wfdata_compare.select(component=component)
component=component) + wfdata_compare.select(component=alter_comp) plot_streams['wfdata_comp']['data'] += wfdata_compare.select(component=alter_comp)
else: else:
plot_streams['wfdata']['data'] = wfdata plot_streams['wfdata']['data'] = wfdata
if wfdata_compare: if wfdata_compare:
plot_streams['wfdata_comp']['data'] = wfdata_compare plot_streams['wfdata_comp']['data'] = wfdata_compare
return plot_streams st_main = plot_streams['wfdata']['data']
def get_sorted_nslc(self, st_main): if mapping:
nslc = [trace.get_id() for trace in st_main if trace.stats.channel[-1] in ['Z', 'N', 'E', '1', '2', '3']] plot_positions = self.calcPlotPositions(st_main, compclass)
nslc.sort(reverse=True)
return nslc # list containing tuples of network, station, channel and plot position (for sorting)
nslc = []
for plot_pos, trace in enumerate(st_main):
if not trace.stats.channel[-1] in ['Z', 'N', 'E', '1', '2', '3']:
print('Warning: Unrecognized channel {}'.format(trace.stats.channel))
continue
nslc.append(trace.get_id())
nslc.sort()
nslc.reverse()
def plot_traces(self, ax, plot_streams, nslc, wfstart, mapping, plot_positions, scaleToChannel, noiselevel,
scaleddata, nth_sample, verbosity):
nmax = 0
for n, seed_id in enumerate(nslc): for n, seed_id in enumerate(nslc):
network, station, location, channel = seed_id.split('.') network, station, location, channel = seed_id.split('.')
for wf_name, wf_dict in plot_streams.items(): for wf_name, wf_dict in plot_streams.items():
st_select = wf_dict.get('data') st_select = wf_dict.get('data')
if not st_select: if not st_select:
continue continue
trace = st_select.select(id=seed_id)[0].copy() st = st_select.select(id=seed_id)
trace = st[0].copy()
if mapping: if mapping:
n = plot_positions[trace.stats.channel] n = plot_positions[trace.stats.channel]
if n > nmax: if n > nmax:
nmax = n nmax = n
if verbosity: if verbosity:
print(f'plotting {channel} channel of station {station}') msg = 'plotting %s channel of station %s' % (channel, station)
time_ax = prep_time_axis(trace.stats.starttime - wfstart, trace) print(msg)
self.plot_trace(ax, trace, time_ax, wf_dict, n, scaleToChannel, noiselevel, scaleddata, nth_sample) stime = trace.stats.starttime - wfstart
self.setPlotDict(n, seed_id) time_ax = prep_time_axis(stime, trace)
return nmax
def plot_trace(self, ax, trace, time_ax, wf_dict, n, scaleToChannel, noiselevel, scaleddata, nth_sample):
if time_ax is not None: if time_ax is not None:
if scaleToChannel: if scaleToChannel:
self.scale_trace(trace, scaleToChannel) st_scale = wfdata.select(channel=scaleToChannel)
if st_scale:
tr = st_scale[0]
trace.detrend('constant')
trace.normalize(np.max(np.abs(tr.data)) * 2)
scaleddata = True scaleddata = True
if not scaleddata: if not scaleddata:
trace.detrend('constant') trace.detrend('constant')
trace.normalize(np.max(np.abs(trace.data)) * 2) trace.normalize(np.max(np.abs(trace.data)) * 2)
offset = wf_dict.get('offset') offset = wf_dict.get('offset')
times = [time for index, time in enumerate(time_ax) if not index % nth_sample] times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
@@ -1454,43 +1464,41 @@ class PylotCanvas(FigureCanvas):
ax.axhline(n, color="0.5", lw=0.5) ax.axhline(n, color="0.5", lw=0.5)
ax.plot(times, data, color=wf_dict.get('linecolor'), **wf_dict.get('plot_kwargs')) ax.plot(times, data, color=wf_dict.get('linecolor'), **wf_dict.get('plot_kwargs'))
if noiselevel is not None: if noiselevel is not None:
self.plot_noise_level(ax, time_ax, noiselevel, channel, n, wf_dict.get('linecolor')) for level in [-noiselevel[channel], noiselevel[channel]]:
ax.plot([time_ax[0], time_ax[-1]],
def scale_trace(self, trace, scaleToChannel): [n + level, n + level],
color=wf_dict.get('linecolor'),
linestyle='dashed')
self.setPlotDict(n, seed_id)
if plot_additional and additional_channel:
compare_stream = wfdata.select(channel=additional_channel)
if compare_stream:
trace = compare_stream[0]
if scaleToChannel:
st_scale = wfdata.select(channel=scaleToChannel) st_scale = wfdata.select(channel=scaleToChannel)
if st_scale: if st_scale:
tr = st_scale[0] tr = st_scale[0]
trace.detrend('constant') trace.detrend('constant')
trace.normalize(np.max(np.abs(tr.data)) * 2) trace.normalize(np.max(np.abs(tr.data)) * 2)
def plot_noise_level(self, ax, time_ax, noiselevel, channel, n, linecolor):
for level in [-noiselevel[channel], noiselevel[channel]]:
ax.plot([time_ax[0], time_ax[-1]], [n + level, n + level], color=linecolor, linestyle='dashed')
def plot_additional_trace(self, ax, wfdata, additional_channel, scaleToChannel, scaleddata, nth_sample, wfstart):
compare_stream = wfdata.select(channel=additional_channel)
if compare_stream:
trace = compare_stream[0]
if scaleToChannel:
self.scale_trace(trace, scaleToChannel)
scaleddata = True scaleddata = True
if not scaleddata: if not scaleddata:
trace.detrend('constant') trace.detrend('constant')
trace.normalize(np.max(np.abs(trace.data)) * 2) trace.normalize(np.max(np.abs(trace.data)) * 2)
time_ax = prep_time_axis(trace.stats.starttime - wfstart, trace) time_ax = prep_time_axis(stime, trace)
self.plot_additional_data(ax, trace, time_ax, nth_sample)
def plot_additional_data(self, ax, trace, time_ax, nth_sample):
times = [time for index, time in enumerate(time_ax) if not index % nth_sample] times = [time for index, time in enumerate(time_ax) if not index % nth_sample]
p_data = trace.data p_data = compare_stream[0].data
# #normalize
# p_max = max(abs(p_data))
# p_data /= p_max
for index in range(3): for index in range(3):
ax.plot(times, p_data, color='red', alpha=0.5, linewidth=0.7) ax.plot(times, p_data, color='red', alpha=0.5, linewidth=0.7)
p_data += 1 p_data += 1
def finalize_plot(self, ax, wfstart, wfend, nmax, zoomx, zoomy, iniPick, title, snr):
if iniPick: if iniPick:
ax.vlines(iniPick, ax.get_ylim()[0], ax.get_ylim()[1], colors='m', linestyles='dashed', linewidth=2) ax.vlines(iniPick, ax.get_ylim()[0], ax.get_ylim()[1],
xlabel = f'seconds since {wfstart}' colors='m', linestyles='dashed',
linewidth=2)
xlabel = 'seconds since {0}'.format(wfstart)
ylabel = '' ylabel = ''
self.updateWidget(xlabel, ylabel, title) self.updateWidget(xlabel, ylabel, title)
self.setXLims(ax, [0, wfend - wfstart]) self.setXLims(ax, [0, wfend - wfstart])
@@ -1500,13 +1508,14 @@ class PylotCanvas(FigureCanvas):
if zoomy is not None: if zoomy is not None:
self.setYLims(ax, zoomy) self.setYLims(ax, zoomy)
if snr is not None: if snr is not None:
self.plot_snr_warning(ax, snr)
self.draw()
def plot_snr_warning(self, ax, snr):
if snr < 2: if snr < 2:
warning = 'LOW SNR' if snr >= 1.5 else 'VERY LOW SNR' warning = 'LOW SNR'
ax.text(0.1, 0.9, f'WARNING - {warning}', ha='center', va='center', transform=ax.transAxes, color='red') if snr < 1.5:
warning = 'VERY LOW SNR'
ax.text(0.1, 0.9, 'WARNING - {}'.format(warning), ha='center', va='center', transform=ax.transAxes,
color='red')
self.draw()
@staticmethod @staticmethod
def getXLims(ax): def getXLims(ax):
@@ -1912,7 +1921,7 @@ class PickDlg(QDialog):
# set attribute holding data # set attribute holding data
if data is None or not data: if data is None or not data:
try: try:
data = parent.get_data().get_wf_data().copy() data = parent.get_data().getWFData().copy()
self.data = data.select(station=station) self.data = data.select(station=station)
except AttributeError as e: except AttributeError as e:
errmsg = 'You either have to put in a data or an appropriate ' \ errmsg = 'You either have to put in a data or an appropriate ' \
@@ -1946,7 +1955,7 @@ class PickDlg(QDialog):
self.cur_xlim = None self.cur_xlim = None
self.cur_ylim = None self.cur_ylim = None
self.stime, self.etime = full_range(self.get_wf_data()) self.stime, self.etime = full_range(self.getWFData())
# initialize plotting widget # initialize plotting widget
self.multicompfig = PylotCanvas(parent=self, multicursor=True) self.multicompfig = PylotCanvas(parent=self, multicursor=True)
@@ -1960,7 +1969,7 @@ class PickDlg(QDialog):
self.referenceChannel.addItem('-', None) self.referenceChannel.addItem('-', None)
self.scaleChannel.addItem('individual', None) self.scaleChannel.addItem('individual', None)
for trace in self.get_wf_data(): for trace in self.getWFData():
channel = trace.stats.channel channel = trace.stats.channel
self.referenceChannel.addItem(channel, trace) self.referenceChannel.addItem(channel, trace)
if not channel[-1] in ['Z', 'N', 'E', '1', '2', '3']: if not channel[-1] in ['Z', 'N', 'E', '1', '2', '3']:
@@ -1979,7 +1988,7 @@ class PickDlg(QDialog):
if self.wftype is not None: if self.wftype is not None:
title += ' | ({})'.format(self.wftype) title += ' | ({})'.format(self.wftype)
self.multicompfig.plotWFData(wfdata=self.get_wf_data(), wfdata_compare=self.get_wf_dataComp(), self.multicompfig.plotWFData(wfdata=self.getWFData(), wfdata_compare=self.getWFDataComp(),
title=title) title=title)
self.multicompfig.setZoomBorders2content() self.multicompfig.setZoomBorders2content()
@@ -2514,7 +2523,7 @@ class PickDlg(QDialog):
if self.autoFilterAction.isChecked(): if self.autoFilterAction.isChecked():
for filteraction in [self.filterActionP, self.filterActionS]: for filteraction in [self.filterActionP, self.filterActionS]:
filteraction.setChecked(False) filteraction.setChecked(False)
self.filter_wf_data() self.filterWFData()
self.draw() self.draw()
else: else:
self.draw() self.draw()
@@ -2598,10 +2607,10 @@ class PickDlg(QDialog):
def getGlobalLimits(self, ax, axis): def getGlobalLimits(self, ax, axis):
return self.multicompfig.getGlobalLimits(ax, axis) return self.multicompfig.getGlobalLimits(ax, axis)
def get_wf_data(self): def getWFData(self):
return self.data return self.data
def get_wf_dataComp(self): def getWFDataComp(self):
if self.showCompData: if self.showCompData:
return self.data_compare return self.data_compare
else: else:
@@ -2616,17 +2625,17 @@ class PickDlg(QDialog):
return tr return tr
if component == 'E' or component == 'N': if component == 'E' or component == 'N':
for trace in self.get_wf_data(): for trace in self.getWFData():
trace = selectTrace(trace, 'NE') trace = selectTrace(trace, 'NE')
if trace: if trace:
wfdata.append(trace) wfdata.append(trace)
elif component == '1' or component == '2': elif component == '1' or component == '2':
for trace in self.get_wf_data(): for trace in self.getWFData():
trace = selectTrace(trace, '12') trace = selectTrace(trace, '12')
if trace: if trace:
wfdata.append(trace) wfdata.append(trace)
else: else:
wfdata = self.get_wf_data().select(component=component) wfdata = self.getWFData().select(component=component)
return wfdata return wfdata
def getPicks(self, picktype='manual'): def getPicks(self, picktype='manual'):
@@ -2729,8 +2738,8 @@ class PickDlg(QDialog):
stime = self.getStartTime() stime = self.getStartTime()
# copy wfdata for plotting # copy wfdata for plotting
wfdata = self.get_wf_data().copy() wfdata = self.getWFData().copy()
wfdata_comp = self.get_wf_dataComp().copy() wfdata_comp = self.getWFDataComp().copy()
wfdata = self.getPickPhases(wfdata, phase) wfdata = self.getPickPhases(wfdata, phase)
wfdata_comp = self.getPickPhases(wfdata_comp, phase) wfdata_comp = self.getPickPhases(wfdata_comp, phase)
for wfd in [wfdata, wfdata_comp]: for wfd in [wfdata, wfdata_comp]:
@@ -2839,7 +2848,7 @@ class PickDlg(QDialog):
filteroptions = None filteroptions = None
# copy and filter data for earliest and latest possible picks # copy and filter data for earliest and latest possible picks
wfdata = self.get_wf_data().copy().select(channel=channel) wfdata = self.getWFData().copy().select(channel=channel)
if filteroptions: if filteroptions:
try: try:
wfdata.detrend('linear') wfdata.detrend('linear')
@@ -2886,7 +2895,7 @@ class PickDlg(QDialog):
minFMSNR = parameter.get('minFMSNR') minFMSNR = parameter.get('minFMSNR')
quality = get_quality_class(spe, parameter.get('timeerrorsP')) quality = get_quality_class(spe, parameter.get('timeerrorsP'))
if quality <= minFMweight and snr >= minFMSNR: if quality <= minFMweight and snr >= minFMSNR:
FM = fmpicker(self.get_wf_data().select(channel=channel).copy(), wfdata.copy(), parameter.get('fmpickwin'), FM = fmpicker(self.getWFData().select(channel=channel).copy(), wfdata.copy(), parameter.get('fmpickwin'),
pick - stime_diff) pick - stime_diff)
# save pick times for actual phase # save pick times for actual phase
@@ -3178,7 +3187,7 @@ class PickDlg(QDialog):
def togglePickBlocker(self): def togglePickBlocker(self):
return not self.pick_block return not self.pick_block
def filter_wf_data(self, phase=None): def filterWFData(self, phase=None):
if not phase: if not phase:
phase = self.currentPhase phase = self.currentPhase
if self.getPhaseID(phase) == 'P': if self.getPhaseID(phase) == 'P':
@@ -3196,8 +3205,8 @@ class PickDlg(QDialog):
self.cur_xlim = self.multicompfig.axes[0].get_xlim() self.cur_xlim = self.multicompfig.axes[0].get_xlim()
self.cur_ylim = self.multicompfig.axes[0].get_ylim() self.cur_ylim = self.multicompfig.axes[0].get_ylim()
# self.multicompfig.updateCurrentLimits() # self.multicompfig.updateCurrentLimits()
wfdata = self.get_wf_data().copy() wfdata = self.getWFData().copy()
wfdata_comp = self.get_wf_dataComp().copy() wfdata_comp = self.getWFDataComp().copy()
title = self.getStation() title = self.getStation()
if filter: if filter:
filtoptions = None filtoptions = None
@@ -3234,14 +3243,14 @@ class PickDlg(QDialog):
def filterP(self): def filterP(self):
self.filterActionS.setChecked(False) self.filterActionS.setChecked(False)
if self.filterActionP.isChecked(): if self.filterActionP.isChecked():
self.filter_wf_data('P') self.filterWFData('P')
else: else:
self.refreshPlot() self.refreshPlot()
def filterS(self): def filterS(self):
self.filterActionP.setChecked(False) self.filterActionP.setChecked(False)
if self.filterActionS.isChecked(): if self.filterActionS.isChecked():
self.filter_wf_data('S') self.filterWFData('S')
else: else:
self.refreshPlot() self.refreshPlot()
@@ -3300,7 +3309,7 @@ class PickDlg(QDialog):
if self.autoFilterAction.isChecked(): if self.autoFilterAction.isChecked():
self.filterActionP.setChecked(False) self.filterActionP.setChecked(False)
self.filterActionS.setChecked(False) self.filterActionS.setChecked(False)
# data = self.get_wf_data().copy() # data = self.getWFData().copy()
# title = self.getStation() # title = self.getStation()
filter = False filter = False
phase = None phase = None
@@ -3800,8 +3809,8 @@ class TuneAutopicker(QWidget):
fnames = self.station_ids[self.get_current_station_id()] fnames = self.station_ids[self.get_current_station_id()]
if not fnames == self.fnames: if not fnames == self.fnames:
self.fnames = fnames self.fnames = fnames
self.data.set_wf_data(fnames) self.data.setWFData(fnames)
wfdat = self.data.get_wf_data() # all available streams wfdat = self.data.getWFData() # all available streams
# remove possible underscores in station names # remove possible underscores in station names
# wfdat = remove_underscores(wfdat) # wfdat = remove_underscores(wfdat)
# rotate misaligned stations to ZNE # rotate misaligned stations to ZNE
@@ -3906,8 +3915,8 @@ class TuneAutopicker(QWidget):
network = None network = None
location = None location = None
wfdata = self.data.get_wf_data() wfdata = self.data.getWFData()
wfdata_comp = self.data.get_wf_dataComp() wfdata_comp = self.data.getWFDataComp()
metadata = self.parent().metadata metadata = self.parent().metadata
event = self.get_current_event() event = self.get_current_event()
filteroptions = self.parent().filteroptions filteroptions = self.parent().filteroptions
@@ -3962,7 +3971,7 @@ class TuneAutopicker(QWidget):
for plotitem in self._manual_pick_plots: for plotitem in self._manual_pick_plots:
self.clear_plotitem(plotitem) self.clear_plotitem(plotitem)
self._manual_pick_plots = [] self._manual_pick_plots = []
st = self.data.get_wf_data() st = self.data.getWFData()
tr = st.select(station=self.get_current_station())[0] tr = st.select(station=self.get_current_station())[0]
starttime = tr.stats.starttime starttime = tr.stats.starttime
# create two lists with figure names and subindices (for subplots) to get the correct axes # create two lists with figure names and subindices (for subplots) to get the correct axes

View File

@@ -1,9 +1,12 @@
# This file may be used to create an environment using: # This file may be used to create an environment using:
# $ conda create --name <env> --file <this file> # $ conda create --name <env> --file <this file>
# platform: win-64 # platform: win-64
cartopy>=0.20.2 cartopy=0.20.2
numpy<2 matplotlib-base=3.3.4
obspy>=1.3.0 numpy=1.22.3
pyqtgraph>=0.12.4 obspy=1.3.0
pyside2>=5.13.2 pyqtgraph=0.12.4
scipy>=1.8.0 pyside2=5.13.2
python=3.8.12
qt=5.12.9
scipy=1.8.0