Compare commits: improve-ut ... e5c7404bb6 (6 commits)

| Author | SHA1 | Date |
|---|---|---|
| | e5c7404bb6 | |
| | 1a148a8a72 | |
| | 8623cc3dd3 | |
| | cb457fc7ec | |
| | eb077e4bd6 | |
| | 2b01e8207e | |
PyLoT.py (5 changed lines)
@@ -1553,11 +1553,6 @@ class MainWindow(QMainWindow):
         fname = str(action.data().toString())
         return fname

-    def getEventFileName(self, type='manual'):
-        if self.get_fnames(type) is None:
-            self.set_fname(self.get_data().getEventFileName(), type)
-        return self.get_fnames(type)
-
     def saveData(self, event=None, directory=None, outformats=['.xml', '.cnv', '.obs', '_focmec.in', '.pha']):
         '''
         Save event data to directory with specified output formats.
@@ -4,13 +4,16 @@
 import copy
 import logging
 import os
+import fnmatch
+from dataclasses import dataclass, field
+from typing import List

 from PySide2.QtWidgets import QMessageBox
-from obspy import read_events
-from obspy.core import read, Stream, UTCDateTime
+from obspy import read, read_events, Stream, Catalog, UTCDateTime
 from obspy.core.event import Event as ObsPyEvent
 from obspy.io.sac import SacIOError


 import pylot.core.loc.focmec as focmec
 import pylot.core.loc.hypodd as hypodd
 import pylot.core.loc.velest as velest
@@ -139,29 +142,6 @@ class Data(object):
     def setNew(self):
         self._new = True

-    def getCutTimes(self):
-        """
-        Returns earliest start and latest end of all waveform data
-        :return: minimum start time and maximum end time as a tuple
-        :rtype: (UTCDateTime, UTCDateTime)
-        """
-        if self.cuttimes is None:
-            self.updateCutTimes()
-        return self.cuttimes
-
-    def updateCutTimes(self):
-        """
-        Update cuttimes to contain earliest start and latest end time
-        of all waveform data
-        :rtype: None
-        """
-        self.cuttimes = full_range(self.getWFData())
-
-    def getEventFileName(self):
-        ID = self.getID()
-        # handle forbidden filenames especially on windows systems
-        return fnConstructor(str(ID))
-
     def checkEvent(self, event, fcheck, forceOverwrite=False):
         """
         Check information in supplied event and own event and replace with own
@@ -252,184 +232,6 @@ class Data(object):
                 if picktype in str(pick.method_id.id):
                     picks.append(pick)

-    def exportEvent(self, fnout, fnext='.xml', fcheck='auto', upperErrors=None):
-        """
-        Export event to file
-        :param fnout: basename of file
-        :param fnext: file extensions xml, cnv, obs, focmec, or/and pha
-        :param fcheck: check and delete existing information
-        can be a str or a list of strings of ['manual', 'auto', 'origin', 'magnitude']
-        """
-        from pylot.core.util.defaults import OUTPUTFORMATS
-        if not type(fcheck) == list:
-            fcheck = [fcheck]
-
-        try:
-            evtformat = OUTPUTFORMATS[fnext]
-        except KeyError as e:
-            errmsg = '{0}; selected file extension {1} not ' \
-                     'supported'.format(e, fnext)
-            raise FormatError(errmsg)
-
-        if hasattr(self.get_evt_data(), 'notes'):
-            try:
-                with open(os.path.join(os.path.dirname(fnout), 'notes.txt'), 'w') as notes_file:
-                    notes_file.write(self.get_evt_data().notes)
-            except Exception as e:
-                print('Warning: Could not save notes.txt: ', str(e))
-
-        # check for already existing xml-file
-        if fnext == '.xml':
-            if os.path.isfile(fnout + fnext):
-                print("xml-file already exists! Check content ...")
-                cat = read_events(fnout + fnext)
-                if len(cat) > 1:
-                    raise IOError('Ambigious event information in file {}'.format(fnout + fnext))
-                if len(cat) < 1:
-                    raise IOError('No event information in file {}'.format(fnout + fnext))
-                event = cat[0]
-                if not event.resource_id == self.get_evt_data().resource_id:
-                    QMessageBox.warning(self, 'Warning', 'Different resource IDs!')
-                    return
-                self.checkEvent(event, fcheck)
-                self.setEvtData(event)
-
-            self.get_evt_data().write(fnout + fnext, format=evtformat)
-
-        # try exporting event
-        else:
-            evtdata_org = self.get_evt_data()
-            picks = evtdata_org.picks
-            eventpath = evtdata_org.path
-            picks_copy = copy.deepcopy(picks)
-            evtdata_copy = Event(eventpath)
-            evtdata_copy.picks = picks_copy
-
-            # check for stations picked automatically as well as manually
-            # Prefer manual picks!
-            for i in range(len(picks)):
-                if picks[i].method_id == 'manual':
-                    mstation = picks[i].waveform_id.station_code
-                    mstation_ext = mstation + '_'
-                    for k in range(len(picks_copy)):
-                        if ((picks_copy[k].waveform_id.station_code == mstation) or
-                            (picks_copy[k].waveform_id.station_code == mstation_ext)) and \
-                                (picks_copy[k].method_id == 'auto'):
-                            del picks_copy[k]
-                            break
-            lendiff = len(picks) - len(picks_copy)
-            if lendiff != 0:
-                print("Manual as well as automatic picks available. Prefered the {} manual ones!".format(lendiff))
-
-
-            no_uncertainties_p = []
-            no_uncertainties_s = []
-            if upperErrors:
-                # check for pick uncertainties exceeding adjusted upper errors
-                # Picks with larger uncertainties will not be saved in output file!
-                for j in range(len(picks)):
-                    for i in range(len(picks_copy)):
-                        if picks_copy[i].phase_hint[0] == 'P':
-                            # Skipping pick if no upper_uncertainty is found and warning user
-                            if picks_copy[i].time_errors['upper_uncertainty'] is None:
-                                #print("{1} P-Pick of station {0} does not have upper_uncertainty and cant be checked".format(
-                                #    picks_copy[i].waveform_id.station_code,
-                                #    picks_copy[i].method_id))
-                                if not picks_copy[i].waveform_id.station_code in no_uncertainties_p:
-                                    no_uncertainties_p.append(picks_copy[i].waveform_id.station_code)
-                                continue
-
-                            #print ("checking for upper_uncertainty")
-                            if (picks_copy[i].time_errors['uncertainty'] is None) or \
-                                    (picks_copy[i].time_errors['upper_uncertainty'] >= upperErrors[0]):
-                                print("Uncertainty exceeds or equal adjusted upper time error!")
-                                print("Adjusted uncertainty: {}".format(upperErrors[0]))
-                                print("Pick uncertainty: {}".format(picks_copy[i].time_errors['uncertainty']))
-                                print("{1} P-Pick of station {0} will not be saved in outputfile".format(
-                                    picks_copy[i].waveform_id.station_code,
-                                    picks_copy[i].method_id))
-                                del picks_copy[i]
-                                break
-                        if picks_copy[i].phase_hint[0] == 'S':
-
-                            # Skipping pick if no upper_uncertainty is found and warning user
-                            if picks_copy[i].time_errors['upper_uncertainty'] is None:
-                                #print("{1} S-Pick of station {0} does not have upper_uncertainty and cant be checked".format(
-                                #    picks_copy[i].waveform_id.station_code,
-                                #    picks_copy[i].method_id))
-                                if not picks_copy[i].waveform_id.station_code in no_uncertainties_s:
-                                    no_uncertainties_s.append(picks_copy[i].waveform_id.station_code)
-                                continue
-
-
-                            if (picks_copy[i].time_errors['uncertainty'] is None) or \
-                                    (picks_copy[i].time_errors['upper_uncertainty'] >= upperErrors[1]):
-                                print("Uncertainty exceeds or equal adjusted upper time error!")
-                                print("Adjusted uncertainty: {}".format(upperErrors[1]))
-                                print("Pick uncertainty: {}".format(picks_copy[i].time_errors['uncertainty']))
-                                print("{1} S-Pick of station {0} will not be saved in outputfile".format(
-                                    picks_copy[i].waveform_id.station_code,
-                                    picks_copy[i].method_id))
-                                del picks_copy[i]
-                                break
-                for s in no_uncertainties_p:
-                    print("P-Pick of station {0} does not have upper_uncertainty and cant be checked".format(s))
-                for s in no_uncertainties_s:
-                    print("S-Pick of station {0} does not have upper_uncertainty and cant be checked".format(s))
-
-            if fnext == '.obs':
-                try:
-                    evtdata_copy.write(fnout + fnext, format=evtformat)
-                    # write header afterwards
-                    evid = str(evtdata_org.resource_id).split('/')[1]
-                    header = '# EQEVENT: Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' % evid
-                    nllocfile = open(fnout + fnext)
-                    l = nllocfile.readlines()
-                    # Adding A0/Generic Amplitude to .obs file
-                    # l2 = []
-                    # for li in l:
-                    #     for amp in evtdata_org.amplitudes:
-                    #         if amp.waveform_id.station_code == li[0:5].strip():
-                    #             li = li[0:64] + '{:0.2e}'.format(amp.generic_amplitude) + li[73:-1] + '\n'
-                    #     l2.append(li)
-                    # l = l2
-                    nllocfile.close()
-                    l.insert(0, header)
-                    nllocfile = open(fnout + fnext, 'w')
-                    nllocfile.write("".join(l))
-                    nllocfile.close()
-                except KeyError as e:
-                    raise KeyError('''{0} export format
-                    not implemented: {1}'''.format(evtformat, e))
-            if fnext == '.cnv':
-                try:
-                    velest.export(picks_copy, fnout + fnext, eventinfo=self.get_evt_data())
-                except KeyError as e:
-                    raise KeyError('''{0} export format
-                    not implemented: {1}'''.format(evtformat, e))
-            if fnext == '_focmec.in':
-                try:
-                    parameter = PylotParameter()
-                    logging.warning('Using default input parameter')
-                    focmec.export(picks_copy, fnout + fnext, parameter, eventinfo=self.get_evt_data())
-                except KeyError as e:
-                    raise KeyError('''{0} export format
-                    not implemented: {1}'''.format(evtformat, e))
-            if fnext == '.pha':
-                try:
-                    parameter = PylotParameter()
-                    logging.warning('Using default input parameter')
-                    hypodd.export(picks_copy, fnout + fnext, parameter, eventinfo=self.get_evt_data())
-                except KeyError as e:
-                    raise KeyError('''{0} export format
-                    not implemented: {1}'''.format(evtformat, e))
-
-    def getComp(self):
-        """
-        Get component (ZNE)
-        """
-        return self.comp
-
     def getID(self):
         """
         Get unique resource id
@@ -474,21 +276,6 @@ class Data(object):
         fnames = check_fname_exists(fnames)
         fnames_alt = check_fname_exists(fnames_alt)

-        # if obspy_dmt:
-        #     wfdir = 'raw'
-        #     self.processed = False
-        #     for fname in fnames:
-        #         if fname.endswith('processed'):
-        #             wfdir = 'processed'
-        #             self.processed = True
-        #             break
-        #     for fpath in fnames:
-        #         if fpath.endswith(wfdir):
-        #             wffnames = [os.path.join(fpath, fname) for fname in os.listdir(fpath)]
-        #         if 'syngine' in fpath.split('/')[-1]:
-        #             wffnames_syn = [os.path.join(fpath, fname) for fname in os.listdir(fpath)]
-        # else:
-        #     wffnames = fnames
         if fnames is not None:
             self.appendWFData(fnames)
         if fnames_alt is not None:
@@ -496,9 +283,6 @@ class Data(object):
         else:
             return False

-        # various pre-processing steps:
-        # remove possible underscores in station names
-        # self.wfdata = remove_underscores(self.wfdata)
         # check for gaps and merge
         self.wfdata, _ = check_for_gaps_and_merge(self.wfdata)
         # check for nans
@@ -620,11 +404,6 @@ class Data(object):
                 picks = picks_from_picksdict(picks)
                 break
         self.get_evt_data().picks = picks
-        # if 'smi:local' in self.getID() and firstonset:
-        #     fonset_str = firstonset.strftime('%Y_%m_%d_%H_%M_%S')
-        #     ID = ResourceIdentifier('event/' + fonset_str)
-        #     ID.convertIDToQuakeMLURI(authority_id=authority_id)
-        #     self.get_evt_data().resource_id = ID

         def applyEvent(event):
             """
@@ -656,6 +435,171 @@ class Data(object):
                 applydata[typ](data)
         self._new = False

+
+@dataclass
+class SeismicEventData:
+    event_id: str = ""
+    catalog: Catalog = field(default_factory=Catalog)
+
+    def find_event_files(self, directory: str, extensions: List[str]) -> List[str]:
+        """
+        Browse the directory to find event files with specified extensions.
+
+        Parameters:
+        directory (str): The directory path to search for event files.
+        extensions (List[str]): List of file extensions to search for.
+
+        Returns:
+        List[str]: List of file paths that match the given extensions.
+
+        Example:
+        >>> sed = SeismicEventData()
+        >>> sed.find_event_files('test_directory', ['.xml', '.quakeml'])  # doctest: +SKIP
+        ['test_directory/event1.xml', 'test_directory/event2.quakeml']
+        """
+        matches = []
+        for root, _, files in os.walk(directory):
+            for ext in extensions:
+                for filename in fnmatch.filter(files, f'*{ext}'):
+                    matches.append(os.path.join(root, filename))
+        return matches
+
+    def read_event_from_directory(self, directory: str, extensions: List[str], format: str) -> None:
+        """
+        Read a seismic event from the first found file in the directory with specified format.
+
+        Parameters:
+        directory (str): The directory path to search for event files.
+        extensions (List[str]): List of file extensions to search for.
+        format (str): The format to read the event file.
+
+        Example:
+        >>> sed = SeismicEventData()
+        >>> sed.read_event_from_directory('test_directory', ['.xml', '.quakeml'], 'QUAKEML')  # doctest: +SKIP
+        """
+        event_files = self.find_event_files(directory, extensions)
+        if event_files:
+            self.read_event(event_files[0], format)
+        else:
+            raise FileNotFoundError(f"No event files found in directory {directory} with extensions {extensions}.")
+
+    def read_event(self, file_path: str, format: str) -> None:
+        """
+        Read a seismic event from a file with specified format.
+
+        Parameters:
+        file_path (str): The path to the event file.
+        format (str): The format to read the event file.
+
+        Example:
+        >>> sed = SeismicEventData()
+        >>> sed.read_event('test_directory/event1.xml', 'QUAKEML')  # doctest: +SKIP
+        """
+        if os.path.exists(file_path):
+            self.catalog = read_events(file_path, format=format)
+            self.event_id = self.catalog[0].resource_id.id.split('/')[-1] if self.catalog else ""
+        else:
+            raise FileNotFoundError(f"File {file_path} does not exist.")
+
+    def write_event(self, file_path: str, format: str) -> None:
+        """
+        Write the seismic event to a file with specified format.
+
+        Parameters:
+        file_path (str): The path to the output file.
+        format (str): The format to write the event file.
+
+        Example:
+        >>> sed = SeismicEventData(event_id='12345')
+        >>> sed.write_event('output_directory/event1.xml', 'QUAKEML')  # doctest: +SKIP
+        """
+        self.catalog.write(file_path, format=format)
+
+
+@dataclass
+class WaveformData:
+    stream: Stream = field(default_factory=Stream)
+
+    def find_waveform_files(self, directory: str, extensions: List[str]) -> List[str]:
+        """
+        Browse the directory to find waveform files with specified extensions.
+
+        Parameters:
+        directory (str): The directory path to search for waveform files.
+        extensions (List[str]): List of file extensions to search for.
+
+        Returns:
+        List[str]: List of file paths that match the given extensions.
+
+        Example:
+        >>> wd = WaveformData()
+        >>> wd.find_waveform_files('test_directory', ['.mseed'])  # doctest: +SKIP
+        ['test_directory/waveform1.mseed']
+        """
+        matches = []
+        for root, _, files in os.walk(directory):
+            for ext in extensions:
+                for filename in fnmatch.filter(files, f'*{ext}'):
+                    matches.append(os.path.join(root, filename))
+        return matches
+
+    def read_waveform_from_directory(self, directory: str, extensions: List[str], format: str) -> None:
+        """
+        Read waveform data from the first found file in the directory with specified format.
+
+        Parameters:
+        directory (str): The directory path to search for waveform files.
+        extensions (List[str]): List of file extensions to search for.
+        format (str): The format to read the waveform file.
+
+        Example:
+        >>> wd = WaveformData()
+        >>> wd.read_waveform_from_directory('test_directory', ['.mseed'], 'MSEED')  # doctest: +SKIP
+        """
+        waveform_files = self.find_waveform_files(directory, extensions)
+        if waveform_files:
+            self.read_waveform(waveform_files[0], format)
+        else:
+            raise FileNotFoundError(f"No waveform files found in directory {directory} with extensions {extensions}.")
+
+    def read_waveform(self, file_path: str, format: str) -> None:
+        """
+        Read waveform data from a file with specified format.
+
+        Parameters:
+        file_path (str): The path to the waveform file.
+        format (str): The format to read the waveform file.
+
+        Example:
+        >>> wd = WaveformData()
+        >>> wd.read_waveform('test_directory/waveform1.mseed', 'MSEED')  # doctest: +SKIP
+        """
+        if os.path.exists(file_path):
+            self.stream = read(file_path, format=format)
+        else:
+            raise FileNotFoundError(f"File {file_path} does not exist.")
+
+    def write_waveform(self, file_path: str, format: str) -> None:
+        """
+        Write the waveform data to a file with specified format.
+
+        Parameters:
+        file_path (str): The path to the output file.
+        format (str): The format to write the waveform file.
+
+        Example:
+        >>> wd = WaveformData()
+        >>> wd.write_waveform('output_directory/waveform1.mseed', 'MSEED')  # doctest: +SKIP
+        """
+        self.stream.write(file_path, format=format)
+
+
+# Example usage:
+# seismic_event = SeismicEventData()
+# seismic_event.read_event_from_directory("path_to_directory", extensions=[".xml", ".quakeml"], format="QUAKEML")
+# seismic_event.write_event("output_event_file.xml", format="QUAKEML")
+
+# waveform_data = WaveformData()
+# waveform_data.read_waveform_from_directory("path_to_directory", extensions=[".mseed"], format="MSEED")
+# waveform_data.write_waveform("output_waveform_file.mseed", format="MSEED")
+
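The hunk above adds two module-level dataclasses that wrap ObsPy's read_events/read for event and waveform I/O. A minimal usage sketch follows; the import path, directory names, and file names are illustrative assumptions, not taken from the diff:

```python
# Illustrative only: module path and directory/file names are assumptions.
from pylot.core.io.data import SeismicEventData, WaveformData

event_data = SeismicEventData()
event_data.read_event_from_directory("events/e0001.015.21", [".xml", ".quakeml"], format="QUAKEML")

waveform_data = WaveformData()
waveform_data.read_waveform_from_directory("events/e0001.015.21", [".mseed"], format="MSEED")

# Re-export both under new file names.
event_data.write_event("export/event.xml", format="QUAKEML")
waveform_data.write_waveform("export/waveforms.mseed", format="MSEED")
```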
 class GenericDataStructure(object):
     """
@@ -839,22 +783,6 @@ class PilotDataStructure(GenericDataStructure):
         self.setExpandFields(['root', 'database'])


-class ObspyDMTdataStructure(GenericDataStructure):
-    """
-    Object containing the data access information for the old PILOT data
-    structure.
-    """
-
-    def __init__(self, **fields):
-        if not fields:
-            fields = {'database': '',
-                      'root': ''}
-
-        GenericDataStructure.__init__(self, **fields)
-
-        self.setExpandFields(['root', 'database'])
-
-
 class SeiscompDataStructure(GenericDataStructure):
     """
     Dictionary containing the data access information for an SDS data archive:
@@ -21,25 +21,6 @@ from pylot.core.util.utils import get_owner, full_range, four_digits, transformF
     backtransformFilterString, loopIdentifyPhase, identifyPhase


-def add_amplitudes(event, amplitudes):
-    amplitude_list = []
-    for pick in event.picks:
-        try:
-            a0 = amplitudes[pick.waveform_id.station_code]
-            amplitude = ope.Amplitude(generic_amplitude=a0 * 1e-3)
-            amplitude.unit = 'm'
-            amplitude.category = 'point'
-            amplitude.waveform_id = pick.waveform_id
-            amplitude.magnitude_hint = 'ML'
-            amplitude.pick_id = pick.resource_id
-            amplitude.type = 'AML'
-            amplitude_list.append(amplitude)
-        except KeyError:
-            continue
-    event.amplitudes = amplitude_list
-    return event
-
-
 def readPILOTEvent(phasfn=None, locfn=None, authority_id='RUB', **kwargs):
     """
     readPILOTEvent - function
@@ -193,31 +174,6 @@ def convert_pilot_times(time_array):
     return UTCDateTime(*times)


-def picksdict_from_obs(fn):
-    """
-    create pick dictionary from obs file
-    :param fn: filename
-    :type fn:
-    :return:
-    :rtype:
-    """
-    picks = dict()
-    station_name = str()
-    for line in open(fn, 'r'):
-        if line.startswith('#'):
-            continue
-        else:
-            phase_line = line.split()
-            if not station_name == phase_line[0]:
-                phase = dict()
-            station_name = phase_line[0]
-            phase_name = phase_line[4].upper()
-            pick = UTCDateTime(phase_line[6] + phase_line[7] + phase_line[8])
-            phase[phase_name] = dict(mpp=pick, fm=phase_line[5])
-            picks[station_name] = phase
-    return picks
-
-
 def picksdict_from_picks(evt, parameter=None):
     """
     Takes an Event object and return the pick dictionary commonly used within
@@ -373,636 +329,228 @@ def picks_from_picksdict(picks, creation_info=None):
     return picks_list


-def reassess_pilot_db(root_dir, db_dir, out_dir=None, fn_param=None, verbosity=0):
-    # TODO: change root to datapath
-    db_root = os.path.join(root_dir, db_dir)
-    evt_list = glob.glob1(db_root, 'e????.???.??')
-
-    for evt in evt_list:
-        if verbosity > 0:
-            print('Reassessing event {0}'.format(evt))
-        reassess_pilot_event(root_dir, db_dir, evt, out_dir, fn_param, verbosity)
-
-
-def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None, verbosity=0):
-    from obspy import read
-
-    from pylot.core.io.inputs import PylotParameter
-    from pylot.core.pick.utils import earllatepicker
-    # TODO: change root to datapath
-
-    default = PylotParameter(fn_param, verbosity)
-
-    search_base = os.path.join(root_dir, db_dir, event_id)
-    phases_file = glob.glob(os.path.join(search_base, 'PHASES.mat'))
-    if not phases_file:
-        return
-    if verbosity > 1:
-        print('Opening PILOT phases file: {fn}'.format(fn=phases_file[0]))
-    picks_dict = picksdict_from_pilot(phases_file[0])
-    if verbosity > 0:
-        print('Dictionary read from PHASES.mat:\n{0}'.format(picks_dict))
-    datacheck = list()
-    info = None
-    for station in picks_dict.keys():
-        fn_pattern = os.path.join(search_base, '{0}*'.format(station))
-        try:
-            st = read(fn_pattern)
-        except TypeError as e:
-            if 'Unknown format for file' in e.message:
-                try:
-                    st = read(fn_pattern, format='GSE2')
-                except ValueError as e:
-                    if e.message == 'second must be in 0..59':
-                        info = 'A known Error was raised. Please find the list of corrupted files and double-check these files.'
-                        datacheck.append(fn_pattern + ' (time info)\n')
-                        continue
-                    else:
-                        raise ValueError(e.message)
-                except Exception as e:
-                    if 'No file matching file pattern:' in e.message:
-                        if verbosity > 0:
-                            warnings.warn('no waveform data found for station {station}'.format(station=station),
-                                          RuntimeWarning)
-                        datacheck.append(fn_pattern + ' (no data)\n')
-                        continue
-                    else:
-                        raise e
-            else:
-                raise e
-        for phase in picks_dict[station].keys():
-            try:
-                mpp = picks_dict[station][phase]['mpp']
-            except KeyError as e:
-                print(e.message, station)
-                continue
-            sel_st = select_for_phase(st, phase)
-            if not sel_st:
-                msg = 'no waveform data found for station {station}'.format(station=station)
-                warnings.warn(msg, RuntimeWarning)
-                continue
-            stime, etime = full_range(sel_st)
-            rel_pick = mpp - stime
-            epp, lpp, spe = earllatepicker(sel_st,
-                                           default.get('nfac{0}'.format(phase)),
-                                           default.get('tsnrz' if phase == 'P' else 'tsnrh'),
-                                           Pick1=rel_pick,
-                                           iplot=0,
-                                           verbosity=0)
-            if epp is None or lpp is None:
-                continue
-            epp = stime + epp
-            lpp = stime + lpp
-            min_diff = 3 * st[0].stats.delta
-            if lpp - mpp < min_diff:
-                lpp = mpp + min_diff
-            if mpp - epp < min_diff:
-                epp = mpp - min_diff
-            picks_dict[station][phase] = dict(epp=epp, mpp=mpp, lpp=lpp, spe=spe)
-    if datacheck:
-        if info:
-            if verbosity > 0:
-                print(info + ': {0}'.format(search_base))
-        fncheck = open(os.path.join(search_base, 'datacheck_list'), 'w')
-        fncheck.writelines(datacheck)
-        fncheck.close()
-        del datacheck
-    # create Event object for export
-    evt = ope.Event(resource_id=event_id)
-    evt.picks = picks_from_picksdict(picks_dict)
-    # write phase information to file
-    if not out_dir:
-        fnout_prefix = os.path.join(root_dir, db_dir, event_id, 'PyLoT_{0}.'.format(event_id))
-    else:
-        out_dir = os.path.join(out_dir, db_dir)
-        if not os.path.isdir(out_dir):
-            os.makedirs(out_dir)
-        fnout_prefix = os.path.join(out_dir, 'PyLoT_{0}.'.format(event_id))
-    evt.write(fnout_prefix + 'xml', format='QUAKEML')
-
-
-def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
+def write_phases(arrivals, fformat, filename, parameter=None, eventinfo=None):
"""
|
"""
|
||||||
Function of methods to write phases to the following standard file
|
Writes earthquake phase data to different file formats.
|
||||||
formats used for locating earthquakes:
|
|
||||||
|
|
||||||
HYPO71, NLLoc, VELEST, HYPOSAT, FOCMEC, and hypoDD
|
:param arrivals: Dictionary containing phase information (station ID, phase, first motion, weight, etc.)
|
||||||
|
|
||||||
:param arrivals:dictionary containing all phase information including
|
|
||||||
station ID, phase, first motion, weight (uncertainty), ...
|
|
||||||
:type arrivals: dict
|
:type arrivals: dict
|
||||||
|
:param fformat: File format to write to (e.g., 'NLLoc', 'HYPO71', 'HYPOSAT', 'VELEST', 'HYPODD', 'FOCMEC')
|
||||||
:param fformat: chosen file format (location routine),
|
|
||||||
choose between NLLoc, HYPO71, HYPOSAT, VELEST,
|
|
||||||
HYPOINVERSE, FOCMEC, and hypoDD
|
|
||||||
:type fformat: str
|
:type fformat: str
|
||||||
|
:param filename: Path and name of the output phase file
|
||||||
:param filename: full path and name of phase file
|
:type filename: str
|
||||||
:type filename: string
|
:param parameter: Additional parameters for writing the phase data
|
||||||
|
|
||||||
:param parameter: all input information
|
|
||||||
:type parameter: object
|
:type parameter: object
|
||||||
|
:param eventinfo: Event information needed for specific formats like VELEST, FOCMEC, and HASH
|
||||||
:param eventinfo: optional, needed for VELEST-cnv file
|
:type eventinfo: obspy.core.event.Event
|
||||||
and FOCMEC- and HASH-input files
|
|
||||||
:type eventinfo: `obspy.core.event.Event` object
|
|
||||||
"""
|
"""
|
||||||
-    if fformat == 'NLLoc':
-        print("Writing phases to %s for NLLoc" % filename)
-        fid = open("%s" % filename, 'w')
-        # write header
-        fid.write('# EQEVENT: %s Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' %
-                  (parameter.get('database'), parameter.get('eventID')))
-        arrivals = chooseArrivals(arrivals)  # MP MP what is chooseArrivals? It is not defined anywhere
-        for key in arrivals:
-            # P onsets
-            if 'P' in arrivals[key]:
-                try:
-                    fm = arrivals[key]['P']['fm']
-                except KeyError as e:
-                    print(e)
-                    fm = None
-                if fm is None:
-                    fm = '?'
-                onset = arrivals[key]['P']['mpp']
-                year = onset.year
-                month = onset.month
-                day = onset.day
-                hh = onset.hour
-                mm = onset.minute
-                ss = onset.second
-                ms = onset.microsecond
-                ss_ms = ss + ms / 1000000.0
-                pweight = 1  # use pick
-                try:
-                    if arrivals[key]['P']['weight'] >= 4:
-                        pweight = 0  # do not use pick
-                        print("Station {}: Uncertain pick, do not use it!".format(key))
-                except KeyError as e:
-                    print(e.message + '; no weight set during processing')
-                fid.write('%s ? ? ? P %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
-                          fm,
-                          year,
-                          month,
-                          day,
-                          hh,
-                          mm,
-                          ss_ms,
-                          pweight))
-            # S onsets
-            if 'S' in arrivals[key] and arrivals[key]['S']['mpp'] is not None:
-                fm = '?'
-                onset = arrivals[key]['S']['mpp']
-                year = onset.year
-                month = onset.month
-                day = onset.day
-                hh = onset.hour
-                mm = onset.minute
-                ss = onset.second
-                ms = onset.microsecond
-                ss_ms = ss + ms / 1000000.0
-                sweight = 1  # use pick
-                try:
-                    if arrivals[key]['S']['weight'] >= 4:
-                        sweight = 0  # do not use pick
-                except KeyError as e:
-                    print(str(e) + '; no weight set during processing')
-                Ao = arrivals[key]['S']['Ao']  # peak-to-peak amplitude
-                if Ao == None:
-                    Ao = 0.0
-                # fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
-                fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 %9.2f 0 0 %d \n' % (key,
-                          fm,
-                          year,
-                          month,
-                          day,
-                          hh,
-                          mm,
-                          ss_ms,
-                          Ao,
-                          sweight))
-
-        fid.close()
+    def write_nlloc():
+        with open(filename, 'w') as fid:
+            fid.write('# EQEVENT: {} Label: EQ{} Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n'.format(
+                parameter.get('database'), parameter.get('eventID')))
+            for key, value in arrivals.items():
+                for phase in ['P', 'S']:
+                    if phase in value:
+                        fm = value[phase].get('fm', '?')
+                        onset = value[phase]['mpp']
+                        ss_ms = onset.second + onset.microsecond / 1000000.0
+                        weight = 1 if value[phase].get('weight', 0) < 4 else 0
+                        amp = value[phase].get('Ao', 0.0) if phase == 'S' else ''
+                        fid.write('{} ? ? ? {} {}{}{} {}{} {:7.4f} GAU 0 {} 0 0 {}\n'.format(
+                            key, phase, fm, onset.year, onset.month, onset.day, onset.hour, onset.minute, ss_ms, amp,
+                            weight))

-    elif fformat == 'HYPO71':
-        print("Writing phases to %s for HYPO71" % filename)
-        fid = open("%s" % filename, 'w')
-        # write header
-        fid.write('  %s\n' %
-                  parameter.get('eventID'))
-        arrivals = chooseArrivals(arrivals)  # MP MP what is chooseArrivals? It is not defined anywhere
-        for key in arrivals:
-            if arrivals[key]['P']['weight'] < 4:
-                stat = key
-                if len(stat) > 4:  # HYPO71 handles only 4-string station IDs
-                    stat = stat[1:5]
-                Ponset = arrivals[key]['P']['mpp']
-                Sonset = arrivals[key]['S']['mpp']
-                pweight = arrivals[key]['P']['weight']
-                sweight = arrivals[key]['S']['weight']
-                fm = arrivals[key]['P']['fm']
-                if fm is None:
-                    fm = '-'
-                Ao = arrivals[key]['S']['Ao']
-                if Ao is None:
-                    Ao = ''
-                else:
-                    Ao = str('%7.2f' % Ao)
-                year = Ponset.year
-                if year >= 2000:
-                    year = year - 2000
-                else:
-                    year = year - 1900
-                month = Ponset.month
-                day = Ponset.day
-                hh = Ponset.hour
-                mm = Ponset.minute
-                ss = Ponset.second
-                ms = Ponset.microsecond
-                ss_ms = ss + ms / 1000000.0
-                if pweight < 2:
-                    pstr = 'I'
-                elif pweight >= 2:
-                    pstr = 'E'
-                if arrivals[key]['S']['weight'] < 4:
-                    Sss = Sonset.second
-                    Sms = Sonset.microsecond
-                    Sss_ms = Sss + Sms / 1000000.0
-                    Sss_ms = str('%5.02f' % Sss_ms)
-                    if sweight < 2:
-                        sstr = 'I'
-                    elif sweight >= 2:
-                        sstr = 'E'
-                    fid.write('%-4s%sP%s%d %02d%02d%02d%02d%02d%5.2f %s%sS %d %s\n' % (stat,
-                              pstr,
-                              fm,
-                              pweight,
-                              year,
-                              month,
-                              day,
-                              hh,
-                              mm,
-                              ss_ms,
-                              Sss_ms,
-                              sstr,
-                              sweight,
-                              Ao))
-                else:
-                    fid.write('%-4s%sP%s%d %02d%02d%02d%02d%02d%5.2f %s\n' % (stat,
-                              pstr,
-                              fm,
-                              pweight,
-                              year,
-                              month,
-                              day,
-                              hh,
-                              mm,
-                              ss_ms,
-                              Ao))
-
-        fid.close()
+    def write_hypo71():
+        with open(filename, 'w') as fid:
+            fid.write('  {}\n'.format(parameter.get('eventID')))
+            for key, value in arrivals.items():
+                if value['P'].get('weight', 0) < 4:
+                    stat = key[:4]
+                    Ponset = value['P']['mpp']
+                    Sonset = value.get('S', {}).get('mpp')
+                    pweight = value['P'].get('weight', 0)
+                    sweight = value.get('S', {}).get('weight', 0)
+                    fm = value['P'].get('fm', '-')
+                    Ao = value.get('S', {}).get('Ao', '')
+                    year = Ponset.year - 2000 if Ponset.year >= 2000 else Ponset.year - 1900
+                    ss_ms = Ponset.second + Ponset.microsecond / 1000000.0
+                    if Sonset:
+                        Sss_ms = Sonset.second + Sonset.microsecond / 1000000.0
+                        fid.write('{}P{}{}{} {}{}{}{}{} {:5.2f} {}{}S {} {}\n'.format(
+                            stat, 'I' if pweight < 2 else 'E', fm, pweight, year, Ponset.month, Ponset.day,
+                            Ponset.hour, Ponset.minute, ss_ms, Sss_ms, 'I' if sweight < 2 else 'E', sweight, Ao))
+                    else:
+                        fid.write('{}P{}{}{} {}{}{}{}{} {:5.2f} {}\n'.format(
+                            stat, 'I' if pweight < 2 else 'E', fm, pweight, year, Ponset.month, Ponset.day,
+                            Ponset.hour, Ponset.minute, ss_ms, Ao))

-    elif fformat == 'HYPOSAT':
-        print("Writing phases to %s for HYPOSAT" % filename)
-        fid = open("%s" % filename, 'w')
-        # write header
-        fid.write('%s, event %s \n' % (parameter.get('database'), parameter.get('eventID')))
-        arrivals = chooseArrivals(arrivals)  # MP MP what is chooseArrivals? It is not defined anywhere
-        for key in arrivals:
-            # P onsets
-            if 'P' in arrivals[key] and arrivals[key]['P']['mpp'] is not None:
-                if arrivals[key]['P']['weight'] < 4:
-                    Ponset = arrivals[key]['P']['mpp']
-                    pyear = Ponset.year
-                    pmonth = Ponset.month
-                    pday = Ponset.day
-                    phh = Ponset.hour
-                    pmm = Ponset.minute
-                    pss = Ponset.second
-                    pms = Ponset.microsecond
-                    Pss = pss + pms / 1000000.0
-                    # use symmetrized picking error as std
-                    # (read the HYPOSAT manual)
-                    pstd = arrivals[key]['P']['spe']
-                    if pstd is None:
-                        errorsP = parameter.get('timeerrorsP')
-                        if arrivals[key]['P']['weight'] == 0:
-                            pstd = errorsP[0]
-                        elif arrivals[key]['P']['weight'] == 1:
-                            pstd = errorsP[1]
-                        elif arrivals[key]['P']['weight'] == 2:
-                            pstd = errorsP[2]
-                        elif arrivals[key]['P']['weight'] == 3:
-                            psrd = errorsP[3]
-                        else:
-                            pstd = errorsP[4]
-                    fid.write('%-5s P1 %4.0f %02d %02d %02d %02d %05.02f %5.3f -999. 0.00 -999. 0.00\n'
-                              % (key, pyear, pmonth, pday, phh, pmm, Pss, pstd))
-            # S onsets
-            if 'S' in arrivals[key] and arrivals[key]['S']['mpp'] is not None:
-                if arrivals[key]['S']['weight'] < 4:
-                    Sonset = arrivals[key]['S']['mpp']
-                    syear = Sonset.year
-                    smonth = Sonset.month
-                    sday = Sonset.day
-                    shh = Sonset.hour
-                    smm = Sonset.minute
-                    sss = Sonset.second
-                    sms = Sonset.microsecond
-                    Sss = sss + sms / 1000000.0
-                    sstd = arrivals[key]['S']['spe']
-                    if pstd is None:
-                        errorsS = parameter.get('timeerrorsS')
-                        if arrivals[key]['S']['weight'] == 0:
-                            pstd = errorsS[0]
-                        elif arrivals[key]['S']['weight'] == 1:
-                            pstd = errorsS[1]
-                        elif arrivals[key]['S']['weight'] == 2:
-                            pstd = errorsS[2]
-                        elif arrivals[key]['S']['weight'] == 3:
-                            psrd = errorsS[3]
-                        else:
-                            pstd = errorsP[4]
-                    fid.write('%-5s S1 %4.0f %02d %02d %02d %02d %05.02f %5.3f -999. 0.00 -999. 0.00\n'
-                              % (key, syear, smonth, sday, shh, smm, Sss, sstd))
-        fid.close()
+    def write_hyposat():
+        with open(filename, 'w') as fid:
+            fid.write('{}, event {} \n'.format(parameter.get('database'), parameter.get('eventID')))
+            for key, value in arrivals.items():
+                for phase in ['P', 'S']:
+                    if phase in value and value[phase].get('weight', 0) < 4:
+                        onset = value[phase]['mpp']
+                        ss_ms = onset.second + onset.microsecond / 1000000.0
+                        std = value[phase].get('spe', parameter.get('timeerrorsP')[value[phase].get('weight', 0)])
+                        fid.write(
+                            '{:<5} {}1 {:4} {:02} {:02} {:02} {:02} {:05.02f} {:5.3f} -999. 0.00 -999. 0.00\n'.format(
+                                key, phase, onset.year, onset.month, onset.day, onset.hour, onset.minute, ss_ms, std))

-    elif fformat == 'VELEST':
-        print("Writing phases to %s for VELEST" % filename)
-        fid = open("%s" % filename, 'w')
-        # get informations needed in cnv-file
-        # check, whether latitude is N or S and longitude is E or W
-        try:
-            eventsource = eventinfo.origins[0]
-        except:
+    def write_velest():
+        if not eventinfo:
print("No source origin calculated yet, thus no cnv-file creation possible!")
|
print("No source origin calculated yet, thus no cnv-file creation possible!")
|
||||||
return
|
return
|
||||||
if eventsource['latitude'] < 0:
|
with open(filename, 'w') as fid:
|
||||||
cns = 'S'
|
origin = eventinfo.origins[0]
|
||||||
else:
|
lat_dir = 'S' if origin.latitude < 0 else 'N'
|
||||||
cns = 'N'
|
lon_dir = 'W' if origin.longitude < 0 else 'E'
|
||||||
if eventsource['longitude'] < 0:
|
year = origin.time.year - 2000 if origin.time.year >= 2000 else origin.time.year - 1900
|
||||||
cew = 'W'
|
fid.write(
|
||||||
else:
|
'{}{}{} {}{} {} {:05.2f} {:7.4f}{} {:8.4f}{} {:7.2f} {:6.2f} {:02.0f} 0.0 0.03 1.0 1.0\n'.format(
|
||||||
cew = 'E'
|
year, origin.time.month, origin.time.day, origin.time.hour, origin.time.minute, origin.time.second,
|
||||||
# get last two integers of origin year
|
origin.latitude, lat_dir, origin.longitude, lon_dir, origin.depth, eventinfo.magnitudes[0].mag, 0))
|
||||||
stime = eventsource['time']
|
for key, value in arrivals.items():
|
||||||
if stime.year - 2000 >= 0:
|
for phase in ['P', 'S']:
|
||||||
syear = stime.year - 2000
|
if phase in value and value[phase].get('weight', 0) < 4:
|
||||||
else:
|
onset = value[phase]['mpp']
|
||||||
syear = stime.year - 1900
|
rt = (onset - origin.time).total_seconds()
|
||||||
ifx = 0 # default value, see VELEST manual, pp. 22-23
|
fid.write('{:<4}{}{}{:6.2f}\n'.format(key[:4], phase, value[phase].get('weight', 0), rt))
|
||||||
# write header
|
|
||||||
fid.write('%s%02d%02d %02d%02d %05.2f %7.4f%c %8.4f%c %7.2f %6.2f %02.0f 0.0 0.03 1.0 1.0\n' % (
|
|
||||||
syear, stime.month, stime.day, stime.hour, stime.minute, stime.second, eventsource['latitude'],
|
|
||||||
cns, eventsource['longitude'], cew, eventsource['depth'], eventinfo.magnitudes[0]['mag'], ifx))
|
|
||||||
n = 0
|
|
||||||
# check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
|
|
||||||
if isinstance(arrivals, dict) == False:
|
|
||||||
# convert pick object (PyLoT) into dictionary
|
|
||||||
evt = ope.Event(resource_id=eventinfo['resource_id'])
|
|
||||||
evt.picks = arrivals
|
|
||||||
arrivals = picksdict_from_picks(evt)
|
|
||||||
# check for automatic and manual picks
|
|
||||||
# prefer manual picks
|
|
||||||
usedarrivals = chooseArrivals(arrivals)
|
|
||||||
for key in usedarrivals:
|
|
||||||
# P onsets
|
|
||||||
if 'P' in usedarrivals[key]:
|
|
||||||
if usedarrivals[key]['P']['weight'] < 4:
|
|
||||||
n += 1
|
|
||||||
stat = key
|
|
||||||
if len(stat) > 4: # VELEST handles only 4-string station IDs
|
|
||||||
stat = stat[1:5]
|
|
||||||
Ponset = usedarrivals[key]['P']['mpp']
|
|
||||||
Pweight = usedarrivals[key]['P']['weight']
|
|
||||||
Prt = Ponset - stime # onset time relative to source time
|
|
||||||
if n % 6 != 0:
|
|
||||||
fid.write('%-4sP%d%6.2f' % (stat, Pweight, Prt))
|
|
||||||
else:
|
|
||||||
fid.write('%-4sP%d%6.2f\n' % (stat, Pweight, Prt))
|
|
||||||
# S onsets
|
|
||||||
if 'S' in usedarrivals[key]:
|
|
||||||
if usedarrivals[key]['S']['weight'] < 4:
|
|
||||||
n += 1
|
|
||||||
stat = key
|
|
||||||
if len(stat) > 4: # VELEST handles only 4-string station IDs
|
|
||||||
stat = stat[1:5]
|
|
||||||
Sonset = usedarrivals[key]['S']['mpp']
|
|
||||||
Sweight = usedarrivals[key]['S']['weight']
|
|
||||||
Srt = Ponset - stime # onset time relative to source time
|
|
||||||
if n % 6 != 0:
|
|
||||||
fid.write('%-4sS%d%6.2f' % (stat, Sweight, Srt))
|
|
||||||
else:
|
|
||||||
fid.write('%-4sS%d%6.2f\n' % (stat, Sweight, Srt))
|
|
||||||
fid.close()
|
|
||||||
|
|
||||||
elif fformat == 'HYPODD':
|
def write_hypodd():
|
||||||
print("Writing phases to %s for hypoDD" % filename)
|
if not eventinfo:
|
||||||
fid = open("%s" % filename, 'w')
|
|
||||||
# get event information needed for hypoDD-phase file
|
|
||||||
try:
|
|
||||||
eventsource = eventinfo.origins[0]
|
|
||||||
except:
|
|
||||||
print("No source origin calculated yet, thus no hypoDD-infile creation possible!")
|
print("No source origin calculated yet, thus no hypoDD-infile creation possible!")
|
||||||
return
|
return
|
||||||
stime = eventsource['time']
|
with open(filename, 'w') as fid:
|
||||||
try:
|
origin = eventinfo.origins[0]
|
||||||
event = eventinfo['pylot_id']
|
stime = origin.time
|
||||||
hddID = event.split('.')[0][1:5]
|
fid.write('# {} {} {} {} {} {} {:7.4f} +{:6.4f} {:7.4f} {:4.2f} 0.1 0.5 {:4.2f} {}\n'.format(
|
||||||
except:
|
|
||||||
print("Error 1111111!")
|
|
||||||
hddID = "00000"
|
|
||||||
# write header
|
|
||||||
fid.write('# %d %d %d %d %d %5.2f %7.4f +%6.4f %7.4f %4.2f 0.1 0.5 %4.2f %s\n' % (
|
|
||||||
stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
|
stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
|
||||||
eventsource['latitude'], eventsource['longitude'], eventsource['depth'] / 1000,
|
origin.latitude, origin.longitude, origin.depth / 1000, eventinfo.magnitudes[0].mag,
|
||||||
eventinfo.magnitudes[0]['mag'], eventsource['quality']['standard_error'], hddID))
|
origin.quality.standard_error, "00000"))
|
||||||
# check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
|
for key, value in arrivals.items():
|
||||||
if isinstance(arrivals, dict) == False:
|
for phase in ['P', 'S']:
|
||||||
# convert pick object (PyLoT) into dictionary
|
if phase in value and value[phase].get('weight', 0) < 4:
|
||||||
evt = ope.Event(resource_id=eventinfo['resource_id'])
|
onset = value[phase]['mpp']
|
||||||
evt.picks = arrivals
|
rt = (onset - stime).total_seconds()
|
||||||
arrivals = picksdict_from_picks(evt)
|
fid.write('{} {:6.3f} 1 {}\n'.format(key, rt, phase))
|
||||||
# check for automatic and manual picks
|
|
||||||
# prefer manual picks
|
|
||||||
usedarrivals = chooseArrivals(arrivals)
|
|
||||||
for key in usedarrivals:
|
|
||||||
if 'P' in usedarrivals[key]:
|
|
||||||
# P onsets
|
|
||||||
if usedarrivals[key]['P']['weight'] < 4:
|
|
||||||
Ponset = usedarrivals[key]['P']['mpp']
|
|
||||||
Prt = Ponset - stime # onset time relative to source time
|
|
||||||
fid.write('%s %6.3f 1 P\n' % (key, Prt))
|
|
||||||
if 'S' in usedarrivals[key]:
|
|
||||||
# S onsets
|
|
||||||
if usedarrivals[key]['S']['weight'] < 4:
|
|
||||||
Sonset = usedarrivals[key]['S']['mpp']
|
|
||||||
Srt = Sonset - stime # onset time relative to source time
|
|
||||||
fid.write('%-5s %6.3f 1 S\n' % (key, Srt))
|
|
||||||
|
|
||||||
fid.close()
|
def write_focmec():
|
||||||
|
if not eventinfo:
|
||||||
elif fformat == 'FOCMEC':
|
|
||||||
print("Writing phases to %s for FOCMEC" % filename)
|
|
||||||
fid = open("%s" % filename, 'w')
|
|
||||||
# get event information needed for FOCMEC-input file
|
|
||||||
try:
|
|
||||||
eventsource = eventinfo.origins[0]
|
|
||||||
except:
|
|
||||||
print("No source origin calculated yet, thus no FOCMEC-infile creation possible!")
|
print("No source origin calculated yet, thus no FOCMEC-infile creation possible!")
|
||||||
return
|
return
|
||||||
stime = eventsource['time']
|
with open(filename, 'w') as fid:
|
||||||
|
origin = eventinfo.origins[0]
|
||||||
# avoid printing '*' in focmec-input file
|
stime = origin.time
|
||||||
if parameter.get('eventid') == '*' or parameter.get('eventid') is None:
|
fid.write('{} {}{:02d}{:02d}{:02d}{:02d}{:02.0f} {:7.4f} {:6.4f} {:3.1f} {:3.1f}\n'.format(
|
||||||
evID = 'e0000'
|
parameter.get('eventid', 'e0000'), stime.year, stime.month, stime.day, stime.hour, stime.minute,
|
||||||
else:
|
stime.second, origin.latitude, origin.longitude, origin.depth / 1000, eventinfo.magnitudes[0].mag))
|
||||||
evID = parameter.get('eventid')
|
for key, value in arrivals.items():
|
||||||
|
if 'P' in value and value['P'].get('weight', 0) < 4 and value['P'].get('fm'):
|
||||||
# write header line including event information
|
for pick in eventinfo.picks:
|
||||||
fid.write('%s %d%02d%02d%02d%02d%02.0f %7.4f %6.4f %3.1f %3.1f\n' % (evID,
|
if pick.waveform_id.station_code == key:
|
||||||
stime.year, stime.month, stime.day,
|
for arrival in origin.arrivals:
|
||||||
stime.hour, stime.minute, stime.second,
|
if arrival.pick_id == pick.resource_id and arrival.phase == 'P':
|
||||||
eventsource['latitude'],
|
stat = key[:4]
|
||||||
eventsource['longitude'],
|
az = arrival.azimuth
|
||||||
eventsource['depth'] / 1000,
|
inz = arrival.takeoff_angle
|
||||||
eventinfo.magnitudes[0]['mag']))
|
fid.write('{:<4} {:6.2f} {:6.2f}{}\n'.format(stat, az, inz, value['P']['fm']))
|
||||||
picks = eventinfo.picks
|
|
||||||
# check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
|
|
||||||
if isinstance(arrivals, dict) == False:
|
|
||||||
# convert pick object (PyLoT) into dictionary
|
|
||||||
evt = ope.Event(resource_id=eventinfo['resource_id'])
|
|
||||||
evt.picks = arrivals
|
|
||||||
arrivals = picksdict_from_picks(evt)
|
|
||||||
# check for automatic and manual picks
|
|
||||||
# prefer manual picks
|
|
||||||
usedarrivals = chooseArrivals(arrivals)
|
|
||||||
for key in usedarrivals:
|
|
||||||
if 'P' in usedarrivals[key]:
|
|
||||||
if usedarrivals[key]['P']['weight'] < 4 and usedarrivals[key]['P']['fm'] is not None:
|
|
||||||
stat = key
|
|
||||||
for i in range(len(picks)):
|
|
||||||
station = picks[i].waveform_id.station_code
|
|
||||||
if station == stat:
|
|
||||||
# get resource ID
|
|
||||||
resid_picks = picks[i].get('resource_id')
|
|
||||||
# find same ID in eventinfo
|
|
||||||
# there it is the pick_id!!
|
|
||||||
for j in range(len(eventinfo.origins[0].arrivals)):
|
|
||||||
resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
|
|
||||||
if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
|
|
||||||
if len(stat) > 4: # FOCMEC handles only 4-string station IDs
|
|
||||||
stat = stat[1:5]
|
|
||||||
az = eventinfo.origins[0].arrivals[j].get('azimuth')
|
|
||||||
inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
|
|
||||||
fid.write('%-4s %6.2f %6.2f%s \n' % (stat,
|
|
||||||
az,
|
|
||||||
inz,
|
|
||||||
usedarrivals[key]['P']['fm']))
|
|
||||||
break
|
break
|
||||||
|
|
||||||
fid.close()
|
def write_hash():
|
||||||
|
# Define filenames for HASH driver 1 and 2
|
||||||
|
filename1 = f"{filename}drv1.phase"
|
||||||
|
filename2 = f"{filename}drv2.phase"
|
||||||
|
|
||||||
elif fformat == 'HASH':
|
print(f"Writing phases to {filename1} for HASH-driver 1")
|
||||||
# two different input files for
|
print(f"Writing phases to {filename2} for HASH-driver 2")
|
||||||
# HASH-driver 1 and 2 (see HASH manual!)
|
|
||||||
filename1 = filename + 'drv1' + '.phase'
|
# Open files for writing
|
||||||
filename2 = filename + 'drv2' + '.phase'
|
with open(filename1, 'w') as fid1, open(filename2, 'w') as fid2:
|
||||||
print("Writing phases to %s for HASH for HASH-driver 1" % filename1)
|
# Get event information needed for HASH-input file
|
||||||
fid1 = open("%s" % filename1, 'w')
|
|
||||||
print("Writing phases to %s for HASH for HASH-driver 2" % filename2)
|
|
||||||
fid2 = open("%s" % filename2, 'w')
|
|
||||||
# get event information needed for HASH-input file
|
|
||||||
try:
|
try:
|
||||||
eventsource = eventinfo.origins[0]
|
eventsource = eventinfo.origins[0]
|
||||||
except:
|
except IndexError:
|
||||||
print("No source origin calculated yet, thus no cnv-file creation possible!")
|
print("No source origin calculated yet, thus no cnv-file creation possible!")
|
||||||
return
|
return
|
||||||
eventsource = eventinfo.origins[0]
|
|
||||||
event = parameter.get('eventID')
|
event = parameter.get('eventID')
|
||||||
hashID = event.split('.')[0][1:5]
|
hashID = event.split('.')[0][1:5]
|
||||||
latdeg = eventsource['latitude']
|
latdeg = eventsource['latitude']
|
||||||
latmin = eventsource['latitude'] * 60 / 10000
|
latmin = (eventsource['latitude'] * 60) / 10000
|
||||||
londeg = eventsource['longitude']
|
londeg = eventsource['longitude']
|
||||||
lonmin = eventsource['longitude'] * 60 / 10000
|
lonmin = (eventsource['longitude'] * 60) / 10000
|
||||||
erh = 1 / 2 * (eventsource.origin_uncertainty['min_horizontal_uncertainty'] +
|
|
||||||
eventsource.origin_uncertainty['max_horizontal_uncertainty']) / 1000
|
erh = (eventsource.origin_uncertainty['min_horizontal_uncertainty'] +
|
||||||
|
eventsource.origin_uncertainty['max_horizontal_uncertainty']) / 2000
|
||||||
erz = eventsource.depth_errors['uncertainty']
|
erz = eventsource.depth_errors['uncertainty']
|
||||||
|
|
||||||
stime = eventsource['time']
|
stime = eventsource['time']
|
||||||
if stime.year - 2000 >= 0:
|
syear = stime.year % 100 # Calculate two-digit year
|
||||||
syear = stime.year - 2000
|
|
||||||
else:
|
|
||||||
syear = stime.year - 1900
|
|
||||||
picks = eventinfo.picks
|
picks = eventinfo.picks
|
||||||
# write header line including event information
|
|
||||||
# for HASH-driver 1
|
# Write header line including event information for HASH-driver 1
|
||||||
fid1.write('%s%02d%02d%02d%02d%5.2f%2dN%5.2f%3dE%5.2f%6.3f%4.2f%5.2f%5.2f%s\n' % (syear,
|
fid1.write(f"{syear:02d}{stime.month:02d}{stime.day:02d}{stime.hour:02d}{stime.minute:02d}"
|
||||||
stime.month, stime.day,
|
f"{stime.second:05.2f}{latdeg:2d}N{latmin:05.2f}{londeg:3d}E{lonmin:05.2f}"
|
||||||
stime.hour, stime.minute,
|
f"{eventsource['depth']:6.2f}{eventinfo.magnitudes[0]['mag']:4.2f}{erh:5.2f}{erz:5.2f}{hashID}\n")
|
||||||
stime.second,
|
|
||||||
latdeg, latmin, londeg,
|
# Write header line including event information for HASH-driver 2
|
||||||
lonmin, eventsource['depth'],
|
fid2.write(f"{syear:02d}{stime.month:02d}{stime.day:02d}{stime.hour:02d}{stime.minute:02d}"
|
||||||
eventinfo.magnitudes[0][
|
f"{stime.second:05.2f}{latdeg}N{latmin:05.2f}{londeg}E{lonmin:6.2f}{eventsource['depth']:5.2f}"
|
||||||
'mag'], erh, erz,
|
f"{eventsource['quality']['used_phase_count']:3d}{erh:5.2f}{erz:5.2f}"
|
||||||
hashID))
|
f"{eventinfo.magnitudes[0]['mag']:4.2f}{hashID}\n")
|
||||||
# write header line including event information
|
|
||||||
# for HASH-driver 2
|
# Write phase lines
|
||||||
fid2.write(
|
for key, arrival in arrivals.items():
|
||||||
'%d%02d%02d%02d%02d%5.2f%dN%5.2f%3dE%6.2f%5.2f %d %5.2f %5.2f %4.2f %s \n' % (
|
if 'P' in arrival and arrival['P']['weight'] < 4 and arrival['P']['fm'] is not None:
|
||||||
syear, stime.month, stime.day,
|
|
||||||
stime.hour, stime.minute, stime.second,
|
|
||||||
latdeg, latmin, londeg, lonmin,
|
|
||||||
eventsource['depth'],
|
|
||||||
eventsource['quality']['used_phase_count'],
|
|
||||||
erh, erz, eventinfo.magnitudes[0]['mag'],
|
|
||||||
hashID))
|
|
||||||
-        # Prefer Manual Picks over automatic ones if possible
-        arrivals = chooseArrivals(arrivals)  # MP MP what is chooseArrivals? It is not defined anywhere
-        # write phase lines
-        for key in arrivals:
-            if 'P' in arrivals[key]:
-                if arrivals[key]['P']['weight'] < 4 and arrivals[key]['P']['fm'] is not None:
-                    stat = key
-                    ccode = arrivals[key]['P']['channel']
-                    ncode = arrivals[key]['P']['network']
-                    if arrivals[key]['P']['weight'] < 2:
-                        Pqual = 'I'
-                    else:
-                        Pqual = 'E'
-                    for i in range(len(picks)):
-                        station = picks[i].waveform_id.station_code
-                        if station == stat:
-                            # get resource ID
-                            resid_picks = picks[i].get('resource_id')
-                            # find same ID in eventinfo
-                            # there it is the pick_id!!
-                            for j in range(len(eventinfo.origins[0].arrivals)):
-                                resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
-                                if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
-                                    if len(stat) > 4:  # HASH handles only 4-string station IDs
-                                        stat = stat[1:5]
-                                    az = eventinfo.origins[0].arrivals[j].get('azimuth')
-                                    inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
-                                    dist = eventinfo.origins[0].arrivals[j].get('distance')
-                                    # write phase line for HASH-driver 1
-                                    fid1.write(
-                                        '%-4s%sP%s%d 0 %3.1f %03d %03d 2 1 %s\n' % (
-                                            stat, Pqual, arrivals[key]['P']['fm'], arrivals[key]['P']['weight'],
-                                            dist, inz, az, ccode))
-                                    # write phase line for HASH-driver 2
-                                    fid2.write('%-4s %s %s %s %s \n' % (
-                                        stat, ncode, ccode, Pqual,
-                                        arrivals[key]['P']['fm']))
-                                    break
-        fid1.write(' %s' % hashID)
-        fid1.close()
-        fid2.close()
+        # Write phase lines
+        for key, arrival in arrivals.items():
+            if 'P' in arrival and arrival['P']['weight'] < 4 and arrival['P']['fm'] is not None:
+                stat = key
+                ccode = arrival['P']['channel']
+                ncode = arrival['P']['network']
+                Pqual = 'I' if arrival['P']['weight'] < 2 else 'E'
+                for pick in picks:
+                    if pick.waveform_id.station_code == stat:
+                        resid_picks = pick.get('resource_id')
+                        for origin_arrival in eventinfo.origins[0].arrivals:
+                            if (origin_arrival.get('pick_id') == resid_picks and
+                                    origin_arrival.phase == 'P'):
+                                if len(stat) > 4:  # HASH handles only 4-character station IDs
+                                    stat = stat[1:5]
+                                az = origin_arrival.get('azimuth')
+                                inz = origin_arrival.get('takeoff_angle')
+                                dist = origin_arrival.get('distance')
+                                # Write phase line for HASH-driver 1
+                                fid1.write(f"{stat:<4}{Pqual}P{arrival['P']['fm']}{arrival['P']['weight']:d}"
+                                           f"{dist:3.1f}{inz:03d}{az:03d}{ccode}\n")
+                                # Write phase line for HASH-driver 2
+                                fid2.write(f"{stat:<4} {ncode} {ccode} {Pqual} {arrival['P']['fm']}\n")
+                                break
+        fid1.write(f"{'':<36}{hashID}")
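The inner loops associate a station's pick with the origin arrival that references the same resource ID; the same lookup can be factored out as a small helper. find_p_arrival is a hypothetical name, and the attribute access mirrors the ObsPy objects used above:

def find_p_arrival(origin, picks, station):
    # Return the P arrival of `origin` whose pick_id references a pick
    # recorded at `station`, or None if there is no match.
    for pick in picks:
        if pick.waveform_id.station_code != station:
            continue
        resid = pick.get('resource_id')
        for arrival in origin.arrivals:
            if arrival.get('pick_id') == resid and arrival.phase == 'P':
                return arrival
    return None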
+    # Prefer Manual Picks over automatic ones if possible
+    arrivals = chooseArrivals(arrivals)  # Function not defined, assumed to exist
+
+    if fformat == 'NLLoc':
+        write_nlloc()
+    elif fformat == 'HYPO71':
+        write_hypo71()
+    elif fformat == 'HYPOSAT':
+        write_hyposat()
+    elif fformat == 'VELEST':
+        write_velest()
+    elif fformat == 'HYPODD':
+        write_hypodd()
+    elif fformat == 'FOCMEC':
+        write_focmec()
+    elif fformat == 'HASH':
+        write_hash()
 
 
 def chooseArrivals(arrivals):
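The body of chooseArrivals is not shown in this hunk; a possible minimal implementation that prefers manually revised picks over automatic ones might look like the sketch below. The 'manual'/'auto' layout of the arrival dictionaries is an assumption for illustration, not taken from the source:

def chooseArrivals(arrivals):
    # Hypothetical sketch: for each station and phase, keep the manual pick
    # if one exists, otherwise fall back to the automatic pick.
    chosen = {}
    for station, phases in arrivals.items():
        chosen[station] = {}
        for phase, variants in phases.items():
            if isinstance(variants, dict) and 'manual' in variants:
                chosen[station][phase] = variants['manual']
            elif isinstance(variants, dict) and 'auto' in variants:
                chosen[station][phase] = variants['auto']
            else:
                chosen[station][phase] = variants
    return chosen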
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from pylot.core.io.phases import writephases
+from pylot.core.io.phases import write_phases
 from pylot.core.util.version import get_git_version as _getVersionString
 
 __version__ = _getVersionString()
@@ -25,4 +25,4 @@ def export(picks, fnout, parameter, eventinfo):
     :type eventinfo: list object
     '''
     # write phases to FOCMEC-phase file
-    writephases(picks, 'FOCMEC', fnout, parameter, eventinfo)
+    write_phases(picks, 'FOCMEC', fnout, parameter, eventinfo)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from pylot.core.io.phases import writephases
+from pylot.core.io.phases import write_phases
 from pylot.core.util.version import get_git_version as _getVersionString
 
 __version__ = _getVersionString()
@@ -25,4 +25,4 @@ def export(picks, fnout, parameter, eventinfo):
     :type eventinfo: list object
     '''
     # write phases to HASH-phase file
-    writephases(picks, 'HASH', fnout, parameter, eventinfo)
+    write_phases(picks, 'HASH', fnout, parameter, eventinfo)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from pylot.core.io.phases import writephases
+from pylot.core.io.phases import write_phases
 from pylot.core.util.version import get_git_version as _getVersionString
 
 __version__ = _getVersionString()
@@ -22,4 +22,4 @@ def export(picks, fnout, parameter):
     :type parameter: object
     '''
     # write phases to HYPO71-phase file
-    writephases(picks, 'HYPO71', fnout, parameter)
+    write_phases(picks, 'HYPO71', fnout, parameter)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from pylot.core.io.phases import writephases
+from pylot.core.io.phases import write_phases
 from pylot.core.util.version import get_git_version as _getVersionString
 
 __version__ = _getVersionString()
@@ -25,4 +25,4 @@ def export(picks, fnout, parameter, eventinfo):
     :type eventinfo: list object
     '''
     # write phases to hypoDD-phase file
-    writephases(picks, 'HYPODD', fnout, parameter, eventinfo)
+    write_phases(picks, 'HYPODD', fnout, parameter, eventinfo)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from pylot.core.io.phases import writephases
+from pylot.core.io.phases import write_phases
 from pylot.core.util.version import get_git_version as _getVersionString
 
 __version__ = _getVersionString()
@@ -22,4 +22,4 @@ def export(picks, fnout, parameter):
     :type parameter: object
     '''
     # write phases to HYPOSAT-phase file
-    writephases(picks, 'HYPOSAT', fnout, parameter)
+    write_phases(picks, 'HYPOSAT', fnout, parameter)
@@ -7,7 +7,7 @@ import subprocess
 
 from obspy import read_events
 
-from pylot.core.io.phases import writephases
+from pylot.core.io.phases import write_phases
 from pylot.core.util.gui import which
 from pylot.core.util.utils import getPatternLine, runProgram
 from pylot.core.util.version import get_git_version as _getVersionString
@@ -34,7 +34,7 @@ def export(picks, fnout, parameter):
     :type parameter: object
     '''
     # write phases to NLLoc-phase file
-    writephases(picks, 'NLLoc', fnout, parameter)
+    write_phases(picks, 'NLLoc', fnout, parameter)
 
 
 def modify_inputs(ctrfn, root, nllocoutn, phasefn, tttn):
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from pylot.core.io.phases import writephases
+from pylot.core.io.phases import write_phases
 from pylot.core.util.version import get_git_version as _getVersionString
 
 __version__ = _getVersionString()
@@ -25,4 +25,4 @@ def export(picks, fnout, eventinfo, parameter=None):
     :type parameter: object
     '''
     # write phases to VELEST-phase file
-    writephases(picks, 'VELEST', fnout, parameter, eventinfo)
+    write_phases(picks, 'VELEST', fnout, parameter, eventinfo)
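All of the location exporters above switch from writephases to write_phases. If third-party code still imports the old name, a thin deprecation alias next to write_phases in pylot/core/io/phases.py would keep it working; this is a sketch of such an alias, not part of the change itself:

import warnings

def writephases(*args, **kwargs):
    # Backward-compatible wrapper for code that still uses the old name.
    warnings.warn("writephases() is deprecated, use write_phases() instead",
                  DeprecationWarning, stacklevel=2)
    return write_phases(*args, **kwargs)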
@@ -6,7 +6,7 @@ Created on Wed Jan 26 17:47:25 2015
 @author: sebastianw
 """
 
-from pylot.core.io.data import SeiscompDataStructure, PilotDataStructure, ObspyDMTdataStructure
+from pylot.core.io.data import SeiscompDataStructure, PilotDataStructure
 
 DATASTRUCTURE = {'PILOT': PilotDataStructure, 'SeisComP': SeiscompDataStructure,
-                 'obspyDMT': ObspyDMTdataStructure, None: PilotDataStructure}
+                 'obspyDMT': PilotDataStructure, None: PilotDataStructure}
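With ObspyDMTdataStructure no longer imported, the 'obspyDMT' key simply maps to PilotDataStructure as well. The mapping is presumably resolved along these lines; get_data_structure and its argument handling are assumptions for illustration, not the actual PyLoT API:

def get_data_structure(dsname=None):
    # Look up the structure class for a given name; unknown or missing
    # names fall back to PilotDataStructure, mirroring the mapping above.
    cls = DATASTRUCTURE.get(dsname, PilotDataStructure)
    return cls()

structure = get_data_structure('obspyDMT')  # now a PilotDataStructure instance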
@@ -358,6 +358,8 @@ def get_bool(value):
     False
     >>> get_bool(None)
     None
+    >>> get_bool('Stream')
+    'Stream'
     """
     if type(value) is bool:
         return value
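The added doctest documents that values which are neither boolean-like nor None pass through unchanged. An implementation consistent with the doctests shown in this hunk might look like the following sketch; the real function body is not part of the hunk:

def get_bool(value):
    # Real booleans pass through, common true/false strings are converted,
    # everything else (e.g. 'Stream' or None) is returned unchanged.
    if type(value) is bool:
        return value
    if isinstance(value, str):
        lowered = value.strip().lower()
        if lowered in ('true', 'yes', '1'):
            return True
        if lowered in ('false', 'no', '0'):
            return False
    return value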