Merge branch 'refs/heads/develop'
# Conflicts:
#	PyLoT.py
#	README.md
#	pylot/core/util/widgets.py
.gitignore (new file)
@@ -0,0 +1,5 @@
*.pyc
*~
.idea
pylot/RELEASE-VERSION
/tests/test_autopicker/dmt_database_test/
.mailmap (new file)
@@ -0,0 +1,39 @@
Darius Arnold <Darius.Arnold@ruhr-uni-bochum.de> <Darius_A@web.de>
Darius Arnold <Darius.Arnold@ruhr-uni-bochum.de> <darius.arnold@rub.de>
Darius Arnold <Darius.Arnold@ruhr-uni-bochum.de> <darius.arnold@ruhr-uni-bochum.de>
Darius Arnold <Darius.Arnold@ruhr-uni-bochum.de> <mail@dariusarnold.de>

Dennis Wlecklik <dennisw@minos02.geophysik.ruhr-uni-bochum.de>

Jeldrik Gaal <jeldrikgaal@gmail.com>

Kaan Coekerim <kaan.coekerim@ruhr-uni-bochum.de>
Kaan Coekerim <kaan.coekerim@ruhr-uni-bochum.de> <kaan.coekerim@rub.de>

Ludger Kueperkoch <kueperkoch@igem-energie.de> <kueperkoch@bestec-for-nature.com>
Ludger Kueperkoch <kueperkoch@igem-energie.de> <ludger@quake2.(none)>
Ludger Kueperkoch <kueperkoch@igem-energie.de> <ludger@sauron.bestec-for-nature>

Marc S. Boxberg <marc.boxberg@rub.de>

Marcel Paffrath <marcel.paffrath@ruhr-uni-bochum.de> <marcel.paffrath@rub.de>
Marcel Paffrath <marcel.paffrath@ruhr-uni-bochum.de> <marcel@minos01.geophysik.ruhr-uni-bochum.de>
Marcel Paffrath <marcel.paffrath@ruhr-uni-bochum.de> <marcel@minos02.geophysik.ruhr-uni-bochum.de>
Marcel Paffrath <marcel.paffrath@ruhr-uni-bochum.de> <marcel@minos25.geophysik.ruhr-uni-bochum.de>
Marcel Paffrath <marcel.paffrath@ruhr-uni-bochum.de> <marcel@email.com>

Sally Zimmermann <sally.zimmermann@ruhr-uni-bochum.de>

Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <sebastianw@minos01.geophysik.ruhr-uni-bochum.de>
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <sebastianw@minos02.geophysik.ruhr-uni-bochum.de>
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <sebastianw@minos22.geophysik.ruhr-uni-bochum.de>
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <sebastian.wehling-benatelli@scisys.de>
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <sebastian.wehling@rub.de>
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <sebastian.wehling@rub.de>
Sebastian Wehling-Benatelli <sebastian.wehling-benatelli@cgi.com> <DarkBeQst@users.noreply.github.com>

Thomas Moeller <thomas.moeller@rub.de>

Ann-Christin Koch <ann-christin.koch@ruhr-uni-bochum.de> <Ann-Christin.Koch@ruhr-uni-bochum.de>

Sebastian Priebe <sebastian.priebe@rub.de>
README.md
@@ -1,40 +1,60 @@
 # PyLoT
 
-version: 0.2
+version: 0.3
 
 The Python picking and Localisation Tool
 
-This python library contains a graphical user interfaces for picking
-seismic phases. This software needs [ObsPy][ObsPy]
-and the PySide Qt4 bindings for python to be installed first.
+This python library contains a graphical user interface for picking seismic phases. This software needs [ObsPy][ObsPy]
+and the PySide2 Qt5 bindings for python to be installed first.
 
-PILOT has originally been developed in Mathworks' MatLab. In order to
-distribute PILOT without facing portability problems, it has been decided
-to redevelop the software package in Python. The great work of the ObsPy
-group allows easy handling of a bunch of seismic data and PyLoT will
-benefit a lot compared to the former MatLab version.
+PILOT has originally been developed in Mathworks' MatLab. In order to distribute PILOT without facing portability
+problems, it has been decided to redevelop the software package in Python. The great work of the ObsPy group allows easy
+handling of a bunch of seismic data and PyLoT will benefit a lot compared to the former MatLab version.
 
-The development of PyLoT is part of the joint research project MAGS2 and AlpArray.
+The development of PyLoT is part of the joint research projects MAGS2, AlpArray and AdriaArray.
 
 ## Installation
 
-At the moment there is no automatic installation procedure available for PyLoT.
-Best way to install is to clone the repository and add the path to your Python path.
+At the moment there is no automatic installation procedure available for PyLoT. Best way to install is to clone the
+repository and add the path to your Python path.
+
+It is highly recommended to use Anaconda for a simple creation of a Python installation using either the *pylot.yml* or the *requirements.txt* file found in the PyLoT root directory. First make sure that the *conda-forge* channel is available in your Anaconda installation:
+
+    conda config --add channels conda-forge
+
+Afterwards run (from the PyLoT main directory where the files *requirements.txt* and *pylot.yml* are located)
+
+    conda env create -f pylot.yml
+
+or
+
+    conda create -c conda-forge --name pylot_311 python=3.11 --file requirements.txt
+
+to create a new Anaconda environment called *pylot_311*.
+
+Afterwards activate the environment by typing
+
+    conda activate pylot_311
 
 #### Prerequisites:
 
 In order to run PyLoT you need to install:
 
-- python 2 or 3
+- Python 3
+- cartopy
+- joblib
+- obspy
+- pyaml
+- pyqtgraph
+- pyside2
+
+(the following are already dependencies of the above packages):
 - scipy
 - numpy
 - matplotlib
-- obspy
-- pyside
 
 #### Some handwork:
 
 PyLoT needs a properties folder on your system to work. It should be situated in your home directory
+(on Windows usually C:/Users/*username*):
 
     mkdir ~/.pylot
 
@@ -53,7 +73,8 @@ In the next step you have to copy some files to this directory:
 
     cp path-to-pylot/inputs/pylot_global.in ~/.pylot/pylot.in
 
-and some extra information on error estimates (just needed for reading old PILOT data) and the Richter magnitude scaling relation
+and some extra information on error estimates (just needed for reading old PILOT data) and the Richter magnitude scaling
+relation
 
     cp path-to-pylot/inputs/PILOT_TimeErrors.in path-to-pylot/inputs/richter_scaling.data ~/.pylot/
 
@@ -61,50 +82,32 @@ You may need to do some modifications to these files. Especially folder names sh
 
 PyLoT has been tested on Mac OSX (10.11), Debian Linux 8 and on Windows 10.
 
 
 ## Release notes
 
 #### Features:
 
-- centralize all functionalities of PyLoT and control them from within the main GUI
-- handling multiple events inside GUI with project files (save and load work progress)
-- GUI based adjustments of pick parameters and I/O
-- interactive tuning of parameters from within the GUI
-- call automatic picking algorithm from within the GUI
-- comparison of automatic with manual picks for multiple events using clear differentiation of manual picks into 'tune' and 'test-set' (beta)
-- manual picking of different (user defined) phase types
-- phase onset estimation with ObsPy TauPy
-- interactive zoom/scale functionalities in all plots (mousewheel, pan, pan-zoom)
-- array map to visualize stations and control onsets (beta feature, switch to manual picks not implemented)
+- event organisation in project files and waveform visualisation
+- consistent manual phase picking through predefined SNR dependant zoom level
+- consistent automatic phase picking routines using Higher Order Statistics, AIC and Autoregression
+- interactive tuning of auto-pick parameters
+- uniform uncertainty estimation from waveform's properties for automatic and manual picks
+- pdf representation and comparison of picks taking the uncertainty intrinsically into account
+- Richter and moment magnitude estimation
+- location determination with external installation of [NonLinLoc](http://alomax.free.fr/nlloc/index.html)
 
-##### Platform support:
-- Python 3 support
-- Windows support
+#### Known issues:
 
-##### Performance:
-- multiprocessing for automatic picking and restitution of multiple stations
-- use pyqtgraph library for better performance on main waveform plot
-
-##### Visualization:
-- pick uncertainty (quality classes) visualization with gradients
-- pick color unification for all plots
-- new icons and stylesheets
-
-#### Known Issues:
-- some Qt related errors might occur at runtime
-- filter toggle not working in pickDlg
-- PyLoT data structure requires at least three parent directories for waveform data directory
 We hope to solve these with the next release.
 
 ## Staff
 
-Original author(s): L. Kueperkoch, S. Wehling-Benatelli, M. Bischoff (PILOT)
+Original author(s): M. Rische, S. Wehling-Benatelli, L. Kueperkoch, M. Bischoff (PILOT)
 
-Developer(s): S. Wehling-Benatelli, L. Kueperkoch, M. Paffrath, K. Olbert,
-M. Bischoff, C. Wollin, M. Rische
+Developer(s): S. Wehling-Benatelli, M. Paffrath, L. Kueperkoch, K. Olbert, M. Bischoff, C. Wollin, M. Rische, D. Arnold, K. Cökerim, S. Zimmermann
 
 Others: A. Bruestle, T. Meier, W. Friederich
 
 
 [ObsPy]: http://github.com/obspy/obspy/wiki
 
-September 2017
+August 2024
autoPyLoT.py
@@ -7,6 +7,10 @@ import argparse
 import datetime
 import glob
 import os
+import traceback
+
+from obspy import read_events
+from obspy.core.event import ResourceIdentifier
 
 import pylot.core.loc.focmec as focmec
 import pylot.core.loc.hash as hash
@@ -15,18 +19,16 @@ import pylot.core.loc.hypodd as hypodd
 import pylot.core.loc.hyposat as hyposat
 import pylot.core.loc.nll as nll
 import pylot.core.loc.velest as velest
-from obspy import read_events
-from obspy.core.event import ResourceIdentifier
-# from PySide.QtGui import QWidget, QInputDialog
 from pylot.core.analysis.magnitude import MomentMagnitude, LocalMagnitude
 from pylot.core.io.data import Data
 from pylot.core.io.inputs import PylotParameter
 from pylot.core.pick.autopick import autopickevent, iteratepicker
-from pylot.core.util.dataprocessing import restitute_data, read_metadata
+from pylot.core.util.dataprocessing import restitute_data, Metadata
 from pylot.core.util.defaults import SEPARATOR
 from pylot.core.util.event import Event
 from pylot.core.util.structure import DATASTRUCTURE
-from pylot.core.util.utils import real_None, remove_underscores, trim_station_components, check4gaps, check4doubled, \
+from pylot.core.util.utils import get_none, trim_station_components, check4gapsAndRemove, check4doubled, \
     check4rotated
 from pylot.core.util.version import get_git_version as _getVersionString
 
@@ -34,19 +36,34 @@ __version__ = _getVersionString()
 
 
 def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, eventid=None, savepath=None,
-              savexml=True, station='all', iplot=0, ncores=0):
+              savexml=True, station='all', iplot=0, ncores=0, obspyDMT_wfpath=False):
     """
     Determine phase onsets automatically utilizing the automatic picking
    algorithms by Kueperkoch et al. 2010/2012.
 
-    :param inputfile: path to the input file containing all parameter
-    information for automatic picking (for formatting details, see.
-    `~pylot.core.io.inputs.PylotParameter`
+    :param obspyDMT_wfpath: if obspyDMT is used, name of data directory ("raw" or "processed")
     :param input_dict:
     :type input_dict:
     :param parameter: PylotParameter object containing parameters used for automatic picking
     :type parameter: pylot.core.io.inputs.PylotParameter
+    :param inputfile: path to the input file containing all parameter information for automatic picking
+    (for formatting details, see. `~pylot.core.io.inputs.PylotParameter`
+    :type inputfile: str
-    :return:
-
-    .. rubric:: Example
-
+    :param fnames: list of data file names or None when called from GUI
+    :type fnames: str
+    :param eventid: event path incl. event ID (path to waveform files)
+    :type eventid: str
+    :param savepath: save path for autoPyLoT output, if None/"None" output will be saved in event folder
+    :type savepath: str
+    :param savexml: export results in XML file if True
+    :type savexml: bool
+    :param station: choose specific station name or 'all' to pick all stations
+    :type station: str
+    :param iplot: logical variable for plotting: 0=none, 1=partial, 2=all
+    :type iplot: int
+    :param ncores: number of cores used for parallel processing. Default (0) uses all available cores
+    :type ncores: int
+    :return: dictionary containing picks
+    :rtype: dict
     """
 
     if ncores == 1:
@@ -64,7 +81,8 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
    Version {version} 2017\n
    \n
    Authors:\n
-    L. Kueperkoch (BESTEC GmbH, Landau i. d. Pfalz)\n
+    L. Kueperkoch (BESTEC GmbH, Landau i. d. Pfalz, \n
+    now at igem GmbH, Mainz)
    M. Paffrath (Ruhr-Universitaet Bochum)\n
    S. Wehling-Benatelli (Ruhr-Universitaet Bochum)\n
 
@@ -73,15 +91,13 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
                                sp=sp_info)
     print(splash)
 
-
-    parameter = real_None(parameter)
-    inputfile = real_None(inputfile)
-    eventid = real_None(eventid)
+    parameter = get_none(parameter)
+    inputfile = get_none(inputfile)
+    eventid = get_none(eventid)
 
     fig_dict = None
     fig_dict_wadatijack = None
-
     locflag = 1
     if input_dict and isinstance(input_dict, dict):
         if 'parameter' in input_dict:
             parameter = input_dict['parameter']
@@ -97,19 +113,15 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
             eventid = input_dict['eventid']
         if 'iplot' in input_dict:
             iplot = input_dict['iplot']
-        if 'locflag' in input_dict:
-            locflag = input_dict['locflag']
         if 'savexml' in input_dict:
             savexml = input_dict['savexml']
+        if 'obspyDMT_wfpath' in input_dict:
+            obspyDMT_wfpath = input_dict['obspyDMT_wfpath']
 
     if not parameter:
-        if inputfile:
-            parameter = PylotParameter(inputfile)
-            #iplot = parameter['iplot']
-        else:
-            infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
-            print('Using default input file {}'.format(infile))
-            parameter = PylotParameter(infile)
+        if not inputfile:
+            print('Using default input parameter')
+        parameter = PylotParameter(inputfile)
     else:
         if not type(parameter) == PylotParameter:
             print('Wrong input type for parameter: {}'.format(type(parameter)))
@@ -124,21 +136,20 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
     if parameter.hasParam('datastructure'):
         # getting information on data structure
         datastructure = DATASTRUCTURE[parameter.get('datastructure')]()
-        dsfields = {'root': parameter.get('rootpath'),
-                    'dpath': parameter.get('datapath'),
-                    'dbase': parameter.get('database')}
+        dsfields = {'dpath': parameter.get('datapath'),}
 
-        exf = ['root', 'dpath', 'dbase']
+        exf = ['dpath']
 
-        if parameter['eventID'] is not '*' and fnames == 'None':
+        if parameter['eventID'] != '*' and fnames == 'None':
             dsfields['eventID'] = parameter['eventID']
             exf.append('eventID')
 
         datastructure.modifyFields(**dsfields)
         datastructure.setExpandFields(exf)
 
-    # check if default location routine NLLoc is available
-    if real_None(parameter['nllocbin']) and locflag:
+    # check if default location routine NLLoc is available and all stations are used
+    if get_none(parameter['nllocbin']) and station == 'all':
         locflag = 1
         # get NLLoc-root path
         nllocroot = parameter.get('nllocroot')
         # get path to NLLoc executable
@@ -154,7 +165,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
         ttpat = parameter.get('ttpatter')
         # pattern of NLLoc-output file
         nllocoutpatter = parameter.get('outpatter')
-        maxnumit = 3  # maximum number of iterations for re-picking
+        maxnumit = 2  # maximum number of iterations for re-picking
     else:
         locflag = 0
         print(" !!! ")
@@ -162,35 +173,41 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
         print("!!No source parameter estimation possible!!")
         print(" !!! ")
 
+    wfpath_extension = ''
+    if obspyDMT_wfpath not in [None, False, 'False', '']:
+        wfpath_extension = obspyDMT_wfpath
+        print('Using obspyDMT structure. There will be no restitution, as pre-processed data are expected.')
+        if wfpath_extension != 'processed':
+            print('WARNING: Expecting wfpath_extension to be "processed" for'
+                  ' pre-processed data but received "{}" instead!!!'.format(wfpath_extension))
+
     if not input_dict:
         # started in production mode
         datapath = datastructure.expandDataPath()
-        if fnames == 'None' and parameter['eventID'] is '*':
+        if fnames in [None, 'None'] and parameter['eventID'] == '*':
             # multiple event processing
             # read each event in database
-            events = [events for events in glob.glob(os.path.join(datapath, '*')) if os.path.isdir(events)]
-        elif fnames == 'None' and parameter['eventID'] is not '*' and not type(parameter['eventID']) == list:
+            events = [event for event in glob.glob(os.path.join(datapath, '*')) if
+                      (os.path.isdir(event) and not event.endswith('EVENTS-INFO'))]
+        elif fnames in [None, 'None'] and parameter['eventID'] != '*' and not type(parameter['eventID']) == list:
             # single event processing
             events = glob.glob(os.path.join(datapath, parameter['eventID']))
-        elif fnames == 'None' and type(parameter['eventID']) == list:
+        elif fnames in [None, 'None'] and type(parameter['eventID']) == list:
             # multiple event processing
             events = []
             for eventID in parameter['eventID']:
                 events.append(os.path.join(datapath, eventID))
         else:
             # autoPyLoT was initialized from GUI
-            events = []
-            events.append(eventid)
+            events = [eventid]
             evID = os.path.split(eventid)[-1]
             locflag = 2
     else:
         # started in tune or interactive mode
-        datapath = os.path.join(parameter['rootpath'],
-                                parameter['datapath'])
+        datapath = parameter['datapath']
         events = []
         for eventID in eventid:
             events.append(os.path.join(datapath,
-                                       parameter['database'],
                                        eventID))
 
     if not events:
@@ -204,8 +221,12 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 
     allpicks = {}
     glocflag = locflag
-    for eventpath in events:
+
+    nEvents = len(events)
+    for index, eventpath in enumerate(events):
+        print('Working on: {} ({}/{})'.format(eventpath, index + 1, nEvents))
         evID = os.path.split(eventpath)[-1]
+        event_datapath = os.path.join(eventpath, wfpath_extension)
         fext = '.xml'
         filename = os.path.join(eventpath, 'PyLoT_' + evID + fext)
         try:
@@ -213,18 +234,21 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
             data.get_evt_data().path = eventpath
             print('Reading event data from filename {}...'.format(filename))
         except Exception as e:
-            print('Could not read event from file {}: {}'.format(filename, e))
+            if type(e) == FileNotFoundError:
+                print('Creating new event file.')
+            else:
+                print('Could not read event from file {}: {}'.format(filename, e))
             data = Data()
             pylot_event = Event(eventpath)  # event should be path to event directory
             data.setEvtData(pylot_event)
-        if fnames == 'None':
-            data.setWFData(glob.glob(os.path.join(datapath, eventpath, '*')))
+        if fnames in [None, 'None']:
+            data.setWFData(glob.glob(os.path.join(event_datapath, '*')))
             # the following is necessary because within
             # multiple event processing no event ID is provided
             # in autopylot.in
             try:
                 parameter.get('eventID')
-            except:
+            except Exception:
                 now = datetime.datetime.now()
                 eventID = '%d%02d%02d%02d%02d' % (now.year,
                                                   now.month,
@@ -249,21 +273,29 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
             if not wfdat:
                 print('Could not find station {}. STOP!'.format(station))
                 return
-            wfdat = remove_underscores(wfdat)
+            # wfdat = remove_underscores(wfdat)
             # trim components for each station to avoid problems with different trace starttimes for one station
-            wfdat = check4gaps(wfdat)
+            wfdat = check4gapsAndRemove(wfdat)
             wfdat = check4doubled(wfdat)
             wfdat = trim_station_components(wfdat, trim_start=True, trim_end=False)
-            metadata = read_metadata(parameter.get('invdir'))
-            # rotate stations to ZNE
-            wfdat = check4rotated(wfdat, metadata)
+            if not wfpath_extension:
+                metadata = Metadata(parameter.get('invdir'))
+            else:
+                metadata = Metadata(os.path.join(eventpath, 'resp'))
             corr_dat = None
-            if locflag:
-                print("Restitute data ...")
-                corr_dat = restitute_data(wfdat.copy(), *metadata, ncores=ncores)
+            if metadata:
+                # rotate stations to ZNE
+                try:
+                    wfdat = check4rotated(wfdat, metadata)
+                except Exception as e:
+                    print('Could not rotate station {} to ZNE:\n{}'.format(wfdat[0].stats.station,
+                                                                           traceback.format_exc()))
+                if locflag:
+                    print("Restitute data ...")
+                    corr_dat = restitute_data(wfdat.copy(), metadata, ncores=ncores)
             if not corr_dat and locflag:
                 locflag = 2
-            print('Working on event %s. Stations: %s' % (eventpath, station))
+            print('Stations: %s' % (station))
             print(wfdat)
             ##########################################################
             # !automated picking starts here!
@@ -286,7 +318,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
                                     ttpat)
 
             # locate the event
-            nll.locate(ctrfile, inputfile)
+            nll.locate(ctrfile, parameter)
 
             # !iterative picking if traces remained unpicked or occupied with bad picks!
             # get theoretical onset times for picks with weights >= 4
@@ -318,13 +350,15 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
                        picks[stats]['P'].update(props)
                evt = moment_mag.updated_event()
                net_mw = moment_mag.net_magnitude()
-                print("Network moment magnitude: %4.1f" % net_mw.mag)
+                if net_mw is not None:
+                    print("Network moment magnitude: %4.1f" % net_mw.mag)
                # calculate local (Richter) magntiude
                WAscaling = parameter.get('WAscaling')
                magscaling = parameter.get('magscaling')
                local_mag = LocalMagnitude(corr_dat, evt,
                                           parameter.get('sstop'),
                                           WAscaling, True, iplot)
                # update pick with local magnitude property values
                for stats, amplitude in local_mag.amplitudes.items():
                    picks[stats]['S']['Ao'] = amplitude.generic_amplitude
                print("Local station magnitudes scaled with:")
@@ -333,9 +367,17 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
                                                               WAscaling[2]))
                evt = local_mag.updated_event(magscaling)
                net_ml = local_mag.net_magnitude(magscaling)
-                print("Network local magnitude: %4.1f" % net_ml.mag)
-                print("Network local magnitude scaled with:")
-                print("%f * Ml + %f" % (magscaling[0], magscaling[1]))
+                if net_ml:
+                    print("Network local magnitude: %4.1f" % net_ml.mag)
+                if magscaling is None:
+                    scaling = False
+                elif magscaling[0] != 0 and magscaling[1] != 0:
+                    scaling = False
+                else:
+                    scaling = True
+                if scaling:
+                    print("Network local magnitude scaled with:")
+                    print("%f * Ml + %f" % (magscaling[0], magscaling[1]))
            else:
                print("autoPyLoT: No NLLoc-location file available!")
                print("No source parameter estimation possible!")
@@ -365,7 +407,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
                        # remove actual NLLoc-location file to keep only the last
                        os.remove(nllocfile)
                    # locate the event
-                    nll.locate(ctrfile, inputfile)
+                    nll.locate(ctrfile, parameter)
                    print("autoPyLoT: Iteration No. %d finished." % nlloccounter)
                    # get updated NLLoc-location file
                    nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
@@ -388,19 +430,21 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
                                                 iplot)
                    # update pick with moment property values (w0, fc, Mo)
                    for stats, props in moment_mag.moment_props.items():
-                        if picks.has_key(stats):
+                        if stats in picks:
                            picks[stats]['P'].update(props)
                    evt = moment_mag.updated_event()
                    net_mw = moment_mag.net_magnitude()
-                    print("Network moment magnitude: %4.1f" % net_mw.mag)
+                    if net_mw is not None:
+                        print("Network moment magnitude: %4.1f" % net_mw.mag)
                    # calculate local (Richter) magntiude
                    WAscaling = parameter.get('WAscaling')
                    magscaling = parameter.get('magscaling')
                    local_mag = LocalMagnitude(corr_dat, evt,
                                               parameter.get('sstop'),
                                               WAscaling, True, iplot)
                    # update pick with local magnitude property values
                    for stats, amplitude in local_mag.amplitudes.items():
-                        if picks.has_key(stats):
+                        if stats in picks:
                            picks[stats]['S']['Ao'] = amplitude.generic_amplitude
                    print("Local station magnitudes scaled with:")
                    print("log(Ao) + %f * log(r) + %f * r + %f" % (WAscaling[0],
@@ -408,9 +452,17 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
                                                                   WAscaling[2]))
                    evt = local_mag.updated_event(magscaling)
                    net_ml = local_mag.net_magnitude(magscaling)
-                    print("Network local magnitude: %4.1f" % net_ml.mag)
-                    print("Network local magnitude scaled with:")
-                    print("%f * Ml + %f" % (magscaling[0], magscaling[1]))
+                    if net_ml:
+                        print("Network local magnitude: %4.1f" % net_ml.mag)
+                    if magscaling is None:
+                        scaling = False
+                    elif magscaling[0] != 0 and magscaling[1] != 0:
+                        scaling = False
+                    else:
+                        scaling = True
+                    if scaling:
+                        print("Network local magnitude scaled with:")
+                        print("%f * Ml + %f" % (magscaling[0], magscaling[1]))
                else:
                    print("autoPyLoT: No NLLoc-location file available! Stop iteration!")
                    locflag = 9
@@ -424,11 +476,11 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
        data.applyEVTData(evt, 'event')
        data.applyEVTData(picks)
        if savexml:
-            if savepath == 'None' or savepath == None:
+            if savepath == 'None' or savepath is None:
                saveEvtPath = eventpath
            else:
                saveEvtPath = savepath
-            fnqml = '%s/PyLoT_%s' % (saveEvtPath, evID)
+            fnqml = '%s/PyLoT_%s_autopylot' % (saveEvtPath, evID)
            data.exportEvent(fnqml, fnext='.xml', fcheck=['auto', 'magnitude', 'origin'])
        if locflag == 1:
            # HYPO71
@@ -482,9 +534,9 @@ if __name__ == "__main__":
                        action='store',
                        help='''full path to the file containing the input
                        parameters for autoPyLoT''')
-    parser.add_argument('-p', '-P', '--iplot', type=int,
-                        action='store',
-                        help='''optional, logical variable for plotting: 0=none, 1=partial, 2=all''')
+    parser.add_argument('-p', '-P', '--iplot', type=int,
+                        action='store', default=0,
+                        help='''optional, logical variable for plotting: 0=none, 1=partial, 2=all''')
    parser.add_argument('-f', '-F', '--fnames', type=str,
                        action='store',
                        help='''optional, list of data file names''')
@@ -497,9 +549,12 @@ if __name__ == "__main__":
    parser.add_argument('-c', '-C', '--ncores', type=int,
                        action='store', default=0,
                        help='''optional, number of CPU cores used for parallel processing (default: all available(=0))''')
+    parser.add_argument('-dmt', '-DMT', '--obspy_dmt_wfpath', type=str,
+                        action='store', default=False,
+                        help='''optional, wftype (raw, processed) used for obspyDMT database structure''')
 
    cla = parser.parse_args()
 
    picks = autoPyLoT(inputfile=str(cla.inputfile), fnames=str(cla.fnames),
                      eventid=str(cla.eventid), savepath=str(cla.spath),
-                      ncores=cla.ncores, iplot=int(cla.iplot))
+                      ncores=cla.ncores, iplot=int(cla.iplot), obspyDMT_wfpath=str(cla.obspy_dmt_wfpath))
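The diff above threads the new `obspyDMT_wfpath` keyword from the CLI flag through to the `autoPyLoT` entry point. A minimal sketch of calling the extended function directly from Python, assuming the PyLoT root directory is on the Python path (the input-file path and core count are placeholders, not values from this commit):

```python
from autoPyLoT import autoPyLoT

# Pick all events found below the configured datapath; waveforms are read
# from each event's obspyDMT "processed" subdirectory.
picks = autoPyLoT(inputfile='/home/user/.pylot/pylot.in',
                  ncores=4, obspyDMT_wfpath='processed')
```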
autopylot.sh (new file)
@@ -0,0 +1,12 @@
#!/bin/bash

#$ -l low
#$ -cwd
#$ -pe smp 40
##$ -l mem=3G
#$ -l h_vmem=6G
#$ -l os=*stretch

conda activate pylot_311

python ./autoPyLoT.py -i /home/marcel/.pylot/pylot_adriaarray.in -c 20 -dmt processed
docs/gui.md (new file)
@@ -0,0 +1,472 @@
# PyLoT Documentation

- [PyLoT Documentation](#pylot-documentation)
    - [PyLoT GUI](#pylot-gui)
        - [First start](#first-start)
        - [Main Screen](#main-screen)
            - [Waveform Plot](#waveform-plot)
                - [Mouse view controls](#mouse-view-controls)
                - [Buttons](#buttons)
            - [Array Map](#array-map)
            - [Eventlist](#eventlist)
        - [Usage](#usage)
            - [Projects and Events](#projects-and-events)
            - [Event folder structure](#event-folder-structure)
            - [Loading event information from CSV file](#loading-event-information-from-csv-file)
            - [Adding events to project](#adding-events-to-project)
            - [Saving projects](#saving-projects)
            - [Adding metadata](#adding-metadata)
    - [Picking](#picking)
        - [Manual Picking](#manual-picking)
            - [Picking window](#picking-window)
                - [Picking Window Settings](#picking-window-settings)
            - [Filtering](#filtering)
            - [Export and Import of manual picks](#export-and-import-of-manual-picks)
                - [Export](#export)
                - [Import](#import)
        - [Automatic Picking](#automatic-picking)
            - [Tuning](#tuning)
            - [Production run of the autopicker](#production-run-of-the-autopicker)
            - [Evaluation of automatic picks](#evaluation-of-automatic-picks)
                - [1. Jackknife check](#1-jackknife-check)
                - [2. Wadati check](#2-wadati-check)
            - [Comparison between automatic and manual picks](#comparison-between-automatic-and-manual-picks)
            - [Export and Import of automatic picks](#export-and-import-of-automatic-picks)
        - [Location determination](#location-determination)
    - [FAQ](#faq)
# PyLoT GUI

This section describes how to use PyLoT graphically to view waveforms and create manual or automatic picks.

## First start

After opening PyLoT for the first time, the setup routine asks for the following information:

Questions:

1. Full Name
2. Authority: Enter authority/institution name
3. Format: Enter output format (*.xml, *.cnv, *.obs)

[//]: <> (TODO: explain what these things mean, where they are used)

## Main Screen

After entering the [information](#first-start), PyLoT's main window is shown. It defaults to a view of
the [Waveform Plot](#waveform-plot), which starts empty.

<img src=images/gui/pylot-main-screen.png alt="Tune autopicks button" title="Tune autopicks button">

Add trace data by [loading a project](#projects-and-events) or by [adding event data](#adding-events-to-project).

### Waveform Plot

The waveform plot shows a trace list of all stations of an event.
Click on any trace to open the station's [picking window](#picking-window), where you can review automatic and manual
picks.

<img src=images/gui/pylot-waveform-plot.png alt="A Waveform Plot showing traces of one event">

Above the traces the currently displayed event can be selected. In the bottom bar, information about the trace under the
mouse cursor is shown. This information includes the station name (station), the absolute UTC time (T) of the point
under the mouse cursor and the relative time since the first trace start in seconds (t), as well as a trace count.

#### Mouse view controls

Hold the left mouse button and drag to pan the view.

Hold the right mouse button and move the mouse:

| Direction            | Result                   |
|----------------------|--------------------------|
| Move the mouse up    | Increase amplitude scale |
| Move the mouse down  | Decrease amplitude scale |
| Move the mouse right | Increase time scale      |
| Move the mouse left  | Decrease time scale      |

Press the right mouse button and click "View All" from the context menu to reset the view.

#### Buttons

[//]: <> (Hack: We need these invisible spaces to add space to the first column, otherwise )

| Icon | Description |
|------|-------------|
| <img src="../icons/newfile.png" alt="Create new project" width="64" height="64"> | Create a new project, for more information about projects see [Projects and Events](#projects-and-events). |
| <img src="../icons/openproject.png" alt="Open project" width="64" height="64"> | Load a project file from disk. |
| <img src="../icons/saveproject.png" alt="Save Project" width="64" height="64"> | Save all current events into an associated project file on disk. If there is no project file currently associated, you will be asked to create a new one. |
| <img src="../icons/saveprojectas.png" alt="Save Project as" width="64" height="64"> | Save all current events into a new project file on disk. See [Saving projects](#saving-projects). |
| <img src="../icons/add.png" alt="Add event data" width="64" height="64"> | Add event data by selecting directories containing waveforms. For more information see [Event folder structure](#event-folder-structure). |
| <img src="../icons/openpick.png" alt="Load event information" width="64" height="64"> | Load picks/origins from disk into the currently displayed event. If a pick already exists for a station, the one from file will overwrite the existing one. |
| <img src="../icons/openpicks.png" alt="Load information for all events" width="64" height="64"> | Load picks/origins for all events of the current project. PyLoT searches for files within the directory of the event and tries to load them for that event. For this function to work, the files containing picks/origins have to be named as described in [Event folder structure](#event-folder-structure). If a pick already exists for a station, the one from file will overwrite the existing one. |
| <img src="../icons/savepicks.png" alt="Save picks" width="64" height="64"> | Save event information such as picks and origin to file. You will be asked to select a directory in which this information should be saved. |
| <img src="../icons/openloc.png" alt="Load location information" width="64" height="64"> | Load location information from disk. |
| <img src="../icons/Matlab_PILOT_icon.png" alt="Load legacy information" width="64" height="64"> | Load event information from a previous, MatLab based PILOT version. |
| <img src="../icons/key_Z.png" alt="Display Z" width="64" height="64"> | Display Z component of streams in waveform plot. |
| <img src="../icons/key_N.png" alt="Display N" width="64" height="64"> | Display N component of streams in waveform plot. |
| <img src="../icons/key_E.png" alt="Display E" width="64" height="64"> | Display E component of streams in waveform plot. |
| <img src="../icons/tune.png" alt="Tune Autopicker" width="64" height="64"> | Open the [Tune Autopicker window](#tuning). |
| <img src="../icons/autopylot_button.png" alt="" width="64" height="64"> | Opens a window that allows starting the autopicker for all events ([Production run of the autopicker](#production-run-of-the-autopicker)). |
| <img src="../icons/compare_button.png" alt="Comparison" width="64" height="64"> | Compare automatic and manual picks, only available if automatic and manual picks for an event exist. See [Comparison between automatic and manual picks](#comparison-between-automatic-and-manual-picks). |
| <img src="../icons/locate_button.png" alt="Locate event" width="64" height="64"> | Run a location routine (NonLinLoc) as configured in the settings on the picks. See [Location determination](#location-determination). |

### Array Map

The array map will display a color diagram to allow a visual check of the consistency of picks across multiple stations.
This works by calculating the time difference of every onset to the earliest onset. Then isolines are drawn between
stations with the same time difference and the areas between isolines are colored.
The result should resemble a color gradient as the wavefront rolls over the network area. Stations where picks are
earlier/later than their neighbours can be reviewed by clicking on them, which opens
the [picking window](#picking-window).

Above the Array Map the picks that are used to create the map can be customized. The phase of picks that should be used
can be selected, which allows checking the consistency of the P- and S-phase separately. Additionally the pick type can
be set to manual, automatic or hybrid, meaning display only manual picks, only automatic picks, or automatic
picks for stations where there are no manual ones.

![Array Map](images/gui/arraymap-example.png "Array Map")
*Array Map for an event at the Northern Mid Atlantic Ridge, between North Africa and Mexico (Lat. 22.58, Lon. -45.11).
The wavefront moved from west to east over the network area (Alps and Balkan region), with the earliest onsets in blue
in the west.*

To be able to display an array map PyLoT needs to load an inventory file, where the metadata of seismic stations is
kept. For more information see [Metadata](#adding-metadata). Additionally, automatic or manual picks need to exist for
the current event.

### Eventlist

The eventlist displays event parameters. The displayed parameters are saved in the .xml file in the event folder. Events
can be deleted from the project by pressing the red X in the leftmost column of the corresponding event.

<img src="images/gui/eventlist.png" alt="Eventlist">

| Column     | Description |
|------------|-------------|
| Event      | Full path to the event's folder. |
| Time       | Time of event. |
| Lat        | Latitude in degrees of event location. |
| Lon        | Longitude in degrees of event location. |
| Depth      | Depth in km of event. |
| Mag        | Magnitude of event. |
| [N] MP     | Number of manual picks. |
| [N] AP     | Number of automatic picks. |
| Tuning Set | Select whether this event is a Tuning event. See [Automatic Picking](#automatic-picking). |
| Test Set   | Select whether this event is a Test event. See [Automatic Picking](#automatic-picking). |
| Notes      | Free form text field for notes regarding this event. Text will be saved in the notes.txt file in the event folder. |

## Usage

### Projects and Events

PyLoT uses projects to categorize different seismic data. A project consists of one or multiple events. Events contain
seismic traces from one or multiple stations. An event also contains further information, e.g. origin time, source
parameters and automatic as well as manual picks. Projects are used to group events which should be analysed together. A
project could contain all events from a specific region within a timeframe of interest or all recorded events of a
seismological experiment.
### Event folder structure

PyLoT expects the following folder structure for seismic data:

* Every event should be in its own folder with the following naming scheme for the folders:
  ``e[id].[doy].[yy]``, where ``[id]`` is a four-digit numerical id increasing from 0001, ``[doy]`` the three digit day
  of year and ``[yy]`` the last two digits of the year of the event. This structure has to be created by the user of
  PyLoT manually (a small helper for building such names is sketched below this list).
* These folders should contain the seismic data for their event as ``.mseed`` or another supported filetype.
* All automatic and manual picks should be in an ``.xml`` file in their event folder. PyLoT saves picks in this file.
  This file does not have to be added manually unless there are picks to be imported. The format used to save picks is
  QUAKEML.
  Picks are saved in a file with the same filename as the event folder with ``PyLoT_`` prepended.
* The file ``notes.txt`` is used for saving analysts' comments. Everything saved here will be displayed in the 'Notes'
  column of the eventlist.
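The naming scheme is easy to generate programmatically. A minimal sketch (not part of PyLoT) that builds a folder name from an event number and origin time:

```python
from datetime import datetime

def event_folder_name(event_no: int, origin_time: datetime) -> str:
    """Build an event folder name of the form e[id].[doy].[yy]."""
    return 'e{:04d}.{:03d}.{:02d}'.format(event_no,
                                          origin_time.timetuple().tm_yday,
                                          origin_time.year % 100)

print(event_folder_name(1, datetime(2016, 1, 24)))  # -> e0001.024.16
```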
### Loading event information from CSV file

Event information can be saved in a ``.csv`` file located in the rootpath. The file is made from one header line, which
is followed by one or multiple data lines. Values are separated by commas, while a dot is used as a decimal separator.
This information is then shown in the table in the [Eventlist tab](#eventlist).

One example header and data line is shown below.
```event,Date,Time,Magnitude,Lat,Long,Depth,Region,Basis Lat,Basis Long,Distance [km],Distance [rad],Distance [deg]```
```e0001.024.16,24/01/16,10:30:30,7.1,59.66,-153.45,128,Southern Alaska,46.62,10.26,8104.65,1.27,72.89,7.1```

The meaning of the header entries is:

| Header               | Description |
|----------------------|-------------|
| event                | Event id, has to be the same as the folder name in which waveform data for this event is kept. |
| Date                 | Origin date of the event, format DD/MM/YY or DD/MM/YYYY. |
| Time                 | Origin time of the event. Format HH:MM:SS. |
| Lat, Long            | Origin latitude and longitude in decimal degrees. |
| Region               | Flinn-Engdahl region name. |
| Basis Lat, Basis Lon | Latitude and longitude of the basis of the station network in decimal degrees. |
| Distance [km]        | Distance from origin coordinates to basis coordinates in km. |
| Distance [rad]       | Distance from origin coordinates to basis coordinates in rad. |
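Such a file can be read with Python's standard ``csv`` module. A minimal sketch (the file name is an example):

```python
import csv

# Read the eventlist CSV described above and print a few columns per event.
with open('eventlist.csv', newline='') as f:
    for row in csv.DictReader(f):
        print(row['event'], row['Date'], row['Time'],
              float(row['Magnitude']), float(row['Lat']), float(row['Long']))
```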
### Adding events to project

PyLoT GUI starts with an empty project. To add events, use the add event data button. Select one or multiple folders
containing events.

### Saving projects

Save the current project from the menu with File->Save project or File->Save project as. PyLoT uses ``.plp`` files to
save project information. This file format is not interchangeable between different versions of Python interpreters.
Saved projects contain the automatic and manual picks. Seismic trace data is not included in the ``.plp`` file, but is
read from the location that was used when saving the file.
### Adding metadata

[//]: <> (TODO: Add picture of metadata "manager" when it is done)

PyLoT can handle ``.dless``, ``.xml``, ``.resp`` and ``.dseed`` file formats for metadata. Metadata files stored on disk
can be added to a project by clicking *Edit*->*Manage Inventories*. This opens up a window where the folders which
contain metadata files can be selected. PyLoT will then search these files for the station names when it needs the
information.
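These formats can also be inspected directly with ObsPy, on which PyLoT builds. A minimal sketch (the file name and SEED id are placeholders):

```python
from obspy import read_inventory

# Station metadata as used by PyLoT, e.g. a StationXML inventory file.
inv = read_inventory('inventory.xml')
print(inv)  # networks, stations and channels contained in the file
print(inv.get_coordinates('GR.GRA1..BHZ'))  # coordinates for one channel
```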
# Picking

PyLoT's automatic and manual pick determination works as follows:

* Using certain parameters, a first initial/coarse pick is determined. The first manual pick is determined by visual
  review of the whole waveform and selection of the most likely onset by the analyst. The first automatic pick is
  determined by calculation of a characteristic function (CF) for the seismic trace. When a wave arrives, the CF's
  properties change, which is identified as the signal's onset.
* Afterwards, a refined set of parameters is applied to a small part of the waveform around the initial onset. For
  manual picks this means a closer view of the trace, for automatic picks this is done by a recalculated CF with
  different parameters.
* This second picking phase results in the precise pick, which is treated as the onset time. A toy illustration of a
  CF-based onset estimate follows below this list.
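As an illustration of the CF idea, here is a generic AIC picker after Maeda; this is a sketch only, not PyLoT's actual HOS/AR implementation:

```python
import numpy as np

def aic_cf(x):
    """Generic AIC characteristic function; its minimum estimates the onset."""
    x = np.asarray(x, dtype=float)
    n = len(x)
    cf = np.full(n, np.nan)
    for k in range(1, n - 1):
        var1, var2 = x[:k].var(), x[k:].var()
        if var1 > 0 and var2 > 0:
            cf[k] = k * np.log(var1) + (n - k - 1) * np.log(var2)
    return cf

# Synthetic trace: noise followed by a stronger signal starting at sample 200.
rng = np.random.default_rng(0)
trace = np.concatenate([0.1 * rng.standard_normal(200),
                        2.0 * rng.standard_normal(100)])
print(int(np.nanargmin(aic_cf(trace))))  # approximately 200
```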
## Manual Picking

To create manual picks, you will need to open or create a project that contains seismic trace data
(see [Adding events to project](#adding-events-to-project)). Click on a trace to open
the [Picking window](#picking-window).

### Picking window

Open the picking window of a station by left-clicking on any trace in the waveform plot. Here you can create manual picks
for the selected station.

<img src="images/gui/picking/pickwindow.png" alt="Picking window">

*Picking window of a station.*

#### Picking Window Settings

| Icon | Shortcut | Menu Alternative | Description |
|------|----------|------------------|-------------|
| <img src="../icons/filter_p.png" alt="Filter P" width="64" height="64"> | p | Filter->Apply P Filter | Filter all channels according to the options specified in Filter parameter, P Filter section. |
| <img src="../icons/filter_s.png" alt="Filter S" width="64" height="64"> | s | Filter->Apply S Filter | Filter all channels according to the options specified in Filter parameter, S Filter section. |
| <img src="../icons/key_A.png" alt="Filter Automatically" width="64" height="64"> | Ctrl + a | Filter->Automatic Filtering | If enabled, automatically select the correct filter option (P, S) depending on the selected phase to be picked. |
| ![desc](images/gui/picking/phase_selection.png "Phase selection") | 1 (P) or 5 (S) | Picks->P or S | Select phase to pick. If Automatic Filtering is enabled, this will apply the appropriate filter depending on the phase. |
| ![Zoom into](../icons/zoom_in.png "Zoom into waveform") | - | - | Zoom into waveform. |
| ![Reset zoom](../icons/zoom_0.png "Reset zoom") | - | - | Reset zoom to default view. |
| ![Delete picks](../icons/delete.png "Delete picks") | - | - | Delete all manual picks on this station. |
| ![Rename a phase](../icons/sync.png "Rename a phase") | - | - | Click this button and then the picked phase to rename it. |
| ![Continue](images/gui/picking/continue.png "Continue with next station") | - | - | If checked, after accepting the manual picks for this station with 'OK', the picking window for the next station will be opened. This option is useful for fast manual picking of a complete event. |
| Estimated onsets | - | - | Show the theoretical onsets for this station. Needs metadata and origin information. |
| Compare to channel | - | - | Select a data channel to compare against. The selected channel will be displayed in the picking window behind every channel, allowing the analyst to visually compare signal correlation between different channels. |
| Scaling | - | - | Individual means every channel is scaled to its own maximum. If a channel is selected here, all channels will be scaled relative to this channel. |

| Menu Command              | Shortcut | Description |
|---------------------------|----------|-------------|
| P Channels and S Channels | - | Select which channels should be treated as P or S channels during picking. When picking a phase, only the corresponding channels will be shown during the precise pick. Normally, the Z channel should be selected for the P phase and the N and E channels for the S phase. |
### Filtering

Access the filter options by pressing Ctrl+f on the waveform plot or via the menu under *Edit*->*Filter Parameter*.

<img src=images/gui/pylot-filter-options.png>

Here you are able to select filter type, order and frequencies for the P and S pick separately. These settings are used
in the GUI for displaying the filtered waveform data and during manual picking. The values used by PyLoT for automatic
picking are displayed next to the manual values. They can be changed in the [Tune Autopicker dialog](#tuning).
A green automatic value means the automatic and manual filter parameters are configured the same, red means they are
configured differently. By toggling the "Overwrite filteroptions" checkmark you can set whether the manual
precise/second pick uses the filter settings for the automatic picker (unchecked) or whether it uses the filter options
in this dialog (checked). To guarantee consistent picking results between automatic and manual picking it is recommended
to use the same filter settings for the determination of automatic and manual picks.
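The same kind of filter can be reproduced outside the GUI with ObsPy. A minimal sketch using a bandpass with example corner frequencies (the values are illustrative, not PyLoT defaults):

```python
from obspy import read

# ObsPy's bundled example data stands in for a real event here.
st = read()
st.filter('bandpass', freqmin=1.0, freqmax=10.0, corners=4, zerophase=False)
print(st)
```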
### Export and Import of manual picks
|
||||
|
||||
#### Export
|
||||
|
||||
After the creation of manual picks they can either be saved in the project file (
|
||||
see [Saving projects](#saving-projects)). Alternatively the picks can be exported by pressing
|
||||
the <img src="../icons/savepicks.png" alt="Save event information button" title="Save picks button" height=24 width=24>
|
||||
button above the waveform plot or in the menu File->Save event information (shortcut Ctrl+p). Select the event directory
|
||||
in which to save the file. The filename will be ``PyLoT_[event_folder_name].[filetype selected during first startup]``
|
||||
.
|
||||
You can rename and copy this file, but PyLoT will then no longer be able to automatically recognize the correct picks
|
||||
for an event and the file will have to be manually selected when loading.
|
||||
|
||||
#### Import
|
||||
|
||||
To import previously saved picks press
|
||||
the <img src="../icons/openpick.png" alt="Load event information button" width="24" height="24"> button and select the
|
||||
file to load. You will be asked to save the current state of your project if you have not done so before. You can
|
||||
continue without saving by pressing "Discard". This does not delete any information from your project, it just means
|
||||
that no project file is saved before the changes of importing picks are applied. PyLoT will automatically load files
|
||||
named after the scheme it uses when saving picks, described in the paragraph above. If it can't find any matching files,
|
||||
a file dialogue will open and you can select the file you wish to load.
|
||||
|
||||
If you see a warning "Mismatch in event identifiers" and are asked whether to continue loading the picks, this means
|
||||
that PyLoT doesn't recognize the picks in the file as belonging to this specific event. They could have either been
|
||||
saved under a different installation of PyLoT but with the same waveform data, which means they are still compatible and
|
||||
you can continue loading them. Or they could be picks from a different event, in which case loading them is not
|
||||
recommended.
|
||||
|
||||
## Automatic Picking
|
||||
|
||||
The general workflow for automatic picking is as following:
|
||||
|
||||
- After setting up the project by loading waveforms and optionally metadata, the right parameters for the autopicker
|
||||
have to be determined
|
||||
- This [tuning](#tuning) is done for single stations with immediate graphical feedback of all picking results
|
||||
- Afterwards the autopicker can be run for all or a subset of events from the project
|
||||
|
||||
For automatic picking PyLoT discerns between tune and test events, which the user has to set as such. Tune events are
|
||||
used to calibrate the autopicking algorithm, test events are then used to test the calibration. The purpose of that is
|
||||
controlling whether the parameters found during tuning are able to reliably pick the "unknown" test events.
|
||||
If this behaviour is not desired and all events should be handled the same, dont mark any events. Since this is just a
|
||||
way to group events to compare the picking results, nothing else will change.
|
||||
|
||||
### Tuning

Tuning describes the process of adjusting the autopicker settings to the characteristics of your data set. To do this
in PyLoT, use the <img src=../icons/tune.png height=24 alt="Tune autopicks button" title="Tune autopicks button">
button to open the Tune Autopicker.

<img src=images/gui/tuning/tune_autopicker.png>

View of a station in the Tune Autopicker window.

1. Select the event to be displayed and processed.
2. Select the station from the event.
3. To pick the currently displayed trace, click
   the <img src=images/gui/tuning/autopick_trace_button.png alt="Pick trace button" title="Autopick trace button" height=16>
   button.
4. These tabs are used to select the current view. __Traces Plot__ contains a plot of the station's traces, where
   manual picks can be created/edited. __Overview__ contains graphical results of the automatic picking process. The
   __P and S tabs__ contain the automatic picking results of the P and S phase, while __log__ contains a useful text
   output of the automatic picking.
5. These buttons are used to load/save/reset settings for automatic picking. The parameters can be saved in PyLoT
   input files, which have the file ending *.in*. They are human-readable text files, which can also be edited by
   hand. Saving the parameters allows you to load them again later, even on different machines.
6. These menus control the behaviour of the creation of manual picks from the Tune Autopicker window. __Picks__ allows
   selecting the phase for which a manual pick should be created, __Filter__ allows filtering waveforms and editing
   the filter parameters. __P-Channels__ and __S-Channels__ allow selecting the channels that should be displayed when
   creating a manual P or S pick.
7. This menu is the same as in the [Picking Window](#picking-window-settings), with the exception of the __Manual
   Onsets__ options. The __Manual Onsets__ buttons accept or reject the manual picks created in the Tune Autopicker
   window: pressing accept adds them to the manual picks for the event, while reject removes them.
8. The traces plot in the centre allows creating manual picks and viewing the waveforms.
9. The parameters which influence the autopicking result are in the Main settings and Advanced settings tabs on the
   left side. For a description of all the parameters see the [tuning documentation](tuning.md).

### Production run of the autopicker

Once the settings found during tuning give the desired results, the autopicker can be used on the complete dataset. To
invoke the autopicker on the whole set of events, click
the <img src=../icons/autopylot_button.png alt="Autopick" title="Autopick" height=32> button.

### Evaluation of automatic picks

PyLoT has two internal consistency checks for automatic picks that were determined for an event:

1. Jackknife check
2. Wadati check

#### 1. Jackknife check

The jackknife test in PyLoT checks the consistency of automatically determined P picks by examining the statistical
variance of the picks. The variance of all P picks is calculated and compared to the variance of subsets in which one
pick is removed. The idea is that picks which are close together in time should not influence the estimate of the
variance much, while picks whose position deviates from the norm influence the variance to a greater extent. If the
estimated variance of a subset with one pick removed differs too much from the estimated variance of all picks, the
pick that was removed from the subset is marked as invalid. The factor by which picks are allowed to skew the variance
estimate can be configured; it is called *jackfactor*, see [here](tuning.md#Pick-quality-control).

Additionally, the deviation of picks from the median is checked. For that, the median of all P picks that passed the
jackknife test is calculated. Picks whose onset times deviate from this median by more than the *mdttolerance* are
marked as invalid.

<img src=images/gui/jackknife_plot.png title="Jackknife/Median test diagram">

*The result of both tests (jackknife and median) is shown in a diagram afterwards. The onset time is plotted against a
running number of stations. Picks that failed either the jackknife or the median test are colored red. The median is
plotted as a green line.*

The jackknife and median checks are suited to finding picks that lie outside of the expected time window, for example
when a wrong phase was picked. They won't recognize picks that are in close proximity to the right onset but just
slightly too late/early.

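To make the two tests concrete, the following is a minimal sketch of such a screening; the function name and the
simplified jackknife pseudo-value formula are illustrative assumptions, not PyLoT's actual implementation:

```python
import numpy as np

def screen_p_picks(onsets, jackfactor=5.0, mdttolerance=6.0):
    """Illustrative jackknife/median screening of P onset times (in seconds)."""
    onsets = np.asarray(onsets, dtype=float)
    n = len(onsets)
    var_all = np.var(onsets)
    valid = np.ones(n, dtype=bool)
    for i in range(n):
        subset = np.delete(onsets, i)
        # jackknife pseudo value of the variance for the subgroup without pick i
        pseudo = n * var_all - (n - 1) * np.var(subset)
        if pseudo > jackfactor * var_all:
            valid[i] = False  # pick i skews the variance estimate too much
    # median check on the picks that survived the jackknife test
    median = np.median(onsets[valid])
    valid &= np.abs(onsets - median) <= mdttolerance
    return valid
```
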
#### 2. Wadati check

The Wadati check verifies the consistency of S picks. For this, the S-P time, i.e. the time difference between the S
and P onsets, is plotted against the P onset time. A line that minimizes the misfit is fitted to the points and the
deviation of each pick from this line is checked. If the deviation in seconds is above the *wdttolerance*
parameter ([see here](tuning.md#Pick-quality-control)), the pick is marked as invalid.

<img src=images/gui/wadati_plot.png title="Output diagram of Wadati check">

*The Wadati plot in PyLoT shows the S-P onset time difference over the P onset time. A first line is fitted (black).
All picks which deviate too much from this line are marked invalid (red). Then a second line is fitted which excludes
the invalid picks. From this line's slope, the ratio of P and S wave velocity is determined.*

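The underlying relation is tS - tP = (vp/vs - 1) * (tP - t0), so the slope of the fitted line yields the vp/vs ratio.
A minimal sketch of such a screening (the function name is an illustrative assumption, not PyLoT's implementation):

```python
import numpy as np

def wadati_check(p_onsets, s_onsets, wdttolerance=1.0):
    """Illustrative Wadati consistency check for S picks."""
    p = np.asarray(p_onsets, dtype=float)
    sp = np.asarray(s_onsets, dtype=float) - p  # S-P times
    # first least-squares line through the (P onset, S-P time) points
    slope, intercept = np.polyfit(p, sp, 1)
    valid = np.abs(sp - (slope * p + intercept)) <= wdttolerance
    # refit without the invalid picks; the slope yields vp/vs - 1
    slope2, _ = np.polyfit(p[valid], sp[valid], 1)
    vpvs = slope2 + 1.0
    return valid, vpvs
```
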
### Comparison between automatic and manual picks

Every pick in PyLoT consists of an earliest possible, a latest possible and a most likely onset time. The earliest and
latest possible onset times characterize the uncertainty of a pick. This approach is described in Diehl, Kissling and
Bormann (2012) - Tutorial for consistent phase picking at local to regional distances. These times are represented as
a Probability Density Function (PDF) for every pick. The PDF is implemented as two exponential distributions around
the most likely onset as the expected value.

To compare two single picks, their PDFs are cross correlated to create a new PDF. This corresponds to the subtraction
of the automatic pick from the manual pick.

<img src=images/gui/comparison/comparison_pdf.png title="Comparison between automatic and manual pick">

*Comparison between an automatic and a manual pick for a station in PyLoT by comparing their PDFs.*
*The upper plot shows the difference between the two single picks that are shown in the lower plot.*
*The difference is implemented as a cross correlation between the two PDFs and results in a new PDF, the comparison
PDF.*
*The expected value of the comparison PDF corresponds to the time distance between the automatic and manual picks'
most likely onsets.*
*The standard deviation corresponds to the combined uncertainty.*

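In discretized form this comparison amounts to a cross correlation of the two sampled PDFs. A minimal sketch, assuming
both PDFs are sampled on a common time grid with spacing dt (the function name is illustrative):

```python
import numpy as np

def compare_pick_pdfs(pdf_manual, pdf_auto, dt):
    """Illustrative comparison of two discretized pick PDFs.

    Cross correlating the PDFs yields the PDF of (manual - automatic) onset time.
    """
    comp = np.correlate(pdf_manual, pdf_auto, mode='full')
    comp /= comp.sum() * dt  # normalize to unit area
    lags = np.arange(-len(pdf_auto) + 1, len(pdf_manual)) * dt
    expected = np.sum(lags * comp) * dt  # mean time difference
    std = np.sqrt(np.sum((lags - expected) ** 2 * comp) * dt)  # combined uncertainty
    return expected, std
```
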
To compare the automatic and manual picks across multiple stations of an event, the properties of all the comparison
PDFs are shown in a histogram.

<img src=images/gui/comparison/compare_widget.png title="Comparison between picks of an event">

*Comparison between the automatic and manual picks for an event in PyLoT.*
*The top left plot shows the standard deviation of the comparison PDFs for P picks.*
*The bottom left plot shows the expected values of the comparison PDFs for P picks.*
*The top right plot shows the standard deviation of the comparison PDFs for S picks.*
*The bottom right plot shows the expected values of the comparison PDFs for S picks.*
*The standard deviation plots show that most P picks have an uncertainty between 1 and 2 seconds, while S pick
uncertainties have a much larger spread between 1 and 15 seconds.*
*This means P picks have higher quality classes on average than S picks.*
*The expected values are largely negative, meaning that the algorithm tends to pick earlier than the analyst with the
applied settings (Manual - Automatic).*
*The number of samples mentioned in the plot legends is the number of stations that have both an automatic and a
manual P pick.*

### Export and Import of automatic picks

Picks can be saved in *.xml* format.

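Since the exported *.xml* files are ObsPy-readable event XML, picks can also be inspected outside of PyLoT. A minimal
sketch (the file name is a hypothetical example following the naming scheme described above):

```python
from obspy import read_events

catalog = read_events('PyLoT_e0001.024.11.xml')  # hypothetical file name
for pick in catalog[0].picks:
    print(pick.waveform_id.station_code, pick.phase_hint, pick.time)
```
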
# Location determination

To be added.

# FAQ

Q: During manual picking the error "No channel to plot for phase ..." is displayed, and I am unable to create a pick.
A: Select a channel that should be used for the corresponding phase in the Pickwindow. For further information
read [Picking Window settings](#picking-window-settings).

Q: I see a warning "Mismatch in event identifiers" when loading picks from a file.
A: This means that PyLoT doesn't recognize the picks in the file as belonging to this specific event. They could have
been saved under a different installation of PyLoT but with the same waveform data, in which case they are still
compatible and you can continue loading them; or they could be the picks of a different event, in which case loading
them is not recommended.
BIN docs/gui.pdf (new file)
BIN docs/images/gui/arraymap-example.png (new file, 1.6 MiB)
BIN docs/images/gui/comparison/compare_widget.png (new file, 87 KiB)
BIN docs/images/gui/comparison/comparison_pdf.png (new file, 89 KiB)
BIN docs/images/gui/eventlist.png (new file, 191 KiB)
BIN docs/images/gui/jackknife_plot.png (new file, 123 KiB)
BIN docs/images/gui/picking/continue.png (new file, 1.7 KiB)
BIN docs/images/gui/picking/filterphase.png (new file, 2.6 KiB)
BIN docs/images/gui/picking/phase_selection.png (new file, 597 B)
BIN docs/images/gui/picking/pickwindow.png (new file, 166 KiB)
BIN docs/images/gui/pylot-filter-options.png (new file, 22 KiB)
BIN docs/images/gui/pylot-main-screen.png (new file, 56 KiB)
BIN docs/images/gui/pylot-waveform-plot.png (new file, 127 KiB)
BIN docs/images/gui/tuning/autopick_trace_button.png (new file, 876 B)
BIN docs/images/gui/tuning/tune_autopicker.png (new file, 254 KiB)
BIN docs/images/gui/wadati_plot.png (new file, 46 KiB)

docs/tuning.md (new file, 150 lines)
@@ -0,0 +1,150 @@
# AutoPyLoT Tuning

A description of the parameters used for determining automatic picks.

## Filter parameters and cut times

Parameters applied to the traces before the picking algorithm starts.

| Name | Description |
|---|---|
| *P Start*, *P Stop* | Define the time interval relative to the trace start time for CF calculation on the vertical trace. The value is relative to the theoretical onset time if the 'Use TauPy' option is enabled in the main settings of the 'Tune Autopicker' dialogue. |
| *S Start*, *S Stop* | Define the time interval relative to the trace start time for CF calculation on the horizontal traces. The value is relative to the theoretical onset time if the 'Use TauPy' option is enabled in the main settings of the 'Tune Autopicker' dialogue. |
| *Bandpass Z1* | Filter settings for the Butterworth bandpass applied to the vertical trace for calculation of the initial P pick. |
| *Bandpass Z2* | Filter settings for the Butterworth bandpass applied to the vertical trace for calculation of the precise P pick. |
| *Bandpass H1* | Filter settings for the Butterworth bandpass applied to the horizontal traces for calculation of the initial S pick. |
| *Bandpass H2* | Filter settings for the Butterworth bandpass applied to the horizontal traces for calculation of the precise S pick. |
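
The bandpass stages correspond to ordinary Butterworth bandpass filters as provided, for example, by ObsPy; the
following one-liner sketches what such a stage does (corner frequencies and filter order are example values from a
parameter file further below):

```python
from obspy import read

tr = read()[0]  # ObsPy example trace; substitute your own vertical trace
# Butterworth bandpass as used for the CF calculation stages (example values)
tr.filter('bandpass', freqmin=2.0, freqmax=10.0, corners=3, zerophase=False)
```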

## Initial P pick

Parameters used for determination of the initial P pick.

| Name | Description |
|---|---|
| *tLTA* | Size of the gliding LTA window in seconds used for calculation of the HOS-CF. |
| *pickwinP* | Size of the time window in seconds in which the minimum of the AIC-CF in front of the maximum of the HOS-CF is determined. |
| *AICtsmooth* | Average of the samples in this time window will be used for smoothing of the AIC-CF. |
| *checkwinP* | Time in front of the global maximum of the HOS-CF in which to search for a second local extremum. |
| *minfactorP* | Used with *checkwinP*. If a second local maximum is found, it has to be at least as large as the first maximum times *minfactorP*. |
| *tsignal* | Time window in seconds after the initial P pick used for determining the signal amplitude. |
| *tnoise* | Time window in seconds in front of the initial P pick used for determining the noise amplitude. |
| *tsafetey* | Time in seconds between *tsignal* and *tnoise*. |
| *tslope* | Time window in seconds after the initial P pick in which the slope of the onset is calculated. |

## Initial S pick

Parameters used for determination of the initial S pick.

| Name | Description |
|---|---|
| *tdet1h* | Length of the time window in seconds in which the AR parameters of the waveform are determined. |
| *tpred1h* | Length of the time window in seconds in which the waveform is predicted using the AR model. |
| *AICtsmoothS* | Average of the samples in this time window is used for smoothing the AIC-CF. |
| *pickwinS* | Time window in which the minimum of the AIC-CF in front of the maximum of the ARH-CF is determined. |
| *checkwinS* | Time in front of the global maximum of the ARH-CF in which to search for a second local extremum. |
| *minfactorS* | Used with *checkwinS*. If a second local maximum is found, it has to be at least as large as the first maximum times *minfactorS*. |
| *tsignal* | Time window in seconds after the initial S pick used for determining the signal amplitude. |
| *tnoise* | Time window in seconds in front of the initial S pick used for determining the noise amplitude. |
| *tsafetey* | Time in seconds between *tsignal* and *tnoise*. |
| *tslope* | Time window in seconds after the initial S pick in which the slope of the onset is calculated. |

## Precise P pick

Parameters used for determination of the precise P pick.

| Name | Description |
|---|---|
| *Precalcwin* | Time window in seconds for recalculation of the HOS-CF. The new CF will be twice the size of *Precalcwin*, since it is calculated from the initial pick to +/- *Precalcwin*. |
| *tsmoothP* | Average of the samples in this time window will be used for smoothing the second HOS-CF. |
| *ausP* | Controls the artificial uplift of samples during precise picking. A common local minimum of the smoothed and unsmoothed HOS-CF is found when the previous sample is larger than or equal to the current sample times (1+*ausP*). |

## Precise S pick

Parameters used for determination of the precise S pick.

| Name | Description |
|---|---|
| *tdet2h* | Time window for determination of the AR coefficients. |
| *tpred2h* | Time window in which the waveform is predicted using the determined AR parameters. |
| *Srecalcwin* | Time window for recalculation of the ARH-CF. The new CF will be calculated from the initial pick +/- *Srecalcwin*. |
| *tsmoothS* | Average of the samples in this time window will be used for smoothing the second ARH-CF. |
| *ausS* | Controls the artificial uplift of samples during precise picking. A common local minimum of the smoothed and unsmoothed ARH-CF is found when the previous sample is larger than or equal to the current sample times (1+*ausS*). |
| *pickwinS* | Time window around the initial pick in which to look for the precise pick. |

## Pick quality control

Parameters used for checking the quality and integrity of automatic picks.

| Name | Description |
|---|---|
| *minAICPslope* | Initial P picks with a slope lower than this value will be discarded. |
| *minAICPSNR* | Initial P picks with an SNR below this value will be discarded. |
| *minAICSslope* | Initial S picks with a slope lower than this value will be discarded. |
| *minAICSSNR* | Initial S picks with an SNR below this value will be discarded. |
| *minsiglength*, *noisefactor*, *minpercent* | Parameters for checking the signal length. In the time window of size *minsiglength* after the initial P pick, *minpercent* of the samples have to be larger than the threshold (noise level times *noisefactor*). |
| *zfac* | To recognize misattributed S picks, the RMS amplitudes of the vertical and horizontal traces are compared. The RMS amplitude of the vertical trace has to be at least *zfac* times the RMS amplitude of the horizontal traces for the pick to be accepted as a valid P pick. |
| *jackfactor* | A P pick is removed if the jackknife pseudo value of the variance of its subgroup is larger than the variance of all picks multiplied by the *jackfactor*. |
| *mdttolerance* | Maximum allowed deviation of P onset times from the median. Value in seconds. |
| *wdttolerance* | Maximum allowed deviation of S onset times from the line during the Wadati test. Value in seconds. |

## Pick quality determination

Parameters for discrete quality classes.

| Name | Description |
|---|---|
| *timeerrorsP* | Widths of the time windows in seconds between earliest and latest possible pick which represent the quality classes 0, 1, 2, 3 for P onsets. |
| *timeerrorsS* | Widths of the time windows in seconds between earliest and latest possible pick which represent the quality classes 0, 1, 2, 3 for S onsets. |
| *nfacP*, *nfacS* | Used for determination of the latest possible onset time. The time when the signal reaches an amplitude of *nfac* times the mean RMS amplitude in the time window *tnoise* corresponds to the latest possible onset time. |
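
As an illustration of how such discrete quality classes can be derived, a minimal sketch follows; the function name is
illustrative, and whether PyLoT compares the full or half width of the uncertainty window is an assumption here (the
default thresholds are the *timeerrorsP* values from the example parameter file shown further below):

```python
def quality_class(earliest, latest, timeerrors=(0.04, 0.08, 0.16, 0.32)):
    """Illustrative mapping of a pick's uncertainty to quality classes 0-4."""
    width = latest - earliest  # width between earliest and latest possible onset
    for quality, threshold in enumerate(timeerrors):
        if width <= threshold:
            return quality
    return 4  # worse than the largest threshold
```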
BIN docs/tuning.pdf (new file)
@@ -29,10 +29,13 @@
        <file>icons/map.png</file>
        <file>icons/openloc.png</file>
        <file>icons/compare_button.png</file>
        <file>icons/pick_qualities_button.png</file>
        <file>icons/eventlist_xml_button.png</file>
        <file>icons/locate_button.png</file>
        <file>icons/Matlab_PILOT_icon.png</file>
        <file>icons/printer.png</file>
        <file>icons/delete.png</file>
        <file>icons/key_A.png</file>
        <file>icons/key_E.png</file>
        <file>icons/key_N.png</file>
        <file>icons/key_P.png</file>
@@ -45,6 +48,8 @@
        <file>icons/key_W.png</file>
        <file>icons/key_Z.png</file>
        <file>icons/filter.png</file>
        <file>icons/filter_p.png</file>
        <file>icons/filter_s.png</file>
        <file>icons/sync.png</file>
        <file>icons/zoom_0.png</file>
        <file>icons/zoom_in.png</file>
BIN icons/eventlist_xml_button.png (new file, 16 KiB)
BIN icons/filter_p.png (new file, 5.4 KiB)
BIN icons/filter_s.png (new file, 7.8 KiB)
BIN icons/key_A.png (new file, 48 KiB)
BIN icons/pick_qualities_button.png (new file, 3.2 KiB)
icons_rc_2.py (5447 lines changed)
icons_rc_3.py (214839 lines changed)
@@ -4,10 +4,8 @@
%Parameters are optimized for %extent data sets!
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#main settings#
#rootpath# %project path
#datapath# %data path
#database# %name of data base
#eventID# %event ID for single event processing (* for all events found in database)
#datapath# %data path
#eventID# %event ID for single event processing (* for all events found in datapath)
#invdir# %full path to inventory or dataless-seed file
PILOT #datastructure# %choose data structure
True #apverbose# %choose 'True' or 'False' for terminal output
@@ -43,6 +41,7 @@ global #extent# %extent of a
1150.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
iasp91 #taup_model# %define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
P,Pdiff #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
0.05 0.5 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
0.001 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
0.05 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
@@ -4,19 +4,17 @@
%Parameters are optimized for %extent data sets!
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#main settings#
#rootpath# %project path
#datapath# %data path
#database# %name of data base
#eventID# %event ID for single event processing (* for all events found in database)
#invdir# %full path to inventory or dataless-seed file
/DATA/Insheim/EVENT_DATA/LOCAL/2018.02_Insheim #datapath# %data path
e0006.038.18 #eventID# %event ID for single event processing (* for all events found in datapath)
/DATA/Insheim/STAT_INFO #invdir# %full path to inventory or dataless-seed file
PILOT #datastructure# %choose data structure
True #apverbose# %choose 'True' or 'False' for terminal output
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#NLLoc settings#
None #nllocbin# %path to NLLoc executable
None #nllocroot# %root of NLLoc-processing directory
None #phasefile# %name of autoPyLoT-output phase file for NLLoc
None #ctrfile# %name of autoPyLoT-output control file for NLLoc
/home/ludger/NLLOC #nllocbin# %path to NLLoc executable
/home/ludger/NLLOC/Insheim #nllocroot# %root of NLLoc-processing directory
AUTOPHASES.obs #phasefile# %name of autoPyLoT-output phase file for NLLoc
Insheim_min1d032016_auto.in #ctrfile# %name of autoPyLoT-output control file for NLLoc
ttime #ttpatter# %pattern of NLLoc ttimes from grid
AUTOLOC_nlloc #outpatter# %pattern of NLLoc-output file
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -27,31 +25,32 @@ AUTOLOC_nlloc #outpatter# %pattern of
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#settings local magnitude#
1.11 0.0009 -2.0 #WAscaling# %Scaling relation (log(Ao)+Alog(r)+Br+C) of Wood-Anderson amplitude Ao [nm] If zeros are set, original Richter magnitude is calculated!
1.0382 -0.447 #magscaling# %Scaling relation for derived local magnitude [a*Ml+b]. If zeros are set, no scaling of network magnitude is applied!
0.0 0.0 #magscaling# %Scaling relation for derived local magnitude [a*Ml+b]. If zeros are set, no scaling of network magnitude is applied!
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#filter settings#
1.0 1.0 #minfreq# %Lower filter frequency [P, S]
10.0 10.0 #maxfreq# %Upper filter frequency [P, S]
2 2 #filter_order# %filter order [P, S]
2.0 2.0 #minfreq# %Lower filter frequency [P, S]
30.0 15.0 #maxfreq# %Upper filter frequency [P, S]
3 3 #filter_order# %filter order [P, S]
bandpass bandpass #filter_type# %filter type (bandpass, bandstop, lowpass, highpass) [P, S]
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#common settings picker#
local #extent# %extent of array ("local", "regional" or "global")
15.0 #pstart# %start time [s] for calculating CF for P-picking
60.0 #pstop# %end time [s] for calculating CF for P-picking
-1.0 #sstart# %start time [s] relative to P-onset for calculating CF for S-picking
7.0 #pstart# %start time [s] for calculating CF for P-picking
16.0 #pstop# %end time [s] for calculating CF for P-picking
-0.5 #sstart# %start time [s] relative to P-onset for calculating CF for S-picking
10.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
iasp91 #taup_model# %define TauPy model for traveltime estimation
2.0 10.0 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
2.0 12.0 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
2.0 8.0 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
2.0 10.0 #bph2# %lower/upper corner freq. of second band pass filter z-comp. [Hz]
False #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
iasp91 #taup_model# %define TauPy model for traveltime estimation
P #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
2.0 20.0 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
2.0 30.0 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
2.0 10.0 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
2.0 15.0 #bph2# %lower/upper corner freq. of second band pass filter z-comp. [Hz]
#special settings for calculating CF#
%!!Edit the following only if you know what you are doing!!%
#Z-component#
HOS #algoP# %choose algorithm for P-onset determination (HOS, ARZ, or AR3)
7.0 #tlta# %for HOS-/AR-AIC-picker, length of LTA window [s]
4.0 #tlta# %for HOS-/AR-AIC-picker, length of LTA window [s]
4 #hosorder# %for HOS-picker, order of Higher Order Statistics
2 #Parorder# %for AR-picker, order of AR process of Z-component
1.2 #tdet1z# %for AR-picker, length of AR determination window [s] for Z-component, 1st pick
@@ -59,12 +58,12 @@ HOS #algoP# %choose algo
0.6 #tdet2z# %for AR-picker, length of AR determination window [s] for Z-component, 2nd pick
0.2 #tpred2z# %for AR-picker, length of AR prediction window [s] for Z-component, 2nd pick
0.001 #addnoise# %add noise to seismogram for stable AR prediction
3.0 0.1 0.5 1.0 #tsnrz# %for HOS/AR, window lengths for SNR-and slope estimation [tnoise, tsafetey, tsignal, tslope] [s]
3.0 0.0 1.0 0.5 #tsnrz# %for HOS/AR, window lengths for SNR-and slope estimation [tnoise, tsafetey, tsignal, tslope] [s]
3.0 #pickwinP# %for initial AIC pick, length of P-pick window [s]
6.0 #Precalcwin# %for HOS/AR, window length [s] for recalculation of CF (relative to 1st pick)
0.2 #aictsmooth# %for HOS/AR, take average of samples for smoothing of AIC-function [s]
0.4 #aictsmooth# %for HOS/AR, take average of samples for smoothing of AIC-function [s]
0.1 #tsmoothP# %for HOS/AR, take average of samples for smoothing CF [s]
0.001 #ausP# %for HOS/AR, artificial uplift of samples (aus) of CF (P)
0.4 #ausP# %for HOS/AR, artificial uplift of samples (aus) of CF (P)
1.3 #nfacP# %for HOS/AR, noise factor for noise level determination (P)
#H-components#
ARH #algoS# %choose algorithm for S-onset determination (ARH or AR3)
@@ -75,7 +74,7 @@ ARH #algoS# %choose algo
4 #Sarorder# %for AR-picker, order of AR process of H-components
5.0 #Srecalcwin# %for AR-picker, window length [s] for recalculation of CF (2nd pick) (H)
4.0 #pickwinS# %for initial AIC pick, length of S-pick window [s]
2.0 0.3 1.5 1.0 #tsnrh# %for ARH/AR3, window lengths for SNR-and slope estimation [tnoise, tsafetey, tsignal, tslope] [s]
2.0 0.2 1.5 1.0 #tsnrh# %for ARH/AR3, window lengths for SNR-and slope estimation [tnoise, tsafetey, tsignal, tslope] [s]
1.0 #aictsmoothS# %for AIC-picker, take average of samples for smoothing of AIC-function [s]
0.7 #tsmoothS# %for AR-picker, take average of samples for smoothing CF [s] (S)
0.9 #ausS# %for HOS/AR, artificial uplift of samples (aus) of CF (S)
@@ -85,16 +84,16 @@ ARH #algoS# %choose algo
2.0 #minFMSNR# %miniumum required SNR for first-motion determination
0.2 #fmpickwin# %pick window around P onset for calculating zero crossings
#quality assessment#
0.02 0.04 0.08 0.16 #timeerrorsP# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for P
0.04 0.08 0.16 0.32 #timeerrorsS# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for S
0.04 0.08 0.16 0.32 #timeerrorsP# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for P
0.05 0.10 0.20 0.40 #timeerrorsS# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for S
0.8 #minAICPslope# %below this slope [counts/s] the initial P pick is rejected
1.1 #minAICPSNR# %below this SNR the initial P pick is rejected
1.0 #minAICSslope# %below this slope [counts/s] the initial S pick is rejected
1.5 #minAICSSNR# %below this SNR the initial S pick is rejected
1.0 #minsiglength# %length of signal part for which amplitudes must exceed noiselevel [s]
1.0 #noisefactor# %noiselevel*noisefactor=threshold
10.0 #minpercent# %required percentage of amplitudes exceeding threshold
1.5 #zfac# %P-amplitude must exceed at least zfac times RMS-S amplitude
6.0 #mdttolerance# %maximum allowed deviation of P picks from median [s]
1.1 #noisefactor# %noiselevel*noisefactor=threshold
50.0 #minpercent# %required percentage of amplitudes exceeding threshold
1.1 #zfac# %P-amplitude must exceed at least zfac times RMS-S amplitude
5.0 #mdttolerance# %maximum allowed deviation of P picks from median [s]
1.0 #wdttolerance# %maximum allowed deviation from Wadati-diagram
5.0 #jackfactor# %pick is removed if the variance of the subgroup with the pick removed is larger than the mean variance of all subgroups times safety factor
2.0 #jackfactor# %pick is removed if the variance of the subgroup with the pick removed is larger than the mean variance of all subgroups times safety factor
@@ -4,10 +4,8 @@
%Parameters are optimized for %extent data sets!
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#main settings#
#rootpath# %project path
#datapath# %data path
#database# %name of data base
#eventID# %event ID for single event processing (* for all events found in database)
#datapath# %data path
#eventID# %event ID for single event processing (* for all events found in datapath)
#invdir# %full path to inventory or dataless-seed file
PILOT #datastructure# %choose data structure
True #apverbose# %choose 'True' or 'False' for terminal output
@@ -42,7 +40,8 @@ local #extent# %extent of a
-1.0 #sstart# %start time [s] relative to P-onset for calculating CF for S-picking
10.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
iasp91 #taup_model# %define TauPy model for traveltime estimation
iasp91 #taup_model# %define TauPy model for traveltime estimation
P #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
2.0 10.0 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
2.0 12.0 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
2.0 8.0 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
@@ -166,7 +166,7 @@ def installPyLoT(verbosity=None):
    if verbosity > 1:
        print('copying input files into destination folder ...')
    ans = input('please specify scope of interest '
                '([0]=local, 1=regional, 2=global) :') or 0
                '([0]=local, 1=regional, 2=global, 3=active) :') or 0
    if not isinstance(ans, int):
        ans = int(ans)
    if ans == 0:
@@ -175,6 +175,8 @@ def installPyLoT(verbosity=None):
        ans = 'regional'
    elif ans == 2:
        ans = 'global'
    elif ans == 3:
        ans = 'active'
    link_dest = []
    for file, destination in files_to_copy.items():
        link_file = ans in file
pylot.yml (new file, 12 lines)
@@ -0,0 +1,12 @@
name: pylot_311
channels:
  - conda-forge
  - defaults
dependencies:
  - cartopy=0.23.0=py311hcf9f919_1
  - joblib=1.4.2=pyhd8ed1ab_0
  - obspy=1.4.1=py311he736701_3
  - pyaml=24.7.0=pyhd8ed1ab_0
  - pyqtgraph=0.13.7=pyhd8ed1ab_0
  - pyside2=5.15.8=py311h3d699ce_4
  - pytest=8.3.2=pyhd8ed1ab_0
@@ -9,7 +9,7 @@ PyLoT - the Python picking and Localization Tool

This python library contains a graphical user interfaces for picking
seismic phases. This software needs ObsPy (http://github.com/obspy/obspy/wiki)
and the Qt4 libraries to be installed first.
and the Qt libraries to be installed first.

PILOT has been developed in Mathworks' MatLab. In order to distribute
PILOT without facing portability problems, it has been decided to re-
@@ -6,15 +6,17 @@ Revised/extended summer 2017.

:author: Ludger Küperkoch / MAGS2 EP3 working group
"""

import matplotlib.pyplot as plt
import numpy as np
import obspy.core.event as ope
from obspy.geodetics import degrees2kilometers
from scipy import integrate, signal
from scipy.optimize import curve_fit

from pylot.core.pick.utils import getsignalwin, crossings_nonzero_all, \
    select_for_phase
from pylot.core.util.utils import common_range, fit_curve
from scipy import integrate, signal
from scipy.optimize import curve_fit


def richter_magnitude_scaling(delta):
@@ -117,12 +119,20 @@ class Magnitude(object):
        pass

    def updated_event(self, magscaling=None):
        self.event.magnitudes.append(self.net_magnitude(magscaling))
        net_ml = self.net_magnitude(magscaling)
        if net_ml:
            self.event.magnitudes.append(net_ml)
        return self.event

    def net_magnitude(self, magscaling=None):
        if self:
            if magscaling is not None and str(magscaling) is not '[0.0, 0.0]':
            if magscaling is None:
                scaling = False
            elif magscaling[0] == 0.0 and magscaling[1] == 0.0:
                scaling = False
            else:
                scaling = True
            if scaling == True:
                # scaling necessary
                print("Scaling network magnitude ...")
                mag = ope.Magnitude(
@@ -133,7 +143,11 @@ class Magnitude(object):
                    station_count=len(self.magnitudes),
                    azimuthal_gap=self.origin_id.get_referred_object().quality.azimuthal_gap)
            else:
                # no saling necessary
                # no scaling necessary
                # Temporary fix needs rework
                if (len(self.magnitudes.keys()) == 0):
                    print("Error in local magnitude calculation ")
                    return None
                mag = ope.Magnitude(
                    mag=np.median([M.mag for M in self.magnitudes.values()]),
                    magnitude_type=self.type,
@@ -141,7 +155,6 @@ class Magnitude(object):
                    station_count=len(self.magnitudes),
                    azimuthal_gap=self.origin_id.get_referred_object().quality.azimuthal_gap)
            return mag
        return None


class LocalMagnitude(Magnitude):
@@ -212,15 +225,20 @@ class LocalMagnitude(Magnitude):

        power = [np.power(tr.data, 2) for tr in st if tr.stats.channel[-1] not
                 in 'Z3']
        if len(power) != 2:
            raise ValueError('Wood-Anderson amplitude defintion only valid for '
                             'two horizontals: {0} given'.format(len(power)))
        power_sum = power[0] + power[1]
        #
        # checking horizontal count and calculating power_sum accordingly
        if len(power) == 1:
            print('WARNING: Only one horizontal found for station {0}.'.format(st[0].stats.station))
            power_sum = power[0]
        elif len(power) == 2:
            power_sum = power[0] + power[1]
        else:
            raise ValueError('Wood-Anderson aomplitude defintion only valid for'
                             ' up to two horizontals: {0} given'.format(len(power)))

        sqH = np.sqrt(power_sum)

        # get time array
        th = np.arange(0, len(sqH) * dt, dt)
        th = np.arange(0, st[0].stats.npts / st[0].stats.sampling_rate, dt)
        # get maximum peak within pick window
        iwin = getsignalwin(th, t0 - stime, self.calc_win)
        ii = min([iwin[len(iwin) - 1], len(th)])
@@ -233,17 +251,34 @@ class LocalMagnitude(Magnitude):
        # check for plot flag (for debugging only)
        fig = None
        if iplot > 1:
            st.plot()
            fig = plt.figure()
            ax = fig.add_subplot(111)
            ax = fig.add_subplot(211)
            ax.plot(th, st[0].data, 'k')
            ax.plot(th, sqH)
            ax.plot(th[iwin], sqH[iwin], 'g')
            ax.plot([t0, t0], [0, max(sqH)], 'r', linewidth=2)
            ax.title(
                'Station %s, RMS Horizontal Traces, WA-peak-to-peak=%4.1f mm' \
                % (st[0].stats.station, wapp))
            ax.plot([t0 - stime, t0 - stime], [0, max(sqH)], 'r', linewidth=2)
            ax.set_title('Station %s, Channel %s, RMS Horizontal Trace, '
                         'WA-peak-to-peak=%6.3f mm' % (st[0].stats.station,
                                                       st[0].stats.channel,
                                                       wapp))
            ax.set_xlabel('Time [s]')
            ax.set_ylabel('Displacement [mm]')
            ax = fig.add_subplot(212)
            ax.plot(th, st[1].data, 'k')
            ax.plot(th, sqH)
            ax.plot(th[iwin], sqH[iwin], 'g')
            ax.plot([t0 - stime, t0 - stime], [0, max(sqH)], 'r', linewidth=2)
            ax.set_title('Channel %s, RMS Horizontal Trace, '
                         'WA-peak-to-peak=%6.3f mm' % (st[1].stats.channel,
                                                       wapp))
            ax.set_xlabel('Time [s]')
            ax.set_ylabel('Displacement [mm]')
            fig.show()
            try:
                input()
            except SyntaxError:
                pass
            plt.close(fig)

        return wapp, fig
@@ -253,6 +288,11 @@ class LocalMagnitude(Magnitude):
                continue
            pick = a.pick_id.get_referred_object()
            station = pick.waveform_id.station_code
            # make sure calculating Ml only from reliable onsets
            # NLLoc: time_weight = 0 => do not use onset!
            if a.time_weight == 0:
                print("Uncertain pick at Station {}, do not use it!".format(station))
                continue
            wf = select_for_phase(self.stream.select(
                station=station), a.phase)
            if not wf:
@@ -284,8 +324,12 @@ class LocalMagnitude(Magnitude):
            a0 = a0 * 1e03  # mm to nm (see Havskov & Ottemöller, 2010)
            magnitude = ope.StationMagnitude(mag=np.log10(a0) \
                                                 + self.wascaling[0] * np.log10(delta) + self.wascaling[1]
                                                 * delta + self.wascaling[
                                                 * delta + self.wascaling[
                                                     2])
            if self.verbose:
                print(
                    "Local Magnitude for station {0}: ML = {1:3.1f}".format(
                        station, magnitude.mag))
            magnitude.origin_id = self.origin_id
            magnitude.waveform_id = pick.waveform_id
            magnitude.amplitude_id = amplitude.resource_id
@@ -349,26 +393,43 @@ class MomentMagnitude(Magnitude):
        for a in self.arrivals:
            if a.phase not in 'pP':
                continue
            # make sure calculating Mo only from reliable onsets
            # NLLoc: time_weight = 0 => do not use onset!
            if a.time_weight == 0:
                continue
            pick = a.pick_id.get_referred_object()
            station = pick.waveform_id.station_code
            scopy = self.stream.copy()
            wf = scopy.select(station=station)
            if len(self.stream) <= 2:
                print("Station:" '{0}'.format(station))
                print("WARNING: No instrument corrected data available,"
                      " no magnitude calculation possible! Go on.")
                continue
            wf = self.stream.select(station=station)
            if not wf:
                continue
            try:
                scopy = wf.copy()
            except AssertionError:
                print("WARNING: Something's wrong with the data,"
                      "station {},"
                      "no calculation of moment magnitude possible! Go on.".format(station))
                continue
            onset = pick.time
            distance = degrees2kilometers(a.distance)
            azimuth = a.azimuth
            incidence = a.takeoff_angle
            w0, fc = calcsourcespec(wf, onset, self.p_velocity, distance,
            if not 0. <= incidence <= 360.:
                if self.verbose:
                    print(f'WARNING: Incidence angle outside bounds - {incidence}')
                return
            w0, fc = calcsourcespec(scopy, onset, self.p_velocity, distance,
                                    azimuth, incidence, self.p_attenuation,
                                    self.plot_flag, self.verbose)
            if w0 is None or fc is None:
                if self.verbose:
                    print("WARNING: insufficient frequency information")
                continue
            WF = select_for_phase(self.stream.select(
                station=station), a.phase)
            WF = select_for_phase(WF, "P")
            WF = select_for_phase(scopy, "P")
            m0, mw = calcMoMw(WF, w0, self.rock_density, self.p_velocity,
                              distance, self.verbose)
            self.moment_props = (station, dict(w0=w0, fc=fc, Mo=m0))
@@ -379,6 +440,40 @@ class MomentMagnitude(Magnitude):
            self.event.station_magnitudes.append(magnitude)
            self.magnitudes = (station, magnitude)

    # WIP JG
    def getSourceSpec(self):
        for a in self.arrivals:
            if a.phase not in 'pP':
                continue
            # make sure calculating Mo only from reliable onsets
            # NLLoc: time_weight = 0 => do not use onset!
            if a.time_weight == 0:
                continue
            pick = a.pick_id.get_referred_object()
            station = pick.waveform_id.station_code
            if len(self.stream) <= 2:
                print("Station:" '{0}'.format(station))
                print("WARNING: No instrument corrected data available,"
                      " no magnitude calculation possible! Go on.")
                continue
            wf = self.stream.select(station=station)
            if not wf:
                continue
            try:
                scopy = wf.copy()
            except AssertionError:
                print("WARNING: Something's wrong with the data,"
                      "station {},"
                      "no calculation of moment magnitude possible! Go on.".format(station))
                continue
            onset = pick.time
            distance = degrees2kilometers(a.distance)
            azimuth = a.azimuth
            incidence = a.takeoff_angle
            w0, fc, plt = calcsourcespec(scopy, onset, self.p_velocity, distance,
                                         azimuth, incidence, self.p_attenuation,
                                         3, self.verbose)
            return w0, fc, plt
def calcMoMw(wfstream, w0, rho, vp, delta, verbosity=False):
    '''
@@ -406,17 +501,18 @@ def calcMoMw(wfstream, w0, rho, vp, delta, verbosity=False):

    if verbosity:
        print(
            "calcMoMw: Calculating seismic moment Mo and moment magnitude Mw for station {0} ...".format(
                tr.stats.station))
            "calcMoMw: Calculating seismic moment Mo and moment magnitude Mw \
for station {0} ...".format(tr.stats.station))

    # additional common parameters for calculating Mo
    rP = 2 / np.sqrt(
        15)  # average radiation pattern of P waves (Aki & Richards, 1980)
    # average radiation pattern of P waves (Aki & Richards, 1980)
    rP = 2 / np.sqrt(15)
    freesurf = 2.0  # free surface correction, assuming vertical incidence

    Mo = w0 * 4 * np.pi * rho * np.power(vp, 3) * delta / (rP * freesurf)

    # Mw = np.log10(Mo * 1e07) * 2 / 3 - 10.7  # after Hanks & Kanamori (1979), defined for [dyn*cm]!
    # Mw = np.log10(Mo * 1e07) * 2 / 3 - 10.7  # after Hanks & Kanamori (1979),
    # defined for [dyn*cm]!
    Mw = np.log10(Mo) * 2 / 3 - 6.7  # for metric units

    if verbosity:
@@ -476,11 +572,15 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,

    dist = delta * 1000  # hypocentral distance in [m]

    fc = None
    Fc = None
    w0 = None

    zdat = select_for_phase(wfstream, "P")

    if len(zdat) == 0:
        print("No vertical component found in stream:\n{}".format(wfstream))
        print("No calculation of source spectrum possible!")
        return w0, Fc

    dt = zdat[0].stats.delta

    freq = zdat[0].stats.sampling_rate
@@ -488,7 +588,6 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,
    # trim traces to common range (for rotation)
    trstart, trend = common_range(wfstream)
    wfstream.trim(trstart, trend)

    # rotate into LQT (ray-coordindate-) system using Obspy's rotate
    # L: P-wave direction
    # Q: SV-wave direction
@@ -529,9 +628,7 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,
        print("calcsourcespec: Something is wrong with the waveform, "
              "no zero crossings derived!\n")
        print("No calculation of source spectrum possible!")
        plotflag = 0
    else:
        plotflag = 1
        index = min([3, len(zc) - 1])
        calcwin = (zc[index] - zc[0]) * dt
        iwin = getsignalwin(t, rel_onset, calcwin)
@@ -539,14 +636,15 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,

        # fft
        fny = freq / 2
        l = len(xdat) / freq
        # l = len(xdat) / freq
        # number of fft bins after Bath
        n = freq * l
        # n = freq * l
        # find next power of 2 of data length
        m = pow(2, np.ceil(np.log(len(xdat)) / np.log(2)))
        N = int(np.power(m, 2))
        N = min(int(np.power(m, 2)), 16384)
        # N = int(np.power(m, 2))
        y = dt * np.fft.fft(xdat, N)
        Y = abs(y[: N / 2])
        Y = abs(y[: int(N / 2)])
        L = (N - 1) / freq
        f = np.arange(0, fny, 1 / L)

@@ -573,26 +671,23 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,
        # use of implicit scipy otimization function
        fit = synthsourcespec(F, w0in, Fcin)
        [optspecfit, _] = curve_fit(synthsourcespec, F, YYcor, [w0in, Fcin])
        w0 = optspecfit[0]
        fc = optspecfit[1]
        # w01 = optspecfit[0]
        # fc1 = optspecfit[1]
        w01 = optspecfit[0]
        fc1 = optspecfit[1]
        if verbosity:
            print("calcsourcespec: Determined w0-value: %e m/Hz, \n"
                  "calcsourcespec: Determined corner frequency: %f Hz" % (w0, fc))
                  "calcsourcespec: Determined corner frequency: %f Hz" % (w01, fc1))

        # use of conventional fitting
        # [w02, fc2] = fitSourceModel(F, YYcor, Fcin, iplot, verbosity)
        # use of conventional fitting
        [w02, fc2] = fitSourceModel(F, YYcor, Fcin, iplot, verbosity)

        # get w0 and fc as median of both
        # source spectrum fits
        # w0 = np.median([w01, w02])
        # fc = np.median([fc1, fc2])
        # if verbosity:
        #     print("calcsourcespec: Using w0-value = %e m/Hz and fc = %f Hz" % (
        #         w0, fc))

    if iplot > 1:
        # get w0 and fc as median of both
        # source spectrum fits
        w0 = np.median([w01, w02])
        Fc = np.median([fc1, fc2])
        if verbosity:
            print("calcsourcespec: Using w0-value = %e m/Hz and fc = %f Hz" % (
                w0, Fc))
    if iplot >= 1:
        f1 = plt.figure()
        tLdat = np.arange(0, len(Ldat) * dt, dt)
        plt.subplot(2, 1, 1)
@@ -600,7 +695,7 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,
        p1, = plt.plot(t, np.multiply(inttrz, 1000), 'k')
        p2, = plt.plot(tLdat, np.multiply(Ldat, 1000))
        plt.legend([p1, p2], ['Displacement', 'Rotated Displacement'])
        if plotflag == 1:
        if iplot == 1:
            plt.plot(t[iwin], np.multiply(xdat, 1000), 'g')
            plt.title('Seismogram and P Pulse, Station %s-%s' \
                      % (zdat[0].stats.station, zdat[0].stats.channel))
@@ -610,24 +705,32 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,
        plt.xlabel('Time since %s' % zdat[0].stats.starttime)
        plt.ylabel('Displacement [mm]')

        if plotflag == 1:
        if iplot > 1:
            plt.subplot(2, 1, 2)
            p1, = plt.loglog(f, Y.real, 'k')
            p2, = plt.loglog(F, YY.real)
            p3, = plt.loglog(F, YYcor, 'r')
            p4, = plt.loglog(F, fit, 'g')
            plt.loglog([fc, fc], [w0 / 100, w0], 'g')
            plt.loglog([Fc, Fc], [w0 / 100, w0], 'g')
            plt.legend([p1, p2, p3, p4], ['Raw Spectrum',
                                          'Used Raw Spectrum',
                                          'Q-Corrected Spectrum',
                                          'Fit to Spectrum'])
            plt.title('Source Spectrum from P Pulse, w0=%e m/Hz, fc=%6.2f Hz' \
                      % (w0, fc))
                      % (w0, Fc))
            plt.xlabel('Frequency [Hz]')
            plt.ylabel('Amplitude [m/Hz]')
            plt.grid()
        if iplot == 3:
            return w0, Fc, plt
        plt.show()
        try:
            input()
        except SyntaxError:
            pass
        plt.close(f1)

    return w0, fc
    return w0, Fc

def synthsourcespec(f, omega0, fcorner):
@@ -701,13 +804,14 @@ def fitSourceModel(f, S, fc0, iplot, verbosity=False):
    # check difference of il and ir in order to
    # keep calculation time acceptable
    idiff = ir - il
    if idiff > 10000:
    if idiff > 100000:
        increment = 1000
    elif idiff <= 100000 and idiff > 10000:
        increment = 100
    elif idiff <= 20:
        increment = 1
    else:
        increment = 10

    for i in range(il, ir, increment):
        FC = f[i]
        indexdc = np.where((f > 0) & (f <= FC))
@@ -725,37 +829,43 @@ def fitSourceModel(f, S, fc0, iplot, verbosity=False):

    # get best found w0 anf fc from minimum
    if len(STD) > 0:
        fc = fc[np.argmin(STD)]
        Fc = fc[np.argmin(STD)]
        w0 = w0[np.argmin(STD)]
    elif len(STD) == 0:
        fc = fc0
        Fc = fc0
        w0 = max(S)
    if verbosity:
        print(
            "fitSourceModel: best fc: {0} Hz, best w0: {1} m/Hz".format(fc, w0))

    if iplot > 1:
            "fitSourceModel: best fc: {0} Hz, best w0: {1} m/Hz".format(Fc, w0))
    if iplot >= 1:
        plt.figure()  # iplot)
        plt.loglog(f, S, 'k')
        plt.loglog([f[0], fc], [w0, w0], 'g')
        plt.loglog([fc, fc], [w0 / 100, w0], 'g')
        plt.loglog([f[0], Fc], [w0, w0], 'g')
        plt.loglog([Fc, Fc], [w0 / 100, w0], 'g')
        plt.title('Calculated Source Spectrum, Omega0=%e m/Hz, fc=%6.2f Hz' \
                  % (w0, fc))
                  % (w0, Fc))
        plt.xlabel('Frequency [Hz]')
        plt.ylabel('Amplitude [m/Hz]')
        plt.grid()
        plt.figure()  # iplot + 1)
        plt.subplot(311)
        plt.plot(f[il:ir], STD, '*')
        plt.title('Common Standard Deviations')
        plt.xticks([])
        plt.subplot(312)
        plt.plot(f[il:ir], stdw0, '*')
        plt.title('Standard Deviations of w0-Values')
        plt.xticks([])
        plt.subplot(313)
        plt.plot(f[il:ir], stdfc, '*')
        plt.title('Standard Deviations of Corner Frequencies')
        plt.xlabel('Corner Frequencies [Hz]')
        if iplot == 2:
            plt.figure()  # iplot + 1)
            plt.subplot(311)
            plt.plot(fc, STD, '*')
            plt.title('Common Standard Deviations')
            plt.xticks([])
            plt.subplot(312)
            plt.plot(fc, stdw0, '*')
            plt.title('Standard Deviations of w0-Values')
            plt.xticks([])
            plt.subplot(313)
            plt.plot(fc, stdfc, '*')
            plt.title('Standard Deviations of Corner Frequencies')
            plt.xlabel('Corner Frequencies [Hz]')
        plt.show()
        try:
            input()
        except SyntaxError:
            pass
        plt.close()

    return w0, fc
    return w0, Fc
@@ -2,27 +2,34 @@
# -*- coding: utf-8 -*-

import copy
import logging
import os

from PySide2.QtWidgets import QMessageBox
from obspy import read_events
from obspy.core import read, Stream, UTCDateTime
from obspy.core.event import Event as ObsPyEvent
from obspy.io.sac import SacIOError

import pylot.core.loc.focmec as focmec
import pylot.core.loc.hypodd as hypodd
import pylot.core.loc.velest as velest
from pylot.core.io.phases import readPILOTEvent, picks_from_picksdict, \
    picksdict_from_pilot, merge_picks
    picksdict_from_pilot, merge_picks, PylotParameter
from pylot.core.util.errors import FormatError, OverwriteError
from pylot.core.util.event import Event
from pylot.core.util.utils import fnConstructor, full_range
import pylot.core.loc.velest as velest
from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT
from pylot.core.util.utils import fnConstructor, full_range, check4rotated, \
    check_for_gaps_and_merge, trim_station_components, check_for_nan


class Data(object):
    """
    Data container with attributes wfdata holding ~obspy.core.stream.

    :type parent: PySide.QtGui.QWidget object, optional
    :param parent: A PySide.QtGui.QWidget object utilized when
        called by a GUI to display a PySide.QtGui.QMessageBox instead of printing
    :type parent: PySide2.QtWidgets.QWidget object, optional
    :param parent: A PySide2.QtWidgets.QWidget object utilized when
        called by a GUI to display a PySide2.QtWidgets.QMessageBox instead of printing
        to standard out.
    :type evtdata: ~obspy.core.event.Event object, optional
    :param evtdata ~obspy.core.event.Event object containing all derived or
@@ -45,7 +52,7 @@ class Data(object):
        elif isinstance(evtdata, str):
            try:
                cat = read_events(evtdata)
                if len(cat) is not 1:
                if len(cat) != 1:
                    raise ValueError('ambiguous event information for file: '
                                     '{file}'.format(file=evtdata))
                evtdata = cat[0]
@@ -58,7 +65,9 @@ class Data(object):
                elif 'LOC' in evtdata:
                    raise NotImplementedError('PILOT location information '
                                              'read support not yet '
                                              'implemeted.')
                                              'implemented.')
                elif 'event.pkl' in evtdata:
                    evtdata = qml_from_obspyDMT(evtdata)
                else:
                    raise e
        else:
@@ -71,6 +80,7 @@ class Data(object):
        self.wforiginal = None
        self.cuttimes = None
        self.dirty = False
        self.processed = None

    def __str__(self):
        return str(self.wfdata)
@@ -82,9 +92,17 @@ class Data(object):
        if other.isNew() and not self.isNew():
            picks_to_add = other.get_evt_data().picks
            old_picks = self.get_evt_data().picks
            for pick in picks_to_add:
                if pick not in old_picks:
                    old_picks.append(pick)
            wf_ids_old = [pick.waveform_id for pick in old_picks]
            for new_pick in picks_to_add:
                wf_id = new_pick.waveform_id
                if wf_id in wf_ids_old:
                    for old_pick in old_picks:
                        comparison = [old_pick.waveform_id == new_pick.waveform_id,
                                      old_pick.phase_hint == new_pick.phase_hint,
                                      old_pick.method_id == new_pick.method_id]
                        if all(comparison):
                            del (old_pick)
                old_picks.append(new_pick)
        elif not other.isNew() and self.isNew():
            new = other + self
            self.evtdata = new.get_evt_data()
@@ -99,6 +117,11 @@ class Data(object):
        return self

    def getPicksStr(self):
        """
        Return picks in event data
        :return: picks seperated by newlines
        :rtype: str
        """
        picks_str = ''
        for pick in self.get_evt_data().picks:
            picks_str += str(pick) + '\n'
@@ -106,18 +129,11 @@ class Data(object):

    def getParent(self):
        """

        :return:
        Get PySide.QtGui.QWidget parent object
        """
        return self._parent

    def isNew(self):
        """

        :return:
        """
        return self._new

    def setNew(self):
@ -125,9 +141,9 @@ class Data(object):
|
||||
|
||||
def getCutTimes(self):
|
||||
"""
|
||||
|
||||
|
||||
:return:
|
||||
Returns earliest start and latest end of all waveform data
|
||||
:return: minimum start time and maximum end time as a tuple
|
||||
:rtype: (UTCDateTime, UTCDateTime)
|
||||
"""
|
||||
if self.cuttimes is None:
|
||||
self.updateCutTimes()
|
||||
@ -135,22 +151,33 @@ class Data(object):
|
||||
|
||||
def updateCutTimes(self):
|
||||
"""
|
||||
|
||||
|
||||
Update cuttimes to contain earliest start and latest end time
|
||||
of all waveform data
|
||||
:rtype: None
|
||||
"""
|
||||
self.cuttimes = full_range(self.getWFData())
|
||||
|
||||
def getEventFileName(self):
|
||||
"""
|
||||
|
||||
|
||||
:return:
|
||||
"""
|
||||
ID = self.getID()
|
||||
# handle forbidden filenames especially on windows systems
|
||||
return fnConstructor(str(ID))
|
||||
|
||||
def checkEvent(self, event, fcheck, forceOverwrite=False):
|
||||
"""
|
||||
Check information in supplied event and own event and replace with own
|
||||
information if no other information are given or forced by forceOverwrite
|
||||
:param event: Event that supplies information for comparison
|
||||
:type event: pylot.core.util.event.Event
|
||||
:param fcheck: check and delete existing information
|
||||
can be a str or a list of strings of ['manual', 'auto', 'origin', 'magnitude']
|
||||
:type fcheck: str, [str]
|
||||
:param forceOverwrite: Set to true to force overwrite own information. If false,
|
||||
supplied information from event is only used if there is no own information in that
|
||||
category (given in fcheck: manual, auto, origin, magnitude)
|
||||
:type forceOverwrite: bool
|
||||
:return:
|
||||
:rtype: None
|
||||
"""
|
||||
if 'origin' in fcheck:
|
||||
self.replaceOrigin(event, forceOverwrite)
|
||||
if 'magnitude' in fcheck:
|
||||
@ -161,43 +188,79 @@ class Data(object):
|
||||
self.replacePicks(event, 'manual')
|
||||
|
||||
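For orientation, here is a minimal usage sketch of the fcheck mechanism documented above; the session objects and the chosen categories are illustrative assumptions, not part of the diff:

# hypothetical session: keep own origin/magnitude unless missing and
# replace automatic picks in the supplied event by the current ones
data = Data(evtdata='e0001/PyLoT_e0001.xml')  # assumed event file name
data.checkEvent(event, fcheck=['origin', 'magnitude', 'auto'])
# with forceOverwrite=True the own information always wins
data.checkEvent(event, fcheck='origin', forceOverwrite=True)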
    def replaceOrigin(self, event, forceOverwrite=False):
        """
        Replace own origin with the one supplied in event if own origin is not
        existing or forced by forceOverwrite = True
        :param event: Event that supplies information for comparison
        :type event: pylot.core.util.event.Event
        :param forceOverwrite: always replace own information with supplied one if true
        :type forceOverwrite: bool
        :return:
        :rtype: None
        """
        if self.get_evt_data().origins or forceOverwrite:
            if event.origins:
                print("Found origin, replace it by new origin.")
            event.origins = self.get_evt_data().origins

    def replaceMagnitude(self, event, forceOverwrite=False):
        """
        Replace own magnitude with the one supplied in event if own magnitude is not
        existing or forced by forceOverwrite = True
        :param event: Event that supplies information for comparison
        :type event: pylot.core.util.event.Event
        :param forceOverwrite: always replace own information with supplied one if true
        :type forceOverwrite: bool
        :return:
        :rtype: None
        """
        if self.get_evt_data().magnitudes or forceOverwrite:
            if event.magnitudes:
                print("Found magnitude, replace it by new magnitude")
            event.magnitudes = self.get_evt_data().magnitudes

    def replacePicks(self, event, picktype):
        checkflag = 0
        """
        Replace picks in event with own picks
        :param event: Event that supplies information for comparison
        :type event: pylot.core.util.event.Event
        :param picktype: 'auto' or 'manual' picks
        :type picktype: str
        :return:
        :rtype: None
        """
        checkflag = 1
        picks = event.picks
        # remove existing picks
        for j, pick in reversed(list(enumerate(picks))):
            if picktype in str(pick.method_id.id):
                picks.pop(j)
                checkflag = 1
        if checkflag:
            print("Found %s pick(s), remove them and append new picks to catalog." % picktype)
            try:
                if picktype in str(pick.method_id.id):
                    picks.pop(j)
                    checkflag = 2
            except AttributeError as e:
                msg = '{}'.format(e)
                print(e)
                checkflag = 0
        if checkflag > 0:
            if checkflag == 1:
                print("Write new %s picks to catalog." % picktype)
            if checkflag == 2:
                print("Found %s pick(s), remove them and append new picks to catalog." % picktype)

            # append new picks
            for pick in self.get_evt_data().picks:
                if picktype in str(pick.method_id.id):
                    picks.append(pick)
        # append new picks
        for pick in self.get_evt_data().picks:
            if picktype in str(pick.method_id.id):
                picks.append(pick)
    def exportEvent(self, fnout, fnext='.xml', fcheck='auto', upperErrors=None):

        """
        Export event to file
        :param fnout: basename of file
        :param fnext: file extension
        :param fnext: file extensions xml, cnv, obs, focmec, or/and pha
        :param fcheck: check and delete existing information
        can be a str or a list of strings of ['manual', 'auto', 'origin', 'magnitude']
        """
        from pylot.core.util.defaults import OUTPUTFORMATS

        if not type(fcheck) == list:
            fcheck = [fcheck]

@ -208,6 +271,13 @@ class Data(object):
                     'supported'.format(e, fnext)
            raise FormatError(errmsg)

        if hasattr(self.get_evt_data(), 'notes'):
            try:
                with open(os.path.join(os.path.dirname(fnout), 'notes.txt'), 'w') as notes_file:
                    notes_file.write(self.get_evt_data().notes)
            except Exception as e:
                print('Warning: Could not save notes.txt: ', str(e))

        # check for already existing xml-file
        if fnext == '.xml':
            if os.path.isfile(fnout + fnext):
@ -219,12 +289,14 @@ class Data(object):
                    raise IOError('No event information in file {}'.format(fnout + fnext))
                event = cat[0]
                if not event.resource_id == self.get_evt_data().resource_id:
                    raise IOError("Missmatching event resource id's: {} and {}".format(event.resource_id,
                                                                                       self.get_evt_data().resource_id))
                    QMessageBox.warning(self, 'Warning', 'Different resource IDs!')
                    return
                self.checkEvent(event, fcheck)
                self.setEvtData(event)

            self.get_evt_data().write(fnout + fnext, format=evtformat)
            # try exporting event

        # try exporting event
        else:
            evtdata_org = self.get_evt_data()
            picks = evtdata_org.picks
@ -241,43 +313,69 @@ class Data(object):
                    mstation_ext = mstation + '_'
                    for k in range(len(picks_copy)):
                        if ((picks_copy[k].waveform_id.station_code == mstation) or
                            (picks_copy[k].waveform_id.station_code == mstation_ext)) and \
                                (picks_copy[k].waveform_id.station_code == mstation_ext)) and \
                                (picks_copy[k].method_id == 'auto'):
                            del picks_copy[k]
                            break
            lendiff = len(picks) - len(picks_copy)
            if lendiff is not 0:
            if lendiff != 0:
                print("Manual as well as automatic picks available. Preferred the {} manual ones!".format(lendiff))


            no_uncertainties_p = []
            no_uncertainties_s = []
            if upperErrors:
                # check for pick uncertainties exceeding adjusted upper errors
                # Picks with larger uncertainties will not be saved in output file!
                for j in range(len(picks)):
                    for i in range(len(picks_copy)):
                        if picks_copy[i].phase_hint[0] == 'P':
                            if (picks_copy[i].time_errors['upper_uncertainty'] >= upperErrors[0]) or \
                                    (picks_copy[i].time_errors['uncertainty'] == None):
                            # Skipping pick if no upper_uncertainty is found and warning user
                            if picks_copy[i].time_errors['upper_uncertainty'] is None:
                                # print("{1} P-Pick of station {0} does not have upper_uncertainty and cant be checked".format(
                                #     picks_copy[i].waveform_id.station_code,
                                #     picks_copy[i].method_id))
                                if not picks_copy[i].waveform_id.station_code in no_uncertainties_p:
                                    no_uncertainties_p.append(picks_copy[i].waveform_id.station_code)
                                continue

                            # print ("checking for upper_uncertainty")
                            if (picks_copy[i].time_errors['uncertainty'] is None) or \
                                    (picks_copy[i].time_errors['upper_uncertainty'] >= upperErrors[0]):
                                print("Uncertainty exceeds or equals adjusted upper time error!")
                                print("Adjusted uncertainty: {}".format(upperErrors[0]))
                                print("Pick uncertainty: {}".format(picks_copy[i].time_errors['uncertainty']))
                                print("{1} P-Pick of station {0} will not be saved in outputfile".format(
                                    picks_copy[i].waveform_id.station_code,
                                    picks_copy[i].method_id))
                                print("#")
                                del picks_copy[i]
                                break
                        if picks_copy[i].phase_hint[0] == 'S':
                            if (picks_copy[i].time_errors['upper_uncertainty'] >= upperErrors[1]) or \
                                    (picks_copy[i].time_errors['uncertainty'] == None):

                            # Skipping pick if no upper_uncertainty is found and warning user
                            if picks_copy[i].time_errors['upper_uncertainty'] is None:
                                # print("{1} S-Pick of station {0} does not have upper_uncertainty and cant be checked".format(
                                #     picks_copy[i].waveform_id.station_code,
                                #     picks_copy[i].method_id))
                                if not picks_copy[i].waveform_id.station_code in no_uncertainties_s:
                                    no_uncertainties_s.append(picks_copy[i].waveform_id.station_code)
                                continue


                            if (picks_copy[i].time_errors['uncertainty'] is None) or \
                                    (picks_copy[i].time_errors['upper_uncertainty'] >= upperErrors[1]):
                                print("Uncertainty exceeds or equals adjusted upper time error!")
                                print("Adjusted uncertainty: {}".format(upperErrors[1]))
                                print("Pick uncertainty: {}".format(picks_copy[i].time_errors['uncertainty']))
                                print("{1} S-Pick of station {0} will not be saved in outputfile".format(
                                    picks_copy[i].waveform_id.station_code,
                                    picks_copy[i].method_id))
                                print("#")
                                del picks_copy[i]
                                break
                for s in no_uncertainties_p:
                    print("P-Pick of station {0} does not have upper_uncertainty and can't be checked".format(s))
                for s in no_uncertainties_s:
                    print("S-Pick of station {0} does not have upper_uncertainty and can't be checked".format(s))
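The filter rule applied above can be condensed into a small stand-alone sketch (this helper is illustrative and not part of PyLoT):

def pick_passes_upper_error(pick, upper_error):
    # A pick is dropped if it carries no uncertainty at all or if its
    # upper uncertainty reaches the adjusted upper time error; picks
    # without upper_uncertainty cannot be checked and are only reported.
    upper = pick.time_errors['upper_uncertainty']
    if upper is None:
        return True  # cannot be checked, warn separately
    uncertainty = pick.time_errors['uncertainty']
    return uncertainty is not None and upper < upper_error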
        if fnext == '.obs':
            try:
@ -287,6 +385,14 @@ class Data(object):
                    header = '# EQEVENT: Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' % evid
                    nllocfile = open(fnout + fnext)
                    l = nllocfile.readlines()
                    # Adding A0/Generic Amplitude to .obs file
                    # l2 = []
                    # for li in l:
                    #     for amp in evtdata_org.amplitudes:
                    #         if amp.waveform_id.station_code == li[0:5].strip():
                    #             li = li[0:64] + '{:0.2e}'.format(amp.generic_amplitude) + li[73:-1] + '\n'
                    #     l2.append(li)
                    # l = l2
                    nllocfile.close()
                    l.insert(0, header)
                    nllocfile = open(fnout + fnext, 'w')
@ -301,20 +407,32 @@ class Data(object):
                except KeyError as e:
                    raise KeyError('''{0} export format
                                   not implemented: {1}'''.format(evtformat, e))
            if fnext == '_focmec.in':
                try:
                    parameter = PylotParameter()
                    logging.warning('Using default input parameter')
                    focmec.export(picks_copy, fnout + fnext, parameter, eventinfo=self.get_evt_data())
                except KeyError as e:
                    raise KeyError('''{0} export format
                                   not implemented: {1}'''.format(evtformat, e))
            if fnext == '.pha':
                try:
                    parameter = PylotParameter()
                    logging.warning('Using default input parameter')
                    hypodd.export(picks_copy, fnout + fnext, parameter, eventinfo=self.get_evt_data())
                except KeyError as e:
                    raise KeyError('''{0} export format
                                   not implemented: {1}'''.format(evtformat, e))
    def getComp(self):
        """


        :return:
        Get component (ZNE)
        """
        return self.comp

    def getID(self):
        """


        :return:
        Get unique resource id
        """
        try:
            return self.evtdata.get('resource_id').id
@ -323,31 +441,84 @@ class Data(object):

    def filterWFData(self, kwargs):
        """

        :param kwargs:
        Filter waveform data
        :param kwargs: arguments to pass through to filter function
        """
        self.getWFData().filter(**kwargs)
        data = self.getWFData()
        data.detrend('linear')
        data.taper(0.02, type='cosine')
        data.filter(**kwargs)
        self.dirty = True
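The kwargs dictionary is handed through to ObsPy's Stream.filter after detrending and tapering; a typical call could look like this (the filter values are illustrative, mirroring defaults used elsewhere in PyLoT):

kwargs = {'type': 'bandpass', 'freqmin': 2.0, 'freqmax': 20.0,
          'corners': 2, 'zerophase': False}
data.filterWFData(kwargs)  # detrend -> taper -> filter, sets the dirty flag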
    def setWFData(self, fnames):
    def setWFData(self, fnames, fnames_alt=None, checkRotated=False, metadata=None, tstart=0, tstop=0):
        """
        Clear current waveform data and set given waveform data
        :param fnames: waveform data names to append
        :param fnames_alt: alternative data to show (e.g. synthetic/processed)
        :type fnames: list
        """
        def check_fname_exists(filenames: list) -> list:
            if filenames:
                filenames = [fn for fn in filenames if os.path.isfile(fn)]
            return filenames

        :param fnames:
        """
        self.wfdata = Stream()
        self.wforiginal = None
        self.wf_alt = Stream()
        if tstart == tstop:
            tstart = tstop = None
        self.tstart = tstart
        self.tstop = tstop

        # remove directories
        fnames = check_fname_exists(fnames)
        fnames_alt = check_fname_exists(fnames_alt)

        # if obspy_dmt:
        #     wfdir = 'raw'
        #     self.processed = False
        #     for fname in fnames:
        #         if fname.endswith('processed'):
        #             wfdir = 'processed'
        #             self.processed = True
        #             break
        #     for fpath in fnames:
        #         if fpath.endswith(wfdir):
        #             wffnames = [os.path.join(fpath, fname) for fname in os.listdir(fpath)]
        #         if 'syngine' in fpath.split('/')[-1]:
        #             wffnames_syn = [os.path.join(fpath, fname) for fname in os.listdir(fpath)]
        # else:
        #     wffnames = fnames
        if fnames is not None:
            self.appendWFData(fnames)
            if fnames_alt is not None:
                self.appendWFData(fnames_alt, alternative=True)
        else:
            return False

        # various pre-processing steps:
        # remove possible underscores in station names
        # self.wfdata = remove_underscores(self.wfdata)
        # check for gaps and merge
        self.wfdata, _ = check_for_gaps_and_merge(self.wfdata)
        # check for nans
        check_for_nan(self.wfdata)
        # check for stations with rotated components
        if checkRotated and metadata is not None:
            self.wfdata = check4rotated(self.wfdata, metadata, verbosity=0)
        # trim station components to same start value
        trim_station_components(self.wfdata, trim_start=True, trim_end=False)

        # make a copy of original data
        self.wforiginal = self.getWFData().copy()
        self.dirty = False
        return True
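A minimal call sketch (the file names are invented; metadata is assumed to be an inventory-like object as used by check4rotated):

data = Data()
ok = data.setWFData(['e0001/GR.STA1..HHZ.mseed'],
                    fnames_alt=['e0001/syngine/GR.STA1..HHZ.mseed'],
                    checkRotated=True, metadata=metadata)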
    def appendWFData(self, fnames):
    def appendWFData(self, fnames, alternative=False):
        """

        :param fnames:
        Read waveform data from fnames and append it to current wf data
        :param fnames: waveform data to append
        :type fnames: list
        """
        assert isinstance(fnames, list), "input parameter 'fnames' is " \
                                         "supposed to be of type 'list' " \
@ -356,70 +527,71 @@ class Data(object):
        if self.dirty:
            self.resetWFData()

        orig_or_alternative_data = {True: self.wf_alt,
                                    False: self.wfdata}

        warnmsg = ''
        for fname in fnames:
        for fname in set(fnames):
            try:
                self.wfdata += read(fname)
                orig_or_alternative_data[alternative] += read(fname, starttime=self.tstart, endtime=self.tstop)
            except TypeError:
                try:
                    self.wfdata += read(fname, format='GSE2')
                    orig_or_alternative_data[alternative] += read(fname, format='GSE2', starttime=self.tstart, endtime=self.tstop)
                except Exception as e:
                    warnmsg += '{0}\n{1}\n'.format(fname, e)
                    try:
                        orig_or_alternative_data[alternative] += read(fname, format='SEGY', starttime=self.tstart,
                                                                      endtime=self.tstop)
                    except Exception as e:
                        warnmsg += '{0}\n{1}\n'.format(fname, e)
            except SacIOError as se:
                warnmsg += '{0}\n{1}\n'.format(fname, se)
        if warnmsg:
            warnmsg = 'WARNING: unable to read\n' + warnmsg
            warnmsg = 'WARNING in appendWFData: unable to read waveform data\n' + warnmsg
            print(warnmsg)
    def getWFData(self):
        """


        :return:
        """
        return self.wfdata

    def getOriginalWFData(self):
        """


        :return:
        """
        return self.wforiginal

    def getAltWFdata(self):
        return self.wf_alt

    def resetWFData(self):
        """


        Set waveform data to original waveform data
        """
        self.wfdata = self.getOriginalWFData().copy()
        if self.getOriginalWFData():
            self.wfdata = self.getOriginalWFData().copy()
        else:
            self.wfdata = Stream()
        self.dirty = False

    def resetPicks(self):
        """


        Clear all picks from event
        """
        self.get_evt_data().picks = []

    def get_evt_data(self):
        """


        :return:
        """
        return self.evtdata

    def setEvtData(self, event):
        self.evtdata = event
    def applyEVTData(self, data, typ='pick', authority_id='rub'):

    def applyEVTData(self, data, typ='pick'):
        """

        :param data:
        :param typ:
        :param authority_id:
        Either takes an `obspy.core.event.Event` object and applies all new
        information on the event to the actual data if typ is 'event' or
        creates ObsPy pick objects and appends them to the picks list from the
        PyLoT dictionary containing all picks if typ is 'pick'
        :param data: data to apply, either picks or complete event
        :type data:
        :param typ: which event data to apply, 'pick' or 'event'
        :type typ: str
        :param authority_id: (currently unused)
        :type: str
        :raise OverwriteError:
        """

@ -435,8 +607,10 @@ class Data(object):
            # check for automatic picks
            print("Writing phases to ObsPy-quakeml file")
            for key in picks:
                if not picks[key].get('P'):
                    continue
                if picks[key]['P']['picker'] == 'auto':
                    print("Existing picks will be overwritten!")
                    print("Existing auto-picks will be overwritten in pick-dictionary!")
                    picks = picks_from_picksdict(picks)
                    break
            else:
@ -458,6 +632,9 @@ class Data(object):
        information on the event to the actual data
        :param event:
        """
        if event is None:
            print("applyEvent: Received None")
            return
        if self.isNew():
            self.setEvtData(event)
        else:
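In practice the method receives either a PyLoT pick dictionary or a complete event object; a usage sketch (both inputs assumed to exist):

data.applyEVTData(picksdict, typ='pick')  # append picks from a pick dictionary
data.applyEVTData(event, typ='event')     # apply complete event information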
@ -654,8 +831,24 @@ class PilotDataStructure(GenericDataStructure):

    def __init__(self, **fields):
        if not fields:
            fields = {'database': '2006.01',
                      'root': '/data/Egelados/EVENT_DATA/LOCAL'}
            fields = {'database': '',
                      'root': ''}

        GenericDataStructure.__init__(self, **fields)

        self.setExpandFields(['root', 'database'])


class ObspyDMTdataStructure(GenericDataStructure):
    """
    Object containing the data access information for the obspyDMT data
    structure.
    """

    def __init__(self, **fields):
        if not fields:
            fields = {'database': '',
                      'root': ''}

        GenericDataStructure.__init__(self, **fields)
@ -1,35 +1,30 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np

defaults = {'rootpath': {'type': str,
                         'tooltip': 'project path',
                         'value': '',
                         'namestring': 'Root path'},
"""
Default parameters used for picking
"""

            'datapath': {'type': str,
                         'tooltip': 'data path',
defaults = {'datapath': {'type': str,
                         'tooltip': 'path to eventfolders',
                         'value': '',
                         'namestring': 'Data path'},

            'database': {'type': str,
                         'tooltip': 'name of data base',
                         'value': '',
                         'namestring': 'Database path'},

            'eventID': {'type': str,
                        'tooltip': 'event ID for single event processing (* for all events found in database)',
                        'value': '',
                        'tooltip': 'event ID for single event processing (* for all events found in datapath)',
                        'value': '*',
                        'namestring': 'Event ID'},

            'extent': {'type': str,
                       'tooltip': 'extent of array ("local", "regional" or "global")',
                       'tooltip': 'extent of array ("active", "local", "regional" or "global")',
                       'value': 'local',
                       'namestring': 'Array extent'},

            'invdir': {'type': str,
                       'tooltip': 'full path to inventory or dataless-seed file',
                       'value': '',
                       'namestring': 'Inversion dir'},
                       'namestring': 'Inventory directory'},

            'datastructure': {'type': str,
                              'tooltip': 'choose data structure',
@ -58,7 +53,7 @@ defaults = {'rootpath': {'type': str,

            'ctrfile': {'type': str,
                        'tooltip': 'name of autoPyLoT-output control file for NLLoc',
                        'value': 'Insheim_min1d2015_auto.in',
                        'value': '',
                        'namestring': 'Control filename'},

            'ttpatter': {'type': str,
@ -74,11 +69,15 @@ defaults = {'rootpath': {'type': str,
            'vp': {'type': float,
                   'tooltip': 'average P-wave velocity',
                   'value': 3530.,
                   'min': 0.,
                   'max': np.inf,
                   'namestring': 'P-velocity'},

            'rho': {'type': float,
                    'tooltip': 'average rock density [kg/m^3]',
                    'value': 2500.,
                    'min': 0.,
                    'max': np.inf,
                    'namestring': 'Density'},

            'Qp': {'type': (float, float),
@ -90,42 +89,58 @@ defaults = {'rootpath': {'type': str,
                      'tooltip': 'start time [s] for calculating CF for P-picking (if TauPy:'
                                 ' seconds relative to estimated onset)',
                      'value': 15.0,
                      'min': -np.inf,
                      'max': np.inf,
                      'namestring': 'P start'},

            'pstop': {'type': float,
                      'tooltip': 'end time [s] for calculating CF for P-picking (if TauPy:'
                                 ' seconds relative to estimated onset)',
                      'value': 60.0,
                      'min': -np.inf,
                      'max': np.inf,
                      'namestring': 'P stop'},

            'sstart': {'type': float,
                       'tooltip': 'start time [s] relative to P-onset for calculating CF for S-picking',
                       'value': -1.0,
                       'min': -np.inf,
                       'max': np.inf,
                       'namestring': 'S start'},

            'sstop': {'type': float,
                      'tooltip': 'end time [s] after P-onset for calculating CF for S-picking',
                      'value': 10.0,
                      'min': -np.inf,
                      'max': np.inf,
                      'namestring': 'S stop'},

            'bpz1': {'type': (float, float),
                     'tooltip': 'lower/upper corner freq. of first band pass filter Z-comp. [Hz]',
                     'value': (2, 20),
                     'min': (0., 0.),
                     'max': (np.inf, np.inf),
                     'namestring': ('Z-bandpass 1', 'Lower', 'Upper')},

            'bpz2': {'type': (float, float),
                     'tooltip': 'lower/upper corner freq. of second band pass filter Z-comp. [Hz]',
                     'value': (2, 30),
                     'min': (0., 0.),
                     'max': (np.inf, np.inf),
                     'namestring': ('Z-bandpass 2', 'Lower', 'Upper')},

            'bph1': {'type': (float, float),
                     'tooltip': 'lower/upper corner freq. of first band pass filter H-comp. [Hz]',
                     'value': (2, 15),
                     'min': (0., 0.),
                     'max': (np.inf, np.inf),
                     'namestring': ('H-bandpass 1', 'Lower', 'Upper')},

            'bph2': {'type': (float, float),
                     'tooltip': 'lower/upper corner freq. of second band pass filter z-comp. [Hz]',
                     'value': (2, 20),
                     'min': (0., 0.),
                     'max': (np.inf, np.inf),
                     'namestring': ('H-bandpass 2', 'Lower', 'Upper')},

            'algoP': {'type': str,
@ -136,76 +151,106 @@ defaults = {'rootpath': {'type': str,
            'tlta': {'type': float,
                     'tooltip': 'for HOS-/AR-AIC-picker, length of LTA window [s]',
                     'value': 7.0,
                     'min': 0.,
                     'max': np.inf,
                     'namestring': 'LTA window'},

            'hosorder': {'type': int,
                         'tooltip': 'for HOS-picker, order of Higher Order Statistics',
                         'value': 4,
                         'min': 0,
                         'max': np.inf,
                         'namestring': 'HOS order'},

            'Parorder': {'type': int,
                         'tooltip': 'for AR-picker, order of AR process of Z-component',
                         'value': 2,
                         'min': 0,
                         'max': np.inf,
                         'namestring': 'AR order P'},

            'tdet1z': {'type': float,
                       'tooltip': 'for AR-picker, length of AR determination window [s] for Z-component, 1st pick',
                       'value': 1.2,
                       'min': 0.,
                       'max': np.inf,
                       'namestring': 'AR det. window Z 1'},

            'tpred1z': {'type': float,
                        'tooltip': 'for AR-picker, length of AR prediction window [s] for Z-component, 1st pick',
                        'value': 0.4,
                        'min': 0.,
                        'max': np.inf,
                        'namestring': 'AR pred. window Z 1'},

            'tdet2z': {'type': float,
                       'tooltip': 'for AR-picker, length of AR determination window [s] for Z-component, 2nd pick',
                       'value': 0.6,
                       'min': 0.,
                       'max': np.inf,
                       'namestring': 'AR det. window Z 2'},

            'tpred2z': {'type': float,
                        'tooltip': 'for AR-picker, length of AR prediction window [s] for Z-component, 2nd pick',
                        'value': 0.2,
                        'min': 0.,
                        'max': np.inf,
                        'namestring': 'AR pred. window Z 2'},

            'addnoise': {'type': float,
                         'tooltip': 'add noise to seismogram for stable AR prediction',
                         'value': 0.001,
                         'min': 0.,
                         'max': np.inf,
                         'namestring': 'Add noise'},

            'tsnrz': {'type': (float, float, float, float),
                      'tooltip': 'for HOS/AR, window lengths for SNR-and slope estimation [tnoise, tsafety, tsignal, tslope] [s]',
                      'value': (3, 0.1, 0.5, 1.0),
                      'min': (0., 0., 0., 0.),
                      'max': (np.inf, np.inf, np.inf, np.inf),
                      'namestring': ('SNR windows P', 'Noise', 'Safety', 'Signal', 'Slope')},

            'pickwinP': {'type': float,
                         'tooltip': 'for initial AIC pick, length of P-pick window [s]',
                         'value': 3.0,
                         'min': 0.,
                         'max': np.inf,
                         'namestring': 'AIC window P'},

            'Precalcwin': {'type': float,
                           'tooltip': 'for HOS/AR, window length [s] for recalculation of CF (relative to 1st pick)',
                           'value': 6.0,
                           'min': 0.,
                           'max': np.inf,
                           'namestring': 'Recal. window P'},

            'aictsmooth': {'type': float,
                           'tooltip': 'for HOS/AR, take average of samples for smoothing of AIC-function [s]',
                           'value': 0.2,
                           'min': 0.,
                           'max': np.inf,
                           'namestring': 'AIC smooth P'},

            'tsmoothP': {'type': float,
                         'tooltip': 'for HOS/AR, take average of samples for smoothing CF [s]',
                         'tooltip': 'for HOS/AR, take average of samples in this time window for smoothing CF [s]',
                         'value': 0.1,
                         'min': 0.,
                         'max': np.inf,
                         'namestring': 'CF smooth P'},

            'ausP': {'type': float,
                     'tooltip': 'for HOS/AR, artificial uplift of samples (aus) of CF (P)',
                     'value': 0.001,
                     'min': 0.,
                     'max': np.inf,
                     'namestring': 'Artificial uplift P'},

            'nfacP': {'type': float,
                      'tooltip': 'for HOS/AR, noise factor for noise level determination (P)',
                      'value': 1.3,
                      'min': 0.,
                      'max': np.inf,
                      'namestring': 'Noise factor P'},
            'algoS': {'type': str,
@ -216,61 +261,85 @@ defaults = {'rootpath': {'type': str,
            'tdet1h': {'type': float,
                       'tooltip': 'for HOS/AR, length of AR-determination window [s], H-components, 1st pick',
                       'value': 0.8,
                       'min': 0.,
                       'max': np.inf,
                       'namestring': 'AR det. window H 1'},

            'tpred1h': {'type': float,
                        'tooltip': 'for HOS/AR, length of AR-prediction window [s], H-components, 1st pick',
                        'value': 0.4,
                        'min': 0.,
                        'max': np.inf,
                        'namestring': 'AR pred. window H 1'},

            'tdet2h': {'type': float,
                       'tooltip': 'for HOS/AR, length of AR-determination window [s], H-components, 2nd pick',
                       'value': 0.6,
                       'min': 0.,
                       'max': np.inf,
                       'namestring': 'AR det. window H 2'},

            'tpred2h': {'type': float,
                        'tooltip': 'for HOS/AR, length of AR-prediction window [s], H-components, 2nd pick',
                        'value': 0.3,
                        'min': 0.,
                        'max': np.inf,
                        'namestring': 'AR pred. window H 2'},

            'Sarorder': {'type': int,
                         'tooltip': 'for AR-picker, order of AR process of H-components',
                         'value': 4,
                         'min': 0,
                         'max': np.inf,
                         'namestring': 'AR order S'},

            'Srecalcwin': {'type': float,
                           'tooltip': 'for AR-picker, window length [s] for recalculation of CF (2nd pick) (H)',
                           'value': 5.0,
                           'min': 0.,
                           'max': np.inf,
                           'namestring': 'Recal. window S'},

            'pickwinS': {'type': float,
                         'tooltip': 'for initial AIC pick, length of S-pick window [s]',
                         'value': 3.0,
                         'min': 0.,
                         'max': np.inf,
                         'namestring': 'AIC window S'},

            'tsnrh': {'type': (float, float, float, float),
                      'tooltip': 'for ARH/AR3, window lengths for SNR-and slope estimation [tnoise, tsafety, tsignal, tslope] [s]',
                      'value': (2, 0.2, 1.5, 0.5),
                      'min': (0., 0., 0., 0.),
                      'max': (np.inf, np.inf, np.inf, np.inf),
                      'namestring': ('SNR windows S', 'Noise', 'Safety', 'Signal', 'Slope')},

            'aictsmoothS': {'type': float,
                            'tooltip': 'for AIC-picker, take average of samples for smoothing of AIC-function [s]',
                            'tooltip': 'for AIC-picker, take average of samples in this time window for smoothing of AIC-function [s]',
                            'value': 0.5,
                            'min': 0.,
                            'max': np.inf,
                            'namestring': 'AIC smooth S'},

            'tsmoothS': {'type': float,
                         'tooltip': 'for AR-picker, take average of samples for smoothing CF [s] (S)',
                         'value': 0.7,
                         'min': 0.,
                         'max': np.inf,
                         'namestring': 'CF smooth S'},

            'ausS': {'type': float,
                     'tooltip': 'for HOS/AR, artificial uplift of samples (aus) of CF (S)',
                     'value': 0.9,
                     'min': 0.,
                     'max': np.inf,
                     'namestring': 'Artificial uplift S'},

            'nfacS': {'type': float,
                      'tooltip': 'for AR-picker, noise factor for noise level determination (S)',
                      'value': 1.5,
                      'min': 0.,
                      'max': np.inf,
                      'namestring': 'Noise factor S'},

            'minfmweight': {'type': int,
@ -281,103 +350,143 @@ defaults = {'rootpath': {'type': str,
            'minFMSNR': {'type': float,
                         'tooltip': 'minimum required SNR for first-motion determination',
                         'value': 2.,
                         'min': 0.,
                         'max': np.inf,
                         'namestring': 'Min SNR'},

            'fmpickwin': {'type': float,
                          'tooltip': 'pick window around P onset for calculating zero crossings',
                          'tooltip': 'pick window [s] around P onset for calculating zero crossings',
                          'value': 0.2,
                          'min': 0.,
                          'max': np.inf,
                          'namestring': 'Zero crossings window'},

            'timeerrorsP': {'type': (float, float, float, float),
                            'tooltip': 'discrete time errors [s] corresponding to picking weights [0 1 2 3] for P',
                            'value': (0.01, 0.02, 0.04, 0.08),
                            'min': (0., 0., 0., 0.),
                            'max': (np.inf, np.inf, np.inf, np.inf),
                            'namestring': ('Time errors P', '0', '1', '2', '3')},

            'timeerrorsS': {'type': (float, float, float, float),
                            'tooltip': 'discrete time errors [s] corresponding to picking weights [0 1 2 3] for S',
                            'value': (0.04, 0.08, 0.16, 0.32),
                            'min': (0., 0., 0., 0.),
                            'max': (np.inf, np.inf, np.inf, np.inf),
                            'namestring': ('Time errors S', '0', '1', '2', '3')},

            'minAICPslope': {'type': float,
                             'tooltip': 'below this slope [counts/s] the initial P pick is rejected',
                             'value': 0.8,
                             'min': 0.,
                             'max': np.inf,
                             'namestring': 'Min. slope P'},

            'minAICPSNR': {'type': float,
                           'tooltip': 'below this SNR the initial P pick is rejected',
                           'value': 1.1,
                           'min': 0.,
                           'max': np.inf,
                           'namestring': 'Min. SNR P'},

            'minAICSslope': {'type': float,
                             'tooltip': 'below this slope [counts/s] the initial S pick is rejected',
                             'value': 1.,
                             'min': 0.,
                             'max': np.inf,
                             'namestring': 'Min. slope S'},

            'minAICSSNR': {'type': float,
                           'tooltip': 'below this SNR the initial S pick is rejected',
                           'value': 1.5,
                           'min': 0.,
                           'max': np.inf,
                           'namestring': 'Min. SNR S'},

            'minsiglength': {'type': float,
                             'tooltip': 'length of signal part for which amplitudes must exceed noiselevel [s]',
                             'value': 1.,
                             'min': 0.,
                             'max': np.inf,
                             'namestring': 'Min. signal length'},

            'noisefactor': {'type': float,
                            'tooltip': 'noiselevel*noisefactor=threshold',
                            'value': 1.0,
                            'min': 0.,
                            'max': np.inf,
                            'namestring': 'Noise factor'},

            'minpercent': {'type': float,
                           'tooltip': 'required percentage of amplitudes exceeding threshold',
                           'value': 10.,
                           'min': 0.,
                           'max': np.inf,
                           'namestring': 'Min amplitude [%]'},

            'zfac': {'type': float,
                     'tooltip': 'P-amplitude must exceed at least zfac times RMS-S amplitude',
                     'value': 1.5,
                     'min': 0.,
                     'max': np.inf,
                     'namestring': 'Z factor'},

            'mdttolerance': {'type': float,
                             'tooltip': 'maximum allowed deviation of P picks from median [s]',
                             'value': 6.0,
                             'min': 0.,
                             'max': np.inf,
                             'namestring': 'Median tolerance'},

            'wdttolerance': {'type': float,
                             'tooltip': 'maximum allowed deviation from Wadati-diagram',
                             'value': 1.0,
                             'min': 0.,
                             'max': np.inf,
                             'namestring': 'Wadati tolerance'},

            'jackfactor': {'type': float,
                           'tooltip': 'pick is removed if the variance of the subgroup with the pick removed is larger than the mean variance of all subgroups times safety factor',
                           'value': 5.0,
                           'namestring': 'Jackknife safety factor'},
                           'tooltip': 'pick is removed if the variance of the subgroup with the pick removed is larger than the mean variance of all subgroups times safety factor',
                           'value': 5.0,
                           'min': 0.,
                           'max': np.inf,
                           'namestring': 'Jackknife safety factor'},

            'WAscaling': {'type': (float, float, float),
                          'tooltip': 'Scaling relation (log(Ao)+Alog(r)+Br+C) of Wood-Anderson amplitude Ao [nm] \
                                      If zeros are set, original Richter magnitude is calculated!',
                          'value': (0., 0., 0.),
                          'min': (-np.inf, -np.inf, -np.inf),
                          'max': (np.inf, np.inf, np.inf),
                          'namestring': ('Wood-Anderson scaling', '', '', '')},

            'magscaling': {'type': (float, float),
                           'tooltip': 'Scaling relation for derived local magnitude [a*Ml+b]. \
                                       If zeros are set, no scaling of network magnitude is applied!',
                           'value': (0., 0.),
                           'min': (0., -np.inf),
                           'max': (np.inf, np.inf),
                           'namestring': ('Local mag. scaling', '', '')},

            'minfreq': {'type': (float, float),
                        'tooltip': 'Lower filter frequency [P, S]',
                        'value': (1.0, 1.0),
                        'min': (0., 0.),
                        'max': (np.inf, np.inf),
                        'namestring': ('Lower freq.', 'P', 'S')},

            'maxfreq': {'type': (float, float),
                        'tooltip': 'Upper filter frequency [P, S]',
                        'value': (10.0, 10.0),
                        'min': (0., 0.),
                        'max': (np.inf, np.inf),
                        'namestring': ('Upper freq.', 'P', 'S')},

            'filter_order': {'type': (int, int),
                             'tooltip': 'filter order [P, S]',
                             'value': (2, 2),
                             'min': (0, 0),
                             'max': (np.inf, np.inf),
                             'namestring': ('Order', 'P', 'S')},

            'filter_type': {'type': (str, str),
@ -391,16 +500,19 @@ defaults = {'rootpath': {'type': str,
                        'namestring': 'Use TauPy'},

            'taup_model': {'type': str,
                           'tooltip': 'define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6',
                           'tooltip': 'Define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6',
                           'value': 'iasp91',
                           'namestring': 'TauPy model'}
                           'namestring': 'TauPy model'},

            'taup_phases': {'type': str,
                            'tooltip': 'Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.',
                            'value': 'ttall',
                            'namestring': 'TauPy phases'},
            }

settings_main = {
    'dirs': [
        'rootpath',
        'datapath',
        'database',
        'eventID',
        'invdir',
        'datastructure',
@ -432,6 +544,7 @@ settings_main = {
        'sstop',
        'use_taup',
        'taup_model',
        'taup_phases',
        'bpz1',
        'bpz2',
        'bph1',
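To illustrate the [tnoise, tsafety, tsignal, tslope] convention used by the tsnrz/tsnrh values above, here is a hedged sketch of how such windows are commonly placed around a theoretical onset time (this helper is illustrative, not PyLoT code):

def snr_windows(onset, tnoise, tsafety, tsignal):
    # noise window ends tsafety seconds before the onset,
    # the signal window starts at the onset itself
    noise_window = (onset - tnoise - tsafety, onset - tsafety)
    signal_window = (onset, onset + tsignal)
    return noise_window, signal_window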
84
pylot/core/io/getEventListFromXML.py
Normal file
@ -0,0 +1,84 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""
Script to get event parameters from PyLoT-xml file to write
them into eventlist.
LK, igem, 03/2021
Edited for use in PyLoT
JG, igem, 01/2022
"""

import os
import argparse
import numpy as np
import matplotlib.pyplot as plt
import glob

from obspy.core.event import read_events
from pyproj import Proj

"""
Creates an eventlist file summarizing all events found in a certain folder. Only called by pressing the UI button eventlis_xml_action

:rtype:
:param path: Path to the root folder where the single event folders are to be found
"""


def geteventlistfromxml(path, outpath):
    p = Proj(proj='utm', zone=32, ellps='WGS84')

    # open eventlist file and write header
    evlist = outpath + '/eventlist'
    evlistobj = open(evlist, 'w')
    evlistobj.write(
        'EventID Date To Lat Lon EAST NORTH Dep Ml NoP NoS RMS errH errZ Gap \n')

    # data path
    dp = path + "/e*/*.xml"
    # list of all available xml-files
    xmlnames = glob.glob(dp)

    # read all onset weights
    for names in xmlnames:
        print("Getting location parameters from {}".format(names))
        cat = read_events(names)
        try:
            st = cat.events[0].origins[0].time
            Lat = cat.events[0].origins[0].latitude
            Lon = cat.events[0].origins[0].longitude
            EAST, NORTH = p(Lon, Lat)
            Dep = cat.events[0].origins[0].depth / 1000
            Ml = cat.events[0].magnitudes[1].mag
            NoP = []
            NoS = []
        except IndexError:
            print('Insufficient data found for event (not localised): ' + names.split('/')[-1].split('_')[-1][
                  :-4] + ' Skipping event for eventlist.')
            continue

        for i in range(len(cat.events[0].origins[0].arrivals)):
            if cat.events[0].origins[0].arrivals[i].phase == 'P':
                NoP.append(cat.events[0].origins[0].arrivals[i].phase)
            elif cat.events[0].origins[0].arrivals[i].phase == 'S':
                NoS.append(cat.events[0].origins[0].arrivals[i].phase)
        # NoP = cat.events[0].origins[0].quality.used_station_count
        errH = cat.events[0].origins[0].origin_uncertainty.max_horizontal_uncertainty
        errZ = cat.events[0].origins[0].depth_errors.uncertainty
        Gap = cat.events[0].origins[0].quality.azimuthal_gap
        # evID = names.split('/')[6]
        evID = names.split('/')[-1].split('_')[-1][:-4]
        Date = str(st.year) + str('%02d' % st.month) + str('%02d' % st.day)
        To = str('%02d' % st.hour) + str('%02d' % st.minute) + str('%02d' % st.second) + \
             '.' + str('%06d' % st.microsecond)

        # write into eventlist
        evlistobj.write('%s %s %s %9.6f %9.6f %13.6f %13.6f %8.6f %3.1f %d %d NaN %d %d %d\n' % (evID,
                        Date, To, Lat, Lon,
                        EAST, NORTH, Dep, Ml,
                        len(NoP), len(NoS),
                        errH, errZ, Gap))
        print('Adding Event ' + names.split('/')[-1].split('_')[-1][:-4] + ' to eventlist')
    print('Eventlist created and saved in: ' + outpath)
    evlistobj.close()
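Usage sketch (paths invented): the function expects event folders matching e* below path, each holding a PyLoT xml file, and writes a file named eventlist into outpath:

from pylot.core.io.getEventListFromXML import geteventlistfromxml
geteventlistfromxml('/data/EVENT_DATA', '/data/EVENT_DATA')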
@ -1,5 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os

from pylot.core.io import default_parameters
from pylot.core.util.errors import ParameterError
@ -48,6 +50,7 @@ class PylotParameter(object):
        self.__init_default_paras()
        self.__init_subsettings()
        self.__filename = fnin
        self.__parameter = {}
        self._verbosity = verbosity
        self._parFileCont = {}
        # io from parsed arguments alternatively
@ -70,6 +73,7 @@ class PylotParameter(object):

    # Set default values of parameter names
    def __init_default_paras(self):
        """set default values of parameter names"""
        parameters = default_parameters.defaults
        self.__defaults = parameters

@ -86,12 +90,17 @@ class PylotParameter(object):
        return bool(self.__parameter)

    def __getitem__(self, key):
        try:
        if key in self.__parameter:
            return self.__parameter[key]
        except:
            return None
        else:
            logging.warning(f'{key} not found in PylotParameter')

    def __setitem__(self, key, value):
        try:
            value = self.check_range(value, self.__defaults[key]['max'], self.__defaults[key]['min'])
        except KeyError:
            # no min/max values in defaults
            pass
        self.__parameter[key] = value

    def __delitem__(self, key):
@ -104,15 +113,32 @@ class PylotParameter(object):
        return len(self.__parameter.keys())

    def iteritems(self):
        """
        Iterate over parameters
        :return: key, value tuple
        :rtype:
        """
        for key, value in self.__parameter.items():
            yield key, value

    def hasParam(self, parameter):
        if parameter in self.__parameter.keys():
            return True
        return False
        """
        Check if parameter is in keys
        :param parameter: parameter to look for in keys
        :type parameter:
        :return:
        :rtype: bool
        """
        return parameter in self.__parameter.keys()
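PylotParameter thus behaves like a mapping whose __setitem__ clamps values through check_range; a short session could look like this (values illustrative):

parameter = PylotParameter()
parameter.reset_defaults()
parameter['sstop'] = 12.5  # clamped against min/max from default_parameters
print(parameter['sstop'], parameter.hasParam('sstop'))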
    def get(self, *args):
        """
        Get first available parameter in args
        :param args:
        :type args:
        :return:
        :rtype:
        """
        try:
            for param in args:
                try:
@ -128,15 +154,35 @@ class PylotParameter(object):
            raise ParameterError(e)

    def get_defaults(self):
        """
        get default parameters
        :return:
        :rtype: dict
        """
        return self.__defaults

    def get_main_para_names(self):
        """
        Get main parameter names
        :return: list of keys available in parameters
        :rtype:
        """
        return self._settings_main

    def get_special_para_names(self):
        """
        Get pick parameter names
        :return: list of keys available in parameters
        :rtype:
        """
        return self._settings_special_pick

    def get_all_para_names(self):
        """
        Get all parameter names
        :return:
        :rtype: list
        """
        all_names = []
        all_names += self.get_main_para_names()['dirs']
        all_names += self.get_main_para_names()['nlloc']
@ -150,7 +196,46 @@ class PylotParameter(object):
        all_names += self.get_special_para_names()['quality']
        return all_names

    def reinit_default_parameters(self):
        self.__init_default_paras()

    @staticmethod
    def check_range(value, max_value, min_value):
        """
        Check if value is within the min/max values defined in default_parameters. Works for tuple and scalar values.
        :param value: Value to be checked against min/max range
        :param max_value: Maximum allowed value, tuple or scalar
        :param min_value: Minimum allowed value, tuple or scalar
        :return: value tuple/scalar clamped to the valid range

        >>> PylotParameter.check_range(-5, 10, 0)
        0
        >>> PylotParameter.check_range((-5., 100.), (10., 10.), (0., 0.))
        (0.0, 10.0)
        """
        try:
            # Try handling tuples by comparing their elements
            comparisons = [(a > b) for a, b in zip(value, max_value)]
            if True in comparisons:
                value = tuple(max_value[i] if comp else value[i] for i, comp in enumerate(comparisons))
            comparisons = [(a < b) for a, b in zip(value, min_value)]
            if True in comparisons:
                value = tuple(min_value[i] if comp else value[i] for i, comp in enumerate(comparisons))
        except TypeError:
            value = max(min_value, min(max_value, value))
        return value
    def checkValue(self, param, value):
        """
        Check type of value against expected type of param.
        Print warning message if type check fails
        :param param:
        :type param:
        :param value:
        :type value:
        :return:
        :rtype:
        """
        is_type = type(value)
        expect_type = self.get_defaults()[param]['type']
        if not is_type == expect_type and not is_type == tuple:
@ -159,9 +244,25 @@ class PylotParameter(object):
            print(Warning(message))

    def setParamKV(self, param, value):
        """
        set parameter param to value
        :param param:
        :type param:
        :param value:
        :type value:
        :return:
        :rtype: None
        """
        self.__setitem__(param, value)

    def setParam(self, **kwargs):
        """
        Set multiple parameters
        :param kwargs:
        :type kwargs:
        :return:
        :rtype: None
        """
        for key in kwargs:
            self.__setitem__(key, kwargs[key])

@ -170,11 +271,23 @@ class PylotParameter(object):
        print('ParameterError:\n non-existent parameter %s' % errmsg)

    def reset_defaults(self):
        """
        Reset current parameters to default parameters
        :return:
        :rtype: None
        """
        defaults = self.get_defaults()
        for param in defaults:
            self.setParamKV(param, defaults[param]['value'])
        for param_name, param in defaults.items():
            self.setParamKV(param_name, param['value'])

    def from_file(self, fnin=None):
        """
        read parameters from file and set values to read values
        :param fnin: filename
        :type fnin:
        :return:
        :rtype: None
        """
        if not fnin:
            if self.__filename is not None:
                fnin = self.__filename
@ -221,6 +334,13 @@ class PylotParameter(object):
        self.__parameter = self._parFileCont

    def export2File(self, fnout):
        """
        Export parameters to file
        :param fnout: Filename of export file
        :type fnout: str
        :return:
        :rtype:
        """
        fid_out = open(fnout, 'w')
        lines = []
        # for key, value in self.iteritems():
@ -257,6 +377,19 @@ class PylotParameter(object):
                           'quality assessment', None)

    def write_section(self, fid, names, title, separator):
        """
        write a section of parameters to file
        :param fid: File object to write to
        :type fid:
        :param names: which parameter names to write to file
        :type names:
        :param title: title of section
        :type title: str
        :param separator: section separator, written at start of section
        :type separator: str
        :return:
        :rtype:
        """
        if separator:
            fid.write(separator)
        fid.write('#{}#\n'.format(title))
@ -287,6 +420,28 @@ class PylotParameter(object):
                line = value + name + ttip
            fid.write(line)

    @staticmethod
    def check_deprecated_parameters(parameters):
        if parameters.hasParam('database') and parameters.hasParam('rootpath'):
            parameters['datapath'] = os.path.join(parameters['rootpath'], parameters['datapath'],
                                                  parameters['database'])
            logging.warning(
                f'Parameters database and rootpath are deprecated. '
                f'Tried to merge them to new path: {parameters["datapath"]}.'
            )

        remove_keys = []
        for key in parameters:
            if not key in default_parameters.defaults.keys():
                remove_keys.append(key)
                logging.warning(f'Removing deprecated parameter: {key}')

        for key in remove_keys:
            del parameters[key]

        parameters._settings_main = default_parameters.settings_main
        parameters._settings_special_pick = default_parameters.settings_special_pick
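The effect of check_deprecated_parameters on an old-style parameter set, as a sketch (paths invented):

# before: rootpath='/data', datapath='EVENT_DATA', database='2006.01'
PylotParameter.check_deprecated_parameters(parameter)
# after:  datapath='/data/EVENT_DATA/2006.01'; rootpath and database
#         are removed as deprecated keys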
class FilterOptions(object):
    '''
@ -341,7 +496,9 @@ class FilterOptions(object):

    def parseFilterOptions(self):
        if self:
            robject = {'type': self.getFilterType(), 'corners': self.getOrder()}
            robject = {'type': self.getFilterType(),
                       'corners': self.getOrder(),
                       'zerophase': False}
            if not self.getFilterType() in ['highpass', 'lowpass']:
                robject['freqmin'] = self.getFreq()[0]
                robject['freqmax'] = self.getFreq()[1]
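For a band filter, the resulting dictionary can be passed straight to ObsPy; a sketch (assuming filteroptions is a configured FilterOptions instance and stream an obspy Stream, frequency values illustrative):

robject = filteroptions.parseFilterOptions()
# e.g. {'type': 'bandpass', 'corners': 2, 'zerophase': False,
#       'freqmin': 2.0, 'freqmax': 20.0}
stream.filter(**robject)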
@ -1,7 +1,7 @@
from obspy import UTCDateTime
from obspy.core import event as ope

from pylot.core.util.utils import getLogin, getHash
from pylot.core.util.utils import get_login, get_hash


def create_amplitude(pickID, amp, unit, category, cinfo):
@ -54,14 +54,14 @@ def create_arrival(pickresID, cinfo, phase, azimuth=None, dist=None):

def create_creation_info(agency_id=None, creation_time=None, author=None):
    '''
    get creation info of obspy event
    :param agency_id:
    :param creation_time:
    :param author:
    :return:
    '''
    if author is None:
        author = getLogin()
        author = get_login()
    if creation_time is None:
        creation_time = UTCDateTime()
    return ope.CreationInfo(agency_id=agency_id, author=author,
@ -197,9 +197,9 @@ def create_pick(origintime, picknum, picktime, eventnum, cinfo, phase, station,

def create_resourceID(timetohash, restype, authority_id=None, hrstr=None):
    '''
    :param timetohash:
    :type timetohash
    create unique resource id
    :param timetohash: event origin time to hash
    :type timetohash: class: `~obspy.core.utcdatetime.UTCDateTime` object
    :param restype: type of the resource, e.g. 'orig', 'earthquake' ...
    :type restype: str
    :param authority_id: name of the institution carrying out the processing
@ -210,7 +210,7 @@ def create_resourceID(timetohash, restype, authority_id=None, hrstr=None):
    '''
    assert isinstance(timetohash, UTCDateTime), "'timetohash' is not an ObsPy" \
                                                "UTCDateTime object"
    hid = getHash(timetohash)
    hid = get_hash(timetohash)
    if hrstr is None:
        resID = ope.ResourceIdentifier(restype + '/' + hid[0:6])
    else:
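Following the code above, a resource identifier is composed of the resource type and the first six characters of the hashed time; sketch (assuming get_hash returns a hex digest):

from obspy import UTCDateTime
from pylot.core.io.location import create_resourceID
rid = create_resourceID(UTCDateTime(), 'orig')
# -> ope.ResourceIdentifier of the form 'orig/<6-char-hash>'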
@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import glob
import logging
import os
import warnings

@ -12,11 +12,13 @@ import scipy.io as sio
from obspy.core import UTCDateTime
from obspy.core.event import read_events
from obspy.core.util import AttribDict

from pylot.core.io.inputs import PylotParameter
from pylot.core.io.location import create_event, \
    create_magnitude
from pylot.core.pick.utils import select_for_phase
from pylot.core.util.utils import getOwner, full_range, four_digits
from pylot.core.pick.utils import select_for_phase, get_quality_class
from pylot.core.util.utils import get_owner, full_range, four_digits, transformFilterString4Export, \
    backtransformFilterString, loopIdentifyPhase, identifyPhase


def add_amplitudes(event, amplitudes):
@ -57,7 +59,7 @@ def readPILOTEvent(phasfn=None, locfn=None, authority_id='RUB', **kwargs):
    if phasfn is not None and os.path.isfile(phasfn):
        phases = sio.loadmat(phasfn)
        phasctime = UTCDateTime(os.path.getmtime(phasfn))
        phasauthor = getOwner(phasfn)
        phasauthor = get_owner(phasfn)
    else:
        phases = None
        phasctime = None
@ -65,7 +67,7 @@ def readPILOTEvent(phasfn=None, locfn=None, authority_id='RUB', **kwargs):
    if locfn is not None and os.path.isfile(locfn):
        loc = sio.loadmat(locfn)
        locctime = UTCDateTime(os.path.getmtime(locfn))
        locauthor = getOwner(locfn)
        locauthor = get_owner(locfn)
    else:
        loc = None
        locctime = None
@ -118,6 +120,13 @@ def readPILOTEvent(phasfn=None, locfn=None, authority_id='RUB', **kwargs):


def picksdict_from_pilot(fn):
    """
    Create pick dictionary from matlab file
    :param fn: matlab file
    :type fn:
    :return: pick dictionary
    :rtype: dict
    """
    from pylot.core.util.defaults import TIMEERROR_DEFAULTS
    picks = dict()
    phases_pilot = sio.loadmat(fn)
@ -147,6 +156,13 @@ def picksdict_from_pilot(fn):


def stations_from_pilot(stat_array):
    """
    Create stations list from pilot station array
    :param stat_array:
    :type stat_array:
    :return:
    :rtype: list
    """
    stations = list()
    cur_stat = None
    for stat in stat_array:
@ -164,6 +180,13 @@ def stations_from_pilot(stat_array):


def convert_pilot_times(time_array):
    """
    Convert pilot times to UTCDateTimes
    :param time_array: pilot times
    :type time_array:
    :return:
    :rtype:
    """
    times = [int(time) for time in time_array]
    microseconds = int((time_array[-1] - times[-1]) * 1e6)
    times.append(microseconds)
@ -171,6 +194,13 @@ def convert_pilot_times(time_array):


def picksdict_from_obs(fn):
    """
    create pick dictionary from obs file
    :param fn: filename
    :type fn:
    :return:
    :rtype:
    """
    picks = dict()
    station_name = str()
    for line in open(fn, 'r'):
@ -188,7 +218,7 @@ def picksdict_from_obs(fn):
    return picks
def picksdict_from_picks(evt):
|
||||
def picksdict_from_picks(evt, parameter=None):
|
||||
"""
|
||||
Takes an Event object and return the pick dictionary commonly used within
|
||||
PyLoT
|
||||
@ -201,20 +231,30 @@ def picksdict_from_picks(evt):
|
||||
'auto': {}
|
||||
}
|
||||
for pick in evt.picks:
|
||||
errors = None
|
||||
phase = {}
|
||||
station = pick.waveform_id.station_code
|
||||
channel = pick.waveform_id.channel_code
|
||||
if pick.waveform_id.channel_code is None:
|
||||
channel = ''
|
||||
else:
|
||||
channel = pick.waveform_id.channel_code
|
||||
network = pick.waveform_id.network_code
|
||||
mpp = pick.time
|
||||
spe = pick.time_errors.uncertainty
|
||||
if pick.filter_id:
|
||||
filter_id = backtransformFilterString(str(pick.filter_id.id))
|
||||
else:
|
||||
filter_id = None
|
||||
try:
|
||||
picker = str(pick.method_id)
|
||||
if picker.startswith('smi:local/'):
|
||||
picker = picker.split('smi:local/')[1]
|
||||
pick_method = str(pick.method_id)
|
||||
if pick_method.startswith('smi:local/'):
|
||||
pick_method = pick_method.split('smi:local/')[1]
|
||||
except IndexError:
|
||||
picker = 'manual' # MP MP TODO maybe improve statement
|
||||
pick_method = 'manual' # MP MP TODO maybe improve statement
|
||||
if pick_method == 'None':
|
||||
pick_method = 'manual'
|
||||
try:
|
||||
onsets = picksdict[picker][station]
|
||||
onsets = picksdict[pick_method][station]
|
||||
except KeyError as e:
|
||||
# print(e)
|
||||
onsets = {}
|
||||
@ -222,24 +262,60 @@ def picksdict_from_picks(evt):
|
||||
lpp = mpp + pick.time_errors.upper_uncertainty
|
||||
epp = mpp - pick.time_errors.lower_uncertainty
|
||||
except TypeError as e:
|
||||
msg = e + ',\n falling back to symmetric uncertainties'
|
||||
if not spe:
|
||||
msg = 'No uncertainties found for pick: {}. Uncertainty set to 0'.format(pick)
|
||||
lpp = mpp
|
||||
epp = mpp
|
||||
else:
|
||||
msg = str(e) + ',\n falling back to symmetric uncertainties'
|
||||
lpp = mpp + spe
|
||||
epp = mpp - spe
|
||||
warnings.warn(msg)
|
||||
lpp = mpp + spe
|
||||
epp = mpp - spe
|
||||
phase['mpp'] = mpp
|
||||
phase['epp'] = epp
|
||||
phase['lpp'] = lpp
|
||||
phase['spe'] = spe
|
||||
weight = phase.get('weight')
|
||||
if not weight:
|
||||
if not parameter:
|
||||
logging.warning('Using ')
|
||||
logging.warning('Using default input parameter')
|
||||
parameter = PylotParameter()
|
||||
pick.phase_hint = identifyPhase(pick.phase_hint)
|
||||
if pick.phase_hint == 'P':
|
||||
errors = parameter['timeerrorsP']
|
||||
elif pick.phase_hint == 'S':
|
||||
errors = parameter['timeerrorsS']
|
||||
if errors:
|
||||
weight = get_quality_class(spe, errors)
|
||||
phase['weight'] = weight
|
||||
phase['channel'] = channel
|
||||
phase['network'] = network
|
||||
phase['picker'] = picker
|
||||
phase['picker'] = pick_method
|
||||
if pick.polarity == 'positive':
|
||||
phase['fm'] = 'U'
|
||||
elif pick.polarity == 'negative':
|
||||
phase['fm'] = 'D'
|
||||
else:
|
||||
phase['fm'] = 'N'
|
||||
phase['filter_id'] = filter_id if filter_id is not None else ''
|
||||
|
||||
onsets[pick.phase_hint] = phase.copy()
|
||||
picksdict[picker][station] = onsets.copy()
|
||||
picksdict[pick_method][station] = onsets.copy()
|
||||
return picksdict
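# Hedged sketch of the uncertainty-to-weight mapping used above. It assumes that
# get_quality_class compares the symmetric pick error spe against the four class
# boundaries held in timeerrorsP/timeerrorsS; the boundary values are examples,
# not taken from the diff.
def _get_quality_class_sketch(spe, errors):
    # errors: ascending class boundaries, e.g. (0.01, 0.02, 0.04, 0.08)
    for quality, boundary in enumerate(errors):
        if spe <= boundary:
            return quality
    return len(errors)  # worst class, e.g. 4

assert _get_quality_class_sketch(0.015, (0.01, 0.02, 0.04, 0.08)) == 1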


def picks_from_picksdict(picks, creation_info=None):
    """
    Create a list of picks out of a pick dictionary
    :param picks: pick dictionary
    :type picks: dict
    :param creation_info: obspy creation information to apply to picks
    :type creation_info:
    :param creation_info: obspy creation information to apply to picks
    :return: list of picks
    :rtype: list
    """
    picks_list = list()
    for station, onsets in picks.items():
        for label, phase in onsets.items():
@@ -275,25 +351,30 @@ def picks_from_picksdict(picks, creation_info=None):
                               channel_code=ccode,
                               network_code=ncode)
            try:
                polarity = phase['fm']
                if polarity == 'U' or '+':
                filter_id = phase['filteroptions']
                filter_id = transformFilterString4Export(filter_id)
            except KeyError as e:
                warnings.warn(str(e), RuntimeWarning)
                filter_id = ''
            pick.filter_id = filter_id

            try:
                polarity = picks[station][label]['fm']
                if polarity == 'U' or polarity == '+':
                    pick.polarity = 'positive'
                elif polarity == 'D' or '-':
                elif polarity == 'D' or polarity == '-':
                    pick.polarity = 'negative'
                else:
                    pick.polarity = 'undecidable'
            except KeyError as e:
                if 'fm' in str(e):  # no polarity information found for this phase
                    pass
                else:
                    raise e
            except:
                pick.polarity = 'undecidable'
                print("No polarity information available!")
            picks_list.append(pick)
    return picks_list
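# Hedged usage sketch: converting between the two pick representations.
# 'event' is assumed to be an obspy.core.event.Event carrying PyLoT picks.
#
#   picksdict = picksdict_from_picks(event, parameter=parameter)
#   event.picks = picks_from_picksdict(picksdict['manual'])
#
# Note that picks_from_picksdict expects the station -> phase mapping of a single
# picker type, while picksdict_from_picks returns both 'manual' and 'auto' sets.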


def reassess_pilot_db(root_dir, db_dir, out_dir=None, fn_param=None, verbosity=0):
    import glob

    # TODO: change root to datapath
    db_root = os.path.join(root_dir, db_dir)
    evt_list = glob.glob1(db_root, 'e????.???.??')

@@ -308,9 +389,7 @@ def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None

    from pylot.core.io.inputs import PylotParameter
    from pylot.core.pick.utils import earllatepicker

    if fn_param is None:
        fn_param = defaults.AUTOMATIC_DEFAULTS
    # TODO: change root to datapath

    default = PylotParameter(fn_param, verbosity)

@@ -400,7 +479,6 @@ def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None
        os.makedirs(out_dir)
    fnout_prefix = os.path.join(out_dir, 'PyLoT_{0}.'.format(event_id))
    evt.write(fnout_prefix + 'xml', format='QUAKEML')
    # evt.write(fnout_prefix + 'cnv', format='VELEST')


def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
@@ -408,44 +486,43 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
    Function of methods to write phases to the following standard file
    formats used for locating earthquakes:

    HYPO71, NLLoc, VELEST, HYPOSAT, and hypoDD
    HYPO71, NLLoc, VELEST, HYPOSAT, FOCMEC, and hypoDD

    :param: arrivals
    :type: dictionary containing all phase information including
           station ID, phase, first motion, weight (uncertainty),
           ....
    :param arrivals: dictionary containing all phase information including
           station ID, phase, first motion, weight (uncertainty), ...
    :type arrivals: dict

    :param: fformat
    :type: string, chosen file format (location routine),
           choose between NLLoc, HYPO71, HYPOSAT, VELEST,
           HYPOINVERSE, and hypoDD
    :param fformat: chosen file format (location routine),
           choose between NLLoc, HYPO71, HYPOSAT, VELEST,
           HYPOINVERSE, FOCMEC, and hypoDD
    :type fformat: str

    :param: filename, full path and name of phase file
    :type: string
    :param filename: full path and name of phase file
    :type filename: string

    :param: parameter, all input information
    :type: object
    :param parameter: all input information
    :type parameter: object

    :param: eventinfo, optional, needed for VELEST-cnv file
    :param eventinfo: optional, needed for VELEST-cnv file
                      and FOCMEC- and HASH-input files
    :type: `obspy.core.event.Event` object
    :type eventinfo: `obspy.core.event.Event` object
    """

    if fformat == 'NLLoc':
        print("Writing phases to %s for NLLoc" % filename)
        fid = open("%s" % filename, 'w')
        # write header
        fid.write('# EQEVENT: %s Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' %
                  (parameter.get('database'), parameter.get('eventID')))
                  (parameter.get('datapath'), parameter.get('eventID')))
        arrivals = chooseArrivals(arrivals)  # MP MP what is chooseArrivals? It is not defined anywhere
        for key in arrivals:
            # P onsets
            if arrivals[key].has_key('P'):
            if 'P' in arrivals[key]:
                try:
                    fm = arrivals[key]['P']['fm']
                except KeyError as e:
                    print(e)
                    fm = None
                if fm == None:
                if fm is None:
                    fm = '?'
                onset = arrivals[key]['P']['mpp']
                year = onset.year
@@ -460,6 +537,7 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
                try:
                    if arrivals[key]['P']['weight'] >= 4:
                        pweight = 0  # do not use pick
                        print("Station {}: Uncertain pick, do not use it!".format(key))
                except KeyError as e:
                    print(e.message + '; no weight set during processing')
                fid.write('%s ? ? ? P %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
@@ -472,7 +550,7 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
                                                                                          ss_ms,
                                                                                          pweight))
            # S onsets
            if arrivals[key].has_key('S') and arrivals[key]['S']:
            if 'S' in arrivals[key] and arrivals[key]['S']['mpp'] is not None:
                fm = '?'
                onset = arrivals[key]['S']['mpp']
                year = onset.year
@@ -489,15 +567,20 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
                        sweight = 0  # do not use pick
                except KeyError as e:
                    print(str(e) + '; no weight set during processing')
                fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
                                                                                          fm,
                                                                                          year,
                                                                                          month,
                                                                                          day,
                                                                                          hh,
                                                                                          mm,
                                                                                          ss_ms,
                                                                                          sweight))
                Ao = arrivals[key]['S']['Ao']  # peak-to-peak amplitude
                if Ao == None:
                    Ao = 0.0
                # fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
                fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 %9.2f 0 0 %d \n' % (key,
                                                                                              fm,
                                                                                              year,
                                                                                              month,
                                                                                              day,
                                                                                              hh,
                                                                                              mm,
                                                                                              ss_ms,
                                                                                              Ao,
                                                                                              sweight))

        fid.close()
    elif fformat == 'HYPO71':
@@ -506,6 +589,7 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
        # write header
        fid.write(' %s\n' %
                  parameter.get('eventID'))
        arrivals = chooseArrivals(arrivals)  # MP MP what is chooseArrivals? It is not defined anywhere
        for key in arrivals:
            if arrivals[key]['P']['weight'] < 4:
                stat = key
@@ -581,10 +665,11 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
        print("Writing phases to %s for HYPOSAT" % filename)
        fid = open("%s" % filename, 'w')
        # write header
        fid.write('%s, event %s \n' % (parameter.get('database'), parameter.get('eventID')))
        fid.write('%s, event %s \n' % (parameter.get('datapath'), parameter.get('eventID')))
        arrivals = chooseArrivals(arrivals)  # MP MP what is chooseArrivals? It is not defined anywhere
        for key in arrivals:
            # P onsets
            if arrivals[key].has_key('P'):
            if 'P' in arrivals[key] and arrivals[key]['P']['mpp'] is not None:
                if arrivals[key]['P']['weight'] < 4:
                    Ponset = arrivals[key]['P']['mpp']
                    pyear = Ponset.year
@@ -598,10 +683,22 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
                    # use symmetrized picking error as std
                    # (read the HYPOSAT manual)
                    pstd = arrivals[key]['P']['spe']
                    if pstd is None:
                        errorsP = parameter.get('timeerrorsP')
                        if arrivals[key]['P']['weight'] == 0:
                            pstd = errorsP[0]
                        elif arrivals[key]['P']['weight'] == 1:
                            pstd = errorsP[1]
                        elif arrivals[key]['P']['weight'] == 2:
                            pstd = errorsP[2]
                        elif arrivals[key]['P']['weight'] == 3:
                            pstd = errorsP[3]
                        else:
                            pstd = errorsP[4]
                    fid.write('%-5s P1 %4.0f %02d %02d %02d %02d %05.02f %5.3f -999. 0.00 -999. 0.00\n'
                              % (key, pyear, pmonth, pday, phh, pmm, Pss, pstd))
            # S onsets
            if arrivals[key].has_key('S') and arrivals[key]['S']:
            if 'S' in arrivals[key] and arrivals[key]['S']['mpp'] is not None:
                if arrivals[key]['S']['weight'] < 4:
                    Sonset = arrivals[key]['S']['mpp']
                    syear = Sonset.year
@@ -613,6 +710,18 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
                    sms = Sonset.microsecond
                    Sss = sss + sms / 1000000.0
                    sstd = arrivals[key]['S']['spe']
                    if sstd is None:
                        errorsS = parameter.get('timeerrorsS')
                        if arrivals[key]['S']['weight'] == 0:
                            sstd = errorsS[0]
                        elif arrivals[key]['S']['weight'] == 1:
                            sstd = errorsS[1]
                        elif arrivals[key]['S']['weight'] == 2:
                            sstd = errorsS[2]
                        elif arrivals[key]['S']['weight'] == 3:
                            sstd = errorsS[3]
                        else:
                            sstd = errorsS[4]
                    fid.write('%-5s S1 %4.0f %02d %02d %02d %02d %05.02f %5.3f -999. 0.00 -999. 0.00\n'
                              % (key, syear, smonth, sday, shh, smm, Sss, sstd))
        fid.close()
@@ -647,60 +756,87 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
                  syear, stime.month, stime.day, stime.hour, stime.minute, stime.second, eventsource['latitude'],
                  cns, eventsource['longitude'], cew, eventsource['depth'], eventinfo.magnitudes[0]['mag'], ifx))
        n = 0
        for key in arrivals:
        # check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
        if isinstance(arrivals, dict) == False:
            # convert pick object (PyLoT) into dictionary
            evt = ope.Event(resource_id=eventinfo['resource_id'])
            evt.picks = arrivals
            arrivals = picksdict_from_picks(evt)
        # check for automatic and manual picks
        # prefer manual picks
        usedarrivals = chooseArrivals(arrivals)
        for key in usedarrivals:
            # P onsets
            if arrivals[key].has_key('P'):
                if arrivals[key]['P']['weight'] < 4:
            if 'P' in usedarrivals[key]:
                if usedarrivals[key]['P']['weight'] < 4:
                    n += 1
                    stat = key
                    if len(stat) > 4:  # VELEST handles only 4-string station IDs
                        stat = stat[1:5]
                    Ponset = arrivals[key]['P']['mpp']
                    Pweight = arrivals[key]['P']['weight']
                    Ponset = usedarrivals[key]['P']['mpp']
                    Pweight = usedarrivals[key]['P']['weight']
                    Prt = Ponset - stime  # onset time relative to source time
                    if n % 6 is not 0:
                    if n % 6 != 0:
                        fid.write('%-4sP%d%6.2f' % (stat, Pweight, Prt))
                    else:
                        fid.write('%-4sP%d%6.2f\n' % (stat, Pweight, Prt))
            # S onsets
            if arrivals[key].has_key('S'):
                if arrivals[key]['S']['weight'] < 4:
            if 'S' in usedarrivals[key]:
                if usedarrivals[key]['S']['weight'] < 4:
                    n += 1
                    stat = key
                    if len(stat) > 4:  # VELEST handles only 4-string station IDs
                        stat = stat[1:5]
                    Sonset = arrivals[key]['S']['mpp']
                    Sweight = arrivals[key]['S']['weight']
                    Sonset = usedarrivals[key]['S']['mpp']
                    Sweight = usedarrivals[key]['S']['weight']
                    Srt = Sonset - stime  # onset time relative to source time
                    if n % 6 is not 0:
                    if n % 6 != 0:
                        fid.write('%-4sS%d%6.2f' % (stat, Sweight, Srt))
                    else:
                        fid.write('%-4sS%d%6.2f\n' % (stat, Sweight, Srt))
        fid.close()

    elif fformat == 'hypoDD':
    elif fformat == 'HYPODD':
        print("Writing phases to %s for hypoDD" % filename)
        fid = open("%s" % filename, 'w')
        # get event information needed for hypoDD-phase file
        eventsource = eventinfo.origins[0]
        try:
            eventsource = eventinfo.origins[0]
        except:
            print("No source origin calculated yet, thus no hypoDD-infile creation possible!")
            return
        stime = eventsource['time']
        event = parameter.get('eventID')
        hddID = event.split('.')[0][1:5]
        # write header
        try:
            event = eventinfo['pylot_id']
            hddID = event.split('.')[0][1:5]
        except:
print("Error 1111111!")
|
||||
hddID = "00000"
|
||||
# write header
|
||||
fid.write('# %d %d %d %d %d %5.2f %7.4f +%6.4f %7.4f %4.2f 0.1 0.5 %4.2f %s\n' % (
|
||||
stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
|
||||
eventsource['latitude'], eventsource['longitude'], eventsource['depth'] / 1000,
|
||||
eventinfo.magnitudes[0]['mag'], eventsource['quality']['standard_error'], hddID))
|
||||
for key in arrivals:
|
||||
if arrivals[key].has_key('P'):
|
||||
# check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
|
||||
if isinstance(arrivals, dict) == False:
|
||||
# convert pick object (PyLoT) into dictionary
|
||||
evt = ope.Event(resource_id=eventinfo['resource_id'])
|
||||
evt.picks = arrivals
|
||||
arrivals = picksdict_from_picks(evt)
|
||||
# check for automatic and manual picks
|
||||
# prefer manual picks
|
||||
usedarrivals = chooseArrivals(arrivals)
|
||||
for key in usedarrivals:
|
||||
if 'P' in usedarrivals[key]:
|
||||
# P onsets
|
||||
if arrivals[key]['P']['weight'] < 4:
|
||||
Ponset = arrivals[key]['P']['mpp']
|
||||
if usedarrivals[key]['P']['weight'] < 4:
|
||||
Ponset = usedarrivals[key]['P']['mpp']
|
||||
Prt = Ponset - stime # onset time relative to source time
|
||||
fid.write('%s %6.3f 1 P\n' % (key, Prt))
|
||||
# S onsets
|
||||
if arrivals[key]['S']['weight'] < 4:
|
||||
Sonset = arrivals[key]['S']['mpp']
|
||||
if 'S' in usedarrivals[key]:
|
||||
# S onsets
|
||||
if usedarrivals[key]['S']['weight'] < 4:
|
||||
Sonset = usedarrivals[key]['S']['mpp']
|
||||
Srt = Sonset - stime # onset time relative to source time
|
||||
fid.write('%-5s %6.3f 1 S\n' % (key, Srt))
|
||||
|
||||
@ -710,10 +846,21 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
|
||||
print("Writing phases to %s for FOCMEC" % filename)
|
||||
fid = open("%s" % filename, 'w')
|
||||
# get event information needed for FOCMEC-input file
|
||||
eventsource = eventinfo.origins[0]
|
||||
try:
|
||||
eventsource = eventinfo.origins[0]
|
||||
except:
|
||||
print("No source origin calculated yet, thus no FOCMEC-infile creation possible!")
|
||||
return
|
||||
stime = eventsource['time']
|
||||
|
||||
# avoid printing '*' in focmec-input file
|
||||
if parameter.get('eventid') == '*' or parameter.get('eventid') is None:
|
||||
evID = 'e0000'
|
||||
else:
|
||||
evID = parameter.get('eventid')
|
||||
|
||||
# write header line including event information
|
||||
fid.write('%s %d%02d%02d%02d%02d%02.0f %7.4f %6.4f %3.1f %3.1f\n' % (parameter.get('eventID'),
|
||||
fid.write('%s %d%02d%02d%02d%02d%02.0f %7.4f %6.4f %3.1f %3.1f\n' % (evID,
|
||||
stime.year, stime.month, stime.day,
|
||||
stime.hour, stime.minute, stime.second,
|
||||
eventsource['latitude'],
|
||||
@ -721,9 +868,18 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
|
||||
eventsource['depth'] / 1000,
|
||||
eventinfo.magnitudes[0]['mag']))
|
||||
picks = eventinfo.picks
|
||||
for key in arrivals:
|
||||
if arrivals[key].has_key('P'):
|
||||
if arrivals[key]['P']['weight'] < 4 and arrivals[key]['P']['fm'] is not None:
|
||||
# check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
|
||||
if isinstance(arrivals, dict) == False:
|
||||
# convert pick object (PyLoT) into dictionary
|
||||
evt = ope.Event(resource_id=eventinfo['resource_id'])
|
||||
evt.picks = arrivals
|
||||
arrivals = picksdict_from_picks(evt)
|
||||
# check for automatic and manual picks
|
||||
# prefer manual picks
|
||||
usedarrivals = chooseArrivals(arrivals)
|
||||
for key in usedarrivals:
|
||||
if 'P' in usedarrivals[key]:
|
||||
if usedarrivals[key]['P']['weight'] < 4 and usedarrivals[key]['P']['fm'] is not None:
|
||||
stat = key
|
||||
for i in range(len(picks)):
|
||||
station = picks[i].waveform_id.station_code
|
||||
@ -742,7 +898,7 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
|
||||
fid.write('%-4s %6.2f %6.2f%s \n' % (stat,
|
||||
az,
|
||||
inz,
|
||||
arrivals[key]['P']['fm']))
|
||||
usedarrivals[key]['P']['fm']))
|
||||
break
|
||||
|
||||
fid.close()
|
||||
@ -757,6 +913,11 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
|
||||
print("Writing phases to %s for HASH for HASH-driver 2" % filename2)
|
||||
fid2 = open("%s" % filename2, 'w')
|
||||
# get event information needed for HASH-input file
|
||||
try:
|
||||
eventsource = eventinfo.origins[0]
|
||||
except:
|
||||
print("No source origin calculated yet, thus no cnv-file creation possible!")
|
||||
return
|
||||
eventsource = eventinfo.origins[0]
|
||||
event = parameter.get('eventID')
|
||||
hashID = event.split('.')[0][1:5]
|
||||
@ -795,10 +956,11 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
|
||||
eventsource['quality']['used_phase_count'],
|
||||
erh, erz, eventinfo.magnitudes[0]['mag'],
|
||||
hashID))
|
||||
|
||||
# Prefer Manual Picks over automatic ones if possible
|
||||
arrivals = chooseArrivals(arrivals) # MP MP what is chooseArrivals? It is not defined anywhere
|
||||
# write phase lines
|
||||
for key in arrivals:
|
||||
if arrivals[key].has_key('P'):
|
||||
if 'P' in arrivals[key]:
|
||||
if arrivals[key]['P']['weight'] < 4 and arrivals[key]['P']['fm'] is not None:
|
||||
stat = key
|
||||
ccode = arrivals[key]['P']['channel']
|
||||
@ -843,6 +1005,25 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
|
||||
fid2.close()
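# Hedged usage sketch for writephases (file names and parameter values are
# hypothetical; 'parameter' is assumed to be a PylotParameter instance and
# 'event' an obspy Event carrying origin and magnitude information):
#
#   writephases(picks, 'NLLoc', '/tmp/e0001.obs', parameter)
#   writephases(picks, 'VELEST', '/tmp/e0001.cnv', parameter, eventinfo=event)
#
# Formats that need source information (VELEST, HYPODD, FOCMEC, HASH) must be
# given eventinfo; the function returns early if no origin has been computed.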


def chooseArrivals(arrivals):
    """
    takes arrivals and returns the manual picks if manual and automatic ones are there
    returns automatic picks if only automatic picks are there
    :param arrivals: 'dictionary' with automatic and or manual arrivals
    :return: arrivals but with the manual picks preferred if possible
    """
    # If len of arrivals is greater than 2 it comes from autopicking so only autopicks are available
    if len(arrivals) > 2:
        return arrivals
    if arrivals['auto'] and arrivals['manual']:
        usedarrivals = arrivals['manual']
    elif arrivals['auto']:
        usedarrivals = arrivals['auto']
    elif arrivals['manual']:
        usedarrivals = arrivals['manual']
    return usedarrivals
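# Minimal illustration of the selection rule above (hypothetical data):
#   chooseArrivals({'auto': {'ST01': {}}, 'manual': {'ST01': {}}})  -> manual set
#   chooseArrivals({'auto': {'ST01': {}}, 'manual': {}})            -> auto set
# A dictionary keyed directly by stations (more than the two keys 'auto' and
# 'manual') is passed through unchanged.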


def merge_picks(event, picks):
    """
    takes an event object and a list of picks and searches for matching
@@ -862,9 +1043,9 @@ def merge_picks(event, picks):
        network = pick.waveform_id.network_code
        method = pick.method_id
        for p in event.picks:
            if p.waveform_id.station_code == station\
                    and p.waveform_id.network_code == network\
                    and p.phase_hint == phase\
            if p.waveform_id.station_code == station \
                    and p.waveform_id.network_code == network \
                    and p.phase_hint == phase \
                    and (str(p.method_id) in str(method)
                         or str(method) in str(p.method_id)):
                p.time, p.time_errors, p.waveform_id.network_code, p.method_id = time, err, network, method
@@ -872,27 +1053,63 @@ def merge_picks(event, picks):
    return event
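# Hedged usage sketch: merging externally modified picks back into an event.
#
#   event = merge_picks(event, new_picks)
#
# A pick overwrites an existing one only when station, network, phase hint and
# method id all match; picks without a counterpart are simply left untouched.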


def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
def getQualitiesfromxml(path, errorsP, errorsS, plotflag=1, figure=None, verbosity=0):
    """
    Script to get onset uncertainties from Quakeml.xml files created by PyLoT.
    Uncertainties are transformed into quality classes and visualized via histogram if desired.
    Ludger Küperkoch, BESTEC GmbH, 07/2017
    :param path: path containing xml files
    :type path: str
    :param errorsP: time errors of P waves for the four discrete quality classes
    :type errorsP:
    :param errorsS: time errors of S waves for the four discrete quality classes
    :type errorsS:
    :param plotflag:
    :type plotflag:
    :return:
    :rtype:
    """
    Script to get onset uncertainties from Quakeml.xml files created by PyLoT.
    Uncertainties are transformed into quality classes and visualized via histogram if desired.
    Ludger Küperkoch, BESTEC GmbH, 07/2017
    """

    from pylot.core.pick.utils import getQualityFromUncertainty
    from pylot.core.util.utils import loopIdentifyPhase, identifyPhase
    def calc_perc(uncertainties, ntotal):
        '''simple function that calculates percentage of number of uncertainties (list length)'''
        if len(uncertainties) == 0:
            return 0
        else:
            return 100. / ntotal * len(uncertainties)

    def calc_weight_perc(psweights, weight_ids):
        '''calculate percentages of different weights (pick classes!?) of total number of uncertainties of a phase'''
        # count total number of list items for this phase
        numWeights = np.sum([len(weight) for weight in psweights.values()])

        # iterate over all available weights to return a list with percentages for plotting
        plot_list = []
        for weight_id in weight_ids:
            plot_list.append(calc_perc(psweights[weight_id], numWeights))

        return plot_list, numWeights
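        # Worked example for the two helpers (hypothetical uncertainties in
        # seconds): with psweights = {0: [0.01, 0.02], 1: [0.03], 2: [], 3: [],
        # 4: [0.3]} the total count is 4, so calc_weight_perc(psweights,
        # range(5)) yields ([50.0, 25.0, 0, 0, 25.0], 4).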

    # get all xmlfiles in path (maybe this should be changed to one xml file for this function, selectable via GUI?)
    xmlnames = glob.glob(os.path.join(path, '*.xml'))
    if len(xmlnames) == 0:
        print(f'No files found in path {path}.')
        return False

    # first define possible phases here
    phases = ['P', 'S']

    # define possible weights (0-4)
    weight_ids = list(range(5))

    # put both error lists in a dictionary with P/S key so that the amount of code can be halved by simply using P/S as key
    errors = dict(P=errorsP, S=errorsS)

    # create dictionaries for each phase (P/S) with a dictionary of empty lists for each weight defined in the weights
    # tuple above
    weights = {}
    for phase in phases:
        weights[phase] = {weight_id: [] for weight_id in weight_ids}

    # read all onset weights
    Pw0 = []
    Pw1 = []
    Pw2 = []
    Pw3 = []
    Pw4 = []
    Sw0 = []
    Sw1 = []
    Sw2 = []
    Sw3 = []
    Sw4 = []
    for names in xmlnames:
        print("Getting onset weights from {}".format(names))
        cat = read_events(names)
@@ -900,117 +1117,60 @@ def getQualitiesfromxml(path, errorsP, errorsS, plotflag=1, figure=None, verbosity=0):
        arrivals = cat.events[0].picks
        arrivals_copy = cat_copy.events[0].picks
        # Prefer manual picks if qualities are sufficient!
        for Pick in arrivals:
            if (Pick.method_id.id).split('/')[1] == 'manual':
                mstation = Pick.waveform_id.station_code
        for pick in arrivals:
            if pick.method_id.id.split('/')[1] == 'manual':
                mstation = pick.waveform_id.station_code
                mstation_ext = mstation + '_'
                for mpick in arrivals_copy:
                    phase = identifyPhase(loopIdentifyPhase(Pick.phase_hint))
                    if phase == 'P':
                        if ((mpick.waveform_id.station_code == mstation) or
                            (mpick.waveform_id.station_code == mstation_ext)) and \
                                ((mpick.method_id).split('/')[1] == 'auto') and \
                                (mpick.time_errors['uncertainty'] <= ErrorsP[3]):
                            del mpick
                            break
                    elif phase == 'S':
                        if ((mpick.waveform_id.station_code == mstation) or
                            (mpick.waveform_id.station_code == mstation_ext)) and \
                                ((mpick.method_id).split('/')[1] == 'auto') and \
                                (mpick.time_errors['uncertainty'] <= ErrorsS[3]):
                            del mpick
                            break
                    phase = identifyPhase(loopIdentifyPhase(pick.phase_hint))  # MP MP catch if this fails?
                    if ((mpick.waveform_id.station_code == mstation) or
                        (mpick.waveform_id.station_code == mstation_ext)) and \
                            (mpick.method_id.id.split('/')[1] == 'auto') and \
                            (mpick.time_errors['uncertainty'] <= errors[phase][3]):
                        del mpick
                        break
        lendiff = len(arrivals) - len(arrivals_copy)
        if lendiff is not 0:
        if lendiff != 0:
            print("Found manual as well as automatic picks, preferred the {} manual ones!".format(lendiff))

        for Pick in arrivals_copy:
            phase = identifyPhase(loopIdentifyPhase(Pick.phase_hint))
            if phase == 'P':
                Pqual = getQualityFromUncertainty(Pick.time_errors.uncertainty, ErrorsP)
                if Pqual == 0:
                    Pw0.append(Pick.time_errors.uncertainty)
                elif Pqual == 1:
                    Pw1.append(Pick.time_errors.uncertainty)
                elif Pqual == 2:
                    Pw2.append(Pick.time_errors.uncertainty)
                elif Pqual == 3:
                    Pw3.append(Pick.time_errors.uncertainty)
                elif Pqual == 4:
                    Pw4.append(Pick.time_errors.uncertainty)
            elif phase == 'S':
                Squal = getQualityFromUncertainty(Pick.time_errors.uncertainty, ErrorsS)
                if Squal == 0:
                    Sw0.append(Pick.time_errors.uncertainty)
                elif Squal == 1:
                    Sw1.append(Pick.time_errors.uncertainty)
                elif Squal == 2:
                    Sw2.append(Pick.time_errors.uncertainty)
                elif Squal == 3:
                    Sw3.append(Pick.time_errors.uncertainty)
                elif Squal == 4:
                    Sw4.append(Pick.time_errors.uncertainty)
            else:
        for pick in arrivals_copy:
            phase = identifyPhase(loopIdentifyPhase(pick.phase_hint))
            uncertainty = pick.time_errors.uncertainty
            if not uncertainty:
                if verbosity > 0:
                    print('No uncertainty, pick {} invalid!'.format(pick.method_id.id))
                continue
            # check P/S phase
            if phase not in phases:
                print("Phase hint not defined for picking!")
                pass
                continue

            qual = get_quality_class(uncertainty, errors[phase])
            weights[phase][qual].append(uncertainty)

    if plotflag == 0:
        Punc = [Pw0, Pw1, Pw2, Pw3, Pw4]
        Sunc = [Sw0, Sw1, Sw2, Sw3, Sw4]
        return Punc, Sunc
        p_unc = [weights['P'][weight_id] for weight_id in weight_ids]
        s_unc = [weights['S'][weight_id] for weight_id in weight_ids]
        return p_unc, s_unc
    else:
        if not figure:
            fig = plt.figure()
            ax = fig.add_subplot(111)
        # get percentage of weights
        numPweights = np.sum([len(Pw0), len(Pw1), len(Pw2), len(Pw3), len(Pw4)])
        numSweights = np.sum([len(Sw0), len(Sw1), len(Sw2), len(Sw3), len(Sw4)])
        if len(Pw0) > 0:
            P0perc = 100 / numPweights * len(Pw0)
        else:
            P0perc = 0
        if len(Pw1) > 0:
            P1perc = 100 / numPweights * len(Pw1)
        else:
            P1perc = 0
        if len(Pw2) > 0:
            P2perc = 100 / numPweights * len(Pw2)
        else:
            P2perc = 0
        if len(Pw3) > 0:
            P3perc = 100 / numPweights * len(Pw3)
        else:
            P3perc = 0
        if len(Pw4) > 0:
            P4perc = 100 / numPweights * len(Pw4)
        else:
            P4perc = 0
        if len(Sw0) > 0:
            S0perc = 100 / numSweights * len(Sw0)
        else:
            S0perc = 0
        if len(Sw1) > 0:
            S1perc = 100 / numSweights * len(Sw1)
        else:
            S1perc = 0
        if len(Sw2) > 0:
            S2perc = 100 / numSweights * len(Sw2)
        else:
            S2perc = 0
        if len(Sw3) > 0:
            S3perc = 100 / numSweights * len(Sw3)
        else:
            S3perc = 0
        if len(Sw4) > 0:
            S4perc = 100 / numSweights * len(Sw4)
        else:
            S4perc = 0
        listP, numPweights = calc_weight_perc(weights['P'], weight_ids)
        listS, numSweights = calc_weight_perc(weights['S'], weight_ids)

        weights = ('0', '1', '2', '3', '4')
        y_pos = np.arange(len(weights))
        y_pos = np.arange(len(weight_ids))
        width = 0.34
        plt.bar(y_pos - width, [P0perc, P1perc, P2perc, P3perc, P4perc], width, color='black')
        plt.bar(y_pos, [S0perc, S1perc, S2perc, S3perc, S4perc], width, color='red')
        plt.ylabel('%')
        plt.xticks(y_pos, weights)
        plt.xlim([-0.5, 4.5])
        plt.xlabel('Qualities')
        plt.title('{0} P-Qualities, {1} S-Qualities'.format(numPweights, numSweights))
        plt.show()
        ax.bar(y_pos - width, listP, width, color='black')
        ax.bar(y_pos, listS, width, color='red')
        ax.set_ylabel('%')
        ax.set_xticks(y_pos, weight_ids)
        ax.set_xlim([-0.5, 4.5])
        ax.set_xlabel('Qualities')
        ax.set_title('{0} P-Qualities, {1} S-Qualities'.format(numPweights, numSweights))

        if not figure:
            fig.show()

        return listP, listS
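# Hedged usage sketch (the path and error boundaries are example values):
#
#   p_unc, s_unc = getQualitiesfromxml('/data/events',
#                                      errorsP=(0.01, 0.02, 0.04, 0.08),
#                                      errorsS=(0.04, 0.08, 0.16, 0.32),
#                                      plotflag=0)
#
# With plotflag=0 the per-class uncertainty lists for P and S are returned;
# any other value draws the quality histogram instead.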

@@ -18,11 +18,11 @@ def export(picks, fnout, parameter, eventinfo):
    :param fnout: complete path to the exporting obs file
    :type fnout: str

    :param: parameter, all input information
    :type: object
    :param parameter: all input information
    :type parameter: object

    :param: eventinfo, source information needed for focmec format
    :type: list object
    :param eventinfo: source information needed for focmec format
    :type eventinfo: list object
    '''
    # write phases to FOCMEC-phase file
    writephases(picks, 'FOCMEC', fnout, parameter, eventinfo)

@@ -18,11 +18,11 @@ def export(picks, fnout, parameter, eventinfo):
    :param fnout: complete path to the exporting obs file
    :type fnout: str

    :param: parameter, all input information
    :type: object
    :param parameter: all input information
    :type parameter: object

    :param: eventinfo, source information needed for HASH format
    :type: list object
    :param eventinfo: source information needed for HASH format
    :type eventinfo: list object
    '''
    # write phases to HASH-phase file
    writephases(picks, 'HASH', fnout, parameter, eventinfo)

@@ -18,8 +18,8 @@ def export(picks, fnout, parameter):
    :param fnout: complete path to the exporting obs file
    :type fnout: str

    :param: parameter, all input information
    :type: object
    :param parameter: all input information
    :type parameter: object
    '''
    # write phases to HYPO71-phase file
    writephases(picks, 'HYPO71', fnout, parameter)

@@ -18,11 +18,11 @@ def export(picks, fnout, parameter, eventinfo):
    :param fnout: complete path to the exporting obs file
    :type fnout: str

    :param: parameter, all input information
    :type: object
    :param parameter: all input information
    :type parameter: object

    :param: eventinfo, source information needed for hypoDD format
    :type: list object
    :param eventinfo: source information needed for hypoDD format
    :type eventinfo: list object
    '''
    # write phases to hypoDD-phase file
    writephases(picks, 'hypoDD', fnout, parameter, eventinfo)
    writephases(picks, 'HYPODD', fnout, parameter, eventinfo)

@@ -18,8 +18,8 @@ def export(picks, fnout, parameter):
    :param fnout: complete path to the exporting obs file
    :type fnout: str

    :param: parameter, all input information
    :type: object
    :param parameter: all input information
    :type parameter: object
    '''
    # write phases to HYPOSAT-phase file
    writephases(picks, 'HYPOSAT', fnout, parameter)

@@ -6,8 +6,10 @@ import os
import subprocess

from obspy import read_events

from pylot.core.io.phases import writephases
from pylot.core.util.utils import getPatternLine, runProgram, which
from pylot.core.util.gui import which
from pylot.core.util.utils import getPatternLine, runProgram
from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()
@@ -28,8 +30,8 @@ def export(picks, fnout, parameter):
    :param fnout: complete path to the exporting obs file
    :type fnout: str

    :param: parameter, all input information
    :type: object
    :param parameter: all input information
    :type parameter: object
    '''
    # write phases to NLLoc-phase file
    writephases(picks, 'NLLoc', fnout, parameter)
@@ -38,19 +40,19 @@ def export(picks, fnout, parameter):
def modify_inputs(ctrfn, root, nllocoutn, phasefn, tttn):
    '''
    :param ctrfn: name of NLLoc-control file
    :type: str
    :type ctrfn: str

    :param root: root path to NLLoc working directory
    :type: str
    :type root: str

    :param nllocoutn: name of NLLoc-location output file
    :type: str
    :type nllocoutn: str

    :param phasefn: name of NLLoc-input phase file
    :type: str
    :type phasefn: str

    :param tttn: pattern of precalculated NLLoc traveltime tables
    :type: str
    :type tttn: str
    '''
    # For locating the event the NLLoc-control file has to be modified!
    # create comment line for NLLoc-control file NLLoc-output file
@@ -73,18 +75,15 @@ def modify_inputs(ctrfn, root, nllocoutn, phasefn, tttn):
    nllfile.close()


def locate(fnin, infile=None):
def locate(fnin, parameter=None):
    """
    takes an external program name
    :param fnin:
    :return:
    takes an external program name and tries to run it
    :param parameter: PyLoT Parameter object
    :param fnin: external program name
    :return: None
    """

    if infile is None:
        exe_path = which('NLLoc')
    else:
        exe_path = which('NLLoc', infile)
    if exe_path is None:
    exe_path = os.path.join(parameter['nllocbin'], 'NLLoc')
    if not os.path.isfile(exe_path):
        raise NLLocError('NonLinLoc executable not found; check your '
                         'environment variables')

@@ -97,10 +96,15 @@ def locate(fnin, infile=None):

def read_location(fn):
    path, file = os.path.split(fn)
    file = glob.glob1(path, file + '.[0-9]*.grid0.loc.hyp')
    if len(file) > 1:
        raise IOError('ambiguous location name {0}'.format(file))
    fn = os.path.join(path, file[0])
    nllfile = glob.glob1(path, file + '.[0-9]*.grid0.loc.hyp')
    if len(nllfile) > 1:
        # get most recent file
        print("Found several location files matching pattern!")
        print("Using the most recent one ...")
        files_to_search = '{0}/{1}'.format(path, file) + '.[0-9]*.grid0.loc.hyp'
        fn = max(glob.glob(files_to_search), key=os.path.getctime)
    else:
        fn = os.path.join(path, nllfile[0])
    return read_events(fn)[0]
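# Hedged usage sketch for the NLLoc wrapper (paths are hypothetical; 'parameter'
# is assumed to provide the 'nllocbin' entry pointing at the NonLinLoc binaries):
#
#   locate('/nlloc/run/nlloc.in', parameter=parameter)
#   event = read_location('/nlloc/loc/e0001')
#
# read_location resolves the '<name>.<date>.grid0.loc.hyp' file NLLoc wrote,
# preferring the most recent one, and returns the first event of the catalog
# obspy reads from it.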

@@ -18,11 +18,11 @@ def export(picks, fnout, eventinfo, parameter=None):
    :param fnout: complete path to the exporting obs file
    :type fnout: str

    :param: eventinfo, source time needed for VELEST-cnv format
    :type: list object
    :param eventinfo: source time needed for VELEST-cnv format
    :type eventinfo: list object

    :param: parameter, all input information
    :type: object
    :param parameter: all input information
    :type parameter: object
    '''
    # write phases to VELEST-phase file
    writephases(picks, 'VELEST', fnout, parameter, eventinfo)

@@ -16,39 +16,39 @@ autoregressive prediction: application to local and regional distances, Geophys.

:author: MAGS2 EP3 working group
"""

import numpy as np
try:
    from scipy.signal import tukey
except ImportError:
    from scipy.signal.windows import tukey

from obspy.core import Stream

from pylot.core.pick.utils import PickingFailedException


class CharacteristicFunction(object):
    '''
    """
    SuperClass for different types of characteristic functions.
    '''
    """

    def __init__(self, data, cut, t2=None, order=None, t1=None, fnoise=None):
        '''
        """
        Initialize data type object with information from the original
        Seismogram.

        :param: data
        :type: `~obspy.core.stream.Stream`

        :param: cut
        :type: tuple

        :param: t2
        :type: float

        :param: order
        :type: int

        :param: t1
        :type: float (optional, only for AR)

        :param: fnoise
        :type: float (optional, only for AR)
        '''
        :param data: stream object containing traces for which the cf should
            be calculated
        :type data: ~obspy.core.stream.Stream
        :param cut: (starttime, endtime) in seconds relative to beginning of trace
        :type cut: tuple
        :param t2:
        :type t2: float
        :param order:
        :type order: int
        :param t1: float (optional, only for AR)
        :param fnoise: (optional, only for AR)
        :type fnoise: float
        """

        assert isinstance(data, Stream), "%s is not a stream object" % str(data)

@@ -60,7 +60,7 @@ class CharacteristicFunction(object):
        self.setOrder(order)
        self.setFnoise(fnoise)
        self.setARdetStep(t2)
        self.calcCF(self.getDataArray())
        self.calcCF()
        self.arpara = np.array([])
        self.xpred = np.array([])

@@ -78,13 +78,13 @@ class CharacteristicFunction(object):
                   t2=self.getTime2(),
                   order=self.getOrder(),
                   fnoise=self.getFnoise(),
                   ardetstep=self.getARdetStep[0]())
                   ardetstep=self.getARdetStep()[0]())

    def getCut(self):
        return self.cut

    def setCut(self, cut):
        self.cut = cut
        self.cut = (int(cut[0]), int(cut[1]))

    def getTime1(self):
        return self.t1
@@ -120,6 +120,10 @@ class CharacteristicFunction(object):
        return self.dt

    def getTimeArray(self):
        """
        :return: array of time indices
        :rtype: np.array
        """
        incr = self.getIncrement()
        self.TimeArray = np.arange(0, len(self.getCF()) * incr, incr) + self.getCut()[0]
        return self.TimeArray
@@ -137,19 +141,21 @@ class CharacteristicFunction(object):
        return self.xcf

    def getDataArray(self, cut=None):
        '''
        """
        If cut times are given, time series is cut from cut[0] (start time)
        till cut[1] (stop time) in order to calculate CF for certain part
        only where you expect the signal!
        input: cut (tuple) ()
               cutting window
        '''
        :param cut: contains (start time, stop time) for cutting the time series
        :type cut: tuple
        :return: cut data/time series
        :rtype:
        """
        if cut is not None:
            if len(self.orig_data) == 1:
                if self.cut[0] == 0 and self.cut[1] == 0:
                    start = 0
                    stop = len(self.orig_data[0])
                elif self.cut[0] == 0 and self.cut[1] is not 0:
                elif self.cut[0] == 0 and self.cut[1] != 0:
                    start = 0
                    stop = self.cut[1] / self.dt
                else:
@@ -158,13 +164,15 @@ class CharacteristicFunction(object):
                zz = self.orig_data.copy()
                z1 = zz[0].copy()
                zz[0].data = z1.data[int(start):int(stop)]
                if zz[0].stats.npts == 0:  # cut times do not fit data length!
                    zz[0].data = z1.data  # take entire data
                data = zz
                return data
            elif len(self.orig_data) == 2:
                if self.cut[0] == 0 and self.cut[1] == 0:
                    start = 0
                    stop = min([len(self.orig_data[0]), len(self.orig_data[1])])
                elif self.cut[0] == 0 and self.cut[1] is not 0:
                elif self.cut[0] == 0 and self.cut[1] != 0:
                    start = 0
                    stop = min([self.cut[1] / self.dt, len(self.orig_data[0]),
                                len(self.orig_data[1])])
@@ -184,7 +192,7 @@ class CharacteristicFunction(object):
                    start = 0
                    stop = min([self.cut[1] / self.dt, len(self.orig_data[0]),
                                len(self.orig_data[1]), len(self.orig_data[2])])
                elif self.cut[0] == 0 and self.cut[1] is not 0:
                elif self.cut[0] == 0 and self.cut[1] != 0:
                    start = 0
                    stop = self.cut[1] / self.dt
                else:
@@ -204,27 +212,25 @@ class CharacteristicFunction(object):
            data = self.orig_data.copy()
            return data

    def calcCF(self, data=None):
        self.cf = data
    def calcCF(self):
        pass


class AICcf(CharacteristicFunction):
    '''
    Function to calculate the Akaike Information Criterion (AIC) after
    Maeda (1985).
    :param: data, time series (whether seismogram or CF)
    :type: tuple

    Output: AIC function
    '''

    def calcCF(self, data):

    def calcCF(self):
        """
        Function to calculate the Akaike Information Criterion (AIC) after Maeda (1985).
        :return: AIC function
        :rtype:
        """
        x = self.getDataArray()
        xnp = x[0].data
        ind = np.where(~np.isnan(xnp))[0]
        if ind.size:
            xnp[:ind[0]] = xnp[ind[0]]
        xnp = tukey(len(xnp), alpha=0.05) * xnp
        xnp = xnp - np.mean(xnp)
        datlen = len(xnp)
        k = np.arange(1, datlen)
        cf = np.zeros(datlen)
@@ -235,7 +241,7 @@ class AICcf(CharacteristicFunction):
                     np.log((cumsumcf[datlen - 1] - cumsumcf[k - 1]) / (datlen - k + 1)))
        cf[0] = cf[1]
        inf = np.isinf(cf)
        ff = np.where(inf == True)
        ff = np.where(inf)
        if len(ff) >= 1:
            cf[ff] = 0
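# Standalone sketch of the Maeda (1985) AIC picker on a plain numpy array, kept
# independent of the class machinery above (synthetic data, illustrative only):
import numpy as np

def aic_sketch(x):
    n = len(x)
    aic = np.zeros(n)
    for k in range(1, n - 1):
        var1 = np.var(x[:k])
        var2 = np.var(x[k:])
        # guard against log(0) on constant segments
        aic[k] = (k * np.log(var1 if var1 > 0 else 1e-30)
                  + (n - k - 1) * np.log(var2 if var2 > 0 else 1e-30))
    aic[0], aic[-1] = aic[1], aic[-2]
    return aic

rng = np.random.default_rng(0)
x = np.concatenate([rng.normal(0, 1, 200), rng.normal(0, 10, 200)])
onset_index = int(np.argmin(aic_sketch(x)))  # minimum marks the onset, near 200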

@@ -244,14 +250,22 @@ class AICcf(CharacteristicFunction):


class HOScf(CharacteristicFunction):
    '''
    Function to calculate skewness (statistics of order 3) or kurtosis
    (statistics of order 4), using one long moving window, as published
    in Kueperkoch et al. (2010).
    '''

    def calcCF(self, data):
    def __init__(self, data, cut, pickparams):
        """
        Call parent constructor while extracting the right parameters:
        :param pickparams: PylotParameter instance
        """
        super(HOScf, self).__init__(data, cut, pickparams["tlta"], pickparams["hosorder"])

    def calcCF(self):
        """
        Function to calculate skewness (statistics of order 3) or kurtosis
        (statistics of order 4), using one long moving window, as published
        in Kueperkoch et al. (2010), or order 2, i.e. STA/LTA.
        :return: HOS cf
        :rtype:
        """
        x = self.getDataArray(self.getCut())
        xnp = x[0].data
        nn = np.isnan(xnp)
@@ -292,13 +306,24 @@ class HOScf(CharacteristicFunction):
        if ind.size:
            first = ind[0]
            LTA[:first] = LTA[first]

        self.cf = LTA
        self.xcf = x
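# Standalone sketch of a higher-order-statistics CF (kurtosis, order 4) with one
# long moving window, in the spirit of Kueperkoch et al. (2010); the window
# length and input data are example values, not taken from the diff:
import numpy as np

def hos_cf_sketch(x, nwin):
    cf = np.zeros(len(x))
    for i in range(nwin, len(x)):
        w = x[i - nwin:i]
        m = w.mean()
        s2 = ((w - m) ** 2).mean()
        if s2 > 0:
            cf[i] = ((w - m) ** 4).mean() / s2 ** 2  # kurtosis of the window
    cf[:nwin] = cf[nwin]
    return cf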


class ARZcf(CharacteristicFunction):
    def calcCF(self, data):

    def __init__(self, data, cut, t1, t2, pickparams):
        super(ARZcf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Parorder"],
                                    fnoise=pickparams["addnoise"])

    def calcCF(self):
        """
        function used to calculate the AR prediction error from a single vertical trace. Can be used to pick
        P onsets.
        :return: ARZ cf
        :rtype:
        """
        print('Calculating AR-prediction error from single trace ...')
        x = self.getDataArray(self.getCut())
        xnp = x[0].data
@@ -342,7 +367,7 @@ class ARZcf(CharacteristicFunction):
        self.xcf = x

    def arDetZ(self, data, order, rind, ldet):
        '''
        """
        Function to calculate AR parameters arpara after Thomas Meier (CAU), published
        in Kueperkoch et al. (2012). This function solves SLE using the Moore-
        Penrose inverse, i.e. the least-squares approach.
@@ -359,7 +384,7 @@ class ARZcf(CharacteristicFunction):
        :type: int

        Output: AR parameters arpara
        '''
        """

        # recursive calculation of data vector (right part of eq. 6.5 in Kueperkoch et al. (2012)
        rhs = np.zeros(self.getOrder())
@@ -383,7 +408,7 @@ class ARZcf(CharacteristicFunction):
        self.arpara = np.dot(np.linalg.pinv(A), rhs)

    def arPredZ(self, data, arpara, rind, lpred):
        '''
        """
        Function to predict waveform, assuming an autoregressive process of order
        p (=size(arpara)), with AR parameters arpara calculated in arDet. After
        Thomas Meier (CAU), published in Kueperkoch et al. (2012).
@@ -400,8 +425,8 @@ class ARZcf(CharacteristicFunction):
        :type: int

        Output: predicted waveform z
        '''
        # be sure of the summation indeces
        """
        # be sure of the summation indices
        if rind < len(arpara):
            rind = len(arpara)
        if rind > len(data) - lpred:
@@ -421,11 +446,27 @@ class ARZcf(CharacteristicFunction):


class ARHcf(CharacteristicFunction):
    def calcCF(self, data):

    def __init__(self, data, cut, t1, t2, pickparams):
        super(ARHcf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Sarorder"],
                                    fnoise=pickparams["addnoise"])

    def calcCF(self):
        """
        Function to calculate a characteristic function using autoregressive modelling of the waveform of
        both horizontal traces.
        The waveform is predicted in a moving time window using the calculated AR parameters. The difference
        between the predicted and the actual waveform serves as a characteristic function.
        :return: ARH cf
        :rtype:
        """

        print('Calculating AR-prediction error from both horizontal traces ...')

        xnp = self.getDataArray(self.getCut())
        if len(xnp[0]) == 0:
            raise PickingFailedException('calcCF: Found empty data trace for cut times. Return')

        n0 = np.isnan(xnp[0].data)
        if len(n0) > 1:
            xnp[0].data[n0] = 0
@@ -457,9 +498,9 @@ class ARHcf(CharacteristicFunction):
            # AR prediction of waveform using calculated AR coefficients
            self.arPredH(xnp, self.arpara, i + 1, lpred)
            # prediction error = CF
            cf[i + lpred] = np.sqrt(np.sum(np.power(self.xpred[0][i:i + lpred] - xnp[0][i:i + lpred], 2) \
                                           + np.power(self.xpred[1][i:i + lpred] - xnp[1][i:i + lpred], 2)) / (
                                           2 * lpred))
            cf[i + lpred] = np.sqrt(np.sum(np.power(self.xpred[0][i:i + lpred] - xnp[0][i:i + lpred], 2)
                                           + np.power(self.xpred[1][i:i + lpred] - xnp[1][i:i + lpred], 2)
                                           ) / (2 * lpred))
        nn = np.isnan(cf)
        if len(nn) > 1:
            cf[nn] = 0
@@ -475,7 +516,7 @@ class ARHcf(CharacteristicFunction):
        self.xcf = xnp

    def arDetH(self, data, order, rind, ldet):
        '''
        """
        Function to calculate AR parameters arpara after Thomas Meier (CAU), published
        in Kueperkoch et al. (2012). This function solves SLE using the Moore-
        Penrose inverse, i.e. the least-squares approach. "data" is a structured array.
@@ -494,7 +535,7 @@ class ARHcf(CharacteristicFunction):
        :type: int

        Output: AR parameters arpara
        '''
        """

        # recursive calculation of data vector (right part of eq. 6.5 in Kueperkoch et al. (2012)
        rhs = np.zeros(self.getOrder())
@@ -509,15 +550,15 @@ class ARHcf(CharacteristicFunction):
                for i in range(rind, ldet):
                    ki = k - 1
                    ji = j - 1
                    A[ki, ji] = A[ki, ji] + data[0, i - ji] * data[0, i - ki] + data[1, i - ji] * data[1, i - ki]

                    A[ki, ji] = A[ki, ji] + data[0, i - ji] * data[0, i - ki] \
                                + data[1, i - ji] * data[1, i - ki]
                    A[ji, ki] = A[ki, ji]

        # apply Moore-Penrose inverse for SVD yielding the AR-parameters
        self.arpara = np.dot(np.linalg.pinv(A), rhs)

    def arPredH(self, data, arpara, rind, lpred):
        '''
        """
        Function to predict waveform, assuming an autoregressive process of order
        p (=size(arpara)), with AR parameters arpara calculated in arDet. After
        Thomas Meier (CAU), published in Kueperkoch et al. (2012).
@@ -535,7 +576,7 @@ class ARHcf(CharacteristicFunction):

        Output: predicted waveform z
        :type: structured array
        '''
        """
        # be sure of the summation indices
        if rind < len(arpara) + 1:
            rind = len(arpara) + 1
@@ -559,8 +600,20 @@ class ARHcf(CharacteristicFunction):


class AR3Ccf(CharacteristicFunction):
    def calcCF(self, data):

    def __init__(self, data, cut, t1, t2, pickparams):
        super(AR3Ccf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Sarorder"],
                                     fnoise=pickparams["addnoise"])

    def calcCF(self):
        """
        Function to calculate a characteristic function using autoregressive modelling of the waveform of
        all three traces.
        The waveform is predicted in a moving time window using the calculated AR parameters. The difference
        between the predicted and the actual waveform serves as a characteristic function
        :return: AR3C cf
        :rtype:
        """
        print('Calculating AR-prediction error from all 3 components ...')

        xnp = self.getDataArray(self.getCut())
@@ -599,10 +652,10 @@ class AR3Ccf(CharacteristicFunction):
            # AR prediction of waveform using calculated AR coefficients
            self.arPred3C(xnp, self.arpara, i + 1, lpred)
            # prediction error = CF
            cf[i + lpred] = np.sqrt(np.sum(np.power(self.xpred[0][i:i + lpred] - xnp[0][i:i + lpred], 2) \
                                           + np.power(self.xpred[1][i:i + lpred] - xnp[1][i:i + lpred], 2) \
                                           + np.power(self.xpred[2][i:i + lpred] - xnp[2][i:i + lpred], 2)) / (
                                           3 * lpred))
            cf[i + lpred] = np.sqrt(np.sum(np.power(self.xpred[0][i:i + lpred] - xnp[0][i:i + lpred], 2)
                                           + np.power(self.xpred[1][i:i + lpred] - xnp[1][i:i + lpred], 2)
                                           + np.power(self.xpred[2][i:i + lpred] - xnp[2][i:i + lpred], 2)
                                           ) / (3 * lpred))
        nn = np.isnan(cf)
        if len(nn) > 1:
            cf[nn] = 0
@@ -618,7 +671,7 @@ class AR3Ccf(CharacteristicFunction):
        self.xcf = xnp

    def arDet3C(self, data, order, rind, ldet):
        '''
        """
        Function to calculate AR parameters arpara after Thomas Meier (CAU), published
        in Kueperkoch et al. (2012). This function solves SLE using the Moore-
        Penrose inverse, i.e. the least-squares approach. "data" is a structured array.
@@ -637,7 +690,7 @@ class AR3Ccf(CharacteristicFunction):
        :type: int

        Output: AR parameters arpara
        '''
        """

        # recursive calculation of data vector (right part of eq. 6.5 in Kueperkoch et al. (2012)
        rhs = np.zeros(self.getOrder())
@@ -653,7 +706,8 @@ class AR3Ccf(CharacteristicFunction):
                for i in range(rind, ldet):
                    ki = k - 1
                    ji = j - 1
                    A[ki, ji] = A[ki, ji] + data[0, i - ji] * data[0, i - ki] + data[1, i - ji] * data[1, i - ki] \
                    A[ki, ji] = A[ki, ji] + data[0, i - ji] * data[0, i - ki] \
                                + data[1, i - ji] * data[1, i - ki] \
                                + data[2, i - ji] * data[2, i - ki]

                    A[ji, ki] = A[ki, ji]
@@ -662,7 +716,7 @@ class AR3Ccf(CharacteristicFunction):
        self.arpara = np.dot(np.linalg.pinv(A), rhs)

    def arPred3C(self, data, arpara, rind, lpred):
        '''
        """
        Function to predict waveform, assuming an autoregressive process of order
        p (=size(arpara)), with AR parameters arpara calculated in arDet3C. After
        Thomas Meier (CAU), published in Kueperkoch et al. (2012).
@@ -680,7 +734,7 @@ class AR3Ccf(CharacteristicFunction):

        Output: predicted waveform z
        :type: structured array
        '''
        """
        # be sure of the summation indices
        if rind < len(arpara) + 1:
            rind = len(arpara) + 1

@@ -7,9 +7,8 @@ import os

import matplotlib.pyplot as plt
import numpy as np
from obspy import read_events
from obspy.core import AttribDict
from pylot.core.io.phases import picksdict_from_picks

from pylot.core.util.pdf import ProbabilityDensityFunction
from pylot.core.util.utils import find_in_list
from pylot.core.util.version import get_git_version as _getVersionString
@@ -109,10 +108,9 @@ class Comparison(object):
        Comparison is carried out with the help of pdf representation of the picks
        and a probabilistic approach to the time difference of two onset
        measurements.
        :param a: filename for pickset A
        :type a: str
        :param b: filename for pickset B
        :type b: str
        :param type: type of the returned `~pylot.core.util.pdf.ProbabilityDensityFunction` object.
            Possible values: 'exp' and 'gauss', representing the type of branches of the PDF
        :type type: str
        :return: dictionary containing the resulting comparison pdfs for all picks
        :rtype: dict
        """
@@ -126,8 +124,11 @@ class Comparison(object):
            compare_pdf = dict()
            for phase in phases:
                if phase in pdf_b[station].keys():
                    compare_pdf[phase] = phases[phase] - pdf_b[station][
                        phase]
                    try:
                        compare_pdf[phase] = phases[phase] - pdf_b[station][
                            phase]
                    except:
                        compare_pdf = None
            if compare_pdf is not None:
                compare_pdfs[station] = compare_pdf

@@ -142,8 +143,7 @@ class Comparison(object):
        istations = range(nstations)
        fig, axarr = plt.subplots(nstations, 2, sharex='col', sharey='row')

        for n in istations:
            station = stations[n]
        for n, station in enumerate(stations):
            if station not in self.comparison.keys():
                continue
            compare_pdf = self.comparison[station]
@@ -190,6 +190,20 @@ class Comparison(object):
        return self.get_array(phase, 'standard_deviation')

    def hist_expectation(self, phases='all', bins=20, normed=False):
        """
        Plot a histogram of the expectation values of the PDFs.

        Expectation represents the time difference between the two most likely arrival times
        :param phases: type of phases to compare
        :type phases: str
        :param bins: number of bins in histogram
        :type bins: int
        :param normed: Normalize histogram
        :type normed: bool
        :return: None
        :rtype: None
        """

        phases.strip()
|
||||
if phases.find('all') is 0:
|
||||
phases = 'ps'
|
||||
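
Two caveats in the added hist_expectation body: str.strip() returns a new string, so the bare phases.strip() call is a no-op, and `is 0` tests object identity instead of equality (it only happens to work for small integers in CPython). The sibling hist_standard_deviation below already uses `== 0`; the robust form of both lines would be:

    phases = phases.strip()      # keep the stripped result
    if phases.find('all') == 0:  # equality, not identity
        phases = 'ps'
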
@ -210,6 +224,20 @@ class Comparison(object):
plt.show()

def hist_standard_deviation(self, phases='all', bins=20, normed=False):
"""
Plot a histogram of the compared standard deviation values of two arrivals.

Standard deviation of two compared picks represents the combined uncertainties/pick errors
(earliest possible pick, latest possible pick)
:param phases: type of phases to compare
:type phases: str
:param bins: number of bins in histogram
:type bins: int
:param normed: Normalize histogram
:type normed: bool
:return: None
:rtype: None
"""
phases.strip()
if phases.find('all') == 0:
phases = 'ps'
@ -370,10 +398,12 @@ class PDFDictionary(object):

class PDFstatistics(object):
"""
This object can be used to get various statistic values from probabillity density functions.
This object can be used to get various statistic values from probability density functions.
Takes a path as argument.
"""

# TODO: change root to datapath

def __init__(self, directory):
"""Initiates some values needed when dealing with pdfs later"""
self._rootdir = directory
@ -480,7 +510,8 @@ class PDFstatistics(object):

return rlist

def writeThetaToFile(self, array, out_dir):
@staticmethod
def writeThetaToFile(array, out_dir):
"""
Method to write array-like data to file. Useful since acquiring can take
a serious amount of time when dealing with large databases.

@ -23,49 +23,52 @@ import warnings

import matplotlib.pyplot as plt
import numpy as np
from scipy.signal import argrelmax
from scipy.signal import argrelmax, argrelmin

from pylot.core.pick.charfuns import CharacteristicFunction
from pylot.core.pick.utils import getnoisewin, getsignalwin


class AutoPicker(object):
'''
"""
Superclass of different, automated picking algorithms applied on a CF determined
using AIC, HOS, or AR prediction.
'''
"""

warnings.simplefilter('ignore')

def __init__(self, cf, TSNR, PickWindow, iplot=0, aus=None, Tsmooth=None, Pick1=None, fig=None, linecolor='k'):
'''
:param: cf, characteristic function, on which the picking algorithm is applied
:type: `~pylot.core.pick.CharFuns.CharacteristicFunction` object

:param: TSNR, length of time windows around pick used to determine SNR [s]
:type: tuple (T_noise, T_gap, T_signal)

:param: PickWindow, length of pick window [s]
:type: float

:param: iplot, no. of figure window for plotting interims results
:type: integer

:param: aus ("artificial uplift of samples"), find local minimum at i if aic(i-1)*(1+aus) >= aic(i)
:type: float

:param: Tsmooth, length of moving smoothing window to calculate smoothed CF [s]
:type: float

:param: Pick1, initial (prelimenary) onset time, starting point for PragPicker and
EarlLatePicker
:type: float

'''
def __init__(self, cf, TSNR, PickWindow, iplot=0, aus=None, Tsmooth=None, Pick1=None,
fig=None, linecolor='k', ogstream=None):
"""
Create AutoPicker object
:param cf: characteristic function, on which the picking algorithm is applied
:type cf: `~pylot.core.pick.CharFuns.CharacteristicFunction`
:param TSNR: length of time windows around pick used to determine SNR [s], tuple (T_noise, T_gap, T_signal)
:type TSNR: (float, float, float)
:param PickWindow: length of pick window [s]
:type PickWindow: float
:param iplot: flag used for plotting, if > 1, results will be plotted. Use iplot = 0 to disable plotting
:type iplot: int
:param aus: ("artificial uplift of samples"), find local minimum at i if aic(i-1)*(1+aus) >= aic(i)
:type aus: float
:param Tsmooth: length of moving smoothing window to calculate smoothed CF [s]
:type Tsmooth: float
:param Pick1: initial (preliminary) onset time, starting point for PragPicker and EarlLatePicker
:type Pick1: float
:param fig: matplotlib figure used for plotting. If not given and plotting is enabled, a new figure will
be created
:type fig: `~matplotlib.figure.Figure`
:param linecolor: matplotlib line color string
:type linecolor: str
:param ogstream: original stream (waveform), e.g. for plotting purposes
:type ogstream: `~obspy.core.stream.Stream`
"""

assert isinstance(cf, CharacteristicFunction), "%s is not a CharacteristicFunction object" % str(cf)
self._linecolor = linecolor
self._pickcolor_p = 'b'
self.cf = cf.getCF()
self.ogstream = ogstream
self.Tcf = cf.getTimeArray()
self.Data = cf.getXCF()
self.dt = cf.getIncrement()
@ -79,6 +82,11 @@ class AutoPicker(object):
self.calcPick()
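
A typical call site of the reworked constructor looks roughly like this (parameter values are invented; aiccf stands for a CharacteristicFunction computed from the waveform, and the four-element TSNR matches the later use of TSNR[3] as slope window):

    tsnr = (5., .5, 2., 1.5)  # (T_noise, T_gap, T_signal, T_slope) [s], example values
    aicpick = AICPicker(aiccf, tsnr, PickWindow=6., iplot=0, Tsmooth=.2,
                        ogstream=stream)  # ogstream is only used for plotting
    print(aicpick.Pick, aicpick.SNR, aicpick.slope)
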
def __str__(self):
"""
String representation of AutoPicker object
:return:
:rtype: str
"""
return '''\n\t{name} object:\n
TSNR:\t\t\t{TSNR}\n
PickWindow:\t{PickWindow}\n
@ -142,12 +150,12 @@ class AutoPicker(object):


class AICPicker(AutoPicker):
'''
"""
Method to derive the onset time of an arriving phase based on CF
derived from AIC. In order to get an impression of the quality of this inital pick,
derived from AIC. In order to get an impression of the quality of this initial pick,
a quality assessment is applied based on SNR and slope determination derived from the CF,
from which the AIC has been calculated.
'''
"""

def calcPick(self):

@ -161,20 +169,22 @@ class AICPicker(AutoPicker):
iplot = int(self.iplot)
except:
if self.iplot == True or self.iplot == 'True':
iplot = 2
iplot = 2
else:
iplot = 0
iplot = 0

# find NaN's
nn = np.isnan(self.cf)
if len(nn) > 1:
self.cf[nn] = 0
# taper AIC-CF to get rid off side maxima
# taper AIC-CF to get rid of side maxima
tap = np.hanning(len(self.cf))
aic = tap * self.cf + max(abs(self.cf))
# smooth AIC-CF
ismooth = int(round(self.Tsmooth / self.dt))
aicsmooth = np.zeros(len(aic))
# MP MP better start with original data than zeros if array shall be smoothed, created artificial value before
# when starting with i in range(1...) loop below and subtracting offset afterwards
aicsmooth = np.copy(aic)
if len(aic) < ismooth:
print('AICPicker: Tsmooth larger than CF!')
return
@ -184,20 +194,39 @@ class AICPicker(AutoPicker):
ii1 = i - ismooth
aicsmooth[i] = aicsmooth[i - 1] + (aic[i] - aic[ii1]) / ismooth
else:
aicsmooth[i] = np.mean(aic[1: i])
aicsmooth[i] = np.mean(aic[0: i])  # MP MP created np.nan for i=1
# remove offset in AIC function
offset = abs(min(aic) - min(aicsmooth))
aicsmooth = aicsmooth - offset
cf = self.Data[0].data
# get maximum of HOS/AR-CF as starting point for searching
# minimum in AIC function
icfmax = np.argmax(self.Data[0].data)
icfmax = np.argmax(cf)

# TODO: If this shall be kept, maybe add thresh_factor to pylot parameters
thresh_hit = False
thresh_factor = 0.7
thresh = thresh_factor * cf[icfmax]
for index, sample in enumerate(cf):
if sample >= thresh:
thresh_hit = True
# go on searching for the following maximum
if index > 0 and thresh_hit:
if sample <= cf[index - 1]:
icfmax = index - 1
break
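
The inserted block changes how the reference maximum of the HOS/AR-CF is chosen: instead of the global argmax, the CF is walked from the start and the first local maximum reached after the CF exceeds 70 % of its global peak wins. Condensed, the logic is:

    cf = self.Data[0].data
    icfmax = np.argmax(cf)     # fallback: global maximum
    thresh = 0.7 * cf[icfmax]  # thresh_factor * global peak
    thresh_hit = False
    for index, sample in enumerate(cf):
        if sample >= thresh:
            thresh_hit = True
        if index > 0 and thresh_hit and sample <= cf[index - 1]:
            icfmax = index - 1  # first turning point above the threshold
            break
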
# find minimum in AIC-CF in front of maximum of HOS/AR-CF
lpickwindow = int(round(self.PickWindow / self.dt))
for i in range(icfmax - 1, max([icfmax - lpickwindow, 2]), -1):
if aicsmooth[i - 1] >= aicsmooth[i]:
self.Pick = self.Tcf[i]
break
tsafety = self.TSNR[1]  # safety gap, AIC is usually a little bit too late
left_corner_ind = max([icfmax - lpickwindow, 2])
right_corner_ind = icfmax + int(tsafety / self.dt)
aic_snip = aicsmooth[left_corner_ind: right_corner_ind]
minima = argrelmin(aic_snip)[0]  # 0th entry of tuple for axes
if len(minima) > 0:
pickindex = minima[-1] + left_corner_ind
self.Pick = self.Tcf[pickindex]

# if no minimum could be found:
# search in 1st derivative of AIC-CF
if self.Pick is None:
@ -212,18 +241,12 @@ class AICPicker(AutoPicker):
for i in range(icfmax - 1, max([icfmax - lpickwindow, 2]), -1):
if diffcf[i - 1] >= diffcf[i]:
self.Pick = self.Tcf[i]
pickindex = i
break
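
Where the old loop scanned backwards sample by sample for the first flattening of the smoothed AIC-CF, the new version collects all local minima in a window around icfmax with scipy's argrelmin and takes the last one before the CF maximum (plus a safety margin, since the AIC minimum tends to come in late). In isolation:

    from scipy.signal import argrelmin

    left = max(icfmax - lpickwindow, 2)
    right = icfmax + int(tsafety / self.dt)       # tsafety = TSNR[1]
    minima = argrelmin(aicsmooth[left:right])[0]  # local minima inside the snippet
    if minima.size:
        self.Pick = self.Tcf[minima[-1] + left]   # last minimum before the maximum
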
# quality assessment using SNR and slope from CF
if self.Pick is not None:
# get noise window
inoise = getnoisewin(self.Tcf, self.Pick, self.TSNR[0], self.TSNR[1])
# check, if these are counts or m/s, important for slope estimation!
# this is quick and dirty, better solution?
if max(self.Data[0].data < 1e-3) and max(self.Data[0].data >= 1e-6):
self.Data[0].data = self.Data[0].data * 1000000.
elif max(self.Data[0].data < 1e-6):
self.Data[0].data = self.Data[0].data * 1e13
# get signal window
isignal = getsignalwin(self.Tcf, self.Pick, self.TSNR[2])
if len(isignal) == 0:
@ -231,33 +254,39 @@ class AICPicker(AutoPicker):
ii = min([isignal[len(isignal) - 1], len(self.Tcf)])
isignal = isignal[0:ii]
try:
self.Data[0].data[isignal]
cf[isignal]
except IndexError as e:
msg = "Time series out of bounds! {}".format(e)
print(msg)
return
# calculate SNR from CF
self.SNR = max(abs(self.Data[0].data[isignal] - np.mean(self.Data[0].data[isignal]))) / \
max(abs(self.Data[0].data[inoise] - np.mean(self.Data[0].data[inoise])))
self.SNR = max(abs(cf[isignal])) / \
abs(np.mean(cf[inoise]))
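
The SNR definition itself changes in this hunk. Both variants read off index windows around the pick, but the normalisation differs:

    inoise = getnoisewin(self.Tcf, self.Pick, self.TSNR[0], self.TSNR[1])
    isignal = getsignalwin(self.Tcf, self.Pick, self.TSNR[2])
    # old: peak of the demeaned signal window over peak of the demeaned noise window
    snr_old = max(abs(cf[isignal] - np.mean(cf[isignal]))) / max(abs(cf[inoise] - np.mean(cf[inoise])))
    # new: peak CF amplitude in the signal window over the mean CF level of the noise
    snr_new = max(abs(cf[isignal])) / abs(np.mean(cf[inoise]))
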
# calculate slope from CF after initial pick
# get slope window
tslope = self.TSNR[3]  # slope determination window
islope = np.where((self.Tcf <= min([self.Pick + tslope, self.Tcf[-1]])) \
& (self.Tcf >= self.Pick))  # TODO: put this in a separate function like getsignalwin
if tsafety >= 0:
islope = np.where((self.Tcf <= min([self.Pick + tslope + tsafety, self.Tcf[-1]])) \
& (self.Tcf >= self.Pick))  # TODO: put this in a separate function like getsignalwin
else:
islope = np.where((self.Tcf <= min([self.Pick + tslope, self.Tcf[-1]])) \
& (
self.Tcf >= self.Pick + tsafety))  # TODO: put this in a separate function like getsignalwin
# find maximum within slope determination window
# 'cause slope should be calculated up to first local minimum only!
try:
dataslope = self.Data[0].data[islope[0][0:-1]]
dataslope = cf[islope[0][0:-1]]
except IndexError:
print("Slope Calculation: empty array islope, check signal window")
return
if len(dataslope) < 1:
print('No data in slope window found!')
if len(dataslope) < 2:
print('No or not enough data in slope window found!')
return
imaxs, = argrelmax(dataslope)
if imaxs.size:
try:
imaxs, = argrelmax(dataslope)
imax = imaxs[0]
else:
except (ValueError, IndexError) as e:
print(e, 'picker: argrelmax not working!')
imax = np.argmax(dataslope)
iislope = islope[0][0:imax + 1]
if len(iislope) < 2:
@ -269,53 +298,65 @@ class AICPicker(AutoPicker):
print("AICPicker: Maximum for slope determination right at the beginning of the window!")
print("Choose longer slope determination window!")
if self.iplot > 1:
if self.fig == None or self.fig == 'None':
if self.fig is None or self.fig == 'None':
fig = plt.figure()
plt_flag = 1
plt_flag = iplot
else:
fig = self.fig
ax = fig.add_subplot(111)
x = self.Data[0].data
ax.plot(self.Tcf, x / max(x), color=self._linecolor, linewidth=0.7, label='(HOS-/AR-) Data')
cf = cf
ax.plot(self.Tcf, cf / max(cf), color=self._linecolor, linewidth=0.7, label='(HOS-/AR-) Data')
ax.plot(self.Tcf, aicsmooth / max(aicsmooth), 'r', label='Smoothed AIC-CF')
ax.legend(loc=1)
ax.set_xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
ax.set_yticks([])
ax.set_title(self.Data[0].stats.station)
if plt_flag == 1:
if plt_flag in [1, 2]:
fig.show()
try: input()
except SyntaxError: pass
try:
input()
except SyntaxError:
pass
plt.close(fig)
return
iislope = islope[0][0:imax+1]
iislope = islope[0][0:imax + 1]
dataslope = self.Data[0].data[iislope]
# calculate slope as polynomial fit of order 1
xslope = np.arange(0, len(dataslope), 1)
P = np.polyfit(xslope, dataslope, 1)
datafit = np.polyval(P, xslope)
if datafit[0] >= datafit[-1]:
print('AICPicker: Negative slope, bad onset skipped!')
return
self.slope = 1 / (len(dataslope) * self.Data[0].stats.delta) * (datafit[-1] - datafit[0])
try:
P = np.polyfit(xslope, dataslope, 1)
datafit = np.polyval(P, xslope)
if datafit[0] >= datafit[-1]:
print('AICPicker: Negative slope, bad onset skipped!')
else:
self.slope = 1 / (len(dataslope) * self.Data[0].stats.delta) * (datafit[-1] - datafit[0])
# normalize slope to maximum of cf to make it unit independent
self.slope /= self.Data[0].data[icfmax]
except Exception as e:
print("AICPicker: Problems with data fitting! {}".format(e))
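
Besides the try guard, the important change is the final normalisation: dividing the fitted slope by the CF value at its maximum makes the quality measure independent of the unit guessing (counts vs. m/s) performed further up. Schematically:

    xslope = np.arange(len(dataslope))
    P = np.polyfit(xslope, dataslope, 1)  # linear fit through the slope window
    datafit = np.polyval(P, xslope)
    if datafit[0] < datafit[-1]:          # accept positive slopes only
        slope = (datafit[-1] - datafit[0]) / (len(dataslope) * delta)  # delta: sampling interval [s]
        slope /= cf[icfmax]               # unit independent, relative to the CF maximum
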
else:
self.SNR = None
self.slope = None

if iplot > 1:
if self.fig == None or self.fig == 'None':
if self.fig is None or self.fig == 'None':
fig = plt.figure()  # self.iplot)
plt_flag = 1
plt_flag = iplot
else:
fig = self.fig
fig._tight = True
ax1 = fig.add_subplot(211)
x = self.Data[0].data
if len(self.Tcf) > len(self.Data[0].data):  # why? LK
self.Tcf = self.Tcf[0:len(self.Tcf)-1]
ax1.plot(self.Tcf, x / max(x), color=self._linecolor, linewidth=0.7, label='(HOS-/AR-) Data')
if len(self.Tcf) > len(cf):  # why? LK
self.Tcf = self.Tcf[0:len(self.Tcf) - 1]
ax1.plot(self.Tcf, cf / max(cf), color=self._linecolor, linewidth=0.7, label='(HOS-/AR-) Data')
ax1.plot(self.Tcf, aicsmooth / max(aicsmooth), 'r', label='Smoothed AIC-CF')
# plot the original waveform also for evaluation of the CF and pick
if self.ogstream:
data = self.ogstream[0].data
if len(data) == len(self.Tcf):
ax1.plot(self.Tcf, 0.5 * data / max(data), 'k', label='Seismogram', alpha=0.3, zorder=0,
lw=0.5)
if self.Pick is not None:
ax1.plot([self.Pick, self.Pick], [-0.1, 0.5], 'b', linewidth=2, label='AIC-Pick')
ax1.set_xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
@ -324,7 +365,7 @@ class AICPicker(AutoPicker):

if self.Pick is not None:
ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
ax2.plot(self.Tcf, x, color=self._linecolor, linewidth=0.7, label='Data')
ax2.plot(self.Tcf, aicsmooth, color='r', linewidth=0.7, label='Data')
ax1.axvspan(self.Tcf[inoise[0]], self.Tcf[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
ax1.axvspan(self.Tcf[isignal[0]], self.Tcf[isignal[-1]], color='b', alpha=0.2, lw=0,
label='Signal Window')
@ -336,37 +377,43 @@ class AICPicker(AutoPicker):
label='Signal Window')
ax2.axvspan(self.Tcf[iislope[0]], self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0,
label='Slope Window')
ax2.plot(self.Tcf[iislope], datafit, 'g', linewidth=2, label='Slope')
ax2.plot(self.Tcf[iislope], datafit, 'g', linewidth=2,
label='Slope')  # MP MP changed temporarily!

ax1.set_title('Station %s, SNR=%7.2f, Slope= %12.2f counts/s' % (self.Data[0].stats.station,
self.SNR, self.slope))
if self.slope is not None:
ax1.set_title('Station %s, SNR=%7.2f, Slope= %12.2f counts/s' % (self.Data[0].stats.station,
self.SNR, self.slope))
else:
ax1.set_title('Station %s, SNR=%7.2f' % (self.Data[0].stats.station, self.SNR))
ax2.set_xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
ax2.set_ylabel('Counts')
ax2.set_yticks([])
ax2.legend(loc=1)
if plt_flag == 1:
fig.show()
try: input()
except SyntaxError: pass
plt.close(fig)
else:
ax1.set_title(self.Data[0].stats.station)
if plt_flag == 1:
fig.show()
try: input()
except SyntaxError: pass
plt.close(fig)

if self.Pick == None:
if plt_flag in [1, 2]:
fig.show()
try:
input()
except SyntaxError:
pass
plt.close(fig)
if plt_flag == 3:
stats = self.Data[0].stats
netstlc = '{}.{}.{}'.format(stats.network, stats.station, stats.location)
fig.savefig('aicfig_{}_{}.png'.format(netstlc, stats.channel))

if self.Pick is None:
print('AICPicker: Could not find minimum, picking window too short?')

return


class PragPicker(AutoPicker):
'''
"""
Method of pragmatic picking exploiting information given by CF.
'''
"""

def calcPick(self):

@ -374,9 +421,9 @@ class PragPicker(AutoPicker):
iplot = int(self.getiplot())
except:
if self.getiplot() == True or self.getiplot() == 'True':
iplot = 2
iplot = 2
else:
iplot = 0
iplot = 0

if self.getpick1() is not None:
print('PragPicker: Get most likely pick from HOS- or AR-CF using pragmatic picking algorithm ...')
@ -410,16 +457,16 @@ class PragPicker(AutoPicker):
ipick1 = np.argmin(abs(self.Tcf - self.getpick1()))
cfpick1 = 2 * self.cf[ipick1]

# check trend of CF, i.e. differences of CF and adjust aus ("artificial uplift
# check trend of CF, i.e. differences of CF and adjust aus ("artificial uplift
# of picks") regarding this trend
# prominent trend: decrease aus
# flat: use given aus
cfdiff = np.diff(cfipick)
if len(cfdiff)<20:
if len(cfdiff) < 20:
print('PragPicker: Very few samples for CF. Check LTA window dimensions!')
i0diff = np.where(cfdiff > 0)
cfdiff = cfdiff[i0diff]
if len(cfdiff)<1:
if len(cfdiff) < 1:
print('PragPicker: Negative slope for CF. Check LTA window dimensions! STOP')
self.Pick = None
return
@ -432,20 +479,22 @@ class PragPicker(AutoPicker):
cfpick_r = 0
cfpick_l = 0
lpickwindow = int(round(self.PickWindow / self.dt))
for i in range(max(np.insert(ipick, 0, 2)), min([ipick1 + lpickwindow + 1, len(self.cf) - 1])):
if self.cf[i + 1] > self.cf[i] and self.cf[i - 1] >= self.cf[i]:
if cfsmooth[i - 1] * (1 + aus1) >= cfsmooth[i]:
if cfpick1 >= self.cf[i]:
pick_r = self.Tcf[i]
self.Pick = pick_r
flagpick_l = 1
cfpick_r = self.cf[i]
break
# for i in range(max(np.insert(ipick, 0, 2)), min([ipick1 + lpickwindow + 1, len(self.cf) - 1])):
#     # local minimum
#     if self.cf[i + 1] > self.cf[i] <= self.cf[i - 1]:
#         if cfsmooth[i - 1] * (1 + aus1) >= cfsmooth[i]:
#             if cfpick1 >= self.cf[i]:
#                 pick_r = self.Tcf[i]
#                 self.Pick = pick_r
#                 flagpick_l = 1
#                 cfpick_r = self.cf[i]
#                 break

# now we look to the left
if len(self.cf) > ipick1 +1:
if len(self.cf) > ipick1 + 1:
for i in range(ipick1, max([ipick1 - lpickwindow + 1, 2]), -1):
if self.cf[i + 1] > self.cf[i] and self.cf[i - 1] >= self.cf[i]:
# local minimum
if self.cf[i + 1] > self.cf[i] <= self.cf[i - 1]:
if cfsmooth[i - 1] * (1 + aus1) >= cfsmooth[i]:
if cfpick1 >= self.cf[i]:
pick_l = self.Tcf[i]
@ -454,7 +503,7 @@ class PragPicker(AutoPicker):
cfpick_l = self.cf[i]
break
else:
msg ='PragPicker: Initial onset too close to start of CF! \
msg = 'PragPicker: Initial onset too close to start of CF! \
Stop finalizing pick to the left.'
print(msg)

@ -462,9 +511,9 @@ class PragPicker(AutoPicker):
if flagpick_l > 0 and flagpick_r > 0 and cfpick_l <= 3 * cfpick_r:
self.Pick = pick_l
pickflag = 1
elif flagpick_l > 0 and flagpick_r > 0 and cfpick_l >= cfpick_r:
self.Pick = pick_r
pickflag = 1
# elif flagpick_l > 0 and flagpick_r > 0 and cfpick_l >= cfpick_r:
#     self.Pick = pick_r
#     pickflag = 1
elif flagpick_l == 0 and flagpick_r > 0 and cfpick_l >= cfpick_r:
self.Pick = pick_l
pickflag = 1
@ -474,7 +523,7 @@ class PragPicker(AutoPicker):
pickflag = 0

if iplot > 1:
if self.fig == None or self.fig == 'None':
if self.fig is None or self.fig == 'None':
fig = plt.figure()  # self.getiplot())
plt_flag = 1
else:
@ -484,15 +533,18 @@ class PragPicker(AutoPicker):
ax.plot(Tcfpick, cfipick, color=self._linecolor, linewidth=0.7, label='CF')
ax.plot(Tcfpick, cfsmoothipick, 'r', label='Smoothed CF')
if pickflag > 0:
ax.plot([self.Pick, self.Pick], [min(cfipick), max(cfipick)], self._pickcolor_p, linewidth=2, label='Pick')
ax.plot([self.Pick, self.Pick], [min(cfipick), max(cfipick)], self._pickcolor_p, linewidth=2,
label='Pick')
ax.set_xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
ax.set_yticks([])
ax.set_title(self.Data[0].stats.station)
ax.legend(loc=1)
if plt_flag == 1:
fig.show()
try: input()
except SyntaxError: pass
try:
input()
except SyntaxError:
pass
plt.close(fig)
return

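With the right-hand search loop and the duplicate elif commented out, the pick reconciliation that remains reads, condensed (flags and candidate values as set in the loops above; one branch between the hunks is elided, and pick_r can no longer be chosen):

    if flagpick_l > 0 and flagpick_r > 0 and cfpick_l <= 3 * cfpick_r:
        self.Pick = pick_l  # prefer the left candidate
        pickflag = 1
    elif flagpick_l == 0 and flagpick_r > 0 and cfpick_l >= cfpick_r:
        self.Pick = pick_l
        pickflag = 1
    else:
        pickflag = 0
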
742
pylot/core/util/array_map.py
Normal file
@ -0,0 +1,742 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import traceback

import cartopy.crs as ccrs
import cartopy.feature as cf
from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
import matplotlib
import matplotlib.patheffects as PathEffects
import matplotlib.pyplot as plt
import numpy as np
import obspy
from PySide2 import QtWidgets, QtGui
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from obspy import UTCDateTime

from pylot.core.util.utils import identifyPhaseID
from scipy.interpolate import griddata

from pylot.core.pick.utils import get_quality_class
from pylot.core.util.widgets import PickDlg

matplotlib.use('Qt5Agg')


class MplCanvas(FigureCanvas):

    def __init__(self, extern_axes=None, projection=None, width=15, height=5, dpi=100):
        if extern_axes is None:
            self.fig = plt.figure(figsize=(width, height), dpi=dpi)
            self.axes = self.fig.add_subplot(111, projection=projection)
        else:
            self.fig = extern_axes.figure
            self.axes = extern_axes

        super(MplCanvas, self).__init__(self.fig)


class Array_map(QtWidgets.QWidget):
    def __init__(self, parent, metadata, parameter=None, axes=None, annotate=True, pointsize=25.,
                 linewidth=1.5, width=5e6, height=2e6):
        QtWidgets.QWidget.__init__(self, parent=parent)

        assert (parameter is not None or parent is not None), 'either parent or parameter has to be set'
        # set properties
        self._parent = parent
        self.metadata = metadata
        self.pointsize = pointsize
        self.linewidth = linewidth
        self.extern_plot_axes = axes
        self.width = width
        self.height = height
        self.annotate = annotate
        self.picks = None
        self.picks_dict = None
        self.uncertainties = None
        self.autopicks_dict = None
        self.hybrids_dict = None
        self.eventLoc = None
        self.parameter = parameter if parameter else parent._inputs

        self.picks_rel = {}
        self.picks_rel_mean_corrected = {}
        self.marked_stations = []
        self.highlighted_stations = []

        # call functions to draw everything
        self.projection = ccrs.PlateCarree()
        self.init_graphics()
        self.ax = self.canvas.axes
        self.ax.set_adjustable('datalim')

        self.init_stations()
        self.init_crtpyMap()
        self.init_map()

        # set original map limits to fall back on when home button is pressed
        self.org_xlim = self.ax.get_xlim()
        self.org_ylim = self.ax.get_ylim()

        # initial map without event
        self.ax.set_xlim(self.org_xlim[0], self.org_xlim[1])
        self.ax.set_ylim(self.org_ylim[0], self.org_ylim[1])

        self._style = None if not hasattr(parent, '_style') else parent._style

    def init_map(self):
        self.init_colormap()
        self.connectSignals()
        self.draw_everything()

    def init_graphics(self):
        """
        Initializes all GUI components and figure elements to be populated by other functions
        """
        # initialize figure elements

        if self.extern_plot_axes is None:
            self.canvas = MplCanvas(projection=self.projection)
        else:
            self.canvas = MplCanvas(extern_axes=self.extern_plot_axes)

        self.plotWidget = self.canvas

        # initialize GUI elements
        self.status_label = QtWidgets.QLabel()
        self.map_reset_button = QtWidgets.QPushButton('Reset Map View')
        self.save_map_button = QtWidgets.QPushButton('Save Map')
        self.go2eq_button = QtWidgets.QPushButton('Go to Event Location')
        self.subtract_mean_cb = QtWidgets.QCheckBox('Subtract mean')

        self.main_box = QtWidgets.QVBoxLayout()
        self.setLayout(self.main_box)

        self.top_row = QtWidgets.QHBoxLayout()
        self.main_box.addLayout(self.top_row, 0)

        self.comboBox_phase = QtWidgets.QComboBox()
        self.comboBox_phase.insertItem(0, 'P')
        self.comboBox_phase.insertItem(1, 'S')

        self.comboBox_am = QtWidgets.QComboBox()
        self.comboBox_am.insertItem(0, 'hybrid (prefer manual)')
        self.comboBox_am.insertItem(1, 'manual')
        self.comboBox_am.insertItem(2, 'auto')

        self.annotations_box = QtWidgets.QCheckBox('Annotate')
        self.annotations_box.setChecked(True)
        self.auto_refresh_box = QtWidgets.QCheckBox('Automatic refresh')
        self.auto_refresh_box.setChecked(True)
        self.refresh_button = QtWidgets.QPushButton('Refresh')
        self.cmaps_box = QtWidgets.QComboBox()
        self.cmaps_box.setMaxVisibleItems(20)
        [self.cmaps_box.addItem(map_name) for map_name in sorted(plt.colormaps())]
        # try to set to viridis as default
        self.cmaps_box.setCurrentIndex(self.cmaps_box.findText('viridis'))

        self.top_row.addWidget(QtWidgets.QLabel('Select a phase: '))
        self.top_row.addWidget(self.comboBox_phase)
        self.top_row.setStretch(1, 1)  # set stretch of item 1 to 1
        self.top_row.addWidget(QtWidgets.QLabel('Pick type: '))
        self.top_row.addWidget(self.comboBox_am)
        self.top_row.setStretch(3, 1)  # set stretch of item 3 to 1
        self.top_row.addWidget(self.cmaps_box)
        self.top_row.addWidget(self.annotations_box)
        self.top_row.addWidget(self.auto_refresh_box)
        self.top_row.addWidget(self.refresh_button)

        self.main_box.addWidget(self.plotWidget, 10)

        self.bot_row = QtWidgets.QHBoxLayout()
        self.main_box.addLayout(self.bot_row, 0)
        self.bot_row.addWidget(QtWidgets.QLabel(''), 5)
        self.bot_row.addWidget(self.map_reset_button, 2)
        self.bot_row.addWidget(self.go2eq_button, 2)
        self.bot_row.addWidget(self.save_map_button, 2)
        self.bot_row.addWidget(self.subtract_mean_cb, 0)
        self.bot_row.addWidget(self.status_label, 5)

    def init_colormap(self):
        self.init_lat_lon_dimensions()
        self.init_lat_lon_grid()

    def init_crtpyMap(self):
        self.ax.add_feature(cf.LAND)
        self.ax.add_feature(cf.OCEAN)
        self.ax.add_feature(cf.COASTLINE, linewidth=1, edgecolor='gray')
        self.ax.add_feature(cf.BORDERS, alpha=0.7)
        self.ax.add_feature(cf.LAKES, alpha=0.7)
        self.ax.add_feature(cf.RIVERS, linewidth=1)

        # parallels and meridians
        self.add_merid_paral()

        self.canvas.fig.tight_layout()

    def add_merid_paral(self):
        self.gridlines = self.ax.gridlines(draw_labels=False, alpha=0.6, color='gray',
                                           linewidth=self.linewidth / 2, zorder=7, crs=ccrs.PlateCarree())

    def remove_merid_paral(self):
        if len(self.gridlines.xline_artists):
            self.gridlines.xline_artists[0].remove()
            self.gridlines.yline_artists[0].remove()

    def org_map_view(self):
        self.ax.set_xlim(self.org_xlim[0], self.org_xlim[1])
        self.ax.set_ylim(self.org_ylim[0], self.org_ylim[1])
        # parallels and meridians
        # self.remove_merid_paral()
        # self.add_merid_paral()

        self.canvas.draw_idle()

    def go2eq(self):
        if self.eventLoc:
            lats, lons = self.eventLoc
            self.ax.set_xlim(lons - 10, lons + 10)
            self.ax.set_ylim(lats - 5, lats + 5)
            # parallels and meridians
            # self.remove_merid_paral()
            # self.add_merid_paral()

            self.canvas.draw_idle()

        else:
            self.status_label.setText('No event information available')

    def connectSignals(self):
        self.comboBox_phase.currentIndexChanged.connect(self._refresh_drawings)
        self.comboBox_am.currentIndexChanged.connect(self._refresh_drawings)
        self.cmaps_box.currentIndexChanged.connect(self._refresh_drawings)
        self.annotations_box.stateChanged.connect(self.switch_annotations)
        self.refresh_button.clicked.connect(self._refresh_drawings)
        self.map_reset_button.clicked.connect(self.org_map_view)
        self.go2eq_button.clicked.connect(self.go2eq)
        self.save_map_button.clicked.connect(self.saveFigure)
        self.subtract_mean_cb.stateChanged.connect(self.toggle_subtract_mean)

        self.plotWidget.mpl_connect('motion_notify_event', self.mouse_moved)
        self.plotWidget.mpl_connect('scroll_event', self.mouse_scroll)
        self.plotWidget.mpl_connect('button_press_event', self.mouseLeftPress)
        self.plotWidget.mpl_connect('button_release_event', self.mouseLeftRelease)

    # set mouse events -----------------------------------------------------
    def mouse_moved(self, event):
        if not event.inaxes == self.ax:
            return
        else:
            cont, inds = self.sc.contains(event)
            lat = event.ydata
            lon = event.xdata
            text = f'Longitude: {lon:3.3f}, Latitude: {lat:3.3f}'

            if cont:
                indices = inds['ind']
                text += ' | Station: ' if len(indices) == 1 else ' | Stations: '
                text += ' - '.join([self._station_onpick_ids[index] for index in indices[:5]])
                if len(indices) > 5:
                    text += '...'

            self.status_label.setText(text)

    def mouse_scroll(self, event):
        if not event.inaxes == self.ax:
            return

        zoom = {'up': 1. / 2., 'down': 2.}

        if event.button in zoom:
            xlim = self.ax.get_xlim()
            ylim = self.ax.get_ylim()

            x, y = event.xdata, event.ydata

            factor = zoom[event.button]
            xdiff = (xlim[1] - xlim[0]) * factor
            xl = x - 0.5 * xdiff
            xr = x + 0.5 * xdiff
            ydiff = (ylim[1] - ylim[0]) * factor
            yb = y - 0.5 * ydiff
            yt = y + 0.5 * ydiff

            self.ax.set_xlim(xl, xr)
            self.ax.set_ylim(yb, yt)
            # parallels and meridians
            # self.remove_merid_paral()
            # self.add_merid_paral()

            self.ax.figure.canvas.draw_idle()
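
The scroll handler implements a cursor-centred zoom: the visible span is scaled by 1/2 (scroll up) or 2 (scroll down) and re-anchored on the mouse position. The geometry in isolation:

    zoom = {'up': 1. / 2., 'down': 2.}
    factor = zoom[event.button]
    xdiff = (xlim[1] - xlim[0]) * factor  # new width
    ydiff = (ylim[1] - ylim[0]) * factor  # new height
    ax.set_xlim(x - 0.5 * xdiff, x + 0.5 * xdiff)  # centred on the cursor
    ax.set_ylim(y - 0.5 * ydiff, y + 0.5 * ydiff)
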
    def mouseLeftPress(self, event):
        if not event.inaxes == self.ax:
            return
        self.map_x = event.xdata
        self.map_y = event.ydata
        self.map_xlim = self.ax.get_xlim()
        self.map_ylim = self.ax.get_ylim()

    def mouseLeftRelease(self, event):
        if not event.inaxes == self.ax:
            return
        new_x = event.xdata
        new_y = event.ydata

        dx = new_x - self.map_x
        dy = new_y - self.map_y

        self.ax.set_xlim((self.map_xlim[0] - dx, self.map_xlim[1] - dx))
        self.ax.set_ylim(self.map_ylim[0] - dy, self.map_ylim[1] - dy)
        # parallels and meridians
        # self.remove_merid_paral()
        # self.add_merid_paral()

        self.ax.figure.canvas.draw_idle()

    def onpick(self, event):
        btn_msg = {1: ' in selection. Aborted', 2: ' to delete a pick on. Aborted', 3: ' to display info.'}
        ind = event.ind
        button = event.mouseevent.button
        msg_reason = None
        if len(ind) > 1:
            self._parent.update_status(f'Found more than one station {btn_msg.get(button)}')
            return
        if button == 1:
            self.openPickDlg(ind)
        elif button == 2:
            self.deletePick(ind)
        elif button == 3:
            self.pickInfo(ind)

    # data handling -----------------------------------------------------
    def update_hybrids_dict(self):
        self.hybrids_dict = self.picks_dict.copy()
        for station, pick in self.autopicks_dict.items():
            if not station in self.hybrids_dict.keys():
                self.hybrids_dict[station] = pick
        return self.hybrids_dict

    def deletePick(self, ind):
        self.update_hybrids_dict()
        for index in ind:
            network, station = self._station_onpick_ids[index].split('.')[:2]
            try:
                phase = self.comboBox_phase.currentText()
                picks = self.current_picks_dict()[station]
                pick = picks.get(phase)
                if pick:
                    picker = pick['picker']
                    message = 'Deleted {} pick for phase {}, station {}.{} at timestamp {}'
                    message = message.format(picker, phase, network, station,
                                             pick['mpp'])
                    if picker == 'auto':
                        del (self.autopicks_dict[station])
                    elif picker == 'manual':
                        del (self.picks_dict[station])
                    else:
                        raise TypeError('Unknown "picker" {}'.format(picker))
                    print(message)
                    pyl_mw = self._parent
                    pyl_mw.deletePicks(station, pick, type=picker)
                    pyl_mw.setDirty(True)
                    pyl_mw.update_status(message)
                    if self.auto_refresh_box.isChecked():
                        self._refresh_drawings()
                    else:
                        self.highlight_station(network, station, color='red')
                    pyl_mw.drawPicks(station)
                    pyl_mw.draw()
            except Exception as e:
                print('Could not delete pick for station {}.{}: {}'.format(network, station, e))

    def pickInfo(self, ind):
        self.update_hybrids_dict()
        for index in ind:
            network, station = self._station_onpick_ids[index].split('.')[:2]
            dic = self.current_picks_dict()[station]
            for phase, picks in dic.items():
                # because of wadati...
                if phase == 'SPt':
                    continue
                print('{} - Pick:'.format(phase))
                for key, info in picks.items():
                    print('{}: {}'.format(key, info))

    def _from_dict(self, function, key):
        return function(self.stations_dict.values(), key=lambda x: x[key])[key]

    def get_min_from_stations(self, key):
        return self._from_dict(min, key)

    def get_max_from_stations(self, key):
        return self._from_dict(max, key)

    def current_picks_dict(self):
        picktype = self.comboBox_am.currentText().split(' ')[0]
        auto_manu = {'auto': self.autopicks_dict,
                     'manual': self.picks_dict,
                     'hybrid': self.hybrids_dict}
        return auto_manu[picktype]

    def init_stations(self):
        self.stations_dict = self.metadata.get_all_coordinates()
        self.latmin = self.get_min_from_stations('latitude')
        self.lonmin = self.get_min_from_stations('longitude')
        self.latmax = self.get_max_from_stations('latitude')
        self.lonmax = self.get_max_from_stations('longitude')

    def init_picks(self):
        def get_picks(station_dict):
            self.update_hybrids_dict()
            picks = {}
            uncertainties = {}
            # selected phase
            phase = self.comboBox_phase.currentText()
            for st_id in station_dict.keys():
                try:
                    station_name = st_id.split('.')[-1]
                    # current_picks_dict: auto or manual
                    station_picks = self.current_picks_dict().get(station_name)
                    if not station_picks:
                        continue
                    for phase_hint, pick in station_picks.items():
                        if identifyPhaseID(phase_hint) == phase:
                            break
                    else:
                        continue
                    if pick['picker'] == 'auto':
                        if not pick['spe']:
                            continue
                    picks[st_id] = pick['mpp']
                    uncertainties[st_id] = pick['spe']
                except KeyError:
                    continue
                except Exception as e:
                    print('Cannot display pick for station {}. Reason: {}'.format(station_name, e))
            return picks, uncertainties

        def get_picks_rel(picks, func=min):
            picks_rel = {}
            picks_utc = []
            for pick in picks.values():
                if type(pick) is UTCDateTime:
                    picks_utc.append(pick.timestamp)
            if picks_utc:
                self._reference_picktime = UTCDateTime(func(picks_utc))
                for st_id, pick in picks.items():
                    if type(pick) is UTCDateTime:
                        pick -= self._reference_picktime
                    picks_rel[st_id] = pick
            return picks_rel

        def get_picks_rel_mean_corr(picks):
            return get_picks_rel(picks, func=np.nanmean)

        self.picks, self.uncertainties = get_picks(self.stations_dict)
        self.picks_rel = get_picks_rel(self.picks)
        self.picks_rel_mean_corrected = get_picks_rel_mean_corr(self.picks)
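
get_picks_rel turns absolute UTC onsets into plottable offsets against a reference: the earliest pick by default (func=min), or the mean (func=np.nanmean) for the mean-corrected residual display. A trimmed, self-contained version of the idea:

    from obspy import UTCDateTime

    def picks_relative(picks, func=min):
        # picks: {station_id: UTCDateTime}; returns {station_id: seconds from reference}
        stamps = [p.timestamp for p in picks.values() if isinstance(p, UTCDateTime)]
        if not stamps:
            return {}
        reference = UTCDateTime(func(stamps))
        # subtracting two UTCDateTime objects yields float seconds
        return {st_id: p - reference for st_id, p in picks.items()
                if isinstance(p, UTCDateTime)}
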
    def toggle_subtract_mean(self):
        if self.subtract_mean_cb.isChecked():
            cmap = 'seismic'
        else:
            cmap = 'viridis'
        self.cmaps_box.setCurrentIndex(self.cmaps_box.findText(cmap))
        self._refresh_drawings()

    def init_lat_lon_dimensions(self):
        # init minimum and maximum lon and lat dimensions
        self.londim = self.lonmax - self.lonmin
        self.latdim = self.latmax - self.latmin

    def init_lat_lon_grid(self, nstep=250):
        # create a regular grid to display colormap
        lataxis = np.linspace(self.latmin, self.latmax, nstep)
        lonaxis = np.linspace(self.lonmin, self.lonmax, nstep)
        self.longrid, self.latgrid = np.meshgrid(lonaxis, lataxis)

    def init_picksgrid(self):
        picks, uncertainties, lats, lons = self.get_picks_lat_lon()
        try:
            self.picksgrid_active = griddata((lats, lons), picks, (self.latgrid, self.longrid), method='linear')
        except Exception as e:
            self._warn('Could not init picksgrid: {}'.format(e))

    def get_st_lat_lon_for_plot(self):
        stations = []
        latitudes = []
        longitudes = []
        for st_id, coords in self.stations_dict.items():
            stations.append(st_id)
            latitudes.append(coords['latitude'])
            longitudes.append(coords['longitude'])
        return stations, latitudes, longitudes

    def get_picks_lat_lon(self):
        picks_rel = self.picks_rel_mean_corrected if self.subtract_mean_cb.isChecked() else self.picks_rel
        picks = []
        uncertainties = []
        latitudes = []
        longitudes = []
        for st_id, pick in picks_rel.items():
            picks.append(pick)
            uncertainties.append(self.uncertainties.get(st_id))
            latitudes.append(self.stations_dict[st_id]['latitude'])
            longitudes.append(self.stations_dict[st_id]['longitude'])
        return picks, uncertainties, latitudes, longitudes

    # plotting -----------------------------------------------------
    def highlight_station(self, network, station, color):
        stat_dict = self.stations_dict['{}.{}'.format(network, station)]
        lat = stat_dict['latitude']
        lon = stat_dict['longitude']
        self.highlighted_stations.append(self.ax.scatter(lon, lat, s=self.pointsize, edgecolors=color,
                                                         facecolors='none', zorder=12,
                                                         transform=ccrs.PlateCarree(), label='deleted'))

    def openPickDlg(self, ind):
        try:
            wfdata = self._parent.get_data().getWFData()
        except AttributeError:
            QtWidgets.QMessageBox.warning(
                self, "PyLoT Warning",
                "No waveform data found. Check if they were already loaded in Waveform plot tab."
            )
            return
        wfdata_comp = self._parent.get_data().getAltWFdata()
        for index in ind:
            network, station = self._station_onpick_ids[index].split('.')[:2]
            pyl_mw = self._parent
            try:
                wfdata = wfdata.select(station=station)
                wfdata_comp = wfdata_comp.select(station=station)
                if not wfdata:
                    self._warn('No data for station {}'.format(station))
                    return
                pickDlg = PickDlg(self._parent, parameter=self.parameter,
                                  data=wfdata.copy(), data_compare=wfdata_comp.copy(), network=network, station=station,
                                  picks=self._parent.get_current_event().getPick(station),
                                  autopicks=self._parent.get_current_event().getAutopick(station),
                                  filteroptions=self._parent.filteroptions, metadata=self.metadata,
                                  event=pyl_mw.get_current_event())
            except Exception as e:
                message = 'Could not generate Plot for station {st}.\n {er}'.format(st=station, er=e)
                self._warn(message)
                print(message, e)
                print(traceback.format_exc())
                return
            try:
                if pickDlg.exec_():
                    pyl_mw.setDirty(True)
                    pyl_mw.update_status('picks accepted ({0})'.format(station))
                    pyl_mw.addPicks(station, pickDlg.getPicks(picktype='manual'), type='manual')
                    pyl_mw.addPicks(station, pickDlg.getPicks(picktype='auto'), type='auto')
                    if self.auto_refresh_box.isChecked():
                        self._refresh_drawings()
                    else:
                        self.highlight_station(network, station, color='yellow')
                    pyl_mw.drawPicks(station)
                    pyl_mw.draw()
                else:
                    pyl_mw.update_status('picks discarded ({0})'.format(station))
            except Exception as e:
                message = 'Could not save picks for station {st}.\n{er}'.format(st=station, er=e)
                self._warn(message)
                print(message, e)
                print(traceback.format_exc())

    def draw_contour_filled(self, nlevel=51):
        if self.subtract_mean_cb.isChecked():
            abs_max = self.get_residuals_absmax()
            levels = np.linspace(-abs_max, abs_max, nlevel)
        else:
            levels = np.linspace(min(self.picks_rel.values()), max(self.picks_rel.values()), nlevel)

        self.contourf = self.ax.contourf(self.longrid, self.latgrid, self.picksgrid_active, levels,
                                         linewidths=self.linewidth * 5, transform=ccrs.PlateCarree(),
                                         alpha=0.4, zorder=8, cmap=self.get_colormap())

    def get_residuals_absmax(self):
        return np.max(np.absolute(list(self.picks_rel_mean_corrected.values())))

    def get_colormap(self):
        return plt.get_cmap(self.cmaps_box.currentText())

    def scatter_all_stations(self):
        stations, lats, lons = self.get_st_lat_lon_for_plot()

        self.sc = self.ax.scatter(lons, lats, s=self.pointsize * 3, facecolor='none', marker='.',
                                  zorder=10, picker=True, edgecolor='0.5', label='Not Picked',
                                  transform=ccrs.PlateCarree())

        self.cid = self.plotWidget.mpl_connect('pick_event', self.onpick)
        self._station_onpick_ids = stations
        if self.eventLoc:
            lats, lons = self.eventLoc
            self.sc_event = self.ax.scatter(lons, lats, s=5 * self.pointsize, facecolor='red', zorder=11,
                                            label='Event (might be outside map region)', marker='*',
                                            edgecolors='black',
                                            transform=ccrs.PlateCarree())

    def scatter_picked_stations(self):
        picks, uncertainties, lats, lons = self.get_picks_lat_lon()
        if len(lons) < 1 and len(lats) < 1:
            return

        phase = self.comboBox_phase.currentText()
        timeerrors = self.parameter['timeerrors{}'.format(phase)]
        sizes = np.array([self.pointsize * (5. - get_quality_class(uncertainty, timeerrors))
                          for uncertainty in uncertainties])

        cmap = self.get_colormap()

        vmin = vmax = None
        if self.subtract_mean_cb.isChecked():
            vmin, vmax = -self.get_residuals_absmax(), self.get_residuals_absmax()

        self.sc_picked = self.ax.scatter(lons, lats, s=sizes, edgecolors='white', cmap=cmap, vmin=vmin, vmax=vmax,
                                         c=picks, zorder=11, label='Picked', transform=ccrs.PlateCarree())
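
Marker size encodes pick quality here: get_quality_class maps an uncertainty onto classes 0 (best) to 4 (worst) using the per-phase timeerrors thresholds, so the size term pointsize * (5 - class) shrinks with degrading quality. For example, assuming the usual four class boundaries:

    timeerrors = [0.01, 0.02, 0.04, 0.08]  # example boundaries [s]
    pointsize = 25.
    for spe in (0.005, 0.03, 0.1):
        quality = get_quality_class(spe, timeerrors)  # 0 .. 4
        print(spe, quality, pointsize * (5. - quality))
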
    def annotate_ax(self):
        self.annotations = []
        stations, ys, xs = self.get_st_lat_lon_for_plot()
        # MP MP testing station highlighting if they have high impact on mean gradient of color map
        # if self.picks_rel:
        #     self.test_gradient()
        color_marked = {True: 'red',
                        False: 'white'}
        for st, x, y in zip(stations, xs, ys):
            if st in self.picks_rel:
                color = 'white'
            else:
                color = 'lightgrey'
            if st in self.marked_stations:
                color = 'red'
            self.annotations.append(
                self.ax.annotate(f'{st}', xy=(x + 0.003, y + 0.003), fontsize=self.pointsize / 4.,
                                 fontweight='semibold', color=color, alpha=0.8,
                                 transform=ccrs.PlateCarree(), zorder=14,
                                 path_effects=[PathEffects.withStroke(
                                     linewidth=self.pointsize / 15., foreground='k')]))

        self.legend = self.ax.legend(loc=1, framealpha=1)
        self.legend.set_zorder(100)
        self.legend.get_frame().set_facecolor((1, 1, 1, 0.95))

    def add_cbar(self, label):
        self.cbax_bg = inset_axes(self.ax, width="6%", height="75%", loc=5)
        cbax = inset_axes(self.ax, width='2%', height='70%', loc=5)
        cbar = self.ax.figure.colorbar(self.sc_picked, cax=cbax)
        cbar.set_label(label)
        cbax.yaxis.tick_left()
        cbax.yaxis.set_label_position('left')
        for spine in self.cbax_bg.spines.values():
            spine.set_visible(False)
        self.cbax_bg.yaxis.set_ticks([])
        self.cbax_bg.xaxis.set_ticks([])
        self.cbax_bg.patch.set_facecolor((1, 1, 1, 0.75))
        return cbar

    # handle drawings -----------------------------------------------------
    def refresh_drawings(self, picks=None, autopicks=None):
        self.picks_dict = picks
        self.autopicks_dict = autopicks
        self._refresh_drawings()

    def _refresh_drawings(self):
        self.remove_drawings()
        self.init_stations()
        self.init_colormap()
        self.draw_everything()

    def switch_annotations(self):
        if self.annotations_box.isChecked():
            self.annotate = True
        else:
            self.annotate = False
        self._refresh_drawings()

    def draw_everything(self):
        picktype = self.comboBox_am.currentText()
        picks_available = (self.picks_dict and picktype == 'manual') \
                          or (self.autopicks_dict and picktype == 'auto') \
                          or ((self.autopicks_dict or self.picks_dict) and picktype.startswith('hybrid'))

        if picks_available:
            self.init_picks()
            if len(self.picks) >= 3:
                self.init_picksgrid()
                self.draw_contour_filled()
        self.scatter_all_stations()
        if picks_available:
            self.scatter_picked_stations()
            if hasattr(self, 'sc_picked'):
                self.cbar = self.add_cbar(
                    label='Time relative to reference onset ({}) [s]'.format(self._reference_picktime)
                )
            self.comboBox_phase.setEnabled(True)
        else:
            self.comboBox_phase.setEnabled(False)
        if self.annotate:
            self.annotate_ax()
        self.plotWidget.draw_idle()

    def remove_drawings(self):
        self.remove_annotations()
        for item in reversed(self.highlighted_stations):
            item.remove()
            self.highlighted_stations.remove(item)
        if hasattr(self, 'cbar'):
            try:
                self.cbar.remove()
                self.cbax_bg.remove()
            except Exception as e:
                print('Warning: could not remove color bar or color bar bg.\nReason: {}'.format(e))
            del (self.cbar, self.cbax_bg)
        if hasattr(self, 'sc_picked'):
            self.sc_picked.remove()
            del self.sc_picked
        if hasattr(self, 'sc_event'):
            self.sc_event.remove()
            del self.sc_event
        if hasattr(self, 'contourf'):
            self.remove_contourf()
            del self.contourf
        if hasattr(self, 'cid'):
            self.plotWidget.mpl_disconnect(self.cid)
            del self.cid
        try:
            self.sc.remove()
        except Exception as e:
            print('Warning: could not remove station scatter plot.\nReason: {}'.format(e))
        try:
            self.legend.remove()
        except Exception as e:
            print('Warning: could not remove legend. Reason: {}'.format(e))
        self.plotWidget.draw_idle()

    def remove_contourf(self):
        for item in self.contourf.collections:
            item.remove()

    def remove_annotations(self):
        for annotation in self.annotations:
            annotation.remove()
        self.annotations = []

    def saveFigure(self):
        if self.canvas.fig:
            fd = QtWidgets.QFileDialog()
            fname, filter = fd.getSaveFileName(self.parent(), filter='Images (*.png *.svg *.jpg)')
            if not fname:
                return
            if not any([fname.endswith(item) for item in ['.png', '.svg', '.jpg']]):
                fname += '.png'
            self.canvas.fig.savefig(fname)

    def _warn(self, message):
        self.qmb = QtWidgets.QMessageBox(QtWidgets.QMessageBox.Icon.Warning, 'Warning', message)
        self.qmb.show()
@ -2,12 +2,20 @@
# -*- coding: utf-8 -*-

try:
# noinspection PyUnresolvedReferences
from urllib2 import urlopen
except:
from urllib.request import urlopen


def checkurl(url='https://ariadne.geophysik.ruhr-uni-bochum.de/trac/PyLoT/'):
def checkurl(url='https://git.geophysik.ruhr-uni-bochum.de/marcel/pylot/'):
"""
check if URL is available
:param url: url
:type url: str
:return: available: True/False
:rtype: bool
"""
try:
urlopen(url, timeout=1)
return True
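Usage stays a simple availability probe; the one-second timeout keeps an offline machine from blocking startup:

    if checkurl():
        print('PyLoT repository reachable')
    else:
        print('no network connection to the PyLoT repository')
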
@ -2,14 +2,344 @@
# -*- coding: utf-8 -*-

import glob
import logging
import os
import sys

import numpy as np
from obspy import UTCDateTime, read_inventory, read
from obspy.io.xseed import Parser

from pylot.core.util.utils import key_for_set_value, find_in_list, \
remove_underscores, gen_Pool
gen_Pool


class Metadata(object):

    def __init__(self, inventory=None, verbosity=1):
        self.inventories = []
        # saves read metadata objects (Parser/inventory) for a filename
        self.inventory_files = {}
        # saves filenames holding metadata for a seed_id
        # seed id as key, path to file as value
        self.seed_ids = {}
        self.stations_dict = {}
        # saves which metadata files are from obspy dmt
        self.obspy_dmt_invs = []
        if inventory:
            # make sure that no accidental backslashes mess up the path
            if isinstance(inventory, str):
                inventory = inventory.replace('\\', '/')
                inventory = os.path.abspath(inventory)
            if os.path.isdir(inventory):
                self.add_inventory(inventory)
            if os.path.isfile(inventory):
                self.add_inventory_file(inventory)
        self.verbosity = verbosity

    def __str__(self):
        repr = 'PyLoT Metadata object including the following inventories:\n\n'
        ntotal = len(self.inventories)
        for index, inventory in enumerate(self.inventories):
            if index < 2 or (ntotal - index) < 3:
                repr += '{}\n'.format(inventory)
            if ntotal > 4 and int(ntotal / 2) == index:
                repr += '...\n'
        if ntotal > 4:
            repr += '\nTotal of {} inventories. Use Metadata.inventories to see all.'.format(ntotal)
        return repr

    def __repr__(self):
        return self.__str__()

    def add_inventory(self, path_to_inventory, obspy_dmt_inv=False):
        """
        Add path to list of inventories.
        :param path_to_inventory: Path to a folder
        :type path_to_inventory: str
        :return: None
        """
        assert (os.path.isdir(path_to_inventory)), '{} is no directory'.format(path_to_inventory)
        if path_to_inventory not in self.inventories:
            self.inventories.append(path_to_inventory)
        if obspy_dmt_inv == True:
            self.obspy_dmt_invs.append(path_to_inventory)

    def add_inventory_file(self, path_to_inventory_file):
        """
        Add the folder in which the file exists to the list of inventories.
        :param path_to_inventory_file: full path including filename
        :type path_to_inventory_file: str
        :return: None
        """
        assert (os.path.isfile(path_to_inventory_file)), '{} is no file'.format(path_to_inventory_file)
        self.add_inventory(os.path.split(path_to_inventory_file)[0])
        if path_to_inventory_file not in self.inventory_files.keys():
            self.read_single_file(path_to_inventory_file)

    def remove_all_inventories(self):
        self.__init__()

    def remove_inventory(self, path_to_inventory):
        """
        Remove a path from inventories list. If path is not in inventories list, do nothing.
        :param path_to_inventory: Path to a folder
        """
        if not path_to_inventory in self.inventories:
            print('Path {} not in inventories list.'.format(path_to_inventory))
            return
        self.inventories.remove(path_to_inventory)
        for filename in list(self.inventory_files.keys()):
            if filename.startswith(path_to_inventory):
                del (self.inventory_files[filename])
        for seed_id in list(self.seed_ids.keys()):
            if self.seed_ids[seed_id].startswith(path_to_inventory):
                del (self.seed_ids[seed_id])
        # have to clean self.stations_dict as well
        # this will be rebuilt for the next init of the arraymap anyway, so just reset it
        self.stations_dict = {}

    def clear_inventory(self):
        for inv in self.obspy_dmt_invs:
            self.remove_inventory(inv)
        self.obspy_dmt_invs = []

    def get_metadata(self, seed_id, time=None):
        """
        Get metadata for seed id at time. When time is not specified, metadata for current time is fetched.
        :param seed_id: Seed id such as BW.WETR..HHZ (Network.Station.Location.Channel)
        :type seed_id: str
        :param time: Time for which the metadata should be returned
        :type time: UTCDateTime
        :return: Dictionary with keys data and invtype.
        data is an obspy.io.xseed.parser.Parser or an obspy.core.inventory.inventory.Inventory depending on the metadata
        file.
        invtype is a string denoting of which type the value of the data key is. It can take the values 'dless',
        'dseed', 'xml', 'resp', according to the filetype of the metadata.
        :rtype: dict
        """
        # try most recent data if no time is specified
        if not time:
            time = UTCDateTime()
        # get metadata for a specific seed_id, if not already read, try to read from inventories
        if not seed_id in self.seed_ids.keys():
            self._read_inventory_data(seed_id)
        # if seed id is not found read all inventories and try to find it there
        if not seed_id in self.seed_ids.keys():
            if self.verbosity:
                print('No data found for seed id {}. Trying to find it in all known inventories...'.format(seed_id))
            self.read_all()
            for inv_fname, metadata_dict in self.inventory_files.items():
                # use get_coordinates to check for seed_id
                try:
                    metadata_dict['data'].get_coordinates(seed_id, time)
                    self.seed_ids[seed_id] = inv_fname
                    if self.verbosity:
                        print('Found metadata for station {}!'.format(seed_id))
                    return metadata_dict
                except Exception as e:
                    continue
            print('Could not find metadata for station {}'.format(seed_id))
            return None
        fname = self.seed_ids[seed_id]
        return self.inventory_files[fname]
||||
def read_all(self):
|
||||
"""
|
||||
Read all metadata files found in all inventories
|
||||
"""
|
||||
# iterate over all inventory folders
|
||||
for inventory in self.inventories:
|
||||
# iterate over all inventory files in the current folder
|
||||
for inv_fname in os.listdir(inventory):
|
||||
inv_fname = os.path.join(inventory, inv_fname)
|
||||
if not self.read_single_file(inv_fname):
|
||||
continue
|
||||
|
||||
def read_single_file(self, inv_fname):
|
||||
"""
|
||||
Try to read a single file as Parser/Inventory and add its dictionary to inventory files if reading sudceeded.
|
||||
:param inv_fname: path/filename of inventory file
|
||||
:type inv_fname: str
|
||||
:rtype: None
|
||||
"""
|
||||
# return if it was read already
|
||||
if self.inventory_files.get(inv_fname, None):
|
||||
return
|
||||
|
||||
try:
|
||||
invtype, robj = self._read_metadata_file(inv_fname)
|
||||
if robj is None:
|
||||
return
|
||||
except Exception as e:
|
||||
print('Could not read file {}'.format(inv_fname))
|
||||
return
|
||||
self.inventory_files[inv_fname] = {'invtype': invtype,
|
||||
'data': robj}
|
||||
return True
|
||||
|
||||
def get_coordinates(self, seed_id, time=None):
|
||||
"""
|
||||
Get coordinates of given seed id.
|
||||
:param seed_id: Seed id such as BW.WETR..HHZ (Network.Station.Location.Channel)
|
||||
:type seed_id: str
|
||||
:param time: Used when a station has data available at multiple time intervals
|
||||
:type time: UTCDateTime
|
||||
:return: dict containing position information of the station
|
||||
:rtype: dict
|
||||
"""
|
||||
# try most recent data if no time is specified
|
||||
if not time:
|
||||
time = UTCDateTime()
|
||||
metadata = self.get_metadata(seed_id, time)
|
||||
if not metadata:
|
||||
return
|
||||
try:
|
||||
return metadata['data'].get_coordinates(seed_id, time)
|
||||
# no specific exception defined in obspy inventory
|
||||
except Exception as e:
|
||||
logging.warning(f'Could not get metadata for {seed_id}')
|
||||
|
||||
def get_all_coordinates(self):
|
||||
def stat_info_from_parser(parser):
|
||||
for station in parser.stations:
|
||||
station_name = station[0].station_call_letters
|
||||
network_name = station[0].network_code
|
||||
if not station_name in self.stations_dict.keys():
|
||||
st_id = '{}.{}'.format(network_name, station_name)
|
||||
self.stations_dict[st_id] = {'latitude': station[0].latitude,
|
||||
'longitude': station[0].longitude,
|
||||
'elevation': station[0].elevation}
|
||||
|
||||
def stat_info_from_inventory(inventory):
|
||||
for network in inventory.networks:
|
||||
for station in network.stations:
|
||||
station_name = station.code
|
||||
network_name = network.code
|
||||
if not station_name in self.stations_dict.keys():
|
||||
st_id = '{}.{}'.format(network_name, station_name)
|
||||
self.stations_dict[st_id] = {'latitude': station[0].latitude,
|
||||
'longitude': station[0].longitude,
|
||||
'elevation': station[0].elevation}
|
||||
|
||||
read_stat = {'xml': stat_info_from_inventory,
|
||||
'dless': stat_info_from_parser}
|
||||
|
||||
self.read_all()
|
||||
for item in self.inventory_files.values():
|
||||
inventory = item['data']
|
||||
invtype = item['invtype']
|
||||
read_stat[invtype](inventory)
|
||||
|
||||
return self.stations_dict
|
||||
|
||||
def get_paz(self, seed_id, time):
|
||||
"""
|
||||
|
||||
:param seed_id: Seed id such as BW.WETR..HHZ (Network.Station.Location.Channel)
|
||||
:type seed_id: str
|
||||
:param time: Used when a station has data available at multiple time intervals
|
||||
:type time: UTCDateTime
|
||||
:rtype: dict
|
||||
"""
|
||||
metadata = self.get_metadata(seed_id)
|
||||
if not metadata:
|
||||
return
|
||||
if metadata['invtype'] in ['dless', 'dseed']:
|
||||
return metadata['data'].get_paz(seed_id, time)
|
||||
elif metadata['invtype'] in ['resp', 'xml']:
|
||||
resp = metadata['data'].get_response(seed_id, time)
|
||||
return resp.get_paz(seed_id)
|
||||
|
||||
def _read_inventory_data(self, seed_id):
|
||||
for inventory in self.inventories:
|
||||
if self._read_metadata_iterator(path_to_inventory=inventory, station_seed_id=seed_id):
|
||||
return
|
||||
|
||||
def _read_metadata_iterator(self, path_to_inventory, station_seed_id):
|
||||
"""
|
||||
Search for metadata for a specific station iteratively.
|
||||
"""
|
||||
network, station, location, channel = station_seed_id.split('.')
|
||||
# seach for station seed id in filenames in invetory
|
||||
fnames = glob.glob(os.path.join(path_to_inventory, '*' + station_seed_id + '*'))
|
||||
if not fnames:
|
||||
# search for station name in filename
|
||||
fnames = glob.glob(os.path.join(path_to_inventory, '*' + station + '*'))
|
||||
if not fnames:
|
||||
# search for network name in filename
|
||||
fnames = glob.glob(os.path.join(path_to_inventory, '*' + network + '*'))
|
||||
if not fnames:
|
||||
if self.verbosity:
|
||||
print('Could not find filenames matching station name, network name or seed id')
|
||||
return
|
||||
for fname in fnames:
|
||||
if fname in self.inventory_files.keys():
|
||||
if self.inventory_files[fname]:
|
||||
# file already read
|
||||
continue
|
||||
invtype, robj = self._read_metadata_file(os.path.join(path_to_inventory, fname))
|
||||
try:
|
||||
# robj.get_coordinates(station_seed_id) # TODO: Commented out, failed with Parser, is this needed?
|
||||
self.inventory_files[fname] = {'invtype': invtype,
|
||||
'data': robj}
|
||||
if station_seed_id in self.seed_ids.keys():
|
||||
print('WARNING: Overwriting metadata for station {}'.format(station_seed_id))
|
||||
self.seed_ids[station_seed_id] = fname
|
||||
return True
|
||||
except Exception as e:
|
||||
continue
|
||||
print('Could not find metadata for station_seed_id {} in path {}'.format(station_seed_id, path_to_inventory))
|
||||
|
||||
def _read_metadata_file(self, path_to_inventory_filename):
|
||||
"""
|
||||
function reading metadata files (either dataless seed, xml or resp)
|
||||
:param path_to_inventory_filename:
|
||||
:return: file type/ending, inventory object (Parser or Inventory)
|
||||
:rtype: (str, obspy.io.xseed.Parser or obspy.core.inventory.inventory.Inventory)
|
||||
"""
|
||||
# functions used to read metadata for different file endings (or file types)
|
||||
read_functions = {'dless': self._read_dless,
|
||||
'dataless': self._read_dless,
|
||||
'dseed': self._read_dless,
|
||||
'xml': self._read_inventory_file,
|
||||
'resp': self._read_inventory_file}
|
||||
file_ending = path_to_inventory_filename.split('.')[-1]
|
||||
if file_ending in read_functions.keys():
|
||||
robj, exc = read_functions[file_ending](path_to_inventory_filename)
|
||||
if exc is not None:
|
||||
raise exc
|
||||
return file_ending, robj
|
||||
# in case file endings did not match the above keys, try and error
|
||||
for file_type in ['dless', 'xml']:
|
||||
try:
|
||||
robj, exc = read_functions[file_type](path_to_inventory_filename)
|
||||
if exc is None:
|
||||
if self.verbosity:
|
||||
print('Read file {} as {}'.format(path_to_inventory_filename, file_type))
|
||||
return file_type, robj
|
||||
except Exception as e:
|
||||
if self.verbosity:
|
||||
print('Could not read file {} as {}'.format(path_to_inventory_filename, file_type))
|
||||
return None, None
|
||||
|
||||
@staticmethod
|
||||
def _read_dless(path_to_inventory):
|
||||
exc = None
|
||||
try:
|
||||
parser = Parser(path_to_inventory)
|
||||
except Exception as exc:
|
||||
parser = None
|
||||
return parser, exc
|
||||
|
||||
@staticmethod
|
||||
def _read_inventory_file(path_to_inventory):
|
||||
exc = None
|
||||
try:
|
||||
inv = read_inventory(path_to_inventory)
|
||||
except Exception as exc:
|
||||
inv = None
|
||||
return inv, exc
|
||||
|
||||
|
||||
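A quick usage sketch of the Metadata class above (the inventory path and seed id are made-up placeholders; the Metadata(inventory=...) keyword follows the call used further below in generate_array_maps.py):

    from obspy import UTCDateTime

    metadata = Metadata(inventory='/data/inventories')  # hypothetical folder of dataless/RESP/StationXML files
    coords = metadata.get_coordinates('BW.WETR..HHZ', time=UTCDateTime())
    paz = metadata.get_paz('BW.WETR..HHZ', time=UTCDateTime())
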
def time_from_header(header):

@@ -32,25 +362,25 @@ def check_time(datetime):
    :type datetime: list
    :return: returns True if values are in the supposed range, returns False otherwise

    >>> check_time([1999, 01, 01, 23, 59, 59, 999000])
    >>> check_time([1999, 1, 1, 23, 59, 59, 999000])
    True
    >>> check_time([1999, 01, 01, 23, 59, 60, 999000])
    >>> check_time([1999, 1, 1, 23, 59, 60, 999000])
    False
    >>> check_time([1999, 01, 01, 23, 59, 59, 1000000])
    >>> check_time([1999, 1, 1, 23, 59, 59, 1000000])
    False
    >>> check_time([1999, 01, 01, 23, 60, 59, 999000])
    >>> check_time([1999, 1, 1, 23, 60, 59, 999000])
    False
    >>> check_time([1999, 01, 01, 23, 60, 59, 999000])
    >>> check_time([1999, 1, 1, 23, 60, 59, 999000])
    False
    >>> check_time([1999, 01, 01, 24, 59, 59, 999000])
    >>> check_time([1999, 1, 1, 24, 59, 59, 999000])
    False
    >>> check_time([1999, 01, 31, 23, 59, 59, 999000])
    >>> check_time([1999, 1, 31, 23, 59, 59, 999000])
    True
    >>> check_time([1999, 02, 30, 23, 59, 59, 999000])
    >>> check_time([1999, 2, 30, 23, 59, 59, 999000])
    False
    >>> check_time([1999, 02, 29, 23, 59, 59, 999000])
    >>> check_time([1999, 2, 29, 23, 59, 59, 999000])
    False
    >>> check_time([2000, 02, 29, 23, 59, 59, 999000])
    >>> check_time([2000, 2, 29, 23, 59, 59, 999000])
    True
    >>> check_time([2000, 13, 29, 23, 59, 59, 999000])
    False

@@ -62,6 +392,7 @@ def check_time(datetime):
        return False

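For reference, a minimal sketch of a validation routine that satisfies the doctests above (an illustration only; the actual check_time body lies outside this hunk) is to delegate the range checks to the standard library:

    from datetime import datetime as dt

    def check_time_sketch(time_values):
        # time_values: [year, month, day, hour, minute, second, microsecond]
        try:
            dt(*time_values)
            return True
        except ValueError:
            return False
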
# TODO: change root to datapath
def get_file_list(root_dir):
    """
    Function uses a directory to get all the *.gse files from it.

@@ -125,7 +456,7 @@ def evt_head_check(root_dir, out_dir=None):
    """
    if not out_dir:
        print('WARNING: files are going to be overwritten!')
        inp = str(raw_input('Continue? [y/N]'))
        inp = str(input('Continue? [y/N]'))
        if not inp == 'y':
            sys.exit()
    filelist = get_file_list(root_dir)
@@ -192,16 +523,55 @@ def read_metadata(path_to_inventory):
        robj = inv[invtype]
    else:
        print("Reading metadata information from inventory-xml file ...")
        robj = inv[invtype]
        robj = read_inventory(inv[invtype])
    return invtype, robj


# idea to optimize read_metadata
# def read_metadata_new(path_to_inventory):
#     metadata_objects = []
#     # read multiple files from directory
#     if os.path.isdir(path_to_inventory):
#         fnames = os.listdir(path_to_inventory)
#     # read single file
#     elif os.path.isfile(path_to_inventory):
#         fnames = [path_to_inventory]
#     else:
#         print("Neither dataless-SEED file, inventory-xml file nor "
#               "RESP-file found!")
#         print("!!WRONG CALCULATION OF SOURCE PARAMETERS!!")
#         fnames = []
#
#     for fname in fnames:
#         path_to_inventory_filename = os.path.join(path_to_inventory, fname)
#         try:
#             ftype, robj = read_metadata_file(path_to_inventory_filename)
#             metadata_objects.append((ftype, robj))
#         except Exception as e:
#             print('Could not read metadata file {} '
#                   'because of the following Exception: {}'.format(path_to_inventory_filename, e))
#     return metadata_objects

def restitute_trace(input_tuple):
    def no_metadata(tr, seed_id):
        print('no metadata file found '
              'for trace {0}'.format(seed_id))
        return tr, True

    tr, metadata, unit, force = input_tuple

    remove_trace = False

    seed_id = tr.get_id()

    mdata = metadata.get_metadata(seed_id, time=tr.stats.starttime)
    if not mdata:
        return no_metadata(tr, seed_id)

    invtype = mdata['invtype']
    inobj = mdata['data']

    # check whether this trace has already been corrected
    if 'processing' in tr.stats.keys() \
            and np.any(['remove' in p for p in tr.stats.processing]) \
@@ -213,8 +583,7 @@ def restitute_trace(input_tuple):
    if invtype == 'resp':
        fresp = find_in_list(inobj, seed_id)
        if not fresp:
            raise IOError('no response file found '
                          'for trace {0}'.format(seed_id))
            return no_metadata(tr, seed_id)
        fname = fresp
        seedresp = dict(filename=fname,
                        date=stime,
@@ -225,20 +594,16 @@ def restitute_trace(input_tuple):
            fname = Parser(find_in_list(inobj, seed_id))
        else:
            fname = inobj
        seedresp = dict(filename=fname,
                        date=stime,
                        units=unit)
        kwargs = dict(pre_filt=prefilt, seedresp=seedresp)
        paz = fname.get_paz(tr.id, datetime=tr.stats.starttime)
        kwargs = dict(pre_filt=prefilt, paz_remove=paz, remove_sensitivity=True)
    elif invtype == 'xml':
        invlist = inobj
        if len(invlist) > 1:
            finv = find_in_list(invlist, seed_id)
            inventory = find_in_list(invlist, seed_id)
        else:
            finv = invlist[0]
        inventory = read_inventory(finv, format='STATIONXML')
    elif invtype == None:
        print("No restitution possible, as there are no station-meta data available!")
        return tr, True
            inventory = invlist[0]
    elif invtype is None:
        return no_metadata(tr, seed_id)
    else:
        remove_trace = True
    # apply restitution to data
@@ -248,14 +613,20 @@ def restitute_trace(input_tuple):
    if invtype in ['resp', 'dless']:
        try:
            tr.simulate(**kwargs)
            print("Done")
        except ValueError as e:
            vmsg = '{0}'.format(e)
            print(vmsg)

    else:
        tr.attach_response(inventory)
        tr.remove_response(output=unit,
                           pre_filt=prefilt)
        try:
            tr.attach_response(inventory)
            tr.remove_response(output=unit,
                               pre_filt=prefilt)
        except UnboundLocalError as e:
            vmsg = '{0}'.format(e)
            print(vmsg)

        except ValueError as e:
            msg0 = 'Response for {0} not found in Parser'.format(seed_id)
            msg1 = 'evalresp failed to calculate response'
@@ -269,32 +640,27 @@ def restitute_trace(input_tuple):
    return tr, remove_trace


def restitute_data(data, invtype, inobj, unit='VEL', force=False, ncores=0):
def restitute_data(data, metadata, unit='VEL', force=False, ncores=0):
    """
    takes a data stream and a path_to_inventory and returns the corrected
    waveform data stream
    :param data: seismic data stream
    :param invtype: type of found metadata
    :param inobj: either list of metadata files or `obspy.io.xseed.Parser`
        object
    :param unit: unit to correct for (default: 'VEL')
    :param force: force restitution for already corrected traces (default:
        False)
    :return: corrected data stream
    """

    restflag = list()

    data = remove_underscores(data)
    # data = remove_underscores(data)

    # loop over traces
    input_tuples = []
    for tr in data:
        input_tuples.append((tr, invtype, inobj, unit, force))
        input_tuples.append((tr, metadata, unit, force))
        data.remove(tr)

    pool = gen_Pool(ncores)
    result = pool.map(restitute_trace, input_tuples)
    result = pool.imap_unordered(restitute_trace, input_tuples)
    pool.close()

    for tr, remove_trace in result:
@@ -305,10 +671,6 @@ def restitute_data(data, invtype, inobj, unit='VEL', force=False, ncores=0):
    # better try restitution for smaller subsets of data (e.g. station by
    # station)

    # if len(restflag) > 0:
    #     restflag = bool(np.all(restflag))
    # else:
    #     restflag = False
    return data

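A sketch of how the reworked restitute_data is meant to be called (the stream path and inventory folder are placeholders; Metadata(inventory=...) follows the call used in generate_array_maps.py below):

    from obspy import read

    st = read('/data/e0001.024.16/waveforms.mseed')  # hypothetical waveform file
    metadata = Metadata(inventory='/data/e0001.024.16/resp')
    st = restitute_data(st, metadata, unit='VEL', ncores=4)
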
@@ -344,7 +706,7 @@ def get_prefilt(trace, tlow=(0.5, 0.9), thi=(5., 2.), verbosity=0):
    fny = trace.stats.sampling_rate / 2
    fc21 = fny - (fny * thi[0] / 100.)
    fc22 = fny - (fny * thi[1] / 100.)
    return (tlow[0], tlow[1], fc21, fc22)
    return tlow[0], tlow[1], fc21, fc22

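Worked example of the corner-frequency arithmetic above: for a trace sampled at 100 Hz the Nyquist frequency is 50 Hz, so with the defaults tlow=(0.5, 0.9) and thi=(5., 2.):

    fny = 100. / 2                    # 50.0 Hz
    fc21 = fny - (fny * 5. / 100.)    # 47.5 Hz
    fc22 = fny - (fny * 2. / 100.)    # 49.0 Hz
    # get_prefilt returns (0.5, 0.9, 47.5, 49.0)
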
if __name__ == "__main__":

@@ -9,13 +9,12 @@ Created on Wed Feb 26 12:31:25 2014
import os
import platform

from pylot.core.util.utils import readDefaultFilterInformation
from pylot.core.loc import hypo71
from pylot.core.loc import hypodd
from pylot.core.loc import hyposat
from pylot.core.loc import nll
from pylot.core.loc import velest

from pylot.core.util.utils import readDefaultFilterInformation

# determine system dependent path separator
system_name = platform.system()
@@ -27,9 +26,7 @@ elif system_name == "Windows":
# suffix for phase name if not phase identified by last letter (P, p, etc.)
ALTSUFFIX = ['diff', 'n', 'g', '1', '2', '3']

FILTERDEFAULTS = readDefaultFilterInformation(os.path.join(os.path.expanduser('~'),
                                                           '.pylot',
                                                           'pylot.in'))
FILTERDEFAULTS = readDefaultFilterInformation()

TIMEERROR_DEFAULTS = os.path.join(os.path.expanduser('~'),
                                  '.pylot',
@@ -37,59 +34,8 @@ TIMEERROR_DEFAULTS = os.path.join(os.path.expanduser('~'),

OUTPUTFORMATS = {'.xml': 'QUAKEML',
                 '.cnv': 'CNV',
                 '.obs': 'NLLOC_OBS'}
                 '.obs': 'NLLOC_OBS',
                 '_focmec.in': 'FOCMEC',
                 '.pha': 'HYPODD'}

LOCTOOLS = dict(nll=nll, hyposat=hyposat, velest=velest, hypo71=hypo71, hypodd=hypodd)


class SetChannelComponents(object):
    def __init__(self):
        self.setDefaultCompPosition()

    def setDefaultCompPosition(self):
        # default component order
        self.compPosition_Map = dict(Z=2, N=1, E=0)
        self.compName_Map = {'3': 'Z',
                             '1': 'N',
                             '2': 'E'}

    def _getCurrentPosition(self, component):
        for key, value in self.compName_Map.items():
            if value == component:
                return key, value
        errMsg = 'getCurrentPosition: Could not find former position of component {}.'.format(component)
        raise ValueError(errMsg)

    def _switch(self, component, component_alter):
        # without switching, multiple definitions of the same alter_comp would be possible
        old_alter_comp, _ = self._getCurrentPosition(component)
        old_comp = self.compName_Map[component_alter]
        if not old_alter_comp == component_alter and not old_comp == component:
            self.compName_Map[old_alter_comp] = old_comp
            print('switch: Automatically switched component {} to {}'.format(old_alter_comp, old_comp))

    def setCompPosition(self, component_alter, component, switch=True):
        component_alter = str(component_alter)
        if component_alter not in self.compName_Map.keys():
            errMsg = 'setCompPosition: Unrecognized alternative component {}. Expecting one of {}.'
            raise ValueError(errMsg.format(component_alter, self.compName_Map.keys()))
        if component not in self.compPosition_Map.keys():
            errMsg = 'setCompPosition: Unrecognized target component {}. Expecting one of {}.'
            raise ValueError(errMsg.format(component, self.compPosition_Map.keys()))
        print('setCompPosition: set component {} to {}'.format(component_alter, component))
        if switch:
            self._switch(component, component_alter)
        self.compName_Map[component_alter] = component

    def getCompPosition(self, component):
        return self._getCurrentPosition(component)[0]

    def getPlotPosition(self, component):
        component = str(component)
        if component in self.compPosition_Map.keys():
            return self.compPosition_Map[component]
        elif component in self.compName_Map.keys():
            return self.compPosition_Map[self.compName_Map[component]]
        else:
            errMsg = 'getCompPosition: Unrecognized component {}. Expecting one of {} or {}.'
            raise ValueError(errMsg.format(component, self.compPosition_Map.keys(), self.compName_Map.keys()))

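A short usage sketch of SetChannelComponents (the values follow the default maps above):

    scc = SetChannelComponents()
    scc.setCompPosition('1', 'N')   # declare channel '1' as the N component
    scc.getCompPosition('N')        # -> '1'
    scc.getPlotPosition('1')        # -> 1, i.e. plotted in the N panel
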
@@ -6,7 +6,9 @@ import os
from obspy import UTCDateTime
from obspy.core.event import Event as ObsPyEvent
from obspy.core.event import Origin, ResourceIdentifier

from pylot.core.io.phases import picks_from_picksdict
from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT


class Event(ObsPyEvent):
@@ -15,158 +17,310 @@ class Event(ObsPyEvent):
    '''

    def __init__(self, path):
        """
        Initialize event by event directory
        :param path: path to event directory
        :type path: str
        """
        self.pylot_id = path.split('/')[-1]
        # initialize super class
        super(Event, self).__init__(resource_id=ResourceIdentifier('smi:local/' + self.pylot_id))
        self.path = path
        self.database = path.split('/')[-2]
        self.datapath = path.split('/')[-3]
        self.rootpath = '/' + os.path.join(*path.split('/')[:-3])
        self.datapath = os.path.split(path)[0]  # path.split('/')[-3]
        self.pylot_autopicks = {}
        self.pylot_picks = {}
        self.notes = ''
        self._testEvent = False
        self._refEvent = False
        self.get_notes()
        self.get_obspy_event_info()
        self.dirty = False

    def get_notes_path(self):
        """
        The notes file is freely editable by the user and can contain notes regarding the event
        :return: path to notes file
        :rtype: str
        """
        notesfile = os.path.join(self.path, 'notes.txt')
        return notesfile

    def get_obspy_event_info(self):
        infile_pickle = os.path.join(self.path, 'info/event.pkl')
        if not os.path.isfile(infile_pickle):
            return
        try:
            event_dmt = qml_from_obspyDMT(infile_pickle)
        except Exception as e:
            print('Could not get obspy event info: {}'.format(e))
            return
        self.magnitudes = event_dmt.magnitudes
        self.origins = event_dmt.origins

    def get_notes(self):
        """
        Set the self.notes attribute to the content of the notes file
        :return:
        :rtype: None
        """
        notesfile = self.get_notes_path()
        if os.path.isfile(notesfile):
            with open(notesfile) as infile:
                path = str(infile.readlines()[0].split('\n')[0])
                text = '[eventInfo: ' + path + ']'
                lines = infile.readlines()
                if not lines:
                    return
                text = lines[0]
                self.addNotes(text)
                try:
                    datetime = UTCDateTime(path.split('/')[-1])
                    datetime = UTCDateTime(self.path.split('/')[-1])
                    origin = Origin(resource_id=self.resource_id, time=datetime, latitude=0, longitude=0, depth=0)
                    self.origins.append(origin)
                except:
                    pass

    def addNotes(self, notes):
        """
        Set new notes string
        :param notes: notes to save in Event object
        :type notes: str
        :return:
        :rtype: None
        """
        self.notes = str(notes)

    def clearNotes(self):
        """
        Clear event notes
        :return:
        :rtype: None
        """
        self.notes = None

    def isRefEvent(self):
        """
        Return reference event flag
        :return: True if event is a reference event
        :rtype: bool
        """
        return self._refEvent

    def isTestEvent(self):
        """
        Return test event flag
        :return: True if event is a test event
        :rtype: bool
        """
        return self._testEvent

    def setRefEvent(self, bool):
        """
        Set reference event flag
        :param bool: new reference event flag
        :type bool: bool
        :return:
        :rtype: None
        """
        self._refEvent = bool
        if bool: self._testEvent = False

    def setTestEvent(self, bool):
        """
        Set test event flag
        :param bool: new test event flag
        :type bool: bool
        :return:
        :rtype: None
        """
        self._testEvent = bool
        if bool: self._refEvent = False

    def clearObsPyPicks(self, picktype):
        """
        Remove picks of a certain type from event
        :param picktype: type of picks to remove, 'auto' or 'manual'
        :type picktype: str
        :return:
        :rtype: None
        """
        for index, pick in reversed(list(enumerate(self.picks))):
            if picktype in str(pick.method_id):
                self.picks.pop(index)
                self.dirty = True

    def addPicks(self, picks):
        '''
        """
        add pylot picks and overwrite existing ones
        '''
        :param picks: picks to add to event in pick dictionary
        :type picks: dict
        :return:
        :rtype: None
        """
        for station in picks:
            self.pylot_picks[station] = picks[station]
        # add ObsPy picks (clear old manual and copy all new manual from pylot)
        self.clearObsPyPicks('manual')
        self.picks += picks_from_picksdict(self.pylot_picks)
        self.dirty = True

    def addAutopicks(self, autopicks):
        """
        Add automatic picks to event
        :param autopicks: automatic picks to add to event
        :return:
        :rtype: None
        """
        for station in autopicks:
            self.pylot_autopicks[station] = autopicks[station]
        # add ObsPy picks (clear old auto and copy all new auto from pylot)
        self.clearObsPyPicks('auto')
        self.picks += picks_from_picksdict(self.pylot_autopicks)
        self.dirty = True

    def setPick(self, station, pick):
        """
        Set pick for a station
        :param station: station name
        :type station: str
        :param pick:
        :type pick: dict
        :return:
        :rtype:
        """
        if pick:
            self.pylot_picks[station] = pick
        else:
            try:
                self.pylot_picks.pop(station)
                if station in self.pylot_picks:
                    self.pylot_picks.pop(station)
            except Exception as e:
                print('Could not remove pick {} from station {}: {}'.format(pick, station, e))
        self.clearObsPyPicks('manual')
        self.picks += picks_from_picksdict(self.pylot_picks)
        self.dirty = True

    def setPicks(self, picks):
        '''
        set pylot picks and delete and overwrite all existing
        '''
        """
        Set pylot picks and delete and overwrite all existing
        :param picks: new picks
        :type picks: dict
        :return:
        :rtype: None
        """
        self.pylot_picks = picks
        self.clearObsPyPicks('manual')
        self.picks += picks_from_picksdict(self.pylot_picks)
        self.dirty = True

    def getPick(self, station):
        """
        Get pick at station
        :param station: station name
        :type station: str
        :return: pick dictionary of station
        :rtype: dict
        """
        if station in self.pylot_picks.keys():
            return self.pylot_picks[station]

    def getPicks(self):
        """
        Return pylot picks
        :return:
        :rtype: dict
        """
        return self.pylot_picks

    def setAutopick(self, station, pick):
        """
        Set autopylot pick at station
        :param station: station name
        :type station: str
        :param pick:
        :type pick: dict
        :return:
        :rtype: None
        """
        if pick:
            self.pylot_autopicks[station] = pick
        else:
            try:
                self.pylot_autopicks.pop(station)
                if station in self.pylot_autopicks:
                    self.pylot_autopicks.pop(station)
            except Exception as e:
                print('Could not remove pick {} from station {}: {}'.format(pick, station, e))
        self.clearObsPyPicks('auto')
        self.picks += picks_from_picksdict(self.pylot_autopicks)
        self.dirty = True

    def setAutopicks(self, picks):
        '''
        set pylot picks and delete and overwrite all existing
        '''
        """
        Set autopylot picks and delete and overwrite all existing
        :param picks: new picks
        :type picks: dict
        :return:
        :rtype: None
        """
        self.pylot_autopicks = picks
        self.clearObsPyPicks('auto')
        self.picks += picks_from_picksdict(self.pylot_autopicks)
        self.dirty = True

    def getAutopick(self, station):
        """
        Return autopick at station
        :param station: station name
        :type station: str
        :return: pick dictionary
        :rtype: dict
        """
        if station in self.pylot_autopicks.keys():
            return self.pylot_autopicks[station]

    def getAutopicks(self):
        """
        Get autopicks of event
        :return: dict containing automatic picks
        :rtype: dict
        """
        return self.pylot_autopicks

    def save(self, filename):
        '''
        Save PyLoT Event to a file.
        """
        Save PyLoT Event to a file.
        Can be loaded by using event.load(filename).
        '''
        Uses pickling to save event object to file
        :param filename: filename to save project under
        :type filename: str
        :return:
        :rtype: None
        """
        try:
            import cPickle
            import pickle
        except ImportError:
            import _pickle as cPickle
            import _pickle as pickle

        try:
            outfile = open(filename, 'wb')
            cPickle.dump(self, outfile, -1)
            pickle.dump(self, outfile, -1)
            self.dirty = False
        except Exception as e:
            print('Could not pickle PyLoT event. Reason: {}'.format(e))

    @staticmethod
    def load(filename):
        '''
        Load project from filename.
        '''
        """
        Load project from filename
        :param filename: to load event file
        :type filename: str
        :return: event loaded from file
        :rtype: Event
        """
        try:
            import cPickle
            import pickle
        except ImportError:
            import _pickle as cPickle
            import _pickle as pickle
        infile = open(filename, 'rb')
        event = cPickle.load(infile)
        event = pickle.load(infile)
        event.dirty = False
        print('Loaded %s' % filename)
        return event

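A hypothetical round trip with the Event class above (the event directory and output path are placeholders):

    event = Event('/data/EVENT_DB/e0001.024.16')
    event.addNotes('preliminary picks only')
    event.save('/tmp/e0001.024.16.pkl')
    restored = Event.load('/tmp/e0001.024.16.pkl')  # restored.dirty is False
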
97
pylot/core/util/generate_array_maps.py
Executable file
@@ -0,0 +1,97 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# small script that creates array maps for each event within a previously generated PyLoT project

import os

num_thread = "16"
os.environ["OMP_NUM_THREADS"] = num_thread
os.environ["OPENBLAS_NUM_THREADS"] = num_thread
os.environ["MKL_NUM_THREADS"] = num_thread
os.environ["VECLIB_MAXIMUM_THREADS"] = num_thread
os.environ["NUMEXPR_NUM_THREADS"] = num_thread
os.environ["NUMEXPR_MAX_THREADS"] = num_thread

import multiprocessing
import sys
import glob
import matplotlib

matplotlib.use('Qt5Agg')
sys.path.append(os.path.join('/'.join(sys.argv[0].split('/')[:-1]), '../../..'))

from PyLoT import Project
from pylot.core.util.dataprocessing import Metadata
from pylot.core.util.array_map import Array_map

import matplotlib.pyplot as plt
import argparse


def main(project_file_path, manual=False, auto=True, file_format='png', f_ext='', ncores=None):
    project = Project.load(project_file_path)
    nEvents = len(project.eventlist)
    input_list = []
    print('\n')
    for index, event in enumerate(project.eventlist):
        kwargs = dict(project=project, event=event, nEvents=nEvents, index=index, manual=manual, auto=auto,
                      file_format=file_format, f_ext=f_ext)
        input_list.append(kwargs)

    if ncores == 1:
        for item in input_list:
            array_map_worker(item)
    else:
        pool = multiprocessing.Pool(ncores, maxtasksperchild=1000)
        pool.map(array_map_worker, input_list)
        pool.close()
        pool.join()


def array_map_worker(input_dict):
    event = input_dict['event']
    eventdir = event.path
    print('Working on event: {} ({}/{})'.format(eventdir, input_dict['index'] + 1, input_dict['nEvents']))
    xml_picks = glob.glob(os.path.join(eventdir, f'*{input_dict["f_ext"]}.xml'))
    if not len(xml_picks):
        print('Event {} does not have any picks associated with event file extension {}'.format(eventdir,
                                                                                                input_dict['f_ext']))
        return
    # check for picks
    manualpicks = event.getPicks()
    autopicks = event.getAutopicks()
    # prepare event and get metadata
    metadata_path = os.path.join(eventdir, 'resp')
    metadata = None
    for pick_type in ['manual', 'auto']:
        if pick_type == 'manual' and (not manualpicks or not input_dict['manual']):
            continue
        if pick_type == 'auto' and (not autopicks or not input_dict['auto']):
            continue
        if not metadata:
            metadata = Metadata(inventory=metadata_path, verbosity=0)

        # create figure to plot on
        fig, ax = plt.subplots(figsize=(15, 9))
        # create array map object
        map = Array_map(None, metadata, parameter=input_dict['project'].parameter, axes=ax,
                        width=2.13e6, height=1.2e6, pointsize=25., linewidth=1.0)
        # set combobox to auto/manual to plot correct pick type
        map.comboBox_am.setCurrentIndex(map.comboBox_am.findText(pick_type))
        # add picks to map and save file
        map.refresh_drawings(manualpicks, autopicks)
        fpath_out = os.path.join(eventdir, 'array_map_{}_{}{}.{}'.format(event.pylot_id, pick_type, input_dict['f_ext'],
                                                                         input_dict['file_format']))
        fig.savefig(fpath_out, dpi=300.)
        print('Wrote file: {}'.format(fpath_out))


if __name__ == '__main__':
    cl = argparse.ArgumentParser()
    cl.add_argument('--dataroot', help='Directory containing the PyLoT .plp file', type=str)
    cl.add_argument('--infiles', help='.plp files to use', nargs='+')
    cl.add_argument('--ncores', help='Specify number of parallel processes', type=int, default=1)  # typo 'hepl' fixed
    args = cl.parse_args()

    for infile in args.infiles:
        main(os.path.join(args.dataroot, infile), f_ext='_correlated_0.03-0.1', ncores=args.ncores)
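A hypothetical invocation of the script above, following its argparse definition (paths and file names are placeholders):

    python generate_array_maps.py --dataroot /data/projects --infiles alparray.plp --ncores 4
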
104
pylot/core/util/gui.py
Normal file
@@ -0,0 +1,104 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from functools import lru_cache

try:
    import pyqtgraph as pg
except Exception as e:
    pg = None  # keep the name defined so 'if pg:' below cannot raise a NameError
    print('Warning: Could not import module pyqtgraph.')
try:
    from PySide2 import QtCore
except Exception as e:
    print('Warning: Could not import module QtCore.')

from pylot.core.util.utils import pick_color


def pick_linestyle_pg(picktype, key):
    """
    Get Qt line style by picktype and pick parameter (earliest/latest possible pick, symmetric picking error or
    most probable pick)
    :param picktype: 'manual' or 'automatic'
    :type picktype: str
    :param key: which pick parameter should be plotted, 'mpp', 'epp', 'lpp' or 'spe'
    :type key: str
    :return: Qt line style parameters
    :rtype:
    """
    linestyles_manu = {'mpp': (QtCore.Qt.SolidLine, 2),
                       'epp': (QtCore.Qt.DashLine, 1),
                       'lpp': (QtCore.Qt.DashLine, 1),
                       'spe': (QtCore.Qt.DashLine, 1)}
    linestyles_auto = {'mpp': (QtCore.Qt.DotLine, 2),
                       'epp': (QtCore.Qt.DashDotLine, 1),
                       'lpp': (QtCore.Qt.DashDotLine, 1),
                       'spe': (QtCore.Qt.DashDotLine, 1)}
    linestyles = {'manual': linestyles_manu,
                  'auto': linestyles_auto}
    return linestyles[picktype][key]


def which(program, parameter):
    """
    takes a program name and returns the full path to the executable or None
    modified after: http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
    :param program: name of the desired external program
    :type program: str
    :return: full path of the executable file
    :rtype: str
    """
    try:
        from PySide2.QtCore import QSettings
        settings = QSettings()
        for key in settings.allKeys():
            if 'binPath' in key:
                os.environ['PATH'] += ':{0}'.format(settings.value(key))
        nllocpath = ":" + parameter.get('nllocbin')
        os.environ['PATH'] += nllocpath
    except Exception as e:
        print(e)

    def is_exe(fpath):
        return os.path.exists(fpath) and os.access(fpath, os.X_OK)

    def ext_candidates(fpath):
        yield fpath
        for ext in os.environ.get("PATHEXT", "").split(os.pathsep):
            yield fpath + ext

    fpath, fname = os.path.split(program)
    if fpath:
        if is_exe(program):
            return program
    else:
        for path in os.environ["PATH"].split(os.pathsep):
            exe_file = os.path.join(path, program)
            for candidate in ext_candidates(exe_file):
                if is_exe(candidate):
                    return candidate

    return None


@lru_cache(maxsize=128)
def make_pen(picktype, phase, key, quality):
    """
    Make a PyQtGraph QPen
    :param picktype: 'manual' or 'automatic'
    :type picktype: str
    :param phase: 'P' or 'S'
    :type phase: str
    :param key: 'mpp', 'epp', 'lpp' or 'spe' (earliest/latest possible pick, symmetric picking error or
        most probable pick)
    :type key: str
    :param quality: quality class of pick, decides color modifier
    :type quality: int
    :return: PyQtGraph QPen
    :rtype: `~QPen`
    """
    if pg:
        rgba = pick_color(picktype, phase, quality)
        linestyle, width = pick_linestyle_pg(picktype, key)
        pen = pg.mkPen(rgba, width=width, style=linestyle)
        return pen
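A minimal sketch of how these helpers combine (the values follow the docstrings and dictionaries above):

    style, width = pick_linestyle_pg('auto', 'epp')   # -> (QtCore.Qt.DashDotLine, 1)
    pen = make_pen('manual', 'P', 'mpp', 0)           # solid pen, width 2, colored via pick_color
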
@@ -1,376 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import matplotlib.pyplot as plt
import numpy as np
import obspy
from PySide import QtGui
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
from mpl_toolkits.basemap import Basemap
from pylot.core.util.widgets import PickDlg
from scipy.interpolate import griddata

plt.interactive(False)


class map_projection(QtGui.QWidget):
    def __init__(self, parent, figure=None):
        '''

        :param: picked, can be False, auto, manual
        :value: str
        '''
        QtGui.QWidget.__init__(self)
        self._parent = parent
        self.metadata = parent.metadata
        self.parser = parent.metadata[1]
        self.picks = None
        self.picks_dict = None
        self.eventLoc = None
        self.figure = figure
        self.init_graphics()
        self.init_stations()
        self.init_basemap(resolution='l')
        self.init_map()
        # self.show()

    def init_map(self):
        self.init_lat_lon_dimensions()
        self.init_lat_lon_grid()
        self.init_x_y_dimensions()
        self.connectSignals()
        self.draw_everything()

    def onpick(self, event):
        ind = event.ind
        button = event.mouseevent.button
        if ind == [] or not button == 1:
            return
        data = self._parent.get_data().getWFData()
        for index in ind:
            station = str(self.station_names[index].split('.')[-1])
            try:
                pickDlg = PickDlg(self, parameter=self._parent._inputs,
                                  data=data.select(station=station),
                                  station=station,
                                  picks=self._parent.get_current_event().getPick(station),
                                  autopicks=self._parent.get_current_event().getAutopick(station))
            except Exception as e:
                message = 'Could not generate Plot for station {st}.\n {er}'.format(st=station, er=e)
                self._warn(message)
                print(message, e)
                return
            pyl_mw = self._parent
            try:
                if pickDlg.exec_():
                    pyl_mw.setDirty(True)
                    pyl_mw.update_status('picks accepted ({0})'.format(station))
                    replot = pyl_mw.get_current_event().setPick(station, pickDlg.getPicks())
                    self._refresh_drawings()
                    if replot:
                        pyl_mw.plotWaveformData()
                        pyl_mw.drawPicks()
                        pyl_mw.draw()
                    else:
                        pyl_mw.drawPicks(station)
                        pyl_mw.draw()
                else:
                    pyl_mw.update_status('picks discarded ({0})'.format(station))
            except Exception as e:
                message = 'Could not save picks for station {st}.\n{er}'.format(st=station, er=e)
                self._warn(message)
                print(message, e)

    def connectSignals(self):
        self.comboBox_phase.currentIndexChanged.connect(self._refresh_drawings)
        self.zoom_id = self.basemap.ax.figure.canvas.mpl_connect('scroll_event', self.zoom)

    def init_graphics(self):
        if not self.figure:
            if not hasattr(self._parent, 'am_figure'):
                self.figure = plt.figure()
                self.toolbar = NavigationToolbar(self.figure.canvas, self)
            else:
                self.figure = self._parent.am_figure
                self.toolbar = self._parent.am_toolbar

        self.main_ax = self.figure.add_subplot(111)
        self.canvas = self.figure.canvas

        self.main_box = QtGui.QVBoxLayout()
        self.setLayout(self.main_box)

        self.top_row = QtGui.QHBoxLayout()
        self.main_box.addLayout(self.top_row)

        self.comboBox_phase = QtGui.QComboBox()
        self.comboBox_phase.insertItem(0, 'P')
        self.comboBox_phase.insertItem(1, 'S')

        self.comboBox_am = QtGui.QComboBox()
        self.comboBox_am.insertItem(0, 'auto')
        self.comboBox_am.insertItem(1, 'manual')

        self.top_row.addWidget(QtGui.QLabel('Select a phase: '))
        self.top_row.addWidget(self.comboBox_phase)
        self.top_row.setStretch(1, 1)  # set stretch of item 1 to 1

        self.main_box.addWidget(self.canvas)
        self.main_box.addWidget(self.toolbar)

    def init_stations(self):
        def get_station_names_lat_lon(parser):
            station_names = []
            lat = []
            lon = []
            for station in parser.stations:
                station_name = station[0].station_call_letters
                network = station[0].network_code
                if not station_name in station_names:
                    station_names.append(network + '.' + station_name)
                    lat.append(station[0].latitude)
                    lon.append(station[0].longitude)
            return station_names, lat, lon

        station_names, lat, lon = get_station_names_lat_lon(self.parser)
        self.station_names = station_names
        self.lat = lat
        self.lon = lon

    def init_picks(self):
        phase = self.comboBox_phase.currentText()

        def get_picks(station_names):
            picks = []
            for station in station_names:
                try:
                    station = station.split('.')[-1]
                    picks.append(self.picks_dict[station][phase]['mpp'])
                except:
                    picks.append(np.nan)
            return picks

        def get_picks_rel(picks):
            picks_rel = []
            picks_utc = []
            for pick in picks:
                if type(pick) is obspy.core.utcdatetime.UTCDateTime:
                    picks_utc.append(pick)
            minp = min(picks_utc)
            for pick in picks:
                if type(pick) is obspy.core.utcdatetime.UTCDateTime:
                    pick -= minp
                picks_rel.append(pick)
            return picks_rel

        self.picks = get_picks(self.station_names)
        self.picks_rel = get_picks_rel(self.picks)

    def init_picks_active(self):
        def remove_nan_picks(picks):
            picks_no_nan = []
            for pick in picks:
                if not np.isnan(pick):
                    picks_no_nan.append(pick)
            return picks_no_nan

        self.picks_no_nan = remove_nan_picks(self.picks_rel)

    def init_stations_active(self):
        def remove_nan_lat_lon(picks, lat, lon):
            lat_no_nan = []
            lon_no_nan = []
            for index, pick in enumerate(picks):
                if not np.isnan(pick):
                    lat_no_nan.append(lat[index])
                    lon_no_nan.append(lon[index])
            return lat_no_nan, lon_no_nan

        self.lat_no_nan, self.lon_no_nan = remove_nan_lat_lon(self.picks_rel, self.lat, self.lon)

    def init_lat_lon_dimensions(self):
        def get_lon_lat_dim(lon, lat):
            londim = max(lon) - min(lon)
            latdim = max(lat) - min(lat)
            return londim, latdim

        self.londim, self.latdim = get_lon_lat_dim(self.lon, self.lat)

    def init_x_y_dimensions(self):
        def get_x_y_dim(x, y):
            xdim = max(x) - min(x)
            ydim = max(y) - min(y)
            return xdim, ydim

        self.x, self.y = self.basemap(self.lon, self.lat)
        self.xdim, self.ydim = get_x_y_dim(self.x, self.y)

    def init_basemap(self, resolution='l'):
        # basemap = Basemap(projection=projection, resolution=resolution, ax=self.main_ax)
        basemap = Basemap(projection='lcc', resolution=resolution, ax=self.main_ax,
                          width=5e6, height=2e6,
                          lat_0=(min(self.lat) + max(self.lat)) / 2.,
                          lon_0=(min(self.lon) + max(self.lon)) / 2.)

        # basemap.fillcontinents(color=None, lake_color='aqua', zorder=1)
        basemap.drawmapboundary(zorder=2)  # fill_color='darkblue')
        basemap.shadedrelief(zorder=3)
        basemap.drawcountries(zorder=4)
        basemap.drawstates(zorder=5)
        basemap.drawcoastlines(zorder=6)
        self.basemap = basemap
        self.figure.tight_layout()

    def init_lat_lon_grid(self):
        def get_lat_lon_axis(lat, lon):
            steplat = (max(lat) - min(lat)) / 250
            steplon = (max(lon) - min(lon)) / 250

            lataxis = np.arange(min(lat), max(lat), steplat)
            lonaxis = np.arange(min(lon), max(lon), steplon)
            return lataxis, lonaxis

        def get_lat_lon_grid(lataxis, lonaxis):
            longrid, latgrid = np.meshgrid(lonaxis, lataxis)
            return latgrid, longrid

        self.lataxis, self.lonaxis = get_lat_lon_axis(self.lat, self.lon)
        self.latgrid, self.longrid = get_lat_lon_grid(self.lataxis, self.lonaxis)

    def init_picksgrid(self):
        self.picksgrid_no_nan = griddata((self.lat_no_nan, self.lon_no_nan),
                                         self.picks_no_nan, (self.latgrid, self.longrid),
                                         method='linear')

    def draw_contour_filled(self, nlevel='50'):
        levels = np.linspace(min(self.picks_no_nan), max(self.picks_no_nan), nlevel)
        self.contourf = self.basemap.contourf(self.longrid, self.latgrid, self.picksgrid_no_nan,
                                              levels, latlon=True, zorder=9, alpha=0.5)

    def scatter_all_stations(self):
        self.sc = self.basemap.scatter(self.lon, self.lat, s=50, facecolor='none', latlon=True,
                                       zorder=10, picker=True, edgecolor='m', label='Not Picked')
        self.cid = self.canvas.mpl_connect('pick_event', self.onpick)
        if self.eventLoc:
            lat, lon = self.eventLoc
            self.sc_event = self.basemap.scatter(lon, lat, s=100, facecolor='red',
                                                 latlon=True, zorder=11, label='Event (might be outside map region)')

    def scatter_picked_stations(self):
        lon = self.lon_no_nan
        lat = self.lat_no_nan

        # workaround because of an issue with latlon transformation of arrays with len < 3
        if len(lon) <= 2 and len(lat) <= 2:
            self.sc_picked = self.basemap.scatter(lon[0], lat[0], s=50, facecolor='white',
                                                  c=self.picks_no_nan[0], latlon=True, zorder=11, label='Picked')
        if len(lon) == 2 and len(lat) == 2:
            self.sc_picked = self.basemap.scatter(lon[1], lat[1], s=50, facecolor='white',
                                                  c=self.picks_no_nan[1], latlon=True, zorder=11)
        else:
            self.sc_picked = self.basemap.scatter(lon, lat, s=50, facecolor='white',
                                                  c=self.picks_no_nan, latlon=True, zorder=11, label='Picked')

    def annotate_ax(self):
        self.annotations = []
        for index, name in enumerate(self.station_names):
            self.annotations.append(self.main_ax.annotate(' %s' % name, xy=(self.x[index], self.y[index]),
                                                          fontsize='x-small', color='white', zorder=12))
        self.legend = self.main_ax.legend(loc=1)

    def add_cbar(self, label):
        cbar = self.main_ax.figure.colorbar(self.sc_picked, fraction=0.025)
        cbar.set_label(label)
        return cbar

    def refresh_drawings(self, picks=None):
        self.picks_dict = picks
        self._refresh_drawings()

    def _refresh_drawings(self):
        self.remove_drawings()
        self.draw_everything()

    def draw_everything(self):
        if self.picks_dict:
            self.init_picks()
            self.init_picks_active()
            self.init_stations_active()
            if len(self.picks_no_nan) >= 3:
                self.init_picksgrid()
                self.draw_contour_filled()
        self.scatter_all_stations()
        if self.picks_dict:
            self.scatter_picked_stations()
            self.cbar = self.add_cbar(label='Time relative to first onset [s]')
            self.comboBox_phase.setEnabled(True)
        else:
            self.comboBox_phase.setEnabled(False)
        self.annotate_ax()
        self.canvas.draw()

    def remove_drawings(self):
        if hasattr(self, 'sc_picked'):
            self.sc_picked.remove()
            del self.sc_picked
        if hasattr(self, 'sc_event'):
            self.sc_event.remove()
            del self.sc_event
        if hasattr(self, 'cbar'):
            self.cbar.remove()
            del self.cbar
        if hasattr(self, 'contourf'):
            self.remove_contourf()
            del self.contourf
        if hasattr(self, 'cid'):
            self.canvas.mpl_disconnect(self.cid)
            del self.cid
        try:
            self.sc.remove()
        except Exception as e:
            print('Warning: could not remove station scatter plot.\nReason: {}'.format(e))
        try:
            self.legend.remove()
        except Exception as e:
            print('Warning: could not remove legend. Reason: {}'.format(e))
        self.canvas.draw()

    def remove_contourf(self):
        for item in self.contourf.collections:
            item.remove()

    def remove_annotations(self):
        for annotation in self.annotations:
            annotation.remove()

    def zoom(self, event):
        map = self.basemap
        xlim = map.ax.get_xlim()
        ylim = map.ax.get_ylim()
        x, y = event.xdata, event.ydata
        zoom = {'up': 1. / 2.,
                'down': 2.}

        if not event.xdata or not event.ydata:
            return

        if event.button in zoom:
            factor = zoom[event.button]
            xdiff = (xlim[1] - xlim[0]) * factor
            xl = x - 0.5 * xdiff
            xr = x + 0.5 * xdiff
            ydiff = (ylim[1] - ylim[0]) * factor
            yb = y - 0.5 * ydiff
            yt = y + 0.5 * ydiff

            if xl < map.xmin or yb < map.ymin or xr > map.xmax or yt > map.ymax:
                xl, xr = map.xmin, map.xmax
                yb, yt = map.ymin, map.ymax
            map.ax.set_xlim(xl, xr)
            map.ax.set_ylim(yb, yt)
            map.ax.figure.canvas.draw()

    def _warn(self, message):
        self.qmb = QtGui.QMessageBox(QtGui.QMessageBox.Icon.Warning,
                                     'Warning', message)
        self.qmb.show()
59
pylot/core/util/obspyDMT_interface.py
Normal file
@@ -0,0 +1,59 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os

from obspy import UTCDateTime


def check_obspydmt_structure(path):
    '''
    Check path for obspyDMT event structure.
    :param path:
    :return:
    '''
    ev_info = os.path.join(path, 'EVENTS-INFO')
    if os.path.isdir(ev_info):
        if os.path.isfile(os.path.join(ev_info, 'logger_command.txt')):
            return True
    return False


def check_obspydmt_eventfolder(folder):
    try:
        time = folder.split('.')[0]
        time = time.replace('_', 'T')
        time = UTCDateTime(time)
        return True, time
    except Exception as e:
        return False, e


def qml_from_obspyDMT(path):
    import pickle
    from obspy.core.event import Event, Magnitude, Origin

    if not os.path.exists(path):
        raise IOError('Could not find Event at {}'.format(path))  # raise the error instead of returning it

    with open(path, 'rb') as infile:
        event_dmt = pickle.load(infile)  # , fix_imports=True)

    event_dmt['origin_id'].id = str(event_dmt['origin_id'].id)

    ev = Event(resource_id=event_dmt['event_id'])
    # small bugfix "unhashable type: 'newstr'"
    event_dmt['origin_id'].id = str(event_dmt['origin_id'].id)

    origin = Origin(resource_id=event_dmt['origin_id'],
                    time=event_dmt['datetime'],
                    longitude=event_dmt['longitude'],
                    latitude=event_dmt['latitude'],
                    depth=event_dmt['depth'])
    mag = Magnitude(mag=event_dmt['magnitude'],
                    magnitude_type=event_dmt['magnitude_type'],
                    origin_id=event_dmt['origin_id'])

    ev.magnitudes.append(mag)
    ev.origins.append(origin)
    return ev
@@ -5,6 +5,7 @@ import warnings

import numpy as np
from obspy import UTCDateTime

from pylot.core.util.utils import fit_curve, clims
from pylot.core.util.version import get_git_version as _getVersionString


@@ -6,7 +6,7 @@ Created on Wed Jan 26 17:47:25 2015
@author: sebastianw
"""

from pylot.core.io.data import SeiscompDataStructure, PilotDataStructure
from pylot.core.io.data import SeiscompDataStructure, PilotDataStructure, ObspyDMTdataStructure

DATASTRUCTURE = {'PILOT': PilotDataStructure, 'SeisComP': SeiscompDataStructure,
                 None: None}
                 'obspyDMT': ObspyDMTdataStructure, None: PilotDataStructure}

@ -1,8 +1,11 @@

# -*- coding: utf-8 -*-
import sys, os, traceback
import multiprocessing
from PySide.QtCore import QThread, Signal, Qt, Slot, QRunnable, QObject
from PySide.QtGui import QDialog, QProgressBar, QLabel, QHBoxLayout, QPushButton
import os
import sys
import traceback

from PySide2.QtCore import QThread, Signal, Qt, Slot, QRunnable, QObject
from PySide2.QtWidgets import QDialog, QProgressBar, QLabel, QHBoxLayout, QPushButton


class Thread(QThread):

@ -19,12 +22,14 @@ class Thread(QThread):
        self.abortButton = abortButton
        self.finished.connect(self.hideProgressbar)
        self.showProgressbar()
        self.old_stdout = None

    def run(self):
        if self.redirect_stdout:
            self.old_stdout = sys.stdout
            sys.stdout = self
        try:
            if self.arg:
            if self.arg is not None:
                self.data = self.func(self.arg)
            else:
                self.data = self.func()

@ -33,13 +38,19 @@ class Thread(QThread):
            self._executed = False
            self._executedError = e
            traceback.print_exc()
            exctype, value = sys.exc_info ()[:2]
            self._executedErrorInfo = '{} {} {}'.\
            exctype, value = sys.exc_info()[:2]
            self._executedErrorInfo = '{} {} {}'. \
                format(exctype, value, traceback.format_exc())
        sys.stdout = sys.__stdout__
        if self.redirect_stdout:
            sys.stdout = self.old_stdout

    def showProgressbar(self):
        if self.progressText:
            # # generate widget if not given in init
            # if not self.pb_widget:
            #     self.pb_widget = ProgressBarWidget(self.parent())
            #     self.pb_widget.setWindowFlags(Qt.SplashScreen)
            #     self.pb_widget.setModal(True)

            # generate widget if not given in init
            if not self.pb_widget:

@ -75,6 +86,7 @@ class Worker(QRunnable):
    '''
    Worker class to be run by MultiThread(QThread).
    '''

    def __init__(self, fun, args,
                 progressText=None,
                 pb_widget=None,

@ -82,28 +94,30 @@ class Worker(QRunnable):
        super(Worker, self).__init__()
        self.fun = fun
        self.args = args
        #self.kwargs = kwargs
        # self.kwargs = kwargs
        self.signals = WorkerSignals()
        self.progressText = progressText
        self.pb_widget = pb_widget
        self.redirect_stdout = redirect_stdout
        self.old_stdout = None

    @Slot()
    def run(self):
        if self.redirect_stdout:
            self.old_stdout = sys.stdout
            sys.stdout = self

        try:
            result = self.fun(self.args)
        except:
            exctype, value = sys.exc_info ()[:2]
            exctype, value = sys.exc_info()[:2]
            print(exctype, value, traceback.format_exc())
            self.signals.error.emit ((exctype, value, traceback.format_exc ()))
            self.signals.error.emit((exctype, value, traceback.format_exc()))
        else:
            self.signals.result.emit(result)
        finally:
            self.signals.finished.emit('Done')
            sys.stdout = sys.__stdout__
            sys.stdout = self.old_stdout

    def write(self, text):
        self.signals.message.emit(text)

@ -135,18 +149,20 @@ class MultiThread(QThread):
        self.progressText = progressText
        self.pb_widget = pb_widget
        self.redirect_stdout = redirect_stdout
        self.old_stdout = None
        self.finished.connect(self.hideProgressbar)
        self.showProgressbar()

    def run(self):
        if self.redirect_stdout:
            sys.stdout = self
            self.old_stdout = sys.stdout
            sys.stdout = self
        try:
            if not self.ncores:
                self.ncores = multiprocessing.cpu_count()
            pool = multiprocessing.Pool(self.ncores)
            pool = multiprocessing.Pool(self.ncores, maxtasksperchild=1000)
            self.data = pool.map_async(self.func, self.args, callback=self.emitDone)
            #self.data = pool.apply_async(self.func, self.shotlist, callback=self.emitDone) #emit each time returned
            # self.data = pool.apply_async(self.func, self.shotlist, callback=self.emitDone) #emit each time returned
            pool.close()
            self._executed = True
        except Exception as e:

@ -155,7 +171,7 @@ class MultiThread(QThread):
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print('Exception: {}, file: {}, line: {}'.format(exc_type, fname, exc_tb.tb_lineno))
        sys.stdout = sys.__stdout__
        sys.stdout = self.old_stdout

    def showProgressbar(self):
        if self.progressText:
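The recurring change in these hunks is the stdout handling: each run() now saves the previously active stream in old_stdout and restores that stream, instead of unconditionally resetting to sys.__stdout__, which would break nested or GUI-wrapped redirections. A minimal self-contained sketch of the pattern, independent of Qt (the class name and callback are invented):

import sys

class StdoutRedirector:
    """Sketch of the save/replace/restore pattern used above: restore the
    stream that was active before, not sys.__stdout__."""

    def __init__(self, write_func):
        self.write_func = write_func  # e.g. a Qt signal's emit
        self.old_stdout = None

    def __enter__(self):
        self.old_stdout = sys.stdout  # remember whatever was active before
        sys.stdout = self
        return self

    def __exit__(self, exc_type, exc_value, tb):
        sys.stdout = self.old_stdout  # restore the saved stream

    def write(self, text):
        self.write_func(text)

    def flush(self):
        pass  # file-like API expected by print()

with StdoutRedirector(lambda text: None):
    print('swallowed')
print('visible again')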
@ -35,9 +35,9 @@ from __future__ import print_function

__all__ = "get_git_version"

import inspect
# NO IMPORTS FROM PYLOT IN THIS FILE! (file gets used at installation time)
import os
import inspect
from subprocess import Popen, PIPE

# NO IMPORTS FROM PYLOT IN THIS FILE! (file gets used at installation time)
2
pylot/correlation/__init__.py
Normal file
@ -0,0 +1,2 @@

# -*- coding: utf-8 -*-
#
90
pylot/correlation/parameters_adriaarray.yaml
Normal file
@ -0,0 +1,90 @@

############################# correlation parameters #####################################
# min_corr_stacking: minimum correlation coefficient for building beam trace
# min_corr_export: minimum correlation coefficient for pick export
# min_stack: minimum number of stations for building beam trace
# t_before: correlation window before pick
# t_after: correlation window after pick
# cc_maxlag: maximum shift for initial correlation
# cc_maxlag2: maximum shift for second (final) correlation (also for calculating pick uncertainty)
# initial_pick_outlier_threshold: (hopefully) threshold for excluding large outliers of initial (AIC) picks
# export_threshold: automatically exclude all onsets which deviate more than this threshold from corrected taup onsets
# min_picks_export: minimum number of correlated picks for export
# min_picks_autopylot: minimum number of reference autopicks to continue with event
# check_RMS: do RMS check to search for restitution errors (very experimental)
# use_taupy_onsets: use taupy onsets as reference picks instead of external picks
# station_list: use the following stations as reference for stacking
# use_stacked_trace: use existing stacked trace if found (spares re-computation)
# data_dir: obspyDMT data subdirectory (e.g. 'raw', 'processed')
# pickfile_extension: use quakeML files (PyLoT output) with the following extension, e.g. '_autopylot' for pickfiles
#                     such as 'PyLoT_20170501_141822_autopylot.xml'

logging: info
pick_phases: ['P', 'S']

# P-phase
P:
  min_corr_stacking: 0.8
  min_corr_export: 0.6
  min_stack: 20
  t_before: 30.
  t_after: 50.
  cc_maxlag: 50.
  cc_maxlag2: 5.
  initial_pick_outlier_threshold: 30.
  export_threshold: 2.5
  min_picks_export: 100
  min_picks_autopylot: 50
  check_RMS: True
  use_taupy_onsets: False
  station_list: ['HU.MORH', 'HU.TIH', 'OX.FUSE', 'OX.BAD']
  use_stacked_trace: False
  data_dir: 'processed'
  pickfile_extension: '_autopylot'
  dt_stacking: [250, 250]

  # filter for first correlation (rough)
  filter_options:
    freqmax: 0.5
    freqmin: 0.03
  # filter for second correlation (fine)
  filter_options_final:
    freqmax: 0.5
    freqmin: 0.03

  filter_type: bandpass
  sampfreq: 20.0

# S-phase
S:
  min_corr_stacking: 0.7
  min_corr_export: 0.6
  min_stack: 20
  t_before: 60.
  t_after: 60.
  cc_maxlag: 100.
  cc_maxlag2: 25.
  initial_pick_outlier_threshold: 30.
  export_threshold: 5.0
  min_picks_export: 200
  min_picks_autopylot: 50
  check_RMS: True
  use_taupy_onsets: False
  station_list: ['HU.MORH', 'HU.TIH', 'OX.FUSE', 'OX.BAD']
  use_stacked_trace: False
  data_dir: 'processed'
  pickfile_extension: '_autopylot'
  dt_stacking: [250, 250]

  # filter for first correlation (rough)
  filter_options:
    freqmax: 0.1
    freqmin: 0.01

  # filter for second correlation (fine)
  filter_options_final:
    freqmax: 0.2
    freqmin: 0.01

  filter_type: bandpass
  sampfreq: 20.0
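Not part of the diff: a minimal sketch of reading this parameter file, assuming the correlation scripts parse it with a standard YAML loader (pyaml is pinned in requirements.txt below). The helper function name is invented.

import yaml  # provided via the pyaml dependency in requirements.txt

def load_correlation_params(fname='parameters_adriaarray.yaml'):
    # Hypothetical helper; plain safe_load suffices for the
    # phase -> parameter mapping shown above.
    with open(fname) as fid:
        return yaml.safe_load(fid)

params = load_correlation_params()
p_params = params['P']
print(p_params['min_corr_stacking'], p_params['filter_options']['freqmin'])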
1987
pylot/correlation/pick_correlation_correction.py
Normal file
40
pylot/correlation/submit_pick_corr_correction.sh
Executable file
@ -0,0 +1,40 @@

#!/bin/bash

#ulimit -s 8192
#ulimit -v $(ulimit -v | awk '{printf("%d",$1*0.95)}')
#ulimit -v

#655360

source /opt/anaconda3/etc/profile.d/conda.sh
conda activate pylot_311
NSLOTS=20

#qsub -l low -cwd -l "os=*stretch" -pe smp 40 submit_pick_corr_correction.sh
#$ -l low
#$ -l h_vmem=6G
#$ -cwd
#$ -pe smp 20
#$ -N corr_pick


export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot_tools/"
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/"
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot/"

#export MKL_NUM_THREADS=${NSLOTS:=1}
#export NUMEXPR_NUM_THREADS=${NSLOTS:=1}
#export OMP_NUM_THREADS=${NSLOTS:=1}

#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 0 -istop 100
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M6.0-6.5' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 0 -istop 100
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
#python pick_correlation_correction.py 'H:\sciebo\dmt_database' 'H:\Sciebo\dmt_database\pylot_alparray_mantle_corr_S_0.01-0.2.in' -pd -n 4 -t

pylot_infile='/home/marcel/.pylot/pylot_alparray_syn_fwi_mk6_it3.in'
#pylot_infile='/home/marcel/.pylot/pylot_adriaarray_corr_P_and_S.in'

# THIS SCRIPT SHOULD BE CALLED BY "submit_to_grid_engine.py" using the following line:
python pick_correlation_correction.py $1 $pylot_infile -pd -n ${NSLOTS:=1} -istart $2 --params 'parameters_fwi_mk6_it3.yaml'
#--event_blacklist eventlist.txt
23
pylot/correlation/submit_to_grid_engine.py
Executable file
@ -0,0 +1,23 @@

#!/usr/bin/env python

import subprocess

fnames = [
    ('/data/AlpArray_Data/dmt_database_synth_model_mk6_it3_no_rotation', 0),
]

#fnames = [('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_SKS-phase', 0),
#          ('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_S-phase', 0),]

####
script_location = '/home/marcel/VersionCtrl/git/code_base/correlation_picker/submit_pick_corr_correction.sh'
####

for fnin, istart in fnames:
    input_cmds = f'qsub -q low.q@minos15,low.q@minos14,low.q@minos13,low.q@minos12,low.q@minos11 {script_location} {fnin} {istart}'

    print(input_cmds)
    print(subprocess.check_output(input_cmds.split()))
61
pylot/correlation/utils.py
Normal file
@ -0,0 +1,61 @@

#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import glob
import json

from obspy import read_events

from pylot.core.util.dataprocessing import Metadata
from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT


def get_event_obspy_dmt(eventdir):
    event_pkl_file = os.path.join(eventdir, 'info', 'event.pkl')
    if not os.path.exists(event_pkl_file):
        raise IOError('Could not find event path for event: {}'.format(eventdir))
    event = qml_from_obspyDMT(event_pkl_file)
    return event


def get_event_pylot(eventdir, extension=''):
    event_id = get_event_id(eventdir)
    filename = os.path.join(eventdir, 'PyLoT_{}{}.xml'.format(event_id, extension))
    if not os.path.isfile(filename):
        return
    cat = read_events(filename)
    return cat[0]


def get_event_id(eventdir):
    event_id = os.path.split(eventdir)[-1]
    return event_id


def get_picks(eventdir, extension=''):
    event_id = get_event_id(eventdir)
    filename = 'PyLoT_{}{}.xml'
    filename = filename.format(event_id, extension)
    fpath = os.path.join(eventdir, filename)
    fpaths = glob.glob(fpath)
    if len(fpaths) == 1:
        cat = read_events(fpaths[0])
        picks = cat[0].picks
        return picks
    elif len(fpaths) == 0:
        print('get_picks: File not found: {}'.format(fpath))
        return
    print(f'WARNING: Ambiguous pick file specification. Found the following pick files {fpaths}\nFilemask: {fpath}')
    return


def write_json(object, fname):
    with open(fname, 'w') as outfile:
        json.dump(object, outfile, sort_keys=True, indent=4)


def get_metadata(eventdir):
    metadata_path = os.path.join(eventdir, 'resp')
    metadata = Metadata(inventory=metadata_path, verbosity=0)
    return metadata
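Not part of the diff: a sketch of how these helpers might be chained over an obspyDMT database directory (one subfolder per event). The database path is invented.

import glob
import os

# Hypothetical driver loop; database_dir is made up, the helpers are the
# ones defined in pylot/correlation/utils.py above.
database_dir = '/data/dmt_database_example'
for eventdir in sorted(glob.glob(os.path.join(database_dir, '*'))):
    if not os.path.isdir(eventdir):
        continue
    event = get_event_obspy_dmt(eventdir)  # ObsPy Event built from info/event.pkl
    picks = get_picks(eventdir, extension='_autopylot')
    if picks:
        print(get_event_id(eventdir), len(picks))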
@ -124,7 +124,6 @@ color:rgba(0, 0, 0, 255);
border-style: outset;
border-width: 1px;
border-color: rgba(100, 100, 120, 255);
min-width: 6em;
padding: 4px;
padding-left:5px;
padding-right:5px;

@ -180,9 +179,9 @@ background-color:transparent;

QTabWidget::pane{
background-color:rgba(0, 0, 0, 255);
border-style:solid;
border-color:rgba(245, 245, 245, 255);
border-width:1px;
margin: 0px, 0px, 0px, 0px;
padding: 0px;
border-width:0px;
}

QTabWidget::tab{
@ -123,7 +123,6 @@ color:rgba(255, 255, 255, 255);
border-style: outset;
border-width: 2px;
border-color: rgba(50, 50, 60, 255);
min-width: 6em;
padding: 4px;
padding-left:5px;
padding-right:5px;

@ -179,9 +178,9 @@ background-color:transparent;

QTabWidget::pane{
background-color:rgba(70, 70, 80, 255);
border-style:solid;
border-color:rgba(70, 70, 80, 255);
border-width:1px;
margin: 0px, 0px, 0px, 0px;
padding: 0px;
border-width:0px;
}

QTabWidget::tab{
@ -5,18 +5,18 @@

# the base color
phasecolors = {
    'manual': {
        'P':{
        'P': {
            'rgba': (0, 0, 255, 255),
            'modifier': 'g'},
        'S':{
        'S': {
            'rgba': (255, 0, 0, 255),
            'modifier': 'b'}
    },
    'auto':{
        'P':{
    'auto': {
        'P': {
            'rgba': (140, 0, 255, 255),
            'modifier': 'g'},
        'S':{
        'S': {
            'rgba': (255, 140, 0, 255),
            'modifier': 'b'}
    }

@ -24,8 +24,8 @@ phasecolors = {

# Set plot colors and stylesheet for each style
stylecolors = {
    'default':{
        'linecolor':{
    'default': {
        'linecolor': {
            'rgba': (0, 0, 0, 255)},
        'background': {
            'rgba': (255, 255, 255, 255)},

@ -67,4 +67,3 @@
            'filename': 'bright.qss'}
    }
}

7
requirements.txt
Normal file
@ -0,0 +1,7 @@

Cartopy==0.23.0
joblib==1.4.2
obspy==1.4.1
pyaml==24.7.0
pyqtgraph==0.13.7
PySide2==5.15.8
pytest==8.3.2
17
setup.py
@ -1,17 +0,0 @@

#!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup

setup(
    name='PyLoT',
    version='0.2',
    packages=['pylot', 'pylot.core', 'pylot.core.loc', 'pylot.core.pick',
              'pylot.core.io', 'pylot.core.util', 'pylot.core.active',
              'pylot.core.analysis', 'pylot.testing'],
    requires=['obspy', 'PySide', 'matplotlib', 'numpy'],
    url='dummy',
    license='LGPLv3',
    author='Sebastian Wehling-Benatelli',
    author_email='sebastian.wehling@rub.de',
    description='Comprehensive Python picking and Location Toolbox for seismological data.'
)
1735
tests/PyLoT_e0019.048.13.xml
Normal file
27
tests/__init__.py
Normal file
@ -0,0 +1,27 @@

# -*- coding: utf-8 -*-
# --------------------------------------------------------
# Purpose: Convenience imports for PyLoT
#
'''
================================================
PyLoT - the Python picking and Localization Tool
================================================

This python library contains a graphical user interface for picking
seismic phases. This software needs ObsPy (http://github.com/obspy/obspy/wiki)
and the Qt4 libraries to be installed first.

PILOT has been developed in Mathworks' MatLab. In order to distribute
PILOT without facing portability problems, it has been decided to re-
develop the software package in Python. The great work of the ObsPy
group allows easy handling of a bunch of seismic data and PyLoT will
benefit a lot compared to the former MatLab version.

The development of PyLoT is part of the joint research project MAGS2.

:copyright:
    The PyLoT Development Team
:license:
    GNU Lesser General Public License, Version 3
    (http://www.gnu.org/copyleft/lesser.html)
'''
80
tests/testPickingResults.py
Normal file
@ -0,0 +1,80 @@

import unittest

from pylot.core.pick.autopick import PickingResults


class TestPickingResults(unittest.TestCase):

    def setUp(self):
        self.pr = PickingResults()

    def test_non_existing_key_dot_access(self):
        """Accessing an attribute in the class that wasn't added to the dict should raise an AttributeError"""
        with self.assertRaises(AttributeError):
            self.pr.doesntexist

    def test_non_existing_key_dict_access(self):
        """Accessing a missing attribute in a dictionary throws a KeyError"""
        with self.assertRaises(KeyError):
            self.pr['keydoesnotexist']

    def test_dot_member_creation(self):
        self.pr.x = 0
        self.assertEqual(self.pr.x, 0)
        self.pr.x += 42
        self.assertEqual(self.pr.x, 42)

    def test_dot_builtin_member(self):
        self.assertEqual(self.pr.weight, 4)
        self.pr.weight = 99
        self.assertEqual(self.pr.weight, 99)

    def test_key_access(self):
        self.pr['y'] = 11
        self.assertEqual(self.pr['y'], 11)

    def test_builtin_fields(self):
        self.assertEqual(self.pr['weight'], 4)

    def test_in(self):
        self.assertFalse('keydoesnotexist' in self.pr)
        self.pr['k'] = 0
        self.assertTrue('k' in self.pr)

    def test_keys_function(self):
        a = 99
        self.pr.newkey = a
        self.assertIn(a, self.pr.values())
        self.assertIn('newkey', self.pr.keys())

    def test_len_and_clear(self):
        self.pr.clear()
        self.assertEqual(len(self.pr), 0)
        self.pr.a = 6
        self.pr['b'] = 9
        self.assertEqual(len(self.pr), 2)

    def test_get_default(self):
        self.assertEqual(self.pr.get('keynotexisting', 42), 42)
        weight = self.pr.get('weight', -1)
        self.assertEqual(weight, 4)
        self.assertNotEqual(weight, -1)

    def test_dunder_attributes(self):
        """Storing Python's special dunder methods in a dictionary is valid and should not override the instance's
        dunder methods"""
        prev_len = len(self.pr)
        try:
            self.pr['__len__'] = None
        except Exception:
            self.fail("test_dunder_attributes failed to add a dunder attribute to the dictionary keys")
        try:
            curr_len = len(self.pr)
        except Exception:
            self.fail("test_dunder_attributes overwrote an instance internal dunder method")
        self.assertEqual(prev_len + 1, curr_len)  # +1 for the added __len__ key/value-pair

        self.pr.__len__ = 42

        self.assertEqual(42, self.pr['__len__'])
        self.assertEqual(prev_len + 1, curr_len, msg="__len__ was overwritten")
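The tests above pin down the expected interface of PickingResults: plain dict access, attribute access that raises AttributeError for missing keys, a pre-populated weight default, and dunder names stored as ordinary keys. A minimal sketch of a dict subclass that would satisfy those access patterns (not PyLoT's actual implementation):

class AttributeDict(dict):
    """Dict with attribute-style access, mirroring the behaviour the tests
    above exercise; the weight default is assumed from test_builtin_fields."""

    def __init__(self, *args, **kwargs):
        super(AttributeDict, self).__init__(*args, **kwargs)
        self['weight'] = 4  # assumed built-in default

    def __getattr__(self, name):
        # only called when normal attribute lookup fails, so real dunder
        # methods (e.g. __len__) keep working even if stored as keys
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value  # pr.x = 0 behaves like pr['x'] = 0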
337
tests/test_Metadata/test_Metadata.py
Normal file
@ -0,0 +1,337 @@

import os
import unittest

from obspy import UTCDateTime
from obspy.io.xseed import Parser
from obspy.io.xseed.utils import SEEDParserException

from pylot.core.util.dataprocessing import Metadata
from tests.utils import HidePrints


class TestMetadata(unittest.TestCase):

    def setUp(self):
        self.station_id = 'BW.WETR..HH'
        self.time = UTCDateTime('2012-08-01')
        metadata_folder = os.path.join('test_data', 'dless_multiple_files', 'metadata1')
        self.m = Metadata(metadata_folder)

    def test_get_coordinates_sucess(self):
        expected = {'Z': {u'elevation': 607.0, u'longitude': 12.87571, u'local_depth': 0.0, u'azimuth': 0.0,
                          u'latitude': 49.14502, u'dip': -90.0},
                    'E': {u'azimuth': 90.0, u'dip': 0.0, u'elevation': 607.0, u'latitude': 49.14502,
                          u'local_depth': 0.0, u'longitude': 12.87571},
                    'N': {u'azimuth': 0.0, u'dip': 0.0, u'elevation': 607.0, u'latitude': 49.14502, u'local_depth': 0.0,
                          u'longitude': 12.87571}
                    }
        result = {}
        for channel in ('Z', 'N', 'E'):
            with HidePrints():
                coords = self.m.get_coordinates(self.station_id + channel, time=self.time)
            result[channel] = coords
            self.assertDictEqual(result[channel], expected[channel])

    def test_get_coordinates_sucess_no_time(self):
        expected = {'Z': {u'elevation': 607.0, u'longitude': 12.87571, u'local_depth': 0.0, u'azimuth': 0.0,
                          u'latitude': 49.14502, u'dip': -90.0},
                    'E': {u'azimuth': 90.0, u'dip': 0.0, u'elevation': 607.0, u'latitude': 49.14502,
                          u'local_depth': 0.0, u'longitude': 12.87571},
                    'N': {u'azimuth': 0.0, u'dip': 0.0, u'elevation': 607.0, u'latitude': 49.14502, u'local_depth': 0.0,
                          u'longitude': 12.87571}
                    }
        result = {}
        for channel in ('Z', 'N', 'E'):
            with HidePrints():
                coords = self.m.get_coordinates(self.station_id + channel)
            result[channel] = coords
            self.assertDictEqual(result[channel], expected[channel])


class TestMetadataAdding(unittest.TestCase):
    """Tests if adding files and directories to a metadata object works."""

    def setUp(self):
        self.station_id = 'BW.WETR..HH'
        self.metadata_folders = (os.path.join('test_data', 'dless_multiple_files', 'metadata1'),
                                 os.path.join('test_data', 'dless_multiple_files', 'metadata2'))
        self.m = Metadata()

    def test_add_inventory_folder(self):
        """Test if add_inventory adds the folder to the list of inventories"""
        self.m.add_inventory(self.metadata_folders[0])
        # adding an inventory folder should append it to the list of inventories
        self.assertDictEqual({}, self.m.inventory_files)
        self.assertDictEqual({}, self.m.seed_ids)
        self.assertEqual([self.metadata_folders[0]], self.m.inventories)

    def test_add_inventory_file(self):
        """Test if add_inventory_file adds the folder containing the file to the list of inventories and
        if the file is added to inventory_files"""
        fpath = os.path.join(self.metadata_folders[0], 'DATALESS.BW.WETR..HHZ')
        self.m.add_inventory_file(fpath)
        # adding an inventory file should append its folder to the list of inventories and the file to the
        # inventory files dictionary
        self.assertEqual([os.path.join(self.metadata_folders[0], 'DATALESS.BW.WETR..HHZ')],
                         self.m.inventory_files.keys())  # does the filename exist in inventory files?
        self.assertEqual(['data', 'invtype'], self.m.inventory_files[os.path.join(self.metadata_folders[0],
                         'DATALESS.BW.WETR..HHZ')].keys())  # is the required information attached to the filename?
        self.assertDictEqual({}, self.m.seed_ids)
        self.assertEqual([self.metadata_folders[0]], self.m.inventories)

    def test_add_inventory_invalid_path(self):
        """Test if adding an inventory that is not an existing directory fails with an exception"""
        with self.assertRaises(Exception):
            self.m.add_inventory('InvalidDirName')
        self.assertEqual([], self.m.inventories)  # inventory list should still be empty

    def test_add_inventory_file_invalid_path(self):
        """Test if adding an inventory file with an invalid path fails with an exception"""
        with self.assertRaises(Exception):
            self.m.add_inventory_file('/invalid/file/name')
        self.assertEqual([], self.m.inventories)  # inventory list should still be empty


class TestMetadataRemoval(unittest.TestCase):
    """Tests if removing files and directories from a metadata object works."""

    def setUp(self):
        self.station_id = 'BW.WETR..HH'
        self.metadata_folders = (os.path.join('test_data', 'dless_multiple_files', 'metadata1'),
                                 os.path.join('test_data', 'dless_multiple_files', 'metadata2'))
        self.m = Metadata()

    def test_remove_all_inventories(self):
        """Test if function remove_inventory cleans the Metadata object"""
        # add multiple inventories
        for folder in self.metadata_folders:
            self.m.add_inventory(folder)
        self.m.remove_all_inventories()
        self.isEmpty(self.m)

    def test_remove_inventory(self):
        """Test if remove_inventory removes single inventories"""
        # add multiple inventories
        for folder in self.metadata_folders:
            self.m.add_inventory(folder)
        self.m.remove_inventory(self.metadata_folders[0])
        self.assertNotIn(self.metadata_folders[0], self.m.inventories)
        self.m.remove_inventory(self.metadata_folders[1])
        self.assertNotIn(self.metadata_folders[1], self.m.inventories)
        self.isEmpty(self.m)

    def test_remove_inventory_not_in_inventory_list(self):
        """Test if remove_inventory does not modify the metadata instance if the given inventory to remove does not
        exist in the instance."""
        # add multiple inventories
        self.m.add_inventory(self.metadata_folders[0])
        with HidePrints():
            self.m.remove_inventory('metadata_not_existing')
        self.assertIn(self.metadata_folders[0], self.m.inventories)

    def isEmpty(self, metadata):
        """Asserts that the given metadata object is empty"""
        self.assertDictEqual({}, metadata.inventory_files)
        self.assertDictEqual({}, metadata.seed_ids)
        self.assertEqual([], metadata.inventories)


class TestMetadata_read_single_file(unittest.TestCase):

    def setUp(self):
        self.station_id = 'BW.WETR..HHZ'
        self.metadata_folders = (os.path.join('test_data', 'dless_multiple_files', 'metadata1'),
                                 os.path.join('test_data', 'dless_multiple_files', 'metadata2'))
        self.metadata_paths = []
        self.m = Metadata()

    def test_read_single_file(self):
        """Test if reading a single file works"""
        fname = os.path.join(self.metadata_folders[0], 'DATALESS.' + self.station_id)
        with HidePrints():
            res = self.m.read_single_file(fname)
        # method should return true if file is successfully read
        self.assertTrue(res)
        # list of inventories (folders) should be empty
        self.assertEqual([], self.m.inventories)
        # list of inventory files should contain the added file
        self.assertIn(fname, self.m.inventory_files.keys())
        self.assertEqual({}, self.m.seed_ids)

    def test_read_single_file_invalid_path(self):
        """Test if reading from a non-existing file fails. The filename should not be
        added to the metadata object"""
        fname = os.path.join("this", "path", "doesnt", "exist")
        with HidePrints():
            res = self.m.read_single_file(fname)
        # method should return None if file reading fails
        self.assertIsNone(res)
        # list of inventories (folders) should be empty
        self.assertEqual([], self.m.inventories)
        # list of inventory files should not contain the added file
        self.assertNotIn(fname, self.m.inventory_files.keys())
        self.assertEqual({}, self.m.seed_ids)

    def test_read_single_file_multiple_times(self):
        """Test if reading a file twice doesn't add it twice to the metadata object"""
        fname = os.path.join(self.metadata_folders[0], 'DATALESS.' + self.station_id)
        with HidePrints():
            res1 = self.m.read_single_file(fname)
            res2 = self.m.read_single_file(fname)
        self.assertTrue(res1)
        self.assertIsNone(res2)
        self.assertItemsEqual([fname], self.m.inventory_files.keys())


class TestMetadataMultipleTime(unittest.TestCase):
    """Test if stations with multiple metadata entries in a single file are handled correctly.
    The user must specify the time for which metadata is requested.

    The station ROTT has metadata available at multiple times:
    LE.ROTT..HNE | 200.00 Hz | Titan 4g-EDR-209, Very Low gain, 200 sps | 2015-01-08 - 2015-03-19 | Lat: 49.1, Lng: 8.1
    LE.ROTT..HNE | 200.00 Hz | Titan 4g-EDR-209, Very Low gain, 200 sps | 2015-03-19 - | Lat: 49.1, Lng: 8.1
    LE.ROTT..HNN | 200.00 Hz | Titan 4g-EDR-209, Very Low gain, 200 sps | 2015-01-08 - 2015-03-19 | Lat: 49.1, Lng: 8.1
    LE.ROTT..HNN | 200.00 Hz | Titan 4g-EDR-209, Very Low gain, 200 sps | 2015-03-19 - | Lat: 49.1, Lng: 8.1
    LE.ROTT..HNZ | 200.00 Hz | Titan 4g-EDR-209, Very Low gain, 200 sps | 2015-01-08 - 2015-03-19 | Lat: 49.1, Lng: 8.1
    LE.ROTT..HNZ | 200.00 Hz | Titan 4g-EDR-209, Very Low gain, 200 sps | 2015-03-19 - | Lat: 49.1, Lng: 8.1
    """

    def setUp(self):
        self.seed_id = 'LE.ROTT..HN'
        path = os.path.dirname(__file__)  # gets path to currently running script
        metadata = os.path.join('test_data', 'dless_multiple_times',
                                'MAGS2_LE_ROTT.dless')  # specific subfolder of test data
        metadata_path = os.path.join(path, metadata)
        self.m = Metadata(metadata_path)
        self.p = Parser(metadata_path)

    def test_get_metadata_works_without_datetime(self):
        """Test if get_metadata works if multiple metadata entries are available but no time is
        specified."""
        for channel in ('Z', 'N', 'E'):
            with HidePrints():
                md = self.m.get_metadata(self.seed_id + channel)
            self.assertDictEqual(md['data'].get_inventory(), self.p.get_inventory())

    def test_get_metadata_works_with_first_datetime(self):
        """Test if get_metadata works if multiple metadata entries are available and the older time is specified."""
        t = UTCDateTime('2015-02-08')
        for channel in ('Z', 'N', 'E'):
            with HidePrints():
                md = self.m.get_metadata(self.seed_id + channel, t)
            self.assertDictEqual(md['data'].get_inventory(), self.p.get_inventory())

    def test_get_metadata_fails_when_time_before_starttime(self):
        """Tests if get_metadata returns None when given a date that is before the start date
        of the metadata"""
        with HidePrints():
            md = self.m.get_metadata(self.seed_id, UTCDateTime('1960-07-20'))
        self.assertIs(md, None)

    def test_get_metadata_invalid_seed_id(self):
        """Test if get_metadata returns None when asked for a seed id that does not exist"""
        with HidePrints():
            res = self.m.get_metadata("this.doesnt..exist")
        self.assertIsNone(res)


class TestMetadataMultipleEntries(unittest.TestCase):
    """
    The station KB.TMO07 has changed instruments multiple times.
    Networks:
    KB (KB network)
    Stations:
    KB.TMO07 (Karlsruhe GPI)
    Channels:
    KB.TMO07.00.BHE | 50.00 Hz | Streckeisen KABBA-STS-2 | 2004-12-06 - 2005-04-18 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHE | 50.00 Hz | Streckeisen KABBA-STS-2 | 2005-04-18 - 2006-07-18 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHE | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2006-10-10 - 2006-11-14 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHE | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2006-11-24 - 2007-01-12 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHE | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-01-18 - 2007-03-15 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHE | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-10-25 - 2007-11-21 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHE | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-11-21 - 2008-01-17 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHN | 50.00 Hz | Streckeisen KABBA-STS-2 | 2004-12-06 - 2005-04-18 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHN | 50.00 Hz | Streckeisen KABBA-STS-2 | 2005-04-18 - 2006-07-18 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHN | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2006-10-10 - 2006-11-14 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHN | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2006-11-24 - 2007-01-12 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHN | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-01-18 - 2007-03-15 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHN | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-10-25 - 2007-11-21 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHN | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-11-21 - 2008-01-17 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHZ | 50.00 Hz | Streckeisen KABBA-STS-2 | 2004-12-06 - 2005-04-18 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHZ | 50.00 Hz | Streckeisen KABBA-STS-2 | 2005-04-18 - 2006-07-18 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHZ | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2006-10-10 - 2006-11-14 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHZ | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2006-11-24 - 2007-01-12 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHZ | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-01-18 - 2007-03-15 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHZ | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-10-25 - 2007-11-21 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.BHZ | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-11-21 - 2008-01-17 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHE | 100.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-01-12 - 2007-01-18 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHE | 100.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-10-10 - 2007-10-25 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHE | 100.00 Hz | Streckeisen KABBA-STS-2 | 2008-07-11 - 2008-12-05 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHE | 100.00 Hz | Streckeisen KABBA-STS-2 | 2009-05-12 - 2010-02-15 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHE | 100.00 Hz | Streckeisen KABBA-STS-2 | 2010-02-15 - 2010-04-07 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHE | 100.00 Hz | Lennartz KABBA-LE-3D/1 | 2010-04-07 - 2010-08-03 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHE | 200.00 Hz | Streckeisen KABBA-STS-2 | 2010-08-05 - 2010-12-20 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHE | 100.00 Hz | Streckeisen KABBA-STS-2 | 2010-12-20 - 2010-12-22 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHE | 200.00 Hz | Streckeisen KABBA-STS-2 | 2010-12-22 - 2011-04-02 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHE | 200.00 Hz | Streckeisen KABBA-STS-2 | 2011-04-15 - 2012-05-07 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHE | 200.00 Hz | Streckeisen KABBA-STS-2 | 2012-05-07 - | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHN | 100.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-01-12 - 2007-01-18 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHN | 100.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-10-10 - 2007-10-25 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHN | 100.00 Hz | Streckeisen KABBA-STS-2 | 2008-07-11 - 2008-12-05 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHN | 100.00 Hz | Streckeisen KABBA-STS-2 | 2009-05-12 - 2010-02-15 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHN | 100.00 Hz | Streckeisen KABBA-STS-2 | 2010-02-15 - 2010-04-07 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHN | 100.00 Hz | Lennartz KABBA-LE-3D/1 | 2010-04-07 - 2010-08-03 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHN | 200.00 Hz | Streckeisen KABBA-STS-2 | 2010-08-05 - 2010-12-20 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHN | 100.00 Hz | Streckeisen KABBA-STS-2 | 2010-12-20 - 2010-12-22 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHN | 200.00 Hz | Streckeisen KABBA-STS-2 | 2010-12-22 - 2011-04-02 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHN | 200.00 Hz | Streckeisen KABBA-STS-2 | 2011-04-15 - 2012-05-07 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHN | 200.00 Hz | Streckeisen KABBA-STS-2 | 2012-05-07 - | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHZ | 100.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-01-12 - 2007-01-18 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHZ | 100.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-10-10 - 2007-10-25 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHZ | 100.00 Hz | Streckeisen KABBA-STS-2 | 2008-07-11 - 2008-12-05 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHZ | 100.00 Hz | Streckeisen KABBA-STS-2 | 2009-05-12 - 2010-02-15 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHZ | 100.00 Hz | Streckeisen KABBA-STS-2 | 2010-02-15 - 2010-04-07 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHZ | 100.00 Hz | Lennartz KABBA-LE-3D/1 | 2010-04-07 - 2010-08-03 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHZ | 200.00 Hz | Streckeisen KABBA-STS-2 | 2010-08-05 - 2010-12-20 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHZ | 100.00 Hz | Streckeisen KABBA-STS-2 | 2010-12-20 - 2010-12-22 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHZ | 200.00 Hz | Streckeisen KABBA-STS-2 | 2010-12-22 - 2011-04-02 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHZ | 200.00 Hz | Streckeisen KABBA-STS-2 | 2011-04-15 - 2012-05-07 | Lat: 49.0, Lng: 8.4
    KB.TMO07.00.HHZ | 200.00 Hz | Streckeisen KABBA-STS-2 | 2012-05-07 - | Lat: 49.0, Lng: 8.4
    """

    def setUp(self):
        self.seed_id = 'KB.TMO07.00.HHZ'
        path = os.path.dirname(__file__)  # gets path to currently running script
        metadata = os.path.join('test_data', 'dless_multiple_instruments',
                                'MAGS2_KB_TMO07.dless')  # specific subfolder of test data
        metadata_path = os.path.join(path, metadata)
        self.m = Metadata(metadata_path)
        self.p = Parser(metadata_path)

    def test_get_paz_current_time(self):
        """Test if getting the paz from the metadata object with the current time works"""
        t = UTCDateTime()
        with HidePrints():
            pazm = self.m.get_paz(self.seed_id, t)
            pazp = self.p.get_paz(self.seed_id, t)
        self.assertEqual(pazm, pazp)

    def test_get_paz_past(self):
        """Test if getting paz from the metadata object with a time in the past works"""
        t = UTCDateTime('2007-01-13')
        with HidePrints():
            pazm = self.m.get_paz(self.seed_id, t)
            pazp = self.p.get_paz(self.seed_id, t)
        self.assertEqual(pazm, pazp)

    def test_get_paz_time_not_exisiting(self):
        """Test if getting paz from metadata at a time where there is no metadata
        available fails correctly"""
        with self.assertRaises(SEEDParserException):
            with HidePrints():
                self.m.get_paz(self.seed_id, UTCDateTime('1990-1-1'))

    def test_get_paz_seed_id_not_existing(self):
        """Test if getting paz for a non-existing seed id returns None as expected."""
        with HidePrints():
            res = self.m.get_paz('This.doesnt..exist', UTCDateTime)
        self.assertIsNone(res)
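For orientation, the Metadata API exercised by these tests in one place; a hypothetical standalone snippet with an invented inventory path, not taken from the diff:

from obspy import UTCDateTime

from pylot.core.util.dataprocessing import Metadata

# Hypothetical usage of the calls covered by the tests above; the folder
# path is made up (a directory of e.g. dataless SEED files, as in the tests).
m = Metadata()
m.add_inventory('/path/to/metadata_folder')
t = UTCDateTime('2012-08-01')
coords = m.get_coordinates('BW.WETR..HHZ', time=t)  # dict with latitude, longitude, elevation, ...
paz = m.get_paz('BW.WETR..HHZ', t)                  # poles and zeros for the channel
print(coords, paz)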