#!/usr/bin/python
# -*- coding: utf-8 -*-

from __future__ import print_function

import argparse
import datetime
import glob
import os

import pylot.core.loc.focmec as focmec
import pylot.core.loc.hash as hash
import pylot.core.loc.hypo71 as hypo71
import pylot.core.loc.hypodd as hypodd
import pylot.core.loc.hyposat as hyposat
import pylot.core.loc.nll as nll
import pylot.core.loc.velest as velest
from obspy import read_events
from obspy.core.event import ResourceIdentifier
# from PySide.QtGui import QWidget, QInputDialog
from pylot.core.analysis.magnitude import MomentMagnitude, LocalMagnitude
from pylot.core.io.data import Data
from pylot.core.io.inputs import PylotParameter
from pylot.core.pick.autopick import autopickevent, iteratepicker
from pylot.core.util.dataprocessing import restitute_data, read_metadata, Metadata
from pylot.core.util.defaults import SEPARATOR
from pylot.core.util.event import Event
from pylot.core.util.structure import DATASTRUCTURE
from pylot.core.util.utils import real_None, remove_underscores, trim_station_components, check4gaps, check4doubled, \
    check4rotated
from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()


def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, eventid=None, savepath=None,
              savexml=True, station='all', iplot=0, ncores=0, obspyDMT_wfpath=False):
    """
    Determine phase onsets automatically utilizing the automatic picking
    algorithms by Kueperkoch et al. 2010/2012.
    :param obspyDMT_wfpath: if obspyDMT is used, name of data directory ("raw" or "processed")
    :param input_dict: dictionary that may carry any of the other arguments, overriding them
        (used when autoPyLoT is started from the GUI)
    :type input_dict: dict
    :param parameter: PylotParameter object containing parameters used for automatic picking
    :type parameter: pylot.core.io.inputs.PylotParameter
    :param inputfile: path to the input file containing all parameter information for automatic picking
        (for formatting details, see `~pylot.core.io.inputs.PylotParameter`)
    :type inputfile: str
    :param fnames: list of data file names or None when called from GUI
    :type fnames: str
    :param eventid: event path incl. event ID (path to waveform files)
    :type eventid: str
    :param savepath: save path for autoPyLoT output; if None/"None", output will be saved in the event folder
    :type savepath: str
    :param savexml: export results to an XML file if True
    :type savexml: bool
    :param station: choose a specific station name or 'all' to pick all stations
    :type station: str
    :param iplot: logical variable for plotting: 0=none, 1=partial, 2=all
    :type iplot: int
    :param ncores: number of cores used for parallel processing. Default (0) uses all available cores
    :type ncores: int
    :return: dictionary containing picks
    :rtype: dict
    """
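    # Minimal usage sketch (illustrative only; the input file path and the event folder are
    # placeholders, not values defined in this module):
    #   picks = autoPyLoT(inputfile='~/.pylot/pylot.in', eventid='<path-to-event-folder>', ncores=4)
    #   # 'picks' maps each event ID to the per-station pick dictionaries assembled below.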

    if ncores == 1:
        sp_info = 'autoPyLoT is running serial on 1 core.'
    else:
        if ncores == 0:
            ncores_readable = 'all available'
        else:
            ncores_readable = ncores
        sp_info = 'autoPyLoT is running in parallel on {} cores.'.format(ncores_readable)

    splash = '''************************************\n
       *********autoPyLoT starting*********\n
       The Python picking and Location Tool\n
       Version {version} 2017\n
       \n
       Authors:\n
       L. Kueperkoch (BESTEC GmbH, Landau i. d. Pfalz)\n
       M. Paffrath (Ruhr-Universitaet Bochum)\n
       S. Wehling-Benatelli (Ruhr-Universitaet Bochum)\n

       {sp}
       ***********************************'''.format(version=_getVersionString(),
                                                     sp=sp_info)
    print(splash)

    parameter = real_None(parameter)
    inputfile = real_None(inputfile)
    eventid = real_None(eventid)

    fig_dict = None
    fig_dict_wadatijack = None

    if input_dict and isinstance(input_dict, dict):
        if 'parameter' in input_dict:
            parameter = input_dict['parameter']
        if 'fig_dict' in input_dict:
            fig_dict = input_dict['fig_dict']
        if 'fig_dict_wadatijack' in input_dict:
            fig_dict_wadatijack = input_dict['fig_dict_wadatijack']
        if 'station' in input_dict:
            station = input_dict['station']
        if 'fnames' in input_dict:
            fnames = input_dict['fnames']
        if 'eventid' in input_dict:
            eventid = input_dict['eventid']
        if 'iplot' in input_dict:
            iplot = input_dict['iplot']
        if 'savexml' in input_dict:
            savexml = input_dict['savexml']
        if 'obspyDMT_wfpath' in input_dict:
            obspyDMT_wfpath = input_dict['obspyDMT_wfpath']
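    # Note: when autoPyLoT is started from the GUI, the entries of input_dict override the
    # corresponding keyword arguments set above.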

    if not parameter:
        if inputfile:
            parameter = PylotParameter(inputfile)
            # iplot = parameter['iplot']
        else:
            infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
            print('Using default input file {}'.format(infile))
            parameter = PylotParameter(infile)
    else:
        if not type(parameter) == PylotParameter:
            print('Wrong input type for parameter: {}'.format(type(parameter)))
            return
        if inputfile:
            print('Parameter object and input file given. Please choose only one of them.')
            return

    evt = None

    # reading parameter file
    if parameter.hasParam('datastructure'):
        # getting information on data structure
        datastructure = DATASTRUCTURE[parameter.get('datastructure')]()
        dsfields = {'root': parameter.get('rootpath'),
                    'dpath': parameter.get('datapath'),
                    'dbase': parameter.get('database')}

        exf = ['root', 'dpath', 'dbase']

        if parameter['eventID'] != '*' and fnames == 'None':
            dsfields['eventID'] = parameter['eventID']
            exf.append('eventID')

        datastructure.modifyFields(**dsfields)
        datastructure.setExpandFields(exf)

    # check if default location routine NLLoc is available and all stations are used
    if real_None(parameter['nllocbin']) and station == 'all':
        locflag = 1
        # get NLLoc-root path
        nllocroot = parameter.get('nllocroot')
        # get path to NLLoc executable
        nllocbin = parameter.get('nllocbin')
        nlloccall = '%s/NLLoc' % nllocbin
        # get name of phase file
        phasef = parameter.get('phasefile')
        phasefile = '%s/obs/%s' % (nllocroot, phasef)
        # get name of NLLoc-control file
        ctrf = parameter.get('ctrfile')
        ctrfile = '%s/run/%s' % (nllocroot, ctrf)
        # pattern of NLLoc ttimes from location grid
        ttpat = parameter.get('ttpatter')
        # pattern of NLLoc-output file
        nllocoutpatter = parameter.get('outpatter')
        maxnumit = 2  # maximum number of iterations for re-picking
    else:
        locflag = 0
        print(" !!! ")
        print("!!No location routine available, autoPyLoT is running in non-location mode!!")
        print("!!No source parameter estimation possible!!")
        print(" !!! ")
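    # locflag values used throughout this function: 1 = NLLoc available and all stations
    # requested (full location and source parameter workflow), 0 = non-location mode,
    # 2 = event initialized from the GUI or no restituted data available,
    # 9 = no NLLoc location file could be found for the event.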

    wfpath_extension = ''
    if obspyDMT_wfpath not in [None, False, 'False', '']:
        wfpath_extension = obspyDMT_wfpath
        print('Using obspyDMT structure. There will be no restitution, as pre-processed data are expected.')
        if wfpath_extension != 'processed':
            print('WARNING: Expecting wfpath_extension to be "processed" for'
                  ' pre-processed data but received "{}" instead!!!'.format(wfpath_extension))

    if not input_dict:
        # started in production mode
        datapath = datastructure.expandDataPath()
        if fnames == 'None' and parameter['eventID'] == '*':
            # multiple event processing
            # read each event in database
            events = [events for events in glob.glob(os.path.join(datapath, '*')) if os.path.isdir(events)]
        elif fnames == 'None' and parameter['eventID'] != '*' and not type(parameter['eventID']) == list:
            # single event processing
            events = glob.glob(os.path.join(datapath, parameter['eventID']))
        elif fnames == 'None' and type(parameter['eventID']) == list:
            # multiple event processing
            events = []
            for eventID in parameter['eventID']:
                events.append(os.path.join(datapath, eventID))
        else:
            # autoPyLoT was initialized from GUI
            events = [eventid]
            evID = os.path.split(eventid)[-1]
            locflag = 2
    else:
        # started in tune or interactive mode
        datapath = os.path.join(parameter['rootpath'],
                                parameter['datapath'])
        events = []
        for eventID in eventid:
            events.append(os.path.join(datapath,
                                       parameter['database'],
                                       eventID))

    if not events:
        print('autoPyLoT: No events given. Return!')
        return

    # transform system path separator to '/'
    for index, eventpath in enumerate(events):
        eventpath = eventpath.replace(SEPARATOR, '/')
        events[index] = eventpath

    allpicks = {}
    glocflag = locflag
    for eventpath in events:
        evID = os.path.split(eventpath)[-1]
        event_datapath = os.path.join(eventpath, wfpath_extension)
        fext = '.xml'
        filename = os.path.join(eventpath, 'PyLoT_' + evID + fext)
        try:
            data = Data(evtdata=filename)
            data.get_evt_data().path = eventpath
            print('Reading event data from filename {}...'.format(filename))
        except Exception as e:
            print('Could not read event from file {}: {}'.format(filename, e))
            data = Data()
            pylot_event = Event(eventpath)  # event should be path to event directory
            data.setEvtData(pylot_event)
        if fnames == 'None':
            data.setWFData(glob.glob(os.path.join(datapath, event_datapath, '*')))
            # the following is necessary because within
            # multiple event processing no event ID is provided
            # in autopylot.in
            try:
                parameter.get('eventID')
            except Exception:
                now = datetime.datetime.now()
                eventID = '%d%02d%02d%02d%02d' % (now.year,
                                                  now.month,
                                                  now.day,
                                                  now.hour,
                                                  now.minute)
                parameter.setParam(eventID=eventID)
        else:
            data.setWFData(fnames)

            eventpath = events[0]
            # now = datetime.datetime.now()
            # evID = '%d%02d%02d%02d%02d' % (now.year,
            #                                now.month,
            #                                now.day,
            #                                now.hour,
            #                                now.minute)
            parameter.setParam(eventID=eventid)
        wfdat = data.getWFData()  # all available streams
        if not station == 'all':
            wfdat = wfdat.select(station=station)
            if not wfdat:
                print('Could not find station {}. STOP!'.format(station))
                return
        wfdat = remove_underscores(wfdat)
        # trim components for each station to avoid problems with different trace starttimes for one station
        wfdat = check4gaps(wfdat)
        wfdat = check4doubled(wfdat)
        wfdat = trim_station_components(wfdat, trim_start=True, trim_end=False)
        if not wfpath_extension:
            metadata = Metadata(parameter.get('invdir'))
        else:
            metadata = Metadata(os.path.join(eventpath, 'resp'))
        # metadata = read_metadata(parameter.get('invdir'))
        # TODO: (idea) read metadata from obspy_dmt database
        # if not wfpath_extension:
        #     metadata = read_metadata(parameter.get('invdir'))
        # else:
        #     metadata = None
        corr_dat = None
        if metadata:
            # rotate stations to ZNE
            # wfdat = check4rotated(wfdat, metadata)  # MP MP TEMPORARILY DISABLED !!!!!!!!!!!
            if locflag:
                print("Restitute data ...")
                corr_dat = restitute_data(wfdat.copy(), metadata, ncores=ncores)
        if not corr_dat and locflag:
            locflag = 2
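        # corr_dat holds the restituted (instrument-corrected) traces that are later used for
        # moment and local magnitude estimation; if restitution returned nothing while a
        # location routine is active, locflag is lowered to 2.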
        print('Working on event %s. Stations: %s' % (eventpath, station))
        print(wfdat)
        ##########################################################
        # !automated picking starts here!
        fdwj = None
        if fig_dict_wadatijack:
            fdwj = fig_dict_wadatijack[evID]
        picks = autopickevent(wfdat, parameter, iplot=iplot, fig_dict=fig_dict,
                              fig_dict_wadatijack=fdwj,
                              ncores=ncores, metadata=metadata, origin=data.get_evt_data().origins)
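        # 'picks' is a dictionary keyed by station name; each entry holds 'P' and 'S'
        # sub-dictionaries whose attributes (e.g. 'mpp' for the most probable pick time and
        # 'weight' for the pick quality class) are evaluated below for location and
        # iterative re-picking.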
        ##########################################################
        # locating
        if locflag > 0:
            # write phases to NLLoc-phase file
            nll.export(picks, phasefile, parameter)

            # For locating the event the NLLoc-control file has to be modified!
            nllocout = '%s_%s' % (evID, nllocoutpatter)
            # create comment line for NLLoc-control file
            nll.modify_inputs(ctrf, nllocroot, nllocout, phasef,
                              ttpat)

            # locate the event
            nll.locate(ctrfile, parameter)

            # !iterative picking if traces remained unpicked or occupied with bad picks!
            # get theoretical onset times for picks with weights >= 4
            # in order to reprocess them using smaller time windows around theoretical onset
            # get stations with bad onsets
            badpicks = []
            for key in picks:
                if picks[key]['P']['weight'] >= 4 or picks[key]['S']['weight'] >= 4:
                    badpicks.append([key, picks[key]['P']['mpp']])

            # TODO keep code DRY (Don't Repeat Yourself) the following part is written twice
            # suggestion: delete block and modify the later similar block to work properly

            if len(badpicks) == 0:
                print("autoPyLoT: No bad onsets found, thus no iterative picking necessary!")
                # get NLLoc-location file
                locsearch = '%s/loc/%s.????????.??????.grid?.loc.hyp' % (nllocroot, nllocout)
                if len(glob.glob(locsearch)) > 0:
                    # get latest NLLoc-location file if several are available
                    nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
                    evt = read_events(nllocfile)[0]
                    # calculate seismic moment Mo and moment magnitude Mw
                    moment_mag = MomentMagnitude(corr_dat, evt, parameter.get('vp'),
                                                 parameter.get('Qp'),
                                                 parameter.get('rho'), True,
                                                 iplot)
                    # update pick with moment property values (w0, fc, Mo)
                    for stats, props in moment_mag.moment_props.items():
                        picks[stats]['P'].update(props)
                    evt = moment_mag.updated_event()
                    net_mw = moment_mag.net_magnitude()
                    if net_mw is not None:
                        print("Network moment magnitude: %4.1f" % net_mw.mag)
                    # calculate local (Richter) magnitude
                    WAscaling = parameter.get('WAscaling')
                    magscaling = parameter.get('magscaling')
                    local_mag = LocalMagnitude(corr_dat, evt,
                                               parameter.get('sstop'),
                                               WAscaling, True, iplot)
                    # update pick with local magnitude property values
                    for stats, amplitude in local_mag.amplitudes.items():
                        picks[stats]['S']['Ao'] = amplitude.generic_amplitude
                    print("Local station magnitudes scaled with:")
                    print("log(Ao) + %f * log(r) + %f * r + %f" % (WAscaling[0],
                                                                   WAscaling[1],
                                                                   WAscaling[2]))
                    evt = local_mag.updated_event(magscaling)
                    net_ml = local_mag.net_magnitude(magscaling)
                    if net_ml:
                        print("Network local magnitude: %4.1f" % net_ml.mag)
                    if magscaling is None:
                        scaling = False
                    elif magscaling[0] != 0 and magscaling[1] != 0:
                        scaling = False
                    else:
                        scaling = True
                    if scaling:
                        print("Network local magnitude scaled with:")
                        print("%f * Ml + %f" % (magscaling[0], magscaling[1]))
                else:
                    print("autoPyLoT: No NLLoc-location file available!")
                    print("No source parameter estimation possible!")
                    locflag = 9
            else:
                # get theoretical P-onset times from NLLoc-location file
                locsearch = '%s/loc/%s.????????.??????.grid?.loc.hyp' % (nllocroot, nllocout)
                if len(glob.glob(locsearch)) > 0:
                    # get latest file if several are available
                    nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
                    nlloccounter = 0
                    while len(badpicks) > 0 and nlloccounter <= maxnumit:
                        nlloccounter += 1
                        if nlloccounter > maxnumit:
                            print("autoPyLoT: Number of maximum iterations reached, stop iterative picking!")
                            break
                        print("autoPyLoT: Starting with iteration No. %d ..." % nlloccounter)
                        if input_dict:
                            if 'fig_dict' in input_dict:
                                fig_dict = input_dict['fig_dict']
                            picks = iteratepicker(wfdat, nllocfile, picks, badpicks, parameter,
                                                  fig_dict=fig_dict)
                        else:
                            picks = iteratepicker(wfdat, nllocfile, picks, badpicks, parameter)
                        # write phases to NLLoc-phase file
                        nll.export(picks, phasefile, parameter)
                        # remove actual NLLoc-location file to keep only the last
                        os.remove(nllocfile)
                        # locate the event
                        nll.locate(ctrfile, parameter)
                        print("autoPyLoT: Iteration No. %d finished." % nlloccounter)
                        # get updated NLLoc-location file
                        nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
                        # check for bad picks
                        badpicks = []
                        for key in picks:
                            if picks[key]['P']['weight'] >= 4 or picks[key]['S']['weight'] >= 4:
                                badpicks.append([key, picks[key]['P']['mpp']])
                        print("autoPyLoT: After iteration No. %d: %d bad onsets found ..." % (nlloccounter,
                                                                                              len(badpicks)))
                        if len(badpicks) == 0:
                            print("autoPyLoT: No more bad onsets found, stop iterative picking!")
                            nlloccounter = maxnumit
                    evt = read_events(nllocfile)[0]
                    if locflag < 2:
                        # calculate seismic moment Mo and moment magnitude Mw
                        moment_mag = MomentMagnitude(corr_dat, evt, parameter.get('vp'),
                                                     parameter.get('Qp'),
                                                     parameter.get('rho'), True,
                                                     iplot)
                        # update pick with moment property values (w0, fc, Mo)
                        for stats, props in moment_mag.moment_props.items():
                            if stats in picks:
                                picks[stats]['P'].update(props)
                        evt = moment_mag.updated_event()
                        net_mw = moment_mag.net_magnitude()
                        if net_mw is not None:
                            print("Network moment magnitude: %4.1f" % net_mw.mag)
                        # calculate local (Richter) magnitude
                        WAscaling = parameter.get('WAscaling')
                        magscaling = parameter.get('magscaling')
                        local_mag = LocalMagnitude(corr_dat, evt,
                                                   parameter.get('sstop'),
                                                   WAscaling, True, iplot)
                        # update pick with local magnitude property values
                        for stats, amplitude in local_mag.amplitudes.items():
                            if stats in picks:
                                picks[stats]['S']['Ao'] = amplitude.generic_amplitude
                        print("Local station magnitudes scaled with:")
                        print("log(Ao) + %f * log(r) + %f * r + %f" % (WAscaling[0],
                                                                       WAscaling[1],
                                                                       WAscaling[2]))
                        evt = local_mag.updated_event(magscaling)
                        net_ml = local_mag.net_magnitude(magscaling)
                        if net_ml:
                            print("Network local magnitude: %4.1f" % net_ml.mag)
                        if magscaling is None:
                            scaling = False
                        elif magscaling[0] != 0 and magscaling[1] != 0:
                            scaling = False
                        else:
                            scaling = True
                        if scaling:
                            print("Network local magnitude scaled with:")
                            print("%f * Ml + %f" % (magscaling[0], magscaling[1]))
                else:
                    print("autoPyLoT: No NLLoc-location file available! Stop iteration!")
                    locflag = 9
        ##########################################################
        # write phase files for various location
        # and fault mechanism calculation routines
        # ObsPy event object
        if evt is not None:
            event_id = eventpath.split('/')[-1]
            evt.resource_id = ResourceIdentifier('smi:local/' + event_id)
            data.applyEVTData(evt, 'event')
        data.applyEVTData(picks)
        if savexml:
            if savepath == 'None' or savepath is None:
                saveEvtPath = eventpath
            else:
                saveEvtPath = savepath
            fnqml = '%s/PyLoT_%s' % (saveEvtPath, evID)
            data.exportEvent(fnqml, fnext='.xml', fcheck=['auto', 'magnitude', 'origin'])
        if locflag == 1:
            # HYPO71
            hypo71file = '%s/PyLoT_%s_HYPO71_phases' % (eventpath, evID)
            hypo71.export(picks, hypo71file, parameter)
            # HYPOSAT
            hyposatfile = '%s/PyLoT_%s_HYPOSAT_phases' % (eventpath, evID)
            hyposat.export(picks, hyposatfile, parameter)
            # VELEST
            velestfile = '%s/PyLoT_%s_VELEST_phases.cnv' % (eventpath, evID)
            velest.export(picks, velestfile, evt, parameter)
            # hypoDD
            hypoddfile = '%s/PyLoT_%s_hypoDD_phases.pha' % (eventpath, evID)
            hypodd.export(picks, hypoddfile, parameter, evt)
            # FOCMEC
            focmecfile = '%s/PyLoT_%s_FOCMEC.in' % (eventpath, evID)
            focmec.export(picks, focmecfile, parameter, evt)
            # HASH
            hashfile = '%s/PyLoT_%s_HASH' % (eventpath, evID)
            hash.export(picks, hashfile, parameter, evt)

        endsplash = '''------------------------------------------\n
                       -----Finished event %s!-----\n
                       ------------------------------------------''' % evID
        print(endsplash)
        locflag = glocflag
        if locflag == 0:
            print("autoPyLoT was running in non-location mode!")

        # save picks for current event ID to dictionary with ALL picks
        allpicks[evID] = picks

    endsp = '''####################################\n
               ************************************\n
               *********autoPyLoT terminates*******\n
               The Python picking and Location Tool\n
               ************************************'''
    print(endsp)
    return allpicks


if __name__ == "__main__":
    # parse arguments
    parser = argparse.ArgumentParser(
        description='''autoPyLoT automatically picks phase onset times using higher order statistics,
                       autoregressive prediction and AIC followed by locating the seismic events using
                       NLLoc''')

    parser.add_argument('-i', '-I', '--inputfile', type=str,
                        action='store',
                        help='''full path to the file containing the input
                        parameters for autoPyLoT''')
    parser.add_argument('-p', '-P', '--iplot', type=int,
                        action='store', default=0,
                        help='''optional, logical variable for plotting: 0=none, 1=partial, 2=all''')
    parser.add_argument('-f', '-F', '--fnames', type=str,
                        action='store',
                        help='''optional, list of data file names''')
    parser.add_argument('-e', '--eventid', type=str,
                        action='store',
                        help='''optional, event path incl. event ID''')
    parser.add_argument('-s', '-S', '--spath', type=str,
                        action='store',
                        help='''optional, save path for autoPyLoT output''')
    parser.add_argument('-c', '-C', '--ncores', type=int,
                        action='store', default=0,
                        help='''optional, number of CPU cores used for parallel processing (default: all available (=0))''')
    parser.add_argument('-dmt', '-DMT', '--obspy_dmt_wfpath', type=str,
                        action='store', default=False,
                        help='''optional, wftype (raw, processed) used for obspyDMT database structure''')

    cla = parser.parse_args()

    picks = autoPyLoT(inputfile=str(cla.inputfile), fnames=str(cla.fnames),
                      eventid=str(cla.eventid), savepath=str(cla.spath),
                      ncores=cla.ncores, iplot=int(cla.iplot), obspyDMT_wfpath=str(cla.obspy_dmt_wfpath))