#!/usr/bin/python
# -*- coding: utf-8 -*-

from __future__ import print_function

import argparse
import datetime
import glob
import os

import pylot.core.loc.focmec as focmec
import pylot.core.loc.hash as hash
import pylot.core.loc.hypo71 as hypo71
import pylot.core.loc.hypodd as hypodd
import pylot.core.loc.hyposat as hyposat
import pylot.core.loc.nll as nll
import pylot.core.loc.velest as velest
from obspy import read_events
from obspy.core.event import ResourceIdentifier
# from PySide.QtGui import QWidget, QInputDialog
from pylot.core.analysis.magnitude import MomentMagnitude, LocalMagnitude
from pylot.core.io.data import Data
from pylot.core.io.inputs import PylotParameter
from pylot.core.pick.autopick import autopickevent, iteratepicker
from pylot.core.util.dataprocessing import restitute_data, read_metadata, \
    remove_underscores
from pylot.core.util.defaults import SEPARATOR
from pylot.core.util.event import Event
from pylot.core.util.structure import DATASTRUCTURE
from pylot.core.util.utils import real_None
from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()


def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None,
              eventid=None, savepath=None, station='all', iplot=0, ncores=0):
    """
    Determine phase onsets automatically, utilizing the automatic picking
    algorithms by Kueperkoch et al. (2010, 2012).

    :param inputfile: path to the input file containing all parameter
        information for automatic picking (for formatting details, see
        `~pylot.core.io.inputs.PylotParameter`)
    :type inputfile: str
    :return: dictionary of picks, keyed by station name

    .. rubric:: Example
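
    A minimal usage sketch (the parameter file path is a placeholder and has
    to point to an existing PyLoT input file)::

        picks = autoPyLoT(inputfile='/path/to/autoPyLoT.in', ncores=4)
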
    """

    if ncores == 1:
        sp_info = 'autoPyLoT is running serially on 1 core.'
    else:
        if ncores == 0:
            ncores_readable = 'all available'
        else:
            ncores_readable = ncores
        sp_info = 'autoPyLoT is running in parallel on {} cores.'.format(ncores_readable)

    splash = '''************************************\n
                *********autoPyLoT starting*********\n
                The Python picking and Location Tool\n
                Version {version} 2017\n
                \n
                Authors:\n
                L. Kueperkoch (BESTEC GmbH, Landau i. d. Pfalz)\n
                M. Paffrath (Ruhr-Universitaet Bochum)\n
                S. Wehling-Benatelli (Ruhr-Universitaet Bochum)\n
                {sp}
                ***********************************'''.format(version=_getVersionString(),
                                                               sp=sp_info)
    print(splash)

    parameter = real_None(parameter)
    inputfile = real_None(inputfile)
    eventid = real_None(eventid)
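
    # location mode flag as used throughout this routine (values inferred from
    # the code below): 1 = pick and locate with NLLoc, 0 = non-location mode
    # (no NLLoc binary configured), 2 = picking only (single event started from
    # the GUI or restitution failed), 9 = no NLLoc location file was found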
    locflag = 1
    if input_dict and isinstance(input_dict, dict):
        if 'parameter' in input_dict:
            parameter = input_dict['parameter']
        if 'fig_dict' in input_dict:
            fig_dict = input_dict['fig_dict']
        if 'station' in input_dict:
            station = input_dict['station']
        if 'fnames' in input_dict:
            fnames = input_dict['fnames']
        if 'eventid' in input_dict:
            eventid = input_dict['eventid']
        if 'iplot' in input_dict:
            iplot = input_dict['iplot']
        if 'locflag' in input_dict:
            locflag = input_dict['locflag']

    if not parameter:
        if inputfile:
            parameter = PylotParameter(inputfile)
            # iplot = parameter['iplot']
        else:
            infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
            print('Using default input file {}'.format(infile))
            parameter = PylotParameter(infile)
    else:
        if not isinstance(parameter, PylotParameter):
            print('Wrong input type for parameter: {}'.format(type(parameter)))
            return
        if inputfile:
            print('Parameters set and input file given. Choose only one of them.')
            return

    evt = None

    # reading parameter file
    if parameter.hasParam('datastructure'):
        # getting information on data structure
        datastructure = DATASTRUCTURE[parameter.get('datastructure')]()
        dsfields = {'root': parameter.get('rootpath'),
                    'dpath': parameter.get('datapath'),
                    'dbase': parameter.get('database')}

        exf = ['root', 'dpath', 'dbase']

        if parameter['eventID'] != '*' and fnames == 'None':
            dsfields['eventID'] = parameter['eventID']
            exf.append('eventID')

        datastructure.modifyFields(**dsfields)
        datastructure.setExpandFields(exf)

    # check if default location routine NLLoc is available
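    # (the settings below assume the usual NLLoc working directory layout with
    # 'obs', 'run' and 'loc' subdirectories underneath the NLLoc root path)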
    if parameter['nllocbin'] and locflag:
        # get NLLoc-root path
        nllocroot = parameter.get('nllocroot')
        # get path to NLLoc executable
        nllocbin = parameter.get('nllocbin')
        nlloccall = '%s/NLLoc' % nllocbin
        # get name of phase file
        phasef = parameter.get('phasefile')
        phasefile = '%s/obs/%s' % (nllocroot, phasef)
        # get name of NLLoc-control file
        ctrf = parameter.get('ctrfile')
        ctrfile = '%s/run/%s' % (nllocroot, ctrf)
        # pattern of NLLoc ttimes from location grid
        ttpat = parameter.get('ttpatter')
        # pattern of NLLoc-output file
        nllocoutpatter = parameter.get('outpatter')
        maxnumit = 3  # maximum number of iterations for re-picking
    else:
        locflag = 0
        print("                 !!!                 ")
        print("!!No location routine available, autoPyLoT is running in non-location mode!!")
        print("!!No source parameter estimation possible!!")
        print("                 !!!                 ")

    if not input_dict:
        # started in production mode
        datapath = datastructure.expandDataPath()
        if fnames == 'None' and parameter['eventID'] == '*':
            # multiple event processing
            # read each event in database
            events = [event for event in glob.glob(os.path.join(datapath, '*')) if os.path.isdir(event)]
        elif fnames == 'None' and parameter['eventID'] != '*' and not isinstance(parameter['eventID'], list):
            # single event processing
            events = glob.glob(os.path.join(datapath, parameter['eventID']))
        elif fnames == 'None' and isinstance(parameter['eventID'], list):
            # multiple event processing
            events = []
            for eventID in parameter['eventID']:
                events.append(os.path.join(datapath, eventID))
        else:
            # autoPyLoT was initialized from GUI
            events = []
            events.append(eventid)
            evID = os.path.split(eventid)[-1]
            locflag = 2
    else:
        # started in tune mode
        datapath = os.path.join(parameter['rootpath'],
                                parameter['datapath'])
        events = []
        events.append(os.path.join(datapath,
                                   parameter['database'],
                                   eventid))

    if not events:
        print('autoPyLoT: No events given. Return!')
        return

    # transform system path separator to '/'
    for index, eventpath in enumerate(events):
        eventpath = eventpath.replace(SEPARATOR, '/')
        events[index] = eventpath

    for eventpath in events:
        evID = os.path.split(eventpath)[-1]
        fext = '.xml'
        filename = os.path.join(eventpath, 'PyLoT_' + evID + fext)
        try:
            data = Data(evtdata=filename)
            print('Reading event data from file {}...'.format(filename))
        except Exception as e:
            print('Could not read event from file {}: {}'.format(filename, e))
            data = Data()
            pylot_event = Event(eventpath)  # event should be path to event directory
            data.setEvtData(pylot_event)
        if fnames == 'None':
            data.setWFData(glob.glob(os.path.join(datapath, eventpath, '*')))
            # the following is necessary because within
            # multiple event processing no event ID is provided
            # in autopylot.in
            try:
                parameter.get('eventID')
            except Exception:
                now = datetime.datetime.now()
                eventID = '%d%02d%02d%02d%02d' % (now.year,
                                                  now.month,
                                                  now.day,
                                                  now.hour,
                                                  now.minute)
                parameter.setParam(eventID=eventID)
        else:
            data.setWFData(fnames)

            eventpath = events[0]
            # now = datetime.datetime.now()
            # evID = '%d%02d%02d%02d%02d' % (now.year,
            #                                now.month,
            #                                now.day,
            #                                now.hour,
            #                                now.minute)
            parameter.setParam(eventID=eventid)
        wfdat = data.getWFData()  # all available streams
        if station != 'all':
            wfdat = wfdat.select(station=station)
            if not wfdat:
                print('Could not find station {}. STOP!'.format(station))
                return
        wfdat = remove_underscores(wfdat)
        metadata = read_metadata(parameter.get('invdir'))
        print("Restitute data ...")
        corr_dat = restitute_data(wfdat.copy(), *metadata, ncores=ncores)
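        # corr_dat holds the instrument-corrected copies of the waveforms and
        # is used for the magnitude estimation further below, while the raw
        # wfdat streams are passed to the picker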
        if not corr_dat and locflag:
            locflag = 2
        print('Working on event %s. Stations: %s' % (eventpath, station))
        print(wfdat)
        ##########################################################
        # !automated picking starts here!
        if input_dict and 'fig_dict' in input_dict:
            fig_dict = input_dict['fig_dict']
            picks = autopickevent(wfdat, parameter, iplot=iplot, fig_dict=fig_dict,
                                  ncores=ncores, metadata=metadata, origin=data.get_evt_data().origins)
        else:
            picks = autopickevent(wfdat, parameter, iplot=iplot,
                                  ncores=ncores, metadata=metadata, origin=data.get_evt_data().origins)
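        # 'picks' is a dictionary keyed by station name; each entry carries the
        # 'P' and 'S' phase attributes (e.g. most probable pick 'mpp', pick
        # 'weight', and later on amplitude and source properties) used below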
        ##########################################################
        # locating
        if locflag > 0:
            # write phases to NLLoc-phase file
            nll.export(picks, phasefile, parameter)

            # For locating the event the NLLoc-control file has to be modified!
            nllocout = '%s_%s' % (evID, nllocoutpatter)
            # create comment line for NLLoc-control file
            nll.modify_inputs(ctrf, nllocroot, nllocout, phasef,
                              ttpat)

            # locate the event
            nll.locate(ctrfile, inputfile)

            # !iterative picking if traces remained unpicked or occupied with bad picks!
            # get theoretical onset times for picks with weights >= 4
            # in order to reprocess them using smaller time windows around theoretical onset
            # get stations with bad onsets
            badpicks = []
            for key in picks:
                if picks[key]['P']['weight'] >= 4 or picks[key]['S']['weight'] >= 4:
                    badpicks.append([key, picks[key]['P']['mpp']])

            # TODO keep code DRY (Don't Repeat Yourself) - the following part is written twice
            # suggestion: delete this block and modify the later, similar block to work properly

            if len(badpicks) == 0:
                print("autoPyLoT: No bad onsets found, thus no iterative picking necessary!")
                # get NLLoc-location file
                locsearch = '%s/loc/%s.????????.??????.grid?.loc.hyp' % (nllocroot, nllocout)
                if len(glob.glob(locsearch)) > 0:
                    # get latest NLLoc-location file if several are available
                    nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
                    evt = read_events(nllocfile)[0]
                    # calculate seismic moment Mo and moment magnitude Mw
                    moment_mag = MomentMagnitude(corr_dat, evt, parameter.get('vp'),
                                                 parameter.get('Qp'),
                                                 parameter.get('rho'), True,
                                                 iplot)
                    # update pick with moment property values (w0, fc, Mo)
                    for station, props in moment_mag.moment_props.items():
                        picks[station]['P'].update(props)
                    evt = moment_mag.updated_event()
                    net_mw = moment_mag.net_magnitude()
                    print("Network moment magnitude: %4.1f" % net_mw.mag)
                    # calculate local (Richter) magnitude
                    WAscaling = parameter.get('WAscaling')
                    magscaling = parameter.get('magscaling')
                    local_mag = LocalMagnitude(corr_dat, evt,
                                               parameter.get('sstop'),
                                               WAscaling, True, iplot)
                    for station, amplitude in local_mag.amplitudes.items():
                        picks[station]['S']['Ao'] = amplitude.generic_amplitude
                    print("Local station magnitudes scaled with:")
                    print("log(Ao) + %f * log(r) + %f * r + %f" % (WAscaling[0],
                                                                   WAscaling[1],
                                                                   WAscaling[2]))
                    evt = local_mag.updated_event(magscaling)
                    net_ml = local_mag.net_magnitude(magscaling)
                    print("Network local magnitude: %4.1f" % net_ml.mag)
                    print("Network local magnitude scaled with:")
                    print("%f * Ml + %f" % (magscaling[0], magscaling[1]))
                else:
                    print("autoPyLoT: No NLLoc-location file available!")
                    print("No source parameter estimation possible!")
                    locflag = 9
            else:
                # get theoretical P-onset times from NLLoc-location file
                locsearch = '%s/loc/%s.????????.??????.grid?.loc.hyp' % (nllocroot, nllocout)
                if len(glob.glob(locsearch)) > 0:
                    # get latest file if several are available
                    nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
                    nlloccounter = 0
                    while len(badpicks) > 0 and nlloccounter <= maxnumit:
                        nlloccounter += 1
                        if nlloccounter > maxnumit:
                            print("autoPyLoT: Number of maximum iterations reached, stop iterative picking!")
                            break
                        print("autoPyLoT: Starting with iteration No. %d ..." % nlloccounter)
                        if input_dict and 'fig_dict' in input_dict:
                            fig_dict = input_dict['fig_dict']
                            picks = iteratepicker(wfdat, nllocfile, picks, badpicks, parameter,
                                                  fig_dict=fig_dict)
                        else:
                            picks = iteratepicker(wfdat, nllocfile, picks, badpicks, parameter)
                        # write phases to NLLoc-phase file
                        nll.export(picks, phasefile, parameter)
                        # remove the current NLLoc-location file to keep only the latest one
                        os.remove(nllocfile)
                        # locate the event
                        nll.locate(ctrfile, inputfile)
                        print("autoPyLoT: Iteration No. %d finished." % nlloccounter)
                        # get updated NLLoc-location file
                        nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
                        # check for bad picks
                        badpicks = []
                        for key in picks:
                            if picks[key]['P']['weight'] >= 4 or picks[key]['S']['weight'] >= 4:
                                badpicks.append([key, picks[key]['P']['mpp']])
                        print("autoPyLoT: After iteration No. %d: %d bad onsets found ..." % (nlloccounter,
                                                                                              len(badpicks)))
                        if len(badpicks) == 0:
                            print("autoPyLoT: No more bad onsets found, stop iterative picking!")
                            nlloccounter = maxnumit
                    evt = read_events(nllocfile)[0]
                    if locflag < 2:
                        # calculate seismic moment Mo and moment magnitude Mw
                        moment_mag = MomentMagnitude(corr_dat, evt, parameter.get('vp'),
                                                     parameter.get('Qp'),
                                                     parameter.get('rho'), True,
                                                     iplot)
                        # update pick with moment property values (w0, fc, Mo)
                        for station, props in moment_mag.moment_props.items():
                            picks[station]['P'].update(props)
                        evt = moment_mag.updated_event()
                        net_mw = moment_mag.net_magnitude()
                        print("Network moment magnitude: %4.1f" % net_mw.mag)
                        # calculate local (Richter) magnitude
                        WAscaling = parameter.get('WAscaling')
                        magscaling = parameter.get('magscaling')
                        local_mag = LocalMagnitude(corr_dat, evt,
                                                   parameter.get('sstop'),
                                                   WAscaling, True, iplot)
                        for station, amplitude in local_mag.amplitudes.items():
                            picks[station]['S']['Ao'] = amplitude.generic_amplitude
                        print("Local station magnitudes scaled with:")
                        print("log(Ao) + %f * log(r) + %f * r + %f" % (WAscaling[0],
                                                                       WAscaling[1],
                                                                       WAscaling[2]))
                        evt = local_mag.updated_event(magscaling)
                        net_ml = local_mag.net_magnitude(magscaling)
                        print("Network local magnitude: %4.1f" % net_ml.mag)
                        print("Network local magnitude scaled with:")
                        print("%f * Ml + %f" % (magscaling[0], magscaling[1]))
                else:
                    print("autoPyLoT: No NLLoc-location file available! Stop iteration!")
                    locflag = 9
        ##########################################################
        # write phase files for various location
        # and fault mechanism calculation routines
        # ObsPy event object
        data.applyEVTData(picks)
        if evt is not None:
            event_id = eventpath.split('/')[-1]
            evt.resource_id = ResourceIdentifier('smi:local/' + event_id)
            data.applyEVTData(evt, 'event')
        fnqml = '%s/PyLoT_%s' % (eventpath, evID)
        data.exportEvent(fnqml, fnext='.xml', fcheck=['auto', 'magnitude', 'origin'])
        if locflag == 1:
            # HYPO71
            hypo71file = '%s/PyLoT_%s_HYPO71_phases' % (eventpath, evID)
            hypo71.export(picks, hypo71file, parameter)
            # HYPOSAT
            hyposatfile = '%s/PyLoT_%s_HYPOSAT_phases' % (eventpath, evID)
            hyposat.export(picks, hyposatfile, parameter)
            # VELEST
            velestfile = '%s/PyLoT_%s_VELEST_phases.cnv' % (eventpath, evID)
            velest.export(picks, velestfile, parameter, evt)
            # hypoDD
            hypoddfile = '%s/PyLoT_%s_hypoDD_phases.pha' % (eventpath, evID)
            hypodd.export(picks, hypoddfile, parameter, evt)
            # FOCMEC
            focmecfile = '%s/PyLoT_%s_FOCMEC.in' % (eventpath, evID)
            focmec.export(picks, focmecfile, parameter, evt)
            # HASH
            hashfile = '%s/PyLoT_%s_HASH' % (eventpath, evID)
            hash.export(picks, hashfile, parameter, evt)
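            # all phase files follow the naming pattern 'PyLoT_<evID>_<routine>...'
            # and are written into the respective event directory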

        endsplash = '''------------------------------------------\n
                       -----Finished event %s!-----\n
                       ------------------------------------------''' % evID
        print(endsplash)
        if locflag == 0:
            print("autoPyLoT was running in non-location mode!")

    endsp = '''####################################\n
               ************************************\n
               *********autoPyLoT terminates*******\n
               The Python picking and Location Tool\n
               ************************************'''
    print(endsp)
    return picks


if __name__ == "__main__":
    # parse arguments
    parser = argparse.ArgumentParser(
        description='''autoPyLoT automatically picks phase onset times using higher order statistics,
                       autoregressive prediction and AIC, followed by locating the seismic events using
                       NLLoc''')

    parser.add_argument('-i', '-I', '--inputfile', type=str,
                        action='store',
                        help='''full path to the file containing the input
                        parameters for autoPyLoT''')
    parser.add_argument('-p', '-P', '--iplot', type=int,
                        action='store', default=0,
                        help='''optional, plotting flag: 0=no plots, 1=partial plots, 2=all plots''')
    parser.add_argument('-f', '-F', '--fnames', type=str,
                        action='store',
                        help='''optional, list of data file names''')
    parser.add_argument('-e', '--eventid', type=str,
                        action='store',
                        help='''optional, event path incl. event ID''')
    parser.add_argument('-s', '-S', '--spath', type=str,
                        action='store',
                        help='''optional, save path for autoPyLoT output''')
    parser.add_argument('-c', '-C', '--ncores', type=int,
                        action='store', default=0,
                        help='''optional, number of CPU cores used for parallel processing (default: 0 = all available)''')
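
    # example invocation (script name and paths are placeholders and have to be adapted):
    #   python autoPyLoT.py -i /path/to/autoPyLoT.in -e <event directory> -c 4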

    cla = parser.parse_args()

    picks = autoPyLoT(inputfile=str(cla.inputfile), fnames=str(cla.fnames),
                      eventid=str(cla.eventid), savepath=str(cla.spath),
                      ncores=cla.ncores, iplot=cla.iplot)