multiprocessing implemented for restitution and autopicker

parent ef1755a80f
commit 393289245f
@@ -1054,9 +1054,9 @@ class MainWindow(QMainWindow):
         self.metadata = read_metadata(fninv)

         wf_copy = self.get_data().getWFData().copy()
-        [corr_wf, rest_flag] = restitute_data(wf_copy, *self.metadata)
-        if not rest_flag:
-            raise ProcessingError('Restitution of waveform data failed!')
+        corr_wf = restitute_data(wf_copy, *self.metadata)
+        # if not rest_flag:
+        #     raise ProcessingError('Restitution of waveform data failed!')
         if type == 'ML':
             local_mag = RichterMagnitude(corr_wf, self.get_data().get_evt_data(), self.inputs.get('sstop'), verbosity = True)
             return local_mag.updated_event()
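Note: restitute_data now returns only the corrected stream, so the old rest_flag
check is commented out rather than adapted. A caller that still wants to guard
against a completely failed restitution could test the returned stream itself;
a minimal sketch, not part of this commit (the empty-stream guard is an assumption):

    corr_wf = restitute_data(wf_copy, *self.metadata)
    if len(corr_wf) == 0:
        # no trace survived restitution, e.g. because no metadata was found
        raise ProcessingError('Restitution of waveform data failed!')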
autoPyLoT.py (5 changes) Normal file → Executable file
@@ -29,6 +29,7 @@ from pylot.core.util.version import get_git_version as _getVersionString
 __version__ = _getVersionString()

+

 def autoPyLoT(inputfile, fnames=None, savepath=None):
     """
     Determine phase onsets automatically utilizing the automatic picking
@@ -146,7 +147,7 @@ def autoPyLoT(inputfile, fnames=None, savepath=None):
         wfdat = data.getWFData()  # all available streams
         wfdat = remove_underscores(wfdat)
         metadata = read_metadata(parameter.get('invdir'))
-        corr_dat, rest_flag = restitute_data(wfdat.copy(), *metadata)
+        corr_dat = restitute_data(wfdat.copy(), *metadata)

         print('Working on event %s' % event)
         print(data)
@@ -319,7 +320,7 @@ if __name__ == "__main__":
                         action='store',
                         help='''optional, list of data file names''')
     parser.add_argument('-s', '-S', '--spath', type=str,
-                        action=store,
+                        action='store',
                         help='''optional, save path for autoPyLoT output''')
     parser.add_argument('-v', '-V', '--version', action='version',
                         version='autoPyLoT ' + __version__,
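Note: the change from action=store to action='store' is also a bug fix; the bare
name store is undefined in that scope, so evaluating the old line would have
raised NameError: name 'store' is not defined.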
@@ -1 +1 @@
-f5c0-dirty
+ef17-dirty
@@ -17,13 +17,14 @@ from pylot.core.pick.charfuns import CharacteristicFunction
 from pylot.core.pick.charfuns import HOScf, AICcf, ARZcf, ARHcf, AR3Ccf
 from pylot.core.pick.utils import checksignallength, checkZ4S, earllatepicker, \
     getSNR, fmpicker, checkPonsets, wadaticheck
-from pylot.core.util.utils import getPatternLine
+from pylot.core.util.utils import getPatternLine, gen_Pool
 from pylot.core.io.data import Data


 def autopickevent(data, param):
     stations = []
     all_onsets = {}
+    input_tuples = []

     # get some parameters for quality control from
     # parameter input file (usually autoPyLoT.in).
@@ -40,7 +41,17 @@ def autopickevent(data, param):

     for station in stations:
         topick = data.select(station=station)
-        all_onsets[station] = autopickstation(topick, param, verbose=apverbose)
+        #all_onsets[station] = autopickstation(topick, param, verbose=apverbose)
+        input_tuples.append((topick, param, apverbose))
+
+    pool = gen_Pool()
+    result = pool.map(call_autopickstation, input_tuples)
+    pool.close()
+
+    for pick in result:
+        station = pick['station']
+        pick.pop('station')
+        all_onsets[station] = pick

     # quality control
     # median check and jackknife on P-onset times
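Note: Pool.map passes exactly one argument to the mapped function, so the
per-station arguments are packed into tuples here and unpacked again inside
call_autopickstation. A minimal self-contained sketch of the same pattern
(call_square and the sample data are made up for illustration):

    import multiprocessing

    def call_square(input_tuple):
        # unpack the packed arguments, mirroring call_autopickstation
        base, exponent = input_tuple
        return base ** exponent

    if __name__ == '__main__':
        input_tuples = [(2, 2), (3, 2), (4, 2)]
        pool = multiprocessing.Pool(multiprocessing.cpu_count())
        result = pool.map(call_square, input_tuples)  # [4, 9, 16], in input order
        pool.close()
        pool.join()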
@@ -49,6 +60,11 @@ def autopickevent(data, param):
     return wadaticheck(jk_checked_onsets, wdttolerance, iplot)


+def call_autopickstation(input_tuple):
+    wfstream, pickparam, verbose = input_tuple
+    return autopickstation(wfstream, pickparam, verbose)
+
+
 def autopickstation(wfstream, pickparam, verbose=False):
     """
     :param wfstream: `~obspy.core.stream.Stream` containing waveform
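Note: the wrapper has to live at module level because multiprocessing pickles
the mapped callable to ship it to the worker processes; a closure or lambda
would not pickle. On Python 3 the same effect could be had without a wrapper
via pool.starmap, which unpacks each tuple into positional arguments; a hedged
alternative, not what this commit does:

    result = pool.starmap(autopickstation, input_tuples)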
@@ -789,7 +805,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
     spick = dict(channel=ccode, network=ncode, lpp=lpickS, epp=epickS, mpp=mpickS, spe=Serror, snr=SNRS,
                  snrdb=SNRSdB, weight=Sweight, fm=None, picker=picker, Ao=Ao)
     # merge picks into returning dictionary
-    picks = dict(P=ppick, S=spick)
+    picks = dict(P=ppick, S=spick, station=zdat[0].stats.station)
     return picks

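Note: pool.map returns results in input order, but embedding the station name in
each pick dictionary makes every result self-describing, so autopickevent can
re-key the onsets without tracking the order of input_tuples. The caller pops
the helper key again, since 'station' is not part of the pick payload itself.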
@@ -11,7 +11,7 @@ import numpy as np
 from obspy import UTCDateTime, read_inventory, read
 from obspy.io.xseed import Parser
 from pylot.core.util.utils import key_for_set_value, find_in_list, \
-    remove_underscores
+    remove_underscores, gen_Pool


 def time_from_header(header):
@@ -197,33 +197,18 @@ def read_metadata(path_to_inventory):
     return invtype, robj


-def restitute_data(data, invtype, inobj, unit='VEL', force=False):
-    """
-    takes a data stream and a path_to_inventory and returns the corrected
-    waveform data stream
-    :param data: seismic data stream
-    :param invtype: type of found metadata
-    :param inobj: either list of metadata files or `obspy.io.xseed.Parser`
-    object
-    :param unit: unit to correct for (default: 'VEL')
-    :param force: force restitution for already corrected traces (default:
-    False)
-    :return: corrected data stream
-    """
-
-    restflag = list()
-
-    data = remove_underscores(data)
-
-    # loop over traces
-    for tr in data:
-        seed_id = tr.get_id()
-        # check, whether this trace has already been corrected
-        if 'processing' in tr.stats.keys() \
-                and np.any(['remove' in p for p in tr.stats.processing]) \
-                and not force:
-            print("Trace {0} has already been corrected!".format(seed_id))
-            continue
-        stime = tr.stats.starttime
-        prefilt = get_prefilt(tr)
-        if invtype == 'resp':
+def restitute_trace(input_tuple):
+    tr, invtype, inobj, unit, force = input_tuple
+
+    remove_trace = False
+
+    seed_id = tr.get_id()
+    # check, whether this trace has already been corrected
+    if 'processing' in tr.stats.keys() \
+            and np.any(['remove' in p for p in tr.stats.processing]) \
+            and not force:
+        print("Trace {0} has already been corrected!".format(seed_id))
+        return tr, False
+    stime = tr.stats.starttime
+    prefilt = get_prefilt(tr)
+    if invtype == 'resp':
@@ -254,10 +239,9 @@
-            inventory = read_inventory(finv, format='STATIONXML')
-        elif invtype == None:
-            print("No restitution possible, as there are no station-meta data available!")
-            break
-        else:
-            data.remove(tr)
-            continue
-        # apply restitution to data
-        print("Correcting instrument at station %s, channel %s" \
-              % (tr.stats.station, tr.stats.channel))
+        inventory = read_inventory(finv, format='STATIONXML')
+    elif invtype == None:
+        print("No restitution possible, as there are no station-meta data available!")
+        return tr, True
+    else:
+        remove_trace = True
+    # apply restitution to data
+    print("Correcting instrument at station %s, channel %s" \
+          % (tr.stats.station, tr.stats.channel))
@@ -281,17 +265,52 @@
-        else:
-            # restitution done to copies of data thus deleting traces
-            # that failed should not be a problem
-            data.remove(tr)
-            continue
-        restflag.append(True)
+    else:
+        # restitution done to copies of data thus deleting traces
+        # that failed should not be a problem
+        remove_trace = True
+
+    return tr, remove_trace
+
+
+def restitute_data(data, invtype, inobj, unit='VEL', force=False):
+    """
+    takes a data stream and a path_to_inventory and returns the corrected
+    waveform data stream
+    :param data: seismic data stream
+    :param invtype: type of found metadata
+    :param inobj: either list of metadata files or `obspy.io.xseed.Parser`
+    object
+    :param unit: unit to correct for (default: 'VEL')
+    :param force: force restitution for already corrected traces (default:
+    False)
+    :return: corrected data stream
+    """
+
+    restflag = list()
+
+    data = remove_underscores(data)
+
+    # loop over traces
+    input_tuples = []
+    for tr in data:
+        input_tuples.append((tr, invtype, inobj, unit, force))
+        data.remove(tr)
+
+    pool = gen_Pool()
+    result = pool.map(restitute_trace, input_tuples)
+    pool.close()
+
+    for tr, remove_trace in result:
+        if not remove_trace:
+            data.traces.append(tr)

     # check if ALL traces could be restituted, take care of large datasets
     # better try restitution for smaller subsets of data (e.g. station by
     # station)
-    if len(restflag) > 0:
-        restflag = bool(np.all(restflag))
-    else:
-        restflag = False
-    return data, restflag
+    # if len(restflag) > 0:
+    #     restflag = bool(np.all(restflag))
+    # else:
+    #     restflag = False
+    return data


 def get_prefilt(trace, tlow=(0.5, 0.9), thi=(5., 2.), verbosity=0):
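Note: restitute_trace returns a (trace, remove_trace) pair so that restitute_data
can rebuild the stream from the pool results: surviving traces are re-appended
via data.traces.append(tr), flagged ones are dropped. Emptying the stream with
data.remove(tr) inside the for-loop works because obspy's Stream iterator walks
a copy of the trace list, so removal during iteration is safe. A condensed
sketch of an equivalent rebuild step, assuming the same result layout:

    result = pool.map(restitute_trace, input_tuples)  # [(tr, remove_trace), ...]
    pool.close()
    data.traces = [tr for tr, remove_trace in result if not remove_trace]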
@@ -29,19 +29,16 @@ def getindexbounds(f, eta):
     u = find_nearest(f[mi:], b) + mi
     return mi, l, u

-def worker(func, input, cores='max', async=False):
+def gen_Pool(ncores='max'):
     import multiprocessing

-    if cores == 'max':
-        cores = multiprocessing.cpu_count()
+    if ncores=='max':
+        ncores=multiprocessing.cpu_count()

-    pool = multiprocessing.Pool(cores)
-    if async == True:
-        result = pool.map_async(func, input)
-    else:
-        result = pool.map(func, input)
-    pool.close()
-    return result
+    pool = multiprocessing.Pool(ncores)
+    return pool
+

 def clims(lim1, lim2):
     """
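Note: gen_Pool replaces the old worker() helper, which both built the pool and
ran the map; returning the bare pool lets each call site choose map or map_async
and keeps pool construction in one place. Dropping worker() also retires its
async parameter name, which collides with the async keyword reserved in
Python 3.7+. A hedged usage sketch (double is made up for illustration):

    from pylot.core.util.utils import gen_Pool

    def double(x):
        return 2 * x

    if __name__ == '__main__':
        pool = gen_Pool()  # defaults to multiprocessing.cpu_count() workers
        result = pool.map(double, range(10))
        pool.close()
        pool.join()
        print(result)      # [0, 2, 4, ..., 18]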