From cec6cc24c51841bc36c14697248821121a529433 Mon Sep 17 00:00:00 2001 From: Marcel Date: Thu, 10 Apr 2025 13:58:01 +0200 Subject: [PATCH] [initial commit] --- autopylot.sh | 8 +- pylot/tomography/__init__.py | 0 pylot/tomography/fmtomo.py | 23 + pylot/tomography/fmtomo_tools/__init__.py | 2 + .../fmtomo_tools/compare_arrivals_hybrid.py | 177 ++ .../fmtomo_tools/create_tradeoff_runs.py | 39 + .../tomography/fmtomo_tools/dist_first_src.py | 29 + .../tomography/fmtomo_tools/event_thinning.py | 185 ++ .../filter_sources_by_dist_in_fmtomo.py | 28 + .../fmtomo_tools/fmtomo_grid_utils.py | 2019 +++++++++++++++++ .../fmtomo_tools/fmtomo_teleseismic.py | 52 + .../fmtomo_tools/fmtomo_teleseismic_utils.py | 930 ++++++++ .../fmtomo_tools/get_tt_residuals.py | 104 + .../fmtomo_tools/gmtslice_sidehook.py | 174 ++ .../fmtomo_tools/heatmap_two_models.py | 43 + .../fmtomo_tools/misfit_evaluation.py | 126 + .../fmtomo_tools/model_slice_plomerova.py | 69 + .../tomography/fmtomo_tools/modify_otimes.py | 294 +++ pylot/tomography/fmtomo_tools/modify_vgrid.py | 154 ++ pylot/tomography/fmtomo_tools/plot_obsdata.py | 26 + .../fmtomo_tools/plot_residuals_map.py | 341 +++ .../fmtomo_tools/quantify_ray_crossing.py | 270 +++ .../fmtomo_tools/residual_histograms.py | 43 + .../fmtomo_tools/station_density_kde.py | 187 ++ .../fmtomo_tools/submit_fmtomo_run.py | 21 + .../fmtomo_tools/tradeoff_misfit_norm.py | 266 +++ .../visualize_frechet_on_vgrid.py | 63 + pylot/tomography/fmtomo_utils.py | 1323 +++++++++++ pylot/tomography/map_utils.py | 160 ++ pylot/tomography/utils.py | 176 ++ 30 files changed, 7330 insertions(+), 2 deletions(-) create mode 100644 pylot/tomography/__init__.py create mode 100644 pylot/tomography/fmtomo.py create mode 100644 pylot/tomography/fmtomo_tools/__init__.py create mode 100755 pylot/tomography/fmtomo_tools/compare_arrivals_hybrid.py create mode 100644 pylot/tomography/fmtomo_tools/create_tradeoff_runs.py create mode 100755 pylot/tomography/fmtomo_tools/dist_first_src.py create mode 100644 pylot/tomography/fmtomo_tools/event_thinning.py create mode 100644 pylot/tomography/fmtomo_tools/filter_sources_by_dist_in_fmtomo.py create mode 100644 pylot/tomography/fmtomo_tools/fmtomo_grid_utils.py create mode 100644 pylot/tomography/fmtomo_tools/fmtomo_teleseismic.py create mode 100644 pylot/tomography/fmtomo_tools/fmtomo_teleseismic_utils.py create mode 100644 pylot/tomography/fmtomo_tools/get_tt_residuals.py create mode 100644 pylot/tomography/fmtomo_tools/gmtslice_sidehook.py create mode 100644 pylot/tomography/fmtomo_tools/heatmap_two_models.py create mode 100644 pylot/tomography/fmtomo_tools/misfit_evaluation.py create mode 100644 pylot/tomography/fmtomo_tools/model_slice_plomerova.py create mode 100644 pylot/tomography/fmtomo_tools/modify_otimes.py create mode 100644 pylot/tomography/fmtomo_tools/modify_vgrid.py create mode 100755 pylot/tomography/fmtomo_tools/plot_obsdata.py create mode 100755 pylot/tomography/fmtomo_tools/plot_residuals_map.py create mode 100644 pylot/tomography/fmtomo_tools/quantify_ray_crossing.py create mode 100644 pylot/tomography/fmtomo_tools/residual_histograms.py create mode 100644 pylot/tomography/fmtomo_tools/station_density_kde.py create mode 100644 pylot/tomography/fmtomo_tools/submit_fmtomo_run.py create mode 100644 pylot/tomography/fmtomo_tools/tradeoff_misfit_norm.py create mode 100755 pylot/tomography/fmtomo_tools/visualize_frechet_on_vgrid.py create mode 100644 pylot/tomography/fmtomo_utils.py create mode 100644 pylot/tomography/map_utils.py create mode 100644 
pylot/tomography/utils.py diff --git a/autopylot.sh b/autopylot.sh index 35140d47..85d2747c 100644 --- a/autopylot.sh +++ b/autopylot.sh @@ -5,8 +5,12 @@ #$ -pe smp 40 ##$ -l mem=3G #$ -l h_vmem=6G -#$ -l os=*stretch +##$ -l os=*stretch +#$ -q low.q@minos11,low.q@minos12,low.q@minos13,low.q@minos14,low.q@minos15 conda activate pylot_311 -python ./autoPyLoT.py -i /home/marcel/.pylot/pylot_adriaarray.in -c 20 -dmt processed +#python ./autoPyLoT.py -i /home/marcel/.pylot/pylot_adriaarray_m5.0-5.4.in -c 20 -dmt processed +#python ./autoPyLoT.py -i /home/marcel/.pylot/pylot_adriaarray_m5.4-5.7.in -c 20 -dmt processed +#python ./autoPyLoT.py -i /home/marcel/.pylot/pylot_adriaarray_m5.7-6.0.in -c 20 -dmt processed +python ./autoPyLoT.py -i /home/marcel/.pylot/pylot_adriaarray_m6.0-10.0.in -c 20 -dmt processed diff --git a/pylot/tomography/__init__.py b/pylot/tomography/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pylot/tomography/fmtomo.py b/pylot/tomography/fmtomo.py new file mode 100644 index 00000000..a5d43a3f --- /dev/null +++ b/pylot/tomography/fmtomo.py @@ -0,0 +1,23 @@ +from pylot.tomography.fmtomo_utils import Tomo3d +import os + +citer = 0 +niter = 12 +n_proc = 4 # only four processes for minimal example with four sources + +# for some reason this did not work as expected and was commented out +#if os.path.isfile('inviter.in'): +# with open('inviter.in', 'r') as infile: +# citer = int(infile.read()) +# print ('Continue on iteration step ', citer) + +tomo = Tomo3d(os.getcwd(), os.getcwd(), overwrite=True, buildObs=False, saveRays=[6, 12], citer=citer) + +try: + tomo.runTOMO3D(n_proc, niter) +except KeyboardInterrupt: + print('runTOMO3D interrupted by user or machine. Cleaning up.') +except Exception as e: + print(f'Catching unknown Exception in runTOMO3D: {e}.
Trying to clean up...') +finally: + tomo.removeDirectories() diff --git a/pylot/tomography/fmtomo_tools/__init__.py b/pylot/tomography/fmtomo_tools/__init__.py new file mode 100644 index 00000000..ec51c5a2 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- +# diff --git a/pylot/tomography/fmtomo_tools/compare_arrivals_hybrid.py b/pylot/tomography/fmtomo_tools/compare_arrivals_hybrid.py new file mode 100755 index 00000000..d9ccdd31 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/compare_arrivals_hybrid.py @@ -0,0 +1,177 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +# Small script to compare arrival times of hybrid tau-p/fmm with simple tau-p for standard earth model + +import os +import argparse + +import numpy as np +import matplotlib.pyplot as plt + +from obspy.taup import TauPyModel + +from pylot.tomography.fmtomo_tools.fmtomo_teleseismic_utils import organize_receivers, organize_sources, organize_event_names + + +def read_file(fnin): + infile = open(fnin, 'r') + return infile.readlines() + + +def compare_arrivals(arrivals_file, receivers_file, sources_file, input_source_file, model='ak135_diehl_v2', exclude_phases=[]): + ''' + Reads FMTOMO arrivals.dat file together with corresponding receiver, source and input_source files to match + each arrival to a source and receiver combination, calculate tau-p arrivals for a given earth model and + calculate differences + ''' + arrivals_taup = {} + arrivals_tomo = {} + receiver_ids = {} + event_names = {} + events = organize_event_names(input_source_file) + model = TauPyModel(model) + + # organize sources and receivers in dictionaries containing their ID used by FMTOMO as dictionary key + receivers_dict = organize_receivers(receivers_file) + sources_dict = organize_sources(sources_file) + + # read arrivals file + with open(arrivals_file, 'r') as infile_arrivals: + arrivals = infile_arrivals.readlines() + + count = 0 + for src_number, source in sources_dict.items(): + if source['phase'] in exclude_phases: + continue + src_name = events[src_number - 1] + if not src_name in event_names.keys(): + arrivals_taup[src_name] = [] + arrivals_tomo[src_name] = [] + receiver_ids[src_name] = [] + event_names[src_name] = count + count += 1 + + for line in arrivals: + # read line by line from fmtomo_tools output file arrivals.dat + rec_id, src_id, ray_id, refl, arrival_time, diff, head = line.split() + arrival_time = float(arrival_time) + rec_id = int(rec_id) + src_id = int(src_id) + ray_id = int(ray_id) + + # identify receiver and source using dictionary + receiver = receivers_dict[rec_id] + source = sources_dict[src_id] + src_name = events[src_id - 1] + phase = source['phase'] + if phase in exclude_phases: continue + taup_arrival = model.get_travel_times_geo(source_depth_in_km=source['depth'], + source_latitude_in_deg=source['lat'], + source_longitude_in_deg=source['lon'], + receiver_latitude_in_deg=receiver['lat'], + receiver_longitude_in_deg=receiver['lon'], + phase_list=[phase]) + receiver_depth_in_km = 6371. 
- receiver['rad'] + if len(taup_arrival) == 1: + taup_arrival_time = taup_arrival[0].time + else: + taup_arrival_time = np.nan + arrivals_taup[src_name].append(taup_arrival_time) + arrivals_tomo[src_name].append(arrival_time) + receiver_ids[src_name].append(rec_id) + + #plt.plot([min(arrivals_taup),max(arrivals_taup)],[min(arrivals_taup), max(arrivals_taup)], 'k-') + sorted_by_first_arrival = sorted([(src_name, min(arrivals)) for src_name, arrivals in arrivals_taup.items()], + key=lambda x: x[1]) + + # print some output for analysis + for item in sorted_by_first_arrival: + print(item) + #[print(source) for source in sources_dict.items()] + #[print(item) for item in enumerate(events)] + + current_fmtomo_folder_name = os.path.split(os.path.abspath(arrivals_file))[-2] + fname_savefig = '{}'.format(current_fmtomo_folder_name) + if exclude_phases: + fname_savefig += '_e' + for phase in exclude_phases: + fname_savefig += '_{}'.format(phase) + + plot_differences(arrivals_taup, arrivals_tomo, sorted_by_first_arrival, fname_savefig) + #for event_name in ['20160124_103037.a_P.ttf', '20160729_211833.a_Pdiff.ttf', '20160729_211833.a_P.ttf']: + # plot_event(arrivals_tomo, arrivals_taup, receiver_ids, receivers_dict, src_name=event_name) + + +def plot_differences(arrivals_taup, arrivals_tomo, sorted_by_first_arrival, fname_savefig): + fig = plt.figure(figsize=(16,9)) + ax = fig.add_subplot(111) + + # init plot for tt differences + cmap = plt.get_cmap('jet') + colors = cmap(np.linspace(0, 1, len(sorted_by_first_arrival))) + + for index, item in enumerate(sorted_by_first_arrival): + src_name = item[0] + ax.scatter(arrivals_taup[src_name], np.array(arrivals_tomo[src_name]) - np.array(arrivals_taup[src_name]), + c=colors[index], s=25, marker='.', label=src_name, edgecolors='none') + + # shrink box for legend + box = ax.get_position() + ax.set_position([box.x0, box.y0, box.width * 0.8, box.height]) + + ax.legend(bbox_to_anchor=[1, 1], loc='upper left') + + plt.title(fname_savefig) + ax.set_xlabel('Absolute time $t_{tau-p}$') + ax.set_ylabel('Time difference $t_{hybrid} - t_{tau-p}$') + + print('Saving plot to {}.png'.format(fname_savefig)) + fig.savefig(fname_savefig + '.png', dpi=300) + #plt.show() + + +def plot_event(arrivals_tomo, arrivals_taup, receiver_ids_dict, receivers_dict, src_name): + arrivals_diff = np.array(arrivals_tomo[src_name]) - np.array(arrivals_taup[src_name]) + receiver_ids = receiver_ids_dict[src_name] + x = np.array([receivers_dict[rec_id]['lon'] for rec_id in receiver_ids]) + y = np.array([receivers_dict[rec_id]['lat'] for rec_id in receiver_ids]) + sc = plt.scatter(x, y, c=arrivals_diff, edgecolor='none') + plt.xlabel('Longitude [deg]') + plt.ylabel('Latitude [deg]') + cbar = plt.colorbar(sc) + cbar.ax.set_ylabel('traveltime difference ($t_{hybrid} - t_{tau-p}$)') + plt.title('{}'.format(src_name)) + plt.show() + +# folders=[ +# 'alparray_0_receiver_elev_zero', +# 'alparray_0_receiver_elev_zero_finer_pgrid_vgrid_r', +# 'alparray_0_receiver_elev_zero_finer_vgrid_llr', +# 'alparray_0_receiver_elev_zero_smaller_box', +# 'alparray_0_receiver_elev_zero_shallow_box', +# 'alparray_0_receiver_elev_zero_finer_pgrid_llr', +# 'alparray_0_receiver_elev_zero_finer_interface', +# #'alparray_0_receiver_elev_zero_bigger_box', +# ] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description='Compare arrivals with TauP-times') + parser.add_argument('fmtomodir', help='path containing fm3d output') + parser.add_argument('-e', dest='exclude', default=[], help='exclude phases, comma 
separated, no spaces') + args = parser.parse_args() + + fdir = args.fmtomodir + + arrivals_file = os.path.join(fdir, 'arrivals.dat') + receivers_file = os.path.join(fdir, 'receivers.in') + sources_file = os.path.join(fdir, 'sources.in') + input_source_file = os.path.join(fdir, 'input_source_file_P.in') + + exclude_phases = args.exclude + if exclude_phases: + exclude_phases = exclude_phases.split(',') + compare_arrivals(arrivals_file, receivers_file, sources_file, input_source_file=input_source_file, exclude_phases=exclude_phases) + + diff --git a/pylot/tomography/fmtomo_tools/create_tradeoff_runs.py b/pylot/tomography/fmtomo_tools/create_tradeoff_runs.py new file mode 100644 index 00000000..f57b2e23 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/create_tradeoff_runs.py @@ -0,0 +1,39 @@ +import os + +cwdir = '/data/AlpArray_Data/fmtomo/v5/tradeoff_curves' +parent_dir_name = 'crust_included_grad_smooth_FIXED_dts'#_grad_1.5' + +dampings = [3., 10., 30.]#, 30.] +smoothings = [5.6] + +def main(submit_run=True): + fdir_parent = os.path.join(cwdir, parent_dir_name) + for damp in dampings: + for smooth in smoothings: + fdir_out = fdir_parent + '_sm{}_damp{}'.format(smooth, damp) + if not os.path.isdir(fdir_out): + os.mkdir(fdir_out) + os.system('cp -P {}/* {}'.format(fdir_parent, fdir_out)) + invertfile = os.path.join(fdir_out, 'invert3d.in') + modify_invert_in(invertfile, damp, smooth) + if submit_run: + os.chdir(fdir_out) + os.system('qsub submit_fmtomo.sh') + +def modify_invert_in(fnin, damp, smooth): + with open(fnin, 'r') as infile: + lines = infile.readlines() + + with open(fnin, 'w') as outfile: + for line in lines: + if not line.startswith('c'): + value, comment = line.split('c:') + if 'Global damping' in comment: + line = line.replace(value.strip(), str(damp) + ' ') + elif 'Global smoothing' in comment: + line = line.replace(value.strip(), str(smooth) + ' ') + outfile.write(line) + + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/pylot/tomography/fmtomo_tools/dist_first_src.py b/pylot/tomography/fmtomo_tools/dist_first_src.py new file mode 100755 index 00000000..303d5238 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/dist_first_src.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import argparse + +from obspy.geodetics.base import gps2dist_azimuth + + +def main(infile): + per = 4e4 # earth perimeter + + fid = open(infile, 'r') + nrec = fid.readline() + latsrc, lonsrc, depsrc = [float(value) for value in fid.readline().split()] + phase = fid.readline() + latrec, lonrec, deprec = [float(value) for value in fid.readline().split()[:3]] + + print ('Lat/Lon Source: {} / {}'.format(latsrc, lonsrc)) + print ('Lat/Lon Receiver: {} / {}'.format(latrec, lonrec)) + + dist_deg = gps2dist_azimuth(latsrc, lonsrc, latrec, lonrec)[0] / 1e3 / per * 360 + print ('Distance: {} [deg]'.format(dist_deg)) + +if __name__ == "__main__": + parser = argparse.ArgumentParser('Estimate distance from source to first' + ' receiver in WGS84 ellipsoid in FMTOMO pick file.') + parser.add_argument('infile', help='FMTOMO pickfile (*.ttf)') + args = parser.parse_args() + main(args.infile) diff --git a/pylot/tomography/fmtomo_tools/event_thinning.py b/pylot/tomography/fmtomo_tools/event_thinning.py new file mode 100644 index 00000000..890045f0 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/event_thinning.py @@ -0,0 +1,185 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import glob, os, shutil +import numpy as np +import matplotlib.pyplot as plt +from 
matplotlib.patches import Rectangle +from matplotlib.collections import PatchCollection + +from obspy.geodetics import gps2dist_azimuth + +#pwd = '/rscratch/minos13/marcel/fmtomo_alparray/v3.5/alparray_events_thinned/picks' +pwd = '/data/AlpArray_Data/fmtomo/v6/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10_EASI_test_Plomerova_NS_events/picks' +os.chdir(pwd) +infiles = glob.glob('*.ttf') + +clat=46. +clon=11. + +ddist = 5 +dazim = 5 + + +def make_grid(ddist, dazim): + distgrid = np.arange(35, 135, ddist) + bazimgrid = np.arange(0, 360, dazim) + grid = [] + + for bazim in np.deg2rad(bazimgrid): + for dist in distgrid: + grid.append(np.array([bazim, dist])) + return np.array(grid) + + +def make_axes(): + fig = plt.figure(figsize=(16,9)) + ax1 = fig.add_subplot(121, projection='polar') + ax2 = fig.add_subplot(122) + ax1.set_theta_direction(-1) + ax1.set_theta_zero_location('N') + return ax1, ax2 + + +def load_events(): + events = {} + for infile in infiles: + with open(infile, 'r') as fid: + eventid = infile.split('.ttf')[0] + npicks = int(fid.readline()) + lat, lon, depth = [float(item) for item in fid.readline().split()] + dist, bazim, azim = gps2dist_azimuth(clat, clon, lat, lon, a=6.371e6, f=0) + bazim = np.deg2rad(bazim) + dist = dist / (np.pi * 6371) * 180 / 1e3 + events[eventid] = dict(dist=dist, bazim=bazim, npicks=npicks) + return events + + +def get_events_in_grid(): + events_in_grid = [] + for index, gcorner in enumerate(grid): + bazim_l, dist_l = gcorner + bazim_u = bazim_l + np.deg2rad(dazim) + dist_u = dist_l + ddist + events_in_grid.append(dict(bazims=(bazim_l, bazim_u), dists=(dist_l, dist_u), events=[])) + for eventid, event in events.items(): + if (dist_l <= event['dist'] < dist_u) and (bazim_l <= event['bazim'] <= bazim_u): + events_in_grid[index]['events'].append(eventid) + return events_in_grid + + +def filter_events(): + filtered_events = {} + for eventdict in events_in_grid: + cur_events = eventdict['events'] + if not cur_events: continue + eventid = get_best_event(cur_events) + filtered_events[eventid] = events[eventid] + return filtered_events + + +def get_best_event(cur_events): + ''' return eventid with highest number of picks''' + select_events = {key: events[key] for key in cur_events} + npicks = {key: value['npicks'] for key, value in select_events.items()} + eventid = max(npicks, key=npicks.get) + return eventid + + +def plot_distribution(events_dict): + cmap_bnd = plt.get_cmap('Greys_r') + cmap_center = plt.get_cmap('viridis') + nevents = [len(grid_dict['events']) for grid_dict in events_dict] + npicks = [] + for ev_dict in events_dict: + npick = 0 + for eventid in ev_dict['events']: + npick += events[eventid]['npicks'] + npicks.append(npick) + + npicks = np.array(npicks) + + ev_max = max(nevents) + np_max = max(npicks) + + print('N picks total:', np.sum(npicks)) + print('N picks max: ', np_max) + print('N events max: ', ev_max) + + ax_polar, ax_hist = make_axes() + patches = [] + for npick, ev_dict in zip(npicks, events_dict): + bazim_l, bazim_u = ev_dict.get('bazims') + dist_l, dist_u = ev_dict.get('dists') + n_ev = len(ev_dict.get('events')) + color_edge = cmap_bnd(n_ev / ev_max) + color_center = cmap_center(npick / np_max) + # color = cmap(np.random.rand()) + rect = Rectangle((bazim_l, dist_l), np.deg2rad(dazim), ddist, edgecolor=color_edge)#, facecolor=color_center) + patches.append(rect) + + collection = PatchCollection(patches, cmap=cmap_center) + collection.set_array(npicks) + ax_polar.add_collection(collection) + ax_polar.set_ylim((10, 135)) + cbar = 
plt.colorbar(collection) + cbar.set_label('N picks') + + # ax.scatter(grid[:, 0] + 0.5 * dazim, grid[:, 1] + 0.5 * ddist, c=nevents, s=50) + bazims = [] + dists = [] + for event in events.values(): + bazims.append(event.get('bazim')) + dists.append(event.get('dist')) + + ax_polar.scatter(bazims, dists, c='k', zorder=3, s=5, alpha=0.5) + ax_hist.hist(np.rad2deg(bazims), bins=np.arange(0, 360, dazim)) + ax_hist.set_xlabel('Backazimuth (deg)') + ax_hist.set_ylabel('Number of events') + plt.title('Polar event distribution and histogram of backazimuths') + + +def export_filtered_events(fdir_save='picks_save'): + if not os.path.isdir(fdir_save): + os.mkdir(fdir_save) + for infile in infiles: + eventid = infile.split('.ttf')[0] + if not eventid in events: + for fname in glob.glob('{}.*'.format(eventid)): + shutil.move(fname, fdir_save) + print('Moved file {} to path {}'.format(fname, fdir_save)) + + +def write_input_source_file(fname='input_source_file_P_new.in'): + with open(fname, 'w') as outfile: + outfile.write('{}\n'.format(len(events))) + for eventid in sorted(list(events.keys())): + outfile.write('1 1 {}.ttf\n'.format(eventid)) + + +def filter_bazim(events, bazims_list): + events_filtered = {} + for eventid, event_dict in events.items(): + for baz_min, baz_max in bazims_list: + # bazim is stored in radians (see load_events), so convert to degrees before comparing + if baz_min <= np.rad2deg(event_dict['bazim']) <= baz_max: + events_filtered[eventid] = event_dict + + return events_filtered + + +filter_bazims = [(330, 360), (0, 30), (150, 210)] + +events = load_events() +#plot_distribution(events) + +events = filter_bazim(events, bazims_list=filter_bazims) +print() +#grid = make_grid(ddist, dazim) +#events_in_grid = get_events_in_grid() +#plot_distribution() +#events = filter_events() +#events_in_grid = get_events_in_grid() +#plot_distribution(events) +#plt.show() +export_filtered_events() +write_input_source_file() diff --git a/pylot/tomography/fmtomo_tools/filter_sources_by_dist_in_fmtomo.py b/pylot/tomography/fmtomo_tools/filter_sources_by_dist_in_fmtomo.py new file mode 100644 index 00000000..c544f1d5 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/filter_sources_by_dist_in_fmtomo.py @@ -0,0 +1,28 @@ +import os + +import numpy as np +from obspy.geodetics import gps2dist_azimuth + +from pylot.tomography.fmtomo_tools.fmtomo_teleseismic_utils import organize_sources + +fmtomodir = '/data/AdriaArray_Data/fmtomo_adriaarray/alpadege/no_crust_correction' +clon = 17.5 +clat = 42.25 + + +os.chdir(fmtomodir) + +sources = organize_sources('sources.in') + +dists = [] +lats = [] +lons = [] + +for source_id, source_dict in sources.items(): + slat = source_dict['lat'] + slon = source_dict['lon'] + lats.append(slat) + lons.append(slon) + dist_m = gps2dist_azimuth(slat, slon, clat, clon, a=6.371e6, f=0)[0] + dist = dist_m / (np.pi * 6371) * 180 / 1e3 + dists.append(dist) \ No newline at end of file diff --git a/pylot/tomography/fmtomo_tools/fmtomo_grid_utils.py b/pylot/tomography/fmtomo_tools/fmtomo_grid_utils.py new file mode 100644 index 00000000..27f6c7c6 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/fmtomo_grid_utils.py @@ -0,0 +1,2019 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +import os +from os.path import join as pjoin +import glob +import numpy as np +import warnings +import multiprocessing +from datetime import datetime + +import json + +import matplotlib.pyplot as plt + +from functools import reduce + +from scipy.interpolate import griddata, RegularGridInterpolator, interp1d +from scipy import signal + +from obspy.taup.tau_model import TauModel + +try: + from pyevtk.hl import gridToVTK
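+ # gridToVTK is provided by the optional pyevtk package (it writes structured grids to binary VTK files); the import is guarded so the module still loads when pyevtk is missing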
+except Exception as e: + warnings.warn('Could not load pyevtk: {}'.format(e)) + +from pylot.tomography.utils import pol2cart, cart2pol +from pylot.tomography.fmtomo_tools.fmtomo_teleseismic_utils import Propgrid + + +def scale_deg(input_in_degree): + scale_factor_deg_km = 2 * np.pi * 6371. / 360. + return input_in_degree * scale_factor_deg_km + +def propgrid2vtk(infile, outfile, R=6371., arrtimes=False): + ''' + read and write a vtk file from FMTOMO propgrid.in file + ''' + fid_in = open(infile, 'r') + nR, nTheta, nPhi = [int(item) for item in fid_in.readline().split()] + deltaR, deltaTheta_deg, deltaPhi_deg = [float(item) for item in fid_in.readline().split()] + rtop, thetaS_deg, phiW_deg = [float(item) for item in fid_in.readline().split()] + + #deltaTheta = scale_deg(float(deltaTheta_deg)) + #deltaPhi = scale_deg(float(deltaPhi_deg)) + #thetaS = scale_deg(float(thetaS_deg)) + #phiW = scale_deg(float(phiW_deg)) + + nPoints = nR * nTheta * nPhi + + out_id = open(outfile, 'w') + out_id.write('# vtk DataFile Version 4.0\n') + out_id.write('Propgrid\n') + out_id.write('ASCII\n') + #out_id.write('DATASET STRUCTURED_POINTS\n') + #out_id.write('DIMENSIONS {nx:>15}{ny:>15}{nz:>15}\n'.format(nx=nTheta, ny=nPhi, nz=nR)) + #out_id.write('ORIGIN {x:>15}{y:>15}{z:>15}\n'.format(x=thetaS_deg, y=phiW_deg, z=rtop + R)) + #out_id.write('SPACING {dx:>15}{dy:>15}{dz:>15}\n'.format(dx=deltaTheta_deg, dy=deltaPhi_deg, dz=-deltaR)) + + out_id.write('DATASET STRUCTURED_GRID\n') + out_id.write('DIMENSIONS %15d %15d %15d\n' % (nR, nTheta, nPhi)) + out_id.write('POINTS %15d float\n' % (nPoints)) + + lats = np.arange(thetaS_deg, stop=thetaS_deg + (nTheta) * deltaTheta_deg, step=deltaTheta_deg) + lons = np.arange(phiW_deg, stop=phiW_deg + (nPhi) * deltaPhi_deg, step=deltaPhi_deg) + rads = np.arange(rtop, stop=rtop - (nR) * deltaR, step=-deltaR) + + for lon in lons: + for lat in lats: + for r in rads: + x, y, z = pol2cart(lat, lon, r) + out_id.write('{x} {y} {z}\n'.format(x=x, y=y, z=z)) + + fid_in.readline() + fid_in.readline() + + if arrtimes: + + # write header with number of data points + out_id.write('\nPOINT_DATA {}\n'.format(nPoints)) + + # write header for data + out_id.write('SCALARS {} float 1\n'.format('arrival_times')) + out_id.write('LOOKUP_TABLE default\n') + + for _ in range(nPoints): + out_id.write(fid_in.readline()) + # + # + # nPoints = nR * nTheta * nPhi + # + # out_id.write('VERTICES %15d %15d\n' % (nPoints, 2 * nPoints)) + # + # # write indices + # print("Writing indices to VTK file...") + # for index in range(nPoints): + # out_id.write('%10d %10d\n' % (1, index)) + + fid_in.close() + out_id.close() + print('Wrote file {}'.format(outfile)) + +def read_interfaces_file(infile): + fid_in = open(infile, 'r') + nInterfaces = int(fid_in.readline()) + nTheta, nPhi = [int(item) for item in fid_in.readline().split()] # INCLUDING CUSHION NODES (+2 each) + deltaTheta_rad, deltaPhi_rad = fid_in.readline().split() + thetaS_rad, phiW_rad = fid_in.readline().split() # INCLUDING CUSHION NODES (-delta each) + + deltaTheta = scale_deg(np.rad2deg(float(deltaTheta_rad))) + deltaPhi = scale_deg(np.rad2deg(float(deltaPhi_rad))) + thetaS = scale_deg(np.rad2deg(float(thetaS_rad))) + phiW = scale_deg(np.rad2deg(float(phiW_rad))) + + interfaces = [] + for interface_index in range(nInterfaces): + glob_index = 0 + interface = {'thetaGrid': np.empty(nTheta * nPhi), + 'phiGrid': np.empty(nTheta * nPhi), + 'points': np.empty(nTheta * nPhi)} + for theta_index in range(nTheta): + theta = thetaS + theta_index * deltaTheta + for 
phi_index in range(nPhi): + phi = phiW + phi_index * deltaPhi + line = fid_in.readline().split() + while len(line) == 0: + # skip blank lines + line = fid_in.readline().split() + interface['thetaGrid'][glob_index] = theta + interface['phiGrid'][glob_index] = phi + interface['points'][glob_index] = float(line[0]) + glob_index += 1 + + interfaces.append(interface) + return interfaces + +def interface2VTK(interface, filename='interface.vtk'): + ''' + Generates a vtk file from all points of an interface + ''' + outfile = open(filename, 'w') + + thetaGrid = interface['thetaGrid'] + phiGrid = interface['phiGrid'] + points = interface['points'] + # materialize the zip: a Python 3 zip object is a one-shot iterator without len() + zipped_grid = list(zip(thetaGrid, phiGrid, points)) + + nPoints = len(zipped_grid) + + # write header + print("Writing header for VTK file...") + outfile.write('# vtk DataFile Version 3.1\n') + outfile.write('Surface Points\n') + outfile.write('ASCII\n') + outfile.write('DATASET POLYDATA\n') + outfile.write('POINTS %15d float\n' % (nPoints)) + + # write coordinates + print("Writing coordinates to VTK file...") + + for theta, phi, point in zipped_grid: + outfile.write('%10f %10f %10f \n' % (theta, phi, point)) + + outfile.write('VERTICES %15d %15d\n' % (nPoints, 2 * nPoints)) + + # write indices + print("Writing indices to VTK file...") + for index in range(nPoints): + outfile.write('%10d %10d\n' % (1, index)) + + # outfile.write('POINT_DATA %15d\n' %(nPoints)) + # outfile.write('SCALARS traceIDs int %d\n' %(1)) + # outfile.write('LOOKUP_TABLE default\n') + + # # write traceIDs + # print("Writing traceIDs to VTK file...") + # for traceID in traceIDs: + # outfile.write('%10d\n' %traceID) + + outfile.close() + print("Wrote %d points to file: %s" % (nPoints, filename)) + return + +def interfaces2vtk(interfaces_file, outpath='.'): + interfaces = read_interfaces_file(interfaces_file) + for index, interface in enumerate(interfaces): + interface2VTK(interface, os.path.join(outpath, 'interface_{}.vtk'.format(index + 1))) + + +def grid_differences(fname_in, fname_out, model, wdir='', percentage=True): + # read external grid file + #ext_grid = read_diehl_model(external_grid_file) + #save_json_file(ext_grid, os.path.join(wdir, 'diehl_2009_grid.json')) + + # load/save external grid to json file (speeds up operation after saving to json once) + ext_grid = load_json_file(os.path.join(wdir, fname_in)) + diff_grid = calculate_differences_grid_json_file(ext_grid, model, percentage=percentage) + save_json_file(diff_grid, os.path.join(wdir, fname_out)) + outfile_vtk = fname_out.split('.json')[0] + outfile_vtk += '.vtk' + write_vtk(diff_grid, os.path.join(wdir, outfile_vtk), write_data=['vps']) + + +def interpolate_on_vgrid(vgrid_file, external_grid_file, fname_out, fname_out_vtk=None, ncores=None, + default_cov=1., vel_perturbation_abs=False, vel_perturbation_perc=False): + if not ncores: + ncores = multiprocessing.cpu_count() + + if vel_perturbation_abs and vel_perturbation_perc: + raise IOError('Cannot be both abs and percentage perturbation.') + + ext_grid = load_json_file(external_grid_file) + + # re-sort grid contents to fit on vgrids.in + a = np.array([ext_grid['lons'], ext_grid['lats'], ext_grid['depths']]) + ind = np.lexsort((a[0, :], a[1, :], a[2, ::-1])) + for key, value in ext_grid.items(): + if value: + ext_grid[key] = np.array(value)[ind] + + + # write vtk file for external grid + #write_vtk(ext_grid, fname=os.path.join(wdir, 'crustal_model.vtk')) + + # read FMTOMO velocity/inversion grid file + vgrid, npts, delta, start = read_vgrid(vgrid_file) + + minDepth =
min(ext_grid['depths']) + maxDepth = max(ext_grid['depths']) + minLat = min(ext_grid['lats']) + maxLat = max(ext_grid['lats']) + minLon = min(ext_grid['lons']) + maxLon = max(ext_grid['lons']) + + depths = np.array(vgrid['depths']) + lats = np.array(vgrid['lats']) + lons = np.array(vgrid['lons']) + + # get indices of grid within min/max depths/lats/lons + indices_depth = np.where(np.logical_and(depths >= minDepth, depths <= maxDepth)) + indices_lat = np.where(np.logical_and(lats >= minLat, lats <= maxLat)) + indices_lon = np.where(np.logical_and(lons >= minLon, lons <= maxLon)) + + # get those indices common to all 3 indices arrays + indices = reduce(np.intersect1d, (indices_depth, indices_lat, indices_lon)) + + #x_vgrid = x[indices] + #y_vgrid = y[indices] + #z_vgrid = z[indices] + + input_list = prepare_multiprocessing_input(vgrid, ext_grid, indices, ncores) + + print('Generating multiprocessing pool with {} cores'.format(ncores)) + pool = multiprocessing.Pool(ncores) + # interpolate on grid points + interp_results = pool.map(griddata_worker, input_list) + pool.close() + print('Finished interpolation.') + + # calculate new vps_ext from differences + #for index in indices: + # grid['vps'][index] += vps_new[index] + + vgrid['vps'] = np.array(vgrid['vps']) + vgrid['covs'] = list(np.ones(len(vgrid['vps'])) * default_cov) + for indices, new_vel in interp_results: + if vel_perturbation_abs: + vgrid['vps'][indices] += new_vel + elif vel_perturbation_perc: + vgrid['vps'][indices] *= 1 + new_vel/100. + else: + vgrid['vps'][indices] = new_vel + vgrid['vps'] = list(vgrid['vps']) + + write_vgrid(vgrid, npts, delta, start, fname=fname_out) + if fname_out_vtk: + write_vtk(vgrid, fname_out_vtk, write_data=['vps', 'covs'], legacy=True) + + +def slice_array(array, indices, nparts): + nparts = int(nparts) + output_list = [] + nindices = len(indices) + # ceil division: add one item per core when the indices do not divide evenly + npercore = nindices//nparts + if nindices % nparts: + npercore += 1 + for part in range(nparts): + start_ind = part * npercore + # slice indices array into parts for each core + sub_indices = np.arange(start_ind, min([start_ind + npercore, nindices]), 1) + output_list.append({'array': array[indices[sub_indices]], 'indices': indices[sub_indices]}) + return output_list + + +def prepare_multiprocessing_input(vgrid, ext_grid, indices, ncores): + input_list = [] + input_dict = {} + keys = ['xs', 'ys', 'zs'] + + # slice all arrays into parts and store them in a dictionary + for key in keys: + array_list = slice_array(np.array(vgrid[key]), indices, nparts=ncores) + input_dict[key] = array_list + + # now construct a list (one item for each core) with all 3 arrays in a dict and corresponding indices + for index in range(ncores): + d = {key: input_dict[key][index]['array'] for key in keys} + # fetch indices from last used key (indices are all the same) + d['indices'] = input_dict[key][index]['indices'] + # store input from external grid in dictionary (PROBLEM: might be very inefficient/memory consuming!)
+ d['vps_ext'] = ext_grid['vps'] + d['x_ext'] = ext_grid['xs'] + d['y_ext'] = ext_grid['ys'] + d['z_ext'] = ext_grid['zs'] + input_list.append(d) + + return input_list + + +def griddata_worker(input_dict): + data_velocity = np.array(input_dict['vps_ext']) + data_coords = (np.array(input_dict['x_ext']), np.array(input_dict['y_ext']), np.array(input_dict['z_ext'])) + interp_coords = (input_dict['xs'], input_dict['ys'], input_dict['zs']) + vps_new = griddata(data_coords, data_velocity, interp_coords, fill_value=0., method='nearest') + return input_dict['indices'], vps_new + + +def calculate_differences_grid(grid, earth_model, veltype='p', percentage=True, input_rel=False): + ''' + Calculate differences of input grid to standard earth model + :param grid: dict + :param earth_model: str + :return: new grid with velocity differences + ''' + + abs_or_percentage = {True: 'percentage', + False: 'absolute'} + print('Calculating {} differences to {} model for velocity type {}.'.format(abs_or_percentage[percentage], + earth_model, veltype)) + + veltype = veltype.lower() + vel_key = 'v{}s'.format(veltype) + + model = TauModel.from_file(earth_model) + + for index, depth in enumerate(grid['depths']): + if depth < 0.: + depth = 0. + try: + vel_ref = model.s_mod.v_mod.evaluate_below(depth, veltype) + except Exception as e: + print('Exception at depth:', depth) + raise(e) + vel = grid[vel_key][index] + if input_rel: + vel += vel_ref + if percentage: + vel = (vel - vel_ref[0]) / vel_ref[0] * 100. + else: + vel -= vel_ref[0] + grid[vel_key][index] = vel + + print('Done!') + return grid + + +def calculate_differences_grid_json_file(grid, fn_earth_model, veltype='p', percentage=True): + ''' + Calculate differences of input grid to standard earth model in json file + :param grid: dict + :param fn_earth_model: str + :return: new grid with velocity differences + ''' + + abs_or_percentage = {True: 'percentage', + False: 'absolute'} + print('Calculating {} differences to {} model for velocity type {}.'.format(abs_or_percentage[percentage], + fn_earth_model, veltype)) + + veltype = veltype.lower() + vel_key = 'v{}s'.format(veltype) + + with open(fn_earth_model, 'r') as infile: + model = json.load(infile) + + interp = interp1d(model['depths'], model['vps']) + + for index, depth in enumerate(grid['depths']): + try: + vel_ref = interp(depth) + except Exception as e: + print('Exception at depth:', depth) + raise(e) + vel = grid[vel_key][index] + if percentage: + vel = (vel - vel_ref) / vel_ref * 100. + else: + vel -= vel_ref + grid[vel_key][index] = vel + + print('Done!') + return grid + + +def convolution_filter(grid, sigma_xyz, spacing_xyz, R=6371., wdir='', fname_ext='', values_list=['vps']): + ''' + Apply 3d convolution filter on grid + :param grid: dict + :param sigma_xyz: (s_x, s_y, s_z) std_dev in 3 dimensions for filter kernel + :param R: earth radius + :param wdir: working directory + :return: + ''' + if type(values_list) not in [list, tuple]: + values_list = [values_list] + + npa = np.array + + lons_km = np.deg2rad(np.unique(grid['lons'])) * R * np.cos(np.deg2rad(46)) # reduce radius with mean at 46deg N (WHY???) 
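+ # note: one degree of longitude spans cos(latitude) times the km of one degree of latitude; the cos(46 deg) factor converts lon spacing to km at what is presumably the mean latitude of the model region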
+ lats_km = np.deg2rad(np.unique(grid['lats'])) * R + zs = R - np.unique(grid['depths']) + + # re-sort grid contents + a = np.array([grid['lons'], grid['lats'], grid['depths']]) + ind = np.lexsort((a[2, :], a[1, :], a[0, :])) + for key, value in grid.items(): + if len(value) > 0: + grid[key] = npa(value)[ind] + # grid['lons'] = npa(grid['lons'])[ind] + # grid['lats'] = npa(grid['lats'])[ind] + # grid['depths'] = npa(grid['depths'])[ind] + + if len(grid['xs']) == 0 and len(grid['xs']) == 0 and len(grid['zs']) == 0: + grid = calc_cartesian(grid) + + for val in values_list: + if val == 'covs': + print('Taking squareroot of covariances to convolve std, then square again.') + values = np.sqrt(grid[val]) + else: + values = grid[val] + + ## use indices to sort actual values and shape correspondingly (already done above!) + #values = npa(values)[ind] + shape = (len(lons_km), len(lats_km), len(zs)) + data_vel = values.reshape(shape) + + #data_vel = np.empty((len(lons_km), len(lats_km), len(zs))) + #index = 0 + #for zind in range(len(zs)): + # for yind in range(len(lats_km)): + # for xind in range(len(lons_km)): + # data_vel[xind, yind, zind] = grid[val][index] + # index += 1 + + #write_vtk(grid, os.path.join(wdir, 'grid_b4_filter{}_{}.vtk'.format(fname_ext, val)), + # write_data=[val], sort=True) + + gk = gauss_kernel(sigma_xyz, spacing_xyz=spacing_xyz) + convolved_data_values = (signal.convolve(data_vel, gk, mode="same", method='fft') / np.sum(gk)) + + val_new = convolved_data_values.ravel() + + if val == 'covs': + print('Now square result again...') + val_new = val_new ** 2 + + grid[val] = val_new + #index = 0 + #for zind in range(len(zs)): + # for yind in range(len(lats_km)): + # for xind in range(len(lons_km)): + # grid[val][index] = convolved_data_values[xind, yind, zind] + # index += 1 + + #write_vtk(grid, os.path.join(wdir, 'grid_after_filter{}_{}.vtk'.format(fname_ext, val)), + # write_data=[val], sort=True) + return grid + + +def calc_cartesian(grid, R=6371.): + print('Calc cartesian coordinates for paraview...') + lons = np.array(grid['lons']) + lats = np.array(grid['lats']) + depths = np.array(grid['depths']) + xs, ys, zs = pol2cart(lats, lons, R - depths) + grid['xs'] = xs + grid['ys'] = ys + grid['zs'] = zs + return grid + + +# def convolution_filter_npy(data, sigma_llr, spacing_llr, R=6371., wdir='', fname_ext='', val='vps'): +# ''' +# Apply 3d convolution filter on grid +# :param grid: dict +# :param sigma_llr: (s_x, s_y, s_z) std_dev in 3 dimensions for filter kernel +# :param R: earth radius +# :param wdir: working directory +# :return: +# ''' +# #lons_km = np.deg2rad(np.unique(grid['lons'])) * R +# #lats_km = np.deg2rad(np.unique(grid['lats'])) * R # TODO This is shit? 
+# #zs = R - np.unique(grid['depths']) +# +# # data_vel = np.empty((len(lons_km), len(lats_km), len(zs))) +# # index = 0 +# # for zind in range(len(zs)): +# # for yind in range(len(lats_km)): +# # for xind in range(len(lons_km)): +# # data_vel[xind, yind, zind] = grid[val][index] +# # index += 1 +# +# write_vtk(grid, os.path.join(wdir, 'grid_b4_filter{}.vtk'.format(fname_ext)), write_data=[val]) +# +# gk = gauss_kernel(sigma_llr, spacing_xyz=spacing_llr) +# convolved_data_values = (signal.convolve(data_vel, gk, mode="same", method='fft') / np.sum(gk)) +# +# index = 0 +# for zind in range(len(zs)): +# for yind in range(len(lats_km)): +# for xind in range(len(lons_km)): +# grid[val][index] = convolved_data_values[xind, yind, zind] +# index += 1 +# +# write_vtk(grid, os.path.join(wdir, 'grid_after_filter{}.vtk'.format(fname_ext)), write_data=[val]) +# return grid + + +def gauss_kernel(sigma_xyz, spacing_xyz, width_factor_sigma=4): + ''' + Calculate a three dimensional gauss kernel with a width of "width_factor_sigma" * sigma + :param sigma_xyz: (sigma_x, sigma_y, sigma_z) standard deviation in x, y and z direction of kernel + :param spacing_xyz: (spacing_x, spacing_y, spacing_z) spacing of grid to be convolved (important because + convolution has no units!), e.g. Diehl grid: 2x2x2km spacing + :param width_factor_sigma: size of the kernel (width_factor_sigma * sigma) + :return: gauss kernel on a regular grid + ''' + def calc_base_array(sigma, spacing): + right_side = np.arange(0, width_factor_sigma * sigma, spacing) + left_side = -1 * np.flip(right_side, 0)[:-1] + return np.append(left_side, right_side) + + sigma_x, sigma_y, sigma_z = sigma_xyz + spacing_x, spacing_y, spacing_z = spacing_xyz + #assert type(npts) == int, 'Wrong input type {} for npts'.format(type(npts)) + #if npts%2: + # npts += 1 + # warnings.warn('Npts has to be odd for symmetric kernel. Increased by + 1') + x = np.copy(calc_base_array(sigma_x, spacing_x)) + y = np.copy(calc_base_array(sigma_y, spacing_y)) + z = np.copy(calc_base_array(sigma_z, spacing_z)) + xx, yy, zz = np.meshgrid(x, y, z) + #norm_factor = 1. / (2. * np.sqrt(2.) * np.pi ** (3. / 2.) * sigma ** 3) + #gauss = norm_factor * np.exp(-(xx ** 2 + yy ** 2 + zz ** 2) / (2. * sigma ** 2)) + gauss = np.exp((-1. / 2.) * (xx ** 2 / (sigma_x ** 2) + yy ** 2 / (sigma_y ** 2) + zz ** 2 / sigma_z ** 2)) + return gauss + + +def save_json_file(grid, fname): + for key, value in grid.items(): + if not type(value) == list: + grid[key] = list(value) + with open(fname, 'w') as outfile: + json.dump(grid, outfile) + print('Saved grid to file {}'.format(fname)) + + +def load_json_file(fname): + with open(fname, 'r') as infile: + grid = json.load(infile) + print('Loaded grid from file {}'.format(fname)) + return grid + + +def write_vgrid(vgrid, npts, delta, start, fname, default_cov=0.3, default_smooth=1.0, default_pdev=0.): + ''' + Write vgrids input file for fmtomo_tools. + :param vgrid: grid dictionary + :param npts: number of grid points (3dim) + :param delta: spacings (3dim) + :param start: start points (3dim) + :param fname: outfile + :param cov: diagonal elements of covariance matrix + :return: + ''' + nR, nTheta, nPhi = npts + dR, dTheta, dPhi = delta + sR, sTheta, sPhi = start + + if not vgrid.get('covs'): + print('No covariances given. Setting every point to default covariance of {}'.format(default_cov)) + vgrid['covs'] = list(default_cov * np.ones(len(vgrid['vps']))) + + if not vgrid.get('smoothfactors'): + print('No smoothfactors given. 
Setting every point to default smoothfactor of {}'.format(default_smooth)) + vgrid['smoothfactors'] = list(default_smooth * np.ones(len(vgrid['vps']))) + + if not vgrid.get('frechs'): + print('No pdev given. Setting every point to default pdev of {}'.format(default_pdev)) + vgrid['frechs'] = list(default_pdev * np.ones(len(vgrid['vps']))) + + with open(fname, 'w') as outfile: + # write header for velocity grid file (in RADIANS) + outfile.write('%10s %10s \n' % (1, 1)) + outfile.write('%10s %10s %10s\n' % (nR, nTheta, nPhi)) + outfile.write('%10s %10s %10s\n' % (dR, np.deg2rad(dTheta), np.deg2rad(dPhi))) + outfile.write('%10s %10s %10s\n' % (sR, np.deg2rad(sTheta), np.deg2rad(sPhi))) + + for vel, cov, smoothfactor, pdev in zip(vgrid['vps'], vgrid['covs'], vgrid['smoothfactors'], vgrid['frechs']): + outfile.write('{} {} {} {}\n'.format(vel, cov, smoothfactor, pdev)) + + print('Wrote {} points to file {}'.format(nR * nTheta * nPhi, fname)) + + +def read_vgrid_regular(grid_file, R=6371.): + ''' + Read fmtomo_tools velocity grid file (vgrids.in) in a regular grid structure + as used by scipy.interpolate.RegularGridInterpolator + :param grid_file: filename of fmtomo_tools velocity grid file + :param R: earth radius + :return: grid, vps, covariances, smoothings, partial_derivs + ''' + gridN, gridDelta, gridStart, velocities, covariances, smooths, pdevs = _readVgrid(grid_file) + lats, lons, rads = _generateGrids(gridN, gridDelta, gridStart) + + grid = (lons, lats, rads) + + vps = np.zeros((len(lons), len(lats), len(rads))) + covs = np.zeros((len(lons), len(lats), len(rads))) + sms = np.zeros((len(lons), len(lats), len(rads))) + pds = np.zeros((len(lons), len(lats), len(rads))) + + index = 0 + for irad, rad in enumerate(rads): + for ilat, lat in enumerate(lats): + for ilon, lon in enumerate(lons): + vps[ilon, ilat, irad] = velocities[index] + cov = np.nan if not covariances else covariances[index] + smooth = np.nan if not smooths else smooths[index] + pdev = np.nan if not pdevs else pdevs[index] + + covs[ilon, ilat, irad] = cov + sms[ilon, ilat, irad] = smooth + pds[ilon, ilat, irad] = pdev + index += 1 + + return grid, vps, covs, sms, pds + + +def read_vgrid(grid_file, R=6371., inv_index_frechet=False): + ''' + Read fmtomo_tools velocity grid file (vgrids.in) to "grid" dictionary structure. 
+ :param grid_file: filename of fmtomo_tools velocity grid file + :param R: earth radius + :return: grid, Npoints (3dim), delta (3dim), start (3dim) + ''' + gridN, gridDelta, gridStart, velocities, covariances, smooths, pdevs = _readVgrid(grid_file) + lats, lons, rads = _generateGrids(gridN, gridDelta, gridStart) + + grid = init_dict() + + if inv_index_frechet: + nR, nLat, nLon = gridN + grid['inv_index'] = list(np.zeros(nR * nLat * nLon)) + + covariance_flag = True if len(covariances) == len(velocities) else False + if covariance_flag: + print('Found covariances in velocity model') + else: + print('No covariances found in velocity model.') + + + index = 0 + for r in rads: + for lat in lats: + for lon in lons: + # get values from grid file and append to grid dictionary + grid['lons'].append(lon) + grid['lats'].append(lat) + grid['depths'].append(R - r) + + # transform to cartesian coordinate system and append to grid dictionary + x, y, z = pol2cart(lat, lon, r) + + grid['xs'].append(x) + grid['ys'].append(y) + grid['zs'].append(z) + + grid['vps'].append(velocities[index]) + + if covariance_flag: + grid['covs'].append(covariances[index]) + + # NOTE: inv_index only valid in case of inversion using one velocity grid, else there would be an offset for each + # other parameter/inversion grid + # inversion index (starts with 1) is built differently: for lon in... for lat in... for r in... + if inv_index_frechet: + glob_inv_index = transform_point_index(index, nR, nLat, nLon) + grid['inv_index'][glob_inv_index] = index + + index += 1 + + return grid, gridN, gridDelta, gridStart + + +def add_cushion_zeros_top_bot(fname_in, fname_out, nlayers, R=6371., top=False, bot=True): + def depth_layer(lons, lats, depth): + subgrid = dict(depths=[], lats=[], lons=[], xs=[], ys=[], zs=[], vps=[]) + # note: diehl model counts all longitudes first (negative direction), then latitude (positive direction), + # then depth (positive direction) + for lat in lats: + for lon in reversed(lons): + subgrid['depths'].append(depth) + subgrid['lats'].append(lat) + subgrid['lons'].append(lon) + + # transform to cartesian coordinate system and append to grid dictionary + x, y, z = pol2cart(lat, lon, R - depth) + subgrid['xs'].append(x) + subgrid['ys'].append(y) + subgrid['zs'].append(z) + + # append zeros + subgrid['vps'].append(0.) + return subgrid + + assert any([top, bot]), 'No sense running this function...' 
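+ # the cushion layers appended below carry vp = 0, i.e. zero perturbation, so interpolation beyond the original model top/bottom presumably falls back to the background model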
+ + with open(fname_in, 'r') as infile: + grid = json.load(infile) + + print('Read {} points from file {}'.format(len(grid['depths']), fname_in)) + + lons = np.unique(grid['lons']) + lats = np.unique(grid['lats']) + + + # add cushion n-times + for index in range(nlayers): + # calculate new depths for iterating process + depths = np.unique(grid['depths']) + if index == 0: + ddepth = abs(depths[1] - depths[0]) + + if top == True: + top_depth = min(depths) - ddepth + top_layer = depth_layer(lons, lats, top_depth) + for key in top_layer.keys(): + # add top layer to beginning of list + grid[key] = top_layer[key] + grid[key] + + if bot == True: + bot_depth = max(depths) + ddepth + bot_layer = depth_layer(lons, lats, bot_depth) + for key in bot_layer.keys(): + # add bot layer to end of list + grid[key] = grid[key] + bot_layer[key] + + with open(fname_out, 'w') as outfile: + json.dump(grid, outfile) + print('Wrote file', fname_out) + + +def read_diehl_model(grid_file, R=6371.): + with open(grid_file, 'r') as infile: + input_list = infile.readlines() + + grid = init_dict() + + for line in input_list: + # get values from grid file and append to grid dictionary + lon, lat, x_simulp, y_simulp, depth, vel = [float(item) for item in line.split()] + grid['lons'].append(lon) + grid['lats'].append(lat) + grid['depths'].append(depth) + grid['vps'].append(vel) + + # transform to cartesian coordinate system and append to grid dictionary + x, y, z = pol2cart(lat, lon, R - depth) + grid['xs'].append(x) + grid['ys'].append(y) + grid['zs'].append(z) + + return grid + + +def read_tesauro_model(grid_file, min_depth, max_depth, npts_depth, R=6371.): + with open(grid_file, 'r') as infile: + input_list = infile.readlines()[1:] + + grid = init_dict() + + depths = np.linspace(min_depth, max_depth, num=npts_depth) + + for depth in depths: + for line in input_list: + # get values from grid file and append to grid dictionary + lon, lat, uc_vel, lc_vel, avcrust, topo, base, uc_lc, moho = [float(item) for item in line.split()] + grid['lons'].append(lon) + grid['lats'].append(lat) + grid['depths'].append(depth) + if depth <= topo: + vel = uc_vel + cov = 0.05 + elif topo < depth <= uc_lc: + vel = uc_vel + cov = 0.1 + elif uc_lc < depth <= moho: + vel = lc_vel + cov = 0.1 + else: + vel = 8.0355 # TODO: BETTER USE NAN/ak135 vel? + cov = 1. + + grid['vps'].append(vel) + grid['covs'].append(cov) + + # transform to cartesian coordinate system and append to grid dictionary + x, y, z = pol2cart(lat, lon, R - depth) + grid['xs'].append(x) + grid['ys'].append(y) + grid['zs'].append(z) + + return grid + + +def add_array_layer(a, depth_val, depth_ind): + inds = np.lexsort((a[0, :], a[1, :], a[depth_ind, :])) + a = a[:, inds] + inds_tl = np.where(a[depth_ind, :] == np.unique(a[depth_ind, :])[0])[0] + if not all(np.diff(inds_tl) == 1): + print('Adding layer failed.') + return a + end_ind = len(inds_tl) + cp_layer = np.copy(a[:, :end_ind]) + cp_layer[depth_ind, :] = depth_val + return np.append(cp_layer, a, axis=1) + + +def get_rginter_tesauro_discrete(fnin, fill_value=np.nan, relative_values=False, ftype='npy', col_index=3, + extend_surface=15, depth_ind=2, min_depth=None, max_depth=None): + ''' + Create and return rginter object containing vp data of discrete Tesauro grid. 
:param fnin: fname tesauro discrete (txt) + :param extend_surface: Extend uppermost value of external grid to x km above sea level + :return: rginter object accessed by: rginter((lon, lat, depth)) + ''' + if ftype == 'txt': + a = np.loadtxt(fnin).transpose() + elif ftype == 'npy': + a = np.load(fnin) + + if extend_surface: + a = add_array_layer(a, -extend_surface, depth_ind) + + # sort by depth, lat, lon (outer loop = lon) + inds = np.lexsort((a[2, :], a[1, :], a[0, :])) + a = a[:, inds] + + lons, lats, depths = a[:3] + values = a[col_index] + + if not relative_values: + # negative velocities -> np.nan + values[values < 0] = np.nan + + lonsU = np.unique(lons) + latsU = np.unique(lats) + depthsU = np.unique(depths) + grid = (lonsU, latsU, depthsU) #TODO check shapes + shape = [len(item) for item in grid] + + if min_depth: + inds_lower_mindepth = np.where(depths < min_depth)[0] + values[inds_lower_mindepth] = np.nan + + if max_depth: + inds_over_maxdepth = np.where(depths > max_depth)[0] + values[inds_over_maxdepth] = np.nan + + values = values.reshape(shape) + rginter = RegularGridInterpolator(grid, values, method='linear', bounds_error=False, fill_value=fill_value) + return rginter + + +def get_rginter_tesauro_discrete_std(fnin, std_in=0.3162, std_out=1.0, ftype='npy'): + ''' + Create and return rginter object containing cov data based on vp of discrete Tesauro grid. If vp is negative (outside + of grid) std will be std_out, else std_in. + :param fnin: fname tesauro discrete (txt) + :return: rginter object accessed by: rginter((lon, lat, depth)) + ''' + if ftype == 'txt': + lons, lats, depths, values = np.loadtxt(fnin).transpose() + elif ftype == 'npy': + lons, lats, depths, values = np.load(fnin) + # negative velocities (outside of grid) -> std_out, else std_in + values[values < 0] = std_out + values[values >= 0] = std_in + lonsU = np.unique(lons) + latsU = np.unique(lats) + depthsU = np.unique(depths) + grid = (lonsU, latsU, depthsU) + shape = [len(item) for item in grid] + values = values.reshape(shape) + rginter = RegularGridInterpolator(grid, values, method='linear', bounds_error=False, fill_value=std_out) + return rginter + + +def interpolate_ray_crossing_on_vgrid(fname_crossing='vgrids_v5_it12_crossing_bins_4-1_mincount_5_STEP_2.npy', + fname_vg='it_12/vgrids.in', fname_vgref='vgridsref_orig.in', R=6371.): + ''' + Interpolate resolution information given in fname_crossing (.npy) onto velocity grid in fname_vg. Also calculate + dvp (percentage). Return data as numpy array. + ''' + cross_grid = np.load(fname_crossing) + cross_grid[2, :] = R - cross_grid[2, :] + inds_a = np.lexsort([cross_grid[0, :], cross_grid[1, :], cross_grid[2, :]]) + cross_grid = cross_grid[:, inds_a] + lons, lats, depths, res = cross_grid + + lonsU = np.unique(lons) + latsU = np.unique(lats) + depthsU = np.unique(depths) + grid = (depthsU, latsU, lonsU) + shape = [len(item) for item in grid] + values = res.reshape(shape) + rginter = RegularGridInterpolator(grid, values, method='linear', bounds_error=False, fill_value=0) + + vg = read_vgrid(fname_vg)[0] + vg = np.array([(vg['lons'], vg['lats'], vg['depths'], vg['vps'])])[0] + inds_b = np.lexsort([vg[0, :], vg[1, :], vg[2, :]]) + vg = vg[:, inds_b] + lonsVG, latsVG, depthsVG, vpsVG = vg + + vgref = read_vgrid(fname_vgref)[0] + vgref = np.array([(vgref['lons'], vgref['lats'], vgref['depths'], vgref['vps'])])[0] + vgref = vgref[:, inds_b] + vpsVGREF = vgref[3] + dvp = (vpsVG - vpsVGREF) / vpsVGREF * 100.
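+ # dvp above: velocity perturbation of the inverted grid relative to the reference grid, in percent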
+ vg = np.vstack([vg, dvp]) + + resVG = rginter((depthsVG, latsVG, lonsVG)) + vg = np.vstack([vg, resVG]) + + # remove velocity information where no resolution is given: + #inds_no_res = np.where(resVG == 0.)[0] + #vg[3, inds_no_res] = vgref[3, inds_no_res] + #vg[4, inds_no_res] = 0. + + return vg + + +# def get_rginter_tesauro_discrete_rde(fnin, rde_in=0.1, rde_out=0., ftype='npy'): +# ''' +# Create and return rginter object containing cov data based on vp of discrete Tesauro grid. If vp is negative (outside +# of grid) std will be rde_out, else rde_in. +# :param fnin: fname tesauro discrete (txt) +# :return: rginter object accessed by: rginter((lon, lat, depth)) +# ''' +# if ftype == 'txt': +# lons, lats, depths, values = np.loadtxt(fnin).transpose() +# elif ftype == 'npy': +# lons, lats, depths, values = np.load(fnin) +# # negative velocities -> np.nan +# #WRONG, vp +/- +# #values[values < 0] = rde_out +# #values[values >= 0] = rde_in +# lonsU = np.unique(lons) +# latsU = np.unique(lats) +# depthsU = np.unique(depths) +# grid = (lonsU, latsU, depthsU) +# shape = [len(item) for item in grid] +# values = values.reshape(shape) +# rginter = RegularGridInterpolator(grid, values, method='linear', bounds_error=False, fill_value=rde_out) +# return rginter + + +def get_index(point_index, nj, nk): + ''' + Small function to calculate i,j,k indices for a point_index counting over i, then j, then k + ''' + i = point_index // (nj * nk) + if point_index % (nj * nk): + i += 1 + j = point_index % (nj * nk) // nk + else: + j = nj + if point_index % (nj * nk) % nk: + j += 1 + k = point_index % (nj * nk) % nk + else: + k = nk + return i, j, k + + +def transform_point_index(point_index, ni, nj, nk): + ''' + Small function to calculate new point index counting over k, then j, then i instead of i, j, k + ''' + i, j, k = get_index(point_index, nj, nk) + new_point_index = (nj * ni) * (k - 1) + ni * (j - 1) + i + return new_point_index + + +#def write_vtk_pyevtk(grid, fname, write_cov=False): +# cellData = {'velocity': np.array(grid['vps'])} +# if write_cov: +# cellData['diag_covariance'] = np.array(grid['covs']) +# +# gridToVTK(fname, np.array(grid['xs'], dtype='float64'), np.array(grid['ys'], dtype='float64'), +# np.array(grid['zs'], dtype='float64'), cellData={'zs': np.array(grid['zs'], dtype='float64')}) + + +def write_vtk_legacy(grid, fname, write_data=['vps']): + with open(fname, 'w') as outfile: + nPoints = len(grid['xs']) + + outfile.write('# vtk DataFile Version 3.1\n') + outfile.write('FMM Init points\n') + outfile.write('ASCII\n') + nTheta = len(np.unique(grid['lats'])) + nPhi = len(np.unique(grid['lons'])) + nR = len(np.unique(grid['depths'])) + + outfile.write('DATASET STRUCTURED_GRID\n') + outfile.write('DIMENSIONS %15d %15d %15d\n' % (nPhi, nTheta, nR)) + outfile.write('POINTS %15d float\n' % (nPoints)) + + for x, y, z in zip(grid['xs'], grid['ys'], grid['zs']): + outfile.write('{x} {y} {z}\n'.format(x=x, y=y, z=z)) + + # write header with number of data points + outfile.write('\nPOINT_DATA {}\n'.format(nPoints)) + + if 'grid_indices' in write_data: + grid['grid_indices'] = list(range(len(grid['xs']))) + + for data_name in write_data: + if not data_name in grid.keys(): + warnings.warn('Data with name: {} not found in grid.'.format(data_name)) + continue + + # translate data name to a label for convenience + translate_names = {'vps': 'p_velocity', + 'covs': 'diag_covariance', + 'frechs': 'frechet_deriv_sum', + 'rdes': 'res_mat_diagonal_element', + 'res': 'resolution_information'} + if data_name in 
translate_names.keys(): + data_label = translate_names[data_name] + else: + data_label = data_name + + # write header for data + outfile.write('SCALARS {} float 1\n'.format(data_label)) + outfile.write('LOOKUP_TABLE default\n') + + for datum in grid[data_name]: + if np.isnan(datum): datum = -1. + outfile.write('{}\n'.format(datum)) + + print('Wrote {} points to file {}'.format(nPoints, fname)) + + +def write_vtk(grid, fname, write_data=['vps'], clat=None, clon=None, dlat=None, dlon=None, legacy=True, sort=False): + if sort: + import copy + grid = copy.deepcopy(grid) + grid = re_sort_grid(grid, write_data) + + if clat and clon and dlat and dlon: + lat = np.array(grid['lats']) + lon = np.array(grid['lons']) + ids = np.where((lat > clat - dlat) & (lat < clat + dlat) & (lon > clon - dlon) & (lon < clon + dlon)) + for key, value in grid.items(): + if value: + grid[key] = list(np.array(value)[ids]) + + #if legacy: + write_vtk_legacy(grid, fname, write_data) + #else: + # write_vtk_pyevtk(grid, fname, write_cov) + + +def re_sort_grid(grid, write_data): + grid_keys = ['lons', 'lats', 'depths', 'xs', 'ys', 'zs'] + [val for val in write_data] + array = np.array([grid[key] for key in grid_keys]) + + # rearrange array by different rows (outer loop inverse depth [2, ::-1] then lat [1, :] then lon [0, :]) for vtk + ind = np.lexsort((array[0, :], array[1, :], array[2, ::-1])) + array = array[:, ind] + + for index, key in enumerate(grid_keys): + grid[key] = list(array[index, :]) + + return grid + + +def _readVgrid(filename): + def readNumberOfPoints(filename): + fin = open(filename, 'r') + vglines = fin.readlines() + + nR = int(vglines[1].split()[0]) + nTheta = int(vglines[1].split()[1]) + nPhi = int(vglines[1].split()[2]) + + print('readNumberOf Points: Awaiting %d grid points in %s' + % (nR * nTheta * nPhi, filename)) + fin.close() + return nR, nTheta, nPhi + + def readDelta(filename): + fin = open(filename, 'r') + vglines = fin.readlines() + + dR = float(vglines[2].split()[0]) + dTheta = float(vglines[2].split()[1]) + dPhi = float(vglines[2].split()[2]) + + fin.close() + return dR, dTheta, dPhi + + def readStartpoints(filename): + fin = open(filename, 'r') + vglines = fin.readlines() + + sR = float(vglines[3].split()[0]) + sTheta = float(vglines[3].split()[1]) + sPhi = float(vglines[3].split()[2]) + + fin.close() + return sR, sTheta, sPhi + + def readVelocity(filename): + ''' + Reads in velocity from vgrids file and returns a list containing all values in the same order + ''' + vel = [] + cov = [] + smooths = [] + pdevs = [] + count = 0 + fin = open(filename, 'r') + vglines = fin.readlines() + + for line in vglines: + count += 1 + if count > 4: + l_split = line.split() + if l_split: + vel.append(float(l_split[0])) + if len(l_split) >= 2: + cov.append(float(l_split[1])) + if len(l_split) >= 3: + smooths.append(float(l_split[2])) + if len(l_split) >= 4: + pdevs.append(float(l_split[3])) + + + print("Read %d points out of file: %s" % (len(vel), filename)) + return vel, cov, smooths, pdevs + + # Theta, Phi in radians, R in km + nR, nTheta, nPhi = readNumberOfPoints(filename) + dR, dThetaRad, dPhiRad = readDelta(filename) + sR, sThetaRad, sPhiRad = readStartpoints(filename) + vel, cov, smooths, pdevs = readVelocity(filename) + + dTheta, dPhi = np.rad2deg((dThetaRad, dPhiRad)) + sTheta, sPhi = np.rad2deg((sThetaRad, sPhiRad)) + + number = (nR, nTheta, nPhi) + delta = (dR, dTheta, dPhi) + start = (sR, sTheta, sPhi) + return number, delta, start, vel, cov, smooths, pdevs + + +def _generateGrids(number, delta, 
start): + nR, nTheta, nPhi = number + dR, dTheta, dPhi = delta + sR, sTheta, sPhi = start + + eR = sR + (nR - 1) * dR + ePhi = sPhi + (nPhi - 1) * dPhi + eTheta = sTheta + (nTheta - 1) * dTheta + + thetaGrid = np.linspace(sTheta, eTheta, num=nTheta) + phiGrid = np.linspace(sPhi, ePhi, num=nPhi) + rGrid = np.linspace(sR, eR, num=nR) + + return (thetaGrid, phiGrid, rGrid) + + +def apply_convolution_filter(sigx, sigy, sigz, spacing_xyz, infile, wdir, fname_out, values=['vps'], io_json=False): + if io_json: + fname_ext = '_{}'.format(os.path.split(fname_out)[-1].split('.json')[0]) + grid = load_json_file(infile) + else: + list_in = np.load(infile) + lons, lats, depths = list_in[:3] + values_dict = {key: list_in[index + 3] for index, key in enumerate(values)} + grid = grid_from_latlon_values(lons, lats, depths, values_dict=values_dict) + fname_ext = '' + grid = convolution_filter(grid, (sigx, sigy, sigz), spacing_xyz, wdir=wdir, fname_ext=fname_ext, values_list=values) + if io_json: + save_json_file(grid, fname_out) + else: + a = np.array([grid['lons'], grid['lats'], grid['depths'], *[grid[val] for val in values]]) + np.save(fname_out, a) + + +def apply_vel_changes_to_absolute_grid(fname_abs, fname_rel, fname_out, perc=True): + grid_abs = load_json_file(fname_abs) + grid_rel = load_json_file(fname_rel) + # account for grid_rel being extended vertically + depths = np.unique(grid_abs['depths']) + vel_abs = np.array(grid_abs['vps']) + vel_rel = np.array(grid_rel['vps'])[(np.array(grid_rel['depths']) >= min(depths)) & (np.array(grid_rel['depths']) <= max(depths))] + if perc: + vel_abs *= 1 + vel_rel / 100 + else: + vel_abs += vel_rel + grid_abs['vps'] = list(vel_abs) + save_json_file(grid_abs, fname_out) + + +def init_dict(): + grid = dict(lons = [], lats = [], depths = [], vps = [], covs = [], xs = [], ys = [], zs = []) + return grid + + +def points_from_vtk(fname_in, unique_points=True): + points = [] + # assuming this value is always the same... 
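+    # Layout sketch (an assumption matching the reader loop below): in the legacy
+    # ASCII vtk files exported from Paraview, each line after the 'POINTS <n> float'
+    # header packs three x/y/z triples, i.e. nine floats per line:
+    #   POINTS 6 float
+    #   x0 y0 z0 x1 y1 z1 x2 y2 z2
+    #   x3 y3 z3 x4 y4 z4 x5 y5 z5
+    # if a writer uses a different packing, points_per_line must be adapted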
+    points_per_line = 3
+    with open(fname_in, 'r') as infile:
+        header = infile.readline()
+        print('Reading file {}: {}'.format(fname_in, header))
+        title = infile.readline()
+        fmt = infile.readline()  # ASCII
+        _, dtype = infile.readline().split()
+        assert dtype == 'POLYDATA', 'Expecting POLYDATA'
+        pts, npts, pts_type = infile.readline().split()
+        assert pts == 'POINTS', 'Expecting POINTS'
+        # note: assumes npts is a multiple of points_per_line; remainder points would be dropped
+        for index in range(int(npts) // points_per_line):
+            items = infile.readline().split()
+            index_start = 0
+            for lineindex in range(points_per_line):
+                point = [float(item) for item in items[index_start: index_start + points_per_line]]
+                if point not in points or not unique_points:
+                    points.append(point)
+                index_start += points_per_line
+
+    return np.array(points)
+
+
+def point_in_box(box_corner_points, points_to_check):
+    def check_dependency(u, v, w):
+        # vectors are considered linearly dependent if their determinant (almost) vanishes
+        return abs(np.linalg.det(np.array([u, v, w]))) <= 1e-7
+
+    p0 = box_corner_points[0]
+    p1 = box_corner_points[1]
+    p2 = box_corner_points[2]
+    p4 = box_corner_points[4]
+
+    u = p1 - p0
+    v = p2 - p0
+    w = p4 - p0
+
+    assert not check_dependency(u, v, w), 'Vectors u, v, w seem to be linearly dependent: {}, {}, {}'.format(u, v, w)
+
+    print('Checking {} points.'.format(len(points_to_check)))
+    results = []
+    for index, point in enumerate(points_to_check):
+        if all([np.dot(u, p0) <= np.dot(u, point) <= np.dot(u, p1),
+                np.dot(v, p0) <= np.dot(v, point) <= np.dot(v, p2),
+                np.dot(w, p0) <= np.dot(w, point) <= np.dot(w, p4)]):
+            results.append(True)
+        else:
+            results.append(False)
+
+    return np.array(results)
+
+
+def discretize_box(fname_in, fname_out, vp_diff=3., R=6371., dr=10., dLat=0.1, dLon=0.1, cf=5, earth_model='ak135_diehl_v2'):
+    '''
+    Discretize a box volume exported from Paraview (vtk) onto a regular lat/lon/depth grid.
+    :param fname_in: vtk input filename generated from a box in Paraview
+    :param fname_out: output filename for the discretized box grid (.json)
+ :param vp_diff: p_velocity increase/decrease (%) + :param R: earth radius + :param dr: delta R + :param dLat: delta Latitude + :param dLon: delta Longitude + :param cf: cushion factor (add some spacing outside the box boundaries for the Gauss Kernel) + :return: + ''' + points = points_from_vtk(fname_in) + corner_grid = init_dict() + box_grid = init_dict() + + for x, y, z in points: + corner_grid['xs'].append(x) + corner_grid['ys'].append(y) + corner_grid['zs'].append(z) + + # transform from cartesian coordinate system and append to corner_grid dictionary + lat, lon, r = cart2pol(x, y, z) + corner_grid['lons'].append(lon) + corner_grid['lats'].append(lat) + corner_grid['depths'].append(R - r) + + print('Initialized corner grid') + + minDepth = min(corner_grid['depths']) + maxDepth = max(corner_grid['depths']) + minLat = min(corner_grid['lats']) + maxLat = max(corner_grid['lats']) + minLon = min(corner_grid['lons']) + maxLon = max(corner_grid['lons']) + + depths = np.arange(minDepth - cf * dr, maxDepth + cf * dr, step=dr) + lats = np.arange(minLat - cf * dLat, maxLat + cf * dLat, step=dLat) + lons = np.arange(minLon - cf * dLon, maxLon + cf * dLon, step=dLon) + + print('NLat, NLon, Ndepth:', len(lats), len(lons), len(depths)) + + points_to_check = [] + for depth in depths: + for lat in lats: + for lon in lons: + box_grid['depths'].append(depth) + box_grid['lats'].append(lat) + box_grid['lons'].append(lon) + + # transform to cartesian coordinate system and append to box_grid dictionary + x, y, z = pol2cart(lat, lon, R - depth) + box_grid['xs'].append(x) + box_grid['ys'].append(y) + box_grid['zs'].append(z) + + points_to_check.append([x, y, z]) + + # append zeros + box_grid['vps'].append(0.) + + print('Initialized box grid') + + box_corner_points = np.array(list(zip(corner_grid['xs'], corner_grid['ys'], corner_grid['zs']))) + pib = point_in_box(box_corner_points, np.array(points_to_check)) + + print('Calculating final grid...') + + model = TauModel.from_file(earth_model) + + + for index in range(len(box_grid['vps'])): + if pib[index]: + depth = box_grid['depths'][index] + vel_ref = model.s_mod.v_mod.evaluate_below(depth, 'P')[0] + box_grid['vps'][index] = vp_diff / 100. 
* vel_ref
+
+    with open(fname_out, 'w') as outfile:
+        json.dump(box_grid, outfile)
+        print('Wrote file {}'.format(fname_out))
+
+
+def read_diehl_rde_layers(inputdir='/data/AlpArray_Data/crust_diehl_2009/Diehl_et_al_2009_Alpine_HorizontalXSections/',
+                          sigma_xyz=(12.5, 12.5, 12.5), spacing_xyz=(2, 2, 15)):
+    depths = {infile: float(infile.replace('.xyz', '').lstrip('plane').lstrip('0')) for infile in
+              sorted(glob.glob1(inputdir, '*.xyz'))}
+    grid = init_dict()
+    grid['rdes'] = []
+    for infile, depth in depths.items():
+        print('Working on file:', infile)
+        points = np.loadtxt(os.path.join(inputdir, infile), skiprows=2)
+        lons = points[:, 2]
+        lats = points[:, 3]
+        rdes = points[:, 11]
+        grid['lons'] += list(lons)
+        grid['lats'] += list(lats)
+        grid['depths'] += list(np.ones(len(lons)) * depth)
+        grid['rdes'] += list(rdes)
+    return grid
+    #grid_conv = convolution_filter(grid, sigma_llr=sigma_llr, spacing_llr=spacing_llr, val='rdes')
+    #return grid_conv
+
+
+def get_rginter_diehl_legacy(grid, value='vps', max_value=np.inf, min_value=-np.inf, fill_value=0):
+    print('Create rginter Diehl')
+    lats = np.unique(grid['lats'])
+    lons = np.unique(grid['lons'])
+    depths = np.unique(grid['depths'])
+    values = np.zeros((len(lons), len(lats), len(depths)))
+    index = 0
+    for idepth, depth in enumerate(depths):
+        for ilat, lat in enumerate(lats):
+            # longitude counts from high to low in Diehl grid
+            for ilon, lon in reversed(list(enumerate(lons))):
+                vel = grid[value][index]
+                values[ilon, ilat, idepth] = vel if max_value > abs(vel) > min_value else fill_value
+                index += 1
+    grid = (lons, lats, depths)
+    rginter = RegularGridInterpolator(grid, values, method='linear', bounds_error=False, fill_value=fill_value)
+    print('Done!')
+    return rginter
+
+
+def get_rginter_diehl(grid, value='vps', max_value=np.inf, min_value=-np.inf, fill_value=0):
+    latsU = np.unique(grid['lats'])
+    lonsU = np.unique(grid['lons'])
+    depthsU = np.unique(grid['depths'])
+
+    lats = grid['lats']
+    lons = grid['lons']
+    depths = grid['depths']
+    values = np.array(grid[value])
+
+    # mark values outside the [min_value, max_value] range as invalid
+    indices_invalid = np.where((values < min_value) | (values > max_value))
+    print('Will set {} of {} points to fill value because of min/max value.'.format(len(indices_invalid[0]), len(values)))
+    values[indices_invalid] = fill_value
+
+    # get indices to sort array with lon, lat, depth in that order
+    a = np.array([lons, lats, depths])
+    ind = np.lexsort((a[2, :], a[1, :], a[0, :]))
+
+    # use indices to sort actual values and shape correspondingly
+    values = values[ind]
+    shape = (len(lonsU), len(latsU), len(depthsU))
+    values = values.reshape(shape)
+
+    grid = (lonsU, latsU, depthsU)
+    rginter = RegularGridInterpolator(grid, values, method='linear', bounds_error=False, fill_value=fill_value)
+    return rginter
+
+
+def rginter_test_plot(lons, lats, depths, rginter):
+    import matplotlib.pyplot as plt
+    LONS, LATS = np.meshgrid(lons, lats)
+    if not isinstance(rginter, list):
+        rginter = [rginter]
+    for rgi in rginter:
+        fig = plt.figure()
+        pc = plt.pcolormesh(LONS, LATS, rgi((LONS, LATS, depths)))
+        plt.colorbar(pc)
+        plt.show()
+
+
+def depth_slice_test_plot(lons, lats, depths, depth, values, cmap, vmin, vmax):
+    import matplotlib.pyplot as plt
+    LONS, LATS = np.meshgrid(np.unique(lons), np.unique(lats))
+    shape = LONS.shape
+    indices = np.where(depths == depth)
+    if len(indices[0]) == 0:
+        raise Exception('Invalid depth')
+    if not isinstance(values, list):
+        values = [values]
+    for val in values:
+        fig = plt.figure()
+        pc = plt.pcolormesh(LONS, LATS,
val[indices].reshape(shape[:2]), cmap=cmap, vmin=vmin, vmax=vmax) + plt.colorbar(pc) + plt.show() + + +def interpolate_diehl_rde_regular_on_vgrid(vgrid_file, external_grid_file, fname_out, fname_out_vtk=None, min_depth=-4, + default_cov=1.): + ext_grid = load_json_file(external_grid_file) + + # read FMTOMO velocity/inversion grid file + vgrid, npts, delta, start = read_vgrid(vgrid_file) + + rginter_diehl = get_rginter_diehl(ext_grid, value='rdes', fill_value=0.) + + vgrid['covs'] = np.ones(len(vgrid['vps'])) * default_cov + + tstart = datetime.now() + print('Starting interpolation at {}'.format(tstart)) + rdes = rginter_diehl((vgrid['lons'], vgrid['lats'], vgrid['depths'])) + + # set covariance ~above surface to zero! + indices_air = np.where(np.array(vgrid['depths']) < min_depth) + covs = transfer_rde_cov(rdes, 500.) + covs[indices_air] = 0.1 + + vgrid['covs'] = list(covs) + vgrid['rdes'] = list(rdes) + print('Finished after {}'.format(datetime.now() - tstart)) + + write_vgrid(vgrid, npts, delta, start, fname=fname_out) + if fname_out_vtk: + write_vtk(vgrid, fname_out_vtk, write_data=['vps', 'covs', 'rdes'], legacy=True) + + +def interpolate_diehl_rde_regular_on_diehl_grid(diehl_grid_file, external_grid_file, fname_out, fname_out_vtk=None): + ext_grid = load_json_file(external_grid_file) + grid = load_json_file(diehl_grid_file) + + rginter_diehl = get_rginter_diehl(ext_grid, value='rdes', fill_value=0.) + + tstart = datetime.now() + print('Starting interpolation at {}'.format(tstart)) + rdes = rginter_diehl((grid['lons'], grid['lats'], grid['depths'])) + grid['covs'] = list(transfer_rde_cov(rdes, 500.)) + grid['rdes'] = list(rdes) + print('Finished after {}'.format(datetime.now() - tstart)) + + save_json_file(grid, fname_out) + if fname_out_vtk: + write_vtk(grid, fname_out_vtk, write_data=['vps', 'covs', 'rdes'], legacy=True) + + +def interpolate_diehl_vps_regular_on_vgrid(vgrid_file, external_grid_file, fname_out, fname_out_vtk=None, + default_cov=1.): + ext_grid = load_json_file(external_grid_file) + + # read FMTOMO velocity/inversion grid file + vgrid, npts, delta, start = read_vgrid(vgrid_file) + + rginter_diehl = get_rginter_diehl(ext_grid, value='vps', fill_value=0.) + + vgrid['covs'] = list(np.ones(len(vgrid['vps'])) * default_cov) + + tstart = datetime.now() + print('Starting interpolation at {}'.format(tstart)) + vps = rginter_diehl((vgrid['lons'], vgrid['lats'], vgrid['depths'])) + vgrid['vps'] = list(np.array(vgrid['vps']) * (1 + vps / 100)) + print('Finished after {}'.format(datetime.now() - tstart)) + + if fname_out_vtk: + write_vtk(vgrid, fname_out_vtk, write_data=['vps'], legacy=True) + write_vgrid(vgrid, npts, delta, start, fname=fname_out) + + +def interpolate_vps_regular_on_vgrid(vgrid_file, ext_grid_file, fname_out, fname_out_vtk=None, modify_vp=True, + default_cov=15., ftype='npy', rel_perc=True, covs_perc=True, scale_tf=500, + tf_linear=False, ext_rdes=True, min_depth=None, max_depth=None): + + print('Applying default covariance of {} percent.'.format(default_cov)) + + # read FMTOMO velocity/inversion grid file + vgrid, npts, delta, start = read_vgrid(vgrid_file) + + rginter_vp = get_rginter_tesauro_discrete(ext_grid_file, ftype=ftype, relative_values=True, col_index=3, + min_depth=min_depth, max_depth=max_depth) + + #rginter_std = get_rginter_tesauro_discrete_std(ext_grid_file) + + if covs_perc: + covs_array = np.array(vgrid['vps']) * default_cov / 100. 
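+        # covs_perc=True interprets default_cov as a percentage, i.e. default_cov=15
+        # yields an a priori standard deviation of 15 % of the local reference
+        # velocity at every node; the else branch uses it as an absolute value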
+ else: + covs_array = np.ones(len(np.array(vgrid['vps']))) * default_cov + + tstart = datetime.now() + print('Starting interpolation at {}'.format(tstart)) + vps_external = rginter_vp((vgrid['lons'], vgrid['lats'], vgrid['depths'])) + vps_array = np.array(vgrid['vps']) + ind_no_nan = ~np.isnan(vps_external) + + # interpolate covariances + if ext_rdes: + rginter_rde = get_rginter_tesauro_discrete(ext_grid_file, ftype=ftype, relative_values=True, col_index=4, + fill_value=0., min_depth=min_depth, max_depth=max_depth) + rde_external = rginter_rde((vgrid['lons'], vgrid['lats'], vgrid['depths'])) + ind_no_zero_rde = rde_external > 1e-3 + covs_external = np.copy(covs_array) + covs_external[ind_no_zero_rde] = transfer_rde_cov(rde_external, scale_tf, linear=tf_linear)[ind_no_zero_rde] + inds_external = covs_array > covs_external + covs_array[inds_external] = covs_external[inds_external] + vgrid['rdes'] = list(rde_external) + + if modify_vp: + if rel_perc: + vps_array[ind_no_nan] = vps_array[ind_no_nan] * (1 + vps_external[ind_no_nan] / 100.) + else: + vps_array[ind_no_nan] = vps_external[ind_no_nan] + else: + print('WARNING: MODIFY VPS IS OFF.') + vgrid['vps'] = list(vps_array) + vgrid['covs'] = list(covs_array) + + print('Finished after {}'.format(datetime.now() - tstart)) + + if fname_out_vtk: + write_vtk(vgrid, fname_out_vtk, write_data=['vps', 'covs', 'rdes'], legacy=True) + write_vgrid(vgrid, npts, delta, start, fname=fname_out) + + +def merge_tesauro_and_diehl_on_extended_diehl_grid(fn_tes, fn_diehl, fnout, _tes_only=False): + ''' + Create an extended Diehl grid (finer than Tesauro) to interpolate Tesauro on. Then vp perturbations can be + superimposed and smoothed before putting on coarser FMTOMO grid. + :param fn_tes: + :param fn_diehl: diehl differences (percentage) to ak135d + :return: + ''' + + # def extend_axis(axis, vmin, vmax): + # axisU = np.unique(axis) + # delta = axisU[1] - axisU[0] + # smin = min(axisU) - vmin + # smax = vmax - max(axisU) + # nmin = int(np.ceil(smin / delta)) + # nmax = int(np.ceil(smax / delta)) + # vmin_new = min(axisU) - delta * nmin + # vmax_new = max(axisU) + delta * nmax + # axis_down = np.arange(vmin_new, min(axisU) - delta, delta) + # axis_up = np.arange(max(axisU) + delta, vmax_new, delta) + # axisU_new = np.append(axis_down, axisU) + # axisU_new = np.append(axisU_new, axis_up) + # return axisU_new + + # load tesauro grid and diehl grid + lonsT, latsT, depthsT, vpsT, rdesT = np.load(fn_tes) + lonsD, latsD, depthsD, vpsD, rdesD = np.load(fn_diehl) + + grid_diehl = grid_from_latlon_values(lonsD, latsD, depthsD, values_dict={'vps': vpsD, 'rdes': rdesD}) + rginter_tes_vps = get_rginter_tesauro_discrete(fn_tes, fill_value=0., relative_values=True, ftype='npy', + col_index=3) + rginter_tes_rde = get_rginter_tesauro_discrete(fn_tes, fill_value=0., relative_values=True, ftype='npy', + col_index=4) + #rginter_tes_rde = get_rginter_tesauro_discrete_rde(fn_tes, ftype='txt') + rginter_diehl_vps = get_rginter_diehl(grid_diehl, 'vps') + rginter_diehl_rdes = get_rginter_diehl(grid_diehl, 'rdes', fill_value=0.) 
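+    # blending sketch (see weighted_average_val below): with scale_transfer_func=1000,
+    # a Diehl RDE of 0.1 gives a Diehl weight of 1 - 1 / (1000 * 0.1**2 + 1) ~ 0.91,
+    # while RDE -> 0 gives weight 0, i.e. the merged model falls back to the
+    # Tesauro values where the Diehl model is unresolved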
+ + lonMin = min(lonsT) + lonMax = max(lonsT) + latMin = min(latsT) + latMax = max(latsT) + depthsMin = min([min(depthsD), min(depthsT)]) + depthsMax = max([max(depthsD), max(depthsT)]) + delta_lonD = np.unique(lonsD)[1] - np.unique(lonsD)[0] + delta_latD = np.unique(latsD)[1] - np.unique(latsD)[0] + delta_depthD = np.unique(depthsD)[1] - np.unique(depthsD)[0] + + lonsU_new = np.arange(lonMin, lonMax, delta_lonD) + latsU_new = np.arange(latMin, latMax, delta_latD) + depthsU_new = np.arange(depthsMin, depthsMax, delta_depthD) + + lons_new, lats_new, depths_new = np.meshgrid(lonsU_new, latsU_new, depthsU_new) + shape = lons_new.shape + + vps_tes_new = rginter_tes_vps((lons_new, lats_new, depths_new)) + rde_tes_new = rginter_tes_rde((lons_new, lats_new, depths_new)) + vps_diehl_new = rginter_diehl_vps((lons_new, lats_new, depths_new)) + rdes_diehl_new = rginter_diehl_rdes((lons_new, lats_new, depths_new)) + #cov_diehl_new = transfer_rde_cov(rdes_diehl_new, 500.) + vps_average = weighted_average_val(vps_diehl_new, val_other=vps_tes_new, rdes=rdes_diehl_new, scale_transfer_func=1000.) + rde_average = weighted_average_val(rdes_diehl_new, val_other=rde_tes_new, rdes=rdes_diehl_new, scale_transfer_func=1000.) + if _tes_only: + print('WARNING WILL ONLY USE TESAURO MESH') + vps_average = vps_tes_new + rde_average = rde_tes_new + #test_plot_rginter(np.unique(lons_new), np.unique(lats_new), 7., rginter_diehl_rdes) + #test_plot_depth_slice(lons_new, lats_new, depths_new, 49., [vps_diehl_new, vps_tes_new, vps_average], + # cmap='seismic_r', vmin=-20, vmax=20) + result = np.array([lons_new.ravel(), lats_new.ravel(), depths_new.ravel(), vps_average.ravel(), rde_average.ravel()]) + #rdes_diehl_new.ravel(), rde_tes_new.ravel()]) + #ind = np.lexsort((result[0, :], result[1, :], result[2, ::-1])) + np.save(fnout, result) + + +def weighted_average_val(val_diehl, val_other, rdes, scale_transfer_func): + weights = 1. - transfer_rde_cov(rdes, scale_transfer_func) + val_average = weights * val_diehl + (1. - weights) * val_other + return val_average + + +def grid_from_latlon_values(lons, lats, depths, values_dict): + grid = init_dict() + grid['lons'] = lons + grid['lats'] = lats + grid['depths'] = depths + for key, value in values_dict.items(): + grid[key] = value + + return grid + + +def transfer_rde_cov(rdes, scale, linear=False): + '''small transfer function based on 1/(sx**2 + 1) function''' + if linear: + return -1 * rdes + 1 + return 1. 
/ (scale * rdes ** 2 + 1) + + +def plot_lat_slice_npy_grid(fn_grid, lat, column=3, R=6371.): + array = np.load(fn_grid) + lons = np.unique(array[0,:]) + lats = np.unique(array[1,:]) + depths = np.unique(array[2,:]) + # find closest lat value + lat = lats[np.argmin(abs(lats - lat))] + inds = np.where(array[1, :] == lat) + values = array[column, :][inds] + shape = (len(lons), len(depths)) + lons_km = np.deg2rad(array[0, :][inds] * R * np.cos(np.deg2rad(lat))) # reduce radius with lat + fig = plt.figure() + pc = plt.pcolormesh(lons_km.reshape(shape), array[2,:][inds].reshape(shape), values.reshape(shape)) + cbar = plt.colorbar(pc) + plt.xlabel('Km on lat = {} degree'.format(lat)) + plt.ylabel('Depth in km.') + ax = plt.gca() + ax.invert_yaxis() + ax.set_aspect('equal') + plt.show() + + +def plot_lon_slice_npy_grid(fn_grid, lon, column=3, R=6371.): + array = np.load(fn_grid) + lons = np.unique(array[0,:]) + lats = np.unique(array[1,:]) + depths = np.unique(array[2,:]) + # find closest lat value + lon = lons[np.argmin(abs(lons - lon))] + inds = np.where(array[0, :] == lon) + values = array[column, :][inds] + shape = (len(lats), len(depths)) + lats_km = np.deg2rad(array[1, :][inds] * R) + fig = plt.figure() + pc = plt.pcolormesh(lats_km.reshape(shape), array[2, :][inds].reshape(shape), values.reshape(shape)) + cbar = plt.colorbar(pc) + plt.xlabel('Km on lon = {} degree'.format(lon)) + plt.ylabel('Depth in km.') + ax = plt.gca() + ax.invert_yaxis() + ax.set_aspect('equal') + plt.show() + + +def write_absolute(fnin_grid, fnin_diehl1d, fnout): + with open(fnin_diehl1d, 'r') as infile: + diehl1d = json.load(infile) + depths = {depth: vp for depth, vp in zip(diehl1d['depths'], diehl1d['vps'])} + grid_rel = np.load(fnin_grid) + vprefs = np.zeros(grid_rel[0].shape) + for depth, vpref in depths.items(): + vprefs[grid_rel[2]==depth] = vpref + vpabs = vprefs * (1 + grid_rel[3] / 100.) + # now it is actually grid abs! + grid_rel[3] = vpabs + np.save(fnout, grid_rel) + + +if __name__ == "__main__": + #apply_convolution_filter('/home/marcel/marcel_scratch/alparray/diehl_2009_grid_differences_ak135.json', + # wdir='/home/marcel/marcel_scratch/alparray/', + # fname_out='diehl_2009_diff_filt_25_25_5.json') + #apply_convolution_filter(12.5, 12.5, 5., (2., 2., 2.), + # '/data/AlpArray_Data/alparray_grids_and_crust/diehl_2009_grid_differences_ak135.json', + # wdir='/data/AlpArray_Data/alparray_grids_and_crust', + # fname_out='diehl_2009_diff_filt_12.5_12.5_5.json') + + + # TESAURO MODEL + #grid = read_tesauro_model('/rscratch/minos22/marcel/alparray/grl24037-sup-0002-ds01.txt', + # min_depth=-30., max_depth=70., npts_depth=21, R=6371.) 
+ #save_json_file(grid, '/rscratch/minos22/marcel/alparray/tesauro_discrete_5km.json') + #write_vtk(grid, '/home/marcel/marcel_scratch/alparray/tesauro.vtk', write_cov=True, legacy=True) + + # DIEHL MODEL + wdir='/home/marcel/marcel_scratch/alparray/' + fn_diehl='diehl_2009_grid.json' + fn_diehl_rel='diehl_2009_grid_differences_ak135_PERCENTAGE.json' + fn_diehl_rel_zeros='diehl_2009_grid_diff_perc_ak135d_appended_zeros.json' + fn_diehl_rel_filt='diehl_2009_diff_filt_8_8_6.json' + fn_diehl_abs_filt='diehl_2009_abs_filt_8_8_6.json' + fn_diehl_1d = '/data/AlpArray_Data/alparray_grids_and_crust/DIEHL_CRUST_GRIDS/minimum_1D_diehl.json' + + diehl_on_vgrid = False + dts_on_vgrid = False + kstl_on_vgrid = False + filter_kstl_model = False + tesauro_on_vgrid = True + diehl_RDE_on_vgrid = False + diehl_on_diehl = False + abs_from_rel = False + synthetic_box_test = False + filter_dts_model = False + filter_wolle_model = False + wolle_on_vgrid = False + + #grid_differences(fname_in=fn_diehl, fname_out=fn_diehl_rel, wdir=wdir, + # model=pjoin(wdir, 'minimum_1D_diehl.json'), percentage=True) + #add_cushion_zeros_top_bot(fname_in=pjoin(wdir, fn_diehl_rel), fname_out=pjoin(wdir, fn_diehl_rel_zeros), + # nlayers=10, bot=True, top=False) + + #apply_convolution_filter(8., 8., 6., spacing_llr=(2., 2., 2.), + # infile=pjoin(wdir, fn_diehl_rel_zeros), wdir=wdir, + # fname_out=fn_diehl_rel_filt) + + # DTS MODEL + #fn_tesauro_rel = '/data/AlpArray_Data/alparray_grids_and_crust/DTS_CRUST_GRIDS/tesauro_SPADA_grid_dd2.0_rel_ak135dv2_PERCENTAGE_rde.npy' + fn_tesauro_rel = '/data/AlpArray_Data/alparray_grids_and_crust/tesauro_grid_dd2.0_low_cov_ADA_rel_perc.npy' + fn_tesauro_rel_on_dgrid = '/data/AlpArray_Data/alparray_grids_and_crust/DTS_CRUST_GRIDS/tesauro_grid_dd2.0_rel_ak135dv2_PERCENTAGE_ADA_ON_DGRID.npy' + fn_filt_tesauro_npy = '/data/AlpArray_Data/alparray_grids_and_crust/DTS_CRUST_GRIDS/tesauro_grid_dd2.0_rel_ak135dv2_PERCENTAGE_ADA_filt_12.5_12.5_7.5.npy' + fn_diehl_rel_rde_npy = '/data/AlpArray_Data/alparray_grids_and_crust/DIEHL_CRUST_GRIDS/diehl_2009_grid_differences_with_rde_ak135_PERCENTAGE.npy' + #fn_diehl_rel_rde_npy = '/data/AlpArray_Data/alparray_grids_and_crust/DIEHL_CRUST_GRIDS/diehl_2009_grid_differences_ak135_with_rde_PERCENTAGE_CRUST_ONLY.npy' + fn_diehl_tesauro_npy = '/data/AlpArray_Data/alparray_grids_and_crust/DTS_CRUST_GRIDS/diehl_and_tesauro_grid_differences_ak135_PERCENTAGE_rde.npy' + fn_filt_diehl_tesauro_npy = '/data/AlpArray_Data/alparray_grids_and_crust/DTS_CRUST_GRIDS/diehl_and_tesauro_grid_differences_ak135_PERCENTAGE_rde_filt_12.5_12.5_7.5.npy' + #fn_filt_diehl_tesauro_ABS_npy = '/data/AlpArray_Data/alparray_grids_and_crust/DTS_CRUST_GRIDS/diehl_and_tesauro_grid_CRUST_ONLY_rde_filt_7.5_7.5_5.npy' + + # Kaestle Model + fn_kstl_npy = '/data/AlpArray_Data/alparray_grids_and_crust/KAESTLE_CRUST_GRIDS/kaestle_2018_PERCENTAGE_kstl_rde_100km_REGULAR_GRID_d1km.npy' + fn_filt_kstl_npy = '/data/AlpArray_Data/alparray_grids_and_crust/KAESTLE_CRUST_GRIDS/kaestle_2018_PERCENTAGE_kstl_rde_100km_filt_12.5_12.5_7.5.npy' + + # Wolle Model + it = 6 + #fn_wolle_npy = f'/data/AlpArray_Data/alparray_grids_and_crust/FWI_tests_Wolfgang/mk05_it{it}_rel_ak135dv2.npy' + fn_filt_wolle_npy = None#f'/data/AlpArray_Data/alparray_grids_and_crust/FWI_tests_Wolfgang/mk05_it{it}_rel_ak135dv2_filt_11_11_10.npy' + + fn_wolle_npy = f'/data/AlpArray_Data/alparray_grids_and_crust/FWI_tests_Wolfgang/mk06_it3_rel_ak135dv2.npy' + + #merge_tesauro_and_diehl_on_extended_diehl_grid(fn_tesauro_rel, fn_diehl_rel_rde_npy, 
fn_diehl_tesauro_npy) + + if filter_dts_model: + apply_convolution_filter(25., 25., 15., (2., 2., 2.), fn_diehl_tesauro_npy, + wdir='/data/AlpArray_Data/alparray_grids_and_crust', + fname_out=fn_filt_diehl_tesauro_npy, + values=['vps', 'rdes'], io_json=False) + + if filter_wolle_model: + # spacing old (mk01): (11., 11., 10.) + apply_convolution_filter(11, 11, 10, (22.2, 15.7, 20), fn_wolle_npy, + wdir='/data/AlpArray_Data/alparray_grids_and_crust/FWI_tests_Wolfgang', + fname_out=fn_filt_wolle_npy, + values=['vps'], io_json=False) + + if filter_kstl_model: + apply_convolution_filter(12.5, 12.5, 7.5, (11., 11., 1.), fn_kstl_npy, + wdir='/data/AlpArray_Data/alparray_grids_and_crust/KAESTLE_CRUST_GRIDS', + fname_out=fn_filt_kstl_npy, + values=['vps'], io_json=False) + + # apply_convolution_filter(7.5, 7.5, 5., (2., 2., 2.), fn_diehl_tesauro_npy, + # wdir='/data/AlpArray_Data/alparray_grids_and_crust', + # fname_out=fn_filt_diehl_tesauro_npy, + # values=['vps', 'rdes'], io_json=False) + # write_absolute(fn_filt_diehl_tesauro_npy, fn_diehl_1d, fn_filt_diehl_tesauro_ABS_npy) + + if dts_on_vgrid: + #fn_tesauro_discrete = '/data/AlpArray_Data/alparray_grids_and_crust/tesauro_SPADA_grid_dd2.0.txt' + #fpath = '/data/AlpArray_Data/fmtomo/v6/crust_incl_lf_sm_FIX_DTS_COARSE_grad_sm30_dm10' + fpath = '/data/AlpArray_Data/fmtomo/v6_S/crust_incl_hf_sm_FIX_DTS_grad_sm6_dm10' + #fpath = '/data/AlpArray_Data/fmtomo/v5_resolution_analysis/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10_checkerboard_3x3x4' + interpolate_vps_regular_on_vgrid(pjoin(fpath, 'vgridsref_orig.in'), fn_filt_diehl_tesauro_npy, + pjoin(fpath, 'vgrids_dts_crust.in'), + fname_out_vtk=pjoin(fpath, 'vgrids_dts_crust.vtk'), + default_cov=15., ftype='npy', covs_perc=True, modify_vp=True) + + if kstl_on_vgrid: + fpath = '/data/AlpArray_Data/fmtomo/v6_S/crust_incl_hf_KSTL_grad_sm6_dm10_S_SKS' + #fpath = '/data/AlpArray_Data/fmtomo/v6_S/no_crust_correction_hf_grad_sm6_dm10' + interpolate_vps_regular_on_vgrid(pjoin(fpath, 'vgridsref_orig.in'), fn_filt_kstl_npy, + pjoin(fpath, 'vgrids_kstl_crust.in'), + fname_out_vtk=pjoin(fpath, 'vgrids_kstl_crust.vtk'), + default_cov=15., ftype='npy', covs_perc=True, modify_vp=True, ext_rdes=True, + tf_linear=True) + + if wolle_on_vgrid: + fpath = '/data/AlpArray_Data/fmtomo/v6/fwi_model_wolfgang_test' + grid_file = fn_filt_wolle_npy if filter_wolle_model else fn_wolle_npy + interpolate_vps_regular_on_vgrid(vgrid_file=pjoin(fpath, 'vgridsref_orig.in'), ext_grid_file=grid_file, + fname_out=pjoin(fpath, f'vgrids_wolle_mk06_3.in'), + fname_out_vtk=pjoin(fpath, f'vgrids_wolle_mk06_3.vtk'), + default_cov=15., ftype='npy', covs_perc=True, modify_vp=True, ext_rdes=False, + tf_linear=True, max_depth=None) + + + if tesauro_on_vgrid: + # filter Tesauro/Spada only !!! 
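+        # pipeline sketch: (1) re-grid the Tesauro perturbations onto the finer
+        # Diehl spacing (_tes_only=True skips the Diehl blending), (2) smooth with
+        # a 12.5/12.5/7.5 km Gauss kernel, (3) interpolate the filtered
+        # perturbations onto the FMTOMO vgrid with a 15 % a priori covariance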
+        merge_tesauro_and_diehl_on_extended_diehl_grid(fn_tesauro_rel, fn_diehl_rel_rde_npy, fn_tesauro_rel_on_dgrid,
+                                                       _tes_only=True)
+        apply_convolution_filter(12.5, 12.5, 7.5, (2., 2., 2.), fn_tesauro_rel_on_dgrid,
+                                 wdir='/data/AlpArray_Data/alparray_grids_and_crust',
+                                 fname_out=fn_filt_tesauro_npy,
+                                 values=['vps', 'rdes'], io_json=False)
+
+        fpath = '/data/AdriaArray_Data/fmtomo_adriaarray/alpadege/crust_incl_TESAURO_sm30_dm1'
+        interpolate_vps_regular_on_vgrid(pjoin(fpath, 'vgrids.in'), fn_filt_tesauro_npy,
+                                         pjoin(fpath, 'vgrids_tesauro_crust.in'),
+                                         fname_out_vtk=pjoin(fpath, 'vgrids_tesauro_crust.vtk'),
+                                         default_cov=15., ftype='npy', covs_perc=True)
+
+    fpath = '/rscratch/minos13/marcel/fmtomo_alparray/v4/different_test_runs/alparray_mantle_diehl_crust_included_hf_1D_model_altered'
+
+    if diehl_on_vgrid:
+        interpolate_diehl_vps_regular_on_vgrid(pjoin(fpath, 'vgridsref_no_crust.in'), pjoin(wdir, fn_diehl_rel_filt),
+                                               fname_out=pjoin(fpath, 'vgrids_crust.in'),
+                                               fname_out_vtk=pjoin(fpath, 'vgrids_diehl_crust.vtk'))
+
+    # read diehl layers with RDE values and interpolate from regular grid on grid
+    external_grid_file = '/data/AlpArray_Data/crust_diehl_2009/layers_filtered_12.5_12.5_0.1-2_2_15.json'
+    #vgrid_file = '/rscratch/minos13/marcel/fmtomo_alparray/v3.5/alparray_mantle_diehl_crust_included_hf_gradient_smoothing/vgridsref_crust.in'
+
+    fname_in = 'vgrids_crust.in'  # if not doing crustal corrections, change this to the vgrids file including the crust: vgrids_crust.in
+    fname_out = pjoin(fpath, 'vgridsref_diehl_crust_cov.in')
+    fname_out_vtk = pjoin(fpath, 'vgridsref_diehl_crust_cov.vtk')
+    #grid = read_diehl_rde_layers()
+    #grid_conv = convolution_filter(grid, (12.5, 12.5, 0.1), spacing_llr=(2, 2, 15),
+    #                               wdir='/home/marcel/marcel_scratch/alparray', val='rdes')
+    #save_json_file(grid, external_grid_file)
+
+    if abs_from_rel:
+        # get absolute velocities on crustal model from smoothed relative values
+        apply_vel_changes_to_absolute_grid(pjoin(wdir, fn_diehl), pjoin(wdir, fn_diehl_rel_filt),
+                                           fname_out=pjoin(wdir, fn_diehl_abs_filt))
+
+    if diehl_RDE_on_vgrid:
+        # a priori covariance on vgrid based on RDE values
+        interpolate_diehl_rde_regular_on_vgrid(pjoin(fpath, fname_in), external_grid_file, fname_out,
+                                               fname_out_vtk=fname_out_vtk)
+
+    if diehl_on_diehl:
+        # a priori covariances on crust grid (e.g. for plotting threshold)
+        interpolate_diehl_rde_regular_on_diehl_grid(pjoin(wdir, fn_diehl_rel_filt), external_grid_file,
+                                                    pjoin(wdir, 'diehl_2009_grid_diff_with_rdes_12.5_12.5_0.1.json'),
+                                                    fname_out_vtk=pjoin(wdir, 'diehl_2009_rdes.vtk'))
+
+    # Synthetic Box test (grid spacing for boxes ~10x10x10 km)
+    # discretize_box('/home/marcel/marcel_scratch/alparray/box2.1_legacy.vtk',
+    #                '/home/marcel/marcel_scratch/alparray/box2.1.json',
+    #                vp_diff=3., R=6371., dr=10., dLat=0.09, dLon=0.09)
+
+    fpath = '/data/AlpArray_Data/fmtomo/v5_resolution_analysis/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10_profile_2e_EASI_detached_slab_N'
+    wdir = '/data/AlpArray_Data/sciebo/AlpArray_home/resolution_analysis/slice_2e_delamination'
+
+
+    if synthetic_box_test:
+        fn_vgrid_out = pjoin(fpath, 'vgridsref_crust_slab_E_dip_N.in')
+        for index in range(2):
+            if index == 0:
+                fn_box_vtk = pjoin(wdir, 'box_vminus_delamin.vtk')
+                fn_box_json = pjoin(wdir, 'box_vminus_delamin.json')
+                fn_box_filt = pjoin(wdir, 'box_vminus_delamin_12.5_12.5_7.5.json')
+                vp_diff = -4.
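+                # pass 0 of the two-pass box construction: carve a -4 % anomaly into
+                # the reference grid; pass 1 below superimposes the +6 % slab box onto
+                # the grid written here (fn_vgrid_out is reused as its input)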
+ fn_vgrid = pjoin(fpath, 'vgrids_dts_crust_cov_VGREF.in') + elif index == 1: + fn_box_vtk = pjoin(wdir, 'box_slab_E_dip_N_delamin.vtk') + fn_box_json = pjoin(wdir, 'box_slab_E_dip_N_delamin.json') + fn_box_filt = pjoin(wdir, 'box_slab_E_dip_N_12.5_12.5_7.5_delamin.json') + vp_diff = 6. + fn_vgrid = fn_vgrid_out + + # interpolate box on a regular grid + discretize_box(fn_box_vtk, fn_box_json, vp_diff=vp_diff, R=6371., dr=5., dLat=0.045, dLon=0.045) + + # smooth this grid + apply_convolution_filter(12.5, 12.5, 7.5, (5., 5., 5.), fn_box_json, wdir=wdir, fname_out=fn_box_filt, + io_json=True) + + interpolate_on_vgrid(fn_vgrid, fn_box_filt, + fname_out=fn_vgrid_out, vel_perturbation_abs=True, ncores=20) + + # read and overwrite fn_vgrid_out with covariances from RDE values + # ... not necessary because forward grid for resolution analysis is not reference grid with covariances + #interpolate_diehl_rde_regular_on_vgrid(fn_vgrid_out, external_grid_file, fn_vgrid_out, + # fname_out_vtk=pjoin(fpath, 'vgridsref_crust_box_lip_b_south.vtk')) + + # apply_convolution_filter(12.5, 12.5, 10., (10., 10., 10.), + # '/home/marcel/marcel_scratch/alparray/box2.2.json', + # wdir='/home/marcel/marcel_scratch/alparray/', + # fname_out=pjoin'box2.2_grid_12.5_12.5_10.json') + # apply_convolution_filter(12.5, 12.5, 10., (10., 10., 10.), + # '/home/marcel/marcel_scratch/alparray/box2.3.json', + # wdir='/home/marcel/marcel_scratch/alparray/', + # fname_out=pjoin'box2.3_grid_12.5_12.5_10.json') + + #apply_convolution_filter(12.5, 12.5, 10., (10., 10., 10.), + # '/home/marcel/marcel_scratch/alparray/box3.3.json', + # wdir='/home/marcel/marcel_scratch/alparray/', + # fname_out=pjoin'box3.3_grid_12.5_12.5_10.json') + + #interpolate_on_vgrid('/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_from_m6.0_diehl_crustal_corrections_v2_template/vgridsref.in', + # '/rscratch/minos22/marcel/alparray/box2.1_grid_12.5_12.5_10.json', + # fname_out='/rscratch/minos22/marcel/alparray/vgrids_box2.1.in', + # vel_perturbation_abs=True, ncores=20) + # interpolate_on_vgrid('/rscratch/minos22/marcel/alparray/vgrids_box2.1.in', + # '/rscratch/minos22/marcel/alparray/box2.2_grid_12.5_12.5_10.json', + # fname_out='/rscratch/minos22/marcel/alparray/vgrids_box2.2.in', + # vel_perturbation_abs=True, ncores=20) + # interpolate_on_vgrid('/rscratch/minos22/marcel/alparray/vgrids_box2.2.in', + # '/rscratch/minos22/marcel/alparray/box2.3_grid_12.5_12.5_10.json', + # fname_out='/rscratch/minos22/marcel/alparray/vgrids_three_boxes.in', + # fname_out_vtk='/rscratch/minos22/marcel/alparray/vgrids_three_boxes.vtk', + # vel_perturbation_abs=True, ncores=20) + #interpolate_on_vgrid('/rscratch/minos22/marcel/alparray/vgrids_box2.2.in', + # '/rscratch/minos22/marcel/alparray/box3.3_grid_12.5_12.5_10.json', + # fname_out='/rscratch/minos22/marcel/alparray/vgrids_three_boxes_inverted.in', + # fname_out_vtk='/rscratch/minos22/marcel/alparray/vgrids_three_boxes_inverted.vtk', + # vel_perturbation_abs=True, ncores=20) + + + #interpolate_on_vgrid( + # '/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_from_m6.0_diehl_crustal_corrections_v2_sm10_damp10/it_6/vgrids.in', + # '/rscratch/minos22/marcel/alparray/diehl_2009_diff_filt_12.5_12.5_10.json', + # fname_out='vgrids_it6_with_diehl.in', + # fname_out_vtk=None, + # vel_perturbation_abs=True, ncores=20, + # wdir='/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_from_m6.0_diehl_crustal_corrections_v2_sm10_damp10/') + + + #tomo_sim_dir = '/home/marcel/marcel_scratch/asp3d_elvis/simu02' + 
#tomo_sim_dir = '/home/marcel/marcel_scratch/alparray/fmtomo_traveltime_tomo/example3' + #tomo_sim_dir = r'/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0/' + + #interfaces2vtk(os.path.join(tomo_sim_dir, 'interfaces.in'), outpath=tomo_sim_dir) + #propgrid2vtk(os.path.join(tomo_sim_dir, 'propgrid.in'), os.path.join(tomo_sim_dir, 'propgrid.vtk')) + #vgrids2VTK(r'/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0/vgrids.in', 'vgrids.vtk') # different coord system and units + + + + + diff --git a/pylot/tomography/fmtomo_tools/fmtomo_teleseismic.py b/pylot/tomography/fmtomo_tools/fmtomo_teleseismic.py new file mode 100644 index 00000000..5b908c5f --- /dev/null +++ b/pylot/tomography/fmtomo_tools/fmtomo_teleseismic.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import os +import argparse + +from fmtomo_teleseismic_utils import setup_fmtomo_sim + +#setup_fmtomo_sim('/data/AlpArray_Data/dmt_database/', '//data/AlpArray_Data/fmtomo/v6_S/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10') +#setup_fmtomo_sim('/rscratch/minos13/marcel/dmt_database_test_event', '/home/marcel/marcel_scratch/alparray/fmtomo_traveltime_tomo/alparray_0/') +#setup_fmtomo_sim('/rscratch/minos13/marcel/dmt_database_m7', '/home/marcel/marcel_scratch/alparray/fmtomo_traveltime_tomo/alparray_0/') +#setup_fmtomo_sim('/rscratch/minos22/marcel/dmt_database_m7', '/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0_receiver_elev_zero_smaller_box') +#setup_fmtomo_sim('/rscratch/minos22/marcel/dmt_database_m7', '/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0_receiver_elev_zero_bigger_box') +#setup_fmtomo_sim('/rscratch/minos22/marcel/dmt_database_m7', '/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0_receiver_elev_zero_shallow_box') +#setup_fmtomo_sim('/rscratch/minos22/marcel/dmt_database_m7', '/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0_receiver_elev_zero_finer_interface') +#setup_fmtomo_sim('/rscratch/minos22/marcel/dmt_database_m7', '/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0_receiver_elev_zero_bigger_box_equal_dist_2') + +#pgrid = Propgrid('/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0/propgrid.in') + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description='Prepare grid for fm3d teleseismic hybrid calculation.') + parser.add_argument('dmt_path', help='path containing dmt_database with PyLoT picks') + parser.add_argument('fmtomodir', help='path containing fm3d output') + parser.add_argument('fname_extension', help='filename extension of pick files') + parser.add_argument('--blacklist', default=None, + help='station blacklist file (csv). 
After first line: NW_id,ST_id in each line') + parser.add_argument('--no_write_init_nodes', default=False, action='store_true', + help='do not calculate and write init nodes') + parser.add_argument('--no_recalculate_init_nodes', default=False, action='store_true', + help='do not recalculate init nodes if nodes file exists') + parser.add_argument('-n', dest='ncores', default=None, help='number of cores for multiprocessing') + parser.add_argument('--model', default='ak135') + parser.add_argument('--s_phase', default=False, action='store_true') + + args = parser.parse_args() + + database_path = os.path.abspath(args.dmt_path) + fmtomodir = os.path.abspath(args.fmtomodir) + if args.ncores is not None: + ncores = int(args.ncores) + else: + ncores = None + fname_extension = args.fname_extension + + sub_phases = {'P': ['P', 'PKP'], 'S': ['S', 'SKS']} + phase_types = ['P'] + if args.s_phase: + phase_types.append('S') + + setup_fmtomo_sim(database_path, fmtomodir, fname_extension, sub_phases, ncores=ncores, check_notesfile=False, + model=args.model, fname_station_blacklist=args.blacklist, + no_write_init_nodes=args.no_write_init_nodes, + no_recalculate_init_nodes=args.no_recalculate_init_nodes, phase_types=phase_types) diff --git a/pylot/tomography/fmtomo_tools/fmtomo_teleseismic_utils.py b/pylot/tomography/fmtomo_tools/fmtomo_teleseismic_utils.py new file mode 100644 index 00000000..d25bbc12 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/fmtomo_teleseismic_utils.py @@ -0,0 +1,930 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import subprocess +import warnings +import os +import glob +from datetime import datetime + +import multiprocessing +import numpy as np +import json + +from obspy import read_events, UTCDateTime +from obspy.taup import TauPyModel +from obspy.geodetics import locations2degrees, gps2dist_azimuth + +from pylot.core.util.utils import identifyPhaseID +from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT +from pylot.tomography.utils import pol2cart, pol2cart_vector +from pylot.tomography.utils import get_metadata + + +class Propgrid(object): + ''' + small class that is built from an fm3d propgrid.in file; generates a regular grid in the same way as fm3d + ''' + def __init__(self, filename): + self.r_earth = 6371. 
# earth radius in km + self.init_propgrid(filename) + + def init_propgrid(self, filename_propgrid): + self.read_propgrid(filename_propgrid) + self.init_rGrid() + self.init_latGrid() + self.init_longGrid() + + def init_rGrid(self): + self.rs = np.zeros(self.nR) + self.rbot = self.r_earth + self.zTop - (self.nR - 1) * self.deltaR + for index in range(self.nR): + self.rs[index] = self.rbot + index * self.deltaR + + def init_latGrid(self): + self.lats = np.zeros(self.nLat) + self.latS = np.deg2rad(self.latSdeg) + self.deltaLat = np.deg2rad(self.deltaLatDeg) + for index in range(self.nLat): + self.lats[index] = self.latS + index * self.deltaLat + + def init_longGrid(self): + self.longs = np.zeros(self.nLong) + self.longW = np.deg2rad(self.longWdeg) + self.deltaLong = np.deg2rad(self.deltaLongDeg) + for index in range(self.nLong): + self.longs[index] = self.longW + index * self.deltaLong + + def read_propgrid(self, filename_propgrid): + infile = open(filename_propgrid, 'r') + self.nR, self.nLat, self.nLong = [int(value) for value in infile.readline().split()] + self.deltaR, self.deltaLatDeg, self.deltaLongDeg = [np.float64(value) for value in infile.readline().split()] + self.zTop, self.latSdeg, self.longWdeg = [np.float64(value) for value in infile.readline().split()] + infile.close() + + +def check_event_notes(eventdir): + eventfile = os.path.join(eventdir, 'notes.txt') + if os.path.isfile(eventfile): + with open(eventfile, 'r') as infile: + notes = infile.readline() + print(notes) + if 'exclude' in notes: + print('Will exclude this event!') + return False + else: + print('No notes file found.') + return True + + +def prepare_fmtomo_dir_first_run(fmtomo_dir): + """ + This helper function calls the file 'fm3d_prepare_tele' in the fmtomo binary after creating a "fake" receivers.in + and sources.in file so that fm3d can prepare the teleseismic run and writes the file "init_nodes.out" that contains + information on the boundary nodes of the current propagation grid. Make sure that "grid3dg" has already been called + for this function to work. 
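+    Note: the fake sources.in/receivers.in written here are removed again at the end,
+    so the only persistent output of this preparation run is init_nodes.out.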
+    :param fmtomo_dir: FMTOMO run directory containing fm3d_prepare_tele and propgrid.in
+    :return:
+    """
+    def write1(fid):
+        fid.write('1\n')
+
+    def write_rec_file():
+        """ Writes a fake receiver (has to be inside the grid) """
+        with open(recfile, 'w') as fid:
+            write1(fid)
+            fid.write(f'0 {clat} {clon}\n')
+            for _ in range(3):
+                write1(fid)
+
+    def write_src_file():
+        """ Writes a fake source """
+        fakesrc_str = '''
+ 1
+ 1
+ P
+ 0.0000 0.0000 0.0000 0.00000 0.00000 0.00000
+ 1
+ 1
+ 2 1
+ 1
+ 1
+ '''
+
+        with open(srcfile, 'w') as fid:
+            fid.write(fakesrc_str)
+
+    def get_clat_clon():
+        if not os.path.isfile(propgrid_file):
+            raise IOError(f'Missing file {propgrid_file} for fmtomo first run preparation.')
+
+        with open(propgrid_file, 'r') as fid:
+            _, nlat, nlon = (int(item) for item in fid.readline().split())
+            _, dlat, dlon = (float(item) for item in fid.readline().split())
+            _, lat0, lon0 = (float(item) for item in fid.readline().split())
+        clat = lat0 + nlat / 2 * dlat
+        clon = lon0 + nlon / 2 * dlon
+        return clat, clon
+
+    # check if binary file exists
+    fmtomo_prep_tele = os.path.join(fmtomo_dir, 'fm3d_prepare_tele')
+    assert os.path.isfile(fmtomo_prep_tele), f'Missing binary file {fmtomo_prep_tele}'
+
+    # set filenames for propgrid, sources and receivers
+    propgrid_file = os.path.join(fmtomo_dir, 'propgrid.in')
+    recfile = os.path.join(fmtomo_dir, 'receivers.in')
+    srcfile = os.path.join(fmtomo_dir, 'sources.in')
+
+    # get coords from propgrid
+    clat, clon = get_clat_clon()
+
+    # write fake source and receiver files
+    write_src_file()
+    write_rec_file()
+
+    # execute fm3d_prepare_tele from the local directory
+    curdir = os.getcwd()
+    os.chdir(fmtomo_dir)
+    rval = subprocess.check_output([fmtomo_prep_tele]).decode('utf-8')
+    if rval.split('\n')[-2] != ' Finished teleseismic preparation. Stop.':
+        raise ValueError('Unexpected output of initialisation run.')
+
+    if not os.path.isfile(os.path.join(fmtomo_dir, 'init_nodes.out')):
+        raise Exception('Could not create output file init_nodes.')
+    os.chdir(curdir)
+
+    # clean up
+    os.remove(srcfile)
+    os.remove(recfile)
+
+    print('Prepare fmtomo dir first run: Success')
+
+
+def setup_fmtomo_sim(database_path_dmt, fmtomo_dir, fname_extension, sub_phases, ncores=None, model='ak135',
+                     min_picks_per_phase=10, write_vtk=True, check_notesfile=True, fname_station_blacklist=None,
+                     no_write_init_nodes=False, no_recalculate_init_nodes=False, phase_types=('P', 'S')):
+    '''
+    Main function of this program: writes pick files and the input source file for the FMTOMO obsdata program.
+    '''
+
+    assert os.path.isdir(database_path_dmt), 'Unrecognized directory {}'.format(database_path_dmt)
+    assert os.path.isdir(fmtomo_dir), 'Unrecognized directory {}'.format(fmtomo_dir)
+
+    tstart = datetime.now()
+    print('Starting script at {}'.format(tstart))
+    print('Check notesfile set to {}'.format(check_notesfile))
+
+    # save means of event traveltimes to dictionary for statistical analysis
+    #tt_means = {}
+
+    fname_fmtomo_nodes = os.path.join(fmtomo_dir, 'init_nodes.out')
+
+    # do first initialisation of FMTOMO to generate init_nodes file if required
+    if not no_recalculate_init_nodes:
+        prepare_fmtomo_dir_first_run(fmtomo_dir)
+
+    eventdirs = glob.glob(os.path.join(database_path_dmt, '*.?'))
+    nEvents = len(eventdirs)
+    # create directory that will contain the picks
+    picksdir = os.path.join(fmtomo_dir, 'picks')
+    if not os.path.isdir(picksdir):
+        os.mkdir(picksdir)
+    # track and count ALL source locations & phases (P and S) for fmtomo_tools
+    associations_str = ''
+    nsrc_total = 0
+    # iterate over P and S to create one model each
+    for
phase_type in phase_types: + sourcefile_str = '' + nsrc = 0 + # iterate over all events in "database_path_dmt" + for eventindex, eventdir in enumerate(eventdirs): + print('Working on {}-picks for event {} ({}/{})'.format(phase_type, eventdir, + eventindex + 1, len(eventdirs))) + if check_notesfile and not check_event_notes(eventdir): + continue + # create ObsPy event from .pkl file in dmt eventdir + event = get_event_obspy_dmt(eventdir) + if len(event.origins) > 1: + raise Exception('Ambiguous origin information for event {}'.format(event)) + origin = event.origins[0] + # get all picks from PyLoT *.xml file + picks = get_picks(eventdir, extension=fname_extension) + if not picks: + print('No picks for event {} found.'.format(eventdir)) + continue + # remove picks for blacklisted stations + if fname_station_blacklist: + picks, n_deleted_blacklist = remove_blacklisted_station_picks(picks, fname_station_blacklist) + # get metadata for current event from dmt_database + metadata = get_metadata(eventdir) + # get a dictionary containing coordinates for all stations + stations_dict = metadata.get_all_coordinates() + # catch event id + event_id = get_event_id(eventdir) + # map specific phases to another phase (Pdiff -> P) + merge_phases = {'Pdiff': 'P'} + for key, value in merge_phases.items(): + print('Will map phase {} to phase {} if present.'.format(key, value)) + # assign all picks of this event to a phase and add to sorted_picks dictionary + sorted_picks = sort_picks_by_phase(picks, stations_dict, origin, + sub_phases[phase_type], model, merge_phases=merge_phases) + sorted_picks = remove_uncommon_picks(sorted_picks, min_picks_per_phase) + # the following should not be necessary when calculating traveltimes on borders using TauPy + #sorted_picks = translate_phase_names_TAUP(sorted_picks) + # + # iterate over sorted picks and write a traveltime file for each phase type + for phase, picks_list in sorted_picks.items(): + pickfile_base = '{}_{}'.format(event_id, phase) + pickfile_name = pickfile_base + '.ttf' + init_nodes_name = pickfile_base + '.nodes' + vtk_file_name = pickfile_base + '.vtk' + pickfile = os.path.join(picksdir, pickfile_name) + init_nodes_file = os.path.join(picksdir, init_nodes_name) + vtk_file = os.path.join(picksdir, vtk_file_name) + # remove source time + picks_list = subtract_source_time(picks_list, origin.time) + # save mean for statistical analysis + #tt_means[pickfile_base] = mean + # create a list with all "true" phases used for taupy to calculate boundary ttimes/ray parameters + phases = list(set([item['true_phase'] for item in picks_list])) + if no_write_init_nodes == False: + if no_recalculate_init_nodes and os.path.isfile(init_nodes_file): + print('Found previously calculated init nodes. 
Will not recalculate.') + else: + # calculate travel times and ray parameters for boundary nodes and write to file + points_list = initNodesFm3d(fname_fmtomo_nodes, origin, model, phases, ncores=ncores) + # in case initNodes fails and returns None continue with next phase + if not points_list: + print('No points list, initNodesFm3d failed for event.') + continue + if not write_init_points(points_list, init_nodes_file): + print('Write init points failed for event.') + continue + if write_vtk==True: + write_vtk_file(points_list, vtk_file) + # write picks to pickfile for fm3d + write_picks_to_pickfile(pickfile, phase, picks_list, stations_dict, origin) + # add pickfile to sourcefile string and count number of pickfiles + sourcefile_str += '1 1 {}\n'.format(pickfile_name) + # add source location, phaseID and .nodes filename to association string + source_string = '{phase} {lat} {lon} {depth} {fname}\n' + associations_str += source_string.format(phase=phase, lat=origin.latitude, + lon=origin.longitude, depth=origin.depth, + fname=init_nodes_name) + nsrc += 1 + nsrc_total += 1 + average_time = (datetime.now() - tstart) / (eventindex + 1) + print('Average time for event (phase {}): {}'.format(phase_type, average_time)) + print('ETA for {}-events: {}'.format(phase_type, tstart + nEvents * average_time)) + + write_src_file(fmtomo_dir, phase_type, nsrc, sourcefile_str) + + write_assc_file(fmtomo_dir, nsrc_total, associations_str) + #write_json(tt_means, os.path.join(fmtomo_dir, 'ttmeans.json')) + + print('Script finished! Good Bye!') + + +def write_src_file(fmtomo_dir, phase_type, nsrc, sourcefile_str): + # write input_source_file for obsdata + input_source_file = open(os.path.join(fmtomo_dir, 'input_source_file_{}.in'.format(phase_type)), 'w') + input_source_file.write('{}\n'.format(nsrc)) + input_source_file.write(sourcefile_str) + input_source_file.close() + + +def write_assc_file(fmtomo_dir, nsrc_total, associations_str): + # write input_associations file for fm3d + input_assc_file = open(os.path.join(fmtomo_dir, 'input_associations_file.in'), 'w') + input_assc_file.write('{}\n'.format(nsrc_total)) + input_assc_file.write(associations_str) + input_assc_file.close() + + +def write_json(object, fname): + with open(fname, 'w') as outfile: + json.dump(object, outfile, sort_keys=True, indent=4) + + +def write_vtk_file(points_list, fname): + outfile = open(fname, 'w') + + nPoints = len(points_list) + + outfile.write('# vtk DataFile Version 2.0\n') + outfile.write('FMM Init points\n') + outfile.write('ASCII\n') + outfile.write('DATASET POLYDATA\n') + outfile.write('POINTS {} float\n'.format(nPoints)) + + for point in points_list: + lat = point['pt_lat'] + lon = point['pt_lon'] + rad = point['pt_R'] + x, y, z = pol2cart(lat, lon, rad) + outfile.write('{x} {y} {z}\n'.format(x=x, y=y, z=z)) + + # write number of vertices and their indices + outfile.write('\nVERTICES {} {}\n'.format(nPoints, 2*nPoints)) + for index, point in enumerate(points_list): + outfile.write('{} {}\n'.format(1, index)) + + # write header with number of data points + outfile.write('\nPOINT_DATA {}\n'.format(nPoints)) + + # write header for traveltimes + outfile.write('SCALARS traveltime float 1\n') + outfile.write('LOOKUP_TABLE default\n') + + for point in points_list: + time = point.get('time') + outfile.write('{}\n'.format(time if time else 0)) + + # write header for point indices + outfile.write('SCALARS point_index integer 1\n') + outfile.write('LOOKUP_TABLE default\n') + + for point in points_list: + pt_index = 
point.get('pt_index') + outfile.write('{}\n'.format(pt_index if pt_index else 0)) + + outfile.write('VECTORS tt_grad float\n') + for point in points_list: + lat = point['pt_lat'] + lon = point['pt_lon'] + r_comp = point.get('ray_param_km_z_comp') + n_comp = point.get('ray_param_km_n_comp') + e_comp = point.get('ray_param_km_e_comp') + sx, sy, sz = pol2cart_vector(lat, lon, n_comp, e_comp, r_comp) + outfile.write('{sx} {sy} {sz}\n'.format(sx=sx if sx else 0., + sy=sy if sy else 0., + sz=sz if sz else 0.,)) + + outfile.close() + + + + +def write_init_points(points_list, outfile): + fid = open(outfile, 'w') + print('Writing {} init points to file {}.'.format(len(points_list), outfile)) + output_str = '' + # count number of points for file header + npoints = 0 + for point_dict in points_list: + nArrivals = point_dict.get('nArrivals') + if not nArrivals: + #print('No arrivals for point:') + #print(point_dict) + continue + if nArrivals > 1: + fid.close() + os.remove(outfile) + warning_template = 'Ambiguous information for point: {}, having {} different arrivals. Skipping event.' + print(warning_template.format(point_dict, nArrivals)) + return False + output_template = '{index} {time} {ray_param_z} {ray_param_n} {ray_param_e}\n' + output_str += (output_template.format(index=point_dict['pt_index'], + time=point_dict.get('time'), + ray_param_z=point_dict.get('ray_param_km_z_comp'), + ray_param_n=point_dict.get('ray_param_km_n_comp'), + ray_param_e=point_dict.get('ray_param_km_e_comp'))) + npoints += 1 + fid.write('{}\n'.format(npoints)) + fid.write(output_str) + fid.close() + + return True + + +def initNodesFm3d(filename_init_nodes, source_origin, model, phases, min_dist=25., ncores=None, R=6371000.): + # This function uses obspy TauPy to calculate travel times and ray parameters on boundary points of the fm3d grid + + def exposed_sides(grid_boundaries, src_lat, src_lon): + # check which sides of grid are exposed to source (taken from fm3d teleseismic.f90 code) + # here: baz is calculated, not az + north = grid_boundaries['north'] + east = grid_boundaries['east'] + south = grid_boundaries['south'] + west = grid_boundaries['west'] + + # north + baz1 = gps2dist_azimuth(north, west, src_lat, src_lon, a=R, f=0)[1] + baz2 = gps2dist_azimuth(north, east, src_lat, src_lon, a=R, f=0)[1] + exposed_north = all(np.cos(np.deg2rad(baz)) > 0.01 for baz in [baz1, baz2]) + # east + baz1 = gps2dist_azimuth(north, east, src_lat, src_lon, a=R, f=0)[1] + baz2 = gps2dist_azimuth(south, east, src_lat, src_lon, a=R, f=0)[1] + exposed_east = all(np.sin(np.deg2rad(baz)) > 0.01 for baz in [baz1, baz2]) + # south + baz1 = gps2dist_azimuth(south, west, src_lat, src_lon, a=R, f=0)[1] + baz2 = gps2dist_azimuth(south, east, src_lat, src_lon, a=R, f=0)[1] + exposed_south = all(np.cos(np.deg2rad(baz)) < -0.01 for baz in [baz1, baz2]) + # west + baz1 = gps2dist_azimuth(north, west, src_lat, src_lon, a=R, f=0)[1] + baz2 = gps2dist_azimuth(south, west, src_lat, src_lon, a=R, f=0)[1] + exposed_west = all(np.sin(np.deg2rad(baz)) < -0.01 for baz in [baz1, baz2]) + + exposed_dict = dict(north=exposed_north, east=exposed_east, south=exposed_south, west=exposed_west, + rbot=True) + + return exposed_dict + + def point_valid(pt_lat, pt_lon, pt_R, exposed_dict, grid_boundaries, R_earth): + # check whether point has to be active or not depending on its position to the source + + def check_point(actual, desired, threshold=0.001): + # checks receiver (boundary) point orientation by comparison to corner boundary points + return abs(actual - 
desired) < threshold + + # check for negative depth (above surface) + if pt_R > R_earth: + return False + + # check if point belongs to bottom interface + #if check_point(pt_R, grid_boundaries['rbot']): + # return True + + pt_faces_dir = {} + pt_exposed_dir = {} + + directions = {'north': pt_lat, + 'east': pt_lon, + 'south': pt_lat, + 'west': pt_lon, + 'rbot': pt_R,} + + for direction, pt_lat_or_lon in directions.items(): + # check if point belongs to boundary of this direction and save to pt_faces_dir + pt_direction_check = check_point(pt_lat_or_lon, grid_boundaries[direction]) + pt_faces_dir[direction] = pt_direction_check + # check if point is exposed to source in this direction and save to pt_exposed_dir + pt_exposed_dir[direction] = pt_direction_check and exposed_dict[direction] + + # compare number of points facing source direction to the actual direction they are facing + return sum(pt_faces_dir.values()) == sum(pt_exposed_dir.values()) + + + import datetime + now = datetime.datetime.now() + infile = open(filename_init_nodes, 'r') + R_earth = 6371. + + # read input files containing fm3d boundary points in each line in the order: lat lon radius + init_nodes = infile.readlines() + nPoints = len(init_nodes) + # split lines and convert to floats except for first value which is converted to int + init_nodes = [[float(val) if index != 0 else int(val) for index, val in enumerate(line.split())] + for line in init_nodes] + + grid_boundaries = dict(north=max(init_nodes, key=lambda x: x[1])[1], + east=max(init_nodes, key=lambda x: x[2])[2], + south=min(init_nodes, key=lambda x: x[1])[1], + west=min(init_nodes, key=lambda x: x[2])[2], + rbot=min(init_nodes, key=lambda x: x[3])[3], + rtop=max(init_nodes, key=lambda x: x[3])[3]) + + # get source coordinates + src_lat, src_lon, src_depth = source_origin.latitude, source_origin.longitude, source_origin.depth + #src_lat = 0; src_lon = 180; src_depth=50 + + # calculate which sides are exposed to source + exposed_dict = exposed_sides(grid_boundaries, src_lat, src_lon) + + # iterate over all points and calculate distance. Append a dictionary to an input list for + # multiprocessing worker for each point + points_list = [] + for point in init_nodes: + pt_index, pt_lat, pt_lon, pt_R = point + pt_depth = R_earth - pt_R + dist = locations2degrees(pt_lat, pt_lon, src_lat, src_lon) + + # check minimum distance for this point + if dist < min_dist: + warnings.warn('Distance for point {} less than minimum' + ' distance ({} km). 
Skipping event!'.format(point, min_dist)) + return + + # check if point is exposed to source and has to be initiated + if not point_valid(pt_lat, pt_lon, pt_R, exposed_dict, grid_boundaries, R_earth): + continue + + input_dict = {'pt_index': pt_index, + 'pt_depth': pt_depth, + 'pt_R': pt_R, + 'pt_lat': pt_lat, + 'pt_lon': pt_lon, + 'src_lat': src_lat, + 'src_lon': src_lon, + 'src_depth': src_depth, + 'dist': dist, + 'model': model, + 'phases': phases, + } + points_list.append(input_dict) + + print('n Points init:', len(points_list)) + #plot_points(points_list) + + rvals = [] + pool = multiprocessing.Pool(ncores, maxtasksperchild=1000) + for rval in pool.imap(taup_worker, points_list, chunksize=10): + rvals.append(rval) + pool.close() + pool.join() + + print('Done after {}'.format(datetime.datetime.now() - now)) + + #plot_points(points_list) + return rvals + + +def taup_worker(input_dict): + # initiate model for TauP method + model = TauPyModel(model=input_dict['model']) + huge_time = 1e7 + try: + arrivals = model.get_ray_paths(input_dict['src_depth'], input_dict['dist'], + phase_list=input_dict['phases'], + receiver_depth_in_km=input_dict['pt_depth'], ) + if len(arrivals) < 1: + #print('No arrivals for phase {}'.format(input_dict['phase'])) + #print(input_dict) + return input_dict + + arr = arrivals[0] + + input_dict['nArrivals'] = len(arrivals) + baz = gps2dist_azimuth(input_dict['pt_lat'], input_dict['pt_lon'], + input_dict['src_lat'], input_dict['src_lon'], + a=6371.*1e3, f=0)[1] + + # calculate traveltime gradient, division by R to transform from rad to km + # multiply with -1 to get values for azimuth instead of back-azimuth + ray_param_km_z_comp = arr.ray_param / np.tan(np.deg2rad(arr.incident_angle)) / input_dict['pt_R'] + ray_param_km_n_comp = arr.ray_param * (-1) * np.cos(np.deg2rad(baz)) / input_dict['pt_R'] + ray_param_km_e_comp = arr.ray_param * (-1) * np.sin(np.deg2rad(baz)) / input_dict['pt_R'] + input_dict['ray_param_km_z_comp'] = ray_param_km_z_comp + input_dict['ray_param_km_n_comp'] = ray_param_km_n_comp + input_dict['ray_param_km_e_comp'] = ray_param_km_e_comp + + input_dict['time'] = arr.time + + except Exception as e: + print(e, input_dict) + return input_dict + + +def plot_points(points_list): + import matplotlib.pyplot as plt + fig = plt.figure() + ax = fig.add_subplot(111, projection='3d') + x = [] + y = [] + z = [] + color = np.full(len(points_list), np.nan) + for index, points_dict in enumerate(points_list): + x.append(points_dict['pt_lat']) + y.append(points_dict['pt_lon']) + z.append(points_dict['pt_depth']) + arrivals = points_dict.get('arrivals') + if arrivals: + color[index] = arrivals[0].time + ax.scatter(x, y, z, c=color) + plt.show() + + +def subtract_source_time(picks_list, origin_time): + # WAS FUNCTION DEMEAN, but demean after calculating residuals using rtimes_tele in FMTOMO!! + for phase_dict in picks_list: + taup_time = phase_dict['taup_time'] + pick = phase_dict['pick'] + pick_time = pick.time + ttres = pick_time - taup_time + # I think doing this is wrong because taup times do NOT include station elevation!!! 
5.3.20
+        phase_dict['ttres'] = ttres
+
+    # mean travel-time residual over all picks of this event
+    mean = np.mean([phase_dict['ttres'] for phase_dict in picks_list])
+
+    for phase_dict in picks_list:
+        phase_dict['ttres_demeaned'] = phase_dict['ttres'] - mean # see above comment
+        phase_dict['abstimes'] = phase_dict['pick'].time - origin_time
+
+    return picks_list
+
+
+def translate_phase_names_TAUP(sorted_picks):
+    # PKP and PKIKP are all just 'P' phases in fm3d (not very sure about this!)
+    translations = {'PKIKP': 'PKP',
+                    'SKIKS': 'SKS',}
+    # iterate over a copy of the keys, as the dict is modified inside the loop
+    for phase_name in list(sorted_picks.keys()):
+        if phase_name in translations.keys():
+            sorted_picks[translations[phase_name]] = sorted_picks.pop(phase_name)
+    return sorted_picks
+
+
+def write_picks_to_pickfile(pickfile, phase, picks_list, stations_dict, origin, picks_mode='abs'):
+    fid = open(pickfile, 'w')
+    header = '{npicks}\n' \
+             '{lat_src:<15} {lon_src:<15} {depth_src:<15}\n' \
+             '{phase_name}\n'
+    # pickfile header including npicks, src coords, phasetype
+    formatted_str = header.format(npicks=len(picks_list),
+                                  lat_src=origin.latitude,
+                                  lon_src=origin.longitude,
+                                  depth_src=origin.depth,
+                                  phase_name=phase)
+    fid.write(formatted_str)
+
+    # write picks for each station to pickfile
+    for phase_dict in picks_list:
+        pick = phase_dict['pick']
+        seed_id = pick.waveform_id.get_seed_string()
+        # travel time residual (demeaned or abs)
+        if picks_mode == 'abs':
+            picktime = phase_dict['abstimes']
+        elif picks_mode == 'res':
+            warnings.warn('Using residuals here might not be exact. See above code where ttres_demeaned is calculated.')
+            picktime = phase_dict['ttres_demeaned']
+        else:
+            raise ValueError(f'Unknown picks_mode {picks_mode}')
+        uncertainty = pick.time_errors.uncertainty
+        network, station = seed_id.split('.')[:2]
+        # get stations coords from metadata dict
+        station_coords = stations_dict.get('{}.{}'.format(network, station))
+        # prepare line to be written to pickfile and format, use traveltime residual
+        line = '{lat_rec:<15} {lon_rec:<15} {depth_rec:<15} {picktime:<15} {uncert:15}\n'
+        formatted_line = line.format(lat_rec=station_coords['latitude'],
+                                     lon_rec=station_coords['longitude'],
+                                     depth_rec=station_coords['elevation'] * (-1e-3),
+                                     picktime=picktime,
+                                     uncert=uncertainty)
+        fid.write(formatted_line)
+    print('Wrote {} picks for phase {} to file {}'.format(len(picks_list), phase, pickfile))
+    fid.close()
+
+
+def remove_blacklisted_station_picks(picks, fname_blacklist, verbosity=1):
+    blacklisted_stations = get_station_blacklist(fname_blacklist)
+    deleted_picks = []
+    deleted_stations = []
+    for index, pick in list(reversed(list(enumerate(picks)))):
+        seed_id = pick.waveform_id.get_seed_string()
+        network, station = seed_id.split('.')[:2]
+        nwst_id = '{}.{}'.format(network, station)
+        if nwst_id in blacklisted_stations.keys():
+            timewindow = blacklisted_stations[nwst_id]
+            if not timewindow == 'always':
+                tstart, tstop = [UTCDateTime(time) for time in timewindow.split('to')]
+                if not tstart <= picks[index].time <= tstop:
+                    continue
+            deleted_picks.append(picks.pop(index))
+            deleted_stations.append(nwst_id)
+    if verbosity > 0:
+        print('Deleted picks for blacklisted stations:\n{}'.format(deleted_stations))
+    return picks, deleted_stations
+
+
+def get_station_blacklist(fname_csv):
+    with open(fname_csv, 'r') as infile:
+        # skip first line
+        blacklist = infile.readlines()[1:]
+    blacklisted_stations = {}
+    for line in blacklist:
+        network, station, time = line.split(',')[:3]
+        nwst_id = '{}.{}'.format(network, station)
+        # strip trailing whitespace/newline so the 'always' keyword compares correctly
+        blacklisted_stations[nwst_id] = time.strip()
+    return blacklisted_stations
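+
+# Illustrative sketch (added for clarity, not part of the original tooling): judging
+# from the parsing above, the blacklist CSV is assumed to contain one header line
+# followed by rows of 'network,station,time', where time is either the keyword
+# 'always' or a window '<start>to<end>' in a format obspy's UTCDateTime accepts,
+# e.g. (station names hypothetical):
+#
+#   network,station,time
+#   GR,FUR,always
+#   Z3,A001A,2016-01-01to2016-06-30
+#
+# remove_blacklisted_station_picks(picks, 'blacklist.csv') would then drop all
+# picks from GR.FUR and only those picks from Z3.A001A inside the time window.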
+ + +def remove_uncommon_picks(sorted_picks, min_picks_per_phase): + for phase_name, picks_list in reversed(list(sorted_picks.items())): + if len(picks_list) < min_picks_per_phase: + msg = 'Removed picks for phase {}, as there are only {} picks given (threshold is {})' + print(msg.format(phase_name, len(picks_list), min_picks_per_phase)) + del(sorted_picks[phase_name]) + return sorted_picks + + +def sort_picks_by_phase(picks, stations_dict, source_origin, phase_types, model, + max_phase_diff=50., merge_phases=None, verbosity=0): + ''' + # First: Iterate through all picks to estimate possible phases for source/location combination, then assign + # each pick to one phase and return sorted picks dictionary, as there has to be one pickfile for each phasetype + + :param picks: + :param stations_dict: + :param source_origin: + :param phase_types: + :param model: + :param max_phase_diff: + :param merge_phases: assign a phase (key) to another phase (value) e.g. {Pdiff: P} + :param verbosity: + :return: + ''' + print('Starting to sort picks by phase calculating theoretical travel times for each pick...') + phases_dict = {} + for pick in picks: + # PROBLEM: seed_id from PyLoT does not contain Location ID!!! Makes it hard to find station coords + # workaround: use stations_dict (ignore channel and location ID) instead of metadata.get_coordinates() + seed_id = pick.waveform_id.get_seed_string() + network, station = seed_id.split('.')[:2] + phaseID = pick.phase_hint + uncertainty = pick.time_errors.uncertainty + # skip different phase + if not phaseID in phase_types: + continue + # pick invalid if no uncertainty is given + if not uncertainty: + continue + station_coords = stations_dict.get('{}.{}'.format(network, station)) + if not station_coords: + print('Could not find coordinates for station: {}'.format(seed_id)) + continue + # estimate phase type by taking the closest phase from Tau-P method (time is relative to source origin) + phase_name, phase_time_rel, phase_diff = get_closest_taup_phase(source_origin, station_coords, + phaseID, pick.time, model) + if phase_diff > max_phase_diff: + if verbosity > 0: + print ('Warning, max_diff too large (> {} s) for phase {} at {}. Skipping'.format(max_phase_diff, + phase_name, seed_id)) + continue + phase_time = source_origin.time + phase_time_rel + + # merge phase if selected, keep track of original phase for TauPy node initiation + true_phase = phase_name + if merge_phases and phase_name in merge_phases.keys(): + phase_name = merge_phases[phase_name] + + if not phase_name in phases_dict.keys(): + print('Adding phase to sorted picks dictionary: {}'.format(phase_name)) + phases_dict[phase_name] = [] + phases_dict[phase_name].append({'pick': pick, + 'taup_time': phase_time, + 'true_phase': true_phase,}) + return phases_dict + +def get_closest_taup_phase(source_origin, station_coords, phase, pick_time, model='ak135'): + ''' + Estimate phase that was picked by searching for earliest P/S phase arriving at + station_coords for given source_coords using TauPy. As PyLoT only searches for first arrivals + using the same method, this should(!) yield the correct phase. 
+ :return: + ''' + phases = {'P': [], + 'S': []} + phase_lists = {'P': ['ttp'], + 'S': ['tts']} + + # possible phases for fm3d (*K* and *KIK* -> *) + #phase_list = {'P': ['P', 'PKP', 'PKiKP', 'PKIKP', 'PcP', 'ScP', 'SKP', 'PKKP', 'SKKP', 'PP',], + # 'S': ['S', 'SKS', 'SKIKS', 'ScS']} + + # in case pick phase hint is just P or S + if phase in phases.keys(): + phase_list = phase_lists[phase] + common_phase = phase + # in case an explicit phase name is given use only this phase + else: + phase_list = [phase] + common_phase = identifyPhaseID(phase) + + model = TauPyModel(model=model) + arrivals = model.get_travel_times_geo(source_origin.depth, + source_origin.latitude, + source_origin.longitude, + station_coords['latitude'], + station_coords['longitude'], + phase_list) + + # identifies phases from arrivals as P or S phase, not necessary when using 'ttp' or 'tts' for get_travel_times_geo + for arr in arrivals: + phases[identifyPhaseID(arr.phase.name)].append(arr) + + source_time = source_origin.time + + if not arrivals: + raise Exception('No arrivals found for source.') + + # get first P or S onsets from arrivals list + arr, min_diff = min([(arr, abs(source_time + arr.time - pick_time)) for arr in phases[common_phase]], + key=lambda t: t[1]) + return arr.name, arr.time, min_diff + + +def get_event_obspy_dmt(eventdir): + event_pkl_file = os.path.join(eventdir, 'info', 'event.pkl') + if not os.path.exists(event_pkl_file): + raise IOError('Could not find event path for event: {}'.format(eventdir)) + event = qml_from_obspyDMT(event_pkl_file) + return event + + +def get_event_pylot(eventdir, extension=''): + event_id = get_event_id(eventdir) + filename = os.path.join(eventdir, 'PyLoT_{}{}.xml'.format(event_id, extension)) + if not os.path.isfile(filename): + return + cat = read_events(filename) + return cat[0] + +def get_event_id(eventdir): + event_id = os.path.split(eventdir)[-1] + return event_id + +def get_picks(eventdir, extension=''): + event_id = get_event_id(eventdir) + filename = 'PyLoT_{}{}.xml' + filename = filename.format(event_id, extension) + fpath = os.path.join(eventdir, filename) + fpaths = glob.glob(fpath) + if len(fpaths) == 1: + cat = read_events(fpaths[0]) + picks = cat[0].picks + return picks + elif len(fpaths) == 0: + print('get_picks: File not found: {}'.format(fpath)) + return + print(f'WARNING: Ambiguous pick file specification. 
Found the following pick files {fpaths}\nFilemask: {fpath}') + return + +def init_sources_file(nsrc, filename='sources.in'): + infile_source = open(filename, 'w') + infile_source.write('{}\n'.format(nsrc)) + return infile_source + +def init_receivers_file(nrec, filename='receivers.in'): + infile_rec = open(filename, 'w') + infile_rec.write('{}\n'.format(nrec)) + return infile_rec + +def append_source(fid, origin): + pass + +def append_receiver(fid, station_coords): + # TODO check if order is correct + fid.write('{rad:15} {lat:15} {lon:15} \n'.format(rad=station_coords['elevation'], + lat=station_coords['latitude'], + lon=station_coords['longitude'])) + + +def organize_receivers(fnin, unique=False): + ''' Open FMTOMO receivers.in file and read position of each receiver into a dict.''' + + with open(fnin, 'r') as infile: + nRec = int(infile.readline()) + rec_dict = {} + for rec_number in range(1, nRec + 1): + rad, lat, lon = [float(value) for value in infile.readline().split()] + # dummy read next 3 lines + for ind in range(3): + infile.readline() + + receiver = {'rad': rad, + 'lat': lat, + 'lon': lon, } + if unique: + if receiver in rec_dict.values(): + continue + + rec_dict[rec_number] = receiver + + return rec_dict + + +def organize_sources(fnin): + ''' Open FMTOMO sources.in file and read position and phase of each source into a dict.''' + with open(fnin, 'r') as infile: + nSrc = int(infile.readline()) + src_dict = {} + for src_number in range(nSrc): + src_number += 1 + teleseismic_flag = int(infile.readline()) + if teleseismic_flag != 1: raise ValueError('Source not teleseismic!') + phase = infile.readline().strip() + rad, lat, lon = [float(value) for value in infile.readline().split()[:3]] + # dummy read next 4 lines + for ind in range(4): + infile.readline() + src_dict[src_number] = {'phase': phase, + 'depth': rad, + 'lat': lat, + 'lon': lon,} + return src_dict + + +def organize_event_names(fnin): + infile = open(fnin, 'r') + events = [line.split()[-1].strip() for line in infile.readlines()[1:]] + infile.close() + return events + + +def export_otimes(otimes, fn_out): + np.savetxt(fn_out, otimes, fmt=['%8d', '%8d', '%8d', '%8d', '% 4.6f', '% 4.6f'], + header=str(len(otimes)), comments='') + print('Wrote file:', fn_out) + + +if __name__ == '__main__': + # testing area + prepare_fmtomo_dir_first_run('/data/AdriaArray_Data/fmtomo_adriaarray/v0/test_init') \ No newline at end of file diff --git a/pylot/tomography/fmtomo_tools/get_tt_residuals.py b/pylot/tomography/fmtomo_tools/get_tt_residuals.py new file mode 100644 index 00000000..92cb72c7 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/get_tt_residuals.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# get traveltime residuals using synthetic 1D travel times from FMTOMO hybrid method +# +# this script is used to make sure that synthetic travel times are the same for synthetic data residuals (rtimes) +# and observed data residuals, because of small differences between hybrid 1D solver and taup method +# Therefore, correlation output times are absolute (minus source time) values and residuals are calcuated using +# FMTOMO hybrid reference times (e.g. 
rtimes.dat) +# +import os +import argparse +import shutil +import numpy as np +import matplotlib.pyplot as plt + +from pylot.tomography.fmtomo_tools.fmtomo_teleseismic_utils import organize_event_names + +# Note: weighting of mean might not be reasonable due to uncertainty distribution along the array +def main(infile_rtimes, fn_events, kde_file, demean=True, no_deref=False, weight_kde=True, weight=False): + eventnames = organize_event_names(fn_events) + assert not(weight and weight_kde), 'Cannot use two weights' + + if not no_deref: + reftimes = np.genfromtxt(infile_rtimes) + if weight_kde: + kde_values = np.genfromtxt(kde_file) + assert len(kde_values) == len(reftimes), 'Missmatch in reftimes and kde file length' + + nSrc = len(eventnames) + + diff_in_means = [] + # iterate over all sources + for index in range(nSrc): + srcid = index + 1 + eventname = eventnames[index] + print('{} ({}/{})'.format(eventname, srcid, nSrc)) + # get pick filename and read observed times + pickfile = os.path.join('picks', eventname) + obsdata_src = np.loadtxt(pickfile, skiprows=3) + # make safety copy + picksafedir = os.path.join('picks', 'save') + if not os.path.isdir(picksafedir): + os.mkdir(picksafedir) + shutil.copy(pickfile, picksafedir) + npicks = len(obsdata_src) + # read observed times from pickfile + tt_obs = obsdata_src[:, 3] + # read uncertainties from pickfile + uncs = obsdata_src[:, 4] + + if no_deref: + tt_res = tt_obs + else: + indices = np.where(reftimes[:, 1] == srcid)[0] + assert (len(indices) == npicks), 'Missmatch in indices for srcids' + # read reference teleseismic times from FMTOMO run + tt_ref = reftimes[:, 4][indices] + # calculate residuals + tt_res = tt_obs - tt_ref + + # set new residual to obsdata array + obsdata_src[:, 3] = tt_res + # get kde values + #weights = 1./kde_values[:, 2][indices] if weight_kde else None + # + weights = 1. / uncs ** 2 if weight else None + # demean residuals for current source + mean = np.average(tt_res, weights=weights) + mean_unweighted = np.mean(tt_res) + diff_in_means.append(mean - mean_unweighted) + print('Mean:', mean) + print('Unweighted Mean:', mean_unweighted) + print('Demean setting:', demean) + if demean: + obsdata_src[:, 3] -= mean + #obsdata_src[:, 3] = mtimes[:, 4][indices] + # Write new pickfiles + with open(pickfile, 'r') as infile_pick: + header = infile_pick.readlines()[:3] + with open(pickfile, 'w') as outfile_pick: + for line in header: + outfile_pick.write(line) + for line in obsdata_src: + outfile_pick.write('{:10.4f} {:10.4f} {:10.4f} {:15.8f} {:15.8f}\n'.format(*line)) + plt.hist(diff_in_means, bins=100) + plt.title('Diffs in means (weighted - unweighted) [s]') + plt.show() + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description='Calculate residuals for absolute observation times using reference file from FMTOMO') + parser.add_argument('--rtimes', default='rtimes_tele.dat', help='reference_file') + parser.add_argument('--sourcefile', default='input_source_file_P.in', help='input_source_file') + #parser.add_argument('--kdefile', default='kde_weights.txt', help='input file with station kde values for weighting') + parser.add_argument('-nd', '--no_demean', action='store_true', default=False, + help='do not mean correct travel times') + parser.add_argument('-nr', '--no_reftimes', action='store_true', default=False, + help='do not use reference times (e.g. picks are already tt residuals). 
rtimes file still required at the moment.') + #parser.add_argument('-nwk', '--no_weight_kde', action='store_true', default=True, help='do not weight travel times') + args = parser.parse_args() + #args.kdefile + main(args.rtimes, args.sourcefile, None, not(args.no_demean), weight_kde=False, no_deref=args.no_reftimes) #not(args.no_weight)) \ No newline at end of file diff --git a/pylot/tomography/fmtomo_tools/gmtslice_sidehook.py b/pylot/tomography/fmtomo_tools/gmtslice_sidehook.py new file mode 100644 index 00000000..b2405f6f --- /dev/null +++ b/pylot/tomography/fmtomo_tools/gmtslice_sidehook.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +import os +import numpy as np + +from obspy.geodetics import gps2dist_azimuth, locations2degrees +from pylot.tomography.utils import get_coordinate_from_dist_baz + +R=6371. + +def main(): + profiles_infile = '/home/marcel/AlpArray/vtk_files/points_mark.txt' + gmtslice_dir = '/rscratch/minos13/marcel/fmtomo_alparray/v3.5/alparray_mantle_diehl_crust_included_hf_gradient_smoothing/plot' + #### + # maximum point spacing in km (decreases with depth) + dpoints = 1. + dpi = 100 + model_depth = 606. + ### + with open(profiles_infile, 'r') as infile: + profiles = infile.readlines() + + for profile in profiles: + print('Profile: ', profile) + name = profile.split()[0] + lon1, lat1, lon2, lat2 = [float(item) for item in profile.split()[1:]] + + +def get_gmtslice_gc(gmtplot_dir, lat1, lon1, lat2, lon2, model_depth=606., dpoints=1, fn_out_slice='grid2dvgc.z', + rel=True, fname_vgrid='../vgrids.in', fname_vgrid_ref='../vgridsref.in'): + parfile_str_len = 28 + assert (len(fname_vgrid) < parfile_str_len), 'Check length of filename: {}'.format(fname_vgrid) + assert (len(fname_vgrid_ref) < parfile_str_len), 'Check length of filename: {}'.format(fname_vgrid_ref) + + infile_default = 'gmtslice_default.in' + fn_out = 'gmtslice.in' + bounds_out_gmt = 'boundgc.gmt' + + cwd = os.getcwd() + + os.chdir(gmtplot_dir) + with open(infile_default, 'r') as infile: + gmt_infile = infile.readlines() + + # calculate great circle distance + max_dist = gps2dist_azimuth(lat1, lon1, lat2, lon2, a=R * 1e3, f=0)[0]/1e3 + npointsR = int(model_depth / dpoints + 1) + npointsLateral = int(max_dist / dpoints + 1) + print('nR, nLateral:', npointsR, npointsLateral) + + # filename + gmt_infile[3] = '{} \n'.format(fname_vgrid) + gmt_infile[4] = '{} \n'.format(fname_vgrid_ref) + + # activate gc generation + gmt_infile[51] = '1 \n' + # modify lines with lat lon boundaries + gmt_infile[52] = '{:5.2f} {:5.2f}\n'.format(lat1, lon1) + gmt_infile[53] = '{:5.2f} {:5.2f}\n'.format(lat2, lon2) + + abs_rel = 1 if rel else 0 + gmt_infile[58] = '{} \n'.format(abs_rel) + + # modify lines with n Points + gmt_infile[65] = '{:<5d} {:<5d}\n'.format(npointsLateral, npointsR) + + with open(fn_out, 'w') as outfile: + for line in gmt_infile: + outfile.write(line) + + print('Executing gmtslice...') + os.system('gmtslice') + print('Done!') + + with open(bounds_out_gmt, 'r') as infile: + bds = [float(bd) for bd in infile.readlines()] + + bounds = '-R{bds[0]}/{bds[1]}/{bds[2]}/{bds[3]}'.format(bds=bds) + xyz_string = 'gmt xyz2grd {grid_out} -Ggrid2dvgc.grd -I{bds[4]}+/{bds[5]}+ -ZLB {bounds}'.format( + grid_out=fn_out_slice, bds=bds, bounds=bounds) + print(xyz_string) + os.system(xyz_string) + + grid = np.loadtxt(fn_out_slice) + dist_grid = [] + lon_grid = [] + lat_grid = [] + _, azim, bazim = gps2dist_azimuth(lat1, lon1, lat2, lon2, a=R * 1e3, f=0) + #ddist = / (npointsLateral - 1) + #ddepth = (bds[3] - 
bds[2]) / (npointsR - 1) + for depth in np.linspace(-bds[3], -bds[2], num=npointsR): + for dist in np.linspace(0, locations2degrees(lat1, lon1, lat2, lon2), num=npointsLateral): + lon, lat = get_coordinate_from_dist_baz((lon1, lat1), dist, azim) + lon_grid.append(lon) + lat_grid.append(lat) + #dist = ddist * indexLat + #depth = ddepth * indexR + dist_grid.append(np.array([dist, depth])) + + dist_grid = np.array(dist_grid) + lat_grid = np.array(lat_grid) + lon_grid = np.array(lon_grid) + + os.chdir(cwd) + return grid, dist_grid, lat_grid, lon_grid, max_dist + + +def get_gmtslice_depth(gmtplot_dir, depth, fn_out_slice='grid2dvd.z', fname_vgrid='../vgrids.in', + fname_vgrid_ref='../vgridsref.in'): + infile_default = 'gmtslice_default.in' + fn_out = 'gmtslice.in' + bounds_out_gmt = 'bounddp.gmt' + + cwd = os.getcwd() + + os.chdir(gmtplot_dir) + with open(infile_default, 'r') as infile: + gmt_infile = infile.readlines() + + # filename + gmt_infile[3] = '{} \n'.format(fname_vgrid) + gmt_infile[4] = '{} \n'.format(fname_vgrid_ref) + + # activate depth slice generation + gmt_infile[41] = '1 \n' + # set depth (negative for whatever reason) + gmt_infile[42] = '{:5.2f} \n'.format(-depth) + + with open(fn_out, 'w') as outfile: + for line in gmt_infile: + outfile.write(line) + + print('Executing gmtslice...') + os.system('gmtslice') + print('Done!') + + with open(bounds_out_gmt, 'r') as infile: + bds = [float(bd) for bd in infile.readlines()] + + bounds = '-R{bds[0]}/{bds[1]}/{bds[2]}/{bds[3]}'.format(bds=bds) + xyz_string = 'gmt xyz2grd {grid_out} -Ggrid2dvd.grd -I{bds[4]}+/{bds[5]}+ -ZLB {bounds}'.format( + grid_out=fn_out_slice, bds=bds, bounds=bounds) + print(xyz_string) + #os.system(xyz_string) + + grid = np.loadtxt(fn_out_slice) + lonlat = [] + lon0, lon1 = bds[:2] + lat0, lat1 = bds[2:4] + nlon = int(bds[4]) + nlat = int(bds[5]) + for lon in np.linspace(lon0, lon1, num=nlon): + for lat in np.linspace(lat0, lat1, num=nlat): + lonlat.append(np.array([lon, lat])) + + lonlat = np.array(lonlat) + + os.chdir(cwd) + return grid, lonlat + + +def grdimage_slice(lat1, lon1, lat2, lon2, bounds, name, npointsLateral, npointsR, dpi): + proj = "-JX{:05.2f}i/{:05.2f}i".format(npointsLateral / dpi, npointsR / dpi) + fnout = 'gmtslice_{}_{:.1f}_{:.1f}-{:.1f}_{:.1f}'.format(name, lat1, lon1, lat2, lon2) + grdimage_string = 'gmt grdimage grid2dvgc.grd {bounds} {proj} -Ba50f10/a50f10 -Cpolar_inv_3.cpt -K > {fnout}'.format( + bounds=bounds, proj=proj, fnout=fnout+'.ps') + print(grdimage_string) + os.system(grdimage_string) + print('Convert to png...') + os.system('convert {} {}'.format(fnout + '.ps', fnout + '.png')) + + + diff --git a/pylot/tomography/fmtomo_tools/heatmap_two_models.py b/pylot/tomography/fmtomo_tools/heatmap_two_models.py new file mode 100644 index 00000000..cd73653a --- /dev/null +++ b/pylot/tomography/fmtomo_tools/heatmap_two_models.py @@ -0,0 +1,43 @@ +import sys + +fnin1 = '/data/AlpArray_Data/fmtomo/v6/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10/it_12/vgrids.in' +fnin2 = '/data/AlpArray_Data/fmtomo/v6/crust_incl_hf_sm_FIX_TESAURO_grad_sm30_dm10/it_12/vgrids.in' +fnout = '/data/AlpArray_Data/fmtomo/v6/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10/vg_TES_i12sm30dm10.in' + + +def check_vgrids_header_line(l1, l2, epsilon=1e-6): + items1 = [float(item) for item in l1.split()] + items2 = [float(item) for item in l2.split()] + for i1, i2 in zip(items1, items2): + if not abs(i1 - i2) < epsilon: + return False + return True + + + +def vgrids_diff(fnin1, fnin2, fnout): + diffs = [] + with open(fnin1, 'r') as 
infile1: + with open(fnin2, 'r') as infile2: + with open(fnout, 'w') as outfile: + for index, l1 in enumerate(infile1): + l2 = infile2.readline() + if index < 4: + assert check_vgrids_header_line(l1, l2), 'Different grid dimensions!' + outfile.write(l1) + else: + try: + v1 = float(l1.split()[0]) + v2 = float(l2.split()[0]) + except Exception as e: + print('Read problem: {}'.format(e)) + sys.exit() + vdiff = abs(v2 - v1) + diffs.append(vdiff) + outfile.write('{}\n'.format(vdiff)) + + print('Finished writing file', fnout) + print('Max diff:', max(diffs)) + + +vgrids_diff(fnin1, fnin2, fnout) diff --git a/pylot/tomography/fmtomo_tools/misfit_evaluation.py b/pylot/tomography/fmtomo_tools/misfit_evaluation.py new file mode 100644 index 00000000..2d721b2d --- /dev/null +++ b/pylot/tomography/fmtomo_tools/misfit_evaluation.py @@ -0,0 +1,126 @@ +import os +import json + +import numpy as np +import matplotlib.pyplot as plt + +from pylot.tomography.fmtomo_tools.fmtomo_teleseismic_utils import organize_receivers + +# TODO: demeaning source by source?? + +def calc_misfit(data, synth, unc): + return np.sum(((data - synth) / unc) ** 2) / len(data) + +def calc_mean_residuals(data, synth): + return np.sum(abs(data - synth)) / len(data) + + +def sort_stations_for_search(station_coords, receivers_dict): + epsilon = 1e-1 # very large epsilon actually... + coords_unique = coords_unique_receivers(receivers_dict) + latlon_dict = {} + for nwst_id, coords in station_coords.items(): + lat = coords['latitude'] + lon = coords['longitude'] + for latu, lonu in coords_unique: + if abs(lat - latu) < epsilon and abs(lon - lonu) < epsilon: + latlon_dict[(latu, lonu)] = nwst_id + return latlon_dict + + +def extract_stations_for_network(station_coords, network_id): + station_coords_extracted = {} + for nwst_id, coords in station_coords.items(): + nw, st = nwst_id.split('.') + if nw == network_id: + station_coords_extracted[nwst_id] = coords + return station_coords_extracted + + +def get_receiver_ids(latlon_list, receivers_dict): + receiver_ids = [] + for recid, coords in receivers_dict.items(): + lat, lon = (coords['lat'], coords['lon']) + if (lat, lon) in latlon_list: + receiver_ids.append(recid) + return receiver_ids + + +def coords_unique_receivers(receivers_dict): + coords_unique = [] + for coords in receivers_dict.values(): + latlon_tuple = (coords['lat'], coords['lon']) + if not latlon_tuple in coords_unique: + coords_unique.append(latlon_tuple) + return coords_unique + +#def plot_residuals() + +def main(): + #wdir = '/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_diehl_crust_included_v3_hf' + wdir = '/data/AlpArray_Data/fmtomo/v5/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10' + os.chdir(wdir) + + receivers_dict = organize_receivers('receivers.in') + + print(wdir) + misfits = {} + mean_residuals = {} + for extract_network in [None, 'Z3', 'ZS', ]: + misfits[extract_network] = [] + mean_residuals[extract_network] = [] + with open('/rscratch/minos13/marcel/alparray/station_coords.json', 'r') as infile: + station_coords = json.load(infile) + + print('\nCalculating residuals for network:', extract_network) + if extract_network: + station_coords = extract_stations_for_network(station_coords, extract_network) + latlon_dict = sort_stations_for_search(station_coords, receivers_dict) + rec_ids = get_receiver_ids(list(latlon_dict.keys()), receivers_dict) + + otimes = np.genfromtxt('otimes.dat', skip_header=1) + #otimes_orig = np.genfromtxt('otimes_orig.dat', skip_header=1) + ttimes = otimes[:, 4] + uncs = otimes[:, 5] + nsrc 
= int(otimes[-1, 1]) + + rtimes = np.genfromtxt('rtimes_tele.dat')[:, 4] + + for itstep in range(0, 25): + nRays = 0 + mtimes_all = np.genfromtxt('it_{}/arrivals.dat'.format(itstep)) + mtimes = mtimes_all[:, 4] + mtimes_diff = mtimes - rtimes + + for srcid in range(nsrc): + srcid += 1 + # get all indices of current source id + indices = np.where(mtimes_all[:, 1] == srcid) + nRays += len(indices[0]) + mtimes_diff[indices] -= np.mean(mtimes_diff[indices]) + + # get all indices that are in rec_ids list as well + mask_recs = np.isin(mtimes_all[:, 0].astype(int), rec_ids) + + mf = calc_misfit(ttimes[mask_recs], mtimes_diff[mask_recs], uncs[mask_recs]) + sr = calc_mean_residuals(ttimes[mask_recs], mtimes_diff[mask_recs]) + misfits[extract_network].append(mf) + mean_residuals[extract_network].append(sr) + print('It: {}, misfit: {}, mean res: {}, nrays: {}'.format(itstep, mf, sr, nRays)) + + #plt.plot(mean_residuals[extract_network], label=extract_network) + plt.plot(misfits[extract_network], label=extract_network) + + plt.title(wdir) + plt.ylabel('Mean residuals [s]') + plt.xlabel('Iteration') + plt.legend() + plt.show() + +if __name__ == '__main__': + main() + + + + + diff --git a/pylot/tomography/fmtomo_tools/model_slice_plomerova.py b/pylot/tomography/fmtomo_tools/model_slice_plomerova.py new file mode 100644 index 00000000..a38dd554 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/model_slice_plomerova.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import glob, os, shutil + +from fmtomo_tools.fmtomo_grid_utils import read_vgrid, write_vgrid + +pjoin = os.path.join + +pwd = '/data/AlpArray_Data/fmtomo/v6/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10_EASI_test_Plomerova/' + +vgrid_file_in = pjoin(pwd, 'vgrids_dts_crust.in') +vgrid_file_out = pjoin(pwd, 'vgrids_dts_crust_variance_slice.in') + +latmin_rec, latmax_rec = 44.8, 51.3 +lonmin_rec, lonmax_rec = 11.94, 14.66 + +latmin_grid, latmax_grid = 44.45, 52.55 +lonmin_grid, lonmax_grid = 10.83, 15.77 + + +def modify_pickfiles(): + picks_path = pjoin(pwd, 'picks_orig') + os.chdir(picks_path) + + outdir = pjoin(pwd, 'picks') + + + infiles = glob.glob('*.ttf') + for infile in infiles: + lines_out = [] + with open(infile, 'r') as fid: + eventid = infile.split('.ttf')[0] + npicks = int(fid.readline()) + + # copy source/phase header + for _ in range(2): + lines_out.append(fid.readline()) + + for line in fid: + lat, lon = [float(item) for item in line.split()[:2]] + if latmin_rec < lat < latmax_rec and lonmin_rec < lon < lonmax_rec: + lines_out.append(line) + + fn_out = pjoin(outdir, infile) + with open(fn_out, 'w') as outfile: + # number of picks: list contents - 2 header lines + outfile.write(f'{len(lines_out) - 2}\n') + for line in lines_out: + outfile.write(line) + + print(f'Modified {eventid}: Removed {npicks - len(lines_out)} out of {npicks} picks/stations') + + os.chdir(pwd) + + +def modify_variance_slice(): + grid, gridN, gridDelta, gridStart = read_vgrid(vgrid_file_in) + + for index, latlon in enumerate(zip(grid['lats'], grid['lons'])): + lat, lon = latlon + if not latmin_grid < lat < latmax_grid or not lonmin_grid < lon < lonmax_grid: + grid['covs'][index] = 1e-6 + + write_vgrid(grid, gridN, gridDelta, gridStart, vgrid_file_out) + + +#modify_pickfiles() +modify_variance_slice() diff --git a/pylot/tomography/fmtomo_tools/modify_otimes.py b/pylot/tomography/fmtomo_tools/modify_otimes.py new file mode 100644 index 00000000..0a20d5dc --- /dev/null +++ b/pylot/tomography/fmtomo_tools/modify_otimes.py @@ -0,0 +1,294 @@ 
+#!/usr/bin/env python +# -*- coding: utf-8 -*- +import os + +import numpy as np +from scipy.interpolate import RegularGridInterpolator +import matplotlib.pyplot as plt + +from pylot.tomography.fmtomo_tools.fmtomo_teleseismic_utils import export_otimes, organize_receivers + +def crustal_correction_using_differences(fname_otimes, fname_otimes_diff, fname_otimes_out): + otimes = np.loadtxt(fname_otimes, skiprows=1) + otimes_diff = np.loadtxt(fname_otimes_diff, skiprows=1) + + src_means = source_means(otimes_diff) + + #mean_diffs = np.mean([float(item.split()[4]) for item in otimes_diff[1:]]) + print('Mean_diffs of all sources: ', np.mean(list(src_means.values()))) + # TODO: Check if demeaning is useful for crustal correction using synthetics (was it b4 using ak135_diehl 1D?) + # Update 27.5.2020: If not demeaned, overall velocity perturbation shifted to negative in whole upper mantle + # BUT: demean has to be executed for each source, not with global mean! + + with open(fname_otimes_out, 'w') as outfile: + outfile.write('{}\n'.format(len(otimes))) + for index, line in enumerate(otimes): + src_id = line[1] + ttime = line[4] + ttime_diff = otimes_diff[index][4] + new_time = ttime - ttime_diff + src_means[src_id] + line[4] = new_time + for col_index, item in enumerate(line): + if col_index < 4: + fmt = '{:10g} ' + else: + fmt = '{:10.8f} ' + outfile.write(fmt.format(item)) + outfile.write('\n') + + +def source_means(otimes): + # catch all source ids from otimes file + src_ids = np.unique(otimes[:, 1]) + print('Source IDs:', src_ids) + + # create dictionary containing means of travel time (differences) for each source (key) + src_means = {src_id: np.mean(otimes[otimes[:, 1] == src_id][:, 4]) for src_id in src_ids} + + print('Source means:') + for srcid, val in src_means.items(): + print(srcid, ': ', val) + + return src_means + + +def crustal_correction_using_residual_map(fname_otimes, fname_residuals, receivers_in, fname_otimes_out): + ''' correct using residuals from numpy file with an array of shape (lons, lats, residuals)''' + otimes = np.loadtxt(fname_otimes, skiprows=1) + rec_dict = organize_receivers(receivers_in) + lons, lats, res = np.load(fname_residuals, allow_pickle=True) + + lonsU = np.unique(lons) + latsU = np.unique(lats) + #grid = (lonsU, latsU) + #shape = [len(item) for item in grid] + #res = res.reshape(shape) + + # not very efficient but very lazy (just interpolate value for each ray, i.e. receiver, not unique, + # which is quite redundant) + rginter = RegularGridInterpolator((lonsU, latsU), res, bounds_error=False, fill_value=0.) 
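+    # Note (added comment): RegularGridInterpolator assumes res is already a 2D
+    # array of shape (len(lonsU), len(latsU)) on the regular lon/lat grid; with
+    # bounds_error=False and fill_value=0., receivers outside the residual map
+    # simply receive a zero correction below.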
+
+    # quick visual sanity check of the interpolated residual map (opens a plot window)
+    test_rginter(rginter)
+
+    for line in otimes:
+        rec_id = int(line[0])
+        lonlat = (rec_dict[rec_id]['lon'], rec_dict[rec_id]['lat'])
+        # do not shadow the residual map array res read above
+        res_interp = rginter(lonlat)
+        line[4] -= res_interp
+
+    src_means = source_means(otimes)
+
+    for src_id, src_mean in src_means.items():
+        otimes[otimes[:, 1] == src_id, 4] -= src_mean
+
+    with open(fname_otimes_out, 'w') as outfile:
+        outfile.write('{}\n'.format(len(otimes)))
+        for line in otimes:
+            for col_index, item in enumerate(line):
+                if col_index < 4:
+                    fmt = '{:10g} '
+                else:
+                    fmt = '{:10.8f} '
+                outfile.write(fmt.format(item))
+            outfile.write('\n')
+
+
+def test_rginter(rginter):
+    lons = np.linspace(0, 22, 220)
+    lats = np.linspace(40, 52, 120)
+    lonsg, latsg = np.meshgrid(lons, lats)
+    data = rginter((lonsg, latsg))
+    pcm = plt.pcolormesh(lonsg, latsg, data)
+    plt.colorbar(pcm)
+    plt.show()
+
+
+def get_otimes_diff(fname_arrivals1, fname_arrivals2, fname_out='otimes_diff.dat'):
+    '''
+    Calculate file containing travel time differences between fname_arrivals1 and fname_arrivals2, e.g. for crustal
+    correction.
+    :param fname_arrivals1:
+    :param fname_arrivals2:
+    :return:
+    '''
+    with open(fname_arrivals1, 'r') as infile:
+        arrivals1 = infile.readlines()
+    with open(fname_arrivals2, 'r') as infile:
+        arrivals2 = infile.readlines()
+
+    assert len(arrivals1) == len(arrivals2), 'Length of input arrival files differs'
+
+    print('Calculating differences between file {} and {}'.format(fname_arrivals1, fname_arrivals2))
+    columnString = '{} '
+
+    nArrivals = len(arrivals1)
+    with open(fname_out, 'w') as outfile:
+        outfile.write('{}\n'.format(nArrivals))
+        for index in range(nArrivals):
+            line1 = arrivals1[index]
+            line2 = arrivals2[index]
+            for item in line1.split()[:4]:
+                outfile.write(columnString.format(item))
+            diff = float(line1.split()[4]) - float(line2.split()[4])
+            outfile.write(columnString.format(diff))
+            outfile.write('\n')
+    print('Wrote {} lines to file {}'.format(nArrivals, fname_out))
+
+
+def get_synthetic_obsdata_legacy(fname_m1, fname_m2, fname_otimes_orig, fname_out='otimes_modif.dat'):  #, p_err=0.1):
+    '''
+    Create synthetic obsdata of model 1 relative to model 2 (usually 1D model synthetic travel times)
+    :param fname_m1: arrivals.dat file of synthetic travel times (e.g. from checkerboard test)
+    :param fname_m2: arrivals.dat file of synthetic travel times (e.g. ak135 model)
+    :param fname_otimes_orig: original otimes file for pick uncertainties
+    :param fname_out: otimes.dat file with output
+    :param p_err: picking error
+    :return:
+    '''
+    with open(fname_m1, 'r') as infile:
+        arrivals_model1 = infile.readlines()
+    with open(fname_m2, 'r') as infile:
+        arrivals_model2 = infile.readlines()
+    with open(fname_otimes_orig, 'r') as infile:
+        otimes = infile.readlines()[1:]
+
+    assert (len(arrivals_model1) == len(arrivals_model2) == len(otimes)), 'mismatch in lengths of files!'
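+
+    # Note (added comment): the three files are assumed to be line-aligned, i.e.
+    # row i refers to the same ray in each file. Below, the four leading id
+    # columns are copied from the model-2 file, the output travel time is the
+    # difference model1 - model2, and the pick error is taken from column 6
+    # (index 5) of the original otimes file.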
+ + diffs = [] + with open(fname_out, 'w') as outfile: + outfile.write('{}\n'.format(len(arrivals_model1))) + + for line_model1, line_model2, line_otimes in zip(arrivals_model1, arrivals_model2, otimes): + for item in line_model2.split()[:4]: + outfile.write('{} '.format(item)) + + ttime_model1 = float(line_model1.split()[4]) + ttime_model2 = float(line_model2.split()[4]) + pickerror = float(line_otimes.split()[5]) + # pickerror = (p_err+2*p_err*abs(np.random.randn())) + + ttime_diff = ttime_model1 - ttime_model2 + diffs.append(ttime_diff) + + outfile.write('{} {}\n'.format(ttime_diff, pickerror)) + + mean_diff = np.mean(diffs) + abs_mean_diff = np.mean(np.abs(diffs)) + + print('Mean_diff: {} - abs_mean_diff: {}'.format(mean_diff, abs_mean_diff)) + print('Done with {} travel times. Output in file: {}'.format(len(arrivals_model1), fname_out)) + + +def get_synthetic_obsdata(fname_m1, fname_m2, fname_otimes_orig, fname_out='otimes_modif.dat', sigma='original'): + ''' + Create synthetic obsdata of model 1 relative to model 2 (usually 1D model synthetic travel times) + :param fname_m1: arrivals.dat file of synthetic travel times (e.g. from checkerboard test) + :param fname_m2: arrivals.dat file of synthetic travel times (e.g. ak135 model) + :param fname_otimes_orig: original otimes file for pick uncertainties + :param fname_out: otimes.dat file with output + :param p_err: picking error + :return: + ''' + arrivals_model1 = np.genfromtxt(fname_m1) + arrivals_model2 = np.genfromtxt(fname_m2) + otimes = np.genfromtxt(fname_otimes_orig, skip_header=1) + + assert(len(arrivals_model1) == len(arrivals_model2) == len(otimes)), 'missmatch in lengths of files!' + + # get new array as copy from model1 after deleting last column (not needed for otimes) + arrivals_diff = np.delete(arrivals_model1, 6, 1) + + # overwrite time column by diffs + arrivals_diff[:, 4] -= arrivals_model2[:, 4] + # overwrite last column by pick errors + arrivals_diff[:, 5] = otimes[:, 5] + + # get max nSrc (in last line of sorted arrivals files) + nSrc = int(arrivals_diff[-1, 1]) + + print('N sources:', nSrc) + if sigma not in [None, False]: + print('Applying gaussian noise with sigma={}'.format(sigma)) + if sigma == 'original': + sigma_val = otimes[:, 5] + else: + sigma_val = sigma + gauss_noise = np.random.normal(0., sigma_val, len(arrivals_diff[:, 4])) + plt.axhline(0., color='grey') + plt.vlines(np.arange(0, len(arrivals_diff)), arrivals_diff[:, 4], arrivals_diff[:, 4] + gauss_noise, + color='grey', linestyles='dashed') + plt.plot(arrivals_diff[:, 4], 'r.') + arrivals_diff[:, 4] += gauss_noise + + for index in range(nSrc): + src_id = index + 1 + # get indices for this source ID + indices = np.where(arrivals_diff[:, 1] == src_id) + # get mean for that srcid + mean = np.mean(arrivals_diff[:, 4][indices]) + print('Srcid: {}, mean: {}'.format(src_id, mean)) + # de-mean array + arrivals_diff[:, 4][indices] -= mean + + if sigma not in [None, False]: + plt.axhline(0., color='grey') + #plt.vlines(np.arange(0, len(arrivals_diff)), arrivals_diff[:, 4], arrivals_diff[:, 4] + gauss_noise) + plt.plot(arrivals_diff[:, 4], 'b.') + plt.figure() + plt.hist(gauss_noise, bins=100) + plt.show() + + export_otimes(arrivals_diff, fname_out) + + +def plot_histograms_for_crustal_corrections(): + import matplotlib.pyplot as plt + ttdiffs_no_crust = [] + ttdiffs_crust = [] + ttdiffs_corrected = [] + + #get_synthetic_obsdata('it_synth_forward/arrivals.dat', 'arrivals_ak135_diehl.dat', 'otimes_three_boxes_no_crust.dat') + 
#get_synthetic_obsdata('it_synth_forward_with_crust/arrivals.dat', 'arrivals_ak135_diehl.dat', 'otimes_three_boxes_with_diehl_crust.dat') + with open('otimes_three_boxes_with_diehl_crust.dat', 'r') as infile: + lines_crust = infile.readlines()[1:] + with open('otimes_three_boxes_no_crust.dat', 'r') as infile: + lines_no_crust = infile.readlines()[1:] + with open('otimes_three_boxes_crustal_corrected.dat', 'r') as infile: + lines_corrected = infile.readlines()[1:] + + for line in lines_crust: + ttdiffs_crust.append(float(line.split()[4])) + for line in lines_no_crust: + ttdiffs_no_crust.append(float(line.split()[4])) + for line in lines_corrected: + ttdiffs_corrected.append(float(line.split()[4])) + + plt.hist(ttdiffs_crust, bins=100, label='tt-diff including crustal structure.') + plt.hist(ttdiffs_no_crust, bins=100, label='tt-diff without crustal structure') + plt.hist(ttdiffs_corrected, bins=100, label='tt-diff after correcting for crustal structure') + plt.xlabel('Travel time differences to ak135_diehl 1D model [s]') + plt.legend() + +if __name__ == '__main__': + #wdir = '/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_diehl_crustal_corrections_v3_hf' + #fname1 = os.path.join(wdir, 'arrivals_ak135di_crust.dat') + #fname2 = os.path.join(wdir, 'arrivals_ak135di.dat') + #get_otimes_diff(fname1, fname2, os.path.join(wdir, 'otimes_diff.dat')) + + #get_synthetic_obsdata('arrivals_cb_N2.dat', 'arrivals_ak135.dat', 'otimes_cb_N2.dat') + #os.chdir('/data/AlpArray_Data/fmtomo/v4/different_test_runs/alparray_mantle_waldhauser_crust_corrected_covs_hf_gradient_smoothing') + #os.chdir('/data/AlpArray_Data/fmtomo/v4/different_test_runs/alparray_mantle_waldhauser_crust_corrected_covs_hf_gradient_smoothing') + #crustal_correction_using_residual_map('otimes.dat', 'wh_residuals.npy', 'receivers.in', 'otimes_corr_wh.dat') + + #os.chdir('/data/AlpArray_Data/fmtomo/v4/different_test_runs/alparray_mantle_diehl_crust_corrected_residuals_stacked_gradient_smoothing') + #crustal_correction_using_residual_map('otimes.dat', 'diehl_2009_residuals.npy', 'receivers.in', 'otimes_corr_diehl.dat') + + #os.chdir('/data/AlpArray_Data/fmtomo/v6/crust_corrected_VERTICAL_hf_sm_FIX_DTS_grad_sm30_dm10') + #crustal_correction_using_residual_map('otimes.dat', 'dts_filt_12.5_12.5_7.5_CRUST_ONLY_RESIDUALS.npy', 'receivers.in', 'otimes_corr_dts.dat') + + os.chdir('/data/AlpArray_Data/fmtomo/v6/crust_corrected_WH_hf_sm_FIX_DTS_grad_sm30_dm10') + crustal_correction_using_residual_map('otimes.dat', 'wh_residuals_SORTED.npy', 'receivers.in', 'otimes_corr_wh.dat') + + + + #crustal_correction_using_residual_map('otimes.dat', 'wh_residuals.npy', 'receivers.in', 'otimes_corr_wh.dat') \ No newline at end of file diff --git a/pylot/tomography/fmtomo_tools/modify_vgrid.py b/pylot/tomography/fmtomo_tools/modify_vgrid.py new file mode 100644 index 00000000..255732fd --- /dev/null +++ b/pylot/tomography/fmtomo_tools/modify_vgrid.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Different functions to modify FMTOMO velocity(inversion) grid +import argparse +import numpy as np +from math import erf + +from pylot.tomography.fmtomo_tools.fmtomo_grid_utils import read_vgrid, write_vgrid, write_vtk + + +def main(fname_in, fname_out, fname_vtk_out=None, cov_perc=0.15, bounds=[40, 60]): + vgrid, gridN, gridDelta, gridStart = read_vgrid(fname_in) + + print('Modifying covariances...') + vgrid['covs'] = [] + for depth, vp in zip(vgrid['depths'], vgrid['vps']): + vgrid['covs'].append(covariance_for_depth(depth, vp, cov_perc, 
bounds))
+
+    write_vgrid(vgrid, npts=gridN, delta=gridDelta, start=gridStart, fname=fname_out)
+    if fname_vtk_out:
+        write_vtk(vgrid, fname_vtk_out, write_data=['vps', 'covs'])
+
+
+def modify_vgrid_box(vgrid, vgrid_key, min_lon, max_lon, min_lat, max_lat, min_depth, max_depth,
+                     val_center, val_border, scale_factor, use_erf=True, extend_to_top=False):
+    '''
+    Modify a value (vgrid_key) of vgrids.in using an error function. The value increases smoothly from the center
+    towards the borders of the external model. This function was used to prevent changes of the inversion result
+    in the region of the initial crustal model that was corrected for.
+    '''
+
+    # get central points of external model (here value will be minimal)
+    c_lon = 0.5 * (min_lon + max_lon)
+    c_lat = 0.5 * (min_lat + max_lat)
+    c_depth = 0.5 * (min_depth + max_depth)
+
+    # get half the extent of the model dimensions for function scaling
+    dlon = 0.5 * (max_lon - min_lon)
+    dlat = 0.5 * (max_lat - min_lat)
+    ddepth = 0.5 * (max_depth - min_depth)
+
+    print('Modifying {}...'.format(vgrid_key))
+    print('Center {key}: {val_c}, border {key}: {val_b}'.format(key=vgrid_key, val_c=val_center, val_b=val_border))
+    if not vgrid_key in vgrid.keys() or not vgrid[vgrid_key]:
+        vgrid[vgrid_key] = list(np.ones(len(vgrid['depths'])))
+    for index, tup in enumerate(zip(vgrid['lons'], vgrid['lats'], vgrid['depths'])):
+        lon, lat, depth = tup
+        if (min_lon <= lon <= max_lon and min_lat <= lat <= max_lat and depth <= max_depth):
+            x = abs(lon - c_lon) / dlon
+            y = abs(lat - c_lat) / dlat
+            if extend_to_top and depth < c_depth:
+                z = 0
+            else:
+                z = abs(depth - c_depth) / ddepth
+            if use_erf:
+                point_val = smoothing_erf(x, y, z, val_center, val_border, scale_factor=scale_factor)
+            else:
+                point_val = val_center
+        else:
+            point_val = val_border
+        vgrid[vgrid_key][index] *= point_val
+
+    return vgrid
+
+
+def modify_vgrid_gradient(vgrid, vgrid_key, val_top, val_bot):
+    '''
+    Modify a value (vgrid_key) of vgrids.in using a linear gradient from val_top to val_bot
+    '''
+
+    print('Modifying {} with linear gradient...'.format(vgrid_key))
+    print('Top: {}, bot: {}'.format(val_top, val_bot))
+    if not vgrid_key in vgrid.keys():
+        vgrid[vgrid_key] = list(np.ones(len(vgrid['depths'])))
+    depth_min = min(vgrid['depths'])
+    depth_max = max(vgrid['depths'])
+    for index, tup in enumerate(zip(vgrid['lons'], vgrid['lats'], vgrid['depths'])):
+        lon, lat, depth = tup
+        vgrid[vgrid_key][index] *= linear_gradient(depth, depth_min=depth_min, depth_max=depth_max, val_top=val_top,
+                                                   val_bot=val_bot)
+    return vgrid
+
+
+def linear_gradient(depth, depth_min, depth_max, val_top, val_bot):
+    return (val_bot - val_top) * depth / (depth_max - depth_min) + val_top
+
+
+def smoothing_erf(x, y, z, val_center, val_border, scale_factor=1.):
+    # f equals val_center at the box center (x = y = z = 0) and approaches
+    # val_border towards the box edges (x = y = z = 1)
+    a = val_border
+    b = val_center
+    sx = sy = sz = scale_factor
+    f = (a - b) * (1. / 3. * (erf(2. * sx * (x - 1)) + erf(2. * sy * (y - 1)) + erf(2. * sz * (z - 1))) + 1.) + b
+    return f
+
+
+def covariance_for_depth(depth, vp, cov_perc, bounds):
+    '''
+    Function that returns covariance values for certain depths, written to give low variance to crust, intermediate
+    to an interlayer and high variance to everything else.
+ :param depth: depth in kilometers + :return: covariance + ''' + if depth <= bounds[0]: + return 0.05 + elif bounds[0] < depth <= bounds[1]: + return 0.5 * cov_perc * vp + else: + return cov_perc * vp + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description='Prepare grid for fm3d teleseismic hybrid calculation.') + parser.add_argument('fname_in', help='input filename (vgrids.in)') + parser.add_argument('fname_out', help='output filename (vgrids_new.in)') + parser.add_argument('--fname_out_vtk', default=None, help='vtk output filename') + parser.add_argument('--cov_border', default=1.0, help='covariance for the model outside crustal model boundaries') + parser.add_argument('--cov_center', default=0.05, help='maximum covariance in the center of the crustal model') + parser.add_argument('--smooth_border', default=1., help='smoothing factor for the rest of the box') + parser.add_argument('--smooth_center', default=0.5, help='smoothing factor inside SWATH-D box') + + args = parser.parse_args() + + vgrid, gridN, gridDelta, gridStart = read_vgrid(args.fname_in) + + #main(args.fname_in, args.fname_out, args.fname_out_vtk) + + #vgrid = modify_vgrid_gradient(vgrid, 'smoothfactors', 1.0, 2.0) + + # bounds for waldhauser residual corrections + wh_bounds = dict(lon0 = 3.23, lon1 = 13.96, lat0 = 43.18, lat1 = 49.45) + vgrid = modify_vgrid_box(vgrid, vgrid_key='covs', min_lon=wh_bounds['lon0'], max_lon=wh_bounds['lon1'], + min_lat=wh_bounds['lat0'], max_lat=wh_bounds['lat1'], min_depth=-5.0, max_depth=60.0, + val_center=float(args.cov_center), val_border=float(args.cov_border), + scale_factor=3, use_erf=True, extend_to_top=True) + + #vgrid = modify_vgrid_box(vgrid, vgrid_key='covs', min_lon=2.2547, max_lon=17.0999, min_lat=41.0517, + # max_lat=50.4984, min_depth=-5.0, max_depth=70.0, val_center=float(args.cov_center), + # val_border=float(args.cov_border), scale_factor=3, use_erf=True, extend_to_top=True) + + #vgrid = modify_vgrid_box(vgrid, vgrid_key='smoothfactors', min_lon=8.9, max_lon=15.3, min_lat=45., + # max_lat=48., min_depth=-15.0, max_depth=610.0, val_center=float(args.smooth_center), + # val_border=float(args.smooth_border), scale_factor=1., use_erf=True) + + # MP MP TEST TEST TEST ++++++++++++++++++++++++++++++++++++++++++++++++ + #grid = modify_vgrid_box(grid, vgrid_key='smoothfactors', min_lon=11., max_lon=35., min_lat=30., + # max_lat=61., min_depth=-5.0, max_depth=600.0, val_center=float(args.smooth_center), + # val_border=float(args.smooth_border), scale_factor=.4) + # MP MP TEST TEST TEST ------------------------------------------ + + write_vgrid(vgrid, npts=gridN, delta=gridDelta, start=gridStart, fname=args.fname_out) + if args.fname_out_vtk: + write_vtk(vgrid, args.fname_out_vtk, write_data=['vps', 'covs', 'smoothfactors']) + + diff --git a/pylot/tomography/fmtomo_tools/plot_obsdata.py b/pylot/tomography/fmtomo_tools/plot_obsdata.py new file mode 100755 index 00000000..3be2edbd --- /dev/null +++ b/pylot/tomography/fmtomo_tools/plot_obsdata.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import argparse + +import matplotlib.pyplot as plt + +def plot_otimes(otimes_fname): + with open(otimes_fname, 'r') as infile: + input_list = infile.readlines()[1:] + ray_id = [int(line.split()[0]) for line in input_list] + ttimes = [float(line.split()[-2]) for line in input_list] + uncertainties = [float(line.split()[-1]) for line in input_list] + plt.axhline(0, linestyle=':', color='k') + plt.errorbar(ray_id, ttimes, yerr=uncertainties, fmt='o', 
markersize=1, ecolor='0.5', elinewidth=0.5) + plt.show() + + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description='Plot ttimes with errors of FMTOMO file otimes.dat') + parser.add_argument('infile', help='inputfile') + + args = parser.parse_args() + + plot_otimes(args.infile) \ No newline at end of file diff --git a/pylot/tomography/fmtomo_tools/plot_residuals_map.py b/pylot/tomography/fmtomo_tools/plot_residuals_map.py new file mode 100755 index 00000000..bd2e073c --- /dev/null +++ b/pylot/tomography/fmtomo_tools/plot_residuals_map.py @@ -0,0 +1,341 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +import argparse +import json + +import numpy as np +import matplotlib.pyplot as plt +from scipy.interpolate import griddata + +from obspy.geodetics.base import gps2dist_azimuth + +import cartopy.crs as ccrs + +from pylot.tomography.fmtomo_tools.compare_arrivals_hybrid import organize_receivers, organize_sources, organize_event_names +from pylot.tomography.map_utils import make_map +from pylot.tomography.utils import normed_figure + + +def plot_map(otimes_fname, sources_fname, receivers_fname, isf, rtimes_fname=None, arrivals_fname=None, colorbar=True, + title=True, stack=False, source_ids=(), savefig_dir='', demean=True, absolute=False, clat=42.25, clon=17.5,#clat=46., clon=11., + file_ext='png', max_abs=3., relative_to_otimes=False, only_otimes=False): + if savefig_dir: + if not os.path.isdir(savefig_dir): + os.mkdir(savefig_dir) + + src_dict = organize_sources(sources_fname) + rec_dict = organize_receivers(receivers_fname) + eventnames = organize_event_names(isf) + + residuals_dict = read_fmtomo_tt_file(otimes_fname, rec_dict, rtimes_fname=rtimes_fname, synth_fname=arrivals_fname, + demean=demean, absolute=absolute, height_correction_vp=5.5, + relative_to_otimes=relative_to_otimes, only_otimes=only_otimes) + + if stack: + residuals_dict = stack_sources(residuals_dict) + with open('stacked_residuals.json', 'w') as outfile: + json.dump(residuals_dict, outfile) + + count = 0 + if savefig_dir: + fig = normed_figure(width_cm=12, ratio=12./9.) + #fig = plt.figure(figsize=(12, 9)) + else: + fig = plt.figure() + if stack or source_ids is not [] or savefig_dir: + count = 1 + + # increase point size if not stacked + sizefactor_stacked = {True: 30, + False: .2e4} + + for src_id, dic in residuals_dict.items(): + eventname = eventnames[src_id - 1] + if source_ids != []: + if not src_id in source_ids: + continue + if max_abs == 'auto': + max_abs = np.max(np.abs(np.array(dic['ttimes']))) + # if not stack and not savefig_dir and source_ids == []: + # count += 1 + # if count == 1: + # ax = fig.add_subplot(3, 3, count) + # ax0 = ax + # else: + # ax = fig.add_subplot(3, 3, count, sharex=ax0, sharey=ax0) + # + # ax.text(0.1, 0.9, 'Source ID {}, {}'.format(src_id, src_dict[src_id]['phase']), transform=ax.transAxes) + # else: + # ax = fig.add_subplot(111) + if stack and title: + plt.title('Stacked {} sources. Size relates to number of stacks per receiver.'.format(len(src_dict)), + y=1.05) + ax = make_map(draw_model=False, draw_faults=True, model_legends=False, clon=clon, clat=clat, + width = 30, height = 21,) # , width=6e6, height=5e6) + if not stack: + slat, slon = src_dict[src_id]['lat'], src_dict[src_id]['lon'] + dist = plot_source(slat, slon) + baz = gps2dist_azimuth(clat, clon, slat, slon, a=6.371e6, f=0)[1] + if title: + plt.title( + 'Plot of source {}. Distance: {:.0f}$^\circ$. 
BAZ: {:.0f}$^\circ$'.format(eventname, dist, baz), + y=1.05) + + #ax.set_xlabel('Longitude [$^\circ$]') + #ax.set_ylabel('Latitude [$^\circ$]') + + # prepare grids for contour plot + #lonaxis = np.linspace(min(dic['lons']), max(dic['lons']), 250) + #lataxis = np.linspace(min(dic['lats']), max(dic['lats']), 250) + #longrid, latgrid = np.meshgrid(lonaxis, lataxis) + #ttimes_grid = griddata((dic['lats'], dic['lons']), dic['ttimes'], (latgrid, longrid), method='linear') + #levels = np.linspace(min(dic['ttimes']), max(dic['ttimes']), 75) + + cmap = plt.get_cmap('seismic') if not absolute else plt.get_cmap('viridis') + + vmin = -max_abs if not absolute else None + vmax = max_abs if not absolute else None + sc = ax.scatter(dic['lons'], dic['lats'], c=np.array(dic['ttimes']), cmap=cmap, + vmin=vmin, vmax=vmax, edgecolors='grey', linewidths=0.2, + s=sizefactor_stacked[stack]*np.array(dic['nttimes'])/len(src_dict), zorder=10, + transform=ccrs.PlateCarree(), alpha=0.75) + sc.set_edgecolor('0.') + if colorbar: + cb = plt.colorbar(sc, label='ttime [s]', shrink=0.5) + + if stack: + savefig_path = os.path.join(savefig_dir, 'stacked_events.{}'.format(file_ext)) + else: + #fname = f'baz{baz:03.0f}_dist{dist:03.0f}_{eventname}_srcID{src_id}.{file_ext}' + fname = f'{eventname}_srcID{src_id}.{file_ext}' + savefig_path = os.path.join(savefig_dir, fname) + + + if not savefig_dir and not count % 9: + plt.show() + fig = plt.figure()#figsize=(16, 9), dpi=300.) + count = 0 + if savefig_dir: + ax.figure.savefig(savefig_path, dpi=300., bbox_inches='tight', pad_inches=0.) + print('Wrote file {}'.format(savefig_path)) + plt.clf() + + if not savefig_dir: + plt.show() + + +def plot_source(lat, lon, clat=46., clon=11.): + #basemap.drawgreatcircle(lon, lat, clon, clat, color='k', zorder=15, linestyle='dashed') + dist, azim, bazim = gps2dist_azimuth(lat, lon, clat, clon, a=6.371e6, f=0) + + dist_deg = dist/1000./np.pi/2./6371.*360. + return dist_deg + + #x = np.cos(np.deg2rad(azim)) + #y = np.sin(np.deg2rad(azim)) + #print(x, y) + #ax.plot([0, x], [0, y], 'r', zorder=15) + + +def read_fmtomo_tt_file(otimes_fname, rec_dict, synth_fname=None, rtimes_fname=None, demean=True, absolute=False, + height_correction_vp=None, R=6371., relative_to_otimes=True, only_otimes=False): + #with open(otimes_fname, 'r') as infile: + # # skip first line for otimes.dat. Frist line contains N_rays + # obsarray = infile.readlines()[1:] + obsarray = np.loadtxt(otimes_fname, skiprows=1) + + if height_correction_vp: + if absolute: + print('APPLICATION OF HEIGHT CORRECTION FOR ABS NOT IMPLEMENTED YET. 
Not needed!?')
+        #print('Applying height correction of {} km/s'.format(height_correction_vp))
+    else:
+        height_correction_vp = None
+        print('Will not apply height correction for relative values.')
+
+    if synth_fname:
+        synth_array = np.genfromtxt(synth_fname)
+
+    if rtimes_fname:
+        ref_array = np.genfromtxt(rtimes_fname)
+
+    if synth_fname and rtimes_fname:
+        #with open(synth_fname, 'r') as infile:
+        #    synth_array = infile.readlines()
+        #with open(rtimes_fname, 'r') as infile:
+        #    ref_array = infile.readlines()
+
+        #mean_obs = np.mean(obsarray[:, 4])
+        #mean_synth = np.mean(synth_array[:, 4])
+        #mean_ref = np.mean(ref_array[:, 4])
+
+        #mean_rel_synth = mean_synth - mean_ref
+        #print('Means:\nobserved: {}, relative {}'.format(mean_obs, mean_rel_synth))
+        synth_src_means = {}
+
+        nsrc = obsarray[-1, 1]
+        for index in range(int(nsrc)):
+            srcid = index + 1
+            indices = np.where(synth_array[:, 1].astype(int) == srcid)
+            synth_src_means[srcid] = np.mean(synth_array[indices, 4] - ref_array[indices, 4])
+
+        print('Average mean for sources: {} s'.format(np.mean(list(synth_src_means.values()))))
+
+        if demean:
+            print('Removing mean from synthetic times...')
+        else:
+            print('Will not mean-correct travel times')
+            # mean_rel_synth = 0.
+
+    residuals_dict = {}
+
+    for index, line in enumerate(obsarray):
+        rec_id = int(line[0])
+        src_id = int(line[1])
+        ttime = line[4] if relative_to_otimes or only_otimes else 0
+
+        # get residuals from reference file if fname is given
+        if synth_fname:
+            # get synthetic time
+            ttime_syn = synth_array[index][4]
+            if rtimes_fname:
+                # get reference time (1d travel time)
+                ttime_ref = ref_array[index][4]
+        if synth_fname and rtimes_fname:
+            # calculate synthetic time relative to 1d travel time
+            ttime_rel_syn = ttime_syn - ttime_ref
+            #print(ttime_ref, ttime_syn, ttime_rel_syn, ttime, ttime-ttime_rel_syn)
+            # residual = observed relative time minus demeaned synthetic relative time
+            mean_rel_src = synth_src_means[src_id] if demean else 0.
+            if not only_otimes:
+                ttime = -1 * (ttime_rel_syn - ttime - mean_rel_src)
+        if absolute:
+            ttime = ttime_syn
+        try:
+            uncertainty = line[5]
+        except IndexError:
+            uncertainty = 0
+
+        # create dictionary for source if not exists
+        if src_id not in residuals_dict:
+            residuals_dict[src_id] = {'lats': [],
+                                      'lons': [],
+                                      'rads': [],
+                                      'ttimes': [],
+                                      'nttimes': [],
+                                      'uncerts': []}
+        # choose correct dictionary
+        dic = residuals_dict[src_id]
+        # get coordinates from organized receivers dictionary
+        dic['lats'].append(rec_dict[rec_id]['lat'])
+        dic['lons'].append(rec_dict[rec_id]['lon'])
+        dic['rads'].append(rec_dict[rec_id]['rad'])
+        # rad is depth?
+        #elev = - rec_dict[rec_id]['rad']
+        #height_correction_vp = 0
+        #station_height_corr = elev / (height_correction_vp) if height_correction_vp else 0.
+        #if elev < 0: print(elev, station_height_corr)
+        # MP MP NO HEIGHT CORRECTION FOR FMTOMO! REFTIMES CONTAIN STATION HEIGHT!!!!!
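+        # Worked example (editor's sketch with made-up numbers): for ttime_syn = 62.0 s,
+        # ttime_ref = 60.0 s, an observed relative ttime of 1.5 s and mean_rel_src = 0.3 s:
+        # ttime_rel_syn = 62.0 - 60.0 = 2.0 s and ttime = -1 * (2.0 - 1.5 - 0.3) = -0.2 s,
+        # i.e. the observed relative residual falls 0.2 s short of the demeaned synthetic one.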
+ dic['ttimes'].append(ttime) # - station_height_corr) + # number of ttimes will be set to 1 (determines size in scatter, only relevant for stacked ttimes) + dic['nttimes'].append(1) + dic['uncerts'].append(uncertainty) + + return residuals_dict + + +def stack_sources(residuals_dict): + all_ttimes = {} + all_uncs = {} + for src_id, dic in residuals_dict.items(): + for index in range(len(dic['lats'])): + lat = dic['lats'][index] + lon = dic['lons'][index] + rad = dic['rads'][index] + source_tuple = (lat, lon, rad) + if not source_tuple in all_ttimes.keys(): + all_ttimes[source_tuple] = [] + all_uncs[source_tuple] = [] + all_ttimes[source_tuple].append(dic['ttimes'][index]) + all_uncs[source_tuple].append(dic['uncerts'][index]) + # create new dictionary in the shape of residuals dict with only one source + stacked_dict = {} + stacked_dict[1] = {'lats': [], + 'lons': [], + 'rads': [], + 'ttimes':[], + 'nttimes': [], + 'misfits': []} + + + for source_tuple in all_ttimes.keys(): + ttimes_list = all_ttimes[source_tuple] + uncs_list = all_uncs[source_tuple] + misfit = np.sum((np.array(ttimes_list) / np.array(uncs_list))**2) / len(ttimes_list) + lat, lon, rad = source_tuple + stacked_dict[1]['lats'].append(lat) + stacked_dict[1]['lons'].append(lon) + stacked_dict[1]['rads'].append(rad) + stacked_dict[1]['ttimes'].append(np.sum(np.mean(ttimes_list))) + stacked_dict[1]['nttimes'].append(len(ttimes_list)) + stacked_dict[1]['misfits'].append(misfit) + #stacked_dict[1]['ttimes_list'].append(ttimes_list) + + ttimes = [d['ttimes'] for d in stacked_dict.values()] + plt.hist(ttimes, 100) + plt.axvline(np.mean(ttimes), c='r') + plt.xlabel('ttime [s]') + + return stacked_dict + + +def compare_residuals(): + wdir = '/rscratch/minos13/marcel/fmtomo_alparray/' + os.chdir(wdir) + with open('alparray_mantle_diehl_crust_included_v3_hf/stacked_residuals.json', 'r') as infile: + sr = json.load(infile) + with open('alparray_mantle_diehl_crust_included_v3_no_density_hf/stacked_residuals.json', 'r') as infile: + srnd = json.load(infile) + mfdiff = np.array(sr['1']['misfits']) - np.array(srnd['1']['misfits']) + np.mean(mfdiff) + sc = plt.scatter(sr['1']['lons'], sr['1']['lats'], c=mfdiff, cmap='seismic', vmin=-7.5, vmax=7.5) + cb = plt.colorbar(sc) + title = 'Difference in station misfit after 12 iterations (density kernel - no density).' \ + ' Blue: decrease in residuals using density. 
Mean: {:.4f}'
+    plt.title(title.format(np.mean(mfdiff)))
+    plt.xlabel('Longitude (deg)')
+    plt.ylabel('Latitude (deg)')
+    cb.set_label('Misfit')
+    sc.set_linewidth(0.2)
+    sc.set_edgecolor('k')
+    plt.show()
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description='Plot residuals of fm3d output file (arrivals.dat format) on map.')
+    parser.add_argument('--otimes', default='otimes.dat', help='otimes.dat file (default: otimes.dat)')
+    parser.add_argument('--sources', default='sources.in', help='sources.in file (default: sources.in)')
+    parser.add_argument('--receivers', default='receivers.in', help='receivers.in file (default: receivers.in)')
+    parser.add_argument('--sourcefile', default='input_source_file.in', help='input_source_file (default: input_source_file.in)')
+    parser.add_argument('--arrivals', default=None, help='arrivals_file')
+    parser.add_argument('--rtimes', default=None, help='reference_file')
+    parser.add_argument('--fext', default='png', help='file extension for images (if -w)')
+    parser.add_argument('-s', '--stack', action='store_true', dest='stack', help='stack picks')
+    parser.add_argument('-w', '--write', action='store_true', default=False, help='write figures to disk')
+    parser.add_argument('-a', '--abs', action='store_true', dest='abs', default=False, help='compute absolute values')
+    parser.add_argument('-nd', '--no_demean', action='store_true', default=False, help='do not demean travel times')
+    parser.add_argument('-nc', '--no_colorbar', action='store_true', default=False, help='do not plot colorbar')
+    parser.add_argument('-nt', '--no_title', action='store_true', default=False, help='do not plot title')
+    parser.add_argument('-no', '--no_otimes', action='store_true', default=False, help='do not plot relative to otimes')
+    parser.add_argument('-oo', '--only_otimes', action='store_true', default=False, help='only plot observed times (otimes)')
+    parser.add_argument('-i', '--ids', dest='source_ids', type=int, default=[], nargs='*',
+                        help='Plot only sources with the given FMTOMO internal id(s).')
+
+    args = parser.parse_args()
+
+    savefig_dir = 'residual_maps_out' if args.write else ''
+    plot_map(args.otimes, args.sources, args.receivers, args.sourcefile, arrivals_fname=args.arrivals,
+             rtimes_fname=args.rtimes, stack=args.stack, source_ids=args.source_ids, colorbar=not args.no_colorbar,
+             title=not args.no_title, savefig_dir=savefig_dir, demean=not args.no_demean, absolute=args.abs,
+             file_ext=args.fext, relative_to_otimes=not args.no_otimes, only_otimes=args.only_otimes)
\ No newline at end of file
diff --git a/pylot/tomography/fmtomo_tools/quantify_ray_crossing.py b/pylot/tomography/fmtomo_tools/quantify_ray_crossing.py
new file mode 100644
index 00000000..c0cb3c3f
--- /dev/null
+++ b/pylot/tomography/fmtomo_tools/quantify_ray_crossing.py
@@ -0,0 +1,270 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+'''
+Estimate ray crossing at each depth using rays (as npy objects) precalculated from FMTOMO file rays.dat
+(e.g. 
script: plot_rays_on_plane) +''' +import os +import glob +import json + +import numpy as np +import matplotlib.pyplot as plt +from scipy.interpolate import RegularGridInterpolator + +from pylot.tomography.fmtomo_tools.fmtomo_grid_utils import read_vgrid_regular, read_vgrid +from pylot.tomography.map_utils import angle_marker, make_map + +pjoin = os.path.join + + +def main(fdir, it=1, station_coords_file='/data/AdriaArray_Data/various/station_coords.json'): + fnin_vgrid = pjoin(fdir, f'it_{it}/vgrids.in') + fnout_vtk = pjoin(fdir, f'vgrids_it{it}_w_ray_crossings.vtk') + fnout_npy = pjoin(fdir, f'vgrids_hf_it{it}' + '_crossing_bins_{}-{}_mincount_{}_STEP_{}.npy') + + # binsize (degrees), min_count for each direction to raise quality by 1 increment + binsize_az = 90. + binsize_incl = 60. + min_count = 5 + plot = False + write = True + draw_hists = True + draw_simple_hists = True + step = 2 # take nth sample of vgrid for calculating cells + + # for stations plotting + with open(station_coords_file, 'r') as infile: + stations = json.load(infile) + lonsStations, latsStations = zip(*[(sta['longitude'], sta['latitude']) for sta in stations.values()]) + + # some constants etc. + R = 6371. + bins_azim = np.arange(0, 360 + binsize_az, binsize_az) + bins_incl = np.arange(0., 60. + binsize_incl, binsize_incl) + nbins_azim = len(bins_azim) - 1 + nbins_incl = len(bins_incl) - 1 + + # for plotting: + bins = bins_azim + binsize = binsize_az + nbins = nbins_azim + + # get vgrid and estimate dlat/dlon + vgrid_reg = read_vgrid_regular(fnin_vgrid) + lons, lats, rads = [array[::step] for array in vgrid_reg[0]] + vps, covs, sms, pdvs = [array[::step, ::step, ::step] for array in vgrid_reg[1:]] + + dlat = lats[1] - lats[0] + dlon = lons[1] - lons[0] + + # grid = init_dict() + # grid['vps'] = list(vps.ravel()) + # grid['covs'] = list(covs.ravel()) + # grid['sms'] = list(sms.ravel()) + # grid['pdvs'] = list(pdvs.ravel()) + # grid['res'] = [] + # + # for rad in rads: + # for lat in lats: + # for lon in lons: + # depth = R - rad + # grid['lons'].append(lon) + # grid['lats'].append(lat) + # grid['depths'].append(depth) + # x, y, z = pol2cart(lat, lon, R - depth) + # grid['xs'].append(x) + # grid['ys'].append(y) + # grid['zs'].append(z) + + # just for plotting at certain depths (not for write!!!) + depths = np.arange(0., 800., 100) + if not write: + rads = R - depths + + resolutions = [] + lons_export = [] + lats_export = [] + rads_export = [] + + print(rads) + for rad in rads: + # prepare dict containing horizontal angle for key tuple (ilat, ilon) + vgrid_azim_ids = {} + vgrid_incl_ids = {} + + print('Working on radius', rad) + # iterate over rays + for fnin_rays in glob.glob1(fpath_events, '*.npz'): + rays = np.load(os.path.join(fpath_events, fnin_rays), allow_pickle=True) + # raypoints = np.zeros((len(rays), 3)) + #for index in range(len(rays)): + for ray in rays.values(): + #ray = rays[index] + # get index of closest depth here from ray (TAKE CARE OF ALIASING! Using nearest value) + ind_min = np.abs(ray[:, 0] - rad).argmin() + # in case ind_min is at upper boundary (surface) + if ind_min == len(ray[:, 0]) - 1: + ind_min -= 1 + # get diffs to following ray index (km, deg, deg)! + ray_diff = np.diff(ray[ind_min:ind_min + 2], axis=0)[0] + lat = ray[ind_min, 1] + lat_diff_km = ray_diff[1] * (np.pi * R) / 180. + lon_diff_km = ray_diff[2] * (np.pi * np.cos(np.deg2rad(lat)) * R) / 180. 
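+                # editor's note: one degree of latitude spans pi * R / 180 ~ 111 km, while one
+                # degree of longitude shrinks with cos(lat); hence the extra factor above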
+ r_diff_km = ray_diff[0] + dlateral = np.sqrt(lat_diff_km ** 2 + lon_diff_km ** 2) + # calculate horizontal angle + azim = np.rad2deg(np.arctan2(lon_diff_km, lat_diff_km)) + incl = np.rad2deg(np.arctan(dlateral / r_diff_km)) + # correct angle from -180-180 to 0-360 and also change azim to bazim + bazim = azim + 180. + # angles[index] = angle + # raypoints[index] = ray[ind_min] + lat, lon = ray[ind_min, 1:] + lati = np.where((lats <= lat + dlat / 2.) & (lats > lat - dlat / 2.))[0][0] + loni = np.where((lons <= lon + dlon / 2.) & (lons > lon - dlon / 2.))[0][0] + key = (lati, loni) + if not key in vgrid_azim_ids.keys(): + vgrid_azim_ids[key] = [] + vgrid_incl_ids[key] = [] + vgrid_azim_ids[key].append(bazim) + vgrid_incl_ids[key].append(incl) + # vgrid_ids[index] = np.array([lati, loni]) + # plt.scatter(lons[loni], lats[lati], c='r', marker='o', facecolor='none', alpha=0.5) + # sc = plt.scatter(raypoints[:,2], raypoints[:,1], c=angles[:]) + + vgrid_angles_quality = {} + vgrid_azim_hist = {} + + for inds in vgrid_azim_ids.keys(): + azims = vgrid_azim_ids[inds] + incls = vgrid_incl_ids[inds] + lati, loni = inds + hist_az, _ = np.histogram(azims, bins=bins_azim) + hist_incl, _ = np.histogram(incls, bins=bins_incl) + hist2d, _, _ = np.histogram2d(azims, incls, bins=[bins_azim, bins_incl]) + # hist_az, hist_inc = hist2d + hist = hist2d.ravel() + quality = len(hist[hist > min_count - 1]) / (nbins_azim * nbins_incl) + # quality = len(hist_az[hist_az > min_count - 1]) / nbins_azim + # quality = len(hist_incl[hist_incl > min_count - 1]) / nbins_incl + vgrid_angles_quality[inds] = quality + vgrid_azim_hist[inds] = hist_az + + # LATS = [] + # LONS = [] + qualities = [] + hists = [] + + for ilat, lat in enumerate(lats): + for ilon, lon in enumerate(lons): + # LATS.append(lat) + # LONS.append(lon) + key = (ilat, ilon) + quality = vgrid_angles_quality.get(key) + hist = vgrid_azim_hist.get(key) + # print(key, quality) + if not quality: + quality = 0. + # hist for plotting only! + if hist is None: + hist = np.zeros(nbins) + qualities.append(quality) + hists.append(hist) + lons_export.append(lon) + lats_export.append(lat) + rads_export.append(rad) + + resolutions += qualities + + if plot: + # TODO: still old basemap code + raise NotImplementedError('Still using old basemap code') + fig = plt.figure() + ax = fig.add_subplot(111) + bmap = make_map(ax, resolution='l') + + LONS, LATS = np.meshgrid(lons, lats) + + sc = bmap.pcolormesh(LONS - dlon / 2., LATS - dlat / 2., + np.array(qualities).reshape(LATS.shape), latlon=True, zorder=1.5, + shading='nearest') # , s=np.array(qualities)*100) + + # sc = plt.contourf(LONS, LATS, np.array(qualities).reshape(LATS.shape), levels=21) + + if draw_hists: + for index, bin in enumerate(bins[:-1]): + marker = angle_marker(bin, bin + binsize) + s = np.array(hists)[:, index] + if draw_simple_hists: + s[s < min_count] = 0 + s[s >= min_count] = 150 + else: + s *= 10. + sc_angle = bmap.scatter(LONS, LATS, s=s, marker=marker, edgecolors='0.75', alpha=1., linewidths=0.6, + latlon=True, zorder=1.5, ) + sc_angle.set_facecolor('none') + # sc_angle = plt.scatter(LONS, LATS, s=1, marker='.', edgecolors='1.', alpha=1., linewidths=1.) 
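+            # editor's note: each angular sector marker above stands for one back-azimuth bin;
+            # with draw_simple_hists the marker is either full size (>= min_count rays in the
+            # bin) or suppressed, giving a quick binary view of the directional coverage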
+ + bmap.scatter(lonsStations, latsStations, c='k', s=0.5, latlon=True, zorder=1.5, ) # , alpha=0.5) + plt.title('Azimuthal coverage at depth of {}km, {} bins'.format((R - rad), nbins_azim * nbins_incl)) + # plt.xlim([0, 22]) + # plt.ylim([40, 53]) + # plt.gca().set_aspect('equal') + cbar = plt.colorbar(sc) + cbar.set_label('Azimuthal coverage') + plt.show() + + if write: + rginter_res = RegularGridInterpolator((rads, lats, lons), + np.array(resolutions).reshape((len(rads), len(lats), len(lons))), + bounds_error=False, fill_value=0.) + grid = read_vgrid(fnin_vgrid)[0] + grid['res'] = [] + for lon, lat, depth in zip(grid['lons'], grid['lats'], grid['depths']): + grid['res'].append(rginter_res((R - depth, lat, lon))) + a = np.array([lons_export, lats_export, rads_export, resolutions]) + np.save(fnout_npy.format(nbins_azim, nbins_incl, min_count, step), a) + write_vtk(grid, fnout_vtk, + write_data=['vps', 'res', 'covs', 'frechs']) # , clon=11., clat=46., dlon=12., dlat=6., sort=True) + + +def rays_to_npy(infile, fnout_npy, n_points=10): + rays = {} + i = 0 + src_id_old = 1 + while True: + i += 1 + l1 = infile.readline() + if l1 == '': break + rec_id, src_id, ray_id = [int(item) for item in l1.split()[:3]] + if not src_id in rays.keys(): + rays[src_id] = [] + l2 = infile.readline() + n = int(l2.split()[0]) + ray = np.zeros((n, 3)) + for index in range(n): + r, lat, lon = [float(item) for item in infile.readline().split()] + ray[index] = np.array([r, np.rad2deg(lat), np.rad2deg(lon)]) + rays[src_id].append(ray[::n_points]) + + dirname = os.path.split(fnout_npy)[0] + if not os.path.isdir(dirname): + os.mkdir(dirname) + + for src_id, ray in rays.items(): + np.savez(fnout_npy.format(src_id), *ray) + + +if __name__ == '__main__': + fmtomodir = '/data/AdriaArray_Data/fmtomo_adriaarray/alpadege/crust_incl_TESAURO_sm30_dm1/' + it = 12 + + fpath_events = pjoin(fmtomodir, f'rays_npy_it{it}/') + infile = open(pjoin(fmtomodir, f'it_{it}/rays.dat'), 'r') + fnout_npy = pjoin(fpath_events, f'it_{it}_rays_event_' + '{}.npz') + + rays_to_npy(infile, fnout_npy) + + main(fmtomodir, it=it) \ No newline at end of file diff --git a/pylot/tomography/fmtomo_tools/residual_histograms.py b/pylot/tomography/fmtomo_tools/residual_histograms.py new file mode 100644 index 00000000..14349558 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/residual_histograms.py @@ -0,0 +1,43 @@ +import os +import numpy as np +import matplotlib.pyplot as plt + +fpaths = ['/data/AlpArray_Data/fmtomo/v6/fwi_model_wolfgang_test', + '/data/AlpArray_Data/fmtomo/v6/final'] +arr_path_ref = 'rtimes_tele.dat' + +iterations = [0, 1, 2, 3, 4] +labels = ['rel_1d'] + [f'rel_it{it}' for it in iterations] + +colors = plt.get_cmap('viridis')(np.linspace(0,1, len(iterations))) +colors = np.vstack(((.5, .5, .5, 1), colors)) + +axes = [] +for fpath in fpaths: + rtimes = np.genfromtxt(os.path.join(fpath, arr_path_ref))[:, 4] + otimes = np.genfromtxt(os.path.join(fpath, 'otimes.dat'), skip_header=1)[:, 4] + + res = np.empty((len(rtimes), len(iterations) + 1)) + + res[:, 0] = otimes # = otimes + rtimes - rtimes + + for index, itr in enumerate(iterations): + arr_path = f'it_{itr}/arrivals.dat' + + arrivals = np.genfromtxt(os.path.join(fpath, arr_path))[:, 4] + res[:, index + 1] = (otimes + rtimes) - arrivals # otimes (rel) + rtimes => otimes (abs) + + fig = plt.figure() + ax = fig.add_subplot(111) + axes.append(ax) + ax.hist(res, bins=np.linspace(-5, 5, 50), rwidth=0.9, color=colors, label=labels) + ax.set_xlim(-1.5, 1.5) + plt.title(fpath) + + ax.legend() + 
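+# editor's note: scale all histogram panels to a common y-axis so the runs in fpaths
+# can be compared directly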
+ylim_max = max([ax.get_ylim()[1] for ax in axes])
+for ax in axes:
+    ax.set_ylim(0, ylim_max)
+
+plt.show()
diff --git a/pylot/tomography/fmtomo_tools/station_density_kde.py b/pylot/tomography/fmtomo_tools/station_density_kde.py
new file mode 100644
index 00000000..f7154531
--- /dev/null
+++ b/pylot/tomography/fmtomo_tools/station_density_kde.py
@@ -0,0 +1,187 @@
+# This script estimates the density of the station distribution using a Gaussian KDE. For each station a Gaussian
+# kernel with sigma ~ grid size (?) is summed in 2D, yielding an estimated density function. If the sum of the
+# density evaluated at the data points increases, there are more regions of high station density.
+
+import os
+import copy
+
+import matplotlib.pyplot as plt
+import numpy as np
+from scipy.stats import gaussian_kde
+
+from pylot.tomography.fmtomo_tools.fmtomo_teleseismic_utils import organize_receivers, organize_event_names, export_otimes
+
+def get_events_dict(otimes, recs):
+    events_dict = {}
+    for otime in otimes:
+        rec_id, src_id = int(otime[0]), int(otime[1])
+        if src_id not in events_dict:
+            events_dict[src_id] = dict(lons=[], lats=[], rec_ids=[])
+        events_dict[src_id]['lons'].append(recs[rec_id]['lon'])
+        events_dict[src_id]['lats'].append(recs[rec_id]['lat'])
+        events_dict[src_id]['rec_ids'].append(rec_id)
+    return events_dict
+
+
+def transfer_kde_to_weight(kdes):
+    weights = 0.1 - kdes
+    indices_negative = np.where(weights < 0)[0]
+    if indices_negative.size:
+        print('Warning! {} negative indices in weights. Set to 0.'.format(len(indices_negative)))
+        weights[indices_negative] = 0.
+    return weights
+
+
+def calc_event_kernels(events_dict, eventnames, kde_factor, plot_single=False, fdir_out=None):
+    kernels = {}
+    evaluated_kernels = {}
+
+    if plot_single:
+        fig = plt.figure(figsize=(16, 9))
+
+    for index in range(len(eventnames)):
+        eventname = eventnames[index]
+        source = index + 1
+        lons = events_dict[source]['lons']
+        lats = events_dict[source]['lats']
+        kernel = gaussian_kde(np.array([lons, lats]), bw_method=kde_factor)
+        evaluated_kernel = kernel((lons, lats))
+        kernels[source] = kernel
+        evaluated_kernels[source] = evaluated_kernel
+        if plot_single:
+            assert fdir_out, 'Need to specify output directory for plots'
+            if not os.path.isdir(fdir_out):
+                os.mkdir(fdir_out)
+            weights = transfer_kde_to_weight(evaluated_kernel)
+            iterdict = {'kde': evaluated_kernel, 'weight': weights}
+            for name, colorarray in iterdict.items():
+                if name == 'kde':
+                    vmin = 0
+                    vmax = 0.05
+                else:
+                    vmin = None
+                    vmax = None
+                scs = plt.scatter(lons, lats, marker='o', c=colorarray, lw=1.0, vmin=vmin, vmax=vmax)
+                cbar = plt.colorbar(scs)
+                plt.title('Station {} for event {}'.format(name, eventname))
+                fig.savefig(os.path.join(fdir_out, '{}_{}.svg'.format(eventname, name)))
+                fig.clear()
+                #plt.show()
+    if plot_single:
+        plt.close('all')
+    return kernels, evaluated_kernels
+
+
+def plot_hist_kernels(evaluated_kernels):
+    all_kernels = np.array([])
+    for kernel in evaluated_kernels.values():
+        all_kernels = np.append(all_kernels, kernel)
+    plt.hist(all_kernels, bins=200, label='kernels')
+    plt.title('Distribution of all kernels.')
+    plt.show()
+
+
+def plot_hist_uncertainties(otimes_new, otimes_orig, bins=200):
+    plt.hist(otimes_orig[:, -1], lw=1, bins=bins, label='Unmodified uncertainties', fc=(.5, .5, 0., 0.5))
+    plt.hist(otimes_new[:, -1], lw=1, bins=bins, label='New uncertainties', fc=(0., .5, .5, 0.5))
+    plt.legend()
+    plt.xlabel('Uncertainty [s]')
+    plt.title('Uncertainty distribution before and after correction.')
+    plt.show()
+
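+
+# Minimal usage sketch (editor's addition with synthetic coordinates, not part of the
+# original workflow): how the Gaussian KDE used in calc_event_kernels() above is built
+# and evaluated. Clustered stations yield larger density values, which modify_otimes()
+# below turns into inflated pick uncertainties.
+def _kde_usage_sketch():
+    lons = np.array([10.0, 10.1, 10.2, 14.0])
+    lats = np.array([45.0, 45.1, 45.0, 47.0])
+    kernel = gaussian_kde(np.array([lons, lats]), bw_method=0.3)
+    # evaluate the density at the station locations themselves; the three clustered
+    # stations receive clearly higher values than the isolated one
+    return kernel((lons, lats))
+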
+ +def plot_uncertainties(otimes, recs, eventnames, fdir_out, vmin=0.1, vmax=0.4): + if not os.path.isdir(fdir_out): + os.mkdir(fdir_out) + + print('Writing output to directory:', fdir_out) + epsilon = 1e-6 + + fig = plt.figure(figsize=(16, 9)) + for index, eventname in enumerate(eventnames): + src_id = index + 1 + indices = np.where(abs(otimes[:, 1] - src_id) <= epsilon) + rec_ids = otimes[:, 0][indices] + uncs = otimes[:, -1][indices] + lats = [recs[rec_id]['lat'] for rec_id in rec_ids] + lons = [recs[rec_id]['lon'] for rec_id in rec_ids] + sc = plt.scatter(lons, lats, c=uncs, vmin=vmin, vmax=vmax) + plt.title(eventname) + cb = plt.colorbar(sc) + fig.savefig(os.path.join(fdir_out, '{}.svg'.format(eventname))) + fig.clear() + plt.close('all') + + +def exp_func(uncertainty, eval_kernel, exponent=30): + return uncertainty * np.exp(exponent * eval_kernel) + + +def modify_otimes(otimes, recs, kernels): + print('Applying Kernel on otimes and modifying uncertainties...') + for otime in otimes: + rec_id, src_id = int(otime[0]), int(otime[1]) + lon = recs[rec_id]['lon'] + lat = recs[rec_id]['lat'] + eval_kernel = kernels[src_id]((lon, lat)) + otime[-1] = exp_func(otime[-1], eval_kernel) + return otimes + + +def plot_average_kernel(evaluated_kernels, eventnames): + ids_names = [(index + 1, eventname) for index, eventname in enumerate(eventnames)] + sorted_events = sorted(ids_names, key=lambda x: x[1]) + kernel_sums = {src_id: np.sum(kernel) for src_id, kernel in evaluated_kernels.items()} + src_ids = [item[0] for item in sorted_events] + eventnames = [item[1] for item in sorted_events] + sums = [kernel_sums[src_id] for src_id in src_ids] + plt.plot(sums) + xticks = np.arange(0, len(eventnames), step=len(eventnames)//10) + xticklabels = [eventname[:8] for index, eventname in enumerate(eventnames) if index in xticks] + plt.xticks(xticks, xticklabels) + plt.title('Average kernel per station. High value means inhomogeneous station distribution.') + plt.show() + + +def export_station_kdes(events_dict, evaluated_kernels, fnout): + with open(fnout, 'w') as outfile: + for src_id in range(1, len(events_dict) + 1): + kernel = evaluated_kernels[src_id] + for kde, rec_id in zip(kernel, events_dict[src_id]['rec_ids']): + outfile.write('{} {} {}\n'.format(rec_id, src_id, kde)) + + +def main(plot=False, plot_detailed=False): + working_path = '/rscratch/minos13/marcel/fmtomo_alparray/v4/alparray_mantle_diehl_crust_included_hf_gradient_smoothing' + fdir_out = 'station_density' + fnout = os.path.join(fdir_out, 'station_kdes.txt') + + os.chdir(working_path) + eventnames = organize_event_names('input_source_file_P.in') + recs = organize_receivers('receivers.in') + otimes_data = np.genfromtxt('otimes_orig.dat', skip_header=1) + + # kernel width ~ grid size? or station spacing? + kde_size = 30 # km + kde_factor = kde_size / (6371. * np.pi / 180.) 
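+    # 6371 km * pi / 180 ~ 111.19 km per degree, so kde_factor expresses the 30 km
+    # kernel width in degrees (~0.27 deg)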
+ print('KDE width (degree):', kde_factor) + + events_dict = get_events_dict(otimes_data, recs) + kernels, evaluated_kernels = calc_event_kernels(events_dict, eventnames, kde_factor, plot_single=plot_detailed, + fdir_out=fdir_out) + + export_station_kdes(events_dict, evaluated_kernels, fnout) + + otimes_modif = modify_otimes(copy.deepcopy(otimes_data), recs, kernels) + #export_otimes(otimes_modif, os.path.join(working_path, 'otimes_modif_kernel.dat')) + if plot: + plot_average_kernel(evaluated_kernels, eventnames) + plot_hist_kernels(evaluated_kernels) + plot_hist_uncertainties(otimes_modif, otimes_data) + if plot_detailed: + plot_uncertainties(otimes_data, recs, eventnames, 'uncerts_old') + plot_uncertainties(otimes_modif, recs, eventnames, 'uncerts_new') + + +if __name__ == '__main__': + main(plot=True, plot_detailed=True) diff --git a/pylot/tomography/fmtomo_tools/submit_fmtomo_run.py b/pylot/tomography/fmtomo_tools/submit_fmtomo_run.py new file mode 100644 index 00000000..5302cef5 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/submit_fmtomo_run.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +from pylot.tomography.fmtomo_utils import Tomo3d + +#tomo = Tomo3d('/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_from_m6.0_tesauro_model_on_top', +# '/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_from_m6.0_tesauro_model_on_top', +# buildObs=False, saveRays=False) + +wdir = '/rscratch/minos13/marcel/fmtomo_alparray/' + +#directories = ['alparray_mantle_from_m6.0_diehl_crustal_corrections_sm1_damp3', + #'alparray_mantle_from_m6.0_diehl_crustal_corrections_sm1_damp30',] +directories= ['alparray_mantle_from_m6.0_diehl_crustal_corrections_sm10_damp3', + 'alparray_mantle_from_m6.0_diehl_crustal_corrections_sm10_damp30'] + +for dire in directories: + path = os.path.join(wdir, dire) + tomo = Tomo3d(path, path, buildObs=False, saveRays=False) + tomo.runTOMO3D(40, 8) diff --git a/pylot/tomography/fmtomo_tools/tradeoff_misfit_norm.py b/pylot/tomography/fmtomo_tools/tradeoff_misfit_norm.py new file mode 100644 index 00000000..99177226 --- /dev/null +++ b/pylot/tomography/fmtomo_tools/tradeoff_misfit_norm.py @@ -0,0 +1,266 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +import os +import glob +import subprocess + +import json + +import numpy as np +import numpy.polynomial.polynomial as poly + +from scipy.sparse import spdiags +from scipy.optimize import curve_fit + +import matplotlib.pyplot as plt + +from itertools import cycle + +from pylot.tomography.fmtomo_tools.fmtomo_grid_utils import read_vgrid + +# def calc_dampnorm(vgrid, vgrid_ref): +# # calculate m - m0 +# m_m0 = np.array(vgrid['vps']) - np.array(vgrid_ref['vps']) +# +# # calculate inverse of diagonal elements of a priori model covariance matrix (which should be a diagonal matrix) +# # IMPORTANT: COVARIANCES ARE MOST LIKELY STANDARD DEVIATIONS -> square them +# covs = np.array(vgrid_ref['covs'])**2 +# +# covs_inv = 1. 
/ covs +# +# #cm_inv = spdiags(covs_inv, 0, covs_inv.size, covs_inv.size)# +# +# #norm_calc_old = np.dot(m_m0.transpose(), m_m0 * cm_inv) +# +# norm = np.dot(m_m0, m_m0 * covs_inv) +# +# return norm +# +# +# def calc_smoothnorm(vgrid, gridn, R=6371.): +# nR, nTheta, nPhi = gridn +# +# vps = np.array(vgrid['vps']) +# lats = np.array(vgrid['lats']) +# lons = np.array(vgrid['lons']) +# depths = np.array(vgrid['depths']) +# #vgridref = np.array(vgridref['vps']) +# #dvgrid = vgrid - vgridref +# +# vgarray = np.zeros((nR, nTheta, nPhi)) +# lonsarray_km = np.zeros((nR, nTheta, nPhi)) +# latsarray_km = np.zeros((nR, nTheta, nPhi)) +# radsarray_km = np.zeros((nR, nTheta, nPhi)) +# #vgarray_diff = np.zeros((nR, nTheta, nPhi)) +# smootharray = np.zeros((nR, nTheta, nPhi)) +# #for iLayer in range(nlayers): +# globInd = 0 +# for iR in range(nR): +# for iTheta in range(nTheta): +# for iPhi in range(nPhi): +# r = R - depths[globInd] +# lat = lats[globInd] +# lon = lons[globInd] +# r_minor = np.cos(np.deg2rad(lat)) * r +# vgarray[iR, iTheta, iPhi] = vps[globInd] +# radsarray_km[iR, iTheta, iPhi] = r +# latsarray_km[iR, iTheta, iPhi] = np.pi * r * lat / 180. +# lonsarray_km[iR, iTheta, iPhi] = np.pi * r_minor * lon / 180. +# #vgarray_diff[iR, iTheta, iPhi] = vgrid[globInd] +# globInd += 1 +# +# # iterate over grid diffs (correct?) and sum 1 * point left -2 * point + 1 * point right in all 3 dim. +# smsum = 0. +# for iR in range(nR): +# for iTheta in range(nTheta): +# for iPhi in range(nPhi): +# vg = vgarray[iR, iTheta, iPhi] +# sum1 = sum2 = sum3 = 0. +# if 0 < iPhi < nPhi - 1: +# h = abs(lonsarray_km[iR, iTheta, iPhi + 1] - lonsarray_km[iR, iTheta, iPhi - 1]) / 2 +# sum1 = (vgarray[iR, iTheta, iPhi - 1] - 2 * vg + vgarray[iR, iTheta, iPhi + 1]) / h**2 +# if 0 < iTheta < nTheta - 1: +# h = abs(latsarray_km[iR, iTheta + 1, iPhi] - latsarray_km[iR, iTheta - 1, iPhi]) / 2 +# sum2 = (vgarray[iR, iTheta - 1, iPhi] - 2 * vg + vgarray[iR, iTheta + 1, iPhi]) / h**2 +# if 0 < iR < nR - 1: +# h = abs(radsarray_km[iR - 1, iTheta, iPhi] - radsarray_km[iR + 1, iTheta, iPhi]) / 2 +# sum3 = (vgarray[iR - 1, iTheta, iPhi] - 2 * vg + vgarray[iR + 1, iTheta, iPhi]) / h**2 +# smsum += np.sqrt(sum1**2 + sum2**2 + sum3**2) +# #print(sum1, sum2, sum3, smsum) +# smootharray[iR, iTheta, iPhi] = smsum#sum1 + sum2 + sum3 +# +# # m_T * D_T * D * m ?? 
todo: unsure +# norm = np.sum(smootharray ** 2) +# +# return norm, smootharray +from pylot.tomography.utils import normed_figure + + +def calc_smoothnorm(wdir, iter): + smv = np.loadtxt(os.path.join(wdir, 'it_{}/smv.out'.format(iter + 1)), skiprows=1) + dm = np.loadtxt(os.path.join(wdir, 'it_{}/dm.out'.format(iter + 1)), skiprows=1) + norm = np.sum(smv*dm) + return norm + + +def calc_dampnorm(wdir, iter): + ecmi = np.loadtxt(os.path.join(wdir, 'it_{}/ecmi.out'.format(iter + 1)), skiprows=1) + dm = np.loadtxt(os.path.join(wdir, 'it_{}/dm.out'.format(iter + 1)), skiprows=1) + norm = np.sum(ecmi * dm**2) + return norm + + +def calc_norm(wdir, iteration_number): + dampnorm = calc_dampnorm(wdir, iteration_number) + smoothnorm = calc_smoothnorm(wdir, iteration_number) + + print('dampnorm: ', dampnorm) + print('smoothnorm: ', smoothnorm) + + norm = dampnorm + smoothnorm + + print('Calculated summed norm of', norm) + + return norm, dampnorm, smoothnorm + + +def calc_tradeoff(fpath_in, fname_out=None, iteration_number = 12): + results = {} + + for wdir in glob.glob(fpath_in): + #wdir = '/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_from_m6.0_diehl_crustal_corrections_sm1000_damp100/' + smooth = float(wdir.split('_')[-2].split('sm')[-1]) + damp = float(wdir.split('_damp')[-1].split('/')[0]) + + print('Calculating tradeoff for smoothing and damping of {}, {}'.format(smooth, damp)) + if not smooth in results.keys(): + results[smooth] = {} + + iteration_number_new = iteration_number + ecmi_path = os.path.join(wdir, 'it_{}'.format(iteration_number_new + 1), 'ecmi.out') + smv_path = os.path.join(wdir, 'it_{}'.format(iteration_number_new + 1), 'smv.out') + while not os.path.isfile(ecmi_path) or not os.path.isfile(smv_path): + iteration_number_new -= 1 + ecmi_path = os.path.join(wdir, 'it_{}'.format(iteration_number_new + 1), 'ecmi.out') + smv_path = os.path.join(wdir, 'it_{}'.format(iteration_number_new + 1), 'smv.out') + print('WARNING: Iteration number lowered by 1:', iteration_number_new) + if iteration_number_new <= 1: + break + + if iteration_number_new <= 1: + continue + else: + iteration_number = iteration_number_new + + #vgrid, gridn, griddelta, gridstart = read_vgrid(vgrid_path) + #vgrid_ref = read_vgrid(os.path.join(wdir, 'vgridsref.in'))[0] + + norm, dampnorm, smoothnorm = calc_norm(wdir, iteration_number) + + try: + fpath = os.path.join(wdir, 'residuals.dat') + chi = float(subprocess.check_output(['tail', fpath]).split()[-1]) + except Exception as e: + print(e) + chi = np.nan + + results[smooth][wdir] = {'dampnorm': dampnorm, 'smoothnorm': smoothnorm, + 'norm': norm, 'chi': chi, 'damp': damp} + + #print some output + for smooth, result in results.items(): + print('Smoothing:', smooth) + for wdir, item in result.items(): + print(item['chi'], item['norm']) + print(20*'#') + + if fname_out: + with open(fname_out, 'w') as outfile: + json.dump(results, outfile) + return results + +def quadratic_function(x, a, b, c): + return a * x ** 2 + b * x + c + +def one_over_x(x, a, b, c): + return a / (x - b) + c + +def exp_func(x, a, b, c): + return a * np.exp(-b * x) + c + +def plot_tradeoff(fname_in, fix='smooth', plot_norm='both', min_smooth=0, min_damp=0, max_smooth=1e6, max_damp=1e6): + with open(fname_in, 'r') as infile: + results_smooth = json.load(infile) + + lines = ["-", "--", "-.", ":"] + linecycler = cycle(lines) + + # array will be built for each line: (smooth, damp, norm, chi) + plot_values = [] + for smooth, result in results_smooth.items(): + for item in result.values(): + smooth = 
float(smooth) + damping = item['damp'] + if smooth < min_smooth or damping < min_damp or smooth > max_smooth or damping > max_damp: + continue + plot_values.append(np.array([smooth, damping, item[plot_norm], item['chi']])) + + plot_values = np.array(plot_values) + + column_index = {'smooth': 0, 'damp': 1} + + keys = np.unique(plot_values[:, column_index[fix]]) + names = {'smooth': 'Smoothing', 'damp': 'Damping'} + + for key in keys: + plot_line = plot_values[plot_values[:, column_index[fix]] == key] + second_index = column_index['smooth'] if fix == 'damp' else column_index['damp'] + plot_line = np.array(sorted(plot_line, key=lambda x: x[second_index])) + norms = plot_line[:, 2] + chis = plot_line[:, 3] + #text = [str(item) for item in plot_line[:, second_index]] + + x = np.linspace(min(norms), max(norms), num=100) + + #popt, pcov = curve_fit(one_over_x, norms, chis, method='trf')#, bounds=[min(norms), max(norms)]) + #fit_result = one_over_x(x, *popt) + #line = plt.plot(x, fit_result, ':', lw=0.8)[0] + fninfo = os.path.split(fname_in)[-1].replace('.json', '').split('_f')[-1] + label = '{}: {:g}'.format(names[fix], float(key)) + + line = plt.plot(norms, chis, linestyle=next(linecycler), lw=0.8, label=label)[0] + #coefs = poly.polyfit(norms, chis, 4) + #ffit = poly.polyval(x, coefs) + #line = plt.plot(x, ffit, ':', lw=0.8)[0] + + #label = label='{}: {:g} (smgrad: {})'.format(names[fix], float(key), fninfo) + plt.plot(norms, chis, c=line.get_color(), marker='.', lw=0.) + #plt.text(norms, chis, text) + for item in plot_line: + plt.text(item[2], item[3], str(item[second_index]), horizontalalignment='left') + #plt.title('Plot of Misfit against Norm ({})'.format(plot_norm)) + + +if __name__ == '__main__': + #calc_tradeoff('/data/AlpArray_Data/fmtomo/v5/tradeoff_curves/crust_included_grad_smooth_FIXED_dts_grad_1.5_sm*_damp*/', + # '/data/AlpArray_Data/various/alparray/tradeoff_v5_f1.5.json') + #calc_tradeoff('/data/AlpArray_Data/fmtomo/v5/tradeoff_curves/crust_included_grad_smooth_FIXED_dts_sm*_damp*/', + # '/data/AlpArray_Data/various/alparray/tradeoff_v5_f2.0.json') + + fig = normed_figure(width_cm=10, ratio=1.) + #tradeoff_infiles = ['tradeoff_v4_f1.5.json', 'tradeoff_v4_f3.json', 'tradeoff_v4_f10.json'] + tradeoff_infiles = ['tradeoff_v5_f2.0.json']#, 'tradeoff_v5_f1.5.json'] + for infile in tradeoff_infiles: + infile = os.path.join('/data/AlpArray_Data/various/alparray/', infile) + plot_tradeoff(infile, fix='damp', plot_norm='norm') + + plt.xlim([1900, 16200]) + plt.ylim([2.72, 3.8]) + plt.xlabel('Norm') + #plt.ylabel(r'Misfit($\frac{\chi^2}{N}$)') + plt.ylabel(r'Misfit($\chi^2/N$)') + #plt.title('Tradeoff curve Misfit vs Norm. 
Numbers in plot show smoothing values.')
+    plt.legend()
+    #plt.show()
+    plt.savefig('/data/AlpArray_Data/sciebo/AlpArray_home/pictures/paper_II/tradeoff.pdf', dpi=300)
+
diff --git a/pylot/tomography/fmtomo_tools/visualize_frechet_on_vgrid.py b/pylot/tomography/fmtomo_tools/visualize_frechet_on_vgrid.py
new file mode 100755
index 00000000..6c57fc0d
--- /dev/null
+++ b/pylot/tomography/fmtomo_tools/visualize_frechet_on_vgrid.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import numpy as np
+import json
+
+from pylot.tomography.fmtomo_tools.fmtomo_grid_utils import read_vgrid, write_vtk, calculate_differences_grid, write_vgrid
+
+def visualize_frechet_derivative(fname_vgrid, fname_frechet, fname_out_vtk=None, fname_out_json=None,
+                                 fname_out_vgrid=None, diff_model=None, fname_vgrid_ref=None):
+    vgrid, gridN, gridDelta, gridStart = read_vgrid(fname_vgrid, inv_index_frechet=True)
+    if diff_model and fname_vgrid_ref:
+        raise ValueError('Cannot have both parameters set, diff_model and fname_vgrid_ref')
+    if diff_model:
+        vgrid = calculate_differences_grid(vgrid, earth_model=diff_model)
+    if fname_vgrid_ref:
+        vgrid_ref, gridN_ref, gridDelta_ref, gridStart_ref = read_vgrid(fname_vgrid_ref, inv_index_frechet=False)
+        grid_check = (gridN == gridN_ref,
+                      compare_tuple(gridDelta, gridDelta_ref),
+                      compare_tuple(gridStart, gridStart_ref))
+        assert all(grid_check), 'Mismatch in reference grid size'
+        vps = np.array(vgrid['vps'])
+        vps_ref = np.array(vgrid_ref['vps'])
+        vps_rel = (vps - vps_ref) / vps_ref * 100.
+        print('Min/Max change {}/{}%'.format(min(vps_rel), max(vps_rel)))
+        vgrid['vps'] = list(vps_rel)
+
+    add_frechet(vgrid, fname_frechet)
+
+    if fname_out_vgrid:
+        write_vgrid(vgrid, gridN, gridDelta, gridStart, fname_out_vgrid)
+    if fname_out_vtk:
+        write_vtk(vgrid, fname_out_vtk, ['vps', 'frechs', 'grid_indices', 'hit_count'])
+    if fname_out_json:
+        with open(fname_out_json, 'w') as outfile:
+            json.dump(vgrid, outfile)
+
+
+def add_frechet(vgrid, fname_frechet):
+    vgrid['frechs'] = list(np.zeros(len(vgrid['xs'])))
+    vgrid['hit_count'] = list(np.zeros(len(vgrid['xs'])))
+    with open(fname_frechet, 'r') as infile:
+        while True:
+            try:
+                n, source_id, m, k, n_pdev = [int(item) for item in infile.readline().split()]
+            except ValueError:
+                # end of file reached (or malformed header line)
+                break
+            #print(n, source_id, m, k, n_pdev)
+            for _ in range(n_pdev):
+                pdev_index, pdev = infile.readline().split()
+                pdev_index = int(pdev_index)
+                pdev = float(pdev)
+                vgrid_index = vgrid['inv_index'][pdev_index]
+                vgrid['frechs'][vgrid_index] += pdev
+                # hit by ray count
+                vgrid['hit_count'][vgrid_index] += 1
+
+
+def compare_tuple(t1, t2, epsilon=1e-6):
+    for item1, item2 in zip(t1, t2):
+        if abs(item1 - item2) > epsilon:
+            return False
+    return True
diff --git a/pylot/tomography/fmtomo_utils.py b/pylot/tomography/fmtomo_utils.py
new file mode 100644
index 00000000..fb3aa6c9
--- /dev/null
+++ b/pylot/tomography/fmtomo_utils.py
@@ -0,0 +1,1323 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#----------------------------------------------------------------------------
+#   Copyright 2017 Marcel Paffrath (Ruhr-Universitaet Bochum, Germany)
+#
+#   This file is part of ActiveSeismoPick3D
+#----------------------------------------------------------------------------
+
+import datetime
+import numpy as np
+import os
+import shutil
+import subprocess
+import sys
+
+
+def pol2cart(lat, lon, r):
+    x = r * np.cos(np.deg2rad(lat)) * np.cos(np.deg2rad(lon))
+    y = r * np.cos(np.deg2rad(lat)) * np.sin(np.deg2rad(lon))
+    z = r * np.sin(np.deg2rad(lat))
+    return x, y, 
z + + +class Tomo3d(object): + def __init__(self, fmtomodir, simuldir='fmtomo_simulation', citer=0, overwrite=False, buildObs=True, + saveRays: bool | list = True): + ''' + Class build from FMTOMO script tomo3d. Can be used to run several instances of FMM code in parallel. + + :param: citer, current iteration (default = 0: start new model) + :type: integer + + :param: fmtomodir, directory containing a clean FMTOMO installation (v. 1.0) + :type: string (path) + + :param: simuldir, simulation directory (must contain FMTOMO input grid files) + :type: string (path) + ''' + self.simuldir = simuldir + self.setCWD() + self.buildFmtomodir(fmtomodir) + if buildObs: + self.buildObsdata() + self.saveRays=saveRays + self.defParas() + self.copyRef() + self.citer = citer # current iteration + self.sources = self.readSrcFile() + self.traces = self.readTraces() + self.directories = [] + self.overwrite = overwrite + + def defParas(self): + self.defFMMParas() + self.defInvParas() + + def buildFmtomodir(self, directory): + tomo_files = ['fm3d', + 'frechgen', + 'frechgen.in', + 'invert3d', + 'invert3d.in', + 'mode_set.in', + 'obsdata', + 'obsdata.in', + 'residuals', + 'residuals.in', + 'tomo3d', + 'tomo3d.in'] + + for name in tomo_files: + filename = os.path.join(directory, name) + linkname = os.path.join(self.cwd, name) + if not os.path.exists(linkname): + os.system('ln -s %s %s' % (filename, linkname)) + + def buildObsdata(self): + p = subprocess.Popen(os.path.join(self.cwd, 'obsdata'), shell=True) + p.wait() + os.system('mv sources.in sourcesref.in') + + def defFMMParas(self): + ''' + Initiates parameters for the forward calculation. + ''' + # Comments coppied from FMTOMO. + # Name of fast marching program + self.fmm = os.path.join(self.cwd, 'fm3d') + # Name of program calculating Frechet derivatives + self.frechgen = os.path.join(self.cwd, 'frechgen') + # Name of current velocity/inversion grid + self.cvg = 'vgrids.in' + # Name of current interfaces grid + self.cig = 'interfaces.in' + # Name of file containing current source locations + self.csl = 'sources.in' + # Name of file containing propagation grid + self.pg = 'propgrid.in' + # Name of file containing receiver coordinates + self.rec = 'receivers.in' + self.frech = 'frechet.in' + self.frechout = 'frechet.dat' + # Name of file containing measured data + self.ot = 'otimes.dat' + # Name of file containing output velocity information + self.ttim = 'arrivals.dat' + self.mode = 'mode_set.in' + # Name of temporary folders created for each process + self.folder = '.proc_' + + def defInvParas(self): + ''' + Initiates inversion parameters for FMTOMO. + ''' + # Name of program for performing inversion + self.inv = os.path.join(self.cwd, 'invert3d') + # Name of file containing current model traveltimes + self.mtrav = 'mtimes.dat' + # Name of file containing reference model traveltimes + self.rtrav = 'rtimes.dat' + # Name of file containing initial velocity grid + self.ivg = 'vgridsref.in' + # Name of file containing initial interface grid + self.iig = 'interfacesref.in' + # Name of file containing initial source locations + self.isl = 'sourcesref.in' + # Name of program for calculating traveltime residuals + self.resid = os.path.join(self.cwd, 'residuals') + # Name of output file for calculating traveltime residuals + self.resout = 'residuals.dat' + + def copyRef(self): + ''' + Copies reference grids to used grids (e.g. 
sourcesref.in to sources.in) + ''' + os.system('cp %s %s' % (self.ivg, self.cvg)) + os.system('cp %s %s' % (self.iig, self.cig)) + os.system('cp %s %s' % (self.isl, self.csl)) + + def setCWD(self, directory=None): + ''' + Set working directory containing all necessary files. + + Default: pwd + ''' + if directory == None: + directory = self.simuldir + + os.chdir(directory) + self.cwd = directory + print('Working directory is: %s' % self.cwd) + + def runFrech(self): + os.system(self.frechgen) + + def runTOMO3D(self, nproc, iterations): + ''' + Starts up the FMTOMO code for the set number of iterations on nproc parallel processes. + + :param: nproc, number of parallel processes + :type: integer + + :param: iterations, number of iterations + :type: integer + ''' + self.nproc = nproc + self.iter = iterations # number of iterations + + starttime = datetime.datetime.now() + print('Starting TOMO3D on %s parallel processes for %s iteration(s).' + % (self.nproc, self.iter)) + if self.citer == 0: + self.makeInvIterDir() + self.startForward(self.cInvIterDir) + if self.iter == 0: + print('Only 1 iteration requested. Exit after forward run.') + self.clean_up() + return + + self.raiseIter() + + while self.citer <= self.iter: + self.makeInvIterDir() + self.startInversion() + self.saveVgrid() + self.startForward(self.cInvIterDir) + self.raiseIter() + + if self.citer > self.iter: + self.clean_up() + + tdelta = datetime.datetime.now() - starttime + print('runTOMO3D: Finished %s iterations after %s.' % (self.iter, tdelta)) + print('runTOMO3D: See %s for output' % (self.cwd)) + + def runFmm(self, directory, logfile, processes): + ''' + Calls an instance of the FMM code in the process directory. + Requires a list of all active processes and returns an updated list. + ''' + os.chdir(directory) + fout = open(logfile, 'w') + processes.append(subprocess.Popen(self.fmm, stdout=fout)) + fout.close() + os.chdir(self.cwd) + return processes + + def clean_up(self): + self.removeDirectories() + self.unlink(os.path.join(self.cwd, self.frechout)) + self.unlink(os.path.join(self.cwd, self.ttim)) + + def startForward(self, logdir): + ''' + Runs an instance of the FMM code in the process directory. + ''' + self._printLine() + print('Starting forward simulation for iteration %s.' 
% (self.citer)) + + if self.citer == 0: + self.copyRef() + self.runFrech() + self.makeDirectories() + + starttime = datetime.datetime.now() + processes = [] + + for procID in range(1, self.nproc + 1): + directory = self.getProcDir(procID) + logfn = 'fm3dlog_' + str(procID) + '.out' + log_out = os.path.join(logdir, logfn) + + self.writeSrcFile(procID) + self.writeTracesFile(procID) + os.system('cp {cvg} {cig} {mode} {pg} {frechin} {dest}' + .format(cvg=self.cvg, cig=self.cig, frechin=self.frech, + mode=self.mode, pg=self.pg, dest=directory)) + # MP MP +++ small hack for teleseismic alparray configuration with injected teleseismic ttimes + iaf = os.path.join(self.simuldir, 'input_associations_file.in') + if os.path.isfile(iaf): + os.system('cp {iaf} {dest}'.format(iaf=iaf, dest=directory)) + picksdir = os.path.join(directory, 'picks') + os.system('ln -s {pdir_root} {pdir}'.format(pdir_root=os.path.join(self.simuldir, 'picks'), + pdir=picksdir)) + # MP MP --- + processes = self.runFmm(directory, log_out, processes) + + for p in processes: + p.wait() + + self.mergeOutput(self.cInvIterDir) + #self.clearDirectories() + self.copyArrivals() + if self.citer == 0: + self.copyArrivals(self.rtrav) + + # do not try to calculate residuals if we only do one forward run (e.g. for teleseismic reference times) + if self.iter != 0: + self.calcRes() + + tdelta = datetime.datetime.now() - starttime + print('Finished Forward calculation after %s' % tdelta) + + def startInversion(self): + ''' + Simply calls the inversion program. + ''' + print('Calling %s...' % self.inv) + os.system(self.inv) + + def calcRes(self): + ''' + Calls residual calculation program. + ''' + resout = os.path.join(self.cwd, self.resout) + if self.citer == 0: + os.system('%s > %s' % (self.resid, resout)) + else: + os.system('%s >> %s' % (self.resid, resout)) + + with open(resout, 'r') as infile: + residuals = infile.readlines() + RMS, var, chi2 = residuals[-1].split() + print('Residuals: RMS = %s, var = %s, Chi^2 = %s.' % (RMS, var, chi2)) + + def raiseIter(self): + self.citer += 1 + self._printLine() + invfile = open(self.cwd + '/inviter.in', 'w') + invfile.write('%s' % self.citer) + invfile.close() + + def makeDir(self, directory): + err = os.system('mkdir %s' % directory) + if err == 0: + self.directories.append(directory) + return + if err == 256: + if self.overwrite == True: + print('Overwriting existing files.') + self.clearDir(directory) + self.directories.append(directory) + return + raise RuntimeError('Could not create directory: %s' % directory) + + def makeDirectories(self): + ''' + Makes temporary directories for all processes. + ''' + for procID in range(1, self.nproc + 1): + directory = self.getProcDir(procID) + self.makeDir(directory) + + def makeInvIterDir(self): + ''' + Makes directories for each iteration step for the output. + ''' + invIterDir = self.cwd + '/it_%s' % (self.citer) + err = os.system('mkdir %s' % invIterDir) + if err == 256: + if self.overwrite: + self.clearDir(invIterDir) + elif err != 0: + raise RuntimeError('Could not create directory: %s' % invIterDir) + self.cInvIterDir = invIterDir + + def clearDir(self, directory): + ''' + Wipes a certain directory. + ''' + # print('Wiping directory %s...'%directory) + for filename in os.listdir(directory): + filenp = os.path.join(directory, filename) + os.remove(filenp) + + def clearDirectories(self): + ''' + Wipes all generated temporary directories. 
+ ''' + for directory in self.directories: + self.clearDir(directory) + + def rmDir(self, directory): + # print('Removing directory %s...'%directory) + return shutil.rmtree(directory) + + def removeDirectories(self): + ''' + Removes all generated temporary directories. + ''' + for directory in self.directories: + self.rmDir(directory) + self.directories = [] + + def getProcDir(self, procID): + ''' + Returns the temporary directory for a certain process + with procID = process number. + ''' + return os.path.join(self.cwd, self.folder) + str(procID) + + def getTraceIDs4Sources(self, sourceIDs): + ''' + Returns corresponding trace IDs for a set of given source IDs. + ''' + traceIDs = [] + for traceID in self.traces.keys(): + if self.traces[traceID]['source'] in sourceIDs: + traceIDs.append(traceID) + return traceIDs + + def getTraceIDs4Source(self, sourceID): + ''' + Returns corresponding trace IDs for a source ID. + ''' + traceIDs = [] + for traceID in self.traces.keys(): + if self.traces[traceID]['source'] == sourceID: + traceIDs.append(traceID) + return traceIDs + + def copyArrivals(self, target=None): + ''' + Copies the FMM output file (self.ttim) to a specific target file. + Default target is self.mtrav (model travel times). + ''' + if target == None: + target = os.path.join(self.cwd, self.mtrav) + os.system('cp %s %s' % (os.path.join( + self.cInvIterDir, self.ttim), target)) + + def saveVgrid(self): + ''' + Saves the current velocity grid for the current iteration step. + ''' + # small hack: add smoothing file and ecmi file (smv and ecmi in invert3d.f90) for Norm calculation + smvf = os.path.join(self.cwd, 'smv.out') + ecmif = os.path.join(self.cwd, 'ecmi.out') + dmf = os.path.join(self.cwd, 'dm.out') + if os.path.isfile(smvf): + os.system('cp %s %s' % (smvf, self.cInvIterDir)) + if os.path.isfile(ecmif): + os.system('cp %s %s' % (ecmif, self.cInvIterDir)) + if os.path.isfile(dmf): + os.system('cp %s %s' % (dmf, self.cInvIterDir)) + vgpath = os.path.join(self.cwd, self.cvg) + os.system('cp %s %s' % (vgpath, self.cInvIterDir)) + + def calcSrcPerKernel(self): + ''' + (Equally) distributes all sources depending on the number of processes (kernels). + Returns two integer values. + First: minimum number of sources for each process + Second: remaining sources (always less than number of processes) + ''' + nsrc = self.readNsrc() + if self.nproc > nsrc: + print('Warning: Number of spawned processes higher than number of sources') + return nsrc // self.nproc, nsrc % self.nproc + + def srcIDs4Kernel(self, procID): + ''' + Calculates and returns all source IDs for a given process ID. + ''' + proc = procID - 1 + nsrc = self.readNsrc() + srcPK, remain = self.calcSrcPerKernel() + if procID > self.nproc: + sys.exit('STOP: Kernel ID exceeds available number.') + if proc < remain: + start = (srcPK + 1) * (proc) + 1 + return range(start, start + srcPK + 1) + elif proc == remain: + start = (srcPK + 1) * (proc) + 1 + return range(start, start + srcPK) + elif proc > remain: + start = (srcPK + 1) * remain + srcPK * (proc - remain) + 1 + return range(start, start + srcPK) + + def readNsrc(self): + srcfile = open(self.csl, 'r') + nsrc = int(srcfile.readline()) + srcfile.close() + return nsrc + + def readNtraces(self): + ''' + Reads the total number of traces from self.rec header. + ''' + recfile = open(self.rec, 'r') + nrec = int(recfile.readline()) + recfile.close() + return nrec + + def readSrcFile(self): + ''' + Reads the whole sourcefile and returns structured information in a dictionary. 
+ ''' + nsrc = self.readNsrc() + srcfile = open(self.csl, 'r') + + sources = {} + + temp = srcfile.readline() + for index in range(nsrc): + teleflag = int(srcfile.readline()) + if teleflag == 1: + phase = srcfile.readline() + else: + phase = None + coords = srcfile.readline().split() + numpaths = int(srcfile.readline()) + steps = int(srcfile.readline()) + interactions = srcfile.readline().split() + veltype = int(srcfile.readline()) + #if teleflag is not 0: + # sys.exit('Script not yet usable for teleseismic.') + if numpaths != 1: + sys.exit('Script not yet usable for more than one path per source.') + + sources[index + 1] = {'teleflag': teleflag, + 'phase': phase, + 'coords': coords, + 'numpaths': numpaths, + 'steps': steps, + 'interactions': interactions, + 'veltype': veltype + } + + return sources + + def readTraces(self): + ''' + Reads the receiver input file and returns the information + in a structured dictionary. + ''' + recfile = open(self.rec, 'r') + ntraces = self.readNtraces() + + traces = {} + + temp = recfile.readline() + for index in range(ntraces): + coords = recfile.readline().split() + paths = int(recfile.readline()) + source = int(recfile.readline()) + path = int(recfile.readline()) + + traces[index + 1] = {'coords': coords, + 'paths': paths, + 'source': source, + 'path': path + } + + return traces + + def readArrivals(self, procID): + ''' + Reads the arrival times from a temporary process directory, + changes local to global sourceIDs and traceIDs and returns + a list of arrival times. + ''' + directory = self.getProcDir(procID) + arrfile = open(os.path.join(directory, self.ttim), 'r') + sourceIDs = self.srcIDs4Kernel(procID) + + arrivals = [] + for sourceID in sourceIDs: + traceIDs = self.getTraceIDs4Source(sourceID) + for traceID in traceIDs: + line = arrfile.readline().split() + if line != []: + # recID and srcID for the individual processor will not be needed + recID_proc, srcID_proc, ray, normal, arrtime, diff, head = line + arrivals.append([traceID, sourceID, ray, normal, arrtime, diff, head]) + + return arrivals + + def readRays(self, procID): + ''' + Reads rays output from a temporary process directory and returns + the information in a structured dictionary. + ''' + directory = self.getProcDir(procID) + raysfile = open(directory + '/rays.dat', 'r') + sourceIDs = self.srcIDs4Kernel(procID) + + rays = {} + for sourceID in sourceIDs: + traceIDs = self.getTraceIDs4Source(sourceID) + for traceID in traceIDs: + line1 = raysfile.readline().split() + if line1 != []: + # recID and srcID for the individual processor will not be needed + recID_proc, srcID_proc, ray, normal, nsec = line1 + raysecs = {} + + for sec in range(int(nsec)): + line2 = raysfile.readline().split() + npoints, region, diff, head = line2 + raypoints = [] + + for j in range(int(npoints)): + raypoints.append(raysfile.readline()) + + raysecs[sec] = {'npoints': npoints, + 'region': region, + 'diff': diff, + 'head': head, + 'raypoints': raypoints + } + + rays[traceID] = {'sourceID': sourceID, + 'raypath': ray, + 'normal': normal, + 'nsec': nsec, + 'raysections': raysecs + } + return rays + + def writeSrcFile(self, procID): + ''' + Writes a source input file for a process with ID = procID. 
+ ''' + directory = self.getProcDir(procID) + srcfile = open(os.path.join(directory, self.csl), 'w') + sourceIDs = self.srcIDs4Kernel(procID) + + srcfile.write('%s\n' % len(sourceIDs)) + for sourceID in sourceIDs: + source = self.sources[sourceID] + coords = source['coords'] + interactions = source['interactions'] + srcfile.write('%s\n' % source['teleflag']) + if source['teleflag'] == 1: + srcfile.write('%s\n' % source['phase']) + srcfile.write('%s %s %s\n' % (float(coords[0]), float(coords[1]), float(coords[2]))) + srcfile.write('%s\n' % source['numpaths']) + srcfile.write('%s\n' % source['steps']) + srcfile.write('%s %s\n' % (int(interactions[0]), int(interactions[1]))) + srcfile.write('%s\n' % source['veltype']) + + def writeTracesFile(self, procID): + ''' + Writes a receiver input file for a process with ID = procID. + ''' + directory = self.getProcDir(procID) + recfile = open('%s/receivers.in' % directory, 'w') + sourceIDs = self.srcIDs4Kernel(procID) + traceIDs = self.getTraceIDs4Sources(sourceIDs) + + recfile.write('%s\n' % len(traceIDs)) + for traceID in traceIDs: + trace = self.traces[traceID] + coords = trace['coords'] + source = int(trace['source']) - sourceIDs[0] + 1 + recfile.write('%s %s %s\n' % (float(coords[0]), float(coords[1]), float(coords[2]))) + recfile.write('%s\n' % trace['paths']) + recfile.write('%s\n' % source) + recfile.write('%s\n' % trace['path']) + + def mergeArrivals(self, directory): + ''' + Merges the arrival times for all processes to self.cInvIterDir. + ''' + arrfn = os.path.join(directory, self.ttim) + arrivalsOut = open(arrfn, 'w') + print('Merging %s...' % self.ttim) + for procID in range(1, self.nproc + 1): + arrivals = self.readArrivals(procID) + for line in arrivals: + arrivalsOut.write('%6s %6s %6s %6s %15s %5s %5s\n' % tuple(line)) + + os.system('ln -fs %s %s' % (arrfn, os.path.join(self.cwd, self.ttim))) + + def mergeRays(self, directory): + ''' + Merges the ray paths for all processes to self.cInvIterDir. + ''' + print('Merging rays.dat...') + with open(directory + '/rays.dat', 'w') as outfile: + for procID in range(1, self.nproc + 1): + rays = self.readRays(procID) + for traceID in rays: + ray = rays[traceID] + outfile.write('%6s %6s %6s %6s %6s\n' % (traceID, + ray['sourceID'], + ray['raypath'], + ray['normal'], + ray['nsec'])) + for sec in range(int(ray['nsec'])): + raysec = ray['raysections'][sec] + outfile.write('%6s %6s %6s %6s\n' % (raysec['npoints'], + raysec['region'], + raysec['diff'], + raysec['head'])) + outfile.writelines(raysec['raypoints']) + + def mergeFrechet(self, directory): + ''' + Merges the frechet derivatives for all processes to self.cInvIterDir. + ''' + frechfnout = os.path.join(directory, self.frechout) + print('Merging %s...' % self.frechout) + with open(frechfnout, 'w') as outfile: + for procID in range(1, self.nproc + 1): + filename = os.path.join(self.getProcDir(procID), self.frechout) + with open(filename) as infile: + for sourceID in self.srcIDs4Kernel(procID): + for traceID in self.getTraceIDs4Source(sourceID): + try: + recID_proc, srcID_proc, ray, normal, NPDEV = infile.readline().split() + except ValueError as e: + print('Value error: {}. 
Continue with next line'.format(e))
+                            continue
+                        outfile.write('%6s %6s %6s %6s %6s\n' % (traceID, sourceID, ray, normal, NPDEV))
+                        for index in range(int(NPDEV)):
+                            outfile.write(infile.readline())
+
+        os.system('ln -fs %s %s' % (frechfnout, os.path.join(self.cwd, self.frechout)))
+
+    def mergeOutput(self, directory):
+        '''
+        Calls self.mergeArrivals, self.mergeFrechet and self.mergeRays.
+        '''
+        self.mergeArrivals(directory)
+        self.mergeFrechet(directory)
+        if self.saveRays is True or (isinstance(self.saveRays, list) and self.citer in self.saveRays):
+            self.mergeRays(directory)
+
+    def unlink(self, filepath):
+        return os.system('unlink %s' % filepath)
+
+    def _printLine(self):
+        print('----------------------------------------')
+
+
+def vgrids2VTK(inputfile='vgrids.in', outputfile='vgrids.vtk', absOrRel='abs',
+               inputfileref='vgridsref.in', spherical=False):
+    '''
+    Generate a vtk file readable by e.g. ParaView from the FMTOMO output file vgrids.in.
+    '''
+    R = 6371.  # earth radius
+    outfile = open(outputfile, 'w')
+
+    number, delta, start, vel = _readVgrid(inputfile)
+
+    nR, nTheta, nPhi = number
+    dR, dTheta, dPhi = delta
+    sR, sTheta, sPhi = start
+
+    thetaGrid, phiGrid, rGrid = _generateGrids(number, delta, start)
+
+    nPoints = nR * nTheta * nPhi
+
+    # write VTK header
+    print("Writing header for VTK file...")
+    outfile.write('# vtk DataFile Version 3.1\n')
+    outfile.write('Velocity on FMTOMO vgrids.in points\n')
+    outfile.write('ASCII\n')
+
+    if not spherical:
+        nX = nPhi
+        nY = nTheta
+        nZ = nR
+
+        sZ = sR - R
+        sX = _getDistance(sPhi)
+        sY = _getDistance(sTheta)
+
+        dX = _getDistance(dPhi)
+        dY = _getDistance(dTheta)
+        dZ = dR
+
+        outfile.write('DATASET STRUCTURED_POINTS\n')
+        outfile.write('DIMENSIONS %d %d %d\n' % (nX, nY, nZ))
+        outfile.write('ORIGIN %f %f %f\n' % (sX, sY, sZ))
+        outfile.write('SPACING %f %f %f\n' % (dX, dY, dZ))
+    else:
+        outfile.write('DATASET STRUCTURED_GRID\n')
+        outfile.write('DIMENSIONS %15d %15d %15d\n' % (nPhi, nTheta, nR))
+        outfile.write('POINTS %15d float\n' % (nPoints))
+
+        # endpoints have to be s + d * (n - 1) so that np.linspace keeps the grid spacing d
+        radii = np.linspace(sR, sR + dR * (nR - 1), nR)
+        lats = np.linspace(sTheta, sTheta + dTheta * (nTheta - 1), nTheta)
+        lons = np.linspace(sPhi, sPhi + dPhi * (nPhi - 1), nPhi)
+        for rad in radii:
+            for lat in lats:
+                for lon in lons:
+                    x, y, z = pol2cart(lat, lon, rad)
+                    outfile.write('%10f %10f %10f \n' % (x, y, z))
+
+    outfile.write('POINT_DATA %15d\n' % (nPoints))
+    if absOrRel == 'abs':
+        outfile.write('SCALARS velocity float %d\n' % (1))
+    elif absOrRel == 'relDepth':
+        outfile.write('SCALARS velocity2depthMean float %d\n' % (1))
+    elif absOrRel == 'rel':
+        outfile.write('SCALARS velChangePercent float %d\n' % (1))
+    outfile.write('LOOKUP_TABLE default\n')
+
+    pointsPerR = nTheta * nPhi
+
+    # write velocity
+    if absOrRel == 'abs':
+        print("Writing velocity values to VTK file...")
+        for velocity in vel:
+            outfile.write('%10f\n' % velocity)
+    elif absOrRel == 'relDepth':
+        print("Writing velocity values to VTK file relative to the mean of each depth...")
+        count = 0
+        veldepth = []
+        for velocity in vel:
+            count += 1
+            veldepth.append(velocity)
+            if count % pointsPerR == 0:
+                velmean = np.mean(veldepth)
+                for veldp in veldepth:
+                    outfile.write('%10f\n' % float(veldp - velmean))
+                veldepth = []
+    elif absOrRel == 'rel':
+        nref, dref, sref, velref = _readVgrid(inputfileref)
+        nR_ref, nTheta_ref, nPhi_ref = nref
+        if len(velref) != len(vel):
+            print('ERROR: Number of gridpoints mismatch for %s and %s' % (inputfile, inputfileref))
+            return
+        if not (nR_ref == nR and nTheta_ref == nTheta and nPhi_ref == nPhi):
+            print('ERROR: Dimension mismatch of grids %s and %s' % (inputfile, inputfileref))
+            return
+
+        velrel = []
+        for v, vref in zip(vel, velref):
+            if vref != 0:
+                velrel.append((v - vref) / vref * 100)
+            else:
+                velrel.append(0)
+
+        print("Writing velocity values to VTK file...")
+        for velocity in velrel:
+            outfile.write('%10f\n' % velocity)
+        print('Perturbations: min: %s %%, max: %s %%' % (min(velrel), max(velrel)))
+
+    outfile.close()
+    print("Wrote velocity grid for %d points to file: %s" % (nPoints, outputfile))
+    return
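+
+# Minimal usage sketch (file names are the FMTOMO defaults assumed above;
+# adapt to the actual run directory):
+#     vgrids2VTK(inputfile='vgrids.in', outputfile='vgrids.vtk', absOrRel='abs')
+#     vgrids2VTK(inputfile='vgrids.in', inputfileref='vgridsref.in',
+#                outputfile='vgrids_rel.vtk', absOrRel='rel')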
+
+
+def rays2VTK(fnin, fdirout='./vtk_files/', nthPoint=50, spherical=False):
+    '''
+    Writes VTK file(s) for FMTOMO rays from rays.dat
+
+    :param: nthPoint, write only every nth point of each ray
+    :type: integer
+    '''
+    infile = open(fnin, 'r')
+    R = 6371.
+    rays = {}
+    raynumber = 0
+
+    ### NOTE: rays.dat seems to be in km and radians
+    while True:
+        raynumber += 1
+        firstline = infile.readline()
+        if firstline == '':
+            break  # break at EOF
+        fl_list = firstline.split()
+        recnumber = int(fl_list[0])
+        shotnumber = int(fl_list[1])
+        nRaySections = int(fl_list[4])  # is zero if the ray is invalid
+        if nRaySections == 0:
+            print('Invalid ray number %d for shot number %d' % (raynumber, shotnumber))
+            continue
+        if shotnumber not in rays:
+            rays[shotnumber] = {}
+        rays[shotnumber][recnumber] = []
+        for raySection in range(nRaySections):
+            nRayPoints = int(infile.readline().split()[0])
+            for index in range(nRayPoints):
+                if index % nthPoint == 0 or index == (nRayPoints - 1):
+                    rad, lat, lon = infile.readline().split()
+                    rays[shotnumber][recnumber].append(
+                        [_getDistance(np.rad2deg(float(lon))), _getDistance(np.rad2deg(float(lat))), float(rad) - R])
+                else:
+                    infile.readline()
+
+    infile.close()
+
+    for shotnumber in rays.keys():
+        fnameout = os.path.join(fdirout, 'rays%04d.vtk' % (shotnumber))
+        outfile = open(fnameout, 'w')
+
+        nPoints = 0
+        for raynumber in rays[shotnumber]:
+            for ray in rays[shotnumber][raynumber]:
+                nPoints += 1
+
+        # write header
+        print("Writing shot %d to file %s" % (shotnumber, fnameout))
+        outfile.write('# vtk DataFile Version 3.1\n')
+        outfile.write('FMTOMO rays\n')
+        outfile.write('ASCII\n')
+        outfile.write('DATASET POLYDATA\n')
+        outfile.write('POINTS %15d float\n' % (nPoints))
+
+        # write coordinates
+        for raynumber in rays[shotnumber].keys():
+            for raypoint in rays[shotnumber][raynumber]:
+                # longitude, latitude, rad - R
+                x, y, z = raypoint[:3]
+                if spherical:
+                    x, y, z = pol2cart(_getAngle(y), _getAngle(x), z + R)
+                outfile.write('%10f %10f %10f \n' % (x, y, z))
+
+        outfile.write('LINES %15d %15d\n' % (len(rays[shotnumber]), len(rays[shotnumber]) + nPoints))
+
+        # write indices
+        count = 0
+        for raynumber in rays[shotnumber].keys():
+            outfile.write('%d ' % (len(rays[shotnumber][raynumber])))
+            for index in range(len(rays[shotnumber][raynumber])):
+                outfile.write('%d ' % (count))
+                count += 1
+            outfile.write('\n')
+
+        outfile.close()
+
+
+def receivers2VTK(fnin='receivers.in', fnout='receivers.vtk', spherical=False, R=6371.):
+    '''
+    Generates a vtk file from all receivers found in an FMTOMO receivers.in file.
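+    Duplicate receiver positions are written only once.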
+ ''' + infile = open(fnin, 'r') + outfile = open(fnout, 'w') + + nLines = int(infile.readline()) + + # write header + print("Writing header for VTK file...") + outfile.write('# vtk DataFile Version 3.1\n') + outfile.write('Receivers with traceIDs\n') + outfile.write('ASCII\n') + outfile.write('DATASET POLYDATA\n') + + # check for duplicates + coord_tuples = [] + + # save write output lines + points_out = [] + + # write coordinates + print("Writing coordinates to VTK file...") + for index in range(nLines): + # elevation, lat, lon + z, y, x = [float(val) for val in infile.readline().split()] + + # read following 3 lines until next receiver locations + for _ in range(3): + infile.readline() + + # eliminate duplicates + coord_tuple = (x, y, z) + if coord_tuple in coord_tuples: + continue + + coord_tuples.append((x, y, z)) + + if not spherical: + x = _getDistance(x) + y = _getDistance(y) + z = -z + else: + x, y, z = pol2cart(y, x, R - z) + + points_out.append('%10f %10f %10f \n' % (x, y, z)) + + nPoints = len(points_out) + outfile.write('POINTS %15d float\n' % (nPoints)) + + for line in points_out: + outfile.write(line) + + outfile.write('VERTICES %15d %15d\n' % (nPoints, 2 * nPoints)) + + # write indices + print("Writing indices to VTK file...") + for index in range(nPoints): + outfile.write('%10d %10d\n' % (1, index)) + + outfile.close() + infile.close() + print("Wrote %d receivers to file: %s" % (nPoints, fnout)) + + +def sources2VTK(fnin='sources.in', fname_out='sources.vtk', spherical=False, R=6371.): + ''' + Generates a vtk-file for all source locations in the SeisArray object. + ''' + infile = open(fnin, 'r') + + nSources = int(infile.readline()) + + outfile = open(fname_out, 'w') + + points = [] + vertices = [] + + # write header + print("Writing header for VTK file...") + outfile.write('# vtk DataFile Version 3.1\n') + outfile.write('Shots with shotnumbers\n') + outfile.write('ASCII\n') + outfile.write('DATASET POLYDATA\n') + outfile.write('POINTS %15d float\n' % (nSources)) + + # write coordinates + print("Writing coordinates to VTK file...") + + for index in range(nSources): + shotnumber = index + 1 + phaseID = infile.readline() + _ = infile.readline() + # elevation, lat, lon + z, y, x = [float(val) for val in infile.readline().split()[:3]] + if not spherical: + x = _getDistance(x) + y = _getDistance(y) + z = -z + else: + x, y, z = pol2cart(y, x, R - z) + + # read following 4 lines until next source locations + for _ in range(4): + infile.readline() + + outfile.write('%10f %10f %10f \n' % (x, y, z)) + + outfile.write('VERTICES %15d %15d\n' % (1, nSources)) + + outfile.write('{} '.format(nSources - 1)) + for index in range(nSources - 1): + outfile.write('{} '.format(index)) + + outfile.close() + + print("Wrote %d sources to file %s" % (nSources, fname_out)) + + +def _readVgrid(filename): + def readNumberOfPoints(filename): + fin = open(filename, 'r') + vglines = fin.readlines() + + nR = int(vglines[1].split()[0]) + nTheta = int(vglines[1].split()[1]) + nPhi = int(vglines[1].split()[2]) + + print('readNumberOf Points: Awaiting %d grid points in %s' + % (nR * nTheta * nPhi, filename)) + fin.close() + return nR, nTheta, nPhi + + def readDelta(filename): + fin = open(filename, 'r') + vglines = fin.readlines() + + dR = float(vglines[2].split()[0]) + dTheta = float(vglines[2].split()[1]) + dPhi = float(vglines[2].split()[2]) + + fin.close() + return dR, dTheta, dPhi + + def readStartpoints(filename): + fin = open(filename, 'r') + vglines = fin.readlines() + + sR = 
float(vglines[3].split()[0])
+        sTheta = float(vglines[3].split()[1])
+        sPhi = float(vglines[3].split()[2])
+
+        fin.close()
+        return sR, sTheta, sPhi
+
+    def readVelocity(filename):
+        '''
+        Reads the velocities from a vgrids file and returns them as a list in the original order.
+        '''
+        vel = []
+        count = 0
+        fin = open(filename, 'r')
+        vglines = fin.readlines()
+
+        for line in vglines:
+            count += 1
+            if count > 4:
+                if line.split():
+                    vel.append(float(line.split()[0]))
+
+        fin.close()
+        print("Read %d points out of file: %s" % (len(vel), filename))
+        return vel
+
+    # Theta, Phi in radians, R in km
+    nR, nTheta, nPhi = readNumberOfPoints(filename)
+    dR, dThetaRad, dPhiRad = readDelta(filename)
+    sR, sThetaRad, sPhiRad = readStartpoints(filename)
+    vel = readVelocity(filename)
+
+    dTheta, dPhi = np.rad2deg((dThetaRad, dPhiRad))
+    sTheta, sPhi = np.rad2deg((sThetaRad, sPhiRad))
+
+    number = (nR, nTheta, nPhi)
+    delta = (dR, dTheta, dPhi)
+    start = (sR, sTheta, sPhi)
+    return number, delta, start, vel
+
+
+def _generateGrids(number, delta, start):
+    nR, nTheta, nPhi = number
+    dR, dTheta, dPhi = delta
+    sR, sTheta, sPhi = start
+
+    eR = sR + (nR - 1) * dR
+    ePhi = sPhi + (nPhi - 1) * dPhi
+    eTheta = sTheta + (nTheta - 1) * dTheta
+
+    thetaGrid = np.linspace(sTheta, eTheta, num=nTheta)
+    phiGrid = np.linspace(sPhi, ePhi, num=nPhi)
+    rGrid = np.linspace(sR, eR, num=nR)
+
+    return (thetaGrid, phiGrid, rGrid)
+
+
+def addCheckerboard(spacing=10., pertubation=0.1, inputfile='vgrids.in',
+                    outputfile='vgrids_cb.in', ampmethod='linear', rect=(None, None),
+                    spherical=False, spacTheta=None, spacPhi=None):
+    '''
+    Add a checkerboard pattern to an existing vgrids.in velocity model.
+
+    :param: spacing, size of the tiles
+    type: float
+
+    :param: pertubation, relative velocity perturbation (default: 0.1 = 10%)
+    type: float
+    '''
+
+    def correctSpacing(spacing, delta, disttype=None):
+        if spacing > delta:
+            spacing_corr = round(spacing / delta) * delta
+        elif spacing < delta:
+            spacing_corr = delta
+        else:
+            # spacing already matches the grid spacing
+            return spacing
+        if spacing_corr != spacing:
+            print('The spacing of the checkerboard of %s (%s) was corrected to '
+                  'a value of %s to fit the grid spacing of %s.' % (spacing, disttype, spacing_corr, delta))
+        return spacing_corr
+
+    def linearAmp(InCell):
+        decimal = InCell - np.floor(InCell)
+        return (-abs(decimal - 0.5) + 0.5) * 2
+
+    def rectAmp(InCell, rect):
+        decimal = InCell - np.floor(InCell)
+        r1, r2 = rect
+        if r1 <= decimal <= r2:
+            return 1
+        else:
+            return 0
+
+    def ampFunc(InCell, method='linear', rect=None):
+        if method == 'linear':
+            return linearAmp(InCell)
+        if method == 'rect' and rect is not None and None not in rect:
+            return rectAmp(InCell, rect)
+        raise ValueError('ampFunc: Could not amplify cb pattern (method: %s, rect: %s)' % (method, rect))
+
+    if spherical:
+        if spacTheta is None or spacPhi is None:
+            print('Please give explicit values for spacing in theta and phi direction. Abort.')
+            return
+        print('In spherical mode. 
Spacing in R: {}, Theta: {}, Phi: {}'.format(spacing, spacTheta, spacPhi)) + + decm = 0.3 # diagonal elements of the covariance matrix (grid3dg's default value is 0.3) + outfile = open(outputfile, 'w') + + number, delta, start, vel = _readVgrid(inputfile) + + nR, nTheta, nPhi = number + dR, dTheta, dPhi = delta + sR, sTheta, sPhi = start + + thetaGrid, phiGrid, rGrid = _generateGrids(number, delta, start) + + nPoints = nR * nTheta * nPhi + + # write header for velocity grid file (in RADIANS) + outfile.write('%10s %10s \n' % (1, 1)) + outfile.write('%10s %10s %10s\n' % (nR, nTheta, nPhi)) + outfile.write('%10s %10s %10s\n' % (dR, np.deg2rad(dTheta), np.deg2rad(dPhi))) + outfile.write('%10s %10s %10s\n' % (sR, np.deg2rad(sTheta), np.deg2rad(sPhi))) + + spacR = correctSpacing(spacing, dR, '[meter], R') + + # correction for spherical case, angles given explicitly + if not spherical == True: + spacTheta = _getAngle(spacing) + spacPhi = _getAngle(spacing) + + spacTheta = correctSpacing(spacTheta, dTheta, '[degree], Theta') + spacPhi = correctSpacing(spacPhi, dPhi, '[degree], Phi') + + count = 0 + evenOdd = 1 + even = 0 + odd = 0 + + # In the following loop it is checked whether the positive distance from the border of the model + # for a point on the grid divided by the spacing is even or odd and then pertubated. + # The position is also shifted by half of the delta so that the position is directly on the point and + # not on the border between two points. + # "InCell" points e.g. rInCell are floats with their integer number corresponding to the cell number and + # their decimal place (0 - 1) corresponding to the position inside the cell. + # The amplification factor ampFactor comes from a linear relationship and ranges between 0 (cell border) + # and 1 (cell middle) + for radius in rGrid: + rInCell = (radius - sR - dR / 2) / spacR + ampR = ampFunc(rInCell, ampmethod, rect) + if np.floor(rInCell) % 2: + evenOddR = 1 + else: + evenOddR = -1 + for theta in thetaGrid: + thetaInCell = (theta - sTheta - dTheta / 2) / spacTheta + ampTheta = ampFunc(thetaInCell, ampmethod, rect) + if np.floor(thetaInCell) % 2: + evenOddT = 1 + else: + evenOddT = -1 + for phi in phiGrid: + phiInCell = (phi - sPhi - dPhi / 2) / spacPhi + ampPhi = ampFunc(phiInCell, ampmethod, rect) + if np.floor(phiInCell) % 2: + evenOddP = 1 + else: + evenOddP = -1 + velocity = vel[count] + ampFactor = (ampR + ampTheta + ampPhi) / 3 + evenOdd = evenOddR * evenOddT * evenOddP * ampFactor + velocity += evenOdd * pertubation * velocity + + outfile.write('%10s %10s\n' % (velocity, decm)) + count += 1 + + progress = float(count) / float(nPoints) * 100 + _update_progress(progress) + + print('Added checkerboard to the grid in file %s with a spacing of %s and a pertubation of %s %%. ' + 'Outputfile: %s.' % (inputfile, spacing, pertubation * 100, outputfile)) + outfile.close() + + +def addBox(x=(None, None), y=(None, None), z=(None, None), + boxvelocity=1.0, inputfile='vgrids.in', + outputfile='vgrids_box.in'): + ''' + Add a box with constant velocity to an existing vgrids.in velocity model. + + :param: x, borders of the box (xleft, xright) + type: tuple + + :param: y, borders of the box (yleft, yright) + type: tuple + + :param: z, borders of the box (bot, top) + type: tuple + + :param: boxvelocity, default: 1.0 km/s + type: float + ''' + R = 6371. 
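+    # Depths z are given relative to the surface (negative down), so the radii
+    # below become r = R + z; e.g. z=(-60., -10.) would span 10-60 km depth
+    # (values purely illustrative):
+    #     addBox(x=(10., 15.), y=(44., 48.), z=(-60., -10.), boxvelocity=8.0)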
+ decm = 0.3 # diagonal elements of the covariance matrix (grid3dg's default value is 0.3) + outfile = open(outputfile, 'w') + + theta1 = _getAngle(y[0]) + theta2 = _getAngle(y[1]) + phi1 = _getAngle(x[0]) + phi2 = _getAngle(x[1]) + r1 = R + z[0] + r2 = R + z[1] + + print('Adding box to grid with theta = (%s, %s), phi = (%s, %s), ' + 'r = (%s, %s), velocity = %s [km/s]' + % (theta1, theta2, phi1, phi2, r1, r2, boxvelocity)) + + number, delta, start, vel = _readVgrid(inputfile) + + nR, nTheta, nPhi = number + dR, dTheta, dPhi = delta + sR, sTheta, sPhi = start + + thetaGrid, phiGrid, rGrid = _generateGrids(number, delta, start) + + nPoints = nR * nTheta * nPhi + + # write header for velocity grid file (in RADIANS) + outfile.write('%10s %10s \n' % (1, 1)) + outfile.write('%10s %10s %10s\n' % (nR, nTheta, nPhi)) + outfile.write('%10s %10s %10s\n' % (dR, np.deg2rad(dTheta), np.deg2rad(dPhi))) + outfile.write('%10s %10s %10s\n' % (sR, np.deg2rad(sTheta), np.deg2rad(sPhi))) + + count = 0 + for radius in rGrid: + if r1 <= radius <= r2: + rFlag = 1 + else: + rFlag = 0 + for theta in thetaGrid: + if theta1 <= theta <= theta2: + thetaFlag = 1 + else: + thetaFlag = 0 + for phi in phiGrid: + if phi1 <= phi <= phi2: + phiFlag = 1 + else: + phiFlag = 0 + velocity = vel[count] + if rFlag * thetaFlag * phiFlag != 0: + velocity = boxvelocity + + outfile.write('%10s %10s\n' % (velocity, decm)) + count += 1 + + progress = float(count) / float(nPoints) * 100 + _update_progress(progress) + + print('Added box to the grid in file %s. ' + 'Outputfile: %s.' % (inputfile, outputfile)) + outfile.close() + + +def _update_progress(progress): + sys.stdout.write("%d%% done \r" % (progress)) + sys.stdout.flush() + + +def _getAngle(distance, R=6371.): + ''' + Function returns the angle on a Sphere of the radius R = 6371 [km] for a distance [km]. + ''' + PI = np.pi + angle = distance * 180. 
/ (PI * R) + return angle + + +def _getDistance(angle, R=6371.): + PI = np.pi + distance = angle / 180 * (PI * R) + return distance diff --git a/pylot/tomography/map_utils.py b/pylot/tomography/map_utils.py new file mode 100644 index 00000000..728c4ccc --- /dev/null +++ b/pylot/tomography/map_utils.py @@ -0,0 +1,160 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import glob +import os +import json +import numpy as np + +import matplotlib.pyplot as plt + +import cartopy +import cartopy.crs as ccrs +from cartopy.io.shapereader import Reader +from matplotlib.patches import Patch + +#from cmcrameri import cm + +from obspy.geodetics import gps2dist_azimuth + +TRANSFORM = ccrs.PlateCarree() + + +def draw_schmid_faults(ax, fnin='schmidfaults.shp'): + reader = Reader(fnin) + lines = [] + linestyles = [] + for record in reader.records(): + info = record.attributes + if info["fault_type"] == 1 or info["fault_type"] == 3: + linestyles.append('solid') + else: + linestyles.append('dashed') + line_arr = np.array(record.geometry.coords) + lines.append(line_arr) + # ax.add_collection(LineCollection(lines, linewidths=1.2, linestyles=linemarkers, colors='black', transform=ccrs.PlateCarree())) + for line, style in zip(lines, linestyles): + ax.plot(line[:, 0], line[:, 1], transform=TRANSFORM, ls=style, c='k', lw=1.2) + + +def draw_alcapadi_faults(ax, fnin='faults_alcapadi', color='black'): + reader = Reader(fnin) + lines = [] + linestyles = [] + linewidths = [] + for record in reader.records(): + info = record.attributes + if info["fault_type"] == 1: + linestyles.append('solid') + linewidths.append(.8) + elif info["fault_type"] == 2: + linestyles.append('solid') + linewidths.append(0.4) + else: + linestyles.append('dashed') + linewidths.append(.8) + line_arr = np.array(record.geometry.coords) + lines.append(line_arr) + for line, style, lwidth in zip(lines, linestyles, linewidths): + ax.plot(line[:, 0], line[:, 1], transform=TRANSFORM, ls=style, c='k', lw=lwidth) + + +def draw_alcapadi_model(ax, fnin='tect_units_alcapadi', alpha=0.2, add_legend=True): + reader = Reader(fnin) + color_dict = {"Adria accreted": (0.8, 0.7, 0.45, alpha), + "Adria autochton": (0.52, 0.32, 0.2, alpha), + "Europe accreted": (0.42, 0.67, 0.88, alpha), + "Flexural foredeep and graben fill": (0.6, 0.6, 0.6, alpha), # (1.0, 1.0, 220/255., alpha), + "Alpine Tethys": (0., 0.65, 0.3, alpha), + "Neotethys": (0.5, 0.8, 0.32, alpha)} + + patches_legend = [Patch(color=value, label=key.capitalize()) for key, value in color_dict.items()] + + for record in reader.records(): + info = record.attributes + shape = record.geometry + color = color_dict.get(info['tect_unit']) + if not color: + color = (1.0, 1.0, 1.0, alpha) + ax.add_geometries(shape, crs=TRANSFORM, facecolor=color) + + if add_legend: + ax.legend(handles=patches_legend, ncol=3, bbox_to_anchor=(0.5, -0.075), loc='center') + + +def init_cartopy_map(fig=None, draw_mapbound=True, fill_continents=False, + continents_color=None, mapbound_color=None, lakes_color=None, clon=None, clat=None): + if not fig: + fig = plt.figure() + #projection = ccrs.LambertConformal(central_longitude=clon, central_latitude=clat) + projection = ccrs.PlateCarree(central_longitude=clon) + ax = fig.add_subplot(111, projection=projection) + + if fill_continents: + ax.add_feature(cartopy.feature.LAND, color=continents_color) + ax.add_feature(cartopy.feature.LAKES, color=lakes_color) + if draw_mapbound: + #ax.add_feature(cartopy.feature.OCEAN, color='w', linewidth=0.1) + ax.add_feature(cartopy.feature.BORDERS, 
linewidth=0.2, color='0.3') + ax.add_feature(cartopy.feature.COASTLINE, linewidth=0.3, color='0.3') + return ax + + +def make_map(fig=None, draw_model=False, model_legends=True, draw_faults=False, width=20, + height=14, clon=11, clat=46., draw_grid=True, continents='0.8', lakes='0.85', no_content=False, + no_fill=True, alpha_model=0.2, faults_color='k', station_file=None): + + ax = init_cartopy_map(fig=fig, draw_mapbound=not no_content, fill_continents=not no_fill, + continents_color=continents, lakes_color=lakes, clon=clon, clat=clat) + + if station_file: + with open(station_file, 'r') as fid: + stations = json.load(fid) + + lons, lats = zip(*[(sta['longitude'], sta['latitude']) for sta in stations.values()]) + ax.scatter(lons, lats, c='0.3', s=1, transform=ccrs.PlateCarree(), zorder=5, edgecolors='none', alpha=0.5) + + # if draw_topo: + # draw_topo_model(basemap) + if draw_model: + fnin = '/home/marcel/sciebo/AlpArray_home/tectonic_maps_4dmb_2020_09_17/shape_files/tect_units_alcapadi' + draw_alcapadi_model(ax, fnin, add_legend=model_legends, alpha=alpha_model) + if draw_faults: + fnin = '/home/marcel/sciebo/AlpArray_home/tectonic_maps_4dmb_2020_09_17/shape_files/faults_alcapadi' + draw_alcapadi_faults(ax, fnin, faults_color) + + # if not no_content: + # basemap.drawcountries(color=line_color, linewidth=0.2) + # basemap.drawcoastlines(color=line_color, linewidth=0.3) + + if draw_grid: + gl = ax.gridlines(crs=TRANSFORM, draw_labels=True, + linewidth=0.5, color='gray', alpha=0.5, linestyle=':') + # dashes = [3, 6] + # parallels = list(np.arange(-90, 90, lgrid)) + # parallels_small = [item for item in np.arange(-90, 90, sgrid) if not item in parallels] + # basemap.drawparallels(parallels_small, dashes=dashes, color=line_color, linewidth=0.1, zorder=7) + # basemap.drawparallels(parallels, dashes=[], color=line_color, linewidth=0.2, zorder=7, labels=[1, 1, 0, 0]) + # meridians = list(np.arange(-180, 180, lgrid)) + # meridians_small = [item for item in np.arange(-180, 180, sgrid) if not item in meridians] + # basemap.drawmeridians(meridians_small, dashes=dashes, color=line_color, linewidth=0.1, zorder=7) + # basemap.drawmeridians(meridians, dashes=[], color=line_color, linewidth=0.2, zorder=7, labels=[0, 0, 1, 1]) + + ax.set_extent([clon - width/2, clon + width/2, clat - height/2, clat + height/2]) + + return ax + + +def angle_marker(a1, a2, delta=1.): + a1 = np.deg2rad(a1) + a2 = np.deg2rad(a2) + delta = np.deg2rad(delta) + x_vals = [np.sin(angle) for angle in np.arange(a1, a2 + delta, delta)] + y_vals = [np.cos(angle) for angle in np.arange(a1, a2 + delta, delta)] + xy = zip(x_vals, y_vals) + #x1 = np.sin(a1) + #y1 = np.cos(a1) + #x2 = np.sin(a2) + #y2 = np.cos(a2) + marker = [(0, 0), *xy, (0, 0)] + return marker \ No newline at end of file diff --git a/pylot/tomography/utils.py b/pylot/tomography/utils.py new file mode 100644 index 00000000..2ba6e4a2 --- /dev/null +++ b/pylot/tomography/utils.py @@ -0,0 +1,176 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +import glob +import numpy as np +import os +import json +import matplotlib.pyplot as plt + +from obspy import Catalog, read_events + +from pylot.core.util.dataprocessing import Metadata + + +def get_metadata(eventdir): + metadata_path = os.path.join(eventdir, 'resp') + metadata = Metadata(inventory=metadata_path, verbosity=0) + return metadata + + + +def set_rc_params(textsize=7.): + plt.style.use('/home/marcel/solid_earth.mplstyle') + #plt.rcParams.update({'font.size': textsize, + # 'font.family': 'sans-serif'}) + + +def 
normed_figure_ratio_width(width_cm, ratio): + width_inch = width_cm / 2.54 + height_inch = width_inch / ratio + return width_inch, height_inch + + +def normed_figure_ratio_height(height_cm, ratio): + height_inch = height_cm / 2.54 + width_inch = height_inch * ratio + return width_inch, height_inch + + +def normed_figure(width_cm=None, ratio=1.777): + #assert ((width_cm and not height_cm) or (height_cm and not width_cm)), 'Choose either of width or height!' + set_rc_params() + if width_cm: + fig = plt.figure(figsize=normed_figure_ratio_width(width_cm, ratio)) + return fig + #elif height_cm: + # fig = plt.figure(figsize=normed_figure_ratio_height(height_cm, ratio)) + return + + +def pol2cart(lat, lon, r): + x = r * np.cos(np.deg2rad(lat)) * np.cos(np.deg2rad(lon)) + y = r * np.cos(np.deg2rad(lat)) * np.sin(np.deg2rad(lon)) + z = r * np.sin(np.deg2rad(lat)) + return x, y, z + + +def cart2pol(x, y, z): + r = np.sqrt(x**2 + y**2 + z**2) + theta = np.rad2deg(np.arccos(z/r)) + phi = np.rad2deg(np.arctan2(y, x)) + lat = 90. - theta + lon = phi + return lat, lon, r + + +def pol2cart_vector(lat, lon, north, east, r_comp): + if any(val is None for val in [north, east, r_comp]): + return None, None, None + phi = np.deg2rad(lon) + # change north components to common spherical coordinate convention + theta = np.deg2rad(90. - lat) + north *= -1 + x = (np.sin(theta) * np.cos(phi) * r_comp + + np.cos(theta) * np.cos(phi) * north - + np.sin(phi) * east) + y = (np.sin(theta) * np.sin(phi) * r_comp + + np.cos(theta) * np.sin(phi) * north + + np.cos(phi) * east) + z = (np.cos(theta) * r_comp - + np.sin(theta) * north) + return x, y, z + + +def read_cat_obspy_dmt_database(databasedir, filemask): + infiles = glob.glob(os.path.join(databasedir, '*.a', filemask)) + cat = Catalog() + nPicks = 0 + for index, infile in enumerate(infiles): + print(f'Working on: {infile} ({index + 1}/{len(infiles)})') + event = read_events(infile)[0] + nPicks += len(event.picks) + cat += event + + nEvents = len(cat) + print('Number of events: {} (filemask: {})'.format(nEvents, filemask)) + print('Total # picks: {} ({:.2f} per event)'.format(nPicks, float(nPicks)/nEvents)) + return cat + + +def get_event(cat, eventid): + for event in cat.events: + if event.resource_id.id.split('/')[-1] == eventid: + return event + + +def get_pick4station(picks, network_code, station_code, method='auto'): + for pick in picks: + if pick.waveform_id.network_code == network_code: + if pick.waveform_id.station_code == station_code: + if pick.method_id.id.endswith(method): + return pick + + +def delete_picks(picks, nwst_ids_delete): + ''' Delete picks from list in picks containing ObsPy pick objects''' + for index, pick in list(reversed(list(enumerate(picks)))): + seed_id = pick.waveform_id.get_seed_string() + network, station = seed_id.split('.')[:2] + nwst_id = '{}.{}'.format(network, station) + if nwst_id in nwst_ids_delete: + picks.pop(index) + print('Removed pick: ', nwst_id) + return picks + + +def save_all_station_coordinates_dmt_database(dmt_database, fn_out): + ''' + Get all station coordinates from dmt_database and write them (unique) to json outputfile + :param dmt_database: + :param fn_out: + :return: + ''' + stations_dict = {} + eventdirs = glob.glob(os.path.join(dmt_database, '*.?')) + nEvents = len(eventdirs) + for index, eventdir in enumerate(eventdirs): + print('Working on event {} ({}/{})'.format(eventdir, index+1, nEvents)) + metadata = get_metadata(eventdir) + current_stations_dict = metadata.get_all_coordinates() + for nwst_id, coords in 
current_stations_dict.items():
+            if nwst_id not in stations_dict:
+                stations_dict[nwst_id] = coords
+
+    with open(fn_out, 'w') as outfile:
+        json.dump(stations_dict, outfile)
+
+
+def get_coordinate_from_dist_baz(station_tmp, dist, baz, mode='deg'):
+    '''
+    Calculate the epicenter location [lon, lat] from a station location
+    station_tmp = [lon, lat], an epicentral distance and a backazimuth
+    (function copied from Andre).
+    '''
+    if mode not in ('deg', 'rad'):
+        print('mode has to be either deg or rad!')
+        return None
+    station = np.deg2rad(station_tmp)
+    epi_tmp = [0., 0.]
+    if mode == 'deg':
+        dist = np.deg2rad(dist)
+        baz = np.deg2rad(baz)
+    az = baz - np.pi
+    if az < 0:
+        az += 2 * np.pi
+    epi_tmp[1] = np.arcsin(np.sin(station[1]) * np.cos(dist) - np.cos(station[1]) * np.sin(dist) * np.cos(az))
+    if (np.cos(dist) - np.sin(station[1]) * np.sin(epi_tmp[1])) / (np.cos(station[1]) * np.cos(epi_tmp[1])) >= 0.:
+        epi_tmp[0] = station[0] - np.arcsin(np.sin(dist) * np.sin(az) / np.cos(epi_tmp[1]))
+    else:
+        epi_tmp[0] = station[0] - np.pi + np.arcsin(np.sin(dist) * np.sin(az) / np.cos(epi_tmp[1]))
+    epi = np.rad2deg(epi_tmp)
+    return epi
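+
+
+# Usage sketch (values purely illustrative): epicenter of an event recorded at
+# a station at lon/lat (11, 46), 60 deg away, with a backazimuth of 135 deg:
+#     epi_lon, epi_lat = get_coordinate_from_dist_baz([11., 46.], 60., 135., mode='deg')
\ No newline at end of file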