Compare commits
18 commits (feature/re… → tomography)

Commits (SHA1): 274bf31098, f48793e359, 80fd07f0f8, cec6cc24c5, 8a1da72d1c, c989b2abc9, 2dc27013b2, 4bd2e78259, 468a7721c8, 555fb8a719, 5a2a1fe990, 64b719fd54, 71d4269a4f, 81e34875b9, d7ee820de3, 621cbbfbda, 050b9fb0c4, eb3cd713c6
PyLoT.py (18 changes, Normal file → Executable file)
@@ -1011,7 +1011,7 @@ class MainWindow(QMainWindow):
         for event in events:
             for filename in filenames:
                 if os.path.isfile(filename) and event.pylot_id in filename:
-                    self.load_data(filename, draw=False, event=event, ask_user=True, merge_strategy=sld.merge_strategy)
+                    self.load_data(filename, draw=False, event=event, ask_user=False, merge_strategy=sld.merge_strategy)
                     refresh = True
         if not refresh:
             return
@@ -1020,8 +1020,8 @@ class MainWindow(QMainWindow):
         self.fill_eventbox()
         self.setDirty(True)

-    def load_data(self, fname=None, loc=False, draw=True, event=None, ask_user=False, merge_strategy='Overwrite'):
-        if not ask_user:
+    def load_data(self, fname=None, loc=False, draw=True, event=None, ask_user=True, merge_strategy='Overwrite',):
+        if ask_user:
             if not self.okToContinue():
                 return
         if fname is None:
@@ -1034,7 +1034,7 @@ class MainWindow(QMainWindow):
         if not event:
             event = self.get_current_event()

-        if event.picks:
+        if event.picks and ask_user:
             qmb = QMessageBox(self, icon=QMessageBox.Question,
                               text='Do you want to overwrite the data?',)
             overwrite_button = qmb.addButton('Overwrite', QMessageBox.YesRole)
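With this hunk, `load_data` asks the user by default and batch callers opt out explicitly, as in the event loop of the first hunk. A call-site sketch (`window`, `fname` and `event` are hypothetical placeholders, not from the diff):

```python
# Call-site sketch for the new signature; 'window' is a hypothetical
# MainWindow instance, 'fname' and 'event' are placeholders.
window.load_data(fname)  # interactive: may ask before overwriting picks
window.load_data(fname, draw=False, event=event, ask_user=False,
                 merge_strategy='Overwrite')  # batch: no dialogs shown
```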
@@ -1537,8 +1537,8 @@ class MainWindow(QMainWindow):
             return True
         return not bool(os.listdir(wf_path))

-    def filename_from_action(self, action=None):
-        if not action or action.data() is None:
+    def filename_from_action(self, action):
+        if action.data() is None:
             filt = "Supported file formats" \
                    " (*.mat *.qml *.xml *.kor *.evt)"
             caption = "Open an event file"
@@ -3590,7 +3590,7 @@ class MainWindow(QMainWindow):
     def calc_magnitude(self):
         self.init_metadata()
         if not self.metadata:
-            return None
+            return []

         wf_copy = self.get_data().getWFData().copy()

@@ -3599,6 +3599,10 @@ class MainWindow(QMainWindow):
         for station in np.unique(list(self.getPicks('manual').keys()) + list(self.getPicks('auto').keys())):
             wf_select += wf_copy.select(station=station)

+        if not wf_select:
+            logging.warning('Empty Stream in calc_magnitude. Return.')
+            return []
+
         corr_wf = restitute_data(wf_select, self.metadata)
         # calculate moment magnitude
         moment_mag = MomentMagnitude(corr_wf, self.get_data().get_evt_data(), self.inputs.get('vp'),
@@ -63,6 +63,9 @@ You may need to do some modifications to these files. Especially folder names sh

 PyLoT has been tested on Mac OSX (10.11), Debian Linux 8 and on Windows 10/11.

+## Example Dataset
+An example dataset with waveform data, metadata and automatic picks in the obspy-dmt dataset format for testing the teleseismic picking can be found at https://zenodo.org/doi/10.5281/zenodo.13759803
+
 ## Release notes

 #### Features:
@@ -83,13 +86,13 @@ Current release is still in development progress and has several issues. We are

 ## Staff

-Original author(s): M. Rische, S. Wehling-Benatelli, L. Kueperkoch, M. Bischoff (PILOT)
-Developer(s): M. Paffrath, S. Wehling-Benatelli, L. Kueperkoch, D. Arnold, K. Cökerim, K. Olbert, M. Bischoff, C. Wollin, M. Rische, S. Zimmermann
+Developer(s): S. Wehling-Benatelli, M. Paffrath, L. Kueperkoch, K. Olbert, M. Bischoff, C. Wollin, M. Rische, D. Arnold, K. Cökerim, S. Zimmermann
+Original author(s): M. Rische, S. Wehling-Benatelli, L. Kueperkoch, M. Bischoff (PILOT)

 Others: A. Bruestle, T. Meier, W. Friederich

 [ObsPy]: http://github.com/obspy/obspy/wiki

-September 2024
+March 2025
@@ -5,8 +5,12 @@
 #$ -pe smp 40
 ##$ -l mem=3G
 #$ -l h_vmem=6G
-#$ -l os=*stretch
+##$ -l os=*stretch
 #$ -q low.q@minos11,low.q@minos12,low.q@minos13,low.q@minos14,low.q@minos15

 conda activate pylot_311

-python ./autoPyLoT.py -i /home/marcel/.pylot/pylot_adriaarray.in -c 20 -dmt processed
+#python ./autoPyLoT.py -i /home/marcel/.pylot/pylot_adriaarray_m5.0-5.4.in -c 20 -dmt processed
+#python ./autoPyLoT.py -i /home/marcel/.pylot/pylot_adriaarray_m5.4-5.7.in -c 20 -dmt processed
+#python ./autoPyLoT.py -i /home/marcel/.pylot/pylot_adriaarray_m5.7-6.0.in -c 20 -dmt processed
+python ./autoPyLoT.py -i /home/marcel/.pylot/pylot_adriaarray_m6.0-10.0.in -c 20 -dmt processed
docs/correlation.md (new file, 77 lines)
@@ -0,0 +1,77 @@
# Pick-Correlation Correction

## Introduction

Currently, the pick-correlation correction algorithm is not accessible from the PyLoT GUI. The main file *pick_correlation_correction.py* is located in the directory *pylot\correlation*.
The program only works for an obspy dmt database structure.

The basic workflow of the algorithm is shown in the following diagram. The first step **(1)** is the normal (automatic) picking procedure in PyLoT. Everything from step **(2)** to **(5)** is part of the correlation correction algorithm.

*Note: The first step is not required in case theoretical onsets are used instead of external picks, i.e. when the parameter use_taupy_onsets is set to True. However, an existing event quakeML (.xml) file generated by PyLoT might be required for each event in case external picks are not used.*

![Workflow diagram](images/workflow_stacking.png)

A detailed description of the algorithm can be found in the corresponding publication:

*Paffrath, M., Friederich, W., and the AlpArray and AlpArray-SWATH D Working Groups: Teleseismic P waves at the AlpArray seismic network: wave fronts, absolute travel times and travel-time residuals, Solid Earth, 12, 1635–1660, https://doi.org/10.5194/se-12-1635-2021, 2021.*
## How to use

To use the program you have to call the main program providing two mandatory arguments: a path to the obspy dmt database folder *dmt_database_path* and the path to the PyLoT infile *pylot.in* for picking of the beam trace:

```python pick_correlation_correction.py dmt_database_path pylot.in```

By default, the parameter file *parameters.yaml* is used. You can use the command line option *--params* to specify a different parameter file and other optional arguments, such as *-pd* for plotting detailed information or *-n 4* to use 4 cores for parallel processing:

```python pick_correlation_correction.py dmt_database_path pylot.in --params parameters_adriaarray.yaml -pd -n 4```
## Cross-Correlation Parameters

The program uses the parameters in the file *parameters.yaml* by default. You can use the command line option *--params* to specify a different parameter file. An example of the parameter file is provided in the *correlation\parameters.yaml* file.

In the top level of the parameter file the logging level *logging* can be set, as well as a list of pick phases *pick_phases* (e.g. ['P', 'S']).

For each pick phase the different parameters can be set in the first sub-level of the parameter file, e.g.:

```yaml
logging: info
pick_phases: ['P', 'S']

P:
  min_corr_stacking: 0.8
  min_corr_export: 0.6
  [...]

S:
  min_corr_stacking: 0.7
  [...]
```
The following parameters are available:

| Parameter Name | Description | Parameter Type |
|---|---|---|
| min_corr_stacking | Minimum correlation coefficient for building beam trace | float |
| min_corr_export | Minimum correlation coefficient for pick export | float |
| min_stack | Minimum number of stations for building beam trace | int |
| t_before | Correlation window before reference pick | float |
| t_after | Correlation window after reference pick | float |
| cc_maxlag | Maximum shift for initial correlation | float |
| cc_maxlag2 | Maximum shift for second (final) correlation (also for calculating pick uncertainty) | float |
| initial_pick_outlier_threshold | Threshold for excluding large outliers of initial (AIC) picks | float |
| export_threshold | Automatically exclude all onsets which deviate more than this threshold from corrected taup onsets | float |
| min_picks_export | Minimum number of correlated picks for export | int |
| min_picks_autopylot | Minimum number of reference auto picks to continue with event | int |
| check_RMS | Do RMS check to search for restitution errors (very experimental) | bool |
| use_taupy_onsets | Use taupy onsets as reference picks instead of external picks | bool |
| station_list | Use the following stations as reference for stacking | list[str] |
| use_stacked_trace | Use existing stacked trace if found (spares re-computation) | bool |
| data_dir | obspyDMT data subdirectory (e.g. 'raw', 'processed') | str |
| pickfile_extension | Use quakeML files (PyLoT output) with the following extension | str |
| dt_stacking | Time difference for stacking window (in seconds) | list[float] |
| filter_options | Filter for first correlation (rough) | dict |
| filter_options_final | Filter for second correlation (fine) | dict |
| filter_type | Filter type (e.g. bandpass) | str |
| sampfreq | Sampling frequency (in Hz) | float |
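A minimal sketch of reading these parameters in Python (assuming only the PyYAML package and the file layout shown above; this is not part of the program itself):

```python
# Minimal sketch: load parameters.yaml and access per-phase settings.
# Assumes only PyYAML and the structure documented above.
import yaml

with open('parameters.yaml') as fobj:
    params = yaml.safe_load(fobj)

print(params['logging'])             # e.g. 'info'
for phase in params['pick_phases']:  # e.g. ['P', 'S']
    print(phase, params[phase]['min_corr_stacking'])
```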
## Example Dataset

An example dataset with waveform data, metadata and automatic picks in the obspy-dmt dataset format for testing can be found at https://zenodo.org/doi/10.5281/zenodo.13759803
docs/images/workflow_stacking.png (BIN, new file; binary file not shown, size after: 64 KiB)
docs/tuning.md (199 changes)
@@ -6,122 +6,123 @@ A description of the parameters used for determining automatic picks.
Parameters applied to the traces before picking algorithm starts.

| Name | Description |
|---|---|
| *P Start*, *P Stop* | Define time interval relative to trace start time for CF calculation on vertical trace. Value is relative to theoretical onset time if 'Use TauPy' option is enabled in main settings of 'Tune Autopicker' dialogue. |
| *S Start*, *S Stop* | Define time interval relative to trace start time for CF calculation on horizontal traces. Value is relative to theoretical onset time if 'Use TauPy' option is enabled in main settings of 'Tune Autopicker' dialogue. |
| *Bandpass Z1* | Filter settings for Butterworth bandpass applied to vertical trace for calculation of initial P pick. |
| *Bandpass Z2* | Filter settings for Butterworth bandpass applied to vertical trace for calculation of precise P pick. |
| *Bandpass H1* | Filter settings for Butterworth bandpass applied to horizontal traces for calculation of initial S pick. |
| *Bandpass H2* | Filter settings for Butterworth bandpass applied to horizontal traces for calculation of precise S pick. |
## Initial P pick

Parameters used for determination of initial P pick.

| Name | Description |
|---|---|
| *tLTA* | Size of gliding LTA window in seconds used for calculation of HOS-CF. |
| *pickwinP* | Size of time window in seconds in which the minimum of the AIC-CF in front of the maximum of the HOS-CF is determined. |
| *AICtsmooth* | Average of samples in this time window will be used for smoothing of the AIC-CF. |
| *checkwinP* | Time in front of the global maximum of the HOS-CF in which to search for a second local extremum. |
| *minfactorP* | Used with *checkwinP*. If a second local maximum is found, it has to be at least as big as the first maximum * *minfactorP*. |
| *tsignal* | Time window in seconds after the initial P pick used for determining signal amplitude. |
| *tnoise* | Time window in seconds in front of initial P pick used for determining noise amplitude. |
| *tsafetey* | Time in seconds between *tsignal* and *tnoise*. |
| *tslope* | Time window in seconds after initial P pick in which the slope of the onset is calculated. |
## Initial S pick

Parameters used for determination of initial S pick.

| Name | Description |
|---|---|
| *tdet1h* | Length of time window in seconds in which AR params of the waveform are determined. |
| *tpred1h* | Length of time window in seconds in which the waveform is predicted using the AR model. |
| *AICtsmoothS* | Average of samples in this time window is used for smoothing the AIC-CF. |
| *pickwinS* | Time window in which the minimum in the AIC-CF in front of the maximum in the ARH-CF is determined. |
| *checkwinS* | Time in front of the global maximum of the ARH-CF in which to search for a second local extremum. |
| *minfactorS* | Used with *checkwinS*. If a second local maximum is found, it has to be at least as big as the first maximum * *minfactorS*. |
| *tsignal* | Time window in seconds after the initial P pick used for determining signal amplitude. |
| *tnoise* | Time window in seconds in front of initial P pick used for determining noise amplitude. |
| *tsafetey* | Time in seconds between *tsignal* and *tnoise*. |
| *tslope* | Time window in seconds after initial P pick in which the slope of the onset is calculated. |
## Precise P pick

Parameters used for determination of precise P pick.

| Name | Description |
|---|---|
| *Precalcwin* | Time window in seconds for recalculation of the HOS-CF. The new CF will be two times the size of *Precalcwin*, since it will be calculated from the initial pick to +/- *Precalcwin*. |
| *tsmoothP* | Average of samples in this time window will be used for smoothing the second HOS-CF. |
| *ausP* | Controls artificial uplift of samples during precise picking. A common local minimum of the smoothed and unsmoothed HOS-CF is found when the previous sample is larger or equal to the current sample times (1+*ausP*). |
## Precise S pick

Parameters used for determination of precise S pick.

| Name | Description |
|---|---|
| *tdet2h* | Time window for determination of AR coefficients. |
| *tpred2h* | Time window in which the waveform is predicted using the determined AR parameters. |
| *Srecalcwin* | Time window for recalculation of ARH-CF. New CF will be calculated from initial pick +/- *Srecalcwin*. |
| *tsmoothS* | Average of samples in this time window will be used for smoothing the second ARH-CF. |
| *ausS* | Controls artificial uplift of samples during precise picking. A common local minimum of the smoothed and unsmoothed ARH-CF is found when the previous sample is larger or equal to the current sample times (1+*ausS*). |
| *pickwinS* | Time window around initial pick in which to look for a precise pick. |
## Pick quality control

Parameters used for checking quality and integrity of automatic picks.

| Name | Description |
|---|---|
| *minAICPslope* | Initial P picks with a slope lower than this value will be discarded. |
| *minAICPSNR* | Initial P picks with a SNR below this value will be discarded. |
| *minAICSslope* | Initial S picks with a slope lower than this value will be discarded. |
| *minAICSSNR* | Initial S picks with a SNR below this value will be discarded. |
| *minsiglength*, *noisefactor*, *minpercent* | Parameters for checking signal length. In the time window of size *minsiglength* after the initial P pick, *minpercent* of samples have to be larger than the RMS value. |
@@ -139,12 +140,12 @@ wdttolerance* | Maximum allowed deviation of S onset

Parameters for discrete quality classes.

| Name | Description |
|---|---|
| *timeerrorsP* | Width of the time windows in seconds between earliest and latest possible pick which represent the quality classes 0, 1, 2, 3 for P onsets. |
| *timeerrorsS* | Width of the time windows in seconds between earliest and latest possible pick which represent the quality classes 0, 1, 2, 3 for S onsets. |
| *nfacP*, *nfacS* | For determination of latest possible onset time. The time when the signal reaches an amplitude of *nfac* * mean value of the RMS amplitude in the time window *tnoise* corresponds to the latest possible onset time. |
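For illustration, a toy sketch of how such quality-class windows are typically applied (threshold values invented; this is not PyLoT's implementation):

```python
# Toy example (not PyLoT code): map a pick uncertainty in seconds to a
# quality class 0-4 using timeerrorsP-style thresholds (values invented).
def quality_class(uncertainty, timeerrors=(0.01, 0.02, 0.04, 0.08)):
    for quality, threshold in enumerate(timeerrors):
        if uncertainty <= threshold:
            return quality
    return len(timeerrors)  # worst class: error exceeds all windows

print(quality_class(0.015))  # -> 1
```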
@@ -36,8 +36,17 @@ class Data(object):
        loaded event. Container object holding, e.g. phase arrivals, etc.
    """

-    def __init__(self, parent=None, evtdata=None):
+    def __init__(self, parent=None, evtdata=None, picking_parameter=None):
         self._parent = parent
+
+        if not picking_parameter:
+            if hasattr(parent, '_inputs'):
+                picking_parameter = parent._inputs
+            else:
+                logging.warning('No picking parameters found! Using default input parameters!!!')
+                picking_parameter = PylotParameter()
+        self.picking_parameter = picking_parameter
+
         if self.getParent():
             self.comp = parent.getComponent()
         else:
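With this hunk, `Data` accepts an explicit `picking_parameter` and otherwise falls back to `parent._inputs` or to defaults. A hedged usage sketch (the `PylotParameter` import path is assumed from the PyLoT layout; file names are placeholders):

```python
# Usage sketch for the new keyword; the import path for PylotParameter is
# an assumption, and the file names are placeholders.
from pylot.core.io.data import Data
from pylot.core.io.inputs import PylotParameter

parameter = PylotParameter('pylot.in')  # or PylotParameter() for defaults
data = Data(evtdata='PyLoT_event.xml', picking_parameter=parameter)
# omit picking_parameter and Data falls back to parent._inputs, then defaults
```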
@@ -403,23 +412,19 @@ class Data(object):
                                    not implemented: {1}'''.format(evtformat, e))
         if fnext == '.cnv':
             try:
-                velest.export(picks_copy, fnout + fnext, eventinfo=self.get_evt_data())
+                velest.export(picks_copy, fnout + fnext, self.picking_parameter, eventinfo=self.get_evt_data())
             except KeyError as e:
                 raise KeyError('''{0} export format
                                not implemented: {1}'''.format(evtformat, e))
         if fnext == '_focmec.in':
             try:
-                parameter = PylotParameter()
-                logging.warning('Using default input parameter')
-                focmec.export(picks_copy, fnout + fnext, parameter, eventinfo=self.get_evt_data())
+                focmec.export(picks_copy, fnout + fnext, self.picking_parameter, eventinfo=self.get_evt_data())
             except KeyError as e:
                 raise KeyError('''{0} export format
                                not implemented: {1}'''.format(evtformat, e))
         if fnext == '.pha':
             try:
-                parameter = PylotParameter()
-                logging.warning('Using default input parameter')
-                hypodd.export(picks_copy, fnout + fnext, parameter, eventinfo=self.get_evt_data())
+                hypodd.export(picks_copy, fnout + fnext, self.picking_parameter, eventinfo=self.get_evt_data())
             except KeyError as e:
                 raise KeyError('''{0} export format
                                not implemented: {1}'''.format(evtformat, e))
@@ -53,10 +53,16 @@ class PylotParameter(object):
         self.__parameter = {}
         self._verbosity = verbosity
         self._parFileCont = {}
+
         # io from parsed arguments alternatively
         for key, val in kwargs.items():
             self._parFileCont[key] = val
         self.from_file()
+
+        # if no filename or kwargs given, use default values
+        if not fnin and not kwargs:
+            self.reset_defaults()
+
         if fnout:
             self.export2File(fnout)
@@ -218,14 +218,12 @@ def picksdict_from_obs(fn):
     return picks


-def picksdict_from_picks(evt, parameter=None, nwst_id: bool = False):
+def picksdict_from_picks(evt, parameter=None):
     """
     Takes an Event object and return the pick dictionary commonly used within
     PyLoT
     :param evt: Event object contain all available information
     :type evt: `~obspy.core.event.Event`
-    :param nwst_id: determines if network and station id are used or only station id
-    :type nwst_id: bool
     :return: pick dictionary (auto and manual)
     """
     picksdict = {
@@ -235,10 +233,7 @@ def picksdict_from_picks(evt, parameter=None, nwst_id: bool = False):
     for pick in evt.picks:
         errors = None
         phase = {}
-        if not nwst_id:
-            station_or_nwst = pick.waveform_id.station_code
-        else:
-            station_or_nwst = f'{pick.waveform_id.station_code}.{pick.waveform_id.network_code}'
+        station = pick.waveform_id.station_code
         if pick.waveform_id.channel_code is None:
             channel = ''
         else:
@@ -259,7 +254,7 @@ def picksdict_from_picks(evt, parameter=None, nwst_id: bool = False):
         if pick_method == 'None':
             pick_method = 'manual'
         try:
-            onsets = picksdict[pick_method][station_or_nwst]
+            onsets = picksdict[pick_method][station]
         except KeyError as e:
             # print(e)
             onsets = {}
@@ -283,7 +278,6 @@ def picksdict_from_picks(evt, parameter=None, nwst_id: bool = False):
         weight = phase.get('weight')
         if not weight:
             if not parameter:
-                logging.warning('Using ')
                 logging.warning('Using default input parameter')
                 parameter = PylotParameter()
             pick.phase_hint = identifyPhase(pick.phase_hint)
@@ -306,7 +300,7 @@ def picksdict_from_picks(evt, parameter=None, nwst_id: bool = False):
             phase['filter_id'] = filter_id if filter_id is not None else ''

             onsets[pick.phase_hint] = phase.copy()
-            picksdict[pick_method][station_or_nwst] = onsets.copy()
+            picksdict[pick_method][station] = onsets.copy()
     return picksdict
@@ -518,7 +512,7 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
         # write header
         fid.write('# EQEVENT: %s Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' %
                   (parameter.get('datapath'), parameter.get('eventID')))
-        arrivals = chooseArrivals(arrivals)  # MP MP what is chooseArrivals? It is not defined anywhere
+        arrivals = chooseArrivals(arrivals)
         for key in arrivals:
             # P onsets
             if 'P' in arrivals[key]:
@@ -671,7 +665,7 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
         fid = open("%s" % filename, 'w')
         # write header
         fid.write('%s, event %s \n' % (parameter.get('datapath'), parameter.get('eventID')))
-        arrivals = chooseArrivals(arrivals)  # MP MP what is chooseArrivals? It is not defined anywhere
+        arrivals = chooseArrivals(arrivals)
         for key in arrivals:
             # P onsets
             if 'P' in arrivals[key] and arrivals[key]['P']['mpp'] is not None:
@@ -762,11 +756,11 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
                     cns, eventsource['longitude'], cew, eventsource['depth'], eventinfo.magnitudes[0]['mag'], ifx))
         n = 0
         # check whether arrivals are dictionaries (autoPyLoT) or pick object (PyLoT)
-        if isinstance(arrivals, dict) == False:
+        if isinstance(arrivals, dict) is False:
             # convert pick object (PyLoT) into dictionary
             evt = ope.Event(resource_id=eventinfo['resource_id'])
             evt.picks = arrivals
-            arrivals = picksdict_from_picks(evt)
+            arrivals = picksdict_from_picks(evt, parameter=parameter)
         # check for automatic and manual picks
         # prefer manual picks
         usedarrivals = chooseArrivals(arrivals)
@@ -827,7 +821,7 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
             # convert pick object (PyLoT) into dictionary
             evt = ope.Event(resource_id=eventinfo['resource_id'])
             evt.picks = arrivals
-            arrivals = picksdict_from_picks(evt)
+            arrivals = picksdict_from_picks(evt, parameter=parameter)
             # check for automatic and manual picks
             # prefer manual picks
             usedarrivals = chooseArrivals(arrivals)
@@ -878,7 +872,7 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
             # convert pick object (PyLoT) into dictionary
             evt = ope.Event(resource_id=eventinfo['resource_id'])
             evt.picks = arrivals
-            arrivals = picksdict_from_picks(evt)
+            arrivals = picksdict_from_picks(evt, parameter=parameter)
             # check for automatic and manual picks
             # prefer manual picks
             usedarrivals = chooseArrivals(arrivals)
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-import copy
+
 import traceback

 import cartopy.crs as ccrs
@@ -16,8 +16,6 @@ from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
 from mpl_toolkits.axes_grid1.inset_locator import inset_axes
 from obspy import UTCDateTime

-from pylot.core.io.data import Data
-from pylot.core.io.phases import picksdict_from_picks
 from pylot.core.util.utils import identifyPhaseID
 from scipy.interpolate import griddata
@@ -43,7 +41,6 @@ class MplCanvas(FigureCanvas):
 class Array_map(QtWidgets.QWidget):
     def __init__(self, parent, metadata, parameter=None, axes=None, annotate=True, pointsize=25.,
                  linewidth=1.5, width=5e6, height=2e6):
         QtWidgets.QWidget.__init__(self, parent=parent)
         assert (parameter is not None or parent is not None), 'either parent or parameter has to be set'
@@ -61,8 +58,6 @@ class Array_map(QtWidgets.QWidget):
         self.uncertainties = None
         self.autopicks_dict = None
         self.hybrids_dict = None
-        self.picks_dict_ref = None
-        self.autopicks_dict_ref = None
         self.eventLoc = None
         self.parameter = parameter if parameter else parent._inputs
@@ -113,10 +108,8 @@ class Array_map(QtWidgets.QWidget):
         self.status_label = QtWidgets.QLabel()
         self.map_reset_button = QtWidgets.QPushButton('Reset Map View')
         self.save_map_button = QtWidgets.QPushButton('Save Map')
-        self.load_reference_picks_button = QtWidgets.QPushButton('Load reference picks.')
         self.go2eq_button = QtWidgets.QPushButton('Go to Event Location')
         self.subtract_mean_cb = QtWidgets.QCheckBox('Subtract mean')
-        self.subtract_ref_cb = QtWidgets.QCheckBox('Subtract reference onsets')

         self.main_box = QtWidgets.QVBoxLayout()
         self.setLayout(self.main_box)
@@ -141,8 +134,8 @@ class Array_map(QtWidgets.QWidget):
         self.cmaps_box = QtWidgets.QComboBox()
         self.cmaps_box.setMaxVisibleItems(20)
         [self.cmaps_box.addItem(map_name) for map_name in sorted(plt.colormaps())]
-        # try to set to viridis as default
-        self.cmaps_box.setCurrentIndex(self.cmaps_box.findText('viridis'))
+        # try to set to plasma as default
+        self.cmaps_box.setCurrentIndex(self.cmaps_box.findText('plasma'))

         self.top_row.addWidget(QtWidgets.QLabel('Select a phase: '))
         self.top_row.addWidget(self.comboBox_phase)
@@ -163,9 +156,7 @@ class Array_map(QtWidgets.QWidget):
         self.bot_row.addWidget(self.map_reset_button, 2)
         self.bot_row.addWidget(self.go2eq_button, 2)
         self.bot_row.addWidget(self.save_map_button, 2)
-        self.bot_row.addWidget(self.load_reference_picks_button, 2)
         self.bot_row.addWidget(self.subtract_mean_cb, 0)
-        self.bot_row.addWidget(self.subtract_ref_cb, 0)
         self.bot_row.addWidget(self.status_label, 5)

     def init_colormap(self):
@@ -226,9 +217,7 @@ class Array_map(QtWidgets.QWidget):
         self.map_reset_button.clicked.connect(self.org_map_view)
         self.go2eq_button.clicked.connect(self.go2eq)
         self.save_map_button.clicked.connect(self.saveFigure)
-        self.load_reference_picks_button.clicked.connect(self.load_reference_picks)
         self.subtract_mean_cb.stateChanged.connect(self.toggle_subtract_mean)
-        self.subtract_ref_cb.stateChanged.connect(self.toggle_subtract_ref)

         self.plotWidget.mpl_connect('motion_notify_event', self.mouse_moved)
         self.plotWidget.mpl_connect('scroll_event', self.mouse_scroll)
@@ -385,11 +374,8 @@ class Array_map(QtWidgets.QWidget):
     def get_max_from_stations(self, key):
         return self._from_dict(max, key)

-    def get_selected_pick_type(self):
-        return self.comboBox_am.currentText().split(' ')[0]
-
     def current_picks_dict(self):
-        picktype = self.get_selected_pick_type()
+        picktype = self.comboBox_am.currentText().split(' ')[0]
         auto_manu = {'auto': self.autopicks_dict,
                      'manual': self.picks_dict,
                      'hybrid': self.hybrids_dict}
@@ -461,9 +447,6 @@ class Array_map(QtWidgets.QWidget):
         self.cmaps_box.setCurrentIndex(self.cmaps_box.findText(cmap))
         self._refresh_drawings()

-    def toggle_subtract_ref(self):
-        self._refresh_drawings()
-
     def init_lat_lon_dimensions(self):
         # init minimum and maximum lon and lat dimensions
         self.londim = self.lonmax - self.lonmin
@@ -476,10 +459,7 @@ class Array_map(QtWidgets.QWidget):
         self.longrid, self.latgrid = np.meshgrid(lonaxis, lataxis)

     def init_picksgrid(self):
-        rval = self.get_picks_lat_lon()
-        if not rval:
-            return
-        picks, uncertainties, lats, lons = rval
+        picks, uncertainties, lats, lons = self.get_picks_lat_lon()
         try:
             self.picksgrid_active = griddata((lats, lons), picks, (self.latgrid, self.longrid), method='linear')
         except Exception as e:
@@ -497,20 +477,15 @@ class Array_map(QtWidgets.QWidget):

     def get_picks_lat_lon(self):
         picks_rel = self.picks_rel_mean_corrected if self.subtract_mean_cb.isChecked() else self.picks_rel
-        picks_rel = self.picks_rel_ref_corrected if self.subtract_ref_cb.isChecked() else picks_rel
-        if not picks_rel:
-            return
         picks = []
         uncertainties = []
         latitudes = []
         longitudes = []
-        for nwst_id, pick in picks_rel.items():
-            if nwst_id not in self.uncertainties or nwst_id not in self.stations_dict:
-                continue
+        for st_id, pick in picks_rel.items():
             picks.append(pick)
-            uncertainties.append(self.uncertainties.get(nwst_id))
-            latitudes.append(self.stations_dict[nwst_id]['latitude'])
-            longitudes.append(self.stations_dict[nwst_id]['longitude'])
+            uncertainties.append(self.uncertainties.get(st_id))
+            latitudes.append(self.stations_dict[st_id]['latitude'])
+            longitudes.append(self.stations_dict[st_id]['longitude'])
         return picks, uncertainties, latitudes, longitudes

     # plotting -----------------------------------------------------
@@ -607,10 +582,7 @@ class Array_map(QtWidgets.QWidget):
                     transform=ccrs.PlateCarree())

     def scatter_picked_stations(self):
-        rval = self.get_picks_lat_lon()
-        if not rval:
-            return
-        picks, uncertainties, lats, lons = rval
+        picks, uncertainties, lats, lons = self.get_picks_lat_lon()
         if len(lons) < 1 and len(lats) < 1:
             return

@@ -765,52 +737,6 @@ class Array_map(QtWidgets.QWidget):
             fname += '.png'
         self.canvas.fig.savefig(fname)

-    def load_reference_picks(self):
-        fname = self._parent.filename_from_action()
-        data_ref = Data(parent=self._parent, evtdata=str(fname))
-        evt_ref = data_ref.get_evt_data()
-        if not evt_ref:
-            return
-        picks_ref = evt_ref.picks
-        if not picks_ref:
-            return
-        picksdict_ref = picksdict_from_picks(evt_ref, nwst_id=False)
-        self.autopicks_dict = picksdict_ref['manual']
-        self.autopicks_dict_ref = picksdict_ref['auto']
-        return True
-
-    @property
-    def picks_rel_ref_corrected(self):
-        picktype = self.get_selected_pick_type()
-        if picktype in ['auto', 'hybrid']:
-            picks_dict = self.autopicks_dict_ref
-        elif picktype == 'manual':
-            picks_dict = self.autopicks_dict
-        else:
-            return
-        if picks_dict:
-            return self.subtract_picks(picks_dict)
-        else:
-            if self.load_reference_picks():
-                return self.picks_rel_ref_corrected
-
-    def subtract_picks(self, picks_dict):
-        current_picks = self.current_picks_dict()
-        subtracted_picks = {}
-        for station, ps_dict in current_picks.items():
-            for phase, pick in ps_dict.items():
-                pick_ref_ps = picks_dict.get(station)
-                if not pick_ref_ps:
-                    continue
-                pick_ref = pick_ref_ps.get(phase)
-                if not pick_ref:
-                    continue
-                mpp = pick['mpp'] - pick_ref['mpp']
-                nwst_id = f'{pick["network"]}.{station}'
-                subtracted_picks[nwst_id] = mpp
-        return subtracted_picks
-
     def _warn(self, message):
         self.qmb = QtWidgets.QMessageBox(QtWidgets.QMessageBox.Icon.Warning, 'Warning', message)
         self.qmb.show()
@@ -220,9 +220,9 @@ class Metadata(object):
             network_name = network.code
             if not station_name in self.stations_dict.keys():
                 st_id = '{}.{}'.format(network_name, station_name)
-                self.stations_dict[st_id] = {'latitude': station[0].latitude,
-                                             'longitude': station[0].longitude,
-                                             'elevation': station[0].elevation}
+                self.stations_dict[st_id] = {'latitude': station.latitude,
+                                             'longitude': station.longitude,
+                                             'elevation': station.elevation}

         read_stat = {'xml': stat_info_from_inventory,
                      'dless': stat_info_from_parser}
@@ -268,9 +268,6 @@ class Metadata(object):
         if not fnames:
             # search for station name in filename
             fnames = glob.glob(os.path.join(path_to_inventory, '*' + station + '*'))
-        if not fnames:
-            # search for network name in filename
-            fnames = glob.glob(os.path.join(path_to_inventory, '*' + network + '*'))
         if not fnames:
             if self.verbosity:
                 print('Could not find filenames matching station name, network name or seed id')
@@ -282,7 +279,7 @@ class Metadata(object):
                 continue
             invtype, robj = self._read_metadata_file(os.path.join(path_to_inventory, fname))
             try:
-                # robj.get_coordinates(station_seed_id)  # TODO: Commented out, failed with Parser, is this needed?
+                robj.get_coordinates(station_seed_id)
                 self.inventory_files[fname] = {'invtype': invtype,
                                                'data': robj}
                 if station_seed_id in self.seed_ids.keys():
@@ -290,6 +287,7 @@ class Metadata(object):
                 self.seed_ids[station_seed_id] = fname
                 return True
             except Exception as e:
                 logging.warning(e)
+                continue
         print('Could not find metadata for station_seed_id {} in path {}'.format(station_seed_id, path_to_inventory))
@@ -654,6 +652,8 @@ def restitute_data(data, metadata, unit='VEL', force=False, ncores=0):
     """

     # data = remove_underscores(data)
+    if not data:
+        return

     # loop over traces
     input_tuples = []
@@ -661,9 +661,14 @@ def restitute_data(data, metadata, unit='VEL', force=False, ncores=0):
         input_tuples.append((tr, metadata, unit, force))
         data.remove(tr)

-    pool = gen_Pool(ncores)
-    result = pool.imap_unordered(restitute_trace, input_tuples)
-    pool.close()
+    if ncores == 0:
+        result = []
+        for input_tuple in input_tuples:
+            result.append(restitute_trace(input_tuple))
+    else:
+        pool = gen_Pool(ncores)
+        result = pool.imap_unordered(restitute_trace, input_tuples)
+        pool.close()

     for tr, remove_trace in result:
         if not remove_trace:
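The `ncores == 0` branch above runs the restitution serially, which is useful for debugging without a multiprocessing pool. A hedged call sketch (`wf_select` and `metadata` are placeholders taken from the surrounding hunks):

```python
# Call sketch: ncores=0 runs restitute_trace serially; any other value
# dispatches the same input tuples through the multiprocessing pool.
corr_wf = restitute_data(wf_select, metadata, unit='VEL', ncores=0)  # serial
corr_wf = restitute_data(wf_select, metadata, unit='VEL', ncores=4)  # pooled
```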
@@ -51,7 +51,6 @@ def readDefaultFilterInformation():
    :rtype: dict
    """
    pparam = PylotParameter()
-    pparam.reset_defaults()
    return readFilterInformation(pparam)
@@ -9,7 +9,7 @@
 # initial_pick_outlier_threshold: (hopefully) threshold for excluding large outliers of initial (AIC) picks
 # export_threshold: automatically exclude all onsets which deviate more than this threshold from corrected taup onsets
 # min_picks_export: minimum number of correlated picks for export
-# min_picks_autopylot: minimum number of reference autopicks picks to continue with event
+# min_picks_autopylot: minimum number of reference auto picks to continue with event
 # check_RMS: do RMS check to search for restitution errors (very experimental)
 # use_taupy_onsets: use taupy onsets as reference picks instead of external picks
 # station_list: use the following stations as reference for stacking
@@ -17,6 +17,11 @@
 # data_dir: obspyDMT data subdirectory (e.g. 'raw', 'processed')
 # pickfile_extension: use quakeML files (PyLoT output) with the following extension, e.g. '_autopylot' for pickfiles
 #                     such as 'PyLoT_20170501_141822_autopylot.xml'
 # dt_stacking: time shift for stacking (e.g. [0, 250] for 0 and 250 seconds shift)
+# filter_options: filter for first correlation (rough)
+# filter_options_final: filter for second correlation (fine)
+# filter_type: e.g. 'bandpass'
+# sampfreq: sampling frequency of the data

 logging: info
 pick_phases: ['P', 'S']
@@ -54,6 +59,9 @@ P:
   filter_type: bandpass
   sampfreq: 20.0

+  # ignore if autopylot fails to pick master-trace (not recommended if absolute onset times matter)
+  ignore_autopylot_fail_on_master: True
+
 # S-phase
 S:
   min_corr_stacking: 0.7
@@ -88,3 +96,6 @@ S:
   filter_type: bandpass
   sampfreq: 20.0
+
+  # ignore if autopylot fails to pick master-trace (not recommended if absolute onset times matter)
+  ignore_autopylot_fail_on_master: True
@@ -436,6 +436,7 @@ def correlation_main(database_path_dmt: str, pylot_infile_path: str, params: dic

     # iterate over all events in "database_path_dmt"
     for eventindex, eventdir in enumerate(eventdirs):
+        eventindex += 1
         if not istart <= eventindex < istop:
             continue

@@ -443,12 +444,16 @@ def correlation_main(database_path_dmt: str, pylot_infile_path: str, params: dic
             continue

         logging.info('\n' + 100 * '#')
-        logging.info('Working on event {} ({}/{})'.format(eventdir, eventindex + 1, len(eventdirs)))
+        logging.info('Working on event {} ({}/{})'.format(eventdir, eventindex, len(eventdirs)))
         if event_blacklist and get_event_id(eventdir) in event_blacklist:
             logging.info('Event on blacklist. Continue')

-        correlate_event(eventdir, pylot_parameter, params=params, channel_config=channel_config,
-                        update=update)
+        try:
+            correlate_event(eventdir, pylot_parameter, params=params, channel_config=channel_config,
+                            update=update)
+        except Exception as e:
+            logging.error(f'Could not correlate event {eventindex}: {e}')
+            continue

     logging.info('Finished script after {} at {}'.format(datetime.now() - tstart, datetime.now()))
@@ -522,7 +527,7 @@ def cut_stream_to_same_length(wfdata: Stream) -> None:
         st.trim(tstart, tend, pad=True, fill_value=0.)
         # check for stream length
         if len(st) < 3:
-            logging.info('Not enough traces in stream, remove it from dataset:', str(st))
+            logging.info(f'Not enough traces in stream, remove it from dataset: {st}')
             remove(wfdata, st)
             continue
         if st[0].stats.starttime != st[1].stats.starttime or st[1].stats.starttime != st[2].stats.starttime:
@@ -635,10 +640,19 @@ def correlate_event(eventdir: str, pylot_parameter: PylotParameter, params: dict
         if len(picks) < params[phase_type]['min_picks_autopylot']:
             logging.info('Not enough automatic picks for correlation. Continue!')
             continue
-        # calculate corrected taupy picks and remove strong outliers
-        taupypicks_corr_initial, median_diff = get_corrected_taupy_picks(picks, taupypicks_orig)
-        picks = remove_outliers(picks, taupypicks_corr_initial,
-                                params[phase_type]['initial_pick_outlier_threshold'])
+        ## calculate corrected taupy picks and remove strong outliers - part commented out for now. Maybe make this
+        ## another option but I think it is not worth it
+        # taupypicks_corr_initial, median_diff = get_corrected_taupy_picks(picks, taupypicks_orig)
+        # picks = remove_outliers(picks, taupypicks_corr_initial,
+        #                         params[phase_type]['initial_pick_outlier_threshold'])
+
+        # use mean/median corrected picks from taupy as the new reference picks. This largely increases the yield
+        # of final picks since every station has a decent reference onset now for a correlation
+        taupypicks, time_shift = get_corrected_taupy_picks(picks, taupypicks_orig, all_available=True)
+
+        picks = copy.deepcopy(taupypicks)
+        for pick in picks:
+            pick.method_id.id = 'auto'

         if phase_type == 'S':
             # check whether rotation to ZNE is possible (to get rid of horizontal channel 1,2,3)
@@ -671,19 +685,56 @@ def correlate_event(eventdir: str, pylot_parameter: PylotParameter, params: dict
         if not params[phase_type]['use_stacked_trace']:
             # first stack mastertrace by using stations with high correlation on that station in station list with the
             # highest correlation coefficient
-            stack_result = stack_mastertrace(wfdata_lowf, wfdata_highf, wfdata, picks, params=params[phase_type],
-                                             channels=channels_list, method=method, fig_dir=fig_dir)
-        else:
-            stack_result = load_stacked_trace(eventdir, params[phase_type]['min_corr_stacking'])
+            logging.info('Searching for master trace. ' + 20 * '-')
+            best_result = None
+            for stack_result in stack_mastertrace(wfdata_lowf, wfdata_highf, wfdata, picks, params=params[phase_type],
+                                                  channels=channels_list, method=method, fig_dir=fig_dir):
+
+                if not stack_result:
+                    continue
+
+                # save best result in case it is still needed
+                if not best_result:
+                    best_result = stack_result
+
+                # extract stack result
+                correlations_dict, nwst_id, trace_master, nstack = stack_result
+
+                # now pick stacked trace with PyLoT for a more precise pick (use raw trace, gets filtered by autoPyLoT)
+                pick_stacked = repick_master_trace(wfdata_lowf, trace_master, pylot_parameter, event, event_id, metadata,
+                                                   phase_type, correlation_out_dir)
+
+                if not pick_stacked or not pick_stacked.time_errors.uncertainty:
+                    logging.info(f'Invalid autoPyLoT pick on master trace. Try next one.')
+                    continue
+                else:
+                    break
+
+            else:
+                logging.info('Did not find autoPyLoT pick for any stacked trace.')
+
+                if params[phase_type]['ignore_autopylot_fail_on_master'] is True:
+                    # Fallback in case autopylot failed
+                    logging.warning(f'Could not pick stacked trace. Using reference pick instead. If you need '
+                                    f'absolute onsets you need to account for a time-shift.')
+
+                    correlations_dict, nwst_id, trace_master, nstack = best_result
+                    pick_stacked = get_pick4station(picks, network_code=trace_master.stats.network,
+                                                    station_code=trace_master.stats.station, method='auto')
+                else:
+                    pick_stacked = None
+
+                if not pick_stacked:
+                    logging.info('Could not find reference pick also. Continue with next phase.')
+                    continue
+
+        else:
+            raise NotImplementedError('Loading stacked trace currently not implemented')
+            # stack_result = load_stacked_trace(eventdir, params[phase_type]['min_corr_stacking'])

         if not stack_result:
             logging.info('No stack result. Continue.')
             continue

         #############################################
         # NOW DO THE FINAL CORRELATION
         # extract stack result
         correlations_dict, nwst_id, trace_master, nstack = stack_result

         if params[phase_type]['plot']:
             # plot correlations of traces used to generate stacked trace
@@ -700,11 +751,6 @@ def correlate_event(eventdir: str, pylot_parameter: PylotParameter, params: dict
         # write unfiltered trace
         trace_master.write(os.path.join(correlation_out_dir, '{}_stacked.mseed'.format(trace_master.id)))

-        # now pick stacked trace with PyLoT for a more precise pick (use raw trace, gets filtered by autoPyLoT)
-        pick_stacked = repick_master_trace(wfdata_lowf, trace_master, pylot_parameter, event, event_id, metadata,
-                                           phase_type, correlation_out_dir)
-        if not pick_stacked:
-            continue

         # correlate stations with repicked and stacked master trace
         fig_dir_traces = make_figure_dirs(fig_dir, trace_master.id)
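The rewritten block relies on Python's `for`/`else`: the `else` branch runs only when the loop ends without `break`, i.e. when no candidate master trace produced a valid pick. A minimal self-contained sketch of the pattern:

```python
# for/else pattern as used above: 'else' runs only if no 'break' occurred.
def is_valid(candidate):
    return candidate >= 3  # stand-in for the autoPyLoT pick check

result = None
for candidate in (1, 2, 3):
    if is_valid(candidate):
        result = candidate
        break
else:
    result = 'fallback'  # corresponds to the reference-pick fallback above

print(result)  # -> 3
```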
@@ -826,7 +872,7 @@ def get_picks_mean(picks: list) -> UTCDateTime:


 def get_corrected_taupy_picks(picks: list, taupypicks: list, all_available: bool = False) -> tuple:
-    """ get mean/median from picks taupy picks, correct latter for the difference """
+    """ get mean/median from picks relative to taupy picks, correct latter for the difference """

     def nwst_id_from_wfid(wfid):
         return '{}.{}'.format(wfid.network_code if wfid.network_code else '',
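A toy numeric example of the correction the docstring describes (values invented): the median offset between picked and predicted onsets is added to all TauPy onsets, which then serve as reference picks everywhere:

```python
# Toy example (invented values) of the median correction described above.
import numpy as np

picked = np.array([10.2, 11.0, 12.4])   # onsets picked by autoPyLoT (s)
taupy = np.array([10.0, 10.5, 12.0])    # theoretical TauPy onsets (s)

median_diff = np.median(picked - taupy)  # 0.4 s
taupy_corrected = taupy + median_diff    # corrected reference onsets
print(taupy_corrected)                   # [10.4 10.9 12.4]
```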
@@ -1165,38 +1211,38 @@ def stack_mastertrace(wfdata_lowf: Stream, wfdata_highf: Stream, wfdata_raw: Str
    A master trace will be created by stacking well correlating traces onto this station.
    """

    def get_best_station4stack(sta_result):
        """ return station with maximum mean_ccc"""
    def get_best_stations4stack(sta_result, n_max=4):
        """ return stations sorted after maximum mean_ccc"""
        ccc_means = {nwst_id: value['mean_ccc'] for nwst_id, value in sta_result.items() if
                     not np.isnan(value['mean_ccc'])}
        if len(ccc_means) < 1:
            logging.warning('No valid station found for stacking! Return.')
            return
        best_station_id = max(ccc_means, key=ccc_means.get)
        logging.info(
            'Found highest mean correlation for station {} ({})'.format(best_station_id, max(ccc_means.values())))
        return best_station_id
        best_station_ids = sorted(ccc_means, key=ccc_means.get, reverse=True)[:n_max]
        logging.info(f'Found mean correlations for stations: {ccc_means}')
        return best_station_ids, ccc_means

    station_results = iterate_correlation(wfdata_lowf, wfdata_highf, channels, picks, method, params, fig_dir=fig_dir)
    nwst_id_master = get_best_station4stack(station_results)
    nwst_ids_master, ccc_means = get_best_stations4stack(station_results)

    # in case no stream with a valid pick is found
    if not nwst_id_master:
        logging.info('No mastertrace found! Will skip this event.')
    if not nwst_ids_master:
        logging.info('No mastertrace found! Continue with next.')
        return None

    trace_master = station_results[nwst_id_master]['trace']
    stations4stack = station_results[nwst_id_master]['stations4stack']
    correlations_dict = station_results[nwst_id_master]['correlations_dict']
    for nwst_id_master in nwst_ids_master:
        trace_master = station_results[nwst_id_master]['trace']
        stations4stack = station_results[nwst_id_master]['stations4stack']
        correlations_dict = station_results[nwst_id_master]['correlations_dict']

    wfdata_highf += trace_master
        wfdata_highf += trace_master

    dt_pre, dt_post = params['dt_stacking']
    trace_master, nstack = apply_stacking(trace_master, stations4stack, wfdata_raw, picks, method=method,
                                          do_rms_check=params['check_RMS'], plot=params['plot'], fig_dir=fig_dir,
                                          dt_pre=dt_pre, dt_post=dt_post)
        dt_pre, dt_post = params['dt_stacking']
        trace_master, nstack = apply_stacking(trace_master, stations4stack, wfdata_raw, picks, method=method,
                                              do_rms_check=params['check_RMS'], plot=params['plot'], fig_dir=fig_dir,
                                              dt_pre=dt_pre, dt_post=dt_post)

    return correlations_dict, nwst_id_master, trace_master, nstack
        yield correlations_dict, nwst_id_master, trace_master, nstack


def iterate_correlation(wfdata_lowf: Stream, wfdata_highf: Stream, channels: list, picks: list, method: str,
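With the change from return to yield, stack_mastertrace becomes a generator producing one stack result per candidate master station (best mean CCC first), so the caller can fall back to the next candidate when the stacked trace cannot be picked. A consumption sketch (argument order and try_pick are assumptions, not taken from this diff):

for stack_result in stack_mastertrace(wfdata_lowf, wfdata_highf, wfdata_raw, channels,
                                      picks, method, params, fig_dir=fig_dir):
    correlations_dict, nwst_id, trace_master, nstack = stack_result
    if try_pick(trace_master):  # hypothetical: stop at the first stack that can be picked
        break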
@@ -1984,4 +2030,5 @@ if __name__ == "__main__":
# MAIN +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
correlation_main(ARGS.dmt_path, ARGS.pylot_infile, params=CORR_PARAMS, istart=int(ARGS.istart),
                 istop=int(ARGS.istop),
                 channel_config=CHANNELS, update=ARGS.update, event_blacklist=ARGS.blacklist)
                 channel_config=CHANNELS, update=ARGS.update, event_blacklist=ARGS.blacklist,
                 select_events=None)  # can add a list of event ids if only specific events shall be picked
@@ -32,9 +32,10 @@ export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot/"
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
#python pick_correlation_correction.py 'H:\sciebo\dmt_database' 'H:\Sciebo\dmt_database\pylot_alparray_mantle_corr_S_0.01-0.2.in' -pd -n 4 -t

pylot_infile='/home/marcel/.pylot/pylot_alparray_syn_fwi_mk6_it3.in'
#pylot_infile='/home/marcel/.pylot/pylot_adriaarray_corr_P_and_S.in'
#pylot_infile='/home/marcel/.pylot/pylot_alparray_syn_fwi_mk6_it3.in'
pylot_infile='/home/marcel/.pylot/pylot_adriaarray_corr_P_and_S.in'

# THIS SCRIPT SHOULD BE CALLED BY "submit_to_grid_engine.py" using the following line:
python pick_correlation_correction.py $1 $pylot_infile -pd -n ${NSLOTS:=1} -istart $2 --params 'parameters_fwi_mk6_it3.yaml'
# use -pd for detailed plots in eventdir/correlation_XX_XX/figures
python pick_correlation_correction.py $1 $pylot_infile -n ${NSLOTS:=1} -istart $2 --params 'parameters_adriaarray.yaml' # -pd
#--event_blacklist eventlist.txt
@@ -3,21 +3,26 @@
import subprocess

fnames = [
    ('/data/AlpArray_Data/dmt_database_synth_model_mk6_it3_no_rotation', 0),
    ('/data/AdriaArray_Data/dmt_database_mantle_M5.0-5.4', 0),
    ('/data/AdriaArray_Data/dmt_database_mantle_M5.4-5.7', 0),
    ('/data/AdriaArray_Data/dmt_database_mantle_M5.7-6.0', 0),
    ('/data/AdriaArray_Data/dmt_database_mantle_M6.0-6.3', 0),
    ('/data/AdriaArray_Data/dmt_database_mantle_M6.3-10.0', 0),
    # ('/data/AdriaArray_Data/dmt_database_ISC_mantle_M5.0-5.4', 0),
    # ('/data/AdriaArray_Data/dmt_database_ISC_mantle_M5.4-5.7', 0),
    # ('/data/AdriaArray_Data/dmt_database_ISC_mantle_M5.7-6.0', 0),
    # ('/data/AdriaArray_Data/dmt_database_ISC_mantle_M6.0-10.0', 0),
]

#fnames = [('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_SKS-phase', 0),
#          ('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_S-phase', 0),]

####
script_location = '/home/marcel/VersionCtrl/git/code_base/correlation_picker/submit_pick_corr_correction.sh'
script_location = '/home/marcel/VersionCtrl/git/pylot/pylot/correlation/submit_pick_corr_correction.sh'
####

for fnin, istart in fnames:
    input_cmds = f'qsub -q low.q@minos15,low.q@minos14,low.q@minos13,low.q@minos12,low.q@minos11 {script_location} {fnin} {istart}'

    print(input_cmds)
    print(subprocess.check_output(input_cmds.split()))
0 pylot/tomography/__init__.py Normal file
23 pylot/tomography/fmtomo.py Normal file
@@ -0,0 +1,23 @@
from pylot.tomography.fmtomo_utils import Tomo3d
import os

citer = 0
niter = 12
n_proc = 4  # only four processes for minimal example with four sources

# for some reason this did not work as expected and was commented out
#if os.path.isfile('inviter.in'):
#    with open('inviter.in', 'r') as infile:
#        citer = int(infile.read())
#        print('Continue on iteration step ', citer)

tomo = Tomo3d(os.getcwd(), os.getcwd(), overwrite=True, buildObs=False, saveRays=[6, 12], citer=citer)

try:
    tomo.runTOMO3D(n_proc, niter)
except KeyboardInterrupt:
    print('runTOMO3D interrupted by user or machine. Cleaning up.')
except Exception as e:
    print(f'Catching unknown Exception in runTOMO3D: {e}. Trying to clean up...')
finally:
    tomo.removeDirectories()
2 pylot/tomography/fmtomo_tools/__init__.py Normal file
@@ -0,0 +1,2 @@
# -*- coding: utf-8 -*-
#
177 pylot/tomography/fmtomo_tools/compare_arrivals_hybrid.py Executable file
@@ -0,0 +1,177 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Small script to compare arrival times of hybrid tau-p/fmm with simple tau-p for standard earth model

import os
import argparse

import numpy as np
import matplotlib.pyplot as plt

from obspy.taup import TauPyModel

from pylot.tomography.fmtomo_tools.fmtomo_teleseismic_utils import organize_receivers, organize_sources, organize_event_names


def read_file(fnin):
    infile = open(fnin, 'r')
    return infile.readlines()


def compare_arrivals(arrivals_file, receivers_file, sources_file, input_source_file, model='ak135_diehl_v2', exclude_phases=[]):
    '''
    Reads FMTOMO arrivals.dat file together with corresponding receiver, source and input_source files to match
    each arrival to a source and receiver combination, calculate tau-p arrivals for a given earth model and
    calculate differences
    '''
    arrivals_taup = {}
    arrivals_tomo = {}
    receiver_ids = {}
    event_names = {}
    events = organize_event_names(input_source_file)
    model = TauPyModel(model)

    # organize sources and receivers in dictionaries containing their ID used by FMTOMO as dictionary key
    receivers_dict = organize_receivers(receivers_file)
    sources_dict = organize_sources(sources_file)

    # read arrivals file
    with open(arrivals_file, 'r') as infile_arrivals:
        arrivals = infile_arrivals.readlines()

    count = 0
    for src_number, source in sources_dict.items():
        if source['phase'] in exclude_phases:
            continue
        src_name = events[src_number - 1]
        if not src_name in event_names.keys():
            arrivals_taup[src_name] = []
            arrivals_tomo[src_name] = []
            receiver_ids[src_name] = []
            event_names[src_name] = count
            count += 1

    for line in arrivals:
        # read line by line from fmtomo_tools output file arrivals.dat
        rec_id, src_id, ray_id, refl, arrival_time, diff, head = line.split()
        arrival_time = float(arrival_time)
        rec_id = int(rec_id)
        src_id = int(src_id)
        ray_id = int(ray_id)

        # identify receiver and source using dictionary
        receiver = receivers_dict[rec_id]
        source = sources_dict[src_id]
        src_name = events[src_id - 1]
        phase = source['phase']
        if phase in exclude_phases: continue
        taup_arrival = model.get_travel_times_geo(source_depth_in_km=source['depth'],
                                                  source_latitude_in_deg=source['lat'],
                                                  source_longitude_in_deg=source['lon'],
                                                  receiver_latitude_in_deg=receiver['lat'],
                                                  receiver_longitude_in_deg=receiver['lon'],
                                                  phase_list=[phase])
        receiver_depth_in_km = 6371. - receiver['rad']
        if len(taup_arrival) == 1:
            taup_arrival_time = taup_arrival[0].time
        else:
            taup_arrival_time = np.nan
        arrivals_taup[src_name].append(taup_arrival_time)
        arrivals_tomo[src_name].append(arrival_time)
        receiver_ids[src_name].append(rec_id)

    #plt.plot([min(arrivals_taup),max(arrivals_taup)],[min(arrivals_taup), max(arrivals_taup)], 'k-')
    sorted_by_first_arrival = sorted([(src_name, min(arrivals)) for src_name, arrivals in arrivals_taup.items()],
                                     key=lambda x: x[1])

    # print some output for analysis
    for item in sorted_by_first_arrival:
        print(item)
    #[print(source) for source in sources_dict.items()]
    #[print(item) for item in enumerate(events)]

    current_fmtomo_folder_name = os.path.split(os.path.abspath(arrivals_file))[-2]
    fname_savefig = '{}'.format(current_fmtomo_folder_name)
    if exclude_phases:
        fname_savefig += '_e'
        for phase in exclude_phases:
            fname_savefig += '_{}'.format(phase)

    plot_differences(arrivals_taup, arrivals_tomo, sorted_by_first_arrival, fname_savefig)
    #for event_name in ['20160124_103037.a_P.ttf', '20160729_211833.a_Pdiff.ttf', '20160729_211833.a_P.ttf']:
    #    plot_event(arrivals_tomo, arrivals_taup, receiver_ids, receivers_dict, src_name=event_name)


def plot_differences(arrivals_taup, arrivals_tomo, sorted_by_first_arrival, fname_savefig):
    fig = plt.figure(figsize=(16, 9))
    ax = fig.add_subplot(111)

    # init plot for tt differences
    cmap = plt.get_cmap('jet')
    colors = cmap(np.linspace(0, 1, len(sorted_by_first_arrival)))

    for index, item in enumerate(sorted_by_first_arrival):
        src_name = item[0]
        ax.scatter(arrivals_taup[src_name], np.array(arrivals_tomo[src_name]) - np.array(arrivals_taup[src_name]),
                   c=colors[index], s=25, marker='.', label=src_name, edgecolors='none')

    # shrink box for legend
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])

    ax.legend(bbox_to_anchor=[1, 1], loc='upper left')

    plt.title(fname_savefig)
    ax.set_xlabel('Absolute time $t_{tau-p}$')
    ax.set_ylabel('Time difference $t_{hybrid} - t_{tau-p}$')

    print('Saving plot to {}.png'.format(fname_savefig))
    fig.savefig(fname_savefig + '.png', dpi=300)
    #plt.show()


def plot_event(arrivals_tomo, arrivals_taup, receiver_ids_dict, receivers_dict, src_name):
    arrivals_diff = np.array(arrivals_tomo[src_name]) - np.array(arrivals_taup[src_name])
    receiver_ids = receiver_ids_dict[src_name]
    x = np.array([receivers_dict[rec_id]['lon'] for rec_id in receiver_ids])
    y = np.array([receivers_dict[rec_id]['lat'] for rec_id in receiver_ids])
    sc = plt.scatter(x, y, c=arrivals_diff, edgecolor='none')
    plt.xlabel('Longitude [deg]')
    plt.ylabel('Latitude [deg]')
    cbar = plt.colorbar(sc)
    cbar.ax.set_ylabel('traveltime difference ($t_{hybrid} - t_{tau-p}$)')
    plt.title('{}'.format(src_name))
    plt.show()

# folders=[
#     'alparray_0_receiver_elev_zero',
#     'alparray_0_receiver_elev_zero_finer_pgrid_vgrid_r',
#     'alparray_0_receiver_elev_zero_finer_vgrid_llr',
#     'alparray_0_receiver_elev_zero_smaller_box',
#     'alparray_0_receiver_elev_zero_shallow_box',
#     'alparray_0_receiver_elev_zero_finer_pgrid_llr',
#     'alparray_0_receiver_elev_zero_finer_interface',
#     #'alparray_0_receiver_elev_zero_bigger_box',
# ]


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Compare arrivals with TauP-times')
    parser.add_argument('fmtomodir', help='path containing fm3d output')
    parser.add_argument('-e', dest='exclude', default=[], help='exclude phases, comma separated, no spaces')
    args = parser.parse_args()

    fdir = args.fmtomodir

    arrivals_file = os.path.join(fdir, 'arrivals.dat')
    receivers_file = os.path.join(fdir, 'receivers.in')
    sources_file = os.path.join(fdir, 'sources.in')
    input_source_file = os.path.join(fdir, 'input_source_file_P.in')

    exclude_phases = args.exclude
    if exclude_phases:
        exclude_phases = exclude_phases.split(',')
    compare_arrivals(arrivals_file, receivers_file, sources_file, input_source_file=input_source_file, exclude_phases=exclude_phases)
39 pylot/tomography/fmtomo_tools/create_tradeoff_runs.py Normal file
@@ -0,0 +1,39 @@
import os

cwdir = '/data/AlpArray_Data/fmtomo/v5/tradeoff_curves'
parent_dir_name = 'crust_included_grad_smooth_FIXED_dts'  #_grad_1.5'

dampings = [3., 10., 30.]  #, 30.]
smoothings = [5.6]


def main(submit_run=True):
    fdir_parent = os.path.join(cwdir, parent_dir_name)
    for damp in dampings:
        for smooth in smoothings:
            fdir_out = fdir_parent + '_sm{}_damp{}'.format(smooth, damp)
            if not os.path.isdir(fdir_out):
                os.mkdir(fdir_out)
            os.system('cp -P {}/* {}'.format(fdir_parent, fdir_out))
            invertfile = os.path.join(fdir_out, 'invert3d.in')
            modify_invert_in(invertfile, damp, smooth)
            if submit_run:
                os.chdir(fdir_out)
                os.system('qsub submit_fmtomo.sh')


def modify_invert_in(fnin, damp, smooth):
    with open(fnin, 'r') as infile:
        lines = infile.readlines()

    with open(fnin, 'w') as outfile:
        for line in lines:
            if not line.startswith('c'):
                value, comment = line.split('c:')
                if 'Global damping' in comment:
                    line = line.replace(value.strip(), str(damp) + ' ')
                elif 'Global smoothing' in comment:
                    line = line.replace(value.strip(), str(smooth) + ' ')
            outfile.write(line)


if __name__ == '__main__':
    main()
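modify_invert_in keys on the 'c:' marker separating a value from its description in invert3d.in. An illustrative before/after for one line (layout assumed, values invented):

# before:  10.0                 c: Global damping (epsilon)
# after modify_invert_in(fnin, damp=3.0, smooth=5.6):
#          3.0                  c: Global damping (epsilon)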
29 pylot/tomography/fmtomo_tools/dist_first_src.py Executable file
@@ -0,0 +1,29 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import argparse

from obspy.geodetics.base import gps2dist_azimuth


def main(infile):
    per = 4e4  # earth perimeter [km]

    fid = open(infile, 'r')
    nrec = fid.readline()
    latsrc, lonsrc, depsrc = [float(value) for value in fid.readline().split()]
    phase = fid.readline()
    latrec, lonrec, deprec = [float(value) for value in fid.readline().split()[:3]]

    print('Lat/Lon Source: {} / {}'.format(latsrc, lonsrc))
    print('Lat/Lon Receiver: {} / {}'.format(latrec, lonrec))

    dist_deg = gps2dist_azimuth(latsrc, lonsrc, latrec, lonrec)[0] / 1e3 / per * 360
    print('Distance: {} [deg]'.format(dist_deg))


if __name__ == "__main__":
    parser = argparse.ArgumentParser('Estimate distance from source to first'
                                     ' receiver in WGS84 ellipsoid in FMTOMO pick file.')
    parser.add_argument('infile', help='FMTOMO pickfile (*.ttf)')
    args = parser.parse_args()
    main(args.infile)
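The conversion divides the geodesic distance in metres by 1e3 (kilometres) and by the 4e4 km perimeter before scaling to 360 degrees. A worked example with an illustrative distance:

# 4.45e6 m from gps2dist_azimuth -> 4.45e6 / 1e3 = 4450 km
# 4450 / 4e4 * 360 = 40.05 deg epicentral distance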
185 pylot/tomography/fmtomo_tools/event_thinning.py Normal file
@@ -0,0 +1,185 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import glob, os, shutil
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
from matplotlib.collections import PatchCollection

from obspy.geodetics import gps2dist_azimuth

#pwd = '/rscratch/minos13/marcel/fmtomo_alparray/v3.5/alparray_events_thinned/picks'
pwd = '/data/AlpArray_Data/fmtomo/v6/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10_EASI_test_Plomerova_NS_events/picks'
os.chdir(pwd)
infiles = glob.glob('*.ttf')

clat = 46.
clon = 11.

ddist = 5
dazim = 5


def make_grid(ddist, dazim):
    distgrid = np.arange(35, 135, ddist)
    bazimgrid = np.arange(0, 360, dazim)
    grid = []

    for bazim in np.deg2rad(bazimgrid):
        for dist in distgrid:
            grid.append(np.array([bazim, dist]))
    return np.array(grid)


def make_axes():
    fig = plt.figure(figsize=(16, 9))
    ax1 = fig.add_subplot(121, projection='polar')
    ax2 = fig.add_subplot(122)
    ax1.set_theta_direction(-1)
    ax1.set_theta_zero_location('N')
    return ax1, ax2


def load_events():
    events = {}
    for infile in infiles:
        with open(infile, 'r') as fid:
            eventid = infile.split('.ttf')[0]
            npicks = int(fid.readline())
            lat, lon, depth = [float(item) for item in fid.readline().split()]
            dist, bazim, azim = gps2dist_azimuth(clat, clon, lat, lon, a=6.371e6, f=0)
            bazim = np.deg2rad(bazim)
            dist = dist / (np.pi * 6371) * 180 / 1e3
            events[eventid] = dict(dist=dist, bazim=bazim, npicks=npicks)
    return events


def get_events_in_grid():
    events_in_grid = []
    for index, gcorner in enumerate(grid):
        bazim_l, dist_l = gcorner
        bazim_u = bazim_l + np.deg2rad(dazim)
        dist_u = dist_l + ddist
        events_in_grid.append(dict(bazims=(bazim_l, bazim_u), dists=(dist_l, dist_u), events=[]))
        for eventid, event in events.items():
            if (dist_l <= event['dist'] < dist_u) and (bazim_l <= event['bazim'] <= bazim_u):
                events_in_grid[index]['events'].append(eventid)
    return events_in_grid


def filter_events():
    filtered_events = {}
    for eventdict in events_in_grid:
        cur_events = eventdict['events']
        if not cur_events: continue
        eventid = get_best_event(cur_events)
        filtered_events[eventid] = events[eventid]
    return filtered_events


def get_best_event(cur_events):
    ''' return eventid with highest number of picks'''
    select_events = {key: events[key] for key in cur_events}
    npicks = {key: value['npicks'] for key, value in select_events.items()}
    eventid = max(npicks, key=npicks.get)
    return eventid


def plot_distribution(events_dict):
    cmap_bnd = plt.get_cmap('Greys_r')
    cmap_center = plt.get_cmap('viridis')
    nevents = [len(grid_dict['events']) for grid_dict in events_dict]
    npicks = []
    for ev_dict in events_dict:
        npick = 0
        for eventid in ev_dict['events']:
            npick += events[eventid]['npicks']
        npicks.append(npick)

    npicks = np.array(npicks)

    ev_max = max(nevents)
    np_max = max(npicks)

    print('N picks total:', np.sum(npicks))
    print('N picks max: ', np_max)
    print('N events max: ', ev_max)

    ax_polar, ax_hist = make_axes()
    patches = []
    for npick, ev_dict in zip(npicks, events_dict):
        bazim_l, bazim_u = ev_dict.get('bazims')
        dist_l, dist_u = ev_dict.get('dists')
        n_ev = len(ev_dict.get('events'))
        color_edge = cmap_bnd(n_ev / ev_max)
        color_center = cmap_center(npick / np_max)
        # color = cmap(np.random.rand())
        rect = Rectangle((bazim_l, dist_l), np.deg2rad(dazim), ddist, edgecolor=color_edge)  #, facecolor=color_center)
        patches.append(rect)

    collection = PatchCollection(patches, cmap=cmap_center)
    collection.set_array(npicks)
    ax_polar.add_collection(collection)
    ax_polar.set_ylim((10, 135))
    cbar = plt.colorbar(collection)
    cbar.set_label('N picks')

    # ax.scatter(grid[:, 0] + 0.5 * dazim, grid[:, 1] + 0.5 * ddist, c=nevents, s=50)
    bazims = []
    dists = []
    for event in events.values():
        bazims.append(event.get('bazim'))
        dists.append(event.get('dist'))

    ax_polar.scatter(bazims, dists, c='k', zorder=3, s=5, alpha=0.5)
    ax_hist.hist(np.rad2deg(bazims), bins=np.arange(0, 360, dazim))
    ax_hist.set_xlabel('Backazimuth (deg)')
    ax_hist.set_ylabel('Number of events')
    plt.title('Polar event distribution and histogram of backazimuths')


def export_filtered_events(fdir_save='picks_save'):
    if not os.path.isdir(fdir_save):
        os.mkdir(fdir_save)
    for infile in infiles:
        eventid = infile.split('.ttf')[0]
        if not eventid in events:
            for fname in glob.glob('{}.*'.format(eventid)):
                shutil.move(fname, fdir_save)
                print('Moved file {} to path {}'.format(fname, fdir_save))


def write_input_source_file(fname='input_source_file_P_new.in'):
    with open(fname, 'w') as outfile:
        outfile.write('{}\n'.format(len(events)))
        for eventid in sorted(list(events.keys())):
            outfile.write('1 1 {}.ttf\n'.format(eventid))


def filter_bazim(events, bazims_list):
    events_filtered = {}
    for eventid, event_dict in events.items():
        for baz_min, baz_max in bazims_list:
            # bazim is stored in radians; convert to degrees for the comparison
            if baz_min <= np.rad2deg(event_dict['bazim']) <= baz_max:
                events_filtered[eventid] = event_dict

    return events_filtered


filter_bazims = [(330, 360), (0, 30), (150, 210)]

events = load_events()
#plot_distribution(events)

events = filter_bazim(events, bazims_list=filter_bazims)
print()
#grid = make_grid(ddist, dazim)
#events_in_grid = get_events_in_grid()
#plot_distribution()
#events = filter_events()
#events_in_grid = get_events_in_grid()
#plot_distribution(events)
#plt.show()
export_filtered_events()
write_input_source_file()
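For orientation, the helpers above bin events on a polar (backazimuth, distance) grid and keep the event with the most picks per occupied cell; a sketch of the expected grid dimensions with the defaults defined in this script:

# ddist = dazim = 5 -> np.arange(35, 135, 5) gives 20 distance bins and
# np.arange(0, 360, 5) gives 72 backazimuth bins, i.e.
# make_grid(5, 5).shape == (72 * 20, 2) == (1440, 2)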
@@ -0,0 +1,28 @@
import os

import numpy as np
from obspy.geodetics import gps2dist_azimuth

from fmtomo_tools.fmtomo_teleseismic_utils import organize_sources

fmtomodir = '/data/AdriaArray_Data/fmtomo_adriaarray/alpadege/no_crust_correction'
clon = 17.5
clat = 42.25


os.chdir(fmtomodir)

sources = organize_sources('sources.in')

dists = []
lats = []
lons = []

for source_id, source_dict in sources.items():
    slat = source_dict['lat']
    slon = source_dict['lon']
    lats.append(slat)
    lons.append(slon)
    dist_m = gps2dist_azimuth(slat, slon, clat, clon, a=6.371e6, f=0)[0]
    dist = dist_m / (np.pi * 6371) * 180 / 1e3
    dists.append(dist)
2019 pylot/tomography/fmtomo_tools/fmtomo_grid_utils.py Normal file
File diff suppressed because it is too large
52 pylot/tomography/fmtomo_tools/fmtomo_teleseismic.py Normal file
@@ -0,0 +1,52 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import argparse

from fmtomo_teleseismic_utils import setup_fmtomo_sim

#setup_fmtomo_sim('/data/AlpArray_Data/dmt_database/', '//data/AlpArray_Data/fmtomo/v6_S/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10')
#setup_fmtomo_sim('/rscratch/minos13/marcel/dmt_database_test_event', '/home/marcel/marcel_scratch/alparray/fmtomo_traveltime_tomo/alparray_0/')
#setup_fmtomo_sim('/rscratch/minos13/marcel/dmt_database_m7', '/home/marcel/marcel_scratch/alparray/fmtomo_traveltime_tomo/alparray_0/')
#setup_fmtomo_sim('/rscratch/minos22/marcel/dmt_database_m7', '/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0_receiver_elev_zero_smaller_box')
#setup_fmtomo_sim('/rscratch/minos22/marcel/dmt_database_m7', '/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0_receiver_elev_zero_bigger_box')
#setup_fmtomo_sim('/rscratch/minos22/marcel/dmt_database_m7', '/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0_receiver_elev_zero_shallow_box')
#setup_fmtomo_sim('/rscratch/minos22/marcel/dmt_database_m7', '/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0_receiver_elev_zero_finer_interface')
#setup_fmtomo_sim('/rscratch/minos22/marcel/dmt_database_m7', '/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0_receiver_elev_zero_bigger_box_equal_dist_2')

#pgrid = Propgrid('/rscratch/minos22/marcel/alparray/fmtomo_traveltime_tomo/alparray_0/propgrid.in')

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Prepare grid for fm3d teleseismic hybrid calculation.')
    parser.add_argument('dmt_path', help='path containing dmt_database with PyLoT picks')
    parser.add_argument('fmtomodir', help='path containing fm3d output')
    parser.add_argument('fname_extension', help='filename extension of pick files')
    parser.add_argument('--blacklist', default=None,
                        help='station blacklist file (csv). After first line: NW_id,ST_id in each line')
    parser.add_argument('--no_write_init_nodes', default=False, action='store_true',
                        help='do not calculate and write init nodes')
    parser.add_argument('--no_recalculate_init_nodes', default=False, action='store_true',
                        help='do not recalculate init nodes if nodes file exists')
    parser.add_argument('-n', dest='ncores', default=None, help='number of cores for multiprocessing')
    parser.add_argument('--model', default='ak135')
    parser.add_argument('--s_phase', default=False, action='store_true')

    args = parser.parse_args()

    database_path = os.path.abspath(args.dmt_path)
    fmtomodir = os.path.abspath(args.fmtomodir)
    if args.ncores is not None:
        ncores = int(args.ncores)
    else:
        ncores = None
    fname_extension = args.fname_extension

    sub_phases = {'P': ['P', 'PKP'], 'S': ['S', 'SKS']}
    phase_types = ['P']
    if args.s_phase:
        phase_types.append('S')

    setup_fmtomo_sim(database_path, fmtomodir, fname_extension, sub_phases, ncores=ncores, check_notesfile=False,
                     model=args.model, fname_station_blacklist=args.blacklist,
                     no_write_init_nodes=args.no_write_init_nodes,
                     no_recalculate_init_nodes=args.no_recalculate_init_nodes, phase_types=phase_types)
930 pylot/tomography/fmtomo_tools/fmtomo_teleseismic_utils.py Normal file
@@ -0,0 +1,930 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import subprocess
import warnings
import os
import glob
from datetime import datetime

import multiprocessing
import numpy as np
import json

from obspy import read_events, UTCDateTime
from obspy.taup import TauPyModel
from obspy.geodetics import locations2degrees, gps2dist_azimuth

from pylot.core.util.utils import identifyPhaseID
from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT
from pylot.tomography.utils import pol2cart, pol2cart_vector
from pylot.tomography.utils import get_metadata


class Propgrid(object):
    '''
    small class that is built from an fm3d propgrid.in file; generates a regular grid in the same way as fm3d
    '''
    def __init__(self, filename):
        self.r_earth = 6371.  # earth radius in km
        self.init_propgrid(filename)

    def init_propgrid(self, filename_propgrid):
        self.read_propgrid(filename_propgrid)
        self.init_rGrid()
        self.init_latGrid()
        self.init_longGrid()

    def init_rGrid(self):
        self.rs = np.zeros(self.nR)
        self.rbot = self.r_earth + self.zTop - (self.nR - 1) * self.deltaR
        for index in range(self.nR):
            self.rs[index] = self.rbot + index * self.deltaR

    def init_latGrid(self):
        self.lats = np.zeros(self.nLat)
        self.latS = np.deg2rad(self.latSdeg)
        self.deltaLat = np.deg2rad(self.deltaLatDeg)
        for index in range(self.nLat):
            self.lats[index] = self.latS + index * self.deltaLat

    def init_longGrid(self):
        self.longs = np.zeros(self.nLong)
        self.longW = np.deg2rad(self.longWdeg)
        self.deltaLong = np.deg2rad(self.deltaLongDeg)
        for index in range(self.nLong):
            self.longs[index] = self.longW + index * self.deltaLong

    def read_propgrid(self, filename_propgrid):
        infile = open(filename_propgrid, 'r')
        self.nR, self.nLat, self.nLong = [int(value) for value in infile.readline().split()]
        self.deltaR, self.deltaLatDeg, self.deltaLongDeg = [np.float64(value) for value in infile.readline().split()]
        self.zTop, self.latSdeg, self.longWdeg = [np.float64(value) for value in infile.readline().split()]
        infile.close()
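A quick usage sketch for Propgrid (assumes a propgrid.in in the working directory; attributes as defined above, lat/lon axes stored in radians):

import numpy as np
pg = Propgrid('propgrid.in')
print(pg.rbot, pg.rs[-1])                               # bottom and top radius in km
print(np.rad2deg(pg.lats[0]), np.rad2deg(pg.longs[0]))  # south-west grid corner in deg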
def check_event_notes(eventdir):
    eventfile = os.path.join(eventdir, 'notes.txt')
    if os.path.isfile(eventfile):
        with open(eventfile, 'r') as infile:
            notes = infile.readline()
        print(notes)
        if 'exclude' in notes:
            print('Will exclude this event!')
            return False
    else:
        print('No notes file found.')
    return True


def prepare_fmtomo_dir_first_run(fmtomo_dir):
    """
    This helper function calls the file 'fm3d_prepare_tele' in the fmtomo binary after creating a "fake" receivers.in
    and sources.in file so that fm3d can prepare the teleseismic run and writes the file "init_nodes.out" that contains
    information on the boundary nodes of the current propagation grid. Make sure that "grid3dg" has already been called
    for this function to work.
    :param fmtomo_dir:
    :return:
    """
    def write1(fid):
        fid.write('1\n')

    def write_rec_file():
        """ Writes a fake receiver (has to be inside the grid) """
        with open(recfile, 'w') as fid:
            write1(fid)
            fid.write(f'0 {clat} {clon}\n')
            for _ in range(3):
                write1(fid)

    def write_src_file():
        """ Writes a fake source """
        fakesrc_str = '''
1
1
P
0.0000 0.0000 0.0000 0.00000 0.00000 0.00000
1
1
2 1
1
1
'''

        with open(srcfile, 'w') as fid:
            fid.write(fakesrc_str)

    def get_clat_clon():
        if not os.path.isfile(propgrid_file):
            raise IOError(f'Missing file {propgrid_file} for fmtomo first run preparation.')

        with open(propgrid_file, 'r') as fid:
            _, nlat, nlon = (int(item) for item in fid.readline().split())
            _, dlat, dlon = (float(item) for item in fid.readline().split())
            _, lat0, lon0 = (float(item) for item in fid.readline().split())
        clat = lat0 + nlat / 2 * dlat
        clon = lon0 + nlon / 2 * dlon
        return clat, clon

    # check if binary file exists
    fmtomo_prep_tele = os.path.join(fmtomo_dir, 'fm3d_prepare_tele')
    assert os.path.isfile(fmtomo_prep_tele), f'Missing binary file {fmtomo_prep_tele}'

    # set filenames for propgrid, sources and receivers
    propgrid_file = os.path.join(fmtomo_dir, 'propgrid.in')
    recfile = os.path.join(fmtomo_dir, 'receivers.in')
    srcfile = os.path.join(fmtomo_dir, 'sources.in')

    # get coords from propgrid
    clat, clon = get_clat_clon()

    # write fake source and receiver files
    write_src_file()
    write_rec_file()

    # execute fm3d_prepare_tele from local directory
    curdir = os.getcwd()
    os.chdir(fmtomo_dir)
    rval = subprocess.check_output([fmtomo_prep_tele]).decode('utf-8')
    if not rval.split('\n')[-2] == ' Finished teleseismic preparation. Stop.':
        raise ValueError('Unexpected output of initialisation run.')

    if not os.path.isfile(os.path.join(fmtomo_dir, 'init_nodes.out')):
        raise Exception('Could not create output file init_nodes.')
    os.chdir(curdir)

    # clean up
    os.remove(srcfile)
    os.remove(recfile)

    print('Prepare fmtomo dir first run: Success')
def setup_fmtomo_sim(database_path_dmt, fmtomo_dir, fname_extension, sub_phases, ncores=None, model='ak135',
                     min_picks_per_phase=10, write_vtk=True, check_notesfile=True, fname_station_blacklist=None,
                     no_write_init_nodes=False, no_recalculate_init_nodes=False, phase_types=('P', 'S')):
    '''
    main function of this program, writes picks and input source file for FMTOMO obsdata program
    '''

    assert os.path.isdir(database_path_dmt), 'Unrecognized directory {}'.format(database_path_dmt)
    assert os.path.isdir(fmtomo_dir), 'Unrecognized directory {}'.format(fmtomo_dir)

    tstart = datetime.now()
    print('Starting script at {}'.format(tstart))
    print('Check notesfile set to {}'.format(check_notesfile))

    # save means of event traveltimes to dictionary for statistical analysis
    #tt_means = {}

    fname_fmtomo_nodes = os.path.join(fmtomo_dir, 'init_nodes.out')

    # do first initialisation of FMTOMO to generate init_nodes file if required
    if not no_recalculate_init_nodes:
        prepare_fmtomo_dir_first_run(fmtomo_dir)

    eventdirs = glob.glob(os.path.join(database_path_dmt, '*.?'))
    nEvents = len(eventdirs)
    # create directory that will contain the picks
    picksdir = os.path.join(fmtomo_dir, 'picks')
    if not os.path.isdir(picksdir):
        os.mkdir(picksdir)
    # track and count ALL source locations & phases (P and S) for fmtomo_tools
    associations_str = ''
    nsrc_total = 0
    # iterate over P and S to create one model each
    for phase_type in phase_types:
        sourcefile_str = ''
        nsrc = 0
        # iterate over all events in "database_path_dmt"
        for eventindex, eventdir in enumerate(eventdirs):
            print('Working on {}-picks for event {} ({}/{})'.format(phase_type, eventdir,
                                                                    eventindex + 1, len(eventdirs)))
            if check_notesfile and not check_event_notes(eventdir):
                continue
            # create ObsPy event from .pkl file in dmt eventdir
            event = get_event_obspy_dmt(eventdir)
            if len(event.origins) > 1:
                raise Exception('Ambiguous origin information for event {}'.format(event))
            origin = event.origins[0]
            # get all picks from PyLoT *.xml file
            picks = get_picks(eventdir, extension=fname_extension)
            if not picks:
                print('No picks for event {} found.'.format(eventdir))
                continue
            # remove picks for blacklisted stations
            if fname_station_blacklist:
                picks, n_deleted_blacklist = remove_blacklisted_station_picks(picks, fname_station_blacklist)
            # get metadata for current event from dmt_database
            metadata = get_metadata(eventdir)
            # get a dictionary containing coordinates for all stations
            stations_dict = metadata.get_all_coordinates()
            # catch event id
            event_id = get_event_id(eventdir)
            # map specific phases to another phase (Pdiff -> P)
            merge_phases = {'Pdiff': 'P'}
            for key, value in merge_phases.items():
                print('Will map phase {} to phase {} if present.'.format(key, value))
            # assign all picks of this event to a phase and add to sorted_picks dictionary
            sorted_picks = sort_picks_by_phase(picks, stations_dict, origin,
                                               sub_phases[phase_type], model, merge_phases=merge_phases)
            sorted_picks = remove_uncommon_picks(sorted_picks, min_picks_per_phase)
            # the following should not be necessary when calculating traveltimes on borders using TauPy
            #sorted_picks = translate_phase_names_TAUP(sorted_picks)
            #
            # iterate over sorted picks and write a traveltime file for each phase type
            for phase, picks_list in sorted_picks.items():
                pickfile_base = '{}_{}'.format(event_id, phase)
                pickfile_name = pickfile_base + '.ttf'
                init_nodes_name = pickfile_base + '.nodes'
                vtk_file_name = pickfile_base + '.vtk'
                pickfile = os.path.join(picksdir, pickfile_name)
                init_nodes_file = os.path.join(picksdir, init_nodes_name)
                vtk_file = os.path.join(picksdir, vtk_file_name)
                # remove source time
                picks_list = subtract_source_time(picks_list, origin.time)
                # save mean for statistical analysis
                #tt_means[pickfile_base] = mean
                # create a list with all "true" phases used for taupy to calculate boundary ttimes/ray parameters
                phases = list(set([item['true_phase'] for item in picks_list]))
                if not no_write_init_nodes:
                    if no_recalculate_init_nodes and os.path.isfile(init_nodes_file):
                        print('Found previously calculated init nodes. Will not recalculate.')
                    else:
                        # calculate travel times and ray parameters for boundary nodes and write to file
                        points_list = initNodesFm3d(fname_fmtomo_nodes, origin, model, phases, ncores=ncores)
                        # in case initNodes fails and returns None continue with next phase
                        if not points_list:
                            print('No points list, initNodesFm3d failed for event.')
                            continue
                        if not write_init_points(points_list, init_nodes_file):
                            print('Write init points failed for event.')
                            continue
                        if write_vtk:
                            write_vtk_file(points_list, vtk_file)
                # write picks to pickfile for fm3d
                write_picks_to_pickfile(pickfile, phase, picks_list, stations_dict, origin)
                # add pickfile to sourcefile string and count number of pickfiles
                sourcefile_str += '1 1 {}\n'.format(pickfile_name)
                # add source location, phaseID and .nodes filename to association string
                source_string = '{phase} {lat} {lon} {depth} {fname}\n'
                associations_str += source_string.format(phase=phase, lat=origin.latitude,
                                                         lon=origin.longitude, depth=origin.depth,
                                                         fname=init_nodes_name)
                nsrc += 1
                nsrc_total += 1
            average_time = (datetime.now() - tstart) / (eventindex + 1)
            print('Average time for event (phase {}): {}'.format(phase_type, average_time))
            print('ETA for {}-events: {}'.format(phase_type, tstart + nEvents * average_time))

        write_src_file(fmtomo_dir, phase_type, nsrc, sourcefile_str)

    write_assc_file(fmtomo_dir, nsrc_total, associations_str)
    #write_json(tt_means, os.path.join(fmtomo_dir, 'ttmeans.json'))

    print('Script finished! Good Bye!')
def write_src_file(fmtomo_dir, phase_type, nsrc, sourcefile_str):
    # write input_source_file for obsdata
    input_source_file = open(os.path.join(fmtomo_dir, 'input_source_file_{}.in'.format(phase_type)), 'w')
    input_source_file.write('{}\n'.format(nsrc))
    input_source_file.write(sourcefile_str)
    input_source_file.close()


def write_assc_file(fmtomo_dir, nsrc_total, associations_str):
    # write input_associations file for fm3d
    input_assc_file = open(os.path.join(fmtomo_dir, 'input_associations_file.in'), 'w')
    input_assc_file.write('{}\n'.format(nsrc_total))
    input_assc_file.write(associations_str)
    input_assc_file.close()


def write_json(object, fname):
    with open(fname, 'w') as outfile:
        json.dump(object, outfile, sort_keys=True, indent=4)
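The resulting input_source_file for obsdata then simply lists the pickfiles, one line per source (layout as written above; event names illustrative):

# 2
# 1 1 20160124_103037.a_P.ttf
# 1 1 20160729_211833.a_P.ttf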
def write_vtk_file(points_list, fname):
    outfile = open(fname, 'w')

    nPoints = len(points_list)

    outfile.write('# vtk DataFile Version 2.0\n')
    outfile.write('FMM Init points\n')
    outfile.write('ASCII\n')
    outfile.write('DATASET POLYDATA\n')
    outfile.write('POINTS {} float\n'.format(nPoints))

    for point in points_list:
        lat = point['pt_lat']
        lon = point['pt_lon']
        rad = point['pt_R']
        x, y, z = pol2cart(lat, lon, rad)
        outfile.write('{x} {y} {z}\n'.format(x=x, y=y, z=z))

    # write number of vertices and their indices
    outfile.write('\nVERTICES {} {}\n'.format(nPoints, 2 * nPoints))
    for index, point in enumerate(points_list):
        outfile.write('{} {}\n'.format(1, index))

    # write header with number of data points
    outfile.write('\nPOINT_DATA {}\n'.format(nPoints))

    # write header for traveltimes
    outfile.write('SCALARS traveltime float 1\n')
    outfile.write('LOOKUP_TABLE default\n')

    for point in points_list:
        time = point.get('time')
        outfile.write('{}\n'.format(time if time else 0))

    # write header for point indices
    outfile.write('SCALARS point_index integer 1\n')
    outfile.write('LOOKUP_TABLE default\n')

    for point in points_list:
        pt_index = point.get('pt_index')
        outfile.write('{}\n'.format(pt_index if pt_index else 0))

    outfile.write('VECTORS tt_grad float\n')
    for point in points_list:
        lat = point['pt_lat']
        lon = point['pt_lon']
        r_comp = point.get('ray_param_km_z_comp')
        n_comp = point.get('ray_param_km_n_comp')
        e_comp = point.get('ray_param_km_e_comp')
        sx, sy, sz = pol2cart_vector(lat, lon, n_comp, e_comp, r_comp)
        outfile.write('{sx} {sy} {sz}\n'.format(sx=sx if sx else 0.,
                                                sy=sy if sy else 0.,
                                                sz=sz if sz else 0.,))

    outfile.close()


def write_init_points(points_list, outfile):
    fid = open(outfile, 'w')
    print('Writing {} init points to file {}.'.format(len(points_list), outfile))
    output_str = ''
    # count number of points for file header
    npoints = 0
    for point_dict in points_list:
        nArrivals = point_dict.get('nArrivals')
        if not nArrivals:
            #print('No arrivals for point:')
            #print(point_dict)
            continue
        if nArrivals > 1:
            fid.close()
            os.remove(outfile)
            warning_template = 'Ambiguous information for point: {}, having {} different arrivals. Skipping event.'
            print(warning_template.format(point_dict, nArrivals))
            return False
        output_template = '{index} {time} {ray_param_z} {ray_param_n} {ray_param_e}\n'
        output_str += (output_template.format(index=point_dict['pt_index'],
                                              time=point_dict.get('time'),
                                              ray_param_z=point_dict.get('ray_param_km_z_comp'),
                                              ray_param_n=point_dict.get('ray_param_km_n_comp'),
                                              ray_param_e=point_dict.get('ray_param_km_e_comp')))
        npoints += 1
    fid.write('{}\n'.format(npoints))
    fid.write(output_str)
    fid.close()

    return True
def initNodesFm3d(filename_init_nodes, source_origin, model, phases, min_dist=25., ncores=None, R=6371000.):
    # This function uses obspy TauPy to calculate travel times and ray parameters on boundary points of the fm3d grid

    def exposed_sides(grid_boundaries, src_lat, src_lon):
        # check which sides of grid are exposed to source (taken from fm3d teleseismic.f90 code)
        # here: baz is calculated, not az
        north = grid_boundaries['north']
        east = grid_boundaries['east']
        south = grid_boundaries['south']
        west = grid_boundaries['west']

        # north
        baz1 = gps2dist_azimuth(north, west, src_lat, src_lon, a=R, f=0)[1]
        baz2 = gps2dist_azimuth(north, east, src_lat, src_lon, a=R, f=0)[1]
        exposed_north = all(np.cos(np.deg2rad(baz)) > 0.01 for baz in [baz1, baz2])
        # east
        baz1 = gps2dist_azimuth(north, east, src_lat, src_lon, a=R, f=0)[1]
        baz2 = gps2dist_azimuth(south, east, src_lat, src_lon, a=R, f=0)[1]
        exposed_east = all(np.sin(np.deg2rad(baz)) > 0.01 for baz in [baz1, baz2])
        # south
        baz1 = gps2dist_azimuth(south, west, src_lat, src_lon, a=R, f=0)[1]
        baz2 = gps2dist_azimuth(south, east, src_lat, src_lon, a=R, f=0)[1]
        exposed_south = all(np.cos(np.deg2rad(baz)) < -0.01 for baz in [baz1, baz2])
        # west
        baz1 = gps2dist_azimuth(north, west, src_lat, src_lon, a=R, f=0)[1]
        baz2 = gps2dist_azimuth(south, west, src_lat, src_lon, a=R, f=0)[1]
        exposed_west = all(np.sin(np.deg2rad(baz)) < -0.01 for baz in [baz1, baz2])

        exposed_dict = dict(north=exposed_north, east=exposed_east, south=exposed_south, west=exposed_west,
                            rbot=True)

        return exposed_dict

    def point_valid(pt_lat, pt_lon, pt_R, exposed_dict, grid_boundaries, R_earth):
        # check whether point has to be active or not depending on its position to the source

        def check_point(actual, desired, threshold=0.001):
            # checks receiver (boundary) point orientation by comparison to corner boundary points
            return abs(actual - desired) < threshold

        # check for negative depth (above surface)
        if pt_R > R_earth:
            return False

        # check if point belongs to bottom interface
        #if check_point(pt_R, grid_boundaries['rbot']):
        #    return True

        pt_faces_dir = {}
        pt_exposed_dir = {}

        directions = {'north': pt_lat,
                      'east': pt_lon,
                      'south': pt_lat,
                      'west': pt_lon,
                      'rbot': pt_R,}

        for direction, pt_lat_or_lon in directions.items():
            # check if point belongs to boundary of this direction and save to pt_faces_dir
            pt_direction_check = check_point(pt_lat_or_lon, grid_boundaries[direction])
            pt_faces_dir[direction] = pt_direction_check
            # check if point is exposed to source in this direction and save to pt_exposed_dir
            pt_exposed_dir[direction] = pt_direction_check and exposed_dict[direction]

        # compare number of points facing source direction to the actual direction they are facing
        return sum(pt_faces_dir.values()) == sum(pt_exposed_dir.values())

    import datetime
    now = datetime.datetime.now()
    infile = open(filename_init_nodes, 'r')
    R_earth = 6371.

    # read input files containing fm3d boundary points in each line in the order: lat lon radius
    init_nodes = infile.readlines()
    nPoints = len(init_nodes)
    # split lines and convert to floats except for first value which is converted to int
    init_nodes = [[float(val) if index != 0 else int(val) for index, val in enumerate(line.split())]
                  for line in init_nodes]

    grid_boundaries = dict(north=max(init_nodes, key=lambda x: x[1])[1],
                           east=max(init_nodes, key=lambda x: x[2])[2],
                           south=min(init_nodes, key=lambda x: x[1])[1],
                           west=min(init_nodes, key=lambda x: x[2])[2],
                           rbot=min(init_nodes, key=lambda x: x[3])[3],
                           rtop=max(init_nodes, key=lambda x: x[3])[3])

    # get source coordinates
    src_lat, src_lon, src_depth = source_origin.latitude, source_origin.longitude, source_origin.depth
    #src_lat = 0; src_lon = 180; src_depth = 50

    # calculate which sides are exposed to source
    exposed_dict = exposed_sides(grid_boundaries, src_lat, src_lon)

    # iterate over all points and calculate distance. Append a dictionary to an input list for
    # multiprocessing worker for each point
    points_list = []
    for point in init_nodes:
        pt_index, pt_lat, pt_lon, pt_R = point
        pt_depth = R_earth - pt_R
        dist = locations2degrees(pt_lat, pt_lon, src_lat, src_lon)

        # check minimum distance for this point
        if dist < min_dist:
            warnings.warn('Distance for point {} less than minimum'
                          ' distance ({} deg). Skipping event!'.format(point, min_dist))
            return

        # check if point is exposed to source and has to be initiated
        if not point_valid(pt_lat, pt_lon, pt_R, exposed_dict, grid_boundaries, R_earth):
            continue

        input_dict = {'pt_index': pt_index,
                      'pt_depth': pt_depth,
                      'pt_R': pt_R,
                      'pt_lat': pt_lat,
                      'pt_lon': pt_lon,
                      'src_lat': src_lat,
                      'src_lon': src_lon,
                      'src_depth': src_depth,
                      'dist': dist,
                      'model': model,
                      'phases': phases,
                      }
        points_list.append(input_dict)

    print('n Points init:', len(points_list))
    #plot_points(points_list)

    rvals = []
    pool = multiprocessing.Pool(ncores, maxtasksperchild=1000)
    for rval in pool.imap(taup_worker, points_list, chunksize=10):
        rvals.append(rval)
    pool.close()
    pool.join()

    print('Done after {}'.format(datetime.datetime.now() - now))

    #plot_points(points_list)
    return rvals
def taup_worker(input_dict):
    # initiate model for TauP method
    model = TauPyModel(model=input_dict['model'])
    huge_time = 1e7
    try:
        arrivals = model.get_ray_paths(input_dict['src_depth'], input_dict['dist'],
                                       phase_list=input_dict['phases'],
                                       receiver_depth_in_km=input_dict['pt_depth'], )
        if len(arrivals) < 1:
            #print('No arrivals for phase {}'.format(input_dict['phase']))
            #print(input_dict)
            return input_dict

        arr = arrivals[0]

        input_dict['nArrivals'] = len(arrivals)
        baz = gps2dist_azimuth(input_dict['pt_lat'], input_dict['pt_lon'],
                               input_dict['src_lat'], input_dict['src_lon'],
                               a=6371. * 1e3, f=0)[1]

        # calculate traveltime gradient, division by R to transform from rad to km
        # multiply with -1 to get values for azimuth instead of back-azimuth
        ray_param_km_z_comp = arr.ray_param / np.tan(np.deg2rad(arr.incident_angle)) / input_dict['pt_R']
        ray_param_km_n_comp = arr.ray_param * (-1) * np.cos(np.deg2rad(baz)) / input_dict['pt_R']
        ray_param_km_e_comp = arr.ray_param * (-1) * np.sin(np.deg2rad(baz)) / input_dict['pt_R']
        input_dict['ray_param_km_z_comp'] = ray_param_km_z_comp
        input_dict['ray_param_km_n_comp'] = ray_param_km_n_comp
        input_dict['ray_param_km_e_comp'] = ray_param_km_e_comp

        input_dict['time'] = arr.time

    except Exception as e:
        print(e, input_dict)
    return input_dict
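The sign convention in taup_worker flips the receiver-to-source direction (back-azimuth) into the propagation direction of the wavefront. A worked example with illustrative numbers:

# horizontal slowness arr.ray_param = 440 s/rad at pt_R = 6371 km,
# back-azimuth baz = 135 deg:
#   n_comp = -440 * cos(135 deg) / 6371 ~ +0.0488 s/km (northward)
#   e_comp = -440 * sin(135 deg) / 6371 ~ -0.0488 s/km (westward)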
def plot_points(points_list):
    import matplotlib.pyplot as plt
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    x = []
    y = []
    z = []
    color = np.full(len(points_list), np.nan)
    for index, points_dict in enumerate(points_list):
        x.append(points_dict['pt_lat'])
        y.append(points_dict['pt_lon'])
        z.append(points_dict['pt_depth'])
        arrivals = points_dict.get('arrivals')
        if arrivals:
            color[index] = arrivals[0].time
    ax.scatter(x, y, z, c=color)
    plt.show()
def subtract_source_time(picks_list, origin_time):
    # WAS FUNCTION DEMEAN, but demean after calculating residuals using rtimes_tele in FMTOMO!!
    for phase_dict in picks_list:
        taup_time = phase_dict['taup_time']
        pick = phase_dict['pick']
        pick_time = pick.time
        ttres = pick_time - taup_time
        # I think doing this is wrong because taup times do NOT include station elevation!!! 5.3.20
        phase_dict['ttres'] = ttres

    mean = np.mean([phase_dict['pick'].time.timestamp for phase_dict in picks_list])

    for phase_dict in picks_list:
        phase_dict['ttres_demeaned'] = phase_dict['ttres'] - mean  # see above comment
        phase_dict['abstimes'] = phase_dict['pick'].time - origin_time

    return picks_list


def translate_phase_names_TAUP(sorted_picks):
    # PKP and PKIKP are all just 'P' phases in fm3d (not very sure about this!)
    translations = {'PKIKP': 'PKP',
                    'SKIKS': 'SKS',}
    # iterate over a copy of the keys, as the dictionary is modified in the loop
    for phase_name in list(sorted_picks.keys()):
        if phase_name in translations.keys():
            sorted_picks[translations[phase_name]] = sorted_picks.pop(phase_name)
    return sorted_picks
def write_picks_to_pickfile(pickfile, phase, picks_list, stations_dict, origin, picks_mode='abs'):
    fid = open(pickfile, 'w')
    header = '{npicks}\n' \
             '{lat_src:<15} {lon_src:<15} {depth_src:<15}\n' \
             '{phase_name}\n'
    # pickfile header including npicks, src coords, phasetype
    formatted_str = header.format(npicks=len(picks_list),
                                  lat_src=origin.latitude,
                                  lon_src=origin.longitude,
                                  depth_src=origin.depth,
                                  phase_name=phase)
    fid.write(formatted_str)

    # write picks for each station to pickfile
    for phase_dict in picks_list:
        pick = phase_dict['pick']
        seed_id = pick.waveform_id.get_seed_string()
        # travel time residual (demeaned or abs)
        if picks_mode == 'abs':
            picktime = phase_dict['abstimes']
        elif picks_mode == 'res':
            warnings.warn('Using residuals here might not be exact. See above code where ttres_demeaned is calculated.')
            picktime = phase_dict['ttres_demeaned']
        else:
            raise IOError(f'Unknown picks_mode {picks_mode}')
        uncertainty = pick.time_errors.uncertainty
        network, station = seed_id.split('.')[:2]
        # get stations coords from metadata dict
        station_coords = stations_dict.get('{}.{}'.format(network, station))
        # prepare line to be written to pickfile and format, use traveltime residual
        line = '{lat_rec:<15} {lon_rec:<15} {depth_rec:<15} {picktime:<15} {uncert:15}\n'
        formatted_line = line.format(lat_rec=station_coords['latitude'],
                                     lon_rec=station_coords['longitude'],
                                     depth_rec=station_coords['elevation'] * (-1e-3),
                                     picktime=picktime,
                                     uncert=uncertainty)
        fid.write(formatted_line)
    print('Wrote {} picks for phase {} to file {}'.format(len(picks_list), phase, pickfile))
    fid.close()
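For reference, a pickfile produced by write_picks_to_pickfile has this shape (values illustrative; columns as formatted above, receiver depth in km is the negative elevation):

# 2
# 13.0            42.5            10000.0
# P
# 46.1            11.2            -0.45           512.3           0.2
# 47.3            10.8            -1.2            515.7           0.3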
def remove_blacklisted_station_picks(picks, fname_blacklist, verbosity=1):
    blacklisted_stations = get_station_blacklist(fname_blacklist)
    deleted_picks = []
    deleted_stations = []
    for index, pick in list(reversed(list(enumerate(picks)))):
        seed_id = pick.waveform_id.get_seed_string()
        network, station = seed_id.split('.')[:2]
        nwst_id = '{}.{}'.format(network, station)
        if nwst_id in blacklisted_stations.keys():
            timewindow = blacklisted_stations[nwst_id]
            if not timewindow == 'always':
                tstart, tstop = [UTCDateTime(time) for time in timewindow.split('to')]
                if not tstart <= picks[index].time <= tstop:
                    continue
            deleted_picks.append(picks.pop(index))
            deleted_stations.append(nwst_id)
    if verbosity > 0:
        print('Deleted picks for blacklisted stations:\n{}'.format(deleted_stations))
    return picks, deleted_stations
def get_station_blacklist(fname_csv):
|
||||
with open(fname_csv, 'r') as infile:
|
||||
# skip first line
|
||||
blacklist = infile.readlines()[1:]
|
||||
blacklisted_stations = {}
|
||||
for line in blacklist:
|
||||
network, station, time = line.split(',')[:3]
|
||||
nwst_id = '{}.{}'.format(network, station)
|
||||
blacklisted_stations[nwst_id] = time
|
||||
return blacklisted_stations
|
||||
|
||||
|
||||
def remove_uncommon_picks(sorted_picks, min_picks_per_phase):
|
||||
for phase_name, picks_list in reversed(list(sorted_picks.items())):
|
||||
if len(picks_list) < min_picks_per_phase:
|
||||
msg = 'Removed picks for phase {}, as there are only {} picks given (threshold is {})'
|
||||
print(msg.format(phase_name, len(picks_list), min_picks_per_phase))
|
||||
del(sorted_picks[phase_name])
|
||||
return sorted_picks
|
||||
|
||||
|
||||
def sort_picks_by_phase(picks, stations_dict, source_origin, phase_types, model,
|
||||
max_phase_diff=50., merge_phases=None, verbosity=0):
|
||||
'''
|
||||
# First: Iterate through all picks to estimate possible phases for source/location combination, then assign
|
||||
# each pick to one phase and return sorted picks dictionary, as there has to be one pickfile for each phasetype
|
||||
|
||||
:param picks:
|
||||
:param stations_dict:
|
||||
:param source_origin:
|
||||
:param phase_types:
|
||||
:param model:
|
||||
:param max_phase_diff:
|
||||
:param merge_phases: assign a phase (key) to another phase (value) e.g. {Pdiff: P}
|
||||
:param verbosity:
|
||||
:return:
|
||||
'''
|
||||
print('Starting to sort picks by phase calculating theoretical travel times for each pick...')
|
||||
phases_dict = {}
|
||||
for pick in picks:
|
||||
# PROBLEM: seed_id from PyLoT does not contain Location ID!!! Makes it hard to find station coords
|
||||
# workaround: use stations_dict (ignore channel and location ID) instead of metadata.get_coordinates()
|
||||
seed_id = pick.waveform_id.get_seed_string()
|
||||
network, station = seed_id.split('.')[:2]
|
||||
phaseID = pick.phase_hint
|
||||
uncertainty = pick.time_errors.uncertainty
|
||||
# skip different phase
|
||||
if not phaseID in phase_types:
|
||||
continue
|
||||
# pick invalid if no uncertainty is given
|
||||
if not uncertainty:
|
||||
continue
|
||||
station_coords = stations_dict.get('{}.{}'.format(network, station))
|
||||
if not station_coords:
|
||||
print('Could not find coordinates for station: {}'.format(seed_id))
|
||||
continue
|
||||
# estimate phase type by taking the closest phase from Tau-P method (time is relative to source origin)
|
||||
phase_name, phase_time_rel, phase_diff = get_closest_taup_phase(source_origin, station_coords,
|
||||
phaseID, pick.time, model)
|
||||
if phase_diff > max_phase_diff:
|
||||
if verbosity > 0:
|
||||
print ('Warning, max_diff too large (> {} s) for phase {} at {}. Skipping'.format(max_phase_diff,
|
||||
phase_name, seed_id))
|
||||
continue
|
||||
phase_time = source_origin.time + phase_time_rel
|
||||
|
||||
# merge phase if selected, keep track of original phase for TauPy node initiation
|
||||
true_phase = phase_name
|
||||
if merge_phases and phase_name in merge_phases.keys():
|
||||
phase_name = merge_phases[phase_name]
|
||||
|
||||
if not phase_name in phases_dict.keys():
|
||||
print('Adding phase to sorted picks dictionary: {}'.format(phase_name))
|
||||
phases_dict[phase_name] = []
|
||||
phases_dict[phase_name].append({'pick': pick,
|
||||
'taup_time': phase_time,
|
||||
'true_phase': true_phase,})
|
||||
return phases_dict
|
||||
|
||||
def get_closest_taup_phase(source_origin, station_coords, phase, pick_time, model='ak135'):
|
||||
'''
|
||||
Estimate phase that was picked by searching for earliest P/S phase arriving at
|
||||
station_coords for given source_coords using TauPy. As PyLoT only searches for first arrivals
|
||||
using the same method, this should(!) yield the correct phase.
|
||||
:return:
|
||||
'''
|
||||
phases = {'P': [],
|
||||
'S': []}
|
||||
phase_lists = {'P': ['ttp'],
|
||||
'S': ['tts']}
|
||||
|
||||
# possible phases for fm3d (*K* and *KIK* -> *)
|
||||
#phase_list = {'P': ['P', 'PKP', 'PKiKP', 'PKIKP', 'PcP', 'ScP', 'SKP', 'PKKP', 'SKKP', 'PP',],
|
||||
# 'S': ['S', 'SKS', 'SKIKS', 'ScS']}
|
||||
|
||||
# in case pick phase hint is just P or S
|
||||
if phase in phases.keys():
|
||||
phase_list = phase_lists[phase]
|
||||
common_phase = phase
|
||||
# in case an explicit phase name is given use only this phase
|
||||
else:
|
||||
phase_list = [phase]
|
||||
common_phase = identifyPhaseID(phase)
|
||||
|
||||
model = TauPyModel(model=model)
|
||||
arrivals = model.get_travel_times_geo(source_origin.depth,
|
||||
source_origin.latitude,
|
||||
source_origin.longitude,
|
||||
station_coords['latitude'],
|
||||
station_coords['longitude'],
|
||||
phase_list)
|
||||
|
||||
# identifies phases from arrivals as P or S phase, not necessary when using 'ttp' or 'tts' for get_travel_times_geo
|
||||
for arr in arrivals:
|
||||
phases[identifyPhaseID(arr.phase.name)].append(arr)
|
||||
|
||||
source_time = source_origin.time
|
||||
|
||||
if not arrivals:
|
||||
raise Exception('No arrivals found for source.')
|
||||
|
||||
# get first P or S onsets from arrivals list
|
||||
arr, min_diff = min([(arr, abs(source_time + arr.time - pick_time)) for arr in phases[common_phase]],
|
||||
key=lambda t: t[1])
|
||||
return arr.name, arr.time, min_diff
|
||||
|
||||
|
||||
def get_event_obspy_dmt(eventdir):
|
||||
event_pkl_file = os.path.join(eventdir, 'info', 'event.pkl')
|
||||
if not os.path.exists(event_pkl_file):
|
||||
raise IOError('Could not find event path for event: {}'.format(eventdir))
|
||||
event = qml_from_obspyDMT(event_pkl_file)
|
||||
return event
|
||||
|
||||
|
||||
def get_event_pylot(eventdir, extension=''):
|
||||
event_id = get_event_id(eventdir)
|
||||
filename = os.path.join(eventdir, 'PyLoT_{}{}.xml'.format(event_id, extension))
|
||||
if not os.path.isfile(filename):
|
||||
return
|
||||
cat = read_events(filename)
|
||||
return cat[0]
|
||||
|
||||
def get_event_id(eventdir):
|
||||
event_id = os.path.split(eventdir)[-1]
|
||||
return event_id
|
||||
|
||||
def get_picks(eventdir, extension=''):
|
||||
event_id = get_event_id(eventdir)
|
||||
filename = 'PyLoT_{}{}.xml'
|
||||
filename = filename.format(event_id, extension)
|
||||
fpath = os.path.join(eventdir, filename)
|
||||
fpaths = glob.glob(fpath)
|
||||
if len(fpaths) == 1:
|
||||
cat = read_events(fpaths[0])
|
||||
picks = cat[0].picks
|
||||
return picks
|
||||
elif len(fpaths) == 0:
|
||||
print('get_picks: File not found: {}'.format(fpath))
|
||||
return
|
||||
print(f'WARNING: Ambiguous pick file specification. Found the following pick files {fpaths}\nFilemask: {fpath}')
|
||||
return
|
||||
|
||||
def init_sources_file(nsrc, filename='sources.in'):
|
||||
infile_source = open(filename, 'w')
|
||||
infile_source.write('{}\n'.format(nsrc))
|
||||
return infile_source
|
||||
|
||||
def init_receivers_file(nrec, filename='receivers.in'):
|
||||
infile_rec = open(filename, 'w')
|
||||
infile_rec.write('{}\n'.format(nrec))
|
||||
return infile_rec
|
||||
|
||||
def append_source(fid, origin):
|
||||
pass
|
||||
|
||||
def append_receiver(fid, station_coords):
|
||||
# TODO check if order is correct
|
||||
fid.write('{rad:15} {lat:15} {lon:15} \n'.format(rad=station_coords['elevation'],
|
||||
lat=station_coords['latitude'],
|
||||
lon=station_coords['longitude']))
|
||||
|
||||
|
||||
def organize_receivers(fnin, unique=False):
|
||||
''' Open FMTOMO receivers.in file and read position of each receiver into a dict.'''
|
||||
|
||||
with open(fnin, 'r') as infile:
|
||||
nRec = int(infile.readline())
|
||||
rec_dict = {}
|
||||
for rec_number in range(1, nRec + 1):
|
||||
rad, lat, lon = [float(value) for value in infile.readline().split()]
|
||||
# dummy read next 3 lines
|
||||
for ind in range(3):
|
||||
infile.readline()
|
||||
|
||||
receiver = {'rad': rad,
|
||||
'lat': lat,
|
||||
'lon': lon, }
|
||||
if unique:
|
||||
if receiver in rec_dict.values():
|
||||
continue
|
||||
|
||||
rec_dict[rec_number] = receiver
|
||||
|
||||
return rec_dict
|
||||
|
||||
|
||||
def organize_sources(fnin):
|
||||
''' Open FMTOMO sources.in file and read position and phase of each source into a dict.'''
|
||||
with open(fnin, 'r') as infile:
|
||||
nSrc = int(infile.readline())
|
||||
src_dict = {}
|
||||
for src_number in range(nSrc):
|
||||
src_number += 1
|
||||
teleseismic_flag = int(infile.readline())
|
||||
if teleseismic_flag != 1: raise ValueError('Source not teleseismic!')
|
||||
phase = infile.readline().strip()
|
||||
rad, lat, lon = [float(value) for value in infile.readline().split()[:3]]
|
||||
# dummy read next 4 lines
|
||||
for ind in range(4):
|
||||
infile.readline()
|
||||
src_dict[src_number] = {'phase': phase,
|
||||
'depth': rad,
|
||||
'lat': lat,
|
||||
'lon': lon,}
|
||||
return src_dict
|
||||
|
||||
|
||||
def organize_event_names(fnin):
|
||||
infile = open(fnin, 'r')
|
||||
events = [line.split()[-1].strip() for line in infile.readlines()[1:]]
|
||||
infile.close()
|
||||
return events
|
||||
|
||||
|
||||
def export_otimes(otimes, fn_out):
|
||||
np.savetxt(fn_out, otimes, fmt=['%8d', '%8d', '%8d', '%8d', '% 4.6f', '% 4.6f'],
|
||||
header=str(len(otimes)), comments='')
|
||||
print('Wrote file:', fn_out)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
# testing area
|
||||
prepare_fmtomo_dir_first_run('/data/AdriaArray_Data/fmtomo_adriaarray/v0/test_init')
|
||||
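A minimal usage sketch for export_otimes with dummy values (not from the dataset). The column meanings are inferred from the readers above: receiver ID, source ID, two further integer IDs (their meaning is not shown in this excerpt), travel time [s] and uncertainty [s].

# hedged sketch, assuming export_otimes from above is in scope
import numpy as np
dummy_otimes = np.array([[1, 1, 1, 1, 0.42, 0.10],
                         [2, 1, 1, 1, -0.13, 0.15]])
export_otimes(dummy_otimes, 'otimes_example.dat')
# the first line of the output file holds the row count, as expected by the organize_* readers above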
104
pylot/tomography/fmtomo_tools/get_tt_residuals.py
Normal file
@@ -0,0 +1,104 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Get travel time residuals using synthetic 1D travel times from the FMTOMO hybrid method.
#
# This script is used to make sure that synthetic travel times are the same for synthetic data residuals (rtimes)
# and observed data residuals, because of small differences between the hybrid 1D solver and the taup method.
# Therefore, correlation output times are absolute (minus source time) values and residuals are calculated using
# FMTOMO hybrid reference times (e.g. rtimes.dat).
#
import os
import argparse
import shutil

import numpy as np
import matplotlib.pyplot as plt

from pylot.tomography.fmtomo_tools.fmtomo_teleseismic_utils import organize_event_names


# Note: weighting of the mean might not be reasonable due to the uncertainty distribution along the array
def main(infile_rtimes, fn_events, kde_file, demean=True, no_deref=False, weight_kde=True, weight=False):
    eventnames = organize_event_names(fn_events)
    assert not (weight and weight_kde), 'Cannot use two weights'

    if not no_deref:
        reftimes = np.genfromtxt(infile_rtimes)
        if weight_kde:
            kde_values = np.genfromtxt(kde_file)
            assert len(kde_values) == len(reftimes), 'Mismatch in reftimes and kde file length'

    nSrc = len(eventnames)

    diff_in_means = []
    # iterate over all sources
    for index in range(nSrc):
        srcid = index + 1
        eventname = eventnames[index]
        print('{} ({}/{})'.format(eventname, srcid, nSrc))
        # get pick filename and read observed times
        pickfile = os.path.join('picks', eventname)
        obsdata_src = np.loadtxt(pickfile, skiprows=3)
        # make safety copy
        picksafedir = os.path.join('picks', 'save')
        if not os.path.isdir(picksafedir):
            os.mkdir(picksafedir)
        shutil.copy(pickfile, picksafedir)
        npicks = len(obsdata_src)
        # read observed times from pickfile
        tt_obs = obsdata_src[:, 3]
        # read uncertainties from pickfile
        uncs = obsdata_src[:, 4]

        if no_deref:
            tt_res = tt_obs
        else:
            indices = np.where(reftimes[:, 1] == srcid)[0]
            assert (len(indices) == npicks), 'Mismatch in indices for srcids'
            # read reference teleseismic times from FMTOMO run
            tt_ref = reftimes[:, 4][indices]
            # calculate residuals
            tt_res = tt_obs - tt_ref

        # set new residuals in obsdata array
        obsdata_src[:, 3] = tt_res
        # get kde values
        # weights = 1. / kde_values[:, 2][indices] if weight_kde else None
        weights = 1. / uncs ** 2 if weight else None
        # demean residuals for current source
        mean = np.average(tt_res, weights=weights)
        mean_unweighted = np.mean(tt_res)
        diff_in_means.append(mean - mean_unweighted)
        print('Mean:', mean)
        print('Unweighted mean:', mean_unweighted)
        print('Demean setting:', demean)
        if demean:
            obsdata_src[:, 3] -= mean
        # obsdata_src[:, 3] = mtimes[:, 4][indices]
        # write new pickfiles
        with open(pickfile, 'r') as infile_pick:
            header = infile_pick.readlines()[:3]
        with open(pickfile, 'w') as outfile_pick:
            for line in header:
                outfile_pick.write(line)
            for line in obsdata_src:
                outfile_pick.write('{:10.4f} {:10.4f} {:10.4f} {:15.8f} {:15.8f}\n'.format(*line))
    plt.hist(diff_in_means, bins=100)
    plt.title('Diffs in means (weighted - unweighted) [s]')
    plt.show()


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Calculate residuals for absolute observation times using a reference file from FMTOMO')
    parser.add_argument('--rtimes', default='rtimes_tele.dat', help='reference file')
    parser.add_argument('--sourcefile', default='input_source_file_P.in', help='input source file')
    # parser.add_argument('--kdefile', default='kde_weights.txt', help='input file with station kde values for weighting')
    parser.add_argument('-nd', '--no_demean', action='store_true', default=False,
                        help='do not mean-correct travel times')
    parser.add_argument('-nr', '--no_reftimes', action='store_true', default=False,
                        help='do not use reference times (e.g. picks are already tt residuals). rtimes file still required at the moment.')
    # parser.add_argument('-nwk', '--no_weight_kde', action='store_true', default=True, help='do not weight travel times')
    args = parser.parse_args()
    # args.kdefile
    main(args.rtimes, args.sourcefile, None, not args.no_demean, weight_kde=False, no_deref=args.no_reftimes)  # not(args.no_weight))
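The core of the script is the per-source residual tt_res = tt_obs - tt_ref, optionally demeaned. A minimal numpy sketch of that step with dummy numbers:

import numpy as np
tt_obs = np.array([512.3, 508.9, 510.4])  # observed absolute times minus source time [s]
tt_ref = np.array([511.8, 509.6, 510.1])  # FMTOMO hybrid 1D reference times [s]
tt_res = tt_obs - tt_ref                  # residuals: [0.5, -0.7, 0.3]
tt_res -= tt_res.mean()                   # demeaned per source, as main() does by default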
174
pylot/tomography/fmtomo_tools/gmtslice_sidehook.py
Normal file
@@ -0,0 +1,174 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import os
import numpy as np

from obspy.geodetics import gps2dist_azimuth, locations2degrees
from pylot.tomography.utils import get_coordinate_from_dist_baz

R = 6371.


def main():
    profiles_infile = '/home/marcel/AlpArray/vtk_files/points_mark.txt'
    gmtslice_dir = '/rscratch/minos13/marcel/fmtomo_alparray/v3.5/alparray_mantle_diehl_crust_included_hf_gradient_smoothing/plot'
    ####
    # maximum point spacing in km (decreases with depth)
    dpoints = 1.
    dpi = 100
    model_depth = 606.
    ###
    with open(profiles_infile, 'r') as infile:
        profiles = infile.readlines()

    for profile in profiles:
        print('Profile: ', profile)
        name = profile.split()[0]
        lon1, lat1, lon2, lat2 = [float(item) for item in profile.split()[1:]]


def get_gmtslice_gc(gmtplot_dir, lat1, lon1, lat2, lon2, model_depth=606., dpoints=1, fn_out_slice='grid2dvgc.z',
                    rel=True, fname_vgrid='../vgrids.in', fname_vgrid_ref='../vgridsref.in'):
    parfile_str_len = 28
    assert (len(fname_vgrid) < parfile_str_len), 'Check length of filename: {}'.format(fname_vgrid)
    assert (len(fname_vgrid_ref) < parfile_str_len), 'Check length of filename: {}'.format(fname_vgrid_ref)

    infile_default = 'gmtslice_default.in'
    fn_out = 'gmtslice.in'
    bounds_out_gmt = 'boundgc.gmt'

    cwd = os.getcwd()

    os.chdir(gmtplot_dir)
    with open(infile_default, 'r') as infile:
        gmt_infile = infile.readlines()

    # calculate great circle distance
    max_dist = gps2dist_azimuth(lat1, lon1, lat2, lon2, a=R * 1e3, f=0)[0] / 1e3
    npointsR = int(model_depth / dpoints + 1)
    npointsLateral = int(max_dist / dpoints + 1)
    print('nR, nLateral:', npointsR, npointsLateral)

    # filenames
    gmt_infile[3] = '{} \n'.format(fname_vgrid)
    gmt_infile[4] = '{} \n'.format(fname_vgrid_ref)

    # activate gc generation
    gmt_infile[51] = '1 \n'
    # modify lines with lat lon boundaries
    gmt_infile[52] = '{:5.2f} {:5.2f}\n'.format(lat1, lon1)
    gmt_infile[53] = '{:5.2f} {:5.2f}\n'.format(lat2, lon2)

    abs_rel = 1 if rel else 0
    gmt_infile[58] = '{} \n'.format(abs_rel)

    # modify lines with n points
    gmt_infile[65] = '{:<5d} {:<5d}\n'.format(npointsLateral, npointsR)

    with open(fn_out, 'w') as outfile:
        for line in gmt_infile:
            outfile.write(line)

    print('Executing gmtslice...')
    os.system('gmtslice')
    print('Done!')

    with open(bounds_out_gmt, 'r') as infile:
        bds = [float(bd) for bd in infile.readlines()]

    bounds = '-R{bds[0]}/{bds[1]}/{bds[2]}/{bds[3]}'.format(bds=bds)
    xyz_string = 'gmt xyz2grd {grid_out} -Ggrid2dvgc.grd -I{bds[4]}+/{bds[5]}+ -ZLB {bounds}'.format(
        grid_out=fn_out_slice, bds=bds, bounds=bounds)
    print(xyz_string)
    os.system(xyz_string)

    grid = np.loadtxt(fn_out_slice)
    dist_grid = []
    lon_grid = []
    lat_grid = []
    _, azim, bazim = gps2dist_azimuth(lat1, lon1, lat2, lon2, a=R * 1e3, f=0)
    # ddist = / (npointsLateral - 1)
    # ddepth = (bds[3] - bds[2]) / (npointsR - 1)
    for depth in np.linspace(-bds[3], -bds[2], num=npointsR):
        for dist in np.linspace(0, locations2degrees(lat1, lon1, lat2, lon2), num=npointsLateral):
            lon, lat = get_coordinate_from_dist_baz((lon1, lat1), dist, azim)
            lon_grid.append(lon)
            lat_grid.append(lat)
            # dist = ddist * indexLat
            # depth = ddepth * indexR
            dist_grid.append(np.array([dist, depth]))

    dist_grid = np.array(dist_grid)
    lat_grid = np.array(lat_grid)
    lon_grid = np.array(lon_grid)

    os.chdir(cwd)
    return grid, dist_grid, lat_grid, lon_grid, max_dist


def get_gmtslice_depth(gmtplot_dir, depth, fn_out_slice='grid2dvd.z', fname_vgrid='../vgrids.in',
                       fname_vgrid_ref='../vgridsref.in'):
    infile_default = 'gmtslice_default.in'
    fn_out = 'gmtslice.in'
    bounds_out_gmt = 'bounddp.gmt'

    cwd = os.getcwd()

    os.chdir(gmtplot_dir)
    with open(infile_default, 'r') as infile:
        gmt_infile = infile.readlines()

    # filenames
    gmt_infile[3] = '{} \n'.format(fname_vgrid)
    gmt_infile[4] = '{} \n'.format(fname_vgrid_ref)

    # activate depth slice generation
    gmt_infile[41] = '1 \n'
    # set depth (negative for whatever reason)
    gmt_infile[42] = '{:5.2f} \n'.format(-depth)

    with open(fn_out, 'w') as outfile:
        for line in gmt_infile:
            outfile.write(line)

    print('Executing gmtslice...')
    os.system('gmtslice')
    print('Done!')

    with open(bounds_out_gmt, 'r') as infile:
        bds = [float(bd) for bd in infile.readlines()]

    bounds = '-R{bds[0]}/{bds[1]}/{bds[2]}/{bds[3]}'.format(bds=bds)
    xyz_string = 'gmt xyz2grd {grid_out} -Ggrid2dvd.grd -I{bds[4]}+/{bds[5]}+ -ZLB {bounds}'.format(
        grid_out=fn_out_slice, bds=bds, bounds=bounds)
    print(xyz_string)
    # os.system(xyz_string)

    grid = np.loadtxt(fn_out_slice)
    lonlat = []
    lon0, lon1 = bds[:2]
    lat0, lat1 = bds[2:4]
    nlon = int(bds[4])
    nlat = int(bds[5])
    for lon in np.linspace(lon0, lon1, num=nlon):
        for lat in np.linspace(lat0, lat1, num=nlat):
            lonlat.append(np.array([lon, lat]))

    lonlat = np.array(lonlat)

    os.chdir(cwd)
    return grid, lonlat


def grdimage_slice(lat1, lon1, lat2, lon2, bounds, name, npointsLateral, npointsR, dpi):
    proj = "-JX{:05.2f}i/{:05.2f}i".format(npointsLateral / dpi, npointsR / dpi)
    fnout = 'gmtslice_{}_{:.1f}_{:.1f}-{:.1f}_{:.1f}'.format(name, lat1, lon1, lat2, lon2)
    grdimage_string = 'gmt grdimage grid2dvgc.grd {bounds} {proj} -Ba50f10/a50f10 -Cpolar_inv_3.cpt -K > {fnout}'.format(
        bounds=bounds, proj=proj, fnout=fnout + '.ps')
    print(grdimage_string)
    os.system(grdimage_string)
    print('Convert to png...')
    os.system('convert {} {}'.format(fnout + '.ps', fnout + '.png'))
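For clarity, a sketch of how the six values read from boundgc.gmt feed the GMT region string built above (dummy bounds; the ordering x0, x1, z0, z1, nx, nz is assumed from the way the code uses them):

# hedged sketch with dummy bounds
bds = [0.0, 8.5, -606.0, 0.0, 851.0, 607.0]
bounds = '-R{bds[0]}/{bds[1]}/{bds[2]}/{bds[3]}'.format(bds=bds)
print(bounds)  # -> -R0.0/8.5/-606.0/0.0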
43
pylot/tomography/fmtomo_tools/heatmap_two_models.py
Normal file
@@ -0,0 +1,43 @@
import sys

fnin1 = '/data/AlpArray_Data/fmtomo/v6/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10/it_12/vgrids.in'
fnin2 = '/data/AlpArray_Data/fmtomo/v6/crust_incl_hf_sm_FIX_TESAURO_grad_sm30_dm10/it_12/vgrids.in'
fnout = '/data/AlpArray_Data/fmtomo/v6/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10/vg_TES_i12sm30dm10.in'


def check_vgrids_header_line(l1, l2, epsilon=1e-6):
    items1 = [float(item) for item in l1.split()]
    items2 = [float(item) for item in l2.split()]
    for i1, i2 in zip(items1, items2):
        if not abs(i1 - i2) < epsilon:
            return False
    return True


def vgrids_diff(fnin1, fnin2, fnout):
    diffs = []
    with open(fnin1, 'r') as infile1:
        with open(fnin2, 'r') as infile2:
            with open(fnout, 'w') as outfile:
                for index, l1 in enumerate(infile1):
                    l2 = infile2.readline()
                    if index < 4:
                        assert check_vgrids_header_line(l1, l2), 'Different grid dimensions!'
                        outfile.write(l1)
                    else:
                        try:
                            v1 = float(l1.split()[0])
                            v2 = float(l2.split()[0])
                        except Exception as e:
                            print('Read problem: {}'.format(e))
                            sys.exit()
                        vdiff = abs(v2 - v1)
                        diffs.append(vdiff)
                        outfile.write('{}\n'.format(vdiff))

    print('Finished writing file', fnout)
    print('Max diff:', max(diffs))


vgrids_diff(fnin1, fnin2, fnout)
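A quick sketch of what check_vgrids_header_line tolerates (dummy header lines): numerically equal headers pass even when formatted differently, while real dimension changes fail.

# hedged sketch, assuming check_vgrids_header_line from above is in scope
l1 = '97 109 97\n'
l2 = '97.000 109.0 97\n'
assert check_vgrids_header_line(l1, l2)                 # equal within epsilon
assert not check_vgrids_header_line(l1, '98 109 97\n')  # dimensions differ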
126
pylot/tomography/fmtomo_tools/misfit_evaluation.py
Normal file
@@ -0,0 +1,126 @@
import os
import json

import numpy as np
import matplotlib.pyplot as plt

from pylot.tomography.fmtomo_tools.fmtomo_teleseismic_utils import organize_receivers

# TODO: demeaning source by source??


def calc_misfit(data, synth, unc):
    return np.sum(((data - synth) / unc) ** 2) / len(data)


def calc_mean_residuals(data, synth):
    return np.sum(abs(data - synth)) / len(data)


def sort_stations_for_search(station_coords, receivers_dict):
    epsilon = 1e-1  # very large epsilon actually...
    coords_unique = coords_unique_receivers(receivers_dict)
    latlon_dict = {}
    for nwst_id, coords in station_coords.items():
        lat = coords['latitude']
        lon = coords['longitude']
        for latu, lonu in coords_unique:
            if abs(lat - latu) < epsilon and abs(lon - lonu) < epsilon:
                latlon_dict[(latu, lonu)] = nwst_id
    return latlon_dict


def extract_stations_for_network(station_coords, network_id):
    station_coords_extracted = {}
    for nwst_id, coords in station_coords.items():
        nw, st = nwst_id.split('.')
        if nw == network_id:
            station_coords_extracted[nwst_id] = coords
    return station_coords_extracted


def get_receiver_ids(latlon_list, receivers_dict):
    receiver_ids = []
    for recid, coords in receivers_dict.items():
        lat, lon = (coords['lat'], coords['lon'])
        if (lat, lon) in latlon_list:
            receiver_ids.append(recid)
    return receiver_ids


def coords_unique_receivers(receivers_dict):
    coords_unique = []
    for coords in receivers_dict.values():
        latlon_tuple = (coords['lat'], coords['lon'])
        if not latlon_tuple in coords_unique:
            coords_unique.append(latlon_tuple)
    return coords_unique


# def plot_residuals()

def main():
    # wdir = '/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_diehl_crust_included_v3_hf'
    wdir = '/data/AlpArray_Data/fmtomo/v5/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10'
    os.chdir(wdir)

    receivers_dict = organize_receivers('receivers.in')

    print(wdir)
    misfits = {}
    mean_residuals = {}
    for extract_network in [None, 'Z3', 'ZS', ]:
        misfits[extract_network] = []
        mean_residuals[extract_network] = []
        with open('/rscratch/minos13/marcel/alparray/station_coords.json', 'r') as infile:
            station_coords = json.load(infile)

        print('\nCalculating residuals for network:', extract_network)
        if extract_network:
            station_coords = extract_stations_for_network(station_coords, extract_network)
        latlon_dict = sort_stations_for_search(station_coords, receivers_dict)
        rec_ids = get_receiver_ids(list(latlon_dict.keys()), receivers_dict)

        otimes = np.genfromtxt('otimes.dat', skip_header=1)
        # otimes_orig = np.genfromtxt('otimes_orig.dat', skip_header=1)
        ttimes = otimes[:, 4]
        uncs = otimes[:, 5]
        nsrc = int(otimes[-1, 1])

        rtimes = np.genfromtxt('rtimes_tele.dat')[:, 4]

        for itstep in range(0, 25):
            nRays = 0
            mtimes_all = np.genfromtxt('it_{}/arrivals.dat'.format(itstep))
            mtimes = mtimes_all[:, 4]
            mtimes_diff = mtimes - rtimes

            for srcid in range(1, nsrc + 1):
                # get all indices of current source id
                indices = np.where(mtimes_all[:, 1] == srcid)
                nRays += len(indices[0])
                mtimes_diff[indices] -= np.mean(mtimes_diff[indices])

            # get all indices that are in rec_ids list as well
            mask_recs = np.isin(mtimes_all[:, 0].astype(int), rec_ids)

            mf = calc_misfit(ttimes[mask_recs], mtimes_diff[mask_recs], uncs[mask_recs])
            sr = calc_mean_residuals(ttimes[mask_recs], mtimes_diff[mask_recs])
            misfits[extract_network].append(mf)
            mean_residuals[extract_network].append(sr)
            print('It: {}, misfit: {}, mean res: {}, nrays: {}'.format(itstep, mf, sr, nRays))

        # plt.plot(mean_residuals[extract_network], label=extract_network)
        plt.plot(misfits[extract_network], label=extract_network)

    plt.title(wdir)
    plt.ylabel('Mean residuals [s]')
    plt.xlabel('Iteration')
    plt.legend()
    plt.show()


if __name__ == '__main__':
    main()
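calc_misfit is a reduced chi-square-style measure, sum(((data - synth) / unc)**2) / N. A quick numeric check with dummy values:

# hedged sketch, assuming calc_misfit from above is in scope
import numpy as np
data = np.array([0.5, -0.2, 0.1])   # observed residuals [s]
synth = np.array([0.4, -0.1, 0.0])  # modelled residuals [s]
unc = np.array([0.1, 0.1, 0.1])     # pick uncertainties [s]
print(calc_misfit(data, synth, unc))  # (1 + 1 + 1) / 3 = 1.0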
69
pylot/tomography/fmtomo_tools/model_slice_plomerova.py
Normal file
@@ -0,0 +1,69 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import glob
import os

from fmtomo_tools.fmtomo_grid_utils import read_vgrid, write_vgrid

pjoin = os.path.join

pwd = '/data/AlpArray_Data/fmtomo/v6/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10_EASI_test_Plomerova/'

vgrid_file_in = pjoin(pwd, 'vgrids_dts_crust.in')
vgrid_file_out = pjoin(pwd, 'vgrids_dts_crust_variance_slice.in')

latmin_rec, latmax_rec = 44.8, 51.3
lonmin_rec, lonmax_rec = 11.94, 14.66

latmin_grid, latmax_grid = 44.45, 52.55
lonmin_grid, lonmax_grid = 10.83, 15.77


def modify_pickfiles():
    picks_path = pjoin(pwd, 'picks_orig')
    os.chdir(picks_path)

    outdir = pjoin(pwd, 'picks')

    infiles = glob.glob('*.ttf')
    for infile in infiles:
        lines_out = []
        with open(infile, 'r') as fid:
            eventid = infile.split('.ttf')[0]
            npicks = int(fid.readline())

            # copy source/phase header
            for _ in range(2):
                lines_out.append(fid.readline())

            for line in fid:
                lat, lon = [float(item) for item in line.split()[:2]]
                if latmin_rec < lat < latmax_rec and lonmin_rec < lon < lonmax_rec:
                    lines_out.append(line)

        fn_out = pjoin(outdir, infile)
        with open(fn_out, 'w') as outfile:
            # number of picks: list contents - 2 header lines
            outfile.write(f'{len(lines_out) - 2}\n')
            for line in lines_out:
                outfile.write(line)

        # lines_out contains 2 header lines, so the number of kept picks is len(lines_out) - 2
        print(f'Modified {eventid}: Removed {npicks - (len(lines_out) - 2)} out of {npicks} picks/stations')

    os.chdir(pwd)


def modify_variance_slice():
    grid, gridN, gridDelta, gridStart = read_vgrid(vgrid_file_in)

    for index, latlon in enumerate(zip(grid['lats'], grid['lons'])):
        lat, lon = latlon
        if not latmin_grid < lat < latmax_grid or not lonmin_grid < lon < lonmax_grid:
            grid['covs'][index] = 1e-6

    write_vgrid(grid, gridN, gridDelta, gridStart, vgrid_file_out)


# modify_pickfiles()
modify_variance_slice()
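The station filter in modify_pickfiles is a plain bounding-box predicate on the first two pickfile columns; a sketch with a dummy station position and the module constants defined above:

# hedged sketch with a dummy station position
lat, lon = 47.2, 13.1
keep = (latmin_rec < lat < latmax_rec) and (lonmin_rec < lon < lonmax_rec)
print(keep)  # True: 44.8 < 47.2 < 51.3 and 11.94 < 13.1 < 14.66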
294
pylot/tomography/fmtomo_tools/modify_otimes.py
Normal file
@@ -0,0 +1,294 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os

import numpy as np
from scipy.interpolate import RegularGridInterpolator
import matplotlib.pyplot as plt

from pylot.tomography.fmtomo_tools.fmtomo_teleseismic_utils import export_otimes, organize_receivers


def crustal_correction_using_differences(fname_otimes, fname_otimes_diff, fname_otimes_out):
    otimes = np.loadtxt(fname_otimes, skiprows=1)
    otimes_diff = np.loadtxt(fname_otimes_diff, skiprows=1)

    src_means = source_means(otimes_diff)

    # mean_diffs = np.mean([float(item.split()[4]) for item in otimes_diff[1:]])
    print('Mean_diffs of all sources: ', np.mean(list(src_means.values())))
    # TODO: Check if demeaning is useful for crustal correction using synthetics (was it b4 using ak135_diehl 1D?)
    # Update 27.5.2020: If not demeaned, the overall velocity perturbation is shifted to negative in the whole
    # upper mantle. BUT: the demean has to be executed for each source, not with a global mean!

    with open(fname_otimes_out, 'w') as outfile:
        outfile.write('{}\n'.format(len(otimes)))
        for index, line in enumerate(otimes):
            src_id = line[1]
            ttime = line[4]
            ttime_diff = otimes_diff[index][4]
            new_time = ttime - ttime_diff + src_means[src_id]
            line[4] = new_time
            for col_index, item in enumerate(line):
                if col_index < 4:
                    fmt = '{:10g} '
                else:
                    fmt = '{:10.8f} '
                outfile.write(fmt.format(item))
            outfile.write('\n')


def source_means(otimes):
    # catch all source ids from otimes file
    src_ids = np.unique(otimes[:, 1])
    print('Source IDs:', src_ids)

    # create dictionary containing means of travel time (differences) for each source (key)
    src_means = {src_id: np.mean(otimes[otimes[:, 1] == src_id][:, 4]) for src_id in src_ids}

    print('Source means:')
    for srcid, val in src_means.items():
        print(srcid, ': ', val)

    return src_means


def crustal_correction_using_residual_map(fname_otimes, fname_residuals, receivers_in, fname_otimes_out):
    '''Correct using residuals from a numpy file with an array of shape (lons, lats, residuals).'''
    otimes = np.loadtxt(fname_otimes, skiprows=1)
    rec_dict = organize_receivers(receivers_in)
    lons, lats, res = np.load(fname_residuals, allow_pickle=True)

    lonsU = np.unique(lons)
    latsU = np.unique(lats)
    # grid = (lonsU, latsU)
    # shape = [len(item) for item in grid]
    # res = res.reshape(shape)

    # not very efficient but very lazy (just interpolate a value for each ray, i.e. receiver, not unique,
    # which is quite redundant)
    rginter = RegularGridInterpolator((lonsU, latsU), res, bounds_error=False, fill_value=0.)

    test_rginter(rginter)

    for line in otimes:
        rec_id = int(line[0])
        lonlat = (rec_dict[rec_id]['lon'], rec_dict[rec_id]['lat'])
        res = rginter(lonlat)
        line[4] -= res

    src_means = source_means(otimes)

    for src_id, src_mean in src_means.items():
        otimes[otimes[:, 1] == src_id, 4] -= src_mean

    with open(fname_otimes_out, 'w') as outfile:
        outfile.write('{}\n'.format(len(otimes)))
        for line in otimes:
            for col_index, item in enumerate(line):
                if col_index < 4:
                    fmt = '{:10g} '
                else:
                    fmt = '{:10.8f} '
                outfile.write(fmt.format(item))
            outfile.write('\n')


def test_rginter(rginter):
    lons = np.linspace(0, 22, 220)
    lats = np.linspace(40, 52, 120)
    lonsg, latsg = np.meshgrid(lons, lats)
    data = rginter((lonsg, latsg))
    pcm = plt.pcolormesh(lonsg, latsg, data)
    plt.colorbar(pcm)
    plt.show()


def get_otimes_diff(fname_arrivals1, fname_arrivals2, fname_out='otimes_diff.dat'):
    '''
    Calculate a file containing travel time differences between fname_arrivals1 and fname_arrivals2, e.g. for crustal
    correction.
    :param fname_arrivals1:
    :param fname_arrivals2:
    :return:
    '''
    with open(fname_arrivals1, 'r') as infile:
        arrivals1 = infile.readlines()
    with open(fname_arrivals2, 'r') as infile:
        arrivals2 = infile.readlines()

    assert len(arrivals1) == len(arrivals2), 'Length of input arrival files differs'

    print('Calculating differences between file {} and {}'.format(fname_arrivals1, fname_arrivals2))
    columnString = '{} '

    nArrivals = len(arrivals1)
    with open(fname_out, 'w') as outfile:
        outfile.write('{}\n'.format(nArrivals))
        for index in range(nArrivals):
            line1 = arrivals1[index]
            line2 = arrivals2[index]
            for item in line1.split()[:4]:
                outfile.write(columnString.format(item))
            diff = float(line1.split()[4]) - float(line2.split()[4])
            outfile.write(columnString.format(diff))
            outfile.write('\n')
    print('Wrote {} lines to file {}'.format(nArrivals, fname_out))


def get_synthetic_obsdata_legacy(fname_m1, fname_m2, fname_otimes_orig, fname_out='otimes_modif.dat'):  # , p_err=0.1):
    '''
    Create synthetic obsdata of model 1 relative to model 2 (usually 1D model synthetic travel times)
    :param fname_m1: arrivals.dat file of synthetic travel times (e.g. from checkerboard test)
    :param fname_m2: arrivals.dat file of synthetic travel times (e.g. ak135 model)
    :param fname_otimes_orig: original otimes file for pick uncertainties
    :param fname_out: otimes.dat file with output
    :return:
    '''
    with open(fname_m1, 'r') as infile:
        arrivals_model1 = infile.readlines()
    with open(fname_m2, 'r') as infile:
        arrivals_model2 = infile.readlines()
    with open(fname_otimes_orig, 'r') as infile:
        otimes = infile.readlines()[1:]

    assert (len(arrivals_model1) == len(arrivals_model2) == len(otimes)), 'Mismatch in lengths of files!'

    diffs = []
    with open(fname_out, 'w') as outfile:
        outfile.write('{}\n'.format(len(arrivals_model1)))

        for line_model1, line_model2, line_otimes in zip(arrivals_model1, arrivals_model2, otimes):
            for item in line_model2.split()[:4]:
                outfile.write('{} '.format(item))

            ttime_model1 = float(line_model1.split()[4])
            ttime_model2 = float(line_model2.split()[4])
            pickerror = float(line_otimes.split()[5])
            # pickerror = (p_err + 2 * p_err * abs(np.random.randn()))

            ttime_diff = ttime_model1 - ttime_model2
            diffs.append(ttime_diff)

            outfile.write('{} {}\n'.format(ttime_diff, pickerror))

    mean_diff = np.mean(diffs)
    abs_mean_diff = np.mean(np.abs(diffs))

    print('Mean_diff: {} - abs_mean_diff: {}'.format(mean_diff, abs_mean_diff))
    print('Done with {} travel times. Output in file: {}'.format(len(arrivals_model1), fname_out))


def get_synthetic_obsdata(fname_m1, fname_m2, fname_otimes_orig, fname_out='otimes_modif.dat', sigma='original'):
    '''
    Create synthetic obsdata of model 1 relative to model 2 (usually 1D model synthetic travel times)
    :param fname_m1: arrivals.dat file of synthetic travel times (e.g. from checkerboard test)
    :param fname_m2: arrivals.dat file of synthetic travel times (e.g. ak135 model)
    :param fname_otimes_orig: original otimes file for pick uncertainties
    :param fname_out: otimes.dat file with output
    :param sigma: standard deviation of added gaussian noise; 'original' uses the pick uncertainties
    :return:
    '''
    arrivals_model1 = np.genfromtxt(fname_m1)
    arrivals_model2 = np.genfromtxt(fname_m2)
    otimes = np.genfromtxt(fname_otimes_orig, skip_header=1)

    assert (len(arrivals_model1) == len(arrivals_model2) == len(otimes)), 'Mismatch in lengths of files!'

    # get new array as copy from model1 after deleting last column (not needed for otimes)
    arrivals_diff = np.delete(arrivals_model1, 6, 1)

    # overwrite time column by diffs
    arrivals_diff[:, 4] -= arrivals_model2[:, 4]
    # overwrite last column by pick errors
    arrivals_diff[:, 5] = otimes[:, 5]

    # get max nSrc (in last line of sorted arrivals files)
    nSrc = int(arrivals_diff[-1, 1])

    print('N sources:', nSrc)
    if sigma not in [None, False]:
        print('Applying gaussian noise with sigma={}'.format(sigma))
        if sigma == 'original':
            sigma_val = otimes[:, 5]
        else:
            sigma_val = sigma
        gauss_noise = np.random.normal(0., sigma_val, len(arrivals_diff[:, 4]))
        plt.axhline(0., color='grey')
        plt.vlines(np.arange(0, len(arrivals_diff)), arrivals_diff[:, 4], arrivals_diff[:, 4] + gauss_noise,
                   color='grey', linestyles='dashed')
        plt.plot(arrivals_diff[:, 4], 'r.')
        arrivals_diff[:, 4] += gauss_noise

    for index in range(nSrc):
        src_id = index + 1
        # get indices for this source ID
        indices = np.where(arrivals_diff[:, 1] == src_id)
        # get mean for that srcid
        mean = np.mean(arrivals_diff[:, 4][indices])
        print('Srcid: {}, mean: {}'.format(src_id, mean))
        # de-mean array
        arrivals_diff[:, 4][indices] -= mean

    if sigma not in [None, False]:
        plt.axhline(0., color='grey')
        # plt.vlines(np.arange(0, len(arrivals_diff)), arrivals_diff[:, 4], arrivals_diff[:, 4] + gauss_noise)
        plt.plot(arrivals_diff[:, 4], 'b.')
        plt.figure()
        plt.hist(gauss_noise, bins=100)
        plt.show()

    export_otimes(arrivals_diff, fname_out)


def plot_histograms_for_crustal_corrections():
    import matplotlib.pyplot as plt
    ttdiffs_no_crust = []
    ttdiffs_crust = []
    ttdiffs_corrected = []

    # get_synthetic_obsdata('it_synth_forward/arrivals.dat', 'arrivals_ak135_diehl.dat', 'otimes_three_boxes_no_crust.dat')
    # get_synthetic_obsdata('it_synth_forward_with_crust/arrivals.dat', 'arrivals_ak135_diehl.dat', 'otimes_three_boxes_with_diehl_crust.dat')
    with open('otimes_three_boxes_with_diehl_crust.dat', 'r') as infile:
        lines_crust = infile.readlines()[1:]
    with open('otimes_three_boxes_no_crust.dat', 'r') as infile:
        lines_no_crust = infile.readlines()[1:]
    with open('otimes_three_boxes_crustal_corrected.dat', 'r') as infile:
        lines_corrected = infile.readlines()[1:]

    for line in lines_crust:
        ttdiffs_crust.append(float(line.split()[4]))
    for line in lines_no_crust:
        ttdiffs_no_crust.append(float(line.split()[4]))
    for line in lines_corrected:
        ttdiffs_corrected.append(float(line.split()[4]))

    plt.hist(ttdiffs_crust, bins=100, label='tt-diff including crustal structure')
    plt.hist(ttdiffs_no_crust, bins=100, label='tt-diff without crustal structure')
    plt.hist(ttdiffs_corrected, bins=100, label='tt-diff after correcting for crustal structure')
    plt.xlabel('Travel time differences to ak135_diehl 1D model [s]')
    plt.legend()


if __name__ == '__main__':
    # wdir = '/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_diehl_crustal_corrections_v3_hf'
    # fname1 = os.path.join(wdir, 'arrivals_ak135di_crust.dat')
    # fname2 = os.path.join(wdir, 'arrivals_ak135di.dat')
    # get_otimes_diff(fname1, fname2, os.path.join(wdir, 'otimes_diff.dat'))

    # get_synthetic_obsdata('arrivals_cb_N2.dat', 'arrivals_ak135.dat', 'otimes_cb_N2.dat')
    # os.chdir('/data/AlpArray_Data/fmtomo/v4/different_test_runs/alparray_mantle_waldhauser_crust_corrected_covs_hf_gradient_smoothing')
    # crustal_correction_using_residual_map('otimes.dat', 'wh_residuals.npy', 'receivers.in', 'otimes_corr_wh.dat')

    # os.chdir('/data/AlpArray_Data/fmtomo/v4/different_test_runs/alparray_mantle_diehl_crust_corrected_residuals_stacked_gradient_smoothing')
    # crustal_correction_using_residual_map('otimes.dat', 'diehl_2009_residuals.npy', 'receivers.in', 'otimes_corr_diehl.dat')

    # os.chdir('/data/AlpArray_Data/fmtomo/v6/crust_corrected_VERTICAL_hf_sm_FIX_DTS_grad_sm30_dm10')
    # crustal_correction_using_residual_map('otimes.dat', 'dts_filt_12.5_12.5_7.5_CRUST_ONLY_RESIDUALS.npy', 'receivers.in', 'otimes_corr_dts.dat')

    os.chdir('/data/AlpArray_Data/fmtomo/v6/crust_corrected_WH_hf_sm_FIX_DTS_grad_sm30_dm10')
    crustal_correction_using_residual_map('otimes.dat', 'wh_residuals_SORTED.npy', 'receivers.in', 'otimes_corr_wh.dat')

    # crustal_correction_using_residual_map('otimes.dat', 'wh_residuals.npy', 'receivers.in', 'otimes_corr_wh.dat')
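A numeric sketch of the per-pick arithmetic in crustal_correction_using_differences (dummy values): the synthetic crustal delay is subtracted, then the per-source mean of the delays is added back, so the source-wise mean level is preserved.

# hedged sketch with dummy values
ttime = 1.20       # observed time from otimes.dat [s]
ttime_diff = 0.35  # synthetic crustal delay for this ray [s]
src_mean = 0.05    # mean delay over all rays of this source [s]
new_time = ttime - ttime_diff + src_mean  # -> 0.90 s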
154
pylot/tomography/fmtomo_tools/modify_vgrid.py
Normal file
@@ -0,0 +1,154 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Different functions to modify an FMTOMO velocity (inversion) grid
import argparse
import numpy as np
from math import erf

from pylot.tomography.fmtomo_tools.fmtomo_grid_utils import read_vgrid, write_vgrid, write_vtk


def main(fname_in, fname_out, fname_vtk_out=None, cov_perc=0.15, bounds=[40, 60]):
    vgrid, gridN, gridDelta, gridStart = read_vgrid(fname_in)

    print('Modifying covariances...')
    vgrid['covs'] = []
    for depth, vp in zip(vgrid['depths'], vgrid['vps']):
        vgrid['covs'].append(covariance_for_depth(depth, vp, cov_perc, bounds))

    write_vgrid(vgrid, npts=gridN, delta=gridDelta, start=gridStart, fname=fname_out)
    if fname_vtk_out:
        write_vtk(vgrid, fname_vtk_out, write_data=['vps', 'covs'])


def modify_vgrid_box(vgrid, vgrid_key, min_lon, max_lon, min_lat, max_lat, min_depth, max_depth,
                     val_center, val_border, scale_factor, use_erf=True, extend_to_top=False):
    '''
    Modify a value (vgrid_key) of vgrids.in using an error function. The value increases smoothly towards the borders
    of the external model. This function was used to prevent changes of the inversion result in the region of the
    initial crustal model that was corrected for.
    '''

    # get central points of the external model (here the value will be minimal)
    c_lon = 0.5 * (min_lon + max_lon)
    c_lat = 0.5 * (min_lat + max_lat)
    c_depth = 0.5 * (min_depth + max_depth)

    # get half the extent of the model dimensions for function scaling
    dlon = 0.5 * (max_lon - min_lon)
    dlat = 0.5 * (max_lat - min_lat)
    ddepth = 0.5 * (max_depth - min_depth)

    print('Modifying {}...'.format(vgrid_key))
    print('Center {key}: {val_c}, border {key}: {val_b}'.format(key=vgrid_key, val_c=val_center, val_b=val_border))
    if not vgrid_key in vgrid.keys() or not vgrid[vgrid_key]:
        vgrid[vgrid_key] = list(np.ones(len(vgrid['depths'])))
    for index, tup in enumerate(zip(vgrid['lons'], vgrid['lats'], vgrid['depths'])):
        lon, lat, depth = tup
        if (min_lon <= lon <= max_lon and min_lat <= lat <= max_lat and depth <= max_depth):
            x = abs(lon - c_lon) / dlon
            y = abs(lat - c_lat) / dlat
            if extend_to_top and depth < c_depth:
                z = 0
            else:
                z = abs(depth - c_depth) / ddepth
            if use_erf:
                point_val = smoothing_erf(x, y, z, val_center, val_border, scale_factor=scale_factor)
            else:
                point_val = val_center
        else:
            point_val = val_border
        vgrid[vgrid_key][index] *= point_val

    return vgrid


def modify_vgrid_gradient(vgrid, vgrid_key, val_top, val_bot):
    '''
    Modify a value (vgrid_key) of vgrids.in using a linear gradient from val_top to val_bot.
    '''

    print('Modifying {} with linear gradient...'.format(vgrid_key))
    print('Top: {}, bot: {}'.format(val_top, val_bot))
    if not vgrid_key in vgrid.keys():
        vgrid[vgrid_key] = list(np.ones(len(vgrid['depths'])))
    depth_min = min(vgrid['depths'])
    depth_max = max(vgrid['depths'])
    for index, tup in enumerate(zip(vgrid['lons'], vgrid['lats'], vgrid['depths'])):
        lon, lat, depth = tup
        vgrid[vgrid_key][index] *= linear_gradient(depth, depth_min=depth_min, depth_max=depth_max, val_top=val_top,
                                                   val_bot=val_bot)
    return vgrid


def linear_gradient(depth, depth_min, depth_max, val_top, val_bot):
    return (val_bot - val_top) * depth / (depth_max - depth_min) + val_top


def smoothing_erf(x, y, z, val_border, val_center, scale_factor=1.):
    # NOTE: with this parameter order the function returns ~val_border at the box center (x=y=z=0)
    # and val_center at the border (x=y=z=1); the positional call in modify_vgrid_box passes
    # (val_center, val_border), which compensates for the swapped naming.
    a = val_center
    b = val_border
    sx = sy = sz = scale_factor
    f = (a - b) * (1. / 3. * (erf(2. * sx * (x - 1)) + erf(2. * sy * (y - 1)) + erf(2. * sz * (z - 1))) + 1.) + b
    return f


def covariance_for_depth(depth, vp, cov_perc, bounds):
    '''
    Function that returns covariance values for certain depths, written to give a low variance to the crust,
    an intermediate one to an interlayer and a high variance to everything else.
    :param depth: depth in kilometers
    :return: covariance
    '''
    if depth <= bounds[0]:
        return 0.05
    elif bounds[0] < depth <= bounds[1]:
        return 0.5 * cov_perc * vp
    else:
        return cov_perc * vp


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Prepare grid for fm3d teleseismic hybrid calculation.')
    parser.add_argument('fname_in', help='input filename (vgrids.in)')
    parser.add_argument('fname_out', help='output filename (vgrids_new.in)')
    parser.add_argument('--fname_out_vtk', default=None, help='vtk output filename')
    parser.add_argument('--cov_border', default=1.0, help='covariance for the model outside crustal model boundaries')
    parser.add_argument('--cov_center', default=0.05, help='maximum covariance in the center of the crustal model')
    parser.add_argument('--smooth_border', default=1., help='smoothing factor for the rest of the box')
    parser.add_argument('--smooth_center', default=0.5, help='smoothing factor inside SWATH-D box')

    args = parser.parse_args()

    vgrid, gridN, gridDelta, gridStart = read_vgrid(args.fname_in)

    # main(args.fname_in, args.fname_out, args.fname_out_vtk)

    # vgrid = modify_vgrid_gradient(vgrid, 'smoothfactors', 1.0, 2.0)

    # bounds for Waldhauser residual corrections
    wh_bounds = dict(lon0=3.23, lon1=13.96, lat0=43.18, lat1=49.45)
    vgrid = modify_vgrid_box(vgrid, vgrid_key='covs', min_lon=wh_bounds['lon0'], max_lon=wh_bounds['lon1'],
                             min_lat=wh_bounds['lat0'], max_lat=wh_bounds['lat1'], min_depth=-5.0, max_depth=60.0,
                             val_center=float(args.cov_center), val_border=float(args.cov_border),
                             scale_factor=3, use_erf=True, extend_to_top=True)

    # vgrid = modify_vgrid_box(vgrid, vgrid_key='covs', min_lon=2.2547, max_lon=17.0999, min_lat=41.0517,
    #                          max_lat=50.4984, min_depth=-5.0, max_depth=70.0, val_center=float(args.cov_center),
    #                          val_border=float(args.cov_border), scale_factor=3, use_erf=True, extend_to_top=True)

    # vgrid = modify_vgrid_box(vgrid, vgrid_key='smoothfactors', min_lon=8.9, max_lon=15.3, min_lat=45.,
    #                          max_lat=48., min_depth=-15.0, max_depth=610.0, val_center=float(args.smooth_center),
    #                          val_border=float(args.smooth_border), scale_factor=1., use_erf=True)

    # MP MP TEST TEST TEST ++++++++++++++++++++++++++++++++++++++++++++++++
    # grid = modify_vgrid_box(grid, vgrid_key='smoothfactors', min_lon=11., max_lon=35., min_lat=30.,
    #                         max_lat=61., min_depth=-5.0, max_depth=600.0, val_center=float(args.smooth_center),
    #                         val_border=float(args.smooth_border), scale_factor=.4)
    # MP MP TEST TEST TEST ------------------------------------------

    write_vgrid(vgrid, npts=gridN, delta=gridDelta, start=gridStart, fname=args.fname_out)
    if args.fname_out_vtk:
        write_vtk(vgrid, args.fname_out_vtk, write_data=['vps', 'covs', 'smoothfactors'])
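A numeric check of smoothing_erf with scale_factor=1 (dummy values), illustrating the parameter-order note above: the value at the box center lands near the val_border argument, and exactly on the val_center argument at the border.

# hedged sketch, assuming smoothing_erf from above is in scope
print(smoothing_erf(0, 0, 0, 1.0, 0.05))  # ~0.996, near the val_border argument
print(smoothing_erf(1, 1, 1, 1.0, 0.05))  # 0.05 exactly, since erf(0) = 0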
26
pylot/tomography/fmtomo_tools/plot_obsdata.py
Executable file
@@ -0,0 +1,26 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import argparse

import matplotlib.pyplot as plt


def plot_otimes(otimes_fname):
    with open(otimes_fname, 'r') as infile:
        input_list = infile.readlines()[1:]
    ray_id = [int(line.split()[0]) for line in input_list]
    ttimes = [float(line.split()[-2]) for line in input_list]
    uncertainties = [float(line.split()[-1]) for line in input_list]
    plt.axhline(0, linestyle=':', color='k')
    plt.errorbar(ray_id, ttimes, yerr=uncertainties, fmt='o', markersize=1, ecolor='0.5', elinewidth=0.5)
    plt.show()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Plot ttimes with errors of FMTOMO file otimes.dat')
    parser.add_argument('infile', help='inputfile')

    args = parser.parse_args()

    plot_otimes(args.infile)
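Typical usage, either from the command line or directly from Python (assuming an FMTOMO otimes.dat in the working directory):

plot_otimes('otimes.dat')  # equivalent to: python plot_obsdata.py otimes.dat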
341
pylot/tomography/fmtomo_tools/plot_residuals_map.py
Executable file
@@ -0,0 +1,341 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import argparse
|
||||
import json
|
||||
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
from scipy.interpolate import griddata
|
||||
|
||||
from obspy.geodetics.base import gps2dist_azimuth
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
|
||||
from pylot.tomography.fmtomo_tools.compare_arrivals_hybrid import organize_receivers, organize_sources, organize_event_names
|
||||
from pylot.tomography.map_utils import make_map
|
||||
from pylot.tomography.utils import normed_figure
|
||||
|
||||
|
||||
def plot_map(otimes_fname, sources_fname, receivers_fname, isf, rtimes_fname=None, arrivals_fname=None, colorbar=True,
|
||||
title=True, stack=False, source_ids=(), savefig_dir='', demean=True, absolute=False, clat=42.25, clon=17.5,#clat=46., clon=11.,
|
||||
file_ext='png', max_abs=3., relative_to_otimes=False, only_otimes=False):
|
||||
if savefig_dir:
|
||||
if not os.path.isdir(savefig_dir):
|
||||
os.mkdir(savefig_dir)
|
||||
|
||||
src_dict = organize_sources(sources_fname)
|
||||
rec_dict = organize_receivers(receivers_fname)
|
||||
eventnames = organize_event_names(isf)
|
||||
|
||||
residuals_dict = read_fmtomo_tt_file(otimes_fname, rec_dict, rtimes_fname=rtimes_fname, synth_fname=arrivals_fname,
|
||||
demean=demean, absolute=absolute, height_correction_vp=5.5,
|
||||
relative_to_otimes=relative_to_otimes, only_otimes=only_otimes)
|
||||
|
||||
if stack:
|
||||
residuals_dict = stack_sources(residuals_dict)
|
||||
with open('stacked_residuals.json', 'w') as outfile:
|
||||
json.dump(residuals_dict, outfile)
|
||||
|
||||
count = 0
|
||||
if savefig_dir:
|
||||
fig = normed_figure(width_cm=12, ratio=12./9.)
|
||||
#fig = plt.figure(figsize=(12, 9))
|
||||
else:
|
||||
fig = plt.figure()
|
||||
if stack or source_ids is not [] or savefig_dir:
|
||||
count = 1
|
||||
|
||||
# increase point size if not stacked
|
||||
sizefactor_stacked = {True: 30,
|
||||
False: .2e4}
|
||||
|
||||
for src_id, dic in residuals_dict.items():
|
||||
eventname = eventnames[src_id - 1]
|
||||
if source_ids != []:
|
||||
if not src_id in source_ids:
|
||||
continue
|
||||
if max_abs == 'auto':
|
||||
max_abs = np.max(np.abs(np.array(dic['ttimes'])))
|
||||
# if not stack and not savefig_dir and source_ids == []:
|
||||
# count += 1
|
||||
# if count == 1:
|
||||
# ax = fig.add_subplot(3, 3, count)
|
||||
# ax0 = ax
|
||||
# else:
|
||||
# ax = fig.add_subplot(3, 3, count, sharex=ax0, sharey=ax0)
|
||||
#
|
||||
# ax.text(0.1, 0.9, 'Source ID {}, {}'.format(src_id, src_dict[src_id]['phase']), transform=ax.transAxes)
|
||||
# else:
|
||||
# ax = fig.add_subplot(111)
|
||||
if stack and title:
|
||||
plt.title('Stacked {} sources. Size relates to number of stacks per receiver.'.format(len(src_dict)),
|
||||
y=1.05)
|
||||
ax = make_map(draw_model=False, draw_faults=True, model_legends=False, clon=clon, clat=clat,
|
||||
width = 30, height = 21,) # , width=6e6, height=5e6)
|
||||
if not stack:
|
||||
slat, slon = src_dict[src_id]['lat'], src_dict[src_id]['lon']
|
||||
dist = plot_source(slat, slon)
|
||||
baz = gps2dist_azimuth(clat, clon, slat, slon, a=6.371e6, f=0)[1]
|
||||
if title:
|
||||
plt.title(
|
||||
'Plot of source {}. Distance: {:.0f}$^\circ$. BAZ: {:.0f}$^\circ$'.format(eventname, dist, baz),
|
||||
y=1.05)
|
||||
|
||||
#ax.set_xlabel('Longitude [$^\circ$]')
|
||||
#ax.set_ylabel('Latitude [$^\circ$]')
|
||||
|
||||
# prepare grids for contour plot
|
||||
#lonaxis = np.linspace(min(dic['lons']), max(dic['lons']), 250)
|
||||
#lataxis = np.linspace(min(dic['lats']), max(dic['lats']), 250)
|
||||
#longrid, latgrid = np.meshgrid(lonaxis, lataxis)
|
||||
#ttimes_grid = griddata((dic['lats'], dic['lons']), dic['ttimes'], (latgrid, longrid), method='linear')
|
||||
#levels = np.linspace(min(dic['ttimes']), max(dic['ttimes']), 75)
|
||||
|
||||
cmap = plt.get_cmap('seismic') if not absolute else plt.get_cmap('viridis')
|
||||
|
||||
vmin = -max_abs if not absolute else None
|
||||
vmax = max_abs if not absolute else None
|
||||
sc = ax.scatter(dic['lons'], dic['lats'], c=np.array(dic['ttimes']), cmap=cmap,
|
||||
vmin=vmin, vmax=vmax, edgecolors='grey', linewidths=0.2,
|
||||
s=sizefactor_stacked[stack]*np.array(dic['nttimes'])/len(src_dict), zorder=10,
|
||||
transform=ccrs.PlateCarree(), alpha=0.75)
|
||||
sc.set_edgecolor('0.')
|
||||
if colorbar:
|
||||
cb = plt.colorbar(sc, label='ttime [s]', shrink=0.5)
|
||||
|
||||
        if stack:
            savefig_path = os.path.join(savefig_dir, 'stacked_events.{}'.format(file_ext))
        else:
            # fname = f'baz{baz:03.0f}_dist{dist:03.0f}_{eventname}_srcID{src_id}.{file_ext}'
            fname = f'{eventname}_srcID{src_id}.{file_ext}'
            savefig_path = os.path.join(savefig_dir, fname)

        if not savefig_dir and not count % 9:
            plt.show()
            fig = plt.figure()  # figsize=(16, 9), dpi=300.)
            count = 0
        if savefig_dir:
            ax.figure.savefig(savefig_path, dpi=300., bbox_inches='tight', pad_inches=0.)
            print('Wrote file {}'.format(savefig_path))
            plt.clf()

    if not savefig_dir:
        plt.show()

def plot_source(lat, lon, clat=46., clon=11.):
    # basemap.drawgreatcircle(lon, lat, clon, clat, color='k', zorder=15, linestyle='dashed')
    dist, azim, bazim = gps2dist_azimuth(lat, lon, clat, clon, a=6.371e6, f=0)

    # convert the epicentral distance from metres to degrees: dist_m / (2 * pi * R) * 360 with R = 6371 km
    dist_deg = dist / 1000. / np.pi / 2. / 6371. * 360.
    return dist_deg

    # x = np.cos(np.deg2rad(azim))
    # y = np.sin(np.deg2rad(azim))
    # print(x, y)
    # ax.plot([0, x], [0, y], 'r', zorder=15)


def read_fmtomo_tt_file(otimes_fname, rec_dict, synth_fname=None, rtimes_fname=None, demean=True, absolute=False,
                        height_correction_vp=None, R=6371., relative_to_otimes=True, only_otimes=False):
    # with open(otimes_fname, 'r') as infile:
    #     # skip first line of otimes.dat. First line contains N_rays
    #     obsarray = infile.readlines()[1:]
    obsarray = np.loadtxt(otimes_fname, skiprows=1)

    if height_correction_vp:
        if absolute:
            print('APPLICATION OF HEIGHT CORRECTION FOR ABS NOT IMPLEMENTED YET. Not needed!?')
            # print('Applying height correction of {} km/s'.format(height_correction_vp))
        else:
            height_correction_vp = None
            print('Will not apply height correction for relative values.')

    if synth_fname:
        synth_array = np.genfromtxt(synth_fname)

    if rtimes_fname:
        ref_array = np.genfromtxt(rtimes_fname)

    if synth_fname and rtimes_fname:
        # with open(synth_fname, 'r') as infile:
        #     synth_array = infile.readlines()
        # with open(rtimes_fname, 'r') as infile:
        #     ref_array = infile.readlines()

        # mean_obs = np.mean(obsarray[:, 4])
        # mean_synth = np.mean(synth_array[:, 4])
        # mean_ref = np.mean(ref_array[:, 4])

        # mean_rel_synth = mean_synth - mean_ref
        # print('Means:\nobserved: {}, relative {}'.format(mean_obs, mean_rel_synth))
        synth_src_means = {}

        nsrc = obsarray[-1, 1]
        for index in range(int(nsrc)):
            srcid = index + 1
            indices = np.where(synth_array[:, 1].astype(int) == srcid)
            synth_src_means[srcid] = np.mean(synth_array[indices, 4] - ref_array[indices, 4])

        print('Average mean for sources: {} s'.format(np.mean(list(synth_src_means.values()))))

    if demean:
        print('Removing mean from synthetic times...')
    else:
        print('Will not mean-correct travel times')
        # mean_rel_synth = 0.

    residuals_dict = {}

    for index, line in enumerate(obsarray):
        rec_id = int(line[0])
        src_id = int(line[1])
        ttime = line[4] if relative_to_otimes or only_otimes else 0

        # get residuals from reference file if fname is given
        if synth_fname:
            # get synthetic time
            ttime_syn = synth_array[index][4]
        if rtimes_fname:
            # get reference time (1d travel time)
            ttime_ref = ref_array[index][4]
        if synth_fname and rtimes_fname:
            # calculate synthetic time relative to 1d travel time
            ttime_rel_syn = ttime_syn - ttime_ref
            # print(ttime_ref, ttime_syn, ttime_rel_syn, ttime, ttime - ttime_rel_syn)
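            # With t_obs = ttime and t_syn_rel = ttime_syn - ttime_ref, the assignment below
            # evaluates to t_obs - t_syn_rel + mean_rel_src: positive residuals mark arrivals
            # that are later (slower) than predicted by the current model.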
            # calculate difference between relative observed and model times, multiply with -1 to get tsynth - tobs
            mean_rel_src = synth_src_means[src_id] if demean else 0.
            if not only_otimes:
                ttime = -1 * (ttime_rel_syn - ttime - mean_rel_src)
        if absolute:
            ttime = ttime_syn
        try:
            uncertainty = line[5]
        except IndexError:
            uncertainty = 0

        # create dictionary for source if it does not exist yet
        if src_id not in residuals_dict:
            residuals_dict[src_id] = {'lats': [],
                                      'lons': [],
                                      'rads': [],
                                      'ttimes': [],
                                      'nttimes': [],
                                      'uncerts': []}
        # choose correct dictionary
        dic = residuals_dict[src_id]
        # get coordinates from organized receivers dictionary
        dic['lats'].append(rec_dict[rec_id]['lat'])
        dic['lons'].append(rec_dict[rec_id]['lon'])
        dic['rads'].append(rec_dict[rec_id]['rad'])
        # rad is depth?
        # elev = - rec_dict[rec_id]['rad']
        # height_correction_vp = 0
        # station_height_corr = elev / (height_correction_vp) if height_correction_vp else 0.
        # if elev < 0: print(elev, station_height_corr)
        # MP MP NO HEIGHT CORRECTION FOR FMTOMO! REFTIMES CONTAIN STATION HEIGHT!!!!!
        dic['ttimes'].append(ttime)  # - station_height_corr)
        # number of ttimes will be set to 1 (determines size in scatter, only relevant for stacked ttimes)
        dic['nttimes'].append(1)
        dic['uncerts'].append(uncertainty)

    return residuals_dict


def stack_sources(residuals_dict):
    all_ttimes = {}
    all_uncs = {}
    for src_id, dic in residuals_dict.items():
        for index in range(len(dic['lats'])):
            lat = dic['lats'][index]
            lon = dic['lons'][index]
            rad = dic['rads'][index]
            source_tuple = (lat, lon, rad)
            if source_tuple not in all_ttimes:
                all_ttimes[source_tuple] = []
                all_uncs[source_tuple] = []
            all_ttimes[source_tuple].append(dic['ttimes'][index])
            all_uncs[source_tuple].append(dic['uncerts'][index])
    # create new dictionary in the shape of residuals dict with only one source
    stacked_dict = {}
    stacked_dict[1] = {'lats': [],
                       'lons': [],
                       'rads': [],
                       'ttimes': [],
                       'nttimes': [],
                       'misfits': []}

    for source_tuple in all_ttimes.keys():
        ttimes_list = all_ttimes[source_tuple]
        uncs_list = all_uncs[source_tuple]
        # chi-square style misfit per receiver: mean of (residual / uncertainty)^2
        misfit = np.sum((np.array(ttimes_list) / np.array(uncs_list)) ** 2) / len(ttimes_list)
        lat, lon, rad = source_tuple
        stacked_dict[1]['lats'].append(lat)
        stacked_dict[1]['lons'].append(lon)
        stacked_dict[1]['rads'].append(rad)
        stacked_dict[1]['ttimes'].append(np.mean(ttimes_list))
        stacked_dict[1]['nttimes'].append(len(ttimes_list))
        stacked_dict[1]['misfits'].append(misfit)
        # stacked_dict[1]['ttimes_list'].append(ttimes_list)

    ttimes = [d['ttimes'] for d in stacked_dict.values()]
    plt.hist(ttimes, 100)
    plt.axvline(np.mean(ttimes), c='r')
    plt.xlabel('ttime [s]')

    return stacked_dict


def compare_residuals():
    wdir = '/rscratch/minos13/marcel/fmtomo_alparray/'
    os.chdir(wdir)
    with open('alparray_mantle_diehl_crust_included_v3_hf/stacked_residuals.json', 'r') as infile:
        sr = json.load(infile)
    with open('alparray_mantle_diehl_crust_included_v3_no_density_hf/stacked_residuals.json', 'r') as infile:
        srnd = json.load(infile)
    mfdiff = np.array(sr['1']['misfits']) - np.array(srnd['1']['misfits'])
    sc = plt.scatter(sr['1']['lons'], sr['1']['lats'], c=mfdiff, cmap='seismic', vmin=-7.5, vmax=7.5)
    cb = plt.colorbar(sc)
    title = 'Difference in station misfit after 12 iterations (density kernel - no density).' \
            ' Blue: decrease in residuals using density. Mean: {:.4f}'
    plt.title(title.format(np.mean(mfdiff)))
    plt.xlabel('Longitude (deg)')
    plt.ylabel('Latitude (deg)')
    cb.set_label('Misfit')
    sc.set_linewidth(0.2)
    sc.set_edgecolor('k')
    plt.show()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Plot residuals of fm3d output file (arrivals.dat format) on map.')
    parser.add_argument('--otimes', default='otimes.dat', help='otimes.dat file (default: otimes.dat)')
    parser.add_argument('--sources', default='sources.in', help='sources.in file (default: sources.in)')
    parser.add_argument('--receivers', default='receivers.in', help='receivers.in file (default: receivers.in)')
    parser.add_argument('--sourcefile', default='input_source_file.in',
                        help='input_source_file (default: input_source_file.in)')
    parser.add_argument('--arrivals', default=None, help='arrivals file')
    parser.add_argument('--rtimes', default=None, help='reference file')
    parser.add_argument('--fext', default='png', help='file extension for images (if -w)')
    parser.add_argument('-s', '--stack', action='store_true', dest='stack', help='stack picks')
    parser.add_argument('-w', '--write', action='store_true', default=False, help='write figures to disk')
    parser.add_argument('-a', '--abs', action='store_true', dest='abs', default=False, help='compute absolute values')
    parser.add_argument('-nd', '--no_demean', action='store_true', default=False, help='do not demean travel times')
    parser.add_argument('-nc', '--no_colorbar', action='store_true', default=False, help='do not plot colorbar')
    parser.add_argument('-nt', '--no_title', action='store_true', default=False, help='do not plot title')
    parser.add_argument('-no', '--no_otimes', action='store_true', default=False, help='do not plot relative to otimes')
    parser.add_argument('-oo', '--only_otimes', action='store_true', default=False,
                        help='only plot observed times (otimes)')
    parser.add_argument('-i', '--ids', dest='source_ids', type=int, default=[], nargs='*',
                        help='Plot only one source with FMTOMO internal id(s).')

    args = parser.parse_args()

    savefig_dir = 'residual_maps_out' if args.write else ''
    plot_map(args.otimes, args.sources, args.receivers, args.sourcefile, arrivals_fname=args.arrivals,
             rtimes_fname=args.rtimes, stack=args.stack, source_ids=args.source_ids, colorbar=not args.no_colorbar,
             title=not args.no_title, savefig_dir=savefig_dir, demean=not args.no_demean, absolute=args.abs,
             file_ext=args.fext, relative_to_otimes=not args.no_otimes, only_otimes=args.only_otimes)
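
# Example invocation (hypothetical script and file names; flags as defined above),
# writing a stacked residual map to residual_maps_out/stacked_events.png:
#   python plot_residuals.py --otimes otimes.dat --arrivals it_12/arrivals.dat \
#       --rtimes rtimes_tele.dat --stack --write --fext png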

270
pylot/tomography/fmtomo_tools/quantify_ray_crossing.py
Normal file
@@ -0,0 +1,270 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

'''
Estimate ray crossing at each depth using rays (as .npz objects) precalculated from the
FMTOMO file rays.dat (e.g. with the script plot_rays_on_plane or rays_to_npy below).
'''
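# Each .npz archive written by rays_to_npy (below) holds one array per ray with columns
# (radius [km], latitude [deg], longitude [deg]), sub-sampled to every n_points-th ray point.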

import os
import glob
import json

import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import RegularGridInterpolator

from pylot.tomography.fmtomo_tools.fmtomo_grid_utils import read_vgrid_regular, read_vgrid, write_vtk
from pylot.tomography.map_utils import angle_marker, make_map

pjoin = os.path.join


def main(fdir, it=1, station_coords_file='/data/AdriaArray_Data/various/station_coords.json'):
    fnin_vgrid = pjoin(fdir, f'it_{it}/vgrids.in')
    fnout_vtk = pjoin(fdir, f'vgrids_it{it}_w_ray_crossings.vtk')
    fnout_npy = pjoin(fdir, f'vgrids_hf_it{it}' + '_crossing_bins_{}-{}_mincount_{}_STEP_{}.npy')

    # binsize (degrees), min_count for each direction to raise quality by 1 increment
    binsize_az = 90.
    binsize_incl = 60.
    min_count = 5
    plot = False
    write = True
    draw_hists = True
    draw_simple_hists = True
    step = 2  # take nth sample of vgrid for calculating cells

    # for stations plotting
    with open(station_coords_file, 'r') as infile:
        stations = json.load(infile)
    lonsStations, latsStations = zip(*[(sta['longitude'], sta['latitude']) for sta in stations.values()])

    # some constants etc.
    R = 6371.
    bins_azim = np.arange(0, 360 + binsize_az, binsize_az)
    bins_incl = np.arange(0., 60. + binsize_incl, binsize_incl)
    nbins_azim = len(bins_azim) - 1
    nbins_incl = len(bins_incl) - 1
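    # with the defaults above: bins_azim = [0, 90, 180, 270, 360] and bins_incl = [0, 60],
    # i.e. 4 x 1 = 4 direction cells; 'quality' below is the fraction of these cells hit by
    # at least min_count rays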

    # for plotting:
    bins = bins_azim
    binsize = binsize_az
    nbins = nbins_azim

    # get vgrid and estimate dlat/dlon
    vgrid_reg = read_vgrid_regular(fnin_vgrid)
    lons, lats, rads = [array[::step] for array in vgrid_reg[0]]
    vps, covs, sms, pdvs = [array[::step, ::step, ::step] for array in vgrid_reg[1:]]

    dlat = lats[1] - lats[0]
    dlon = lons[1] - lons[0]

    # grid = init_dict()
    # grid['vps'] = list(vps.ravel())
    # grid['covs'] = list(covs.ravel())
    # grid['sms'] = list(sms.ravel())
    # grid['pdvs'] = list(pdvs.ravel())
    # grid['res'] = []
    #
    # for rad in rads:
    #     for lat in lats:
    #         for lon in lons:
    #             depth = R - rad
    #             grid['lons'].append(lon)
    #             grid['lats'].append(lat)
    #             grid['depths'].append(depth)
    #             x, y, z = pol2cart(lat, lon, R - depth)
    #             grid['xs'].append(x)
    #             grid['ys'].append(y)
    #             grid['zs'].append(z)

    # just for plotting at certain depths (not for write!!!)
    depths = np.arange(0., 800., 100)
    if not write:
        rads = R - depths

    resolutions = []
    lons_export = []
    lats_export = []
    rads_export = []

    print(rads)
    for rad in rads:
        # prepare dict containing horizontal angle for key tuple (ilat, ilon)
        vgrid_azim_ids = {}
        vgrid_incl_ids = {}

        print('Working on radius', rad)
        # iterate over rays (fpath_events is set at module level in the __main__ block below)
        for fnin_rays in glob.glob1(fpath_events, '*.npz'):
            rays = np.load(os.path.join(fpath_events, fnin_rays), allow_pickle=True)
            # raypoints = np.zeros((len(rays), 3))
            # for index in range(len(rays)):
            for ray in rays.values():
                # ray = rays[index]
                # get index of closest depth here from ray (TAKE CARE OF ALIASING! Using nearest value)
                ind_min = np.abs(ray[:, 0] - rad).argmin()
                # in case ind_min is at upper boundary (surface)
                if ind_min == len(ray[:, 0]) - 1:
                    ind_min -= 1
                # get diffs to following ray index (km, deg, deg)!
                ray_diff = np.diff(ray[ind_min:ind_min + 2], axis=0)[0]
                lat = ray[ind_min, 1]
                # arc lengths: 1 deg of latitude ~ pi * R / 180 km; longitude scaled by cos(lat)
                lat_diff_km = ray_diff[1] * (np.pi * R) / 180.
                lon_diff_km = ray_diff[2] * (np.pi * np.cos(np.deg2rad(lat)) * R) / 180.
                r_diff_km = ray_diff[0]
                dlateral = np.sqrt(lat_diff_km ** 2 + lon_diff_km ** 2)
                # calculate horizontal angle
                azim = np.rad2deg(np.arctan2(lon_diff_km, lat_diff_km))
                incl = np.rad2deg(np.arctan(dlateral / r_diff_km))
                # correct angle from -180-180 to 0-360 and also change azim to bazim
                bazim = azim + 180.
                # angles[index] = angle
                # raypoints[index] = ray[ind_min]
                lat, lon = ray[ind_min, 1:]
                lati = np.where((lats <= lat + dlat / 2.) & (lats > lat - dlat / 2.))[0][0]
                loni = np.where((lons <= lon + dlon / 2.) & (lons > lon - dlon / 2.))[0][0]
                key = (lati, loni)
                if key not in vgrid_azim_ids:
                    vgrid_azim_ids[key] = []
                    vgrid_incl_ids[key] = []
                vgrid_azim_ids[key].append(bazim)
                vgrid_incl_ids[key].append(incl)
                # vgrid_ids[index] = np.array([lati, loni])
                # plt.scatter(lons[loni], lats[lati], c='r', marker='o', facecolor='none', alpha=0.5)
        # sc = plt.scatter(raypoints[:,2], raypoints[:,1], c=angles[:])

        vgrid_angles_quality = {}
        vgrid_azim_hist = {}

        for inds in vgrid_azim_ids.keys():
            azims = vgrid_azim_ids[inds]
            incls = vgrid_incl_ids[inds]
            lati, loni = inds
            hist_az, _ = np.histogram(azims, bins=bins_azim)
            hist_incl, _ = np.histogram(incls, bins=bins_incl)
            hist2d, _, _ = np.histogram2d(azims, incls, bins=[bins_azim, bins_incl])
            # hist_az, hist_inc = hist2d
            hist = hist2d.ravel()
            quality = len(hist[hist > min_count - 1]) / (nbins_azim * nbins_incl)
            # quality = len(hist_az[hist_az > min_count - 1]) / nbins_azim
            # quality = len(hist_incl[hist_incl > min_count - 1]) / nbins_incl
            vgrid_angles_quality[inds] = quality
            vgrid_azim_hist[inds] = hist_az

        # LATS = []
        # LONS = []
        qualities = []
        hists = []

        for ilat, lat in enumerate(lats):
            for ilon, lon in enumerate(lons):
                # LATS.append(lat)
                # LONS.append(lon)
                key = (ilat, ilon)
                quality = vgrid_angles_quality.get(key)
                hist = vgrid_azim_hist.get(key)
                # print(key, quality)
                if not quality:
                    quality = 0.
                # hist for plotting only!
                if hist is None:
                    hist = np.zeros(nbins)
                qualities.append(quality)
                hists.append(hist)
                lons_export.append(lon)
                lats_export.append(lat)
                rads_export.append(rad)

        resolutions += qualities

        if plot:
            # TODO: still old basemap code
            raise NotImplementedError('Still using old basemap code')
            fig = plt.figure()
            ax = fig.add_subplot(111)
            bmap = make_map(ax, resolution='l')

            LONS, LATS = np.meshgrid(lons, lats)

            sc = bmap.pcolormesh(LONS - dlon / 2., LATS - dlat / 2.,
                                 np.array(qualities).reshape(LATS.shape), latlon=True, zorder=1.5,
                                 shading='nearest')  # , s=np.array(qualities)*100)

            # sc = plt.contourf(LONS, LATS, np.array(qualities).reshape(LATS.shape), levels=21)

            if draw_hists:
                for index, bin in enumerate(bins[:-1]):
                    marker = angle_marker(bin, bin + binsize)
                    s = np.array(hists)[:, index]
                    if draw_simple_hists:
                        s[s < min_count] = 0
                        s[s >= min_count] = 150
                    else:
                        s *= 10.
                    sc_angle = bmap.scatter(LONS, LATS, s=s, marker=marker, edgecolors='0.75', alpha=1., linewidths=0.6,
                                            latlon=True, zorder=1.5, )
                    sc_angle.set_facecolor('none')
                    # sc_angle = plt.scatter(LONS, LATS, s=1, marker='.', edgecolors='1.', alpha=1., linewidths=1.)

            bmap.scatter(lonsStations, latsStations, c='k', s=0.5, latlon=True, zorder=1.5, )  # , alpha=0.5)
            plt.title('Azimuthal coverage at depth of {}km, {} bins'.format((R - rad), nbins_azim * nbins_incl))
            # plt.xlim([0, 22])
            # plt.ylim([40, 53])
            # plt.gca().set_aspect('equal')
            cbar = plt.colorbar(sc)
            cbar.set_label('Azimuthal coverage')
            plt.show()

    if write:
        rginter_res = RegularGridInterpolator((rads, lats, lons),
                                              np.array(resolutions).reshape((len(rads), len(lats), len(lons))),
                                              bounds_error=False, fill_value=0.)
        grid = read_vgrid(fnin_vgrid)[0]
        grid['res'] = []
        for lon, lat, depth in zip(grid['lons'], grid['lats'], grid['depths']):
            grid['res'].append(rginter_res((R - depth, lat, lon)))
        a = np.array([lons_export, lats_export, rads_export, resolutions])
        np.save(fnout_npy.format(nbins_azim, nbins_incl, min_count, step), a)
        write_vtk(grid, fnout_vtk,
                  write_data=['vps', 'res', 'covs', 'frechs'])  # , clon=11., clat=46., dlon=12., dlat=6., sort=True)

def rays_to_npy(infile, fnout_npy, n_points=10):
    rays = {}
    i = 0
    src_id_old = 1
    while True:
        i += 1
        l1 = infile.readline()
        if l1 == '':
            break
        rec_id, src_id, ray_id = [int(item) for item in l1.split()[:3]]
        if src_id not in rays:
            rays[src_id] = []
        l2 = infile.readline()
        n = int(l2.split()[0])
        ray = np.zeros((n, 3))
        for index in range(n):
            r, lat, lon = [float(item) for item in infile.readline().split()]
            ray[index] = np.array([r, np.rad2deg(lat), np.rad2deg(lon)])
        rays[src_id].append(ray[::n_points])

    dirname = os.path.split(fnout_npy)[0]
    if not os.path.isdir(dirname):
        os.mkdir(dirname)

    for src_id, ray in rays.items():
        np.savez(fnout_npy.format(src_id), *ray)
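
# With the __main__ settings below this writes one archive per source, e.g.
# rays_npy_it12/it_12_rays_event_1.npz, each containing all (sub-sampled) rays of that event.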

if __name__ == '__main__':
    fmtomodir = '/data/AdriaArray_Data/fmtomo_adriaarray/alpadege/crust_incl_TESAURO_sm30_dm1/'
    it = 12

    fpath_events = pjoin(fmtomodir, f'rays_npy_it{it}/')
    infile = open(pjoin(fmtomodir, f'it_{it}/rays.dat'), 'r')
    fnout_npy = pjoin(fpath_events, f'it_{it}_rays_event_' + '{}.npz')

    rays_to_npy(infile, fnout_npy)

    main(fmtomodir, it=it)
43
pylot/tomography/fmtomo_tools/residual_histograms.py
Normal file
@@ -0,0 +1,43 @@
import os
import numpy as np
import matplotlib.pyplot as plt

fpaths = ['/data/AlpArray_Data/fmtomo/v6/fwi_model_wolfgang_test',
          '/data/AlpArray_Data/fmtomo/v6/final']
arr_path_ref = 'rtimes_tele.dat'

iterations = [0, 1, 2, 3, 4]
labels = ['rel_1d'] + [f'rel_it{it}' for it in iterations]

colors = plt.get_cmap('viridis')(np.linspace(0, 1, len(iterations)))
colors = np.vstack(((.5, .5, .5, 1), colors))

axes = []
for fpath in fpaths:
    rtimes = np.genfromtxt(os.path.join(fpath, arr_path_ref))[:, 4]
    otimes = np.genfromtxt(os.path.join(fpath, 'otimes.dat'), skip_header=1)[:, 4]

    res = np.empty((len(rtimes), len(iterations) + 1))

    res[:, 0] = otimes  # = otimes + rtimes - rtimes

    for index, itr in enumerate(iterations):
        arr_path = f'it_{itr}/arrivals.dat'

        arrivals = np.genfromtxt(os.path.join(fpath, arr_path))[:, 4]
        res[:, index + 1] = (otimes + rtimes) - arrivals  # otimes (rel) + rtimes => otimes (abs)

    fig = plt.figure()
    ax = fig.add_subplot(111)
    axes.append(ax)
    ax.hist(res, bins=np.linspace(-5, 5, 50), rwidth=0.9, color=colors, label=labels)
    ax.set_xlim(-1.5, 1.5)
    plt.title(fpath)

    ax.legend()

ylim_max = max([ax.get_ylim()[1] for ax in axes])
for ax in axes:
    ax.set_ylim(0, ylim_max)

plt.show()
187
pylot/tomography/fmtomo_tools/station_density_kde.py
Normal file
@@ -0,0 +1,187 @@
# This script estimates the density of the station distribution using a Gaussian KDE. For each point a Gaussian
# kernel with sigma ~ grid size (?) is summed in 2D, giving an estimated density function. If the sum of all
# kernels evaluated at the data points increases, there are more regions with high station density.

import os
import copy

import matplotlib.pyplot as plt
import numpy as np
from scipy.stats import gaussian_kde

from pylot.tomography.fmtomo_tools.fmtomo_teleseismic_utils import organize_receivers, organize_event_names, export_otimes


def get_events_dict(otimes, recs):
    events_dict = {}
    for otime in otimes:
        rec_id, src_id = int(otime[0]), int(otime[1])
        if src_id not in events_dict:
            events_dict[src_id] = dict(lons=[], lats=[], rec_ids=[])
        events_dict[src_id]['lons'].append(recs[rec_id]['lon'])
        events_dict[src_id]['lats'].append(recs[rec_id]['lat'])
        events_dict[src_id]['rec_ids'].append(rec_id)
    return events_dict
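
# get_events_dict maps each FMTOMO source id to the receivers that recorded it, e.g.
# {1: {'lons': [11.2, ...], 'lats': [46.1, ...], 'rec_ids': [3, ...]}, 2: {...}, ...}
# (coordinate values here are made up for illustration).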

def transfer_kde_to_weight(kdes):
    weights = 0.1 - kdes
    indices_negative = np.where(weights < 0)[0]
    if indices_negative.size:
        print('Warning! {} negative indices in weights. Set to 0.'.format(len(indices_negative)))
        weights[indices_negative] = 0.
    return weights


def calc_event_kernels(events_dict, eventnames, kde_factor, plot_single=False, fdir_out=None):
    kernels = {}
    evaluated_kernels = {}

    if plot_single:
        fig = plt.figure(figsize=(16, 9))

    for index in range(len(eventnames)):
        eventname = eventnames[index]
        source = index + 1
        lons = events_dict[source]['lons']
        lats = events_dict[source]['lats']
        kernel = gaussian_kde(np.array([lons, lats]), bw_method=kde_factor)
        evaluated_kernel = kernel((lons, lats))
        kernels[source] = kernel
        evaluated_kernels[source] = evaluated_kernel
        if plot_single:
            assert fdir_out, 'Need to specify output directory for plots'
            if not os.path.isdir(fdir_out):
                os.mkdir(fdir_out)
            weights = transfer_kde_to_weight(evaluated_kernel)
            iterdict = {'kde': evaluated_kernel, 'weight': weights}
            for name, colorarray in iterdict.items():
                if name == 'kde':
                    vmin = 0
                    vmax = 0.05
                else:
                    vmin = None
                    vmax = None
                scs = plt.scatter(lons, lats, marker='o', c=colorarray, lw=1.0, vmin=vmin, vmax=vmax)
                cbar = plt.colorbar(scs)
                plt.title('Station {} for event {}'.format(name, eventname))
                fig.savefig(os.path.join(fdir_out, '{}_{}.svg'.format(eventname, name)))
                fig.clear()
                # plt.show()
    if plot_single:
        plt.close('all')
    return kernels, evaluated_kernels


def plot_hist_kernels(evaluated_kernels):
    all_kernels = np.array([])
    for kernel in evaluated_kernels.values():
        all_kernels = np.append(all_kernels, kernel)
    plt.hist(all_kernels, bins=200, label='kernels')
    plt.title('Distribution of all kernels.')
    plt.show()


def plot_hist_uncertainties(otimes_new, otimes_orig, bins=200):
    plt.hist(otimes_orig[:, -1], lw=1, bins=bins, label='Unmodified uncertainties', fc=(.5, .5, 0., 0.5))
    plt.hist(otimes_new[:, -1], lw=1, bins=bins, label='New uncertainties', fc=(0., .5, .5, 0.5))
    plt.legend()
    plt.xlabel('Uncertainty [s]')
    plt.title('Uncertainty distribution before and after correction.')
    plt.show()


def plot_uncertainties(otimes, recs, eventnames, fdir_out, vmin=0.1, vmax=0.4):
    if not os.path.isdir(fdir_out):
        os.mkdir(fdir_out)

    print('Writing output to directory:', fdir_out)
    epsilon = 1e-6

    fig = plt.figure(figsize=(16, 9))
    for index, eventname in enumerate(eventnames):
        src_id = index + 1
        indices = np.where(abs(otimes[:, 1] - src_id) <= epsilon)
        rec_ids = otimes[:, 0][indices]
        uncs = otimes[:, -1][indices]
        # receiver ids read from the otimes array are floats, cast to int for dict lookup
        lats = [recs[int(rec_id)]['lat'] for rec_id in rec_ids]
        lons = [recs[int(rec_id)]['lon'] for rec_id in rec_ids]
        sc = plt.scatter(lons, lats, c=uncs, vmin=vmin, vmax=vmax)
        plt.title(eventname)
        cb = plt.colorbar(sc)
        fig.savefig(os.path.join(fdir_out, '{}.svg'.format(eventname)))
        fig.clear()
    plt.close('all')


def exp_func(uncertainty, eval_kernel, exponent=30):
    return uncertainty * np.exp(exponent * eval_kernel)
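
# Uncertainties grow exponentially with local station density, so densely sampled regions
# are down-weighted in the inversion: e.g. an evaluated kernel of 0.05 scales the pick
# uncertainty by exp(30 * 0.05) = e^1.5, roughly a factor of 4.5.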

def modify_otimes(otimes, recs, kernels):
    print('Applying Kernel on otimes and modifying uncertainties...')
    for otime in otimes:
        rec_id, src_id = int(otime[0]), int(otime[1])
        lon = recs[rec_id]['lon']
        lat = recs[rec_id]['lat']
        eval_kernel = kernels[src_id]((lon, lat))
        otime[-1] = exp_func(otime[-1], eval_kernel)
    return otimes


def plot_average_kernel(evaluated_kernels, eventnames):
    ids_names = [(index + 1, eventname) for index, eventname in enumerate(eventnames)]
    sorted_events = sorted(ids_names, key=lambda x: x[1])
    kernel_sums = {src_id: np.sum(kernel) for src_id, kernel in evaluated_kernels.items()}
    src_ids = [item[0] for item in sorted_events]
    eventnames = [item[1] for item in sorted_events]
    sums = [kernel_sums[src_id] for src_id in src_ids]
    plt.plot(sums)
    xticks = np.arange(0, len(eventnames), step=len(eventnames) // 10)
    xticklabels = [eventname[:8] for index, eventname in enumerate(eventnames) if index in xticks]
    plt.xticks(xticks, xticklabels)
    plt.title('Average kernel per station. High value means inhomogeneous station distribution.')
    plt.show()


def export_station_kdes(events_dict, evaluated_kernels, fnout):
    with open(fnout, 'w') as outfile:
        for src_id in range(1, len(events_dict) + 1):
            kernel = evaluated_kernels[src_id]
            for kde, rec_id in zip(kernel, events_dict[src_id]['rec_ids']):
                outfile.write('{} {} {}\n'.format(rec_id, src_id, kde))


def main(plot=False, plot_detailed=False):
    working_path = '/rscratch/minos13/marcel/fmtomo_alparray/v4/alparray_mantle_diehl_crust_included_hf_gradient_smoothing'
    fdir_out = 'station_density'
    fnout = os.path.join(fdir_out, 'station_kdes.txt')

    os.chdir(working_path)
    eventnames = organize_event_names('input_source_file_P.in')
    recs = organize_receivers('receivers.in')
    otimes_data = np.genfromtxt('otimes_orig.dat', skip_header=1)

    # kernel width ~ grid size? or station spacing?
    kde_size = 30  # km
    kde_factor = kde_size / (6371. * np.pi / 180.)
    print('KDE width (degree):', kde_factor)
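    # 1 deg of a great circle on Earth is 6371 km * pi / 180 ~ 111.2 km, so the 30 km kernel
    # corresponds to a bandwidth of roughly 0.27 deg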

    events_dict = get_events_dict(otimes_data, recs)
    kernels, evaluated_kernels = calc_event_kernels(events_dict, eventnames, kde_factor, plot_single=plot_detailed,
                                                    fdir_out=fdir_out)

    export_station_kdes(events_dict, evaluated_kernels, fnout)

    otimes_modif = modify_otimes(copy.deepcopy(otimes_data), recs, kernels)
    # export_otimes(otimes_modif, os.path.join(working_path, 'otimes_modif_kernel.dat'))
    if plot:
        plot_average_kernel(evaluated_kernels, eventnames)
        plot_hist_kernels(evaluated_kernels)
        plot_hist_uncertainties(otimes_modif, otimes_data)
    if plot_detailed:
        plot_uncertainties(otimes_data, recs, eventnames, 'uncerts_old')
        plot_uncertainties(otimes_modif, recs, eventnames, 'uncerts_new')


if __name__ == '__main__':
    main(plot=True, plot_detailed=True)
17
pylot/tomography/fmtomo_tools/submit_fmtomo.sh
Executable file
@@ -0,0 +1,17 @@
#!/bin/bash

ulimit -s 8192

conda activate pylot_311

## qsub -l low -cwd -l "os=*stretch" -pe mpi-fu 40 submit_fmtomo.sh
#$ -l low
#$ -cwd
#$ -pe smp 40
#$ -q "*@minos11, *@minos12, *@minos15"

export PYTHONPATH="$PYTHONPATH:/home/marcel/git/"
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot/"
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot_tools/"

python fmtomo.py
10
pylot/tomography/fmtomo_tools/submit_fmtomo_grid_utils.sh
Normal file
@@ -0,0 +1,10 @@
#!/bin/bash

conda activate py37

# qsub -l low -cwd -l "os=*stretch" -pe mpi-fu 40 submit_fmtomo_grid_utils.sh

export PYTHONPATH="$PYTHONPATH:/home/marcel/git/"
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot_tools/"

python fmtomo_grid_utils.py
21
pylot/tomography/fmtomo_tools/submit_fmtomo_run.py
Normal file
@@ -0,0 +1,21 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
from pylot.tomography.fmtomo_utils import Tomo3d

# tomo = Tomo3d('/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_from_m6.0_tesauro_model_on_top',
#               '/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_from_m6.0_tesauro_model_on_top',
#               buildObs=False, saveRays=False)

wdir = '/rscratch/minos13/marcel/fmtomo_alparray/'

# directories = ['alparray_mantle_from_m6.0_diehl_crustal_corrections_sm1_damp3',
#                'alparray_mantle_from_m6.0_diehl_crustal_corrections_sm1_damp30']
directories = ['alparray_mantle_from_m6.0_diehl_crustal_corrections_sm10_damp3',
               'alparray_mantle_from_m6.0_diehl_crustal_corrections_sm10_damp30']

for dire in directories:
    path = os.path.join(wdir, dire)
    tomo = Tomo3d(path, path, buildObs=False, saveRays=False)
    tomo.runTOMO3D(40, 8)
12
pylot/tomography/fmtomo_tools/submit_fmtomo_run.sh
Normal file
@@ -0,0 +1,12 @@
#!/bin/bash
ulimit -s 8192

#$ -l low,os=*stretch
#$ -l h_vmem=5G
#$ -cwd
#$ -pe smp 40

export PYTHONPATH="$PYTHONPATH:/home/marcel/git/"
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot_tools/"

python submit_fmtomo_run.py
62
pylot/tomography/fmtomo_tools/submit_fmtomo_teleseismic.sh
Executable file
@@ -0,0 +1,62 @@
#!/bin/bash

ulimit -s 8192

conda activate pylot_311

## qsub -l low -cwd -l "os=*stretch" -pe mpi-fu 40 submit_fmtomo_teleseismic.sh
#$ -l low
##$ -l os=*stretch
#$ -cwd
#$ -pe smp 40
#$ -q "*@minos11, *@minos12, *@minos15"
### WARNING fm3d_prepare_tele not compiled correctly for all cluster machines (e.g. not working on minos12, minos15)

export PYTHONPATH="$PYTHONPATH:/home/marcel/git/"
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot/"
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot_tools/"

#python fmtomo_teleseismic.py '/data/AlpArray_Data/dmt_database_mantle_0.03-0.1_revised' \
#'/data/AlpArray_Data/fmtomo/v6/crust_incl_lf_sm_FIX_DTS_grad_sm30_dm10' \
#'_correlated_0.03-0.1' -n $NSLOTS --blacklist '/rscratch/minos22/marcel/alparray/station_blacklist.csv'
# --model 'ak135_diehl_v2'

#python fmtomo_teleseismic.py '/data/AlpArray_Data/dmt_database_mantle_0.03-0.5_revised' \
#'/data/AlpArray_Data/fmtomo/v6/crust_incl_hf_sm_FIX_DTS_grad_sm30_dm10' \
#'_correlated_0.03-0.5' -n $NSLOTS --blacklist '/rscratch/minos22/marcel/alparray/station_blacklist.csv'
# --model 'ak135_diehl_v2'

#python fmtomo_teleseismic.py '/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_S_SKS' \
#'/data/AlpArray_Data/fmtomo/v6_S/crust_incl_hf_KSTL_grad_sm6_dm10/' \
#'_correlated_0.01-0.2_S*' -n $NSLOTS --blacklist '/data/AlpArray_Data/various/alparray/station_blacklist.csv'
# --model 'ak135_diehl_v2'
#
#python fmtomo_teleseismic.py '/data/AlpArray_Data/dmt_database_mantle_0.03-0.5_revised' \
# '/rscratch/minos13/marcel/fmtomo_alparray/v3.5/alparray_mantle_diehl_crust_included_hf_no_init' \
# '_correlated_0.03-0.5' -n $NSLOTS --no_write_init_nodes --model 'ak135_diehl_v2'

#python fmtomo_teleseismic.py '/data/AlpArray_Data/dmt_database_hybrid_test' \
# '/rscratch/minos13/marcel/fmtomo_alparray/v3/hybrid_method_test_pgrid_fine' \
# '' -n $NSLOTS #--blacklist '/rscratch/minos22/marcel/alparray/station_blacklist.csv' --model 'ak135_diehl_v2'

#python fmtomo_teleseismic.py '/data/AlpArray_Data/dmt_database_hybrid_test_diehl_v2' \
#'/data/AlpArray_Data/fmtomo/v6/hybrid_method_test' \
#'' -n $NSLOTS --model 'ak135_diehl_v2'
#--blacklist '/rscratch/minos22/marcel/alparray/station_blacklist.csv'

#python fmtomo_teleseismic.py '/data/AlpArray_Data/dmt_database_hybrid_test_diehl_v2_orig_box_more_stations' \
#'/data/AlpArray_Data/fmtomo/v5/hybrid_method_tests_diss/v5_orig_box_size_more_receiver_no_demean_CORRECTED_depth_sampling' \
#'' -n $NSLOTS --model 'ak135_diehl_v2'

#python fmtomo_teleseismic.py '/data/AlpArray_Data/dmt_database_synth_model_mk6_it3_no_rotation' \
#'/data/AlpArray_Data/fmtomo/v6_resolution_analysis/fwi_mk6_it3_P' \
#'_correlated_P' -n $NSLOTS --model 'ak135_diehl_v2'

#python fmtomo_teleseismic.py '/data/AdriaArray_Data/dmt_database_alpadege' \
#'/data/AdriaArray_Data/fmtomo_adriaarray/alpadege/no_crust_correction' \
#'_correlated_0.03-0.5' -n $NSLOTS --model 'ak135'

python fmtomo_teleseismic.py '/data/AdriaArray_Data/dmt_database_minimal' \
'/data/AdriaArray_Data/fmtomo_adriaarray/alpadege/test_minimal' \
'_correlated_0.03-0.5' -n $NSLOTS --model 'ak135'
266
pylot/tomography/fmtomo_tools/tradeoff_misfit_norm.py
Normal file
@@ -0,0 +1,266 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import glob
import subprocess

import json

import numpy as np
import numpy.polynomial.polynomial as poly

from scipy.sparse import spdiags
from scipy.optimize import curve_fit

import matplotlib.pyplot as plt

from itertools import cycle

from pylot.tomography.fmtomo_tools.fmtomo_grid_utils import read_vgrid
from pylot.tomography.utils import normed_figure

# def calc_dampnorm(vgrid, vgrid_ref):
#     # calculate m - m0
#     m_m0 = np.array(vgrid['vps']) - np.array(vgrid_ref['vps'])
#
#     # calculate inverse of diagonal elements of a priori model covariance matrix (which should be a diagonal matrix)
#     # IMPORTANT: COVARIANCES ARE MOST LIKELY STANDARD DEVIATIONS -> square them
#     covs = np.array(vgrid_ref['covs'])**2
#
#     covs_inv = 1. / covs
#
#     #cm_inv = spdiags(covs_inv, 0, covs_inv.size, covs_inv.size)#
#
#     #norm_calc_old = np.dot(m_m0.transpose(), m_m0 * cm_inv)
#
#     norm = np.dot(m_m0, m_m0 * covs_inv)
#
#     return norm
#
#
# def calc_smoothnorm(vgrid, gridn, R=6371.):
#     nR, nTheta, nPhi = gridn
#
#     vps = np.array(vgrid['vps'])
#     lats = np.array(vgrid['lats'])
#     lons = np.array(vgrid['lons'])
#     depths = np.array(vgrid['depths'])
#     #vgridref = np.array(vgridref['vps'])
#     #dvgrid = vgrid - vgridref
#
#     vgarray = np.zeros((nR, nTheta, nPhi))
#     lonsarray_km = np.zeros((nR, nTheta, nPhi))
#     latsarray_km = np.zeros((nR, nTheta, nPhi))
#     radsarray_km = np.zeros((nR, nTheta, nPhi))
#     #vgarray_diff = np.zeros((nR, nTheta, nPhi))
#     smootharray = np.zeros((nR, nTheta, nPhi))
#     #for iLayer in range(nlayers):
#     globInd = 0
#     for iR in range(nR):
#         for iTheta in range(nTheta):
#             for iPhi in range(nPhi):
#                 r = R - depths[globInd]
#                 lat = lats[globInd]
#                 lon = lons[globInd]
#                 r_minor = np.cos(np.deg2rad(lat)) * r
#                 vgarray[iR, iTheta, iPhi] = vps[globInd]
#                 radsarray_km[iR, iTheta, iPhi] = r
#                 latsarray_km[iR, iTheta, iPhi] = np.pi * r * lat / 180.
#                 lonsarray_km[iR, iTheta, iPhi] = np.pi * r_minor * lon / 180.
#                 #vgarray_diff[iR, iTheta, iPhi] = vgrid[globInd]
#                 globInd += 1
#
#     # iterate over grid diffs (correct?) and sum 1 * point left -2 * point + 1 * point right in all 3 dim.
#     smsum = 0.
#     for iR in range(nR):
#         for iTheta in range(nTheta):
#             for iPhi in range(nPhi):
#                 vg = vgarray[iR, iTheta, iPhi]
#                 sum1 = sum2 = sum3 = 0.
#                 if 0 < iPhi < nPhi - 1:
#                     h = abs(lonsarray_km[iR, iTheta, iPhi + 1] - lonsarray_km[iR, iTheta, iPhi - 1]) / 2
#                     sum1 = (vgarray[iR, iTheta, iPhi - 1] - 2 * vg + vgarray[iR, iTheta, iPhi + 1]) / h**2
#                 if 0 < iTheta < nTheta - 1:
#                     h = abs(latsarray_km[iR, iTheta + 1, iPhi] - latsarray_km[iR, iTheta - 1, iPhi]) / 2
#                     sum2 = (vgarray[iR, iTheta - 1, iPhi] - 2 * vg + vgarray[iR, iTheta + 1, iPhi]) / h**2
#                 if 0 < iR < nR - 1:
#                     h = abs(radsarray_km[iR - 1, iTheta, iPhi] - radsarray_km[iR + 1, iTheta, iPhi]) / 2
#                     sum3 = (vgarray[iR - 1, iTheta, iPhi] - 2 * vg + vgarray[iR + 1, iTheta, iPhi]) / h**2
#                 smsum += np.sqrt(sum1**2 + sum2**2 + sum3**2)
#                 #print(sum1, sum2, sum3, smsum)
#                 smootharray[iR, iTheta, iPhi] = smsum  #sum1 + sum2 + sum3
#
#     # m_T * D_T * D * m ?? todo: unsure
#     norm = np.sum(smootharray ** 2)
#
#     return norm, smootharray


def calc_smoothnorm(wdir, iteration):
    smv = np.loadtxt(os.path.join(wdir, 'it_{}/smv.out'.format(iteration + 1)), skiprows=1)
    dm = np.loadtxt(os.path.join(wdir, 'it_{}/dm.out'.format(iteration + 1)), skiprows=1)
    norm = np.sum(smv * dm)
    return norm


def calc_dampnorm(wdir, iteration):
    ecmi = np.loadtxt(os.path.join(wdir, 'it_{}/ecmi.out'.format(iteration + 1)), skiprows=1)
    dm = np.loadtxt(os.path.join(wdir, 'it_{}/dm.out'.format(iteration + 1)), skiprows=1)
    norm = np.sum(ecmi * dm ** 2)
    return norm
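

# A sketch of the intent (cf. the commented-out direct computation above): smoothnorm
# approximates the roughness term m^T D^T D m and dampnorm the damping term
# (m - m0)^T C_m^-1 (m - m0), here assembled from the FMTOMO output files smv.out,
# ecmi.out and dm.out of the following iteration (file semantics inferred from the
# variable names).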


def calc_norm(wdir, iteration_number):
    dampnorm = calc_dampnorm(wdir, iteration_number)
    smoothnorm = calc_smoothnorm(wdir, iteration_number)

    print('dampnorm: ', dampnorm)
    print('smoothnorm: ', smoothnorm)

    norm = dampnorm + smoothnorm

    print('Calculated summed norm of', norm)

    return norm, dampnorm, smoothnorm


def calc_tradeoff(fpath_in, fname_out=None, iteration_number=12):
    results = {}

    for wdir in glob.glob(fpath_in):
        # wdir = '/rscratch/minos13/marcel/fmtomo_alparray/alparray_mantle_from_m6.0_diehl_crustal_corrections_sm1000_damp100/'
        smooth = float(wdir.split('_')[-2].split('sm')[-1])
        damp = float(wdir.split('_damp')[-1].split('/')[0])

        print('Calculating tradeoff for smoothing and damping of {}, {}'.format(smooth, damp))
        if smooth not in results:
            results[smooth] = {}

        iteration_number_new = iteration_number
        ecmi_path = os.path.join(wdir, 'it_{}'.format(iteration_number_new + 1), 'ecmi.out')
        smv_path = os.path.join(wdir, 'it_{}'.format(iteration_number_new + 1), 'smv.out')
        while not os.path.isfile(ecmi_path) or not os.path.isfile(smv_path):
            iteration_number_new -= 1
            ecmi_path = os.path.join(wdir, 'it_{}'.format(iteration_number_new + 1), 'ecmi.out')
            smv_path = os.path.join(wdir, 'it_{}'.format(iteration_number_new + 1), 'smv.out')
            print('WARNING: Iteration number lowered by 1:', iteration_number_new)
            if iteration_number_new <= 1:
                break

        if iteration_number_new <= 1:
            continue
        else:
            iteration_number = iteration_number_new

        # vgrid, gridn, griddelta, gridstart = read_vgrid(vgrid_path)
        # vgrid_ref = read_vgrid(os.path.join(wdir, 'vgridsref.in'))[0]

        norm, dampnorm, smoothnorm = calc_norm(wdir, iteration_number)

        try:
            fpath = os.path.join(wdir, 'residuals.dat')
            chi = float(subprocess.check_output(['tail', fpath]).split()[-1])
        except Exception as e:
            print(e)
            chi = np.nan

        results[smooth][wdir] = {'dampnorm': dampnorm, 'smoothnorm': smoothnorm,
                                 'norm': norm, 'chi': chi, 'damp': damp}

    # print some output
    for smooth, result in results.items():
        print('Smoothing:', smooth)
        for wdir, item in result.items():
            print(item['chi'], item['norm'])
        print(20 * '#')

    if fname_out:
        with open(fname_out, 'w') as outfile:
            json.dump(results, outfile)
    return results


def quadratic_function(x, a, b, c):
    return a * x ** 2 + b * x + c


def one_over_x(x, a, b, c):
    return a / (x - b) + c


def exp_func(x, a, b, c):
    return a * np.exp(-b * x) + c

def plot_tradeoff(fname_in, fix='smooth', plot_norm='both', min_smooth=0, min_damp=0, max_smooth=1e6, max_damp=1e6):
    with open(fname_in, 'r') as infile:
        results_smooth = json.load(infile)

    lines = ["-", "--", "-.", ":"]
    linecycler = cycle(lines)

    # an array will be built for each line: (smooth, damp, norm, chi)
    plot_values = []
    for smooth, result in results_smooth.items():
        for item in result.values():
            smooth = float(smooth)
            damping = item['damp']
            if smooth < min_smooth or damping < min_damp or smooth > max_smooth or damping > max_damp:
                continue
            plot_values.append(np.array([smooth, damping, item[plot_norm], item['chi']]))

    plot_values = np.array(plot_values)

    column_index = {'smooth': 0, 'damp': 1}

    keys = np.unique(plot_values[:, column_index[fix]])
    names = {'smooth': 'Smoothing', 'damp': 'Damping'}

    for key in keys:
        plot_line = plot_values[plot_values[:, column_index[fix]] == key]
        second_index = column_index['smooth'] if fix == 'damp' else column_index['damp']
        plot_line = np.array(sorted(plot_line, key=lambda x: x[second_index]))
        norms = plot_line[:, 2]
        chis = plot_line[:, 3]
        # text = [str(item) for item in plot_line[:, second_index]]

        x = np.linspace(min(norms), max(norms), num=100)

        # popt, pcov = curve_fit(one_over_x, norms, chis, method='trf')  # , bounds=[min(norms), max(norms)])
        # fit_result = one_over_x(x, *popt)
        # line = plt.plot(x, fit_result, ':', lw=0.8)[0]
        fninfo = os.path.split(fname_in)[-1].replace('.json', '').split('_f')[-1]
        label = '{}: {:g}'.format(names[fix], float(key))

        line = plt.plot(norms, chis, linestyle=next(linecycler), lw=0.8, label=label)[0]
        # coefs = poly.polyfit(norms, chis, 4)
        # ffit = poly.polyval(x, coefs)
        # line = plt.plot(x, ffit, ':', lw=0.8)[0]

        # label = '{}: {:g} (smgrad: {})'.format(names[fix], float(key), fninfo)
        plt.plot(norms, chis, c=line.get_color(), marker='.', lw=0.)
        # plt.text(norms, chis, text)
        for item in plot_line:
            plt.text(item[2], item[3], str(item[second_index]), horizontalalignment='left')
    # plt.title('Plot of Misfit against Norm ({})'.format(plot_norm))


if __name__ == '__main__':
    # calc_tradeoff('/data/AlpArray_Data/fmtomo/v5/tradeoff_curves/crust_included_grad_smooth_FIXED_dts_grad_1.5_sm*_damp*/',
    #               '/data/AlpArray_Data/various/alparray/tradeoff_v5_f1.5.json')
    # calc_tradeoff('/data/AlpArray_Data/fmtomo/v5/tradeoff_curves/crust_included_grad_smooth_FIXED_dts_sm*_damp*/',
    #               '/data/AlpArray_Data/various/alparray/tradeoff_v5_f2.0.json')

    fig = normed_figure(width_cm=10, ratio=1.)
    # tradeoff_infiles = ['tradeoff_v4_f1.5.json', 'tradeoff_v4_f3.json', 'tradeoff_v4_f10.json']
    tradeoff_infiles = ['tradeoff_v5_f2.0.json']  # , 'tradeoff_v5_f1.5.json']
    for infile in tradeoff_infiles:
        infile = os.path.join('/data/AlpArray_Data/various/alparray/', infile)
        plot_tradeoff(infile, fix='damp', plot_norm='norm')

    plt.xlim([1900, 16200])
    plt.ylim([2.72, 3.8])
    plt.xlabel('Norm')
    # plt.ylabel(r'Misfit($\frac{\chi^2}{N}$)')
    plt.ylabel(r'Misfit($\chi^2/N$)')
    # plt.title('Tradeoff curve Misfit vs Norm. Numbers in plot show smoothing values.')
    plt.legend()
    # plt.show()
    plt.savefig('/data/AlpArray_Data/sciebo/AlpArray_home/pictures/paper_II/tradeoff.pdf', dpi=300)
63
pylot/tomography/fmtomo_tools/visualize_frechet_on_vgrid.py
Executable file
@@ -0,0 +1,63 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import numpy as np
import json

from pylot.tomography.fmtomo_tools.fmtomo_grid_utils import read_vgrid, write_vtk, calculate_differences_grid, write_vgrid


def visualize_frechet_derivative(fname_vgrid, fname_frechet, fname_out_vtk=None, fname_out_json=None,
                                 fname_out_vgrid=None, diff_model=None, fname_vgrid_ref=None):
    vgrid, gridN, gridDelta, gridStart = read_vgrid(fname_vgrid, inv_index_frechet=True)
    if diff_model and fname_vgrid_ref:
        raise ValueError('Cannot have both parameters set, diff_model and fname_vgrid_ref')
    if diff_model:
        vgrid = calculate_differences_grid(vgrid, earth_model=diff_model)
    if fname_vgrid_ref:
        vgrid_ref, gridN_ref, gridDelta_ref, gridStart_ref = read_vgrid(fname_vgrid_ref, inv_index_frechet=False)
        grid_check = (gridN == gridN_ref,
                      compare_tuple(gridDelta, gridDelta_ref),
                      compare_tuple(gridStart, gridStart_ref))
        assert all(grid_check), 'Mismatch in reference grid size'
        vps = np.array(vgrid['vps'])
        vps_ref = np.array(vgrid_ref['vps'])
        vps_rel = (vps - vps_ref) / vps_ref * 100.
        print('Min/Max change {}/{}%'.format(min(vps_rel), max(vps_rel)))
        vgrid['vps'] = list(vps_rel)

    add_frechet(vgrid, fname_frechet)

    if fname_out_vgrid:
        write_vgrid(vgrid, gridN, gridDelta, gridStart, fname_out_vgrid)
    if fname_out_vtk:
        write_vtk(vgrid, fname_out_vtk, ['vps', 'frechs', 'grid_indices', 'hit_count'])
    if fname_out_json:
        with open(fname_out_json, 'w') as outfile:
            json.dump(vgrid, outfile)


def add_frechet(vgrid, fname_frechet):
    vgrid['frechs'] = list(np.zeros(len(vgrid['xs'])))
    vgrid['hit_count'] = list(np.zeros(len(vgrid['xs'])))
    with open(fname_frechet, 'r') as infile:
        while True:
            # each block starts with a header line 'n source_id m k n_pdev',
            # followed by n_pdev lines of 'pdev_index pdev'
            try:
                n, source_id, m, k, n_pdev = [int(item) for item in infile.readline().split()]
            except ValueError:
                break
            # print(n, source_id, m, k, n_pdev)
            for _ in range(n_pdev):
                pdev_index, pdev = infile.readline().split()
                pdev_index = int(pdev_index)
                pdev = float(pdev)
                vgrid_index = vgrid['inv_index'][pdev_index]
                vgrid['frechs'][vgrid_index] += pdev
                # hit by ray count
                vgrid['hit_count'][vgrid_index] += 1


def compare_tuple(t1, t2, epsilon=1e-6):
    for item1, item2 in zip(t1, t2):
        if abs(item1 - item2) > epsilon:
            return False
    return True
1323
pylot/tomography/fmtomo_utils.py
Normal file
File diff suppressed because it is too large
160
pylot/tomography/map_utils.py
Normal file
@@ -0,0 +1,160 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import glob
import os
import json
import numpy as np

import matplotlib.pyplot as plt

import cartopy
import cartopy.crs as ccrs
from cartopy.io.shapereader import Reader
from matplotlib.patches import Patch

# from cmcrameri import cm

from obspy.geodetics import gps2dist_azimuth

TRANSFORM = ccrs.PlateCarree()

def draw_schmid_faults(ax, fnin='schmidfaults.shp'):
    reader = Reader(fnin)
    lines = []
    linestyles = []
    for record in reader.records():
        info = record.attributes
        if info["fault_type"] == 1 or info["fault_type"] == 3:
            linestyles.append('solid')
        else:
            linestyles.append('dashed')
        line_arr = np.array(record.geometry.coords)
        lines.append(line_arr)
    # ax.add_collection(LineCollection(lines, linewidths=1.2, linestyles=linemarkers, colors='black', transform=ccrs.PlateCarree()))
    for line, style in zip(lines, linestyles):
        ax.plot(line[:, 0], line[:, 1], transform=TRANSFORM, ls=style, c='k', lw=1.2)


def draw_alcapadi_faults(ax, fnin='faults_alcapadi', color='black'):
    reader = Reader(fnin)
    lines = []
    linestyles = []
    linewidths = []
    for record in reader.records():
        info = record.attributes
        if info["fault_type"] == 1:
            linestyles.append('solid')
            linewidths.append(.8)
        elif info["fault_type"] == 2:
            linestyles.append('solid')
            linewidths.append(0.4)
        else:
            linestyles.append('dashed')
            linewidths.append(.8)
        line_arr = np.array(record.geometry.coords)
        lines.append(line_arr)
    for line, style, lwidth in zip(lines, linestyles, linewidths):
        ax.plot(line[:, 0], line[:, 1], transform=TRANSFORM, ls=style, c='k', lw=lwidth)


def draw_alcapadi_model(ax, fnin='tect_units_alcapadi', alpha=0.2, add_legend=True):
    reader = Reader(fnin)
    color_dict = {"Adria accreted": (0.8, 0.7, 0.45, alpha),
                  "Adria autochton": (0.52, 0.32, 0.2, alpha),
                  "Europe accreted": (0.42, 0.67, 0.88, alpha),
                  "Flexural foredeep and graben fill": (0.6, 0.6, 0.6, alpha),  # (1.0, 1.0, 220/255., alpha),
                  "Alpine Tethys": (0., 0.65, 0.3, alpha),
                  "Neotethys": (0.5, 0.8, 0.32, alpha)}

    patches_legend = [Patch(color=value, label=key.capitalize()) for key, value in color_dict.items()]

    for record in reader.records():
        info = record.attributes
        shape = record.geometry
        color = color_dict.get(info['tect_unit'])
        if not color:
            color = (1.0, 1.0, 1.0, alpha)
        ax.add_geometries(shape, crs=TRANSFORM, facecolor=color)

    if add_legend:
        ax.legend(handles=patches_legend, ncol=3, bbox_to_anchor=(0.5, -0.075), loc='center')


def init_cartopy_map(fig=None, draw_mapbound=True, fill_continents=False,
                     continents_color=None, mapbound_color=None, lakes_color=None, clon=None, clat=None):
    if not fig:
        fig = plt.figure()
    # projection = ccrs.LambertConformal(central_longitude=clon, central_latitude=clat)
    projection = ccrs.PlateCarree(central_longitude=clon)
    ax = fig.add_subplot(111, projection=projection)

    if fill_continents:
        ax.add_feature(cartopy.feature.LAND, color=continents_color)
        ax.add_feature(cartopy.feature.LAKES, color=lakes_color)
    if draw_mapbound:
        # ax.add_feature(cartopy.feature.OCEAN, color='w', linewidth=0.1)
        ax.add_feature(cartopy.feature.BORDERS, linewidth=0.2, color='0.3')
        ax.add_feature(cartopy.feature.COASTLINE, linewidth=0.3, color='0.3')
    return ax


def make_map(fig=None, draw_model=False, model_legends=True, draw_faults=False, width=20,
             height=14, clon=11, clat=46., draw_grid=True, continents='0.8', lakes='0.85', no_content=False,
             no_fill=True, alpha_model=0.2, faults_color='k', station_file=None):
    ax = init_cartopy_map(fig=fig, draw_mapbound=not no_content, fill_continents=not no_fill,
                          continents_color=continents, lakes_color=lakes, clon=clon, clat=clat)

    if station_file:
        with open(station_file, 'r') as fid:
            stations = json.load(fid)

        lons, lats = zip(*[(sta['longitude'], sta['latitude']) for sta in stations.values()])
        ax.scatter(lons, lats, c='0.3', s=1, transform=ccrs.PlateCarree(), zorder=5, edgecolors='none', alpha=0.5)

    # if draw_topo:
    #     draw_topo_model(basemap)
    if draw_model:
        fnin = '/home/marcel/sciebo/AlpArray_home/tectonic_maps_4dmb_2020_09_17/shape_files/tect_units_alcapadi'
        draw_alcapadi_model(ax, fnin, add_legend=model_legends, alpha=alpha_model)
    if draw_faults:
        fnin = '/home/marcel/sciebo/AlpArray_home/tectonic_maps_4dmb_2020_09_17/shape_files/faults_alcapadi'
        draw_alcapadi_faults(ax, fnin, faults_color)

    # if not no_content:
    #     basemap.drawcountries(color=line_color, linewidth=0.2)
    #     basemap.drawcoastlines(color=line_color, linewidth=0.3)

    if draw_grid:
        gl = ax.gridlines(crs=TRANSFORM, draw_labels=True,
                          linewidth=0.5, color='gray', alpha=0.5, linestyle=':')
        # leftover Basemap grid code, kept for reference:
        # dashes = [3, 6]
        # parallels = list(np.arange(-90, 90, lgrid))
        # parallels_small = [item for item in np.arange(-90, 90, sgrid) if not item in parallels]
        # basemap.drawparallels(parallels_small, dashes=dashes, color=line_color, linewidth=0.1, zorder=7)
        # basemap.drawparallels(parallels, dashes=[], color=line_color, linewidth=0.2, zorder=7, labels=[1, 1, 0, 0])
        # meridians = list(np.arange(-180, 180, lgrid))
        # meridians_small = [item for item in np.arange(-180, 180, sgrid) if not item in meridians]
        # basemap.drawmeridians(meridians_small, dashes=dashes, color=line_color, linewidth=0.1, zorder=7)
        # basemap.drawmeridians(meridians, dashes=[], color=line_color, linewidth=0.2, zorder=7, labels=[0, 0, 1, 1])

    ax.set_extent([clon - width / 2, clon + width / 2, clat - height / 2, clat + height / 2])

    return ax

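# A minimal usage sketch (not part of the original diff). The station_file is
# assumed to be a JSON dict mapping station ids to coordinate dicts, e.g.
# {"GR.GRA1": {"longitude": ..., "latitude": ...}}, which is the structure the
# code above reads from stations.values(); the filename is a placeholder.
# ax = make_map(draw_grid=True, station_file='stations.json')
# plt.savefig('alparray_map.png', dpi=300)
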
def angle_marker(a1, a2, delta=1.):
    """Return a pie-slice-shaped matplotlib marker spanning azimuths a1 to a2 (degrees, measured from north)."""
    a1 = np.deg2rad(a1)
    a2 = np.deg2rad(a2)
    delta = np.deg2rad(delta)
    x_vals = [np.sin(angle) for angle in np.arange(a1, a2 + delta, delta)]
    y_vals = [np.cos(angle) for angle in np.arange(a1, a2 + delta, delta)]
    xy = zip(x_vals, y_vals)
    # x1 = np.sin(a1)
    # y1 = np.cos(a1)
    # x2 = np.sin(a2)
    # y2 = np.cos(a2)
    # close the wedge by starting and ending at the center
    marker = [(0, 0), *xy, (0, 0)]
    return marker

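# A minimal usage sketch (not part of the original diff): matplotlib accepts a
# list of (x, y) vertices as a custom marker, so the wedge can be drawn directly.
# wedge = angle_marker(30., 60.)   # wedge opening from azimuth 30 to 60 degrees
# plt.scatter([0.5], [0.5], marker=wedge, s=2000, color='r')
# plt.show()
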
176
pylot/tomography/utils.py
Normal file
@@ -0,0 +1,176 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import glob
import json
import os

import matplotlib.pyplot as plt
import numpy as np

from obspy import Catalog, read_events

from pylot.core.util.dataprocessing import Metadata

def get_metadata(eventdir):
    """Read station metadata from the 'resp' folder of an obspy-dmt event directory."""
    metadata_path = os.path.join(eventdir, 'resp')
    metadata = Metadata(inventory=metadata_path, verbosity=0)
    return metadata

def set_rc_params(textsize=7.):
    # NOTE: textsize is currently unused; font settings come from the (user-specific) style file.
    plt.style.use('/home/marcel/solid_earth.mplstyle')
    # plt.rcParams.update({'font.size': textsize,
    #                      'font.family': 'sans-serif'})

def normed_figure_ratio_width(width_cm, ratio):
    """Return (width, height) in inches for a given width in cm and aspect ratio."""
    width_inch = width_cm / 2.54
    height_inch = width_inch / ratio
    return width_inch, height_inch

def normed_figure_ratio_height(height_cm, ratio):
    """Return (width, height) in inches for a given height in cm and aspect ratio."""
    height_inch = height_cm / 2.54
    width_inch = height_inch * ratio
    return width_inch, height_inch

def normed_figure(width_cm=None, ratio=1.777):
    # assert ((width_cm and not height_cm) or (height_cm and not width_cm)), 'Choose either of width or height!'
    set_rc_params()
    if width_cm:
        fig = plt.figure(figsize=normed_figure_ratio_width(width_cm, ratio))
        return fig
    # elif height_cm:
    #     fig = plt.figure(figsize=normed_figure_ratio_height(height_cm, ratio))
    return None

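# A minimal usage sketch (not part of the original diff); note that set_rc_params
# loads a user-specific style file, so this only works where that file exists.
# fig = normed_figure(width_cm=12., ratio=16. / 9.)   # 12 cm wide, 16:9 aspect
# if fig is None:
#     fig = plt.figure()   # fallback when no width was given
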
def pol2cart(lat, lon, r):
    """Convert latitude/longitude (degrees) and radius to Cartesian coordinates."""
    x = r * np.cos(np.deg2rad(lat)) * np.cos(np.deg2rad(lon))
    y = r * np.cos(np.deg2rad(lat)) * np.sin(np.deg2rad(lon))
    z = r * np.sin(np.deg2rad(lat))
    return x, y, z


def cart2pol(x, y, z):
    """Convert Cartesian coordinates to latitude/longitude (degrees) and radius."""
    r = np.sqrt(x ** 2 + y ** 2 + z ** 2)
    theta = np.rad2deg(np.arccos(z / r))
    phi = np.rad2deg(np.arctan2(y, x))
    lat = 90. - theta
    lon = phi
    return lat, lon, r

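# A short round-trip check (not part of the original diff), using 6371 km as an
# assumed Earth radius:
# x, y, z = pol2cart(46., 11., 6371.)
# lat, lon, r = cart2pol(x, y, z)   # recovers (46., 11., 6371.) up to float precision
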
def pol2cart_vector(lat, lon, north, east, r_comp):
    """Rotate a local (north, east, radial) vector at lat/lon (degrees) into global Cartesian x, y, z."""
    if any(val is None for val in [north, east, r_comp]):
        return None, None, None
    phi = np.deg2rad(lon)
    # change north component to common spherical coordinate convention (colatitude)
    theta = np.deg2rad(90. - lat)
    north *= -1
    x = (np.sin(theta) * np.cos(phi) * r_comp +
         np.cos(theta) * np.cos(phi) * north -
         np.sin(phi) * east)
    y = (np.sin(theta) * np.sin(phi) * r_comp +
         np.cos(theta) * np.sin(phi) * north +
         np.cos(phi) * east)
    z = (np.cos(theta) * r_comp -
         np.sin(theta) * north)
    return x, y, z

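# A quick sanity check (not part of the original diff): at lat=0, lon=0 the local
# axes line up with the global ones, so (north, east, radial) maps to (z, y, x).
# x, y, z = pol2cart_vector(0., 0., 1., 0., 0.)   # pure north component
# # expected: x == 0, y == 0, z == 1
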
def read_cat_obspy_dmt_database(databasedir, filemask):
    """Read all event files matching filemask from an obspy-dmt database into a single Catalog."""
    infiles = glob.glob(os.path.join(databasedir, '*.a', filemask))
    cat = Catalog()
    nPicks = 0
    for index, infile in enumerate(infiles):
        print(f'Working on: {infile} ({index + 1}/{len(infiles)})')
        event = read_events(infile)[0]
        nPicks += len(event.picks)
        cat += event

    nEvents = len(cat)
    print('Number of events: {} (filemask: {})'.format(nEvents, filemask))
    print('Total # picks: {} ({:.2f} per event)'.format(nPicks, float(nPicks) / nEvents))
    return cat

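# A minimal usage sketch (not part of the original diff); the database path,
# filemask and event id are placeholders that depend on the local setup:
# cat = read_cat_obspy_dmt_database('/data/dmt_database', 'PyLoT_*.xml')
# event = get_event(cat, '20181230_171749.a')
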
def get_event(cat, eventid):
    """Return the event whose resource_id ends with eventid, or None if not found."""
    for event in cat.events:
        if event.resource_id.id.split('/')[-1] == eventid:
            return event

def get_pick4station(picks, network_code, station_code, method='auto'):
    """Return the first pick matching network, station and pick method, or None."""
    for pick in picks:
        if pick.waveform_id.network_code == network_code:
            if pick.waveform_id.station_code == station_code:
                if pick.method_id.id.endswith(method):
                    return pick

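# A minimal usage sketch (not part of the original diff); network and station
# codes are placeholders:
# pick = get_pick4station(event.picks, 'GR', 'GRA1', method='auto')
# if pick:
#     print(pick.time)
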
def delete_picks(picks, nwst_ids_delete):
    """Delete picks from a list of ObsPy pick objects by their network.station id."""
    # iterate in reverse so that popping does not shift the remaining indices
    for index, pick in reversed(list(enumerate(picks))):
        seed_id = pick.waveform_id.get_seed_string()
        network, station = seed_id.split('.')[:2]
        nwst_id = '{}.{}'.format(network, station)
        if nwst_id in nwst_ids_delete:
            picks.pop(index)
            print('Removed pick: ', nwst_id)
    return picks

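# A minimal usage sketch (not part of the original diff); the ids are placeholders:
# event.picks = delete_picks(event.picks, ['GR.GRA1', 'BW.MANZ'])
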
def save_all_station_coordinates_dmt_database(dmt_database, fn_out):
    """
    Get all station coordinates from dmt_database and write them (unique) to a json output file.
    :param dmt_database: path to the obspy-dmt database directory
    :param fn_out: output filename (json)
    :return: None
    """
    stations_dict = {}
    eventdirs = glob.glob(os.path.join(dmt_database, '*.?'))
    nEvents = len(eventdirs)
    for index, eventdir in enumerate(eventdirs):
        print('Working on event {} ({}/{})'.format(eventdir, index + 1, nEvents))
        metadata = get_metadata(eventdir)
        current_stations_dict = metadata.get_all_coordinates()
        for nwst_id, coords in current_stations_dict.items():
            if nwst_id not in stations_dict:
                stations_dict[nwst_id] = coords

    with open(fn_out, 'w') as outfile:
        json.dump(stations_dict, outfile)

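# A minimal usage sketch (not part of the original diff); paths are placeholders.
# Assuming get_all_coordinates returns latitude/longitude per station, the
# resulting json should match the station_file format make_map reads above:
# save_all_station_coordinates_dmt_database('/data/dmt_database', 'stations.json')
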
def get_coordinate_from_dist_baz(station_tmp, dist, baz, mode='deg'):
    """Calculate the epicenter [lon, lat] from station position, distance and backazimuth (function copied from Andre)."""
    # station_tmp: [lon, lat]; station coordinates are always expected in degrees,
    # mode only controls the units of dist and baz
    if mode != 'deg' and mode != 'rad':
        print('mode has to be either deg or rad!')
        return None
    station = np.deg2rad(station_tmp)
    epi_tmp = [0., 0.]
    if mode == 'deg':
        dist = np.deg2rad(dist)
        baz = np.deg2rad(baz)
    az = baz - np.pi
    if az < 0:
        az += 2 * np.pi
    epi_tmp[1] = np.arcsin(np.sin(station[1]) * np.cos(dist) - np.cos(station[1]) * np.sin(dist) * np.cos(az))
    if (np.cos(dist) - np.sin(station[1]) * np.sin(epi_tmp[1])) / (np.cos(station[1]) * np.cos(epi_tmp[1])) >= 0.:
        epi_tmp[0] = station[0] - np.arcsin(np.sin(dist) * np.sin(az) / np.cos(epi_tmp[1]))
    else:
        epi_tmp[0] = station[0] - np.pi + np.arcsin(np.sin(dist) * np.sin(az) / np.cos(epi_tmp[1]))
    epi = np.rad2deg(epi_tmp)
    return epi

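# A worked example (not part of the original diff): for a station at [0, 0], a
# backazimuth of 90 deg (source to the east) and 10 deg distance, the epicenter
# lands 10 deg east on the equator:
# epi = get_coordinate_from_dist_baz([0., 0.], 10., 90.)   # -> approx. [10., 0.]
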
@@ -25,7 +25,7 @@ class HidePrints:
        if self.hide:
            self._original_stdout = sys.stdout
            devnull = open(os.devnull, "w")
            #sys.stdout = devnull
            sys.stdout = devnull

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.hide:
@@ -271,7 +271,7 @@ class TestAutopickStation(unittest.TestCase):
                             'fm': 'N', 'channel': None}}
        with HidePrints():
            result, station = autopickstation(wfstream=wfstream, pickparam=self.pickparam_taupy_disabled,
                                              metadata=(None, None), iplot=2)
                                              metadata=(None, None))
        compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
        compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
        self.assertEqual('GRA1', station)