2022-11-08 12:06:00 +01:00
|
|
|
#!/usr/bin/env python
|
|
|
|
# -*- coding: utf-8 -*-
|
2022-11-03 15:39:23 +01:00
|
|
|
|
|
|
|
__version__ = '0.1'
|
|
|
|
__author__ = 'Marcel Paffrath'
|
|
|
|
|
|
|
|
import os
|
2023-02-01 14:49:26 +01:00
|
|
|
import io
|
2023-01-31 16:12:07 +01:00
|
|
|
import copy
|
2022-11-08 16:45:21 +01:00
|
|
|
import traceback
|
2022-11-03 15:39:23 +01:00
|
|
|
import yaml
|
2022-11-08 16:45:21 +01:00
|
|
|
import argparse
|
2023-02-01 14:49:26 +01:00
|
|
|
import json
|
2022-11-03 15:39:23 +01:00
|
|
|
|
|
|
|
import time
|
|
|
|
from datetime import timedelta
|
|
|
|
import numpy as np
|
2022-11-09 14:32:13 +01:00
|
|
|
import matplotlib.pyplot as plt
|
2022-11-03 15:39:23 +01:00
|
|
|
|
|
|
|
from obspy import read, UTCDateTime, Stream
|
|
|
|
from obspy.clients.filesystem.sds import Client
|
|
|
|
|
2023-02-01 14:49:26 +01:00
|
|
|
from write_utils import get_html_text, get_html_row, html_footer, get_html_header, get_print_title_str, \
|
|
|
|
init_html_table, finish_html_table, get_mail_html_header, add_html_image
|
2022-12-21 16:03:10 +01:00
|
|
|
from utils import get_bg_color, modify_stream_for_plot, set_axis_yticks, set_axis_color, plot_axis_thresholds
|
2022-11-07 17:56:41 +01:00
|
|
|
|
2022-11-17 09:52:04 +01:00
|
|
|
try:
|
|
|
|
import smtplib
|
2023-02-01 14:49:26 +01:00
|
|
|
from email.message import EmailMessage
|
|
|
|
from email.utils import make_msgid
|
2022-11-21 15:31:32 +01:00
|
|
|
|
2022-11-17 09:52:04 +01:00
|
|
|
mail_functionality = True
|
|
|
|
except ImportError:
|
|
|
|
print('Could not import smtplib or mail. Disabled sending mails.')
|
|
|
|
mail_functionality = False
|
|
|
|
|
2022-11-03 15:39:23 +01:00
|
|
|
# short alias used throughout this module
pjoin = os.path.join

# ANSI escape sequences for in-place console output:
# UP moves the cursor up by {length} lines, CLR clears to the end of line
UP = "\x1B[{length}A"
CLR = "\x1B[0K"

# degree-Celsius suffix appended to temperature values
deg_str = '\N{DEGREE SIGN}C'
|
|
|
|
|
2022-11-07 17:56:41 +01:00
|
|
|
|
2022-12-21 11:57:37 +01:00
|
|
|
def read_yaml(file_path, n_read=3, retry_delay=10):
    """
    Read a YAML parameter file, retrying on failure.

    :param file_path: path of the YAML file to read
    :param n_read: maximum number of read attempts
    :param retry_delay: seconds to wait between failed attempts
    :return: parsed parameters (dict), or None if all attempts failed
    """
    for index in range(n_read):
        try:
            with open(file_path, "r") as f:
                return yaml.safe_load(f)
        except Exception as e:
            remaining = n_read - index - 1
            print(f'Could not read parameters file: {e}.\nWill try again {remaining} time(s).')
            # only wait when another attempt follows; the original code also
            # slept 10 s after the final (hopeless) attempt
            if remaining > 0:
                time.sleep(retry_delay)
    # all attempts failed; make the (previously implicit) None return explicit
    return None
|
2022-11-03 15:39:23 +01:00
|
|
|
|
|
|
|
|
2022-11-15 13:48:56 +01:00
|
|
|
def nsl_from_id(nwst_id):
    """Split an nwst id ('network.station.location') into a dict with keys network, station, location."""
    # normalise to the three-part form first (appends empty location if missing)
    full_id = get_full_seed_id(nwst_id)
    net, sta, loc = full_id.split('.')
    return {'network': net, 'station': sta, 'location': loc}
|
|
|
|
|
|
|
|
|
2023-02-01 14:49:26 +01:00
|
|
|
def get_full_seed_id(nwst_id):
    """Return the id with a (possibly empty) location code, e.g. 'NW.ST' -> 'NW.ST.'."""
    parts = nwst_id.split('.')
    location = parts[2] if len(parts) > 2 else ''
    return f'{parts[0]}.{parts[1]}.{location}'
|
|
|
|
|
|
|
|
|
2022-11-15 13:48:56 +01:00
|
|
|
def get_nwst_id(trace):
    """Return the nwst id ('network.station.') of a trace; the location code is deliberately left empty."""
    meta = trace.stats
    # trailing dot kept, location omitted, so all locations of a station share one id
    return '{}.{}.'.format(meta.network, meta.station)
|
2022-11-03 15:39:23 +01:00
|
|
|
|
|
|
|
|
|
|
|
def fancy_timestr(dt, thresh=600, modif='+'):
    """Return dt as a string, wrapped in modif markers if it is a timedelta above thresh seconds."""
    highlight = isinstance(dt, timedelta) and dt > timedelta(seconds=thresh)
    return f'{modif} {dt} {modif}' if highlight else str(dt)
|
|
|
|
|
|
|
|
|
|
|
|
class SurveillanceBot(object):
    """
    Main surveillance bot: periodically reads station data from an SDS
    archive, runs quality checks (StationQC) on each station and publishes
    the results either to console or as an HTML table with figures.
    """

    def __init__(self, parameter_path, outpath_html=None):
        """
        :param parameter_path: path of the yaml parameter file
        :param outpath_html: output directory for html results; if None,
            results are printed to console instead
        """
        # column keys of the QC result table (order defines output order)
        self.keys = ['last active', '230V', '12V', 'router', 'charger', 'voltage', 'mass', 'clock', 'gaps', 'temp',
                     'other']
        self.parameter_path = parameter_path
        self.update_parameters()

        self.starttime = UTCDateTime()
        # hour of the last figure refresh (see check_plot_hour)
        self.plot_hour = self.starttime.hour
        # julian day of the last full archive re-read (see read_data)
        self.current_day = self.starttime.julday
        self.outpath_html = outpath_html
        self.filenames = []
        self.filenames_wf_data = []
        # mtime of each file at its last read, used to skip unchanged files
        self.filenames_read_last_modif = {}
        self.station_list = []
        self.analysis_print_list = []
        self.analysis_results = {}
        # rolling error history per station/key (see track_status)
        self.status_track = {}
        self.dataStream = Stream()
        self.data = {}
        self.gaps = []
        self.print_count = 0
        self.status_message = ''
        self.html_fig_dir = 'figures'

        # in-memory (BytesIO) copies of station figures, e.g. for mail attachments
        self.active_figures = {}

        self.cl = Client(self.parameters.get('datapath'))  # TODO: Check if this has to be loaded again on update
        self.get_stations()
|
|
|
|
|
2022-11-08 16:45:21 +01:00
|
|
|
    def update_parameters(self):
        """
        (Re-)read the yaml parameter file and refresh the derived attributes
        (channel list, thresholds, blacklists, refresh period, links).
        """
        self.parameters = read_yaml(self.parameter_path)
        # add channels to list in parameters dictionary, also add data channels
        channels = list(self.parameters.get('CHANNELS').keys())
        for channel in self.parameters.get('data_channels'):
            if not channel in channels:
                channels.append(channel)
        self.parameters['channels'] = channels
        self.reread_parameters = self.parameters.get('reread_parameters')
        # two thresholds (e.g. warn/fail) for data latency, in seconds
        self.dt_thresh = [int(val) for val in self.parameters.get('dt_thresh')]
        self.verbosity = self.parameters.get('verbosity')
        self.stations_blacklist = self.parameters.get('stations_blacklist')
        self.networks_blacklist = self.parameters.get('networks_blacklist')
        self.refresh_period = self.parameters.get('interval')
        # ensure networks/stations/locations/channels are lists
        self.transform_parameters()
        # optional extra hyperlink columns for the html table; default to empty dict
        add_links = self.parameters.get('add_links')
        self.add_links = add_links if add_links else {}
|
2022-11-08 16:45:21 +01:00
|
|
|
|
2022-11-03 15:39:23 +01:00
|
|
|
def transform_parameters(self):
|
|
|
|
for key in ['networks', 'stations', 'locations', 'channels']:
|
|
|
|
parameter = self.parameters.get(key)
|
|
|
|
if type(parameter) == str:
|
|
|
|
self.parameters[key] = list(self.parameters[key])
|
|
|
|
elif type(parameter) not in [list]:
|
|
|
|
raise TypeError(f'Bad input type for {key}: {type(key)}')
|
|
|
|
|
|
|
|
def get_stations(self):
|
|
|
|
networks = self.parameters.get('networks')
|
|
|
|
stations = self.parameters.get('stations')
|
|
|
|
|
|
|
|
self.station_list = []
|
|
|
|
nwst_list = self.cl.get_all_stations()
|
|
|
|
for nw, st in nwst_list:
|
|
|
|
if self.stations_blacklist and st in self.stations_blacklist:
|
|
|
|
continue
|
|
|
|
if self.networks_blacklist and nw in self.networks_blacklist:
|
|
|
|
continue
|
|
|
|
if (networks == ['*'] or nw in networks) and (stations == ['*'] or st in stations):
|
2022-11-15 13:48:56 +01:00
|
|
|
nwst_id = f'{nw}.{st}.'
|
|
|
|
self.station_list.append(nwst_id)
|
2022-11-03 15:39:23 +01:00
|
|
|
|
|
|
|
    def get_filenames(self):
        """
        Collect all SDS archive filenames covering the configured timespan
        into self.filenames; filenames of waveform-data channels are tracked
        separately in self.filenames_wf_data (only their headers are read later).
        """
        self.filenames = []
        self.filenames_wf_data = []
        time_now = UTCDateTime()
        # start of the requested window: 'timespan' is given in days
        t1 = time_now - self.parameters.get('timespan') * 24 * 3600
        networks = self.parameters.get('networks')
        stations = self.parameters.get('stations')
        locations = self.parameters.get('locations')
        channels = self.parameters.get('channels')
        channels_wf_data = self.parameters.get('data_channels')
        for network in networks:
            for station in stations:
                for location in locations:
                    for channel in channels:
                        # NOTE: relies on the private obspy SDS Client API
                        # _get_filenames - may break on obspy upgrades
                        fnames = list(self.cl._get_filenames(network, station, location, channel,
                                                             starttime=t1, endtime=time_now))
                        self.filenames += fnames

                        # keep track of filenames with wf data (only read headers later)
                        if channel in channels_wf_data:
                            self.filenames_wf_data += fnames
|
2022-11-03 15:39:23 +01:00
|
|
|
|
2023-01-31 16:10:57 +01:00
|
|
|
    def read_data(self, re_read_at_hour=1):
        '''
        Read new data from self.filenames into self.dataStream and organise it
        per station in self.data. Files whose modification time is unchanged
        since the last read are skipped; once per day the whole archive is
        re-read from scratch.

        :param re_read_at_hour: hour of day (0-23) at which the archive is
            fully re-read; pass False to disable the daily re-read
        '''
        self.data = {}

        # re-read all data every new day (at re_read_at_hour) to drop stale traces
        curr_time = UTCDateTime()
        current_day = curr_time.julday
        current_hour = curr_time.hour
        if re_read_at_hour is not False and current_day != self.current_day and current_hour == re_read_at_hour:
            self.filenames_read_last_modif = {}
            self.dataStream = Stream()
            self.current_day = current_day

        # add all data to current stream
        for filename in self.filenames:
            # if file already read and last modification time is the same as of last read operation: continue
            if self.filenames_read_last_modif.get(filename) == os.path.getmtime(filename):
                if self.verbosity > 0:
                    print('Continue on file', filename)
                continue
            try:
                # read only header of wf_data (content not needed for QC, only gaps/latency)
                if filename in self.filenames_wf_data:
                    st_new = read(filename, headonly=True)
                else:
                    st_new = read(filename, dtype=float)
                # remember mtime only after a successful read so failures are retried
                self.filenames_read_last_modif[filename] = os.path.getmtime(filename)
            except Exception as e:
                print(f'Could not read file (unknown):', e)
                continue
            self.dataStream += st_new

        # detect gaps before merging (merge would hide them)
        self.gaps = self.dataStream.get_gaps(min_gap=self.parameters['THRESHOLDS'].get('min_gap'))
        self.dataStream.merge()

        # organise data in dictionary with key for each station
        for trace in self.dataStream:
            nwst_id = get_nwst_id(trace)
            if not nwst_id in self.data.keys():
                self.data[nwst_id] = Stream()
            self.data[nwst_id].append(trace)
|
2022-11-03 15:39:23 +01:00
|
|
|
|
|
|
|
    def execute_qc(self):
        """
        Run one full QC cycle: refresh parameters/filenames/data, run a
        StationQC analysis for every station (or build a 'no data' row),
        update the error history and the status message.

        :return: 'ok' (loop-continuation sentinel checked in start())
        """
        if self.reread_parameters:
            self.update_parameters()
        self.get_filenames()
        self.read_data()
        qc_starttime = UTCDateTime()

        self.analysis_print_list = []
        self.analysis_results = {}
        for nwst_id in sorted(self.station_list):
            stream = self.data.get(nwst_id)
            if stream:
                nsl = nsl_from_id(nwst_id)
                # StationQC runs its checks on construction (calls self.start())
                station_qc = StationQC(self, stream, nsl, self.parameters, self.keys, qc_starttime,
                                       self.verbosity, print_func=self.print,
                                       status_track=self.status_track.get(nwst_id))
                analysis_print_result = station_qc.return_print_analysis()
                station_dict = station_qc.return_analysis()
            else:
                # no data at all for this station: build placeholder row/dict
                analysis_print_result = self.get_no_data_station(nwst_id, to_print=True)
                station_dict = self.get_no_data_station(nwst_id)
            self.analysis_print_list.append(analysis_print_result)
            self.analysis_results[nwst_id] = station_dict

        self.track_status()

        self.update_status_message()
        return 'ok'
|
|
|
|
|
2022-11-17 09:52:04 +01:00
|
|
|
def track_status(self):
|
|
|
|
"""
|
|
|
|
tracks error status of the last n_track + 1 errors.
|
|
|
|
"""
|
|
|
|
n_track = self.parameters.get('n_track')
|
|
|
|
if not n_track or n_track < 1:
|
|
|
|
return
|
|
|
|
for nwst_id, analysis_dict in self.analysis_results.items():
|
|
|
|
if not nwst_id in self.status_track.keys():
|
|
|
|
self.status_track[nwst_id] = {}
|
|
|
|
for key, status in analysis_dict.items():
|
|
|
|
if not key in self.status_track[nwst_id].keys():
|
|
|
|
self.status_track[nwst_id][key] = []
|
|
|
|
track_lst = self.status_track[nwst_id][key]
|
|
|
|
# pop list until length is n_track + 1
|
|
|
|
while len(track_lst) > n_track:
|
|
|
|
track_lst.pop(0)
|
|
|
|
track_lst.append(status.is_error)
|
|
|
|
|
2022-11-15 13:48:56 +01:00
|
|
|
    def get_no_data_station(self, nwst_id, no_data='-', to_print=False):
        """
        Build placeholder results for a station without any data.

        :param nwst_id: station id ('NW.ST.')
        :param no_data: placeholder text for all keys except 'last active'
        :param to_print: if True return a list of row items for console
            output, otherwise a dict of Status objects keyed by self.keys
        """
        # NOTE(review): get_station_delay can return None if no latency was
        # found; int(None) below would then raise TypeError - confirm callers
        delay = self.get_station_delay(nwst_id)
        if not to_print:
            status_dict = {}
            for key in self.keys:
                if key == 'last active':
                    status_dict[key] = Status(message=timedelta(seconds=int(delay)), detailed_messages=['No data'])
                else:
                    status_dict[key] = Status(message=no_data, detailed_messages=['No data'])
            return status_dict
        else:
            items = [nwst_id.rstrip('.')] + [fancy_timestr(timedelta(seconds=int(delay)))]
            for _ in range(len(self.keys) - 1):
                items.append(no_data)
            return items
|
|
|
|
|
2022-11-15 13:48:56 +01:00
|
|
|
def get_station_delay(self, nwst_id):
|
2022-11-03 15:39:23 +01:00
|
|
|
""" try to get station delay from SDS archive using client"""
|
|
|
|
locations = ['', '0', '00']
|
2023-01-03 17:59:42 +01:00
|
|
|
channels = self.parameters.get('channels') + self.parameters.get('data_channels')
|
2022-11-15 13:48:56 +01:00
|
|
|
network, station = nwst_id.split('.')[:2]
|
2022-11-03 15:39:23 +01:00
|
|
|
|
|
|
|
times = []
|
|
|
|
for channel in channels:
|
|
|
|
for location in locations:
|
|
|
|
t = self.cl.get_latency(network, station, location, channel)
|
|
|
|
if t:
|
|
|
|
times.append(t)
|
|
|
|
if len(times) > 0:
|
|
|
|
return min(times)
|
|
|
|
|
|
|
|
def print_analysis(self):
|
2022-11-07 17:56:41 +01:00
|
|
|
self.print(200 * '+')
|
|
|
|
title_str = get_print_title_str(self.parameters)
|
2022-11-03 15:39:23 +01:00
|
|
|
self.print(title_str)
|
|
|
|
if self.refresh_period > 0:
|
|
|
|
self.print(f'Refreshing every {self.refresh_period}s.')
|
|
|
|
items = ['Station'] + self.keys
|
|
|
|
self.console_print(items, sep='---')
|
|
|
|
for items in self.analysis_print_list:
|
|
|
|
self.console_print(items)
|
|
|
|
|
2022-11-08 16:45:21 +01:00
|
|
|
    def start(self):
        '''
        Main loop: perform qc periodically (every self.refresh_period
        seconds) and publish results to html (if outpath_html is set) or to
        the console. Runs until execute_qc returns something other than 'ok'
        or the refresh period is <= 0.
        '''
        # figures are always written on the first pass, afterwards only hourly
        first_exec = True
        status = 'ok'
        while status == 'ok' and self.refresh_period > 0:
            status = self.execute_qc()
            if self.outpath_html:
                self.write_html_table()
                if self.parameters.get('html_figures'):
                    self.write_html_figures(check_plot_time=not (first_exec))
            else:
                self.print_analysis()
            time.sleep(self.refresh_period)
            # console mode: rewind the cursor so the next table overwrites this one
            if not self.outpath_html:
                self.clear_prints()
            first_exec = False
|
2022-11-03 15:39:23 +01:00
|
|
|
|
|
|
|
def console_print(self, itemlist, str_len=21, sep='|', seplen=3):
|
|
|
|
assert len(sep) <= seplen, f'Make sure seperator has less than {seplen} characters'
|
|
|
|
sl = sep.ljust(seplen)
|
|
|
|
sr = sep.rjust(seplen)
|
|
|
|
string = sl
|
|
|
|
for item in itemlist:
|
|
|
|
string += item.center(str_len) + sr
|
|
|
|
self.print(string, flush=False)
|
|
|
|
|
2022-11-09 14:32:13 +01:00
|
|
|
def check_plot_hour(self):
|
|
|
|
''' Check if new hour started '''
|
|
|
|
current_hour = UTCDateTime().hour
|
|
|
|
if not current_hour > self.plot_hour:
|
|
|
|
return False
|
|
|
|
if current_hour == 23:
|
|
|
|
self.plot_hour = 0
|
|
|
|
else:
|
|
|
|
self.plot_hour += 1
|
|
|
|
return True
|
|
|
|
|
2022-11-15 13:48:56 +01:00
|
|
|
def get_fig_path_abs(self, nwst_id):
|
|
|
|
return pjoin(self.outpath_html, self.get_fig_path_rel(nwst_id))
|
2022-11-09 14:32:13 +01:00
|
|
|
|
2022-11-15 13:48:56 +01:00
|
|
|
def get_fig_path_rel(self, nwst_id, fig_format='png'):
|
|
|
|
return os.path.join(self.html_fig_dir, f'{nwst_id.rstrip(".")}.{fig_format}')
|
2022-11-09 16:53:43 +01:00
|
|
|
|
|
|
|
def check_fig_dir(self):
|
|
|
|
fdir = pjoin(self.outpath_html, self.html_fig_dir)
|
|
|
|
if not os.path.isdir(fdir):
|
|
|
|
os.mkdir(fdir)
|
|
|
|
|
|
|
|
    def check_html_dir(self):
        # create the html output directory if it does not exist yet
        if not os.path.isdir(self.outpath_html):
            os.mkdir(self.outpath_html)
|
2022-11-09 14:32:13 +01:00
|
|
|
|
|
|
|
    def write_html_figures(self, check_plot_time=True):
        """
        Write figures for html (e.g. hourly).

        :param check_plot_time: if True only write figures when a new hour
            started since the last refresh (see check_plot_hour)
        """
        if check_plot_time and not self.check_plot_hour():
            return

        for nwst_id in self.station_list:
            self.write_html_figure(nwst_id)
|
|
|
|
|
2023-02-01 14:49:26 +01:00
|
|
|
    def write_html_figure(self, nwst_id, save_bytes=False):
        """
        Write figure for html for specified station.

        :param nwst_id: station id ('NW.ST.')
        :param save_bytes: if True also keep an in-memory PNG copy of the
            figure in self.active_figures (e.g. for mail attachments)
        """
        self.check_fig_dir()

        fig = plt.figure(figsize=(16, 9))
        # write to the file on disk and, in parallel, to a BytesIO buffer
        fnames_out = [self.get_fig_path_abs(nwst_id), io.BytesIO()]
        st = self.data.get(get_full_seed_id(nwst_id))
        if st:
            # TODO: this section failed once, adding try-except block for analysis and to prevent program from crashing
            try:
                endtime = UTCDateTime()
                starttime = endtime - self.parameters.get('timespan') * 24 * 3600
                st = modify_stream_for_plot(st, parameters=self.parameters)
                st.plot(fig=fig, show=False, draw=False, block=False, equal_scale=False, method='full',
                        starttime=starttime, endtime=endtime)
                # set_axis_ylabels(fig, self.parameters, self.verbosity)
                set_axis_yticks(fig, self.parameters, self.verbosity)
                set_axis_color(fig)
                plot_axis_thresholds(fig, self.parameters, self.verbosity)
            except Exception as e:
                print(f'Could not generate plot for {nwst_id}:')
                print(traceback.format_exc())
        # even on failure the (possibly empty) figure is annotated and saved below
        if len(fig.axes) > 0:
            ax = fig.axes[0]
            ax.set_title(f'Plot refreshed at (UTC) {UTCDateTime.now().strftime("%Y-%m-%d %H:%M:%S")}. '
                         f'Refreshed hourly or on FAIL status.')
        for ax in fig.axes:
            ax.grid(True, alpha=0.1)
        for fnout in fnames_out:
            fig.savefig(fnout, dpi=150., bbox_inches='tight')
        # if needed save figure as virtual object (e.g. for mailing)
        if save_bytes:
            # rewind the buffer so consumers can read the PNG from the start
            fnames_out[-1].seek(0)
            self.active_figures[nwst_id] = fnames_out[-1]
        plt.close(fig)
|
2022-11-09 14:32:13 +01:00
|
|
|
|
2023-02-01 14:49:26 +01:00
|
|
|
def get_html_class(self, hide_keys_mobile=None, status=None, check_key=None):
|
|
|
|
""" helper function for html class if a certain condition is fulfilled """
|
|
|
|
html_class = None
|
|
|
|
if status and status.is_active:
|
|
|
|
html_class = 'blink-bg'
|
|
|
|
if hide_keys_mobile and check_key in hide_keys_mobile:
|
|
|
|
html_class = 'hidden-mobile'
|
|
|
|
return html_class
|
|
|
|
|
|
|
|
def make_html_table_header(self, default_header_color, hide_keys_mobile=None, add_links=True):
|
|
|
|
# First write header items
|
|
|
|
header = self.keys.copy()
|
|
|
|
# add columns for additional links
|
|
|
|
if add_links:
|
|
|
|
for key in self.add_links:
|
|
|
|
header.insert(-1, key)
|
|
|
|
|
|
|
|
header_items = [dict(text='Station', color=default_header_color)]
|
|
|
|
for check_key in header:
|
|
|
|
html_class = self.get_html_class(hide_keys_mobile, check_key=check_key)
|
|
|
|
item = dict(text=check_key, color=default_header_color, html_class=html_class)
|
|
|
|
header_items.append(item)
|
|
|
|
|
|
|
|
return header, header_items
|
|
|
|
|
|
|
|
    def get_html_row_items(self, status_dict, nwst_id, header, default_color, hide_keys_mobile=None,
                           hyperlinks=True):
        '''
        Create a html table row (list of cell dicts) for one station.

        :param status_dict: dict of Status objects keyed by check key
        :param nwst_id: station id ('NW.ST.')
        :param header: ordered list of column keys (QC keys plus link keys)
        :param default_color: fallback background color
        :param hide_keys_mobile: keys hidden on small screens
        :param hyperlinks: if False, suppress all hyperlinks in the row
        '''

        fig_name = self.get_fig_path_rel(nwst_id)
        nwst_id_str = nwst_id.rstrip('.')
        # first cell: station name linking to its figure
        col_items = [dict(text=nwst_id_str, color=default_color, hyperlink=fig_name if hyperlinks else None,
                          bold=True, tooltip=f'Show plot of {nwst_id_str}')]

        for check_key in header:
            if check_key in self.keys:
                status = status_dict.get(check_key)
                message, detailed_message = status.get_status_str()

                # get background color
                dt_thresh = [timedelta(seconds=sec) for sec in self.dt_thresh]
                bg_color = get_bg_color(check_key, status, dt_thresh, hex=True)
                if not bg_color:
                    bg_color = default_color

                # add degree sign for temp
                if check_key == 'temp':
                    if not type(message) in [str]:
                        message = str(message) + deg_str

                html_class = self.get_html_class(hide_keys_mobile, status=status, check_key=check_key)
                item = dict(text=str(message), tooltip=str(detailed_message), color=bg_color,
                            html_class=html_class)
            elif check_key in self.add_links:
                # extra column: configured hyperlink, formatted per station
                value = self.add_links.get(check_key).get('URL')
                link_text = self.add_links.get(check_key).get('text')
                if not value:
                    continue
                nw, st = nwst_id.split('.')[:2]
                hyperlink_dict = dict(nw=nw, st=st, nwst_id=nwst_id)
                link = value.format(**hyperlink_dict)
                item = dict(text=link_text, tooltip=link, hyperlink=link if hyperlinks else None, color=default_color)
            else:
                # unknown column: emit an empty cell to keep the row aligned
                item = dict(text='', tooltip='')
            col_items.append(item)

        return col_items
|
2022-11-29 10:42:15 +01:00
|
|
|
|
2023-02-01 14:49:26 +01:00
|
|
|
def write_html_table(self, default_color='#e6e6e6', default_header_color='#999999', hide_keys_mobile=('other',)):
|
2022-11-09 16:53:43 +01:00
|
|
|
self.check_html_dir()
|
2022-11-09 14:32:13 +01:00
|
|
|
fnout = pjoin(self.outpath_html, 'survBot_out.html')
|
2022-11-08 16:45:21 +01:00
|
|
|
if not fnout:
|
|
|
|
return
|
|
|
|
try:
|
|
|
|
with open(fnout, 'w') as outfile:
|
2023-02-01 14:49:26 +01:00
|
|
|
outfile.write(get_html_header(self.refresh_period))
|
|
|
|
|
|
|
|
# write_html_table_title(self.parameters)
|
|
|
|
outfile.write(init_html_table())
|
|
|
|
|
|
|
|
# write html header row
|
|
|
|
header, header_items = self.make_html_table_header(default_header_color, hide_keys_mobile)
|
|
|
|
html_row = get_html_row(header_items, html_key='th')
|
|
|
|
outfile.write(html_row)
|
|
|
|
|
|
|
|
# Write all cells (row after row)
|
2022-11-15 13:48:56 +01:00
|
|
|
for nwst_id in self.station_list:
|
2023-02-01 14:49:26 +01:00
|
|
|
# get list with column-wise items to write as a html row
|
|
|
|
status_dict = self.analysis_results.get(nwst_id)
|
|
|
|
col_items = self.get_html_row_items(status_dict, nwst_id, header, default_color, hide_keys_mobile)
|
|
|
|
outfile.write(get_html_row(col_items))
|
|
|
|
|
|
|
|
outfile.write(finish_html_table())
|
|
|
|
|
|
|
|
outfile.write(get_html_text(self.status_message))
|
|
|
|
outfile.write(html_footer())
|
|
|
|
|
2022-11-08 16:45:21 +01:00
|
|
|
except Exception as e:
|
|
|
|
print(f'Could not write HTML table to {fnout}:')
|
|
|
|
print(traceback.format_exc())
|
|
|
|
|
2022-12-21 12:51:01 +01:00
|
|
|
if self.verbosity:
|
|
|
|
print(f'Wrote html table to {fnout}')
|
|
|
|
|
2022-11-08 16:45:21 +01:00
|
|
|
def update_status_message(self):
|
|
|
|
timespan = timedelta(seconds=int(self.parameters.get('timespan') * 24 * 3600))
|
|
|
|
self.status_message = f'Program starttime (UTC) {self.starttime.strftime("%Y-%m-%d %H:%M:%S")} | ' \
|
|
|
|
f'Current time (UTC) {UTCDateTime().strftime("%Y-%m-%d %H:%M:%S")} | ' \
|
2022-11-21 15:31:32 +01:00
|
|
|
f'Refresh period: {self.refresh_period}s | ' \
|
2022-11-08 16:45:21 +01:00
|
|
|
f'Showing data of last {timespan}'
|
|
|
|
|
2022-11-03 15:39:23 +01:00
|
|
|
def print(self, string, **kwargs):
|
|
|
|
clear_end = CLR + '\n'
|
|
|
|
n_nl = string.count('\n')
|
|
|
|
string.replace('\n', clear_end)
|
|
|
|
print(string, end=clear_end, **kwargs)
|
2022-11-07 17:56:41 +01:00
|
|
|
self.print_count += n_nl + 1 # number of newlines + actual print with end='\n' (no check for kwargs end!)
|
|
|
|
# print('pc:', self.print_count)
|
2022-11-03 15:39:23 +01:00
|
|
|
|
|
|
|
    def clear_prints(self):
        # move the cursor up by the number of previously printed lines so the
        # next output overwrites them in place
        print(UP.format(length=self.print_count), end='')
        self.print_count = 0
|
|
|
|
|
|
|
|
|
|
|
|
class StationQC(object):
    def __init__(self, parent, stream, nsl, parameters, keys, starttime, verbosity, print_func, status_track=None):
        """
        Station Quality Check class. Runs all checks on construction (via
        self.start() at the end of __init__).

        :param parent: the owning SurveillanceBot (used e.g. for figure refresh)
        :param stream: obspy Stream with this station's data
        :param nsl: dictionary containing network, station and location (key: str)
        :param parameters: parameters dictionary from parameters.yaml file
        :param keys: ordered list of check keys for the status dict
        :param starttime: UTCDateTime of the current QC run
        :param verbosity: verbosity level for diagnostic prints
        :param print_func: callable used for console output
        :param status_track: previous error history for this station, if any
        """
        if status_track is None:
            status_track = {}
        self.parent = parent
        self.stream = stream
        self.nsl = nsl
        self.network = nsl.get('network')
        self.station = nsl.get('station')
        self.location = nsl.get('location')
        # make a copy of parameters object to prevent accidental changes
        self.parameters = copy.deepcopy(parameters)
        self.program_starttime = starttime
        self.verbosity = verbosity
        self.last_active = False
        self.print = print_func

        self.keys = keys
        # one neutral Status per key; checks overwrite these as they run
        self.status_dict = {key: Status() for key in self.keys}

        # NOTE(review): redundant with the None check above (falsy {} is reset
        # to {} again) - kept for byte-identical behavior
        if not status_track:
            status_track = {}
        self.status_track = status_track

        self.start()
|
2022-11-03 15:39:23 +01:00
|
|
|
|
2022-11-21 15:31:32 +01:00
|
|
|
    @property
    def nwst_id(self):
        # 'network.station' without a trailing dot (unlike module-level get_nwst_id)
        return f'{self.network}.{self.station}'
|
|
|
|
|
2022-11-15 17:19:39 +01:00
|
|
|
    def status_ok(self, key, detailed_message="Everything OK", status_message='OK', overwrite=False):
        """
        Set the status of key to OK unless a warning or error is already
        present (those win, unless overwrite=True).
        """
        current_status = self.status_dict.get(key)
        # do not overwrite existing warnings or errors
        if not overwrite and (current_status.is_warn or current_status.is_error):
            return
        self.status_dict[key] = StatusOK(message=status_message, detailed_messages=[detailed_message])
|
|
|
|
|
|
|
|
    def warn(self, key, detailed_message, last_occurrence=None, count=1):
        """
        Register a warning for key: create a new StatusWarn or increase the
        count of an existing one. An existing error status is never
        downgraded to a warning.

        :param key: check key (one of self.keys)
        :param detailed_message: human-readable description of the warning
        :param last_occurrence: timestamp of the last occurrence, if known
        :param count: number of occurrences to add
        """
        # NOTE(review): execution continues after status_other - possibly a
        # missing return here; confirm intended behavior for key == 'other'
        if key == 'other':
            self.status_other(detailed_message, last_occurrence, count)

        new_warn = StatusWarn(count=count, show_count=self.parameters.get('warn_count'))

        current_status = self.status_dict.get(key)

        # change this to something more useful, SMS/EMAIL/PUSH
        if self.verbosity:
            self.print(f'{UTCDateTime()}: {detailed_message}', flush=False)

        # if error, do not overwrite with warning
        if current_status.is_error:
            return

        # accumulate counts on an existing warning, otherwise start a new one
        if current_status.is_warn:
            current_status.count += count
        else:
            current_status = new_warn

        self._update_status(key, current_status, detailed_message, last_occurrence)
|
|
|
    def error(self, key, detailed_message, last_occurrence=None, count=1):
        """
        Register an error for key: create a new StatusError or increase the
        count of an existing one, refresh the station figure when the error
        is new, and send a notification mail once the error has persisted
        for n_track cycles (see search_previous_errors).

        :param key: check key (one of self.keys)
        :param detailed_message: human-readable description of the error
        :param last_occurrence: timestamp of the last occurrence, if known
        :param count: number of occurrences to add
        """
        send_mail = False
        new_error = StatusError(count=count, show_count=self.parameters.get('warn_count'))
        current_status = self.status_dict.get(key)
        if current_status.is_error:
            current_status.count += count
        else:
            current_status = new_error

        # if error is new and not on program-startup set active and refresh plot (using parent class)
        if self.search_previous_errors(key, n_errors=1) is True:
            self.parent.write_html_figure(self.nwst_id, save_bytes=True)

        if self.verbosity:
            self.print(f'{UTCDateTime()}: {detailed_message}', flush=False)

        # do not send error mail if this is the first run (e.g. program startup) or state was already error (unchanged)
        if self.search_previous_errors(key) is True:
            send_mail = True
            # set status to "inactive" when info mail is sent
            current_status.is_active = False
        elif self.search_previous_errors(key) == 'active':
            current_status.is_active = True

        # first update status, then send mail
        self._update_status(key, current_status, detailed_message, last_occurrence)
        if send_mail:
            self.send_mail(key, status_type='FAIL', additional_message=detailed_message)
|
2022-11-17 09:52:04 +01:00
|
|
|
|
2022-11-21 15:31:32 +01:00
|
|
|
def search_previous_errors(self, key, n_errors=None):
    """
    Check n_track + 1 previous statuses for errors.
    If first item in list is no error but all others are return True
    (first time n_track errors appeared if ALL n_track + 1 are error: error is old)
    If last item is error but not all items are error yet return keyword 'active' -> error active, no message sent
    In all other cases return False.
    This also prevents sending status (e.g. mail) in case of program startup

    :param key: status-dict key whose error history is examined
    :param n_errors: number of consecutive errors required; defaults to
        the 'n_track' parameter from the configuration
    :return: True, the string 'active', or False (see above)
    """
    if n_errors is None:
        n_errors = self.parameters.get('n_track')

    # +1 to check whether n_errors + 1 was no error (error is new)
    n_errors += 1

    # simulate an error specified in json file (dictionary: {nwst_id: key} )
    if self._simulated_error_check(key) is True:
        print(f'Simulating Error on {self.nwst_id}, {key}')
        return True

    # status_track holds one boolean per past check (presumably appended
    # elsewhere once per interval — not visible in this chunk)
    previous_errors = self.status_track.get(key)
    # only if error list is filled n_track times
    if previous_errors and len(previous_errors) == n_errors:
        # if first entry was no error but all others are, return True (-> new Fail n_track times)
        if not previous_errors[0] and all(previous_errors[1:]):
            return True
        # special case: n_errors set to 1 (+1) to check for upcoming error (refresh plot etc.), but not on startup
        if not previous_errors[0] and n_errors == 2:
            return True
    # in case previous_errors exists, last item is error but not all items are error, error still active
    elif previous_errors and previous_errors[-1] and not all(previous_errors):
        return 'active'
    return False
2023-02-01 14:49:26 +01:00
|
|
|
def _simulated_error_check(self, key, fname='simulate_fail.json'):
|
|
|
|
if not os.path.isfile(fname):
|
|
|
|
return
|
|
|
|
with open(fname) as fid:
|
|
|
|
d = json.load(fid)
|
|
|
|
if d.get(self.nwst_id) == key:
|
|
|
|
return True
|
|
|
|
|
2022-11-23 11:52:26 +01:00
|
|
|
def send_mail(self, key, status_type, additional_message=''):
    """ Send info mail using parameters specified in parameters file

    Builds a plain-text + HTML multipart message describing the status
    change of check 'key' and sends it via the configured SMTP server.
    Silently returns (with an optional verbose note) when mailing is
    disabled, unconfigured, or the station/network is blacklisted.

    :param key: status-dict key the notification refers to
    :param status_type: short status label used in the mail text (e.g. 'FAIL')
    :param additional_message: extra detail appended to the mail text
    """
    if not mail_functionality:
        if self.verbosity:
            print('Mail functionality disabled. Return')
        return

    mail_params = self.parameters.get('EMAIL')
    if not mail_params:
        if self.verbosity:
            print('parameter "EMAIL" not set in parameter file. Return')
        return

    # skip stations explicitly excluded from notifications
    stations_blacklist = mail_params.get('stations_blacklist')
    if stations_blacklist and self.station in stations_blacklist:
        if self.verbosity:
            print(f'Station {self.station} listed in blacklist. Return')
        return

    networks_blacklist = mail_params.get('networks_blacklist')
    if networks_blacklist and self.network in networks_blacklist:
        if self.verbosity:
            print(f'Station {self.station} of network {self.network} listed in blacklist. Return')
        return

    sender = mail_params.get('sender')
    addresses = mail_params.get('addresses')
    add_addresses = self.get_additional_mail_recipients(mail_params)
    if add_addresses:
        # create copy of addresses ( [:] ) to prevent changing original, general list with addresses
        addresses = addresses[:] + list(add_addresses)
    server = mail_params.get('mailserver')
    if not sender or not addresses:
        if self.verbosity:
            print('Mail sender or addresses not (correctly) defined. Return')
        return
    # dt: time span after which a status change triggers an action
    dt = self.get_dt_for_action()
    text = f'{key}: Status {status_type} longer than {dt}: ' + additional_message

    msg = EmailMessage()

    msg['Subject'] = f'new message on station {self.nwst_id}'
    msg['From'] = sender
    msg['To'] = ', '.join(addresses)

    msg.set_content(text)

    # html mail version
    html_str = self.add_html_mail_body(text)
    msg.add_alternative(html_str, subtype='html')

    # send message via SMTP server
    s = smtplib.SMTP(server)
    s.send_message(msg)
    s.quit()
|
2023-02-01 14:49:26 +01:00
|
|
|
def add_html_mail_body(self, text, default_color='#e6e6e6'):
    """
    Build the HTML alternative for a notification mail: the status text,
    a one-row status table for this station, and (if available) the
    station's most recent status figure.

    :param text: plain status text placed above the table
    :param default_color: fallback background color for table cells
    :return: complete HTML document as a string
    """
    parent = self.parent

    header, header_items = parent.make_html_table_header('#999999', add_links=False)
    col_items = parent.get_html_row_items(self.status_dict, self.nwst_id, header, default_color, hyperlinks=False)

    # set general status text
    html_str = get_html_text(text)

    # init html header and table
    html_str += get_mail_html_header()
    html_str += init_html_table()

    # add table header and row of current station
    html_str += get_html_row(header_items, html_key='th')
    html_str += get_html_row(col_items)

    html_str += finish_html_table()

    # embed the pre-rendered figure bytes for this station, if one was
    # cached by the parent; pop() so it is only attached once
    if self.nwst_id in self.parent.active_figures.keys():
        fid = self.parent.active_figures.pop(self.nwst_id)
        html_str += add_html_image(img_data=fid.read())

    html_str += html_footer()

    return html_str
|
2023-01-31 16:12:07 +01:00
|
|
|
def get_additional_mail_recipients(self, mail_params):
    """ return additional recipients from external mail list if this station (self.nwst_id) is specified

    Fix: the previous implementation was a generator (it used ``yield``),
    so the caller's truthiness check ``if add_addresses:`` was always True
    even for an empty result. Returning a plain list restores meaningful
    truthiness while remaining fully compatible with the call site
    (which iterates/lists the result).

    :param mail_params: 'EMAIL' parameter sub-dictionary
    :return: list of additional recipient addresses (possibly empty)
    """
    recipients = []
    eml_filename = mail_params.get('external_mail_list')
    # no file specified
    if not eml_filename:
        if self.verbosity:
            print('No external mail list set.')
        return recipients
    # try to open file
    try:
        with open(eml_filename, 'r') as fid:
            address_dict = yaml.safe_load(fid)
        # file maps address -> collection of nwst_ids interested in it
        for address, nwst_ids in address_dict.items():
            if self.nwst_id in nwst_ids:
                recipients.append(address)
    # file not existing
    except FileNotFoundError as e:
        if self.verbosity:
            print(e)
    # no dictionary
    except AttributeError as e:
        if self.verbosity:
            print(f'Could not read dictionary from file {eml_filename}: {e}')
    # other exceptions
    except Exception as e:
        if self.verbosity:
            print(f'Could not open file {eml_filename}: {e}')
    return recipients
|
|
2022-11-23 11:52:26 +01:00
|
|
|
def get_dt_for_action(self):
    """Time span after which an action (e.g. mail) is due: n_track checks of length interval."""
    seconds = self.parameters.get('n_track') * self.parameters.get('interval')
    return timedelta(seconds=seconds)
|
2022-11-15 17:19:39 +01:00
|
|
|
def status_other(self, detailed_message, status_message, last_occurrence=None, count=1):
    """
    Register a status in the catch-all 'other' category. If an 'other'
    status is already present, merge into it (accumulate count, append
    the short message); otherwise start a fresh StatusOther.
    """
    key = 'other'
    fresh_status = StatusOther(count=count, messages=[status_message])
    existing = self.status_dict.get(key)
    if existing.is_other:
        existing.count += count
        existing.messages.append(status_message)
        merged = existing
    else:
        merged = fresh_status

    self._update_status(key, merged, detailed_message, last_occurrence)
|
|
|
def _update_status(self, key, current_status, detailed_message, last_occurrence):
    """
    Store current_status under 'key' after appending the newest detailed
    message and stamping the last occurrence time.
    """
    current_status.detailed_messages.append(detailed_message)
    current_status.last_occurrence = last_occurrence

    self.status_dict[key] = current_status
|
|
2022-11-23 11:52:26 +01:00
|
|
|
def activity_check(self, key='last_active'):
    """
    Determine how long ago the station last delivered data and store the
    result under 'last active'. No data at all -> StatusError; otherwise
    a plain Status carrying the inactivity timedelta, with a one-shot
    inactivity mail check.
    """
    self.last_active = self.last_activity()
    if not self.last_active:
        status = StatusError()
    else:
        # elapsed time between program start and the newest sample
        dt_active = timedelta(seconds=int(self.program_starttime - self.last_active))
        status = Status(message=dt_active)
        self.check_for_inactive_message(key, dt_active)

    self.status_dict['last active'] = status
|
|
|
|
|
def last_activity(self):
    """Return the latest trace endtime in self.stream, or None for an empty stream."""
    if not self.stream:
        return
    endtimes = [trace.stats.endtime for trace in self.stream]
    if endtimes:
        return max(endtimes)
|
|
2022-11-23 11:52:26 +01:00
|
|
|
def check_for_inactive_message(self, key, dt_active):
    """
    Send a one-shot 'Inactive' mail when the station has just crossed the
    inactivity threshold (i.e. dt_active lies inside the first interval
    after dt_action), summarising all current statuses.

    Fix: the summary loop previously reused the name ``key`` as its loop
    variable, shadowing the parameter, so send_mail received the last
    status-dict key instead of the intended key.

    :param key: status-dict key the notification refers to
    :param dt_active: timedelta since the station's last activity
    """
    dt_action = self.get_dt_for_action()
    interval = self.parameters.get('interval')
    # fire only within one interval of crossing the threshold -> sent once
    if dt_action <= dt_active < dt_action + timedelta(seconds=interval):
        detailed_message = f'\n{self.nwst_id}\n\n'
        for status_key, status in self.status_dict.items():
            detailed_message += f'{status_key}: {status.message}\n'
        self.send_mail(key, status_type='Inactive', additional_message=detailed_message)
|
|
2022-11-17 09:52:04 +01:00
|
|
|
def start(self):
    """Entry point: run all channel analyses for this station."""
    self.analyse_channels()
|
2022-11-03 15:39:23 +01:00
|
|
|
def analyse_channels(self):
    """
    Run every quality analysis for this station over the configured
    timespan (days), filling self.status_dict via the individual
    *_analysis methods.
    """
    # analysis window: 'timespan' is in days -> seconds
    timespan = self.parameters.get('timespan') * 24 * 3600
    self.analysis_starttime = self.program_starttime - timespan

    if self.verbosity > 0:
        self.print(150 * '#')
        self.print('This is StationQT. Calculating quality for station'
                   ' {network}.{station}.{location}'.format(**self.nsl))
    self.voltage_analysis()
    self.pb_temp_analysis()
    self.pb_power_analysis()
    self.pb_rout_charge_analysis()
    self.mass_analysis()
    self.clock_quality_analysis()
    self.gaps_analysis()

    # activity check should be done last for useful status output (e.g. email)
    self.activity_check()

    self._simulate_error()
|
|
|
def _simulate_error(self):
|
|
|
|
for key in self.keys:
|
|
|
|
if self._simulated_error_check(key):
|
|
|
|
self.error(key, 'SIMULATED ERROR')
|
|
|
|
|
2022-11-03 15:39:23 +01:00
|
|
|
def return_print_analysis(self):
    """Return short status strings for terminal output, station id first."""
    items = [self.nwst_id]
    for key in self.keys:
        message = self.status_dict[key].message
        if key == 'last active':
            # human-friendly elapsed-time formatting
            items.append(fancy_timestr(message))
        elif key == 'temp':
            items.append(str(message) + deg_str)
        else:
            items.append(str(message))
    return items
|
|
|
|
|
def return_analysis(self):
    """Return the complete status dictionary of this station."""
    return self.status_dict
|
|
2022-12-22 15:56:32 +01:00
|
|
|
def get_unit_factor(self, channel):
    """ Get channel multiplier for unit from parameters. If none is specified return 1 """
    channel_config = self.parameters.get('CHANNELS').get(channel)
    multiplier = channel_config.get('unit') if channel_config else None
    return float(multiplier) if multiplier else 1
|
2022-11-03 15:39:23 +01:00
|
|
|
def get_last_occurrence_timestring(self, trace, indices):
    """ returns a nicely formatted string of the timedelta since program starttime and occurrence and abs time"""
    last_occur = self.get_last_occurrence(trace, indices)
    if not last_occur:
        # nothing to report
        return ''
    elapsed = timedelta(seconds=int(self.program_starttime - last_occur))
    last_occur_dt = elapsed
    return f', Last occurrence: {last_occur_dt} ({last_occur.strftime("%Y-%m-%d %H:%M:%S")})'
|
2022-11-15 17:19:39 +01:00
|
|
|
def get_last_occurrence(self, trace, indices):
    """Absolute time of the final index in indices on the given trace."""
    final_index = indices[-1]
    return self.get_time(trace, final_index)
|
2022-12-22 15:36:55 +01:00
|
|
|
def clock_quality_analysis(self, channel='LCQ', n_sample_average=10):
    """ Analyse clock quality

    Compares the LCQ (clock quality percentage) trace against the
    configured warn/fail thresholds. Minor dips below the warn level
    keep the OK status (with an informative message); past dips below
    the fail level set WARN; a fail-level average over the last
    n_sample_average samples sets FAIL.

    :param channel: SEED channel code carrying the clock quality
    :param n_sample_average: samples averaged for the FAIL decision
    """
    key = 'clock'
    st = self.stream.select(channel=channel)
    trace = self.get_trace(st, key)
    if not trace:
        return
    clock_quality = trace.data
    clock_quality_warn_level = self.parameters.get('THRESHOLDS').get('clockquality_warn')
    clock_quality_fail_level = self.parameters.get('THRESHOLDS').get('clockquality_fail')

    if self.verbosity > 1:
        self.print(40 * '-')
        self.print('Performing Clock Quality check', flush=False)

    clockQuality_warn = np.where(clock_quality < clock_quality_warn_level)[0]
    clockQuality_fail = np.where(clock_quality < clock_quality_fail_level)[0]

    if len(clockQuality_warn) == 0 and len(clockQuality_fail) == 0:
        self.status_ok(key, detailed_message=f'ClockQuality={(clock_quality[-1])}')
        return

    # average of the most recent samples decides the FAIL state below
    last_val_average = np.nanmean(clock_quality[-n_sample_average:])

    # keep OK status if there are only minor warnings (lower warn level)
    warn_message = f'Trace {trace.get_id()}:'
    if len(clockQuality_warn) > 0:
        # try calculate number of warn peaks from gaps between indices
        n_qc_warn = self.calc_occurrences(clockQuality_warn)
        detailed_message = warn_message + f' {n_qc_warn}x Clock quality less then {clock_quality_warn_level}%' \
                           + self.get_last_occurrence_timestring(trace, clockQuality_warn)
        self.status_ok(key, detailed_message=detailed_message)

    # set WARN status for severe warnings in the past
    if len(clockQuality_fail) > 0:
        # try calculate number of fail peaks from gaps between indices
        n_qc_fail = self.calc_occurrences(clockQuality_fail)
        detailed_message = warn_message + f' {n_qc_fail}x Clock quality less then {clock_quality_fail_level}%' \
                           + self.get_last_occurrence_timestring(trace, clockQuality_fail)
        self.warn(key, detailed_message=detailed_message, count=n_qc_fail,
                  last_occurrence=self.get_last_occurrence(trace, clockQuality_fail))

    # set FAIL state if last value is less than fail level
    if last_val_average < clock_quality_fail_level:
        self.error(key, detailed_message=f'ClockQuality={(clock_quality[-1])}')
|
2022-11-03 15:39:23 +01:00
|
|
|
def voltage_analysis(self, channel='VEI'):
    """ Analyse voltage channel for over/undervoltage

    Scales the raw trace by the channel's unit factor and checks every
    sample against the low/high voltage thresholds. All-clear sets OK
    with the latest value; otherwise WARN statuses are raised with the
    number of distinct excursions.
    """
    key = 'voltage'
    st = self.stream.select(channel=channel)
    trace = self.get_trace(st, key)
    if not trace:
        return
    voltage = trace.data * self.get_unit_factor(channel)
    low_volt = self.parameters.get('THRESHOLDS').get('low_volt')
    high_volt = self.parameters.get('THRESHOLDS').get('high_volt')

    if self.verbosity > 1:
        self.print(40 * '-')
        self.print('Performing Voltage check', flush=False)

    overvolt = np.where(voltage > high_volt)[0]
    undervolt = np.where(voltage < low_volt)[0]

    if len(overvolt) == 0 and len(undervolt) == 0:
        self.status_ok(key, detailed_message=f'U={(voltage[-1])}V')
        return

    warn_message = f'Trace {trace.get_id()}:'
    if len(overvolt) > 0:
        # try calculate number of voltage peaks from gaps between indices
        n_overvolt = len(np.where(np.diff(overvolt) > 1)[0]) + 1
        detailed_message = warn_message + f' {n_overvolt}x Voltage over {high_volt}V' \
                           + self.get_last_occurrence_timestring(trace, overvolt)
        self.warn(key, detailed_message=detailed_message, count=n_overvolt,
                  last_occurrence=self.get_last_occurrence(trace, overvolt))

    if len(undervolt) > 0:
        # try calculate number of voltage peaks from gaps between indices
        n_undervolt = len(np.where(np.diff(undervolt) > 1)[0]) + 1
        detailed_message = warn_message + f' {n_undervolt}x Voltage under {low_volt}V ' \
                           + self.get_last_occurrence_timestring(trace, undervolt)
        self.warn(key, detailed_message=detailed_message, count=n_undervolt,
                  last_occurrence=self.get_last_occurrence(trace, undervolt))
|
|
|
|
def pb_temp_analysis(self, channel='EX1'):
    """ Analyse PowBox temperature output.

    The EX1 voltage encodes temperature linearly (T = 20 * U - 20).
    Raises WARN when the threshold 'max_temp' is exceeded anywhere in
    the window, otherwise OK with the current value and a windowed
    average.
    """
    key = 'temp'
    st = self.stream.select(channel=channel)
    trace = self.get_trace(st, key)
    if not trace:
        return
    voltage = trace.data * self.get_unit_factor(channel)
    thresholds = self.parameters.get('THRESHOLDS')
    # PowBox temperature transfer function (volts -> degrees Celsius)
    temp = 20. * voltage - 20
    # average temp
    timespan = min([self.parameters.get('timespan') * 24 * 3600, int(len(temp) / trace.stats.sampling_rate)])
    nsamp_av = int(trace.stats.sampling_rate) * timespan
    av_temp_str = str(round(np.nanmean(temp[-nsamp_av:]), 1)) + deg_str
    # dt of average
    dt_t_str = str(timedelta(seconds=int(timespan))).replace(', 0:00:00', '')
    # current temp
    cur_temp = round(temp[-1], 1)
    if self.verbosity > 1:
        self.print(40 * '-')
        self.print('Performing PowBox temperature check (EX1)', flush=False)
        self.print(f'Average temperature at {np.nanmean(temp)}\N{DEGREE SIGN}', flush=False)
        self.print(f'Peak temperature at {max(temp)}\N{DEGREE SIGN}', flush=False)
        self.print(f'Min temperature at {min(temp)}\N{DEGREE SIGN}', flush=False)
    max_temp = thresholds.get('max_temp')
    t_check = np.where(temp > max_temp)[0]
    if len(t_check) > 0:
        self.warn(key=key,
                  detailed_message=f'Trace {trace.get_id()}: '
                                   f'Temperature over {max_temp}\N{DEGREE SIGN} at {trace.get_id()}!'
                                   + self.get_last_occurrence_timestring(trace, t_check),
                  last_occurrence=self.get_last_occurrence(trace, t_check))
    else:
        self.status_ok(key,
                       status_message=cur_temp,
                       detailed_message=f'Average temperature of last {dt_t_str}: {av_temp_str}')
|
|
2022-12-20 16:54:27 +01:00
|
|
|
def mass_analysis(self, channels=('VM1', 'VM2', 'VM3'), n_samp_mean=10):
    """ Analyse datalogger mass channels.

    Merges the three mass-position channels, averages the last
    n_samp_mean samples per component, and grades the largest absolute
    offset against the 'max_vm_warn'/'max_vm_fail' thresholds
    (OK / WARN / FAIL).
    """
    key = 'mass'

    # build stream with all channels
    st = Stream()
    for channel in channels:
        st += self.stream.select(channel=channel).copy()
    st.merge()

    # return if there are no three components
    if not len(st) == 3:
        return

    # correct for channel unit
    for trace in st:
        trace.data = trace.data * self.get_unit_factor(trace.stats.channel)

    # calculate average of absolute maximum of mass offset of last n_samp_mean
    last_values = np.array([trace.data[-n_samp_mean:] for trace in st])
    last_val_mean = np.nanmean(last_values, axis=1)
    common_highest_val = np.nanmax(abs(last_val_mean))
    common_highest_val = round(common_highest_val, 1)

    # get thresholds for WARN (max_vm_warn) and FAIL (max_vm_fail)
    thresholds = self.parameters.get('THRESHOLDS')
    max_vm_warn = thresholds.get('max_vm_warn')
    max_vm_fail = thresholds.get('max_vm_fail')
    if not max_vm_warn or not max_vm_fail:
        # thresholds not configured -> silently skip the check
        return

    # change status depending on common_highest_val
    if common_highest_val < max_vm_warn:
        self.status_ok(key, detailed_message=f'{common_highest_val}V')
    elif max_vm_warn <= common_highest_val < max_vm_fail:
        self.warn(key=key,
                  detailed_message=f'Warning raised for mass centering. Highest val (abs) {common_highest_val}V', )
    else:
        self.error(key=key,
                   detailed_message=f'Fail status for mass centering. Highest val (abs) {common_highest_val}V',)

    if self.verbosity > 1:
        self.print(40 * '-')
        self.print('Performing mass position check', flush=False)
        self.print(f'Average mass position at {common_highest_val}', flush=False)
|
2022-11-03 15:39:23 +01:00
|
|
|
def pb_power_analysis(self, channel='EX2', pb_dict_key='pb_SOH2'):
    """ Analyse EX2 channel of PowBox

    EX2 encodes the combined 230V/12V supply state as discrete voltage
    levels. If the level is the OK level everywhere both keys are set
    to OK, otherwise the levels are decoded in the in-depth check.
    """
    keys = ['230V', '12V']
    st = self.stream.select(channel=channel)
    trace = self.get_trace(st, keys)
    if not trace:
        return

    voltage = trace.data * self.get_unit_factor(channel)
    if self.verbosity > 1:
        self.print(40 * '-')
        self.print('Performing PowBox 12V/230V check (EX2)', flush=False)
    voltage_check, voltage_dict, last_val = self.pb_voltage_ok(trace, voltage, pb_dict_key, channel=channel)

    if voltage_check:
        for key in keys:
            self.status_ok(key)
        return

    soh2_params = self.parameters.get('POWBOX').get(pb_dict_key)
    self.in_depth_voltage_check(trace, voltage_dict, soh2_params, last_val)
|
|
|
|
|
def pb_rout_charge_analysis(self, channel='EX3', pb_dict_key='pb_SOH3'):
    """ Analyse EX3 channel of PowBox

    EX3 encodes the router/charger state as discrete voltage levels.
    If the level is the OK level everywhere both keys are set to OK,
    otherwise the levels are decoded in the in-depth check.

    Fix: removed the unused local ``pb_thresh`` (the threshold is
    fetched inside pb_voltage_ok, the value read here was never used).
    """
    keys = ['router', 'charger']
    st = self.stream.select(channel=channel)
    trace = self.get_trace(st, keys)
    if not trace:
        return

    voltage = trace.data * self.get_unit_factor(channel)
    if self.verbosity > 1:
        self.print(40 * '-')
        self.print('Performing PowBox Router/Charger check (EX3)', flush=False)
    voltage_check, voltage_dict, last_val = self.pb_voltage_ok(trace, voltage, pb_dict_key, channel=channel)

    if voltage_check:
        for key in keys:
            self.status_ok(key)
        return

    soh3_params = self.parameters.get('POWBOX').get(pb_dict_key)
    self.in_depth_voltage_check(trace, voltage_dict, soh3_params, last_val)
|
|
|
|
def in_depth_voltage_check(self, trace, voltage_dict, soh_params, last_val):
    """ Associate values in voltage_dict to error messages specified in SOH_params and warn.

    :param trace: source trace (used for ids and occurrence times)
    :param voltage_dict: {voltage_level: sample indices at that level}
    :param soh_params: {voltage_level: {status_key: message}} from config
    :param last_val: classified voltage level of the most recent samples
    """
    for volt_lvl, ind_array in voltage_dict.items():
        if volt_lvl == 1:
            continue  # No need to do anything here
        if len(ind_array) > 0:
            # get result from parameter dictionary for voltage level
            result = soh_params.get(volt_lvl)
            for key, message in result.items():
                # if result is OK, continue with next voltage level
                if message == 'OK':
                    self.status_ok(key)
                    continue
                if volt_lvl > 1:
                    n_occurrences = self.calc_occurrences(ind_array)
                    self.warn(key=key,
                              detailed_message=f'Trace {trace.get_id()}: '
                                               f'Found {n_occurrences} occurrence(s) of {volt_lvl}V: {key}: {message}'
                                               + self.get_last_occurrence_timestring(trace, ind_array),
                              count=n_occurrences,
                              last_occurrence=self.get_last_occurrence(trace, ind_array))
                # if last_val == current voltage (which is not 1) -> FAIL or last_val < 1: PBox no data
                if volt_lvl == last_val or (volt_lvl == -1 and last_val < 1):
                    self.error(key, detailed_message=f'Last PowBox voltage state {last_val}V: {message}')
|
2023-01-03 17:59:42 +01:00
|
|
|
def gaps_analysis(self, key='gaps'):
    """ return gaps of a given nwst_id """
    # filter the parent's global gap list down to this station
    station_gaps = [gap for gap in self.parent.gaps
                    if gap[0] == self.network and gap[1] == self.station]

    if not station_gaps:
        self.status_ok(key=key)
        return

    detailed_message = ''.join(
        '{}.{}.{}.{}: last sample - {}, next sample - {}, delta {}, samples {}\n'.format(*gap)
        for gap in station_gaps)

    self.warn(key=key, detailed_message=detailed_message, count=len(station_gaps))
|
2022-12-06 15:31:09 +01:00
|
|
|
def calc_occurrences(self, ind_array):
    """
    Count distinct peaks/plateaus in a sorted index array: a new
    occurrence is registered for each gap (index difference > 1) whose
    preceding plateau spans at least 'min_sample' consecutive samples.
    """
    if len(ind_array) == 0:
        return 0

    # start index at 1 if there are gaps (n_peaks = n_gaps + 1)
    n_occurrences = 1

    min_samples = self.parameters.get('min_sample') or 1

    # diff > 1 marks a gap between plateaus; diff == 1 is within a plateau
    diffs = np.diff(ind_array)
    for gap_ind in np.where(diffs > 1)[0]:
        # right boundary index of peak (gap index - 1)
        right = gap_ind - 1
        # left boundary index of peak
        left = max([0, right - min_samples])
        if all(diffs[left:right] == 1):
            n_occurrences += 1

    return n_occurrences
|
|
2022-11-03 15:39:23 +01:00
|
|
|
def get_trace(self, stream, keys):
    """
    Return the single trace of 'stream', or None after issuing a
    'NO DATA' warning for every given status key when the stream is
    empty or its data end before the analysis window starts.

    Fix: replaced the ``not type(keys) == list`` check with the
    idiomatic ``isinstance``.

    :param stream: stream expected to hold exactly one trace
    :param keys: status key or list of status keys to warn on
    :raises Exception: if the stream contains more than one trace
    """
    if not isinstance(keys, list):
        keys = [keys]
    if len(stream) == 0:
        for key in keys:
            self.warn(key, 'NO DATA', 'NO DATA')
        return
    if len(stream) > 1:
        raise Exception('Ambiguity error')
    trace = stream[0]
    # data too old to be inside the analysis window counts as no data
    if trace.stats.endtime < self.analysis_starttime:
        for key in keys:
            self.warn(key, 'NO DATA', 'NO DATA')
        return
    return trace
|
2022-11-14 22:31:22 +01:00
|
|
|
def pb_voltage_ok(self, trace, voltage, pb_dict_key, channel=None):
    """
    Checks if voltage level is ok everywhere and returns True. If it is not okay it returns a dictionary
    with each voltage value associated to the different steps specified in POWBOX > pb_steps. Also raises
    self.warn in case there are unassociated voltage values recorded.

    :param trace: source trace (used for ids and occurrence times)
    :param voltage: unit-corrected voltage samples of the trace
    :param pb_dict_key: POWBOX sub-dictionary naming the valid levels
    :param channel: channel name used in 'unclassified' status messages
    :return: (ok_flag, {level: indices}, last classified level/value)
    """
    pb_thresh = self.parameters.get('THRESHOLDS').get('pb_thresh')
    pb_ok = self.parameters.get('POWBOX').get('pb_ok')
    # possible voltage levels are keys of pb voltage level dict
    voltage_levels = list(self.parameters.get('POWBOX').get(pb_dict_key).keys())

    # get mean voltage value of last samples
    last_voltage = np.nanmean(voltage[-3:])

    # check if voltage is over or under OK-level (1V), if not return True
    over = np.where(voltage > pb_ok + pb_thresh)[0]
    under = np.where(voltage < pb_ok - pb_thresh)[0]

    if len(over) == 0 and len(under) == 0:
        return True, {}, last_voltage

    # Get voltage levels for classification
    voltage_dict = {}
    classified_indices = np.array([])

    # add classified levels to voltage_dict
    for volt in voltage_levels:
        # samples within +/- pb_thresh of this level belong to it
        indices = np.where((voltage < volt + pb_thresh) & (voltage > volt - pb_thresh))[0]
        voltage_dict[volt] = indices
        classified_indices = np.append(classified_indices, indices)

    # Warn in case of voltage under OK-level (1V)
    if len(under) > 0:
        # try calculate number of occurences from gaps between indices
        n_occurrences = len(np.where(np.diff(under) > 1)[0]) + 1
        voltage_dict[-1] = under
        self.status_other(detailed_message=f'Trace {trace.get_id()}: '
                                           f'Voltage below {pb_ok}V in {len(under)} samples, {n_occurrences} time(s). '
                                           f'Mean voltage: {np.mean(voltage):.2}'
                                           + self.get_last_occurrence_timestring(trace, under),
                          status_message='under 1V ({})'.format(n_occurrences))

    # classify last voltage values
    for volt in voltage_levels:
        if (last_voltage < volt + pb_thresh) and (last_voltage > volt - pb_thresh):
            last_val = volt
            break
    else:
        # no level matched: report the raw (rounded) voltage instead
        last_val = round(last_voltage, 2)

    # in case not all voltage values could be classified
    if not len(classified_indices) == len(voltage):
        all_indices = np.arange(len(voltage))
        unclassified_indices = all_indices[~np.isin(all_indices, classified_indices)]
        n_unclassified = len(unclassified_indices)
        max_uncl = self.parameters.get('THRESHOLDS').get('unclassified')
        if max_uncl and n_unclassified > max_uncl:
            self.status_other(detailed_message=f'Trace {trace.get_id()}: '
                                               f'{n_unclassified}/{len(all_indices)} '
                                               f'unclassified voltage values in channel {trace.get_id()}',
                              status_message=f'{channel}: {n_unclassified} uncl.')

    return False, voltage_dict, last_val
|
|
|
|
def get_time(self, trace, index):
    """Return the UTCDateTime of sample *index* in *trace* (starttime + index * delta)."""
    stats = trace.stats
    return stats.starttime + index * stats.delta
|
|
|
|
|
|
|
|
|
2022-11-15 17:19:39 +01:00
|
|
|
class Status(object):
    """
    Container for the surveillance state of a single channel/parameter.

    Holds a short status message, an optional list of detailed messages,
    an occurrence count and the time of the last occurrence. Subclasses
    classify themselves via the set_warn/set_error/set_ok methods.
    """

    def __init__(self, message=None, detailed_messages=None, count: int = 0, last_occurrence=None, show_count=True):
        # fall back to a neutral placeholder when no message is given
        if message is None:
            message = '-'
        if detailed_messages is None:
            detailed_messages = []
        self.show_count = show_count
        self.message = message
        self.messages = [message]
        self.detailed_messages = detailed_messages
        self.count = count
        self.last_occurrence = last_occurrence
        # tri-state flags: None means "not yet classified"
        self.is_warn = None
        self.is_error = None
        self.is_other = False
        self.is_active = False

    def set_warn(self):
        """Mark this status as a warning."""
        self.is_warn = True

    def set_error(self):
        """Mark this status as an error (clears the warning flag)."""
        self.is_warn = False
        self.is_error = True

    def set_ok(self):
        """Mark this status as OK (clears warning and error flags)."""
        self.is_warn = False
        self.is_error = False

    def get_status_str(self):
        """
        Return a tuple (message, detailed_message).

        The message carries the occurrence count in parentheses when
        show_count is set and more than one occurrence was registered.
        Detailed messages are joined with ' | '.
        """
        message = self.message
        if self.count > 1 and self.show_count:
            message += f' ({self.count})'
        # idiomatic str.join replaces the previous manual accumulation loop
        detailed_message = ' | '.join(self.detailed_messages)
        return message, detailed_message
|
|
|
|
|
|
|
|
|
|
|
|
class StatusOK(Status):
    """Status variant signalling that everything is fine (default message 'OK')."""

    def __init__(self, message='OK', detailed_messages=None):
        # delegate storage to the base class, then classify as OK
        super().__init__(message=message, detailed_messages=detailed_messages)
        self.set_ok()
|
|
|
|
|
|
|
|
|
|
|
|
class StatusWarn(Status):
    """Status variant signalling a warning condition (default message 'WARN')."""

    # NOTE: the keyword 'last_occurence' (sic) is part of the public
    # interface and therefore kept despite the misspelling.
    def __init__(self, message='WARN', count=1, last_occurence=None, detailed_messages=None, show_count=False):
        super().__init__(message=message, count=count, last_occurrence=last_occurence,
                         detailed_messages=detailed_messages, show_count=show_count)
        self.set_warn()
|
|
|
|
|
|
|
|
|
|
|
|
class StatusError(Status):
    """Status variant signalling a failure condition (default message 'FAIL')."""

    # NOTE: the keyword 'last_occurence' (sic) is part of the public
    # interface and therefore kept despite the misspelling.
    def __init__(self, message='FAIL', count=1, last_occurence=None, detailed_messages=None, show_count=False):
        super().__init__(message=message, count=count, last_occurrence=last_occurence,
                         detailed_messages=detailed_messages, show_count=show_count)
        self.set_error()
|
|
|
|
|
2022-11-21 15:31:32 +01:00
|
|
|
|
2022-11-15 17:19:39 +01:00
|
|
|
class StatusOther(Status):
    """
    Status variant carrying a list of independent messages (several
    unrelated findings on one channel).
    """

    def __init__(self, messages=None, count=1, last_occurence=None, detailed_messages=None):
        # NOTE: the keyword 'last_occurence' (sic) is part of the public
        # interface and therefore kept despite the misspelling.
        super(StatusOther, self).__init__(count=count, last_occurrence=last_occurence,
                                          detailed_messages=detailed_messages)
        if messages is None:
            messages = []
        self.messages = messages
        self.is_other = True

    def get_status_str(self):
        """
        Return a tuple (message, detailed_message), joining the individual
        entries with ' | '.

        Bug fix: the empty-messages case previously returned the bare
        string '-' instead of a (message, detailed_message) tuple, which
        broke callers that unpack the result as they do for every other
        Status subclass.
        """
        detailed_message = ' | '.join(self.detailed_messages)
        if not self.messages:
            return '-', detailed_message
        return ' | '.join(self.messages), detailed_message
|
|
|
|
|
|
|
|
|
2022-11-03 15:39:23 +01:00
|
|
|
if __name__ == '__main__':
    # CLI entry point: the optional '-html' flag selects an HTML output file.
    cli_parser = argparse.ArgumentParser(description='Call survBot')
    cli_parser.add_argument('-html', dest='html_path', default=None, help='filepath for HTML output')
    cli_args = cli_parser.parse_args()

    # instantiate the bot with the default parameter file and run it
    survBot = SurveillanceBot(parameter_path='parameters.yaml', outpath_html=cli_args.html_path)
    survBot.start()
|