diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 6d3e4a1..c45c708 100755 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,13 +4,17 @@ Changelog Version 1.2.0 [2021-02-22] =========== - APP: HAT_DataManager_HMC_NRT + APP: HAT_DataOrganizer_HMC_NRT.py - FIX: bugs related to xarray library; - FIX: bugs related to progressbar library + APP: HAT_DataPublisher_HMC_NRT.py +- FIX: bugs related to matplotlib=3.1.3 and cartopy=0.17 libraries + APP: HAT_RunAnalyzer_HMC_Main.py +- Release for HMC version 3.x.x Version 1.1.5 [2019-10-22] =========== - APP: HAT_DataManager_HMC_NRT + APP: HAT_DataManager_HMC_NRT.py - ADD: method to set terrain variable in buffered datasets when is missed - ADD: effective time steps information about computing gridded average or accumulated variable(s) skipping empty step(s) - FIX: empty time steps condition in computing gridded average or accumulated variable(s) @@ -19,37 +23,37 @@ Version 1.1.5 [2019-10-22] Version 1.1.4 [2019-07-08] =========== - APP: HAT_DataManager_HMC_NRT + APP: HAT_DataManager_HMC_NRT.py - ADD: variables to datasets (soil moisture and accumulated rain time-series) - ADD: management of tmp file(s) in a common system folder - FIX: datasets finder according with operative chain requests Version 1.1.3 [2019-05-27] =========== - APP: HAT_DataPublisher_HMC_NRT + APP: HAT_DataPublisher_HMC_NRT.py - ADD: soil moisture and accumulated rain to time-series graph(s) Version 1.1.2 [2019-05-16] =========== - APP: HAT_DataManager_HMC_NRT + APP: HAT_DataManager_HMC_NRT.py - FIX: bug about selecting gridded variables with mean equal to zero - FIX: bug about out of range probabilistic runs - APP: HAT_DataPublisher_HMC_NRT + APP: HAT_DataPublisher_HMC_NRT.py - FIX: bugs in data seeking and data plotting (hydrapp time-series and maps product) Version 1.1.1 [2019-03-04] =========== - APP: HAT_DataManager_HMC_NRT + APP: HAT_DataManager_HMC_NRT.py - FIX: nasty bugs arisen in operative mode fixed -Version 1.1.0 [2019-03-22] +Version 1.1.0 [2019-02-22] =========== - APP: HAT_DataPublisher_HMC_NRT + APP: HAT_DataPublisher_HMC_NRT.py - Beta release Version 1.0.0 [2019-02-13] =========== - APP: HAT_DataManager_HMC_NRT + APP: HAT_DataManager_HMC_NRT.py - Beta release Version 0.0.1 [2019-01-16] diff --git a/apps/DataPublisher/HAT_DataManager_HMC_NRT.py b/apps/Analyzer_Datasets/HAT_DataOrganizer_HMC_NRT.py similarity index 99% rename from apps/DataPublisher/HAT_DataManager_HMC_NRT.py rename to apps/Analyzer_Datasets/HAT_DataOrganizer_HMC_NRT.py index 1d6127a..a4a260c 100755 --- a/apps/DataPublisher/HAT_DataManager_HMC_NRT.py +++ b/apps/Analyzer_Datasets/HAT_DataOrganizer_HMC_NRT.py @@ -7,7 +7,7 @@ __library__ = 'hat' General command line: -python3 HAT_DataManager_HMC_NRT.py -settings_file configuration.json -time "YYYY-MM-DD HH:MM" +python3 HAT_DataOrganizer_HMC_NRT.py -settings_file configuration.json -time "YYYY-MM-DD HH:MM" Version(s) in changelog file """ # ------------------------------------------------------------------------------------- diff --git a/apps/DataPublisher/HAT_DataPublisher_HMC_NRT.py b/apps/Analyzer_Datasets/HAT_DataPublisher_HMC_NRT.py similarity index 98% rename from apps/DataPublisher/HAT_DataPublisher_HMC_NRT.py rename to apps/Analyzer_Datasets/HAT_DataPublisher_HMC_NRT.py index 09d8211..c914332 100755 --- a/apps/DataPublisher/HAT_DataPublisher_HMC_NRT.py +++ b/apps/Analyzer_Datasets/HAT_DataPublisher_HMC_NRT.py @@ -1,5 +1,5 @@ """ -Hydrological Analysis Tool - DataPublisher for Hydrological Model Continuum - Near Real Time +Hydrological Analysis Tool - 
Analyzer_Datasets for Hydrological Model Continuum - Near Real Time __date__ = '20210218' __version__ = '1.2.0' @@ -7,7 +7,7 @@ __library__ = 'hat' General command line: -python3 HAT_DataPublisher_HMC_NRT.py -settings_file configuration.json -time YYYYMMDDHHMM +python3 HAT_RunPublisher_HMC_NRT.py -settings_file configuration.json -time YYYYMMDDHHMM Version(s) in changelog file """ @@ -45,7 +45,7 @@ def main(): # Version and algorithm information sProgramVersion = '1.2.0' sProjectName = 'HAT' - sAlgType = 'DataPublisher' + sAlgType = 'Analyzer_Datasets' sAlgName = 'HMC NearRealTime' # Time algorithm information dStartTime = time.time() diff --git a/apps/DataPublisher/hat_datamanager_hmc_nrt_configuration_example_generic.json b/apps/Analyzer_Datasets/hat_dataorganizer_hmc_nrt_configuration_example_generic.json similarity index 100% rename from apps/DataPublisher/hat_datamanager_hmc_nrt_configuration_example_generic.json rename to apps/Analyzer_Datasets/hat_dataorganizer_hmc_nrt_configuration_example_generic.json diff --git a/apps/DataPublisher/hat_datapublisher_hmc_nrt_configuration_example_generic.json b/apps/Analyzer_Datasets/hat_datapublisher_hmc_nrt_configuration_example_generic.json similarity index 100% rename from apps/DataPublisher/hat_datapublisher_hmc_nrt_configuration_example_generic.json rename to apps/Analyzer_Datasets/hat_datapublisher_hmc_nrt_configuration_example_generic.json diff --git a/apps/Analyzer_Execution/HAT_RunAnalyzer_HMC_Main.py b/apps/Analyzer_Execution/HAT_RunAnalyzer_HMC_Main.py new file mode 100755 index 0000000..46cf4e4 --- /dev/null +++ b/apps/Analyzer_Execution/HAT_RunAnalyzer_HMC_Main.py @@ -0,0 +1,199 @@ +#!/usr/bin/python3 +""" +Hydrological Analysis Tool - Analyzer_Execution + +__date__ = '20210225' +__version__ = '1.5.0' +__author__ = + 'Fabio Delogu (fabio.delogu@cimafoundation.org)', + 'Flavio Pignone (flavio.pignone@cimafoundation.org)', + +__library__ = 'HAT' + +General command line: +python3 HAT_RunAnalyzer_HMC_Main.py -settings_file configuration.json -time "YYYY-MM-DD HH:MM" + +Version(s): +20210225 (1.5.0) --> Beta release for HMC 3.x.x +20200605 (1.0.0) --> Beta release for HMC 2.x.x +""" + +# ------------------------------------------------------------------------------------- +# Complete library +import logging +import time +import os + +from argparse import ArgumentParser + +from lib_data_io_json import read_file_settings +from lib_utils_time import set_time + +from driver_data_io_static import DriverStatic +from driver_data_io_dynamic import DriverDynamic +# ------------------------------------------------------------------------------------- + +# ------------------------------------------------------------------------------------- +# Algorithm information +alg_version = '1.5.0' +alg_release = '2021-02-25' +alg_name = 'RUN ANALYZER' +# Algorithm parameter(s) +time_format = '%Y-%m-%d %H:%M' +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Script Main +def main(): + + # ------------------------------------------------------------------------------------- + # Get algorithm settings + alg_settings, alg_time = get_args() + + # Set algorithm settings + data_settings = read_file_settings(alg_settings) + + # Set algorithm logging + set_logging(logger_file=os.path.join(data_settings['log']['folder_name'], + data_settings['log']['file_name'])) + # 
------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Info algorithm + logging.info(' ============================================================================ ') + logging.info(' ==> ' + alg_name + ' (Version: ' + alg_version + ' Release_Date: ' + alg_release + ')') + logging.info(' ==> START ... ') + logging.info(' ') + + # Time algorithm information + start_time = time.time() + # ------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Organize time information + time_now, time_exec, time_range = set_time( + time_run_args=alg_time, time_run_file=data_settings['time']['time_now'], + time_format=time_format, + time_period=data_settings['time']['time_period'], + time_frequency=data_settings['time']['time_frequency'], + time_rounding=data_settings['time']['time_rounding']) + # ------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Driver and method of static datasets + driver_data_static = DriverStatic( + src_dict=data_settings['data']['static'], + alg_ancillary=data_settings['algorithm']['ancillary'], + alg_template_tags=data_settings['algorithm']['template'] + ) + static_data_collection = driver_data_static.organize_static() + # ------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Iterate over time range + for time_step in time_range: + + # ------------------------------------------------------------------------------------- + # Driver and method of dynamic datasets + driver_data_dynamic = DriverDynamic( + time_now=time_now, + time_exec=time_exec, + time_run=time_step, + static_data_collection=static_data_collection, + src_dict=data_settings['data']['dynamic']['source'], + anc_dict=data_settings['data']['dynamic']['ancillary'], + dest_dict=data_settings['data']['dynamic']['destination'], + alg_ancillary=data_settings['algorithm']['ancillary'], + alg_template_tags=data_settings['algorithm']['template'], + flag_cleaning_dynamic_source=data_settings['algorithm']['flags']['cleaning_dynamic_source'], + flag_cleaning_dynamic_analysis=data_settings['algorithm']['flags']['cleaning_dynamic_analysis'], + flag_cleaning_dynamic_destination=data_settings['algorithm']['flags']['cleaning_dynamic_destination'], + flag_cleaning_dynamic_tmp=data_settings['algorithm']['flags']['cleaning_dynamic_tmp']) + + dynamic_data_collection = driver_data_dynamic.organize_dynamic_data() + analyze_data_collection, analyze_warnings_collection = driver_data_dynamic.analyze_dynamic_data( + dynamic_data_collection) + driver_data_dynamic.dump_dynamic_data(analyze_data_collection, analyze_warnings_collection) + driver_data_dynamic.clean_dynamic_tmp() + # ------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Info algorithm + alg_time_elapsed = round(time.time() - start_time, 1) + + logging.info(' ') + logging.info('[HAT Analyzer_Execution - ' + alg_name + ' (Version ' + alg_version + + ' - Release ' + alg_release + ')]') + logging.info(' ==> TIME ELAPSED: ' + str(alg_time_elapsed) 
+ ' seconds') + logging.info(' ==> ... END') + logging.info(' ==> Bye, Bye') + logging.info(' ============================================================================ ') + # ------------------------------------------------------------------------------------- + +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to get script argument(s) +def get_args(): + parser_handle = ArgumentParser() + parser_handle.add_argument('-settings_file', action="store", dest="alg_settings") + parser_handle.add_argument('-time', action="store", dest="alg_time") + parser_values = parser_handle.parse_args() + + if parser_values.alg_settings: + alg_settings = parser_values.alg_settings + else: + alg_settings = 'configuration.json' + + if parser_values.alg_time: + alg_time = parser_values.alg_time + else: + alg_time = None + + return alg_settings, alg_time + +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to set logging information +def set_logging(logger_file='log.txt', logger_format=None): + if logger_format is None: + logger_format = '%(asctime)s %(name)-12s %(levelname)-8s ' \ + '%(filename)s:[%(lineno)-6s - %(funcName)20s()] %(message)s' + + # Remove old logging file + if os.path.exists(logger_file): + os.remove(logger_file) + + # Set level of root debugger + logging.root.setLevel(logging.DEBUG) + + # Open logging basic configuration + logging.basicConfig(level=logging.DEBUG, format=logger_format, filename=logger_file, filemode='w') + + # Set logger handle + logger_handle_1 = logging.FileHandler(logger_file, 'w') + logger_handle_2 = logging.StreamHandler() + # Set logger level + logger_handle_1.setLevel(logging.DEBUG) + logger_handle_2.setLevel(logging.DEBUG) + # Set logger formatter + logger_formatter = logging.Formatter(logger_format) + logger_handle_1.setFormatter(logger_formatter) + logger_handle_2.setFormatter(logger_formatter) + + # Add handle to logging + logging.getLogger('').addHandler(logger_handle_1) + logging.getLogger('').addHandler(logger_handle_2) + +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Call script from external library +if __name__ == '__main__': + main() +# ------------------------------------------------------------------------------------- diff --git a/apps/Analyzer_Execution/driver_data_io_dynamic.py b/apps/Analyzer_Execution/driver_data_io_dynamic.py new file mode 100755 index 0000000..8de8eb6 --- /dev/null +++ b/apps/Analyzer_Execution/driver_data_io_dynamic.py @@ -0,0 +1,768 @@ +""" +Class Features + +Name: driver_data_io_dynamic +Author(s): Fabio Delogu (fabio.delogu@cimafoundation.org) +Date: '20210225' +Version: '1.0.0' +""" + +###################################################################################### +# Library +import logging +import os + +import pandas as pd + +from copy import deepcopy + +from lib_utils_io import read_obj, write_obj +from lib_utils_system import fill_tags2string, make_folder + +from lib_utils_exec import read_file_execution_info +from lib_data_io_json import read_file_hydrograph_ts, read_file_hydrograph_info +from lib_data_io_html import write_file_summary +from lib_data_io_ascii import write_file_warnings + +from 
lib_data_analysis import analyze_hydrograph_datasets, analyze_time_info + +# Debug +# import matplotlib.pylab as plt +###################################################################################### + + +# ------------------------------------------------------------------------------------- +# Class DriverDynamic +class DriverDynamic: + + # ------------------------------------------------------------------------------------- + # Initialize class + def __init__(self, time_now, time_exec, time_run, src_dict, anc_dict, dest_dict, static_data_collection, + alg_ancillary=None, alg_template_tags=None, + tag_section_data='section_data', tag_execution_data='execution_data', + tag_src_reference_start='run_reference_start', tag_src_reference_end='run_reference_end', + tag_anc_source='run_source', tag_anc_analysis='run_analysis', tag_anc_destination='run_destination', + tag_dest_summary='run_summary', tag_dest_warnings='run_warnings', + tag_section_name='section_name', tag_basin_name='basin_name', + flag_cleaning_dynamic_source=True, flag_cleaning_dynamic_analysis=True, + flag_cleaning_dynamic_destination=True, flag_cleaning_dynamic_tmp=True, string_sep=':'): + + self.time_now = time_now + self.time_exec = time_exec + self.time_run = time_run + + self.tag_section_data = tag_section_data + self.tag_execution_data = tag_execution_data + + self.tag_src_reference_start = tag_src_reference_start + self.tag_src_reference_end = tag_src_reference_end + self.tag_anc_source = tag_anc_source + self.tag_anc_analysis = tag_anc_analysis + self.tag_anc_destination = tag_anc_destination + self.tag_dest_summary = tag_dest_summary + self.tag_dest_warnings = tag_dest_warnings + + self.tag_section_name = tag_section_name + self.tag_basin_name = tag_basin_name + self.alg_ancillary = alg_ancillary + self.alg_template_tags = alg_template_tags + + self.string_sep = string_sep + + self.section_data_collection = static_data_collection[self.tag_section_data] + self.execution_data_collection = static_data_collection[self.tag_execution_data] + + self.domain_name = self.alg_ancillary['domain_name'] + + self.file_name_tag = 'file_name' + self.folder_name_tag = 'folder_name' + + self.section_name_list = self.collect_section_list( + columns_tag_in=['section_name'], columns_tag_out=[self.tag_section_name])[self.tag_section_name] + self.basin_name_list = self.collect_section_list( + columns_tag_in=['section_domain'], columns_tag_out=[self.tag_basin_name])[self.tag_basin_name] + + self.outlet_name_list = [self.string_sep.join( + [b.lower(), s.lower()]) for b, s in zip(self.basin_name_list, self.section_name_list)] + + self.folder_name_src_ref_start = src_dict[self.tag_src_reference_start][self.folder_name_tag] + self.file_name_src_ref_start = src_dict[self.tag_src_reference_start][self.file_name_tag] + self.folder_name_src_ref_end = src_dict[self.tag_src_reference_end][self.folder_name_tag] + self.file_name_src_ref_end = src_dict[self.tag_src_reference_end][self.file_name_tag] + + self.folder_name_anc_source = anc_dict[self.tag_anc_source][self.folder_name_tag] + self.file_name_anc_source = anc_dict[self.tag_anc_source][self.file_name_tag] + self.folder_name_anc_analysis = anc_dict[self.tag_anc_analysis][self.folder_name_tag] + self.file_name_anc_analysis = anc_dict[self.tag_anc_analysis][self.file_name_tag] + self.folder_name_anc_destination = anc_dict[self.tag_anc_destination][self.folder_name_tag] + self.file_name_anc_destination = anc_dict[self.tag_anc_destination][self.file_name_tag] + + self.folder_name_dest_summary = 
dest_dict[self.tag_dest_summary][self.folder_name_tag] + self.file_name_dest_summary = dest_dict[self.tag_dest_summary][self.file_name_tag] + self.folder_name_dest_warnings = dest_dict[self.tag_dest_warnings][self.folder_name_tag] + self.file_name_dest_warnings = dest_dict[self.tag_dest_warnings][self.file_name_tag] + + self.exec_name_list = list(self.execution_data_collection.index) + self.exec_values_list = list(self.execution_data_collection.values) + self.exec_type_list = list(self.execution_data_collection.columns) + + file_collections_src_ref_start = {} + file_collections_src_ref_end = {} + for exec_name, exec_fields in zip(self.exec_name_list, self.exec_values_list): + + file_extra_variables = {'domain_name': self.domain_name} + for exec_type_step, exec_field_step in zip(self.exec_type_list, exec_fields): + file_extra_variables[exec_type_step] = exec_field_step + file_extra_collections = {self.tag_basin_name: self.basin_name_list, self.tag_section_name: self.section_name_list} + + file_list_src_ref_start = self.define_file_string( + None, os.path.join(self.folder_name_src_ref_start, self.file_name_src_ref_start), + file_extra_variables=file_extra_variables, file_extra_collections=file_extra_collections) + + file_list_src_ref_end = self.define_file_string( + None, os.path.join(self.folder_name_src_ref_end, self.file_name_src_ref_end), + file_extra_variables=file_extra_variables, file_extra_collections=file_extra_collections) + + file_collections_src_ref_start[exec_name] = pd.Series(file_list_src_ref_start).drop_duplicates().tolist() + file_collections_src_ref_end[exec_name] = pd.Series(file_list_src_ref_end).drop_duplicates().tolist() + + self.file_collections_src_ref_start = file_collections_src_ref_start + self.file_collections_src_ref_end = file_collections_src_ref_end + + self.file_path_anc_source = self.define_file_string( + self.time_run, os.path.join(self.folder_name_anc_source, self.file_name_anc_source), + file_extra_variables=None, file_extra_collections=None) + self.file_path_anc_analysis = self.define_file_string( + self.time_run, os.path.join(self.folder_name_anc_analysis, self.file_name_anc_analysis), + file_extra_variables=None, file_extra_collections=None) + self.file_path_anc_destination = self.define_file_string( + self.time_run, os.path.join(self.folder_name_anc_destination, self.file_name_anc_destination), + file_extra_variables=None, file_extra_collections=None) + + self.file_path_dest_summary = self.define_file_string( + self.time_run, os.path.join(self.folder_name_dest_summary, self.file_name_dest_summary), + file_extra_variables=None, file_extra_collections=None) + self.file_path_dest_warnings = self.define_file_string( + self.time_run, os.path.join(self.folder_name_dest_warnings, self.file_name_dest_warnings), + file_extra_variables=None, file_extra_collections=None) + + self.flag_cleaning_dynamic_source = flag_cleaning_dynamic_source + self.flag_cleaning_dynamic_analysis = flag_cleaning_dynamic_analysis + self.flag_cleaning_dynamic_destination = flag_cleaning_dynamic_destination + self.flag_cleaning_dynamic_tmp = flag_cleaning_dynamic_tmp + + self.tag_time_period = 'time_period' + self.tag_time_frequency = 'time_frequency' + self.tag_time_rounding = 'time_rounding' + self.tag_run_description = 'run_description' + self.tag_run_var_sim = 'run_variable_sim' + self.tag_run_var_obs = 'run_variable_obs' + + self.tag_run_ref_start = 'run_start' + self.tag_run_ref_end = 'run_end' + self.tag_run_ref_elapsed = 'run_elapsed' + self.tag_run_datasets_section = 
'run_datasets_section' + self.tag_run_datasets_attrs = 'run_datasets_attrs' + self.tag_run_n = 'run_n' + self.tag_section_n = 'section_n' + self.tag_run_type = 'run_type' + + self.tag_summary_alert_value = 'alert_value' + self.tag_summary_alert_index = 'alert_index' + self.tag_summary_alert_section = 'alert_section' + self.tag_summary_alert_thr = 'alert_thr' + self.tag_summary_alarm_value = 'alarm_value' + self.tag_summary_alarm_index = 'alarm_index' + self.tag_summary_alarm_section = 'alarm_section' + self.tag_summary_alarm_thr = 'alarm_thr' + self.tag_summary_run_type = 'run_type' + + self.warnings_structure = { + 'alert_datasets': { + 'key_ref': self.tag_summary_alert_value, + 'key_list': [self.tag_summary_alert_value, self.tag_summary_alert_index, + self.tag_summary_alert_thr, self.tag_summary_run_type] + }, + 'alarm_datasets': { + 'key_ref': self.tag_summary_alert_value, + 'key_list': [self.tag_summary_alarm_value, self.tag_summary_alarm_index, + self.tag_summary_alarm_thr, self.tag_summary_run_type]} + } + + self.tag_info = 'info' + self.tag_data = 'data' + self.tag_warnings = 'warnings' + self.tag_time_ref_start = 'time_start' + self.tag_time_ref_end = 'time_end' + + self.tag_file_time_create = 'time_create' + # ------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Method to define filename + def define_file_string(self, time, file_path_raw, + file_extra_variables=None, file_extra_collections=None): + + if file_extra_variables is None: + file_extra_variables = {} + if file_extra_collections is None: + file_extra_collections = None + + alg_template_tags = self.alg_template_tags + + alg_template_values_raw = { + 'source_datetime': time, + 'source_sub_path_time': time, + 'ancillary_datetime': time, + 'ancillary_sub_path_time': time, + 'destination_datetime': time, + 'destination_sub_path_time': time, + 'run_name': None, + 'run_sub_path_datasets': None, + 'run_sub_path_execution': None, + 'domain_name': None, + 'section_name': None, + 'basin_name': None + } + + if file_extra_collections is not None: + file_n = None + file_values_list = [] + for id_values, file_values in enumerate(file_extra_collections.values()): + if file_n is None: + file_n = file_values.__len__() + file_values_list.append(file_values) + file_keys_list = [] + for id_key, file_key in enumerate(file_extra_collections.keys()): + file_keys_list.append([file_key] * file_n) + + file_collections = {} + for file_id, (file_keys_step, file_values_step) in enumerate(zip(file_keys_list, file_values_list)): + for id, (key_step, value_step) in enumerate(zip(file_keys_step, file_values_step)): + if id not in list(file_collections.keys()): + file_collections[id] = [key_step, value_step] + else: + list_tmp = file_collections[id] + list_tmp.extend([key_step, value_step]) + file_collections[id] = list_tmp + + file_path_obj = [] + for file_fields in file_collections.values(): + file_keys = file_fields[0::2] + file_values = file_fields[1::2] + + alg_template_values_def = deepcopy(alg_template_values_raw) + for file_key, file_value in zip(file_keys, file_values): + alg_template_values_def[file_key] = file_value + + alg_template_values = {**alg_template_values_def, **file_extra_variables} + + file_path_def = fill_tags2string(file_path_raw, alg_template_tags, alg_template_values) + file_path_obj.append(file_path_def) + else: + + file_path_obj = fill_tags2string(file_path_raw, alg_template_tags, 
alg_template_values_raw) + + return file_path_obj + + # ------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Method to collect section information + def collect_section_list(self, columns_tag_in=None, columns_tag_out=None): + + if columns_tag_in is None: + columns_tag_in = ['section_domain', 'section_name'] + if columns_tag_out is None: + columns_tag_out = ['section_domain', 'section_name'] + + section_data_collection = self.section_data_collection + + section_dict = {} + for columns_in_step, columns_out_step in zip(columns_tag_in, columns_tag_out): + section_data_step = section_data_collection[columns_in_step].values.tolist() + section_dict[columns_out_step] = section_data_step + return section_dict + + # ------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Method to clean dynamic tmp + def clean_dynamic_tmp(self): + + flag_cleaning_tmp = self.flag_cleaning_dynamic_tmp + + file_path_anc_source = self.file_path_anc_source + file_path_anc_analysis = self.file_path_anc_analysis + file_path_anc_destination = self.file_path_anc_destination + + file_path_anc_list = [file_path_anc_source, file_path_anc_analysis, file_path_anc_destination] + if flag_cleaning_tmp: + for file_path_anc_step in file_path_anc_list: + if os.path.exists(file_path_anc_step): + os.remove(file_path_anc_step) + + # ------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Method to dump dynamic data + def dump_dynamic_data(self, analysis_datasets_collections, analysis_warnings_collections): + + time_exec = self.time_exec + time_run = self.time_run + + exec_name_list = self.exec_name_list + outlet_name_list = self.outlet_name_list + exec_collections = self.execution_data_collection + + warnings_structure = self.warnings_structure + + file_path_anc = self.file_path_anc_destination + file_path_dest_summary = self.file_path_dest_summary + file_path_dest_warnings = self.file_path_dest_warnings + + flag_clean_anc = self.flag_cleaning_dynamic_destination + + logging.info(' ---> Dump dynamic datasets [' + str(time_run) + '] ... ') + + if flag_clean_anc: + if os.path.exists(file_path_anc): + os.remove(file_path_anc) + + summary_collections = {} + for exec_name_step in exec_name_list: + + logging.info(' ----> Execution ' + exec_name_step + ' ... ') + summary_collections[exec_name_step] = {} + + attrs_exec = exec_collections[exec_collections.index == exec_name_step].to_dict('r')[0] + + data_analysis = analysis_datasets_collections[exec_name_step][self.tag_run_datasets_section] + + if data_analysis is not None: + + attrs_analysis = analysis_datasets_collections[exec_name_step][self.tag_run_datasets_attrs] + attrs_analysis[self.tag_run_ref_start] = analysis_datasets_collections[exec_name_step][self.tag_run_ref_start] + attrs_analysis[self.tag_run_ref_end] = analysis_datasets_collections[exec_name_step][self.tag_run_ref_end] + attrs_analysis[self.tag_run_ref_elapsed] = analysis_datasets_collections[exec_name_step][self.tag_run_ref_elapsed] + + summary_datasets = {} + for section_name, section_data in data_analysis.items(): + + logging.info(' -----> Section ' + section_name + ' ... 
') + + attrs_datasets = {**attrs_analysis, **attrs_exec} + + for time_step, data_step in section_data.items(): + + logging.info(' ------> Time ' + str(time_step) + ' ... ') + + max_alert_value = data_step[self.tag_summary_alert_value] + max_alert_idx = data_step[self.tag_summary_alert_index] + max_alarm_value = data_step[self.tag_summary_alarm_value] + max_alarm_idx = data_step[self.tag_summary_alarm_index] + thr_alert_value = data_step[self.tag_summary_alert_thr] + thr_alarm_value = data_step[self.tag_summary_alarm_thr] + + if time_step not in list(summary_datasets.keys()): + summary_datasets[time_step] = {} + + summary_datasets[time_step][self.tag_summary_alert_value] = [max_alert_value] + summary_datasets[time_step][self.tag_summary_alert_index] = [max_alert_idx] + summary_datasets[time_step][self.tag_summary_alert_section] = [section_name] + summary_datasets[time_step][self.tag_summary_alert_thr] = [thr_alert_value] + + summary_datasets[time_step][self.tag_summary_alarm_value] = [max_alarm_value] + summary_datasets[time_step][self.tag_summary_alarm_index] = [max_alarm_idx] + summary_datasets[time_step][self.tag_summary_alarm_section] = [section_name] + summary_datasets[time_step][self.tag_summary_alarm_thr] = [thr_alarm_value] + + summary_datasets[time_step][self.tag_summary_run_type] = [exec_name_step] + + else: + + tmp_datasets = deepcopy(summary_datasets[time_step]) + + tmp_alert_value = tmp_datasets[self.tag_summary_alert_value] + tmp_alert_idx = tmp_datasets[self.tag_summary_alert_index] + tmp_alert_section = tmp_datasets[self.tag_summary_alert_section] + tmp_alert_thr = tmp_datasets[self.tag_summary_alert_thr] + tmp_alert_value.append(max_alert_value) + tmp_alert_idx.append(max_alert_idx) + tmp_alert_section.append(section_name) + tmp_alert_thr.append(thr_alert_value) + + summary_datasets[time_step][self.tag_summary_alert_value] = tmp_alert_value + summary_datasets[time_step][self.tag_summary_alert_index] = tmp_alert_idx + summary_datasets[time_step][self.tag_summary_alert_section] = tmp_alert_section + summary_datasets[time_step][self.tag_summary_alert_thr] = tmp_alert_thr + + tmp_alarm_value = tmp_datasets[self.tag_summary_alarm_value] + tmp_alarm_idx = tmp_datasets[self.tag_summary_alarm_index] + tmp_alarm_section = tmp_datasets[self.tag_summary_alarm_section] + tmp_alarm_thr = tmp_datasets[self.tag_summary_alarm_thr] + tmp_alarm_value.append(max_alarm_value) + tmp_alarm_idx.append(max_alarm_idx) + tmp_alarm_section.append(section_name) + tmp_alarm_thr.append(thr_alarm_value) + + summary_datasets[time_step][self.tag_summary_alarm_value] = tmp_alarm_value + summary_datasets[time_step][self.tag_summary_alarm_index] = tmp_alarm_idx + summary_datasets[time_step][self.tag_summary_alarm_section] = tmp_alarm_section + summary_datasets[time_step][self.tag_summary_alarm_thr] = tmp_alarm_thr + + tmp_run_type = tmp_datasets[self.tag_summary_run_type] + tmp_run_type.append(exec_name_step) + summary_datasets[time_step][self.tag_summary_run_type] = tmp_run_type + + logging.info(' ------> Time ' + str(time_step) + ' ... DONE') + + logging.info(' -----> Section ' + section_name + ' ... DONE') + + else: + summary_datasets = None + attrs_datasets = deepcopy(attrs_exec) + + summary_collections[exec_name_step][self.tag_data] = {} + summary_collections[exec_name_step][self.tag_data] = summary_datasets + summary_collections[exec_name_step][self.tag_info] = {} + summary_collections[exec_name_step][self.tag_info] = attrs_datasets + + logging.info(' ----> Execution ' + exec_name_step + ' ... 
DONE') + + # Save summary bulletin + folder_name_dest_summary, file_name_dest_summary = os.path.split(file_path_dest_summary) + make_folder(folder_name_dest_summary) + logging.info(' ----> Save summary bulletin ' + file_name_dest_summary + ' ... ') + + write_file_summary(time_run, time_exec, time_format='%Y-%m-%d %H:%M', html_name=file_path_dest_summary, + run_name=exec_name_list, run_summary=summary_collections, + tag_summary_data=self.tag_data, tag_summary_info=self.tag_info, + tag_alert_section=self.tag_summary_alert_section, + tag_alert_value=self.tag_summary_alert_value, + tag_alert_index=self.tag_summary_alert_index, + tag_alarm_section=self.tag_summary_alarm_section, + tag_alarm_value=self.tag_summary_alarm_value, + tag_alarm_index=self.tag_summary_alarm_index) + + logging.info(' ----> Save summary bulletin ' + file_name_dest_summary + ' ... DONE') + + # Save warnings bulletin + warnings_collections = {} + for warnings_key, warnings_data in analysis_warnings_collections.items(): + warnings_collections[warnings_key] = {} + + for summary_key, summary_fields in warnings_structure.items(): + + key_ref = summary_fields['key_ref'] + key_list = summary_fields['key_list'] + + values_ref = warnings_data[key_ref] + index_ref = int(values_ref.index(max(values_ref))) + + warnings_collections[warnings_key][summary_key] = {} + for key_step in key_list: + value_step = warnings_data[key_step][index_ref] + warnings_collections[warnings_key][summary_key][key_step] = value_step + + folder_name_dest_warnings, file_name_dest_warnings = os.path.split(file_path_dest_warnings) + make_folder(folder_name_dest_warnings) + + logging.info(' ----> Save warnings bulletin ' + file_name_dest_warnings + ' ... ') + + write_file_warnings(file_path_dest_warnings, warnings_collections, warnings_structure) + + logging.info(' ----> Save warnings bulletin ' + file_name_dest_warnings + ' ... DONE') + + logging.info(' ---> Dump dynamic datasets [' + str(time_run) + '] ... DONE') + + # ------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Method to define time period search + @staticmethod + def define_time_search(time_step, time_period=1, time_frequency='H', time_rounding='H', time_reverse=False): + + time_step = time_step.floor(time_rounding) + time_range = pd.date_range(end=time_step, periods=time_period, freq=time_frequency) + + if time_reverse: + time_range = time_range[::-1] + + return time_range + # ------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Method to analyze dynamic data + def analyze_dynamic_data(self, file_workspace): + + time = self.time_run + exec_name_list = self.exec_name_list + + exec_collections = self.execution_data_collection + + file_path_anc = self.file_path_anc_analysis + flag_clean_anc = self.flag_cleaning_dynamic_analysis + + logging.info(' ---> Analyze dynamic datasets [' + str(time) + '] ... ') + + if flag_clean_anc: + if os.path.exists(file_path_anc): + os.remove(file_path_anc) + + if not os.path.exists(file_path_anc): + + analyze_warnings_collections = {} + analyze_datasets_collections = {} + for exec_name_step in exec_name_list: + + logging.info(' ----> Execution ' + exec_name_step + ' ... 
') + + exec_data = exec_collections[exec_collections.index == exec_name_step] + exec_run_var_sim = exec_data[self.tag_run_var_sim].values[0] + exec_run_var_obs = exec_data[self.tag_run_var_obs].values[0] + + analyze_datasets_collections[exec_name_step] = {} + if exec_name_step in list(file_workspace.keys()): + + exec_workspace = file_workspace[exec_name_step] + + exec_info_collection = deepcopy(exec_workspace[self.tag_info]) + exec_data_collection = deepcopy(exec_workspace[self.tag_data]) + + if (exec_info_collection[self.tag_run_ref_start] is not None) and \ + (exec_info_collection[self.tag_run_ref_end] is not None): + + analysis_time_creation_start, analysis_time_creation_end, \ + analysis_time_creation_elapsed = analyze_time_info( + exec_info_collection[self.tag_run_ref_start], + exec_info_collection[self.tag_run_ref_end], + tag_file_create=self.tag_file_time_create) + + analysis_datasets_section, analysis_warnings_section, \ + attrs_datasets_section = analyze_hydrograph_datasets( + exec_name_step, exec_data_collection, + tag_discharge_observed=exec_run_var_obs, tag_discharge_simulated=exec_run_var_sim, + tag_discharge_thr_alert=self.tag_summary_alert_thr, + tag_discharge_thr_alarm=self.tag_summary_alarm_thr, + tag_discharge_max_alert_value=self.tag_summary_alert_value, + tag_discharge_max_alert_index=self.tag_summary_alert_index, + tag_discharge_max_alarm_value=self.tag_summary_alarm_value, + tag_discharge_max_alarm_index=self.tag_summary_alarm_index, + tag_run_n=self.tag_run_n, tag_section_n=self.tag_section_n) + + logging.info(' ----> Execution ' + exec_name_step + ' ... DONE') + else: + analysis_time_creation_start = None + analysis_time_creation_end = None + analysis_time_creation_elapsed = None + analysis_datasets_section = None + analysis_warnings_section = None + attrs_datasets_section = None + logging.info(' ----> Execution ' + exec_name_step + ' ... SKIPPED. Execution datasets are null') + else: + analysis_time_creation_start = None + analysis_time_creation_end = None + analysis_time_creation_elapsed = None + analysis_datasets_section = None + analysis_warnings_section = None + attrs_datasets_section = None + logging.info(' ----> Execution ' + exec_name_step + ' ... SKIPPED. 
Execution not available') + + analyze_datasets_collections[exec_name_step][self.tag_run_ref_start] = analysis_time_creation_start + analyze_datasets_collections[exec_name_step][self.tag_run_ref_end] = analysis_time_creation_end + analyze_datasets_collections[exec_name_step][self.tag_run_ref_elapsed] = analysis_time_creation_elapsed + analyze_datasets_collections[exec_name_step][self.tag_run_datasets_section] = analysis_datasets_section + analyze_datasets_collections[exec_name_step][self.tag_run_datasets_attrs] = attrs_datasets_section + + if analysis_warnings_section is not None: + for warnings_key, warnings_data in analysis_warnings_section.items(): + + if warnings_key not in list(analyze_warnings_collections.keys()): + analyze_warnings_collections[warnings_key] = {} + + for var_key, var_value in warnings_data.items(): + if var_key not in list(analyze_warnings_collections[warnings_key].keys()): + analyze_warnings_collections[warnings_key][var_key] = var_value + else: + warning_value_tmp = analyze_warnings_collections[warnings_key][var_key] + warning_value_tmp.extend(var_value) + analyze_warnings_collections[warnings_key][var_key] = warning_value_tmp + + folder_name_anc, file_name_anc = os.path.split(file_path_anc) + make_folder(folder_name_anc) + + analyze_collections = {self.tag_data: analyze_datasets_collections, + self.tag_warnings: analyze_warnings_collections} + + write_obj(file_path_anc, analyze_collections) + + else: + + analyze_collections = read_obj(file_path_anc) + analyze_datasets_collections = analyze_collections[self.tag_data] + analyze_warnings_collections = analyze_collections[self.tag_warnings] + + logging.info(' ---> Analyze dynamic datasets [' + str(time) + '] ... DONE') + + return analyze_datasets_collections, analyze_warnings_collections + + # ------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Method to organize dynamic data + def organize_dynamic_data(self): + + time = self.time_run + + outlet_name_list = self.outlet_name_list + exec_name_list = self.exec_name_list + + exec_collections = self.execution_data_collection + + file_collections_src_ref_start = self.file_collections_src_ref_start + file_collections_src_ref_end = self.file_collections_src_ref_end + + file_path_anc = self.file_path_anc_source + flag_clean_anc = self.flag_cleaning_dynamic_source + + logging.info(' ---> Organize dynamic datasets [' + str(time) + '] ... ') + + if flag_clean_anc: + if os.path.exists(file_path_anc): + os.remove(file_path_anc) + + if not os.path.exists(file_path_anc): + + file_workspace = {} + for exec_name_step in exec_name_list: + + logging.info(' ----> Execution ' + exec_name_step + ' ... 
') + + file_path_src_ref_start = file_collections_src_ref_start[exec_name_step] + file_path_src_ref_end = file_collections_src_ref_end[exec_name_step] + + exec_data = exec_collections[exec_collections.index == exec_name_step] + exec_time_period = exec_data[self.tag_time_period].values[0] + exec_time_frequency = exec_data[self.tag_time_frequency].values[0] + exec_time_rounding = exec_data[self.tag_time_rounding].values[0] + + time_search = self.define_time_search(time, + time_period=exec_time_period, time_frequency=exec_time_frequency, + time_rounding=exec_time_rounding, time_reverse=True) + + file_workspace[exec_name_step] = {} + file_workspace[exec_name_step][self.tag_info] = {} + file_workspace[exec_name_step][self.tag_data] = {} + file_workspace[exec_name_step][self.tag_info][self.tag_run_ref_start] = None + file_workspace[exec_name_step][self.tag_info][self.tag_time_ref_start] = None + file_workspace[exec_name_step][self.tag_info][self.tag_run_ref_end] = None + file_workspace[exec_name_step][self.tag_info][self.tag_time_ref_end] = None + file_workspace[exec_name_step][self.tag_data] = None + + for time_step in time_search: + + logging.info(' -----> Time ' + str(time_step) + ' ... ') + + logging.info(' ------> Reference run_start datasets ... ') + if file_workspace[exec_name_step]['info']['run_start'] is None: + if file_path_src_ref_start.__len__() == 1: + + file_path_src_start_raw = file_path_src_ref_start[0] + file_path_src_start_def = self.define_file_string(time_step, file_path_src_start_raw) + + if file_path_src_start_def.endswith('.x'): + if os.path.exists(file_path_src_start_def): + file_info_start = read_file_execution_info(file_path_src_start_def) + else: + file_info_start = None + else: + logging.error(' ------> Reference run_start datasets ... FAILED') + raise NotImplementedError('Case not implemented in source reference type start') + + file_check_start = all(elem is None for elem in list([file_info_start])) + if file_check_start: + file_insert_start = False + logging.info(' ------> Reference run_start datasets ... SKIPPED. Datasets not defined') + else: + file_insert_start = True + logging.info(' ------> Reference run_start datasets ... DONE') + else: + logging.error(' ------> Reference run_start datasets ... FAILED') + raise NotImplementedError('Case not implemented in source reference file start') + else: + logging.info(' ------> Reference run_start datasets ... SKIPPED. Datasets already selected') + file_insert_start = False + + logging.info(' ------> Reference run_end datasets ... ') + if file_workspace[exec_name_step]['info']['run_end'] is None: + if file_path_src_ref_end.__len__() == 1: + logging.error(' ------> Reference run_end datasets ... 
FAILED') + raise NotImplementedError('Case not implemented in source reference file end') + elif file_path_src_ref_end.__len__() > 1: + + if file_path_src_ref_end[0].endswith('.json'): + + file_info_end = {} + file_data_end = {} + for outlet_name_step, file_path_src_end_raw in zip(outlet_name_list, + file_path_src_ref_end): + + file_path_src_end_def = self.define_file_string(time_step, file_path_src_end_raw) + + if os.path.exists(file_path_src_end_def): + file_info_src_ref_end = read_file_hydrograph_info(file_path_src_end_def) + file_data_src_ref_end = read_file_hydrograph_ts(file_path_src_end_def) + else: + file_info_src_ref_end = None + file_data_src_ref_end = None + + file_info_end[outlet_name_step] = file_info_src_ref_end + file_data_end[outlet_name_step] = file_data_src_ref_end + + file_check_end = all(elem is None for elem in list(file_info_end.values())) + if file_check_end: + file_insert_end = False + logging.info(' ------> Reference run_end datasets ... SKIPPED. Datasets not defined') + else: + file_insert_end = True + logging.info(' ------> Reference run_end datasets ... DONE') + else: + logging.error(' ------> Reference run_end datasets ... FAILED') + raise NotImplementedError('Case not implemented in source reference type end') + else: + logging.error(' ------> Reference run_end datasets ... FAILED') + raise NotImplementedError('Case not implemented in source reference file end') + else: + logging.info(' ------> Reference run_end datasets ... SKIPPED. Datasets already selected') + file_insert_end = False + + # Store reference source information and datasets + if file_insert_start: + file_workspace[exec_name_step][self.tag_info][self.tag_run_ref_start] = file_info_start + file_workspace[exec_name_step][self.tag_info][self.tag_time_ref_start] = time_step + elif file_insert_end: + file_workspace[exec_name_step][self.tag_info][self.tag_run_ref_end] = file_info_end + file_workspace[exec_name_step][self.tag_info][self.tag_time_ref_end] = time_step + file_workspace[exec_name_step][self.tag_data] = file_data_end + + logging.info(' -----> Time ' + str(time_step) + ' ... DONE') + + logging.info(' ----> Execution ' + exec_name_step + ' ... DONE') + + folder_name_anc, file_name_anc = os.path.split(file_path_anc) + make_folder(folder_name_anc) + + write_obj(file_path_anc, file_workspace) + + else: + + file_workspace = read_obj(file_path_anc) + + logging.info(' ---> Organize dynamic datasets [' + str(time) + '] ... 
DONE') + + return file_workspace + + # ------------------------------------------------------------------------------------- + +# ------------------------------------------------------------------------------------- diff --git a/apps/Analyzer_Execution/driver_data_io_static.py b/apps/Analyzer_Execution/driver_data_io_static.py new file mode 100755 index 0000000..57b1cae --- /dev/null +++ b/apps/Analyzer_Execution/driver_data_io_static.py @@ -0,0 +1,83 @@ +""" +Class Features + +Name: driver_data_io_static +Author(s): Fabio Delogu (fabio.delogu@cimafoundation.org) +Date: '20210225' +Version: '1.0.0' +""" + +###################################################################################### +# Library +import logging +import os + +from lib_data_io_shapefile import read_file_section_data +from lib_utils_exec import read_file_execution_data + +# Debug +# import matplotlib.pylab as plt +###################################################################################### + + +# ------------------------------------------------------------------------------------- +# Class DriverStatic +class DriverStatic: + + # ------------------------------------------------------------------------------------- + # Initialize class + def __init__(self, src_dict, dst_dict=None, + alg_ancillary=None, alg_template_tags=None, + flag_section_data='section_data', flag_execution_data='execution_data', + flag_domain_collections='domain_collection', + flag_cleaning_static=True): + + self.src_dict = src_dict + + self.flag_section_data = flag_section_data + self.flag_execution_data = flag_execution_data + + self.flag_domain_collections = flag_domain_collections + + self.alg_ancillary = alg_ancillary + + self.alg_template_tags = alg_template_tags + self.file_name_tag = 'file_name' + self.folder_name_tag = 'folder_name' + + self.folder_name_section = self.src_dict[self.flag_section_data][self.folder_name_tag] + self.file_name_section = self.src_dict[self.flag_section_data][self.file_name_tag] + self.file_path_section = os.path.join(self.folder_name_section, self.file_name_section) + + self.execution_data = self.src_dict[self.flag_execution_data] + + self.flag_cleaning_static = flag_cleaning_static + + # ------------------------------------------------------------------------------------- + + # ------------------------------------------------------------------------------------- + # Method to organize geographical data + def organize_static(self): + + # Info start + logging.info(' ---> Organize static datasets ... ') + + # Data collection object + dframe_collections = {} + + # Read section dataset + dframe_section = read_file_section_data(self.file_path_section) + # Read execution dataset + dframe_execution = read_file_execution_data(self.execution_data) + + # Collect datasets in a common object + dframe_collections[self.flag_section_data] = dframe_section + dframe_collections[self.flag_execution_data] = dframe_execution + + # Info end + logging.info(' ---> Organize static datasets ... 
DONE') + + return dframe_collections + # ------------------------------------------------------------------------------------- + +# ------------------------------------------------------------------------------------- diff --git a/apps/Analyzer_Execution/hat_runanalyzer_hmc_configuration_example.json b/apps/Analyzer_Execution/hat_runanalyzer_hmc_configuration_example.json new file mode 100755 index 0000000..10d301f --- /dev/null +++ b/apps/Analyzer_Execution/hat_runanalyzer_hmc_configuration_example.json @@ -0,0 +1,116 @@ +{ + "algorithm":{ + "flags": { + "cleaning_dynamic_source": false, + "cleaning_dynamic_analysis": false, + "cleaning_dynamic_destination": true, + "cleaning_dynamic_tmp": true + }, + "template": { + "source_datetime": "%Y%m%d%H%M", + "source_sub_path_time": "%Y/%m/%d/%H", + "run_sub_path_datasets": "string_sub_path_datasets", + "run_sub_path_execution": "string_sub_path_execution", + "ancillary_datetime": "%Y%m%d%H%M", + "ancillary_sub_path_time": "%Y/%m/%d/%H", + "destination_datetime": "%Y%m%d%H%M", + "destination_sub_path_time": "%Y/%m/%d/", + "run_name": "string_name", + "section_name": "string_section_name", + "basin_name": "string_basin_name", + "domain_name": "string_domain_name" + }, + "ancillary": { + "domain_name" : "marche" + }, + "general": { + "title": "HAT - Run analyzer to control the execution of the HMC model", + "web-site": "", + "source": "Python library developed by CIMA Research Foundation", + "history": "1.5.0 [20210225]", + "project-info": "HAT - Hydrological Analysis Tool", + "algorithm": "Analysis tools developed by CIMA Research Foundation" + } + }, + "time": { + "time_now": null, + "time_period": 36, + "time_frequency": "H", + "time_rounding": "H" + }, + "data": { + "static": { + "section_data": { + "folder_name": "/home/fabio/Documents/Work_Area/Code_Development/Workspace/PyCharm_Workspace/hat-ws/data/data_static/shapefile/", + "file_name": "fp_sections_marche.shp" + }, + "execution_data": { + "id_01": { + "features": {"run_name": "weather_stations_realtime", "run_sub_path_execution": "deterministic", "run_description": "RUN OSSERVATO", + "run_sub_path_datasets": "collections/", "run_variable_sim": "discharge_simulated", "run_variable_obs": "discharge_observed" }, + "time": {"time_period": 2, "time_frequency": "H", "time_rounding": "H"} + }, + "id_02": { + "features": {"run_name": "nwp_ecmwf0100_realtime", "run_sub_path_execution": "deterministic", "run_description": "ECMWF DETERMINISTICO", + "run_sub_path_datasets": "collections/", "run_variable_sim": "discharge_simulated", "run_variable_obs": "discharge_observed" }, + "time": {"time_period": 24, "time_frequency": "H", "time_rounding": "H"} + }, + "id_03": { + "features": {"run_name": "rfarm_ecmwf0100_realtime", "run_sub_path_execution": "probabilistic_001", "run_description": "ECMWF PROBABILISTICO", + "run_sub_path_datasets": "probabilistic_ensemble/collections/", "run_variable_sim": "discharge_simulated_{:}", "run_variable_obs": "discharge_observed"}, + "time": {"time_period": 24, "time_frequency": "H", "time_rounding": "H"} + }, + "id_04": { + "features": {"run_name": "nwp_lami-2i_realtime", "run_sub_path_execution": "deterministic", "run_description": "LAMI DETERMINISTICO", + "run_sub_path_datasets": "collections/", "run_variable_sim": "discharge_simulated", "run_variable_obs": "discharge_observed"}, + "time": {"time_period": 24, "time_frequency": "H", "time_rounding": "H"} + }, + "id_05": { + "features": {"run_name": "rfarm_lami-2i_realtime", "run_sub_path_execution": 
"probabilistic_001","run_description": "LAMI PROBABILISTICO", + "run_sub_path_datasets": "probabilistic_ensemble/collections/", "run_variable_sim": "discharge_simulated_{:}", "run_variable_obs": "discharge_observed"}, + "time": {"time_period": 24, "time_frequency": "H", "time_rounding": "H"} + } + } + }, + "dynamic" : { + "source": { + "run_reference_start": { + "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/run/{run_name}/{run_sub_path_execution}/exec/", + "file_name": "HMC_Model_V3_{run_name}.x" + }, + "run_reference_end": { + "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/archive/{run_name}/{source_sub_path_time}/{run_sub_path_datasets}", + "file_name": "hydrograph_{section_name}_{basin_name}_{source_datetime}.json" + } + }, + "ancillary": { + "run_source": { + "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/archive/run_analyzer/ancillary/{ancillary_sub_path_time}", + "file_name": "run_analyzer_data_source_{ancillary_datetime}.workspace" + }, + "run_analysis": { + "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/archive/run_analyzer/ancillary/{ancillary_sub_path_time}", + "file_name": "run_analyzer_data_analysis_{ancillary_datetime}.workspace" + }, + "run_destination": { + "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/archive/run_analyzer/ancillary/{ancillary_sub_path_time}", + "file_name": "run_analyzer_data_destination_{ancillary_datetime}.workspace" + } + }, + "destination": { + "run_summary": { + "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/archive/run_analyzer/summary/{destination_sub_path_time}", + "file_name": "summary_simulations_marche.html" + }, + "run_warnings": { + "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/archive/run_analyzer/warnings/{destination_sub_path_time}", + "file_name": "warnings_simulations_marche.csv" + } + } + } + }, + "log": { + "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/log/", + "file_name": "hat_runanalyzer_hmc_log.txt" + } +} diff --git a/apps/RunAnalyzer/hat_runanalyzer_hmc_main_realtime.sh b/apps/Analyzer_Execution/hat_runanalyzer_hmc_main_example.sh similarity index 100% rename from apps/RunAnalyzer/hat_runanalyzer_hmc_main_realtime.sh rename to apps/Analyzer_Execution/hat_runanalyzer_hmc_main_example.sh diff --git a/apps/Analyzer_Execution/lib_data_analysis.py b/apps/Analyzer_Execution/lib_data_analysis.py new file mode 100644 index 0000000..468b193 --- /dev/null +++ b/apps/Analyzer_Execution/lib_data_analysis.py @@ -0,0 +1,274 @@ +""" +Library Features: + +Name: lib_data_analysis +Author(s): Fabio Delogu (fabio.delogu@cimafoundation.org) +Date: '20210225' +Version: '1.0.0' +""" +####################################################################################### +# Library +import logging + +import pandas as pd +from copy import deepcopy + +from lib_info_args import logger_name + +# Logging +log_stream = logging.getLogger(logger_name) +####################################################################################### + + +# ------------------------------------------------------------------------------------- +# Method to analyze data hydrograph +def analyze_hydrograph_datasets(run_name, file_datasets, + tag_discharge_observed='discharge_observed', + tag_discharge_simulated='discharge_simulated', + tag_discharge_thr_alert='section_discharge_thr_alert', + tag_discharge_thr_alarm='section_discharge_thr_alarm', + tag_discharge_max_alert_value='discharge_alert_max_value', + tag_discharge_max_alert_index='discharge_alert_max_index', 
+ tag_discharge_max_alarm_value='discharge_alarm_max_value', + tag_discharge_max_alarm_index='discharge_alarm_max_index', + tag_section_n='section_n', tag_run_n='run_n', + tag_run_type='run_type'): + + section_n = file_datasets.__len__() + + analysis_warnings_section = {} + analysis_datasets_section = {} + attrs_ts_collections = None + for section_tag, section_datasets in file_datasets.items(): + + logging.info(' ----> Analyze section ' + section_tag + ' ... ') + + section_dframe = section_datasets[0] + section_attrs = section_datasets[1] + + section_ts_vars = section_attrs['run_var'].split(',') + # data_thr_alert = float(section_attrs[tag_discharge_thr_alert]) + # data_thr_alarm = float(section_attrs[tag_discharge_thr_alarm]) + + # DEBUG + data_thr_alert = 1 + data_thr_alarm = 10 + + section_ts_time = section_dframe.index + section_ts_days = section_ts_time[1:].normalize().unique() + + # Create analysis datasets section + if section_ts_vars.__len__() == 1: + + run_n = 1 + + section_ts_collections = {} + for section_ts_step in section_ts_days: + + section_ts_collections[section_ts_step] = {} + section_ts_collections[section_ts_step][tag_discharge_max_alert_value] = {} + section_ts_collections[section_ts_step][tag_discharge_max_alert_index] = {} + section_ts_collections[section_ts_step][tag_discharge_max_alarm_value] = {} + section_ts_collections[section_ts_step][tag_discharge_max_alarm_index] = {} + + ts_day = section_ts_step.day + ts_month = section_ts_step.month + ts_year = section_ts_step.year + + section_dframe_day = section_dframe.loc[(section_dframe.index.day == ts_day) & + (section_dframe.index.month == ts_month) & + (section_dframe.index.year == ts_year), :] + + section_ts_day_sim = section_dframe_day[tag_discharge_simulated] + section_ts_day_obs = section_dframe_day[tag_discharge_observed] + + section_ts_thr_alert = section_ts_day_sim.where(section_ts_day_sim > data_thr_alert) + section_ts_thr_alarm = section_ts_day_sim.where(section_ts_day_sim > data_thr_alarm) + + section_ts_alert_max_value = section_ts_thr_alert.max(skipna=True) + section_ts_alert_max_index = section_ts_thr_alert.idxmax(skipna=True) + + section_ts_alarm_max_value = section_ts_thr_alarm.max(skipna=True) + section_ts_alarm_max_index = section_ts_thr_alarm.idxmax(skipna=True) + + section_ts_collections[section_ts_step][tag_discharge_max_alert_value] = section_ts_alert_max_value + section_ts_collections[section_ts_step][tag_discharge_max_alert_index] = section_ts_alert_max_index + section_ts_collections[section_ts_step][tag_discharge_max_alarm_value] = section_ts_alarm_max_value + section_ts_collections[section_ts_step][tag_discharge_max_alarm_index] = section_ts_alarm_max_index + section_ts_collections[section_ts_step][tag_discharge_thr_alert] = data_thr_alert + section_ts_collections[section_ts_step][tag_discharge_thr_alarm] = data_thr_alarm + section_ts_collections[section_ts_step][tag_run_type] = run_name + + if attrs_ts_collections is None: + attrs_ts_collections = {tag_run_n: run_n, tag_section_n: section_n, tag_run_type: run_name} + else: + + run_n = section_ts_vars.__len__() + + section_ts_collections = {} + for section_ts_step in section_ts_days: + + section_ts_collections[section_ts_step] = {} + section_ts_collections[section_ts_step][tag_discharge_max_alert_value] = {} + section_ts_collections[section_ts_step][tag_discharge_max_alert_index] = {} + section_ts_collections[section_ts_step][tag_discharge_max_alarm_value] = {} + section_ts_collections[section_ts_step][tag_discharge_max_alarm_index] = {} + 
section_ts_collections[section_ts_step][tag_discharge_thr_alert] = {} + section_ts_collections[section_ts_step][tag_discharge_thr_alarm] = {} + + ts_day = section_ts_step.day + ts_month = section_ts_step.month + ts_year = section_ts_step.year + + section_ts_alert_max_value_list = [] + section_ts_alert_max_index_list = [] + section_ts_alarm_max_value_list = [] + section_ts_alarm_max_index_list = [] + for section_ts_var in section_ts_vars: + + tag_discharge_simulated_var = tag_discharge_simulated.format(section_ts_var) + tag_discharge_observed_var = tag_discharge_observed.format(section_ts_var) + + section_dframe_day = section_dframe.loc[(section_dframe.index.day == ts_day) & + (section_dframe.index.month == ts_month) & + (section_dframe.index.year == ts_year), :] + + section_ts_sim_var = section_dframe_day[tag_discharge_simulated_var] + section_ts_obs_var = section_dframe_day[tag_discharge_observed_var] + + section_ts_thr_alert_var = section_ts_sim_var.where(section_ts_sim_var > data_thr_alert) + section_ts_thr_alarm_var = section_ts_sim_var.where(section_ts_sim_var > data_thr_alarm) + + section_ts_alert_max_value_var = section_ts_thr_alert_var.max(skipna=True) + section_ts_alert_max_index_var = section_ts_thr_alert_var.idxmax(skipna=True) + + section_ts_alarm_max_value_var = section_ts_thr_alarm_var.max(skipna=True) + section_ts_alarm_max_index_var = section_ts_thr_alarm_var.idxmax(skipna=True) + + if section_ts_alert_max_index_var not in section_ts_alert_max_index_list: + section_ts_alert_max_value_list.append(section_ts_alert_max_value_var) + section_ts_alert_max_index_list.append(section_ts_alert_max_index_var) + if section_ts_alarm_max_index_var not in section_ts_alarm_max_index_list: + section_ts_alarm_max_value_list.append(section_ts_alarm_max_value_var) + section_ts_alarm_max_index_list.append(section_ts_alarm_max_index_var) + + section_ts_thr_alert_max = pd.Series([section_ts_alert_max_value_list]) + section_ts_thr_alert_max.index = section_ts_alert_max_index_list + section_ts_thr_alarm_max = pd.Series([section_ts_alarm_max_value_list]) + section_ts_thr_alarm_max.index = section_ts_alarm_max_index_list + + if section_ts_thr_alert_max.shape[0] == 1: + section_ts_alert_max_value = section_ts_thr_alert_max.values[0] + section_ts_alert_max_index = section_ts_thr_alert_max.index[0] + else: + section_ts_alert_max_value = section_ts_thr_alert_max.max(skipna=True) + section_ts_alert_max_index = section_ts_thr_alert_max.idxmax(skipna=True) + + if section_ts_thr_alarm_max.shape[0] == 1: + section_ts_alarm_max_value = section_ts_thr_alarm_max.values[0] + section_ts_alarm_max_index = section_ts_thr_alarm_max.index[0] + else: + section_ts_alarm_max_value = section_ts_thr_alarm_max.max(skipna=True) + section_ts_alarm_max_index = section_ts_thr_alarm_max.idxmax(skipna=True) + + if isinstance(section_ts_alert_max_value, list): + if section_ts_alert_max_value.__len__() == 1: + section_ts_alert_max_value = section_ts_alert_max_value[0] + else: + raise NotImplementedError('Analysis hydrograph format max alert value in unsupported format') + + if isinstance(section_ts_alarm_max_value, list): + if section_ts_alarm_max_value.__len__() == 1: + section_ts_alarm_max_value = section_ts_alarm_max_value[0] + else: + raise NotImplementedError('Analysis hydrograph format max alarm value in unsupported format') + + section_ts_collections[section_ts_step][tag_discharge_max_alert_value] = section_ts_alert_max_value + section_ts_collections[section_ts_step][tag_discharge_max_alert_index] = 
section_ts_alert_max_index + section_ts_collections[section_ts_step][tag_discharge_max_alarm_value] = section_ts_alarm_max_value + section_ts_collections[section_ts_step][tag_discharge_max_alarm_index] = section_ts_alarm_max_index + section_ts_collections[section_ts_step][tag_discharge_thr_alert] = data_thr_alert + section_ts_collections[section_ts_step][tag_discharge_thr_alarm] = data_thr_alarm + section_ts_collections[section_ts_step][tag_run_type] = run_name + + if attrs_ts_collections is None: + attrs_ts_collections = {tag_run_n: run_n, tag_section_n: section_n, tag_run_type: run_name} + + # Create analysis warning section + if section_tag not in list(analysis_warnings_section.keys()): + analysis_warnings_section[section_tag] = {} + for section_time, section_data in section_ts_collections.items(): + for section_key, section_value in section_data.items(): + if section_key in list(analysis_warnings_section[section_tag].keys()): + section_value_tmp = deepcopy(analysis_warnings_section[section_tag][section_key]) + section_value_tmp.append(section_value) + analysis_warnings_section[section_tag][section_key] = section_value_tmp + else: + analysis_warnings_section[section_tag][section_key] = {} + analysis_warnings_section[section_tag][section_key] = [section_value] + + analysis_datasets_section[section_tag] = section_ts_collections + + logging.info(' ----> Analyze section ' + section_tag + ' ... DONE') + + return analysis_datasets_section, analysis_warnings_section, attrs_ts_collections + +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to analyze time information +def analyze_time_info(file_info_start, file_info_end, tag_file_create='file_create'): + + time_creation_start = define_time_creation( + file_info_start, tag_file_create=tag_file_create) + time_creation_end = define_time_creation( + file_info_end, tag_file_create=tag_file_create) + + time_creation_elapsed = time_creation_end[0] - time_creation_start[0] + + if isinstance(time_creation_start, list) and (time_creation_start.__len__() == 1): + time_creation_start = time_creation_start[0] + else: + raise NotImplementedError('Time creation start format not supported yet') + + if isinstance(time_creation_end, list) and (time_creation_end.__len__() == 1): + time_creation_end = time_creation_end[0] + else: + raise NotImplementedError('Time creation end format not supported yet') + + if time_creation_start > time_creation_end: + time_creation_end = None + time_creation_elapsed = None + elif time_creation_start <= time_creation_end: + pass + else: + raise NotImplementedError('Analyze time information case not implemented yet') + + return time_creation_start, time_creation_end, time_creation_elapsed + +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to define file creation +def define_time_creation(file_info, tag_file_create='file_create'): + + if isinstance(file_info, dict): + + if tag_file_create in list(file_info.keys()): + file_time_creation = [file_info[tag_file_create]] + else: + file_time_list = [] + for file_values in file_info.values(): + if tag_file_create in list(file_values.keys()): + file_time_step = file_values[tag_file_create] + file_time_list.append(file_time_step) + + file_time_idx = pd.DatetimeIndex(file_time_list) + file_time_creation = 
[file_time_idx.max()] + else: + raise NotImplementedError('File info obj is not defined by known format') + + return file_time_creation +# ------------------------------------------------------------------------------------- diff --git a/apps/Analyzer_Execution/lib_data_io_ascii.py b/apps/Analyzer_Execution/lib_data_io_ascii.py new file mode 100644 index 0000000..bcb360f --- /dev/null +++ b/apps/Analyzer_Execution/lib_data_io_ascii.py @@ -0,0 +1,120 @@ +""" +Library Features: + +Name: lib_data_io_ascii +Author(s): Fabio Delogu (fabio.delogu@cimafoundation.org) +Date: '20210225' +Version: '1.0.0' +""" +####################################################################################### +# Library +import logging +import os +import time +import json +import difflib + +import numpy as np +import pandas as pd + +from lib_info_args import logger_name + +# Logging +log_stream = logging.getLogger(logger_name) +####################################################################################### + + +# ------------------------------------------------------------------------------------- +# Method to select data warnings +def select_data_warnings(file_dframe, file_section_col, file_section_value, file_section_name, file_fields=None): + + section_dframe_raw = file_dframe.loc[file_dframe[file_section_col] == file_section_name, file_fields] + + section_dframe_unique = section_dframe_raw.drop_duplicates(keep='first') + section_dframe_cols = section_dframe_unique.columns + + section_dict = {} + if section_dframe_unique.shape[0] == 1: + + for section_dframe_col in section_dframe_cols: + section_dframe_val = section_dframe_unique[section_dframe_col].values + section_dict[section_dframe_col] = section_dframe_val + else: + + section_dframe_unique = section_dframe_unique.reset_index() + section_dframe_idx = section_dframe_unique[file_section_value].idxmax() + + if not np.isnan(section_dframe_idx): + section_dframe_select = section_dframe_unique.iloc[section_dframe_idx] + + for section_dframe_col in section_dframe_cols: + section_dframe_val = section_dframe_select[section_dframe_col] + section_dict[section_dframe_col] = section_dframe_val + else: + section_dict = None + + return section_dict +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to write warnings ascii file +def write_file_warnings(file_name, warnings_collections, warnings_structure, + tag_section_name='section_name', + no_time='NA', no_data=-9999.0, no_string='NA', + column_delimiter=','): + + field_type_list = ['float', 'time_stamp', 'float', 'string'] + + warnings_header = None + warnings_datasets = None + for section_name, section_data in warnings_collections.items(): + + field_list = [] + field_values = [] + + field_list.append(tag_section_name) + field_values.append(section_name) + for key, data in section_data.items(): + + for (field_key, field_value), field_type in zip(data.items(), field_type_list): + field_list.append(field_key) + + if field_type == 'float': + if isinstance(field_value, (int, float)): + if not np.isnan(field_value): + field_value = str(float(field_value)) + else: + field_value = str(no_data) + else: + field_value = str(no_data) + elif field_type == 'time_stamp': + if isinstance(field_value, pd.Timestamp): + field_value = field_value.strftime('%Y-%m-%d %H:%M') + else: + field_value = no_time + elif field_type == 'string': + if isinstance(field_value, str): + field_value = 
str(field_value) + else: + field_value = no_string + else: + raise NotImplementedError('Parse warnings case not implemented yet') + + field_values.append(field_value) + + if warnings_header is None: + warnings_header = column_delimiter.join(field_list) + if warnings_datasets is None: + warnings_datasets = [] + + warnings_datasets.append(column_delimiter.join(field_values)) + + with open(file_name, 'w') as file_handle: + warnings_header = warnings_header + '\n' + file_handle.write(warnings_header) + for warnings_row in warnings_datasets: + warnings_row = warnings_row + '\n' + file_handle.write(warnings_row) + +# ------------------------------------------------------------------------------------- diff --git a/apps/Analyzer_Execution/lib_data_io_html.py b/apps/Analyzer_Execution/lib_data_io_html.py new file mode 100644 index 0000000..4b3ae5d --- /dev/null +++ b/apps/Analyzer_Execution/lib_data_io_html.py @@ -0,0 +1,311 @@ +# ------------------------------------------------------------------------------------- +# Libraries +import numpy as np +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to slice list every n elements +def slice_list(source, step): + return [source[i::step] for i in range(step)] +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to write an html summary file +def write_file_summary(time_run, time_exec, time_format='%Y-%m-%d %H:%M', time_mode='LOCAL', + html_name='run_analyzer.html', run_summary=None, + run_name=None, tag_summary_data='data', tag_summary_info='info', + tag_run_start='run_start', tag_run_end='run_end', tag_run_datasets='run_datasets', + tag_alarm_section='alarm_section', + tag_alarm_value='alarm_value', + tag_alarm_index='alarm_index', + tag_alert_section='alert_section', + tag_alert_value='alert_value', + tag_alert_index='alert_index'): + + if not isinstance(run_name, list): + run_name = [run_name] + + with open(html_name, "w") as html_handle: + + html_handle.write('\n') + + html_handle.write('\n') + + html_handle.write('\n') + + html_handle.write('
Execution Time: ' + + time_exec.strftime(time_format) + ' ' + time_mode + '
\n') + + html_handle.write('
Reference Time: ' + + time_run.strftime(time_format) + ' ' + time_mode + '
\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + html_handle.write('\n') + + for run_name_step in run_name: + + run_data = run_summary[run_name_step][tag_summary_data] + run_info = run_summary[run_name_step][tag_summary_info] + + if 'run_n' in list(run_info.keys()): + run_n = str(run_info['run_n']) + else: + run_n = 'NA' + if 'run_description' in list(run_info.keys()): + run_description = str(run_info['run_description']) + else: + run_description = 'NA' + if 'section_n' in list(run_info.keys()): + section_n = str(run_info['section_n']) + else: + section_n = 'NA' + + html_handle.write('') + + if run_data is not None: + + time_stamp_start = run_info['run_start'] + if time_stamp_start is not None: + time_str_start = time_stamp_start.strftime(time_format) + else: + time_str_start = 'NA' + time_stamp_end = run_info['run_end'] + if time_stamp_end is not None: + time_str_end = time_stamp_end.strftime(time_format) + else: + time_str_end = 'NA' + time_stamp_elapsed = run_info['run_elapsed'] + if time_stamp_elapsed is not None: + time_str_elapsed = str(time_stamp_elapsed) + else: + time_str_elapsed = 'NA' + + time_str_end = '2021-02-25 12:49' + + # CASE CONDITIONS + if time_str_start != 'NA' and time_str_end != 'NA': + + section_alert_collections = {} + section_alarm_collections = {} + for id_step, (time_step, data_step) in enumerate(run_data.items()): + + alert_value_raw = data_step[tag_alert_value] + alert_index_raw = data_step[tag_alert_index] + alert_section_raw = data_step[tag_alert_section] + alarm_value_raw = data_step[tag_alarm_value] + alarm_index_raw = data_step[tag_alarm_index] + alarm_section_raw = data_step[tag_alarm_section] + + alert_idx = [i for i, n in enumerate(alert_value_raw) if np.isfinite(n)] + alarm_idx = [i for i, n in enumerate(alarm_value_raw) if np.isfinite(n)] + + alert_value_def = [] + alert_index_def = [] + alert_section_def = [] + for idx in alert_idx: + alert_value_def.append(alert_value_raw[idx]) + alert_index_def.append(alert_index_raw[idx]) + alert_section_def.append(alert_section_raw[idx]) + section_alert_collections[id_step] = {} + section_alert_collections[id_step][tag_alert_section] = {} + section_alert_collections[id_step][tag_alert_section] = alert_section_def + section_alert_collections[id_step][tag_alert_value] = {} + section_alert_collections[id_step][tag_alert_value] = alert_value_def + section_alert_collections[id_step][tag_alert_index] = {} + section_alert_collections[id_step][tag_alert_index] = alert_index_def + + alarm_value_def = [] + alarm_index_def = [] + alarm_section_def = [] + for idx in alarm_idx: + alarm_value_def.append(alarm_value_raw[idx]) + alarm_index_def.append(alarm_index_raw[idx]) + alarm_section_def.append(alarm_section_raw[idx]) + section_alarm_collections[id_step] = {} + section_alarm_collections[id_step][tag_alarm_section] = {} + section_alarm_collections[id_step][tag_alarm_section] = alarm_section_def + section_alarm_collections[id_step][tag_alarm_value] = {} + section_alarm_collections[id_step][tag_alarm_value] = alarm_value_def + 
section_alarm_collections[id_step][tag_alarm_index] = {} + section_alarm_collections[id_step][tag_alarm_index] = alarm_index_def + + if (alert_section_def.__len__() == 0) and (alarm_section_def.__len__() == 0): + + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + + else: + + # CASE COMPLETED + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + + for id, data in section_alert_collections.items(): + section_list = data[tag_alert_section] + idx_list = data[tag_alert_index] + value_list = data[tag_alert_value] + section_n_alert = str(section_list.__len__()) + + alert_group = '' + for section_step, idx_step, value_step in zip(section_list, idx_list, value_list): + alert_string = section_step + ' :: ' + str(idx_step.strftime('%H')) + ' :: ' + str(value_step) + alert_group += alert_string + '\n' + alert_group = alert_group.replace("\n", "
\n") + + # section_groups = slice_list(section_list, 5) + # section_cell = '\n'.join(' '.join(sub) for sub in section_groups) + + # alert today + if id == 0: + if section_list.__len__() > 0: + html_handle.write('') + else: + html_handle.write('') + html_handle.write('') + + # alert tomorrow + if id == 1: + if section_list: + html_handle.write('') + else: + html_handle.write('') + html_handle.write('') + html_handle.write('') + + for id, data in section_alarm_collections.items(): + section_list = data[tag_alarm_section] + idx_list = data[tag_alarm_index] + value_list = data[tag_alarm_value] + section_n_alert = str(section_list.__len__()) + + alarm_group = '' + for section_step, idx_step, value_step in zip(section_list, idx_list, value_list): + alarm_string = section_step + ' :: ' + str(idx_step.strftime('%H')) + ' :: ' + str(value_step) + alarm_group += alarm_string + '\n' + alert_group = alert_group.replace("\n", "
\n") + + # alert today + if id == 0: + if section_list: + html_handle.write('') + else: + html_handle.write('') + html_handle.write('') + + # alert tomorrow + if id == 1: + if section_list: + html_handle.write('') + else: + html_handle.write('') + html_handle.write('') + html_handle.write('') + + elif time_str_start != 'NA' and time_str_end == 'NA': + + # CASE RUNNING + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + + else: + + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + html_handle.write('') + + html_handle.write('') + + html_handle.write('') + html_handle.write('') + +# ------------------------------------------------------------------------------------- diff --git a/apps/Analyzer_Execution/lib_data_io_json.py b/apps/Analyzer_Execution/lib_data_io_json.py new file mode 100755 index 0000000..e938121 --- /dev/null +++ b/apps/Analyzer_Execution/lib_data_io_json.py @@ -0,0 +1,156 @@ +""" +Library Features: + +Name: lib_data_io_json +Author(s): Fabio Delogu (fabio.delogu@cimafoundation.org) +Date: '20210225' +Version: '1.0.0' +""" +####################################################################################### +# Library +import logging +import os +import time +import json +import difflib + +import numpy as np +import pandas as pd + +from lib_info_args import logger_name + +# Logging +log_stream = logging.getLogger(logger_name) +####################################################################################### + + +# ------------------------------------------------------------------------------------- +# Method to write file hydrograph +def write_file_hydrograph_ts(file_name, file_dict_raw, file_indent=4, file_sep=','): + + file_dict_parser = {} + for file_key, file_value in file_dict_raw.items(): + if isinstance(file_value, list): + file_value = [str(i) for i in file_value] + file_value = file_sep.join(file_value) + elif isinstance(file_value, (int, float)): + file_value = str(file_value) + elif isinstance(file_value, str): + pass + elif isinstance(file_value, np.ndarray): + file_value = [str(i) for i in file_value] + file_value = file_sep.join(file_value) + else: + log_stream.error(' ===> Error in parsering json time series') + raise RuntimeError('Parsering case not implemented yet') + + file_dict_parser[file_key] = file_value + + file_data = json.dumps(file_dict_parser, indent=file_indent, ensure_ascii=False, sort_keys=True) + with open(file_name, "w", encoding='utf-8') as file_handle: + file_handle.write(file_data) + +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to read file execution information +def read_file_hydrograph_info(file_name, + tag_time_access='time_access', 
tag_time_modified='time_modified', + tag_time_create='time_create', tag_file_size='file_size'): + + file_time_access = pd.Timestamp(time.ctime(os.path.getatime(file_name))) + file_time_modified = pd.Timestamp(time.ctime(os.path.getmtime(file_name))) + file_time_create = pd.Timestamp(time.ctime(os.path.getctime(file_name))) + file_size = os.path.getsize(file_name) + + file_obj = {tag_time_access: file_time_access, tag_time_modified: file_time_modified, + tag_time_create: file_time_create, tag_file_size: file_size} + + return file_obj +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to read file hydrograph +def read_file_hydrograph_ts(file_name, file_sep=',', + tag_time_in='time_period', + tag_discharge_obs_in='time_series_discharge_observed', + tag_discharge_sim_in='time_series_discharge_simulated', + tag_time_out='time_period', + tag_discharge_obs_out='discharge_observed', + tag_discharge_sim_out='discharge_simulated', + tag_index='time_period'): + + if os.path.exists(file_name): + with open(file_name) as file_handle: + file_data = json.load(file_handle) + else: + log_stream.error(' ===> Error in reading hydrograph file ' + file_name) + raise IOError('File not found') + + keys_list = list(file_data.keys()) + tag_discharge_sim_in_select = [] + tag_discharge_sim_out_select = [] + for keys_step in keys_list: + if tag_discharge_sim_in in keys_step: + tag_discharge_sim_in_select.append(keys_step) + + tag_diff_tmp = [li for li in difflib.ndiff(keys_step, tag_discharge_sim_in) if li[0] != ' '] + tag_diff_element = ''.join([tag_step[1:].strip() for tag_step in tag_diff_tmp]) + tag_diff_idx = keys_step.find(tag_diff_element) + + if tag_discharge_sim_out.__len__() == tag_diff_idx: + tag_discharge_sim_out_step = tag_discharge_sim_out[:tag_diff_idx] + tag_diff_element + elif tag_discharge_sim_out.__len__() > tag_diff_idx: + tag_discharge_sim_out_step = tag_discharge_sim_out + tag_diff_element + elif tag_discharge_sim_out.__len__() < tag_diff_idx: + tag_discharge_sim_out_step = tag_discharge_sim_out + tag_diff_element + else: + log_stream.error(' ===> Error in create tags for outcome variable') + raise NotImplementedError('Case not implemented yet') + + tag_discharge_sim_out_select.append(tag_discharge_sim_out_step) + + variable_list_in = [tag_time_in, tag_discharge_obs_in] + variable_list_in.extend(tag_discharge_sim_in_select) + variable_list_out = [tag_time_out, tag_discharge_obs_out] + variable_list_out.extend(tag_discharge_sim_out_select) + + file_data_attrs = {} + file_data_dict = {} + for file_key, file_value in file_data.items(): + if file_key in variable_list_in: + var_idx = variable_list_in.index(file_key) + var_name = variable_list_out[var_idx] + file_data_dict[var_name] = file_value + else: + file_data_attrs[file_key] = file_value + + for file_key, file_value_tmp in file_data_dict.items(): + file_list_tmp = file_value_tmp.split(file_sep) + if file_key == tag_time_out: + file_list_converted = pd.DatetimeIndex(file_list_tmp) + else: + file_list_converted = list(map(float, file_list_tmp)) + file_data_dict[file_key] = file_list_converted + + file_data_df = pd.DataFrame(data=file_data_dict) + if tag_index in list(file_data_df.columns): + file_data_df.set_index(tag_index, inplace=True) + + return file_data_df, file_data_attrs +# ------------------------------------------------------------------------------------- + + +# 
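+# NOTE (editor): illustrative usage sketch only. The settings and hydrograph file
+# names below are hypothetical examples (patterned after the templates in the
+# configuration above), not files shipped with this module:
+#
+#     settings = read_file_settings('hat_runanalyzer_configuration.json')
+#     ts_df, ts_attrs = read_file_hydrograph_ts(
+#         'hydrograph_SectionA_BasinB_202102250000.json')
+#     print(ts_df['discharge_observed'].max(), list(ts_attrs.keys()))
+#
+#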
------------------------------------------------------------------------------------- +# Method to read file settings +def read_file_settings(file_name): + if os.path.exists(file_name): + with open(file_name) as file_handle: + file_data = json.load(file_handle) + else: + log_stream.error(' ===> Error in reading settings file ' + file_name) + raise IOError('File not found') + return file_data +# ------------------------------------------------------------------------------------- diff --git a/apps/Analyzer_Execution/lib_data_io_shapefile.py b/apps/Analyzer_Execution/lib_data_io_shapefile.py new file mode 100755 index 0000000..89e34cd --- /dev/null +++ b/apps/Analyzer_Execution/lib_data_io_shapefile.py @@ -0,0 +1,100 @@ +""" +Class Features + +Name: lib_data_io_shapefile +Author(s): Fabio Delogu (fabio.delogu@cimafoundation.org) +Date: '20210225' +Version: '1.0.0' +""" + +####################################################################################### +# Libraries +import logging + +import pandas as pd +import geopandas as gpd + +logging.getLogger('fiona').setLevel(logging.WARNING) +logging.getLogger('geopandas').setLevel(logging.WARNING) + +# Debug +# import matplotlib.pylab as plt +####################################################################################### + + +# ------------------------------------------------------------------------------------- +# Method to find data section +def find_file_section_data(section_df, section_name=None, basin_name=None, + tag_column_section_in='section_name', tag_column_basin_in='section_domain', + tag_column_section_out='section_name', tag_column_basin_out='basin_name'): + + section_name_ref = section_name.lower() + basin_name_ref = basin_name.lower() + + section_name_list = section_df[tag_column_section_in].values + basin_name_list = section_df[tag_column_basin_in].values + + section_dict_tmp = {tag_column_section_in: section_name_list, tag_column_basin_in: basin_name_list} + section_df_tmp = pd.DataFrame(data=section_dict_tmp) + section_df_tmp = section_df_tmp.astype(str).apply(lambda x: x.str.lower()) + + point_idx = section_df_tmp[(section_df_tmp[tag_column_section_in] == section_name_ref) & + (section_df_tmp[tag_column_basin_in] == basin_name_ref)].index + + if point_idx.shape[0] == 1: + point_idx = point_idx[0] + point_dict = section_df.iloc[point_idx, :].to_dict() + + point_dict[tag_column_section_out] = point_dict.pop(tag_column_section_in) + point_dict[tag_column_basin_out] = point_dict.pop(tag_column_basin_in) + + elif point_idx.shape[0] == 0: + raise IOError('Section selection failed; section not found') + else: + raise NotImplementedError('Section selection failed for unknown reason.') + + return point_dict +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to read shapefile section(s) +def read_file_section_data(file_name, columns_name_expected_in=None, columns_name_expected_out=None, columns_name_type=None): + + if columns_name_expected_in is None: + columns_name_expected_in = [ + 'HMC_X', 'HMC_Y', 'LAT', 'LON', 'BASIN', 'SEC_NAME', 'SEC_RS', 'AREA', 'Q_THR1', 'Q_THR2'] + + if columns_name_expected_out is None: + columns_name_expected_out = [ + 'hmc_idx_x', 'hmc_idx_y', 'latitude', 'longitude', 'section_domain', 'section_name', 'section_code', + 'section_drained_area', 'section_discharge_thr_alert', 'section_discharge_thr_alarm'] + + if columns_name_type is None: + columns_name_type = 
['int', 'int', 'float', 'float', + 'str', 'str', 'str', 'float', 'float', 'float'] + + file_dframe_raw = gpd.read_file(file_name) + file_rows = file_dframe_raw.shape[0] + + section_obj = {} + for column_name_in, column_name_out, column_type in zip(columns_name_expected_in, + columns_name_expected_out, columns_name_type): + if column_name_in in file_dframe_raw.columns: + column_data = file_dframe_raw[column_name_in].values.tolist() + else: + if column_type == 'int': + column_data = [-9999] * file_rows + elif column_type == 'str': + column_data = [''] * file_rows + elif column_type == 'float': + column_data = [-9999.0] * file_rows + else: + raise NotImplementedError('Datatype not implemented yet') + + section_obj[column_name_out] = column_data + + section_df = pd.DataFrame(data=section_obj) + + return section_df +# ------------------------------------------------------------------------------------- diff --git a/apps/Analyzer_Execution/lib_info_args.py b/apps/Analyzer_Execution/lib_info_args.py new file mode 100755 index 0000000..aacb901 --- /dev/null +++ b/apps/Analyzer_Execution/lib_info_args.py @@ -0,0 +1,32 @@ +""" +Library Features: + +Name: lib_info_args +Author(s): Fabio Delogu (fabio.delogu@cimafoundation.org) +Date: '20210113' +Version: '1.0.0' +""" + +####################################################################################### +# Library +import pandas as pd +####################################################################################### + +# ------------------------------------------------------------------------------------- +# Time information +time_type = 'GMT' # 'GMT', 'local' +time_units = 'days since 1858-11-17 00:00:00' +time_calendar = 'gregorian' +time_format_datasets = "%Y%m%d%H%M" +time_format_algorithm = '%Y-%m-%d %H:%M' +time_machine = pd.Timestamp.now + +# Logging information +logger_name = 'hat_runanalyzer_logger' +logger_file = 'hat_runanalyzer.txt' +logger_handle = 'file' # 'file' or 'stream' +logger_format = '%(asctime)s %(name)-12s %(levelname)-8s %(message)-80s %(filename)s:[%(lineno)-6s - %(funcName)-20s()] ' + +# Definition of zip extension +zip_extension = '.gz' +# ------------------------------------------------------------------------------------- diff --git a/apps/Analyzer_Execution/lib_utils_exec.py b/apps/Analyzer_Execution/lib_utils_exec.py new file mode 100755 index 0000000..9acf9b8 --- /dev/null +++ b/apps/Analyzer_Execution/lib_utils_exec.py @@ -0,0 +1,65 @@ +""" +Library Features: + +Name: lib_utils_exec +Author(s): Fabio Delogu (fabio.delogu@cimafoundation.org) +Date: '20210113' +Version: '1.0.0' +""" + +# ------------------------------------------------------------------------------------- +# Libraries +import logging +import os +import time + +import pandas as pd +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to read file execution information +def read_file_execution_info(file_name, + tag_time_access='time_access', tag_time_modified='time_modified', + tag_time_create='time_create', tag_file_size='file_size'): + + file_time_access = pd.Timestamp(time.ctime(os.path.getatime(file_name))) + file_time_modified = pd.Timestamp(time.ctime(os.path.getmtime(file_name))) + file_time_create = pd.Timestamp(time.ctime(os.path.getctime(file_name))) + file_size = os.path.getsize(file_name) + + file_obj = {tag_time_access: file_time_access, tag_time_modified: file_time_modified, + tag_time_create: 
file_time_create, tag_file_size: file_size} + + return file_obj +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to read file execution data +def read_file_execution_data(execution_data, tag_name='run_name'): + logging.info(' ---> Read execution data ... ') + collections_data = {} + collections_columns = None + for exec_id, exec_step in execution_data.items(): + exec_key = exec_step['features'][tag_name] + collections_data[exec_key] = {} + + data_list = [] + columns_list = [] + for exec_subkey in exec_step.keys(): + for exec_type, exec_value in exec_step[exec_subkey].items(): + columns_list.append(exec_type) + data_list.append(exec_value) + + collections_data[exec_key] = data_list + if collections_columns is None: + collections_columns = columns_list + + execution_df = pd.DataFrame.from_dict(collections_data, orient='index', columns=collections_columns) + + logging.info(' ---> Read execution data ... DONE') + + return execution_df + +# ------------------------------------------------------------------------------------- diff --git a/apps/Analyzer_Execution/lib_utils_io.py b/apps/Analyzer_Execution/lib_utils_io.py new file mode 100755 index 0000000..f126140 --- /dev/null +++ b/apps/Analyzer_Execution/lib_utils_io.py @@ -0,0 +1,180 @@ +# ------------------------------------------------------------------------------------- +# Libraries +import logging +import tempfile +import os +import time +import json +import pickle + +import numpy as np +import pandas as pd + +from distutils.util import strtobool +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to read discharge file +def read_file_discharge(file_name, file_template=None, file_skiprows=4): + + file_tmp = pd.read_table(file_name) + file_data = list(file_tmp.values) + + collections_id = 0 + obj_mod = None + obj_observed = None + obj_header = {} + for row_id, row_step in enumerate(file_data): + row_tmp = row_step[0] + if row_id < file_skiprows: + field_key, field_data = row_tmp.rstrip().split('=') + + if field_key in list(file_template.keys()): + field_name = file_template[field_key]['name'] + field_type = file_template[field_key]['type'] + field_value = file_template[field_key]['value'] + else: + field_name = field_key + field_type = None + field_value = None + + if field_type is not None: + if field_type == 'time_stamp': + field_data = pd.Timestamp(field_data) + elif field_type == 'time_frequency': + field_data = pd.Timedelta(field_data) + elif field_type == 'int': + field_data = np.int(field_data) + + if field_value is not None: + field_data = field_value + + obj_header[field_name] = field_data + elif row_id == file_skiprows: + columns_values = row_tmp.rstrip().split(' ') + array_observed = np.array(columns_values, dtype=np.float) + array_observed[array_observed < 0.0] = np.nan + time_n = array_observed.shape[0] + + if obj_observed is None: + obj_observed = np.zeros(shape=(time_n, 1)) + obj_observed[:, :] = np.nan + obj_observed[:, 0] = array_observed + + elif row_id >= file_skiprows: + + columns_values = row_tmp.rstrip().split(' ') + array_values = np.array(columns_values, dtype=np.float) + array_values[array_values < 0.0] = np.nan + time_n = array_values.shape[0] + + if obj_mod is None: + obj_mod = np.zeros(shape=(time_n , obj_header['scenario_n'])) + 
obj_mod[:, :] = np.nan + obj_mod[:, collections_id] = array_values + + collections_id += 1 + + return obj_header, obj_mod, obj_observed +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to read warning file +def read_file_warning(file_name, file_template=None, file_skiprows=4, tag_data='section_{:}'): + + file_tmp = pd.read_table(file_name) + file_data = list(file_tmp.values) + + file_time_modified = pd.Timestamp(time.ctime(os.path.getmtime(file_name))) + file_time_created = pd.Timestamp(time.ctime(os.path.getctime(file_name))) + + collections_id = 0 + file_header = {} + file_collections = {} + for row_id, row_step in enumerate(file_data): + row_tmp = row_step[0] + if row_id < file_skiprows: + field_key, field_data = row_tmp.rstrip().split('=') + + if field_key in list(file_template.keys()): + field_name = file_template[field_key]['name'] + field_type = file_template[field_key]['type'] + field_value = file_template[field_key]['value'] + else: + field_name = field_key + field_type = None + field_value = None + + if field_type is not None: + if field_type == 'time_stamp': + field_data = pd.Timestamp(field_data) + elif field_type == 'time_frequency': + field_data = pd.Timedelta(field_data) + elif field_type == 'int': + field_data = np.int(field_data) + + if field_value is not None: + field_data = field_value + + file_header[field_name] = field_data + elif row_id == file_skiprows: + columns_name = row_tmp.rstrip().split(' ') + columns_type = [str, str, float, float, bool, float, bool] + elif row_id > file_skiprows: + columns_values = row_tmp.rstrip().split(' ') + file_collections[tag_data.format(collections_id)] = {} + + for column_name, column_value, column_type in zip(columns_name, columns_values, columns_type): + file_collections[tag_data.format(collections_id)][column_name] = {} + + if column_type is float: + column_value = np.float(column_value) + elif column_type is bool: + column_value = strtobool(column_value.lower()) + elif column_type is str: + column_value = column_value.strip() + file_collections[tag_data.format(collections_id)][column_name] = column_value + + collections_id += 1 + + file_header['time_file_created'] = file_time_created + file_header['time_file_modified'] = file_time_modified + + return file_header, file_collections +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to create a tmp name +def create_filename_tmp(prefix='tmp_', suffix='.tiff', folder=None): + + if folder is None: + folder = '/tmp' + + with tempfile.NamedTemporaryFile(dir=folder, prefix=prefix, suffix=suffix, delete=False) as tmp: + temp_file_name = tmp.name + return temp_file_name +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to read data obj +def read_obj(filename): + if os.path.exists(filename): + data = pickle.load(open(filename, "rb")) + else: + data = None + return data +# ------------------------------------------------------------------------------------- + + +# ------------------------------------------------------------------------------------- +# Method to write data obj +def write_obj(filename, data): + if os.path.exists(filename): + os.remove(filename) + with 
open(filename, 'wb') as handle: + pickle.dump(data, handle, protocol=pickle.HIGHEST_PROTOCOL) +# ------------------------------------------------------------------------------------- diff --git a/apps/Analyzer_Execution/lib_utils_logging.py b/apps/Analyzer_Execution/lib_utils_logging.py new file mode 100755 index 0000000..90bcbc3 --- /dev/null +++ b/apps/Analyzer_Execution/lib_utils_logging.py @@ -0,0 +1,125 @@ +""" +Library Features: + +Name: lib_utils_logging +Author(s): Fabio Delogu (fabio.delogu@cimafoundation.org) +Date: '20201202' +Version: '1.0.0' +""" + +####################################################################################### +# Library +import logging +import os +import logging.config +import glob + +from lib_info_args import logger_name as logger_name_default +from lib_info_args import logger_file as logger_file_default +from lib_info_args import logger_handle as logger_handle_default +from lib_info_args import logger_format as logger_formatter_default + +from lib_utils_system import make_folder + +# Debug +# import matplotlib.pylab as plt +####################################################################################### + + +# ------------------------------------------------------------------------------------- +# Method to set logging file +def set_logging_file(logger_file=logger_file_default, logger_name=logger_name_default, + logger_handle=logger_handle_default, logger_formatter=logger_formatter_default, + logger_history=False, logger_history_maxfiles=12, + logger_extra_tags=None): + + # Set to flush progressbar output in logging stream handle + # progressbar.streams.wrap_stderr() + + if logger_extra_tags is not None: + for extra_key, extra_value in logger_extra_tags.items(): + logger_file = logger_file.replace(extra_key, ':') + string_count = logger_file.count(':') + extra_value = [extra_value] * string_count + logger_file = logger_file.format(*extra_value) + + logger_folder_name, logger_file_name = os.path.split(logger_file) + make_folder(logger_folder_name) + + # Save old logger file (to check run in the past) + if logger_history: + store_logging_file(logger_file, logger_file_max=logger_history_maxfiles) + + # Remove old logging file + if os.path.exists(logger_file): + os.remove(logger_file) + + # Open logger + logging.getLogger(logger_name) + logging.root.setLevel(logging.DEBUG) + + # Set logger handle type + if logger_handle == 'file': + + # Set logger handler obj + logger_handle_1 = logging.FileHandler(logger_file, 'w') + logger_handle_2 = logging.StreamHandler() + # Set logger level + logger_handle_1.setLevel(logging.DEBUG) + logger_handle_2.setLevel(logging.DEBUG) + # Set logger formatter + logger_handle_1.setFormatter(logging.Formatter(logger_formatter)) + logger_handle_2.setFormatter(logging.Formatter(logger_formatter)) + # Add handle to logger + logging.getLogger('').addHandler(logger_handle_1) + logging.getLogger('').addHandler(logger_handle_2) + + elif logger_handle == 'stream': + + # Set logger handler obj + logging.StreamHandler() + # Set logger level + logger_handle.setLevel(logging.DEBUG) + # Set logger formatter + logger_handle.setFormatter(logging.Formatter(logger_formatter)) + # Add handle to logger + logging.getLogger('').addHandler(logger_handle) + + else: + + # Set logger handler obj + logging.NullHandler() + # Add handle to logger + logging.getLogger('').addHandler(logger_handle) + +# ------------------------------------------------------------------------------------- + + +# 
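+# NOTE (editor): minimal usage sketch, assuming the defaults defined in
+# lib_info_args; the log file path below is a hypothetical example. With
+# logger_handle='file' the function attaches both a FileHandler and a
+# StreamHandler, so messages are written to the file and echoed to the console:
+#
+#     set_logging_file(logger_file='/tmp/hat_runanalyzer_log.txt',
+#                      logger_handle='file', logger_history=True)
+#
+#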
------------------------------------------------------------------------------------- +# Method to store logging file (to save execution history) +def store_logging_file(logger_file, logger_ext='.old.{}', logger_file_max=12): + + # Get logger folder + logger_folder = os.path.split(logger_file)[0] + + # Iterate to store old logging file + if os.path.exists(logger_file): + + logger_file_loop = logger_file + logger_file_id = 0 + while os.path.exists(logger_file_loop): + logger_file_id = logger_file_id + 1 + logger_file_loop = logger_file + logger_ext.format(logger_file_id) + + if logger_file_id > logger_file_max: + logger_file_obj = glob.glob(os.path.join(logger_folder, '*')) + for logger_file_step in logger_file_obj: + if logger_file_step.startswith(logger_file): + os.remove(logger_file_step) + logger_file_loop = logger_file + break + + if logger_file_loop: + if logger_file != logger_file_loop: + os.rename(logger_file, logger_file_loop) +# ------------------------------------------------------------------------------------- diff --git a/apps/RunAnalyzer/lib_utils_system.py b/apps/Analyzer_Execution/lib_utils_system.py similarity index 87% rename from apps/RunAnalyzer/lib_utils_system.py rename to apps/Analyzer_Execution/lib_utils_system.py index 74c2e2b..40baaf5 100755 --- a/apps/RunAnalyzer/lib_utils_system.py +++ b/apps/Analyzer_Execution/lib_utils_system.py @@ -36,12 +36,15 @@ def fill_tags2string(string_raw, tags_format=None, tags_filling=None): tags_format_tmp = deepcopy(tags_format) for tag_key, tag_value in tags_format.items(): tag_key_tmp = '{' + tag_key + '}' - if tag_value is not None: - if tag_key_tmp in string_raw: - string_filled = string_raw.replace(tag_key_tmp, tag_value) - string_raw = string_filled - else: - tags_format_tmp.pop(tag_key, None) + if tag_key in list(tags_filling.keys()): + tag_filled = tags_filling[tag_key] + if tag_value is not None: + if tag_key_tmp in string_raw: + if tag_filled is not None: + string_filled = string_raw.replace(tag_key_tmp, tag_value) + string_raw = string_filled + else: + tags_format_tmp.pop(tag_key, None) dim_max = 1 for tags_filling_values_tmp in tags_filling.values(): diff --git a/apps/RunAnalyzer/lib_utils_time.py b/apps/Analyzer_Execution/lib_utils_time.py similarity index 100% rename from apps/RunAnalyzer/lib_utils_time.py rename to apps/Analyzer_Execution/lib_utils_time.py diff --git a/apps/RunAnalyzer/HAT_RunAnalyzer_HMC_Main.py b/apps/RunAnalyzer/HAT_RunAnalyzer_HMC_Main.py deleted file mode 100755 index 5e43843..0000000 --- a/apps/RunAnalyzer/HAT_RunAnalyzer_HMC_Main.py +++ /dev/null @@ -1,796 +0,0 @@ -#!/usr/bin/python3 -""" -Hydrological Analysis Tool - RunAnalyzer - -__date__ = '20200605' -__version__ = '1.0.0' -__author__ = - 'Fabio Delogu (fabio.delogu@cimafoundation.org', - 'Flavio Pignone (flavio.pignone@cimafoundation.org', - -__library__ = 'HAT' - -General command line: -python3 HAT_RunAnalyzer_HMC_Main.py -settings_file configuration.json -time "YYYY-MM-DD HH:MM" - -Version(s): -20200605 (1.0.0) --> Beta release -""" - -# ------------------------------------------------------------------------------------- -# Complete library -import logging -import warnings -import time -import os - -import numpy as np -import pandas as pd - -from copy import deepcopy -from argparse import ArgumentParser - -from lib_utils_io import read_file_json, read_file_warning, read_file_discharge, write_file_status, read_obj, write_obj -from lib_utils_system import make_folder, fill_tags2string -from lib_utils_time import set_time -# 
------------------------------------------------------------------------------------- - -# ------------------------------------------------------------------------------------- -# Algorithm information -alg_version = '1.0.0' -alg_release = '2020-06-05' -alg_name = 'RUN ANALYZER' -# Algorithm parameter(s) -time_format = '%Y-%m-%d %H:%M' -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Script Main -def main(): - - # ------------------------------------------------------------------------------------- - # Get algorithm settings - alg_settings, alg_time = get_args() - - # Set algorithm settings - data_settings = read_file_json(alg_settings) - - # Set algorithm logging - make_folder(data_settings['log']['folder_name']) - set_logging(logger_file=os.path.join(data_settings['log']['folder_name'], - data_settings['log']['file_name'])) - # ------------------------------------------------------------------------------------- - - # ------------------------------------------------------------------------------------- - # Info algorithm - logging.info(' ============================================================================ ') - logging.info(' ==> ' + alg_name + ' (Version: ' + alg_version + ' Release_Date: ' + alg_release + ')') - logging.info(' ==> START ... ') - logging.info(' ') - - # Time algorithm information - start_time = time.time() - # ------------------------------------------------------------------------------------- - - # ------------------------------------------------------------------------------------- - # Organize time information - time_run, time_exec, time_range = set_time( - time_run_args=alg_time, time_run_file=data_settings['time']['time_now'], - time_format=time_format, - time_period=data_settings['time']['time_period'], - time_frequency=data_settings['time']['time_frequency'], - time_rounding=data_settings['time']['time_rounding']) - # ------------------------------------------------------------------------------------- - - # ------------------------------------------------------------------------------------- - # Method to freeze run basin information - info_collections = freeze_run_info(data_settings['run']) - - # Method to search available executions - execution_collections = search_run_execution( - time_run, - algorithm_execution=data_settings['data']['source']['execution'], - run_data=data_settings['run'], - template_tags=data_settings['template_tags']) - - # Method to search available results - results_header, results_collections = search_run_results( - time_range, - algorithm_results=data_settings['data']['source']['results'], - run_data=data_settings['run'], - template_tags=data_settings['template_tags']) - - # Method to get run warnings - data_collections_warning = get_run_warning(results_collections, template_fields=data_settings['template_fields']) - - # Method to get run discharges - data_header, data_collections_discharge = get_run_discharge(results_collections, data_collections_warning, - template_fields=data_settings['template_fields']) - - # Method to analyze run execution - analysis_collections_executions = analyze_run_execution(execution_collections, - template_fields=data_settings['template_fields']) - - # Method to analyze run results - analysis_header, analysis_collections_results = analyze_run_results( - time_run, data_header, results_header, data_collections_discharge, - 
flag_ancillary=data_settings['flags']['update_ancillary'], - data_ancillary=data_settings['data']['ancillary'], - template_tags=data_settings['template_tags'] - ) - - # Method to dump run status - dump_run_status(time_run, time_exec, time_format=time_format, - algorithm_destination=data_settings['data']['destination'], - default_header=info_collections, - analysis_header=analysis_header, - analysis_execution=analysis_collections_executions, - analysis_results=analysis_collections_results, - template_tags=data_settings['template_tags']) - # ------------------------------------------------------------------------------------- - - # ------------------------------------------------------------------------------------- - # Info algorithm - time_elapsed = round(time.time() - start_time, 1) - - logging.info(' ') - logging.info(' ==> ' + alg_name + ' (Version: ' + alg_version + ' Release_Date: ' + alg_release + ')') - logging.info(' ==> TIME ELAPSED: ' + str(time_elapsed) + ' seconds') - logging.info(' ==> ... END') - logging.info(' ==> Bye, Bye') - logging.info(' ============================================================================ ') - - # ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to dump run status -def dump_run_status(time_run, time_exec, time_format='%Y-%m-%d %H:%M', - algorithm_destination=None, - default_header=None, - analysis_header=None, analysis_execution=None, analysis_results=None, - template_tags=None, - folder_name_tag='folder_name', file_name_tag='file_name'): - - datetime_run = time_run.to_pydatetime() - # datetime_exec = time_exec.to_pydatetime() - - time_ref = time_run.replace(hour=0, minute=0) - time_range_ref = pd.date_range(start=time_ref, periods=2, freq='D') - - folder_name_raw = algorithm_destination[folder_name_tag] - file_name_raw = algorithm_destination[file_name_tag] - - template_values = {'run_datetime': datetime_run, 'run_sub_path_time': datetime_run, - 'analysis_datetime': datetime_run, 'analysis_sub_path_time': datetime_run} - - folder_name_def = fill_tags2string(folder_name_raw, template_tags, template_values) - file_name_def = fill_tags2string(file_name_raw, template_tags, template_values) - - make_folder(folder_name_def) - file_path_def = os.path.join(folder_name_def, file_name_def) - - logging.info(' ---> Organize html datasets ... ') - - collections_ancillary = {} - collections_alarm = {} - collections_alert = {} - for run_key, run_fields in analysis_header.items(): - - # DEBUG START - # run_key = 'realtime_rf-ecmwf-0100' - # run_fields = analysis_header[run_key] - - logging.info(' ----> RunType ' + run_key + ' ... 
') - - if run_fields is not None: - - collections_ancillary[run_key] = {} - collections_alarm[run_key] = {} - collections_alert[run_key] = {} - - run_analysis_execution = analysis_execution[run_key] - run_analysis_results = analysis_results[run_key] - - collections_ancillary[run_key]['description'] = run_fields['description'] - collections_ancillary[run_key]['scenario_n'] = run_fields['scenario_n'] - collections_ancillary[run_key]['section_n'] = run_fields['section_n'] - collections_ancillary[run_key]['time_modified_first'] = run_analysis_execution['time_created_first'] - collections_ancillary[run_key]['time_modified_last'] = run_fields['time_file_modified'] - collections_ancillary[run_key]['time_range'] = time_range_ref - - for point_key, point_fields in run_analysis_results.items(): - - point_tag = point_fields['name'] - point_fields_alarm = point_fields['discharge_alarm'] - point_fields_alert = point_fields['discharge_alert'] - - point_field_alert_select = point_fields_alert[point_fields_alert.index >= time_ref] - point_field_alarm_select = point_fields_alarm[point_fields_alarm.index >= time_ref] - - logging.info(' -----> Point ' + point_tag + ' ... ') - - for time_id, time_range_step in enumerate(time_range_ref): - - time_alert_control = [time_check for time_check in point_field_alert_select.index if - time_check.replace(hour=0, minute=0) == time_range_step] - time_alarm_control = [time_check for time_check in point_field_alarm_select.index if - time_check.replace(hour=0, minute=0) == time_range_step] - - if time_alert_control: - - if point_field_alert_select.values[time_id] != -9999: - point_field_alert_idx = point_field_alert_select.index[time_id] - point_field_alert_value = point_field_alert_select.values[time_id] - - if time_range_step not in list(collections_alert[run_key].keys()): - collections_alert[run_key][time_range_step] = {} - collections_alert[run_key][time_range_step]['name'] = [point_tag] - collections_alert[run_key][time_range_step]['idx'] = [point_field_alert_idx] - collections_alert[run_key][time_range_step]['value'] = [point_field_alert_value] - else: - name_tmp = collections_alert[run_key][time_range_step]['point'] - idx_tmp = collections_alert[run_key][time_range_step]['idx'] - value_tmp = collections_alert[run_key][time_range_step]['value'] - - name_tmp.append(point_tag) - idx_tmp.append(point_field_alert_idx) - value_tmp.append(point_field_alert_value) - - collections_alert[run_key][time_range_step]['name'] = name_tmp - collections_alert[run_key][time_range_step]['idx'] = idx_tmp - collections_alert[run_key][time_range_step]['value'] = value_tmp - - if time_alarm_control: - - if point_field_alarm_select.values[time_id] != -9999: - point_field_alarm_idx = point_field_alarm_select.index[time_id] - point_field_alarm_value = point_field_alarm_select.values[time_id] - - if time_range_step not in list(collections_alarm[run_key].keys()): - collections_alarm[run_key][time_range_step] = {} - collections_alarm[run_key][time_range_step]['name'] = [point_tag] - collections_alarm[run_key][time_range_step]['idx'] = [point_field_alarm_idx] - collections_alarm[run_key][time_range_step]['value'] = [point_field_alarm_value] - else: - name_tmp = collections_alarm[run_key][time_range_step]['point'] - idx_tmp = collections_alarm[run_key][time_range_step]['idx'] - value_tmp = collections_alarm[run_key][time_range_step]['value'] - - name_tmp.append(point_tag) - idx_tmp.append(point_field_alarm_idx) - value_tmp.append(point_field_alarm_value) - - 
collections_alarm[run_key][time_range_step]['name'] = name_tmp - collections_alarm[run_key][time_range_step]['idx'] = idx_tmp - collections_alarm[run_key][time_range_step]['value'] = value_tmp - - logging.info(' -----> Point ' + point_tag + ' ... DONE') - - logging.info(' ----> RunType ' + run_key + ' ... DONE') - - else: - collections_ancillary[run_key] = None - collections_alarm[run_key] = None - collections_alert[run_key] = None - - logging.info(' ----> RunType ' + run_key + ' ... SKIPPED. Datasets are undefined.') - - logging.info(' ---> Organize html datasets ... DONE') - - logging.info(' ---> Write html datasets ... ') - write_file_status(time_run, time_exec, time_format='%Y-%m-%d %H:%M', html_name=file_path_def, - run_default=default_header, - run_execution=analysis_execution, - run_ancillary=collections_ancillary, - run_alert=collections_alert, run_alarm=collections_alarm) - logging.info(' ---> Write html datasets ... DONE') - -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to analyze run execution(s) -def analyze_run_execution(execution_collections, template_fields=None): - - logging.info(' ---> Analyze run executions ... ') - - html_collections = {} - for run_key, run_file_list in execution_collections.items(): - - logging.info(' ----> RunType ' + run_key + ' ... ') - - if run_file_list: - - time_created = [] - time_modified = [] - for file_name_raw in run_file_list: - - template_values = {'run_name': run_key} - - file_name_def = fill_tags2string(file_name_raw, template_fields, template_values) - - file_modified = pd.Timestamp(time.ctime(os.path.getmtime(file_name_def))) - file_created = pd.Timestamp(time.ctime(os.path.getctime(file_name_def))) - - time_created.append(file_created) - time_modified.append(file_modified) - - scenario_n = run_file_list.__len__() - - datetime_created = pd.DatetimeIndex(time_created) - datetime_modified = pd.DatetimeIndex(time_modified) - - datetime_created_first = datetime_created.min() - datetime_created_last = datetime_created.max() - - datetime_modified_first = datetime_modified.min() - datetime_modified_last = datetime_modified.max() - - html_collections[run_key] = {} - html_collections[run_key]['scenario_n'] = scenario_n - html_collections[run_key]['time_created_first'] = datetime_created_first - html_collections[run_key]['time_created_last'] = datetime_created_last - html_collections[run_key]['time_modified_first'] = datetime_modified_first - html_collections[run_key]['time_modified_last'] = datetime_modified_last - - logging.info(' ----> RunType ' + run_key + ' ... DONE') - - else: - - html_collections[run_key] = None - logging.info(' ----> RunType ' + run_key + ' ... SKIPPED. Run is not available.') - - logging.info(' ---> Analyze run executions ... DONE') - - return html_collections - -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to analyze run results -def analyze_run_results(time_run, data_header, results_header, data_collections, - tag_columns='data_max', data_ancillary=None, flag_ancillary=False, - template_tags=None): - - logging.info(' ---> Analyze run results ... 
') - - # Prepare ancillary data - folder_name_raw = data_ancillary['folder_name'] - file_name_raw = data_ancillary['file_name'] - - template_values = {'run_datetime': time_run, 'run_sub_path_time': time_run} - - folder_name_def = fill_tags2string(folder_name_raw, template_tags, template_values) - file_name_def = fill_tags2string(file_name_raw, template_tags, template_values) - - file_path_def = os.path.join(folder_name_def, file_name_def) - - if flag_ancillary: - if os.path.exists(file_path_def): - os.remove(file_path_def) - - if not os.path.exists(file_path_def) or flag_ancillary: - html_header = {} - html_collections = {} - for run_key, run_data in data_collections.items(): - - logging.info(' ----> RunType ' + run_key + ' ... ') - - # DEBUG START - # run_key = 'realtime_rf-ecmwf-0100' - # run_data = data_collections[run_key] - - if run_data is not None: - - html_header[run_key] = {**data_header[run_key], **results_header[run_key]} - - html_collections[run_key] = {} - for point_key, point_data in run_data.items(): - - point_basin = point_data['Basin'] - point_section = point_data['Section'] - point_alarm_thr = point_data['QAlarm'] - point_alert_thr = point_data['QAlert'] - point_data_raw = point_data['discharge_modelled'] - - point_tag = '@'.join([point_basin, point_section]) - - logging.info(' -----> Point ' + point_tag + ' ... ') - - point_time_n = point_data_raw.shape[0] - point_step_n = point_data_raw.shape[1] - - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - point_data_max = np.nanmax(point_data_raw, axis=1) - - point_time_hourly_idx = pd.date_range(start=data_header[run_key]['time_start'], - periods=point_time_n, freq=data_header[run_key]['time_frequency']) - - point_time_hourly_start = point_time_hourly_idx[0] - point_time_hourly_end = point_time_hourly_idx[-1] - - point_df_hourly = pd.DataFrame(index=point_time_hourly_idx, data=point_data_max, columns=[tag_columns]) - - point_df_daily = point_df_hourly.loc[point_df_hourly.groupby(pd.Grouper(freq='D')).idxmax().iloc[:, 0]] - point_time_daily_maximum = point_df_daily.index - point_time_daily_default = point_df_daily.index.floor('D') - - point_df_alarm_select = point_df_daily[point_df_daily[tag_columns] >= point_alarm_thr] - point_df_alert_select = point_df_daily[point_df_daily[tag_columns] >= point_alert_thr] - - point_data_daily_default = np.zeros(shape=point_time_daily_default.size) - point_data_daily_default[:] = -9999.0 - point_df_daily_default = pd.DataFrame(index=point_time_daily_default, data=point_data_daily_default, - columns=[tag_columns]) - - point_df_alarm_filled = point_df_daily_default.combine_first(point_df_alarm_select) - point_df_alarm_unique = point_df_alarm_filled.loc[point_df_alarm_filled.groupby( - pd.Grouper(freq='D')).idxmax().iloc[:, 0]] - - point_df_alert_filled = point_df_daily_default.combine_first(point_df_alert_select) - point_df_alert_unique = point_df_alert_filled.loc[point_df_alert_filled.groupby( - pd.Grouper(freq='D')).idxmax().iloc[:, 0]] - - html_collections[run_key][point_key] = {} - html_collections[run_key][point_key]['name'] = point_tag - html_collections[run_key][point_key]['time_start'] = point_time_hourly_start - html_collections[run_key][point_key]['time_end'] = point_time_hourly_end - html_collections[run_key][point_key]['discharge_alarm'] = point_df_alarm_unique - html_collections[run_key][point_key]['discharge_alert'] = point_df_alert_unique - - logging.info(' -----> Point ' + point_tag + ' ... DONE') - - logging.info(' ----> RunType ' + run_key + ' ... 
DONE') - - else: - html_header[run_key] = None - html_collections[run_key] = None - - logging.info(' ----> RunType ' + run_key + ' ... SKIPPED. Datasets are undefined.') - - make_folder(folder_name_def) - html_obj = {'header': html_header, 'collections': html_collections} - write_obj(file_path_def, html_obj) - - logging.info(' ---> Analyze run results ... DONE') - - else: - - html_obj = read_obj(file_path_def) - html_header = html_obj['header'] - html_collections = html_obj['collections'] - - logging.info(' ---> Analyze run results ... SKIPPED. PREVIOUSLY SAVED') - - return html_header, html_collections - -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to get warning data -def get_run_warning(run_collections, tag_field='warning', template_fields=None): - - logging.info(' ---> Get warning table ... ') - - warning_collections = {} - for run_key, run_data in run_collections.items(): - - logging.info(' ----> RunType ' + run_key + ' ... ') - - warning_collections[run_key] = {} - if run_data: - - run_fields = run_data[tag_field] - file_step = run_fields['file_list'][0] - - if os.path.exists(file_step): - data_header, data_collections = read_file_warning(file_step, file_template=template_fields) - data_header['section_n'] = list(data_collections.keys()).__len__() - else: - data_header = None - data_collections = None - - warning_collections[run_key]['header'] = data_header - warning_collections[run_key]['data'] = data_collections - - logging.info(' ----> RunType ' + run_key + ' ... DONE') - - else: - warning_collections[run_key]['header'] = None - warning_collections[run_key]['data'] = None - - logging.info(' ----> RunType ' + run_key + ' ... SKIPPED. Warning table is not available') - - logging.info(' ---> Get warning table ... DONE') - - return warning_collections -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to get discharge data -def get_run_discharge(run_collections, warning_collections, tag_field='discharge', template_fields=None): - - logging.info(' ---> Get discharge time-series ... ') - - data_header = {} - data_collections = {} - for run_key, run_data in run_collections.items(): - - logging.info(' ----> RunType ' + run_key + ' ... 
') - - if run_data: - - run_fields = run_data[tag_field] - warning_section = warning_collections[run_key] - - warning_header = warning_section['header'] - warning_data = warning_section['data'] - - data_workspace = deepcopy(warning_data) - - data_header[run_key] = {} - data_collections[run_key] = {} - for (data_key, data_field), data_ws in zip(warning_data.items(), data_workspace.values()): - - field_basin = data_field['Basin'] - field_section = data_field['Section'] - - file_select = [elem for elem in run_fields['file_list'] if (field_basin in elem) and (field_section in elem)][0] - - if os.path.exists(file_select): - discharge_header, discharge_data_modelled, discharge_data_observed = read_file_discharge( - file_select, file_template=template_fields) - else: - discharge_header = None - discharge_data_modelled = None - discharge_data_observed = None - - data_header[run_key] = {**warning_header, **discharge_header} - data_ws['discharge_observed'] = discharge_data_observed - data_ws['discharge_modelled'] = discharge_data_modelled - - data_collections[run_key][data_key] = data_ws - - logging.info(' ----> RunType ' + run_key + ' ... DONE') - - else: - data_header[run_key] = None - data_collections[run_key] = None - - logging.info(' ----> RunType ' + run_key + ' ... SKIPPED. Discharge time-series are not available') - - logging.info(' ---> Get discharge time-series ... DONE') - - return data_header, data_collections -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to freeze run information -def freeze_run_info(run_data): - logging.info(' ---> Freeze run information ... ') - info_collections = {} - for run_id, run_step in run_data.items(): - run_key = run_step['ancillary']['name'] - info_collections[run_key] = {} - for run_field, run_value in run_step['ancillary'].items(): - info_collections[run_key][run_field] = run_value - logging.info(' ---> Freeze run information ... DONE') - - return info_collections - -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to search available run -def search_run_execution(time_run, algorithm_execution, run_data=None, template_tags=None, - folder_name_tag='folder_name', file_name_tag='file_name'): - - logging.info(' ---> Search run executions ... ') - - execution_collections = {} - - datetime_run = time_run.to_pydatetime() - for run_id, run_step in run_data.items(): - - run_key = run_step['ancillary']['name'] - - logging.info(' ----> RunType ' + run_key + ' ... ') - - folder_name_raw = algorithm_execution[folder_name_tag] - file_name_raw = algorithm_execution[file_name_tag] - - template_values = {'run_name': run_key, - 'run_datetime': datetime_run, 'run_sub_path_time': datetime_run} - - folder_name_def = fill_tags2string(folder_name_raw, template_tags, template_values) - file_name_def = fill_tags2string(file_name_raw, template_tags, template_values) - - file_list = [] - for root, dirs, files in os.walk(folder_name_def, topdown=False): - for name in files: - if name == file_name_def: - file_list.append(os.path.join(root, name)) - - execution_collections[run_key] = {} - execution_collections[run_key] = file_list - - logging.info(' ----> RunType ' + run_key + ' ... DONE') - - logging.info(' ---> Search run executions ... 
DONE') - - return execution_collections - -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to search available run results -def search_run_results(time_range_max, algorithm_results=None, run_data=None, template_tags=None, - folder_name_tag='folder_name', file_name_tag='file_name'): - - logging.info(' ---> Search run results ... ') - - results_header = {} - results_collections = {} - for run_id, run_step in run_data.items(): - - run_key = run_step['ancillary']['name'] - - time_period = run_step['time']['time_period'] - time_frequency = run_step['time']['time_frequency'] - time_rounding = run_step['time']['time_rounding'] - - time_end = time_range_max[0] - if time_period == 1: - time_start = time_end.floor(time_rounding) - else: - time_start = pd.date_range(end=time_end.floor(time_rounding), - periods=time_period, freq=time_rounding)[0] - - if time_end == time_start: - time_range_adapt = pd.date_range(end=time_end, periods=time_period, freq=time_frequency) - else: - time_range_adapt = pd.date_range(start=time_start, end=time_end, freq=time_frequency) - time_range_adapt = time_range_adapt[::-1] - - logging.info(' ----> RunType ' + run_key + ' ... ') - - run_locs = deepcopy(algorithm_results) - - results_collections[run_key] = {} - results_header[run_key] = {} - path_name_time = [] - path_name_list = [] - for time_step in time_range_adapt: - - datetime_step = time_step.to_pydatetime() - - for loc_key, loc_args in run_locs.items(): - - template_values = {'run_name': run_key, - 'run_datetime': datetime_step, 'run_sub_path_time': datetime_step} - - folder_name_raw = loc_args[folder_name_tag] - file_name_raw = loc_args[file_name_tag] - - folder_name_def = fill_tags2string(folder_name_raw, template_tags, template_values) - file_name_def = fill_tags2string(file_name_raw, template_tags, template_values) - - if os.path.exists(folder_name_def): - - if loc_key not in list(results_collections[run_key].keys()): - results_collections[run_key][loc_key] = {} - path_name_time = [] - path_name_list = [] - - if not path_name_list: - - if '*' in file_name_def: - file_name_list = os.listdir(folder_name_def) - - if file_name_list.__len__() > 0: - - for file_name_step in file_name_list: - path_name_step = os.path.join(folder_name_def, file_name_step) - path_name_list.append(path_name_step) - path_name_time.append(time_step) - - else: - path_name_def = os.path.join(folder_name_def, file_name_def) - if os.path.exists(path_name_def): - path_name_list.append(path_name_def) - path_name_time.append(time_step) - - if path_name_list.__len__() > 0: - results_collections[run_key][loc_key]['time'] = path_name_time - results_collections[run_key][loc_key]['file_list'] = path_name_list - - for field_key, field_value in run_step['ancillary'].items(): - results_header[run_key][field_key] = field_value - for field_key, field_value in run_step['time'].items(): - results_header[run_key][field_key] = field_value - - logging.info(' ----> RunType ' + run_key + ' ... DONE') - - logging.info(' ---> Search run results ... 
DONE') - - return results_header, results_collections -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to get script argument(s) -def get_args(): - parser_handle = ArgumentParser() - parser_handle.add_argument('-settings_file', action="store", dest="alg_settings") - parser_handle.add_argument('-time', action="store", dest="alg_time") - parser_values = parser_handle.parse_args() - - if parser_values.alg_settings: - alg_settings = parser_values.alg_settings - else: - alg_settings = 'configuration.json' - - if parser_values.alg_time: - alg_time = parser_values.alg_time - else: - alg_time = None - - return alg_settings, alg_time - - -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to set logging information -def set_logging(logger_file='log.txt', logger_format=None): - if logger_format is None: - logger_format = '%(asctime)s %(name)-12s %(levelname)-8s ' \ - '%(filename)s:[%(lineno)-6s - %(funcName)20s()] %(message)s' - - # Remove old logging file - if os.path.exists(logger_file): - os.remove(logger_file) - - # Set level of root debugger - logging.root.setLevel(logging.DEBUG) - - # Open logging basic configuration - logging.basicConfig(level=logging.DEBUG, format=logger_format, filename=logger_file, filemode='w') - - # Set logger handle - logger_handle_1 = logging.FileHandler(logger_file, 'w') - logger_handle_2 = logging.StreamHandler() - # Set logger level - logger_handle_1.setLevel(logging.DEBUG) - logger_handle_2.setLevel(logging.DEBUG) - # Set logger formatter - logger_formatter = logging.Formatter(logger_format) - logger_handle_1.setFormatter(logger_formatter) - logger_handle_2.setFormatter(logger_formatter) - - # Add handle to logging - logging.getLogger('').addHandler(logger_handle_1) - logging.getLogger('').addHandler(logger_handle_2) - - -# ------------------------------------------------------------------------------------- - - -# ---------------------------------------------------------------------------- -# Call script from external library -if __name__ == '__main__': - main() -# ---------------------------------------------------------------------------- diff --git a/apps/RunAnalyzer/hat_runanalyzer_hmc_configuration_local.json b/apps/RunAnalyzer/hat_runanalyzer_hmc_configuration_local.json deleted file mode 100755 index bd2fe65..0000000 --- a/apps/RunAnalyzer/hat_runanalyzer_hmc_configuration_local.json +++ /dev/null @@ -1,115 +0,0 @@ -{ - "flags": { - "update_ancillary": false, - "clean_ancillary": false - }, - "template_tags": { - "run_name": "string_name", - "run_datetime": "%Y%m%d%H%M", - "run_sub_path_time": "%Y/%m/%d/%H", - "analysis_datetime": "%Y%m%d%H%M", - "analysis_sub_path_time": "%Y/%m/%d/" - }, - "template_fields": { - "DateMeteoModel": {"name" : "time_run", "type" : "time_stamp", "value": null}, - "sTimeNow": {"name" : "time_run", "type" : "time_stamp", "value": null}, - "TimeFrom": {"name" : "time_start", "type" : "time_stamp", "value": null}, - "TimeTo": {"name" : "time_end", "type" : "time_stamp", "value": null}, - "DateStart": {"name" : "time_start", "type" : "time_stamp", "value": null}, - "Temp.Resolution": {"name" : "time_frequency", "type" : "time_frequency", "value": "H"}, - "SscenariosNumber": {"name" : "scenario_n", "type" : "int", "value": null} - }, - "time": { - 
"time_now": null, - "time_period": 36, - "time_frequency": "H", - "time_rounding": "H" - }, - "data": { - "source" : { - "execution": { - "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/run/{run_name}/exec/", - "file_name": "HMC_Model_V2_{run_name}.x" - }, - "results": { - "discharge": { - "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/archive/{run_name}/{run_sub_path_time}/timeseries/section_q", - "file_name": "hydrograph_{*}_{*}_{run_datetime}.txt" - }, - "warning": { - "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/archive/{run_name}/{run_sub_path_time}/warnings/", - "file_name": "warnings_sections_{run_datetime}.txt" - } - } - }, - "ancillary": { - "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/archive/run_analyzer/ancillary/", - "file_name": "run_analyzer_{run_datetime}.workspace" - }, - "destination": { - "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/archive/run_analyzer/html/{analysis_sub_path_time}", - "file_name": "summary_simulations_marche.html" - } - }, - "run": { - "id_01": { - "ancillary": { - "name": "realtime_ws-db", - "description": "RUN OSSERVATO" - }, - "time": { - "time_period": 20, - "time_frequency": "H", - "time_rounding": "H" - } - }, - "id_02": { - "ancillary": { - "name": "realtime_nwp-ecmwf-0100", - "description": "ECMWF DETERMINISTICO" - }, - "time": { - "time_period": 1, - "time_frequency": "H", - "time_rounding": "D" - } - }, - "id_03": { - "ancillary": { - "name": "realtime_rf-ecmwf-0100", - "description": "ECMWF PROBABILISTICO" - }, - "time": { - "time_period": 1, - "time_frequency": "H", - "time_rounding": "D" - } - }, - "id_04": { - "ancillary": { - "name": "realtime_nwp-lami-2i", - "description": "LAMI DETERMINISTICO" - }, - "time": { - "time_period": 1, - "time_frequency": "H", - "time_rounding": "D" - } - }, - "id_05": { - "ancillary": { - "name": "realtime_rf-lami-2i", - "description": "LAMI PROBABILISTICO" - }, - "time": { - "time_period": 1, - "time_frequency": "H", - "time_rounding": "D" - } - } - }, - "log": { - "folder_name": "/home/fabio/Desktop/PyCharm_Workspace/hat-ws/log/", - "file_name": "hat_runanalyzer_hmc_log.txt" - } -} diff --git a/apps/RunAnalyzer/hat_runanalyzer_hmc_configuration_realtime.json b/apps/RunAnalyzer/hat_runanalyzer_hmc_configuration_realtime.json deleted file mode 100755 index a5ccd25..0000000 --- a/apps/RunAnalyzer/hat_runanalyzer_hmc_configuration_realtime.json +++ /dev/null @@ -1,137 +0,0 @@ -{ - "flags": { - "update_ancillary": false, - "clean_ancillary": false - }, - "template_tags": { - "run_name": "string_name", - "run_datetime": "%Y%m%d%H%M", - "run_sub_path_time": "%Y/%m/%d/%H", - "analysis_datetime": "%Y%m%d%H%M", - "analysis_sub_path_time": "%Y/%m/%d/" - }, - "template_fields": { - "DateMeteoModel": {"name" : "time_run", "type" : "time_stamp", "value": null}, - "sTimeNow": {"name" : "time_run", "type" : "time_stamp", "value": null}, - "TimeFrom": {"name" : "time_start", "type" : "time_stamp", "value": null}, - "TimeTo": {"name" : "time_end", "type" : "time_stamp", "value": null}, - "DateStart": {"name" : "time_start", "type" : "time_stamp", "value": null}, - "Temp.Resolution": {"name" : "time_frequency", "type" : "time_frequency", "value": "H"}, - "SscenariosNumber": {"name" : "scenario_n", "type" : "int", "value": null} - }, - "time": { - "time_now": null, - "time_period": 36, - "time_frequency": "H", - "time_rounding": "H" - }, - "data": { - "source" : { - "execution": { - "folder_name": "/hydro/run/{run_name}/exec/", - "file_name": 
"hmc.log" - }, - "results": { - "discharge": { - "folder_name": "/hydro/archive/{run_name}/{run_sub_path_time}/timeseries/section_q", - "file_name": "hydrograph_{*}_{*}_{run_datetime}.txt" - }, - "warning": { - "folder_name": "/hydro/archive/{run_name}/{run_sub_path_time}/warnings/", - "file_name": "warnings_sections_{run_datetime}.txt" - } - } - }, - "ancillary": { - "folder_name": "/hydro/tmp/hat-checker/", - "file_name": "run_analyzer_{run_datetime}.workspace" - }, - "destination": { - "folder_name": "/hydro/summary/{analysis_sub_path_time}", - "file_name": "summary_simulations_marche.html" - } - }, - "run": { - "id_01": { - "ancillary": { - "name": "realtime_ws-db", - "description": "OSSERVATO SENSORI A TERRA" - }, - "time": { - "time_period": 3, - "time_frequency": "H", - "time_rounding": "H" - } - }, - "id_02": { - "ancillary": { - "name": "realtime_nwp-ecmwf-0100", - "description": "ECMWF DETERMINISTICO" - }, - "time": { - "time_period": 1, - "time_frequency": "H", - "time_rounding": "D" - } - }, - "id_03": { - "ancillary": { - "name": "realtime_rf-ecmwf-0100", - "description": "ECMWF PROBABILISTICO" - }, - "time": { - "time_period": 1, - "time_frequency": "H", - "time_rounding": "D" - } - }, - "id_04": { - "ancillary": { - "name": "realtime_nwp-lami-2i", - "description": "LAMI DETERMINISTICO" - }, - "time": { - "time_period": 1, - "time_frequency": "H", - "time_rounding": "D" - } - }, - "id_05": { - "ancillary": { - "name": "realtime_rf-lami-2i", - "description": "LAMI PROBABILISTICO" - }, - "time": { - "time_period": 1, - "time_frequency": "H", - "time_rounding": "D" - } - }, - "id_06": { - "ancillary": { - "name": "realtime_ef-lami-2i", - "description": "EXPERT FORECAST PROBABILISTICO" - }, - "time": { - "time_period": 1, - "time_frequency": "H", - "time_rounding": "H" - } - }, - "id_07": { - "ancillary": { - "name": "realtime_radar-mcm", - "description": "OSSERVATO RADAR MCM" - }, - "time": { - "time_period": 1, - "time_frequency": "H", - "time_rounding": "H" - } - } - }, - "log": { - "folder_name": "/hydro/log/", - "file_name": "hat_runanalyzer_hmc_log_realtime.txt" - } -} diff --git a/apps/RunAnalyzer/lib_utils_io.py b/apps/RunAnalyzer/lib_utils_io.py deleted file mode 100755 index 1da23a1..0000000 --- a/apps/RunAnalyzer/lib_utils_io.py +++ /dev/null @@ -1,542 +0,0 @@ -# ------------------------------------------------------------------------------------- -# Libraries -import logging -import tempfile -import os -import time -import json -import pickle -import rasterio -import numpy as np -import xarray as xr -import pandas as pd - -from distutils.util import strtobool - -from rasterio.transform import Affine -from osgeo import gdal, gdalconst - -logging.getLogger('rasterio').setLevel(logging.WARNING) -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to read discharge file -def read_file_discharge(file_name, file_template=None, file_skiprows=4): - - file_tmp = pd.read_table(file_name) - file_data = list(file_tmp.values) - - collections_id = 0 - obj_mod = None - obj_observed = None - obj_header = {} - for row_id, row_step in enumerate(file_data): - row_tmp = row_step[0] - if row_id < file_skiprows: - field_key, field_data = row_tmp.rstrip().split('=') - - if field_key in list(file_template.keys()): - field_name = file_template[field_key]['name'] - field_type = file_template[field_key]['type'] - field_value = 
file_template[field_key]['value'] - else: - field_name = field_key - field_type = None - field_value = None - - if field_type is not None: - if field_type == 'time_stamp': - field_data = pd.Timestamp(field_data) - elif field_type == 'time_frequency': - field_data = pd.Timedelta(field_data) - elif field_type == 'int': - field_data = np.int(field_data) - - if field_value is not None: - field_data = field_value - - obj_header[field_name] = field_data - elif row_id == file_skiprows: - columns_values = row_tmp.rstrip().split(' ') - array_observed = np.array(columns_values, dtype=np.float) - array_observed[array_observed < 0.0] = np.nan - time_n = array_observed.shape[0] - - if obj_observed is None: - obj_observed = np.zeros(shape=(time_n, 1)) - obj_observed[:, :] = np.nan - obj_observed[:, 0] = array_observed - - elif row_id >= file_skiprows: - - columns_values = row_tmp.rstrip().split(' ') - array_values = np.array(columns_values, dtype=np.float) - array_values[array_values < 0.0] = np.nan - time_n = array_values.shape[0] - - if obj_mod is None: - obj_mod = np.zeros(shape=(time_n , obj_header['scenario_n'])) - obj_mod[:, :] = np.nan - obj_mod[:, collections_id] = array_values - - collections_id += 1 - - return obj_header, obj_mod, obj_observed -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to read warning file -def read_file_warning(file_name, file_template=None, file_skiprows=4, tag_data='section_{:}'): - - file_tmp = pd.read_table(file_name) - file_data = list(file_tmp.values) - - file_time_modified = pd.Timestamp(time.ctime(os.path.getmtime(file_name))) - file_time_created = pd.Timestamp(time.ctime(os.path.getctime(file_name))) - - collections_id = 0 - file_header = {} - file_collections = {} - for row_id, row_step in enumerate(file_data): - row_tmp = row_step[0] - if row_id < file_skiprows: - field_key, field_data = row_tmp.rstrip().split('=') - - if field_key in list(file_template.keys()): - field_name = file_template[field_key]['name'] - field_type = file_template[field_key]['type'] - field_value = file_template[field_key]['value'] - else: - field_name = field_key - field_type = None - field_value = None - - if field_type is not None: - if field_type == 'time_stamp': - field_data = pd.Timestamp(field_data) - elif field_type == 'time_frequency': - field_data = pd.Timedelta(field_data) - elif field_type == 'int': - field_data = np.int(field_data) - - if field_value is not None: - field_data = field_value - - file_header[field_name] = field_data - elif row_id == file_skiprows: - columns_name = row_tmp.rstrip().split(' ') - columns_type = [str, str, float, float, bool, float, bool] - elif row_id > file_skiprows: - columns_values = row_tmp.rstrip().split(' ') - file_collections[tag_data.format(collections_id)] = {} - - for column_name, column_value, column_type in zip(columns_name, columns_values, columns_type): - file_collections[tag_data.format(collections_id)][column_name] = {} - - if column_type is float: - column_value = np.float(column_value) - elif column_type is bool: - column_value = strtobool(column_value.lower()) - elif column_type is str: - column_value = column_value.strip() - file_collections[tag_data.format(collections_id)][column_name] = column_value - - collections_id += 1 - - file_header['time_file_created'] = file_time_created - file_header['time_file_modified'] = file_time_modified - - return file_header, file_collections -# 
------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to write an html status file -def write_file_status(time_run, time_exec, time_format='%Y-%m-%d %H:%M', - html_name='run_analyzer.html', - run_default=None, run_execution=None, run_ancillary=None, - run_alert=None, run_alarm=None, time_mode='LOCAL'): - - with open(html_name, "w") as html_handle: - - html_handle.write('\n') - - html_handle.write('\n') - - html_handle.write('
[page and table header markup stripped: title "Run Configuration" / "Regione Marche"; column headings State, TimeStart, TimeEnd, TimeElapsed, Scenarios, Sections; alert/alarm sub-headings OGGI and DOMANI (today/tomorrow) styled "gialla" (yellow alert) and "rossa" (red alarm)]
' + run_description + ' COMPLETED ' + time_str_start + ' ' + time_mode + ' ' + time_str_end + ' ' + time_mode + ' ' + time_str_elapsed + ' ' + run_n + '' + section_n + '--------' + run_description + ' COMPLETED ' + time_str_start + ' ' + time_mode + ' ' + time_str_end + ' ' + time_mode + ' ' + time_str_elapsed + ' ' + run_n + '' + section_n + '' + section_n_alert + '/' + section_n) + html_handle.write('' + alert_group + '--' + section_n_alert + '/' + section_n) + html_handle.write('' + alert_group + '--' + section_n_alert + '/' + section_n) + html_handle.write('' + alarm_group + '--' + section_n_alert + '/' + section_n) + html_handle.write('' + alarm_group + '--' + run_description + ' RUNNING ... ' + time_str_start + ' ' + time_mode + ' NA NA ' + run_n + '' + section_n + '--------' + run_description + ' RUN NOT AVAILABLE NA NA NA NA NA --------
\n') - - html_handle.write('
Execution Time: ' + - time_exec.strftime(time_format) + ' ' + time_mode + '
\n') - - html_handle.write('
Reference Time: ' + - time_run.strftime(time_format) + ' ' + time_mode + '
\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - html_handle.write('\n') - - for (run_key, run_execution_step), run_ancillary_step, run_alert_step, run_alarm_step in zip( - run_execution.items(), run_ancillary.values(), run_alert.values(), run_alarm.values()): - - html_handle.write('') - - run_description = run_default[run_key]['description'] - - if run_ancillary_step is not None: - run_time_mod_first = run_ancillary_step['time_modified_first'].strftime(time_format) - run_time_mod_last = run_ancillary_step['time_modified_last'].strftime(time_format) - run_section_n_found = str(run_ancillary_step['section_n']) - run_scenario_n = str(run_ancillary_step['scenario_n']) - # run_time_range = str(run_ancillary_step['time_range']) - - if run_alert_step or run_alarm_step: - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - - if run_alert_step: - - for id, (run_alert_key, run_alert_value) in enumerate(run_alert_step.items()): - - section_list_name = run_alert_value['name'] - section_list_idx = run_alert_value['idx'] - section_list_value = run_alert_value['value'] - section_n_alert = str(section_list_name.__len__()) - - section_name = ','.join(section_list_name) - - # alert today - if id == 0: - if section_list_name: - html_handle.write('') - else: - html_handle.write('') - html_handle.write('') - - # alert tomorrow - if id == 1: - if section_list_name: - html_handle.write('') - else: - html_handle.write('') - html_handle.write('') - html_handle.write('') - - if run_alarm_step: - - for id, (run_alarm_key, run_alarm_value) in enumerate(run_alarm_step.items()): - - section_list_name = run_alarm_value['name'] - section_list_idx = run_alarm_value['idx'] - section_list_value = run_alarm_value['value'] - section_n_alarm = str(section_list_name.__len__()) - - section_name = ','.join(section_list_name) - - # alert today - if id == 0: - if section_list_name: - html_handle.write('') - else: - html_handle.write('') - html_handle.write('') - - # alert tomorrow - if id == 1: - if section_list_name: - html_handle.write('') - else: - html_handle.write('') - html_handle.write('') - - if (not run_alert_step) and (not run_alarm_step): - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - - else: - - if run_execution_step and (run_ancillary_step is None): - - run_time_mod_first = run_execution_step['time_modified_first'].strftime(time_format) - run_time_mod_last = run_execution_step['time_modified_last'].strftime(time_format) - run_scenario_n = str(run_execution_step['scenario_n']) - - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - 
html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - - elif (run_execution_step is None) and (run_ancillary_step is None): - - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - html_handle.write('') - - html_handle.write('') - - #### - - html_handle.write('') - html_handle.write('') - -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to create a tmp name -def create_filename_tmp(prefix='tmp_', suffix='.tiff', folder=None): - - if folder is None: - folder = '/tmp' - - with tempfile.NamedTemporaryFile(dir=folder, prefix=prefix, suffix=suffix, delete=False) as tmp: - temp_file_name = tmp.name - return temp_file_name -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to write file tiff -def write_file_tif(file_name, file_data, file_wide, file_high, file_geotrans, file_proj, - file_metadata=None, - file_format=gdalconst.GDT_Float32): - - if not isinstance(file_data, list): - file_data = [file_data] - - if file_metadata is None: - file_metadata = {'description_field': 'data'} - if not isinstance(file_metadata, list): - file_metadata = [file_metadata] * file_data.__len__() - - if isinstance(file_geotrans, Affine): - file_geotrans = file_geotrans.to_gdal() - - file_n = file_data.__len__() - dset_handle = gdal.GetDriverByName('GTiff').Create(file_name, file_wide, file_high, file_n, file_format, - options=['COMPRESS=DEFLATE']) - dset_handle.SetGeoTransform(file_geotrans) - dset_handle.SetProjection(file_proj) - - for file_id, (file_data_step, file_metadata_step) in enumerate(zip(file_data, file_metadata)): - dset_handle.GetRasterBand(file_id + 1).WriteArray(file_data_step) - dset_handle.GetRasterBand(file_id + 1).SetMetadata(file_metadata_step) - del dset_handle -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to read file tif -def read_file_tif(file_name, file_bands=1): - - - file_handle = rasterio.open(file_name) - file_proj = file_handle.crs.wkt - file_geotrans = file_handle.transform - - file_tags = file_handle.tags() - file_bands = file_handle.count - file_metadata = file_handle.profile - - if file_bands == 1: - file_data = file_handle.read(1) - elif file_bands > 1: - file_data = [] - for band_id in range(0, file_bands): - file_data_tmp = file_handle.read(band_id + 1) - file_data.append(file_data_tmp) - else: - logging.error(' ===> File multi-band are not supported') - raise NotImplementedError('File multi-band not implemented yet') - - return file_data, file_proj, file_geotrans -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to read file json -def 
read_file_json(file_name): - env_ws = {} - for env_item, env_value in os.environ.items(): - env_ws[env_item] = env_value - - with open(file_name, "r") as file_handle: - json_block = [] - for file_row in file_handle: - - for env_key, env_value in env_ws.items(): - env_tag = '$' + env_key - if env_tag in file_row: - env_value = env_value.strip("'\\'") - file_row = file_row.replace(env_tag, env_value) - file_row = file_row.replace('//', '/') - - # Add the line to our JSON block - json_block.append(file_row) - - # Check whether we closed our JSON block - if file_row.startswith('}'): - # Do something with the JSON dictionary - json_dict = json.loads(''.join(json_block)) - # Start a new block - json_block = [] - - return json_dict - -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to create a data array -def create_darray_3d(data, time, geo_x, geo_y, geo_1d=True, - coord_name_x='west_east', coord_name_y='south_north', coord_name_time='time', - dim_name_x='west_east', dim_name_y='south_north', dim_name_time='time', - dims_order=None): - - if dims_order is None: - dims_order = [dim_name_y, dim_name_x, dim_name_time] - - if geo_1d: - if geo_x.shape.__len__() == 2: - geo_x = geo_x[0, :] - if geo_y.shape.__len__() == 2: - geo_y = geo_y[:, 0] - - data_da = xr.DataArray(data, - dims=dims_order, - coords={coord_name_time: (dim_name_time, time), - coord_name_x: (dim_name_x, geo_x), - coord_name_y: (dim_name_y, geo_y)}) - else: - logging.error(' ===> Longitude and Latitude must be 1d') - raise IOError('Variable shape is not valid') - - return data_da -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to create a data array -def create_darray_2d(data, geo_x, geo_y, geo_1d=True, name='geo', - coord_name_x='west_east', coord_name_y='south_north', - dim_name_x='west_east', dim_name_y='south_north', - dims_order=None): - - if dims_order is None: - dims_order = [dim_name_y, dim_name_x] - - if geo_1d: - if geo_x.shape.__len__() == 2: - geo_x = geo_x[0, :] - if geo_y.shape.__len__() == 2: - geo_y = geo_y[:, 0] - - data_da = xr.DataArray(data, - dims=dims_order, - coords={coord_name_x: (dim_name_x, geo_x), - coord_name_y: (dim_name_y, geo_y)}, - name=name) - else: - logging.error(' ===> Longitude and Latitude must be 1d') - raise IOError('Variable shape is not valid') - - return data_da -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to read data obj -def read_obj(filename): - if os.path.exists(filename): - data = pickle.load(open(filename, "rb")) - else: - data = None - return data -# ------------------------------------------------------------------------------------- - - -# ------------------------------------------------------------------------------------- -# Method to write data obj -def write_obj(filename, data): - if os.path.exists(filename): - os.remove(filename) - with open(filename, 'wb') as handle: - pickle.dump(data, handle, protocol=pickle.HIGHEST_PROTOCOL) -# ------------------------------------------------------------------------------------- diff --git a/apps/profiling.sh b/apps/profiling.sh deleted file mode 100755 index e69de29..0000000 diff --git 
a/bin/local/hat_datamanager_hmc_nrt_local_profiling.sh b/bin/local/hat_datamanager_hmc_nrt_local_profiling.sh
index 5f6669f..64abae9 100755
--- a/bin/local/hat_datamanager_hmc_nrt_local_profiling.sh
+++ b/bin/local/hat_datamanager_hmc_nrt_local_profiling.sh
@@ -13,8 +13,8 @@ fp_env_libs='virtualenv_python3'
 #-----------------------------------------------------------------------------------------
 # Get file information
-script_file='/home/fabio/Documents/Work_Area/Code_Development/Workspace/PyCharm_Workspace/hat-master/apps/HAT_DataManager_HMC_NRT.py'
-setting_file='/home/fabio/Documents/Work_Area/Code_Development/Workspace/PyCharm_Workspace/hat-master/apps/hat_datamanager_hmc_nrt_configuration_example_generic.json'
+script_file='/home/fabio/Documents/Work_Area/Code_Development/Workspace/PyCharm_Workspace/hat-master/apps/HAT_RunOrganizer_HMC_NRT.py'
+setting_file='/home/fabio/Documents/Work_Area/Code_Development/Workspace/PyCharm_Workspace/hat-master/apps/hat_runorganizer_hmc_nrt_configuration_example_generic.json'
 script_folder='/home/fabio/Documents/Work_Area/Code_Development/Workspace/PyCharm_Workspace/hat-master/'
 
 # Get information (-u to get gmt time)
diff --git a/bin/local/hat_datapublisher_hmc_nrt_local_profiling.sh b/bin/local/hat_datapublisher_hmc_nrt_local_profiling.sh
index 6cd7028..4e95e82 100755
--- a/bin/local/hat_datapublisher_hmc_nrt_local_profiling.sh
+++ b/bin/local/hat_datapublisher_hmc_nrt_local_profiling.sh
@@ -2,7 +2,7 @@
 #-----------------------------------------------------------------------------------------
 # Script information
-script_name='HAT - DataPublisher HMC NRT - LOCAL'
+script_name='HAT - Analyzer_Datasets HMC NRT - LOCAL'
 script_version="1.5.0"
 script_date='2019/03/04'
 
@@ -13,8 +13,8 @@ fp_env_libs='virtualenv_python3'
 #-----------------------------------------------------------------------------------------
 # Get file information
-script_file='/home/fabio/Documents/Work_Area/Code_Development/Workspace/PyCharm_Workspace/hat-master/apps//HAT_DataPublisher_HMC_NRT.py'
-settings_file='/home/fabio/Documents/Work_Area/Code_Development/Workspace/PyCharm_Workspace/hat-master/apps/hat_datapublisher_hmc_nrt_configuration_example_generic.json'
+script_file='/home/fabio/Documents/Work_Area/Code_Development/Workspace/PyCharm_Workspace/hat-master/apps//HAT_RunPublisher_HMC_NRT.py'
+settings_file='/home/fabio/Documents/Work_Area/Code_Development/Workspace/PyCharm_Workspace/hat-master/apps/hat_runpublisher_hmc_nrt_configuration_example_generic.json'
 script_folder='/home/fabio/Documents/Work_Area/Code_Development/Workspace/PyCharm_Workspace/hat-master/'
 stats_folder='/home/fabio/Desktop/PyCharm_Workspace/hat-master/bin/local/'
 
diff --git a/bin/local/profile.svg b/bin/local/profile.svg
deleted file mode 100755
index 3b41f27..0000000
--- a/bin/local/profile.svg
+++ /dev/null
@@ -1,4361 +0,0 @@
[removed: 4361 lines of a Graphviz call-graph SVG from a profiling run of the publisher application; surviving text labels are module:line:function nodes (e.g. HAT_DataPublisher_HMC_NRT:67:main, figure:2038:savefig, backend_agg:382:draw) with self/cumulative percentages and call counts; raw SVG markup omitted]
diff --git a/bin/local/stats.binary b/bin/local/stats.binary
deleted file mode 100755
index dd38f60..0000000
Binary files a/bin/local/stats.binary and /dev/null differ
diff --git a/bin/server/hat_datamanager_hmc_nrt_history.sh b/bin/server/hat_datamanager_hmc_nrt_history.sh
index e2aec9f..dd5e53d 100755
--- a/bin/server/hat_datamanager_hmc_nrt_history.sh
+++ b/bin/server/hat_datamanager_hmc_nrt_history.sh
@@ -13,7 +13,7 @@ fp_env_libs='fp_env_python'
 
 #-----------------------------------------------------------------------------------------
 # Get file information
-script_file='/home/dpc-marche/library/hat-master/apps/HAT_DataManager_HMC_NRT.py'
+script_file='/home/dpc-marche/library/hat-master/apps/HAT_RunOrganizer_HMC_NRT.py'
 setting_file='/home/dpc-marche/library/hat-master/apps/hat_datamanager_hmc_nrt_configuration_server_history.json'
 
 script_folder='/home/dpc-marche/library/hat-master/'
diff --git a/bin/server/hat_datamanager_hmc_nrt_realtime.sh b/bin/server/hat_datamanager_hmc_nrt_realtime.sh
index 5f4648f..5d4790d 100755
--- a/bin/server/hat_datamanager_hmc_nrt_realtime.sh
+++ b/bin/server/hat_datamanager_hmc_nrt_realtime.sh
@@ -13,7 +13,7 @@ fp_env_libs='fp_env_python'
 
 #-----------------------------------------------------------------------------------------
 # Get file information
-script_file='/home/dpc-marche/library/hat-master/apps/HAT_DataManager_HMC_NRT.py'
+script_file='/home/dpc-marche/library/hat-master/apps/HAT_RunOrganizer_HMC_NRT.py'
 setting_file='/home/dpc-marche/library/hat-master/apps/hat_datamanager_hmc_nrt_configuration_server_realtime.json'
 
 script_folder='/home/dpc-marche/library/hat-master/'
diff --git a/bin/server/hat_datapublisher_hmc_nrt_history.sh b/bin/server/hat_datapublisher_hmc_nrt_history.sh
index b7b6129..5bf80bb 100755
--- a/bin/server/hat_datapublisher_hmc_nrt_history.sh
+++ b/bin/server/hat_datapublisher_hmc_nrt_history.sh
@@ -2,7 +2,7 @@
 
 #-----------------------------------------------------------------------------------------
 # Script information
-script_name='HAT - DataPublisher HMC NRT - HISTORY'
+script_name='HAT - Analyzer_Datasets HMC NRT - HISTORY'
 
 script_version="1.5.0"
 script_date='2019/03/04'
@@ -13,7 +13,7 @@ fp_env_libs='fp_env_python'
 
 #-----------------------------------------------------------------------------------------
 # Get file information
-script_file='/home/dpc-marche/library/hat-master/apps/HAT_DataPublisher_HMC_NRT.py'
+script_file='/home/dpc-marche/library/hat-master/apps/HAT_RunPublisher_HMC_NRT.py'
 setting_file='/home/dpc-marche/library/hat-master/apps/hat_datapublisher_hmc_nrt_configuration_server_history.json'
 
 script_folder='/home/dpc-marche/library/hat-master/'
diff --git a/bin/server/hat_datapublisher_hmc_nrt_realtime.sh b/bin/server/hat_datapublisher_hmc_nrt_realtime.sh
index a72cab0..35a01a0 100755
--- a/bin/server/hat_datapublisher_hmc_nrt_realtime.sh
+++ b/bin/server/hat_datapublisher_hmc_nrt_realtime.sh
@@ -2,7 +2,7 @@
 
 #-----------------------------------------------------------------------------------------
 # Script information
-script_name='HAT - DataPublisher HMC NRT - REALTIME'
+script_name='HAT - Analyzer_Datasets HMC NRT - REALTIME'
 
 script_version="1.5.0"
 script_date='2019/03/04'
@@ -13,7 +13,7 @@ fp_env_libs='fp_env_python'
 
 #-----------------------------------------------------------------------------------------
 # Get file information
-script_file='/home/dpc-marche/library/hat-master/apps/HAT_DataPublisher_HMC_NRT.py'
+script_file='/home/dpc-marche/library/hat-master/apps/HAT_RunPublisher_HMC_NRT.py'
 setting_file='/home/dpc-marche/library/hat-master/apps/hat_datapublisher_hmc_nrt_configuration_server_realtime.json'
 
 script_folder='/home/dpc-marche/library/hat-master/'
diff --git a/bin/server/hat_datarunner_hmc_nrt_history.sh b/bin/server/hat_datarunner_hmc_nrt_history.sh
index 8817c29..82ee89e 100755
--- a/bin/server/hat_datarunner_hmc_nrt_history.sh
+++ b/bin/server/hat_datarunner_hmc_nrt_history.sh
@@ -13,9 +13,9 @@ fp_env_libs='fp_env_python'
 
 #-----------------------------------------------------------------------------------------
 # Get file information
-script_file_manager='/home/dpc-marche/library/hat-master/apps/HAT_DataManager_HMC_NRT.py'
+script_file_manager='/home/dpc-marche/library/hat-master/apps/HAT_RunOrganizer_HMC_NRT.py'
 setting_file_manager='/home/dpc-marche/library/hat-master/apps/hat_datamanager_hmc_nrt_configuration_server_history.json'
-script_file_publisher='/home/dpc-marche/library/hat-master/apps/HAT_DataPublisher_HMC_NRT.py'
+script_file_publisher='/home/dpc-marche/library/hat-master/apps/HAT_RunPublisher_HMC_NRT.py'
 setting_file_publisher='/home/dpc-marche/library/hat-master/apps/hat_datapublisher_hmc_nrt_configuration_server_history.json'
 
 script_folder='/home/dpc-marche/library/hat-master/'
diff --git a/bin/server/hat_datarunner_hmc_nrt_realtime.sh b/bin/server/hat_datarunner_hmc_nrt_realtime.sh
index c70c227..0e56c80 100755
--- a/bin/server/hat_datarunner_hmc_nrt_realtime.sh
+++ b/bin/server/hat_datarunner_hmc_nrt_realtime.sh
@@ -13,9 +13,9 @@ fp_env_libs='fp_env_python'
 
 #-----------------------------------------------------------------------------------------
 # Get file information
-script_file_manager='/home/dpc-marche/library/hat-master/apps/HAT_DataManager_HMC_NRT.py'
+script_file_manager='/home/dpc-marche/library/hat-master/apps/HAT_RunOrganizer_HMC_NRT.py'
 setting_file_manager='/home/dpc-marche/library/hat-master/apps/hat_datamanager_hmc_nrt_configuration_server_realtime.json'
-script_file_publisher='/home/dpc-marche/library/hat-master/apps/HAT_DataPublisher_HMC_NRT.py'
+script_file_publisher='/home/dpc-marche/library/hat-master/apps/HAT_RunPublisher_HMC_NRT.py'
 setting_file_publisher='/home/dpc-marche/library/hat-master/apps/hat_datapublisher_hmc_nrt_configuration_server_realtime.json'
 
 script_folder='/home/dpc-marche/library/hat-master/'
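The server launchers above change only the application filenames: each script still resolves its script_file*/setting_file* pair and runs the app with a JSON configuration. Below is a minimal Python sketch of that launch sequence, assuming the renamed apps keep the -settings_file/-time command-line interface and using the realtime runner paths from the diff above; the run_time value is only an example.

import subprocess

# Paths as set in bin/server/hat_datarunner_hmc_nrt_realtime.sh after the rename
script_file_manager = '/home/dpc-marche/library/hat-master/apps/HAT_RunOrganizer_HMC_NRT.py'
setting_file_manager = '/home/dpc-marche/library/hat-master/apps/hat_datamanager_hmc_nrt_configuration_server_realtime.json'
script_file_publisher = '/home/dpc-marche/library/hat-master/apps/HAT_RunPublisher_HMC_NRT.py'
setting_file_publisher = '/home/dpc-marche/library/hat-master/apps/hat_datapublisher_hmc_nrt_configuration_server_realtime.json'

run_time = '2021-02-22 12:00'  # example time step in "YYYY-MM-DD HH:MM" form

# Run the organizer first, then the publisher, mirroring the runner script order
for script_file, setting_file in [(script_file_manager, setting_file_manager),
                                  (script_file_publisher, setting_file_publisher)]:
    subprocess.run(['python3', script_file,
                    '-settings_file', setting_file,
                    '-time', run_time],
                   check=True)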
[HTML report residue: the markup of this hunk was lost during extraction. Recoverable content: a "Run Configuration" table for "Regione Marche" with columns State, TimeStart, TimeEnd, Scenarios and Sections; rows marked OGGI or DOMANI with "gialla" or "rossa" alert levels; and fragments of removed html_handle.write(...) lines that build per-run rows from run_description (with status COMPLETED, RUNNING ..., or RUN NOT AVAILABLE), run_time_mod_first and run_time_mod_last with time_mode, run_scenario_n, run_section_n_found, and the section_n_alert / section_n_alarm counts per section_name.]
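As a rough, self-contained illustration of what those html_handle.write(...) fragments compose (the tag layout, output file name and sample values below are assumptions; only the variable names come from the hunk itself), a completed-run row could be emitted like this:

# Illustrative sketch only: the original tag layout was lost from the hunk above.
# Variable names follow the fragments visible in the diff; values are made up.
run_description = 'hmc_nrt_deterministic'
run_time_mod_first, run_time_mod_last, time_mode = '2021-02-22 12:00', '2021-02-23 12:00', 'GMT'
run_scenario_n, run_section_n_found, section_n_alert = '1', '30', '2'

with open('run_analyzer_report.html', 'w') as html_handle:
    html_handle.write('<tr>')
    html_handle.write('<td>' + run_description + ' COMPLETED</td>')
    html_handle.write('<td>' + str(run_time_mod_first) + ' ' + time_mode + '</td>')
    html_handle.write('<td>' + str(run_time_mod_last) + ' ' + time_mode + '</td>')
    html_handle.write('<td>' + run_scenario_n + '</td>')
    html_handle.write('<td>' + run_section_n_found + '</td>')
    html_handle.write('<td>' + section_n_alert + '/' + run_section_n_found + '</td>')
    html_handle.write('</tr>\n')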