#
#
# build_raw_chrono_graphs()
#
# Builds a chrono raw graph node.
# Note, each report field becomes its own raw graph node, the subroutine builds all
# graph fields of the active base query header at once.
#
# Overview of the flow (as implemented below):
#   1. Build/fetch the root table info and total row count via get_raw_graph_table_info().
#   2. If there are rows: set sums/totals in the query header, find the single
#      non-aggregating main field, and determine chrono_start / chrono_end / chrono_type.
#   3. Populate chrono_query_data (one subnode per x-axis variable) with
#      build_chrono_data() or, for dense hour/minute/second ranges wider than the
#      graph, build_special_chrono_data().
#   4. For each column of the chrono graph query header, write a "v.raw_graph"
#      node (variables, y_data, cloned x_data) and save it under
#      "profiles_cache.<profile>.raw_graphs.<graph_id>".
#
# Returns: total_rows (int) — the number of rows in the root table; 0 means no
#          graphs were built.
#
# Side effects: creates/loads/unloads ssql tables, writes global "v." nodes,
#          saves raw_graph nodes to the profile cache, and registers created
#          tables and raw graphs in collected_whitelist_objects.
#

include "templates.statistics.util.get_table_exists";
include "templates.util.date_time.date_time";
include "templates.statistics.raw_graphs.raw_graphs_table_util";
include "templates.statistics.raw_graphs.raw_graphs_utilities";
include "templates.statistics.raw_graphs.build_chrono_data";
include "templates.statistics.raw_graphs.build_special_chrono_data";

subroutine(build_raw_chrono_graphs(
  string session_id,                                  # active session identifier, forwarded to helper subroutines
  string profile_name,                                # profile whose cache receives the raw graphs
  node query_fields,
  node report_job,                                    # read for date_filter, report_state (database dates, sync flag)
  node report_element,                                # read for per-element date filter info
  string report_element_type,
  node base_query_header,                             # header whose graph fields are built; sums/totals are set on it
  node used_database_fields,
  string alpha_root_table_id,
  string root_table_id,                               # ssql table holding the (already filtered) rows to graph
  string combined_filter_expression,
  bool is_report_field_expression,
  bool is_table_filter_expression,
  bool is_sort_by_before_expression_evaluation,
  string sort_by_before_expression_evaluation,
  string sort_direction_before_expression_evaluation,
  bool use_overview_for_totals,
  string main_field_category,                         # "date_time" | "day_of_week" | "hour_of_day" (selects the chrono handling branch below)
  string graph_type,
  int y_height,                                       # pixel height of the y axis, passed to set_raw_graph_y_data()
  int x_length,                                       # pixel length of the x axis; may be ENLARGED below for special chrono graphs
  bool show_percent_on_y_axis,
  bool sort_all_descending,                           # if true the chrono_query_data is re-sorted descending by the main field
  int first_weekday,
  int number_of_days,
  node progress_substep_info,                         # progress reporting handle, incremented once after x_data_info creation
  node collected_whitelist_objects),                  # in/out: registry of created tables and raw graphs
(
  debug_message("\n\n#### build_raw_chrono_graphs() START \n\n");
  # debug_message("\n\n" . node_as_string(base_query_header) . "\n\n");
  # debug_message("#### the report element:\n" . node_as_string(report_element) . "\n\n");

  # ToDo - this subroutine could be replaced with get_raw_graph_table_info()
  #
  # Local helper: returns the id of a copy of root_table_id sorted ascending by
  # the main field, creating (and whitelisting) the sorted table on first use.
  subroutine(get_sorted_table_id_and_table_in_build_raw_chrono_graphs(
    string profile_name,
    string root_table_id,
    string main_field_report_field_name,
    string main_field_table_field_name,
    node collected_whitelist_objects,
    int first_weekday),
  (
    string sorted_table_id = get_sorted_table_id(root_table_id, main_field_report_field_name, "ascending", first_weekday);
    # Only build the sorted table if it does not already exist for this profile.
    if (!get_table_exists(profile_name, sorted_table_id, collected_whitelist_objects)) then (
      string qs = "CREATE TABLE " . sorted_table_id . " ";
      qs .= "SELECT * ";
      qs .= "FROM " . root_table_id . " ";
      qs .= "ORDER BY " . main_field_table_field_name;
      ssql_query(qs);

      # Add new table to collected_whitelist_objects
      @collected_whitelist_objects{"tables"}{sorted_table_id} = true;
    );

    # Return sorted_table_id
    sorted_table_id;
  ));

  # Get table info, respectively a root table, for the total rows.
  # Note, we don't use xref tables for chronological graphs
  # because we require all rows of the table and it is yet
  # not clear how this will work with xref tables.
  bool is_xref_table = false;
  string xref_source_table_id = "";
  node raw_graph_table_info = get_raw_graph_table_info(
    session_id,
    profile_name,
    query_fields,
    report_element_type,
    base_query_header,
    used_database_fields,
    is_report_field_expression,
    is_table_filter_expression,
    is_sort_by_before_expression_evaluation,
    sort_by_before_expression_evaluation,
    sort_direction_before_expression_evaluation,
    use_overview_for_totals,
    root_table_id,
    combined_filter_expression,
    "", # sort_by
    "", # sort_direction,
    0, # max_variables,
    first_weekday,
    number_of_days,
    true, # allow_xref_table  -- NOTE(review): true contradicts the comment above
          # saying xref tables are not used for chrono graphs; is_xref_table is
          # hard-coded false below regardless. Confirm this is intentional.
    true, # get_total_rows
    progress_substep_info,
    collected_whitelist_objects);

  int total_rows = @raw_graph_table_info{"total_rows"};

  # Nothing to graph when the root table is empty; we fall through and just
  # return total_rows (0).
  if (total_rows > 0) then (

    string database_last_modification_time = @(report_job . ".report_state.database.last_modification_time");
    # int first_weekday = @report_job{"first_weekday"};

    #
    #
    # Set total in query header
    #
    #
    set_sum_and_total_in_query_header(
      session_id,
      profile_name,
      query_fields,
      database_last_modification_time,
      alpha_root_table_id,
      root_table_id,
      is_xref_table,
      xref_source_table_id,
      base_query_header,
      combined_filter_expression,
      is_table_filter_expression,
      use_overview_for_totals,
      number_of_days,
      collected_whitelist_objects
    );

    #
    #
    #
    # Get the main field data (the single non-aggregating date_time field in the base_query_header)
    #
    #
    #
    node main_field_info = get_raw_graph_chrono_main_field_info(base_query_header);
    string main_field_report_field_name = @main_field_info{"report_field_name"};
    string main_field_report_field_type = @main_field_info{"type"};
    string main_field_table_field_name = @main_field_info{"table_field_name"};
    # string main_field_display_format_type = @main_field_info{"display_format_type"};
    int main_field_ssql_column_number = @main_field_info{"ssql_column_number"};
    # debug_message("\n" . node_as_string(main_field_info) . "\n");

    #
    #
    #
    #
    #
    # Get chrono_start, chrono_end, the chrono_type and the working_table
    # chrono_type is: year | month | day | hour | minute | second | day_of_week | hour_of_day
    #
    # If main field category is date_time then chrono_start and chrono_end
    # is a Salang date, i.e.: i.e. 12/Feb/2007 __:__:__. In this case we get
    # chrono_start and chrono_end from the date_filter_info.
    #
    # if main field category is day_of_week or hour_of_day then chrono_start and chrono_end
    # is an integer and we have to get chrono_start and chrono_end from a sorted table.
    #
    #
    string chrono_start;
    string chrono_end;
    string chrono_type;
    string sorted_table_id;
    table working_table;
    if (main_field_category eq "date_time") then (
      #
      # Load the working_table
      #
      working_table = load_table(root_table_id);

      #
      # Get chrono_type
      #
      # The granularity (year/month/.../second) is derived from the first row's
      # date_time value; presumably all rows share the same format — confirm.
      string date_time_sample_value = table_get_cell_string_value(working_table, 0, main_field_ssql_column_number);
      debug_message("#### date_time_sample_value: " . date_time_sample_value . "\n");
      chrono_type = get_date_time_granularity(date_time_sample_value);
      debug_message("#### chrono_type: " . chrono_type . "\n");

      #
      #
      # Get date filter info
      #
      # Precedence: report-element filter (when both flags agree) -> report-level
      # filter -> global filter.
      #
      node report_job_date_filter = report_job{"date_filter"};
      node date_filter_info;
      string start_date;
      string end_date;

      # KHP 11/July/2012 - is_report_element_date_filter=true doesn't mean that every report element
      # has a date filter, make sure that the active report element actually has a date filter!
      if (@report_job_date_filter{"is_report_element_date_filter"} and @report_element{"is_date_filter"}) then (
        # debug_message("#### Using date_filter_info from report_element. \n");
        date_filter_info = report_element{"date_filter_info"};
      )
      else if (@report_job_date_filter{"is_report_date_filter"}) then (
        # debug_message("#### Using date_filter_info from report \n");
        date_filter_info = report_job_date_filter{"report_date_filter_info"};
      )
      else (
        # debug_message("#### Using date_filter_info from global_date_filter_info \n");
        date_filter_info = report_job_date_filter{"global_date_filter_info"};
      );
      debug_message("\ndate_filter_info 10:\n" . node_as_string(date_filter_info) . "\n");

      #
      # Get chrono_start and chrono_end
      #
      # We use chrono_start and chrono_end because start_date and end_date
      # may not match the required format for date_time_to_epoc()
      #
      # If there is no valid date filter we use the database date_time values!
      if (@date_filter_info{"is_valid_date_filter_syntax"} and !@date_filter_info{"is_out_of_range"}) then (
        debug_message("#### Get start_date and end_date from date_filter_info \n");
        # There is a valid date, get start_date and end_date
        bool is_relative_date_filter = @date_filter_info{"is_relative_date_filter"};
        string date_type = @date_filter_info{"date_type"};
        bool sync_graph_axis_with_relative_date = @(report_job . ".report_state.sync_graph_axis_with_relative_date");
        debug_message("#### is_relative_date_filter: " . is_relative_date_filter . "\n");
        debug_message("#### date_type: " . date_type . "\n");
        debug_message("#### sync_graph_axis_with_relative_date: " . sync_graph_axis_with_relative_date . "\n");
        if (is_relative_date_filter and sync_graph_axis_with_relative_date) then (
          # Relative date filter
          # We use the date range of the relative date for the graphs x axis, regardless of the actual date range.
          # This is useful when comparing graphs in a report with multiple report elements where i.e. each report
          # element shows a different quarter.
          start_date = @date_filter_info{"earliest_relative_date"};
          end_date = @date_filter_info{"latest_relative_date"};
        )
        else (
          if (chrono_type eq "year" or chrono_type eq "month" or chrono_type eq "day") then (
            #
            #
            # Year, month and day handling
            #
            #
            if (date_type ne "single_date") then (
              #
              # Date range
              #
              start_date = @date_filter_info{"earliest_date"};
              end_date = @date_filter_info{"latest_date"};
            )
            else (
              #
              # Single date
              #
              start_date = @date_filter_info{"single_date"};
              end_date = start_date;
            );
          )
          else (
            #
            #
            # Hour, minute or second handling (for date_range and single_date!)
            #
            #
            # The date filter earliest_date and latest_date do not contain any
            # hour, minute or second value. So we have to get the start_date and end_date
            # from the ssql table.
            #
            # Unload the current working table because we will load the sorted table!
            #
            unload_table(working_table);

            #
            #
            # Get sorted_table_id and table if it doesn't yet exist
            #
            #
            sorted_table_id = get_sorted_table_id_and_table_in_build_raw_chrono_graphs(
              profile_name,
              root_table_id,
              main_field_report_field_name,
              main_field_table_field_name,
              collected_whitelist_objects,
              first_weekday
            );

            #
            # Load the sorted table as working_table
            #
            working_table = load_table(sorted_table_id);

            #
            # Get start_date and end_date from ssql table
            # (first and last row of the ascending-sorted table)
            #
            start_date = table_get_cell_string_value(working_table, 0, main_field_ssql_column_number);
            end_date = table_get_cell_string_value(working_table, (total_rows - 1), main_field_ssql_column_number);
          );
        );
      )
      else (
        # Invalid date in date_filter, use database dates
        debug_message("#### Invalid date in date_filter - get start_date and end_date from database values \n");
        start_date = @(report_job . ".report_state.database.earliest_log_date");
        end_date = @(report_job . ".report_state.database.latest_log_date");
      );
      debug_message("#### start_date: " . start_date . "\n");
      debug_message("#### end_date: " . end_date . "\n");
      # Normalize the raw dates to the chrono_type granularity.
      chrono_start = get_raw_graph_chrono_start_or_end_value(start_date, chrono_type);
      chrono_end = get_raw_graph_chrono_start_or_end_value(end_date, chrono_type);
    )
    else (
      # If day_of_week or hour_of_day
      # chrono_start/chrono_end are integers taken from the first and last row
      # of the ascending-sorted table.
      chrono_type = main_field_category;
      sorted_table_id = get_sorted_table_id_and_table_in_build_raw_chrono_graphs(
        profile_name,
        root_table_id,
        main_field_report_field_name,
        main_field_table_field_name,
        collected_whitelist_objects,
        first_weekday
      );

      #
      # Load the sorted working_table
      #
      working_table = load_table(sorted_table_id);

      #
      # Get chrono_start and chrono_end from ssql table
      #
      chrono_start = table_get_cell_value(working_table, 0, main_field_ssql_column_number);
      chrono_end = table_get_cell_value(working_table, (total_rows - 1), main_field_ssql_column_number);
    );
    debug_message("#### chrono_start: " . chrono_start . "\n");
    debug_message("#### chrono_end: " . chrono_end . "\n");
    debug_message("#### chrono_type: " . chrono_type . "\n");

    #
    #
    # Get number_of_variables_in_chrono_range
    # (how many time buckets lie between chrono_start and chrono_end)
    #
    #
    int number_of_variables_in_chrono_range = get_raw_graph_number_of_variables_in_chrono_range(chrono_start, chrono_end, chrono_type);
    debug_message("#### number_of_variables_in_chrono_range: " . number_of_variables_in_chrono_range . "\n");

    #
    #
    # Set is_special_chrono_graph
    #
    # Note, if we have hours, minutes or seconds where the number_of_variables_in_chrono_range is
    # greater than the x_length (we assume 1 pixel bars), then we use a special time graph
    # method.
    #
    #
    bool is_special_chrono_graph = false;
    if ((number_of_variables_in_chrono_range > x_length) and (
      chrono_type eq "hour" or
      chrono_type eq "minute" or
      chrono_type eq "second")) then (
      is_special_chrono_graph = true;
    );
    debug_message("#### is_special_chrono_graph: " . is_special_chrono_graph . "\n");

    #
    #
    #
    #
    #
    # Create the chronological query data
    #
    #
    #
    #
    #
    #
    # All relevant chrono data are created in subroutines and written
    # to chrono_query_data and every_1st_day_on_graph_lookup
    #
    # KHP 2/Dec/2009 - chrono_query_data are populated in subroutines and
    # then re-used here, so we can't just use new_node() but must
    # keep the global variable style.
    #
    # node chrono_query_data = new_node();
    # node every_1st_day_on_graph_lookup = new_node();
    delete_node("v.temp_chrono_query_data");
    v.temp_chrono_query_data = "";
    node chrono_query_data = "v.temp_chrono_query_data";
    delete_node("v.temp_every_1st_day_on_graph_lookup");
    v.temp_every_1st_day_on_graph_lookup = "";
    node every_1st_day_on_graph_lookup = "v.temp_every_1st_day_on_graph_lookup";

    node chrono_graph_query_header = get_raw_graph_chrono_graph_query_header(base_query_header);

    int number_of_variables_in_graph;
    if (!is_special_chrono_graph) then (
      # Normal case: one graph variable per chrono bucket.
      build_chrono_data(
        chrono_graph_query_header,
        working_table,
        total_rows,
        main_field_report_field_name,
        main_field_report_field_type,
        main_field_ssql_column_number,
        chrono_start,
        chrono_end,
        chrono_type,
        number_of_variables_in_chrono_range,
        x_length,
        chrono_query_data,
        every_1st_day_on_graph_lookup
      );
      number_of_variables_in_graph = num_subnodes(chrono_query_data);
    )
    else (
      # We use the full range of available pixels on the graph!
      # Each x-axis pixel aggregates seconds_per_variable seconds of data
      # (ceiling division, hence the two remainder adjustments below).
      number_of_variables_in_graph = x_length; # NOTE(review): provisional value,
                                               # unconditionally recomputed below.
      int chrono_start_in_epoc = date_time_to_epoc(chrono_start);
      int chrono_end_in_epoc = date_time_to_epoc(chrono_end);
      int chrono_time_span_in_seconds = chrono_end_in_epoc - chrono_start_in_epoc;
      int seconds_per_variable = chrono_time_span_in_seconds / x_length;
      if ((chrono_time_span_in_seconds % x_length) != 0) then (
        seconds_per_variable = seconds_per_variable + 1;
      );
      number_of_variables_in_graph = chrono_time_span_in_seconds / seconds_per_variable;
      if ((chrono_time_span_in_seconds % seconds_per_variable) != 0) then (
        number_of_variables_in_graph = number_of_variables_in_graph + 1;
      );

      # Fix the x_length in case that it is too short
      if (number_of_variables_in_graph > x_length) then (
        x_length = number_of_variables_in_graph;
      );

      build_special_chrono_data(
        chrono_graph_query_header,
        working_table,
        total_rows,
        main_field_report_field_name,
        main_field_report_field_type,
        main_field_ssql_column_number,
        chrono_start_in_epoc,
        chrono_end_in_epoc,
        chrono_type,
        number_of_variables_in_graph,
        seconds_per_variable,
        x_length,
        chrono_query_data
      );
    );

    #
    #
    # Unload the working table
    # (All values have been set in the chrono_query_data node)
    #
    unload_table(working_table);

    debug_message("#### sort_all_descending: " . sort_all_descending . "\n");
    debug_message("#### final chrono_query_data in ASCENDING ORDER:\n" . node_as_string(chrono_query_data) . "\n");

    #
    #
    #
    # Sort the chronological_query_data
    #
    #
    #
    if (sort_all_descending) then (
      string sort_method = "field:" . main_field_report_field_name . ",chronological,descending";
      sort(chrono_query_data, sort_method);
      # debug_message("#### final chrono_query_data in DESCENDING ORDER:\n" . node_as_string(chrono_query_data) . "\n");
    );

    #
    #
    #
    # Create the x_data_info.
    # x data are identical for all graph fields in the base query header,
    # so we create it once and then clone it to each raw graph.
    #
    #
    #
    node x_data_info = get_raw_graph_chrono_x_data_info(
      chrono_query_data,
      is_special_chrono_graph,
      main_field_report_field_name,
      main_field_category,
      chrono_type,
      x_length,
      number_of_variables_in_graph,
      number_of_variables_in_chrono_range,
      every_1st_day_on_graph_lookup
    );

    # PROGRESS_SETTER
    increment_progress_substep(progress_substep_info);

    #
    #
    #
    #
    #
    #
    # Start creating the raw_graphs
    # (one saved raw_graph node per column of the chrono graph query header)
    #
    #
    #
    #
    #
    #
    node column;
    string graph_id;
    string graph_label;
    string report_field_name;
    string display_format_type;
    # float total_sum;
    float _100_divided_by_total;
    node raw_graph;
    node raw_variables;
    int raw_variable_count;
    node chrono_query_data_item;
    float variable_value;
    float max_variable_value;
    node y_data;
    debug_message("\n" . node_as_string(chrono_graph_query_header) . "\n");
    foreach column chrono_graph_query_header (
      graph_id = @column{"graph_id"};
      graph_label = @column{"column_label"};
      report_field_name = @column{"report_field_name"};
      display_format_type = @column{"display_format_type"};
      # total_sum = @column{"sum_value"};
      _100_divided_by_total = @column{"_100_divided_by_total"};
      # debug_message("\n");
      # debug_message("#### build_raw_chrono_graphs() - report_field_name: " . report_field_name . "\n");
      # debug_message("#### build_raw_chrono_graphs() - _100_divided_by_total: " . _100_divided_by_total . "\n");
      # Let's check if the _100_divided_by_total is actually zero or just doesn't display something like "0.00000012" !
      # debug_message("#### build_raw_chrono_graphs() - _100_divided_by_total * 1000000 = " . (_100_divided_by_total * 1000000) . "\n");
      # debug_message("\n");

      #
      # Write all data to "v.raw_graph" which becomes saved in raw_graphs
      #
      delete_node("v.raw_graph");
      v.raw_graph = "";
      raw_graph = "v.raw_graph";
      set_subnode_value(raw_graph, "graph_id", graph_id);
      set_subnode_value(raw_graph, "graph_label", graph_label);
      set_subnode_value(raw_graph, "sort_info", ""); # remains empty, no sort info is required in chrono graphs, sort is visible by date time on the x-axis
      set_subnode_value(raw_graph, "report_field_name", report_field_name);
      set_subnode_value(raw_graph, "graph_type", graph_type);
      set_subnode_value(raw_graph, "is_chrono_graph", true);
      set_subnode_value(raw_graph, "show_percent_on_y_axis", show_percent_on_y_axis);
      set_subnode_value(raw_graph, "sort_all_descending", sort_all_descending);
      set_subnode_value(raw_graph, "last_variable_is_remainder", false);
      set_subnode_value(raw_graph, "number_of_remainder_items", 0);
      set_subnode_value(raw_graph, "show_legend", false);

      #
      # Set default data which are set when the graph image is created
      # (such as image width and height, which we don't know yet)
      #
      set_subnode_value(raw_graph, "image_width", 0);
      set_subnode_value(raw_graph, "image_height", 0);
      set_subnode_value(raw_graph, "variables", "");
      raw_variables = raw_graph{"variables"};

      #
      #
      # Set raw variables
      # (copy this column's value out of every chrono_query_data item and
      # track the maximum for the y-axis scale)
      #
      #
      raw_variable_count = 0;
      max_variable_value = 0.0;
      foreach chrono_query_data_item chrono_query_data (
        variable_value = @chrono_query_data_item{report_field_name};
        set_subnode_value(raw_variables, raw_variable_count, variable_value);
        if (variable_value > max_variable_value) then (
          max_variable_value = variable_value;
        );
        raw_variable_count++;
      );

      #
      #
      # Set raw y_data
      #
      #
      set_subnode_value(raw_graph, "y_data", "");
      y_data = raw_graph{"y_data"};
      set_raw_graph_y_data(
        y_data,
        y_height,
        max_variable_value,
        _100_divided_by_total,
        display_format_type
      );

      #
      #
      # Set raw x_data
      # (shared x_data_info cloned into this graph)
      #
      #
      clone_node(x_data_info, raw_graph . ".x_data");
      # debug_message("\n\n### CHECK NODE VALUE OF RAW_GRAPH:\n" . node_as_string(raw_graph) . "\n");

      #
      #
      # Save the raw graph
      #
      #
      clone_node(raw_graph, "profiles_cache." . profile_name . ".raw_graphs." . graph_id);
      save_node("profiles_cache." . profile_name . ".raw_graphs." . graph_id);

      # Add raw_graph to profiles_cache whitelist
      @collected_whitelist_objects{"raw_graphs"}{graph_id} = true;
    );

    # Clean up the temporary global "v." nodes used for chrono data.
    delete_node(chrono_query_data);
    delete_node(every_1st_day_on_graph_lookup);
  );

  # Return
  total_rows;
));