# --------------------------------
# subroutine report_builder_export
# --------------------------------
# initiated from report_builder

include "templates.statistics.report_elements.util.table.get_sort_type";
include "templates.statistics.report_elements.util.set_filter_expression";
include "templates.shared.util.convert_to_valid_node_name";
include "templates.shared.util.check_webserver_mode";
include "templates.shared.util.get_loganalysisinfo_dir";
include "templates.statistics.report_elements.util.table.aggr_method_is_average";
include "templates.statistics.report_elements.util.table.aggr_method_is_sum";

subroutine(report_builder_export, (

  # export_to_file is true if invoked from the user_agent (result is saved in a file)
  # export_to_file is false if invoked from command_line (result is displayed in output stream)
  bool export_to_file = false;
  if (internal.task_originator eq "user_agent") then (
    export_to_file = true;
  );

  string profile_name = internal.profile_name;
  string profile_path = "profiles." . profile_name;
  string report_name = internal.report.report_name;
  node report_node = profile_path . ".statistics.reports." . report_name;
  string report_label = node_value(subnode_by_name(report_node, "label"));
  debug_message("EXPORT report name: " . report_name . "\n");

  string report_element = internal.export.report_element;

  # if no report_element is defined then export the first report_element
  if (report_element eq "") then (
    report_element = node_name(subnode_by_number(report_node . ".report_elements", 0));
  );
  debug_message("EXPORT report element: " . report_element . "\n");

  node report_element_node = report_node . ".report_elements." . report_element;
  string report_element_type = node_value(subnode_by_name(report_element_node, "type"));
  debug_message("report element type: " . report_element_type . "\n\n");

  # get report element label
  string report_element_label;
  if (subnode_exists(report_element_node, "label")) then (
    report_element_label = node_value(subnode_by_name(report_element_node, "label"));
  )
  else (
    report_element_label = report_label;
  );

  # --------------------
  # set export_total_row
  # --------------------
  bool export_total_row = false;
  if (node_exists("internal.export.total_row")) then (
    export_total_row = node_value("internal.export.total_row");
  );

  # -----------------------------------------------------------------------------------------------------
  # SET PROGRESS FOR REPORT ELEMENT STEP (volatile.report_element_number = 0, +1 for each report element)
  # -----------------------------------------------------------------------------------------------------
  # Required for C++ progress handling only!
  volatile.report_element_number = 0;

  # ----------
  # build html
  # ----------
  if (export_to_file) then (
    templates.shared.doctype;
    '\n';
    '\n';
    '' . lang_stats.export.label . '\n';
    expand(templates.shared.content_type);
    '\n';
    '\n';
    '\n';
    '\n';
    '\n';
    '\n';
    '\n';
    '\n';
    '\n';
    '\n';
    '\n';
    '\n';
    '\n';
    '\n';
  ); # if export_to_file

  # ----------
  # Set filter
  # ----------
  set_filter_expression(report_name, report_element);

  # -----------------------------------------
  # Query the database for the report-element
  # -----------------------------------------
  if (node_exists("volatile.query_result")) then (
    delete_node("volatile.query_result");
  );

  # debug_message("report element node: " . report_element_node . "\n");
  query_db_for_view(report_element_node, "volatile.query_result");

  string query_header = ("volatile.query_result.header");
  string query_data = ("volatile.query_result.data");

  # debug_message("export query header: \n" . node_as_string(query_header) . "\n\n");
  # debug_message("export query data: \n" . node_as_string(query_data) . "\n\n");

  # =================================
  # Handle aggregation_method average
  # and total row sum setup
  # =================================
  # If there is a column where aggregation_method is average then calculate
  # the average now so that the table becomes properly sorted.
  # For the total row we add the sum to the query_header by adding a sum node.
  node column;
  string field_name;
  string data_type;
  bool aggregation_method_is_sum;
  bool aggregation_method_is_average;
  node column_sum_node;
  int count = 0;
  bool handle_aggregation = false;
  string database_fields_path = "profiles." . profile_name . ".database.fields";

  if (node_exists("volatile.aggregation_columns_header")) then (
    delete_node("volatile.aggregation_columns_header");
  );

  foreach column query_header (

    data_type = node_value(subnode_by_name(column, "data_type"));

    if (data_type ne "string") then (

      field_name = node_value(subnode_by_name(column, "field_name"));

      # ----------------------------
      # handle sum (total row setup)
      # ----------------------------
      if (export_total_row) then (
        aggregation_method_is_sum = aggr_method_is_sum(field_name, profile_name);
        if (aggregation_method_is_sum and (data_type ne "unique")) then (
          # prepare node to calculate the sum value
          # in case of missing or invalid data type define float
          if (data_type ne "int" and data_type ne "float") then (
            data_type = "float";
          );
          column . ".sum" = 0;
          column_sum_node = column . ".sum";
          set_node_type(column_sum_node, data_type);
          set_node_value(column_sum_node, 0);
        );
      );

      # ---------------------------------
      # handle aggregation method average
      # ---------------------------------
      aggregation_method_is_average = aggr_method_is_average(field_name, profile_name);
      if (aggregation_method_is_average) then (
        handle_aggregation = true;
        "volatile.aggregation_columns_header." . count . ".field_name" = field_name;
        "volatile.aggregation_columns_header." . count . ".average_denominator_field" = node_value(database_fields_path . "." . field_name . ".average_denominator_field");
        count++;
      );
    );
  );

  # debug_message("export query header after sum setup: \n" . node_as_string(query_header) . "\n\n");

  if (handle_aggregation) then (

    # ------------------------------------
    # Set aggregation values in query_data
    # ------------------------------------
    debug_message("\n\n" . node_as_string("volatile.aggregation_columns_header") . "\n\n");
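    # Illustrative layout of the node dumped above (the field names are hypothetical examples):
    #   volatile.aggregation_columns_header.0.field_name = "avg_session_duration"
    #   volatile.aggregation_columns_header.0.average_denominator_field = "sessions"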
"\n\n"); node item; node column; string field_name; string denominator_field; float field_value; float denominator_value; float average_value; foreach item query_data ( foreach column "volatile.aggregation_columns_header" ( field_name = node_value(subnode_by_name(column, "field_name")); denominator_field = node_value(subnode_by_name(column, "average_denominator_field")); field_value = node_value(subnode_by_name(item, field_name)); denominator_value = node_value(subnode_by_name(item, denominator_field)); average_value = field_value / denominator_value; # debug_message("calculate aggregation --> $field_value / $denominator_value = $average_value \n"); set_node_value(subnode_by_name(item, field_name), average_value); ); ); ); # ----------------------------------------------- # Sort the query as in the report element display # ----------------------------------------------- string sort_field = ""; string sort_direction = ""; string sort_type; string sort_method; if (node_exists(report_element_node . ".sort_by") and (node_value(report_element_node . ".sort_by") ne "")) then ( sort_field = node_value(report_element_node . ".sort_by"); sort_direction = (if node_exists(report_element_node . ".sort_direction") then $(report_element_node . ".sort_direction") else "descending"); sort_type = get_sort_type(profile_name, sort_field); sort_method = "field:" . sort_field . "," . sort_type . "," . sort_direction; sort(query_data, sort_method); ); # -------------- # number of rows # -------------- # Note, due the huge amount a log_detail report doesn't show all records in the query_data node # The number of rows in log data depends of the current set ending_row, if set to 50, the 50 rows # will be in the query data. int num_total_rows = num_subnodes(query_data); int num_export_rows = num_total_rows; if (node_exists("internal.command_line.ending_row") and (node_value("internal.command_line.ending_row") ne "")) then ( int i = node_value("internal.command_line.ending_row"); if (i <= num_total_rows) then ( num_export_rows = i; ); ); # debug_message("EXPORT number of rows query_data: " . num_total_rows); # ------------- # csv_delimiter # ------------- string csv_delimiter = ","; if (node_exists(profile_path . ".statistics.miscellaneous.csv_delimiter")) then ( string dat = node_value(profile_path . ".statistics.miscellaneous.csv_delimiter"); if (dat ne "") then ( csv_delimiter = dat; ); ); int csv_delimiter_int = char_to_ascii(csv_delimiter); string csv_delimiter_in_hex = format(csv_delimiter_int, '%X'); csv_delimiter_in_hex = "%" . csv_delimiter_in_hex; debug_message("#### CSV delimiter: " . csv_delimiter . "\n"); debug_message("#### CSV delimiter in hex: " . csv_delimiter_in_hex . 
"\n"); # ------------------ # end_of_line_marker # ------------------ string end_of_line_marker; if (node_exists("internal.export.end_of_line") and (node_value("internal.export.end_of_line") ne "")) then ( end_of_line_marker = node_value("internal.export.end_of_line"); ) else ( if (_PLATFORM eq "Win32") then ( end_of_line_marker = "\r\n"; ) else ( end_of_line_marker = "\n"; ); ); # --- # CSV # --- node row; node column; string main_field_node_name; string column_label; string column_type; string column_field_name; string column_field_type; string column_display_format_type; string column_value; node column_sum_node; int last_column_number = num_subnodes(query_header) - 1; int column_number; bool contains_delimiter; bool contains_double_quote; string csv_contents = ""; # ---------- # CSV header # ---------- column_number = 0; foreach column query_header ( column_label = node_value(subnode_by_name(column, "header_label")); column_label = capitalize(expand(column_label)); if (column_number != last_column_number) then ( csv_contents .= column_label . csv_delimiter; ) else ( csv_contents .= column_label . end_of_line_marker; ); column_number++; ); # foreach column query_header # -------- # CSV data # -------- # If we don't need to show/calculate the totals row set num_total_rows to num_export_rows if (!export_total_row and (num_export_rows != num_total_rows)) then ( num_total_rows = num_export_rows; ); for (int row_number = 1; row_number <= num_total_rows; row_number++) ( # Get the query row node for this table row row = subnode_by_number(query_data, row_number - 1); column_number = 0; foreach column query_header ( column_type = node_value(subnode_by_name(column, "type")); column_field_name = node_value(subnode_by_name(column, "field_name")); column_field_type = node_value(subnode_by_name(column, "data_type")); column_value = node_value(subnode_by_name(row, column_field_name)); if (row_number <= num_export_rows) then ( # add data to csv # ------------------------ # Decode __HexEsc__ values # ------------------------ if (contains(column_value, "__HexEsc__")) then ( column_value = replace_all(column_value, "__HexEsc__", "%"); ); # ---------------------------------- # Escape delimiter and double quotes # ---------------------------------- # debug_message("#### #### export value before escape: " . column_value . "\n"); contains_delimiter = contains(column_value, csv_delimiter_in_hex); contains_double_quote = contains(column_value, "%22"); if (contains_delimiter or contains_double_quote) then ( # enclose column_value with double quotes! column_value = '"' . column_value . '"'; # debug_message("#### #### escaped export value: " . column_value . "\n"); ); # ---------------- # Write csv string # ---------------- csv_contents .= column_value; if (column_number != last_column_number) then ( csv_contents .= csv_delimiter; ) else ( csv_contents .= end_of_line_marker; ); ); # row_number <= num_export_rows if (export_total_row) then ( # ----------------- # add values to sum # ----------------- if (subnode_exists(column, "sum")) then ( column_sum_node = subnode_by_name(column, "sum"); set_node_value(column_sum_node, node_value(column_sum_node) + column_value); ); ); column_number++; ); # foreach column query_header ); # for rows # debug_message("export query header after sum calculation: \n" . node_as_string(query_header) . "\n\n"); # ------------- # Add total row # ------------- if (export_total_row) then ( int column_number = 0; string total_row = lang_stats.table.total_label . 
    foreach column query_header (
      if (column_number > 0) then (
        if (subnode_exists(column, "sum")) then (
          total_row .= node_value(subnode_by_name(column, "sum")) . csv_delimiter;
        )
        else (
          total_row .= csv_delimiter;
        );
      );
      column_number++;
    );

    # add end of line
    total_row = replace_last(total_row, csv_delimiter, end_of_line_marker);
    csv_contents .= total_row;
  );

  csv_contents = convert_escapes(csv_contents);

  # If charset conversion is enabled, do the conversion now
  if (node_exists("profiles." . profile_name . ".output.convert_export_charset") and node_value("profiles." . profile_name . ".output.convert_export_charset")) then (
    debug_message("Converting CSV export from " . output.convert_export_from_charset . " to " . output.convert_export_to_charset . "\n");
    csv_contents = convert_charset(csv_contents, output.convert_export_from_charset, output.convert_export_to_charset);
    debug_message("Done converting CSV export\n");
  );

  if (export_to_file) then ( # via Web GUI

    # ----------------------
    # Get CSV directory path
    # ----------------------
    string divider = "/";
    string user_node_name = "none";
    bool webserver_mode = check_webserver_mode();
    if (node_exists("volatile.authenticated_user_node_path")) then (
      user_node_name = node_name(volatile.authenticated_user_node_path);
    );
    string csv_directory_path;
    string csv_sub_directory_path = "csv_export" . divider . user_node_name . divider . profile_name;
    if (webserver_mode) then (
      string log_analysis_info_directory = get_loganalysisinfo_dir();
      debug_message("#### log_analysis_info_directory: " . log_analysis_info_directory . "\n");
      csv_directory_path = log_analysis_info_directory . "WebServerRoot" . divider . csv_sub_directory_path;
    )
    else ( # cgi-mode
      csv_directory_path = node_value("preferences.server.temporary_directory_pathname");
      if (!ends_with(csv_directory_path, divider)) then (
        csv_directory_path .= divider;
      );
      csv_directory_path .= csv_sub_directory_path;
    );
    debug_message("#### csv_directory_path: " . csv_directory_path . "\n");

    # -----------------
    # Get csv_file_name
    # -----------------
    # Keep file names to a maximum of 32 characters
    string report_part_of_file_name;
    string csv_filename;
    # int date_time_stamp_in_sec = now();
    # debug_message("#### date_time_stamp_in_sec: " . date_time_stamp_in_sec . "\n");
    if (report_name eq report_element) then (
      # csv_filename = report_element . "_" . date_time_stamp_in_sec . ".csv";
      report_part_of_file_name = report_element;
    )
    else (
      # csv_filename = report_name . "_" . report_element . "_" . date_time_stamp_in_sec . ".csv";
      report_part_of_file_name = report_name . "_" . report_element;
    );
    if (length(report_part_of_file_name) > 24) then (
      report_part_of_file_name = substr(report_part_of_file_name, 0, 24);
    );

    string csv_directory_path_with_filename;
    int count = 1;
    string file_number;
    bool is_unique_file_name = false;
    while (!is_unique_file_name) (
      if (count > 99) then (
        file_number = count;
      )
      else if (count > 9) then (
        file_number = "0" . count;
      )
      else (
        file_number = "00" . count;
      );
      csv_filename = report_part_of_file_name . "_" . file_number . ".csv";
      csv_directory_path_with_filename = csv_directory_path . "/" . csv_filename;
      if (!file_exists(csv_directory_path_with_filename)) then (
        is_unique_file_name = true;
      );
      count++;
    );
    # debug_message("#### csv_filename: " . csv_filename . "\n");

    # ----------
    # Write file
    # ----------
    write_file(csv_directory_path_with_filename, csv_contents);

    # -----------------
    # Set download link
    # -----------------
    string csv_export_path = "csv_export/" . user_node_name . "/" . profile_name . "/" . csv_filename;
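    # Illustrative value (the user, profile and report names here are hypothetical):
    #   csv_export_path = "csv_export/admin/myprofile/overview_report_001.csv"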
profile_name . "/" . csv_filename; string download_href; if (webserver_mode) then ( # Note, this includes cgi-mode in case that # preferences.server.temporary_directory_url and # preferences.server.temporary_directory_pathname is empty! # This is the new cgi-mode! download_href = 'href=' . fileref(csv_export_path); ) else ( # cgi-mode (old, see above for new cgi-mode) string temporary_directory_url = node_value("preferences.server.temporary_directory_url"); if (!ends_with(temporary_directory_url, "/")) then ( temporary_directory_url .= "/"; ); download_href = 'href="' . temporary_directory_url . csv_export_path . '"'; ); debug_message("#### download_href: " . download_href . "\n"); '
\n'; '

' . lang_stats.export.data_exported_info . '

\n'; '

' . lang_stats.export.download_button . '

\n'; '
\n'; '
    '\n';
    '\n';
    '\n';
  ) # if export_to_file via Web GUI
  else (

    # --------------------------
    # Handle command line export
    # --------------------------
    string output_filename = node_value("internal.export.output_filename");

    if (output_filename eq "") then (
      # ---------------------
      # Display output_stream
      # ---------------------
      # Save CSV data in a node; the value of the node becomes the output_stream in report_setup.cvf
      string csv_data_node_name = node_value("internal.export.csv_data_node_name");
      ("IPC." . csv_data_node_name) = csv_contents;
      save_node("IPC." . csv_data_node_name);
    )
    else (
      # -----------------------------
      # Write csv data to output file
      # -----------------------------
      write_file(output_filename, csv_contents);
    );
  );
));
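# Summary of the export destinations handled above:
#   - Web GUI export: the CSV is written under .../csv_export/<user>/<profile>/ (below WebServerRoot
#     in web-server mode, below the temporary directory in the old cgi-mode) and an HTML page with a
#     download link is emitted.
#   - Command line without internal.export.output_filename: the CSV is stored in the
#     IPC.<csv_data_node_name> node, which report_setup.cvf streams to the output.
#   - Command line with internal.export.output_filename: the CSV is written directly to that file.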