# Copyright (c) 2010 Flowerfire, Inc. All Rights Reserved.

win2_kperfmon = {

  plugin_version = "3.0.2"
  plugin_time = "2"

  # ????-??-?? - ??? - 1.0 - Initial implementation
  # 2009-05-27 - 2.0 - GMF - Added support for parsing the header line in the plug-in, and support for yyyy-mm-dd date format
  # 2009-07-10 - 2.0.1 - GMF - Fixed bug with time extraction from dt
  # 2011-07-25 - 2.0.2 - MSG - Edited info lines.
  # 2012-05-19 - 3.0 - GMF - Changed to using full field names; changed to computing all numerical fields in filter_initialization.
  # 2013-06-13 - 3.0.1 - GMF - Changed autodetect expression to look for just PDH-CSV rather than assuming anything about field values
  # 2013-06-13 - 3.0.2 - GMF - Forced fields to start with letters

  info.1.manufacturer = "Microsoft"
  info.1.device = "Windows Performance Monitor"
  info.1.version.1 = ""

  # The name of the log format
  log.format.format_label = "Windows Performance Monitor"
  log.miscellaneous.log_data_type = "generic"
  log.miscellaneous.log_format_type = "other"

  # The log is in this format if any of the first ten lines match this regular expression
#  log.format.autodetect_regular_expression = "^\"[0-9]+/[0-9]+/[0-9]+ [0-9]+:[0-9]+:[0-9.]+\",\"[0-9.]*\",\"[0-9.]*\",\"[0-9.]*\",\"[0-9]*\",\"[0-9]*\",\"[0-9]*\","
  log.format.autodetect_regular_expression = "^\"[(]PDH-CSV 4.0"

  # Ignore the CSV header even if this is a .csv file
  log.format.ignore_format_lines = "true"

  # This handles header lines like this one:
  # "(PDH-CSV 4.0) (Pacific Standard Time)(480)","\\12.34.56.78\Windows Media Publishing Points(abcdefg)\Current Connected Players","\\12.34.56.78\Windows Media Publishing Points(abcdefg)\Current Player Allocated Bandwidth (Kbps)","\\12.34.56.78\Windows Media Publishing Points(abcdefg)\Current Streaming Players","\\12.34.56.78\Windows Media Publishing Points(abcdefg)\Peak Connected Players","\\12.34.56.78\Windows Media Publishing Points(abcdefg)\Peak Streaming Players","\\12.34.56.78\Windows Media Publishing Points(abcdefg)\Total Player Bytes Sent","\\12.34.56.78\Windows Media Publishing Points(abcdefg)\Total Streaming Players"
  log.filter_preprocessor = `
if (matches_regular_expression(current_log_line(), '^"[(]')) then (

  string fields = current_log_line();
  string fieldname;
  v.logfieldindex = 1;
  string numerical_fields = "profiles." . internal.profile_name . ".database.numerical_fields";

  # This subroutine creates a database field
  subroutine(create_database_field(string fieldname), (
    debug_message("create_database_field(" . fieldname . ")\n");
    string databasefieldpath = "profiles." . internal.profile_name . ".database.fields." . fieldname;
    (databasefieldpath . "") = "";
    node databasefield = databasefieldpath;
#    set_subnode_value(databasefield, "label", fieldname);
    databasefield;
  ));

  # This subroutine creates a log field
  subroutine(create_log_field(string fieldname, string type, bool withindex), (
    debug_message("create_log_field(" . fieldname . "; type=" . type . ")\n");
    string logfieldpath = "profiles." . internal.profile_name . ".log.fields." . fieldname;
    (logfieldpath . "") = "";
    node logfield = logfieldpath;
#    set_subnode_value(logfield, "label", fieldname);
    if (withindex) then (
      set_subnode_value(logfield, "index", v.logfieldindex);
      v.logfieldindex++;
    );
    set_subnode_value(logfield, "subindex", 0);
    if (type ne '') then
      set_subnode_value(logfield, "type", type);
    logfield;
  ));

  # Extract the fields one at a time
  while (matches_regular_expression(fields, '^"([^"]+)",(.*)$')) (

    string unconverted_fieldname = $1;
    fields = $2;
    string field_label = unconverted_fieldname;
#    echo("unconverted_fieldname: " . unconverted_fieldname);

    # Clean up the field name: convert it to lowercase and underbars.
    # 2012-05-19 - GMF - We can't chop off everything before the final slash, because some fields differ only in their beginning; keep the whole thing.
    fieldname = unconverted_fieldname;
#    if (matches_regular_expression(fieldname, '^.*\\\\\\\\([^\\\\]*)$')) then
#      fieldname = $1;
#    echo("fieldname1: " . fieldname);

    # The date/time column header looks like "(PDH-CSV 4.0) (Pacific Standard Time)(480)"; map it to the dt field
    if (matches_regular_expression(fieldname, ".*-CSV .*")) then
      fieldname = "dt";
#    echo("fieldname2: " . fieldname);

    # Lowercase the name and replace anything that isn't a letter or digit with an underbar
    string replaced_fieldname = "";
    for (int i = 0; i < length(fieldname); i++) (
      string c = lowercase(substr(fieldname, i, 1));
#      echo("c: " . c);
      if (!matches_regular_expression(c, '^[a-z0-9]$')) then
        c = '_';
      replaced_fieldname .= c;
    );
    fieldname = replaced_fieldname;
#    echo("fieldname3: " . fieldname);

    # Strip trailing underbars
    while (matches_regular_expression(fieldname, '^(.*)_$'))
      fieldname = $1;
#    echo("fieldname4: " . fieldname);

    # Database fields should start with letters
    if (matches_regular_expression(fieldname, '^[^a-z](.*)$')) then
      fieldname = 's' . fieldname;

    # Get the log field type
    string log_field_type = 'flat';

    # Create the log field
    create_log_field(fieldname, log_field_type, true);

    # If we're creating a profile, create the database fields too.
    if (node_exists("volatile.creating_profile")) then (

      # Handle the date/time field by creating date, time, and derived database fields
      if (fieldname eq "dt") then (
        create_log_field('date', '', false);
        create_log_field('time', '', false);
        create_database_field('date_time');
        create_database_field('day_of_week');
        create_database_field('hour_of_day');
      ); # if date/time field

      # All other fields are aggregating database fields
      else (
        node dbfield = "profiles"{internal.profile_name}{"database"}{"fields"}{fieldname};
        @dbfield{"label"} = field_label;
        @dbfield{"type"} = "int";
        if (matches_regular_expression(field_label, 'Bytes')) then (
          @dbfield{"integer_bits"} = 64;
          @dbfield{"display_format_type"} = "bandwidth";
        );
        # All fields are best represented as averages; they're all either "Avg", "something/sec",
        # or "% something", so it doesn't make sense to sum them. Max, maybe, but for now, average.
        @dbfield{"aggregation_operator"} = "average";
        @dbfield{"average_denominator_field"} = "events";
#        echo("Created aggregating field: " . node_as_string(dbfield));
      ); # if aggregating

      # Don't add a database field for numerical fields
#      else if (subnode_exists(numerical_fields, fieldname)) then (
#      );
      # Create a normal database field
#      else
#        create_database_field(fieldname);

    ); # if creating profile

  ); # while another field

  # Don't parse the header line as a data line
  'reject';

); # if header line
`

  # Fields are separated by commas
  log.format.field_separator = ","

  # The format of dates and times in this log
#  log.format.date_format = "mm/dd/yyyy hh:mm:ss"
#  log.format.time_format = "mm/dd/yyyy hh:mm:ss"
  log.format.date_format = "auto"
  log.format.time_format = "auto"

  # Log fields
  log.fields = {
#    date_time = ""
#    bytes_second = ""
#    bytes_second_2_ = ""
#    processortime = ""
#    connections24h = ""
#    currentconnections = ""
#    currentdownloads = ""
#    filesdownloadederror = ""
#    maximumconnections = ""
#    serverbandwidth = ""
#    totalconnections = ""
#    totaldownloads = ""
#    totalkbdownloaded = ""
#    queuelength = ""
  } # log.fields

  # Database fields
  database.fields = {
#    date_time = ""
#    day_of_week = ""
#    hour_of_day = ""
#    currentconnections = ""
#    currentdownloads = ""
#    queuelength = ""
  } # database.fields

  database.numerical_fields = {
    events = {
      default = true
      requires_log_field = false
      entries_field = true
    } # events
#    serverbandwidth = {
#      type = "int"
#      integer_bits = 64
#      display_format_type = "bandwidth"
#    } # serverbandwidth
#
#    current_connected_players = {
#      aggregation_method = "average"
#      average_denominator_field = "events"
#    } # current_connected_players
#
#    current_player_allocated_bandwidth__kbps = {
#      aggregation_method = "average"
#      average_denominator_field = "events"
#    } # current_player_allocated_bandwidth__kbps
#
#    current_streaming_players = {
#      aggregation_method = "average"
#      average_denominator_field = "events"
#    } # current_streaming_players
#
#    peak_connected_players = {
#      aggregation_method = "maximum"
#      average_denominator_field = "events"
#    } # peak_connected_players
#
#    peak_streaming_players = {
#      aggregation_method = "maximum"
#      average_denominator_field = "events"
#    } # peak_streaming_players
#
#    total_player_bytes_sent = {
#      type = "int"
#      integer_bits = 64
#      display_format_type = "bandwidth"
#    }
  } # database.numerical_fields

  log.filters = {
    mark_entry = {
      label = '$lang_admin.log_filters.mark_entry_label'
      comment = '$lang_admin.log_filters.mark_entry_comment'
      value = 'events = 1;'
    } # mark_entry
  } # log.filters

  log.parsing_filters = {
    date_time = {
      label = ''
      comment = ''
      value = `
        # Split the dt field into date and time, e.g. 04/03/2009 16:03:23.046
        if (matches_regular_expression(dt, "^([0-9/]+) ([0-9:]+)")) then (
          date = $1;
          time = $2;
        );
        # Old format?
        else (
          date = substr(dt, 0, 8);
          time = substr(dt, 9);
        );
      `
    }
  } # log.parsing_filters

  create_profile_wizard_options = {
    # How the reports should be grouped in the report menu
    report_groups = {
      date_time_group = ""
    } # report_groups
  } # create_profile_wizard_options

} # win2_kperfmon
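
# Worked example (illustrative; the sample values are taken from the header and dt examples quoted
# in the comments above): the first header column, "(PDH-CSV 4.0) (Pacific Standard Time)(480)",
# matches the ".*-CSV .*" test in the filter_preprocessor and becomes the dt log field; the
# date_time parsing filter then splits a dt value such as "04/03/2009 16:03:23.046" into
# date "04/03/2009" and time "16:03:23". A data column such as
# "\\12.34.56.78\Windows Media Publishing Points(abcdefg)\Current Connected Players" is lowercased,
# every character outside [a-z0-9] is replaced with "_", and an "s" is prepended because the result
# starts with a non-letter, giving the field name
# "s__12_34_56_78_windows_media_publishing_points_abcdefg__current_connected_players"; when a
# profile is being created, that field is stored as an "int" database field aggregated as an
# average over "events".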