Repository: spaghettidba/WorkloadTools Branch: master Commit: 9fab260e2d4e Files: 176 Total size: 904.9 KB Directory structure: gitextract_79qcbrcm/ ├── .config/ │ └── dotnet-tools.json ├── .editorconfig ├── .gitattributes ├── .gitignore ├── ConvertWorkload/ │ ├── App.config │ ├── ConvertWorkload.csproj │ ├── EventReader.cs │ ├── EventWriter.cs │ ├── ExtendedEventsEventReader.cs │ ├── LocalDBManager.cs │ ├── NLog.config │ ├── Program.cs │ ├── Properties/ │ │ └── AssemblyInfo.cs │ ├── SqlTraceEventReader.cs │ ├── WorkloadConverter.cs │ ├── WorkloadFileEventWriter.cs │ └── packages.config ├── DebuggingTools/ │ ├── capture.json │ ├── capture_trace.json │ ├── capture_xel.json │ ├── generate-allWorkload.bat │ ├── generate-workload.ps1 │ ├── replay.json │ ├── report.bat │ ├── setup.bat │ ├── start-capture.bat │ ├── start-capture_trace.bat │ ├── start-capture_xel.bat │ └── start-replay.bat ├── LICENSE.md ├── README.md ├── Reports/ │ ├── README.md │ ├── WorkloadTools Report - Sample.pbix │ └── WorkloadTools Report - Template.pbit ├── Setup/ │ ├── Product.wxs │ ├── Setup.wixproj │ ├── SignMsi.ps1 │ ├── buildmsi.ps1 │ ├── postbuild.bat │ ├── transform-nodirs.xsl │ ├── transform.xsl │ ├── transform2.xsl │ └── transform3.xsl ├── SetupBootstrapper/ │ ├── Bundle.wxs │ ├── License/ │ │ └── License.rtf │ ├── SetupBootstrapper.wixproj │ ├── SignMsi.ps1 │ ├── buildexe.ps1 │ └── postbuild.bat ├── SharedAssemblyInfo.cs ├── SqlWorkload/ │ ├── NLog.config │ ├── Program.cs │ ├── Properties/ │ │ ├── AssemblyInfo.cs │ │ ├── Resources.Designer.cs │ │ └── Resources.resx │ ├── SqlWorkload.csproj │ ├── app.config │ └── packages.config ├── WorkloadTools/ │ ├── BinarySerializedBufferedEventQueue.cs │ ├── BufferedEventQueue.cs │ ├── Config/ │ │ ├── AnalysisSample.json │ │ ├── ReplaySample.json │ │ ├── Sample.json │ │ ├── SqlWorkloadConfig.cs │ │ └── SqlWorkloadConfigTypeResolver.cs │ ├── Consumer/ │ │ ├── Analysis/ │ │ │ ├── AnalysisConsumer.cs │ │ │ ├── DatabaseSchema.sql │ │ │ ├── 
NormalizedSqlText.cs │ │ │ ├── SqlTextNormalizer.cs │ │ │ ├── WorkloadAnalyzer.cs │ │ │ └── createAnalysisView.sql │ │ ├── BufferedWorkloadConsumer.cs │ │ ├── Replay/ │ │ │ ├── ReplayCommand.cs │ │ │ ├── ReplayConsumer.cs │ │ │ ├── ReplayWorker.cs │ │ │ ├── ReplayWorker.cs.bak │ │ │ └── ResultSetConsumer.cs │ │ ├── WorkloadConsumer.cs │ │ └── WorkloadFile/ │ │ └── WorkloadFileWriterConsumer.cs │ ├── CounterWorkloadEvent.cs │ ├── DiskPerfWorkloadEvent.cs │ ├── ErrorWorkloadEvent.cs │ ├── ExecutionWorkloadEvent.cs │ ├── FilterPredicate.cs │ ├── GlobalSuppressions.cs │ ├── IEventQueue.cs │ ├── Listener/ │ │ ├── ExtendedEvents/ │ │ │ ├── ExtendedEventsEventFilter.cs │ │ │ ├── ExtendedEventsFilterPredicate.cs │ │ │ ├── ExtendedEventsWorkloadListener.cs │ │ │ ├── FileTargetXEventDataReader.cs │ │ │ ├── StreamXEventDataReader.cs │ │ │ ├── XEventDataReader.cs │ │ │ └── sqlworkload.sql │ │ ├── File/ │ │ │ ├── FileEventFilter.cs │ │ │ ├── FileFilterPredicate.cs │ │ │ └── FileWorkloadListener.cs │ │ ├── ReadIteration.cs │ │ ├── SqlTransformer.cs │ │ └── Trace/ │ │ ├── FileTraceEventDataReader.cs │ │ ├── ProfilerEventFilter.cs │ │ ├── ProfilerFilterPredicate.cs │ │ ├── ProfilerWorkloadListener.cs │ │ ├── SqlConnectionInfoWrapper.cs │ │ ├── SqlTraceWorkloadListener.cs │ │ ├── TraceEventDataReader.cs │ │ ├── TraceEventFilter.cs │ │ ├── TraceEventParser.cs │ │ ├── TraceFileWrapper.cs │ │ ├── TraceFilterPredicate.cs │ │ ├── TraceServerWrapper.cs │ │ ├── TraceUtils.cs │ │ ├── sqlworkload.sql │ │ └── sqlworkload.tdf │ ├── MMFEventQueue.cs │ ├── MessagWorkloadEvent.cs │ ├── Properties/ │ │ ├── AssemblyInfo.cs │ │ ├── Settings.Designer.cs │ │ ├── Settings.settings │ │ └── SharedAssemblyInfo.cs │ ├── SqlConnectionInfo.cs │ ├── SqliteEventQueue.cs │ ├── Util/ │ │ ├── DataUtils.cs │ │ ├── ModelConverter.cs │ │ ├── RingBuffer.cs │ │ └── StringExtensions.cs │ ├── WaitStatsWorkloadEvent.cs │ ├── WorkloadController.cs │ ├── WorkloadEvent.cs │ ├── WorkloadEventFilter.cs │ ├── 
WorkloadListener.cs │ ├── WorkloadTools.csproj │ ├── app.config │ └── packages.config ├── WorkloadTools.sln ├── WorkloadToolsTests/ │ ├── Properties/ │ │ └── AssemblyInfo.cs │ ├── WorkloadTools/ │ │ ├── BinarySerializedBufferedEventQueueTest.cs │ │ └── SqlTextNormalizerTest.cs │ ├── WorkloadToolsTests.csproj │ ├── app.config │ └── packages.config ├── WorkloadViewer/ │ ├── App.config │ ├── App.xaml │ ├── App.xaml.cs │ ├── Comparer/ │ │ └── QueryResultEqualityComparer.cs │ ├── Model/ │ │ ├── NormalizedQuery.cs │ │ ├── QueryDetails.cs │ │ ├── SqlConnectionInfo.cs │ │ ├── WorkloadAnalysis.cs │ │ └── WorkloadAnalysisPoint.cs │ ├── NLog.config │ ├── Properties/ │ │ ├── AssemblyInfo.cs │ │ ├── Resources.Designer.cs │ │ ├── Resources.resx │ │ ├── Settings.Designer.cs │ │ └── Settings.settings │ ├── Resources/ │ │ ├── TSQL.xshd │ │ └── WorkloadAnalysis.sql │ ├── View/ │ │ ├── ConnectionInfoDialog.xaml │ │ ├── ConnectionInfoDialog.xaml.cs │ │ ├── ConnectionInfoDialogStyle.xaml │ │ ├── ConnectionInfoEditor.xaml │ │ ├── ConnectionInfoEditor.xaml.cs │ │ ├── MainWindow.xaml │ │ └── MainWindow.xaml.cs │ ├── ViewModel/ │ │ ├── ConnectionInfoEditorViewModel.cs │ │ ├── DictionaryExtensions.cs │ │ ├── FilterDefinition.cs │ │ ├── LinqExtensions.cs │ │ ├── MainViewModel.cs │ │ ├── Message.cs │ │ ├── QueryResult.cs │ │ ├── SortColMessage.cs │ │ └── ViewModelLocator.cs │ ├── WorkloadViewer.csproj │ └── packages.config └── build.ps1 ================================================ FILE CONTENTS ================================================ ================================================ FILE: .config/dotnet-tools.json ================================================ { "version": 1, "isRoot": true, "tools": { "wix": { "version": "4.0.5", "commands": [ "wix" ] } } } ================================================ FILE: .editorconfig ================================================ # Remove the line below if you want to inherit .editorconfig settings from higher directories root = true 
# C# files [*.cs] #### Core EditorConfig Options #### # Indentation and spacing indent_size = 4 indent_style = space tab_width = 4 # New line preferences end_of_line = crlf insert_final_newline = false #### .NET Coding Conventions #### # Organize usings dotnet_separate_import_directive_groups = true dotnet_sort_system_directives_first = true file_header_template = unset # this. and Me. preferences dotnet_style_qualification_for_event = false dotnet_style_qualification_for_field = false dotnet_style_qualification_for_method = false dotnet_style_qualification_for_property = false # Language keywords vs BCL types preferences dotnet_style_predefined_type_for_locals_parameters_members = true:suggestion dotnet_style_predefined_type_for_member_access = true:suggestion # Parentheses preferences dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity:suggestion dotnet_style_parentheses_in_other_binary_operators = always_for_clarity:suggestion dotnet_style_parentheses_in_other_operators = never_if_unnecessary:suggestion dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity:suggestion # Modifier preferences dotnet_style_require_accessibility_modifiers = for_non_interface_members # Expression-level preferences dotnet_style_coalesce_expression = true dotnet_style_collection_initializer = true dotnet_style_explicit_tuple_names = true dotnet_style_namespace_match_folder = true dotnet_style_null_propagation = true dotnet_style_object_initializer = true dotnet_style_operator_placement_when_wrapping = beginning_of_line dotnet_style_prefer_auto_properties = true:suggestion dotnet_style_prefer_compound_assignment = true dotnet_style_prefer_conditional_expression_over_assignment = true dotnet_style_prefer_conditional_expression_over_return = true dotnet_style_prefer_foreach_explicit_cast_in_source = when_strongly_typed dotnet_style_prefer_inferred_anonymous_type_member_names = true dotnet_style_prefer_inferred_tuple_names = true 
dotnet_style_prefer_is_null_check_over_reference_equality_method = true dotnet_style_prefer_simplified_boolean_expressions = true dotnet_style_prefer_simplified_interpolation = true # Field preferences dotnet_style_readonly_field = true # Parameter preferences dotnet_code_quality_unused_parameters = all # Suppression preferences dotnet_remove_unnecessary_suppression_exclusions = none # New line preferences dotnet_style_allow_multiple_blank_lines_experimental = true dotnet_style_allow_statement_immediately_after_block_experimental = true #### C# Coding Conventions #### # var preferences csharp_style_var_elsewhere = true:warning csharp_style_var_for_built_in_types = true:warning csharp_style_var_when_type_is_apparent = true:warning # Expression-bodied members csharp_style_expression_bodied_accessors = when_on_single_line:warning csharp_style_expression_bodied_constructors = when_on_single_line:silent csharp_style_expression_bodied_indexers = when_on_single_line:warning csharp_style_expression_bodied_lambdas = when_on_single_line:warning csharp_style_expression_bodied_local_functions = when_on_single_line:warning csharp_style_expression_bodied_methods = when_on_single_line:silent csharp_style_expression_bodied_operators = when_on_single_line:warning csharp_style_expression_bodied_properties = when_on_single_line:warning # Pattern matching preferences csharp_style_pattern_matching_over_as_with_null_check = true:error csharp_style_pattern_matching_over_is_with_cast_check = true:error csharp_style_prefer_extended_property_pattern = true:suggestion csharp_style_prefer_not_pattern = true:suggestion csharp_style_prefer_pattern_matching = true:suggestion csharp_style_prefer_switch_expression = true:warning # Null-checking preferences csharp_style_conditional_delegate_call = true:warning # Modifier preferences csharp_prefer_static_local_function = true:warning csharp_preferred_modifier_order = 
public,private,protected,internal,file,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,required,volatile,async csharp_style_prefer_readonly_struct = true:warning csharp_style_prefer_readonly_struct_member = true:warning # Code-block preferences csharp_prefer_braces = true:warning csharp_prefer_simple_using_statement = false:warning csharp_style_namespace_declarations = block_scoped:warning csharp_style_prefer_method_group_conversion = true:suggestion csharp_style_prefer_top_level_statements = false:suggestion # Expression-level preferences csharp_prefer_simple_default_expression = true:suggestion csharp_style_deconstructed_variable_declaration = true:suggestion csharp_style_implicit_object_creation_when_type_is_apparent = true:suggestion csharp_style_inlined_variable_declaration = true:suggestion csharp_style_prefer_index_operator = true:suggestion csharp_style_prefer_local_over_anonymous_function = true:suggestion csharp_style_prefer_null_check_over_type_check = true:warning csharp_style_prefer_range_operator = true:suggestion csharp_style_prefer_tuple_swap = true:suggestion csharp_style_prefer_utf8_string_literals = true:suggestion csharp_style_throw_expression = false:suggestion csharp_style_unused_value_assignment_preference = discard_variable:warning csharp_style_unused_value_expression_statement_preference = discard_variable:warning # 'using' directive preferences csharp_using_directive_placement = outside_namespace:suggestion # New line preferences csharp_style_allow_blank_line_after_colon_in_constructor_initializer_experimental = true:silent csharp_style_allow_blank_line_after_token_in_arrow_expression_clause_experimental = true:silent csharp_style_allow_blank_line_after_token_in_conditional_expression_experimental = true:silent csharp_style_allow_blank_lines_between_consecutive_braces_experimental = true:silent csharp_style_allow_embedded_statements_on_same_line_experimental = true:silent #### C# Formatting Rules #### # New line 
preferences csharp_new_line_before_catch = true csharp_new_line_before_else = true csharp_new_line_before_finally = true csharp_new_line_before_members_in_anonymous_types = true csharp_new_line_before_members_in_object_initializers = true csharp_new_line_before_open_brace = all csharp_new_line_between_query_expression_clauses = true # Indentation preferences csharp_indent_block_contents = true csharp_indent_braces = false csharp_indent_case_contents = true csharp_indent_case_contents_when_block = true csharp_indent_labels = one_less_than_current csharp_indent_switch_labels = true # Space preferences csharp_space_after_cast = false csharp_space_after_colon_in_inheritance_clause = true csharp_space_after_comma = true csharp_space_after_dot = false csharp_space_after_keywords_in_control_flow_statements = true csharp_space_after_semicolon_in_for_statement = true csharp_space_around_binary_operators = before_and_after csharp_space_around_declaration_statements = false csharp_space_before_colon_in_inheritance_clause = true csharp_space_before_comma = false csharp_space_before_dot = false csharp_space_before_open_square_brackets = false csharp_space_before_semicolon_in_for_statement = false csharp_space_between_empty_square_brackets = false csharp_space_between_method_call_empty_parameter_list_parentheses = false csharp_space_between_method_call_name_and_opening_parenthesis = false csharp_space_between_method_call_parameter_list_parentheses = false csharp_space_between_method_declaration_empty_parameter_list_parentheses = false csharp_space_between_method_declaration_name_and_open_parenthesis = false csharp_space_between_method_declaration_parameter_list_parentheses = false csharp_space_between_parentheses = false csharp_space_between_square_brackets = false # Wrapping preferences csharp_preserve_single_line_blocks = true csharp_preserve_single_line_statements = true #### Naming styles #### # Naming rules dotnet_naming_rule.interface_should_be_begins_with_i.severity = 
suggestion dotnet_naming_rule.interface_should_be_begins_with_i.symbols = interface dotnet_naming_rule.interface_should_be_begins_with_i.style = begins_with_i dotnet_naming_rule.types_should_be_pascal_case.severity = suggestion dotnet_naming_rule.types_should_be_pascal_case.symbols = types dotnet_naming_rule.types_should_be_pascal_case.style = pascal_case dotnet_naming_rule.non_field_members_should_be_pascal_case.severity = suggestion dotnet_naming_rule.non_field_members_should_be_pascal_case.symbols = non_field_members dotnet_naming_rule.non_field_members_should_be_pascal_case.style = pascal_case # Symbol specifications dotnet_naming_symbols.interface.applicable_kinds = interface dotnet_naming_symbols.interface.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected dotnet_naming_symbols.interface.required_modifiers = dotnet_naming_symbols.types.applicable_kinds = class, struct, interface, enum dotnet_naming_symbols.types.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected dotnet_naming_symbols.types.required_modifiers = dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method dotnet_naming_symbols.non_field_members.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected dotnet_naming_symbols.non_field_members.required_modifiers = # Naming styles dotnet_naming_style.pascal_case.required_prefix = dotnet_naming_style.pascal_case.required_suffix = dotnet_naming_style.pascal_case.word_separator = dotnet_naming_style.pascal_case.capitalization = pascal_case dotnet_naming_style.begins_with_i.required_prefix = I dotnet_naming_style.begins_with_i.required_suffix = dotnet_naming_style.begins_with_i.word_separator = dotnet_naming_style.begins_with_i.capitalization = pascal_case [*.{cs,vb}] dotnet_style_coalesce_expression = true:suggestion dotnet_style_null_propagation = true:suggestion 
dotnet_style_prefer_is_null_check_over_reference_equality_method = true:suggestion dotnet_style_prefer_auto_properties = true:warning dotnet_style_object_initializer = true:suggestion dotnet_style_collection_initializer = true:suggestion dotnet_style_prefer_simplified_boolean_expressions = true:suggestion dotnet_style_prefer_conditional_expression_over_assignment = true:silent dotnet_style_prefer_conditional_expression_over_return = true:silent dotnet_style_explicit_tuple_names = true:warning dotnet_style_prefer_inferred_tuple_names = true:suggestion dotnet_style_prefer_inferred_anonymous_type_member_names = true:suggestion dotnet_style_operator_placement_when_wrapping = beginning_of_line tab_width = 4 indent_size = 4 end_of_line = crlf dotnet_style_prefer_compound_assignment = true:suggestion dotnet_style_prefer_simplified_interpolation = true:suggestion dotnet_style_namespace_match_folder = true:suggestion dotnet_style_readonly_field = true:warning dotnet_style_predefined_type_for_locals_parameters_members = true:warning dotnet_style_predefined_type_for_member_access = true:warning dotnet_style_require_accessibility_modifiers = for_non_interface_members:warning dotnet_style_allow_multiple_blank_lines_experimental = false:warning dotnet_style_allow_statement_immediately_after_block_experimental = true:silent dotnet_code_quality_unused_parameters = all:error dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity:suggestion dotnet_style_parentheses_in_other_binary_operators = always_for_clarity:suggestion dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity:suggestion dotnet_style_parentheses_in_other_operators = never_if_unnecessary:suggestion dotnet_style_qualification_for_field = false:suggestion dotnet_style_qualification_for_property = false:suggestion dotnet_style_qualification_for_method = false:suggestion dotnet_style_qualification_for_event = false:suggestion ================================================ 
FILE: .gitattributes ================================================ ############################################################################### # Set default behavior to automatically normalize line endings. ############################################################################### * text=auto ############################################################################### # Set default behavior for command prompt diff. # # This is need for earlier builds of msysgit that does not have it on by # default for csharp files. # Note: This is only used by command line ############################################################################### #*.cs diff=csharp ############################################################################### # Set the merge driver for project and solution files # # Merging from the command prompt will add diff markers to the files if there # are conflicts (Merging from VS is not affected by the settings below, in VS # the diff markers are never inserted). Diff markers may cause the following # file extensions to fail to load in VS. An alternative would be to treat # these files as binary and thus will always conflict and require user # intervention with every merge. To do so, just uncomment the entries below ############################################################################### #*.sln merge=binary #*.csproj merge=binary #*.vbproj merge=binary #*.vcxproj merge=binary #*.vcproj merge=binary #*.dbproj merge=binary #*.fsproj merge=binary #*.lsproj merge=binary #*.wixproj merge=binary #*.modelproj merge=binary #*.sqlproj merge=binary #*.wwaproj merge=binary ############################################################################### # behavior for image files # # image files are treated as binary by default. 
############################################################################### #*.jpg binary #*.png binary #*.gif binary ############################################################################### # diff behavior for common document formats # # Convert binary document formats to text before diffing them. This feature # is only available from the command line. Turn it on by uncommenting the # entries below. ############################################################################### #*.doc diff=astextplain #*.DOC diff=astextplain #*.docx diff=astextplain #*.DOCX diff=astextplain #*.dot diff=astextplain #*.DOT diff=astextplain #*.pdf diff=astextplain #*.PDF diff=astextplain #*.rtf diff=astextplain #*.RTF diff=astextplain ================================================ FILE: .gitignore ================================================ ## Ignore Visual Studio temporary files, build results, and ## files generated by popular Visual Studio add-ons. # User-specific files *.suo *.user *.userosscache *.sln.docstates # User-specific files (MonoDevelop/Xamarin Studio) *.userprefs # Build results [Dd]ebug/ [Dd]ebugPublic/ [Rr]elease/ [Rr]eleases/ x64/ x86/ bld/ [Bb]in/ [Oo]bj/ [Ll]og/ # Visual Studio 2015 cache/options directory .vs/ # Uncomment if you have tasks that create the project's static files in wwwroot #wwwroot/ # MSTest test Results [Tt]est[Rr]esult*/ [Bb]uild[Ll]og.* # NUNIT *.VisualState.xml TestResult.xml # Build Results of an ATL Project [Dd]ebugPS/ [Rr]eleasePS/ dlldata.c # DNX project.lock.json project.fragment.lock.json artifacts/ *_i.c *_p.c *_i.h *.ilk *.meta *.obj *.pch *.pdb *.pgc *.pgd *.rsp *.sbr *.tlb *.tli *.tlh *.tmp *.tmp_proj *.log *.vspscc *.vssscc .builds *.pidb *.svclog *.scc # Chutzpah Test files _Chutzpah* # Visual C++ cache files ipch/ *.aps *.ncb *.opendb *.opensdf *.sdf *.cachefile *.VC.db *.VC.VC.opendb # Visual Studio profiler *.psess *.vsp *.vspx *.sap # TFS 2012 Local Workspace $tf/ # Guidance Automation Toolkit *.gpState # 
ReSharper is a .NET coding add-in _ReSharper*/ *.[Rr]e[Ss]harper *.DotSettings.user # JustCode is a .NET coding add-in .JustCode # TeamCity is a build add-in _TeamCity* # DotCover is a Code Coverage Tool *.dotCover # NCrunch _NCrunch_* .*crunch*.local.xml nCrunchTemp_* # MightyMoose *.mm.* AutoTest.Net/ # Web workbench (sass) .sass-cache/ # Installshield output folder [Ee]xpress/ # DocProject is a documentation generator add-in DocProject/buildhelp/ DocProject/Help/*.HxT DocProject/Help/*.HxC DocProject/Help/*.hhc DocProject/Help/*.hhk DocProject/Help/*.hhp DocProject/Help/Html2 DocProject/Help/html # Click-Once directory publish/ # Publish Web Output *.[Pp]ublish.xml *.azurePubxml # TODO: Comment the next line if you want to checkin your web deploy settings # but database connection strings (with potential passwords) will be unencrypted #*.pubxml *.publishproj # Microsoft Azure Web App publish settings. Comment the next line if you want to # checkin your Azure Web App publish settings, but sensitive information contained # in these scripts will be unencrypted PublishScripts/ # NuGet Packages *.nupkg # The packages folder can be ignored because of Package Restore **/packages/* # except build/, which is used as an MSBuild target. 
!**/packages/build/ # Uncomment if necessary however generally it will be regenerated when needed #!**/packages/repositories.config # NuGet v3's project.json files produces more ignoreable files *.nuget.props *.nuget.targets .nuget/nuget.exe # Microsoft Azure Build Output csx/ *.build.csdef # Microsoft Azure Emulator ecf/ rcf/ # Windows Store app package directories and files AppPackages/ BundleArtifacts/ Package.StoreAssociation.xml _pkginfo.txt # Visual Studio cache files # files ending in .cache can be ignored *.[Cc]ache # but keep track of directories ending in .cache !*.[Cc]ache/ # Others ClientBin/ ~$* *~ *.dbmdl *.dbproj.schemaview *.jfm *.pfx *.publishsettings node_modules/ orleans.codegen.cs # Since there are multiple workflows, uncomment next line to ignore bower_components # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) #bower_components/ # RIA/Silverlight projects Generated_Code/ # Backup & report files from converting an old project file # to a newer Visual Studio version. 
Backup files are not needed, # because we have git ;-) _UpgradeReport_Files/ Backup*/ UpgradeLog*.XML UpgradeLog*.htm # SQL Server files *.mdf *.ldf # Business Intelligence projects *.rdl.data *.bim.layout *.bim_*.settings # Microsoft Fakes FakesAssemblies/ # GhostDoc plugin setting file *.GhostDoc.xml # Node.js Tools for Visual Studio .ntvs_analysis.dat # Visual Studio 6 build log *.plg # Visual Studio 6 workspace options file *.opt # Visual Studio LightSwitch build output **/*.HTMLClient/GeneratedArtifacts **/*.DesktopClient/GeneratedArtifacts **/*.DesktopClient/ModelManifest.xml **/*.Server/GeneratedArtifacts **/*.Server/ModelManifest.xml _Pvt_Extensions # Paket dependency manager .paket/paket.exe paket-files/ # FAKE - F# Make .fake/ # JetBrains Rider .idea/ *.sln.iml # CodeRush .cr/ # Python Tools for Visual Studio (PTVS) __pycache__/ *.pyc harvest.wxs harvest2.wxs harvest3.wxs Setup/SignParams.ps1 SetupBootstrapper/SignParams.ps1 *.wixobj.nuget/nuget.exe *.wixobj ================================================ FILE: ConvertWorkload/App.config ================================================ ================================================ FILE: ConvertWorkload/ConvertWorkload.csproj ================================================  Debug AnyCPU {62E37C03-BA08-46CE-A583-D71FB7A8825B} Exe ConvertWorkload ConvertWorkload v4.8 512 false true AnyCPU true full false bin\Debug\ DEBUG;TRACE prompt 4 false AnyCPU pdbonly true bin\Release\ TRACE prompt 4 false TRACE;DEBUG AnyCPU bin\Debug\ false bin\Release\ false TRACE true bin\Release\ false TRACE true bin\Debug\ ..\packages\CommandLineParser.1.9.71\lib\net45\CommandLine.dll ..\packages\NLog.4.4.12\lib\net45\NLog.dll ..\packages\System.Data.SQLite.Core.1.0.112.0\lib\net46\System.Data.SQLite.dll Properties\SharedAssemblyInfo.cs Designer Always {ae6e4548-8c33-4728-8504-88aa9666020b} WorkloadTools This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. 
For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. ================================================ FILE: ConvertWorkload/EventReader.cs ================================================ using System; using WorkloadTools; namespace ConvertWorkload { public abstract class EventReader : IDisposable { private WorkloadEventFilter _filter; public string[] ApplicationFilter { get; set; } public string[] DatabaseFilter { get; set; } public string[] HostFilter { get; set; } public string[] LoginFilter { get; set; } protected bool stopped; protected IEventQueue Events; protected WorkloadEventFilter Filter { get { if (_filter != null) { _filter.ApplicationFilter.PredicateValue = ApplicationFilter; _filter.DatabaseFilter.PredicateValue = DatabaseFilter; _filter.HostFilter.PredicateValue = HostFilter; _filter.LoginFilter.PredicateValue = LoginFilter; return _filter; } else { return null; } } set { _filter = value; } } public abstract bool HasFinished(); public abstract WorkloadEvent Read(); public abstract bool HasMoreElements(); public void Dispose() { stopped = true; Events.Dispose(); Dispose(true); GC.SuppressFinalize(this); } protected abstract void Dispose(bool disposing); } } ================================================ FILE: ConvertWorkload/EventWriter.cs ================================================ using System; using WorkloadTools; namespace ConvertWorkload { public abstract class EventWriter : IDisposable { protected bool stopped; public abstract void Write(WorkloadEvent evt); public void Dispose() { stopped = true; Dispose(true); GC.SuppressFinalize(this); } protected abstract void Dispose(bool disposing); } } ================================================ FILE: ConvertWorkload/ExtendedEventsEventReader.cs ================================================ using NLog; using System; using System.Collections.Generic; using System.Data.SqlClient; using System.IO; using System.Linq; using System.Text; using 
System.Threading; using System.Threading.Tasks; using WorkloadTools; using WorkloadTools.Listener; using WorkloadTools.Listener.ExtendedEvents; using WorkloadTools.Util; namespace ConvertWorkload { public class ExtendedEventsEventReader : EventReader { private static Logger logger = LogManager.GetCurrentClassLogger(); private string filePath; private bool started = false; private bool finished = false; private FileTargetXEventDataReader reader; public ExtendedEventsEventReader(string path) { Events = new BinarySerializedBufferedEventQueue(); Events.BufferSize = 10000; filePath = path; Filter = new ExtendedEventsEventFilter(); } private void ReadEventsFromFile() { try { var info = new SqlConnectionInfo(); info.ServerName = "(localdb)\\MSSQLLocalDB"; var sqlCreateTable = @" IF OBJECT_ID('tempdb.dbo.trace_reader_queue') IS NULL BEGIN CREATE TABLE tempdb.dbo.trace_reader_queue ( ts datetime DEFAULT GETDATE(), path nvarchar(4000) ) END TRUNCATE TABLE tempdb.dbo.trace_reader_queue; INSERT INTO tempdb.dbo.trace_reader_queue (path) VALUES(@path); "; using (var conn = new SqlConnection()) { conn.ConnectionString = info.ConnectionString(); conn.Open(); using (var cmd = conn.CreateCommand()) { cmd.CommandText = sqlCreateTable; var prm = new SqlParameter() { ParameterName = "@path", DbType = System.Data.DbType.String, Size = 4000, Value = filePath }; cmd.Parameters.Add(prm); cmd.ExecuteNonQuery(); } } reader = new FileTargetXEventDataReader(info.ConnectionString(), null, Events, ExtendedEventsWorkloadListener.ServerType.LocalDB); reader.ReadEvents(); finished = true; } catch (Exception ex) { logger.Error(ex.Message); if (ex.InnerException != null) { logger.Error(ex.InnerException.Message); } Dispose(); } } public override bool HasFinished() { return finished && !Events.HasMoreElements(); } public override bool HasMoreElements() { return !finished && !stopped && (started ? 
Events.HasMoreElements() : true); } public override WorkloadEvent Read() { if (!started) { var t = Task.Factory.StartNew(ReadEventsFromFile); started = true; } WorkloadEvent result = null; while (!Events.TryDequeue(out result)) { if (stopped || finished) { return null; } Thread.Sleep(5); } return result; } protected override void Dispose(bool disposing) { if (!stopped) { stopped = true; reader.Stop(); reader.Dispose(); } } } } ================================================ FILE: ConvertWorkload/LocalDBManager.cs ================================================ using System; using System.Collections.Generic; using System.Data.SqlClient; using System.Diagnostics; using System.IO; using System.Linq; using System.Net; using System.Security.Principal; using System.Text; using System.Threading.Tasks; using WorkloadTools; namespace ConvertWorkload { internal class LocalDBManager { public bool IsElevated { get { return new WindowsPrincipal(WindowsIdentity.GetCurrent()).IsInRole(WindowsBuiltInRole.Administrator); } } public string DownloadLocalDB() { var localPath = Path.GetTempPath() + "SqlLocalDB.msi"; using (var client = new WebClient()) { var wp = WebRequest.DefaultWebProxy; wp.Credentials = CredentialCache.DefaultCredentials; client.Proxy = wp; client.DownloadFile("https://download.microsoft.com/download/7/c/1/7c14e92e-bdcb-4f89-b7cf-93543e7112d1/SqlLocalDB.msi", localPath); } return localPath; } public void InstallLocalDB() { if (!IsElevated) { throw new InvalidOperationException("Installing LocalDB requires elevation."); } var localFileName = DownloadLocalDB(); var p = new Process(); p.StartInfo.WindowStyle = ProcessWindowStyle.Hidden; p.StartInfo.FileName = "c:\\windows\\system32\\msiexec.exe"; p.StartInfo.Arguments = " /i "+ localFileName +" /qn IACCEPTSQLLOCALDBLICENSETERMS=YES"; p.StartInfo.UseShellExecute = false; p.StartInfo.RedirectStandardOutput = true; p.StartInfo.RedirectStandardError = true; //Vista or higher check if 
(System.Environment.OSVersion.Version.Major >= 6) { p.StartInfo.Verb = "runas"; } p.Start(); p.WaitForExit(); } public bool CanConnectToLocalDB() { try { var info = new SqlConnectionInfo(); info.ServerName = @"(localdb)\MSSQLLocalDB"; info.UseIntegratedSecurity = true; using (var conn = new SqlConnection(info.ConnectionString() + ";Connect Timeout=30;")) { conn.Open(); } return true; } catch(Exception) { return false; } } } } ================================================ FILE: ConvertWorkload/NLog.config ================================================  ================================================ FILE: ConvertWorkload/Program.cs ================================================ using CommandLine; using CommandLine.Text; using NLog; using NLog.Targets; using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Linq; using System.Runtime; using System.Text; using System.Threading; using System.Threading.Tasks; using WorkloadTools.Listener.Trace; namespace ConvertWorkload { class Program { private static Logger logger = LogManager.GetCurrentClassLogger(); private static CancellationTokenSource source; static void Main(string[] args) { AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(GenericErrorHandler); GCSettings.LargeObjectHeapCompactionMode = GCLargeObjectHeapCompactionMode.CompactOnce; var assembly = System.Reflection.Assembly.GetExecutingAssembly(); var fvi = FileVersionInfo.GetVersionInfo(assembly.Location); var version = fvi.FileMajorPart.ToString() + "." + fvi.FileMinorPart.ToString() + "." 
+ fvi.FileBuildPart.ToString(); var name = assembly.FullName; logger.Info(name + " " + version); try { var options = new Options(); if (!CommandLine.Parser.Default.ParseArguments(args, options)) { return; } Run(options); } catch (Exception e) { logger.Error(e); } } private static void Run(Options options) { // reconfigure loggers to use a file in the current directory // or the file specified by the "Log" commandline parameter if(LogManager.Configuration != null) { var target = (FileTarget)LogManager.Configuration.FindTargetByName("logfile"); if (target != null) { var pathToLog = options.LogFile; if (pathToLog == null) { pathToLog = Path.Combine(Environment.CurrentDirectory, "ConvertWorkload.log"); } if (!Path.IsPathRooted(pathToLog)) { pathToLog = Path.Combine(Environment.CurrentDirectory, pathToLog); } target.FileName = pathToLog; LogManager.ReconfigExistingLoggers(); } } // check whether localdb is installed logger.Info("Checking LocalDB..."); var manager = new LocalDBManager(); if (!manager.CanConnectToLocalDB()) { logger.Info("Installing LocalDB..."); try { manager.InstallLocalDB(); } catch (InvalidOperationException) { logger.Error("This operation requires elevation. 
// Command-line options for ConvertWorkload, parsed by the CommandLineParser 1.x library.
class Options
{
    // Optional path of the log file; when omitted, Run() falls back to
    // ConvertWorkload.log in the current directory.
    [Option('L', "Log", HelpText = "Log file")]
    public string LogFile { get; set; }

    // Source file to convert: a .trc SQL Trace file or an Extended Events file.
    [Option('I', "Input", HelpText = "Input file", Required = true)]
    public string InputFile { get; set; }

    // Destination workload file written by WorkloadFileEventWriter.
    [Option('O', "Output", HelpText = "Output file", Required = true)]
    public string OutputFile { get; set; }

    // Optional single-value filters applied during conversion.
    [Option('A', "ApplicationFilter", HelpText = "Application filter")]
    public string ApplicationFilter { get; set; }

    [Option('D', "DatabaseFilter", HelpText = "Database filter")]
    public string DatabaseFilter { get; set; }

    [Option('H', "HostFilter", HelpText = "Host Filter")]
    public string HostFilter { get; set; }

    [Option('U', "LoginFilter", HelpText = "Login Filter")]
    public string LoginFilter { get; set; }

    // Populated by the parser with the state of the last parse (used for error reporting).
    [ParserState]
    public IParserState LastParserState { get; set; }

    // Builds the help screen shown when parsing fails or --help is requested.
    [HelpOption]
    public string GetUsage()
    {
        return HelpText.AutoBuild(this, (HelpText current) => HelpText.DefaultParsingErrorsHandler(this, current));
    }
}
// Reads workload events from a SQL Trace (.trc) file.
// The trace API requires a SQL Server instance, so the file path is handed to
// LocalDB through a queue table in tempdb, and FileTraceEventDataReader reads
// the events from there into the shared Events queue.
public class SqlTraceEventReader : EventReader
{
    private static Logger logger = LogManager.GetCurrentClassLogger();

    // Path of the source .trc file.
    private string tracePath;
    // Set once the background read task has been launched by the first Read() call.
    private bool started = false;
    // Set when the whole trace file has been consumed.
    private bool finished = false;
    private FileTraceEventDataReader reader;

    public SqlTraceEventReader(string path)
    {
        Events = new BinarySerializedBufferedEventQueue();
        Events.BufferSize = 10000;
        tracePath = path;
        Filter = new TraceEventFilter();
    }

    // Background routine: registers the trace path in tempdb.dbo.trace_reader_queue
    // on LocalDB, then lets FileTraceEventDataReader pump events into Events.
    private void ReadEventsFromFile()
    {
        try
        {
            var info = new SqlConnectionInfo();
            info.ServerName = "(localdb)\\MSSQLLocalDB";
            // Creates the queue table if missing, then replaces its content
            // with the single path to read.
            var sqlCreateTable = @" IF OBJECT_ID('tempdb.dbo.trace_reader_queue') IS NULL BEGIN CREATE TABLE tempdb.dbo.trace_reader_queue ( ts datetime DEFAULT GETDATE(), path nvarchar(4000) ) END TRUNCATE TABLE tempdb.dbo.trace_reader_queue; INSERT INTO tempdb.dbo.trace_reader_queue (path) VALUES(@path); ";
            using (var conn = new SqlConnection())
            {
                conn.ConnectionString = info.ConnectionString();
                conn.Open();
                using (var cmd = conn.CreateCommand())
                {
                    cmd.CommandText = sqlCreateTable;
                    // Parameterized to avoid quoting problems in the file path.
                    var prm = new SqlParameter()
                    {
                        ParameterName = "@path",
                        DbType = System.Data.DbType.String,
                        Size = 4000,
                        Value = tracePath
                    };
                    cmd.Parameters.Add(prm);
                    cmd.ExecuteNonQuery();
                }
            }
            reader = new FileTraceEventDataReader(info.ConnectionString(), Filter, Events);
            // Blocks until the trace file has been read completely (or Stop() is called).
            reader.ReadEvents();
            finished = true;
        }
        catch (Exception ex)
        {
            logger.Error(ex.Message);
            if (ex.InnerException != null)
            {
                logger.Error(ex.InnerException.Message);
            }
            Dispose();
        }
    }

    // Returns the next event, or null once the reader is stopped or finished.
    // Lazily starts the background read task on the first call.
    public override WorkloadEvent Read()
    {
        if (!started)
        {
            var t = Task.Factory.StartNew(ReadEventsFromFile);
            started = true;
        }
        WorkloadEvent result = null;
        // Poll the queue with a short sleep until an event arrives or the reader ends.
        while (!Events.TryDequeue(out result))
        {
            if (stopped || finished)
            {
                return null;
            }
            Thread.Sleep(5);
        }
        return result;
    }

    protected override void Dispose(bool disposing)
    {
        // Guarded by 'stopped' so a second Dispose (e.g. from the catch block
        // in ReadEventsFromFile) does not stop the reader twice.
        if (!stopped)
        {
            stopped = true;
            reader.Stop();
            reader.Dispose();
        }
    }

    // Before the background task starts we optimistically report true,
    // because no events can have been queued yet.
    public override bool HasMoreElements()
    {
        return !finished && !stopped && (started ? Events.HasMoreElements() : true);
    }

    public override bool HasFinished()
    {
        return finished && !Events.HasMoreElements();
    }
}
/// <summary>
/// EventWriter that persists workload events to a workload file,
/// delegating all the actual work to a WorkloadFileWriterConsumer.
/// </summary>
public class WorkloadFileEventWriter : EventWriter
{
    private readonly WorkloadFileWriterConsumer fileConsumer;

    /// <summary>
    /// Creates a writer targeting the given output file path.
    /// </summary>
    public WorkloadFileEventWriter(string outputPath)
    {
        fileConsumer = new WorkloadFileWriterConsumer { OutputFile = outputPath };
    }

    // Forward each event to the underlying consumer.
    public override void Write(WorkloadEvent evt) => fileConsumer.Consume(evt);

    // Release the underlying consumer together with this writer.
    protected override void Dispose(bool disposing) => fileConsumer.Dispose();
}
"AnalysisConsumer", "ConnectionInfo": { "ServerName": "(local)", "DatabaseName": "benchmark_analysis", "SchemaName": "test" }, "UploadIntervalSeconds": 60 } ] } } ================================================ FILE: DebuggingTools/capture_xel.json ================================================ { "Controller": { "Listener": { "__type": "ExtendedEventsWorkloadListener", "ConnectionInfo": { "ServerName": "localhost" }, "ApplicationFilter": "testapp", "FileTargetPath": "C:\\temp\\WorkloadTools\\debug\\capture.xel" }, "Consumers": [ { "__type": "WorkloadFileWriterConsumer", "OutputFile": "C:\\temp\\WorkloadTools\\debug\\capture.sqlite" } ,{ "__type": "AnalysisConsumer", "ConnectionInfo": { "ServerName": "(local)", "DatabaseName": "benchmark_analysis", "SchemaName": "test" }, "UploadIntervalSeconds": 60 } ] } } ================================================ FILE: DebuggingTools/generate-allWorkload.bat ================================================ del "c:\temp\workloadtools\debug\capture.sqlite" sqlcmd -S(local) -dbenchmark -Q"TRUNCATE TABLE dbo.benchmark" start "" powershell -File .\generate-workload.ps1 -start 0 start "" powershell -File .\generate-workload.ps1 -start 1000000 start "" powershell -File .\generate-workload.ps1 -start 2000000 start "" powershell -File .\generate-workload.ps1 -start 3000000 start "" powershell -File .\generate-workload.ps1 -start 4000000 start "" powershell -File .\generate-workload.ps1 -start 5000000 ================================================ FILE: DebuggingTools/generate-workload.ps1 ================================================ param ( [int]$start ) Get-Date -Format "yyyy-MM-dd HH:mm:ss" $connectionString = "Data Source=(local);Integrated Security=SSPI;Initial Catalog=benchmark;Application Name=testapp" $connection = new-object system.data.SqlClient.SQLConnection($connectionString) $connection.Open() $command = new-object system.data.sqlclient.sqlcommand("INSERT INTO benchmark VALUES(@p0)",$connection) 
$command.Parameters.AddWithValue("@p0",0) | Out-Null for($i = $start; $i -lt ($start + 100000); $i++) { $command.Parameters[0].Value = $i $command.ExecuteNonQuery() | Out-Null if($i % 1000 -eq 0) {$i} } $i $connection.Close() Get-Date -Format "yyyy-MM-dd HH:mm:ss" ================================================ FILE: DebuggingTools/replay.json ================================================ { "Controller": { "Listener": { "__type": "FileWorkloadListener", "Source": "C:\\temp\\workloadtools\\debug\\capture.sqlite", // in this case you want to simulate the original query rate "SynchronizationMode": "true" }, "Consumers": [ { "__type": "ReplayConsumer", "ConnectionInfo": { "ServerName": "(local)", "DatabaseName": "benchmark" }, "ConsumeResults": "false" } ] } } ================================================ FILE: DebuggingTools/report.bat ================================================ "c:\Program Files\WorkloadTools\WorkloadViewer.exe" --BaselineServer (local) --BaselineSchema test --BaselineDatabase benchmark_analysis ================================================ FILE: DebuggingTools/setup.bat ================================================ sqlcmd -S(local) -Q"CREATE DATABASE benchmark" sqlcmd -S(local) -dbenchmark -Q"CREATE TABLE dbo.benchmark ( i int NULL )" sqlcmd -S(local) -Q"CREATE DATABASE benchmark_analysis" mkdir c:\temp mkdir c:\temp\workloadtools mkdir c:\temp\workloadtools\debug ================================================ FILE: DebuggingTools/start-capture.bat ================================================ del "c:\temp\workloadtools\debug\capture.sqlite" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Applications" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Databases" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Errors" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Hosts" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Intervals" sqlcmd -S(local) 
-dbenchmark_analysis -Q"TRUNCATE TABLE test.Logins" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.NormalizedQueries" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.WaitStats" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.WorkloadDetails" "c:\program files\workloadtools\sqlworkload.exe" --File "%CD%\capture.json" ================================================ FILE: DebuggingTools/start-capture_trace.bat ================================================ sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Applications" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Databases" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Errors" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Hosts" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Intervals" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Logins" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.NormalizedQueries" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.WaitStats" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.WorkloadDetails" "c:\program files\workloadtools\sqlworkload.exe" --File "%CD%\capture_trace.json" ================================================ FILE: DebuggingTools/start-capture_xel.bat ================================================ sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Applications" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Databases" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Errors" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Hosts" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Intervals" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Logins" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.NormalizedQueries" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.WaitStats" sqlcmd -S(local) -dbenchmark_analysis 
-Q"TRUNCATE TABLE test.WorkloadDetails" "c:\program files\workloadtools\sqlworkload.exe" --File "%CD%\capture_xel.json" ================================================ FILE: DebuggingTools/start-replay.bat ================================================ echo %time% sqlcmd -S(local) -dbenchmark -Q"TRUNCATE TABLE benchmark" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Applications" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Databases" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Errors" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Hosts" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Intervals" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.Logins" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.NormalizedQueries" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.WaitStats" sqlcmd -S(local) -dbenchmark_analysis -Q"TRUNCATE TABLE test.WorkloadDetails" "c:\program files\workloadtools\sqlworkload.exe" --File "%CD%\replay.json" echo %time% ================================================ FILE: LICENSE.md ================================================ MIT License Copyright (c) 2018 Gianluca Sartori Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: README.md ================================================ ![](https://countrush-prod.azurewebsites.net/l/badge/?repository=spaghettidba.WorkloadTools) # WorkloadTools *WorkloadTools is a collection of tools to collect, analyze and replay SQL Server workloads, on premises and in the cloud .* ## Download Go to the [release page](https://github.com/spaghettidba/WorkloadTools/releases/latest) and download the msi installer for your target bitness (x86 or x64) ## Documentation If you're looking for detailed documentation on the individual tools, please have a look at the [Wiki](https://github.com/spaghettidba/WorkloadTools/wiki) If you're looking for usage scenarios and examples, see the [posts tagged WorkloadTools at my blog](https://spaghettidba.com/tag/workloadtools/) ## SqlWorkload SqlWorkload is a command line tool to start workload collection, analyze the collected data and replay the workload to a target machine, all in real-time. SqlWorkload can connect to a SQL Server instance and capture execution related events via SqlTrace or Extended Events. These events are processed and passed to "consumers" that can replay the events to a target instance in real-time and analyze the statements. All the batches are "normalized" (parameters and constants are stripped away) and metrics are calculated on each normalized batch, like cpu, duration, reads and writes. 
During the analysis, additional metrics are captured and saved regularly to the analysis database: - cpu usage - wait stats ### Replaying and analyzing a production workload in test If you want to compare the execution of the same workload on two different machines, you can point a first instance of SqlWorkload to your production server: SqlWorkload will analyze the workload and write the metrics to a database of your choice. It will also replay the workload to a test server, where you can point a second instance of SqlWorkload to obtain the same metrics. This second instance of SqlWorkload will not perform the replay, but it will only perform the workload analysis and write it to the same database where you stored the metrics relative to production (possibly on a different schema). Once you have captured and replayed the workload for a representative enough time, you can stop the two instances of SqlWorkload and analyze the data using the included Workload Analyzer or PowerBI dashboard. ### Command line switches SqlWorkload accepts two command line switches: `--Log` Path to the log file `--File` Path to the `.JSON` configuration file In fact, SqlWorkload supports a multitude of parameters and specifying them all in the command line can become really tedious. For this reason, SqlWorkload supports `.JSON` configuration files. This is a sample configuration file. Please refer to [the documentation](https://github.com/spaghettidba/WorkloadTools/wiki) to see the full list of available configuration options. ```javascript { "Controller": { // The Listener section describes how to capture the events "Listener": { // The main parameter here is the class type of the Listener "__type": "ExtendedEventsWorkloadListener", // The ConnectionInfo describes how to connect the Listener "ConnectionInfo": { "ServerName": "SQLDEMO\\SQL2014", // If you omit the UserName/Password, Windows authentication // will be used "UserName": "sa", "Password": "P4$$w0rd!" 
}, // Filters for the workload "DatabaseFilter": "DS3", "ApplicationFilter" : "SomeAppName", "HostFilter" : "MyComputer", "LoginFilter": "sa" }, // This section contains the list of the consumers // The list can contain 0 to N consumers of different types "Consumers": [ { // This is the type of the consumer "__type": "ReplayConsumer", // The same considerations for ConnectionInfo // valid for the Listener apply here as well "ConnectionInfo": { "ServerName": "SQLDEMO\\SQL2016", "DatabaseName": "DS3", "UserName": "sa", "Password": "P4$$w0rd!" } }, { // Here is another example with the AnalysisConsumer "__type": "AnalysisConsumer", // ConnectionInfo "ConnectionInfo": { "ServerName": "SQLDEMO\\SQL2016", "DatabaseName": "DS3", // This "SchemaName" parameter is important, because it // decides where the analysis data is written to "SchemaName": "baseline", "UserName": "sa", "Password": "P4$$w0rd!" }, // This decides how often the metrics are aggregated and // written to the target database "UploadIntervalSeconds": 60 } ] } } ``` ## WorkloadViewer WorkloadViewer is a GUI tool to analyze the data collected by the WorkloadAnalysisTarget in a SQL Server database. It shows metrics about the workload, relative to the beginning of the capture (in minutes). Here are some screenshots of WorkloadViewer. ### Workload The three charts in the "Workload" tab show an overview of the workload analysis: CPU, Duration and Batches/sec. Two workloads can be compared by displaying independent series (Baseline and Benchmark) for each workload. ![SqlWorkload analysis Overview](./Images/SqlWorkloadOverview.png "Overview") ### Queries This tab displays information about the queries and how they relate to the workload. For a single workload analysis, it shows the most expensive queries. When comparing two workloads, it can be used to identify query regressions. 
![SqlWorkload regressed queries](./Images/SqlWorkloadRegresses.png "RegressedQueries") ### Query Details Double clicking a query in the "Queries" tab takes you to the "Query Details" tab, where you can see the text of the selected query, specific statistics by application, database, host and login and the average duration in a chart. ![SqlWorkload query detail](./Images/SqlWorkloadDetail.png "Detail") ## ConvertWorkload ConvertWorkload is a command line tool to convert existing trace files to the internal SQLite format used by WorkloadTools. In the future, ConvertWorkload will also support conversion from existing Extended Events files. Why converting trace file to a different intermediate format instead of supporting it directly? Trace files can be read using an API that works only in x86. While WorkloadTools **can** work in x86, using x64 builds is highly recommended, due to the possible high memory usage when capturing intensive workloads. Using a x86 API would have excluded the functionality from the x64 builds, hence using an external tool to convert trace files seems much more appropriate. ### Command line switches ```text -I or --Input The input file (trace or extended events) to convert -O or --Output The output SQLite file to write -L or --Log Specifies where to save the log file -A or --ApplicationFilter Application filter to apply while converting the source file -D or --DatabaseFilter Database filter to apply while converting the source file -H or --HostFilter Host filter to apply while converting the source file -U or --LoginFilter Login filter to apply while converting the source file ``` ================================================ FILE: Reports/README.md ================================================ # WorkloadTools Power BI Report To analyze the data produced by WorkloadTools you can use the provided Power BI Template. 
In this folder you will find: * A Sample Power Bi report "WorkloadTools Report - Sample.pbix", use it play with the report * The Power BI template "WorkloadTools Report - Template.pbit", which defines a report structure and will ask for some input parameters before it loading data. ## Overview of the report pages ![Overview](/Images/PowerBI_Overview.png) ![WeightOnTotal](/Images/PowerBI_WeightOnTotal.png) ![Queries](/Images/PowerBI_Queries.png) ![QueryDetail](/Images/PowerBI_QueryDetail.png) ![WaitStats](/Images/PowerBI_WaitStats.png) ## Usage 1. Download and install Power BI Desktop 2. Open the provided template "WorkloadTools Report - Template.pbit" 3. Provide the connection parameters 4. Explore your data 5. (optional) Save the file for later use, it won't ask again for the parameters The report allows you to load one or two series of data, if you want to visualize only one series leave the optional parameters ("Benchmark") empty Required parameters: * Baseline Server\Instance * Baseline Database * Baseline Schema Optional parameters: * Benchmark Server\Instance * Benchmark Database * Benchmark Schema ![Input Parameters](/Images/PowerBI_InputParams.png) **Note:** When using only one serie of data some charts and metrics will be empty, the deltas won't be meaningful. ### Using "Drillthrough" for "Query Detail" In order to correctly filter the "Query Detail" sheet you need to use the "Drillthrough" function of Power BI. This function is available on any visual that contains a specific fields (in this case "Sql Hash") and has a properly configured Drillthrough page. ie: on the "Queries" page you can find several tables with the field "Sql Hash", right click it and choose "Drillthrough", you will now see the available drillthrough pages, if you click the page, it will show up, filtered by the selected "Sql Hash". To go back to the previous page you can use the arrow in the top-left corner of the page. 
![Drillthrough](/Images/PowerBI_DrillThrough.png) ## Authentication When connecting to the specified source or sources (if the data are on 2 different server/databases) power Bi will use windows authentication. If the current user does not have the permission to access the data the load will fail. To use different credentials you will need to: * Run the Power BI template * Save it as a report even if it fails to load the data * Go to: "File -> options and settings -> data source settings" select the datasource and click "edit permission" now you can set the authentication method and credentials ## Use the Power BI Report on Existing Databases **What if I already have the data and just want to use the Power BI report?** Power BI needs a bunch of views in order to load the data, those views are created when some workload data are written to the db for the first time. You can manually create the views by running the procedure in this folder: [WorkloadTools\Consumer\Analysis\createAnalysisView.sql](/WorkloadTools/Consumer/Analysis/createAnalysisView.sql) ``` --At least one of the two parameters must be provided, the schema must be the same of your data tables EXEC [dbo].[createAnalysisView] @baselineSchema = N'', @replaySchema = N'' ``` ## Additional Suggestions Power BI Desktop does not offer a "wiewer mode", it has been made to create and edit reports. Anyone with the file has full control over it, and can create/edit/delete any visual or measure in the frontend or tables, relationships and M script in the backend. 
In order to make the report more usable on Power BI Desktop you can set the following * **Lock Objects** - this prevents them from moving around while using the report (in the top bar "View" → "Lock Objects" checkbox) * **Collapse Unused Bars** - this allows you to recover plently of space to view the report, you can collapse the side-bars and the top bar (small arrow i the top right corner) ## Notes For Editors and Curious If you are new to Power BI and want to make some changes, make your own report or simply know how the report works you may need to know the following * **Hidden Objects** - to have a more readable report only the strictly necessary is visible in the "Field" panel, some tables, columns and measures (formulas) are hidden. To view them expand the "Field" side-bar, right click and enable "View hidden" * **Time Field** - For how the Power BI model works: * **always** use the field [Elapsed Time (min)] of the "Time" table in any visual that displayes the trend by time (or use the time in general) * Any other [Elapsed Time (min)] Field (there is one in almost every table, set as hidden) will not propagate the filter correctly. 
If used you will obtain flat charts and static numbers ================================================ FILE: Setup/Product.wxs ================================================ ================================================ FILE: Setup/Setup.wixproj ================================================  Debug x86 3.10 2.0 {BBF5FDA0-C08F-48C9-9B98-E017DD8ABB5D} Package WorkloadTools $(MSBuildExtensionsPath32)\Microsoft\WiX\v3.x\Wix.targets $(MSBuildExtensionsPath)\Microsoft\WiX\v3.x\Wix.targets 0.0.0.0 bin\x86\Release\ obj\x86\Release\ BuildVersion=$(BuildVersion);Platform=$(Platform) bin\x64\Release\ obj\x64\Release\ BuildVersion=$(BuildVersion);Platform=$(Platform) bin\x86\Debug\ obj\x86\Debug\ BuildVersion=$(BuildVersion);Platform=$(Platform) bin\x64\Debug\ obj\x64\Debug\ BuildVersion=$(BuildVersion);Platform=$(Platform) call $(ProjectDir)postbuild.bat "$(TargetPath)" "$(TargetDir)WorkloadTools_$(Platform)$(TargetExt)" ================================================ FILE: Setup/SignMsi.ps1 ================================================ [CmdletBinding()] Param( [Parameter(Mandatory=$True,Position=1)] [string]$InputFile, [Parameter(Mandatory=$True,Position=2)] [string]$OutputFile ) if(-not (Test-Path $PSScriptRoot\SignParams.ps1)) { Write-Warning "No code signing is applied to the .msi file." Write-Warning "You need to create a file called SignParams.ps1 and provide signing info." 
Write-Output "Moving $InputFile --> $OutputFile" Move-Item $InputFile $OutputFile -Force exit } # read paramters $signParams = get-content $PSScriptRoot\SignParams.ps1 -Raw Invoke-Expression $signParams $params = $( 'sign' ,'/fd' ,'SHA1' ,'/f' ,('"' + $certPath + '"') ,'/p' ,('"' + $certPass + '"') ,'/sha1' ,$certSha ,'/t' ,('"' + $certTime + '"') ,'/d' ,'"WorkloadTools"' ) & $signTool ($params + $InputFile) Write-Output "Moving $InputFile --> $OutputFile" Move-Item $InputFile $OutputFile -Force ================================================ FILE: Setup/buildmsi.ps1 ================================================ param ( [Parameter(Mandatory=$false)] [string]$BuildVersion = "1.0.0.0", [Parameter(Mandatory=$false)] [string]$Platform = "x64" ) Set-Location $PSScriptRoot # --------------------------------------------------------------------------- # Locate MSBuild via vswhere (ships with Visual Studio 2017+) # --------------------------------------------------------------------------- $vswhere = "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" if (-not (Test-Path $vswhere)) { throw "vswhere.exe not found at '$vswhere'. Visual Studio 2017 or newer is required." } $msbuild = & $vswhere -latest -requires Microsoft.Component.MSBuild ` -find MSBuild\**\Bin\MSBuild.exe | Select-Object -First 1 if (-not $msbuild) { throw "MSBuild.exe not found. Please install Visual Studio with the MSBuild component." } # --------------------------------------------------------------------------- # Locate WiX v3 tools (heat.exe, candle.exe, light.exe) # The WIX environment variable is set automatically by the WiX v3 installer. 
# --------------------------------------------------------------------------- $wixDir = $null if ($env:WIX -and (Test-Path $env:WIX)) { $wixDir = $env:WIX } if (-not $wixDir) { $wixDir = @( "${env:ProgramFiles(x86)}\WiX Toolset v3.14", "${env:ProgramFiles(x86)}\WiX Toolset v3.11", "${env:ProgramFiles(x86)}\WiX Toolset v3.10", "${env:ProgramFiles}\WiX Toolset v3.14", "${env:ProgramFiles}\WiX Toolset v3.11" ) | Where-Object { Test-Path $_ } | Select-Object -First 1 } if (-not $wixDir) { throw "WiX Toolset v3 not found. Install from https://wixtoolset.org/ or set the WIX environment variable." } $heat = Join-Path $wixDir "bin\heat.exe" $candle = Join-Path $wixDir "bin\candle.exe" $light = Join-Path $wixDir "bin\light.exe" # --------------------------------------------------------------------------- # Build the .NET projects whose output directories will be harvested into # the MSI by heat.exe. # # SqlWorkload and WorkloadViewer use the "AnyCPU" MSBuild platform to produce # x64 output (bin\x64\Release) or "x86" for x86 output (bin\x86\Release). # ConvertWorkload always outputs to bin\Release regardless of platform. # --------------------------------------------------------------------------- $netPlatform = if ($Platform -eq 'x86') { 'x86' } else { 'AnyCPU' } & $msbuild "$PSScriptRoot\..\SqlWorkload\SqlWorkload.csproj" ` -t:Rebuild -p:Configuration=Release "-p:Platform=$netPlatform" ` -nologo -verbosity:minimal if ($LASTEXITCODE -ne 0) { throw "SqlWorkload build failed." } & $msbuild "$PSScriptRoot\..\WorkloadViewer\WorkloadViewer.csproj" ` -t:Rebuild -p:Configuration=Release "-p:Platform=$netPlatform" ` -nologo -verbosity:minimal if ($LASTEXITCODE -ne 0) { throw "WorkloadViewer build failed." } & $msbuild "$PSScriptRoot\..\ConvertWorkload\ConvertWorkload.csproj" ` -t:Rebuild -p:Configuration=Release ` -nologo -verbosity:minimal if ($LASTEXITCODE -ne 0) { throw "ConvertWorkload build failed." 
} # --------------------------------------------------------------------------- # Prepare output and intermediate directories # --------------------------------------------------------------------------- $outDir = "$PSScriptRoot\bin\$Platform\Release" $objDir = "$PSScriptRoot\obj\$Platform\Release" foreach ($dir in $outDir, $objDir) { if (-not (Test-Path $dir)) { New-Item -ItemType Directory -Path $dir -Force | Out-Null } elseif (Test-Path "$dir\*") { Remove-Item "$dir\*" -Recurse -Force } } # --------------------------------------------------------------------------- # Harvest .NET output directories with heat.exe (WiX v3) # The generated harvest*.wxs files are placed in the Setup project directory # so they are picked up as Compile items in Setup.wixproj. # --------------------------------------------------------------------------- $sqlWorkloadDir = [System.IO.Path]::GetFullPath("$PSScriptRoot\..\SqlWorkload\bin\$Platform\Release") $workloadViewerDir = [System.IO.Path]::GetFullPath("$PSScriptRoot\..\WorkloadViewer\bin\$Platform\Release") $convertWorkloadDir = [System.IO.Path]::GetFullPath("$PSScriptRoot\..\ConvertWorkload\bin\Release") & $heat dir "$sqlWorkloadDir" ` -cg ProductComponents -dr INSTALLFOLDER -srd -sreg -ag ` -var var.SqlWorkloadDir ` -t "$PSScriptRoot\transform.xsl" ` -out "$PSScriptRoot\harvest.wxs" -nologo if ($LASTEXITCODE -ne 0) { throw "heat.exe failed for SqlWorkload." } & $heat dir "$workloadViewerDir" ` -cg WorkloadViewerComponents -dr INSTALLFOLDER -srd -sreg -ag ` -var var.WorkloadViewerDir ` -t "$PSScriptRoot\transform.xsl" -t "$PSScriptRoot\transform2.xsl" ` -t "$PSScriptRoot\transform-nodirs.xsl" ` -out "$PSScriptRoot\harvest2.wxs" -nologo if ($LASTEXITCODE -ne 0) { throw "heat.exe failed for WorkloadViewer." 
} & $heat dir "$convertWorkloadDir" ` -cg ConvertWorkloadComponents -dr INSTALLFOLDER -srd -sreg -ag ` -var var.ConvertWorkloadDir ` -t "$PSScriptRoot\transform.xsl" -t "$PSScriptRoot\transform2.xsl" ` -t "$PSScriptRoot\transform3.xsl" -t "$PSScriptRoot\transform-nodirs.xsl" ` -out "$PSScriptRoot\harvest3.wxs" -nologo if ($LASTEXITCODE -ne 0) { throw "heat.exe failed for ConvertWorkload." } # --------------------------------------------------------------------------- # Compile all WXS sources with candle.exe # --------------------------------------------------------------------------- $arch = if ($Platform -eq 'x86') { 'x86' } else { 'x64' } & $candle ` "$PSScriptRoot\Product.wxs" ` "$PSScriptRoot\harvest.wxs" ` "$PSScriptRoot\harvest2.wxs" ` "$PSScriptRoot\harvest3.wxs" ` -arch $arch ` "-dBuildVersion=$BuildVersion" ` "-dPlatform=$Platform" ` "-dSqlWorkloadDir=$sqlWorkloadDir" ` "-dWorkloadViewerDir=$workloadViewerDir" ` "-dConvertWorkloadDir=$convertWorkloadDir" ` -out "$objDir\" ` -nologo if ($LASTEXITCODE -ne 0) { throw "candle.exe failed." } # --------------------------------------------------------------------------- # Link with light.exe to produce the MSI # --------------------------------------------------------------------------- $wixObjs = Get-ChildItem "$objDir\*.wixobj" | Select-Object -ExpandProperty FullName & $light $wixObjs ` -out "$outDir\WorkloadTools.msi" ` -nologo if ($LASTEXITCODE -ne 0) { throw "light.exe failed." } # --------------------------------------------------------------------------- # Sign (or just rename if no signing cert is configured) # --------------------------------------------------------------------------- . 
$PSScriptRoot\SignMsi.ps1 ` -InputFile "$outDir\WorkloadTools.msi" ` -OutputFile "$outDir\WorkloadTools_$Platform.msi" ================================================ FILE: Setup/postbuild.bat ================================================ powershell.exe -ExecutionPolicy Bypass -NoProfile -NonInteractive -File %~dp0\SignMsi.ps1 -InputFile %1 -OutputFile %2 ================================================ FILE: Setup/transform-nodirs.xsl ================================================ ================================================ FILE: Setup/transform.xsl ================================================  ================================================ FILE: Setup/transform2.xsl ================================================ ================================================ FILE: Setup/transform3.xsl ================================================ ================================================ FILE: SetupBootstrapper/Bundle.wxs ================================================ ================================================ FILE: SetupBootstrapper/License/License.rtf ================================================ {\rtf1\adeflang1025\ansi\ansicpg1252\uc1\adeff0\deff0\stshfdbch0\stshfloch31506\stshfhich31506\stshfbi31506\deflang1033\deflangfe1033\themelang1033\themelangfe0\themelangcs0{\fonttbl{\f0\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\f2\fbidi \fmodern\fcharset0\fprq1{\*\panose 02070309020205020404}Courier New;} {\f34\fbidi \froman\fcharset0\fprq2{\*\panose 02040503050406030204}Cambria Math;}{\f37\fbidi \fswiss\fcharset0\fprq2{\*\panose 020f0502020204030204}Calibri;}{\flomajor\f31500\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;} {\fdbmajor\f31501\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\fhimajor\f31502\fbidi \fswiss\fcharset0\fprq2{\*\panose 020f0302020204030204}Calibri Light;} {\fbimajor\f31503\fbidi \froman\fcharset0\fprq2{\*\panose 
02020603050405020304}Times New Roman;}{\flominor\f31504\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;} {\fdbminor\f31505\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\fhiminor\f31506\fbidi \fswiss\fcharset0\fprq2{\*\panose 020f0502020204030204}Calibri;} {\fbiminor\f31507\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\f43\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\f44\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;} {\f46\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\f47\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\f48\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\f49\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);} {\f50\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\f51\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\f63\fbidi \fmodern\fcharset238\fprq1 Courier New CE;}{\f64\fbidi \fmodern\fcharset204\fprq1 Courier New Cyr;} {\f66\fbidi \fmodern\fcharset161\fprq1 Courier New Greek;}{\f67\fbidi \fmodern\fcharset162\fprq1 Courier New Tur;}{\f68\fbidi \fmodern\fcharset177\fprq1 Courier New (Hebrew);}{\f69\fbidi \fmodern\fcharset178\fprq1 Courier New (Arabic);} {\f70\fbidi \fmodern\fcharset186\fprq1 Courier New Baltic;}{\f71\fbidi \fmodern\fcharset163\fprq1 Courier New (Vietnamese);}{\f383\fbidi \froman\fcharset238\fprq2 Cambria Math CE;}{\f384\fbidi \froman\fcharset204\fprq2 Cambria Math Cyr;} {\f386\fbidi \froman\fcharset161\fprq2 Cambria Math Greek;}{\f387\fbidi \froman\fcharset162\fprq2 Cambria Math Tur;}{\f390\fbidi \froman\fcharset186\fprq2 Cambria Math Baltic;}{\f391\fbidi \froman\fcharset163\fprq2 Cambria Math (Vietnamese);} {\f413\fbidi \fswiss\fcharset238\fprq2 Calibri CE;}{\f414\fbidi \fswiss\fcharset204\fprq2 Calibri Cyr;}{\f416\fbidi \fswiss\fcharset161\fprq2 Calibri Greek;}{\f417\fbidi \fswiss\fcharset162\fprq2 Calibri Tur;} {\f418\fbidi \fswiss\fcharset177\fprq2 Calibri 
(Hebrew);}{\f419\fbidi \fswiss\fcharset178\fprq2 Calibri (Arabic);}{\f420\fbidi \fswiss\fcharset186\fprq2 Calibri Baltic;}{\f421\fbidi \fswiss\fcharset163\fprq2 Calibri (Vietnamese);} {\flomajor\f31508\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\flomajor\f31509\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\flomajor\f31511\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;} {\flomajor\f31512\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\flomajor\f31513\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\flomajor\f31514\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);} {\flomajor\f31515\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\flomajor\f31516\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fdbmajor\f31518\fbidi \froman\fcharset238\fprq2 Times New Roman CE;} {\fdbmajor\f31519\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\fdbmajor\f31521\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\fdbmajor\f31522\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;} {\fdbmajor\f31523\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\fdbmajor\f31524\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\fdbmajor\f31525\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;} {\fdbmajor\f31526\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fhimajor\f31528\fbidi \fswiss\fcharset238\fprq2 Calibri Light CE;}{\fhimajor\f31529\fbidi \fswiss\fcharset204\fprq2 Calibri Light Cyr;} {\fhimajor\f31531\fbidi \fswiss\fcharset161\fprq2 Calibri Light Greek;}{\fhimajor\f31532\fbidi \fswiss\fcharset162\fprq2 Calibri Light Tur;}{\fhimajor\f31533\fbidi \fswiss\fcharset177\fprq2 Calibri Light (Hebrew);} {\fhimajor\f31534\fbidi \fswiss\fcharset178\fprq2 Calibri Light (Arabic);}{\fhimajor\f31535\fbidi \fswiss\fcharset186\fprq2 Calibri Light Baltic;}{\fhimajor\f31536\fbidi \fswiss\fcharset163\fprq2 Calibri Light (Vietnamese);} {\fbimajor\f31538\fbidi \froman\fcharset238\fprq2 
Times New Roman CE;}{\fbimajor\f31539\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\fbimajor\f31541\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;} {\fbimajor\f31542\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fbimajor\f31543\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\fbimajor\f31544\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);} {\fbimajor\f31545\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fbimajor\f31546\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\flominor\f31548\fbidi \froman\fcharset238\fprq2 Times New Roman CE;} {\flominor\f31549\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\flominor\f31551\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\flominor\f31552\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;} {\flominor\f31553\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\flominor\f31554\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\flominor\f31555\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;} {\flominor\f31556\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fdbminor\f31558\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\fdbminor\f31559\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;} {\fdbminor\f31561\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\fdbminor\f31562\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fdbminor\f31563\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);} {\fdbminor\f31564\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\fdbminor\f31565\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fdbminor\f31566\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);} {\fhiminor\f31568\fbidi \fswiss\fcharset238\fprq2 Calibri CE;}{\fhiminor\f31569\fbidi \fswiss\fcharset204\fprq2 Calibri Cyr;}{\fhiminor\f31571\fbidi \fswiss\fcharset161\fprq2 Calibri Greek;}{\fhiminor\f31572\fbidi \fswiss\fcharset162\fprq2 Calibri Tur;} 
{\fhiminor\f31573\fbidi \fswiss\fcharset177\fprq2 Calibri (Hebrew);}{\fhiminor\f31574\fbidi \fswiss\fcharset178\fprq2 Calibri (Arabic);}{\fhiminor\f31575\fbidi \fswiss\fcharset186\fprq2 Calibri Baltic;} {\fhiminor\f31576\fbidi \fswiss\fcharset163\fprq2 Calibri (Vietnamese);}{\fbiminor\f31578\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\fbiminor\f31579\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;} {\fbiminor\f31581\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\fbiminor\f31582\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fbiminor\f31583\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);} {\fbiminor\f31584\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\fbiminor\f31585\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fbiminor\f31586\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}} {\colortbl;\red0\green0\blue0;\red0\green0\blue255;\red0\green255\blue255;\red0\green255\blue0;\red255\green0\blue255;\red255\green0\blue0;\red255\green255\blue0;\red255\green255\blue255;\red0\green0\blue128;\red0\green128\blue128;\red0\green128\blue0; \red128\green0\blue128;\red128\green0\blue0;\red128\green128\blue0;\red128\green128\blue128;\red192\green192\blue192;\red0\green0\blue0;\red0\green0\blue0;}{\*\defchp \f31506\fs22 }{\*\defpap \ql \li0\ri0\sa160\sl259\slmult1 \widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 }\noqfpromote {\stylesheet{\ql \li0\ri0\sa160\sl259\slmult1\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af0\afs22\alang1025 \ltrch\fcs0 \f31506\fs22\lang1033\langfe1033\cgrid\langnp1033\langfenp1033 \snext0 \sqformat \spriority0 Normal;}{\*\cs10 \additive \ssemihidden \sunhideused \spriority1 Default Paragraph Font;}{\* \ts11\tsrowd\trftsWidthB3\trpaddl108\trpaddr108\trpaddfl3\trpaddft3\trpaddfb3\trpaddfr3\trcbpat1\trcfpat1\tblind0\tblindtype3\tsvertalt\tsbrdrt\tsbrdrl\tsbrdrb\tsbrdrr\tsbrdrdgl\tsbrdrdgr\tsbrdrh\tsbrdrv \ql 
\li0\ri0\sa160\sl259\slmult1 \widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af31506\afs22\alang1025 \ltrch\fcs0 \f31506\fs22\lang1033\langfe1033\cgrid\langnp1033\langfenp1033 \snext11 \ssemihidden \sunhideused Normal Table;}{\*\cs15 \additive \rtlch\fcs1 \af0 \ltrch\fcs0 \ul\cf2 \sbasedon10 \ssemihidden \sunhideused \styrsid6231089 Hyperlink;}{\s16\ql \li0\ri0\widctlpar \tx916\tx1832\tx2748\tx3664\tx4580\tx5496\tx6412\tx7328\tx8244\tx9160\tx10076\tx10992\tx11908\tx12824\tx13740\tx14656\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af2\afs20\alang1025 \ltrch\fcs0 \f2\fs20\lang1033\langfe1033\cgrid\langnp1033\langfenp1033 \sbasedon0 \snext16 \slink17 \ssemihidden \sunhideused \styrsid6231089 HTML Preformatted;}{\*\cs17 \additive \rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20 \sbasedon10 \slink16 \slocked \ssemihidden \styrsid6231089 HTML Preformatted Char;}}{\*\pgptbl {\pgp\ipgp0\itap0\li0\ri0\sb0\sa0}}{\*\rsidtbl \rsid1144825\rsid6231089\rsid7602890\rsid8207159\rsid10379327\rsid11422003\rsid12734223\rsid13719530\rsid14571638 \rsid15563878}{\mmathPr\mmathFont34\mbrkBin0\mbrkBinSub0\msmallFrac0\mdispDef1\mlMargin0\mrMargin0\mdefJc1\mwrapIndent1440\mintLim0\mnaryLim1}{\info{\author Gianluca Sartori}{\operator Gianluca Sartori}{\creatim\yr2020\mo3\dy30\hr16\min21} {\revtim\yr2020\mo3\dy30\hr17\min5}{\version5}{\edmins3}{\nofpages1}{\nofwords160}{\nofchars914}{\nofcharsws1072}{\vern123}}{\*\xmlnstbl {\xmlns1 http://schemas.microsoft.com/office/word/2003/wordml}} \paperw12240\paperh15840\margl1440\margr1440\margt1440\margb1440\gutter0\ltrsect \widowctrl\ftnbj\aenddoc\trackmoves0\trackformatting1\donotembedsysfont1\relyonvml0\donotembedlingdata0\grfdocevents0\validatexml1\showplaceholdtext0\ignoremixedcontent0\saveinvalidxml0\showxmlerrors1\noxlattoyen \expshrtn\noultrlspc\dntblnsbdb\nospaceforul\formshade\horzdoc\dgmargin\dghspace180\dgvspace180\dghorigin1440\dgvorigin1440\dghshow1\dgvshow1 
\jexpand\viewkind1\viewscale120\pgbrdrhead\pgbrdrfoot\splytwnine\ftnlytwnine\htmautsp\nolnhtadjtbl\useltbaln\alntblind\lytcalctblwd\lyttblrtgr\lnbrkrule\nobrkwrptbl\snaptogridincell\allowfieldendsel\wrppunct \asianbrkrule\rsidroot6231089\newtblstyruls\nogrowautofit\usenormstyforlist\noindnmbrts\felnbrelev\nocxsptable\indrlsweleven\noafcnsttbl\afelev\utinl\hwelev\spltpgpar\notcvasp\notbrkcnstfrctbl\notvatxbx\krnprsnet\cachedcolbal \nouicompat \fet0 {\*\wgrffmtfilter 2450}\nofeaturethrottle1\ilfomacatclnup0\ltrpar \sectd \ltrsect\linex0\endnhere\sectlinegrid360\sectdefaultcl\sftnbj {\*\pnseclvl1\pnucrm\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl2\pnucltr\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl3\pndec\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl4\pnlcltr\pnstart1\pnindent720\pnhang {\pntxta )}}{\*\pnseclvl5\pndec\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl6\pnlcltr\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl7\pnlcrm\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl8\pnlcltr\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl9\pnlcrm\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}} \pard\plain \ltrpar\ql \li0\ri0\widctlpar\tx916\tx1832\tx2748\tx3664\tx4580\tx5496\tx6412\tx7328\tx8244\tx9160\tx10076\tx10992\tx11908\tx12824\tx13740\tx14656\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0\pararsid6231089 \rtlch\fcs1 \af0\afs22\alang1025 \ltrch\fcs0 \f31506\fs22\lang1033\langfe1033\cgrid\langnp1033\langfenp1033 {\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid12734223 MIT License}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 \par \par Copyright (c) 20}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089 20}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 Gianluca Sartori \par \par Permission is hereby granted, free of charge, to any person obtaining a copy}{\rtlch\fcs1 \af2\afs20 
\ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 of this software and associated documentation files (the "Software"), to deal}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 in the Software without restriction, including without limitation the rights}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 to use, copy, modify, merge, pu blish, distribute, sublicense, and/or sell}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 copies of the Software, and to permit persons to whom the Software is}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 furnished to do so, subject to the following conditions:}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089 \par }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825\charrsid6231089 \par }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 The above copyright notice and this permission notice shall be included in }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 a}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 ll}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 copies or substantial portions of the Software. 
\par \par }\pard \ltrpar\ql \li0\ri0\widctlpar\tx916\tx1832\tx2748\tx3664\tx4580\tx5496\tx6412\tx7328\tx8244\tx9160\tx10076\tx10992\tx11908\tx12824\tx13740\tx14656\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0\pararsid1144825 {\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid6231089\charrsid6231089 SOFTWARE.}{\rtlch\fcs1 \af2\afs20 \ltrch\fcs0 \f2\fs20\cf1\insrsid1144825 }{\rtlch\fcs1 \af0 \ltrch\fcs0 \insrsid13719530\charrsid6231089 \par }{\*\themedata 504b030414000600080000002100e9de0fbfff0000001c020000130000005b436f6e74656e745f54797065735d2e786d6cac91cb4ec3301045f748fc83e52d4a 
9cb2400825e982c78ec7a27cc0c8992416c9d8b2a755fbf74cd25442a820166c2cd933f79e3be372bd1f07b5c3989ca74aaff2422b24eb1b475da5df374fd9ad 5689811a183c61a50f98f4babebc2837878049899a52a57be670674cb23d8e90721f90a4d2fa3802cb35762680fd800ecd7551dc18eb899138e3c943d7e503b6 b01d583deee5f99824e290b4ba3f364eac4a430883b3c092d4eca8f946c916422ecab927f52ea42b89a1cd59c254f919b0e85e6535d135a8de20f20b8c12c3b0 0c895fcf6720192de6bf3b9e89ecdbd6596cbcdd8eb28e7c365ecc4ec1ff1460f53fe813d3cc7f5b7f020000ffff0300504b030414000600080000002100a5d6 a7e7c0000000360100000b0000005f72656c732f2e72656c73848fcf6ac3300c87ef85bd83d17d51d2c31825762fa590432fa37d00e1287f68221bdb1bebdb4f c7060abb0884a4eff7a93dfeae8bf9e194e720169aaa06c3e2433fcb68e1763dbf7f82c985a4a725085b787086a37bdbb55fbc50d1a33ccd311ba548b6309512 0f88d94fbc52ae4264d1c910d24a45db3462247fa791715fd71f989e19e0364cd3f51652d73760ae8fa8c9ffb3c330cc9e4fc17faf2ce545046e37944c69e462 a1a82fe353bd90a865aad41ed0b5b8f9d6fd010000ffff0300504b0304140006000800000021006b799616830000008a0000001c0000007468656d652f746865 6d652f7468656d654d616e616765722e786d6c0ccc4d0ac3201040e17da17790d93763bb284562b2cbaebbf600439c1a41c7a0d29fdbd7e5e38337cedf14d59b 4b0d592c9c070d8a65cd2e88b7f07c2ca71ba8da481cc52c6ce1c715e6e97818c9b48d13df49c873517d23d59085adb5dd20d6b52bd521ef2cdd5eb9246a3d8b 4757e8d3f729e245eb2b260a0238fd010000ffff0300504b030414000600080000002100b6f4679893070000c9200000160000007468656d652f7468656d652f 7468656d65312e786d6cec59cd8b1bc915bf07f23f347d97f5d5ad8fc1f2a24fcfda33b6b164873dd648a5eef2547789aad28cc56208de532e81c026e49085bd ed21842cecc22eb9e48f31d8249b3f22afaa5bdd5552c99e191c3061463074977eefd5afde7bf5de53d5ddcf5e26d4bbc05c1096f6fcfa9d9aefe174ce16248d 7afeb3d9a4d2f13d2151ba4094a5b8e76fb0f03fbbf7eb5fdd454732c609f6403e1547a8e7c752ae8eaa5531876124eeb0154ee1bb25e30992f0caa3ea82a34b d09bd06aa3566b55134452df4b51026a1f2f97648ebd9952e9dfdb2a1f53784da5500373caa74a35b6243476715e5708b11143cabd0b447b3eccb3609733fc52 
fa1e4542c2173dbfa6fffceabdbb5574940b517940d6909be8bf5c2e17589c37f49c3c3a2b260d823068f50bfd1a40e53e6edc1eb7c6ad429f06a0f91c569a71 b175b61bc320c71aa0ecd1a17bd41e35eb16ded0dfdce3dc0fd5c7c26b50a63fd8c34f2643b0a285d7a00c1feee1c3417730b2f56b50866fede1dbb5fe28685b fa3528a6243ddf43d7c25673b85d6d0159327aec8477c360d26ee4ca4b144443115d6a8a254be5a1584bd00bc6270050408a24493db959e1259a43140f112567 9c7827248a21f056286502866b8ddaa4d684ffea13e827ed5174849121ad780113b137a4f87862cec94af6fc07a0d537206f7ffef9cdeb1fdfbcfee9cd575fbd 79fdf77c6eadca923b466964cafdf2dd1ffef3cd6fbd7ffff0ed2f5fff319b7a172f4cfcbbbffdeedd3ffef93ef5b0e2d2146ffff4fdbb1fbf7ffbe7dfffebaf 5f3bb4f7393a33e1339260e13dc297de5396c0021dfcf119bf9ec42c46c494e8a791402952b338f48f656ca11f6d10450edc00db767cce21d5b880f7d72f2cc2 d398af2571687c182716f094313a60dc6985876a2ec3ccb3751ab927e76b13f714a10bd7dc43945a5e1eaf579063894be530c616cd2714a5124538c5d253dfb1 738c1dabfb8210cbaea764ce99604be97d41bc01224e93ccc899154da5d03149c02f1b1741f0b7659bd3e7de8051d7aa47f8c246c2de40d4417e86a965c6fb68 2d51e252394309350d7e8264ec2239ddf0b9891b0b099e8e3065de78818570c93ce6b05ec3e90f21cdb8dd7e4a37898de4929cbb749e20c64ce4889d0f6394ac 5cd829496313fbb938871045de13265df05366ef10f50e7e40e941773f27d872f787b3c133c8b026a53240d4376beef0e57dccacf89d6ee8126157aae9f3c44a b17d4e9cd131584756689f604cd1255a60ec3dfbdcc160c05696cd4bd20f62c82ac7d815580f901dabea3dc5027a25d5dcece7c91322ac909de2881de073bad9 493c1b9426881fd2fc08bc6eda7c0ca52e7105c0633a3f37818f08f480102f4ea33c16a0c308ee835a9fc4c82a60ea5db8e375c32dff5d658fc1be7c61d1b8c2 be04197c6d1948eca6cc7b6d3343d49aa00c9819822ec3956e41c4727f29a28aab165b3be596f6a62ddd00dd91d5f42424fd6007b4d3fb84ffbbde073a8cb77f f9c6b10f3e4ebfe3566c25ab6b763a8792c9f14e7f7308b7dbd50c195f904fbfa919a175fa04431dd9cf58b73dcd6d4fe3ffdff73487f6f36d2773a8dfb8ed64 7ce8306e3b99fc70e5e3743265f3027d8d3af0c80e7af4b14f72f0d46749289dca0dc527421ffc08f83db398c0a092d3279eb838055cc5f0a8ca1c4c60e1228e 
b48cc799fc0d91f134462b381daafb4a492472d591f0564cc0a1911e76ea5678ba4e4ed9223becacd7d5c16656590592e5782d2cc6e1a04a66e856bb3cc02bd4 6bb6913e68dd1250b2d721614c6693683a48b4b783ca48fa58178ce620a157f65158741d2c3a4afdd6557b2c805ae115f8c1edc1cff49e1f06200242701e07cd f942f92973f5d6bbda991fd3d3878c69450034d8db08283ddd555c0f2e4fad2e0bb52b78da2261849b4d425b46377822869fc17974aad1abd0b8aeafbba54b2d 7aca147a3e08ad9246bbf33e1637f535c8ede6069a9a9982a6de65cf6f35430899395af5fc251c1ac363b282d811ea3717a211dcbccc25cf36fc4d32cb8a0b39 4222ce0cae934e960d122231f728497abe5a7ee1069aea1ca2b9d51b90103e59725d482b9f1a3970baed64bc5ce2b934dd6e8c284b67af90e1b35ce1fc568bdf 1cac24d91adc3d8d1797de195df3a708422c6cd795011744c0dd413db3e682c0655891c8caf8db294c79da356fa3740c65e388ae62945714339967709dca0b3a faadb081f196af190c6a98242f8467912ab0a651ad6a5a548d8cc3c1aafb6121653923699635d3ca2aaa6abab39835c3b60cecd8f26645de60b53531e434b3c2 67a97b37e576b7b96ea74f28aa0418bcb09fa3ea5ea12018d4cac92c6a8af17e1a56393b1fb56bc776811fa07695226164fdd656ed8edd8a1ae19c0e066f54f9 416e376a6168b9ed2bb5a5f5adb979b1cdce5e40f2184197bba6526857c2c92e47d0104d754f92a50dd8222f65be35e0c95b73d2f3bfac85fd60d80887955a27 1c57826650ab74c27eb3d20fc3667d1cd66ba341e31514161927f530bbb19fc00506dde4f7f67a7cefee3ed9ded1dc99b3a4caf4dd7c5513d777f7f5c6e1bb7b 8f40d2f9b2d598749bdd41abd26df627956034e854bac3d6a0326a0ddba3c9681876ba9357be77a1c141bf390c5ae34ea5551f0e2b41aba6e877ba9576d068f4 8376bf330efaaff23606569ea58fdc16605ecdebde7f010000ffff0300504b0304140006000800000021000dd1909fb60000001b010000270000007468656d65 2f7468656d652f5f72656c732f7468656d654d616e616765722e786d6c2e72656c73848f4d0ac2301484f78277086f6fd3ba109126dd88d0add40384e4350d36 3f2451eced0dae2c082e8761be9969bb979dc9136332de3168aa1a083ae995719ac16db8ec8e4052164e89d93b64b060828e6f37ed1567914b284d262452282e 3198720e274a939cd08a54f980ae38a38f56e422a3a641c8bbd048f7757da0f19b017cc524bd62107bd5001996509affb3fd381a89672f1f165dfe514173d985 
0528a2c6cce0239baa4c04ca5bbabac4df000000ffff0300504b01022d0014000600080000002100e9de0fbfff0000001c020000130000000000000000000000 0000000000005b436f6e74656e745f54797065735d2e786d6c504b01022d0014000600080000002100a5d6a7e7c0000000360100000b00000000000000000000 000000300100005f72656c732f2e72656c73504b01022d00140006000800000021006b799616830000008a0000001c0000000000000000000000000019020000 7468656d652f7468656d652f7468656d654d616e616765722e786d6c504b01022d0014000600080000002100b6f4679893070000c92000001600000000000000 000000000000d60200007468656d652f7468656d652f7468656d65312e786d6c504b01022d00140006000800000021000dd1909fb60000001b01000027000000 000000000000000000009d0a00007468656d652f7468656d652f5f72656c732f7468656d654d616e616765722e786d6c2e72656c73504b050600000000050005005d010000980b00000000} {\*\colorschememapping 3c3f786d6c2076657273696f6e3d22312e302220656e636f64696e673d225554462d3822207374616e64616c6f6e653d22796573223f3e0d0a3c613a636c724d 617020786d6c6e733a613d22687474703a2f2f736368656d61732e6f70656e786d6c666f726d6174732e6f72672f64726177696e676d6c2f323030362f6d6169 6e22206267313d226c743122207478313d22646b3122206267323d226c743222207478323d22646b322220616363656e74313d22616363656e74312220616363 656e74323d22616363656e74322220616363656e74333d22616363656e74332220616363656e74343d22616363656e74342220616363656e74353d22616363656e74352220616363656e74363d22616363656e74362220686c696e6b3d22686c696e6b2220666f6c486c696e6b3d22666f6c486c696e6b222f3e} {\*\latentstyles\lsdstimax376\lsdlockeddef0\lsdsemihiddendef0\lsdunhideuseddef0\lsdqformatdef0\lsdprioritydef99{\lsdlockedexcept \lsdqformat1 \lsdpriority0 \lsdlocked0 Normal;\lsdqformat1 \lsdpriority9 \lsdlocked0 heading 1; \lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 2;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 3;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 4; \lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 
heading 5;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 6;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 7; \lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 8;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 9;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 1; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 5; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 6;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 7;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 8;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 9; \lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 1;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 2;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 3; \lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 4;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 5;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 6; \lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 7;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 8;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 9;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Normal Indent; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 footnote text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 annotation text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 header;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 footer; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index heading;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority35 \lsdlocked0 caption;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 table of figures; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 envelope address;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 envelope 
return;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 footnote reference;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 annotation reference; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 line number;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 page number;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 endnote reference;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 endnote text; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 table of authorities;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 macro;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 toa heading;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 3; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 3; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 3; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 5;\lsdqformat1 \lsdpriority10 \lsdlocked0 Title;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Closing; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Signature;\lsdsemihidden1 \lsdunhideused1 \lsdpriority1 \lsdlocked0 Default Paragraph Font;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text Indent; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 4; \lsdsemihidden1 \lsdunhideused1 
\lsdlocked0 List Continue 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Message Header;\lsdqformat1 \lsdpriority11 \lsdlocked0 Subtitle;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Salutation; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Date;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text First Indent;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text First Indent 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Note Heading; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text Indent 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text Indent 3; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Block Text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Hyperlink;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 FollowedHyperlink;\lsdqformat1 \lsdpriority22 \lsdlocked0 Strong; \lsdqformat1 \lsdpriority20 \lsdlocked0 Emphasis;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Document Map;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Plain Text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 E-mail Signature; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Top of Form;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Bottom of Form;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Normal (Web);\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Acronym; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Address;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Cite;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Code;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Definition; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Keyboard;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Preformatted;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Sample;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Typewriter; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Variable;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 annotation subject;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 No 
List;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Outline List 1; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Outline List 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Outline List 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Balloon Text;\lsdpriority39 \lsdlocked0 Table Grid; \lsdsemihidden1 \lsdlocked0 Placeholder Text;\lsdqformat1 \lsdpriority1 \lsdlocked0 No Spacing;\lsdpriority60 \lsdlocked0 Light Shading;\lsdpriority61 \lsdlocked0 Light List;\lsdpriority62 \lsdlocked0 Light Grid; \lsdpriority63 \lsdlocked0 Medium Shading 1;\lsdpriority64 \lsdlocked0 Medium Shading 2;\lsdpriority65 \lsdlocked0 Medium List 1;\lsdpriority66 \lsdlocked0 Medium List 2;\lsdpriority67 \lsdlocked0 Medium Grid 1;\lsdpriority68 \lsdlocked0 Medium Grid 2; \lsdpriority69 \lsdlocked0 Medium Grid 3;\lsdpriority70 \lsdlocked0 Dark List;\lsdpriority71 \lsdlocked0 Colorful Shading;\lsdpriority72 \lsdlocked0 Colorful List;\lsdpriority73 \lsdlocked0 Colorful Grid;\lsdpriority60 \lsdlocked0 Light Shading Accent 1; \lsdpriority61 \lsdlocked0 Light List Accent 1;\lsdpriority62 \lsdlocked0 Light Grid Accent 1;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 1;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 1;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 1; \lsdsemihidden1 \lsdlocked0 Revision;\lsdqformat1 \lsdpriority34 \lsdlocked0 List Paragraph;\lsdqformat1 \lsdpriority29 \lsdlocked0 Quote;\lsdqformat1 \lsdpriority30 \lsdlocked0 Intense Quote;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 1; \lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 1;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 1;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 1;\lsdpriority70 \lsdlocked0 Dark List Accent 1;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 1; \lsdpriority72 \lsdlocked0 Colorful List Accent 1;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 1;\lsdpriority60 \lsdlocked0 Light Shading Accent 2;\lsdpriority61 \lsdlocked0 Light List Accent 2;\lsdpriority62 \lsdlocked0 Light Grid Accent 2; 
\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 2;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 2;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 2;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 2; \lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 2;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 2;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 2;\lsdpriority70 \lsdlocked0 Dark List Accent 2;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 2; \lsdpriority72 \lsdlocked0 Colorful List Accent 2;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 2;\lsdpriority60 \lsdlocked0 Light Shading Accent 3;\lsdpriority61 \lsdlocked0 Light List Accent 3;\lsdpriority62 \lsdlocked0 Light Grid Accent 3; \lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 3;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 3;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 3;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 3; \lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 3;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 3;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 3;\lsdpriority70 \lsdlocked0 Dark List Accent 3;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 3; \lsdpriority72 \lsdlocked0 Colorful List Accent 3;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 3;\lsdpriority60 \lsdlocked0 Light Shading Accent 4;\lsdpriority61 \lsdlocked0 Light List Accent 4;\lsdpriority62 \lsdlocked0 Light Grid Accent 4; \lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 4;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 4;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 4;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 4; \lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 4;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 4;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 4;\lsdpriority70 \lsdlocked0 Dark List Accent 4;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 4; \lsdpriority72 \lsdlocked0 Colorful List Accent 4;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 4;\lsdpriority60 \lsdlocked0 Light 
Shading Accent 5;\lsdpriority61 \lsdlocked0 Light List Accent 5;\lsdpriority62 \lsdlocked0 Light Grid Accent 5; \lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 5;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 5;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 5;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 5; \lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 5;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 5;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 5;\lsdpriority70 \lsdlocked0 Dark List Accent 5;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 5; \lsdpriority72 \lsdlocked0 Colorful List Accent 5;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 5;\lsdpriority60 \lsdlocked0 Light Shading Accent 6;\lsdpriority61 \lsdlocked0 Light List Accent 6;\lsdpriority62 \lsdlocked0 Light Grid Accent 6; \lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 6;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 6;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 6;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 6; \lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 6;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 6;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 6;\lsdpriority70 \lsdlocked0 Dark List Accent 6;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 6; \lsdpriority72 \lsdlocked0 Colorful List Accent 6;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 6;\lsdqformat1 \lsdpriority19 \lsdlocked0 Subtle Emphasis;\lsdqformat1 \lsdpriority21 \lsdlocked0 Intense Emphasis; \lsdqformat1 \lsdpriority31 \lsdlocked0 Subtle Reference;\lsdqformat1 \lsdpriority32 \lsdlocked0 Intense Reference;\lsdqformat1 \lsdpriority33 \lsdlocked0 Book Title;\lsdsemihidden1 \lsdunhideused1 \lsdpriority37 \lsdlocked0 Bibliography; \lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority39 \lsdlocked0 TOC Heading;\lsdpriority41 \lsdlocked0 Plain Table 1;\lsdpriority42 \lsdlocked0 Plain Table 2;\lsdpriority43 \lsdlocked0 Plain Table 3;\lsdpriority44 \lsdlocked0 Plain Table 4; \lsdpriority45 \lsdlocked0 
Plain Table 5;\lsdpriority40 \lsdlocked0 Grid Table Light;\lsdpriority46 \lsdlocked0 Grid Table 1 Light;\lsdpriority47 \lsdlocked0 Grid Table 2;\lsdpriority48 \lsdlocked0 Grid Table 3;\lsdpriority49 \lsdlocked0 Grid Table 4; \lsdpriority50 \lsdlocked0 Grid Table 5 Dark;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 1;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 1; \lsdpriority48 \lsdlocked0 Grid Table 3 Accent 1;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 1;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 1;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 1; \lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 1;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 2;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 2;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 2; \lsdpriority49 \lsdlocked0 Grid Table 4 Accent 2;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 2;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 2;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 2; \lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 3;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 3;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 3;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 3; \lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 3;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 3;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 3;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 4; \lsdpriority47 \lsdlocked0 Grid Table 2 Accent 4;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 4;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 4;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 4; \lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 4;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 4;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 5;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 5; \lsdpriority48 \lsdlocked0 Grid 
Table 3 Accent 5;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 5;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 5;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 5; \lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 5;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 6;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 6;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 6; \lsdpriority49 \lsdlocked0 Grid Table 4 Accent 6;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 6;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 6;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 6; \lsdpriority46 \lsdlocked0 List Table 1 Light;\lsdpriority47 \lsdlocked0 List Table 2;\lsdpriority48 \lsdlocked0 List Table 3;\lsdpriority49 \lsdlocked0 List Table 4;\lsdpriority50 \lsdlocked0 List Table 5 Dark; \lsdpriority51 \lsdlocked0 List Table 6 Colorful;\lsdpriority52 \lsdlocked0 List Table 7 Colorful;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 1;\lsdpriority47 \lsdlocked0 List Table 2 Accent 1;\lsdpriority48 \lsdlocked0 List Table 3 Accent 1; \lsdpriority49 \lsdlocked0 List Table 4 Accent 1;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 1;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 1;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 1; \lsdpriority46 \lsdlocked0 List Table 1 Light Accent 2;\lsdpriority47 \lsdlocked0 List Table 2 Accent 2;\lsdpriority48 \lsdlocked0 List Table 3 Accent 2;\lsdpriority49 \lsdlocked0 List Table 4 Accent 2; \lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 2;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 2;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 2;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 3; \lsdpriority47 \lsdlocked0 List Table 2 Accent 3;\lsdpriority48 \lsdlocked0 List Table 3 Accent 3;\lsdpriority49 \lsdlocked0 List Table 4 Accent 3;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 3; \lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 3;\lsdpriority52 
\lsdlocked0 List Table 7 Colorful Accent 3;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 4;\lsdpriority47 \lsdlocked0 List Table 2 Accent 4; \lsdpriority48 \lsdlocked0 List Table 3 Accent 4;\lsdpriority49 \lsdlocked0 List Table 4 Accent 4;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 4;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 4; \lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 4;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 5;\lsdpriority47 \lsdlocked0 List Table 2 Accent 5;\lsdpriority48 \lsdlocked0 List Table 3 Accent 5; \lsdpriority49 \lsdlocked0 List Table 4 Accent 5;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 5;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 5;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 5; \lsdpriority46 \lsdlocked0 List Table 1 Light Accent 6;\lsdpriority47 \lsdlocked0 List Table 2 Accent 6;\lsdpriority48 \lsdlocked0 List Table 3 Accent 6;\lsdpriority49 \lsdlocked0 List Table 4 Accent 6; \lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 6;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 6;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 6;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Mention; \lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Smart Hyperlink;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Hashtag;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Unresolved Mention;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Smart Link;}}{\*\datastore 01050000 02000000180000004d73786d6c322e534158584d4c5265616465722e362e3000000000000000000000060000 d0cf11e0a1b11ae1000000000000000000000000000000003e000300feff090006000000000000000000000001000000010000000000000000100000feffffff00000000feffffff0000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff 
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff fffffffffffffffffdfffffffeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff 
ffffffffffffffffffffffffffffffff52006f006f007400200045006e00740072007900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000016000500ffffffffffffffffffffffff0c6ad98892f1d411a65f0040963251e5000000000000000000000000d0bc a2b6a406d601feffffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff0000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000105000000000000}} ================================================ FILE: SetupBootstrapper/SetupBootstrapper.wixproj ================================================  Debug x86 3.10 2.0 {CAD976C4-D0C6-4313-B605-EC3749A23B5F} Bundle WorkloadTools $(MSBuildExtensionsPath32)\Microsoft\WiX\v3.x\Wix.targets $(MSBuildExtensionsPath)\Microsoft\WiX\v3.x\Wix.targets 0.0.0.0 bin\x86\Release\ obj\x86\Release\ BuildVersion=$(BuildVersion);Platform=$(Platform) bin\x64\Release\ obj\x64\Release\ BuildVersion=$(BuildVersion);Platform=$(Platform) bin\x86\Debug\ obj\x86\Debug\ BuildVersion=$(BuildVersion);Platform=$(Platform) bin\x64\Debug\ obj\x64\Debug\ BuildVersion=$(BuildVersion);Platform=$(Platform) call $(ProjectDir)postbuild.bat "$(TargetPath)" "$(TargetDir)WorkloadTools_$(Platform)$(TargetExt)" ================================================ FILE: SetupBootstrapper/SignMsi.ps1 ================================================ 
[CmdletBinding()] Param( [Parameter(Mandatory=$True,Position=1)] [string]$InputFile, [Parameter(Mandatory=$True,Position=2)] [string]$OutputFile ) if(-not (Test-Path $PSScriptRoot\SignParams.ps1)) { Write-Warning "No code signing is applied to the .msi file." Write-Warning "You need to create a file called SignParams.ps1 and provide signing info." Write-Output "Moving $InputFile --> $OutputFile" Move-Item $InputFile $OutputFile -Force exit } # read paramters $signParams = get-content $PSScriptRoot\SignParams.ps1 -Raw Invoke-Expression $signParams $params = $( 'sign' ,'/fd' ,'SHA1' ,'/f' ,('"' + $certPath + '"') ,'/p' ,('"' + $certPass + '"') ,'/sha1' ,$certSha ,'/t' ,('"' + $certTime + '"') ,'/d' ,'"WorkloadTools"' ) $ParentPath = Split-Path -Path $InputFile & $insigniaPath $("-ib","$InputFile","-o","$ParentPath\engine.exe") & $signTool ($params + "$ParentPath\engine.exe") & $insigniaPath $("-ab","$ParentPath\engine.exe",$InputFile,"-o",$InputFile) & $signTool ($params + $InputFile) Write-Output "Moving $InputFile --> $OutputFile" Move-Item $InputFile $OutputFile -Force Remove-Item "$ParentPath\engine.exe" ================================================ FILE: SetupBootstrapper/buildexe.ps1 ================================================ param ( [Parameter(Mandatory=$false)] [string]$BuildVersion = "1.0.0.0", [Parameter(Mandatory=$false)] [string]$Platform = "x64" ) # --------------------------------------------------------------------------- # Build the MSI first # --------------------------------------------------------------------------- . 
$PSScriptRoot\..\Setup\buildmsi.ps1 -BuildVersion $BuildVersion -Platform $Platform Set-Location $PSScriptRoot # --------------------------------------------------------------------------- # Resolve the version from SharedAssemblyInfo.cs if the caller left the default # --------------------------------------------------------------------------- if ($BuildVersion -eq "1.0.0.0") { $BuildVersion = (Get-Content ..\SharedAssemblyInfo.cs | Where-Object { $_.StartsWith("[assembly: AssemblyVersion(") }). Replace('[assembly: AssemblyVersion("','').Replace('")]','') } # --------------------------------------------------------------------------- # Locate WiX v3 tools (candle.exe, light.exe) # The WIX environment variable is set automatically by the WiX v3 installer. # --------------------------------------------------------------------------- $wixDir = $null if ($env:WIX -and (Test-Path $env:WIX)) { $wixDir = $env:WIX } if (-not $wixDir) { $wixDir = @( "${env:ProgramFiles(x86)}\WiX Toolset v3.14", "${env:ProgramFiles(x86)}\WiX Toolset v3.11", "${env:ProgramFiles(x86)}\WiX Toolset v3.10", "${env:ProgramFiles}\WiX Toolset v3.14", "${env:ProgramFiles}\WiX Toolset v3.11" ) | Where-Object { Test-Path $_ } | Select-Object -First 1 } if (-not $wixDir) { throw "WiX Toolset v3 not found. Install from https://wixtoolset.org/ or set the WIX environment variable." 
} $candle = Join-Path $wixDir "bin\candle.exe" $light = Join-Path $wixDir "bin\light.exe" # --------------------------------------------------------------------------- # Prepare output and intermediate directories # --------------------------------------------------------------------------- $outDir = "$PSScriptRoot\bin\$Platform\Release" $objDir = "$PSScriptRoot\obj\$Platform\Release" foreach ($dir in $outDir, $objDir) { if (-not (Test-Path $dir)) { New-Item -ItemType Directory -Path $dir -Force | Out-Null } elseif (Test-Path "$dir\*") { Remove-Item "$dir\*" -Recurse -Force } } # --------------------------------------------------------------------------- # Compile Bundle.wxs with candle.exe # --------------------------------------------------------------------------- $arch = if ($Platform -eq 'x86') { 'x86' } else { 'x64' } & $candle ` "$PSScriptRoot\Bundle.wxs" ` -arch $arch ` "-dBuildVersion=$BuildVersion" ` "-dPlatform=$Platform" ` -out "$objDir\" ` -nologo -ext WixBalExtension if ($LASTEXITCODE -ne 0) { throw "candle.exe failed for Bundle.wxs." } # --------------------------------------------------------------------------- # Link with light.exe to produce the bootstrapper EXE # --------------------------------------------------------------------------- $wixObjs = Get-ChildItem "$objDir\*.wixobj" | Select-Object -ExpandProperty FullName & $light $wixObjs ` -out "$outDir\WorkloadTools.exe" ` -nologo -ext WixBalExtension if ($LASTEXITCODE -ne 0) { throw "light.exe failed for Bundle." } # --------------------------------------------------------------------------- # Sign (or just rename if no signing cert is configured) # --------------------------------------------------------------------------- . 
$PSScriptRoot\SignMsi.ps1 ` -InputFile "$outDir\WorkloadTools.exe" ` -OutputFile "$outDir\WorkloadTools_$Platform.exe" ================================================ FILE: SetupBootstrapper/postbuild.bat ================================================ powershell.exe -ExecutionPolicy Bypass -NoProfile -NonInteractive -File %~dp0\SignMsi.ps1 -InputFile %1 -OutputFile %2 ================================================ FILE: SharedAssemblyInfo.cs ================================================ using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("WorkloadTools")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("spaghettidba")] [assembly: AssemblyProduct("WorkloadTools")] [assembly: AssemblyCopyright("Copyright © 2021 spaghettidba")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. 
[assembly: ComVisible(false)] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.7.4")] [assembly: AssemblyFileVersion("1.7.4")] ================================================ FILE: SqlWorkload/NLog.config ================================================  ================================================ FILE: SqlWorkload/Program.cs ================================================ using CommandLine; using CommandLine.Text; using NLog; using NLog.Targets; using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Linq; using System.Runtime; using System.Text; using System.Threading; using System.Threading.Tasks; using WorkloadTools; using WorkloadTools.Config; using WorkloadTools.Consumer; using WorkloadTools.Listener; using WorkloadTools.Listener.ExtendedEvents; using WorkloadTools.Listener.Trace; namespace SqlWorkload { class Program { private static Logger logger = LogManager.GetCurrentClassLogger(); private static CancellationTokenSource source; static void Main(string[] args) { AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(GenericErrorHandler); GCSettings.LargeObjectHeapCompactionMode = GCLargeObjectHeapCompactionMode.CompactOnce; var assembly = System.Reflection.Assembly.GetExecutingAssembly(); var fvi = FileVersionInfo.GetVersionInfo(assembly.Location); var version = fvi.FileMajorPart.ToString() + "." + fvi.FileMinorPart.ToString() + "." 
+ fvi.FileBuildPart.ToString(); var name = assembly.FullName; logger.Info(name + " " + version); try { var options = new Options(); if (!CommandLine.Parser.Default.ParseArguments(args, options)) { return; } Run(options); } catch(Exception e) { logger.Error(e); } } static void Run(Options options) { // reconfigure loggers to use a file in the current directory // or the file specified by the "Log" commandline parameter if (LogManager.Configuration != null) { var target = (FileTarget)LogManager.Configuration.FindTargetByName("logfile"); if (target != null) { var pathToLog = options.LogFile; if (pathToLog == null) { pathToLog = Path.Combine(Environment.CurrentDirectory, "SqlWorkload.log"); } if (!Path.IsPathRooted(pathToLog)) { pathToLog = Path.Combine(Environment.CurrentDirectory, pathToLog); } target.FileName = pathToLog; if(options.LogLevel != null) { foreach(var rule in LogManager.Configuration.LoggingRules) { foreach (var level in LogLevel.AllLoggingLevels) { rule.DisableLoggingForLevel(level); } rule.EnableLoggingForLevels(LogLevel.FromString(options.LogLevel),LogLevel.Fatal); } } LogManager.ReconfigExistingLoggers(); } } options.ConfigurationFile = System.IO.Path.GetFullPath(options.ConfigurationFile); logger.Info(String.Format("Reading configuration from '{0}'", options.ConfigurationFile)); if (!File.Exists(options.ConfigurationFile)) { logger.Error("File not found!"); Console.WriteLine(options.GetUsage()); return; } var config = SqlWorkloadConfig.LoadFromFile(options.ConfigurationFile); config.Controller.Listener.Source = System.IO.Path.GetFullPath(config.Controller.Listener.Source); Console.CancelKeyPress += delegate (object sender, ConsoleCancelEventArgs e) { e.Cancel = true; logger.Info("Received shutdown signal..."); source.CancelAfter(TimeSpan.FromSeconds(100)); // give a 100 seconds cancellation grace period config.Controller.Stop(); }; var t = processController(config.Controller); t.Wait(); logger.Info("Controller stopped."); 
config.Controller.Dispose(); logger.Info("Controller disposed."); } static void GenericErrorHandler(object sender, UnhandledExceptionEventArgs e) { try { logger.Error(e.ToString()); } finally { Console.WriteLine("Caught unhandled exception..."); } } public static async Task processController(WorkloadController controller) { source = new CancellationTokenSource(); source.Token.Register(CancelNotification); var completionSource = new TaskCompletionSource(); source.Token.Register(() => completionSource.TrySetCanceled()); var task = Task.Factory.StartNew(() => controller.Run(), source.Token); await Task.WhenAny(task, completionSource.Task); } public static void CancelNotification() { logger.Info("Shutdown complete."); } } class Options { [Option('F', "File", DefaultValue = "SqlWorkload.json", HelpText = "Configuration file")] public string ConfigurationFile { get; set; } [Option('L', "Log", HelpText = "Log file")] public string LogFile { get; set; } [Option('E', "LogLevel", HelpText = "Log level")] public string LogLevel { get; set; } [ParserState] public IParserState LastParserState { get; set; } [HelpOption] public string GetUsage() { return HelpText.AutoBuild(this, (HelpText current) => HelpText.DefaultParsingErrorsHandler(this, current)); } } } ================================================ FILE: SqlWorkload/Properties/AssemblyInfo.cs ================================================ using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. 
[assembly: Guid("fb46ad2c-df81-4d35-b419-d93e5ef9d98a")] ================================================ FILE: SqlWorkload/Properties/Resources.Designer.cs ================================================ //------------------------------------------------------------------------------ // // This code was generated by a tool. // Runtime Version:4.0.30319.42000 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // //------------------------------------------------------------------------------ namespace SqlWorkload.Properties { using System; /// /// A strongly-typed resource class, for looking up localized strings, etc. /// // This class was auto-generated by the StronglyTypedResourceBuilder // class via a tool like ResGen or Visual Studio. // To add or remove a member, edit your .ResX file then rerun ResGen // with the /str option, or rebuild your VS project. [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "15.0.0.0")] [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] public class Resources { private static global::System.Resources.ResourceManager resourceMan; private static global::System.Globalization.CultureInfo resourceCulture; [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] internal Resources() { } /// /// Returns the cached ResourceManager instance used by this class. 
/// [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] public static global::System.Resources.ResourceManager ResourceManager { get { if (object.ReferenceEquals(resourceMan, null)) { global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("SqlWorkload.Properties.Resources", typeof(Resources).Assembly); resourceMan = temp; } return resourceMan; } } /// /// Overrides the current thread's CurrentUICulture property for all /// resource lookups using this strongly typed resource class. /// [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] public static global::System.Globalization.CultureInfo Culture { get { return resourceCulture; } set { resourceCulture = value; } } /// /// Looks up a localized string similar to true. /// public static string TRUNCATE_TO_4000 { get { return ResourceManager.GetString("TRUNCATE_TO_4000", resourceCulture); } } } } ================================================ FILE: SqlWorkload/Properties/Resources.resx ================================================  text/microsoft-resx 2.0 System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 true ================================================ FILE: SqlWorkload/SqlWorkload.csproj ================================================  Debug AnyCPU {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A} Exe SqlWorkload SqlWorkload v4.8 512 AnyCPU true full false bin\x64\Debug\ DEBUG;TRACE prompt 4 false AnyCPU pdbonly true bin\x64\Release\ TRACE prompt 4 false true bin\x86\Debug\ DEBUG;TRACE full x86 prompt MinimumRecommendedRules.ruleset bin\x86\Release\ TRACE true pdbonly x86 prompt MinimumRecommendedRules.ruleset 
..\packages\CommandLineParser.1.9.71\lib\net45\CommandLine.dll ..\packages\DouglasCrockford.JsMin.1.1.3\lib\net40-client\DouglasCrockford.JsMin.dll ..\packages\NLog.4.7.15\lib\net45\NLog.dll ..\packages\System.Data.SQLite.Core.1.0.112.0\lib\net46\System.Data.SQLite.dll Properties\SharedAssemblyInfo.cs Always {ae6e4548-8c33-4728-8504-88aa9666020b} WorkloadTools True This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.

================================================ FILE: SqlWorkload/app.config ================================================

================================================ FILE: SqlWorkload/packages.config ================================================

================================================ FILE: WorkloadTools/BinarySerializedBufferedEventQueue.cs ================================================

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Runtime.Serialization.Formatters.Binary;
using WorkloadTools.Util;
using System.Diagnostics;

namespace WorkloadTools
{
    // Overflow-to-disk event queue: batches of events that do not fit the
    // in-memory ring buffer are binary-serialized to numbered .cache files
    // under %TEMP%\WorkloadTools\SerializedEventQueue and read back in FIFO order.
    public class BinarySerializedBufferedEventQueue : BufferedEventQueue
    {
        private readonly string baseFolder;
        private int _minFile, _maxFile; // next file index to read / next file index to write
        private readonly string file_name_uniquifier = "";
        // NOTE(review): BinaryFormatter is insecure against crafted payloads and
        // is removed in .NET 9; tolerable here only because the cache files are
        // produced and consumed by this same process, but worth replacing.
        private readonly BinaryFormatter _formatter = new BinaryFormatter();

        public BinarySerializedBufferedEventQueue() : base()
        {
            // Per-instance prefix (timestamp + tick count) so concurrent
            // instances never collide on cache file names.
            file_name_uniquifier = DateTime.Now.ToString("yyyyMMddHHmm") + "_" + ("000000000" + (Environment.TickCount & int.MaxValue)).Right(9) + "_";
            baseFolder = Path.Combine(Path.Combine(System.IO.Path.GetTempPath(), "WorkloadTools"), "SerializedEventQueue");
            _ = System.IO.Directory.CreateDirectory(baseFolder);
            _minFile = 0;
            _maxFile = 0;
        }

        // Reads back (and deletes) the oldest cache file. The caller dictates
        // the batch size it expects; a mismatch means the cache is corrupt.
        protected override WorkloadEvent[] ReadEvents(int count)
        {
            WorkloadEvent[] result = null;
            var destFile = Path.Combine(baseFolder, file_name_uniquifier + ("000000000" + _minFile).Right(9) + ".cache");
            using (var fileStream = new System.IO.FileStream(destFile, System.IO.FileMode.Open))
            using (var bufferedStream = new BufferedStream(fileStream))
            {
                result = (WorkloadEvent[])_formatter.Deserialize(bufferedStream);
                if (result.Length != count)
                {
                    // BUGFIX: the message used to be passed as the paramName
                    // argument of ArgumentOutOfRangeException, producing a
                    // garbled "Parameter name: ..." runtime message.
                    throw new ArgumentOutOfRangeException(nameof(count), $"The deserialized array is of the wrong size (expected: {count}, found: {result.Length})");
                }
            }
            File.Delete(destFile);
            _minFile++;
            return result;
        }

        // Serializes one batch of events to the next numbered cache file.
        protected override void WriteEvents(WorkloadEvent[] events)
        {
            var destFile = Path.Combine(baseFolder, file_name_uniquifier);
            // c# does not have a String.Right method, so I created
            // an extension for it. Crazy, right?
            destFile += ("000000000" + _maxFile).Right(9) + ".cache";
            if (File.Exists(destFile))
            {
                File.Delete(destFile);
            }
            using (var fileStream = new FileStream(destFile, FileMode.CreateNew))
            using (var bufferedStream = new BufferedStream(fileStream))
            {
                _formatter.Serialize(bufferedStream, events);
                // the using blocks flush and close both streams;
                // the explicit Close() that used to be here was redundant
            }
            _maxFile++;
        }

        // Deletes every cache file still on disk for this instance.
        // (_maxFile itself has never been written, hence the File.Exists guard.)
        protected override void Dispose(bool disposing)
        {
            for (var i = _minFile; i <= _maxFile; i++)
            {
                var destFile = Path.Combine(baseFolder, file_name_uniquifier);
                destFile += ("000000000" + i).Right(9) + ".cache";
                if (File.Exists(destFile))
                {
                    File.Delete(destFile);
                }
            }
        }
    }
}

================================================ FILE: WorkloadTools/BufferedEventQueue.cs ================================================

using NLog;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace WorkloadTools
{
    public abstract class BufferedEventQueue : IEventQueue
    {
        private static readonly Logger logger = LogManager.GetCurrentClassLogger();

        private int _bufferSize;
        public int BufferSize
        {
            get => _bufferSize;
            set { _bufferSize = value; initialize(); }
        }

        protected object syncRoot = new object();

        private WorkloadEvent[] _array;
        private WorkloadEvent[] _overflowArray;
        private int _head; // First valid element in the queue
        private int _tail; // Last valid element in the queue
        private int _size; // Number of elements in the array
        private int _totOverflowSize; // Total number of elements in the overflow array AND disk
        private int _overflowSize; // Current number of elements in the overflow array
        private int _overflowBufferSize;

        // Total queued events: in-memory ring buffer plus overflow
        // (overflow array + batches spilled by the subclass via WriteEvents).
        public virtual int Count
        {
            get
            {
                lock (syncRoot)
                {
                    return _size + _totOverflowSize;
                }
            }
        }

        public BufferedEventQueue()
        {
            initialize();
        }

        // (Re)allocates the ring buffer; the overflow batch size is fixed at
        // half of BufferSize. Called again whenever BufferSize is assigned,
        // which discards any previously queued events.
        private void initialize()
        {
            _array = new WorkloadEvent[BufferSize];
            _overflowArray = null;
            _overflowBufferSize = BufferSize / 2;
        }

        // Subclasses persist one full overflow batch (e.g. to disk)...
        protected abstract void WriteEvents(WorkloadEvent[] events);

        // ...and read one persisted batch back, oldest first.
        protected abstract WorkloadEvent[] ReadEvents(int count);

        // Adds one event. While an overflow array exists, new events go there;
        // each time it fills, it is either folded back into the ring buffer
        // (if there is room and nothing is persisted) or handed to WriteEvents.
        public virtual void Enqueue(WorkloadEvent evt)
        {
            lock (syncRoot)
            {
                if (_overflowArray != null)
                {
                    // write to the overflow array
                    _overflowArray[_overflowSize] = evt;
                    _overflowSize++;
                    _totOverflowSize++;
                    if (_overflowSize == _overflowBufferSize)
                    {
                        // decide what to do with the overflow:
                        // if we have enough room in the base array
                        // AND no events persisted to disk, then
                        // write events back to the queue
                        // if the base array does not have enough room,
                        // write the overflow array to the database
                        // and allocate a new overflow array
                        if ((_size <= BufferSize - _overflowBufferSize) && (_totOverflowSize - _overflowSize <= 0))
                        {
                            EnqueueAll(_overflowArray);
                            _overflowSize = 0;
                            _overflowArray = null;
                        }
                        else
                        {
                            WriteEvents(_overflowArray);
                            _overflowSize = 0;
                            _overflowArray = new WorkloadEvent[_overflowBufferSize];
                        }
                    }
                }
                else
                {
                    _array[_tail] = evt;
                    _tail = (_tail + 1) % _array.Length;
                    _size++;
                    // the internal array is at capacity: allocate an overflow array
                    // with size = 50% of BufferSize
                    if (_size == _array.Length)
                    {
                        _overflowArray = new WorkloadEvent[_overflowBufferSize];
                    }
                }
            }
        }

        private void EnqueueAll(WorkloadEvent[] source)
        {
            EnqueueAll(source, source.Length);
        }

        // Copies 'count' events from 'source' back into the ring buffer at _tail,
        // splitting the copy in two when it wraps past the end of _array.
        // Callers hold syncRoot and have already verified there is enough room.
        private void EnqueueAll(WorkloadEvent[] source, int count)
        {
            if (count > source.Length)
            {
                // NOTE(review): the message is passed as the paramName argument
                // here — the (paramName, message) overload would be correct.
                throw new ArgumentOutOfRangeException($"The 'count' argument ({count}) is greater than the length of the array to enqueue ({source.Length}).");
            }
            if (_head < _tail)
            {
                // free space may wrap: first fill up to the end of the array...
                var numFirst = _array.Length - _tail;
                if (numFirst > count)
                {
                    numFirst = count;
                }
                Array.Copy(source, 0, _array, _tail, numFirst);
                _tail = (_tail + numFirst) % _array.Length;
                if (numFirst < count)
                {
                    // ...then continue from index 0 with the remainder
                    var numSecond = count - numFirst;
                    Array.Copy(source, numFirst, _array, _tail, numSecond);
                    _tail = (_tail + numSecond) % _array.Length;
                }
            }
            else
            {
                Array.Copy(source, 0, _array, _tail, count);
                _tail = (_tail + count) % _array.Length;
            }
            _size += count;
            _totOverflowSize -= count;
        }

        // Removes the oldest event; refills the ring buffer from disk or from
        // the overflow array once enough space has been freed. Returns false
        // when empty or on any internal failure (logged as a warning).
        public virtual bool TryDequeue(out WorkloadEvent result)
        {
            result = null;
            try
            {
                lock (syncRoot)
                {
                    if (Count == 0)
                    {
                        return false;
                    }
                    result = _array[_head];
                    _array[_head] = null; // drop the reference so the event can be collected
                    _head = (_head + 1) % _array.Length;
                    _size--;
                    if (_totOverflowSize > 0)
                    {
                        // if we have space available and overflowed
                        // events on disk, then we read them back
                        if (_totOverflowSize - _overflowSize > 0)
                        {
                            if (_size == _array.Length - _overflowBufferSize)
                            {
                                EnqueueAll(ReadEvents(_overflowBufferSize));
                            }
                        }
                        else
                        {
                            // if we have events in the overflow array (but not on disk)
                            // and enough space in the queue (at least 75% free), put them back
                            if (_overflowSize > 0 && _size <= _array.Length - (_overflowBufferSize + (_overflowBufferSize / 2)))
                            {
                                EnqueueAll(_overflowArray, _overflowSize);
                                _overflowSize = 0;
                                _overflowArray = null;
                                _totOverflowSize = 0;
                            }
                        }
                    }
                }
                return true;
            }
            catch (Exception e)
            {
                logger.Warn(e, "Unable to dequeue");
                result = null;
                return false;
            }
        }

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        // Subclasses clean up their persisted overflow (e.g. delete cache files).
        protected abstract void Dispose(bool disposing);

        public bool HasMoreElements()
        {
            return Count > 0;
        }
    }
}

================================================ FILE: WorkloadTools/Config/AnalysisSample.json ================================================

{ "Controller": { "Listener": { "__type":
"ExtendedEventsWorkloadListener", "ConnectionInfo": { "ServerName": "testServer", "UserName": "sa", "Password": "P4$$w0rd!" }, "DatabaseFilter": "MyDB" }, "Consumers": [ { "__type": "AnalysisConsumer", "ConnectionInfo": { "ServerName": "testServer", "DatabaseName": "SqlWorkload", "SchemaName": "replay", "UserName": "sa", "Password": "P4$$w0rd!" }, "UploadIntervalSeconds": 60 } ] } } ================================================ FILE: WorkloadTools/Config/ReplaySample.json ================================================ { "Controller": { "Listener": { "__type": "ExtendedEventsWorkloadListener", "ConnectionInfo": { "ServerName": "ProductionServer", "UserName": "sa", "Password": "P4$$w0rd!" }, "DatabaseFilter": "MyDB" }, "Consumers": [ { "__type": "ReplayConsumer", "ConnectionInfo": { "ServerName": "testServer", "DatabaseName": "MyDB", "UserName": "sa", "Password": "P4$$w0rd!" } }, { "__type": "AnalysisConsumer", "ConnectionInfo": { "ServerName": "testServer", "DatabaseName": "SqlWorkload", "SchemaName": "baseline", "UserName": "sa", "Password": "P4$$w0rd!" }, "UploadIntervalSeconds": 60 } ] } } ================================================ FILE: WorkloadTools/Config/Sample.json ================================================ { "Controller": { "Listener": { "__type": "ExtendedEventsWorkloadListener", "ConnectionInfo": { "ServerName": "SQLDEMO\\SQL2014", "UserName": "sa", "Password": "P4$$w0rd!" }, "DatabaseFilter": "DS3" }, "Consumers": [ { "__type": "ReplayConsumer", "ConnectionInfo": { "ServerName": "SQLDEMO\\SQL2016", "DatabaseName": "DS3", "UserName": "sa", "Password": "P4$$w0rd!" } }, { "__type": "AnalysisConsumer", "ConnectionInfo": { "ServerName": "SQLDEMO\\SQL2016", "DatabaseName": "DS3", "SchemaName": "baseline", "UserName": "sa", "Password": "P4$$w0rd!" 
}, "UploadIntervalSeconds": 60 } ] } } ================================================ FILE: WorkloadTools/Config/SqlWorkloadConfig.cs ================================================ using NLog; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using WorkloadTools; using System.Web.Script.Serialization; using System.IO; using DouglasCrockford.JsMin; using WorkloadTools.Listener.ExtendedEvents; using WorkloadTools.Consumer.Replay; using WorkloadTools.Consumer.Analysis; using WorkloadTools.Util; namespace WorkloadTools.Config { public class SqlWorkloadConfig { public SqlWorkloadConfig() { } public WorkloadController Controller { get; set; } public static SqlWorkloadConfig LoadFromFile(string path) { var ser = new JavaScriptSerializer(new SqlWorkloadConfigTypeResolver()); ser.RegisterConverters(new JavaScriptConverter[] { new ModelConverter() }); using (var r = new StreamReader(path)) { var json = r.ReadToEnd(); var minifier = new JsMinifier(); // minify JSON to strip away comments // Comments in config files are very useful but JSON parsers // do not allow comments. Minification solves the issue. SqlWorkloadConfig result = null; string jsonMin = null; try { jsonMin = minifier.Minify(json); } catch (Exception e) { throw new FormatException($"Unable to load configuration from '{path}'. The file contains syntax errors.", e); } try { result = ser.Deserialize(jsonMin); } catch (Exception e) { throw new FormatException($"Unable to load configuration from '{path}'. 
The file contains semantic errors.", e); } return result; } } public static void Test() { var ser = new JavaScriptSerializer(new SqlWorkloadConfigTypeResolver()); var x = new SqlWorkloadConfig() { Controller = new WorkloadController() }; x.Controller.Listener = new ExtendedEventsWorkloadListener() { Source = "Listener\\ExtendedEvents\\sqlworkload.sql", ConnectionInfo = new SqlConnectionInfo() { ServerName = "SQLDEMO\\SQL2014", UserName = "sa", Password = "P4$$w0rd!" } }; //x.Controller.Listener.Filter.DatabaseFilter.PredicateValue = "DS3"; x.Controller.Consumers.Add(new ReplayConsumer() { ConnectionInfo = new SqlConnectionInfo() { ServerName = "SQLDEMO\\SQL2016", UserName = "sa", Password = "P4$$w0rd!" } }); x.Controller.Consumers.Add(new ReplayConsumer() { ConnectionInfo = new SqlConnectionInfo() { ServerName = "SQLDEMO\\SQL2016", UserName = "sa", Password = "P4$$w0rd!", DatabaseName = "RTR", SchemaName = "baseline" }, DatabaseMap = new Dictionary() { { "DatabaseA", "DatabaseB" }, { "DatabaseC", "DatabaseD" } } }); var s = ser.Serialize(x); Console.WriteLine(s); //SqlWorkloadConfig tc = ser.Deserialize(Samples.Sample.ToString()); } } } ================================================ FILE: WorkloadTools/Config/SqlWorkloadConfigTypeResolver.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Reflection; using System.Web.Script.Serialization; namespace WorkloadTools.Config { internal class SqlWorkloadConfigTypeResolver : SimpleTypeResolver { private static readonly Dictionary mappedTypes = new Dictionary(); static SqlWorkloadConfigTypeResolver() { var currentAssembly = Assembly.GetExecutingAssembly(); var nameSpace = "WorkloadTools"; var types = currentAssembly.GetTypes().Where(t => t != null && t.FullName.StartsWith(nameSpace) & !t.FullName.Contains("+")).ToArray(); foreach (var t in types) { try { mappedTypes.Add(t.AssemblyQualifiedName, t); mappedTypes.Add(t.Name, t); } 
catch(Exception)
{
    throw;
}
}
}

// Allows config files to reference WorkloadTools types by short name
// ("__type": "AnalysisConsumer") as well as by assembly-qualified name.
public override Type ResolveType(string id)
{
    if (mappedTypes.ContainsKey(id))
    {
        return mappedTypes[id];
    }
    else
    {
        return base.ResolveType(id);
    }
}

public override string ResolveTypeId(Type type)
{
    return base.ResolveTypeId(type);
}
}
}

================================================ FILE: WorkloadTools/Consumer/Analysis/AnalysisConsumer.cs ================================================

using NLog;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using WorkloadTools.Consumer.Analysis;

namespace WorkloadTools.Consumer.Analysis
{
    // Consumer that feeds captured workload events to a WorkloadAnalyzer,
    // which aggregates them per interval and uploads them to the analysis database.
    public class AnalysisConsumer : BufferedWorkloadConsumer
    {
        private WorkloadAnalyzer analyzer;   // created lazily on the first event
        private int _uploadIntervalSeconds;

        // Target database/schema for the aggregated workload data.
        public SqlConnectionInfo ConnectionInfo { get; set; }

        // Upload cadence; the analyzer works in whole minutes, hence the
        // multiple-of-60 constraint.
        public int UploadIntervalSeconds
        {
            get => _uploadIntervalSeconds;
            set
            {
                if (value % 60 != 0)
                {
                    // BUGFIX: pass the parameter name separately; the message
                    // used to be supplied as the paramName argument, garbling
                    // the runtime error text.
                    throw new ArgumentOutOfRangeException(nameof(value), "UploadIntervalSeconds must be an exact multiple of 60");
                }
                _uploadIntervalSeconds = value;
            }
        }

        // Minute-based view over the same backing field.
        public int UploadIntervalMinutes
        {
            get => _uploadIntervalSeconds / 60;
            set => _uploadIntervalSeconds = value * 60;
        }

        public int MaximumWriteRetries { get; set; } = 5;
        public bool SqlNormalizerTruncateTo4000 { get; set; }
        public bool SqlNormalizerTruncateTo1024 { get; set; }
        public bool WriteDetail { get; set; } = true;
        public bool WriteSummary { get; set; } = true;

        public override void ConsumeBuffered(WorkloadEvent evt)
        {
            if(analyzer == null)
            {
                // NOTE(review): WriteSummary is declared above but never forwarded
                // to the analyzer — confirm whether WorkloadAnalyzer expects it.
                analyzer = new WorkloadAnalyzer()
                {
                    Interval = UploadIntervalSeconds / 60,
                    ConnectionInfo = ConnectionInfo,
                    MaximumWriteRetries = MaximumWriteRetries,
                    TruncateTo1024 = SqlNormalizerTruncateTo1024,
                    TruncateTo4000 = SqlNormalizerTruncateTo4000,
                    WriteDetail = WriteDetail
                };
            }
            analyzer.Add(evt);
        }

        public override bool HasMoreEvents()
        {
            // BUGFIX: 'analyzer' is null until the first event arrives; guard it
            // to avoid a NullReferenceException when polled early.
            return (analyzer != null && analyzer.HasEventsQueued) || !Buffer.IsEmpty;
        }

        protected override void Dispose(bool disposing)
        {
            if (analyzer != null)
            {
                analyzer.Stop();
                analyzer.Dispose();
            }
        }
    }
}
================================================ FILE: WorkloadTools/Consumer/Analysis/DatabaseSchema.sql ================================================ IF SCHEMA_ID('{SchemaName}') IS NULL EXEC('CREATE SCHEMA [{SchemaName}]'); IF OBJECT_ID('{SchemaName}.WorkloadDetails') IS NULL CREATE TABLE [{SchemaName}].[WorkloadDetails]( [interval_id] [int] NOT NULL, [sql_hash] [bigint] NOT NULL, [application_id] [int] NOT NULL, [database_id] [int] NOT NULL, [host_id] [int] NOT NULL, [login_id] [int] NOT NULL, [avg_cpu_us] [bigint] NULL, [min_cpu_us] [bigint] NULL, [max_cpu_us] [bigint] NULL, [sum_cpu_us] [bigint] NULL, [avg_reads] [bigint] NULL, [min_reads] [bigint] NULL, [max_reads] [bigint] NULL, [sum_reads] [bigint] NULL, [avg_writes] [bigint] NULL, [min_writes] [bigint] NULL, [max_writes] [bigint] NULL, [sum_writes] [bigint] NULL, [avg_duration_us] [bigint] NULL, [min_duration_us] [bigint] NULL, [max_duration_us] [bigint] NULL, [sum_duration_us] [bigint] NULL, [execution_count] [bigint] NULL, CONSTRAINT PK_WorkloadDetails PRIMARY KEY CLUSTERED ( [interval_id], [sql_hash], [application_id], [database_id], [host_id], [login_id] ) ) IF OBJECT_ID('{SchemaName}.WorkloadSummary') IS NULL CREATE TABLE [{SchemaName}].[WorkloadSummary]( [application_id] [int] NOT NULL, [database_id] [int] NOT NULL, [host_id] [int] NOT NULL, [login_id] [int] NOT NULL, [min_cpu_us] [bigint] NULL, [max_cpu_us] [bigint] NULL, [sum_cpu_us] [bigint] NULL, [min_reads] [bigint] NULL, [max_reads] [bigint] NULL, [sum_reads] [bigint] NULL, [min_writes] [bigint] NULL, [max_writes] [bigint] NULL, [sum_writes] [bigint] NULL, [min_duration_us] [bigint] NULL, [max_duration_us] [bigint] NULL, [sum_duration_us] [bigint] NULL, [min_execution_date] datetime, [max_execution_date] datetime, [execution_count] [bigint] NULL, CONSTRAINT PK_WorkloadSummary PRIMARY KEY CLUSTERED ( [application_id], [database_id], [host_id], [login_id] ) ) IF OBJECT_ID('{SchemaName}.Applications') IS NULL CREATE TABLE 
[{SchemaName}].[Applications]( [application_id] [int] NOT NULL PRIMARY KEY, [application_name] [nvarchar](128) NOT NULL ) IF OBJECT_ID('{SchemaName}.Databases') IS NULL CREATE TABLE [{SchemaName}].[Databases]( [database_id] [int] NOT NULL PRIMARY KEY, [database_name] [nvarchar](128) NOT NULL ) IF OBJECT_ID('{SchemaName}.Hosts') IS NULL CREATE TABLE [{SchemaName}].[Hosts]( [host_id] [int] NOT NULL PRIMARY KEY, [host_name] [nvarchar](128) NOT NULL ) IF OBJECT_ID('{SchemaName}.Logins') IS NULL CREATE TABLE [{SchemaName}].[Logins]( [login_id] [int] NOT NULL PRIMARY KEY, [login_name] [nvarchar](128) NOT NULL ) IF OBJECT_ID('{SchemaName}.Intervals') IS NULL CREATE TABLE [{SchemaName}].[Intervals] ( [interval_id] [int] NOT NULL PRIMARY KEY, [end_time] [datetime] NOT NULL, [duration_minutes] [int] NOT NULL ) IF OBJECT_ID('{SchemaName}.NormalizedQueries') IS NULL CREATE TABLE [{SchemaName}].[NormalizedQueries]( [sql_hash] [bigint] NOT NULL PRIMARY KEY, [normalized_text] [nvarchar](max) NOT NULL, [example_text] [nvarchar](max) NULL ) IF OBJECT_ID('{SchemaName}.PerformanceCounters') IS NULL CREATE TABLE [{SchemaName}].[PerformanceCounters]( [interval_id] [int] NOT NULL, [counter_name] [varchar](255) NOT NULL, [min_counter_value] [float] NOT NULL, [max_counter_value] [float] NOT NULL, [avg_counter_value] [float] NOT NULL ) IF OBJECT_ID('{SchemaName}.WaitStats') IS NULL CREATE TABLE [{SchemaName}].[WaitStats]( [interval_id] [int] NOT NULL, [wait_type] [varchar](255) NOT NULL, [wait_sec] [float] NOT NULL, [resource_sec] [float] NOT NULL, [signal_sec] [float] NOT NULL, [wait_count] [bigint] NOT NULL ) IF OBJECT_ID('{SchemaName}.DiskPerf') IS NULL CREATE TABLE [{SchemaName}].[DiskPerf] ( [interval_id] [int] NOT NULL, [database_name] nvarchar(128) NULL, [physical_filename] nvarchar(128) NULL, [logical_filename] nvarchar(128) NULL, [file_type] nvarchar(128) NULL, [volume_mount_point] nvarchar(128) NULL, [read_latency_ms] int NULL, [reads] int NULL, [read_bytes] int NULL, 
[write_latency_ms] int NULL, [writes] int NULL, [write_bytes] int NULL, [cum_read_latency_ms] bigint NULL, [cum_reads] bigint NULL, [cum_read_bytes] bigint NULL, [cum_write_latency_ms] bigint NULL, [cum_writes] bigint NULL, [cum_write_bytes] bigint NULL );

-- Per-interval error counts grouped by error type.
IF OBJECT_ID('{SchemaName}.Errors') IS NULL CREATE TABLE [{SchemaName}].[Errors]( [interval_id] [int] NOT NULL, [error_type] [nvarchar](30) NOT NULL, [message] [nvarchar](max) NULL, [error_count] int NULL )

================================================ FILE: WorkloadTools/Consumer/Analysis/NormalizedSqlText.cs ================================================

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace WorkloadTools.Consumer.Analysis
{
    // Result of normalizing a captured SQL text: carries the original text,
    // the extracted statement, the constant-stripped normalized form, and a
    // numeric handle (presumably the prepared-statement/cursor handle — TODO
    // confirm against SqlTextNormalizer's callers; it defaults to 0).
    public class NormalizedSqlText
    {
        // Kind of wire-level command the text was recognized as.
        public enum CommandTypeEnum
        {
            SP_EXECUTE,
            SP_PREPARE,
            SP_UNPREPARE,
            SP_CURSOR,
            OTHER,
            SP_RESET_CONNECTION,
            SP_RESET_CONNECTION_NONPOOLED
        }

        public NormalizedSqlText()
        {
            // Default classification until a specific command type is detected.
            CommandType = CommandTypeEnum.OTHER;
        }

        // Convenience constructor: all three text facets start out as the raw command.
        public NormalizedSqlText(string command) : this()
        {
            NormalizedText = command;
            OriginalText = command;
            Statement = command;
            Handle = 0;
        }

        // Text exactly as captured.
        public string OriginalText { get; set; }
        // Statement extracted from wrappers (sp_executesql, cursor open, prepare, ...).
        public string Statement { get; set; }
        // Text with constants replaced by placeholders ({STR}, {##}, {BINARY}, ...).
        public string NormalizedText { get; set; }
        // Numeric handle; 0 when not applicable.
        public int Handle { get; set; }
        public CommandTypeEnum CommandType { get; set; }
        // Hit counter read by SqlTextNormalizer's cache sweeper thread.
        internal int ReferenceCount { get; set; }
    }
}

================================================ FILE: WorkloadTools/Consumer/Analysis/SqlTextNormalizer.cs ================================================

using NLog;
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using WorkloadTools.Properties;

namespace WorkloadTools.Consumer.Analysis
{
    public class SqlTextNormalizer
    {
        private static readonly Logger logger = LogManager.GetCurrentClassLogger();
private static readonly Hashtable prepSql = new Hashtable(); private static readonly ConcurrentDictionary cachedQueries = new ConcurrentDictionary(); private static readonly Regex _doubleApostrophe = new Regex("('')(?.*?)('')", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.IgnorePatternWhitespace | RegexOptions.CultureInvariant); private static readonly Regex _delimiterStart = new Regex("(--)|(/\\*)|'", RegexOptions.Compiled); private static readonly Regex _spreadCsv = new Regex(",(?=\\S)", RegexOptions.Compiled); private static readonly Regex _spaces = new Regex("\\s+", RegexOptions.Compiled); private static readonly Regex _inlineComment = new Regex("--.*$", RegexOptions.Multiline | RegexOptions.Compiled); private static readonly Regex _prepareSql = new Regex("EXEC\\s+(?SP_PREP(ARE|EXEC))\\s+@P1\\s+OUTPUT,\\s*(NULL|(N\\'.+?\\')),\\s*N(?.+)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _prepExecRpc = new Regex("SET\\s+@P1=(?\\d+)\\s+EXEC\\s+SP_PREPEXECRPC\\s+@P1\\s+OUTPUT,\\s*N\\'(?.+?)'", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _preppedSqlStatement = new Regex("^(')(?((?!\\1).|\\1{2})*)\\1", RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _execPrepped = new Regex("^EXEC\\s+SP_EXECUTE\\s+(?\\d+)", RegexOptions.Compiled); private static readonly Regex _execUnprep = new Regex("EXEC\\s+SP_UNPREPARE\\s+(?\\d+)", RegexOptions.Compiled); private static readonly Regex _cursor = new Regex("EXEC\\s+SP_CURSOROPEN\\s+(@CURSOR\\s*=\\s*)?\\@P1\\s+OUTPUT\\,\\s*(@STMT\\s*=\\s*)?(N)?(?') (? 
(( (?!\\k) .|\\k{2})*) ) \\k", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline | RegexOptions.IgnorePatternWhitespace); private static readonly Regex _cursorPrepExec = new Regex("EXEC\r\n\\s+ # any spaces\r\nsp_cursorprepexec\r\n.+ # any characters up to the string\r\nN \r\n(?') # matches an apostraphe\r\n(?!@) # but no @ following\r\n(? (( (?!\\k) .|\\k{2})*) ) # all the characters ...\r\n\\k # until the next tick that isn't doubled.", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline | RegexOptions.IgnorePatternWhitespace); private static readonly Regex _spExecuteSql = new Regex("EXEC\\s+SP_EXECUTESQL\\s+N\\'(?.+?)\\'", RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _spExecuteSqlWithStatement = new Regex("EXEC\\s+SP_EXECUTESQL\\s+@statement\\s*=\\s*N\\'(?.+?)\\'", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _objectName = new Regex("EXEC(UTE){0,1}\\s(?(\\w+\\.)*)(?\\w+)", RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _dbAndObjectName = new Regex("EXEC(UTE){0,1}\\s+(?\\w+)\\.\\.(?\\w+)", RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _emptyString = new Regex("\\'\\'", RegexOptions.Compiled); private static readonly Regex _unicodeConstant = new Regex("N{STR}", RegexOptions.Compiled); private static readonly Regex _stringConstant = new Regex("(')(((?!\\1).|\\1{2})*)\\1", RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _paramNameValueStr = new Regex(@"@(?\w+)\s*=\s*N?'(?(?:[^']|'')*)'", RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _paramNameValueNum = new Regex(@"@(?\w+)\s?=\s?(?([0-9.])+)", RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _binaryConstant = new Regex("0X([0-9ABCDEF])+", RegexOptions.Compiled); private static readonly Regex _numericConstant = new 
Regex("(?[\\(\\s,=\\-><\\!\\&\\|\\+\\*\\/\\%\\~\\$])(?[\\-\\.\\d]+)", RegexOptions.Compiled); private static readonly Regex _inClause = new Regex("IN\\s*\\(\\s*\\{.*\\}\\s*\\)", RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _brackets = new Regex("(\\[|\\])", RegexOptions.Compiled); private static readonly Regex _TVPExecute = new Regex(@"DECLARE\s*@(?(\w+))\s*(AS)?\s*(?(\w+)).*EXEC(UTE)?\s*(?(\S+)).*@\k\sREADONLY", RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.Singleline); public bool TruncateTo4000 { get; set; } public bool TruncateTo1024 { get; set; } private static readonly Thread Sweeper; static SqlTextNormalizer() { Sweeper = new Thread(() => { try { while (true) { var toDelete = cachedQueries.Where(t => t.Value.ReferenceCount < 10).ToList(); foreach (var el in toDelete) { _ = cachedQueries.TryRemove(el.Key, out var nst); } Thread.Sleep(30000); } } catch (Exception e) { logger.Error(e.Message); logger.Error(e.StackTrace); } }) { IsBackground = true, Name = "SqlTextNormalizer.CacheSweeper" }; Sweeper.Start(); } public NormalizedSqlText NormalizeSqlText(string sql, int spid) { try { var hashCode = sql.GetHashCode(); if (cachedQueries.TryGetValue(hashCode, out var result)) { if (result != null && result.OriginalText == sql) { result.ReferenceCount++; return result; } } result = NormalizeSqlText(sql, spid, true); logger.Trace("NormalizeSqlText:[{0}]: {1}", spid, sql); if (result != null) { logger.Trace("NormalizeSqlText:[{0}]: {1}", spid, result.NormalizedText); result.ReferenceCount = 1; _ = cachedQueries.TryAdd(hashCode, result); } return result; } catch (Exception) { throw; } } public NormalizedSqlText NormalizeSqlText(string sql, int spid, bool spreadCsv) { var normalizedSqlText = new NormalizedSqlText { OriginalText = sql, NormalizedText = sql }; var result = normalizedSqlText; if (sql == null) { result.OriginalText = ""; result.NormalizedText = ""; result.Statement = ""; return result; } sql = sql.Trim(); if 
(TruncateTo1024 && sql.Length > 1024000) { result.Statement = "{SQL>1MB}"; result.NormalizedText = "{SQL>1MB}"; return result; } var flag1 = false; var flag2 = false; var num = 0; if ((sql == "sp_reset_connection") || (sql == "exec sp_reset_connection") || (sql == "exec sp_reset_connection /*Nonpooled*/")) { return null; } sql = FixComments(sql); sql = _spaces.Replace(sql, " ").ToUpper(CultureInfo.InvariantCulture); sql = _doubleApostrophe.Replace(sql, "{STR}"); var matchPrepExecRpc = _prepExecRpc.Match(sql); if (matchPrepExecRpc.Success) { sql = matchPrepExecRpc.Groups["statement"].ToString(); result.Statement = sql; result.NormalizedText = sql; } var matchPrepareSql = _prepareSql.Match(sql); if (matchPrepareSql.Success) { if (matchPrepareSql.Groups["preptype"].ToString().ToLower() == "sp_prepare") { flag2 = true; } //num = !(match3.Groups["stmtnum"].ToString() == "NULL") ? Convert.ToInt32(match3.Groups["stmtnum"].ToString()) : 0; sql = matchPrepareSql.Groups["remaining"].ToString(); var matchPreppedSqlStatement = _preppedSqlStatement.Match(sql); if (matchPreppedSqlStatement.Success) { sql = matchPreppedSqlStatement.Groups["statement"].ToString(); sql = _doubleApostrophe.Replace(sql, "'${string}'"); result.Statement = sql; result.NormalizedText = sql; } flag1 = true; } var matchExecPrepped = _execPrepped.Match(sql); if (matchExecPrepped.Success) { num = Convert.ToInt32(matchExecPrepped.Groups["stmtnum"].ToString()); if (prepSql.ContainsKey((object)(spid.ToString() + "_" + num.ToString()))) { result.NormalizedText = TruncateSql("{PREPARED} " + prepSql[(object)(spid.ToString() + "_" + num.ToString())].ToString()); return result; } } var matchExecUnprep = _execUnprep.Match(sql); if (matchExecUnprep.Success) { num = Convert.ToInt32(matchExecUnprep.Groups["stmtnum"].ToString()); var str = spid.ToString() + "_" + num.ToString(); if (prepSql.ContainsKey((object)str)) { sql = prepSql[(object)str].ToString(); prepSql.Remove((object)(spid.ToString() + "_" + 
num.ToString())); result.NormalizedText = TruncateSql("{UNPREPARING} " + sql); return result; } } var matchCursor = _cursor.Match(sql); if (matchCursor.Success) { sql = matchCursor.Groups["statement"].ToString(); sql = _doubleApostrophe.Replace(sql, "'${string}'"); result.Statement = sql; result.NormalizedText = "{CURSOR} " + sql; } var matchCursorPrepexec = _cursorPrepExec.Match(sql); if (matchCursorPrepexec.Success) { sql = matchCursorPrepexec.Groups["statement"].ToString(); sql = _doubleApostrophe.Replace(sql, "'${string}'"); result.Statement = sql; result.NormalizedText = "{CURSOR} " + sql; } var matchSpExecuteSql = _spExecuteSql.Match(sql); if (matchSpExecuteSql.Success) { sql = matchSpExecuteSql.Groups["statement"].ToString(); result.Statement = sql; result.NormalizedText = sql; } var matchSpExecuteSqlWithStatement = _spExecuteSqlWithStatement.Match(sql); if (matchSpExecuteSqlWithStatement.Success) { sql = matchSpExecuteSqlWithStatement.Groups["statement"].ToString(); result.Statement = sql; result.NormalizedText = sql; } if (!_brackets.Match(sql).Success) { var matchDbAndObjectName = _dbAndObjectName.Match(sql); if (matchDbAndObjectName.Success) { sql = matchDbAndObjectName.Groups["object"].ToString(); } else { var matchObjectName = _objectName.Match(sql); if (matchObjectName.Success) { sql = matchObjectName.Groups["object"].ToString(); } } if (sql == "SP_CURSOR" || sql == "SP_CURSORFETCH" || sql == "SP_CURSORCLOSE" || sql == "SP_RESET_CONNECTION") { return null; } } if (sql.Contains("EXEC") && sql.Contains("READONLY")) { var matchTVPExecute = _TVPExecute.Match(sql); if (matchTVPExecute.Success) { result.Statement = sql; result.NormalizedText = "EXECUTE " + matchTVPExecute.Groups["object"].ToString(); } } result.NormalizedText = _paramNameValueStr.Replace(result.NormalizedText, "@${paramname} = {STR}"); result.NormalizedText = _paramNameValueNum.Replace(result.NormalizedText, "@${paramname} = {NUM}"); result.NormalizedText = 
_emptyString.Replace(result.NormalizedText, "{STR}"); result.NormalizedText = _stringConstant.Replace(result.NormalizedText, "{STR}"); result.NormalizedText = _unicodeConstant.Replace(result.NormalizedText, "{NSTR}"); result.NormalizedText = _binaryConstant.Replace(result.NormalizedText, "{BINARY}"); result.NormalizedText = _numericConstant.Replace(result.NormalizedText, "${prefix}{##}"); result.NormalizedText = _inClause.Replace(result.NormalizedText, "{IN}"); if (spreadCsv) { result.NormalizedText = _spreadCsv.Replace(result.NormalizedText, ", "); } result.NormalizedText = _spaces.Replace(result.NormalizedText, " "); result.NormalizedText = TruncateSql(result.NormalizedText); if (flag1 && num != 0) { var theKey = (object)(spid.ToString() + "_" + num.ToString()); if (!prepSql.ContainsKey(theKey)) { prepSql.Add(theKey, sql); } else { prepSql[theKey] = sql; } } if (flag2) { result.NormalizedText = TruncateSql("{PREPARING} " + sql); return result; } if (flag1 && !flag2) { result.NormalizedText = TruncateSql("{PREPARED} " + sql); return result; } result.NormalizedText = TruncateSql(result.NormalizedText); return result; } private string TruncateSql(string sql) { sql = sql.Trim(); if (TruncateTo4000 && sql.Length > 4000) { return sql.Substring(0, 4000); } return sql; } private string FixComments(string sql) { var str = sql; var num = 0; var startat = 0; var match1 = _delimiterStart.Match(sql, startat); while (match1.Success) { switch (match1.Value) { case "'": var match2 = _stringConstant.Match(sql, match1.Index); if (match2.Success) { startat = match1.Index + match2.Length; break; } ++startat; break; case "--": startat = match1.Index; if (_inlineComment.Match(sql, startat).Success) { sql = _inlineComment.Replace(sql, "", 1, startat); break; } ++startat; break; case "/*": var index = match1.Index; sql = RemoveBlockComments(sql, index); startat = index + 1; break; default: return sql; } if (startat < sql.Length) { match1 = _delimiterStart.Match(sql, startat); ++num; if 
(num > 1000000) { throw new Exception("Infinite loop in FixComments (" + Assembly.GetExecutingAssembly().GetName().Version.ToString() + ")" + Environment.NewLine + Environment.NewLine + str); } } else { break; } } return sql; } private string RemoveBlockComments(string sql, int position) { var stringBuilder = new StringBuilder(sql.Length); _ = stringBuilder.Append(sql.Substring(0, position)); var num = 0; var startIndex = position; while (startIndex < sql.Length - 1) { switch (sql.Substring(startIndex, 2)) { case "/*": ++num; startIndex = startIndex + 1 + 1; break; case "*/": --num; startIndex = startIndex + 1 + 1; break; default: ++startIndex; break; } if (num == 0) { if (startIndex < sql.Length) { _ = stringBuilder.Append(sql.Substring(startIndex, sql.Length - startIndex)); } return stringBuilder.ToString(); } } return sql; } public long GetHashCode(string text) { text = text ?? ""; var num = text.Length / 2; return (int.MaxValue * (long)text.Substring(0, num).GetHashCode()) + text.Substring(num, text.Length - num).GetHashCode(); } } } ================================================ FILE: WorkloadTools/Consumer/Analysis/WorkloadAnalyzer.cs ================================================ using NLog; using System; using System.Collections.Generic; using System.Data; using System.Data.SqlClient; using System.IO; using System.Linq; using System.Reflection; using System.Runtime.CompilerServices; using System.Text; using System.Threading; using System.Threading.Tasks; using WorkloadTools.Util; using System.Collections.Concurrent; using FastMember; using Microsoft.SqlServer.Management.SqlParser.SqlCodeDom; namespace WorkloadTools.Consumer.Analysis { public class WorkloadAnalyzer : IDisposable { private static readonly Logger logger = LogManager.GetCurrentClassLogger(); public SqlConnectionInfo ConnectionInfo { get; set; } public int Interval { get; set; } private readonly Dictionary normalizedQueries = new Dictionary(); private readonly Dictionary applications = new 
Dictionary(); private readonly Dictionary databases = new Dictionary(); private readonly Dictionary logins = new Dictionary(); private readonly Dictionary hosts = new Dictionary(); private Queue _internalQueue = new Queue(); private readonly object _internalQueueLock = new object(); private Thread Worker; private bool stopped = false; public int MaxInternalQueueSize { get; set; } = 10000; private ConcurrentDictionary> rawData; private DataTable errorData; private readonly SqlTextNormalizer normalizer; private bool TargetTableCreated = false; private bool FirstIntervalWritten = false; private DataTable counterData; private DataTable waitsData; private DataTable diskPerfData; public int MaximumWriteRetries { get; set; } public bool TruncateTo4000 { get; set; } public bool TruncateTo1024 { get; set; } public bool WriteDetail { get; set; } = true; public bool WriteSummary { get; set; } = true; private class NormalizedQuery { public long Hash { get; set; } public string NormalizedText { get; set; } public string ExampleText { get; set; } } private DateTime lastDump = DateTime.MinValue; private DateTime lastEventTime = DateTime.MinValue; private volatile int lastWrittenIntervalId = -1; public WorkloadAnalyzer() { normalizer = new SqlTextNormalizer() { TruncateTo1024 = TruncateTo1024, TruncateTo4000 = TruncateTo4000 }; } public bool HasEventsQueued { get { lock (_internalQueueLock) { return _internalQueue.Count > 0; } } } private void CloseInterval() { // Write collected data to the destination database var duration = lastEventTime - lastDump; if (duration.TotalMinutes >= Interval) { // Avoid writing the same interval_id twice. This can happen when // Interval=0 (the default) and multiple events share the same // second-precision timestamp: after the first write sets // lastDump=lastEventTime, the condition above is 0>=0 (always true), // so the next loop iteration would attempt to INSERT to WorkloadDetails // for an interval_id that was already committed. 
var prospectiveIntervalId = ComputeIntervalId(lastEventTime); if (prospectiveIntervalId == lastWrittenIntervalId) { lastDump = lastEventTime; return; } try { var numRetries = 0; while (numRetries <= MaximumWriteRetries) { try { WriteToServer(lastEventTime); numRetries = MaximumWriteRetries + 1; } catch (Exception ex) { logger.Warn("Unable to write workload analysis."); logger.Warn(ex.Message); if (numRetries == MaximumWriteRetries) { throw; } } numRetries++; } } catch (Exception e) { try { logger.Error(e, "Unable to write workload analysis info to the destination database."); logger.Error(e.StackTrace); } catch { Console.WriteLine(string.Format("Unable to write to the database: {0}.", e.Message)); } } finally { lastDump = lastEventTime; } } } private void ProcessQueue() { while (!stopped) { WorkloadEvent data = null; bool hasData = false; lock (_internalQueueLock) { CloseInterval(); if (_internalQueue.Count > 0) { data = _internalQueue.Dequeue(); hasData = true; // Notify Add() that a slot is now free Monitor.PulseAll(_internalQueueLock); } } if (hasData) { InternalAdd(data); } else { // Sleep outside the lock so Add() is not blocked unnecessarily Thread.Sleep(10); } } } public void Add(WorkloadEvent evt) { if (evt is ExecutionWorkloadEvent executionEvent && string.IsNullOrEmpty(executionEvent.Text)) { return; } try { ProvisionWorker(); } catch (Exception e) { logger.Error(e, "Unable to start the worker thread for WorkloadAnalyzer"); } lock (_internalQueueLock) { // Block when the queue is full to avoid unbounded memory growth while (!stopped && _internalQueue.Count >= MaxInternalQueueSize) { Monitor.Wait(_internalQueueLock); } if (stopped) return; lastEventTime = evt.StartTime; if (lastDump == DateTime.MinValue) { lastDump = lastEventTime; } _internalQueue.Enqueue(evt); } } private void ProvisionWorker() { var startNewWorker = false; if (Worker == null) { startNewWorker = true; } else { if (!Worker.IsAlive) { startNewWorker = true; } } if (startNewWorker) { // 
Start a new background worker if the thread is null // or stopped / aborted Worker = new Thread(() => { try { ProcessQueue(); } catch (Exception e) { logger.Error(e.Message); logger.Error(e.StackTrace); } }) { IsBackground = true, Name = "RealtimeWorkloadAnalyzer.Worker" }; Worker.Start(); Thread.Sleep(100); } } private void InternalAdd(WorkloadEvent evt) { if (evt is ExecutionWorkloadEvent executionEvent) { InternalAdd(executionEvent); } if (evt is ErrorWorkloadEvent errorEvent) { InternalAdd(errorEvent); } if (evt is CounterWorkloadEvent counterEvent) { InternalAdd(counterEvent); } if (evt is WaitStatsWorkloadEvent waitStatsEvent) { InternalAdd(waitStatsEvent); } if (evt is DiskPerfWorkloadEvent diskPerfEvent) { InternalAdd(diskPerfEvent); } } private void InternalAdd(ErrorWorkloadEvent evt) { var row = errorData.NewRow(); row.SetField("message", evt.Text); row.SetField("type", evt.Type); errorData.Rows.Add(row); } private void InternalAdd(WaitStatsWorkloadEvent evt) { if (waitsData == null) { waitsData = evt.Waits; } else { waitsData.Merge(evt.Waits); } } private void InternalAdd(DiskPerfWorkloadEvent evt) { if (diskPerfData == null) { diskPerfData = evt.DiskPerf; } else { diskPerfData.Merge(evt.DiskPerf); } } private void InternalAdd(CounterWorkloadEvent evt) { if (counterData == null) { counterData = new DataTable(); _ = counterData.Columns.Add("event_time", typeof(DateTime)); _ = counterData.Columns.Add("counter_name", typeof(string)); _ = counterData.Columns.Add("counter_value", typeof(float)); } foreach(var cntr in evt.Counters.Keys) { var row = counterData.NewRow(); row.SetField("event_time", evt.StartTime); row.SetField("counter_name", cntr.ToString()); row.SetField("counter_value", evt.Counters[cntr]); counterData.Rows.Add(row); } } private void InternalAdd(ExecutionWorkloadEvent evt) { if (rawData == null) { PrepareDataTables(); PrepareDictionaries(); } var norm = normalizer.NormalizeSqlText(evt.Text, (int)evt.SPID); string normSql; if (norm != null) { 
normSql = norm.NormalizedText; } else { return; } if (normSql == null) { return; } var hash = normalizer.GetHashCode(normSql); if (!normalizedQueries.ContainsKey(hash)) { normalizedQueries.Add(hash, new NormalizedQuery { Hash = hash, NormalizedText = normSql, ExampleText = evt.Text }); } var appId = -1; if (evt.ApplicationName != null && !applications.TryGetValue(evt.ApplicationName, out appId)) { applications.Add(evt.ApplicationName, appId = applications.Count); } var dbId = -1; if (evt.DatabaseName != null && !databases.TryGetValue(evt.DatabaseName, out dbId)) { databases.Add(evt.DatabaseName, dbId = databases.Count); } var hostId = -1; if (evt.HostName != null && !hosts.TryGetValue(evt.HostName, out hostId)) { hosts.Add(evt.HostName, hostId = hosts.Count); } var loginId = -1; if (evt.LoginName != null && !logins.TryGetValue(evt.LoginName, out loginId)) { logins.Add(evt.LoginName, loginId = logins.Count); } var theKey = new ExecutionDetailKey() { Sql_hash = hash, Application_id = appId, Database_id = dbId, Host_id = hostId, Login_id = loginId }; var theValue = new ExecutionDetailValue() { Event_time = evt.StartTime, Cpu_us = evt.CPU, Reads = evt.Reads, Writes = evt.Writes, Duration_us = evt.Duration }; // Look up execution detail if (rawData.TryGetValue(theKey, out var theList)) { if (theList == null) { theList = new List(); } theList.Add(theValue); } else { theList = new List { theValue }; if (!rawData.TryAdd(theKey, theList)) { throw new InvalidOperationException("Unable to add an executionEvent to the queue"); } } } public void Stop() { try { WriteToServer(lastEventTime); } catch (Exception e) { // duplicate key errors might be thrown at this time // that's expected if trying to upload to the same // interval already uploaded and new queries with the // same hash have been captured if(!e.Message.Contains("Violation of PRIMARY KEY")) { throw; } } stopped = true; // Wake up any Add() calls that may be blocked waiting for queue space lock (_internalQueueLock) { 
Monitor.PulseAll(_internalQueueLock); } } [MethodImpl(MethodImplOptions.Synchronized)] private void WriteToServer(DateTime intervalTime) { logger.Trace("Writing Workload Analysis data"); using (var conn = new SqlConnection()) { conn.ConnectionString = ConnectionInfo.ConnectionString(); conn.Open(); if (!TargetTableCreated) { CreateTargetTables(); TargetTableCreated = true; } var tran = conn.BeginTransaction(); try { var current_interval_id = 0; if(WriteDetail) { current_interval_id = CreateInterval(conn, tran, intervalTime); } WriteDictionary(applications, conn, tran, "applications"); WriteDictionary(databases, conn, tran, "databases"); WriteDictionary(hosts, conn, tran, "hosts"); WriteDictionary(logins, conn, tran, "logins"); if (rawData == null) { PrepareDataTables(); } lock (rawData) { if (WriteSummary) { WriteExecutionSummary(conn, tran); } if (WriteDetail) { WriteExecutionDetails(conn, tran, current_interval_id); } rawData.Clear(); } if (WriteDetail) { WriteNormalizedQueries(normalizedQueries, conn, tran); WriteExecutionErrors(conn, tran, current_interval_id); WritePerformanceCounters(conn, tran, current_interval_id); WriteWaitsData(conn, tran, current_interval_id); WriteDiskPerf(conn, tran, current_interval_id); } tran.Commit(); if (WriteDetail) { lastWrittenIntervalId = current_interval_id; } } catch(Exception) { tran.Rollback(); throw; } } } private void WriteWaitsData(SqlConnection conn, SqlTransaction tran, int current_interval_id) { if (waitsData == null) { return; } lock (waitsData) { using (var bulkCopy = new System.Data.SqlClient.SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity | SqlBulkCopyOptions.FireTriggers | SqlBulkCopyOptions.CheckConstraints | SqlBulkCopyOptions.TableLock, tran)) { bulkCopy.DestinationTableName = "[" + ConnectionInfo.SchemaName + "].[WaitStats]"; bulkCopy.BatchSize = 1000; bulkCopy.BulkCopyTimeout = 300; var Table = from t in waitsData.AsEnumerable() group t by new { wait_type = t.Field("wait_type") } into grp select new { 
interval_id = current_interval_id, grp.Key.wait_type, wait_sec = grp.Sum(t => t.Field("wait_sec")), resource_sec = grp.Sum(t => t.Field("resource_sec")), signal_sec = grp.Sum(t => t.Field("signal_sec")), wait_count = grp.Sum(t => t.Field("wait_count")) }; using(var dt = DataUtils.ToDataTable(Table)) { bulkCopy.WriteToServer(dt); } logger.Info("Wait stats written"); } waitsData.Dispose(); waitsData = null; } } private void WriteDiskPerf(SqlConnection conn, SqlTransaction tran, int current_interval_id) { if (diskPerfData == null) { return; } lock (diskPerfData) { using (var bulkCopy = new System.Data.SqlClient.SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity | SqlBulkCopyOptions.FireTriggers | SqlBulkCopyOptions.CheckConstraints | SqlBulkCopyOptions.TableLock, tran)) { bulkCopy.DestinationTableName = "[" + ConnectionInfo.SchemaName + "].[DiskPerf]"; bulkCopy.BatchSize = 1000; bulkCopy.BulkCopyTimeout = 300; var Table = from t in diskPerfData.AsEnumerable() group t by new { database_name = t.Field("database_name"), physical_filename = t.Field("physical_filename"), logical_filename = t.Field("logical_filename"), file_type = t.Field("file_type"), volume_mount_point = t.Field("volume_mount_point"), } into grp select new { interval_id = current_interval_id, grp.Key.database_name, grp.Key.physical_filename, grp.Key.logical_filename, grp.Key.file_type, grp.Key.volume_mount_point, read_latency_ms = grp.Average(t => t.Field("read_latency_ms")), reads = grp.Sum(t => t.Field("reads")), read_bytes = grp.Sum(t => t.Field("read_bytes")), write_latency_ms = grp.Average(t => t.Field("write_latency_ms")), writes = grp.Sum(t => t.Field("writes")), write_bytes = grp.Sum(t => t.Field("write_bytes")), cum_read_latency_ms = grp.Max(t => t.Field("cum_read_latency_ms")), cum_reads = grp.Max(t => t.Field("cum_reads")), cum_read_bytes = grp.Max(t => t.Field("cum_read_bytes")), cum_write_latency_ms = grp.Max(t => t.Field("cum_write_latency_ms")), cum_writes = grp.Max(t => 
t.Field("cum_writes")), cum_write_bytes = grp.Max(t => t.Field("cum_write_bytes")) }; using (var dt = DataUtils.ToDataTable(Table)) { bulkCopy.WriteToServer(dt); } logger.Info("Disk perf written"); } diskPerfData.Dispose(); diskPerfData = null; } } private void WritePerformanceCounters(SqlConnection conn, SqlTransaction tran, int current_interval_id) { if (counterData == null) { return; } lock (counterData) { using (var bulkCopy = new System.Data.SqlClient.SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity | SqlBulkCopyOptions.FireTriggers | SqlBulkCopyOptions.CheckConstraints | SqlBulkCopyOptions.TableLock, tran)) { bulkCopy.DestinationTableName = "[" + ConnectionInfo.SchemaName + "].[PerformanceCounters]"; bulkCopy.BatchSize = 1000; bulkCopy.BulkCopyTimeout = 300; var Table = from t in counterData.AsEnumerable() group t by new { counter_name = t.Field("counter_name") } into grp select new { interval_id = current_interval_id, grp.Key.counter_name, min_counter_value = grp.Min(t => t.Field("counter_value")), max_counter_value = grp.Max(t => t.Field("counter_value")), avg_counter_value = grp.Average(t => t.Field("counter_value")) }; using (var dt = DataUtils.ToDataTable(Table)) { bulkCopy.WriteToServer(dt); } logger.Info("Performance counters written"); } counterData.Dispose(); counterData = null; } } private void WriteExecutionSummary(SqlConnection conn, SqlTransaction tran) { // create temporary table for uploading data var sql = $@" IF OBJECT_ID('tempdb..#WorkloadSummary') IS NOT NULL DROP TABLE #WorkloadSummary; SELECT TOP(0) * INTO #WorkloadSummary FROM [{ConnectionInfo.SchemaName}].WorkloadSummary; "; using (var cmd = conn.CreateCommand()) { cmd.Transaction = tran; cmd.CommandText = sql; _ = cmd.ExecuteNonQuery(); } // bulk copy data to temp table using (var bulkCopy = new System.Data.SqlClient.SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity | SqlBulkCopyOptions.FireTriggers | SqlBulkCopyOptions.CheckConstraints | SqlBulkCopyOptions.TableLock, tran)) { 
bulkCopy.DestinationTableName = "#WorkloadSummary"; bulkCopy.BatchSize = 1000; bulkCopy.BulkCopyTimeout = 300; var Table = from t in rawData.Keys from v in rawData[t] group new { v.Cpu_us, v.Duration_us, v.Event_time, v.Reads, v.Writes } by new { application_id = t.Application_id, database_id = t.Database_id, host_id = t.Host_id, login_id = t.Login_id } into grp select new { grp.Key.application_id, grp.Key.database_id, grp.Key.host_id, grp.Key.login_id, min_cpu_us = grp.Min(v => v.Cpu_us), max_cpu_us = grp.Max(v => v.Cpu_us), sum_cpu_us = grp.Sum(v => v.Cpu_us), min_reads = grp.Min(v => v.Reads), max_reads = grp.Max(v => v.Reads), sum_reads = grp.Sum(v => v.Reads), min_writes = grp.Min(v => v.Writes), max_writes = grp.Max(v => v.Writes), sum_writes = grp.Sum(v => v.Writes), min_duration_us = grp.Min(v => v.Duration_us), max_duration_us = grp.Max(v => v.Duration_us), sum_duration_us = grp.Sum(v => v.Duration_us), min_execution_date = grp.Min(v => v.Event_time), max_execution_date = grp.Max(v => v.Event_time), execution_count = grp.Count() }; using (var reader = ObjectReader.Create(Table, "application_id", "database_id", "host_id", "login_id", "min_cpu_us", "max_cpu_us", "sum_cpu_us", "min_reads", "max_reads", "sum_reads", "min_writes", "max_writes", "sum_writes", "min_duration_us", "max_duration_us", "sum_duration_us", "min_execution_date", "max_execution_date", "execution_count")) { bulkCopy.WriteToServer(reader); } } var affectedRows = 0; // merge with existing data sql = $@" UPDATE WS SET min_cpu_us = CASE WHEN T.min_cpu_us < WS.min_cpu_us THEN T.min_cpu_us ELSE WS.min_cpu_us END, max_cpu_us = CASE WHEN T.max_cpu_us > WS.max_cpu_us THEN T.max_cpu_us ELSE WS.max_cpu_us END, sum_cpu_us += T.sum_cpu_us, min_reads = CASE WHEN T.min_reads < WS.min_reads THEN T.min_reads ELSE WS.min_reads END, max_reads = CASE WHEN T.max_reads > WS.max_reads THEN T.max_reads ELSE WS.max_reads END, sum_reads += T.sum_reads, min_writes = CASE WHEN T.min_writes < WS.min_writes THEN 
T.min_writes ELSE WS.min_writes END, max_writes = CASE WHEN T.max_writes > WS.max_writes THEN T.max_writes ELSE WS.max_writes END, sum_writes += T.sum_writes, min_duration_us = CASE WHEN T.min_duration_us < WS.min_duration_us THEN T.min_duration_us ELSE WS.min_duration_us END, max_duration_us = CASE WHEN T.max_duration_us > WS.max_duration_us THEN T.max_duration_us ELSE WS.max_duration_us END, sum_duration_us += T.sum_duration_us, min_execution_date = CASE WHEN T.min_execution_date < WS.min_execution_date THEN T.min_execution_date ELSE WS.min_execution_date END, max_execution_date = CASE WHEN T.max_execution_date > WS.max_execution_date THEN T.max_execution_date ELSE WS.max_execution_date END, execution_count += T.execution_count FROM [{ConnectionInfo.SchemaName}].WorkloadSummary AS WS INNER JOIN #WorkloadSummary AS T ON T.application_id = WS.application_id AND T.database_id = WS.database_id AND T.host_id = WS.host_id AND T.login_id = WS.login_id; "; using (var cmd = conn.CreateCommand()) { cmd.Transaction = tran; cmd.CommandText = sql; affectedRows += cmd.ExecuteNonQuery(); } sql = $@" INSERT INTO [{ConnectionInfo.SchemaName}].WorkloadSummary SELECT * FROM #WorkloadSummary AS T WHERE NOT EXISTS ( SELECT * FROM [{ConnectionInfo.SchemaName}].WorkloadSummary AS WS WHERE T.application_id = WS.application_id AND T.database_id = WS.database_id AND T.host_id = WS.host_id AND T.login_id = WS.login_id ); "; using (var cmd = conn.CreateCommand()) { cmd.Transaction = tran; cmd.CommandText = sql; affectedRows += cmd.ExecuteNonQuery(); } logger.Info($"Summary info written ({affectedRows} rows)"); } private void WriteExecutionDetails(SqlConnection conn, SqlTransaction tran, int current_interval_id) { int numRows; using (var bulkCopy = new System.Data.SqlClient.SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity | SqlBulkCopyOptions.FireTriggers | SqlBulkCopyOptions.CheckConstraints | SqlBulkCopyOptions.TableLock, tran)) { bulkCopy.DestinationTableName = "[" + 
ConnectionInfo.SchemaName + "].[WorkloadDetails]"; bulkCopy.BatchSize = 1000; bulkCopy.BulkCopyTimeout = 300; var Table = from t in rawData.Keys from v in rawData[t] group new { v.Cpu_us, v.Duration_us, v.Event_time, v.Reads, v.Writes } by new { sql_hash = t.Sql_hash, application_id = t.Application_id, database_id = t.Database_id, host_id = t.Host_id, login_id = t.Login_id } into grp select new { interval_id = current_interval_id, grp.Key.sql_hash, grp.Key.application_id, grp.Key.database_id, grp.Key.host_id, grp.Key.login_id, avg_cpu_us = grp.Average(v => v.Cpu_us), min_cpu_us = grp.Min(v => v.Cpu_us), max_cpu_us = grp.Max(v => v.Cpu_us), sum_cpu_us = grp.Sum(v => v.Cpu_us), avg_reads = grp.Average(v => v.Reads), min_reads = grp.Min(v => v.Reads), max_reads = grp.Max(v => v.Reads), sum_reads = grp.Sum(v => v.Reads), avg_writes = grp.Average(v => v.Writes), min_writes = grp.Min(v => v.Writes), max_writes = grp.Max(v => v.Writes), sum_writes = grp.Sum(v => v.Writes), avg_duration_us = grp.Average(v => v.Duration_us), min_duration_us = grp.Min(v => v.Duration_us), max_duration_us = grp.Max(v => v.Duration_us), sum_duration_us = grp.Sum(v => v.Duration_us), execution_count = grp.Count() }; using (var reader = ObjectReader.Create(Table, "interval_id", "sql_hash", "application_id", "database_id", "host_id", "login_id", "avg_cpu_us", "min_cpu_us", "max_cpu_us", "sum_cpu_us", "avg_reads", "min_reads", "max_reads", "sum_reads", "avg_writes", "min_writes", "max_writes", "sum_writes", "avg_duration_us", "min_duration_us", "max_duration_us", "sum_duration_us", "execution_count")) { bulkCopy.WriteToServer(reader); } numRows = rawData.Sum(x => x.Value.Count); logger.Info($"{numRows} rows aggregated"); numRows = rawData.Count(); logger.Info($"{numRows} rows written"); } } private void WriteExecutionErrors(SqlConnection conn, SqlTransaction tran, int current_interval_id) { if (errorData == null) { PrepareDataTables(); } lock (errorData) { using (var bulkCopy = new 
System.Data.SqlClient.SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity | SqlBulkCopyOptions.FireTriggers | SqlBulkCopyOptions.CheckConstraints | SqlBulkCopyOptions.TableLock, tran)) { bulkCopy.DestinationTableName = "[" + ConnectionInfo.SchemaName + "].[Errors]"; bulkCopy.BatchSize = 1000; bulkCopy.BulkCopyTimeout = 300; var Table = from t in errorData.AsEnumerable() group t by new { type = t.Field("type"), message = t.Field("message") } into grp select new { interval_id = current_interval_id, error_type = ((WorkloadEvent.EventType)grp.Key.type).ToString(), grp.Key.message, error_count = grp.Count() }; bulkCopy.WriteToServer(DataUtils.ToDataTable(Table)); } errorData.Rows.Clear(); } } private void WriteDictionary(Dictionary values, SqlConnection conn, SqlTransaction tran, string name) { // create a temporary table var sql = @" SELECT TOP(0) * INTO #{0} FROM [{1}].[{0}]; "; sql = string.Format(sql, name, ConnectionInfo.SchemaName); using (var cmd = conn.CreateCommand()) { cmd.Transaction = tran; cmd.CommandText = sql; _ = cmd.ExecuteNonQuery(); } // bulk insert into temporary using (var bulkCopy = new System.Data.SqlClient.SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity | SqlBulkCopyOptions.FireTriggers | SqlBulkCopyOptions.CheckConstraints | SqlBulkCopyOptions.TableLock, tran)) { bulkCopy.DestinationTableName = "#" + name; bulkCopy.BatchSize = 1000; bulkCopy.BulkCopyTimeout = 300; bulkCopy.WriteToServer(DataUtils.ToDataTable(from t in values select new { t.Value, t.Key })); } // merge new data sql = @" INSERT INTO [{1}].[{0}s] SELECT * FROM #{0}s AS src WHERE NOT EXISTS ( SELECT * FROM [{1}].[{0}s] AS dst WHERE dst.[{0}_id] = src.[{0}_id] ); "; sql = string.Format(sql, name.Substring(0, name.Length - 1), ConnectionInfo.SchemaName); using (var cmd = conn.CreateCommand()) { cmd.Transaction = tran; cmd.CommandText = sql; _ = cmd.ExecuteNonQuery(); } } private void WriteNormalizedQueries(Dictionary values, SqlConnection conn, SqlTransaction tran) { // create a 
temporary table var sql = @" SELECT TOP(0) * INTO #NormalizedQueries FROM [{0}].[NormalizedQueries]; "; sql = string.Format(sql, ConnectionInfo.SchemaName); using (var cmd = conn.CreateCommand()) { cmd.Transaction = tran; cmd.CommandText = sql; _ = cmd.ExecuteNonQuery(); } // bulk insert into temporary using (var bulkCopy = new System.Data.SqlClient.SqlBulkCopy(conn, SqlBulkCopyOptions.KeepIdentity | SqlBulkCopyOptions.FireTriggers | SqlBulkCopyOptions.CheckConstraints | SqlBulkCopyOptions.TableLock, tran)) { bulkCopy.DestinationTableName = "#NormalizedQueries"; bulkCopy.BatchSize = 1000; bulkCopy.BulkCopyTimeout = 300; bulkCopy.WriteToServer(DataUtils.ToDataTable(from t in values where t.Value != null select new { t.Value.Hash, t.Value.NormalizedText, t.Value.ExampleText })); } // merge new data sql = @" INSERT INTO [{0}].[NormalizedQueries] SELECT * FROM #NormalizedQueries AS src WHERE NOT EXISTS ( SELECT * FROM [{0}].[NormalizedQueries] AS dst WHERE dst.[sql_hash] = src.[sql_hash] ); "; sql = string.Format(sql, ConnectionInfo.SchemaName); using (var cmd = conn.CreateCommand()) { cmd.Transaction = tran; cmd.CommandText = sql; _ = cmd.ExecuteNonQuery(); } // Erase from memory all the normalized queries // already written to the database. 
This should reduce // the memory footprint quite a lot foreach(var hash in values.Keys.ToList()) { values[hash] = null; } // Run the Garbage Collector in a separate task _ = Task.Factory.StartNew(() => InvokeGC()); } private void InvokeGC() { GC.Collect(); GC.WaitForPendingFinalizers(); } private int ComputeIntervalId(DateTime intervalTime) { // interval id is the number of seconds since 01/01/2000 return (int)intervalTime.Subtract(DateTime.MinValue.AddYears(1999)).TotalSeconds; } private int CreateInterval(SqlConnection conn, SqlTransaction tran, DateTime intervalTime) { var sql = @" UPDATE [{0}].[Intervals] SET end_time = @end_time ,duration_minutes = @duration_minutes WHERE interval_id = @interval_id; IF @@ROWCOUNT = 0 INSERT INTO [{0}].[Intervals] (interval_id, end_time, duration_minutes) VALUES (@interval_id, @end_time, @duration_minutes); "; sql = string.Format(sql, ConnectionInfo.SchemaName); var interval_id = ComputeIntervalId(intervalTime); using (var cmd = conn.CreateCommand()) { cmd.Transaction = tran; cmd.CommandText = sql; _ = cmd.Parameters.AddWithValue("@interval_id", interval_id); _ = cmd.Parameters.AddWithValue("@end_time", intervalTime); _ = cmd.Parameters.AddWithValue("@duration_minutes", Interval); _ = cmd.ExecuteNonQuery(); } // If this the first interval of the analysis, write // a marker interval with duration = 0 if (!FirstIntervalWritten) { using (var cmd = conn.CreateCommand()) { cmd.Transaction = tran; cmd.CommandText = sql; _ = cmd.Parameters.AddWithValue("@interval_id", interval_id - 1); _ = cmd.Parameters.AddWithValue("@end_time", intervalTime.AddSeconds(-1)); _ = cmd.Parameters.AddWithValue("@duration_minutes", 0); _ = cmd.ExecuteNonQuery(); FirstIntervalWritten = true; } } return interval_id; } private void PrepareDataTables() { rawData = new ConcurrentDictionary>(); errorData = new DataTable(); _ = errorData.Columns.Add("type", typeof(int)); _ = errorData.Columns.Add("message", typeof(string)); } private void PrepareDictionaries() { 
CreateTargetDatabase(); using (var conn = new SqlConnection()) { conn.ConnectionString = ConnectionInfo.ConnectionString(); conn.Open(); var sql = string.Format(@"SELECT * FROM [{0}].[Applications]",ConnectionInfo.SchemaName); AddAllRows(conn, sql, applications); sql = string.Format(@"SELECT * FROM [{0}].[Databases]", ConnectionInfo.SchemaName); AddAllRows(conn, sql, databases); sql = string.Format(@"SELECT * FROM [{0}].[Hosts]", ConnectionInfo.SchemaName); AddAllRows(conn, sql, hosts); sql = string.Format(@"SELECT * FROM [{0}].[Logins]", ConnectionInfo.SchemaName); AddAllRows(conn, sql, logins); } } private void AddAllRows(SqlConnection conn, string sql, Dictionary d) { try { using (var adapter = new SqlDataAdapter(sql, conn)) { using (var ds = new DataSet()) { _ = adapter.Fill(ds); var dt = ds.Tables[0]; foreach (DataRow dr in dt.Rows) { d.Add((string)dr[1], (int)dr[0]); } } } } catch(SqlException e) { logger.Trace("Unable to read saved classifiers from the analysis database: {0}", e.Message); } catch(Exception e) { logger.Error(e.Message); throw; } } protected void CreateTargetTables() { CreateTargetDatabase(); var sql = File.ReadAllText(WorkloadController.BaseLocation + "\\Consumer\\Analysis\\DatabaseSchema.sql"); sql = sql.Replace("{DatabaseName}", ConnectionInfo.DatabaseName); sql = sql.Replace("{SchemaName}", ConnectionInfo.SchemaName); using (var conn = new SqlConnection()) { conn.ConnectionString = ConnectionInfo.ConnectionString(); conn.Open(); conn.ChangeDatabase(ConnectionInfo.DatabaseName); using (var cmd = conn.CreateCommand()) { cmd.CommandText = sql; _ = cmd.ExecuteNonQuery(); } sql = "IF OBJECT_ID('dbo.createAnalysisView') IS NULL EXEC('CREATE PROCEDURE dbo.createAnalysisView AS RETURN 0')"; using (var cmd = conn.CreateCommand()) { cmd.CommandText = sql; _ = cmd.ExecuteNonQuery(); } sql = File.ReadAllText(WorkloadController.BaseLocation + "\\Consumer\\Analysis\\createAnalysisView.sql"); using (var cmd = conn.CreateCommand()) { cmd.CommandText = 
sql; _ = cmd.ExecuteNonQuery(); } // Invoke the stored procedure to create the workload comparison view sql = @" DECLARE @name1 sysname, @name2 sysname; SELECT @name1 = [1], @name2 = [2] FROM ( SELECT TOP(2) OBJECT_SCHEMA_NAME(object_id) AS schema_name, ROW_NUMBER() OVER (ORDER BY create_date DESC) AS RN FROM sys.tables WHERE name = 'WorkloadDetails' ORDER BY create_date DESC ) AS src PIVOT( MIN(schema_name) FOR RN IN ([1], [2])) AS p; SELECT @name1 ,@name2 IF OBJECT_ID(@name1 + '.WorkloadDetails') IS NOT NULL OR OBJECT_ID(@name2 + '.WorkloadDetails') IS NOT NULL BEGIN EXEC createAnalysisView @name1, @name2; END "; using (var cmd = conn.CreateCommand()) { cmd.CommandText = sql; _ = cmd.ExecuteNonQuery(); } } } protected void CreateTargetDatabase() { try { var databaseName = ConnectionInfo.DatabaseName; using (var conn = new SqlConnection()) { // create a new connection to the target server // for the analysis database, on the master db // then create the target database if not available var ci = new SqlConnectionInfo(ConnectionInfo); ci.DatabaseName = "master"; conn.ConnectionString = ConnectionInfo.ConnectionString(); conn.Open(); using (var cmd = conn.CreateCommand()) { var createDb = @" IF DB_ID(@name) IS NULL BEGIN DECLARE @sql nvarchar(max); SET @sql = N'CREATE DATABASE ' + QUOTENAME(@name); EXEC sp_executesql @sql; END "; cmd.CommandText = createDb; _ = cmd.Parameters.AddWithValue("@name", databaseName); _ = cmd.ExecuteNonQuery(); } } } catch(Exception e) { logger.Warn("Unable to create the target database for the analysis", e.Message); } } public void Dispose() { rawData?.Clear(); errorData?.Dispose(); counterData?.Dispose(); waitsData?.Dispose(); } internal class ExecutionDetailKey : IEquatable { public long Sql_hash { get; set; } public int Application_id { get; set; } public int Database_id { get; set; } public int Host_id { get; set; } public int Login_id { get; set; } public override int GetHashCode() { var hash = 497; unchecked { hash = (hash * 17) + 
Sql_hash.GetHashCode();
hash = (hash * 17) + Application_id.GetHashCode();
hash = (hash * 17) + Database_id.GetHashCode();
hash = (hash * 17) + Host_id.GetHashCode();
hash = (hash * 17) + Login_id.GetHashCode();
}
return hash;
}

public override bool Equals(object other)
{
return Equals(other as ExecutionDetailKey);
}

// Memberwise equality over all five key components.
public bool Equals(ExecutionDetailKey other)
{
return other != null
&& Sql_hash.Equals(other.Sql_hash)
&& Application_id.Equals(other.Application_id)
&& Database_id.Equals(other.Database_id)
&& Host_id.Equals(other.Host_id)
&& Login_id.Equals(other.Login_id);
}
}

// Measures captured for a single statement execution; nullable because not
// every workload event carries every measure.
internal class ExecutionDetailValue
{
public DateTime Event_time { get; set; }
public long? Cpu_us { get; set; }
public long? Reads { get; set; }
public long? Writes { get; set; }
public long? Duration_us { get; set; }
}
}
}


================================================
FILE: WorkloadTools/Consumer/Analysis/createAnalysisView.sql
================================================
-- (Re)builds the PowerBI_* reporting views in the baseline schema and,
-- optionally, the replay schema. A stub of this procedure is created by
-- the C# analysis code (CreateTargetTables) and then ALTERed to this body.
ALTER PROCEDURE [dbo].[createAnalysisView]
    @baselineSchema AS nvarchar(max),
    @replaySchema AS nvarchar(max)
AS
BEGIN
    SET NOCOUNT ON;
    SET TRANSACTION ISOLATION LEVEL SERIALIZABLE;

    BEGIN TRY
        BEGIN TRAN;

        DECLARE @sql nvarchar(max);

        -- Ensure PowerBI_WaitStats exists as a stub in both schemas, so the
        -- ALTER VIEW further down always has something to alter.
        IF OBJECT_ID( QUOTENAME(@baselineSchema) +'.'+ QUOTENAME('PowerBI_WaitStats') ) IS NULL
        BEGIN
            SET @sql = 'CREATE VIEW ' + QUOTENAME(@baselineSchema) +'.'+ QUOTENAME('PowerBI_WaitStats') + ' AS SELECT 1 AS one '
            EXEC(@sql)
        END

        IF OBJECT_ID( QUOTENAME(@replaySchema) +'.'+ QUOTENAME('PowerBI_WaitStats') ) IS NULL
        BEGIN
            SET @sql = 'CREATE VIEW ' + QUOTENAME(@replaySchema) +'.'+ QUOTENAME('PowerBI_WaitStats') + ' AS SELECT 1 AS one '
            EXEC(@sql)
        END

        -- Stub for PowerBI_WinPerfCounters in both schemas.
        IF OBJECT_ID( QUOTENAME(@baselineSchema) +'.'+ QUOTENAME('PowerBI_WinPerfCounters') ) IS NULL
        BEGIN
            SET @sql = 'CREATE VIEW ' + QUOTENAME(@baselineSchema) +'.'+ QUOTENAME('PowerBI_WinPerfCounters') + ' AS SELECT 1 AS one '
            EXEC(@sql)
        END

        IF OBJECT_ID( QUOTENAME(@replaySchema) +'.'+ QUOTENAME('PowerBI_WinPerfCounters') ) IS NULL
        BEGIN
            SET @sql = 'CREATE VIEW ' + QUOTENAME(@replaySchema) +'.'+ QUOTENAME('PowerBI_WinPerfCounters') + ' AS SELECT 1 AS one '
            EXEC(@sql)
        END

        -- Stub for PowerBI_WorkloadData in both schemas.
        IF OBJECT_ID( QUOTENAME(@baselineSchema) +'.'+ QUOTENAME('PowerBI_WorkloadData') ) IS NULL
        BEGIN
            SET @sql = 'CREATE VIEW ' + QUOTENAME(@baselineSchema) +'.'+ QUOTENAME('PowerBI_WorkloadData') + ' AS SELECT 1 AS one '
            EXEC(@sql)
        END

        IF OBJECT_ID( QUOTENAME(@replaySchema) +'.'+ QUOTENAME('PowerBI_WorkloadData') ) IS NULL
        BEGIN
            SET @sql = 'CREATE VIEW ' + QUOTENAME(@replaySchema) +'.'+ QUOTENAME('PowerBI_WorkloadData') + ' AS SELECT 1 AS one '
            EXEC(@sql)
        END

        -- Stub for PowerBI_WorkloadQueries in both schemas.
        IF OBJECT_ID( QUOTENAME(@baselineSchema) +'.'+ QUOTENAME('PowerBI_WorkloadQueries') ) IS NULL
        BEGIN
            SET @sql = 'CREATE VIEW ' + QUOTENAME(@baselineSchema) +'.'+ QUOTENAME('PowerBI_WorkloadQueries') + ' AS SELECT 1 AS one '
            EXEC(@sql)
        END

        IF OBJECT_ID( QUOTENAME(@replaySchema) +'.'+ QUOTENAME('PowerBI_WorkloadQueries') ) IS NULL
        BEGIN
            SET @sql = 'CREATE VIEW ' + QUOTENAME(@replaySchema) +'.'+ QUOTENAME('PowerBI_WorkloadQueries') + ' AS SELECT 1 AS one '
            EXEC(@sql)
        END

        -- Stub for PowerBI_Time in both schemas.
        IF OBJECT_ID( QUOTENAME(@baselineSchema) +'.'+ QUOTENAME('PowerBI_Time') ) IS NULL
        BEGIN
            SET @sql = 'CREATE VIEW ' + QUOTENAME(@baselineSchema) +'.'+ QUOTENAME('PowerBI_Time') + ' AS SELECT 1 AS one '
            EXEC(@sql)
        END

        IF OBJECT_ID( QUOTENAME(@replaySchema) +'.'+ QUOTENAME('PowerBI_Time') ) IS NULL
        BEGIN
            SET @sql = 'CREATE VIEW ' + QUOTENAME(@replaySchema) +'.'+ QUOTENAME('PowerBI_Time') + ' AS SELECT 1 AS one '
            EXEC(@sql)
        END

        -- CREATE VIEWS
        --===========================================================

        -- Normalized query catalog for the given schema ({0} is replaced
        -- with the schema name before EXEC).
        DECLARE @PowerBI_WorkloadQueries nvarchar(max) = N'
            ALTER VIEW {0}.[PowerBI_WorkloadQueries]
            AS
            SELECT bNQ.[sql_hash] AS [Sql Hash],
                bNQ.[normalized_text] AS [Sql Normalized Text],
                bNQ.[example_text] AS [Sql Sample Text]
            FROM {0}.[NormalizedQueries] AS bNQ
        '

        IF @baselineSchema IS NOT NULL
        BEGIN
            SET @sql = REPLACE(@PowerBI_WorkloadQueries, '{0}', @baselineSchema);
            EXEC(@sql);
        END

        IF @replaySchema IS NOT NULL
        BEGIN
            SET @sql = REPLACE(@PowerBI_WorkloadQueries, '{0}', @replaySchema);
            EXEC(@sql);
        END

        --===========================================================

        -- Windows performance counters joined to the running elapsed time
        -- (in minutes) of each capture interval.
        DECLARE @PowerBI_WinPerfCounters nvarchar(max) = N'
            ALTER VIEW {0}.[PowerBI_WinPerfCounters]
            AS
            SELECT bPC.[counter_name] AS [Win Counter]
                ,bPC.[min_counter_value] AS [Counter Min Value]
                ,bPC.[max_counter_value] AS [Counter Max Value]
                ,bPC.[avg_counter_value] AS [Counter Average Value]
                ,bIn.[Elapsed Time (min)]
            FROM {0}.[PerformanceCounters] AS bPC
            INNER JOIN (
                SELECT [interval_id], [duration_minutes], [end_time],
                    SUM([duration_minutes]) OVER(ORDER BY [end_time] ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS [Elapsed Time (min)]
                FROM {0}.[Intervals]
            ) AS bIn
                ON bPC.[interval_id] = bIn.[interval_id]
        '

        IF @baselineSchema IS NOT NULL
        BEGIN
            SET @sql = REPLACE(@PowerBI_WinPerfCounters, '{0}', @baselineSchema);
            EXEC(@sql);
        END

        IF @replaySchema IS NOT NULL
        BEGIN
            SET @sql = REPLACE(@PowerBI_WinPerfCounters, '{0}', @replaySchema);
            EXEC(@sql);
        END

        --===========================================================

        -- Per-interval aggregated workload measures, denormalized with the
        -- classifier dimensions (application, database, host, login).
        DECLARE @PowerBI_WorkloadData nvarchar(max) = N'
            ALTER VIEW {0}.[PowerBI_WorkloadData]
            AS
            SELECT bWD.[sql_hash] AS [Sql Hash],
                bIn.[duration_minutes] AS [Interval Duration (min)],
                bIn.[end_time] AS [Interval End Time],
                bIn.[Elapsed Time (min)],
                bAp.[application_name] AS [Application],
                bDB.[database_name] AS [Database],
                bHS.[host_name] AS [Host],
                bLI.[login_name] AS [Login],
                bWD.[avg_cpu_us] AS [Avg Cpu (µs)],
                bWD.[min_cpu_us] AS [Min Cpu (µs)],
                bWD.[max_cpu_us] AS [Max Cpu (µs)],
                bWD.[sum_cpu_us] AS [Sum Cpu (µs)],
                bWD.[avg_reads] AS [Avg Reads],
                bWD.[min_reads] AS [Min Reads],
                bWD.[max_reads] AS [Max Reads],
                bWD.[sum_reads] AS [Sum Reads],
                bWD.[avg_writes] AS [Avg Writes],
                bWD.[min_writes] AS [Min Writes],
                bWD.[max_writes] AS [Max Writes],
                bWD.[sum_writes] AS [Sum Writes],
                bWD.[avg_duration_us] AS [Avg Duration (µs)],
                bWD.[min_duration_us] AS [Min Duration (µs)],
                bWD.[max_duration_us] AS [Max Duration (µs)],
                bWD.[sum_duration_us] AS [Sum Duration (µs)],
                bWD.[execution_count] AS [Execution Count]
            FROM {0}.WorkloadDetails AS bWD
            INNER JOIN (
                SELECT [interval_id], [duration_minutes], [end_time],
                    SUM([duration_minutes]) OVER(ORDER BY [end_time] ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS [Elapsed Time (min)]
                FROM {0}.[Intervals]
            ) AS bIn
                ON bIn.[interval_id] = bWD.[interval_id]
            INNER JOIN {0}.Applications AS bAp
                ON bAp.[application_id] = bWD.[application_id]
            INNER JOIN {0}.Databases AS bDB
                ON bDB.[database_id] = [bWD].database_id
            INNER JOIN {0}.Hosts AS bHS
                ON bHS.[host_id] = bWD.[host_id]
            INNER JOIN {0}.Logins AS bLI
                ON bLI.[login_id] = bWD.[login_id]
        '

        IF @baselineSchema IS NOT NULL
        BEGIN
            SET @sql = REPLACE(@PowerBI_WorkloadData, '{0}', @baselineSchema);
            EXEC(@sql);
        END

        IF @replaySchema IS NOT NULL
        BEGIN
            SET @sql = REPLACE(@PowerBI_WorkloadData, '{0}', @replaySchema);
            EXEC(@sql);
        END

        --===========================================================

        -- Wait statistics per interval, decorated with a static wait-type ->
        -- category mapping (the long VALUES catalog that follows).
        -- NOTE(review): wait_sec * 1000 yields milliseconds, but the columns
        -- are labelled (µs) - confirm the intended scale with the report.
        DECLARE @PowerBI_WaitStats nvarchar(max) = N'
            ALTER VIEW {0}.[PowerBI_WaitStats]
            AS
            SELECT bWS.[interval_id]
                ,bWS.[wait_type] AS [Wait Type]
                ,bWS.[wait_sec]*1000 AS [Wait Time (µs)]
                ,bWS.[resource_sec]*1000 AS [Wait Time Resource (µs)]
                ,bWS.[signal_sec]*1000 AS [Wait Time Signal (µs)]
                ,bWS.[wait_count] AS [Wait Count]
                ,bIn.[Elapsed Time (min)]
                ,COALESCE(WsCat.[Wait Category],'''') AS [Wait Type Category]
            FROM {0}.[WaitStats] AS bWS
            INNER JOIN (
                SELECT [interval_id], [duration_minutes], [end_time],
                    SUM([duration_minutes]) OVER(ORDER BY [end_time] ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS [Elapsed Time (min)]
                FROM {0}.[Intervals]
            ) AS bIn
                ON bIn.[interval_id] = bWS.[interval_id]
            LEFT JOIN (
                VALUES
                 (''HADR_AG_MUTEX'',''Replication'',0)
                ,(''HADR_AR_CRITICAL_SECTION_ENTRY'',''Replication'',0)
                ,(''HADR_AR_MANAGER_MUTEX'',''Replication'',0)
,(''HADR_AR_UNLOAD_COMPLETED'',''Replication'',0) ,(''HADR_ARCONTROLLER_NOTIFICATIONS_SUBSCRIBER_LIST'',''Replication'',0) ,(''HADR_BACKUP_BULK_LOCK'',''Replication'',0) ,(''HADR_BACKUP_QUEUE'',''Replication'',0) ,(''HADR_CLUSAPI_CALL'',''Replication'',0) ,(''HADR_COMPRESSED_CACHE_SYNC'',''Replication'',0) ,(''HADR_CONNECTIVITY_INFO'',''Replication'',0) ,(''HADR_DATABASE_FLOW_CONTROL'',''Replication'',0) ,(''HADR_DATABASE_VERSIONING_STATE'',''Replication'',0) ,(''HADR_DATABASE_WAIT_FOR_RECOVERY'',''Replication'',0) ,(''HADR_DATABASE_WAIT_FOR_RESTART'',''Replication'',0) ,(''HADR_DATABASE_WAIT_FOR_TRANSITION_TO_VERSIONING'',''Replication'',0) ,(''HADR_DB_COMMAND'',''Replication'',0) ,(''HADR_DB_OP_COMPLETION_SYNC'',''Replication'',0) ,(''HADR_DB_OP_START_SYNC'',''Replication'',0) ,(''HADR_DBR_SUBSCRIBER'',''Replication'',0) ,(''HADR_DBR_SUBSCRIBER_FILTER_LIST'',''Replication'',0) ,(''HADR_DBSEEDING'',''Replication'',0) ,(''HADR_DBSEEDING_LIST'',''Replication'',0) ,(''HADR_DBSTATECHANGE_SYNC'',''Replication'',0) ,(''HADR_FABRIC_CALLBACK'',''Replication'',0) ,(''HADR_FILESTREAM_BLOCK_FLUSH'',''Replication'',0) ,(''HADR_FILESTREAM_FILE_CLOSE'',''Replication'',0) ,(''HADR_FILESTREAM_FILE_REQUEST'',''Replication'',0) ,(''HADR_FILESTREAM_IOMGR'',''Replication'',0) ,(''HADR_FILESTREAM_IOMGR_IOCOMPLETION'',''Replication'',0) ,(''HADR_FILESTREAM_MANAGER'',''Replication'',0) ,(''HADR_FILESTREAM_PREPROC'',''Replication'',0) ,(''HADR_GROUP_COMMIT'',''Replication'',0) ,(''HADR_LOGCAPTURE_SYNC'',''Replication'',0) ,(''HADR_LOGCAPTURE_WAIT'',''Replication'',0) ,(''HADR_LOGPROGRESS_SYNC'',''Replication'',0) ,(''HADR_NOTIFICATION_DEQUEUE'',''Replication'',0) ,(''HADR_NOTIFICATION_WORKER_EXCLUSIVE_ACCESS'',''Replication'',0) ,(''HADR_NOTIFICATION_WORKER_STARTUP_SYNC'',''Replication'',0) ,(''HADR_NOTIFICATION_WORKER_TERMINATION_SYNC'',''Replication'',0) ,(''HADR_PARTNER_SYNC'',''Replication'',0) ,(''HADR_READ_ALL_NETWORKS'',''Replication'',0) 
,(''HADR_RECOVERY_WAIT_FOR_CONNECTION'',''Replication'',0) ,(''HADR_RECOVERY_WAIT_FOR_UNDO'',''Replication'',0) ,(''HADR_REPLICAINFO_SYNC'',''Replication'',0) ,(''HADR_SEEDING_CANCELLATION'',''Replication'',0) ,(''HADR_SEEDING_FILE_LIST'',''Replication'',0) ,(''HADR_SEEDING_LIMIT_BACKUPS'',''Replication'',0) ,(''HADR_SEEDING_SYNC_COMPLETION'',''Replication'',0) ,(''HADR_SEEDING_TIMEOUT_TASK'',''Replication'',0) ,(''HADR_SEEDING_WAIT_FOR_COMPLETION'',''Replication'',0) ,(''HADR_SYNC_COMMIT'',''Replication'',0) ,(''HADR_SYNCHRONIZING_THROTTLE'',''Replication'',0) ,(''HADR_TDS_LISTENER_SYNC'',''Replication'',0) ,(''HADR_TDS_LISTENER_SYNC_PROCESSING'',''Replication'',0) ,(''HADR_THROTTLE_LOG_RATE_GOVERNOR'',''Log Rate Governor'',0) ,(''HADR_TIMER_TASK'',''Replication'',0) ,(''HADR_TRANSPORT_DBRLIST'',''Replication'',0) ,(''HADR_TRANSPORT_FLOW_CONTROL'',''Replication'',0) ,(''HADR_TRANSPORT_SESSION'',''Replication'',0) ,(''HADR_WORK_POOL'',''Replication'',0) ,(''HADR_WORK_QUEUE'',''Replication'',0) ,(''HADR_XRF_STACK_ACCESS'',''Replication'',0) ,(''INSTANCE_LOG_RATE_GOVERNOR'',''Log Rate Governor'',0) ,(''BROKER_TASK_SUBMIT'',''Service Broker'',0) ,(''BROKER_TO_FLUSH'',''Service Broker'',0) ,(''BROKER_TRANSMISSION_OBJECT'',''Service Broker'',0) ,(''BROKER_TRANSMISSION_TABLE'',''Service Broker'',0) ,(''BROKER_TRANSMISSION_WORK'',''Service Broker'',0) ,(''BROKER_FORWARDER'',''Service Broker'',0) ,(''CXCONSUMER'',''Parallelism'',0) ,(''DTCNEW_ENLIST'',''Transaction'',0) ,(''DTCNEW_PREPARE'',''Transaction'',0) ,(''DTCNEW_RECOVERY'',''Transaction'',0) ,(''DTCNEW_TM'',''Transaction'',0) ,(''DTCNEW_TRANSACTION_ENLISTMENT'',''Transaction'',0) ,(''DTCPNTSYNC'',''Transaction'',0) ,(''BROKER_DISPATCHER'',''Service Broker'',0) ,(''BROKER_SERVICE'',''Service Broker'',0) ,(''BROKER_START'',''Service Broker'',0) ,(''BROKER_TASK_SHUTDOWN'',''Service Broker'',0) ,(''EXTERNAL_SCRIPT_NETWORK_IOF'',''Network IO'',0) ,(''FT_COMPROWSET_RWLOCK'',''Full Text Search'',0) 
,(''FT_IFTS_RWLOCK'',''Full Text Search'',0) ,(''FT_IFTS_SCHEDULER_IDLE_WAIT'',''Idle'',0) ,(''FT_IFTSHC_MUTEX'',''Full Text Search'',0) ,(''FT_IFTSISM_MUTEX'',''Full Text Search'',0) ,(''FT_MASTER_MERGE'',''Full Text Search'',0) ,(''FT_MASTER_MERGE_COORDINATOR'',''Full Text Search'',0) ,(''FT_METADATA_MUTEX'',''Full Text Search'',0) ,(''FT_PROPERTYLIST_CACHE'',''Full Text Search'',0) ,(''IO_QUEUE_LIMIT'',''Other Disk IO'',0) ,(''IO_RETRY'',''Other Disk IO'',0) ,(''POOL_LOG_RATE_GOVERNOR'',''Log Rate Governor'',0) ,(''PREEMPTIVE_ABR'',''Preemptive'',0) ,(''PREEMPTIVE_CLOSEBACKUPMEDIA'',''Preemptive'',0) ,(''PREEMPTIVE_CLOSEBACKUPTAPE'',''Preemptive'',0) ,(''PREEMPTIVE_CLOSEBACKUPVDIDEVICE'',''Preemptive'',0) ,(''PREEMPTIVE_CLUSAPI_CLUSTERRESOURCECONTROL'',''Preemptive'',0) ,(''PREEMPTIVE_COM_COCREATEINSTANCE'',''Preemptive'',0) ,(''PREEMPTIVE_COM_COGETCLASSOBJECT'',''Preemptive'',0) ,(''PREEMPTIVE_COM_CREATEACCESSOR'',''Preemptive'',0) ,(''PREEMPTIVE_COM_DELETEROWS'',''Preemptive'',0) ,(''PREEMPTIVE_COM_GETCOMMANDTEXT'',''Preemptive'',0) ,(''PREEMPTIVE_COM_GETDATA'',''Preemptive'',0) ,(''PREEMPTIVE_COM_GETNEXTROWS'',''Preemptive'',0) ,(''PREEMPTIVE_COM_GETRESULT'',''Preemptive'',0) ,(''PREEMPTIVE_COM_GETROWSBYBOOKMARK'',''Preemptive'',0) ,(''PREEMPTIVE_COM_LBFLUSH'',''Preemptive'',0) ,(''PREEMPTIVE_COM_LBLOCKREGION'',''Preemptive'',0) ,(''PREEMPTIVE_COM_LBREADAT'',''Preemptive'',0) ,(''PREEMPTIVE_COM_LBSETSIZE'',''Preemptive'',0) ,(''PREEMPTIVE_COM_LBSTAT'',''Preemptive'',0) ,(''PREEMPTIVE_COM_LBUNLOCKREGION'',''Preemptive'',0) ,(''PREEMPTIVE_COM_LBWRITEAT'',''Preemptive'',0) ,(''PREEMPTIVE_COM_QUERYINTERFACE'',''Preemptive'',0) ,(''PREEMPTIVE_COM_RELEASE'',''Preemptive'',0) ,(''PREEMPTIVE_COM_RELEASEACCESSOR'',''Preemptive'',0) ,(''PREEMPTIVE_COM_RELEASEROWS'',''Preemptive'',0) ,(''PREEMPTIVE_COM_RELEASESESSION'',''Preemptive'',0) ,(''PREEMPTIVE_COM_RESTARTPOSITION'',''Preemptive'',0) ,(''PREEMPTIVE_COM_SEQSTRMREAD'',''Preemptive'',0) 
,(''PREEMPTIVE_COM_SEQSTRMREADANDWRITE'',''Preemptive'',0) ,(''PREEMPTIVE_COM_SETDATAFAILURE'',''Preemptive'',0) ,(''PREEMPTIVE_COM_SETPARAMETERINFO'',''Preemptive'',0) ,(''PREEMPTIVE_COM_SETPARAMETERPROPERTIES'',''Preemptive'',0) ,(''PREEMPTIVE_COM_STRMLOCKREGION'',''Preemptive'',0) ,(''PREEMPTIVE_COM_STRMSEEKANDREAD'',''Preemptive'',0) ,(''PREEMPTIVE_COM_STRMSEEKANDWRITE'',''Preemptive'',0) ,(''PREEMPTIVE_COM_STRMSETSIZE'',''Preemptive'',0) ,(''PREEMPTIVE_COM_STRMSTAT'',''Preemptive'',0) ,(''PREEMPTIVE_COM_STRMUNLOCKREGION'',''Preemptive'',0) ,(''PREEMPTIVE_CONSOLEWRITE'',''Preemptive'',0) ,(''PREEMPTIVE_CREATEPARAM'',''Preemptive'',0) ,(''PREEMPTIVE_DEBUG'',''Preemptive'',0) ,(''PREEMPTIVE_DFSADDLINK'',''Preemptive'',0) ,(''PREEMPTIVE_DFSLINKEXISTCHECK'',''Preemptive'',0) ,(''PREEMPTIVE_DFSLINKHEALTHCHECK'',''Preemptive'',0) ,(''PREEMPTIVE_DFSREMOVELINK'',''Preemptive'',0) ,(''PREEMPTIVE_DFSREMOVEROOT'',''Preemptive'',0) ,(''PREEMPTIVE_DFSROOTFOLDERCHECK'',''Preemptive'',0) ,(''PREEMPTIVE_DFSROOTINIT'',''Preemptive'',0) ,(''PREEMPTIVE_DFSROOTSHARECHECK'',''Preemptive'',0) ,(''PREEMPTIVE_DTC_ABORT'',''Preemptive'',0) ,(''PREEMPTIVE_DTC_ABORTREQUESTDONE'',''Preemptive'',0) ,(''PREEMPTIVE_DTC_BEGINTRANSACTION'',''Preemptive'',0) ,(''PREEMPTIVE_DTC_COMMITREQUESTDONE'',''Preemptive'',0) ,(''PREEMPTIVE_DTC_ENLIST'',''Preemptive'',0) ,(''PREEMPTIVE_DTC_PREPAREREQUESTDONE'',''Preemptive'',0) ,(''PREEMPTIVE_FILESIZEGET'',''Preemptive'',0) ,(''PREEMPTIVE_FSAOLEDB_ABORTTRANSACTION'',''Preemptive'',0) ,(''PREEMPTIVE_FSAOLEDB_COMMITTRANSACTION'',''Preemptive'',0) ,(''PREEMPTIVE_FSAOLEDB_STARTTRANSACTION'',''Preemptive'',0) ,(''PREEMPTIVE_FSRECOVER_UNCONDITIONALUNDO'',''Preemptive'',0) ,(''PREEMPTIVE_GETRMINFO'',''Preemptive'',0) ,(''PREEMPTIVE_HADR_LEASE_MECHANISM'',''Preemptive'',0) ,(''PREEMPTIVE_HTTP_EVENT_WAIT'',''Preemptive'',0) ,(''PREEMPTIVE_HTTP_REQUEST'',''Preemptive'',0) ,(''PREEMPTIVE_LOCKMONITOR'',''Preemptive'',0) ,(''PREEMPTIVE_MSS_RELEASE'',''Preemptive'',0) 
,(''PREEMPTIVE_ODBCOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OLE_UNINIT'',''Preemptive'',0) ,(''PREEMPTIVE_OLEDB_ABORTORCOMMITTRAN'',''Preemptive'',0) ,(''PREEMPTIVE_OLEDB_ABORTTRAN'',''Preemptive'',0) ,(''PREEMPTIVE_OLEDB_GETDATASOURCE'',''Preemptive'',0) ,(''PREEMPTIVE_OLEDB_GETLITERALINFO'',''Preemptive'',0) ,(''PREEMPTIVE_OLEDB_GETPROPERTIES'',''Preemptive'',0) ,(''PREEMPTIVE_OLEDB_GETPROPERTYINFO'',''Preemptive'',0) ,(''PREEMPTIVE_OLEDB_GETSCHEMALOCK'',''Preemptive'',0) ,(''PREEMPTIVE_OLEDB_JOINTRANSACTION'',''Preemptive'',0) ,(''PREEMPTIVE_OLEDB_RELEASE'',''Preemptive'',0) ,(''PREEMPTIVE_OLEDB_SETPROPERTIES'',''Preemptive'',0) ,(''PREEMPTIVE_OLEDBOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_ACCEPTSECURITYCONTEXT'',''Preemptive'',0) ,(''PREEMPTIVE_OS_ACQUIRECREDENTIALSHANDLE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_AUTHENTICATIONOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_AUTHORIZATIONOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_AUTHZGETINFORMATIONFROMCONTEXT'',''Preemptive'',0) ,(''PREEMPTIVE_OS_AUTHZINITIALIZECONTEXTFROMSID'',''Preemptive'',0) ,(''PREEMPTIVE_OS_AUTHZINITIALIZERESOURCEMANAGER'',''Preemptive'',0) ,(''PREEMPTIVE_OS_BACKUPREAD'',''Preemptive'',0) ,(''PREEMPTIVE_OS_CLOSEHANDLE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_CLUSTEROPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_COMOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_COMPLETEAUTHTOKEN'',''Preemptive'',0) ,(''PREEMPTIVE_OS_COPYFILE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_CREATEDIRECTORY'',''Preemptive'',0) ,(''PREEMPTIVE_OS_CREATEFILE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_CRYPTACQUIRECONTEXT'',''Preemptive'',0) ,(''PREEMPTIVE_OS_CRYPTIMPORTKEY'',''Preemptive'',0) ,(''PREEMPTIVE_OS_CRYPTOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_DECRYPTMESSAGE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_DELETEFILE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_DELETESECURITYCONTEXT'',''Preemptive'',0) ,(''PREEMPTIVE_OS_DEVICEIOCONTROL'',''Preemptive'',0) ,(''PREEMPTIVE_OS_DEVICEOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_DIRSVC_NETWORKOPS'',''Preemptive'',0) 
,(''PREEMPTIVE_OS_DISCONNECTNAMEDPIPE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_DOMAINSERVICESOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_DSGETDCNAME'',''Preemptive'',0) ,(''PREEMPTIVE_OS_DTCOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_ENCRYPTMESSAGE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_FILEOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_FINDFILE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_FLUSHFILEBUFFERS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_FORMATMESSAGE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_FREECREDENTIALSHANDLE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_FREELIBRARY'',''Preemptive'',0) ,(''PREEMPTIVE_OS_GENERICOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_GETADDRINFO'',''Preemptive'',0) ,(''PREEMPTIVE_OS_GETCOMPRESSEDFILESIZE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_GETDISKFREESPACE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_GETFILEATTRIBUTES'',''Preemptive'',0) ,(''PREEMPTIVE_OS_GETFILESIZE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_GETFINALFILEPATHBYHANDLE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_GETLONGPATHNAME'',''Preemptive'',0) ,(''PREEMPTIVE_OS_GETPROCADDRESS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_GETVOLUMENAMEFORVOLUMEMOUNTPOINT'',''Preemptive'',0) ,(''PREEMPTIVE_OS_GETVOLUMEPATHNAME'',''Preemptive'',0) ,(''PREEMPTIVE_OS_INITIALIZESECURITYCONTEXT'',''Preemptive'',0) ,(''PREEMPTIVE_OS_LIBRARYOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_LOADLIBRARY'',''Preemptive'',0) ,(''PREEMPTIVE_OS_LOGONUSER'',''Preemptive'',0) ,(''PREEMPTIVE_OS_LOOKUPACCOUNTSID'',''Preemptive'',0) ,(''PREEMPTIVE_OS_MESSAGEQUEUEOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_MOVEFILE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_NETGROUPGETUSERS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_NETLOCALGROUPGETMEMBERS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_NETUSERGETGROUPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_NETUSERGETLOCALGROUPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_NETUSERMODALSGET'',''Preemptive'',0) ,(''PREEMPTIVE_OS_NETVALIDATEPASSWORDPOLICY'',''Preemptive'',0) ,(''PREEMPTIVE_OS_NETVALIDATEPASSWORDPOLICYFREE'',''Preemptive'',0) 
,(''PREEMPTIVE_OS_OPENDIRECTORY'',''Preemptive'',0) ,(''PREEMPTIVE_OS_PDH_WMI_INIT'',''Preemptive'',0) ,(''PREEMPTIVE_OS_PIPEOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_PROCESSOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_QUERYCONTEXTATTRIBUTES'',''Preemptive'',0) ,(''PREEMPTIVE_OS_QUERYREGISTRY'',''Preemptive'',0) ,(''PREEMPTIVE_OS_QUERYSECURITYCONTEXTTOKEN'',''Preemptive'',0) ,(''PREEMPTIVE_OS_REMOVEDIRECTORY'',''Preemptive'',0) ,(''PREEMPTIVE_OS_REPORTEVENT'',''Preemptive'',0) ,(''PREEMPTIVE_OS_REVERTTOSELF'',''Preemptive'',0) ,(''PREEMPTIVE_OS_RSFXDEVICEOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_SECURITYOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_SERVICEOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_SETENDOFFILE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_SETFILEPOINTER'',''Preemptive'',0) ,(''PREEMPTIVE_OS_SETFILEVALIDDATA'',''Preemptive'',0) ,(''PREEMPTIVE_OS_SETNAMEDSECURITYINFO'',''Preemptive'',0) ,(''PREEMPTIVE_OS_SQLCLROPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_SQMLAUNCH'',''Preemptive'',0) ,(''PREEMPTIVE_OS_VERIFYSIGNATURE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_VERIFYTRUST'',''Preemptive'',0) ,(''PREEMPTIVE_OS_VSSOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_WAITFORSINGLEOBJECT'',''Preemptive'',0) ,(''PREEMPTIVE_OS_WINSOCKOPS'',''Preemptive'',0) ,(''PREEMPTIVE_OS_WRITEFILE'',''Preemptive'',0) ,(''PREEMPTIVE_OS_WRITEFILEGATHER'',''Preemptive'',0) ,(''PREEMPTIVE_OS_WSASETLASTERROR'',''Preemptive'',0) ,(''PREEMPTIVE_REENLIST'',''Preemptive'',0) ,(''PREEMPTIVE_RESIZELOG'',''Preemptive'',0) ,(''PREEMPTIVE_ROLLFORWARDREDO'',''Preemptive'',0) ,(''PREEMPTIVE_ROLLFORWARDUNDO'',''Preemptive'',0) ,(''PREEMPTIVE_SB_STOPENDPOINT'',''Preemptive'',0) ,(''PREEMPTIVE_SERVER_STARTUP'',''Preemptive'',0) ,(''PREEMPTIVE_SETRMINFO'',''Preemptive'',0) ,(''PREEMPTIVE_SHAREDMEM_GETDATA'',''Preemptive'',0) ,(''PREEMPTIVE_SNIOPEN'',''Preemptive'',0) ,(''PREEMPTIVE_SOSHOST'',''Preemptive'',0) ,(''PREEMPTIVE_SOSTESTING'',''Preemptive'',0) ,(''PREEMPTIVE_SP_SERVER_DIAGNOSTICS'',''Preemptive'',0) 
,(''PREEMPTIVE_STARTRM'',''Preemptive'',0) ,(''PREEMPTIVE_STREAMFCB_CHECKPOINT'',''Preemptive'',0) ,(''PREEMPTIVE_STREAMFCB_RECOVER'',''Preemptive'',0) ,(''PREEMPTIVE_STRESSDRIVER'',''Preemptive'',0) ,(''PREEMPTIVE_TESTING'',''Preemptive'',0) ,(''PREEMPTIVE_TRANSIMPORT'',''Preemptive'',0) ,(''PREEMPTIVE_UNMARSHALPROPAGATIONTOKEN'',''Preemptive'',0) ,(''PREEMPTIVE_VSS_CREATESNAPSHOT'',''Preemptive'',0) ,(''PREEMPTIVE_VSS_CREATEVOLUMESNAPSHOT'',''Preemptive'',0) ,(''PREEMPTIVE_XE_CALLBACKEXECUTE'',''Preemptive'',0) ,(''PREEMPTIVE_XE_CX_FILE_OPEN'',''Preemptive'',0) ,(''PREEMPTIVE_XE_CX_HTTP_CALL'',''Preemptive'',0) ,(''PREEMPTIVE_XE_DISPATCHER'',''Preemptive'',0) ,(''PREEMPTIVE_XE_ENGINEINIT'',''Preemptive'',0) ,(''PREEMPTIVE_XE_GETTARGETSTATE'',''Preemptive'',0) ,(''PREEMPTIVE_XE_SESSIONCOMMIT'',''Preemptive'',0) ,(''PREEMPTIVE_XE_TARGETFINALIZE'',''Preemptive'',0) ,(''PREEMPTIVE_XE_TARGETINIT'',''Preemptive'',0) ,(''PREEMPTIVE_XE_TIMERRUN'',''Preemptive'',0) ,(''PREEMPTIVE_XETESTING'',''Preemptive'',0) ,(''PWAIT_HADR_ACTION_COMPLETED'',''Replication'',0) ,(''PWAIT_HADR_CHANGE_NOTIFIER_TERMINATION_SYNC'',''Replication'',0) ,(''PWAIT_HADR_CLUSTER_INTEGRATION'',''Replication'',0) ,(''PWAIT_HADR_FAILOVER_COMPLETED'',''Replication'',0) ,(''PWAIT_HADR_JOIN'',''Replication'',0) ,(''PWAIT_HADR_OFFLINE_COMPLETED'',''Replication'',0) ,(''PWAIT_HADR_ONLINE_COMPLETED'',''Replication'',0) ,(''PWAIT_HADR_POST_ONLINE_COMPLETED'',''Replication'',0) ,(''PWAIT_HADR_SERVER_READY_CONNECTIONS'',''Replication'',0) ,(''PWAIT_HADR_WORKITEM_COMPLETED'',''Replication'',0) ,(''PWAIT_HADRSIM'',''Replication'',0) ,(''PWAIT_RESOURCE_SEMAPHORE_FT_PARALLEL_QUERY_SYNC'',''Full Text Search'',0) ,(''LCK_M_BU_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_BU_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_IS_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_IS_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_IU_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_IU_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_IX_ABORT_BLOCKERS'',''Lock'',0) 
,(''LCK_M_IX_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_RIn_NL_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_RIn_NL_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_RIn_S_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_RIn_S_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_RIn_U_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_RIn_U_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_RIn_X_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_RIn_X_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_RS_S_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_RS_S_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_RS_U_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_RS_U_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_RX_S_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_RX_S_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_RX_U_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_RX_U_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_RX_X_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_RX_X_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_S_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_S_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_SCH_M_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_SCH_M_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_SCH_S_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_SCH_S_LOW_PRIORITY'',''Lock'',0) ,(''MEMORY_ALLOCATION_EXT'',''Memory'',0) ,(''MEMORY_GRANT_UPDATE'',''Memory'',0) ,(''LCK_M_SIU_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_SIU_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_SIX_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_SIX_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_U_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_U_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_UIX_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_UIX_LOW_PRIORITY'',''Lock'',0) ,(''LCK_M_X_ABORT_BLOCKERS'',''Lock'',0) ,(''LCK_M_X_LOW_PRIORITY'',''Lock'',0) ,(''LOGMGR_PMM_LOG'',''Tran Log IO'',0) ,(''REPL_HISTORYCACHE_ACCESS'',''Replication'',0) ,(''REPL_TRANFSINFO_ACCESS'',''Replication'',0) ,(''REPL_TRANHASHTABLE_ACCESS'',''Replication'',0) ,(''REPL_TRANTEXTINFO_ACCESS'',''Replication'',0) ,(''RESERVED_MEMORY_ALLOCATION_EXT'',''Memory'',0) ,(''SLEEP_MASTERDBREADY'',''Idle'',0) ,(''SLEEP_MASTERMDREADY'',''Idle'',0) ,(''SLEEP_MASTERUPGRADED'',''Idle'',0) 
,(''SLEEP_MEMORYPOOL_ALLOCATEPAGES'',''Idle'',0) ,(''SLEEP_RETRY_VIRTUALALLOC'',''Idle'',0) ,(''SQLTRACE_FILE_BUFFER'',''Tracing'',0) ,(''SQLTRACE_FILE_READ_IO_COMPLETION'',''Tracing'',0) ,(''SQLTRACE_FILE_WRITE_IO_COMPLETION'',''Tracing'',0) ,(''SQLTRACE_INCREMENTAL_FLUSH_SLEEP'',''Idle'',0) ,(''SQLTRACE_PENDING_BUFFER_WRITERS'',''Tracing'',0) ,(''TRACE_EVTNOTIF'',''Tracing'',0) ,(''WRITE_COMPLETION'',''Other Disk IO'',0) ,(''SLEEP_WORKSPACE_ALLOCATEPAGE'',''Idle'',0) ,(''SLEEP_BUFFERPOOL_HELPLW'',''Idle'',0) ,(''ABR'',''Other'',0) ,(''ASSEMBLY_LOAD'',''SQLCLR'',0) ,(''ASYNC_DISKPOOL_LOCK'',''Buffer I/O'',0) ,(''ASYNC_IO_COMPLETION'',''Other Disk IO'',0) ,(''ASYNC_NETWORK_IO'',''Network IO'',0) ,(''BACKUP'',''Backup'',0) ,(''BACKUP_CLIENTLOCK'',''Backup'',0) ,(''BACKUP_OPERATOR'',''Backup'',0) ,(''BACKUPBUFFER'',''Backup'',0) ,(''BACKUPIO'',''Other Disk IO'',0) ,(''BACKUPTHREAD'',''Backup'',0) ,(''BAD_PAGE_PROCESS'',''Other'',0) ,(''BROKER_CONNECTION_RECEIVE_TASK'',''Service Broker'',0) ,(''BROKER_ENDPOINT_STATE_MUTEX'',''Service Broker'',0) ,(''BROKER_EVENTHANDLER'',''Service Broker'',1) ,(''BROKER_INIT'',''Service Broker'',0) ,(''BROKER_MASTERSTART'',''Service Broker'',0) ,(''BROKER_RECEIVE_WAITFOR'',''User Wait'',1) ,(''BROKER_REGISTERALLENDPOINTS'',''Service Broker'',0) ,(''BROKER_SHUTDOWN'',''Service Broker'',0) ,(''BROKER_TASK_STOP'',''Service Broker'',0) ,(''BROKER_TRANSMITTER'',''Service Broker'',1) ,(''BUILTIN_HASHKEY_MUTEX'',''Other'',0) ,(''CHECK_PRINT_RECORD'',''Other'',0) ,(''CHECKPOINT_QUEUE'',''Idle'',1) ,(''CHKPT'',''Tran Log IO'',1) ,(''CLR_AUTO_EVENT'',''SQL CLR'',1) ,(''CLR_CRST'',''SQL CLR'',0) ,(''CLR_JOIN'',''SQL CLR'',0) ,(''CLR_MANUAL_EVENT'',''SQL CLR'',1) ,(''CLR_MEMORY_SPY'',''SQL CLR'',0) ,(''CLR_MONITOR'',''SQL CLR'',0) ,(''CLR_RWLOCK_READER'',''SQL CLR'',0) ,(''CLR_RWLOCK_WRITER'',''SQL CLR'',0) ,(''CLR_SEMAPHORE'',''SQL CLR'',0) ,(''CLR_TASK_START'',''SQL CLR'',0) ,(''CLRHOST_STATE_ACCESS'',''SQL CLR'',0) 
,(''CMEMPARTITIONED'',''Memory'',0) ,(''CMEMTHREAD'',''Memory'',0) ,(''CPU'',''CPU'',0) ,(''CURSOR'',''Other'',0) ,(''CURSOR_ASYNC'',''Other'',0) ,(''CXPACKET'',''Parallelism'',1) ,(''DAC_INIT'',''Other'',0) ,(''DBCC_COLUMN_TRANSLATION_CACHE'',''Other'',0) ,(''DBMIRROR_DBM_EVENT'',''Mirroring'',0) ,(''DBMIRROR_DBM_MUTEX'',''Mirroring'',0) ,(''DBMIRROR_EVENTS_QUEUE'',''Mirroring'',0) ,(''DBMIRROR_SEND'',''Mirroring'',0) ,(''DBMIRROR_WORKER_QUEUE'',''Mirroring'',0) ,(''DBMIRRORING_CMD'',''Mirroring'',0) ,(''DBTABLE'',''Other'',0) ,(''DEADLOCK_ENUM_MUTEX'',''Latch'',0) ,(''DEADLOCK_TASK_SEARCH'',''Other'',0) ,(''DEBUG'',''Other'',0) ,(''DISABLE_VERSIONING'',''Other'',0) ,(''DISKIO_SUSPEND'',''Backup'',0) ,(''DLL_LOADING_MUTEX'',''Other'',0) ,(''DROPTEMP'',''Other'',0) ,(''DTC'',''Transaction'',0) ,(''DTC_ABORT_REQUEST'',''Transaction'',0) ,(''DTC_RESOLVE'',''Transaction'',0) ,(''DTC_STATE'',''Transaction'',0) ,(''DTC_TMDOWN_REQUEST'',''Transaction'',0) ,(''DTC_WAITFOR_OUTCOME'',''Transaction'',0) ,(''DUMP_LOG_COORDINATOR'',''Other'',0) ,(''DUMP_LOG_COORDINATOR_QUEUE'',''Other'',0) ,(''DUMPTRIGGER'',''Other'',0) ,(''EC'',''Other'',0) ,(''EE_PMOLOCK'',''Memory'',0) ,(''EE_SPECPROC_MAP_INIT'',''Other'',0) ,(''ENABLE_VERSIONING'',''Other'',0) ,(''ERROR_REPORTING_MANAGER'',''Other'',0) ,(''EXCHANGE'',''Parallelism'',1) ,(''EXECSYNC'',''Parallelism'',1) ,(''EXECUTION_PIPE_EVENT_INTERNAL'',''Other'',0) ,(''FAILPOINT'',''Other'',0) ,(''FCB_REPLICA_READ'',''Replication'',0) ,(''FCB_REPLICA_WRITE'',''Replication'',0) ,(''FS_GARBAGE_COLLECTOR_SHUTDOWN'',''SQLCLR'',0) ,(''FSAGENT'',''Idle'',1) ,(''FT_RESTART_CRAWL'',''Full Text Search'',0) ,(''FT_RESUME_CRAWL'',''Other'',0) ,(''FULLTEXT GATHERER'',''Full Text Search'',0) ,(''GUARDIAN'',''Other'',0) ,(''HTTP_ENDPOINT_COLLCREATE'',''Other'',0) ,(''HTTP_ENUMERATION'',''Other'',0) ,(''HTTP_START'',''Other'',0) ,(''IMP_IMPORT_MUTEX'',''Other'',0) ,(''IMPPROV_IOWAIT'',''Other'',0) ,(''INDEX_USAGE_STATS_MUTEX'',''Latch'',0) 
,(''INTERNAL_TESTING'',''Other'',0) ,(''IO_AUDIT_MUTEX'',''Other'',0) ,(''IO_COMPLETION'',''Other Disk IO'',0) ,(''KSOURCE_WAKEUP'',''Idle'',1) ,(''KTM_ENLISTMENT'',''Other'',0) ,(''KTM_RECOVERY_MANAGER'',''Other'',0) ,(''KTM_RECOVERY_RESOLUTION'',''Other'',0) ,(''LATCH_DT'',''Latch'',0) ,(''LATCH_EX'',''Latch'',0) ,(''LATCH_KP'',''Latch'',0) ,(''LATCH_NL'',''Latch'',0) ,(''LATCH_SH'',''Latch'',0) ,(''LATCH_UP'',''Latch'',0) ,(''LAZYWRITER_SLEEP'',''Idle'',1) ,(''LCK_M_BU'',''Lock'',0) ,(''LCK_M_IS'',''Lock'',0) ,(''LCK_M_IU'',''Lock'',0) ,(''LCK_M_IX'',''Lock'',0) ,(''LCK_M_RIn_NL'',''Lock'',0) ,(''LCK_M_RIn_S'',''Lock'',0) ,(''LCK_M_RIn_U'',''Lock'',0) ,(''LCK_M_RIn_X'',''Lock'',0) ,(''LCK_M_RS_S'',''Lock'',0) ,(''LCK_M_RS_U'',''Lock'',0) ,(''LCK_M_RX_S'',''Lock'',0) ,(''LCK_M_RX_U'',''Lock'',0) ,(''LCK_M_RX_X'',''Lock'',0) ,(''LCK_M_S'',''Lock'',0) ,(''LCK_M_SCH_M'',''Lock'',0) ,(''LCK_M_SCH_S'',''Lock'',0) ,(''LCK_M_SIU'',''Lock'',0) ,(''LCK_M_SIX'',''Lock'',0) ,(''LCK_M_U'',''Lock'',0) ,(''LCK_M_UIX'',''Lock'',0) ,(''LCK_M_X'',''Lock'',0) ,(''LOGBUFFER'',''Tran Log IO'',0) ,(''LOGMGR'',''Tran Log IO'',0) ,(''LOGMGR_FLUSH'',''Tran Log IO'',0) ,(''LOGMGR_QUEUE'',''Idle'',1) ,(''LOGMGR_RESERVE_APPEND'',''Tran Log IO'',0) ,(''LOWFAIL_MEMMGR_QUEUE'',''Memory'',0) ,(''MIRROR_SEND_MESSAGE'',''Other'',0) ,(''MISCELLANEOUS'',''Other'',0) ,(''MSQL_DQ'',''Network I/O'',0) ,(''MSQL_SYNC_PIPE'',''Other'',0) ,(''MSQL_XACT_MGR_MUTEX'',''Transaction'',0) ,(''MSQL_XACT_MUTEX'',''Transaction'',0) ,(''MSQL_XP'',''Other'',0) ,(''MSSEARCH'',''Full Text Search'',0) ,(''NET_WAITFOR_PACKET'',''Network IO'',0) ,(''OLEDB'',''Network I/O'',0) ,(''ONDEMAND_TASK_QUEUE'',''Idle'',1) ,(''PAGEIOLATCH_DT'',''Buffer IO'',0) ,(''PAGEIOLATCH_EX'',''Buffer IO'',0) ,(''PAGEIOLATCH_KP'',''Buffer IO'',0) ,(''PAGEIOLATCH_NL'',''Buffer IO'',0) ,(''PAGEIOLATCH_SH'',''Buffer IO'',0) ,(''PAGEIOLATCH_UP'',''Buffer IO'',0) ,(''PAGELATCH_DT'',''Buffer Latch'',0) ,(''PAGELATCH_EX'',''Buffer Latch'',0) 
,(''PAGELATCH_KP'',''Buffer Latch'',0) ,(''PAGELATCH_NL'',''Buffer Latch'',0) ,(''PAGELATCH_SH'',''Buffer Latch'',0) ,(''PAGELATCH_UP'',''Buffer Latch'',0) ,(''PARALLEL_BACKUP_QUEUE'',''Other'',0) ,(''PRINT_ROLLBACK_PROGRESS'',''Other'',0) ,(''QNMANAGER_ACQUIRE'',''Other'',0) ,(''QPJOB_KILL'',''Other'',0) ,(''QPJOB_WAITFOR_ABORT'',''Other'',0) ,(''QRY_MEM_GRANT_INFO_MUTEX'',''Other'',0) ,(''QUERY_ERRHDL_SERVICE_DONE'',''Other'',0) ,(''QUERY_EXECUTION_INDEX_SORT_EVENT_OPEN'',''Other'',0) ,(''QUERY_NOTIFICATION_MGR_MUTEX'',''Other'',0) ,(''QUERY_NOTIFICATION_SUBSCRIPTION_MUTEX'',''Other'',0) ,(''QUERY_NOTIFICATION_TABLE_MGR_MUTEX'',''Other'',0) ,(''QUERY_NOTIFICATION_UNITTEST_MUTEX'',''Other'',0) ,(''QUERY_OPTIMIZER_PRINT_MUTEX'',''Other'',0) ,(''QUERY_REMOTE_BRICKS_DONE'',''Other'',0) ,(''QUERY_TRACEOUT'',''Tracing'',0) ,(''RECOVER_CHANGEDB'',''Other'',0) ,(''REPL_CACHE_ACCESS'',''Replication'',0) ,(''REPL_SCHEMA_ACCESS'',''Replication'',0) ,(''REPLICA_WRITES'',''Replication'',0) ,(''REQUEST_DISPENSER_PAUSE'',''Other'',0) ,(''REQUEST_FOR_DEADLOCK_SEARCH'',''Idle'',1) ,(''RESOURCE_QUEUE'',''Idle'',1) ,(''RESOURCE_SEMAPHORE'',''Memory'',0) ,(''RESOURCE_SEMAPHORE_MUTEX'',''Compilation'',0) ,(''RESOURCE_SEMAPHORE_QUERY_COMPILE'',''Compilation'',0) ,(''RESOURCE_SEMAPHORE_SMALL_QUERY'',''Compilation'',0) ,(''SEC_DROP_TEMP_KEY'',''Other'',0) ,(''SEQUENTIAL_GUID'',''Other'',0) ,(''SERVER_IDLE_CHECK'',''Idle'',1) ,(''SHUTDOWN'',''Other'',0) ,(''SLEEP_BPOOL_FLUSH'',''Idle'',1) ,(''SLEEP_DBSTARTUP'',''Idle'',1) ,(''SLEEP_DCOMSTARTUP'',''Idle'',1) ,(''SLEEP_MSDBSTARTUP'',''Idle'',1) ,(''SLEEP_SYSTEMTASK'',''Idle'',1) ,(''SLEEP_TASK'',''Idle'',1) ,(''SLEEP_TEMPDBSTARTUP'',''Idle'',1) ,(''SNI_CRITICAL_SECTION'',''Other'',0) ,(''SNI_HTTP_ACCEPT'',''Idle'',1) ,(''SNI_HTTP_WAITFOR_0_DISCON'',''Other'',0) ,(''SNI_LISTENER_ACCESS'',''Other'',0) ,(''SNI_TASK_COMPLETION'',''Other'',0) ,(''SOAP_READ'',''Full Text Search'',0) ,(''SOAP_WRITE'',''Full Text Search'',0) 
,(''SOS_CALLBACK_REMOVAL'',''Other'',0) ,(''SOS_DISPATCHER_MUTEX'',''Other'',0) ,(''SOS_LOCALALLOCATORLIST'',''Other'',0) ,(''SOS_OBJECT_STORE_DESTROY_MUTEX'',''Other'',0) ,(''SOS_PROCESS_AFFINITY_MUTEX'',''Other'',0) ,(''SOS_RESERVEDMEMBLOCKLIST'',''Memory'',0) ,(''SOS_SCHEDULER_YIELD'',''CPU'',0) ,(''SOS_STACKSTORE_INIT_MUTEX'',''Other'',0) ,(''SOS_SYNC_TASK_ENQUEUE_EVENT'',''Other'',0) ,(''SOS_VIRTUALMEMORY_LOW'',''Memory'',0) ,(''SOSHOST_EVENT'',''Other'',0) ,(''SOSHOST_INTERNAL'',''Other'',0) ,(''SOSHOST_MUTEX'',''Other'',0) ,(''SOSHOST_RWLOCK'',''Other'',0) ,(''SOSHOST_SEMAPHORE'',''Other'',0) ,(''SOSHOST_SLEEP'',''Other'',0) ,(''SOSHOST_TRACELOCK'',''Other'',0) ,(''SOSHOST_WAITFORDONE'',''Other'',0) ,(''SQLCLR_APPDOMAIN'',''SQL CLR'',0) ,(''SQLCLR_ASSEMBLY'',''SQL CLR'',0) ,(''SQLCLR_DEADLOCK_DETECTION'',''SQL CLR'',0) ,(''SQLCLR_QUANTUM_PUNISHMENT'',''SQL CLR'',0) ,(''SQLSORT_NORMMUTEX'',''Other'',0) ,(''SQLSORT_SORTMUTEX'',''Other'',0) ,(''SQLTRACE_BUFFER_FLUSH'',''Idle'',1) ,(''SQLTRACE_LOCK'',''Other'',0) ,(''SQLTRACE_SHUTDOWN'',''Tracing'',0) ,(''SQLTRACE_WAIT_ENTRIES'',''Idle'',0) ,(''SRVPROC_SHUTDOWN'',''Other'',0) ,(''TEMPOBJ'',''Other'',0) ,(''THREADPOOL'',''Worker Thread'',0) ,(''TIMEPRIV_TIMEPERIOD'',''Other'',0) ,(''TRACEWRITE'',''Tracing'',1) ,(''TRAN_MARKLATCH_DT'',''Transaction'',0) ,(''TRAN_MARKLATCH_EX'',''Transaction'',0) ,(''TRAN_MARKLATCH_KP'',''Transaction'',0) ,(''TRAN_MARKLATCH_NL'',''Transaction'',0) ,(''TRAN_MARKLATCH_SH'',''Transaction'',0) ,(''TRAN_MARKLATCH_UP'',''Transaction'',0) ,(''TRANSACTION_MUTEX'',''Transaction'',0) ,(''UTIL_PAGE_ALLOC'',''Memory'',0) ,(''VIA_ACCEPT'',''Other'',0) ,(''VIEW_DEFINITION_MUTEX'',''Latch'',0) ,(''WAIT_FOR_RESULTS'',''User Wait'',1) ,(''WAITFOR'',''User Wait'',1) ,(''WAITFOR_TASKSHUTDOWN'',''Idle'',1) ,(''WAITSTAT_MUTEX'',''Other'',0) ,(''WCC'',''Other'',0) ,(''WORKTBL_DROP'',''Other'',0) ,(''WRITELOG'',''Tran Log IO'',0) ,(''XACT_OWN_TRANSACTION'',''Transaction'',0) 
,(''XACT_RECLAIM_SESSION'',''Transaction'',0) ,(''XACTLOCKINFO'',''Transaction'',0) ,(''XACTWORKSPACE_MUTEX'',''Transaction'',0) ,(''XE_BUFFERMGR_ALLPROCECESSED_EVENT'',''Other'',0) ,(''XE_BUFFERMGR_FREEBUF_EVENT'',''Other'',0) ,(''XE_DISPATCHER_JOIN'',''Other'',0) ,(''XE_DISPATCHER_WAIT'',''Idle'',1) ,(''XE_MODULEMGR_SYNC'',''Other'',0) ,(''XE_OLS_LOCK'',''Other'',0) ,(''XE_SERVICES_MUTEX'',''Other'',0) ,(''XE_SESSION_CREATE_SYNC'',''Other'',0) ,(''XE_SESSION_SYNC'',''Other'',0) ,(''XE_STM_CREATE'',''Other'',0) ,(''XE_TIMER_EVENT'',''Idle'',1) ,(''XE_TIMER_MUTEX'',''Other'',0) ) AS WsCat ([Wait Type],[Wait Category],[Ignore]) ON bWS.[wait_type] = WsCat.[Wait Type] ' IF @baselineSchema IS NOT NULL BEGIN SET @sql = REPLACE(@PowerBI_WaitStats, '{0}', @baselineSchema); EXEC(@sql); END IF @replaySchema IS NOT NULL BEGIN SET @sql = REPLACE(@PowerBI_WaitStats, '{0}', @replaySchema); EXEC(@sql); END --=========================================================== DECLARE @PowerBI_Time nvarchar(max) = N' ALTER VIEW {0}.[PowerBI_Time] AS SELECT SUM([duration_minutes]) OVER(ORDER BY [end_time] ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS [Elapsed Time (min)] FROM {0}.[Intervals] ' IF @baselineSchema IS NOT NULL BEGIN SET @sql = REPLACE(@PowerBI_Time, '{0}', @baselineSchema); EXEC(@sql); END IF @replaySchema IS NOT NULL BEGIN SET @sql = REPLACE(@PowerBI_Time, '{0}', @replaySchema); EXEC(@sql); END COMMIT; END TRY BEGIN CATCH DECLARE @ErrorMessage NVARCHAR(4000) DECLARE @ErrorSeverity INT DECLARE @ErrorState INT SELECT @ErrorMessage = ERROR_MESSAGE(), @ErrorSeverity = ERROR_SEVERITY(), @ErrorState = ERROR_STATE() IF XACT_STATE() <> 0 ROLLBACK TRAN RAISERROR ( @ErrorMessage, @ErrorSeverity, @ErrorState) END CATCH END ================================================ FILE: WorkloadTools/Consumer/BufferedWorkloadConsumer.cs ================================================ using System; using System.Collections.Concurrent; using System.Collections.Generic; using 
System.Linq; using System.Text; using System.Threading; using System.Threading.Tasks; using NLog;

namespace WorkloadTools.Consumer
{
    // Base class for consumers that decouple event ingestion from processing:
    // Consume() enqueues events into an in-memory queue and a single background
    // task drains it by calling the subclass's ConsumeBuffered().
    public abstract class BufferedWorkloadConsumer : WorkloadConsumer
    {
        private static readonly Logger logger = LogManager.GetCurrentClassLogger();

        // Set by Dispose(); signals the background reader loop to exit.
        protected bool stopped = false;

        // Shared queue between the producer (Consume) and the reader task.
        protected ConcurrentQueue Buffer { get; set; } = new ConcurrentQueue();

        // Lazily-started task that runs ProcessBuffer().
        // NOTE(review): creation below is not synchronized, so two concurrent
        // Consume() callers could race and start two readers — confirm Consume
        // is only ever invoked from a single thread.
        protected Task BufferReader { get; set; }

        private SpinWait spin = new SpinWait();

        // Maximum number of events held before the producer blocks (backpressure).
        public int BufferSize { get; set; } = 100000;

        // Enqueues one event, spinning while the buffer is at capacity, and
        // starts the background reader on first use. Null events are ignored.
        public override sealed void Consume(WorkloadEvent evt)
        {
            if (evt == null) { return; }

            // Ensure that the buffer does not get too big:
            // busy-wait until the reader task has drained below BufferSize.
            while (Buffer.Count >= BufferSize)
            {
                logger.Trace("Buffer is full so spinning");
                spin.SpinOnce();
            }

            // If the buffer has room, enqueue the event
            logger.Trace("Adding event {eventType} with start time {startTime:yyyy-MM-ddTHH\\:mm\\:ss.fffffff} to buffer", evt.Type, evt.StartTime);
            Buffer.Enqueue(evt);

            if (BufferReader == null)
            {
                BufferReader = Task.Factory.StartNew(() => ProcessBuffer());
            }
        }

        // Drains the buffer until stopped, dispatching each event to
        // ConsumeBuffered. Exceptions are logged and rethrown, which faults
        // the reader task.
        protected void ProcessBuffer()
        {
            while (!stopped)
            {
                try
                {
                    WorkloadEvent evt;
                    // Spin until an event is available, bailing out when stopped.
                    while (!Buffer.TryDequeue(out evt))
                    {
                        if (stopped) { return; }
                        spin.SpinOnce();
                    }
                    if (evt == null) { continue; }
                    ConsumeBuffered(evt);
                }
                catch (Exception ex)
                {
                    logger.Error($"Error occurred consuming buffered events: {ex.Message}.");
                    throw;
                }
            }
        }

        // Stops the reader loop; subclasses extend this to release their resources.
        protected override void Dispose(bool disposing)
        {
            stopped = true;
        }

        // Implemented by subclasses to process one dequeued event.
        public abstract void ConsumeBuffered(WorkloadEvent evt);
    }
}

================================================
FILE: WorkloadTools/Consumer/Replay/ReplayCommand.cs
================================================

using System; using System.Collections.Generic; using System.Linq; using System.Text;

namespace WorkloadTools.Consumer.Replay
{
    // One replayable unit of work: the captured command text plus the context
    // (database, application, timing) needed to re-execute it on the target.
    public class ReplayCommand
    {
        public string CommandText { get; set; }
        public string Database { get; set; }
        public string ApplicationName { get; set; }
        // Requested delay from the start of the replay, in milliseconds.
        public double ReplayOffset { get; set; } = 0; // milliseconds
        public DateTime
StartTime { get; set; }

        // Sequence number of the captured event; used to correlate log lines
        // with the source workload. Nullable: not every source provides one.
        public long? EventSequence { get; set; }
    }
}

================================================
FILE: WorkloadTools/Consumer/Replay/ReplayConsumer.cs
================================================

using System; using System.CodeDom.Compiler; using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Threading; using System.Threading.Tasks; using NLog;

namespace WorkloadTools.Consumer.Replay
{
    // Consumer that replays captured commands against a target server,
    // fanning events out to one ReplayWorker per captured session.
    public class ReplayConsumer : BufferedWorkloadConsumer
    {
        private static readonly Logger logger = LogManager.GetCurrentClassLogger();
        private SpinWait spin = new SpinWait();

        // Upper bound on concurrently executing commands (sizes WorkLimiter).
        public int ThreadLimit = 256; //32
        // A worker idle for longer than this is retired by the sweeper thread.
        public int InactiveWorkerTerminationTimeoutSeconds = 300;
        private readonly Semaphore WorkLimiter;

        public bool DisplayWorkerStats { get; set; } = true;
        public bool ConsumeResults { get; set; } = true;
        public int QueryTimeoutSeconds { get; set; } = 30;
        public int WorkerStatsCommandCount { get; set; } = 1000;
        public bool MimicApplicationName { get; set; } = false;
        public int FailRetryCount { get; set; } = 0;
        public int TimeoutRetryCount { get; set; } = 0;
        public bool RaiseErrorsToSqlEventTracing { get; private set; } = true;
        public bool RelativeDelays { get; set; } = false;

        private LogLevel _CommandErrorLogLevel = LogLevel.Error;
        // String facade over the NLog LogLevel so the value can come from config.
        public string CommandErrorLogLevel
        {
            get => _CommandErrorLogLevel.Name;
            set => _CommandErrorLogLevel = LogLevel.FromString(value);
        }

        // Maps captured database names to target database names.
        public Dictionary DatabaseMap { get; set; } = new Dictionary();
        public SqlConnectionInfo ConnectionInfo { get; set; }
        public ThreadingModeEnum ThreadingMode { get; set; } = ThreadingModeEnum.WorkerTask;

        // One worker per session key (see WorkerKey).
        private readonly ConcurrentDictionary ReplayWorkers = new ConcurrentDictionary();
        private Thread sweeper;

        private long eventCount;
        private long dispatchedEventCount;
        // Replay epoch; set when the first execution event arrives.
        private DateTime startTime = DateTime.MinValue;

        // holds the total number of events to replay
        // only available when reading from a file
        // for realtime replays this is not available
        private
long totalEventCount = 0;

        // holds the number of events that have been executed by the workers
        private long executedEventCount;

        // watchdog: fires if no command has been executed for WatchdogIntervalSeconds
        public int WatchdogIntervalSeconds { get; set; } = 30;
        private Timer watchdogTimer;

        // Strategy used by RunWorker to schedule command execution.
        public enum ThreadingModeEnum : int
        {
            ThreadPools = 1,
            Tasks = 2,
            WorkerTask = 3,
            Serial = 4
        }

        public ReplayConsumer()
        {
            // NOTE(review): the semaphore is sized from ThreadLimit at construction
            // time, so changing ThreadLimit afterwards has no effect on it.
            WorkLimiter = new Semaphore(ThreadLimit, ThreadLimit);
        }

        // Starts (at most once) a periodic timer that logs progress and detects
        // replay completion; disposes itself when everything has been executed.
        private void EnsureWatchdogRunning()
        {
            if (watchdogTimer != null) { return; }
            var intervalMs = WatchdogIntervalSeconds * 1000;
            watchdogTimer = new Timer(
                delegate
                {
                    var current = Interlocked.Read(ref executedEventCount);
                    var dispatched = Interlocked.Read(ref dispatchedEventCount);
                    var bufferedEventCount = ReplayWorkers.Values.Sum(x => x.QueueLength);
                    // Always log on every watchdog tick so the user can see progress
                    // (or lack thereof) at wall-clock intervals, regardless of whether
                    // the event count has crossed a modulus boundary.
                    LogReplayProgress(current, forceLog: true);
                    // Check for completion: all dispatched events have been executed
                    // and nothing is left in any buffer.
                    if (dispatched > 0 && current >= dispatched && Buffer.Count == 0 && bufferedEventCount == 0)
                    {
                        // Stop the watchdog - nothing left to watch.
                        watchdogTimer?.Dispose();
                        watchdogTimer = null;
                    }
                }, null, intervalMs, intervalMs);
        }

        // Builds the dictionary key that maps a captured session to its worker.
        private string WorkerKey(ExecutionWorkloadEvent evnt)
        {
            // In SQL the SPID is only unique while the session is in use.
            // When the SPID is reused it may be for a different database.
            var result = $"{evnt.SPID}_{evnt.DatabaseName}";
            if (MimicApplicationName)
            {
                // When the SPID is reused it may be for a different Host, User or Application
                // but this application can only mimic the Application Name so if we're doing
                // that include that in the key.
result += $"_{evnt.ApplicationName}";
            }
            return result;
        }

        // Routes one buffered event: records counters, builds a ReplayCommand
        // and hands it to the (possibly newly created) worker for its session.
        public override void ConsumeBuffered(WorkloadEvent evnt)
        {
            // A MessageWorkloadEvent may announce the total number of events
            // (file-based replays only); remember it for completion detection.
            if (evnt is MessageWorkloadEvent messageEvent)
            {
                if (messageEvent.MsgType == MessageWorkloadEvent.MessageType.TotalEvents)
                {
                    try
                    {
                        totalEventCount = (long)messageEvent.Value;
                    }
                    catch (Exception e)
                    {
                        logger.Error(e, $"Unable to set the total number of events");
                    }
                }
            }

            // totalEventCount is EVERY Event except the initial MessageWorkloadEvent for the TotalEvents,
            // so always increment the counter.
            eventCount++;

            // Only "starting" execution events are replayed; everything else is bookkeeping.
            if (!(evnt is ExecutionWorkloadEvent)) { return; }
            if (evnt.Type != WorkloadEvent.EventType.RPCStarting && evnt.Type != WorkloadEvent.EventType.BatchStarting) { return; }

            // dispatchedEventCount tracks only the ExecutionWorkloadEvents that have been
            // dispatched to a worker, giving a stable monotonic counter to drive log intervals.
            dispatchedEventCount++;

            if (startTime == DateTime.MinValue)
            {
                // Pad the start time so that the first event isn't behind by the time the worker has started up on a thread.
                startTime = DateTime.Now.AddTicks(TimeSpan.TicksPerSecond);
                logger.Info("All future delays will be calculated from this point + 1s, triggered by event {@event}", evnt);
            }

            var evt = (ExecutionWorkloadEvent)evnt;

            if (stopped) { return; }

            var command = new ReplayCommand()
            {
                CommandText = evt.Text,
                Database = evt.DatabaseName,
                ApplicationName = evt.ApplicationName,
                ReplayOffset = evt.ReplayOffset,
                StartTime = evt.StartTime,
                EventSequence = evt.EventSequence
            };

            var workerKey = WorkerKey(evt);

            if (ReplayWorkers.TryGetValue(workerKey, out var rw))
            {
                // Ensure that the buffer does not get too big:
                // apply backpressure once the worker's own queue hits 90% of BufferSize.
                while (rw.QueueLength >= (BufferSize * .9))
                {
                    spin.SpinOnce();
                }
                if (stopped) { return; }
                rw.AppendCommand(command);
            }
            else
            {
                logger.Debug("Creating Worker {Worker}", workerKey);
                // New session: create a worker configured from this consumer's settings.
                rw = new ReplayWorker(workerKey)
                {
                    ConnectionInfo = ConnectionInfo,
                    ReplayIntervalSeconds = 0,
                    StopOnError = false,
                    DisplayWorkerStats = DisplayWorkerStats,
                    ConsumeResults = ConsumeResults,
                    QueryTimeoutSeconds = QueryTimeoutSeconds,
                    WorkerStatsCommandCount = WorkerStatsCommandCount,
                    MimicApplicationName = MimicApplicationName,
                    DatabaseMap = DatabaseMap,
                    StartTime = startTime,
                    FailRetryCount = FailRetryCount,
                    TimeoutRetryCount = TimeoutRetryCount,
                    CommandErrorLogLevel = _CommandErrorLogLevel,
                    RaiseErrorsToSqlEventTracing = RaiseErrorsToSqlEventTracing,
                    RelativeDelays = RelativeDelays
                };
                rw.CommandExecuted += OnWorkerCommandExecuted;
                rw.AppendCommand(command);
                if (stopped) { return; }
                _ = ReplayWorkers.TryAdd(workerKey, rw);
            }

            // Ensure the worker is running.
            // If new it needs starting for the first time.
            // If existing it may have stopped if the command queue became empty.
RunWorker(rw);

            // Lazily start the background sweeper that retires idle workers.
            if (sweeper == null)
            {
                sweeper = new Thread(new ThreadStart(
                    delegate
                    {
                        try
                        {
                            RunSweeper();
                        }
                        catch (Exception e)
                        {
                            try { logger.Error(e, "Unhandled exception in TraceManager.RunSweeper"); }
                            catch { Console.WriteLine(e.Message); }
                        }
                    }
                ))
                { IsBackground = true };
                sweeper.Start();
            }

            EnsureWatchdogRunning();
        }

        // Stops the watchdog and all workers, then releases the semaphore.
        protected override void Dispose(bool disposing)
        {
            logger.Info("Disposing ReplayConsumer");
            stopped = true;
            watchdogTimer?.Dispose();
            watchdogTimer = null;
            foreach (var r in ReplayWorkers.Values)
            {
                r.Dispose();
            }
            WorkLimiter.Dispose();
        }

        // Sweeper thread: removes from the workers list all the workers
        // that have not executed a command in the last 5 minutes
        private void RunSweeper()
        {
            while (!stopped)
            {
                logger.Debug("Looking for workers that have been idle for {InactiveWorkerTerminationTimeoutSeconds}s", InactiveWorkerTerminationTimeoutSeconds);
                try
                {
                    // Use .ToList() to materialise the list so that ReplayWorkers.TryRemove does not cause an exception that the list has changed during the iteration
                    foreach (var wrk in ReplayWorkers.Values.Where(x => x.LastCommandTime > DateTime.MinValue && x.LastCommandTime < DateTime.Now.AddSeconds(-InactiveWorkerTerminationTimeoutSeconds) && !x.HasCommands).ToList())
                    {
                        if (stopped) { return; }
                        logger.Debug("Removing worker {Worker} which has not executed a command since {lastCommand}", wrk.Name, wrk.LastCommandTime);
                        RemoveWorker(wrk.Name);
                    }
                }
                catch (Exception e)
                {
                    logger.Warn(e, "Error when removing idle workers");
                }
                Thread.Sleep(InactiveWorkerTerminationTimeoutSeconds * 1000); // sleep some seconds
            }
        }

        // Detaches, stops and disposes a single worker by key.
        private void RemoveWorker(string name)
        {
            _ = ReplayWorkers.TryRemove(name, out var outWrk);
            if (outWrk != null)
            {
                outWrk.CommandExecuted -= OnWorkerCommandExecuted;
                outWrk.Stop();
                outWrk.Dispose();
            }
        }

        // Schedules execution of the worker's next command according to ThreadingMode.
        private void RunWorker(ReplayWorker wrk)
        {
            try
            {
                if (stopped) { return; }

                if (wrk.HasCommands)
                {
                    if (ThreadingMode == ThreadingModeEnum.ThreadPools)
                    {
                        // Using a semaphore to avoid overwhelming the threadpool
                        // Without this precaution, the memory consumption goes to the roof
                        _ = WorkLimiter.WaitOne();
                        var queued = false;
                        try
                        {
                            // Queue the execution of a statement in the threadpool.
                            // The statement will get executed in a separate thread eventually.
                            _ = ThreadPool.QueueUserWorkItem(
                                delegate
                                {
                                    try
                                    {
                                        wrk.ExecuteNextCommand();
                                    }
                                    catch (Exception e)
                                    {
                                        try { logger.Error(e, "Unhandled exception in ReplayWorker.ExecuteCommand"); }
                                        catch { Console.WriteLine(e.Message); }
                                    }
                                    finally
                                    {
                                        // Release only after execution completes to actually
                                        // bound the number of concurrently executing commands.
                                        _ = WorkLimiter.Release();
                                    }
                                }
                            );
                            queued = true;
                        }
                        finally
                        {
                            // If queuing itself failed, release the slot we acquired
                            // so the semaphore is not permanently leaked.
                            if (!queued)
                            {
                                _ = WorkLimiter.Release();
                            }
                        }
                    }
                    else if (ThreadingMode == ThreadingModeEnum.Tasks)
                    {
                        // TODO: Is this not the same as WorkerTask?
                        // Here the task is created by ReplayConsumer.
                        // With WorkerTask the task is created by ReplayWorker.Start.
                        // Using a semaphore to avoid overwhelming the threadpool
                        // Without this precaution, the memory consumption goes to the roof
                        _ = WorkLimiter.WaitOne();
                        // Start a new Task to run the statement
                        var t = Task.Factory.StartNew(
                            delegate
                            {
                                try
                                {
                                    wrk.ExecuteNextCommand();
                                }
                                catch (Exception e)
                                {
                                    try { logger.Error(e, "Unhandled exception in ReplayWorker.ExecuteCommand"); }
                                    catch { Console.WriteLine(e.Message); }
                                }
                                finally
                                {
                                    // Release only after execution completes to actually
                                    // bound the number of concurrently executing commands.
                                    _ = WorkLimiter.Release();
                                }
                            }
                        );
                    }
                    else if (ThreadingMode == ThreadingModeEnum.WorkerTask)
                    {
                        // The worker owns its own long-running task; just (re)start it.
                        if (!wrk.IsRunning && !stopped)
                        {
                            wrk.Start();
                        }
                    }
                    else if (ThreadingMode == ThreadingModeEnum.Serial)
                    {
                        // Execute inline on the caller's thread.
                        wrk.ExecuteNextCommand();
                    }
                }
            }
            catch (InvalidOperationException)
            {
                //ignore ...
}
            catch (Exception ex)
            {
                logger.Error(ex, "Error starting worker");
            }
        }

        // True while any worker still has queued commands or the shared buffer
        // still contains undispatched events.
        public override bool HasMoreEvents()
        {
            return ReplayWorkers.Count(t => t.Value.HasCommands) > 0 || Buffer.Count > 0;
        }

        // 0/1 flag flipped with Interlocked so the completion message logs exactly once.
        private long completionLogged = 0;

        // Raised by a worker after every executed command; drives progress
        // logging and eager completion detection.
        private void OnWorkerCommandExecuted(object sender, EventArgs e)
        {
            var executed = Interlocked.Increment(ref executedEventCount);
            EnsureWatchdogRunning();
            LogReplayProgress(executed);

            // Log completion eagerly as soon as the last command is executed,
            // without waiting for the next watchdog tick which may never fire
            // if the controller disposes first.
            // Only applies when totalEventCount is known (i.e. file-based replay).
            // For realtime replay, totalEventCount is 0 and there is no defined end.
            if (totalEventCount > 0)
            {
                var dispatched = Interlocked.Read(ref dispatchedEventCount);
                if (executed >= dispatched
                    && Buffer.Count == 0
                    && ReplayWorkers.Values.Sum(x => x.QueueLength) == 0
                    && Interlocked.CompareExchange(ref completionLogged, 1, 0) == 0)
                {
                    LogReplayProgress(executed, forceLog: true);
                    logger.Info("Replay completed: {executed} commands executed out of {dispatched} dispatched", executed, dispatched);
                }
            }
        }

        // Writes a progress line when executed hits the adaptive log interval
        // (or unconditionally when forceLog is set).
        private void LogReplayProgress(long executed, bool forceLog = false)
        {
            // Determine the log interval:
            // - If dispatchedEventCount is known and > 0: aim for ~1000 log lines for large workloads,
            //   fall back to ~10 for small ones (< 1000 events).
            // - If dispatchedEventCount is unknown: fall back to WorkerStatsCommandCount.
            var dispatched = Interlocked.Read(ref dispatchedEventCount);
            long logInterval;
            if (dispatched > 0)
            {
                var fineInterval = dispatched / 1000;
                var coarseInterval = Math.Max(1, dispatched / 10);
                logInterval = fineInterval >= 1 ? fineInterval : coarseInterval;
            }
            else
            {
                logInterval = Math.Max(1, WorkerStatsCommandCount);
            }

            if (forceLog || executed == 1 || executed % logInterval == 0)
            {
                var bufferedEventCount = ReplayWorkers.Values.Sum(x => x.QueueLength);
                if (dispatched > 0)
                {
                    var percentInfo = (double)executed / (double)dispatched;
                    logger.Info($"{executed} ({percentInfo:P}) events replayed - {Buffer.Count + bufferedEventCount} events buffered");
                }
                else
                {
                    logger.Info($"{executed} events replayed - {Buffer.Count + bufferedEventCount} events buffered");
                }
            }
        }
    }
}

================================================
FILE: WorkloadTools/Consumer/Replay/ReplayWorker.cs
================================================

using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Data; using System.Data.SqlClient; using System.Diagnostics; using System.Linq; using System.Runtime.CompilerServices; using System.Text; using System.Threading; using System.Threading.Tasks; using NLog; using WorkloadTools.Consumer.Analysis; using WorkloadTools.Listener; using WorkloadTools.Util;

namespace WorkloadTools.Consumer.Replay
{
    // Replays the command stream of one captured session on its own connection,
    // honouring the original timing as far as possible.
    internal class ReplayWorker : IDisposable
    {
        // After this many consecutive skipped delays a warning is emitted.
        private const int SkippedDelayCountThreshold = 100;

        // Unlike the other loggers this one is not static because we
        // need unique properties for each instance of ReplayWorker.
private readonly Logger logger;

        public bool DisplayWorkerStats { get; set; }
        public bool ConsumeResults { get; set; }
        public int QueryTimeoutSeconds { get; set; }
        public int WorkerStatsCommandCount { get; set; }
        public bool MimicApplicationName { get; set; }
        public LogLevel CommandErrorLogLevel { get; set; }
        public bool RelativeDelays { get; set; }
        public int FailRetryCount { get; set; }
        public int TimeoutRetryCount { get; set; }

        // Dedicated connection on which this session's commands are replayed.
        private SqlConnection Conn { get; set; }
        public SqlConnectionInfo ConnectionInfo { get; set; }
        public int ReplayIntervalSeconds { get; set; } = 0;
        public bool StopOnError { get; set; } = false;
        public string Name { get; private set; }
        public int SPID { get; set; }
        public bool IsRunning { get; private set; } = false;
        public bool RaiseErrorsToSqlEventTracing { get; set; } = true;

        // Replay epoch: ReplayOffset values are measured from this instant.
        public DateTime StartTime { get; set; }
        public Dictionary DatabaseMap { get; set; } = new Dictionary();

        private Task runner = null;
        private CancellationTokenSource tokenSource;
        // Offset (ms) of the previously executed command; used for relative delays.
        private double lastCommandOffet = 0;
        // Set after a connection-reset event so the next command runs without delay.
        private bool skipNextDelay = false;

        public ReplayWorker(string name)
        {
            Name = name;
            // Per-instance logger so every line carries this worker's name.
            logger = LogManager.GetCurrentClassLogger().WithProperty("Worker", name);
        }

        public bool HasCommands => !Commands.IsEmpty;
        public int QueueLength => Commands.Count;
        public DateTime LastCommandTime { get; private set; }

        private long commandCount = 0;
        private long previousCommandCount = 0;
        private DateTime previousCPSComputeTime = DateTime.Now;
        private readonly List commandsPerSecond = new List();
        private readonly ConcurrentQueue Commands = new ConcurrentQueue();

        public bool IsStopped { get; private set; } = false;

        private readonly SqlTransformer transformer = new SqlTransformer();
        // Maps captured prepared-statement handles to the handles issued by the target.
        private readonly Dictionary preparedStatements = new Dictionary();
        private SpinWait _spinWait = new SpinWait();
        private int continiousSkippedDelays = 0;

        // Event ids passed to sp_trace_generateevent when surfacing replay errors.
        private enum UserErrorType
        {
            Timeout = 82,
            Error = 83
        }

        // Opens this worker's connection, optionally mimicking the captured
        // application name, and propagates the database map.
        private void InitializeConnection(string applicationName)
        {
            logger.Debug("Connecting to server {serverName}", ConnectionInfo.ServerName);
            ConnectionInfo.DatabaseMap = DatabaseMap;
            var connString = ConnectionInfo.ConnectionString(applicationName);
            Conn = new SqlConnection(connString);
            Conn.Open();
            logger.Debug("Connected");
        }

        // Starts (or restarts) the long-running task that drains this worker's queue.
        public void Start()
        {
            tokenSource = new CancellationTokenSource();
            var token = tokenSource.Token;

            // A completed runner can be disposed and replaced.
            if (runner != null && runner.IsCompleted)
            {
                runner.Dispose();
                runner = null;
            }

            if (runner == null && !IsStopped)
            {
                // Given the potential for lots of Workers we need to allow over-subscription of threads using the LongRunning option.
                // "
                // Specifies that a task will be a long-running, coarse-grained operation involving
                // fewer, larger components than fine-grained systems. It provides a hint to the
                // System.Threading.Tasks.TaskScheduler that oversubscription may be warranted.
                // Oversubscription lets you create more threads than the available number of hardware
                // threads. It also provides a hint to the task scheduler that an additional thread
                // might be required for the task so that it does not block the forward progress
                // of other threads or work items on the local thread-pool queue.
// "
                runner = Task.Factory.StartNew(() => Run(), token, TaskCreationOptions.LongRunning, TaskScheduler.Current);
            }
        }

        // Loop body of the runner task: executes commands until stopped or idle.
        public void Run()
        {
            IsRunning = true;
            while (!IsStopped && IsRunning)
            {
                try
                {
                    ExecuteNextCommand();
                }
                catch (Exception e)
                {
                    logger.Error(e, "Error starting Worker");
                }
            }
        }

        // Signals the runner loop to exit and cancels its token.
        public void Stop()
        {
            logger.Debug("Stopping");
            IsStopped = true;
            IsRunning = false;
            tokenSource?.Cancel();
            logger.Debug("Stopped");
        }

        // Raised after every command attempt (success or failure).
        public event EventHandler CommandExecuted;

        // Dequeues and executes one command; flips IsRunning off when the queue is empty.
        public void ExecuteNextCommand()
        {
            var cmd = GetNextCommand();
            if (cmd != null)
            {
                try
                {
                    ExecuteCommand(cmd);
                }
                finally
                {
                    commandCount++;
                    CommandExecuted?.Invoke(this, EventArgs.Empty);
                }
            }
            else
            {
                // Release the thread when out of work
                IsRunning = false;
            }
        }

        // Non-blocking dequeue; returns null when the queue is empty.
        public ReplayCommand GetNextCommand()
        {
            _ = Commands.TryDequeue(out var result);
            // Previously this method would loop and use a spinWait.
            // Memory dumps taken of a large workload showed a very large number of tasks in a Scheduled state on this spin.
            // Better concurrency has been achieved by letting the task complete.
            // The ReplayConsumer will then start a task up again if more work comes to this worker in future.
            return result;
        }

        // Executes one command against the target: handles connection state,
        // replay timing, prepared-statement bookkeeping, retries and error events.
        // Synchronized: at most one command per worker instance runs at a time.
        [MethodImpl(MethodImplOptions.Synchronized)]
        public void ExecuteCommand(ReplayCommand command, int failRetryCount = 0, int timeoutRetryCount = 0)
        {
            LastCommandTime = DateTime.Now;

            var applicationName = "WorkloadTools-ReplayWorker";
            if (MimicApplicationName)
            {
                applicationName = command.ApplicationName;
                // NOTE(review): when the captured application name is empty this
                // resets ConnectionInfo.ApplicationName but leaves the local
                // applicationName variable empty — confirm this is intended.
                if (string.IsNullOrEmpty(applicationName))
                {
                    ConnectionInfo.ApplicationName = "WorkloadTools-ReplayWorker";
                }
            }

            // Lazily open the connection on first use; give up on failure.
            if (Conn == null)
            {
                try
                {
                    InitializeConnection(applicationName);
                }
                catch (SqlException se)
                {
                    logger.Error(se, "Unable to acquire the connection. Quitting the ReplayWorker");
                    return;
                }
            }

            if (Conn != null)
            {
                // Wait out a transient Connecting state before using the connection.
                while (Conn.State == ConnectionState.Connecting)
                {
                    if (IsStopped) { return; }
                    logger.Debug("Connection is in connecting state. Sleeping for 5ms");
                    Thread.Sleep(5);
                }
            }

            // Re-open a closed/broken connection before executing.
            if (Conn == null || (Conn.State == ConnectionState.Closed) || (Conn.State == ConnectionState.Broken))
            {
                InitializeConnection(applicationName);
            }

            // Extract the handle from the prepared statement
            var nst = transformer.Normalize(command.CommandText);

            // If the command comes with a replay offset, evaluate it now.
            // The offset in milliseconds is set in FileWorkloadListener.
            // The other listeners do not set this value, as they
            // already come with the original timing
            if (command.ReplayOffset > 0 && !skipNextDelay)
            {
                if (RelativeDelays && lastCommandOffet > 0)
                {
                    // Delay relative to the previous command of this session.
                    var relativeDelay = command.ReplayOffset - lastCommandOffet;
                    logger.Debug("Command start time is {startTime:yyyy-MM-ddTHH\\:mm\\:ss.fffffff} which is an offset of {relativeDelay} from the last command for this session", command.StartTime, relativeDelay);
                    PreciseDelay(relativeDelay);
                }
                else
                {
                    // Delay relative to the replay epoch (StartTime).
                    var delayMs = command.ReplayOffset - (DateTime.Now - StartTime).TotalMilliseconds;
                    // Delay execution only if necessary
                    if (delayMs > 0)
                    {
                        // We're not skipping this delay, so reset the counter.
                        continiousSkippedDelays = 0;
                        // Each command has a requested offset from the beginning
                        // of the workload and this class does its best to respect it.
                        // If the previous commands take longer in the target environment
                        // the offset cannot be respected and the command will execute
                        // without further waits, but there is no way to recover
                        // the delay that has built up to that point.
logger.Debug("Command start time is {startTime:yyyy-MM-ddTHH\\:mm\\:ss.fffffff} which is an offset of {ReplayOffset}ms from the start so waiting", command.StartTime, command.ReplayOffset);
                        PreciseDelay(delayMs);
                    }
                    else if (delayMs < -10000)
                    {
                        // If we're more than 10s behind then
                        logger.Debug("Command start time is {startTime:yyyy-MM-ddTHH\\:mm\\:ss.fffffff} which is an offset of {ReplayOffset}ms from the start but replay is behind so it should have executed {delayMs}ms ago", command.StartTime, command.ReplayOffset, delayMs);
                        continiousSkippedDelays++;
                        if (continiousSkippedDelays % SkippedDelayCountThreshold == 0)
                        {
                            // If we are consistently behind and the configuration has
                            // requested SynchronizationMode we're actually doing a stress test.
                            logger.Warn("The last {skippedDelays} Commands requested a delay but replay is > 10s behind so were processed immediately which may indicate that either this tool or the target system cannot keep up with the workload. You could try switching to relative delays with the RelativeDelays parameter", continiousSkippedDelays);
                        }
                    }
                }
            }

            if (IsStopped) { return; }

            // Record the requested and actual command start time in case we're doing relative sleeps
            lastCommandOffet = command.ReplayOffset;

            if (nst.CommandType == NormalizedSqlText.CommandTypeEnum.SP_RESET_CONNECTION)
            {
                // If event is a sp_reset_connection, call a connection close and open to
                // force connection to get back to connection pool and reset it so that
                // it's clean for the next event
                Conn.Close();
                Conn.Open();
                // Generally applications that (correctly) close their connection
                // regularly and rely on the SQL Client Connection Pool, they will do
                // so after a command.
                // This results in the opening of the next connection getting a connection
                // from the pool and triggering SP_Reset_Connection.
                // As such captured traces show that the gap between SP_Reset_Connection
                // and the next command is a few ms only.
                // Given we struggle to do super-accurate delays, and given the original
                // application likely had 0 delay in this scenario, skip the delay for the
                // next command.
                skipNextDelay = true;
                return;
            }
            else if (nst.CommandType == NormalizedSqlText.CommandTypeEnum.SP_RESET_CONNECTION_NONPOOLED)
            {
                // If event is a nonpooled sp_reset_connection, call a ClearPool(conn)
                // to force a new connection.
                ClearPool(Conn);
                // Generally applications that (correctly) close their connection
                // regularly and rely on the SQL Client Connection Pool, they will do
                // so after a command.
                // This results in the opening of the next connection getting a connection
                // from the pool and triggering SP_Reset_Connection.
                // As such captured traces show that the gap between SP_Reset_Connection
                // and the next command is a few ms only.
                // Given we struggle to do super-accurate delays, and given the original
                // application likely had 0 delay in this scenario, skip the delay for the
                // next command.
                skipNextDelay = true;
                return;
            }
            else if (nst.CommandType == NormalizedSqlText.CommandTypeEnum.SP_PREPARE)
            {
                command.CommandText = nst.NormalizedText;
            }
            else if (nst.CommandType == NormalizedSqlText.CommandTypeEnum.SP_UNPREPARE || nst.CommandType == NormalizedSqlText.CommandTypeEnum.SP_EXECUTE)
            {
                // look up the statement to unprepare in the dictionary
                if (preparedStatements.ContainsKey(nst.Handle))
                {
                    // the sp_execute statement has already been "normalized"
                    // by replacing the original statement number with the § placeholder
                    command.CommandText = nst.NormalizedText.ReplaceFirst("§", preparedStatements[nst.Handle].ToString());
                    if (nst.CommandType == NormalizedSqlText.CommandTypeEnum.SP_UNPREPARE)
                    {
                        _ = preparedStatements.Remove(nst.Handle);
                    }
                }
                else
                {
                    return; // statement not found: better return
                }
            }

            // If we get here it isn't a connection reset event, so ensure the next command delays correctly.
skipNextDelay = false;

            try
            {
                // Try to remap the database according to the database map
                if (DatabaseMap.ContainsKey(command.Database))
                {
                    command.Database = DatabaseMap[command.Database];
                }

                // Switch database context only when it actually differs.
                if (Conn.Database != command.Database)
                {
                    logger.Debug("Changing database to {databaseName}", command.Database);
                    Conn.ChangeDatabase(command.Database);
                }

                using (var cmd = new SqlCommand(command.CommandText))
                {
                    cmd.Connection = Conn;
                    cmd.CommandTimeout = QueryTimeoutSeconds;

                    if (nst.CommandType == NormalizedSqlText.CommandTypeEnum.SP_PREPARE)
                    {
                        if (cmd.CommandText == null) { return; }
                        // sp_prepare returns the new statement handle as a scalar;
                        // remember it so later sp_execute/sp_unprepare can be remapped.
                        var handle = -1;
                        try
                        {
                            var res = cmd.ExecuteScalar();
                            if (res != null)
                            {
                                handle = (int)res;
                                if (!preparedStatements.ContainsKey(nst.Handle))
                                {
                                    preparedStatements.Add(nst.Handle, handle);
                                }
                            }
                        }
                        catch (NullReferenceException)
                        {
                            throw;
                        }
                    }
                    else if (ConsumeResults)
                    {
                        // Drain the result set to mimic a client reading its results.
                        using (var reader = cmd.ExecuteReader())
                        using (var consumer = new ResultSetConsumer(reader))
                        {
                            consumer.Consume();
                        }
                    }
                    else
                    {
                        _ = cmd.ExecuteNonQuery();
                    }
                }

                logger.Trace("SUCCESS - \n{commandText}", command.CommandText);

                // Periodically compute and (optionally) report commands/second.
                if (commandCount > 0 && commandCount % WorkerStatsCommandCount == 0)
                {
                    var seconds = (DateTime.Now - previousCPSComputeTime).TotalSeconds;
                    var cps = (commandCount - previousCommandCount) / ((seconds == 0) ? 1 : seconds);
                    previousCPSComputeTime = DateTime.Now;
                    previousCommandCount = commandCount;
                    if (DisplayWorkerStats)
                    {
                        commandsPerSecond.Add((int)cps);
                        cps = commandsPerSecond.Average();
                        logger.Info("{commandCount} commands executed - {pendingCommands} commands pending - Last Event Sequence: {lastEventSequence} - {cps} commands per second", commandCount, Commands.Count, command.EventSequence, (int)cps);
                    }
                }

                // Update the LastCommandTime again in case the duration of a Consume() call exceeded LastCommandTime + InactiveWorkerTerminationTimeoutSeconds
                LastCommandTime = DateTime.Now;
            }
            catch (SqlException e)
            {
                // handle timeouts (error number -2 is the client-side timeout)
                if (e.Number == -2)
                {
                    RaiseTimeoutEvent(command.CommandText);
                }
                else
                {
                    RaiseErrorEvent(command, e.Message);
                }

                // If the workload is expected to include lots of errors then logging at the default Error level can become really noisy!
                logger.Log(CommandErrorLogLevel, e, "Sequence[{eventSequence}] - Error: \n{commandText}", command.EventSequence, command.CommandText);

                if (StopOnError)
                {
                    ClearPool(Conn);
                    throw;
                }
                else
                {
                    // Recursive retries: failures and timeouts are counted separately.
                    if (e.Number != -2 && failRetryCount < FailRetryCount)
                    {
                        logger.Warn("Retrying Sequence[{eventSequence}] - Retrying command (current fail retry: {failRetryCount})", command.EventSequence, failRetryCount);
                        ExecuteCommand(command, ++failRetryCount, timeoutRetryCount);
                    }
                    if (e.Number == -2 && timeoutRetryCount < TimeoutRetryCount)
                    {
                        logger.Warn("Retrying Sequence[{eventSequence}] - Retrying command (current timeout retry: {timeoutRetryCount})", command.EventSequence, timeoutRetryCount);
                        ExecuteCommand(command, failRetryCount, ++timeoutRetryCount);
                    }
                }
            }
            catch (Exception e)
            {
                // If the workload is expected to include lots of errors then logging at the default Error level can become really noisy!
logger.Log(CommandErrorLogLevel, e, "Sequence[{eventSequence}] - Error: \n{commandText}", command.EventSequence, command.CommandText); ClearPool(Conn); if (StopOnError) { throw; } } } private void ClearPool(SqlConnection conn) { if (conn == null) { return; } try { SqlConnection.ClearPool(conn); } catch (Exception) { /*swallow */} if (conn.State == ConnectionState.Open) { try { conn.Close(); } catch (Exception) { /* swallow */ } try { conn.Dispose(); conn = null; } catch (Exception) { /* swallow */ } } } private void RaiseTimeoutEvent(string commandText) { if (!RaiseErrorsToSqlEventTracing) { return; } RaiseErrorEvent($"WorkloadTools.Timeout[{QueryTimeoutSeconds}]", commandText, UserErrorType.Timeout); } private void RaiseErrorEvent(ReplayCommand Command, string ErrorMessage) { if (!RaiseErrorsToSqlEventTracing) { return; } var msg = $@"DATABASE: {Command.Database} SEQUENCE: {Command.EventSequence} MESSAGE: {ErrorMessage} -------------------- {Command.CommandText} "; RaiseErrorEvent("WorkloadTools.Replay", msg, UserErrorType.Error); } private void RaiseErrorEvent(string info, string message, UserErrorType type) { if (!RaiseErrorsToSqlEventTracing) { return; } // Raise a custom event. Both SqlTrace and Extended Events can capture this event. 
var sql = "EXEC sp_trace_generateevent @eventid = @eventid, @userinfo = @userinfo, @userdata = @userdata;";
            try
            {
                using (var cmd = new SqlCommand(sql))
                {
                    // Use a dedicated connection to raise the custom event so we don't mess
                    // up the existing replay connection: a reset (close) now would cause the
                    // next event call to fail if it depends on objects or user settings
                    // tied to that connection.
                    var connString = ConnectionInfo.ConnectionString();
                    // FIX: wrap the connection in a using block - previously it leaked
                    // whenever ExecuteNonQuery threw, because ClearPool was never reached.
                    using (var connErrorEvent = new SqlConnection(connString))
                    {
                        connErrorEvent.Open();
                        cmd.Connection = connErrorEvent;
                        _ = cmd.Parameters.Add(new SqlParameter("@eventid", SqlDbType.Int) { Value = type });
                        _ = cmd.Parameters.Add(new SqlParameter("@userinfo", SqlDbType.NVarChar, 128) { Value = info });
                        // FIX: @userdata is varbinary(8000) *bytes*. Encoding.Unicode emits
                        // 2 bytes per character, so truncate to 4000 characters - the previous
                        // 8000-character limit could produce up to 16000 bytes and overflow
                        // the declared parameter size.
                        _ = cmd.Parameters.Add(new SqlParameter("@userdata", SqlDbType.VarBinary, 8000) { Value = Encoding.Unicode.GetBytes(message.Substring(0, message.Length > 4000 ? 4000 : message.Length)) });
                        _ = cmd.ExecuteNonQuery();
                        // Clear the pool so this throwaway connection is not reused.
                        ClearPool(connErrorEvent);
                    }
                }
            }
            catch (Exception ex)
            {
                logger.Error(ex, "Unable to raise error event");
            }
        }

        /// <summary>
        /// Enqueues a command for this worker to replay later.
        /// </summary>
        public void AppendCommand(ReplayCommand cmd)
        {
            Commands.Enqueue(cmd);
        }

        // Waits approximately delayMs milliseconds, trading CPU for accuracy:
        // long remaining delays use Thread.Sleep, short ones busy-spin.
        // NOTE(review): Stopwatch.GetTimestamp() ticks are compared against
        // TimeSpan ticks (100ns); these only agree when Stopwatch.Frequency is
        // 10MHz - confirm on the target platforms.
        private void PreciseDelay(double delayMs)
        {
            var startingTimestamp = Stopwatch.GetTimestamp();
            var targetTicks = TimeSpan.TicksPerMillisecond * delayMs;
            long elapsedMs;
            while (Stopwatch.GetTimestamp() - startingTimestamp < targetTicks)
            {
                if (IsStopped)
                {
                    return;
                }
                elapsedMs = (Stopwatch.GetTimestamp() - startingTimestamp) / TimeSpan.TicksPerMillisecond;
                // Thread.Sleep is less accurate than Thread.SpinWait, but consumes less CPU while idle.
                // So to balance accuracy vs CPU impact use a hybrid approach.
if (delayMs - elapsedMs > 10000) { logger.Debug("Sleeping for {threadSleepMs}ms - delay is {delayMs}ms - elapsed is {elapsedMs}ms", 8000, delayMs, elapsedMs); Thread.Sleep(8000); } else if (delayMs - elapsedMs > 1000) { logger.Debug("Sleeping for {threadSleepMs}ms - delay is {delayMs}ms - elapsed is {elapsedMs}ms", 800, delayMs, elapsedMs); Thread.Sleep(800); } else if (delayMs - elapsedMs > 500) { logger.Debug("Sleeping for {threadSleepMs}ms - delay is {delayMs}ms - elapsed is {elapsedMs}ms", 300, delayMs, elapsedMs); Thread.Sleep(300); } else if (delayMs - elapsedMs > 20) { logger.Debug("Sleeping for {threadSleepMs}ms - delay is {delayMs}ms - elapsed is {elapsedMs}ms", 5, delayMs, elapsedMs); Thread.Sleep(5); } else if (delayMs - elapsedMs > 5) { logger.Debug("Spinning for {spinIterations} iterations - delay is {delayMs}ms - elapsed is {elapsedMs}ms", 100, delayMs, elapsedMs); Thread.SpinWait(100); } else if (delayMs - elapsedMs > 1) { logger.Debug("Spinning for {spinIterations} iterations - delay is {delayMs}ms - elapsed is {elapsedMs}ms", 50, delayMs, elapsedMs); Thread.SpinWait(50); } else { logger.Debug("Spinning for {spinIterations} iterations - delay is {delayMs}ms - elapsed is {elapsedMs}ms", 10, delayMs, elapsedMs); Thread.SpinWait(10); } } // Highlight if the delays are inaccurate (with 100ms error margin) // If there are a lot of these warnings then it may suggest either // the above ReplayOffsetSleepThresholdMs/ThreadSpinIterations are // too high or the replay host does not have enough CPU capacity to // replay the source workload. 
elapsedMs = (Stopwatch.GetTimestamp() - startingTimestamp) / TimeSpan.TicksPerMillisecond; if (elapsedMs > delayMs + 100) { logger.Warn("Requested delay was {requestedDelay}ms but actual delay was {actualDelay}ms which may indicate that this tool cannot keep up with the source workload, possibly due to insufficient CPU Cores for parallel processing", delayMs, elapsedMs); } else { logger.Debug("Requested delay was {requestedDelay}ms and actual delay was {actualDelay}ms", delayMs, elapsedMs); } } public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } protected void Dispose(bool disposing) { if (disposing) { logger.Debug("Disposing ReplayWorker"); Stop(); try { if (Conn != null) { if (Conn.State == ConnectionState.Open) { try { Conn.Close(); } catch (Exception) { /* swallow */ } try { Conn.Dispose(); } catch (Exception) { /* swallow */ } } Conn = null; } } catch (Exception ex) { logger.Warn(ex); } try { if (runner != null) { while (!(runner.IsCompleted || runner.IsFaulted || runner.IsCanceled)) { _spinWait.SpinOnce(); } runner.Dispose(); runner = null; } } catch (Exception ex) { logger.Warn(ex); } try { if (tokenSource != null) { tokenSource.Dispose(); tokenSource = null; } } catch (Exception ex) { logger.Warn(ex); } } } } } ================================================ FILE: WorkloadTools/Consumer/Replay/ReplayWorker.cs.bak ================================================ using NLog; using System; using System.Collections.Generic; using System.Data.SqlClient; using System.Linq; using System.Runtime.CompilerServices; using System.Text; using System.Threading; namespace WorkloadTools.Consumer.Replay { class ReplayWorker { private static Logger logger = LogManager.GetCurrentClassLogger(); private SqlConnection conn { get; set; } public SqlConnectionInfo ConnectionInfo { get; set; } public int ReplayIntervalSeconds { get; set; } = 0; public bool StopOnError { get; set; } = false; public string Name { get; set; } public int SPID { get; set; } private long 
commandCount = 0; public int CommandCount { [MethodImpl(MethodImplOptions.Synchronized)] get { return Commands.Count; } } private Queue Commands = new Queue(); private void InitializeConnection() { logger.Info(String.Format("Worker [{0}] - Connecting to server {1} for replay...", Name, ConnectionInfo.ServerName)); string connString = BuildConnectionString(); conn = new SqlConnection(connString); conn.Open(); logger.Info(String.Format("Worker [{0}] - Connected", Name)); } private string BuildConnectionString() { string connectionString = "Data Source=" + ConnectionInfo.ServerName + ";"; if (String.IsNullOrEmpty(ConnectionInfo.DatabaseName)) { connectionString += "Initial Catalog = master; "; } else { connectionString += "Initial Catalog = " + ConnectionInfo.DatabaseName + "; "; } if (String.IsNullOrEmpty(ConnectionInfo.UserName)) { connectionString += "Integrated Security = SSPI; "; } else { connectionString += "User Id = " + ConnectionInfo.UserName + "; "; connectionString += "Password = " + ConnectionInfo.Password + "; "; } connectionString += "Application Name=WorkloadTools;"; return connectionString; } [MethodImpl(MethodImplOptions.Synchronized)] public void ExecuteNextCommand() { ReplayCommand cmd = GetNextCommand(); if (cmd != null) { ExecuteCommand(cmd); commandCount++; } } [MethodImpl(MethodImplOptions.Synchronized)] public ReplayCommand GetNextCommand() { if (Commands.Count == 0) { return null; } return Commands.Dequeue(); } [MethodImpl(MethodImplOptions.Synchronized)] public void ExecuteCommand(ReplayCommand command) { if (conn == null) { try { InitializeConnection(); } catch (SqlException se) { logger.Error(se.Message); logger.Error(String.Format("Worker [{0}] - Unable to acquire the connection. 
Quitting the ReplayWorker", Name)); return; } } while (conn.State == System.Data.ConnectionState.Connecting) { Thread.Sleep(5); } if ((conn.State == System.Data.ConnectionState.Closed) || (conn.State == System.Data.ConnectionState.Broken)) { conn.Open(); } try { if (conn.Database != command.Database) { logger.Trace(String.Format("Worker [{0}] - Changing database to {1} ", Name, command.Database)); conn.ChangeDatabase(command.Database); } SqlCommand cmd = new SqlCommand(command.CommandText); cmd.Connection = conn; cmd.ExecuteNonQuery(); logger.Trace(String.Format("Worker [{0}] - SUCCES - \n{1}", Name, command.CommandText)); if (commandCount % 100 == 0) logger.Info(String.Format("Worker [{0}] - {1} commands executed.", Name, commandCount.ToString())); } catch (Exception e) { if (StopOnError) { logger.Error(String.Format("Worker[{0}] - Error: \n{1}", Name, command.CommandText)); throw; } else { logger.Trace(String.Format("Worker [{0}] - Error: {1}", Name, command.CommandText)); logger.Warn(String.Format("Worker [{0}] - Error: {1}", Name, e.Message)); logger.Trace(e.StackTrace); } } } [MethodImpl(MethodImplOptions.Synchronized)] public void AppendCommand(ReplayCommand cmd) { Commands.Enqueue(cmd); } [MethodImpl(MethodImplOptions.Synchronized)] public void AppendCommand(string commandText, string databaseName) { Commands.Enqueue(new ReplayCommand() { CommandText = commandText, Database = databaseName }); } } } ================================================ FILE: WorkloadTools/Consumer/Replay/ResultSetConsumer.cs ================================================ using System; using System.Collections.Generic; using System.Data.SqlClient; using System.Linq; using System.Text; namespace WorkloadTools.Consumer.Replay { public class ResultSetConsumer : IDisposable { private readonly SqlDataReader reader; public ResultSetConsumer(SqlDataReader sqlDataReader) { reader = sqlDataReader; } public void Consume() { while (reader.Read()) { // do nothing: I just need to pull all the 
rows } } public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { } } } ================================================ FILE: WorkloadTools/Consumer/WorkloadConsumer.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; namespace WorkloadTools.Consumer { public abstract class WorkloadConsumer : IDisposable { public abstract void Consume(WorkloadEvent evt); public abstract bool HasMoreEvents(); public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { } } } ================================================ FILE: WorkloadTools/Consumer/WorkloadFile/WorkloadFileWriterConsumer.cs ================================================ using NLog; using System; using System.Collections.Generic; using System.Data; using System.Data.SQLite; using System.IO; using System.Linq; using System.Reflection; using System.Text; namespace WorkloadTools.Consumer.WorkloadFile { public class WorkloadFileWriterConsumer : BufferedWorkloadConsumer { private static readonly Logger logger = LogManager.GetCurrentClassLogger(); public string OutputFile { get; set; } public static int CACHE_SIZE = 1000; // controls how often the data is written to the database // if not enough events are generated to flush the cache // a flush is forced every CACHE_FLUSH_HEARTBEAT_MINUTES public static int CACHE_FLUSH_HEARTBEAT_MINUTES = 1; public DateTime lastFlush = DateTime.Now; private bool databaseInitialized = false; private int row_id = 1; private string connectionString; private readonly object syncRoot = new object(); private SQLiteConnection conn; private SQLiteCommand events_cmd; private SQLiteCommand events_update_cmd; private SQLiteCommand waits_cmd; private SQLiteCommand diskperf_cmd; private SQLiteCommand counters_cmd; private long _rowsInserted = 0; private readonly string insert_events = @" INSERT 
INTO Events (
    row_id, event_sequence, event_type, start_time,
    client_app_name, client_host_name, database_name,
    server_principal_name, session_id, sql_text,
    cpu, duration, reads, writes
)
VALUES (
    $row_id, $event_sequence, $event_type, $start_time,
    $client_app_name, $client_host_name, $database_name,
    $server_principal_name, $session_id, $sql_text,
    $cpu, $duration, $reads, $writes
);";

        // Completes the matching *Starting row for the same session that has no
        // duration recorded yet (2 = RPCStarting, -3 = BatchStarting).
        private readonly string update_events = @"
UPDATE Events
SET cpu = $cpu,
    duration = $duration,
    reads = $reads,
    writes = $writes,
    sql_text = $sql_text
WHERE row_id = (
    SELECT row_id
    FROM Events
    WHERE session_id = $session_id
      AND event_sequence < $event_sequence
      AND IFNULL(duration, 0) = 0
      AND event_type IN (2, -3) /* 2 = RPCStarting, -3 = BatchStarting */
    ORDER BY EVENT_SEQUENCE DESC
    LIMIT 1);";

        private readonly string insert_waits = @"
INSERT INTO Waits (
    row_id, wait_type, wait_sec, resource_sec, signal_sec, wait_count
)
VALUES (
    $row_id, $wait_type, $wait_sec, $resource_sec, $signal_sec, $wait_count
);";

        private readonly string insert_counters = @"
INSERT INTO Counters (
    row_id, name, value
)
VALUES (
    $row_id, $name, $value
);";

        // FIX: the original statement listed write_bytes twice (SQLite rejects
        // duplicate column names), omitted read_bytes, and bound $cum_write_bytes
        // to the cum_read_bytes column. The column and parameter lists below now
        // match the parameters added by InsertDiskPerfEvent one to one.
        private readonly string insert_diskperf = @"
INSERT INTO DiskPerf (
    row_id, database_name, physical_filename, logical_filename,
    file_type, volume_mount_point,
    read_latency_ms, reads, read_bytes,
    write_latency_ms, writes, write_bytes,
    cum_read_latency_ms, cum_reads, cum_read_bytes,
    cum_write_latency_ms, cum_writes, cum_write_bytes
)
VALUES (
    $row_id, $database_name, $physical_filename, $logical_filename,
    $file_type, $volume_mount_point,
    $read_latency_ms, $reads, $read_bytes,
    $write_latency_ms, $writes, $write_bytes,
    $cum_read_latency_ms, $cum_reads, $cum_read_bytes,
    $cum_write_latency_ms, $cum_writes, $cum_write_bytes
);";

        // In-memory staging area; flushed to SQLite in batches of CACHE_SIZE
        // (or on the heartbeat timeout) to amortize transaction cost.
        private readonly Queue<WorkloadEvent> cache = new Queue<WorkloadEvent>(CACHE_SIZE);

        private bool forceFlush = false;

        /// <summary>
        /// Buffers a single workload event and triggers a flush when the cache
        /// is full or the heartbeat interval has elapsed.
        /// </summary>
        public override void ConsumeBuffered(WorkloadEvent evt)
        {
            if (!databaseInitialized)
            {
                InitializeDatabase();
            }
            lock (syncRoot)
            {
                cache.Enqueue(evt);
Flush(); } } private void Flush() { if (DateTime.Now.Subtract(lastFlush).TotalMinutes >= CACHE_FLUSH_HEARTBEAT_MINUTES) { forceFlush = true; } if (cache.Count == CACHE_SIZE || forceFlush) { InitializeConnection(); var tran = conn.BeginTransaction(); try { lock (syncRoot) { while (cache.Count > 0) { InsertEvent(cache.Dequeue()); } } tran.Commit(); } catch { try { tran.Rollback(); } catch (Exception) { //swallow } throw; } finally { lastFlush = DateTime.Now; forceFlush = false; } } } /* * Initializes the database connection. * Connection string settings that affect performance: * - synchronous = off | full | normal * - journal mode = memory | delete | persist | off * - cache size = * - temp store = memory * - locking mode = exclusive */ private void InitializeConnection() { if (conn == null && connectionString != null) { conn = new SQLiteConnection(connectionString); conn.Open(); } if (events_cmd == null) { events_cmd = new SQLiteCommand(insert_events, conn); } if (events_update_cmd == null) { events_update_cmd = new SQLiteCommand(update_events, conn); } if (waits_cmd == null) { waits_cmd = new SQLiteCommand(insert_waits, conn); } if (diskperf_cmd == null) { diskperf_cmd = new SQLiteCommand(insert_diskperf, conn); } if (counters_cmd == null) { counters_cmd = new SQLiteCommand(insert_counters, conn); } } private void UpdateExecutionEvent(WorkloadEvent evnt) { var evt = (ExecutionWorkloadEvent)evnt; _ = events_update_cmd.Parameters.AddWithValue("$event_sequence", evt.EventSequence); _ = events_update_cmd.Parameters.AddWithValue("$session_id", evt.SPID); _ = events_update_cmd.Parameters.AddWithValue("$cpu", evt.CPU); _ = events_update_cmd.Parameters.AddWithValue("$duration", evt.Duration); _ = events_update_cmd.Parameters.AddWithValue("$reads", evt.Reads); _ = events_update_cmd.Parameters.AddWithValue("$writes", evt.Writes); _ = events_update_cmd.Parameters.AddWithValue("$sql_text", evt.Text); int rowcount; rowcount = events_update_cmd.ExecuteNonQuery(); if (rowcount == 
0) { logger.Debug("Starting event not found - " + $"EventSequence: {evt.EventSequence}"); } } private void InsertExecutionEvent(WorkloadEvent evnt) { var evt = (ExecutionWorkloadEvent)evnt; _ = events_cmd.Parameters.AddWithValue("$row_id", row_id++); _ = events_cmd.Parameters.AddWithValue("$event_sequence", evt.EventSequence); _ = events_cmd.Parameters.AddWithValue("$event_type", evt.Type); _ = events_cmd.Parameters.AddWithValue("$start_time", evt.StartTime); _ = events_cmd.Parameters.AddWithValue("$client_app_name", evt.ApplicationName); _ = events_cmd.Parameters.AddWithValue("$client_host_name", evt.HostName); _ = events_cmd.Parameters.AddWithValue("$database_name", evt.DatabaseName); _ = events_cmd.Parameters.AddWithValue("$server_principal_name", evt.LoginName); _ = events_cmd.Parameters.AddWithValue("$session_id", evt.SPID); _ = events_cmd.Parameters.AddWithValue("$sql_text", evt.Text); _ = events_cmd.Parameters.AddWithValue("$cpu", evt.CPU); _ = events_cmd.Parameters.AddWithValue("$duration", evt.Duration); _ = events_cmd.Parameters.AddWithValue("$reads", evt.Reads); _ = events_cmd.Parameters.AddWithValue("$writes", evt.Writes); _ = events_cmd.ExecuteNonQuery(); } private void InsertEvent(WorkloadEvent evnt) { try { if (evnt is ExecutionWorkloadEvent) { if ((evnt.Type == WorkloadEvent.EventType.BatchCompleted) || (evnt.Type == WorkloadEvent.EventType.RPCCompleted)) { UpdateExecutionEvent(evnt); } else { InsertExecutionEvent(evnt); } } if (evnt is CounterWorkloadEvent) { InsertCounterEvent(evnt); } if (evnt is WaitStatsWorkloadEvent) { InsertWaitEvent(evnt); } if (evnt is DiskPerfWorkloadEvent) { InsertDiskPerfEvent(evnt); } _rowsInserted++; if ((_rowsInserted % CACHE_SIZE == 0) || forceFlush) { if (forceFlush) { forceFlush = false; } logger.Info("{_rowsInserted} events saved", _rowsInserted); } } catch (Exception e) { if (stopped) { return; } logger.Error(e, "Unable to write to the destination file"); throw; } } private void InsertWaitEvent(WorkloadEvent 
evnt) { var evt = (WaitStatsWorkloadEvent)evnt; var eventRowId = row_id++; _ = events_cmd.Parameters.AddWithValue("$row_id", eventRowId); _ = events_cmd.Parameters.AddWithValue("$event_sequence", null); _ = events_cmd.Parameters.AddWithValue("$event_type", evt.Type); _ = events_cmd.Parameters.AddWithValue("$start_time", evt.StartTime); _ = events_cmd.Parameters.AddWithValue("$client_app_name", null); _ = events_cmd.Parameters.AddWithValue("$client_host_name", null); _ = events_cmd.Parameters.AddWithValue("$database_name", null); _ = events_cmd.Parameters.AddWithValue("$server_principal_name", null); _ = events_cmd.Parameters.AddWithValue("$session_id", null); _ = events_cmd.Parameters.AddWithValue("$sql_text", null); _ = events_cmd.Parameters.AddWithValue("$cpu", null); _ = events_cmd.Parameters.AddWithValue("$duration", null); _ = events_cmd.Parameters.AddWithValue("$reads", null); _ = events_cmd.Parameters.AddWithValue("$writes", null); _ = events_cmd.ExecuteNonQuery(); foreach (DataRow dr in evt.Waits.Rows) { _ = waits_cmd.Parameters.AddWithValue("$row_id", eventRowId); _ = waits_cmd.Parameters.AddWithValue("$wait_type", dr["wait_type"]); _ = waits_cmd.Parameters.AddWithValue("$wait_sec", dr["wait_sec"]); _ = waits_cmd.Parameters.AddWithValue("$resource_sec", dr["resource_sec"]); _ = waits_cmd.Parameters.AddWithValue("$signal_sec", dr["signal_sec"]); _ = waits_cmd.Parameters.AddWithValue("$wait_count", dr["wait_count"]); _ = waits_cmd.ExecuteNonQuery(); } } private void InsertDiskPerfEvent(WorkloadEvent evnt) { var evt = (DiskPerfWorkloadEvent)evnt; var eventRowId = row_id++; _ = events_cmd.Parameters.AddWithValue("$row_id", eventRowId); _ = events_cmd.Parameters.AddWithValue("$event_sequence", null); _ = events_cmd.Parameters.AddWithValue("$event_type", evt.Type); _ = events_cmd.Parameters.AddWithValue("$start_time", evt.StartTime); _ = events_cmd.Parameters.AddWithValue("$client_app_name", null); _ = events_cmd.Parameters.AddWithValue("$client_host_name", 
null); _ = events_cmd.Parameters.AddWithValue("$database_name", null); _ = events_cmd.Parameters.AddWithValue("$server_principal_name", null); _ = events_cmd.Parameters.AddWithValue("$session_id", null); _ = events_cmd.Parameters.AddWithValue("$sql_text", null); _ = events_cmd.Parameters.AddWithValue("$cpu", null); _ = events_cmd.Parameters.AddWithValue("$duration", null); _ = events_cmd.Parameters.AddWithValue("$reads", null); _ = events_cmd.Parameters.AddWithValue("$writes", null); _ = events_cmd.ExecuteNonQuery(); foreach (DataRow dr in evt.DiskPerf.Rows) { _ = diskperf_cmd.Parameters.AddWithValue("$row_id", eventRowId); _ = diskperf_cmd.Parameters.AddWithValue("$database_name", dr["database_name"]); _ = diskperf_cmd.Parameters.AddWithValue("$physical_filename", dr["physical_filename"]); _ = diskperf_cmd.Parameters.AddWithValue("$logical_filename", dr["logical_filename"]); _ = diskperf_cmd.Parameters.AddWithValue("$file_type", dr["file_type"]); _ = diskperf_cmd.Parameters.AddWithValue("$volume_mount_point", dr["volume_mount_point"]); _ = diskperf_cmd.Parameters.AddWithValue("$read_latency_ms", dr["read_latency_ms"]); _ = diskperf_cmd.Parameters.AddWithValue("$reads", dr["reads"]); _ = diskperf_cmd.Parameters.AddWithValue("$read_bytes", dr["read_bytes"]); _ = diskperf_cmd.Parameters.AddWithValue("$write_latency_ms", dr["write_latency_ms"]); _ = diskperf_cmd.Parameters.AddWithValue("$writes", dr["writes"]); _ = diskperf_cmd.Parameters.AddWithValue("$write_bytes", dr["write_bytes"]); _ = diskperf_cmd.Parameters.AddWithValue("$cum_read_latency_ms", dr["cum_read_latency_ms"]); _ = diskperf_cmd.Parameters.AddWithValue("$cum_reads", dr["cum_reads"]); _ = diskperf_cmd.Parameters.AddWithValue("$cum_read_bytes", dr["cum_read_bytes"]); _ = diskperf_cmd.Parameters.AddWithValue("$cum_write_latency_ms", dr["cum_write_latency_ms"]); _ = diskperf_cmd.Parameters.AddWithValue("$cum_writes", dr["cum_writes"]); _ = diskperf_cmd.Parameters.AddWithValue("$cum_write_bytes", 
dr["cum_write_bytes"]); _ = diskperf_cmd.ExecuteNonQuery(); } } private void InsertCounterEvent(WorkloadEvent evnt) { var evt = (CounterWorkloadEvent)evnt; var eventRowId = row_id++; _ = events_cmd.Parameters.AddWithValue("$row_id", eventRowId); _ = events_cmd.Parameters.AddWithValue("$event_sequence", null); _ = events_cmd.Parameters.AddWithValue("$event_type", evt.Type); _ = events_cmd.Parameters.AddWithValue("$start_time", evt.StartTime); _ = events_cmd.Parameters.AddWithValue("$client_app_name", null); _ = events_cmd.Parameters.AddWithValue("$client_host_name", null); _ = events_cmd.Parameters.AddWithValue("$database_name", null); _ = events_cmd.Parameters.AddWithValue("$server_principal_name", null); _ = events_cmd.Parameters.AddWithValue("$session_id", null); _ = events_cmd.Parameters.AddWithValue("$sql_text", null); _ = events_cmd.Parameters.AddWithValue("$cpu", null); _ = events_cmd.Parameters.AddWithValue("$duration", null); _ = events_cmd.Parameters.AddWithValue("$reads", null); _ = events_cmd.Parameters.AddWithValue("$writes", null); _ = events_cmd.ExecuteNonQuery(); foreach (var dr in evt.Counters) { _ = counters_cmd.Parameters.AddWithValue("$row_id", eventRowId); _ = counters_cmd.Parameters.AddWithValue("$name", dr.Key.ToString()); _ = counters_cmd.Parameters.AddWithValue("$value", dr.Value); _ = counters_cmd.ExecuteNonQuery(); } } public void InitializeDatabase() { logger.Info("Writing event data to {OutputFile}", OutputFile); if (!File.Exists(OutputFile)) { _ = Directory.CreateDirectory(Directory.GetParent(OutputFile).FullName); SQLiteConnection.CreateFile(OutputFile); } var sqlCreateTable = $@" CREATE TABLE IF NOT EXISTS FileProperties ( name TEXT NOT NULL PRIMARY KEY, value TEXT NOT NULL ); CREATE TABLE IF NOT EXISTS Events ( row_id INTEGER PRIMARY KEY, event_sequence INTEGER, event_type INTEGER, start_time date NOT NULL, client_app_name TEXT NULL, client_host_name TEXT NULL, database_name TEXT NULL, server_principal_name TEXT NULL, session_id 
INTEGER NULL, sql_text TEXT NULL, cpu INTEGER NULL, duration INTEGER NULL, reads INTEGER NULL, writes INTEGER NULL ); CREATE UNIQUE INDEX IF NOT EXISTS Index_Session_ID_Event_Sequence ON Events( session_id ASC, event_sequence DESC ); CREATE INDEX IF NOT EXISTS Index_Start_Time_Row_ID ON Events( Start_Time ASC, Row_ID ASC ); CREATE TABLE IF NOT EXISTS Counters ( row_id INTEGER, name TEXT NULL, value FLOAT NULL ); CREATE TABLE IF NOT EXISTS Waits ( row_id INTEGER, wait_type TEXT NULL, wait_sec INTEGER NULL, resource_sec INTEGER NULL, signal_sec INTEGER NULL, wait_count INTEGER NULL ); CREATE TABLE IF NOT EXISTS DiskPerf ( row_id INTEGER, database_name TEXT NULL, physical_filename TEXT NULL, logical_filename TEXT NULL, file_type TEXT NULL, volume_mount_point TEXT NULL, read_latency_ms INTEGER NULL, reads INTEGER NULL, read_bytes INTEGER NULL, write_latency_ms INTEGER NULL, writes INTEGER NULL, write_bytes INTEGER NULL, cum_read_latency_ms INTEGER NULL, cum_reads INTEGER NULL, cum_read_bytes INTEGER NULL, cum_write_latency_ms INTEGER NULL, cum_writes INTEGER NULL, cum_write_bytes INTEGER NULL ); INSERT INTO FileProperties (name, value) SELECT 'FormatVersion','{Assembly.GetEntryAssembly().GetName().Version}' WHERE NOT EXISTS (SELECT * FROM FileProperties WHERE name = 'FormatVersion' );"; var sqlMaxSeq = @"SELECT COALESCE(MAX(row_id), 0) + 1 FROM Events;"; connectionString = "Data Source=" + OutputFile + ";Version=3;Cache Size=10000;Locking Mode=Exclusive;Journal Mode=Memory;"; using (var m_dbConnection = new SQLiteConnection(connectionString)) { m_dbConnection.Open(); try { var command = new SQLiteCommand(sqlCreateTable, m_dbConnection); _ = command.ExecuteNonQuery(); command = new SQLiteCommand(sqlMaxSeq, m_dbConnection); row_id = Convert.ToInt32(command.ExecuteScalar()); } catch (Exception) { throw; } } databaseInitialized = true; } protected override void Dispose(bool disposing) { logger.Info("Closing the connection to the output file"); // Signal ProcessBuffer to 
stop so it no longer competes for the connection
            stopped = true;

            // Wait for the background ProcessBuffer task to finish its current operation.
            // Use a timeout to avoid blocking indefinitely if the task is unresponsive.
            try
            {
                BufferReader?.Wait(TimeSpan.FromSeconds(60));
            }
            catch (Exception)
            {
                // Task may have already faulted; continue so we can flush remaining data
            }

            // At this point ProcessBuffer has stopped (stopped=true exits its loop) or
            // we timed out. Drain any events still sitting in the base-class Buffer into
            // the local cache so they are persisted before we close the connection.
            // ConcurrentQueue.TryDequeue is thread-safe even in the unlikely case that
            // the task is still winding down.
            if (databaseInitialized)
            {
                WorkloadEvent evt;
                while (Buffer.TryDequeue(out evt))
                {
                    if (evt != null)
                    {
                        cache.Enqueue(evt);
                    }
                }
            }

            forceFlush = true;
            if (conn != null)
            {
                Flush();
            }

            try
            {
                conn?.Close();
                conn?.Dispose();
                events_cmd?.Dispose();
                // FIX: events_update_cmd and diskperf_cmd were never disposed,
                // leaking their native SQLite statement handles.
                events_update_cmd?.Dispose();
                diskperf_cmd?.Dispose();
                waits_cmd?.Dispose();
                counters_cmd?.Dispose();
            }
            catch (Exception)
            {
                //ignore
            }
        }

        /// <summary>
        /// True while events are still waiting either in the local cache or in
        /// the base-class buffer.
        /// </summary>
        public override bool HasMoreEvents()
        {
            return cache.Count > 0 || Buffer.Count > 0;
        }
    }
}

================================================ FILE: WorkloadTools/CounterWorkloadEvent.cs ================================================
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace WorkloadTools
{
    [Serializable]
    public class CounterWorkloadEvent : WorkloadEvent
    {
        public enum CounterNameEnum
        {
            AVG_CPU_USAGE = 1
        }

        // Counter values keyed by counter name, populated by the listener.
        // NOTE(review): generic arguments reconstructed as <CounterNameEnum, float>
        // (the Counters table stores value as FLOAT) - confirm against the repo.
        public Dictionary<CounterNameEnum, float> Counters { get; internal set; } = new Dictionary<CounterNameEnum, float>();

        public CounterWorkloadEvent()
        {
            Type = EventType.PerformanceCounter;
        }
    }
}

================================================ FILE: WorkloadTools/DiskPerfWorkloadEvent.cs ================================================
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;

namespace WorkloadTools
{
    [Serializable]
    public class DiskPerfWorkloadEvent : WorkloadEvent
    {
        public DataTable
DiskPerf; public DiskPerfWorkloadEvent() { Type = EventType.DiskPerf; } } } ================================================ FILE: WorkloadTools/ErrorWorkloadEvent.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace WorkloadTools { [Serializable] public class ErrorWorkloadEvent : ExecutionWorkloadEvent { public ErrorWorkloadEvent() { Type = EventType.Error; } } } ================================================ FILE: WorkloadTools/ExecutionWorkloadEvent.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; namespace WorkloadTools { [Serializable] public class ExecutionWorkloadEvent : WorkloadEvent { public string Text { get; set; } public int? SPID { get; set; } public string ApplicationName { get; set; } public string DatabaseName { get; set; } public string LoginName { get; set; } public string HostName { get; set; } public long? Reads { get; set; } public long? Writes { get; set; } public long? CPU { get; set; } // MICROSECONDS public long? Duration { get; set; } // MICROSECONDS public long? 
EventSequence { get; set; } // This is the requested offset in milliseconds // from the the beginning of the workload public double ReplayOffset { get; set; } = 0; // MILLISECONDS } } ================================================ FILE: WorkloadTools/FilterPredicate.cs ================================================ using System; namespace WorkloadTools { public abstract class FilterPredicate { private string[] _predicateValue; public enum FilterColumnName : byte { DatabaseName = 35, HostName = 8, ApplicationName = 10, LoginName = 11 } public enum FilterComparisonOperator : byte { Equal = 0, Not_Equal = 1, Greater_Than = 2, Less_Than = 3, Greater_Than_Or_Equal = 4, Less_Than_Or_Equal = 5, LIKE = 6, NOT_LIKE = 7 } public FilterPredicate() { } public FilterColumnName ColumnName { get; set; } public string[] PredicateValue { get => _predicateValue; set { _predicateValue = value; if (value != null) { ComparisonOperator = new FilterComparisonOperator[_predicateValue.Length]; for (var i = 0; i < value.Length; i++) { var thisValue = value[i]; if (!string.IsNullOrEmpty(thisValue) && thisValue.StartsWith("^")) { _predicateValue[i] = thisValue.Substring(1); ComparisonOperator[i] = FilterComparisonOperator.Not_Equal; } else { ComparisonOperator[i] = FilterComparisonOperator.Equal; } } } } } public FilterComparisonOperator[] ComparisonOperator { get; set; } public bool IsPredicateSet { get { return PredicateValue != null; } } public bool IsPushedDown { get; set; } = false; public FilterPredicate(FilterColumnName name) { ColumnName = name; } public abstract string PushDown(); protected string EscapeFilter(string value) { return value.Replace("'", "''"); } public static string ComparisonOperatorAsString(FilterComparisonOperator op) { var result = string.Empty; switch (op) { case FilterComparisonOperator.Equal: result = "="; break; case FilterComparisonOperator.Not_Equal: result = "<>"; break; case FilterComparisonOperator.Greater_Than: result = ">"; break; case 
FilterComparisonOperator.Less_Than: result = "<"; break; case FilterComparisonOperator.Greater_Than_Or_Equal: result = ">="; break; case FilterComparisonOperator.Less_Than_Or_Equal: result = "<="; break; case FilterComparisonOperator.LIKE: result = "LIKE"; break; case FilterComparisonOperator.NOT_LIKE: result = "NOT LIKE"; break; } return result; } } } ================================================ FILE: WorkloadTools/IEventQueue.cs ================================================ using System; namespace WorkloadTools { public enum EventQueueType { MMF, Sqlite, LiteDB, BinarySerialized } public interface IEventQueue : IDisposable { int BufferSize { get; set; } bool TryDequeue(out WorkloadEvent result); bool HasMoreElements(); void Enqueue(WorkloadEvent evt); } } ================================================ FILE: WorkloadTools/Listener/ExtendedEvents/ExtendedEventsEventFilter.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; namespace WorkloadTools.Listener.ExtendedEvents { public class ExtendedEventsEventFilter : WorkloadEventFilter { public bool IsSqlAzure { get; set; } public ExtendedEventsEventFilter() { ApplicationFilter = new ExtendedEventsFilterPredicate(FilterPredicate.FilterColumnName.ApplicationName); DatabaseFilter = new ExtendedEventsFilterPredicate(FilterPredicate.FilterColumnName.DatabaseName); HostFilter = new ExtendedEventsFilterPredicate(FilterPredicate.FilterColumnName.HostName); LoginFilter = new ExtendedEventsFilterPredicate(FilterPredicate.FilterColumnName.LoginName); ((ExtendedEventsFilterPredicate)ApplicationFilter).IsSqlAzure = IsSqlAzure; ((ExtendedEventsFilterPredicate)DatabaseFilter).IsSqlAzure = IsSqlAzure; ((ExtendedEventsFilterPredicate)HostFilter).IsSqlAzure = IsSqlAzure; ((ExtendedEventsFilterPredicate)LoginFilter).IsSqlAzure = IsSqlAzure; } } } ================================================ FILE: 
WorkloadTools/Listener/ExtendedEvents/ExtendedEventsFilterPredicate.cs
================================================
using System;

namespace WorkloadTools.Listener.ExtendedEvents
{
    /// <summary>
    /// Translates a FilterPredicate into an Extended Events predicate
    /// expression, used as the WHERE clause of the XE session definition.
    /// </summary>
    public class ExtendedEventsFilterPredicate : FilterPredicate
    {
        public ExtendedEventsFilterPredicate(FilterColumnName name) : base(name)
        {
        }

        // When true, login names are filtered on sqlserver.username instead
        // of sqlserver.server_principal_name (used for Azure SQL Database).
        public bool IsSqlAzure { get; set; }

        /// <summary>
        /// Builds the XE predicate string for this filter and marks the
        /// predicate as pushed down. Returns an empty string when no filter
        /// values are set.
        /// </summary>
        public override string PushDown()
        {
            if (!IsPredicateSet)
            {
                return string.Empty;
            }

            IsPushedDown = true;

            var result = "(";

            // Implementing multivalued filters with negative values
            // requires analyzing the syntax of the filters
            //
            // Let's say I have a filter like this:
            // "DatabaseFilter" = ["master","model","^tempdb","msdb"]
            //
            // It literally says I want master, model and msdb, but I don't want tempdb
            // In this case, it means that I want master, model and msdb
            // But if I only had negative filters, it would mean anything but those databases.
            var hasPositives = false;
            var hasNegatives = false;
            for (var i = 0; i < ComparisonOperator.Length; i++)
            {
                if (ComparisonOperator[i] == FilterComparisonOperator.Not_Equal)
                {
                    hasNegatives = true;
                }
                else
                {
                    hasPositives = true;
                }
            }

            for (var i = 0; i < PredicateValue.Length; i++)
            {
                if (hasNegatives && hasPositives && ComparisonOperator[i] == FilterComparisonOperator.Not_Equal)
                {
                    // Mixed positives and negatives: only the positives matter
                    continue;
                }
                if (i > 0)
                {
                    // Only-negative filters are ANDed together (exclude all of
                    // them); otherwise the values are ORed.
                    if (hasNegatives && !hasPositives)
                    {
                        result += " AND ";
                    }
                    else
                    {
                        result += " OR ";
                    }
                }
                // Map the logical column to the corresponding XE action/field.
                switch (ColumnName)
                {
                    case FilterColumnName.ApplicationName:
                        result += "sqlserver.client_app_name";
                        break;
                    case FilterColumnName.HostName:
                        result += "sqlserver.client_hostname";
                        break;
                    case FilterColumnName.LoginName:
                        if (IsSqlAzure)
                        {
                            result += "sqlserver.username";
                        }
                        else
                        {
                            result += "sqlserver.server_principal_name";
                        }
                        break;
                    case FilterColumnName.DatabaseName:
                        result += "sqlserver.database_name";
                        break;
                }
                result += " " + FilterPredicate.ComparisonOperatorAsString(ComparisonOperator[i]) + " N'" + EscapeFilter(PredicateValue[i]) + "'";
            }
result += ")"; return result; } } } ================================================ FILE: WorkloadTools/Listener/ExtendedEvents/ExtendedEventsWorkloadListener.cs ================================================ using Microsoft.SqlServer.XEvent.Linq; using NLog; using System; using System.Collections.Generic; using System.Data.SqlClient; using System.IO; using System.Linq; using System.Text; using System.Threading; using System.Threading.Tasks; namespace WorkloadTools.Listener.ExtendedEvents { public class ExtendedEventsWorkloadListener : WorkloadListener { private static readonly Logger logger = LogManager.GetCurrentClassLogger(); private SpinWait spin = new SpinWait(); protected XEventDataReader reader; public string SessionName { get; set; } = "sqlworkload"; public bool ReuseExistingSession { get; set; } = false; public enum ServerType { FullInstance, AzureSqlDatabase, AzureSqlManagedInstance, LocalDB } private ServerType serverType { get; set; } // Path to the file target // Mandatory on SqlAzure // If not specified, On Premises SQLServer will use the streaming API public string FileTargetPath { get; set; } private long eventCount; public ExtendedEventsWorkloadListener() : base() { Filter = new ExtendedEventsEventFilter(); Source = WorkloadController.BaseLocation + "\\Listener\\ExtendedEvents\\sqlworkload.sql"; } public override void Initialize() { using (var conn = new SqlConnection()) { if (ConnectionInfo == null) { throw new ArgumentNullException("You need to provide ConnectionInfo to inizialize an ExtendedEventsWorkloadListener"); } conn.ConnectionString = ConnectionInfo.ConnectionString(); conn.Open(); LoadServerType(conn); if (serverType == ServerType.AzureSqlDatabase) { if (ConnectionInfo.DatabaseName == null) { throw new ArgumentException("Azure SqlDatabase does not support starting Extended Events sessions on the master database. 
Please specify a database name."); } ((ExtendedEventsEventFilter)Filter).IsSqlAzure = true; } else { ConnectionInfo.DatabaseName = "master"; } logger.Info($"Reading Extended Events session definition from {Source}"); string sessionSql = null; try { sessionSql = System.IO.File.ReadAllText(Source); // Push Down EventFilters var filters = string.Empty; var appFilter = Filter.ApplicationFilter.PushDown(); var dbFilter = Filter.DatabaseFilter.PushDown(); var hostFilter = Filter.HostFilter.PushDown(); var loginFilter = Filter.LoginFilter.PushDown(); if (appFilter != string.Empty) { filters += ((filters == string.Empty) ? string.Empty : " AND ") + appFilter; } if (dbFilter != string.Empty) { filters += ((filters == string.Empty) ? string.Empty : " AND ") + dbFilter; } if (hostFilter != string.Empty) { filters += ((filters == string.Empty) ? string.Empty : " AND ") + hostFilter; } if (loginFilter != string.Empty) { filters += ((filters == string.Empty) ? string.Empty : " AND ") + loginFilter; } if (filters != string.Empty) { filters = "WHERE " + filters; } var sessionType = serverType == ServerType.AzureSqlDatabase ? "DATABASE" : "SERVER"; var principalName = serverType == ServerType.AzureSqlDatabase ? "username" : "server_principal_name"; sessionSql = string.Format(sessionSql, filters, sessionType, principalName); } catch (Exception e) { throw new ArgumentException("Cannot open the source script to start the extended events session", e); } if (!ReuseExistingSession) { StopSession(conn); using (var cmd = conn.CreateCommand()) { cmd.CommandText = sessionSql; _ = cmd.ExecuteNonQuery(); } if (FileTargetPath != null) { var sql = @" ALTER EVENT SESSION [{2}] ON {0} ADD TARGET package0.event_file(SET filename=N'{1}',max_file_size=(100)) "; sql = string.Format(sql, serverType == ServerType.FullInstance ? 
"SERVER" : "DATABASE", FileTargetPath, SessionName); using (var cmd = conn.CreateCommand()) { cmd.CommandText = sql; _ = cmd.ExecuteNonQuery(); } } } // Mark the transaction SetTransactionMark(serverType != ServerType.AzureSqlDatabase); _ = Task.Factory.StartNew(() => ReadEvents()); //Initialize the source of performance counters events _ = Task.Factory.StartNew(() => ReadPerfCountersEvents()); // Initialize the source of wait stats events _ = Task.Factory.StartNew(() => ReadWaitStatsEvents()); // Initialize the source of disk performance stats _ = Task.Factory.StartNew(() => ReadDiskPerformanceEvents()); } } public override WorkloadEvent Read() { try { WorkloadEvent result = null; while (!Events.TryDequeue(out result)) { if (stopped) { return null; } spin.SpinOnce(); } eventCount++; return result; } catch (Exception) { if (stopped) { return null; } else { throw; } } } protected override void Dispose(bool disposing) { stopped = true; try { logger.Info($"Disposing ExtendedEventsWorkloadListener."); logger.Debug($"[{eventCount}] events read."); logger.Debug($"Events in the queue? 
[{Events.HasMoreElements()}]"); if(reader != null) { reader.Stop(); } if (!ReuseExistingSession) { using (var conn = new SqlConnection()) { conn.ConnectionString = ConnectionInfo.ConnectionString(); conn.Open(); StopSession(conn); } } } catch (Exception x) { // swallow logger.Warn($"Error disposing ExtendedEventWorkloadListener: {x.Message}"); } logger.Info($"Extended Events session [{SessionName}] stopped successfully."); } private void StopSession(SqlConnection conn) { var sql = @" DECLARE @condition bit = 0; IF SERVERPROPERTY('Edition') = 'SQL Azure' AND SERVERPROPERTY('EngineEdition') = 5 BEGIN SELECT @condition = 1 WHERE EXISTS ( SELECT * FROM sys.database_event_sessions WHERE name = '{1}' ) END ELSE BEGIN SELECT @condition = 1 WHERE EXISTS ( SELECT * FROM sys.server_event_sessions WHERE name = '{1}' ) END IF @condition = 1 BEGIN BEGIN TRY ALTER EVENT SESSION [{1}] ON {0} STATE = STOP; END TRY BEGIN CATCH -- whoops... PRINT ERROR_MESSAGE() END CATCH BEGIN TRY DROP EVENT SESSION [{1}] ON {0}; END TRY BEGIN CATCH -- whoops... PRINT ERROR_MESSAGE() END CATCH END "; sql = string.Format(sql, serverType == ServerType.AzureSqlDatabase ? 
"DATABASE" : "SERVER", SessionName); using (var cmd = conn.CreateCommand()) { cmd.CommandText = sql; _ = cmd.ExecuteNonQuery(); } } private void ReadEvents() { try { if (FileTargetPath == null) { reader = new StreamXEventDataReader(ConnectionInfo.ConnectionString(), SessionName, Events); } else { reader = new FileTargetXEventDataReader(ConnectionInfo.ConnectionString(), SessionName, Events, serverType); } reader.ReadEvents(); } catch (Exception ex) { if (!stopped) { logger.Error(ex.Message); logger.Error(ex.StackTrace); if (ex.InnerException != null) { logger.Error(ex.InnerException.Message); } Dispose(); } else { logger.Warn(ex, "The shutdown workflow generated a warning:"); } } } private void LoadServerType(SqlConnection conn) { using (var cmd = conn.CreateCommand()) { cmd.CommandText = "SELECT SERVERPROPERTY('Edition')"; var edition = (string)cmd.ExecuteScalar(); cmd.CommandText = "SELECT SERVERPROPERTY('EngineEdition')"; var engineEdition = (int)cmd.ExecuteScalar(); if (edition == "SQL Azure") { serverType = ServerType.AzureSqlDatabase; if (engineEdition == 8) { serverType = ServerType.AzureSqlManagedInstance; } } else { serverType = ServerType.FullInstance; } } } } } ================================================ FILE: WorkloadTools/Listener/ExtendedEvents/FileTargetXEventDataReader.cs ================================================ using NLog; using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Data.SqlClient; using System.Linq; using System.Text; using System.Threading; using System.Xml; using System.Xml.Linq; using WorkloadTools.Util; namespace WorkloadTools.Listener.ExtendedEvents { public class FileTargetXEventDataReader : XEventDataReader, IDisposable { private readonly RingBuffer ReadIterations = new RingBuffer(10); private static readonly Logger logger = LogManager.GetCurrentClassLogger(); private bool stopped = false; public FileTargetXEventDataReader(string connectionString, string sessionName, 
IEventQueue events, ExtendedEventsWorkloadListener.ServerType serverType) : base(connectionString, sessionName, events, serverType) { } public override void ReadEvents() { try { while (!stopped) { using (var conn = new SqlConnection()) { conn.ConnectionString = ConnectionString; conn.Open(); ReadIteration previousIteration = null; if(ReadIterations.Count > 0) { previousIteration = ReadIterations.Last(); } var currentIteration = InitializeReadIteration(conn, previousIteration); if (currentIteration != null) { ReadIterations.Add(currentIteration); } else { Stop(); break; } ReadXEData(conn, currentIteration); // if reading from localdb one iteration is enough if (ServerType == ExtendedEventsWorkloadListener.ServerType.LocalDB) { break; } } } logger.Info($"{EventCount} events captured."); } catch (Exception ex) { logger.Error(ex.Message); logger.Error(ex.StackTrace); if (ex.InnerException != null) { logger.Error(ex.InnerException.Message); } } } private void ReadXEData(SqlConnection conn, ReadIteration currentIteration) { var sqlXE = @" SELECT event_data, file_name, file_offset FROM sys.fn_xe_file_target_read_file( @filename, NULL, @initial_file_name, @initial_offset ) "; logger.Debug("Reading XE data..."); using (var cmd = conn.CreateCommand()) { cmd.CommandText = sqlXE; cmd.CommandTimeout = 0; var paramPath = cmd.Parameters.Add("@filename", System.Data.SqlDbType.NVarChar, 260); if (ServerType != ExtendedEventsWorkloadListener.ServerType.AzureSqlDatabase) { paramPath.Value = currentIteration.GetXEFilePattern(); } else { // Azure SqlDatabase does not support wildcards in file names // Specify an exact file name paramPath.Value = currentIteration.StartFileName; } var paramInitialFile = cmd.Parameters.Add("@initial_file_name", System.Data.SqlDbType.NVarChar, 260); paramInitialFile.Value = currentIteration.StartFileName; var paramInitialOffset = cmd.Parameters.Add("@initial_offset", System.Data.SqlDbType.BigInt); paramInitialOffset.Value = 
currentIteration.GetInitialOffset(); // don't pass initial file name and offset // read directly from the initial file // until we have some rows read already if ( EventCount == 0 || currentIteration.StartOffset <=0 || currentIteration.StartOffset == currentIteration.MinOffset ) { if (ServerType != ExtendedEventsWorkloadListener.ServerType.LocalDB) { paramPath.Value = currentIteration.StartFileName; } paramInitialFile.Value = DBNull.Value; paramInitialOffset.Value = DBNull.Value; } retryWithNULLS: logger.Debug($"paramPath : {paramPath.Value}"); logger.Debug($"paramInitialFile : {paramInitialFile.Value}"); logger.Debug($"paramInitialOffset: {paramInitialOffset.Value}"); // in case we don't have any data in the xe file // GetInitialOffset returns -1 and we need to wait a bit // to let events flow to the file target if (currentIteration.GetInitialOffset() > 0) { var transformer = new SqlTransformer(); using (var reader = cmd.ExecuteReader()) { try { var skippedRows = 0; while (reader.Read()) { if (reader["file_name"] != DBNull.Value) { currentIteration.EndFileName = (string)reader["file_name"]; } if (reader["file_offset"] != DBNull.Value) { currentIteration.EndOffset = (long)reader["file_offset"]; } var xmldata = (string)reader["event_data"]; var doc = new XmlDocument(); doc.LoadXml(xmldata); var evt = parseEvent(doc); // skip to the correct event in case we're reading again // from the same file and we have a reference sequence if ((currentIteration.RowsRead == 0) && (currentIteration.StartSequence > 0)) { // skip rows until we encounter the reference event_sequence if (evt.EventSequence != currentIteration.StartSequence) { skippedRows++; continue; } else { // skip one more row... 
skippedRows++; currentIteration.RowsRead++; continue; } } // this is only to print out a message, so consider // getting rid of it if (skippedRows > 0) { logger.Debug($"Skipped rows: {skippedRows}"); skippedRows = 0; } // now we have an event, no matter if good or bad => increment rows read currentIteration.RowsRead++; if (evt.EventSequence != null) { currentIteration.EndSequence = (long)evt.EventSequence; } if (evt.Type == WorkloadEvent.EventType.Unknown) { continue; } if (evt.Type == WorkloadEvent.EventType.BatchStarting || evt.Type == WorkloadEvent.EventType.BatchCompleted || evt.Type == WorkloadEvent.EventType.RPCStarting || evt.Type == WorkloadEvent.EventType.RPCCompleted || evt.Type == WorkloadEvent.EventType.Message) { if (transformer.Skip(evt.Text)) { continue; } evt.Text = transformer.Transform(evt.Text); } // it's a "good" event: add it to the queue Events.Enqueue(evt); EventCount++; } logger.Debug($"currentIteration.EndSequence : {currentIteration.EndSequence}"); } catch (Exception xx) { if (xx.Message.Contains("Specify an offset that exists in the log file")) { // retry the query without specifying the offset / file pair paramInitialFile.Value = DBNull.Value; paramInitialOffset.Value = DBNull.Value; goto retryWithNULLS; } else { throw; } } } } // Wait before querying the events file again if (currentIteration.RowsRead < ReadIteration.DEFAULT_TRACE_ROWS_SLEEP_THRESHOLD) { Thread.Sleep(ReadIteration.DEFAULT_TRACE_INTERVAL_SECONDS * 1000); } } } private ReadIteration InitializeReadIteration(SqlConnection conn, ReadIteration previous) { var sqlPath = @" SELECT file_name, ISNULL(file_offset,-1) AS file_offset FROM ( SELECT CAST(target_data AS xml).value('(/EventFileTarget/File/@name)[1]','nvarchar(1000)') AS file_name FROM sys.dm_xe_{0}session_targets AS t INNER JOIN sys.dm_xe_{0}sessions AS s ON t.event_session_address = s.address WHERE s.name = @sessionName AND target_name = 'event_file' ) AS fileName OUTER APPLY ( SELECT TOP(1) file_offset FROM 
fn_xe_file_target_read_file(file_name,NULL,NULL,NULL) ) AS fileOffset; "; var sqlPathLocaldb = @" IF OBJECT_ID('tempdb.dbo.trace_reader_queue') IS NOT NULL BEGIN SELECT TOP(1) path, CAST(1 AS bigint) AS file_offset FROM tempdb.dbo.trace_reader_queue ORDER BY ts DESC END ELSE BEGIN SELECT '' AS path, CAST(-1 AS bigint) AS file_offset END "; var databaseSuffix = ServerType == ExtendedEventsWorkloadListener.ServerType.AzureSqlDatabase ? "database_" : ""; ReadIteration currentIteration = null; using (var cmdPath = conn.CreateCommand()) { if (ServerType == ExtendedEventsWorkloadListener.ServerType.LocalDB) { cmdPath.CommandText = sqlPathLocaldb; } else { cmdPath.CommandText = string.Format(sqlPath, databaseSuffix); var paramSessionName = cmdPath.Parameters.Add("@sessionName", System.Data.SqlDbType.NVarChar, 260); paramSessionName.Value = SessionName; } try { logger.Debug("Initializing read iteration"); using (var reader = cmdPath.ExecuteReader()) { // should return only one row if (reader.Read()) { currentIteration = new ReadIteration() { StartFileName = reader.GetString(0), MinOffset = reader.GetInt64(1) }; currentIteration.EndFileName = currentIteration.StartFileName; if (previous != null) { //if we have a previous iteration, keep reading from that file first currentIteration.StartFileName = previous.EndFileName; // we need to read the file from the previous distinct offset // to avoid skipping events. 
The function fn_xe_file_target_read_file // will skip all events up to the @initial_offset INCLUDED, // so we need to start from the previous offset and skip some rows currentIteration.StartOffset = ReadIteration.GetSecondLastOffset(currentIteration.StartFileName); // we will use the previous event sequence as the boundary to where // we need to start reading events again currentIteration.StartSequence = previous.EndSequence; currentIteration.EndSequence = previous.EndSequence; // if reading from localdb we don't need to wait for more data if (ServerType == ExtendedEventsWorkloadListener.ServerType.LocalDB) { if ( (currentIteration.StartFileName == previous.StartFileName) && (currentIteration.StartSequence == previous.StartSequence) ) { return null; } } } logger.Debug($"currentIteration.StartFileName: {currentIteration.StartFileName}"); logger.Debug($"currentIteration.MinOffset : {currentIteration.MinOffset}"); logger.Debug($"currentIteration.EndFileName : {currentIteration.EndFileName}"); logger.Debug($"currentIteration.StartOffset : {currentIteration.StartOffset}"); logger.Debug($"currentIteration.StartSequence: {currentIteration.StartSequence}"); } } } catch (Exception e) { logger.Error(e.StackTrace); throw; } } return currentIteration; } // Parses all event data from the the data reader private ExecutionWorkloadEvent parseEvent(XmlDocument doc) { var evt = new ExecutionWorkloadEvent(); var eventNode = doc.DocumentElement.SelectSingleNode("/event"); var name = eventNode.Attributes["name"].InnerText; if (name == "sql_batch_completed") { evt.Type = WorkloadEvent.EventType.BatchCompleted; } else if (name == "rpc_completed") { evt.Type = WorkloadEvent.EventType.RPCCompleted; } else if (name == "sql_batch_starting") { evt.Type = WorkloadEvent.EventType.BatchStarting; } else if (name == "rpc_starting") { evt.Type = WorkloadEvent.EventType.RPCStarting; } else if (name == "login") { evt.Type = WorkloadEvent.EventType.RPCStarting; } else if (name == "attention") { 
evt.Type = WorkloadEvent.EventType.Timeout; } else if (name == "user_event") { evt.Type = WorkloadEvent.EventType.Error; } else { evt.Type = WorkloadEvent.EventType.Unknown; return evt; } var timestamp = DateTimeOffset.Parse(eventNode.Attributes["timestamp"].Value); evt.StartTime = timestamp.LocalDateTime; foreach (XmlNode node in eventNode.ChildNodes) { switch ((string)node.Attributes["name"].Value) { case "statement": if (evt.Type == WorkloadEvent.EventType.RPCCompleted || evt.Type == WorkloadEvent.EventType.RPCStarting) { evt.Text = (string)node.FirstChild.FirstChild.Value; } break; case "batch_text": if (evt.Type == WorkloadEvent.EventType.BatchCompleted || evt.Type == WorkloadEvent.EventType.BatchStarting) { evt.Text = (string)node.FirstChild.FirstChild.Value; } break; case "sql_text": if (evt.Type == WorkloadEvent.EventType.Timeout) { evt.Text = (string)node.FirstChild.FirstChild.Value; } break; case "client_app_name": evt.ApplicationName = (string)node.FirstChild.FirstChild.Value; break; case "database_name": evt.DatabaseName = (string)node.FirstChild.FirstChild.Value; break; case "client_hostname": evt.HostName = (string)node.FirstChild.FirstChild.Value; break; case "server_principal_name": evt.LoginName = (string)node.FirstChild.FirstChild.Value; break; case "username": evt.LoginName = (string)node.FirstChild.FirstChild.Value; break; case "session_id": evt.SPID = Convert.ToInt32(node.FirstChild.FirstChild.Value); break; case "cpu_time": evt.CPU = Convert.ToInt64(node.FirstChild.FirstChild.Value); break; case "duration": evt.Duration = Convert.ToInt64(node.FirstChild.FirstChild.Value); if (evt.Type == WorkloadEvent.EventType.Timeout) { evt.CPU = Convert.ToInt64(evt.Duration); } break; case "logical_reads": evt.Reads = Convert.ToInt64(node.FirstChild.FirstChild.Value); break; case "writes": evt.Writes = Convert.ToInt64(node.FirstChild.FirstChild.Value); break; case "user_data": evt.Text = (string)node.FirstChild.FirstChild.Value; break; case 
"event_sequence": evt.EventSequence = Convert.ToInt64(node.FirstChild.FirstChild.Value); break; case "is_cached": var vIsCached = Convert.ToBoolean(node.FirstChild.FirstChild.Value); if (!vIsCached) /* If is not cached then consider it a new login */ { // A nonpooled login will trigger Login event with EventSubClass = 1 // Setting text to sp_reset_connection and including comment on to // be able to understand this is a nonpooled login on replay evt.Text = "exec sp_reset_connection /*Nonpooled*/"; } else { evt.Type = WorkloadEvent.EventType.Unknown; evt.Text = ""; return evt; } break; default: break; } } return evt; } public override void Stop() { stopped = true; } public void Dispose() { Events.Dispose(); } } } ================================================ FILE: WorkloadTools/Listener/ExtendedEvents/StreamXEventDataReader.cs ================================================ using Microsoft.SqlServer.XEvent.Linq; using NLog; using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Text; using static WorkloadTools.Listener.Trace.TraceEventParser; namespace WorkloadTools.Listener.ExtendedEvents { public class StreamXEventDataReader : XEventDataReader { private enum FieldType { Action, Field } private static readonly Logger logger = LogManager.GetCurrentClassLogger(); private bool stopped; public StreamXEventDataReader(string connectionString, string sessionName, IEventQueue events) : base(connectionString, sessionName, events, ExtendedEventsWorkloadListener.ServerType.AzureSqlDatabase) { } public override void ReadEvents() { EventCount = 0; var transformer = new SqlTransformer(); using (var eventstream = new QueryableXEventData( ConnectionString, SessionName, EventStreamSourceOptions.EventStream, EventStreamCacheOptions.CacheToDisk)) { var eventsEnumerator = eventstream.GetEnumerator(); while (!stopped) { if (eventsEnumerator.MoveNext()) { var evt = eventsEnumerator.Current; var workloadEvent = new 
ExecutionWorkloadEvent(); try { workloadEvent.EventSequence = Convert.ToInt64(TryGetValue(evt, FieldType.Action, "event_sequence")); var commandText = string.Empty; if (evt.Name == "rpc_starting") { commandText = (string)TryGetValue(evt, FieldType.Field, "statement"); workloadEvent.Type = WorkloadEvent.EventType.RPCStarting; } else if (evt.Name == "sql_batch_starting") { commandText = (string)TryGetValue(evt, FieldType.Field, "batch_text"); workloadEvent.Type = WorkloadEvent.EventType.BatchStarting; } else if (evt.Name == "rpc_completed") { commandText = (string)TryGetValue(evt, FieldType.Field, "statement"); workloadEvent.Type = WorkloadEvent.EventType.RPCCompleted; } else if (evt.Name == "sql_batch_completed") { commandText = (string)TryGetValue(evt, FieldType.Field, "batch_text"); workloadEvent.Type = WorkloadEvent.EventType.BatchCompleted; } else if (evt.Name == "login") { var vIsCached = Convert.ToBoolean(TryGetValue(evt, FieldType.Field, "is_cached")); if (!vIsCached) /* If is not cached then consider it a new login */ { workloadEvent.Type = WorkloadEvent.EventType.RPCStarting; // A nonpooled login will trigger Login event with EventSubClass = 1 // Setting text to sp_reset_connection and including comment on to // be able to understand this is a nonpooled login on replay commandText = "exec sp_reset_connection /*Nonpooled*/"; } else { workloadEvent.Type = WorkloadEvent.EventType.Unknown; continue; } } else if (evt.Name == "attention") { workloadEvent = new ErrorWorkloadEvent(); var value = TryGetValue(evt, FieldType.Action, "sql_text"); if (value == null) { continue; } try { if (value is string stringValue) { commandText = stringValue; } else if (value is byte[] byteValue) { commandText = Encoding.Unicode.GetString(byteValue); } else { throw new ArgumentException("Argument is of the wrong type"); } } catch (Exception e) { logger.Error(e, $"Unable to extract sql_text from attention event. 
Value is of type ${value.GetType().FullName}"); } workloadEvent.Text = commandText; workloadEvent.Type = WorkloadEvent.EventType.Timeout; } else if (evt.Name == "user_event") { workloadEvent = new ErrorWorkloadEvent(); var num = (int)TryGetValue(evt, FieldType.Field, "event_id"); if (num == 83 || num == 82) { if (TryGetString(evt, FieldType.Field, "user_info").StartsWith("WorkloadTools.")) { commandText = TryGetString(evt, FieldType.Field, "user_data"); workloadEvent.Text = commandText; if (num == 83) { workloadEvent.Type = WorkloadEvent.EventType.Error; } else { workloadEvent.Type = WorkloadEvent.EventType.Timeout; } } else { workloadEvent.Type = WorkloadEvent.EventType.Unknown; continue; } } } else { workloadEvent.Type = WorkloadEvent.EventType.Unknown; continue; } try { workloadEvent.ApplicationName = TryGetString(evt, FieldType.Action, "client_app_name"); workloadEvent.DatabaseName = TryGetString(evt, FieldType.Action, "database_name"); workloadEvent.HostName = TryGetString(evt, FieldType.Action, "client_hostname"); workloadEvent.LoginName = TryGetString(evt, FieldType.Action, "server_principal_name"); workloadEvent.SPID = TryGetInt32(evt, FieldType.Action, "session_id"); if (commandText != null) { workloadEvent.Text = commandText; } workloadEvent.StartTime = evt.Timestamp.LocalDateTime; if (workloadEvent.Type == WorkloadEvent.EventType.Error) { workloadEvent.Duration = 0; workloadEvent.CPU = 0; } else if (workloadEvent.Type == WorkloadEvent.EventType.Timeout) { workloadEvent.Duration = TryGetInt64(evt, FieldType.Field, "duration"); workloadEvent.CPU = Convert.ToInt64(workloadEvent.Duration); } else { if (evt.Name == "rpc_completed" || evt.Name == "sql_batch_completed") { workloadEvent.Reads = TryGetInt64(evt, FieldType.Field, "logical_reads"); workloadEvent.Writes = TryGetInt64(evt, FieldType.Field, "writes"); workloadEvent.CPU = TryGetInt64(evt, FieldType.Field, "cpu_time"); workloadEvent.Duration = TryGetInt64(evt, FieldType.Field, "duration"); } } } catch 
(Exception e) { logger.Error(e, "Error converting XE data from the stream."); throw; } // preprocess and filter events if (workloadEvent.Type == WorkloadEvent.EventType.BatchStarting || workloadEvent.Type == WorkloadEvent.EventType.BatchCompleted || workloadEvent.Type == WorkloadEvent.EventType.RPCStarting || workloadEvent.Type == WorkloadEvent.EventType.RPCCompleted || workloadEvent.Type == WorkloadEvent.EventType.Message) { if (transformer.Skip(workloadEvent.Text)) { continue; } workloadEvent.Text = transformer.Transform(workloadEvent.Text); } Events.Enqueue(workloadEvent); EventCount++; } catch (Exception ex) { logger.Error($"Error converting XE data from the stream: {ex.Message}"); try { logger.Error($" event type : {workloadEvent.Type}"); logger.Error($" client_app_name : {TryGetString(evt, FieldType.Action, "client_app_name")}"); logger.Error($" database_name : {TryGetString(evt, FieldType.Action, "database_name")}"); logger.Error($" client_hostname : {TryGetString(evt, FieldType.Action, "client_hostname")}"); logger.Error($" server_principal_name : {TryGetString(evt, FieldType.Action, "server_principal_name")}"); logger.Error($" session_id : {TryGetString(evt, FieldType.Action, "session_id")}"); logger.Error($" duration : {TryGetString(evt, FieldType.Field, "duration")}"); logger.Error($" logical_reads : {TryGetString(evt, FieldType.Field, "logical_reads")}"); logger.Error($" writes : {TryGetString(evt, FieldType.Field, "writes")}"); logger.Error($" cpu_time : {TryGetString(evt, FieldType.Field, "cpu_time")}"); } catch (Exception) { //ignore, it is only logging } throw; } } } } } private object TryGetValue(PublishedEvent evt, FieldType t, string name) { object result = null; if (t == FieldType.Action) { if (evt.Actions.TryGetValue(name, out var act)) { result = act.Value; } } else { if (evt.Fields.TryGetValue(name, out var fld)) { result = fld.Value; } } // check whether last char is a null char (\0) // because this breaks writing this string to the sqlite 
// database — which considers it as a BLOB
        if (result is string stringValue)
        {
            // Trim trailing NUL characters (see comment above: they make
            // SQLite store the text as a BLOB).
            while (stringValue.EndsWith("\0"))
            {
                stringValue = stringValue.Remove(stringValue.Length - 1);
            }
            result = stringValue;
        }
        return result;
    }

    // Reads an action/field value as a string. Byte arrays are decoded as
    // UTF-16 (how XE stores unicode text); DBNull and missing values map to
    // null; any other type is rejected.
    private string TryGetString(PublishedEvent evt, FieldType t, string name)
    {
        var tmp = TryGetValue(evt, t, name);
        if(tmp != null && tmp.GetType() != typeof(DBNull))
        {
            if (tmp is string strinValue)
            {
                return strinValue;
            }
            else if (tmp is byte[] byteValue)
            {
                return Encoding.Unicode.GetString(byteValue);
            }
            else
            {
                throw new ArgumentException("Argument is of the wrong type");
            }
        }
        else
        {
            return null;
        }
    }

    // Reads an action/field value as a nullable Int32 (null for missing/DBNull).
    private int? TryGetInt32(PublishedEvent evt, FieldType t, string name)
    {
        var tmp = TryGetValue(evt, t, name);
        if (tmp != null && tmp.GetType() != typeof(DBNull))
        {
            return Convert.ToInt32(tmp);
        }
        else
        {
            return null;
        }
    }

    // Reads an action/field value as a nullable Int64 (null for missing/DBNull).
    private long? TryGetInt64(PublishedEvent evt, FieldType t, string name)
    {
        var tmp = TryGetValue(evt, t, name);
        if (tmp != null && tmp.GetType() != typeof(DBNull))
        {
            return Convert.ToInt64(tmp);
        }
        else
        {
            return null;
        }
    }

    // Signals ReadEvents() to exit its streaming loop.
    public override void Stop()
    {
        stopped = true;
    }
    }
}

================================================
FILE: WorkloadTools/Listener/ExtendedEvents/XEventDataReader.cs
================================================
using Microsoft.SqlServer.XEvent.Linq;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace WorkloadTools.Listener.ExtendedEvents
{
    /// <summary>
    /// Base class for readers that pull Extended Events data (from a live
    /// stream or from a file target) and enqueue parsed workload events.
    /// </summary>
    public abstract class XEventDataReader
    {
        public string ConnectionString { get; set; }

        public string SessionName { get; set; }

        // Destination queue for the parsed events.
        public IEventQueue Events { get; set; }

        // Number of events enqueued so far (maintained by subclasses).
        public long EventCount { get; protected set; }

        public ExtendedEventsWorkloadListener.ServerType ServerType { get; set; }

        public XEventDataReader(
            string connectionString,
            string sessionName,
            IEventQueue events,
            ExtendedEventsWorkloadListener.ServerType serverType
        )
        {
            ConnectionString = connectionString;
            SessionName = sessionName;
            Events = events;
ServerType = serverType; } public abstract void ReadEvents(); public abstract void Stop(); } } ================================================ FILE: WorkloadTools/Listener/ExtendedEvents/sqlworkload.sql ================================================  CREATE EVENT SESSION [sqlworkload] ON {1} ADD EVENT sqlserver.attention ( ACTION( package0.event_sequence, sqlserver.client_app_name, sqlserver.client_hostname, sqlserver.database_id, sqlserver.database_name, sqlserver.{2}, sqlserver.session_id, sqlserver.sql_text ) {0} ), ADD EVENT sqlserver.rpc_starting ( ACTION( package0.event_sequence, sqlserver.client_app_name, sqlserver.client_hostname, sqlserver.database_id, sqlserver.database_name, sqlserver.{2}, sqlserver.session_id ) {0} ), ADD EVENT sqlserver.rpc_completed ( SET collect_data_stream = (0), collect_output_parameters = (1), collect_statement = (1) ACTION( package0.event_sequence, sqlserver.client_app_name, sqlserver.client_hostname, sqlserver.database_id, sqlserver.database_name, sqlserver.{2}, sqlserver.session_id ) {0} ), ADD EVENT sqlserver.sql_batch_starting ( ACTION( package0.event_sequence, sqlserver.client_app_name, sqlserver.client_hostname, sqlserver.database_id, sqlserver.database_name, sqlserver.{2}, sqlserver.session_id ) {0} ), ADD EVENT sqlserver.sql_batch_completed ( SET collect_batch_text = (1) ACTION( package0.event_sequence, sqlserver.client_app_name, sqlserver.client_hostname, sqlserver.database_id, sqlserver.database_name, sqlserver.{2}, sqlserver.session_id ) {0} ), ADD EVENT sqlserver.login ( ACTION( package0.event_sequence, sqlserver.client_app_name, sqlserver.client_hostname, sqlserver.database_id, sqlserver.database_name, sqlserver.{2}, sqlserver.session_id ) {0} ), ADD EVENT sqlserver.user_event( ACTION( package0.event_sequence, sqlserver.client_app_name, sqlserver.client_hostname, sqlserver.database_id, sqlserver.database_name, sqlserver.{2}, sqlserver.session_id ) WHERE 
[sqlserver].[like_i_sql_unicode_string]([user_info],N'WorkloadTools%') ) WITH ( MAX_MEMORY = 40960 KB, EVENT_RETENTION_MODE = ALLOW_SINGLE_EVENT_LOSS, MAX_DISPATCH_LATENCY = 30 SECONDS, MAX_EVENT_SIZE = 0 KB, MEMORY_PARTITION_MODE = PER_CPU, TRACK_CAUSALITY = OFF, STARTUP_STATE = OFF ); ALTER EVENT SESSION [sqlworkload] ON {1} STATE = START; ================================================ FILE: WorkloadTools/Listener/File/FileEventFilter.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; namespace WorkloadTools.Listener.File { public class FileEventFilter : WorkloadEventFilter { public bool IsSqlAzure { get; set; } public FileEventFilter() { ApplicationFilter = new FileFilterPredicate(FilterPredicate.FilterColumnName.ApplicationName); DatabaseFilter = new FileFilterPredicate(FilterPredicate.FilterColumnName.DatabaseName); HostFilter = new FileFilterPredicate(FilterPredicate.FilterColumnName.HostName); LoginFilter = new FileFilterPredicate(FilterPredicate.FilterColumnName.LoginName); } } } ================================================ FILE: WorkloadTools/Listener/File/FileFilterPredicate.cs ================================================ using System; namespace WorkloadTools.Listener.File { public class FileFilterPredicate : FilterPredicate { public FileFilterPredicate(FilterColumnName name) : base(name) { } public bool IsSqlAzure { get; set; } public override string PushDown() { if (!IsPredicateSet) { return string.Empty; } IsPushedDown = true; var result = "("; var hasPositives = false; var hasNegatives = false; for (var i = 0; i < ComparisonOperator.Length; i++) { if (ComparisonOperator[i] == FilterComparisonOperator.Not_Equal) { hasNegatives = true; } else { hasPositives = true; } } for (var i = 0; i < PredicateValue.Length; i++) { if (hasNegatives && hasPositives && ComparisonOperator[i] == FilterComparisonOperator.Not_Equal) { // In this case I only care for the 
positives
continue;
}
if (i > 0)
{
    // negatives-only predicates combine with AND, any positives switch to OR
    if (hasNegatives && !hasPositives)
    {
        result += " AND ";
    }
    else
    {
        result += " OR ";
    }
}
result += ColumnName.ToString();
result += " " + FilterPredicate.ComparisonOperatorAsString(ComparisonOperator[i]) + " '" + EscapeFilter(PredicateValue[i]) + "'";
}
result += ")";
return result;
}
}
}

================================================ FILE: WorkloadTools/Listener/File/FileWorkloadListener.cs ================================================

using System;
using System.Data;
using System.Data.Common;
using System.Data.SQLite;
using System.Linq;
using System.Text;
using NLog;
using WorkloadTools.Util;

namespace WorkloadTools.Listener.File
{
    /// <summary>
    /// WorkloadListener that reads events back from a SQLite workload file
    /// produced by WorkloadTools, for replay or analysis.
    /// </summary>
    public class FileWorkloadListener : WorkloadListener
    {
        private static readonly Logger logger = LogManager.GetCurrentClassLogger();

        // Default behaviour is replay events in synchronization mode
        // (keeping the same event rate found in the source workload).
        // The other option is stress mode: events are replayed one
        // after another without waiting
        public bool SynchronizationMode { get; set; } = true;

        // Start time of the first execution event; used to compute per-event replay offsets.
        private DateTime startTime = DateTime.MinValue;
        private long totalEvents;
        private SQLiteConnection conn;
        private SQLiteDataReader reader;
        private string connectionString;
        // Message event carrying the total event count, emitted before any real event.
        private MessageWorkloadEvent totalEventsMessage = null;
        private bool totalEventsMessageSent = false;

        public FileWorkloadListener() : base()
        {
            Filter = new FileEventFilter();
        }

        /// <summary>
        /// Validates the source file, then opens the long-lived reader over the
        /// Events table, with filters pushed down into the WHERE clause.
        /// </summary>
        /// <exception cref="FormatException">Thrown when the file is not a valid workload file.</exception>
        public override void Initialize()
        {
            connectionString = "Data Source=" + Source + ";Version=3;Read Only=True;Journal Mode=Off;Synchronous=Off;";

            totalEvents = ValidateFile();
            if (totalEvents < 0)
            {
                throw new FormatException($"The input file \"{Source}\" is not a valid workload file");
            }
            totalEventsMessage = new MessageWorkloadEvent()
            {
                MsgType = MessageWorkloadEvent.MessageType.TotalEvents,
                Value = totalEvents
            };

            // Push Down EventFilters
            var filters = GetFilterClause();

            try
            {
                var sql = string.Empty;
                // Events are executed on event_sequence order
                // NOTE(review): despite the comment above, the query orders by
                // start_time, row_id — event_sequence is not used here; confirm intent.
                logger.Info("Reading the full data for every event that matches filters. This may take awhile on large trace files please be patient");
                sql = "SELECT * FROM Events " + filters + " ORDER BY start_time ASC, row_id ASC";
                conn = new SQLiteConnection(connectionString);
                conn.Open();
                var command = new SQLiteCommand(sql, conn);
                reader = command.ExecuteReader();
            }
            catch (Exception e)
            {
                logger.Error(e);
                throw;
            }
        }

        // returns the number of events to replay, if any
        // returns -1 in case the file format is invalid
        private long ValidateFile()
        {
            // Push Down EventFilters
            var filters = GetFilterClause();
            string sql;

            if (string.IsNullOrEmpty(filters))
            {
                // Only works if you didn't delete rows from the table
                // WorkloadTools doesn't delete anything. If you deleted
                // rows manually, blame yourself.
                sql = "SELECT MAX(ROWID) FROM Events LIMIT 1";
            }
            else
            {
                // SELECT COUNT(*) can be slow on large traces unless VACUUM is used in
                // Sqlite, but is the only solution when filters are applied.
                // When filters are applied extra indexes may be useful in future,
                // especially on large traces.
// FIX: the filter clause built above was never appended to the COUNT query,
// so the total reported to consumers included events that the filters would
// later discard. The clause starts with "WHERE ..." (see GetFilterClause),
// so concatenation yields valid SQL.
sql = "SELECT COUNT(*) FROM Events " + filters;
}

long result = -1;
try
{
    using (var m_dbConnection = new SQLiteConnection(connectionString))
    {
        m_dbConnection.Open();
        try
        {
            var command = new SQLiteCommand(sql, m_dbConnection);
            result = (long)command.ExecuteScalar();
        }
        catch (Exception e)
        {
            // Missing/unreadable Events table => signal "not a workload file".
            result = -1;
            logger.Error(e, "Unable to query the Events table in source file");
        }
    }
    logger.Info("The source file contains {result} events", result);
}
catch (Exception e)
{
    logger.Error(e, "Unable to open the source file");
    result = -1;
}
return result;
}

/// <summary>
/// Returns the next event from the workload file, or null when the file is
/// exhausted. The very first call returns the message event carrying the
/// total number of events.
/// </summary>
public override WorkloadEvent Read()
{
    WorkloadEvent result = null;

    // first I need to return the event that
    // contains the total number of events in the file
    // once this is done I can start sending the actual events
    if (!totalEventsMessageSent)
    {
        totalEventsMessageSent = true;
        return totalEventsMessage;
    }

    // process actual events from the file
    try
    {
        if (reader == null)
        {
            return null;
        }

        var validEventFound = false;
        var transformer = new SqlTransformer();

        do
        {
            if (!reader.Read())
            {
                // end of file: flag the listener as stopped
                stopped = true;
                return null;
            }
            result = ReadEvent(reader);

            // Handle replay sleep for synchronization mode
            // The sleep cannot happen here, but it has to
            // happen later in the replay workflow, because
            // it would only delay the insertion in the queue
            // and it would not separate the events during the replay
            if (result is ExecutionWorkloadEvent execEvent)
            {
                if (SynchronizationMode)
                {
                    if (startTime != DateTime.MinValue)
                    {
                        // offset of this event relative to the first event seen
                        var commandOffset = (result.StartTime - startTime).TotalMilliseconds;
                        if (commandOffset > 0)
                        {
                            execEvent.ReplayOffset = commandOffset;
                        }
                    }
                    else
                    {
                        startTime = execEvent.StartTime;
                    }
                }
                else
                {
                    // Leave it at 0.
The replay consumer will interpret this // as "do not wait for the requested offset" and will replay // the event without waiting
execEvent.ReplayOffset = 0;
}

// preprocess and filter events
if (execEvent.Type == WorkloadEvent.EventType.BatchStarting
    || execEvent.Type == WorkloadEvent.EventType.BatchCompleted
    || execEvent.Type == WorkloadEvent.EventType.RPCStarting
    || execEvent.Type == WorkloadEvent.EventType.RPCCompleted
    || execEvent.Type == WorkloadEvent.EventType.Message)
{
    if (transformer.Skip(execEvent.Text))
    {
        continue;
    }
    execEvent.Text = transformer.Transform(execEvent.Text);
}
}

// Filter events
if (result is ExecutionWorkloadEvent)
{
    validEventFound = Filter.Evaluate(result);
}
else
{
    // non-execution events (counters, waits, errors, ...) always pass through
    validEventFound = true;
}
} while (!validEventFound);
}
catch (Exception e)
{
    if (stopped)
    {
        // reader was closed on purpose: swallow and report end-of-stream
        return null;
    }
    DateTime? eventDate = null;
    if (result != null)
    {
        eventDate = result.StartTime;
    }
    logger.Error(e);
    logger.Error($"Unable to read next event. Current event date: {eventDate}");
    throw;
}
return result;
}

/// <summary>
/// Materializes a WorkloadEvent from the current row of the Events table,
/// dispatching on the event_type column.
/// </summary>
/// <exception cref="InvalidOperationException">Wraps any per-row read error together with its row_id.</exception>
private WorkloadEvent ReadEvent(SQLiteDataReader reader)
{
    var type = (WorkloadEvent.EventType)reader.GetInt32(reader.GetOrdinal("event_type"));
    var row_id = reader.GetInt64(reader.GetOrdinal("row_id"));
    try
    {
        switch (type)
        {
            case WorkloadEvent.EventType.PerformanceCounter:
                var cr = new CounterWorkloadEvent
                {
                    StartTime = reader.GetDateTime(reader.GetOrdinal("start_time"))
                };
                // counter values live in the separate Counters table
                ReadCounters(row_id, cr);
                return cr;
            case WorkloadEvent.EventType.WAIT_stats:
                return new WaitStatsWorkloadEvent
                {
                    StartTime = reader.GetDateTime(reader.GetOrdinal("start_time")),
                    Type = type
                };
            case WorkloadEvent.EventType.DiskPerf:
                return new DiskPerfWorkloadEvent
                {
                    StartTime = reader.GetDateTime(reader.GetOrdinal("start_time")),
                    Type = type
                };
            case WorkloadEvent.EventType.Error:
                return new ErrorWorkloadEvent
                {
                    StartTime = reader.GetDateTime(reader.GetOrdinal("start_time")),
                    Type = type,
                    Text = GetString(reader, "sql_text")
                };
            default:
                return new ExecutionWorkloadEvent
                {
                    EventSequence =
GetInt64(reader, "event_sequence"),
ApplicationName = GetString(reader, "client_app_name"),
StartTime = reader.GetDateTime(reader.GetOrdinal("start_time")),
HostName = GetString(reader, "client_host_name"),
DatabaseName = GetString(reader, "database_name"),
LoginName = GetString(reader, "server_principal_name"),
SPID = reader.GetInt32(reader.GetOrdinal("session_id")),
Text = GetString(reader, "sql_text"),
CPU = GetInt64(reader, "cpu"),
Duration = GetInt64(reader, "duration"),
Reads = GetInt64(reader, "reads"),
Writes = GetInt64(reader, "writes"),
Type = type
};
}
}
catch (Exception e)
{
    // Wrap with the offending row id to make bad data easier to locate.
    throw new InvalidOperationException($"Invalid data at row_id {row_id}", e);
}
}

/// <summary>
/// Reads a column as a string, mapping DBNull to null and decoding byte[]
/// payloads with Encoding.Unicode (UTF-16).
/// </summary>
private string GetString(SQLiteDataReader reader, string columnName)
{
    var result = reader[columnName];
    if (result != null)
    {
        if (result.GetType() == typeof(DBNull))
        {
            result = null;
        }
        else if (result is byte[] v)
        {
            result = Encoding.Unicode.GetString(v);
        }
    }
    return (string)result;
}

/// <summary>
/// Reads a column as a nullable Int64, mapping DBNull to null.
/// </summary>
private long? GetInt64(SQLiteDataReader reader, string columnName)
{
    var result = reader[columnName];
    if (result != null)
    {
        if (result.GetType() == typeof(DBNull))
        {
            result = null;
        }
    }
    return (long?)result;
}

protected override void Dispose(bool disposing)
{
    if ((reader != null) && (!reader.IsClosed))
    {
        reader.Close();
    }
    // FIX: conn is only assigned near the end of Initialize(); if initialization
    // failed early (e.g. FormatException for an invalid workload file) Dispose
    // would throw a NullReferenceException and mask the original error.
    conn?.Dispose();
}

protected override void ReadPerfCountersEvents()
{
}

protected override void ReadWaitStatsEvents()
{
    // Why nothing read here?
}

protected override void ReadDiskPerformanceEvents()
{
    // Why nothing read here?
}

/// <summary>
/// Loads the performance counters belonging to the event identified by
/// <paramref name="row_id"/> from the Counters table into <paramref name="cev"/>.
/// Failures are logged; the outer catch deliberately does not rethrow, so a
/// broken Counters table yields an event with no counters rather than a crash.
/// </summary>
private void ReadCounters(long row_id, CounterWorkloadEvent cev)
{
    var sql = "SELECT * FROM Counters WHERE row_id = $row_id";

    try
    {
        using (var m_dbConnection = new SQLiteConnection(connectionString))
        {
            m_dbConnection.Open();
            try
            {
                using (var command = new SQLiteCommand(sql, m_dbConnection))
                {
                    _ = command.Parameters.AddWithValue("$row_id", row_id);
                    using (var rdr = command.ExecuteReader())
                    {
                        while (rdr.Read())
                        {
                            // the "name" column holds the textual CounterNameEnum value
                            var name = (CounterWorkloadEvent.CounterNameEnum)Enum.Parse(typeof(CounterWorkloadEvent.CounterNameEnum), (string)rdr["name"]);
                            cev.Counters.Add(name, rdr.GetFloat(rdr.GetOrdinal("value")));
                        }
                        rdr.Close();
                    }
                }
            }
            catch (Exception e)
            {
                logger.Error(e, $"Unable to query Counters for row_id {row_id}");
                throw;
            }
        }
    }
    catch (Exception e)
    {
        logger.Error(e, "Unable to query Counters from the source file");
    }
}

/// <summary>
/// Builds the pushed-down WHERE clause by AND-combining the application,
/// database, host and login filter predicates.
/// </summary>
private string GetFilterClause()
{
    // Push Down EventFilters
    var filters = string.Empty;

    var appFilter = Filter.ApplicationFilter.PushDown();
    var dbFilter = Filter.DatabaseFilter.PushDown();
    var hostFilter = Filter.HostFilter.PushDown();
    var loginFilter = Filter.LoginFilter.PushDown();

    if (appFilter != string.Empty)
    {
        filters += ((filters == string.Empty) ? string.Empty : " AND ") + appFilter;
    }
    if (dbFilter != string.Empty)
    {
        filters += ((filters == string.Empty) ? string.Empty : " AND ") + dbFilter;
    }
    if (hostFilter != string.Empty)
    {
        filters += ((filters == string.Empty) ? string.Empty : " AND ") + hostFilter;
    }
    if (loginFilter != string.Empty)
    {
        filters += ((filters == string.Empty) ?
string.Empty : " AND ") + loginFilter; } if (filters != string.Empty) { filters = "WHERE (" + filters + ") "; // these events should not be filtered out // 4 - PerformanceCounter // 5 - Timeout // 6 - WaitStats // 7 - Error filters += "OR event_type IN (4,5,6,7,8)"; } return filters; } } } ================================================ FILE: WorkloadTools/Listener/ReadIteration.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace WorkloadTools.Listener { // This class is used internally to keep track // of the files, offsets and event_sequences // when reading events from extended events or trace files internal class ReadIteration { public const int DEFAULT_TRACE_INTERVAL_SECONDS = 5; public const int DEFAULT_TRACE_ROWS_SLEEP_THRESHOLD = 500; public const long TRACE_DEFAULT_OFFSET = 500; #region staticStuff private static int _lastFileHash; private static long _lastOffset; private static readonly Dictionary> recordedOffsets = new Dictionary>(); private static void AddOffset(string filename, long offset) { // perf optimization: most of the time the last // file/offset pair is passed over and over again if (filename.GetHashCode() == _lastFileHash && offset == _lastOffset) { return; } // new values? 
ok, let's add them // one more check doesn't hurt though if (recordedOffsets.TryGetValue(filename, out var offsets)) { if (!offsets.Contains(offset)) { _ = offsets.Add(offset); } } else { offsets = new SortedSet(); _ = offsets.Add(offset); recordedOffsets.Add(filename, offsets); } // let's keep track of the last inserted values _lastFileHash = filename.GetHashCode(); _lastOffset = offset; } public static long GetLastOffset(string filename) { long result = -1; if (recordedOffsets.TryGetValue(filename, out var offsets)) { result = offsets.Max(); } return result; } public static long GetSecondLastOffset(string filename) { long result = -1; if (recordedOffsets.TryGetValue(filename, out var offsets)) { if (offsets.Count >= 2) { result = offsets.ElementAt(offsets.Count - 2); } } return result; } #endregion staticStuff public string StartFileName { get; set; } public string EndFileName { get; set; } public long MinOffset { get; set; } public long StartOffset { get; set; } private long _endOffset = -1; public long EndOffset { get => _endOffset; set { // add current offset AddOffset(EndFileName, value); // set new value _endOffset = value; } } public long StartSequence { get; set; } public long EndSequence { get; set; } public long RowsRead { get; set; } public int Files { get; set; } // try to identify the root part of the rollover file name // the root is the part of the name before the numeric suffix // EG: mySessionName1234.xel => root = mySessionName public string GetXEFilePattern() { var filePattern = ""; for (var j = StartFileName.Length - 4; j > 1 && StartFileName.Substring(j - 1, 1).All(char.IsDigit); j--) { filePattern = StartFileName.Substring(0, j - 1); } filePattern += "*.xel"; return filePattern; } // Initial offset to be used as a parameter to the fn_xe_file_target_read_file function public long GetInitialOffset() { long result = -1; if (MinOffset > result) { result = MinOffset; } if (StartOffset > result) { result = StartOffset; } if (EndOffset > result) { 
result = EndOffset; } return result; } public long GetInitialSequence() { long result = -1; if (StartSequence > result) { result = StartSequence; } if (EndSequence > result) { result = EndSequence; } return result; } } } ================================================ FILE: WorkloadTools/Listener/SqlTransformer.cs ================================================ using Microsoft.SqlServer.Management.SqlParser.Metadata; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Text.RegularExpressions; using WorkloadTools.Consumer.Analysis; namespace WorkloadTools.Listener { public class SqlTransformer { private static readonly Regex _execPrepped = new Regex("^EXEC\\s+SP_EXECUTE\\s+(?\\d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled); private static readonly Regex _execUnprep = new Regex("EXEC\\s+SP_UNPREPARE\\s+(?\\d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled); private static readonly Regex _prepareSql = new Regex("EXEC\\s+(?SP_PREP(ARE|EXEC))\\s+@P1\\s+OUTPUT,\\s*(NULL|(N\\'.*?\\')),\\s*N(?.+)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _preppedSqlStatement = new Regex("^(')(?((?!\\1).|\\1{2})*)\\1", RegexOptions.Compiled | RegexOptions.Singleline); private static readonly Regex _doubleApostrophe = new Regex("('')(?.*?)('')", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.IgnorePatternWhitespace | RegexOptions.CultureInvariant); private static readonly MatchEvaluator decimal38Evaluator = new MatchEvaluator(MakeFloat); private static string MakeFloat(Match match) { if (match.Value.EndsWith("E0")) { return match.Value; } else { return match.Value + "E0"; } } public string Transform(string command) { // remove the handle from the sp_prepexec call if (command.Contains("sp_prepexec ")) { command = RemoveFirstP1(command, out _); if (!command.EndsWith("EXEC sp_unprepare @p1;")) { command += " ; EXEC sp_unprepare @p1;"; } } // remove the 
handle from the sp_cursoropen call else if (command.Contains("sp_cursoropen ")) { command = RemoveFirstP1(command, out _); if (!command.EndsWith("EXEC sp_cursorclose @p1;")) { command += " ; EXEC sp_cursorclose @p1;"; } } // remove the handle from the sp_cursorprepexec call else if (command.Contains("sp_cursorprepexec ")) { command = RemoveFirstP1(command, out _); if (!command.EndsWith("EXEC sp_cursorunprepare @p1;")) { command += " ; EXEC sp_cursorunprepare @p1;"; } } // trim numbers with precision > 38 // rpc_completed events may return float parameters // as long numeric strings that exceed the maximum decimal // precision of 38. // Any decimal numeric string in T-SQL is interpreted as decimal, // unless it ends with "E0", which designates a float literal. // Any decimal numeric string longer than 38 characters needs to // be appended "E0" to be treated as float. // // Unfortunately RegExs are evil and also match numbers // that already have their "E0" appended, so I need to append // only when not found // // RegEx: \b([0-9\.]{38,})+([E]+[0]+)?\b // \b means "word boundary", including whitespace, punctuation or begin/end input // ([0-9\.]{38,})+ means "numbers or . repeated at least 38 times" // ([E]+[0]+)? 
means "E0" zero or one time // \b means word boundary again
command = Regex.Replace(command, @"\b([0-9\.]{38,})+([E]+[0]+)?\b", decimal38Evaluator);
return command;
}

/// <summary>
/// Returns true when the command must not be replayed: empty text, cursor
/// bookkeeping calls, WorkloadTools-internal queries, KILL commands and
/// bulk inserts.
/// </summary>
public bool Skip(string command)
{
    if (string.IsNullOrEmpty(command))
    {
        return true;
    }

    // skip reset connection commands
    //if (command.Contains("sp_reset_connection"))
    //    return true;

    // skip unprepare commands
    //if (command.Contains("sp_unprepare "))
    //    return true;

    // skip cursor fetch
    if (command.Contains("sp_cursor "))
    {
        return true;
    }

    // skip cursor fetch
    if (command.Contains("sp_cursorfetch "))
    {
        return true;
    }

    // skip cursor close
    if (command.Contains("sp_cursorclose "))
    {
        return true;
    }

    // skip cursor option
    if (command.Contains("sp_cursoroption "))
    {
        return true;
    }

    // skip cursor unprepare
    if (command.Contains("sp_cursorunprepare "))
    {
        return true;
    }

    // skip internal commands
    if (command.Contains("fn_xe_file_target_read_file")
        || command.Contains("ALTER EVENT SESSION")
        || command.Contains("fn_trace_getinfo"))
    {
        return true;
    }

    // skip KILL commands
    if (command.StartsWith("KILL"))
    {
        return true;
    }

    // skip BULK INSERT commands
    if (command.StartsWith("insert bulk"))
    {
        return true;
    }

    // skip sp_execute
    //if (command.Contains("sp_execute "))
    //    return true;

    return false;
}

/// <summary>
/// Zeroes out the numeric prepared-statement handle assigned by "set @p1=",
/// returning the modified command; the original handle digits are returned
/// through <paramref name="originalP1"/> (null when no handle is found).
/// </summary>
private string RemoveFirstP1(string command, out string originalP1)
{
    var idx = command.IndexOf("set @p1=");
    originalP1 = null;
    if (idx > 0)
    {
        originalP1 = "";
        var sb = new StringBuilder(command);
        idx += 8; // move past "set @p1="
        // replace numeric chars with 0s
        // FIX: bound the scan to the string length; a command ending right
        // after the handle digits would otherwise throw an out-of-range
        // exception (the sibling RemoveFirstPrepStatementNum already guards
        // its loop the same way).
        while (idx < sb.Length && char.IsNumber(sb[idx]))
        {
            originalP1 += sb[idx];
            sb[idx] = '0';
            idx++;
        }
        command = sb.ToString();
    }
    return command;
}

/// <summary>
/// Replaces the numeric statement number following " sp_execute " with a
/// single '§' placeholder; the original digits are returned through
/// <paramref name="originalStmtNum"/>.
/// </summary>
private string RemoveFirstPrepStatementNum(string command, out string originalStmtNum)
{
    var idx = command.IndexOf(" sp_execute ");
    originalStmtNum = null;
    if (idx > 0)
    {
        originalStmtNum = "";
        var sb = new StringBuilder(command);
        idx += 12; // move past " sp_execute "
        // replace numeric chars with §
        var iter = 0;
        var initialIdx = idx;
        while (idx <
sb.Length && char.IsNumber(sb[idx])) { originalStmtNum += sb[idx]; if(iter == 0) { sb[idx] = '§'; } else { sb[idx] = ' '; } idx++; iter++; } // remove extra characters after the newly added § symbol if(initialIdx + 1 < sb.Length && iter > 1) { _ = sb.Remove(initialIdx + 1, iter - 1); } command = sb.ToString(); } return command; } public NormalizedSqlText Normalize(string command) { var result = new NormalizedSqlText(command); var num = 0; if (command.Contains("sp_reset_connection")) { if (command.Contains("Nonpooled")) { result.CommandType = NormalizedSqlText.CommandTypeEnum.SP_RESET_CONNECTION_NONPOOLED; } else { result.CommandType = NormalizedSqlText.CommandTypeEnum.SP_RESET_CONNECTION; } return result; } var match3 = _prepareSql.Match(command); if (match3.Success) { if (match3.Groups["preptype"].ToString().ToLower() == "sp_prepare") { if(match3.Groups["stmtnum"].Success) { num = !(match3.Groups["stmtnum"].ToString() == "NULL") ? Convert.ToInt32(match3.Groups["stmtnum"].ToString()) : 0; } var sql = match3.Groups["remaining"].ToString(); var match4 = _preppedSqlStatement.Match(sql); if (match4.Success) { sql = match4.Groups["statement"].ToString(); sql = _doubleApostrophe.Replace(sql, "'${string}'"); result.Statement = sql; result.NormalizedText = RemoveFirstP1(result.OriginalText, out var originalHandle); if (int.TryParse(originalHandle, out var n)) { result.Handle = n; } else { result.Handle = num; } result.CommandType = NormalizedSqlText.CommandTypeEnum.SP_PREPARE; } } return result; } var match5 = _execPrepped.Match(command); if (match5.Success) { num = Convert.ToInt32(match5.Groups["stmtnum"].ToString()); result.Handle = num; var textWithPlaceHolder = RemoveFirstPrepStatementNum(result.Statement, out var originalHandle); if (int.TryParse(originalHandle, out var n)) { result.Handle = n; } else { result.Handle = num; } result.Statement = textWithPlaceHolder; result.NormalizedText = textWithPlaceHolder; result.CommandType = 
NormalizedSqlText.CommandTypeEnum.SP_EXECUTE; return result; } var match6 = _execUnprep.Match(command); if (match6.Success) { num = Convert.ToInt32(match6.Groups["stmtnum"].ToString()); result.Handle = num; result.Statement = "EXEC sp_unprepare §"; result.NormalizedText = "EXEC sp_unprepare §"; result.CommandType = NormalizedSqlText.CommandTypeEnum.SP_UNPREPARE; return result; } return result; } } } ================================================ FILE: WorkloadTools/Listener/Trace/FileTraceEventDataReader.cs ================================================ using NLog; using System; using System.Collections.Generic; using System.Data.SqlClient; using System.IO; using System.Linq; using System.Text; using System.Threading; using System.Threading.Tasks; using WorkloadTools.Util; namespace WorkloadTools.Listener.Trace { public class FileTraceEventDataReader : TraceEventDataReader { private readonly RingBuffer ReadIterations = new RingBuffer(10); private static readonly Logger logger = LogManager.GetCurrentClassLogger(); private int TraceRowsSleepThreshold { get; set; } = 5000; private int TraceIntervalSeconds { get; set; } = 10; private bool stopped = false; private int traceId = -1; private readonly TraceUtils utils; private bool _checkedFormat; public FileTraceEventDataReader(string connectionString, WorkloadEventFilter filter, IEventQueue events) : base(connectionString, filter, events) { utils = new TraceUtils(); } public override void ReadEvents() { try { var retryCount = 0; while (!stopped) { using (var conn = new SqlConnection()) { conn.ConnectionString = ConnectionString; conn.Open(); ReadIteration previousIteration = null; if (ReadIterations.Count > 0) { previousIteration = ReadIterations.Last(); } var currentIteration = InitializeReadIteration(conn, previousIteration); if (currentIteration != null) { ReadIterations.Add(currentIteration); } else { Stop(); break; } try { ReadTraceData(conn, currentIteration); retryCount = 0; } catch (SqlException) { 
retryCount++; if(retryCount > 2) { throw; } } } } } catch (Exception ex) { logger.Error(ex.Message); logger.Error(ex.StackTrace); if (ex.InnerException != null) { logger.Error(ex.InnerException.Message); } Dispose(); } } private ReadIteration InitializeReadIteration(SqlConnection conn, ReadIteration previous) { var sqlPath = @" SELECT value AS path FROM ::fn_trace_getinfo(default) WHERE traceid = @traceId AND property = 2; "; var sqlPathLocaldb = @" IF OBJECT_ID('tempdb.dbo.trace_reader_queue') IS NOT NULL BEGIN SELECT TOP(1) path FROM tempdb.dbo.trace_reader_queue ORDER BY ts DESC END ELSE BEGIN SELECT '' AS path END "; ReadIteration currentIteration = null; using (var cmdPath = conn.CreateCommand()) { if (conn.DataSource.StartsWith("(localdb)", StringComparison.InvariantCultureIgnoreCase)) { _checkedFormat = false; cmdPath.CommandText = sqlPathLocaldb; } else { _checkedFormat = true; cmdPath.CommandText = sqlPath; // Get trace id if (traceId == -1) { var tracePath = utils.GetSqlDefaultLogPath(conn); traceId = utils.GetTraceId(conn, Path.Combine(tracePath, "sqlworkload")); if (traceId == -1) { throw new InvalidOperationException("The SqlWorkload capture trace is not running."); } } var paramTraceId = cmdPath.Parameters.Add("@traceId", System.Data.SqlDbType.Int); paramTraceId.Value = traceId; } try { logger.Debug("Initializing read iteration"); using (var reader = cmdPath.ExecuteReader()) { // should return only one row if (reader.Read()) { currentIteration = new ReadIteration() { StartFileName = reader.GetString(0), Files = 1 }; currentIteration.EndFileName = currentIteration.StartFileName; if (previous != null) { //if we have a previous iteration, keep reading from that file first currentIteration.StartFileName = previous.EndFileName; // if the file has changed from the previous iteration // read the default number of files ( = 0 ) if(currentIteration.StartFileName != currentIteration.EndFileName) { currentIteration.Files = 0; } // we will use the previous event 
sequence as the boundary to where // we need to start reading events again currentIteration.StartSequence = previous.EndSequence; currentIteration.EndSequence = previous.EndSequence; // trace files do not have an offset like xe files but // the offset can be used to go back and read events // from the previous sequence minus a safety offset currentIteration.StartOffset = previous.EndSequence - ReadIteration.TRACE_DEFAULT_OFFSET; // if reading from localdb we don't need to wait for more data if (conn.DataSource.StartsWith("(localdb)", StringComparison.InvariantCultureIgnoreCase)) { if ( (currentIteration.StartFileName == previous.StartFileName) && (currentIteration.StartSequence == previous.StartSequence) ) { return null; } } } logger.Debug($"currentIteration.StartFileName: {currentIteration.StartFileName}"); logger.Debug($"currentIteration.MinOffset : {currentIteration.MinOffset}"); logger.Debug($"currentIteration.EndFileName : {currentIteration.EndFileName}"); logger.Debug($"currentIteration.StartOffset : {currentIteration.StartOffset}"); logger.Debug($"currentIteration.StartSequence: {currentIteration.StartSequence}"); } } } catch (Exception e) { logger.Error(e.StackTrace); throw; } } // check columns in the source file (if localdb) // before returning the read iteration if (!_checkedFormat) { if (!utils.CheckTraceFormat(conn, currentIteration.StartFileName)) { throw new InvalidDataException($"The trace file {currentIteration.StartFileName} lacks critical column information. 
See the documentation for required trace columns."); } } return currentIteration; } private void ReadTraceData(SqlConnection conn, ReadIteration currentIteration) { var sqlReadTrace = @" SELECT EventSequence ,Error ,TextData ,BinaryData ,DatabaseID ,HostName ,ApplicationName ,LoginName ,SPID ,Duration ,StartTime ,EndTime ,Reads ,Writes ,CPU ,EventClass ,DatabaseName ,EventSubClass FROM fn_trace_gettable(@path, @number_files) "; if (currentIteration.StartSequence > 0) { sqlReadTrace += "WHERE EventSequence > @event_offset"; } logger.Debug("Reading Trace data..."); var parser = new TraceEventParser(); using (var cmd = conn.CreateCommand()) { cmd.CommandText = sqlReadTrace; var paramPath = cmd.Parameters.Add("@path", System.Data.SqlDbType.NVarChar, 260); paramPath.Value = currentIteration.StartFileName; var paramNumberFiles = cmd.Parameters.Add("@number_files", System.Data.SqlDbType.Int); paramNumberFiles.Value = currentIteration.Files; var paramInitialSequence = cmd.Parameters.Add("@event_offset", System.Data.SqlDbType.BigInt); paramInitialSequence.Value = currentIteration.StartOffset; // don't pass initial file name and offset // read directly from the initial file // until we have some rows read already if ( EventCount == 0 || currentIteration.StartOffset <= 0 || currentIteration.StartOffset == currentIteration.MinOffset ) { paramPath.Value = currentIteration.StartFileName; paramNumberFiles.Value = 0; paramInitialSequence.Value = 0; } logger.Debug($"paramPath : {paramPath.Value}"); logger.Debug($"paramNumberFiles : {paramNumberFiles.Value}"); logger.Debug($"paramInitialSequence: {paramInitialSequence.Value}"); var transformer = new SqlTransformer(); try { using (var reader = cmd.ExecuteReader()) { var skippedRows = 0; while (reader.Read()) { if (reader["EventSequence"] != DBNull.Value) { currentIteration.EndSequence = (long)reader["EventSequence"]; } // read the event from the sqldatareader var evt = parser.ParseEvent(reader); // skip invalid events if (evt.Type == 
WorkloadEvent.EventType.Unknown) { continue; } // skip to the correct event in case we're reading again // from the same file and we have a reference sequence if ((currentIteration.RowsRead == 0) && (currentIteration.StartSequence > 0)) { // skip rows until we encounter the reference event_sequence if (evt.EventSequence != currentIteration.StartSequence) { skippedRows++; continue; } else { // skip one more row... skippedRows++; currentIteration.RowsRead++; continue; } } // this is only to print out a message, so consider // getting rid of it if (skippedRows > 0) { logger.Debug($"Skipped rows: {skippedRows}"); skippedRows = 0; } // now we have an event, no matter if good or bad => increment rows read currentIteration.RowsRead++; if (evt.EventSequence != null) { currentIteration.EndSequence = (long)evt.EventSequence; } if (evt.Type == WorkloadEvent.EventType.BatchStarting || evt.Type == WorkloadEvent.EventType.BatchCompleted || evt.Type == WorkloadEvent.EventType.RPCStarting || evt.Type == WorkloadEvent.EventType.RPCCompleted || evt.Type == WorkloadEvent.EventType.Message) { if (transformer.Skip(evt.Text)) { continue; } if (!Filter.Evaluate(evt)) { continue; } evt.Text = transformer.Transform(evt.Text); } // it's a "good" event: add it to the queue Events.Enqueue(evt); EventCount++; } logger.Debug($"currentIteration.EndSequence : {currentIteration.EndSequence}"); } } catch (Exception) { throw; } // Wait before querying the events file again if (currentIteration.RowsRead < ReadIteration.DEFAULT_TRACE_ROWS_SLEEP_THRESHOLD && currentIteration.StartFileName == currentIteration.EndFileName) { Thread.Sleep(ReadIteration.DEFAULT_TRACE_INTERVAL_SECONDS * 1000); } } } public override void Stop() { stopped = true; } public bool IsStopped { get { return stopped; } } } } ================================================ FILE: WorkloadTools/Listener/Trace/ProfilerEventFilter.cs ================================================ using System; using System.Collections.Generic; using 
System.Linq; using System.Text; namespace WorkloadTools.Listener.Trace { public class ProfilerEventFilter : WorkloadEventFilter { public ProfilerEventFilter() { ApplicationFilter = new ProfilerFilterPredicate(FilterPredicate.FilterColumnName.ApplicationName); DatabaseFilter = new ProfilerFilterPredicate(FilterPredicate.FilterColumnName.DatabaseName); HostFilter = new ProfilerFilterPredicate(FilterPredicate.FilterColumnName.HostName); LoginFilter = new ProfilerFilterPredicate(FilterPredicate.FilterColumnName.LoginName); } } } ================================================ FILE: WorkloadTools/Listener/Trace/ProfilerFilterPredicate.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; namespace WorkloadTools.Listener.Trace { public class ProfilerFilterPredicate : FilterPredicate { public ProfilerFilterPredicate(FilterColumnName name) : base(name) { } public override string PushDown() { IsPushedDown = false; return string.Empty; } } } ================================================ FILE: WorkloadTools/Listener/Trace/ProfilerWorkloadListener.cs ================================================ using NLog; using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading; using System.Threading.Tasks; using WorkloadTools.Listener.Trace; namespace WorkloadTools.Listener.Trace { public class ProfilerWorkloadListener : WorkloadListener { private static readonly Logger logger = LogManager.GetCurrentClassLogger(); private readonly ConcurrentQueue events = new ConcurrentQueue(); private TraceServerWrapper trace; public ProfilerWorkloadListener() : base() { Filter = new ProfilerEventFilter(); Source = WorkloadController.BaseLocation + "\\Listener\\Trace\\sqlworkload.tdf"; } public override void Initialize() { var conn = new SqlConnectionInfoWrapper { ServerName = ConnectionInfo.ServerName, DatabaseName = "master" 
public override void Initialize()
{
    // Connect to the master database of the target instance; the trace
    // definition file (.tdf, see Source) decides which events are captured.
    var conn = new SqlConnectionInfoWrapper
    {
        ServerName = ConnectionInfo.ServerName,
        DatabaseName = "master"
    };

    if (string.IsNullOrEmpty(ConnectionInfo.UserName))
    {
        conn.UseIntegratedSecurity = true;
    }
    else
    {
        conn.UserName = ConnectionInfo.UserName;
        conn.Password = ConnectionInfo.Password;
    }

    trace = new TraceServerWrapper();

    try
    {
        trace.InitializeAsReader(conn, Source);

        // Pump events on a background thread; Read() drains the queue.
        _ = Task.Factory.StartNew(() => ReadEvents());
    }
    catch (Exception ex)
    {
        logger.Error(ex.Message);
        if (ex.InnerException != null)
        {
            logger.Error(ex.InnerException.Message);
        }
        throw;
    }
}

public override WorkloadEvent Read()
{
    WorkloadEvent result = null;
    // Poll the queue with a short sleep until an event arrives
    // or the listener is stopped (in which case null is returned).
    while (!stopped && !events.TryDequeue(out result))
    {
        Thread.Sleep(10);
    }
    return result;
}

protected override void Dispose(bool disposing)
{
    if (stopped)
    {
        return;
    }

    // Signal the reader thread to shut down, then close the trace.
    stopped = true;
    try
    {
        trace.Close();
        trace.Stop();
    }
    catch (Exception ex)
    {
        // Best effort on shutdown: the trace may already be closed.
        // Log instead of silently swallowing the exception.
        logger.Debug(ex.Message);
    }
}

private void ReadEvents()
{
    try
    {
        while (trace.Read() && !stopped)
        {
            try
            {
                var evt = new ExecutionWorkloadEvent();

                // Read EventClass once instead of re-invoking the
                // reflection-based getter for each comparison.
                var eventClass = trace.GetValue("EventClass").ToString();
                if (eventClass == "RPC:Completed")
                {
                    evt.Type = WorkloadEvent.EventType.RPCCompleted;
                }
                else if (eventClass == "SQL:BatchCompleted")
                {
                    evt.Type = WorkloadEvent.EventType.BatchCompleted;
                }
                else
                {
                    evt.Type = WorkloadEvent.EventType.Unknown;
                }

                evt.ApplicationName = (string)trace.GetValue("ApplicationName");
                evt.DatabaseName = (string)trace.GetValue("DatabaseName");
                evt.HostName = (string)trace.GetValue("HostName");
                evt.LoginName = (string)trace.GetValue("LoginName");
                evt.SPID = (int?)trace.GetValue("SPID");
                evt.Text = (string)trace.GetValue("TextData");
                evt.Reads = (long?)trace.GetValue("Reads");
                evt.Writes = (long?)trace.GetValue("Writes");
                evt.CPU = (long?)trace.GetValue("CPU") * 1000; // Profiler captures CPU as milliseconds => convert to microseconds
                evt.Duration = (long?)trace.GetValue("Duration");
                evt.StartTime = DateTime.Now;

                if (!Filter.Evaluate(evt))
                {
                    continue;
                }

                events.Enqueue(evt);
            }
            catch (Exception ex)
            {
                logger.Error(ex.Message);
                if (ex.InnerException != null)
                {
                    logger.Error(ex.InnerException.Message);
                }
            }
        } // while (Read)
    }
    catch (Exception ex)
    {
        logger.Error(ex.Message);
        if (ex.InnerException != null)
        {
            logger.Error(ex.InnerException.Message);
        }
        Dispose();
    }
}
/// <summary>
/// Reflection wrapper around Microsoft.SqlServer.Management.Common.SqlConnectionInfo,
/// so that WorkloadTools does not take a compile-time dependency on the SMO assemblies.
/// </summary>
public class SqlConnectionInfoWrapper
{
    private static readonly Logger logger = LogManager.GetCurrentClassLogger();

    // The wrapped SMO SqlConnectionInfo instance, created via reflection.
    public object SqlConnectionInfo { get; set; }

    // Reads a property of the wrapped object via reflection.
    // Returns null when the property or its getter is missing.
    private object GetProperty(string name)
    {
        return SqlConnectionInfo.GetType().GetProperty(name)?.GetGetMethod()?.Invoke(SqlConnectionInfo, (object[])null);
    }

    // Writes a property of the wrapped object via reflection.
    // Silently does nothing when the property or its setter is missing.
    private void SetProperty(string name, object value)
    {
        _ = SqlConnectionInfo.GetType().GetProperty(name)?.GetSetMethod()?.Invoke(SqlConnectionInfo, new object[] { value });
    }

    public string ServerName
    {
        get => (string)GetProperty("ServerName");
        set => SetProperty("ServerName", value);
    }

    public string DatabaseName
    {
        get => (string)GetProperty("DatabaseName");
        set => SetProperty("DatabaseName", value);
    }

    public bool UseIntegratedSecurity
    {
        get => (bool)GetProperty("UseIntegratedSecurity");
        set => SetProperty("UseIntegratedSecurity", value);
    }

    public string UserName
    {
        get => (string)GetProperty("UserName");
        set => SetProperty("UserName", value);
    }

    public string Password
    {
        get => (string)GetProperty("Password");
        set => SetProperty("Password", value);
    }

    public SqlConnectionInfoWrapper()
    {
        Type type;
        try
        {
#pragma warning disable 618
            // LoadWithPartialName is obsolete, but it is the only way to bind
            // to whatever SMO version is installed on the machine.
            var assembly = Assembly.LoadWithPartialName("Microsoft.SqlServer.ConnectionInfo");
#pragma warning restore 618
            type = assembly.GetType("Microsoft.SqlServer.Management.Common.SqlConnectionInfo");
            SqlConnectionInfo = type.InvokeMember((string)null, BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.CreateInstance, (Binder)null, (object)null, (object[])null);
        }
        catch (Exception ex)
        {
            logger.Error("Unable to load SMO library");
            logger.Error(ex.Message);
            throw;
        }
    }
}
public override void Initialize()
{
    using (var conn = new SqlConnection())
    {
        conn.ConnectionString = ConnectionInfo.ConnectionString();
        conn.Open();

        string traceSql = null;
        try
        {
            traceSql = System.IO.File.ReadAllText(Source);

            // Push the event filters down into the trace definition
            // so that filtering happens server-side.
            var filters = "";
            filters += Environment.NewLine + Filter.ApplicationFilter.PushDown();
            filters += Environment.NewLine + Filter.DatabaseFilter.PushDown();
            filters += Environment.NewLine + Filter.HostFilter.PushDown();
            filters += Environment.NewLine + Filter.LoginFilter.PushDown();

            tracePath = utils.GetSqlDefaultLogPath(conn);
            traceSql = string.Format(traceSql, TraceSizeMB, TraceRolloverCount, Path.Combine(tracePath, "sqlworkload"), filters);
        }
        catch (Exception e)
        {
            throw new ArgumentException("Cannot open the source script to start the sql trace", e);
        }

        // If a trace writing to the same rollover file is already
        // running (e.g. from a previous crashed session), stop it first.
        var id = utils.GetTraceId(conn, Path.Combine(tracePath, "sqlworkload"));
        if (id > 0)
        {
            StopTrace(conn, id);
        }

        // Create and start the trace; the script returns the new trace id.
        using (var cmd = conn.CreateCommand())
        {
            cmd.CommandText = traceSql;
            traceId = (int)cmd.ExecuteScalar();
        }

        // Mark the transaction
        SetTransactionMark(true);

        // Initialize the source of execution related events
        if (StreamSource == StreamSourceEnum.StreamFromFile)
        {
            _ = Task.Factory.StartNew(() => ReadEventsFromFile());
        }
        else if (StreamSource == StreamSourceEnum.StreamFromTDS)
        {
            _ = Task.Factory.StartNew(() => ReadEventsFromTDS());
        }

        // Initialize the source of performance counters events
        _ = Task.Factory.StartNew(() => ReadPerfCountersEvents());

        // Initialize the source of wait stats events
        _ = Task.Factory.StartNew(() => ReadWaitStatsEvents());

        // Initialize the source of disk performance stats
        _ = Task.Factory.StartNew(() => ReadDiskPerformanceEvents());
    }
}

public override WorkloadEvent Read()
{
    try
    {
        WorkloadEvent result = null;
        // Poll the queue until an event arrives; return null once stopped.
        while (!Events.TryDequeue(out result))
        {
            if (stopped)
            {
                return null;
            }
            Thread.Sleep(5);
        }
        return result;
    }
    catch (Exception)
    {
        // During shutdown the queue may already be disposed: treat as end of stream.
        if (stopped)
        {
            return null;
        }
        else
        {
            throw;
        }
    }
}
private void ReadEventsFromTDS()
{
    // Stream the trace rows over TDS; the reader pushes events into Events.
    using (var reader = new FileTraceEventDataReader(ConnectionInfo.ConnectionString(), Filter, Events))
    {
        reader.ReadEvents();
    }
}

// Read Workload events directly from the trace files
// on the server, via local path.
// This method only works when the process is running
// on the same machine as the SQL Server.
private void ReadEventsFromFile()
{
    try
    {
        while (!stopped)
        {
            // Get the oldest trace rollover file (names sort chronologically).
            var files = Directory.GetFiles(tracePath, "sqlworkload*.trc").ToList();
            if (files.Count == 0)
            {
                // The trace may not have produced its first file yet:
                // wait and poll again instead of throwing on an empty list.
                Thread.Sleep(500);
                continue;
            }
            files.Sort();
            var traceFile = files[0];

            using (var reader = new TraceFileWrapper())
            {
                reader.InitializeAsReader(traceFile);

                while (reader.Read() && !stopped)
                {
                    try
                    {
                        var evt = new ExecutionWorkloadEvent();

                        // Read EventClass once per row.
                        var eventClass = reader.GetValue("EventClass").ToString();
                        if (eventClass == "RPC:Starting")
                        {
                            evt.Type = WorkloadEvent.EventType.RPCStarting;
                        }
                        else if (eventClass == "SQL:BatchStarting")
                        {
                            evt.Type = WorkloadEvent.EventType.BatchStarting;
                        }
                        else if (eventClass == "RPC:Completed")
                        {
                            evt.Type = WorkloadEvent.EventType.RPCCompleted;
                        }
                        else if (eventClass == "SQL:BatchCompleted")
                        {
                            evt.Type = WorkloadEvent.EventType.BatchCompleted;
                        }
                        else
                        {
                            evt.Type = WorkloadEvent.EventType.Unknown;
                        }

                        evt.ApplicationName = (string)reader.GetValue("ApplicationName");
                        evt.DatabaseName = (string)reader.GetValue("DatabaseName");
                        evt.HostName = (string)reader.GetValue("HostName");
                        evt.LoginName = (string)reader.GetValue("LoginName");
                        evt.SPID = (int?)reader.GetValue("SPID");
                        evt.Text = (string)reader.GetValue("TextData");
                        evt.Reads = (long?)reader.GetValue("Reads");
                        evt.Writes = (long?)reader.GetValue("Writes");
                        evt.CPU = (long?)Convert.ToInt64(reader.GetValue("CPU")) * 1000; // SqlTrace captures CPU as milliseconds => convert to microseconds
                        evt.Duration = (long?)reader.GetValue("Duration");
                        evt.StartTime = DateTime.Now;

                        // Starting events carry their own StartTime column.
                        if (evt.Type == WorkloadEvent.EventType.RPCStarting || evt.Type == WorkloadEvent.EventType.BatchStarting)
                        {
                            evt.StartTime = Convert.ToDateTime(reader.GetValue("StartTime"));
                        }

                        if (!Filter.Evaluate(evt))
                        {
                            continue;
                        }

                        Events.Enqueue(evt);
                    }
                    catch (Exception ex)
                    {
                        logger.Error(ex.Message);
                        if (ex.InnerException != null)
                        {
                            logger.Error(ex.InnerException.Message);
                        }
                    }
                } // while (Read)
            } // using reader

            // The rollover file has been fully consumed: remove it.
            System.IO.File.Delete(traceFile);
        } // while not stopped
    }
    catch (Exception ex)
    {
        logger.Error(ex.Message);
        if (ex.InnerException != null)
        {
            logger.Error(ex.InnerException.Message);
        }
        Dispose();
    }
}

protected override void Dispose(bool disposing)
{
    stopped = true;
    try
    {
        // Stop and delete the server-side trace.
        using (var conn = new SqlConnection())
        {
            conn.ConnectionString = ConnectionInfo.ConnectionString();
            conn.Open();
            StopTrace(conn, traceId);
        }
        logger.Info("Trace with id={0} stopped successfully.", traceId);
    }
    catch (Exception ex)
    {
        // Never throw from Dispose: the server may be unreachable on shutdown.
        logger.Error(ex.Message);
    }
}

private void StopTrace(SqlConnection conn, int id)
{
    // sp_trace_setstatus: 0 = stop the trace, 2 = close and delete its definition.
    var cmd = conn.CreateCommand();
    cmd.CommandText = string.Format(@"
        IF EXISTS ( SELECT * FROM sys.traces WHERE id = {0} )
        BEGIN
            EXEC sp_trace_setstatus {0}, 0;
            EXEC sp_trace_setstatus {0}, 2;
        END
    ", id);
    _ = cmd.ExecuteNonQuery();
}
namespace WorkloadTools.Listener.Trace
{
    /// <summary>
    /// Event filter for SQL Trace listeners: each predicate can be
    /// pushed down to the server as sp_trace_setfilter calls.
    /// </summary>
    public class TraceEventFilter : WorkloadEventFilter
    {
        public TraceEventFilter()
        {
            ApplicationFilter = new TraceFilterPredicate(FilterPredicate.FilterColumnName.ApplicationName);
            DatabaseFilter = new TraceFilterPredicate(FilterPredicate.FilterColumnName.DatabaseName);
            HostFilter = new TraceFilterPredicate(FilterPredicate.FilterColumnName.HostName);
            LoginFilter = new TraceFilterPredicate(FilterPredicate.FilterColumnName.LoginName);
        }
    }

    /// <summary>
    /// Translates a row from a SQL Trace rowset into an ExecutionWorkloadEvent.
    /// </summary>
    public class TraceEventParser
    {
        // SQL Trace event class ids (see sys.trace_events).
        public enum EventClassEnum : int
        {
            RPC_Completed = 10,
            SQL_BatchCompleted = 12,
            RPC_Starting = 11,
            SQL_BatchStarting = 13,
            Audit_Login = 14,
            Timeout = 82
        }

        // Maps lower-cased column names to the actual column names of the
        // rowset; populated lazily from the first row, so that optional
        // columns can be probed with IsValidColumn.
        private readonly Dictionary<string, string> columns = new Dictionary<string, string>();

        public ExecutionWorkloadEvent ParseEvent(SqlDataReader reader)
        {
            if (columns.Count == 0)
            {
                for (var i = 0; i < reader.FieldCount; i++)
                {
                    var colName = reader.GetName(i);
                    columns.Add(colName.ToLower(), colName);
                }
            }

            var evt = new ExecutionWorkloadEvent();

            var eventClass = (int)reader["EventClass"];

            if (eventClass == (int)EventClassEnum.RPC_Starting)
            {
                evt.Type = WorkloadEvent.EventType.RPCStarting;
            }
            else if (eventClass == (int)EventClassEnum.SQL_BatchStarting)
            {
                evt.Type = WorkloadEvent.EventType.BatchStarting;
            }
            else if (eventClass == (int)EventClassEnum.RPC_Completed)
            {
                evt.Type = WorkloadEvent.EventType.RPCCompleted;
            }
            else if (eventClass == (int)EventClassEnum.SQL_BatchCompleted)
            {
                evt.Type = WorkloadEvent.EventType.BatchCompleted;
            }
            else if (eventClass == (int)EventClassEnum.Audit_Login)
            {
                if (IsValidColumn("EventSubClass") && reader["EventSubClass"] != DBNull.Value)
                {
                    var vEventSubClass = (int)reader["EventSubClass"];
                    if (vEventSubClass == 1) /* 1 - Nonpooled */
                    {
                        evt.Type = WorkloadEvent.EventType.RPCStarting;
                        // A nonpooled login will trigger Login event with EventSubClass = 1
                        // Setting text to sp_reset_connection and including comment on to
                        // be able to understand this is a nonpooled login on replay
                        evt.Text = "exec sp_reset_connection /*Nonpooled*/";
                    }
                    else
                    {
                        evt.Type = WorkloadEvent.EventType.Unknown;
                        return evt;
                    }
                }
            }
            else if (eventClass == (int)EventClassEnum.Timeout)
            {
                // Only timeouts injected by WorkloadTools itself are relevant.
                if (reader["TextData"].ToString().StartsWith("WorkloadTools.Timeout["))
                {
                    evt.Type = WorkloadEvent.EventType.Timeout;
                }
            }
            else
            {
                evt.Type = WorkloadEvent.EventType.Unknown;
                return evt;
            }

            if (IsValidColumn("ApplicationName") && reader["ApplicationName"] != DBNull.Value)
            {
                evt.ApplicationName = (string)reader["ApplicationName"];
            }
            if (IsValidColumn("DatabaseName") && reader["DatabaseName"] != DBNull.Value)
            {
                evt.DatabaseName = (string)reader["DatabaseName"];
            }
            if (IsValidColumn("HostName") && reader["HostName"] != DBNull.Value)
            {
                evt.HostName = (string)reader["HostName"];
            }
            if (IsValidColumn("LoginName") && reader["LoginName"] != DBNull.Value)
            {
                evt.LoginName = (string)reader["LoginName"];
            }
            if (IsValidColumn("SPID") && reader["SPID"] != DBNull.Value)
            {
                evt.SPID = (int?)reader["SPID"];
            }
            // For Audit Login the TextData is replaced by the synthetic
            // sp_reset_connection text assigned above.
            if (IsValidColumn("TextData") && reader["TextData"] != DBNull.Value && eventClass != (int)EventClassEnum.Audit_Login)
            {
                evt.Text = (string)reader["TextData"];
            }
            if (IsValidColumn("StartTime") && reader["StartTime"] != DBNull.Value)
            {
                evt.StartTime = (DateTime)reader["StartTime"];
            }

            if (evt.Type == WorkloadEvent.EventType.Timeout)
            {
                // Timeouts carry their payload in BinaryData (UTF-16 text)
                // and their duration inside the TextData description.
                if (IsValidColumn("BinaryData") && reader["BinaryData"] != DBNull.Value)
                {
                    var bytes = (byte[])reader["BinaryData"];
                    evt.Text = Encoding.Unicode.GetString(bytes);
                }
                if (IsValidColumn("TextData") && reader["TextData"] != DBNull.Value)
                {
                    evt.Duration = ExtractTimeoutDuration(reader["TextData"]);
                }
                evt.CPU = Convert.ToInt64(evt.Duration);
            }
            else
            {
                if (IsValidColumn("Reads") && reader["Reads"] != DBNull.Value)
                {
                    evt.Reads = (long?)reader["Reads"];
                }
                if (IsValidColumn("Writes") && reader["Writes"] != DBNull.Value)
                {
                    evt.Writes = (long?)reader["Writes"];
                }
                if (IsValidColumn("CPU") && reader["CPU"] != DBNull.Value)
                {
                    evt.CPU = (long?)Convert.ToInt64(reader["CPU"]) * 1000; // SqlTrace captures CPU as milliseconds => convert to microseconds
                }
                if (IsValidColumn("Duration") && reader["Duration"] != DBNull.Value)
                {
                    evt.Duration = (long?)reader["Duration"];
                }
                if (IsValidColumn("EventSequence") && reader["EventSequence"] != DBNull.Value)
                {
                    evt.EventSequence = (long?)reader["EventSequence"];
                }
            }

            return evt;
        }

        // Extracts the timeout duration from the textual description by
        // collecting its digits; defaults to 30 seconds when no digits are
        // present (previously Convert.ToInt64("") threw a FormatException).
        // The result is returned in microseconds.
        private long? ExtractTimeoutDuration(object textData)
        {
            long result = 30;
            if (textData != DBNull.Value)
            {
                var description = (string)textData;
                var durationAsString = new string(description.Where(char.IsDigit).ToArray());
                if (durationAsString.Length > 0)
                {
                    result = Convert.ToInt64(durationAsString);
                }
            }
            return result * 1000 * 1000;
        }

        // True when the rowset exposes the given column (case-insensitive).
        private bool IsValidColumn(string colName)
        {
            return columns.ContainsKey(colName.ToLower());
        }
    }
}
public object TraceFile { get; set; }

// Cached indexer PropertyInfo instances: resolved on first use instead of
// re-scanning GetProperties() on every single column access (GetValue is
// called several times per trace row). Cannot be initialized inline because
// _baseType is assigned by the static constructor.
private static PropertyInfo _stringIndexer;
private static PropertyInfo _intIndexer;

public TraceFileWrapper()
{
    // Instantiate the SMO TraceFile type via reflection: no compile-time
    // dependency on Microsoft.SqlServer.ConnectionInfoExtended.
    TraceFile = _baseType.InvokeMember((string)null, BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.CreateInstance, (Binder)null, (object)null, (object[])null);
}

// Column access by name, delegated to the TraceFile string indexer.
public object this[string name]
{
    get
    {
        if (_stringIndexer == null)
        {
            _stringIndexer = _baseType
                .GetProperties()
                .Single(p => p.GetIndexParameters().Length == 1 && p.GetIndexParameters()[0].ParameterType == typeof(string));
        }
        return _stringIndexer.GetValue(TraceFile, new object[] { name });
    }
}

// Column access by ordinal, delegated to the TraceFile int indexer.
public object this[int index]
{
    get
    {
        if (_intIndexer == null)
        {
            _intIndexer = _baseType
                .GetProperties()
                .Single(p => p.GetIndexParameters().Length == 1 && p.GetIndexParameters()[0].ParameterType == typeof(int));
        }
        return _intIndexer.GetValue(TraceFile, new object[] { index });
    }
}

public object GetValue(string Name)
{
    return this[Name];
}

// Probes for a column by attempting to read it: the SMO indexer throws
// when the column does not exist.
public bool HasAttribute(string Name)
{
    try
    {
        _ = GetValue(Name);
        return true;
    }
    catch (Exception)
    {
        return false;
    }
}

// Opens the given .trc file for reading through the SMO TraceFile object.
public void InitializeAsReader(string fileName)
{
    var args = new object[1] { fileName };
    _ = _baseType.InvokeMember("InitializeAsReader", BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.InvokeMethod, (Binder)null, TraceFile, args);
}

// Advances to the next trace row; returns false at end of file.
public bool Read()
{
    return (bool)_baseType.InvokeMember("Read", BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.InvokeMethod, (Binder)null, TraceFile, (object[])null);
}
namespace WorkloadTools.Listener.Trace
{
    /// <summary>
    /// Predicate that is pushed down to the server as a series of
    /// sp_trace_setfilter calls appended to the trace creation script.
    /// </summary>
    class TraceFilterPredicate : FilterPredicate
    {
        public TraceFilterPredicate(FilterColumnName name) : base(name)
        {
        }

        // Emits one sp_trace_setfilter statement per predicate value.
        // @logical_operator: 0 = AND, 1 = OR (see sp_trace_setfilter docs).
        public override string PushDown()
        {
            if (!IsPredicateSet)
            {
                return string.Empty;
            }

            IsPushedDown = true;

            var result = "";
            var hasPositives = false;
            var hasNegatives = false;

            for (var i = 0; i < ComparisonOperator.Length; i++)
            {
                if (ComparisonOperator[i] == FilterComparisonOperator.Not_Equal)
                {
                    hasNegatives = true;
                }
                else
                {
                    hasPositives = true;
                }
            }

            for (var i = 0; i < PredicateValue.Length; i++)
            {
                // When both positive and negative predicates are present,
                // only the positives matter: an explicit allow-list already
                // excludes everything else.
                if (hasNegatives && hasPositives && ComparisonOperator[i] == FilterComparisonOperator.Not_Equal)
                {
                    continue;
                }

                // The first filter combines with AND; subsequent values use
                // OR, except when all predicates are negative (then each
                // exclusion must hold, so AND is correct).
                // FIX: the OR operator is passed as the @logical_operator
                // parameter (1) instead of being concatenated as loose text
                // between statements, and consecutive statements are now
                // separated by a newline so the generated script parses.
                var logicalOperator = "0"; // AND
                if (i > 0 && !(hasNegatives && !hasPositives))
                {
                    logicalOperator = "1"; // OR
                }

                if (result.Length > 0)
                {
                    result += Environment.NewLine;
                }

                result += "exec sp_trace_setfilter @TraceID, " + (byte)ColumnName + " , " + logicalOperator + ", " + (byte)ComparisonOperator[i] + ", N'" + EscapeFilter(PredicateValue[i]) + "'";
            }

            return result;
        }
    }
}
isRunning; } } static TraceServerWrapper() { try { #pragma warning disable 618 _baseAssembly = Assembly.LoadWithPartialName("Microsoft.SqlServer.ConnectionInfoExtended"); #pragma warning restore 618 logger.Info(string.Format("SMO Version: {0}", (object)_baseAssembly.FullName.ToString())); _baseType = _baseAssembly.GetType("Microsoft.SqlServer.Management.Trace.TraceServer"); } catch (Exception ex) { logger.Error("Unable to load SMO library"); logger.Error(ex.Message); throw; } } public TraceServerWrapper() { TraceServer = _baseType.InvokeMember((string)null, BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.CreateInstance, (Binder)null, (object)null, (object[])null); } public object TraceServer { get; set; } public object this[ string name ] { get { var indexer = _baseType .GetProperties() .Single(p => p.GetIndexParameters().Length == 1 && p.GetIndexParameters()[0].ParameterType == typeof(string)); //PropertyInfo indexer = _baseType.GetProperty("Item"); return indexer.GetValue(TraceServer, new object[] { name }); //return _baseType.InvokeMember("get_Item", BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.InvokeMethod, (Binder)null, TraceServer, new object[] { name }); } } public object GetValue(string Name) { return this[Name]; } public bool Read() { return (bool)_baseType.InvokeMember("Read", BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.InvokeMethod, (Binder)null, TraceServer, (object[])null); } public void Stop() { //_baseType.InvokeMember("Pause", BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.InvokeMethod, (Binder)null, TraceServer, (object[])null); isRunning = false; _ = _baseType.InvokeMember("Stop", BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.InvokeMethod, (Binder)null, TraceServer, 
internal class TraceUtils
{
    // Returns the id of the server-side trace writing to files under the
    // given path prefix, or -1 when no such trace exists.
    public int GetTraceId(SqlConnection conn, string path)
    {
        // The path is passed as a parameter instead of being formatted
        // into the statement: paths may contain quotes, and parameters
        // avoid any string-built SQL.
        var sql = @"
            SELECT TOP(1) id
            FROM (
                SELECT id FROM sys.traces WHERE path LIKE @path + '%'
                UNION ALL
                SELECT -1
            ) AS i
            ORDER BY id DESC
        ";

        using (var cmd = conn.CreateCommand())
        {
            cmd.CommandText = sql;
            var p = cmd.CreateParameter();
            p.ParameterName = "@path";
            p.DbType = System.Data.DbType.String;
            p.Value = path;
            _ = cmd.Parameters.Add(p);
            return (int)cmd.ExecuteScalar();
        }
    }

    // Returns the default log directory of the instance, read from the
    // registry, falling back to the directory of master's log file.
    public string GetSqlDefaultLogPath(SqlConnection conn)
    {
        var sql = @"
            DECLARE @defaultLog nvarchar(4000);

            EXEC master.dbo.xp_instance_regread
                N'HKEY_LOCAL_MACHINE',
                N'Software\Microsoft\MSSQLServer\MSSQLServer',
                N'DefaultLog',
                @defaultLog OUTPUT;

            IF @defaultLog IS NULL
            BEGIN
                SELECT @defaultLog = REPLACE(physical_name,'mastlog.ldf','')
                FROM sys.master_files
                WHERE file_id = 2 AND database_id = 1;
            END

            SELECT @defaultLog AS DefaultLog;
        ";

        using (var cmd = conn.CreateCommand())
        {
            cmd.CommandText = sql;
            return (string)cmd.ExecuteScalar();
        }
    }
FROM fn_trace_gettable(@path, default) ) AS data WHERE EventSequence IS NOT NULL AND SPID IS NOT NULL "; using (var cmd = conn.CreateCommand()) { cmd.CommandText = sql; var p = cmd.CreateParameter(); p.ParameterName = "@path"; p.DbType = System.Data.DbType.AnsiString; p.Value = path; _ = cmd.Parameters.Add(p); return ((int)cmd.ExecuteScalar()) > 0; } } } } ================================================ FILE: WorkloadTools/Listener/Trace/sqlworkload.sql ================================================  -- Create a Queue declare @rc int declare @TraceID int declare @maxfilesize bigint set @maxfilesize = '{0}'; declare @maxnumfiles int set @maxnumfiles = '{1}'; exec @rc = sp_trace_create @TraceID output, 2, N'{2}', @maxfilesize, NULL, @maxnumfiles if (@rc != 0) goto error -- Set the events declare @on bit set @on = 1 /* RPC:Starting */ EXEC sp_trace_setevent @TraceID, 11, 1, @on exec sp_trace_setevent @TraceID, 11, 10, @on exec sp_trace_setevent @TraceID, 11, 8, @on exec sp_trace_setevent @TraceID, 11, 11, @on exec sp_trace_setevent @TraceID, 11, 12, @on exec sp_trace_setevent @TraceID, 11, 13, @on exec sp_trace_setevent @TraceID, 11, 14, @on exec sp_trace_setevent @TraceID, 11, 15, @on exec sp_trace_setevent @TraceID, 11, 16, @on exec sp_trace_setevent @TraceID, 11, 17, @on exec sp_trace_setevent @TraceID, 11, 18, @on exec sp_trace_setevent @TraceID, 11, 35, @on exec sp_trace_setevent @TraceID, 11, 3, @on exec sp_trace_setevent @TraceID, 11, 31, @on exec sp_trace_setevent @TraceID, 11, 51, @on /* RPC:Completed */ EXEC sp_trace_setevent @TraceID, 10, 1, @on exec sp_trace_setevent @TraceID, 10, 10, @on exec sp_trace_setevent @TraceID, 10, 8, @on exec sp_trace_setevent @TraceID, 10, 11, @on exec sp_trace_setevent @TraceID, 10, 12, @on exec sp_trace_setevent @TraceID, 10, 13, @on exec sp_trace_setevent @TraceID, 10, 14, @on exec sp_trace_setevent @TraceID, 10, 15, @on exec sp_trace_setevent @TraceID, 10, 16, @on exec sp_trace_setevent @TraceID, 10, 17, @on exec 
sp_trace_setevent @TraceID, 10, 18, @on exec sp_trace_setevent @TraceID, 10, 35, @on exec sp_trace_setevent @TraceID, 10, 3, @on exec sp_trace_setevent @TraceID, 10, 31, @on exec sp_trace_setevent @TraceID, 10, 51, @on /* SQL:BatchCompleted */ exec sp_trace_setevent @TraceID, 12, 1, @on exec sp_trace_setevent @TraceID, 12, 11, @on exec sp_trace_setevent @TraceID, 12, 8, @on exec sp_trace_setevent @TraceID, 12, 10, @on exec sp_trace_setevent @TraceID, 12, 12, @on exec sp_trace_setevent @TraceID, 12, 13, @on exec sp_trace_setevent @TraceID, 12, 14, @on exec sp_trace_setevent @TraceID, 12, 15, @on exec sp_trace_setevent @TraceID, 12, 16, @on exec sp_trace_setevent @TraceID, 12, 17, @on exec sp_trace_setevent @TraceID, 12, 18, @on exec sp_trace_setevent @TraceID, 12, 35, @on exec sp_trace_setevent @TraceID, 12, 3, @on exec sp_trace_setevent @TraceID, 12, 31, @on exec sp_trace_setevent @TraceID, 12, 51, @on /* SQL:BatchStarting */ exec sp_trace_setevent @TraceID, 13, 1, @on exec sp_trace_setevent @TraceID, 13, 11, @on exec sp_trace_setevent @TraceID, 13, 8, @on exec sp_trace_setevent @TraceID, 13, 10, @on exec sp_trace_setevent @TraceID, 13, 12, @on exec sp_trace_setevent @TraceID, 13, 13, @on exec sp_trace_setevent @TraceID, 13, 14, @on exec sp_trace_setevent @TraceID, 13, 15, @on exec sp_trace_setevent @TraceID, 13, 16, @on exec sp_trace_setevent @TraceID, 13, 17, @on exec sp_trace_setevent @TraceID, 13, 18, @on exec sp_trace_setevent @TraceID, 13, 35, @on exec sp_trace_setevent @TraceID, 13, 3, @on exec sp_trace_setevent @TraceID, 13, 31, @on exec sp_trace_setevent @TraceID, 13, 51, @on /* Audit Login */ exec sp_trace_setevent @TraceID, 14, 1, @on exec sp_trace_setevent @TraceID, 14, 11, @on exec sp_trace_setevent @TraceID, 14, 8, @on exec sp_trace_setevent @TraceID, 14, 10, @on exec sp_trace_setevent @TraceID, 14, 12, @on exec sp_trace_setevent @TraceID, 14, 14, @on exec sp_trace_setevent @TraceID, 14, 35, @on exec sp_trace_setevent @TraceID, 14, 3, @on exec 
sp_trace_setevent @TraceID, 14, 51, @on exec sp_trace_setevent @TraceID, 14, 21, @on /* UserConfigurable:0 */ exec sp_trace_setevent @TraceID, 82, 1, @on exec sp_trace_setevent @TraceID, 82, 2, @on exec sp_trace_setevent @TraceID, 82, 11, @on exec sp_trace_setevent @TraceID, 82, 8, @on exec sp_trace_setevent @TraceID, 82, 10, @on exec sp_trace_setevent @TraceID, 82, 12, @on exec sp_trace_setevent @TraceID, 82, 14, @on exec sp_trace_setevent @TraceID, 82, 35, @on exec sp_trace_setevent @TraceID, 82, 3, @on exec sp_trace_setevent @TraceID, 82, 51, @on /* UserConfigurable:0 */ exec sp_trace_setevent @TraceID, 83, 1, @on exec sp_trace_setevent @TraceID, 83, 2, @on exec sp_trace_setevent @TraceID, 83, 11, @on exec sp_trace_setevent @TraceID, 83, 8, @on exec sp_trace_setevent @TraceID, 83, 10, @on exec sp_trace_setevent @TraceID, 83, 12, @on exec sp_trace_setevent @TraceID, 83, 14, @on exec sp_trace_setevent @TraceID, 83, 35, @on exec sp_trace_setevent @TraceID, 83, 3, @on exec sp_trace_setevent @TraceID, 83, 51, @on -- Set the Filters exec sp_trace_setfilter @TraceID, 10 , 0, 1, N'WorkloadTools'; {3} -- Set the trace status to start exec sp_trace_setstatus @TraceID, 1 -- display trace id for future references select TraceID=@TraceID goto finish error: select ErrorCode=@rc finish: ================================================ FILE: WorkloadTools/MMFEventQueue.cs ================================================ using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Text; using NFX.ApplicationModel.Pile; namespace WorkloadTools { public class MMFEventQueue : IDisposable , IEventQueue { private readonly ConcurrentQueue pointers; private readonly MMFPile pile; // this has no effect on a memory mapped file... 
namespace WorkloadTools
{
    /// <summary>
    /// Control-message event flowing through the workload pipeline,
    /// used to carry out-of-band information such as the total number
    /// of events contained in a workload file.
    /// </summary>
    [Serializable]
    public class MessageWorkloadEvent : WorkloadEvent
    {
        /// <summary>
        /// Kind of message carried by this event.
        /// </summary>
        public enum MessageType
        {
            TotalEvents
        }

        /// <summary>The kind of message.</summary>
        public MessageType MsgType { get; set; }

        /// <summary>The message payload.</summary>
        public object Value { get; set; }

        public MessageWorkloadEvent() => Type = EventType.Message;
    }
}
// //------------------------------------------------------------------------------ namespace WorkloadTools.Properties { [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "17.3.0.0")] internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase { private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings()))); public static Settings Default { get { return defaultInstance; } } [global::System.Configuration.ApplicationScopedSettingAttribute()] [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] [global::System.Configuration.DefaultSettingValueAttribute("32")] public int ReplayConsumer_SEMAPHORE_LIMIT { get { return ((int)(this["ReplayConsumer_SEMAPHORE_LIMIT"])); } } [global::System.Configuration.ApplicationScopedSettingAttribute()] [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] [global::System.Configuration.DefaultSettingValueAttribute("15")] public int ReplayConsumer_WORKER_EXPIRY_TIMEOUT_SECONDS { get { return ((int)(this["ReplayConsumer_WORKER_EXPIRY_TIMEOUT_SECONDS"])); } } } } ================================================ FILE: WorkloadTools/Properties/Settings.settings ================================================  32 15 ================================================ FILE: WorkloadTools/Properties/SharedAssemblyInfo.cs ================================================ using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. 
[assembly: AssemblyTitle("SqlWorkload")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("spaghettidba")]
[assembly: AssemblyProduct("SqlWorkload")]
[assembly: AssemblyCopyright("Copyright © 2018 spaghettidba")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.4")]
[assembly: AssemblyFileVersion("1.0.4")]

================================================
FILE: WorkloadTools/SqlConnectionInfo.cs
================================================
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace WorkloadTools
{
    // Connection settings used to build SQL Server connection strings for
    // listeners and consumers. DatabaseMap lets a workload captured against
    // one database be replayed against a differently named one (see
    // ConnectionString(), which substitutes the mapped name).
    public class SqlConnectionInfo
    {
        public string ServerName { get; set; }
        public string DatabaseName { get; set; } = "master";
        public string SchemaName { get; set; } = "dbo";
        public bool UseIntegratedSecurity { get; set; }
        public string UserName { get; set; }
        public string Password { get; set; }
        public bool Encrypt { get; set; } = false;
        public bool TrustServerCertificate { get; set; } = false;
        public string ApplicationName { get; set; } = "WorkloadTools";
        public int MaxPoolSize { get; set; } = 500;

        // NOTE(review): generic type arguments appear stripped by extraction -
        // presumably Dictionary<string, string> mapping source database name
        // to target database name; verify against the repository.
        public Dictionary DatabaseMap { get; set; } = new Dictionary();

        public SqlConnectionInfo()
        {
        }

        // Copy constructor. Scalar settings are copied by value; DatabaseMap
        // is copied by reference, so both instances share the same map.
        public SqlConnectionInfo(SqlConnectionInfo info)
        {
            this.ServerName = info.ServerName;
            this.DatabaseName = info.DatabaseName;
            this.SchemaName = info.SchemaName;
            this.UseIntegratedSecurity =
info.UseIntegratedSecurity; this.UserName = info.UserName; this.Password = info.Password; this.Encrypt = info.Encrypt; this.TrustServerCertificate = info.TrustServerCertificate; this.ApplicationName = info.ApplicationName; this.MaxPoolSize = info.MaxPoolSize; this.DatabaseMap = info.DatabaseMap; } public string ConnectionString() { return ConnectionString(ApplicationName); } public string ConnectionString(string applicationName) { var connectionString = "Data Source=" + ServerName + "; "; connectionString += "Max Pool Size = " + MaxPoolSize + "; "; if (string.IsNullOrEmpty(DatabaseName)) { connectionString += "Initial Catalog = master; "; } else { // try to replace database name with the name // in the database map, if any var effectiveDatabaseName = DatabaseName; if (DatabaseMap.ContainsKey(DatabaseName)) { effectiveDatabaseName = DatabaseMap[DatabaseName]; } connectionString += "Initial Catalog = " + effectiveDatabaseName + "; "; } if (string.IsNullOrEmpty(UserName)) { connectionString += "Integrated Security = SSPI; "; } else { connectionString += "User Id = " + UserName + "; "; connectionString += "Password = " + Password + "; "; } if (!string.IsNullOrEmpty(applicationName)) { connectionString += "Application Name = " + applicationName + "; "; } if (Encrypt) { connectionString += "Encrypt = true; "; } if (TrustServerCertificate) { connectionString += "TrustServerCertificate = true; "; } return connectionString; } } } ================================================ FILE: WorkloadTools/SqliteEventQueue.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace WorkloadTools { public class SqliteEventQueue : BufferedEventQueue { public SqliteEventQueue() : base() { } protected override void Dispose(bool disposing) { throw new NotImplementedException(); } protected override WorkloadEvent[] ReadEvents(int count) { // STRATEGY: // do not attempt 
deleting rows returned // read all rows from the table and drop it throw new NotImplementedException(); } protected override void WriteEvents(WorkloadEvent[] events) { // STRATEGY: // write to a table // the table name has an index postfix like cache01, cache02... throw new NotImplementedException(); } } } ================================================ FILE: WorkloadTools/Util/DataUtils.cs ================================================ using System; using System.Collections.Generic; using System.Data; using System.Linq; using System.Reflection; using System.Text; namespace WorkloadTools.Util { public class DataUtils { /// /// Convert a List{T} to a DataTable. /// public static DataTable ToDataTable(IEnumerable items) { var tb = new DataTable(typeof(T).Name); var props = typeof(T).GetProperties(BindingFlags.Public | BindingFlags.Instance); foreach (var prop in props) { var t = GetCoreType(prop.PropertyType); _ = tb.Columns.Add(prop.Name, t); } foreach (var item in items) { var values = new object[props.Length]; for (var i = 0; i < props.Length; i++) { values[i] = props[i].GetValue(item, null); } _ = tb.Rows.Add(values); } return tb; } /// /// Determine of specified type is nullable /// public static bool IsNullable(Type t) { return !t.IsValueType || (t.IsGenericType && t.GetGenericTypeDefinition() == typeof(Nullable<>)); } /// /// Return underlying type if type is Nullable otherwise return the type /// public static Type GetCoreType(Type t) { if (t != null && IsNullable(t)) { if (!t.IsValueType) { return t; } else { return Nullable.GetUnderlyingType(t); } } else { return t; } } } } ================================================ FILE: WorkloadTools/Util/ModelConverter.cs ================================================ using System; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Diagnostics; using System.Linq; using System.Reflection; using System.Text; using System.Threading.Tasks; using 
System.Web.Script.Serialization; using WorkloadTools; namespace WorkloadTools.Util { public class ModelConverter : JavaScriptConverter { public override IEnumerable SupportedTypes { get { var result = new List(); var currentAssembly = Assembly.GetExecutingAssembly(); var nameSpace = "WorkloadTools"; var types = currentAssembly.GetTypes().Where(t => t != null && t.FullName.StartsWith(nameSpace) & !t.FullName.Contains("+")).ToArray(); foreach (var t in types) { try { result.Add(t); } catch (Exception) { throw; } } return result; } } public override object Deserialize(IDictionary dictionary, Type type, JavaScriptSerializer serializer) { object p; try { // try to create the object using its parameterless constructor p = Activator.CreateInstance(type); } catch { // try to create the object using this scary initializer that // doesn't need the parameterless constructor p = System.Runtime.Serialization.FormatterServices.GetUninitializedObject(type); } var props = type.GetProperties(); foreach (var key in dictionary.Keys) { var prop = props.Where(t => t.Name == key).FirstOrDefault(); if (prop != null) { if (prop.Name.EndsWith("Filter")) { if (dictionary[key] is string stringValue) { prop.SetValue(p, new string[] { stringValue }, null); } else { prop.SetValue(p, (string[])((ArrayList)dictionary[key]).ToArray(typeof(string)), null); } } else { if (dictionary[key] is Dictionary dictionaryValue) { if (prop.PropertyType.IsGenericType && prop.PropertyType.GetGenericTypeDefinition() == typeof(Dictionary<,>)) { var rawDic = dictionaryValue; var obj = Activator.CreateInstance(prop.PropertyType); foreach (var itm in rawDic.Keys) { ((Dictionary)obj).Add(itm, rawDic[itm].ToString()); } prop.SetValue(p, obj, null); } else { prop.SetValue(p, Deserialize(dictionaryValue, prop.PropertyType, serializer), null); } } else { if (dictionary[key] is IList && prop.PropertyType.IsGenericType) { var obj = Activator.CreateInstance(prop.PropertyType); foreach (var itm in 
(IEnumerable)dictionary[key]) { _ = ((IList)obj).Add(itm); } prop.SetValue(p, obj, null); } else { prop.SetValue(p, GetValueOfType(dictionary[key], prop.PropertyType), null); } } } } } return p; } private object GetValueOfType(object v, Type propertyType) { if (propertyType == typeof(string)) { return (string)v; } else if (propertyType == typeof(bool)) { return Convert.ToBoolean(v); } else if (propertyType == typeof(int)) { return Convert.ToInt32(v); } else if (propertyType == typeof(long)) { return Convert.ToInt64(v); } else if (propertyType == typeof(DateTime)) { return Convert.ToDateTime(v); } else { return v; } } public override IDictionary Serialize(object obj, JavaScriptSerializer serializer) { throw new NotImplementedException(); } } } ================================================ FILE: WorkloadTools/Util/RingBuffer.cs ================================================ using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics; namespace WorkloadTools.Util { /// /// Represents a fixted length ring buffer to store a specified maximal count of items within. /// /// The generic type of the items stored within the ring buffer. [DebuggerDisplay("Count = {Count}")] public class RingBuffer : IList, ICollection, IEnumerable { /// /// Creates a new instance of a with a /// specified cache size. /// /// The maximal count of items to be stored within /// the ring buffer. public RingBuffer(int capacity) { // validate capacity if (capacity <= 0) { throw new ArgumentException("Must be greater than zero", "capacity"); } // set capacity and init the cache Capacity = capacity; _buffer = new T[capacity]; } /// /// the internal buffer /// readonly T[] _buffer; /// /// The all-over position within the ring buffer. The position /// increases continously by adding new items to the buffer. This /// value is needed to calculate the current relative position within the /// buffer. 
/// int _position; /// /// The current version of the buffer, this is required for a correct /// exception handling while enumerating over the items of the buffer. /// long _version; /// /// Gets or sets an item for a specified position within the ring buffer. /// /// The position to get or set an item. /// The fond item at the specified position within the ring buffer. /// /// public T this[int index] { get { // validate the index if (index < 0 || index >= Count) { throw new IndexOutOfRangeException(); } // calculate the relative position within the rolling base array var index2 = (_position - Count + index) % Capacity; return _buffer[index2]; } set => Insert(index, value); } public T Last() { return this[Count-1]; } /// /// Gets the maximal count of items within the ring buffer. /// public int Capacity { get; private set; } /// /// Get the current count of items within the ring buffer. /// public int Count { get; private set; } /// /// Adds a new item to the buffer. /// /// The item to be added to the buffer. public void Add(T item) { // avoid an arithmetic overflow if (_position == int.MaxValue) { _position = _position % Capacity; } // add a new item to the current relative position within the // buffer and increase the position _buffer[_position++ % Capacity] = item; // increase the count if capacity is not yet reached if (Count < Capacity) { Count++; } // buffer changed; next version _version++; } /// /// Clears the whole buffer and releases all referenced objects /// currently stored within the buffer. /// public void Clear() { for (var i = 0; i < Count; i++) { _buffer[i] = default(T); } _position = 0; Count = 0; _version++; } /// /// Determines if a specified item is currently present within /// the buffer. /// /// The item to search for within the current /// buffer. /// True if the specified item is currently present within /// the buffer; otherwise false. 
public bool Contains(T item) { var index = IndexOf(item); return index != -1; } /// /// Copies the current items within the buffer to a specified array. /// /// The target array to copy the items of /// the buffer to. /// The start position witihn the target /// array to start copying. public void CopyTo(T[] array, int arrayIndex) { for (var i = 0; i < Count; i++) { array[i + arrayIndex] = _buffer[(_position - Count + i) % Capacity]; } } /// /// Gets an enumerator over the current items within the buffer. /// /// An enumerator over the current items within the buffer. /// public IEnumerator GetEnumerator() { var version = _version; for (var i = 0; i < Count; i++) { if (version != _version) { throw new InvalidOperationException("Collection changed"); } yield return this[i]; } } /// /// Gets the position of a specied item within the ring buffer. /// /// The item to get the current position for. /// The zero based index of the found item within the /// buffer. If the item was not present within the buffer, this /// method returns -1. public int IndexOf(T item) { // loop over the current count of items for (var i = 0; i < Count; i++) { // get the item at the relative position within the internal array var item2 = _buffer[(_position - Count + i) % Capacity]; // if both items are null, return true if (null == item && null == item2) { return i; } // if equal return the position if (item != null && item.Equals(item2)) { return i; } } // nothing found return -1; } /// /// Inserts an item at a specified position into the buffer. /// /// The position within the buffer to add /// the new item. /// The new item to be added to the buffer. /// /// /// If the specified index is equal to the current count of items /// within the buffer, the specified item will be added. /// /// Warning /// Frequent usage of this method might become a bad idea if you are /// working with a large buffer capacity. 
The insertion of an item /// at a specified position within the buffer causes causes all present /// items below the specified position to be moved one position. /// public void Insert(int index, T item) { // validate index if (index < 0 || index > Count) { throw new IndexOutOfRangeException(); } // add if index equals to count if (index == Count) { Add(item); return; } // get the maximal count of items to be moved var count = Math.Min(Count, Capacity - 1) - index; // get the relative position of the new item within the buffer var index2 = (_position - Count + index) % Capacity; // move all items below the specified position for (var i = index2 + count; i > index2; i--) { var to = i % Capacity; var from = (i - 1) % Capacity; _buffer[to] = _buffer[from]; } // set the new item _buffer[index2] = item; // adjust storage information if (Count < Capacity) { Count++; _position++; } // buffer changed; next version _version++; } /// /// Removes a specified item from the current buffer. /// /// The item to be removed. /// True if the specified item was successfully removed /// from the buffer; otherwise false. /// /// Warning /// Frequent usage of this method might become a bad idea if you are /// working with a large buffer capacity. The removing of an item /// requires a scan of the buffer to get the position of the specified /// item. If the item was found, the deletion requires a move of all /// items stored abouve the found position. /// public bool Remove(T item) { // find the position of the specified item var index = IndexOf(item); // item was not found; return false if (index == -1) { return false; } // remove the item at the specified position RemoveAt(index); return true; } /// /// Removes an item at a specified position within the buffer. /// /// The position of the item to be removed. /// /// /// Warning /// Frequent usage of this method might become a bad idea if you are /// working with a large buffer capacity. 
The deletion requires a move /// of all items stored abouve the found position. /// public void RemoveAt(int index) { // validate the index if (index < 0 || index >= Count) { throw new IndexOutOfRangeException(); } // move all items above the specified position one step // closer to zeri for (var i = index; i < Count - 1; i++) { // get the next relative target position of the item var to = (_position - Count + i) % Capacity; // get the next relative source position of the item var from = (_position - Count + i + 1) % Capacity; // move the item _buffer[to] = _buffer[from]; } // get the relative position of the last item, which becomes empty // after deletion and set the item as empty var last = (_position - 1) % Capacity; _buffer[last] = default(T); // adjust storage information _position--; Count--; // buffer changed; next version _version++; } /// /// Gets if the buffer is read-only. This method always returns false. /// bool ICollection.IsReadOnly { get { return false; } } /// /// See generic implementation of . /// /// See generic implementation of . 
/// IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } } } ================================================ FILE: WorkloadTools/Util/StringExtensions.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace WorkloadTools.Util { public static class StringExtensions { public static string Right(this string value, int count) { //Check if the value is valid if (string.IsNullOrEmpty(value)) { //Set valid empty string as string could be null value = string.Empty; } else if (value.Length > count) { //Make the string no longer than the max length value = value.Substring(value.Length - count, count); } //Return the string return value; } public static string ReplaceFirst(this string text, string search, string replace) { var pos = text.IndexOf(search); if (pos < 0) { return text; } return text.Substring(0, pos) + replace + text.Substring(pos + search.Length); } } } ================================================ FILE: WorkloadTools/WaitStatsWorkloadEvent.cs ================================================ using System; using System.Collections.Generic; using System.Data; using System.Linq; using System.Text; namespace WorkloadTools { [Serializable] public class WaitStatsWorkloadEvent : WorkloadEvent { public DataTable Waits; public WaitStatsWorkloadEvent() { Type = EventType.WAIT_stats; } } } ================================================ FILE: WorkloadTools/WorkloadController.cs ================================================ using NLog; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading; using System.Threading.Tasks; using WorkloadTools.Consumer; namespace WorkloadTools { public class WorkloadController : IDisposable { private static readonly Logger logger = LogManager.GetCurrentClassLogger(); public static string BaseLocation = new 
Uri(System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().CodeBase)).LocalPath; public WorkloadListener Listener { get; set; } public List Consumers { get; set; } = new List(); private bool forceStopped = false; private bool stopped = false; private bool disposed = false; private const int MAX_DISPOSE_TIMEOUT_SECONDS = 5; public WorkloadController() { } public void Run() { try { var startTime = Listener.StartAt; var endTime = DateTime.MaxValue; Listener.Initialize(); logger.Info("Listener of type {ListenerTypeName} initialized correctly", Listener.GetType().Name); logger.Info("Event collection starts at {startTime}", startTime); // wait until Listener.StartAt has been reached while (DateTime.Now.CompareTo(startTime) < 0) { Thread.Sleep(100); } logger.Info("Waiting for events"); do { try { if ((!Listener.IsRunning && Consumers.All(c => !c.HasMoreEvents())) || (endTime < DateTime.Now)) { stopped = true; } if (endTime == DateTime.MaxValue && Listener.TimeoutMinutes != 0) { endTime = startTime.AddMinutes(Listener.TimeoutMinutes); } var evt = Listener.Read(); if (evt == null) { continue; } logger.Debug($"Event of type {evt.Type} read. 
Start Time: {evt.StartTime}");
                        // Fan the event out to all consumers in parallel; the
                        // next Listener.Read() happens only after every
                        // consumer has accepted this event.
                        _ = Parallel.ForEach(Consumers, (cons) => { cons.Consume(evt); });
                    }
                    catch (Exception e)
                    {
                        // Log and keep pumping: one failing event must not
                        // terminate the whole capture/replay run.
                        logger.Error("Exception reading event");
                        logger.Error(e.Message);
                        logger.Error(e.StackTrace);
                    }
                } while (!stopped);

                // even when the listener has finished, wait until all buffered consumers are finished
                // unless the controller has been explicitly stopped by invoking Stop()
                // give max 1 minute grace time
                if (!forceStopped)
                {
                    var beginWait = DateTime.Now;
                    while (Consumers.Where(c => c is BufferedWorkloadConsumer).Any(c => c.HasMoreEvents())
                        && DateTime.Now < beginWait.AddMinutes(1))
                    {
                        Thread.Sleep(10);
                    }
                }
            }
            catch (Exception e)
            {
                // Last-resort handler: log the exception plus the whole
                // InnerException chain, then return normally.
                logger.Error("Uncaught Exception");
                logger.Error(e.Message);
                logger.Error(e.StackTrace);
                var ex = e;
                while ((ex = ex.InnerException) != null)
                {
                    logger.Error(ex.Message);
                    logger.Error(ex.StackTrace);
                }
            }
        }

        // Requests an immediate stop: the read loop exits and the buffered
        // consumer grace period above is skipped.
        public void Stop()
        {
            forceStopped = true;
            stopped = true;
        }

        // Disposes consumers first, then the listener; guarded so repeated
        // calls are no-ops.
        public void Dispose()
        {
            if (!disposed)
            {
                disposed = true;
                foreach (var cons in Consumers)
                {
                    cons?.Dispose();
                }
                Listener?.Dispose();
            }
        }
    }
}

================================================
FILE: WorkloadTools/WorkloadEvent.cs
================================================
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace WorkloadTools
{
    // Base class for every event flowing through the capture/replay pipeline.
    [Serializable]
    public abstract class WorkloadEvent
    {
        // NOTE(review): BatchStarting is deliberately -3, the negative of
        // BatchCompleted (3); the reason is not evident from this file -
        // confirm before relying on sign semantics.
        public enum EventType
        {
            Message = 0,
            RPCCompleted = 1,
            RPCStarting = 2,
            BatchStarting = -3,
            BatchCompleted = 3,
            PerformanceCounter = 4,
            Timeout = 5,
            WAIT_stats = 6,
            Error = 7,
            DiskPerf = 8,
            Unknown = -1
        }

        public DateTime StartTime{ get; set; }

        public EventType Type { get; set; } = EventType.Unknown;
    }
}

================================================
FILE: WorkloadTools/WorkloadEventFilter.cs
================================================
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace WorkloadTools
{
    // Base filter combining application/database/host/login predicates for
    // client-side event filtering.
    public abstract class WorkloadEventFilter
    {
        public FilterPredicate
ApplicationFilter { get; set; }
        public FilterPredicate DatabaseFilter { get; set; }
        public FilterPredicate HostFilter { get; set; }
        public FilterPredicate LoginFilter { get; set; }

        public WorkloadEventFilter()
        {
        }

        // Decides whether an event passes the client-side filters.
        // Only execution events are filtered; every other event type passes
        // unconditionally. Predicates marked IsPushedDown (presumably already
        // enforced at the capture source - see PushDown below) are not
        // re-evaluated here.
        public bool Evaluate(WorkloadEvent evnt)
        {
            // don't filter events that are not supposed to be filtered
            if (!(evnt is ExecutionWorkloadEvent))
            {
                return true;
            }

            var evt = (ExecutionWorkloadEvent)evnt;

            // Only batch/RPC starting and completed events are eligible;
            // any other execution event type is dropped.
            if (evt.Type != WorkloadEvent.EventType.BatchStarting
                && evt.Type != WorkloadEvent.EventType.RPCStarting
                && evt.Type != WorkloadEvent.EventType.BatchCompleted
                && evt.Type != WorkloadEvent.EventType.RPCCompleted)
            {
                return false;
            }

            // No predicate configured at all: accept everything.
            if (!(DatabaseFilter.IsPredicateSet || LoginFilter.IsPredicateSet || HostFilter.IsPredicateSet || ApplicationFilter.IsPredicateSet))
            {
                return true;
            }

            // Each partial result defaults to "pass" when its predicate is
            // unset or already pushed down.
            var applicationFilterResults = !ApplicationFilter.IsPredicateSet || ApplicationFilter.IsPushedDown;
            var databaseFilterResults = !DatabaseFilter.IsPredicateSet || DatabaseFilter.IsPushedDown;
            var loginFilterResults = !LoginFilter.IsPredicateSet || LoginFilter.IsPushedDown;
            var hostFilterResults = !HostFilter.IsPredicateSet || HostFilter.IsPushedDown;

            // Case-insensitive (current culture) membership test of the
            // event's attribute against the configured predicate values.
            if (ApplicationFilter.IsPredicateSet && !ApplicationFilter.IsPushedDown)
            {
                applicationFilterResults = ApplicationFilter.PredicateValue.Contains(evt.ApplicationName, StringComparer.CurrentCultureIgnoreCase);
            }

            if (DatabaseFilter.IsPredicateSet && !DatabaseFilter.IsPushedDown)
            {
                databaseFilterResults = DatabaseFilter.PredicateValue.Contains(evt.DatabaseName, StringComparer.CurrentCultureIgnoreCase);
            }

            if (LoginFilter.IsPredicateSet && !LoginFilter.IsPushedDown)
            {
                loginFilterResults = LoginFilter.PredicateValue.Contains(evt.LoginName, StringComparer.CurrentCultureIgnoreCase);
            }

            if (HostFilter.IsPredicateSet && !HostFilter.IsPushedDown)
            {
                hostFilterResults = HostFilter.PredicateValue.Contains(evt.HostName, StringComparer.CurrentCultureIgnoreCase);
            }

            // All four partial results must agree for the event to pass.
            return applicationFilterResults && databaseFilterResults && loginFilterResults &&
hostFilterResults; } public void PushDown(FilterPredicate predicate) { _ = predicate.PushDown(); } } } ================================================ FILE: WorkloadTools/WorkloadListener.cs ================================================ using NLog; using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Data; using System.Data.SqlClient; using System.Linq; using System.Text; using System.Threading; using WorkloadTools.Util; namespace WorkloadTools { public abstract class WorkloadListener : IDisposable { private static readonly Logger logger = LogManager.GetCurrentClassLogger(); public SqlConnectionInfo ConnectionInfo { get; set; } public string Source { get; set; } private string[] _applicationFilter; private string[] _databaseFilter; private string[] _hostFilter; private string[] _loginFilter; public string[] ApplicationFilter { get => _applicationFilter; set { _applicationFilter = value; if (_filter != null) { _filter.ApplicationFilter.PredicateValue = _applicationFilter; } } } public string[] DatabaseFilter { get => _databaseFilter; set { _databaseFilter = value; if (_filter != null) { _filter.DatabaseFilter.PredicateValue = _databaseFilter; } } } public string[] HostFilter { get => _hostFilter; set { _hostFilter = value; if (_filter != null) { _filter.HostFilter.PredicateValue = _hostFilter; } } } public string[] LoginFilter { get => _loginFilter; set { _loginFilter = value; if (_filter != null) { _filter.LoginFilter.PredicateValue = _loginFilter; } } } public int StatsCollectionIntervalSeconds { get; set; } = 60; public int TimeoutMinutes { get; set; } = 0; public DateTime StartAt { get; set; } = DateTime.Now; private WorkloadEventFilter _filter; protected WorkloadEventFilter Filter { get { if (_filter != null) { return _filter; } else { return null; } } set => _filter = value; } protected IEventQueue Events; public EventQueueType QueueType = EventQueueType.BinarySerialized; protected bool stopped = false; public 
WorkloadListener() { switch (QueueType) { case EventQueueType.MMF: Events = new MMFEventQueue(); break; case EventQueueType.LiteDB: throw new NotImplementedException(); case EventQueueType.Sqlite: throw new NotImplementedException(); case EventQueueType.BinarySerialized: Events = new BinarySerializedBufferedEventQueue(); Events.BufferSize = 10000; break; } } public void Dispose() { stopped = true; Events.Dispose(); Dispose(true); GC.SuppressFinalize(this); } protected abstract void Dispose(bool disposing); public abstract WorkloadEvent Read(); public abstract void Initialize(); public bool IsRunning { get { return !stopped; } } // Collects some performance counters protected virtual void ReadPerfCountersEvents() { try { while (!stopped) { var evt = new CounterWorkloadEvent(); evt.Type = WorkloadEvent.EventType.PerformanceCounter; evt.StartTime = DateTime.Now; evt.Counters.Add( CounterWorkloadEvent.CounterNameEnum.AVG_CPU_USAGE, GetLastCPUUsage() ); Events.Enqueue(evt); Thread.Sleep(StatsCollectionIntervalSeconds * 1000); // 1 minute } } catch (Exception ex) { logger.Error(ex.Message); logger.Error(ex.StackTrace); if (ex.InnerException != null) { logger.Error(ex.InnerException.Message); } } } private int GetLastCPUUsage() { using (var conn = new SqlConnection()) { conn.ConnectionString = ConnectionInfo.ConnectionString(); conn.Open(); // Calculate CPU usage during the last minute interval var sql = @" IF SERVERPROPERTY('Edition') = 'SQL Azure' AND SERVERPROPERTY('EngineEdition') = 5 BEGIN WITH CPU_Usage AS ( SELECT avg_cpu_percent, end_time AS Event_Time FROM sys.dm_db_resource_stats WITH (NOLOCK) ) SELECT CAST(ISNULL(AVG(avg_cpu_percent),0) AS int) AS avg_CPU_percent FROM CPU_Usage WHERE [Event_Time] >= DATEADD(minute, -{0}, GETDATE()) OPTION (RECOMPILE); END IF SERVERPROPERTY('Edition') = 'SQL Azure' AND SERVERPROPERTY('EngineEdition') = 8 -- Managed Instance BEGIN WITH PerfCounters AS ( SELECT DISTINCT RTrim(spi.[object_name]) AS [object_name] 
,RTrim(spi.[counter_name]) AS [counter_name] ,RTRIM(spi.instance_name) AS [instance_name] ,CAST(spi.[cntr_value] AS BIGINT) AS [cntr_value] ,spi.[cntr_type] FROM sys.dm_os_performance_counters AS spi LEFT JOIN sys.databases AS d ON LEFT(spi.[instance_name], 36) -- some instance_name values have an additional identifier appended after the GUID = d.[name] WHERE counter_name IN ( 'CPU usage %' ,'CPU usage % base' ) ) SELECT CAST(SUM(value) AS int) AS avg_CPU_percent FROM ( SELECT CAST(CASE WHEN pc.[cntr_type] = 537003264 AND pc1.[cntr_value] > 0 THEN (pc.[cntr_value] * 1.0) / (pc1.[cntr_value] * 1.0) * 100 ELSE pc.[cntr_value] END AS float(10)) AS [value] from PerfCounters pc LEFT OUTER JOIN PerfCounters AS pc1 ON ( pc.[counter_name] = REPLACE(pc1.[counter_name],' base','') OR pc.[counter_name] = REPLACE(pc1.[counter_name],' base',' (ms)') ) AND pc.[object_name] = pc1.[object_name] AND pc.[instance_name] = pc1.[instance_name] AND pc1.[counter_name] LIKE '%base' WHERE pc.[counter_name] NOT LIKE '% base' AND pc.object_name LIKE '%:Resource Pool Stats' ) AS p OPTION (RECOMPILE); END ELSE -- On Premises BEGIN WITH ts_now(ts_now) AS ( SELECT cpu_ticks/(cpu_ticks/ms_ticks) FROM sys.dm_os_sys_info WITH (NOLOCK) ), CPU_Usage AS ( SELECT TOP(256) SQLProcessUtilization, DATEADD(ms, -1 * (ts_now.ts_now - [timestamp]), GETDATE()) AS [Event_Time] FROM ( SELECT record.value('(./Record/@id)[1]', 'int') AS record_id, record.value('(./Record/SchedulerMonitorEvent/SystemHealth/SystemIdle)[1]', 'int') AS [SystemIdle], record.value('(./Record/SchedulerMonitorEvent/SystemHealth/ProcessUtilization)[1]', 'int') AS [SQLProcessUtilization], [timestamp] FROM ( SELECT [timestamp], CONVERT(xml, record) AS [record] FROM sys.dm_os_ring_buffers WITH (NOLOCK) WHERE ring_buffer_type = N'RING_BUFFER_SCHEDULER_MONITOR' AND record LIKE N'%%' ) AS x ) AS y CROSS JOIN ts_now ) SELECT ISNULL(AVG(SQLProcessUtilization),0) AS avg_CPU_percent FROM CPU_Usage WHERE [Event_Time] >= DATEADD(minute, -{0}, 
GETDATE()) OPTION (RECOMPILE); END "; sql = string.Format(sql,StatsCollectionIntervalSeconds / 60); var avg_CPU_percent = -1; using (var cmd = conn.CreateCommand()) { cmd.CommandText = sql; avg_CPU_percent = (int)cmd.ExecuteScalar(); } return avg_CPU_percent; } } protected virtual void ReadWaitStatsEvents() { try { DataTable lastWaits = null; while (!stopped) { var evt = new WaitStatsWorkloadEvent(); evt.Type = WorkloadEvent.EventType.WAIT_stats; evt.StartTime = DateTime.Now; var newWaits = GetWaits(); evt.Waits = GetDiffWaits(newWaits, lastWaits); lastWaits = newWaits; Events.Enqueue(evt); Thread.Sleep(StatsCollectionIntervalSeconds * 1000); // 1 minute } } catch (Exception ex) { logger.Error(ex.Message); logger.Error(ex.StackTrace); if (ex.InnerException != null) { logger.Error(ex.InnerException.Message); } } } private DataTable GetDiffWaits(DataTable newWaits, DataTable lastWaits) { // no baseline established already // return all zeros if (lastWaits == null) { var result = newWaits.Clone(); foreach (DataRow dr in newWaits.Rows) { var nr = result.Rows.Add(); nr["wait_type"] = dr["wait_type"]; nr["wait_sec"] = 0; nr["resource_sec"] = 0; nr["signal_sec"] = 0; nr["wait_count"] = 0; } return result; } // catch the case when stats are reset long newWaitCount = 0; var newWaitCountObj = newWaits.Compute("SUM(wait_count)", null); if (newWaitCountObj != DBNull.Value) { newWaitCount = Convert.ToInt64(newWaitCountObj); } long lastWaitCount = 0; var lastWaitCountObj = lastWaits.Compute("SUM(wait_count)", null); if (lastWaitCountObj != DBNull.Value) { lastWaitCount = Convert.ToInt64(lastWaitCountObj); } // if newWaits < lastWaits --> reset // I can return newWaits without having to compute the diff if (newWaitCount < lastWaitCount) { return newWaits; } var results = from table1 in newWaits.AsEnumerable() join table2 in lastWaits.AsEnumerable() on table1["wait_type"] equals table2["wait_type"] select new { wait_type = Convert.ToString(table1["wait_type"]), wait_sec = 
Convert.ToDouble(table1["wait_sec"]) - Convert.ToDouble(table2["wait_sec"]),
                              resource_sec = Convert.ToDouble(table1["resource_sec"]) - Convert.ToDouble(table2["resource_sec"]),
                              signal_sec = Convert.ToDouble(table1["signal_sec"]) - Convert.ToDouble(table2["signal_sec"]),
                              wait_count = Convert.ToDouble(table1["wait_count"]) - Convert.ToDouble(table2["wait_count"])
                          };
            // NOTE(review): wait types whose wait time did not grow in this interval
            // are filtered out here, so they are absent from the diff instead of zero.
            return DataUtils.ToDataTable(results.Where(w => w.wait_sec > 0));
        }

        // Takes a snapshot of the cumulative wait statistics since instance restart
        // from sys.dm_os_wait_stats, excluding benign/background wait types, and
        // returns it as a DataTable with columns wait_type, wait_sec, resource_sec,
        // signal_sec and wait_count, sorted by wait_sec descending.
        private DataTable GetWaits()
        {
            using (var conn = new SqlConnection())
            {
                conn.ConnectionString = ConnectionInfo.ConnectionString();
                conn.Open();
                // Calculate waits since instance restart
                var sql = @"
WITH [Waits] AS (
    SELECT wait_type, wait_time_ms/ 1000.0 AS [WaitS],
        (wait_time_ms - signal_wait_time_ms) / 1000.0 AS [ResourceS],
        signal_wait_time_ms / 1000.0 AS [SignalS],
        waiting_tasks_count AS [WaitCount]
    FROM sys.dm_os_wait_stats WITH (NOLOCK)
    WHERE [wait_type] NOT IN (
        N'BROKER_EVENTHANDLER', N'BROKER_RECEIVE_WAITFOR', N'BROKER_TASK_STOP',
        N'BROKER_TO_FLUSH', N'BROKER_TRANSMITTER', N'CHECKPOINT_QUEUE',
        N'CHKPT', N'CLR_AUTO_EVENT', N'CLR_MANUAL_EVENT', N'CLR_SEMAPHORE',
        N'DBMIRROR_DBM_EVENT', N'DBMIRROR_EVENTS_QUEUE', N'DBMIRROR_WORKER_QUEUE',
        N'DBMIRRORING_CMD', N'DIRTY_PAGE_POLL', N'DISPATCHER_QUEUE_SEMAPHORE',
        N'EXECSYNC', N'FSAGENT', N'FT_IFTS_SCHEDULER_IDLE_WAIT', N'FT_IFTSHC_MUTEX',
        N'HADR_CLUSAPI_CALL', N'HADR_FILESTREAM_IOMGR_IOCOMPLETION', N'HADR_LOGCAPTURE_WAIT',
        N'HADR_NOTIFICATION_DEQUEUE', N'HADR_TIMER_TASK', N'HADR_WORK_QUEUE',
        N'KSOURCE_WAKEUP', N'LAZYWRITER_SLEEP', N'LOGMGR_QUEUE',
        N'MEMORY_ALLOCATION_EXT', N'ONDEMAND_TASK_QUEUE',
        N'PARALLEL_REDO_DRAIN_WORKER', N'PARALLEL_REDO_LOG_CACHE', N'PARALLEL_REDO_TRAN_LIST',
        N'PARALLEL_REDO_WORKER_SYNC', N'PARALLEL_REDO_WORKER_WAIT_WORK',
        N'PREEMPTIVE_HADR_LEASE_MECHANISM', N'PREEMPTIVE_SP_SERVER_DIAGNOSTICS',
        N'PREEMPTIVE_OS_LIBRARYOPS', N'PREEMPTIVE_OS_COMOPS', N'PREEMPTIVE_OS_CRYPTOPS',
        N'PREEMPTIVE_OS_PIPEOPS', N'PREEMPTIVE_OS_AUTHENTICATIONOPS',
        N'PREEMPTIVE_OS_GENERICOPS',
N'PREEMPTIVE_OS_VERIFYTRUST', N'PREEMPTIVE_OS_FILEOPS', N'PREEMPTIVE_OS_DEVICEOPS', N'PREEMPTIVE_OS_QUERYREGISTRY', N'PREEMPTIVE_OS_WRITEFILE', N'PREEMPTIVE_XE_CALLBACKEXECUTE', N'PREEMPTIVE_XE_DISPATCHER', N'PREEMPTIVE_XE_GETTARGETSTATE', N'PREEMPTIVE_XE_SESSIONCOMMIT', N'PREEMPTIVE_XE_TARGETINIT', N'PREEMPTIVE_XE_TARGETFINALIZE', N'PWAIT_ALL_COMPONENTS_INITIALIZED', N'PWAIT_DIRECTLOGCONSUMER_GETNEXT', N'QDS_PERSIST_TASK_MAIN_LOOP_SLEEP', N'QDS_ASYNC_QUEUE', N'QDS_CLEANUP_STALE_QUERIES_TASK_MAIN_LOOP_SLEEP', N'REQUEST_FOR_DEADLOCK_SEARCH', N'RESOURCE_QUEUE', N'SERVER_IDLE_CHECK', N'SLEEP_BPOOL_FLUSH', N'SLEEP_DBSTARTUP', N'SLEEP_DCOMSTARTUP', N'SLEEP_MASTERDBREADY', N'SLEEP_MASTERMDREADY', N'SLEEP_MASTERUPGRADED', N'SLEEP_MSDBSTARTUP', N'SLEEP_SYSTEMTASK', N'SLEEP_TASK', N'SLEEP_TEMPDBSTARTUP', N'SNI_HTTP_ACCEPT', N'SP_SERVER_DIAGNOSTICS_SLEEP', N'SQLTRACE_BUFFER_FLUSH', N'SQLTRACE_INCREMENTAL_FLUSH_SLEEP', N'SQLTRACE_WAIT_ENTRIES', N'WAIT_FOR_RESULTS', N'WAITFOR', N'WAITFOR_TASKSHUTDOWN', N'WAIT_XTP_HOST_WAIT', N'WAIT_XTP_OFFLINE_CKPT_NEW_LOG', N'WAIT_XTP_CKPT_CLOSE', N'WAIT_XTP_RECOVERY', N'XE_BUFFERMGR_ALLPROCESSED_EVENT', N'XE_DISPATCHER_JOIN', N'XE_DISPATCHER_WAIT', N'XE_LIVE_TARGET_TVF', N'XE_TIMER_EVENT') AND waiting_tasks_count > 0 ) SELECT W1.wait_type, CAST (MAX (W1.WaitS) AS DECIMAL (16,2)) AS [wait_sec], CAST (MAX (W1.ResourceS) AS DECIMAL (16,2)) AS [resource_sec], CAST (MAX (W1.SignalS) AS DECIMAL (16,2)) AS [signal_sec], MAX (W1.WaitCount) AS [wait_count] FROM Waits AS W1 GROUP BY W1.wait_type HAVING CAST (MAX (W1.WaitS) AS DECIMAL (16,2)) > 0 ORDER BY wait_sec DESC OPTION (RECOMPILE); "; DataTable waits = null; using (var adapter = new SqlDataAdapter(sql, conn)) { using (var ds = new DataSet()) { _ = adapter.Fill(ds); waits = ds.Tables[0]; } } var results = from table1 in waits.AsEnumerable() select new { wait_type = Convert.ToString(table1["wait_type"]), wait_sec = Convert.ToDouble(table1["wait_sec"]), resource_sec = 
Convert.ToDouble(table1["resource_sec"]),
                              signal_sec = Convert.ToDouble(table1["signal_sec"]),
                              wait_count = Convert.ToDouble(table1["wait_count"])
                          };
                return DataUtils.ToDataTable(results);
            }
        }

        // Polls file-level disk performance counters in a loop until the listener
        // is stopped. Each pass snapshots the cumulative I/O stats (GetDiskPerf),
        // enqueues the delta against the previous snapshot as a DiskPerf event,
        // then sleeps for the configured collection interval.
        protected virtual void ReadDiskPerformanceEvents()
        {
            try
            {
                DataTable lastDiskPerf = null;
                while (!stopped)
                {
                    var evt = new DiskPerfWorkloadEvent();
                    evt.Type = WorkloadEvent.EventType.DiskPerf;
                    evt.StartTime = DateTime.Now;
                    var newDiskPerf = GetDiskPerf();
                    evt.DiskPerf = GetDiffDiskPerf(newDiskPerf, lastDiskPerf);
                    lastDiskPerf = newDiskPerf;
                    Events.Enqueue(evt);
                    Thread.Sleep(StatsCollectionIntervalSeconds * 1000); // sleep for the collection interval (seconds -> ms)
                }
            }
            catch (Exception ex)
            {
                // Any failure terminates this polling loop; log as much detail as available.
                logger.Error(ex.Message);
                logger.Error(ex.StackTrace);
                if (ex.InnerException != null)
                {
                    logger.Error(ex.InnerException.Message);
                }
            }
        }

        // Computes the per-interval difference between two disk-perf snapshots.
        // lastDiskPerf may be null (no baseline yet): in that case a zeroed copy of
        // the new snapshot is returned, extended with the cum_* columns that the
        // diff branch below always produces.
        private DataTable GetDiffDiskPerf(DataTable newDiskPerf, DataTable lastDiskPerf)
        {
            // No baseline established yet: return all zeros.
            if (lastDiskPerf == null)
            {
                var result = newDiskPerf.Clone();
                // The diff result carries both interval deltas and cumulative totals;
                // add the cumulative columns to the cloned schema when missing.
                if (!result.Columns.Contains("cum_read_latency_ms"))
                    _ = result.Columns.Add("cum_read_latency_ms", typeof(double));
                if (!result.Columns.Contains("cum_reads"))
                    _ = result.Columns.Add("cum_reads", typeof(double));
                if (!result.Columns.Contains("cum_read_bytes"))
                    _ = result.Columns.Add("cum_read_bytes", typeof(double));
                if (!result.Columns.Contains("cum_write_latency_ms"))
                    _ = result.Columns.Add("cum_write_latency_ms", typeof(double));
                if (!result.Columns.Contains("cum_writes"))
                    _ = result.Columns.Add("cum_writes", typeof(double));
                if (!result.Columns.Contains("cum_write_bytes"))
                    _ = result.Columns.Add("cum_write_bytes", typeof(double));
                foreach (DataRow dr in newDiskPerf.Rows)
                {
                    var nr = result.Rows.Add();
                    nr["database_name"] = dr["database_name"];
                    nr["physical_filename"] = dr["physical_filename"];
                    nr["logical_filename"] = dr["logical_filename"];
                    nr["file_type"] = dr["file_type"];
                    nr["read_latency_ms"] = 0;
                    nr["reads"] = 0;
                    nr["read_bytes"] = 0;
                    nr["write_latency_ms"] = 0;
                    nr["writes"] = 0;
nr["write_bytes"] = 0;
                    nr["cum_read_latency_ms"] = 0;
                    nr["cum_reads"] = 0;
                    nr["cum_read_bytes"] = 0;
                    nr["cum_write_latency_ms"] = 0;
                    nr["cum_writes"] = 0;
                    nr["cum_write_bytes"] = 0;
                    // volume_mount_point is only present when the snapshot query was
                    // able to use sys.dm_os_volume_stats (see GetDiskPerf).
                    if (newDiskPerf.Columns.Contains("volume_mount_point"))
                    {
                        nr["volume_mount_point"] = dr["volume_mount_point"];
                    }
                }
                return result;
            }

            // Join the two snapshots on the full file identity and emit both the
            // interval deltas (columns named like the raw counters) and the
            // cumulative totals (cum_* columns, taken from the new snapshot).
            // NOTE(review): the join key reads table1["volume_mount_point"]
            // unconditionally, while the baseline branch above guards on the column's
            // existence — verify the column is always present on this code path.
            var results = from table1 in newDiskPerf.AsEnumerable()
                          join table2 in lastDiskPerf.AsEnumerable()
                          on new
                          {
                              database_name = table1["database_name"],
                              physical_filename = table1["physical_filename"],
                              logical_filename = table1["logical_filename"],
                              file_type = table1["file_type"],
                              volume_mount_point = table1["volume_mount_point"]
                          }
                          equals new
                          {
                              database_name = table2["database_name"],
                              physical_filename = table2["physical_filename"],
                              logical_filename = table2["logical_filename"],
                              file_type = table2["file_type"],
                              volume_mount_point = table2["volume_mount_point"]
                          }
                          select new
                          {
                              database_name = Convert.ToString(table1["database_name"]),
                              physical_filename = Convert.ToString(table1["physical_filename"]),
                              logical_filename = Convert.ToString(table1["logical_filename"]),
                              file_type = Convert.ToString(table1["file_type"]),
                              volume_mount_point = Convert.ToString(table1["volume_mount_point"]),
                              read_latency_ms = Convert.ToDouble(table1["read_latency_ms"]) - Convert.ToDouble(table2["read_latency_ms"]),
                              reads = Convert.ToDouble(table1["reads"]) - Convert.ToDouble(table2["reads"]),
                              read_bytes = Convert.ToDouble(table1["read_bytes"]) - Convert.ToDouble(table2["read_bytes"]),
                              write_latency_ms = Convert.ToDouble(table1["write_latency_ms"]) - Convert.ToDouble(table2["write_latency_ms"]),
                              writes = Convert.ToDouble(table1["writes"]) - Convert.ToDouble(table2["writes"]),
                              write_bytes = Convert.ToDouble(table1["write_bytes"]) - Convert.ToDouble(table2["write_bytes"]),
                              cum_read_latency_ms = Convert.ToDouble(table1["read_latency_ms"]),
                              cum_reads = Convert.ToDouble(table1["reads"]),
                              cum_read_bytes = Convert.ToDouble(table1["read_bytes"]),
                              cum_write_latency_ms =
Convert.ToDouble(table1["write_latency_ms"]),
                              cum_writes = Convert.ToDouble(table1["writes"]),
                              cum_write_bytes = Convert.ToDouble(table1["write_bytes"]),
                          };
            return DataUtils.ToDataTable(results);
        }

        // Takes a snapshot of the cumulative file-level I/O statistics from
        // sys.dm_io_virtual_file_stats joined to sys.master_files. On builds that
        // support sys.dm_os_volume_stats (>= 10.50.2500.0) the query also returns
        // the volume mount point; the T-SQL is built dynamically for that reason.
        private DataTable GetDiskPerf()
        {
            using (var conn = new SqlConnection())
            {
                conn.ConnectionString = ConnectionInfo.ConnectionString();
                conn.Open();
                // Calculate disk performance
                var sql = @"
DECLARE @SqlStatement AS nvarchar(max)
    ,@MajorMinorVersion AS int = CAST(PARSENAME(CAST(SERVERPROPERTY('ProductVersion') AS nvarchar),4) AS int) * 100 + CAST(PARSENAME(CAST(SERVERPROPERTY('ProductVersion') AS nvarchar),3) AS int)
    ,@Columns AS nvarchar(max) = ''
    ,@Tables AS nvarchar(max) = ''
IF CAST(SERVERPROPERTY('ProductVersion') AS varchar(50)) >= '10.50.2500.0'
BEGIN
    SET @Columns += N' ,vs.[volume_mount_point]'
    SET @Tables += N' CROSS APPLY sys.dm_os_volume_stats(mf.[database_id], mf.[file_id]) AS vs'
END
SET @SqlStatement = N'
SELECT DB_NAME(vfs.[database_id]) AS [database_name]
    ,COALESCE(mf.[physical_name],''RBPEX'') AS [physical_filename] --RPBEX = Resilient Buffer Pool Extension
    ,COALESCE(mf.[name],''RBPEX'') AS [logical_filename] --RPBEX = Resilient Buffer Pool Extension
    ,mf.[type_desc] AS [file_type]
    ,vfs.[io_stall_read_ms] AS [read_latency_ms]
    ,vfs.[num_of_reads] AS [reads]
    ,vfs.[num_of_bytes_read] AS [read_bytes]
    ,vfs.[io_stall_write_ms] AS [write_latency_ms]
    ,vfs.[num_of_writes] AS [writes]
    ,vfs.[num_of_bytes_written] AS [write_bytes]' + @Columns + N'
FROM sys.dm_io_virtual_file_stats(NULL, NULL) AS vfs
INNER JOIN sys.master_files AS mf WITH (NOLOCK)
    ON vfs.[database_id] = mf.[database_id] AND vfs.[file_id] = mf.[file_id]' + @Tables + '
OPTION (RECOMPILE)';
EXEC sp_executesql @SqlStatement
";
                DataTable diskPerf = null;
                using (var adapter = new SqlDataAdapter(sql, conn))
                {
                    using (var ds = new DataSet())
                    {
                        _ = adapter.Fill(ds);
                        diskPerf = ds.Tables[0];
                    }
                }
                // Project the raw rows into a strongly-typed shape for ToDataTable.
                var results = from table1 in diskPerf.AsEnumerable()
                              select new
                              {
                                  database_name =
Convert.ToString(table1["database_name"]),
                                  physical_filename = Convert.ToString(table1["physical_filename"]),
                                  logical_filename = Convert.ToString(table1["logical_filename"]),
                                  file_type = Convert.ToString(table1["file_type"]),
                                  // NOTE(review): read unconditionally, but the dynamic T-SQL only adds
                                  // this column on builds >= 10.50.2500.0 — verify behavior on older servers.
                                  volume_mount_point = Convert.ToString(table1["volume_mount_point"]),
                                  read_latency_ms = Convert.ToDouble(table1["read_latency_ms"]),
                                  reads = Convert.ToDouble(table1["reads"]),
                                  read_bytes = Convert.ToDouble(table1["read_bytes"]),
                                  write_latency_ms = Convert.ToDouble(table1["write_latency_ms"]),
                                  writes = Convert.ToDouble(table1["writes"]),
                                  write_bytes = Convert.ToDouble(table1["write_bytes"]),
                              };
                return DataUtils.ToDataTable(results);
            }
        }

        // Opens and immediately commits (or rolls back) a transaction marked
        // 'WorkloadTools' in the current database, or in every user database
        // (database_id > 4) when allDatabases is true. The dummy CREATE/DROP TYPE
        // forces a logged operation inside the marked transaction.
        // NOTE(review): presumably the mark is used to identify a consistent point
        // in the transaction log (e.g. for STOPATMARK restores) — confirm with callers.
        protected virtual void SetTransactionMark(bool allDatabases)
        {
            using (var conn = new SqlConnection())
            {
                conn.ConnectionString = ConnectionInfo.ConnectionString();
                conn.Open();
                // Create Marked Transaction
                var sql = @"
DECLARE @dbname sysname
DECLARE @sql nvarchar(max), @qry nvarchar(max)
SET @qry = '
    PRINT DB_NAME()
    BEGIN TRAN WorkloadTools WITH MARK ''WorkloadTools'';
    BEGIN TRY
        CREATE TYPE WorkloadToolsType FROM int;
        DROP TYPE WorkloadToolsType;
        IF XACT_STATE() = 1 COMMIT TRAN WorkloadTools;
    END TRY
    BEGIN CATCH
        IF XACT_STATE() <> 0 ROLLBACK TRAN WorkloadTools;
    END CATCH
'
DECLARE c CURSOR STATIC LOCAL FORWARD_ONLY READ_ONLY
FOR
SELECT name
FROM sys.databases
WHERE database_id > 4
" + (allDatabases ?
"" : "AND database_id = DB_ID()") + @" ORDER BY name OPEN c FETCH NEXT FROM c INTO @dbname WHILE @@FETCH_STATUS = 0 BEGIN SET @sql = 'EXEC ' + QUOTENAME(@dbname) + '.sys.sp_executesql @qry' BEGIN TRY EXEC sp_executesql @sql, N'@qry nvarchar(max)', @qry END TRY BEGIN CATCH PRINT 'Unable to mark the transaction on database ' + @dbname END CATCH FETCH NEXT FROM c INTO @dbname END CLOSE c DEALLOCATE c "; using (var cmd = new SqlCommand(sql, conn)) { _ = cmd.ExecuteNonQuery(); } } } } } ================================================ FILE: WorkloadTools/WorkloadTools.csproj ================================================  Debug AnyCPU {AE6E4548-8C33-4728-8504-88AA9666020B} Library Properties WorkloadTools WorkloadTools 512 .NETFramework v4.8 true full false bin\Debug\ DEBUG;TRACE prompt 4 false pdbonly true bin\Release\ TRACE prompt 4 false x86 x64 true bin\x86\Debug\ DEBUG;TRACE full x86 prompt MinimumRecommendedRules.ruleset false bin\x86\Release\ TRACE true pdbonly x86 prompt MinimumRecommendedRules.ruleset false ..\packages\DouglasCrockford.JsMin.1.1.3\lib\net40-client\DouglasCrockford.JsMin.dll ..\packages\FastMember.1.5.0\lib\net461\FastMember.dll ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.AzureStorageEnum.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.BatchParserClient.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.ConnectionInfo.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.ConnectionInfoExtended.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Diagnostics.Strace.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Dmf.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Dmf.Common.dll 
True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Management.Collector.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Management.CollectorEnum.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Management.RegisteredServers.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Management.Sdk.Sfc.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Management.SqlParser.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Management.Utility.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Management.UtilityEnum.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Management.XEvent.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Management.XEventDbScoped.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Management.XEventDbScopedEnum.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Management.XEventEnum.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.PolicyEnum.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.RegSvrEnum.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.ServiceBrokerEnum.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Smo.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.SmoExtended.dll True 
..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.SqlClrProvider.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.SqlEnum.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.SqlTDiagm.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.SqlWmiManagement.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.SString.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.Types.dll True ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\lib\net40\Microsoft.SqlServer.WmiEnum.dll True False ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\runtimes\win-$(CurrentPlatform)\native\Microsoft.SqlServer.Xe.Core.dll False ..\packages\Microsoft.SqlServer.SqlManagementObjects.140.17279.0\runtimes\win-$(CurrentPlatform)\native\Microsoft.SqlServer.XEvent.Linq.dll ..\packages\NFX.3.5.0.5\lib\NFX.dll ..\packages\NLog.4.7.15\lib\net45\NLog.dll ..\packages\System.Data.SQLite.Core.1.0.112.0\lib\net46\System.Data.SQLite.dll True True Settings.settings Always Always Always Always SettingsSingleFileGenerator Settings.Designer.cs Always Always Always Always This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. ================================================ FILE: WorkloadTools/app.config ================================================
64 15 ================================================ FILE: WorkloadTools/packages.config ================================================  ================================================ FILE: WorkloadTools.sln ================================================  Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio Version 17 VisualStudioVersion = 17.6.33829.357 MinimumVisualStudioVersion = 10.0.40219.1 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SqlWorkload", "SqlWorkload\SqlWorkload.csproj", "{FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}" ProjectSection(ProjectDependencies) = postProject {AE6E4548-8C33-4728-8504-88AA9666020B} = {AE6E4548-8C33-4728-8504-88AA9666020B} EndProjectSection EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "WorkloadTools", "WorkloadTools\WorkloadTools.csproj", "{AE6E4548-8C33-4728-8504-88AA9666020B}" EndProject Project("{930C7802-8A8C-48F9-8165-68863BCCD9DD}") = "Setup", "Setup\Setup.wixproj", "{BBF5FDA0-C08F-48C9-9B98-E017DD8ABB5D}" ProjectSection(ProjectDependencies) = postProject {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A} = {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A} {62E37C03-BA08-46CE-A583-D71FB7A8825B} = {62E37C03-BA08-46CE-A583-D71FB7A8825B} {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973} = {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973} {AE6E4548-8C33-4728-8504-88AA9666020B} = {AE6E4548-8C33-4728-8504-88AA9666020B} {898DF47E-429A-441C-B879-AC0D9EC7FA0E} = {898DF47E-429A-441C-B879-AC0D9EC7FA0E} EndProjectSection EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{CB5A2D19-E789-4555-B04D-FEA2908A5C92}" ProjectSection(SolutionItems) = preProject .editorconfig = .editorconfig build.ps1 = build.ps1 LICENSE.md = LICENSE.md README.md = README.md SharedAssemblyInfo.cs = SharedAssemblyInfo.cs EndProjectSection EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConvertWorkload", "ConvertWorkload\ConvertWorkload.csproj", "{62E37C03-BA08-46CE-A583-D71FB7A8825B}" 
EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "WorkloadToolsTests", "WorkloadToolsTests\WorkloadToolsTests.csproj", "{898DF47E-429A-441C-B879-AC0D9EC7FA0E}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "WorkloadViewer", "WorkloadViewer\WorkloadViewer.csproj", "{6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}" ProjectSection(ProjectDependencies) = postProject {AE6E4548-8C33-4728-8504-88AA9666020B} = {AE6E4548-8C33-4728-8504-88AA9666020B} EndProjectSection EndProject Project("{930C7802-8A8C-48F9-8165-68863BCCD9DD}") = "SetupBootstrapper", "SetupBootstrapper\SetupBootstrapper.wixproj", "{CAD976C4-D0C6-4313-B605-EC3749A23B5F}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU Debug|x64 = Debug|x64 Debug|x86 = Debug|x86 Release|Any CPU = Release|Any CPU Release|x64 = Release|x64 Release|x86 = Release|x86 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}.Debug|Any CPU.Build.0 = Debug|Any CPU {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}.Debug|x64.ActiveCfg = Debug|Any CPU {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}.Debug|x64.Build.0 = Debug|Any CPU {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}.Debug|x86.ActiveCfg = Debug|x86 {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}.Debug|x86.Build.0 = Debug|x86 {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}.Release|Any CPU.ActiveCfg = Release|Any CPU {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}.Release|Any CPU.Build.0 = Release|Any CPU {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}.Release|x64.ActiveCfg = Release|Any CPU {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}.Release|x64.Build.0 = Release|Any CPU {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}.Release|x86.ActiveCfg = Release|x86 {FB46AD2C-DF81-4D35-B419-D93E5EF9D98A}.Release|x86.Build.0 = Release|x86 {AE6E4548-8C33-4728-8504-88AA9666020B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 
{AE6E4548-8C33-4728-8504-88AA9666020B}.Debug|Any CPU.Build.0 = Debug|Any CPU {AE6E4548-8C33-4728-8504-88AA9666020B}.Debug|x64.ActiveCfg = Debug|Any CPU {AE6E4548-8C33-4728-8504-88AA9666020B}.Debug|x64.Build.0 = Debug|Any CPU {AE6E4548-8C33-4728-8504-88AA9666020B}.Debug|x86.ActiveCfg = Debug|x86 {AE6E4548-8C33-4728-8504-88AA9666020B}.Debug|x86.Build.0 = Debug|x86 {AE6E4548-8C33-4728-8504-88AA9666020B}.Release|Any CPU.ActiveCfg = Release|Any CPU {AE6E4548-8C33-4728-8504-88AA9666020B}.Release|Any CPU.Build.0 = Release|Any CPU {AE6E4548-8C33-4728-8504-88AA9666020B}.Release|x64.ActiveCfg = Release|Any CPU {AE6E4548-8C33-4728-8504-88AA9666020B}.Release|x64.Build.0 = Release|Any CPU {AE6E4548-8C33-4728-8504-88AA9666020B}.Release|x86.ActiveCfg = Release|x86 {AE6E4548-8C33-4728-8504-88AA9666020B}.Release|x86.Build.0 = Release|x86 {BBF5FDA0-C08F-48C9-9B98-E017DD8ABB5D}.Debug|Any CPU.ActiveCfg = Debug|x64 {BBF5FDA0-C08F-48C9-9B98-E017DD8ABB5D}.Debug|x64.ActiveCfg = Debug|x64 {BBF5FDA0-C08F-48C9-9B98-E017DD8ABB5D}.Debug|x86.ActiveCfg = Debug|x86 {BBF5FDA0-C08F-48C9-9B98-E017DD8ABB5D}.Release|Any CPU.ActiveCfg = Release|x86 {BBF5FDA0-C08F-48C9-9B98-E017DD8ABB5D}.Release|x64.ActiveCfg = Release|x64 {BBF5FDA0-C08F-48C9-9B98-E017DD8ABB5D}.Release|x86.ActiveCfg = Release|x86 {62E37C03-BA08-46CE-A583-D71FB7A8825B}.Debug|Any CPU.ActiveCfg = Debug|x86 {62E37C03-BA08-46CE-A583-D71FB7A8825B}.Debug|Any CPU.Build.0 = Debug|x86 {62E37C03-BA08-46CE-A583-D71FB7A8825B}.Debug|x64.ActiveCfg = Debug|Any CPU {62E37C03-BA08-46CE-A583-D71FB7A8825B}.Debug|x64.Build.0 = Debug|Any CPU {62E37C03-BA08-46CE-A583-D71FB7A8825B}.Debug|x86.ActiveCfg = Debug|x86 {62E37C03-BA08-46CE-A583-D71FB7A8825B}.Debug|x86.Build.0 = Debug|x86 {62E37C03-BA08-46CE-A583-D71FB7A8825B}.Release|Any CPU.ActiveCfg = Release|Any CPU {62E37C03-BA08-46CE-A583-D71FB7A8825B}.Release|Any CPU.Build.0 = Release|Any CPU {62E37C03-BA08-46CE-A583-D71FB7A8825B}.Release|x64.ActiveCfg = Release|Any CPU 
{62E37C03-BA08-46CE-A583-D71FB7A8825B}.Release|x64.Build.0 = Release|Any CPU {62E37C03-BA08-46CE-A583-D71FB7A8825B}.Release|x86.ActiveCfg = Release|x86 {62E37C03-BA08-46CE-A583-D71FB7A8825B}.Release|x86.Build.0 = Release|x86 {898DF47E-429A-441C-B879-AC0D9EC7FA0E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {898DF47E-429A-441C-B879-AC0D9EC7FA0E}.Debug|Any CPU.Build.0 = Debug|Any CPU {898DF47E-429A-441C-B879-AC0D9EC7FA0E}.Debug|x64.ActiveCfg = Debug|Any CPU {898DF47E-429A-441C-B879-AC0D9EC7FA0E}.Debug|x64.Build.0 = Debug|Any CPU {898DF47E-429A-441C-B879-AC0D9EC7FA0E}.Debug|x86.ActiveCfg = Debug|x86 {898DF47E-429A-441C-B879-AC0D9EC7FA0E}.Debug|x86.Build.0 = Debug|x86 {898DF47E-429A-441C-B879-AC0D9EC7FA0E}.Release|Any CPU.ActiveCfg = Release|Any CPU {898DF47E-429A-441C-B879-AC0D9EC7FA0E}.Release|Any CPU.Build.0 = Release|Any CPU {898DF47E-429A-441C-B879-AC0D9EC7FA0E}.Release|x64.ActiveCfg = Release|Any CPU {898DF47E-429A-441C-B879-AC0D9EC7FA0E}.Release|x64.Build.0 = Release|Any CPU {898DF47E-429A-441C-B879-AC0D9EC7FA0E}.Release|x86.ActiveCfg = Release|x86 {898DF47E-429A-441C-B879-AC0D9EC7FA0E}.Release|x86.Build.0 = Release|x86 {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}.Debug|Any CPU.Build.0 = Debug|Any CPU {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}.Debug|x64.ActiveCfg = Debug|Any CPU {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}.Debug|x64.Build.0 = Debug|Any CPU {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}.Debug|x86.ActiveCfg = Debug|x86 {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}.Debug|x86.Build.0 = Debug|x86 {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}.Release|Any CPU.ActiveCfg = Release|Any CPU {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}.Release|Any CPU.Build.0 = Release|Any CPU {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}.Release|x64.ActiveCfg = Release|Any CPU {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}.Release|x64.Build.0 = Release|Any CPU {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}.Release|x86.ActiveCfg = Release|x86 
{6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973}.Release|x86.Build.0 = Release|x86 {CAD976C4-D0C6-4313-B605-EC3749A23B5F}.Debug|Any CPU.ActiveCfg = Debug|x86 {CAD976C4-D0C6-4313-B605-EC3749A23B5F}.Debug|x64.ActiveCfg = Debug|x64 {CAD976C4-D0C6-4313-B605-EC3749A23B5F}.Debug|x86.ActiveCfg = Debug|x86 {CAD976C4-D0C6-4313-B605-EC3749A23B5F}.Release|Any CPU.ActiveCfg = Release|x86 {CAD976C4-D0C6-4313-B605-EC3749A23B5F}.Release|x64.ActiveCfg = Release|x64 {CAD976C4-D0C6-4313-B605-EC3749A23B5F}.Release|x86.ActiveCfg = Release|x86 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {3DC1D965-A979-4B9F-A16F-7A938A0DDD8C} EndGlobalSection EndGlobal ================================================ FILE: WorkloadToolsTests/Properties/AssemblyInfo.cs ================================================ using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; [assembly: Guid("898df47e-429a-441c-b879-ac0d9ec7fa0e")] ================================================ FILE: WorkloadToolsTests/WorkloadTools/BinarySerializedBufferedEventQueueTest.cs ================================================ using System; using System.Diagnostics; using Microsoft.VisualStudio.TestTools.UnitTesting; using WorkloadTools; namespace WorkloadToolsTests.WorkloadTools { [TestClass] public class BinarySerializedBufferedEventQueueTest { [TestMethod] public void TestEnqueueDequeueFixedList() { int[] numbers = { +9772 , -9479 , +70255 , -49216 , +18796 , -39641 , +60528 , -60690 , +78808 , -49406 , +42422 , -72132 , +65861 , -34935 , +55297 , -10699 , +96237 , -72432 , +55697 , -85962 , +18370 , -72056 , +97085 , -50146 , +43353 , -53808 , +28408 , -76107 , +51235 , -50290 , +67421 , -9696 , +65303 , -45014 , +53121 , -50691 , +68663 , -54973 , +34989 , -66099 , +15014 , -53872 , +97248 , -38096 , +705 , -23998 , +13872 , -42048 , +77390 , -71767 , +86413 , 
-6260 , +61030 , -51330 , +14412 , -37716 , +16394 , -20109 , +5862 , -64988 , +67733 ,
            -84421 , +23954 , -3518 , +81985 , -32726 , +14828 , -20847 , +81813 , -4605 , +42036 ,
            -41263 , +37442 , -89598 , +70947 , -64497 , +74808 , -58988 , +49441 , -19355 , -166474 };
            // Replays the scripted sequence: positive numbers enqueue that many events,
            // negative numbers dequeue that many; after each step the queue count must
            // equal the running total. The i == 28 branches are debug instrumentation
            // left in place for a historically problematic step.
            using (var queue = new BinarySerializedBufferedEventQueue())
            {
                queue.BufferSize = 10000;
                var total = 0;
                for (var i = 0; i < numbers.Length; i++)
                {
                    if (i == 28)
                    {
                        Debug.WriteLine("Uh oh");
                    }
                    var num = numbers[i];
                    if (num > 0)
                    {
                        var initialCount = queue.Count;
                        for (var j = 0; j < num; j++)
                        {
                            queue.Enqueue(new ExecutionWorkloadEvent() { Text = $"SELECT {j} FROM sometable WHERE somecolumn = someValue ORDER BY someOtherColumn" });
                            if (i == 28)
                            {
                                Console.WriteLine($" {j}: should be {initialCount + j + 1} | is {queue.Count}");
                                if (initialCount + j + 1 == 8854)
                                {
                                    Console.WriteLine($"Aaaaah!");
                                }
                            }
                        }
                    }
                    else
                    {
                        var initialCount = queue.Count;
                        num = num * -1;
                        WorkloadEvent evnt = null;
                        for (var k = 0; k < num; k++)
                        {
                            queue.TryDequeue(out evnt);
                            //Console.WriteLine($" {k}: should be {initialCount - k} | is {queue.Count}");
                        }
                        num = num * -1;
                    }
                    total += num;
                    Assert.AreEqual(queue.Count, total);
                }
            }
        }

        // Repeatedly enqueues and dequeues random numbers of events (timing each
        // batch), then drains the queue completely and asserts it ends up empty.
        [TestMethod]
        public void TestEnqueueRandomDequeueAll()
        {
            using (var queue = new BinarySerializedBufferedEventQueue())
            {
                queue.BufferSize = 10000;
                var r = new Random();
                var watch = new Stopwatch();
                for (var j = 0; j < 10; j++)
                {
                    watch.Reset();
                    watch.Start();
                    var numElements = (int)(r.NextDouble() * 100000);
                    for (var i = 0; i < numElements; i++)
                    {
                        queue.Enqueue(new ExecutionWorkloadEvent() { Text = $"SELECT {i} FROM sometable WHERE somecolumn = someValue ORDER BY someOtherColumn" });
                    }
                    watch.Stop();
                    Console.WriteLine($"Enqueue {numElements} elements elapsed: {watch.Elapsed}");
                    var queueLen = queue.Count;
                    // Never try to dequeue more elements than are currently queued.
                    numElements = (int)(r.NextDouble() * 100000);
                    while (numElements > queueLen)
                    {
                        numElements -= 500;
                    }
                    watch.Reset();
                    watch.Start();
                    for (var i = 0; i < numElements; i++)
                    {
                        WorkloadEvent evt = null;
                        queue.TryDequeue(out evt);
                        //Console.WriteLine(((ExecutionWorkloadEvent)evt).Text);
                    }
                    watch.Stop();
                    Console.WriteLine($"Dequeue {numElements} elements elapsed: {watch.Elapsed}");
                }
                // Drain whatever is left and verify the queue is empty.
                watch.Reset();
                watch.Start();
                var len = queue.Count;
                WorkloadEvent evnt = null;
                while (queue.TryDequeue(out evnt)) { ; }
                watch.Stop();
                Console.WriteLine($"Dequeue all {len} remaining elements elapsed: {watch.Elapsed}");
                Assert.AreEqual(queue.Count, 0);
            }
        }
    }
}


================================================
FILE: WorkloadToolsTests/WorkloadTools/SqlTextNormalizerTest.cs
================================================
using Microsoft.VisualStudio.TestTools.UnitTesting;
using WorkloadTools.Consumer.Analysis;

namespace WorkloadToolsTests.WorkloadTools
{
    // Tests that SqlTextNormalizer replaces string parameter values with the
    // {STR} placeholder and upper-cases parameter names.
    [TestClass]
    public class SqlTextNormalizerTest
    {
        private SqlTextNormalizer _normalizer;

        [TestInitialize]
        public void Initialize()
        {
            _normalizer = new SqlTextNormalizer();
        }

        [TestMethod]
        public void NormalizeSqlText_SimpleStringParam_Normalized()
        {
            var sql = "exec SampleStoredProcedure @Param1=N'Name1',@Key=N'123456'";
            var result = _normalizer.NormalizeSqlText(sql, 1, false);
            Assert.IsNotNull(result);
            StringAssert.Contains(result.NormalizedText, "@PARAM1 = {STR}");
            StringAssert.Contains(result.NormalizedText, "@KEY = {STR}");
        }

        [TestMethod]
        public void NormalizeSqlText_StringParamWithDoubleQuotes_Normalized()
        {
            var sql = "exec SampleStoredProcedure @Param1=N'Name1',@Key=N'123456',@Content=N'value'";
            var result = _normalizer.NormalizeSqlText(sql, 1, false);
            Assert.IsNotNull(result);
            StringAssert.Contains(result.NormalizedText, "@PARAM1 = {STR}");
            StringAssert.Contains(result.NormalizedText, "@KEY = {STR}");
            StringAssert.Contains(result.NormalizedText, "@CONTENT = {STR}");
            Assert.IsFalse(result.NormalizedText.Contains("ATTRIBUTE1"), "The XML attribute content should have been replaced by {STR}");
        }

        // Doubled single quotes inside a T-SQL string literal must not break parsing.
        [TestMethod]
        public void NormalizeSqlText_StringParamWithEscapedSingleQuotes_Normalized()
        {
            var sql = "exec SampleStoredProcedure @Param1=N'It''s a test'";
            var result
= _normalizer.NormalizeSqlText(sql, 1, false); Assert.IsNotNull(result); StringAssert.Contains(result.NormalizedText, "@PARAM1 = {STR}"); } } } ================================================ FILE: WorkloadToolsTests/WorkloadToolsTests.csproj ================================================  Debug AnyCPU {898DF47E-429A-441C-B879-AC0D9EC7FA0E} Library Properties WorkloadToolsTests WorkloadToolsTests v4.8 512 {3AC096D0-A1C2-E12C-1390-A8335801FDAB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC} 15.0 $(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion) $(ProgramFiles)\Common Files\microsoft shared\VSTT\$(VisualStudioVersion)\UITestExtensionPackages False UnitTest true full false bin\Debug\ DEBUG;TRACE prompt 4 pdbonly true bin\Release\ TRACE prompt 4 true bin\x86\Debug\ DEBUG;TRACE full x86 prompt MinimumRecommendedRules.ruleset bin\x86\Release\ TRACE true pdbonly x86 prompt MinimumRecommendedRules.ruleset ..\packages\MSTest.TestFramework.1.3.2\lib\net45\Microsoft.VisualStudio.TestPlatform.TestFramework.dll ..\packages\MSTest.TestFramework.1.3.2\lib\net45\Microsoft.VisualStudio.TestPlatform.TestFramework.Extensions.dll Properties\SharedAssemblyInfo.cs {62e37c03-ba08-46ce-a583-d71fb7a8825b} ConvertWorkload {fb46ad2c-df81-4d35-b419-d93e5ef9d98a} SqlWorkload {ae6e4548-8c33-4728-8504-88aa9666020b} WorkloadTools ================================================ FILE: WorkloadToolsTests/app.config ================================================ ================================================ FILE: WorkloadToolsTests/packages.config ================================================  ================================================ FILE: WorkloadViewer/App.config ================================================ ================================================ FILE: WorkloadViewer/App.xaml ================================================ ================================================ FILE: WorkloadViewer/App.xaml.cs 
================================================
using CommandLine;
using CommandLine.Text;
using NLog;
using NLog.Targets;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Data;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using System.Windows;

namespace WorkloadViewer
{
    /// <summary>
    /// Interaction logic for App.xaml.
    /// Parses the command line into <see cref="Options"/>, redirects the
    /// NLog "logfile" target to the requested path, then lets the
    /// application start.
    /// </summary>
    public partial class App : Application
    {
        private static readonly Logger logger = LogManager.GetCurrentClassLogger();

        /// <summary>Command line options parsed at startup; read elsewhere in the app.</summary>
        public Options Options { get; private set; }

        protected override void OnStartup(StartupEventArgs e)
        {
            base.OnStartup(e);

            Options = new Options();
            var optionsAreGood = CommandLine.Parser.Default.ParseArguments(e.Args, Options);
            if (!optionsAreGood)
            {
                _ = MessageBox.Show(Options.GetUsage());
                Shutdown();
                // BUGFIX: Application.Shutdown() only *requests* termination after the
                // current dispatcher work completes - it does not stop this method.
                // Without this return, the logger reconfiguration and the
                // "Starting application" log below would still run with bad options.
                return;
            }

            // reconfigure loggers to use a file in the current directory
            // or the file specified by the "Log" commandline parameter
            if (LogManager.Configuration != null)
            {
                var target = (FileTarget)LogManager.Configuration.FindTargetByName("logfile");
                if (target != null)
                {
                    var pathToLog = Options.LogFile ?? Path.Combine(Environment.CurrentDirectory, "WorkloadViewer.log");
                    if (!Path.IsPathRooted(pathToLog))
                    {
                        pathToLog = Path.Combine(Environment.CurrentDirectory, pathToLog);
                    }
                    Console.WriteLine($"Writing logs to {pathToLog}");
                    target.FileName = pathToLog;
                    LogManager.ReconfigExistingLoggers();
                }
                else
                {
                    Console.WriteLine("No file targets configured");
                }
            }
            else
            {
                Console.WriteLine("NLog not configured");
            }

            logger.Info("Starting application");
        }
    }

    /// <summary>
    /// Command line options for WorkloadViewer: optional config/log files plus
    /// connection details for the baseline and (optional) benchmark workloads.
    /// </summary>
    public class Options
    {
        [Option('F', "File", HelpText = "Configuration file")]
        public string ConfigurationFile { get; set; }

        [Option('L', "Log", HelpText = "Log File")]
        public string LogFile { get; set; }

        [Option('S', "BaselineServer", HelpText = "Baseline Server")]
        public string BaselineServer { get; set; }

        [Option('D', "BaselineDatabase", HelpText = "Baseline Database")]
        public string BaselineDatabase { get; set; }

        [Option('M', "BaselineSchema", HelpText = "Baseline Schema")]
        public string BaselineSchema { get; set; }

        [Option('U', "BaselineUsername", HelpText = "Baseline Username")]
        public string BaselineUsername { get; set; }

        [Option('P', "BaselinePassword", HelpText = "Baseline Password")]
        public string BaselinePassword { get; set; }

        [Option('T', "BenchmarkServer", HelpText = "Benchmark Server")]
        public string BenchmarkServer { get; set; }

        [Option('E', "BenchmarkDatabase", HelpText = "Benchmark Database")]
        public string BenchmarkDatabase { get; set; }

        [Option('N', "BenchmarkSchema", HelpText = "Benchmark Schema")]
        public string BenchmarkSchema { get; set; }

        [Option('V', "BenchmarkUsername", HelpText = "Benchmark Username")]
        public string BenchmarkUsername { get; set; }

        [Option('Q', "BenchmarkPassword", HelpText = "Benchmark Password")]
        public string BenchmarkPassword { get; set; }

        [ParserState]
        public IParserState LastParserState { get; set; }

        [HelpOption]
        public string GetUsage()
        {
            return HelpText.AutoBuild(this, (HelpText current) => HelpText.DefaultParsingErrorsHandler(this, current));
        }
    }
}
================================================ FILE: WorkloadViewer/Comparer/QueryResultEqualityComparer.cs ================================================ using System.Collections.Generic; using WorkloadViewer.ViewModel; public class QueryResultEqualityComparer : IEqualityComparer { public bool Equals(QueryResult x, QueryResult y) { return x?.query_hash == y?.query_hash; } public int GetHashCode(QueryResult obj) { return obj?.query_hash.GetHashCode() ?? 0; } } ================================================ FILE: WorkloadViewer/Model/NormalizedQuery.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace WorkloadViewer.Model { public class NormalizedQuery { public long Hash { get; set; } public string NormalizedText { get; set; } public string ExampleText { get; set; } } } ================================================ FILE: WorkloadViewer/Model/QueryDetails.cs ================================================ using OxyPlot; using OxyPlot.Axes; using OxyPlot.Series; using System; using System.Collections.Generic; using System.Data; using System.Linq; using System.Text; using System.Threading.Tasks; namespace WorkloadViewer.Model { public class QueryDetails { public NormalizedQuery Query { get; private set; } private WorkloadAnalysis Benchmark { get; set; } private WorkloadAnalysis Baseline { get; set; } public QueryDetails(NormalizedQuery query, WorkloadAnalysis baseline, WorkloadAnalysis benchmark) { Query = query; Baseline = baseline; Benchmark = benchmark; } public DataTable QueryStats { get { return LoadQueryStats(); } } public PlotModel DetailPlotModel { get { return LoadPlotModel(); } } private DataTable LoadQueryStats() { var result = new DataTable(); result.Columns.Add(new DataColumn("Application", typeof(String))); result.Columns.Add(new DataColumn("Database", typeof(String))); result.Columns.Add(new DataColumn("Host", 
typeof(String))); result.Columns.Add(new DataColumn("Login", typeof(String))); result.Columns.Add(new DataColumn("avg_duration_us", typeof(Int64))); result.Columns.Add(new DataColumn("avg_duration_us2", typeof(Int64))); result.Columns.Add(new DataColumn("avg_cpu_us", typeof(Int64))); result.Columns.Add(new DataColumn("avg_cpu_us2", typeof(Int64))); result.Columns.Add(new DataColumn("avg_reads", typeof(Int64))); result.Columns.Add(new DataColumn("avg_reads2", typeof(Int64))); result.Columns.Add(new DataColumn("avg_writes", typeof(Int64))); result.Columns.Add(new DataColumn("avg_writes2", typeof(Int64))); result.Columns.Add(new DataColumn("execution_count", typeof(Int64))); result.Columns.Add(new DataColumn("execution_count2", typeof(Int64))); var baseline = from t in Baseline.Points where t.NormalizedQuery.Hash == Query.Hash group t by new { t.ApplicationName, t.DatabaseName, t.HostName, t.LoginName } into grp select new { grp.Key.ApplicationName, grp.Key.DatabaseName, grp.Key.HostName, grp.Key.LoginName, avg_duration_us = grp.Average(t => t.AvgDurationUs), avg_cpu_us = grp.Average(t => t.AvgCpuUs), avg_reads = grp.Average(t => t.AvgReads), avg_writes = grp.Average(t => t.AvgWrites), execution_count = grp.Sum(t => t.ExecutionCount) }; var benchmark = from t in baseline where false select new { t.ApplicationName, t.DatabaseName, t.HostName, t.LoginName, t.avg_duration_us, t.avg_cpu_us, t.avg_reads, t.avg_writes, t.execution_count }; if (Benchmark != null) { benchmark = from t in Benchmark.Points where t.NormalizedQuery.Hash == Query.Hash group t by new { t.ApplicationName, t.DatabaseName, t.HostName, t.LoginName } into grp select new { grp.Key.ApplicationName, grp.Key.DatabaseName, grp.Key.HostName, grp.Key.LoginName, avg_duration_us = grp.Average(t => t.AvgDurationUs), avg_cpu_us = grp.Average(t => t.AvgCpuUs), avg_reads = grp.Average(t => t.AvgReads), avg_writes = grp.Average(t => t.AvgWrites), execution_count = grp.Sum(t => t.ExecutionCount) }; } foreach (var itm 
in baseline) { var newRow = result.Rows.Add(); newRow["Application"] = itm.ApplicationName; newRow["Database"] = itm.DatabaseName; newRow["Host"] = itm.HostName; newRow["Login"] = itm.LoginName; newRow["avg_duration_us"] = itm.avg_duration_us; newRow["avg_cpu_us"] = itm.avg_cpu_us; newRow["avg_reads"] = itm.avg_reads; newRow["avg_writes"] = itm.avg_reads; newRow["execution_count"] = itm.execution_count; if (Benchmark != null) { var _itm = from t in benchmark where t.ApplicationName == itm.ApplicationName && t.DatabaseName == itm.DatabaseName && t.HostName == itm.HostName && t.LoginName == itm.LoginName select new { t.avg_cpu_us, t.avg_duration_us, t.avg_reads, t.avg_writes, t.execution_count }; var itm2 = _itm.ToList(); if(itm2.Count > 0) { newRow["avg_duration_us2"] = itm2[0].avg_duration_us; newRow["avg_cpu_us2"] = itm2[0].avg_cpu_us; newRow["avg_reads2"] = itm2[0].avg_reads; newRow["avg_writes2"] = itm2[0].avg_reads; newRow["execution_count2"] = itm2[0].execution_count; } else { newRow["avg_duration_us2"] = 0; newRow["avg_cpu_us2"] = 0; newRow["avg_reads2"] = 0; newRow["avg_writes2"] = 0; newRow["execution_count2"] = 0; } } } foreach (var itm in benchmark) { var res = from row in result.AsEnumerable() where row.Field("Application") == itm.ApplicationName && row.Field("Database") == itm.DatabaseName && row.Field("Host") == itm.HostName && row.Field("Login") == itm.LoginName select row; if (res.Count() == 0) { var newRow = result.Rows.Add(); newRow["Application"] = itm.ApplicationName; newRow["Database"] = itm.DatabaseName; newRow["Host"] = itm.HostName; newRow["Login"] = itm.LoginName; newRow["avg_duration_us2"] = itm.avg_duration_us; newRow["avg_cpu_us2"] = itm.avg_cpu_us; newRow["avg_reads2"] = itm.avg_reads; newRow["avg_writes2"] = itm.avg_reads; newRow["execution_count2"] = itm.execution_count; var _itm = from t in baseline where t.ApplicationName == itm.ApplicationName && t.DatabaseName == itm.DatabaseName && t.HostName == itm.HostName && t.LoginName == 
itm.LoginName select new { t.avg_cpu_us, t.avg_duration_us, t.avg_reads, t.avg_writes, t.execution_count }; var itm2 = _itm.ToList(); if (itm2.Count > 0) { newRow["avg_duration_us"] = itm2[0].avg_duration_us; newRow["avg_cpu_us"] = itm2[0].avg_cpu_us; newRow["avg_reads"] = itm2[0].avg_reads; newRow["avg_writes"] = itm2[0].avg_reads; newRow["execution_count"] = itm2[0].execution_count; } else { newRow["avg_duration_us"] = 0; newRow["avg_cpu_us"] = 0; newRow["avg_reads"] = 0; newRow["avg_writes"] = 0; newRow["execution_count"] = 0; } } } return result; } private PlotModel LoadPlotModel() { var plotModel = new PlotModel(); plotModel.LegendOrientation = LegendOrientation.Horizontal; plotModel.LegendPlacement = LegendPlacement.Inside; plotModel.LegendPosition = LegendPosition.TopLeft; plotModel.LegendBackground = OxyColor.FromAColor(200, OxyColors.White); plotModel.Title = "Average Duration"; var offsetAxis = new LinearAxis() { MajorGridlineStyle = LineStyle.Dot, MinorGridlineStyle = LineStyle.None, Position = AxisPosition.Bottom, Title = "Offset minutes", AbsoluteMinimum = 0, MinorTickSize = 0 }; plotModel.Axes.Add(offsetAxis); var valueAxis1 = new LinearAxis() { MajorGridlineStyle = LineStyle.Dot, MinorGridlineStyle = LineStyle.None, Position = AxisPosition.Left, StringFormat = "N0", IsZoomEnabled = false, AbsoluteMinimum = 0, MaximumPadding = 0.2, MinorTickSize = 0, Title = "Duration (us)" }; plotModel.Axes.Add(valueAxis1); plotModel.PlotMargins = new OxyThickness(70, 0, 0, 30); plotModel.Series.Clear(); plotModel.Series.Add(LoadDurationSeries(Baseline, OxyColor.Parse("#01B8AA"))); if(Benchmark != null) { plotModel.Series.Add(LoadDurationSeries(Benchmark, OxyColor.Parse("#000000"))); } return plotModel; } private Series LoadDurationSeries(WorkloadAnalysis analysis, OxyColor color) { var durationSeries = new LineSeries() { StrokeThickness = 2, MarkerSize = 3, MarkerStroke = OxyColor.Parse("#FF0000"), //Red MarkerType = MarkerType.None, CanTrackerInterpolatePoints = 
false, TrackerFormatString = "Offset: {2:0}\n{0}: {4:0}", Title = analysis.Name, Color = color, Smooth = false }; var Table = from t in analysis.Points where t.NormalizedQuery.Hash == Query.Hash group t by new { offset = t.OffsetMinutes } into grp orderby grp.Key.offset select new { offset_minutes = grp.Key.offset, duration = grp.Average(t => t.AvgDurationUs) }; foreach (var p in Table) { durationSeries.Points.Add(new DataPoint(p.offset_minutes, p.duration)); } return durationSeries; } } } ================================================ FILE: WorkloadViewer/Model/SqlConnectionInfo.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace WorkloadViewer.Model { public class SqlConnectionInfo { public string ServerName { get; set; } public string DatabaseName { get; set; } = "master"; public string SchemaName { get; set; } = "dbo"; public bool UseIntegratedSecurity { get; set; } public string UserName { get; set; } public string Password { get; set; } public bool Encrypt { get; set; } = false; public bool TrustServerCertificate { get; set; } = false; public string ApplicationName { get; set; } = "WorkloadAnalyzer"; public string ConnectionString { get { var connectionString = "Data Source=" + ServerName + ";"; if (String.IsNullOrEmpty(DatabaseName)) { connectionString += "Initial Catalog = master; "; } else { connectionString += "Initial Catalog = " + DatabaseName + "; "; } if (String.IsNullOrEmpty(UserName)) { connectionString += "Integrated Security = SSPI; "; } else { connectionString += "User Id = " + UserName + "; "; connectionString += "Password = " + Password + "; "; } if (!String.IsNullOrEmpty(ApplicationName)) { connectionString += "Application Name = " + ApplicationName + "; "; } if (Encrypt) { connectionString += "Encrypt = true; "; } if (TrustServerCertificate) { connectionString += "TrustServerCertificate = true; "; } return 
connectionString; } } } } ================================================ FILE: WorkloadViewer/Model/WorkloadAnalysis.cs ================================================ using NLog; using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Data.SqlClient; using System.IO; using System.Linq; using System.Reflection; using System.Text; using System.Threading.Tasks; namespace WorkloadViewer.Model { public class WorkloadAnalysis { private static Logger logger = LogManager.GetCurrentClassLogger(); public ObservableCollection Points { get; set; } public string Name { get; set; } public DateTime StartDate { get; set; } public SqlConnectionInfo ConnectionInfo { get; set; } public void Load() { using (var conn = new SqlConnection()) { conn.ConnectionString = ConnectionInfo.ConnectionString; conn.Open(); var NormalizedQueries = new Dictionary(); var numIntervals = 0; var preaggregation = 1; using (var cmd = conn.CreateCommand()) { cmd.CommandText = "SELECT COUNT(*) FROM " + ConnectionInfo.SchemaName + ".Intervals WHERE duration_minutes > 0;"; cmd.CommandTimeout = 0; numIntervals = (int)cmd.ExecuteScalar(); } if (numIntervals > 500) // around 8 hours { preaggregation = 15; } if (numIntervals > 1000) // around 16 hours { preaggregation = 30; } if (numIntervals > 2000) // around 32 hours { preaggregation = 60; } using (var cmd = conn.CreateCommand()) { cmd.CommandText = "SELECT TOP(1) end_time FROM " + ConnectionInfo.SchemaName + ".Intervals ORDER BY interval_id ASC "; cmd.CommandTimeout = 0; try { StartDate = (DateTime)cmd.ExecuteScalar(); } catch (Exception) { StartDate = DateTime.Today; } } using (var cmd = conn.CreateCommand()) { cmd.CommandText = "SELECT * FROM " + ConnectionInfo.SchemaName + ".NormalizedQueries"; cmd.CommandTimeout = 0; using (var rdr = cmd.ExecuteReader()) { while (rdr.Read()) { NormalizedQueries.Add(rdr.GetInt64(rdr.GetOrdinal("sql_hash")), new NormalizedQuery() { Hash = rdr.GetInt64(rdr.GetOrdinal("sql_hash")), 
NormalizedText = rdr.GetString(rdr.GetOrdinal("normalized_text")), ExampleText = rdr.GetString(rdr.GetOrdinal("example_text")) }); } } } using (var cmd = conn.CreateCommand()) { cmd.CommandTimeout = 0; var sqlText = WorkloadViewer.Properties.Resources.WorkloadAnalysis; cmd.CommandText = sqlText.Replace("capture", ConnectionInfo.SchemaName); cmd.CommandText = cmd.CommandText.Replace("preaggregation", preaggregation.ToString()); cmd.CommandTimeout = 0; using (var rdr = cmd.ExecuteReader()) { Points = new ObservableCollection(); while (rdr.Read()) { try { var point = new WorkloadAnalysisPoint() { OffsetMinutes = rdr.GetInt32(rdr.GetOrdinal("offset_minutes")), DurationMinutes = rdr.GetInt32(rdr.GetOrdinal("duration_minutes")), NormalizedQuery = NormalizedQueries[rdr.GetInt64(rdr.GetOrdinal("sql_hash"))], ApplicationName = rdr.GetString(rdr.GetOrdinal("application_name")), DatabaseName = rdr.GetString(rdr.GetOrdinal("database_name")), LoginName = rdr.GetString(rdr.GetOrdinal("login_name")), HostName = rdr.GetString(rdr.GetOrdinal("host_name")), AvgCpuUs = rdr.GetInt64(rdr.GetOrdinal("avg_cpu_us")), MinCpuUs = rdr.GetInt64(rdr.GetOrdinal("min_cpu_us")), MaxCpuUs = rdr.GetInt64(rdr.GetOrdinal("max_cpu_us")), SumCpuUs = rdr.GetInt64(rdr.GetOrdinal("sum_cpu_us")), AvgReads = rdr.GetInt64(rdr.GetOrdinal("avg_reads")), MinReads = rdr.GetInt64(rdr.GetOrdinal("min_reads")), MaxReads = rdr.GetInt64(rdr.GetOrdinal("max_reads")), SumReads = rdr.GetInt64(rdr.GetOrdinal("sum_reads")), AvgWrites = rdr.GetInt64(rdr.GetOrdinal("avg_writes")), MinWrites = rdr.GetInt64(rdr.GetOrdinal("min_writes")), MaxWrites = rdr.GetInt64(rdr.GetOrdinal("max_writes")), SumWrites = rdr.GetInt64(rdr.GetOrdinal("sum_writes")), AvgDurationUs = rdr.GetInt64(rdr.GetOrdinal("avg_duration_us")), MinDurationUs = rdr.GetInt64(rdr.GetOrdinal("min_duration_us")), MaxDurationUs = rdr.GetInt64(rdr.GetOrdinal("max_duration_us")), SumDurationUs = rdr.GetInt64(rdr.GetOrdinal("sum_duration_us")), ExecutionCount = 
rdr.GetInt64(rdr.GetOrdinal("execution_count")) }; Points.Add(point); } catch(Exception e) { logger.Warn($"Skipping invalid datapoint at {rdr.GetInt32(rdr.GetOrdinal("offset_minutes"))} because of Exception: {e.StackTrace}"); } } } } } } } } ================================================ FILE: WorkloadViewer/Model/WorkloadAnalysisPoint.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace WorkloadViewer.Model { public class WorkloadAnalysisPoint { public int OffsetMinutes { get; set; } public int DurationMinutes { get; set; } public NormalizedQuery NormalizedQuery { get; set; } public string ApplicationName { get; set; } public string DatabaseName { get; set; } public string LoginName { get; set; } public string HostName { get; set; } public long AvgCpuUs { get; set; } public long MinCpuUs { get; set; } public long MaxCpuUs { get; set; } public long SumCpuUs { get; set; } public long AvgReads { get; set; } public long MinReads { get; set; } public long MaxReads { get; set; } public long SumReads { get; set; } public long AvgWrites { get; set; } public long MinWrites { get; set; } public long MaxWrites { get; set; } public long SumWrites { get; set; } public long AvgDurationUs { get; set; } public long MinDurationUs { get; set; } public long MaxDurationUs { get; set; } public long SumDurationUs { get; set; } public long ExecutionCount { get; set; } } } ================================================ FILE: WorkloadViewer/NLog.config ================================================  ================================================ FILE: WorkloadViewer/Properties/AssemblyInfo.cs ================================================ using System.Reflection; using System.Resources; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Windows; // General Information about an assembly is controlled through the 
following // set of attributes. Change these attribute values to modify the information // associated with an assembly. //In order to begin building localizable applications, set //CultureYouAreCodingWith in your .csproj file //inside a . For example, if you are using US english //in your source files, set the to en-US. Then uncomment //the NeutralResourceLanguage attribute below. Update the "en-US" in //the line below to match the UICulture setting in the project file. //[assembly: NeutralResourcesLanguage("en-US", UltimateResourceFallbackLocation.Satellite)] [assembly: ThemeInfo( ResourceDictionaryLocation.None, //where theme specific resource dictionaries are located //(used if a resource is not found in the page, // or application resource dictionaries) ResourceDictionaryLocation.SourceAssembly //where the generic resource dictionary is located //(used if a resource is not found in the page, // app, or any theme specific resource dictionaries) )] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] ================================================ FILE: WorkloadViewer/Properties/Resources.Designer.cs ================================================ //------------------------------------------------------------------------------ // // This code was generated by a tool. // Runtime Version:4.0.30319.42000 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // //------------------------------------------------------------------------------ namespace WorkloadViewer.Properties { using System; /// /// A strongly-typed resource class, for looking up localized strings, etc. 
/// // This class was auto-generated by the StronglyTypedResourceBuilder // class via a tool like ResGen or Visual Studio. // To add or remove a member, edit your .ResX file then rerun ResGen // with the /str option, or rebuild your VS project. [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "17.0.0.0")] [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] public class Resources { private static global::System.Resources.ResourceManager resourceMan; private static global::System.Globalization.CultureInfo resourceCulture; [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] internal Resources() { } /// /// Returns the cached ResourceManager instance used by this class. /// [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] public static global::System.Resources.ResourceManager ResourceManager { get { if (object.ReferenceEquals(resourceMan, null)) { global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("WorkloadViewer.Properties.Resources", typeof(Resources).Assembly); resourceMan = temp; } return resourceMan; } } /// /// Overrides the current thread's CurrentUICulture property for all /// resource lookups using this strongly typed resource class. /// [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] public static global::System.Globalization.CultureInfo Culture { get { return resourceCulture; } set { resourceCulture = value; } } /// /// Looks up a localized resource of type System.Byte[]. 
/// public static byte[] TSQL { get { object obj = ResourceManager.GetObject("TSQL", resourceCulture); return ((byte[])(obj)); } } /// /// Looks up a localized string similar to WITH baseData AS ( /// SELECT /// DATEDIFF(minute, Base.end_time, bIn.end_time) AS offset_minutes, /// bWD.sql_hash, /// bWD.avg_cpu_us, /// bWD.min_cpu_us, /// bWD.max_cpu_us, /// bWD.sum_cpu_us, /// bWD.avg_reads, /// bWD.min_reads, /// bWD.max_reads, /// bWD.sum_reads, /// bWD.avg_writes, /// bWD.min_writes, /// bWD.max_writes, /// bWD.sum_writes, /// bWD.avg_duration_us, /// bWD.min_duration_us, /// bWD.max_duration_us, /// bWD.sum_duration_us, /// bWD.execution_count, /// bIn.duration_minutes, /// bNQ.norm [rest of string was truncated]";. /// public static string WorkloadAnalysis { get { return ResourceManager.GetString("WorkloadAnalysis", resourceCulture); } } } } ================================================ FILE: WorkloadViewer/Properties/Resources.resx ================================================  text/microsoft-resx 2.0 System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 ..\Resources\TSQL.xshd;System.Byte[], mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 ..\Resources\WorkloadAnalysis.sql;System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089;utf-8 ================================================ FILE: WorkloadViewer/Properties/Settings.Designer.cs ================================================ //------------------------------------------------------------------------------ // // This code was generated by a tool. // Runtime Version:4.0.30319.42000 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. 
// //------------------------------------------------------------------------------ namespace WorkloadViewer.Properties { [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "17.3.0.0")] internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase { private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings()))); public static Settings Default { get { return defaultInstance; } } } } ================================================ FILE: WorkloadViewer/Properties/Settings.settings ================================================  ================================================ FILE: WorkloadViewer/Resources/TSQL.xshd ================================================  TODO FIXME HACK UNDONE ' ' " " /\* \*/ ABSOLUTE ACTION ADD ALTER AS ASC AT AUTHORIZATION BACKUP BEGIN BIT BREAK BROWSE BULK BY CASCADE CASE CATALOG CHAR CHARACTER CHECK CHECKPOINT CLOSE CLUSTERED COLUMN COMMIT COMPUTE CONNECT CONSTRAINT CONTAINSTABLE CONTINUE CREATE CURRENT CURRENT_DATE CURSOR DATABASE DATE DBCC DEALLOCATE DEC DECIMAL DECLARE DEFAULT DELETE DENY DESC DISK DISTINCT DISTRIBUTED DOUBLE DROP DUMP ELSE END ERRLVL ESCAPE EXCEPT EXEC EXECUTE EXIT EXTERNAL FETCH FILE FILLFACTOR FIRST FLOAT FOR FOREIGN FREETEXT FREETEXTTABLE FROM FULL FUNCTION GET GLOBAL GO GOTO GRANT GROUP HAVING HOLDLOCK IDENTITY IDENTITY_INSERT IDENTITYCOL IF IMMEDIATE INCLUDE INDEX INSENSITIVE INSERT INT INTEGER INTERSECT INTO ISOLATION KEY KILL LANGUAGE LAST LEVEL LINENO LOAD LOCAL MATCH MERGE NATIONAL NCHAR NEXT NO NOCHECK NONCLUSTERED NONE NUMERIC OF OFF OFFSETS ON OPEN OPENDATASOURCE OPENQUERY OPENROWSET OPENXML OPTION ORDER OUTPUT OVER PARTIAL PERCENT PLAN PRECISION PRIMARY PRINT PRIOR PROC PROCEDURE PUBLIC RAISERROR READ READTEXT REAL RECONFIGURE REFERENCES 
RELATIVE REPLICATION RESTORE RESTRICT RESTRICT RETURN REVERT REVOKE ROLLBACK ROLLBACK ROWCOUNT ROWGUIDCOL ROWS RULE SAVE SCHEMA SCROLL SECURITYAUDIT SELECT SEMANTICKEYPHRASETABLE SEMANTICSIMILARITYDETAILSTABLE SEMANTICSIMILARITYTABLE SESSION SET SETUSER SHUTDOWN SMALLINT SQL STATISTICS TABLE TABLESAMPLE TEXTSIZE THEN TIME TIMESTAMP TO TOP TRAN TRANSACTION TRIGGER TRUNCATE UNION UNIQUE UPDATETEXT USE USER USING VALUE VALUES VARCHAR VARYING VARYING VIEW WAITFOR WHEN WHERE WHILE WITH WITHIN GROUP WRITETEXT ZONE COALESCE COLLATE SESSION_USER CONTAINS CONVERT SYSTEM_USER CURRENT_TIME CURRENT_TIMESTAMP CURRENT_USER NULLIF TRY_CONVERT TSEQUAL UPDATE EXTRACT AVG BIT_LENGTH HOUR SECOND CAST SESSION_USER COALESCE SPACE COLLATE SUBSTRING SUM SYSTEM_USER CONVERT COUNT CURRENT_TIME LOWER CURRENT_TIMESTAMP CURRENT_USER MAX MIN MINUTE DAY TRIM MONTH UPDATE UPPER NULLIF OCTET_LENGTH YEAR ALL AND ANY BETWEEN CROSS EXISTS GO IN INNER IS JOIN LEFT LIKE NOT NULL OR OUTER PIVOT RIGHT SOME UNPIVOT all_columns all_objects all_parameters all_sql_modules all_views allocation_units assemblies assembly_files assembly_modules assembly_references assembly_types asymmetric_keys availability_databases_cluster availability_group_listener_ip_addresses availability_group_listeners availability_groups_cluster availability_groups availability_read_only_routing_lists availability_replicas backup_devices certificates change_tracking_databases change_tracking_tables check_constraints CHECK_CONSTRAINTS COLUMN_DOMAIN_USAGE column_encryption_key_values column_encryption_keys column_master_keys COLUMN_PRIVILEGES column_store_dictionaries column_store_row_groups column_store_segments column_type_usages column_xml_schema_collection_usages columns COLUMNS computed_columns configurations CONSTRAINT_COLUMN_USAGE CONSTRAINT_TABLE_USAGE conversation_endpoints conversation_groups conversation_priorities credentials crypt_properties cryptographic_providers data_spaces database_audit_specification_details 
database_audit_specifications database_credentials database_files database_filestream_options database_mirroring_endpoints database_mirroring_witnesses database_mirroring database_permissions database_principals database_query_store_options database_recovery_status database_role_members database_scoped_configurations database_scoped_credentials databases default_constraints destination_data_spaces dm_audit_actions dm_audit_class_type_map dm_broker_activated_tasks dm_broker_connections dm_broker_forwarded_messages dm_broker_queue_monitors dm_cdc_errors dm_cdc_log_scan_sessions dm_clr_appdomains dm_clr_loaded_assemblies dm_clr_properties dm_clr_tasks dm_column_store_object_pool dm_cryptographic_provider_algorithms dm_cryptographic_provider_keys dm_cryptographic_provider_properties dm_cryptographic_provider_sessions dm_database_encryption_keys dm_db_column_store_row_group_operational_stats dm_db_column_store_row_group_physical_stats dm_db_database_page_allocations dm_db_file_space_usage dm_db_fts_index_physical_stats dm_db_incremental_stats_properties dm_db_index_operational_stats dm_db_index_physical_stats dm_db_index_usage_stats dm_db_log_space_usage dm_db_mirroring_auto_page_repair dm_db_mirroring_connections dm_db_mirroring_past_actions dm_db_missing_index_columns dm_db_missing_index_details dm_db_missing_index_group_stats dm_db_missing_index_groups dm_db_objects_disabled_on_compatibility_level_change dm_db_partition_stats dm_db_persisted_sku_features dm_db_rda_migration_status dm_db_rda_schema_update_status dm_db_script_level dm_db_session_space_usage dm_db_stats_histogram dm_db_stats_properties_internal dm_db_stats_properties dm_db_task_space_usage dm_db_uncontained_entities dm_db_xtp_checkpoint_files dm_db_xtp_checkpoint_stats dm_db_xtp_gc_cycle_stats dm_db_xtp_hash_index_stats dm_db_xtp_index_stats dm_db_xtp_memory_consumers dm_db_xtp_nonclustered_index_stats dm_db_xtp_object_stats dm_db_xtp_table_memory_stats dm_db_xtp_transactions 
dm_exec_background_job_queue_stats dm_exec_background_job_queue dm_exec_cached_plan_dependent_objects dm_exec_cached_plans dm_exec_compute_node_errors dm_exec_compute_node_status dm_exec_compute_nodes dm_exec_connections dm_exec_cursors dm_exec_describe_first_result_set_for_object dm_exec_describe_first_result_set dm_exec_distributed_request_steps dm_exec_distributed_requests dm_exec_distributed_sql_requests dm_exec_dms_services dm_exec_dms_workers dm_exec_external_operations dm_exec_external_work dm_exec_function_stats dm_exec_input_buffer dm_exec_plan_attributes dm_exec_procedure_stats dm_exec_query_memory_grants dm_exec_query_optimizer_info dm_exec_query_optimizer_memory_gateways dm_exec_query_parallel_workers dm_exec_query_plan dm_exec_query_profiles dm_exec_query_resource_semaphores dm_exec_query_statistics_xml dm_exec_query_stats dm_exec_query_transformation_stats dm_exec_requests dm_exec_session_wait_stats dm_exec_sessions dm_exec_sql_text dm_exec_text_query_plan dm_exec_trigger_stats dm_exec_valid_use_hints dm_exec_xml_handles dm_external_script_execution_stats dm_external_script_requests dm_filestream_file_io_handles dm_filestream_file_io_requests dm_filestream_non_transacted_handles dm_fts_active_catalogs dm_fts_fdhosts dm_fts_index_keywords_by_document dm_fts_index_keywords_by_property dm_fts_index_keywords_position_by_document dm_fts_index_keywords dm_fts_index_population dm_fts_memory_buffers dm_fts_memory_pools dm_fts_outstanding_batches dm_fts_parser dm_fts_population_ranges dm_fts_semantic_similarity_population dm_hadr_auto_page_repair dm_hadr_automatic_seeding dm_hadr_availability_group_states dm_hadr_availability_replica_cluster_nodes dm_hadr_availability_replica_cluster_states dm_hadr_availability_replica_states dm_hadr_cluster_members dm_hadr_cluster_networks dm_hadr_cluster dm_hadr_database_replica_cluster_states dm_hadr_database_replica_states dm_hadr_instance_node_map dm_hadr_name_id_map dm_hadr_physical_seeding_stats dm_io_backup_tapes 
dm_io_cluster_shared_drives dm_io_cluster_valid_path_names dm_io_pending_io_requests dm_io_virtual_file_stats dm_logconsumer_cachebufferrefs dm_logconsumer_privatecachebuffers dm_logpool_consumers dm_logpool_hashentries dm_logpool_sharedcachebuffers dm_logpool_stats dm_logpoolmgr_freepools dm_logpoolmgr_respoolsize dm_logpoolmgr_stats dm_os_buffer_descriptors dm_os_buffer_pool_extension_configuration dm_os_child_instances dm_os_cluster_nodes dm_os_cluster_properties dm_os_dispatcher_pools dm_os_dispatchers dm_os_hosts dm_os_latch_stats dm_os_loaded_modules dm_os_memory_allocations dm_os_memory_broker_clerks dm_os_memory_brokers dm_os_memory_cache_clock_hands dm_os_memory_cache_counters dm_os_memory_cache_entries dm_os_memory_cache_hash_tables dm_os_memory_clerks dm_os_memory_node_access_stats dm_os_memory_nodes dm_os_memory_objects dm_os_memory_pools dm_os_nodes dm_os_performance_counters dm_os_process_memory dm_os_ring_buffers dm_os_schedulers dm_os_server_diagnostics_log_configurations dm_os_spinlock_stats dm_os_stacks dm_os_sublatches dm_os_sys_info dm_os_sys_memory dm_os_tasks dm_os_threads dm_os_virtual_address_dump dm_os_volume_stats dm_os_wait_stats dm_os_waiting_tasks dm_os_windows_info dm_os_worker_local_storage dm_os_workers dm_qn_subscriptions dm_repl_articles dm_repl_schemas dm_repl_tranhash dm_repl_traninfo dm_resource_governor_configuration dm_resource_governor_external_resource_pool_affinity dm_resource_governor_external_resource_pools dm_resource_governor_resource_pool_affinity dm_resource_governor_resource_pool_volumes dm_resource_governor_resource_pools dm_resource_governor_workload_groups dm_server_audit_status dm_server_memory_dumps dm_server_registry dm_server_services dm_sql_referenced_entities dm_sql_referencing_entities dm_tcp_listener_states dm_tran_active_snapshot_database_transactions dm_tran_active_transactions dm_tran_commit_table dm_tran_current_snapshot dm_tran_current_transaction dm_tran_database_transactions 
dm_tran_global_recovery_transactions dm_tran_global_transactions_enlistments dm_tran_global_transactions_log dm_tran_global_transactions dm_tran_locks dm_tran_session_transactions dm_tran_top_version_generators dm_tran_transactions_snapshot dm_tran_version_store dm_xe_map_values dm_xe_object_columns dm_xe_objects dm_xe_packages dm_xe_session_event_actions dm_xe_session_events dm_xe_session_object_columns dm_xe_session_targets dm_xe_sessions dm_xtp_gc_queue_stats dm_xtp_gc_stats dm_xtp_system_memory_consumers dm_xtp_threads dm_xtp_transaction_recent_rows dm_xtp_transaction_stats DOMAIN_CONSTRAINTS DOMAINS endpoint_webmethods endpoints event_notification_event_types event_notifications events extended_procedures extended_properties external_data_sources external_file_formats external_tables filegroups filetable_system_defined_objects filetables fn_builtin_permissions fn_cColvEntries_80 fn_cdc_check_parameters fn_cdc_get_column_ordinal fn_cdc_get_max_lsn fn_cdc_get_min_lsn fn_cdc_has_column_changed fn_cdc_hexstrtobin fn_cdc_map_lsn_to_time fn_cdc_map_time_to_lsn fn_check_object_signatures fn_column_store_row_groups fn_db_backup_file_snapshots fn_dblog_xtp fn_dblog fn_dump_dblog_xtp fn_dump_dblog fn_EnumCurrentPrincipals fn_fIsColTracked fn_get_audit_file fn_get_sql fn_GetCurrentPrincipal fn_GetRowsetIdFromRowDump fn_hadr_backup_is_preferred_replica fn_hadr_distributed_ag_database_replica fn_hadr_distributed_ag_replica fn_hadr_is_primary_replica fn_hadr_is_same_replica fn_helpcollations fn_helpdatatypemap fn_IsBitSetInBitmask fn_isrolemember fn_listextendedproperty fn_MapSchemaType fn_MSdayasnumber fn_MSgeneration_downloadonly fn_MSget_dynamic_filter_login fn_MSorbitmaps fn_MSrepl_map_resolver_clsid fn_MStestbit fn_MSvector_downloadonly fn_MSxe_read_event_stream fn_my_permissions fn_numberOf1InBinaryAfterLoc fn_numberOf1InVarBinary fn_PhysLocCracker fn_PhysLocFormatter fn_repladjustcolumnmap fn_repldecryptver4 fn_replformatdatetime fn_replgetcolidfrombitmap 
fn_replgetparsedddlcmd fn_replp2pversiontotranid fn_replreplacesinglequote fn_replreplacesinglequoteplusprotectstring fn_repluniquename fn_replvarbintoint fn_RowDumpCracker fn_servershareddrives fn_sqlagent_job_history fn_sqlagent_jobs fn_sqlagent_jobsteps_logs fn_sqlagent_jobsteps fn_sqlagent_subsystems fn_sqlvarbasetostr fn_stmt_sql_handle_from_sql_stmt fn_trace_geteventinfo fn_trace_getfilterinfo fn_trace_getinfo fn_trace_gettable fn_translate_permissions fn_validate_plan_guide fn_varbintohexstr fn_varbintohexsubstring fn_virtualfilestats fn_virtualservernodes fn_xe_file_target_read_file fn_yukonsecuritymodelrequired foreign_key_columns foreign_keys fulltext_catalogs fulltext_document_types fulltext_index_catalog_usages fulltext_index_columns fulltext_index_fragments fulltext_indexes fulltext_languages fulltext_semantic_language_statistics_database fulltext_semantic_languages fulltext_stoplists fulltext_stopwords fulltext_system_stopwords function_order_columns hash_indexes http_endpoints identity_columns index_columns indexes internal_partitions internal_tables KEY_COLUMN_USAGE key_constraints key_encryptions linked_logins login_token masked_columns master_files master_key_passwords memory_optimized_tables_internal_attributes message_type_xml_schema_collection_usages messages module_assembly_usages numbered_procedure_parameters numbered_procedures objects openkeys parameter_type_usages parameter_xml_schema_collection_usages parameters PARAMETERS partition_functions partition_parameters partition_range_values partition_schemes partitions periods plan_guides plan_persist_context_settings plan_persist_plan plan_persist_query_text plan_persist_query plan_persist_runtime_stats_interval plan_persist_runtime_stats procedures query_context_settings query_store_plan query_store_query_text query_store_query query_store_runtime_stats_interval query_store_runtime_stats queue_messages_1003150619 queue_messages_1035150733 queue_messages_1067150847 REFERENTIAL_CONSTRAINTS 
registered_search_properties registered_search_property_lists remote_data_archive_databases remote_data_archive_tables remote_logins remote_service_bindings resource_governor_configuration resource_governor_external_resource_pool_affinity resource_governor_external_resource_pools resource_governor_resource_pool_affinity resource_governor_resource_pools resource_governor_workload_groups routes ROUTINE_COLUMNS ROUTINES schemas SCHEMATA securable_classes security_policies security_predicates selective_xml_index_namespaces selective_xml_index_paths sequences SEQUENCES server_assembly_modules server_audit_specification_details server_audit_specifications server_audits server_event_notifications server_event_session_actions server_event_session_events server_event_session_fields server_event_session_targets server_event_sessions server_events server_file_audits server_permissions server_principal_credentials server_principals server_role_members server_sql_modules server_trigger_events server_triggers servers service_broker_endpoints service_contract_message_usages service_contract_usages service_contracts service_message_types service_queue_usages service_queues services soap_endpoints sp_add_agent_parameter sp_add_agent_profile sp_add_data_file_recover_suspect_db sp_add_log_file_recover_suspect_db sp_add_log_shipping_alert_job sp_add_log_shipping_primary_database sp_add_log_shipping_primary_secondary sp_add_log_shipping_secondary_database sp_add_log_shipping_secondary_primary sp_addapprole sp_addarticle sp_adddatatype sp_adddatatypemapping sp_adddistpublisher sp_adddistributiondb sp_adddistributor sp_adddynamicsnapshot_job sp_addextendedproc sp_addextendedproperty sp_AddFunctionalUnitToComponent sp_addlinkedserver sp_addlinkedsrvlogin sp_addlogin sp_addlogreader_agent sp_addmergealternatepublisher sp_addmergearticle sp_addmergefilter sp_addmergelogsettings sp_addmergepartition sp_addmergepublication sp_addmergepullsubscription_agent sp_addmergepullsubscription 
sp_addmergepushsubscription_agent sp_addmergesubscription sp_addmessage sp_addpublication_snapshot sp_addpublication sp_addpullsubscription_agent sp_addpullsubscription sp_addpushsubscription_agent sp_addqreader_agent sp_addqueued_artinfo sp_addremotelogin sp_addrole sp_addrolemember sp_addscriptexec sp_addserver sp_addsrvrolemember sp_addsubscriber_schedule sp_addsubscriber sp_addsubscription sp_addsynctriggers sp_addsynctriggerscore sp_addtabletocontents sp_addtype sp_addumpdevice sp_adduser sp_adjustpublisheridentityrange sp_altermessage sp_approlepassword sp_article_validation sp_articlecolumn sp_articlefilter sp_articleview sp_assemblies_rowset_rmt sp_assemblies_rowset sp_assemblies_rowset2 sp_assembly_dependencies_rowset_rmt sp_assembly_dependencies_rowset sp_assembly_dependencies_rowset2 sp_attach_db sp_attach_single_file_db sp_attachsubscription sp_audit_write sp_autostats sp_availability_group_command_internal sp_bcp_dbcmptlevel sp_begin_parallel_nested_tran sp_bindefault sp_bindrule sp_bindsession sp_browsemergesnapshotfolder sp_browsereplcmds sp_browsesnapshotfolder sp_can_tlog_be_applied sp_catalogs_rowset_rmt sp_catalogs_rowset sp_catalogs_rowset2 sp_catalogs sp_cdc_add_job sp_cdc_change_job sp_cdc_cleanup_change_table sp_cdc_dbsnapshotLSN sp_cdc_disable_db sp_cdc_disable_table sp_cdc_drop_job sp_cdc_enable_db sp_cdc_enable_table sp_cdc_generate_wrapper_function sp_cdc_get_captured_columns sp_cdc_get_ddl_history sp_cdc_help_change_data_capture sp_cdc_help_jobs sp_cdc_restoredb sp_cdc_scan sp_cdc_start_job sp_cdc_stop_job sp_cdc_vupgrade_databases sp_cdc_vupgrade sp_certify_removable sp_change_agent_parameter sp_change_agent_profile sp_change_log_shipping_primary_database sp_change_log_shipping_secondary_database sp_change_log_shipping_secondary_primary sp_change_subscription_properties sp_change_tracking_waitforchanges sp_change_users_login sp_changearticle sp_changearticlecolumndatatype sp_changedbowner sp_changedistpublisher sp_changedistributiondb 
sp_changedistributor_password sp_changedistributor_property sp_changedynamicsnapshot_job sp_changelogreader_agent sp_changemergearticle sp_changemergefilter sp_changemergelogsettings sp_changemergepublication sp_changemergepullsubscription sp_changemergesubscription sp_changeobjectowner sp_changepublication_snapshot sp_changepublication sp_changeqreader_agent sp_changereplicationserverpasswords sp_changesubscriber_schedule sp_changesubscriber sp_changesubscription sp_changesubscriptiondtsinfo sp_changesubstatus sp_check_constbytable_rowset sp_check_constbytable_rowset2 sp_check_constraints_rowset sp_check_constraints_rowset2 sp_check_dynamic_filters sp_check_for_sync_trigger sp_check_join_filter sp_check_log_shipping_monitor_alert sp_check_publication_access sp_check_removable sp_check_subset_filter sp_check_sync_trigger sp_checkinvalidivarticle sp_checkOraclepackageversion sp_clean_db_file_free_space sp_clean_db_free_space sp_cleanmergelogfiles sp_cleanup_log_shipping_history sp_cleanup_temporal_history sp_cleanupdbreplication sp_column_privileges_ex sp_column_privileges_rowset_rmt sp_column_privileges_rowset sp_column_privileges_rowset2 sp_column_privileges sp_columns_100_rowset sp_columns_100_rowset2 sp_columns_100 sp_columns_90_rowset_rmt sp_columns_90_rowset sp_columns_90_rowset2 sp_columns_90 sp_columns_ex_100 sp_columns_ex_90 sp_columns_ex sp_columns_managed sp_columns_rowset_rmt sp_columns_rowset sp_columns_rowset2 sp_columns sp_commit_parallel_nested_tran sp_configure_peerconflictdetection sp_configure sp_constr_col_usage_rowset sp_constr_col_usage_rowset2 sp_control_dbmasterkey_password sp_control_plan_guide sp_copymergesnapshot sp_copysnapshot sp_copysubscription sp_create_plan_guide_from_handle sp_create_plan_guide sp_create_removable sp_createmergepalrole sp_createorphan sp_createstats sp_createtranpalrole sp_cursor_list sp_cursor sp_cursorclose sp_cursorexecute sp_cursorfetch sp_cursoropen sp_cursoroption sp_cursorprepare sp_cursorprepexec 
sp_cursorunprepare sp_cycle_errorlog sp_databases sp_datatype_info_100 sp_datatype_info_90 sp_datatype_info sp_db_ebcdic277_2 sp_db_increased_partitions sp_db_selective_xml_index sp_db_vardecimal_storage_format sp_dbcmptlevel sp_dbfixedrolepermission sp_dbmmonitoraddmonitoring sp_dbmmonitorchangealert sp_dbmmonitorchangemonitoring sp_dbmmonitordropalert sp_dbmmonitordropmonitoring sp_dbmmonitorhelpalert sp_dbmmonitorhelpmonitoring sp_dbmmonitorresults sp_dbmmonitorupdate sp_dbremove sp_ddopen sp_defaultdb sp_defaultlanguage sp_delete_backup_file_snapshot sp_delete_backup sp_delete_http_namespace_reservation sp_delete_log_shipping_alert_job sp_delete_log_shipping_primary_database sp_delete_log_shipping_primary_secondary sp_delete_log_shipping_secondary_database sp_delete_log_shipping_secondary_primary sp_deletemergeconflictrow sp_deletepeerrequesthistory sp_deletetracertokenhistory sp_denylogin sp_depends sp_describe_cursor_columns sp_describe_cursor_tables sp_describe_cursor sp_describe_first_result_set sp_describe_parameter_encryption sp_describe_undeclared_parameters sp_detach_db sp_disableagentoffload sp_distcounters sp_drop_agent_parameter sp_drop_agent_profile sp_dropanonymousagent sp_dropanonymoussubscription sp_dropapprole sp_droparticle sp_dropdatatypemapping sp_dropdevice sp_dropdistpublisher sp_dropdistributiondb sp_dropdistributor sp_dropdynamicsnapshot_job sp_dropextendedproc sp_dropextendedproperty sp_droplinkedsrvlogin sp_droplogin sp_dropmergealternatepublisher sp_dropmergearticle sp_dropmergefilter sp_dropmergelogsettings sp_dropmergepartition sp_dropmergepublication sp_dropmergepullsubscription sp_dropmergesubscription sp_dropmessage sp_droporphans sp_droppublication sp_droppublisher sp_droppullsubscription sp_dropremotelogin sp_dropreplsymmetrickey sp_droprole sp_droprolemember sp_dropserver sp_dropsrvrolemember sp_dropsubscriber sp_dropsubscription sp_droptype sp_dropuser sp_dsninfo sp_enable_heterogeneous_subscription sp_enable_sql_debug 
sp_enableagentoffload sp_enum_oledb_providers sp_enumcustomresolvers sp_enumdsn sp_enumeratependingschemachanges sp_enumerrorlogs sp_enumfullsubscribers sp_enumoledbdatasources sp_estimate_data_compression_savings sp_estimated_rowsize_reduction_for_vardecimal sp_execute_external_script sp_execute sp_executesql sp_expired_subscription_cleanup sp_filestream_force_garbage_collection sp_filestream_recalculate_container_size sp_firstonly_bitmap sp_fkeys sp_flush_commit_table_on_demand sp_flush_commit_table sp_flush_CT_internal_table_on_demand sp_flush_log sp_foreign_keys_rowset_rmt sp_foreign_keys_rowset sp_foreign_keys_rowset2 sp_foreign_keys_rowset3 sp_foreignkeys sp_fulltext_catalog sp_fulltext_column sp_fulltext_database sp_fulltext_getdata sp_fulltext_keymappings sp_fulltext_load_thesaurus_file sp_fulltext_pendingchanges sp_fulltext_recycle_crawl_log sp_fulltext_semantic_register_language_statistics_db sp_fulltext_semantic_unregister_language_statistics_db sp_fulltext_service sp_fulltext_table sp_generate_agent_parameter sp_generatefilters sp_get_database_scoped_credential sp_get_distributor sp_get_job_status_mergesubscription_agent sp_get_mergepublishedarticleproperties sp_get_Oracle_publisher_metadata sp_get_query_template sp_get_redirected_publisher sp_getagentparameterlist sp_getapplock sp_getbindtoken sp_getdefaultdatatypemapping sp_getmergedeletetype sp_getProcessorUsage sp_getpublisherlink sp_getqueuedarticlesynctraninfo sp_getqueuedrows sp_getschemalock sp_getsqlqueueversion sp_getsubscription_status_hsnapshot sp_getsubscriptiondtspackagename sp_gettopologyinfo sp_getVolumeFreeSpace sp_grant_publication_access sp_grantdbaccess sp_grantlogin sp_help_agent_default sp_help_agent_parameter sp_help_agent_profile sp_help_datatype_mapping sp_help_fulltext_catalog_components sp_help_fulltext_catalogs_cursor sp_help_fulltext_catalogs sp_help_fulltext_columns_cursor sp_help_fulltext_columns sp_help_fulltext_system_components sp_help_fulltext_tables_cursor 
sp_help_fulltext_tables sp_help_log_shipping_alert_job sp_help_log_shipping_monitor_primary sp_help_log_shipping_monitor_secondary sp_help_log_shipping_monitor sp_help_log_shipping_primary_database sp_help_log_shipping_primary_secondary sp_help_log_shipping_secondary_database sp_help_log_shipping_secondary_primary sp_help_peerconflictdetection sp_help_publication_access sp_help_spatial_geography_histogram sp_help_spatial_geography_index_xml sp_help_spatial_geography_index sp_help_spatial_geometry_histogram sp_help_spatial_geometry_index_xml sp_help_spatial_geometry_index sp_help sp_helpallowmerge_publication sp_helparticle sp_helparticlecolumns sp_helparticledts sp_helpconstraint sp_helpdatatypemap sp_helpdb sp_helpdbfixedrole sp_helpdevice sp_helpdistpublisher sp_helpdistributiondb sp_helpdistributor_properties sp_helpdistributor sp_helpdynamicsnapshot_job sp_helpextendedproc sp_helpfile sp_helpfilegroup sp_helpindex sp_helplanguage sp_helplinkedsrvlogin sp_helplogins sp_helplogreader_agent sp_helpmergealternatepublisher sp_helpmergearticle sp_helpmergearticlecolumn sp_helpmergearticleconflicts sp_helpmergeconflictrows sp_helpmergedeleteconflictrows sp_helpmergefilter sp_helpmergelogfiles sp_helpmergelogfileswithdata sp_helpmergelogsettings sp_helpmergepartition sp_helpmergepublication sp_helpmergepullsubscription sp_helpmergesubscription sp_helpntgroup sp_helppeerrequests sp_helppeerresponses sp_helppublication_snapshot sp_helppublication sp_helppublicationsync sp_helppullsubscription sp_helpqreader_agent sp_helpremotelogin sp_helpreplfailovermode sp_helpreplicationdb sp_helpreplicationdboption sp_helpreplicationoption sp_helprole sp_helprolemember sp_helprotect sp_helpserver sp_helpsort sp_helpsrvrole sp_helpsrvrolemember sp_helpstats sp_helpsubscriberinfo sp_helpsubscription_properties sp_helpsubscription sp_helpsubscriptionerrors sp_helptext sp_helptracertokenhistory sp_helptracertokens sp_helptrigger sp_helpuser sp_helpxactsetjob sp_http_generate_wsdl_complex 
sp_http_generate_wsdl_defaultcomplexorsimple sp_http_generate_wsdl_defaultsimpleorcomplex sp_http_generate_wsdl_simple sp_identitycolumnforreplication sp_IH_LR_GetCacheData sp_IHadd_sync_command sp_IHarticlecolumn sp_IHget_loopback_detection sp_IHScriptIdxFile sp_IHScriptSchFile sp_IHValidateRowFilter sp_IHXactSetJob sp_indexcolumns_managed sp_indexes_100_rowset sp_indexes_100_rowset2 sp_indexes_90_rowset_rmt sp_indexes_90_rowset sp_indexes_90_rowset2 sp_indexes_managed sp_indexes_rowset_rmt sp_indexes_rowset sp_indexes_rowset2 sp_indexes sp_indexoption sp_invalidate_textptr sp_is_makegeneration_needed sp_ivindexhasnullcols sp_kill_filestream_non_transacted_handles sp_kill_oldest_transaction_on_secondary sp_lightweightmergemetadataretentioncleanup sp_link_publication sp_linkedservers_rowset sp_linkedservers_rowset2 sp_linkedservers sp_lock sp_logshippinginstallmetadata sp_lookupcustomresolver sp_mapdown_bitmap sp_markpendingschemachange sp_marksubscriptionvalidation sp_memory_optimized_cs_migration sp_mergearticlecolumn sp_mergecleanupmetadata sp_mergedummyupdate sp_mergemetadataretentioncleanup sp_mergesubscription_cleanup sp_mergesubscriptionsummary sp_migrate_user_to_contained sp_monitor sp_MS_marksystemobject sp_MS_replication_installed sp_MSacquireHeadofQueueLock sp_MSacquireserverresourcefordynamicsnapshot sp_MSacquireSlotLock sp_MSacquiresnapshotdeliverysessionlock sp_MSactivate_auto_sub sp_MSactivatelogbasedarticleobject sp_MSactivateprocedureexecutionarticleobject sp_MSadd_anonymous_agent sp_MSadd_article sp_MSadd_compensating_cmd sp_MSadd_distribution_agent sp_MSadd_distribution_history sp_MSadd_dynamic_snapshot_location sp_MSadd_filteringcolumn sp_MSadd_log_shipping_error_detail sp_MSadd_log_shipping_history_detail sp_MSadd_logreader_agent sp_MSadd_logreader_history sp_MSadd_merge_agent sp_MSadd_merge_anonymous_agent sp_MSadd_merge_history sp_MSadd_merge_history90 sp_MSadd_merge_subscription sp_MSadd_mergereplcommand sp_MSadd_mergesubentry_indistdb 
sp_MSadd_publication sp_MSadd_qreader_agent sp_MSadd_qreader_history sp_MSadd_repl_alert sp_MSadd_repl_command sp_MSadd_repl_commands27hp sp_MSadd_repl_error sp_MSadd_replcmds_mcit sp_MSadd_replmergealert sp_MSadd_snapshot_agent sp_MSadd_snapshot_history sp_MSadd_subscriber_info sp_MSadd_subscriber_schedule sp_MSadd_subscription_3rd sp_MSadd_subscription sp_MSadd_tracer_history sp_MSadd_tracer_token sp_MSaddanonymousreplica sp_MSadddynamicsnapshotjobatdistributor sp_MSaddguidcolumn sp_MSaddguidindex sp_MSaddinitialarticle sp_MSaddinitialpublication sp_MSaddinitialschemaarticle sp_MSaddinitialsubscription sp_MSaddlightweightmergearticle sp_MSaddmergedynamicsnapshotjob sp_MSaddmergetriggers_from_template sp_MSaddmergetriggers_internal sp_MSaddmergetriggers sp_MSaddpeerlsn sp_MSaddsubscriptionarticles sp_MSadjust_pub_identity sp_MSagent_retry_stethoscope sp_MSagent_stethoscope sp_MSallocate_new_identity_range sp_MSalreadyhavegeneration sp_MSanonymous_status sp_MSarticlecleanup sp_MSbrowsesnapshotfolder sp_MScache_agent_parameter sp_MScdc_capture_job sp_MScdc_cleanup_job sp_MScdc_db_ddl_event sp_MScdc_ddl_event sp_MScdc_logddl sp_MSchange_article sp_MSchange_distribution_agent_properties sp_MSchange_logreader_agent_properties sp_MSchange_merge_agent_properties sp_MSchange_mergearticle sp_MSchange_mergepublication sp_MSchange_originatorid sp_MSchange_priority sp_MSchange_publication sp_MSchange_retention_period_unit sp_MSchange_retention sp_MSchange_snapshot_agent_properties sp_MSchange_subscription_dts_info sp_MSchangearticleresolver sp_MSchangedynamicsnapshotjobatdistributor sp_MSchangedynsnaplocationatdistributor sp_MSchangeobjectowner sp_MScheck_agent_instance sp_MScheck_dropobject sp_MScheck_Jet_Subscriber sp_MScheck_logicalrecord_metadatamatch sp_MScheck_merge_subscription_count sp_MScheck_pub_identity sp_MScheck_pull_access sp_MScheck_snapshot_agent sp_MScheck_subscription_expiry sp_MScheck_subscription_partition sp_MScheck_subscription sp_MScheck_tran_retention 
sp_MScheckexistsgeneration sp_MScheckexistsrecguid sp_MScheckfailedprevioussync sp_MScheckidentityrange sp_MScheckIsPubOfSub sp_MSchecksharedagentforpublication sp_MSchecksnapshotstatus sp_MScleanup_agent_entry sp_MScleanup_conflict sp_MScleanup_publication_ADinfo sp_MScleanup_subscription_distside_entry sp_MScleanupdynamicsnapshotfolder sp_MScleanupdynsnapshotvws sp_MSCleanupForPullReinit sp_MScleanupmergepublisher_internal sp_MScleanupmergepublisher sp_MSclear_dynamic_snapshot_location sp_MSclearresetpartialsnapshotprogressbit sp_MScomputelastsentgen sp_MScomputemergearticlescreationorder sp_MScomputemergeunresolvedrefs sp_MSconflicttableexists sp_MScreate_all_article_repl_views sp_MScreate_article_repl_views sp_MScreate_dist_tables sp_MScreate_logical_record_views sp_MScreate_sub_tables sp_MScreate_tempgenhistorytable sp_MScreatedisabledmltrigger sp_MScreatedummygeneration sp_MScreateglobalreplica sp_MScreatelightweightinsertproc sp_MScreatelightweightmultipurposeproc sp_MScreatelightweightprocstriggersconstraints sp_MScreatelightweightupdateproc sp_MScreatemergedynamicsnapshot sp_MScreateretry sp_MSdbuseraccess sp_MSdbuserpriv sp_MSdefer_check sp_MSdelete_tracer_history sp_MSdeletefoldercontents sp_MSdeletemetadataactionrequest sp_MSdeletepeerconflictrow sp_MSdeleteretry sp_MSdeletetranconflictrow sp_MSdelgenzero sp_MSdelrow sp_MSdelrowsbatch_downloadonly sp_MSdelrowsbatch sp_MSdelsubrows sp_MSdelsubrowsbatch sp_MSdependencies sp_MSdetect_nonlogged_shutdown sp_MSdetectinvalidpeerconfiguration sp_MSdetectinvalidpeersubscription sp_MSdist_activate_auto_sub sp_MSdist_adjust_identity sp_MSdistpublisher_cleanup sp_MSdistribution_counters sp_MSdistributoravailable sp_MSdodatabasesnapshotinitiation sp_MSdopartialdatabasesnapshotinitiation sp_MSdrop_6x_publication sp_MSdrop_6x_replication_agent sp_MSdrop_anonymous_entry sp_MSdrop_article sp_MSdrop_distribution_agent sp_MSdrop_distribution_agentid_dbowner_proxy sp_MSdrop_dynamic_snapshot_agent sp_MSdrop_logreader_agent 
sp_MSdrop_merge_agent sp_MSdrop_merge_subscription sp_MSdrop_publication sp_MSdrop_qreader_history sp_MSdrop_snapshot_agent sp_MSdrop_snapshot_dirs sp_MSdrop_subscriber_info sp_MSdrop_subscription_3rd sp_MSdrop_subscription sp_MSdrop_tempgenhistorytable sp_MSdroparticleconstraints sp_MSdroparticletombstones sp_MSdropconstraints sp_MSdropdynsnapshotvws sp_MSdropfkreferencingarticle sp_MSdropmergearticle sp_MSdropmergedynamicsnapshotjob sp_MSdropobsoletearticle sp_MSdropretry sp_MSdroptemptable sp_MSdummyupdate_logicalrecord sp_MSdummyupdate sp_MSdummyupdate90 sp_MSdummyupdatelightweight sp_MSdynamicsnapshotjobexistsatdistributor sp_MSenable_publication_for_het_sub sp_MSensure_single_instance sp_MSenum_distribution_s sp_MSenum_distribution_sd sp_MSenum_distribution sp_MSenum_logicalrecord_changes sp_MSenum_logreader_s sp_MSenum_logreader_sd sp_MSenum_logreader sp_MSenum_merge_agent_properties sp_MSenum_merge_s sp_MSenum_merge_sd sp_MSenum_merge_subscriptions_90_publication sp_MSenum_merge_subscriptions_90_publisher sp_MSenum_merge_subscriptions sp_MSenum_merge sp_MSenum_metadataaction_requests sp_MSenum_qreader_s sp_MSenum_qreader_sd sp_MSenum_qreader sp_MSenum_replication_agents sp_MSenum_replication_job sp_MSenum_replqueues sp_MSenum_replsqlqueues sp_MSenum_snapshot_s sp_MSenum_snapshot_sd sp_MSenum_snapshot sp_MSenum_subscriptions sp_MSenumallpublications sp_MSenumallsubscriptions sp_MSenumarticleslightweight sp_MSenumchanges_belongtopartition sp_MSenumchanges_notbelongtopartition sp_MSenumchanges sp_MSenumchangesdirect sp_MSenumchangeslightweight sp_MSenumcolumns sp_MSenumcolumnslightweight sp_MSenumdeletes_forpartition sp_MSenumdeleteslightweight sp_MSenumdeletesmetadata sp_MSenumdistributionagentproperties sp_MSenumerate_PAL sp_MSenumgenerations sp_MSenumgenerations90 sp_MSenumpartialchanges sp_MSenumpartialchangesdirect sp_MSenumpartialdeletes sp_MSenumpubreferences sp_MSenumreplicas sp_MSenumreplicas90 sp_MSenumretries sp_MSenumschemachange 
sp_MSenumsubscriptions sp_MSenumthirdpartypublicationvendornames sp_MSestimatemergesnapshotworkload sp_MSestimatesnapshotworkload sp_MSevalsubscriberinfo sp_MSevaluate_change_membership_for_all_articles_in_pubid sp_MSevaluate_change_membership_for_pubid sp_MSevaluate_change_membership_for_row sp_MSexecwithlsnoutput sp_MSfast_delete_trans sp_MSfetchAdjustidentityrange sp_MSfetchidentityrange sp_MSfillupmissingcols sp_MSfilterclause sp_MSfix_6x_tasks sp_MSfixlineageversions sp_MSFixSubColumnBitmaps sp_MSfixupbeforeimagetables sp_MSflush_access_cache sp_MSforce_drop_distribution_jobs sp_MSforcereenumeration sp_MSforeach_worker sp_MSforeachdb sp_MSforeachtable sp_MSgenerateexpandproc sp_MSget_agent_names sp_MSget_attach_state sp_MSget_DDL_after_regular_snapshot sp_MSget_dynamic_snapshot_location sp_MSget_identity_range_info sp_MSget_jobstate sp_MSget_last_transaction sp_MSget_latest_peerlsn sp_MSget_load_hint sp_MSget_log_shipping_new_sessionid sp_MSget_logicalrecord_lineage sp_MSget_max_used_identity sp_MSget_min_seqno sp_MSget_MSmerge_rowtrack_colinfo sp_MSget_new_xact_seqno sp_MSget_oledbinfo sp_MSget_partitionid_eval_proc sp_MSget_publication_from_taskname sp_MSget_publisher_rpc sp_MSget_repl_cmds_anonymous sp_MSget_repl_commands sp_MSget_repl_error sp_MSget_session_statistics sp_MSget_shared_agent sp_MSget_snapshot_history sp_MSget_subscriber_partition_id sp_MSget_subscription_dts_info sp_MSget_subscription_guid sp_MSget_synctran_commands sp_MSget_type_wrapper sp_MSgetagentoffloadinfo sp_MSgetalertinfo sp_MSgetalternaterecgens sp_MSgetarticlereinitvalue sp_MSgetchangecount sp_MSgetconflictinsertproc sp_MSgetconflicttablename sp_MSGetCurrentPrincipal sp_MSgetdatametadatabatch sp_MSgetdbversion sp_MSgetdynamicsnapshotapplock sp_MSgetdynsnapvalidationtoken sp_MSgetgenstatus4rows sp_MSgetisvalidwindowsloginfromdistributor sp_MSgetlastrecgen sp_MSgetlastsentgen sp_MSgetlastsentrecgens sp_MSgetlastupdatedtime sp_MSgetlightweightmetadatabatch 
sp_MSgetmakegenerationapplock_90 sp_MSgetmakegenerationapplock sp_MSgetmaxbcpgen sp_MSgetmaxsnapshottimestamp sp_MSgetmergeadminapplock sp_MSgetmetadata_changedlogicalrecordmembers sp_MSgetmetadatabatch sp_MSgetmetadatabatch90 sp_MSgetmetadatabatch90new sp_MSgetonerow sp_MSgetonerowlightweight sp_MSgetpeerconflictrow sp_MSgetpeerlsns sp_MSgetpeertopeercommands sp_MSgetpeerwinnerrow sp_MSgetpubinfo sp_MSgetreplicainfo sp_MSgetreplicastate sp_MSgetrowmetadata sp_MSgetrowmetadatalightweight sp_MSGetServerProperties sp_MSgetsetupbelong_cost sp_MSgetsubscriberinfo sp_MSgetsupportabilitysettings sp_MSgettrancftsrcrow sp_MSgettranconflictrow sp_MSgetversion sp_MSgrantconnectreplication sp_MShaschangeslightweight sp_MShasdbaccess sp_MShelp_article sp_MShelp_distdb sp_MShelp_distribution_agentid sp_MShelp_identity_property sp_MShelp_logreader_agentid sp_MShelp_merge_agentid sp_MShelp_profile sp_MShelp_profilecache sp_MShelp_publication sp_MShelp_repl_agent sp_MShelp_replication_status sp_MShelp_replication_table sp_MShelp_snapshot_agent sp_MShelp_snapshot_agentid sp_MShelp_subscriber_info sp_MShelp_subscription_status sp_MShelp_subscription sp_MShelpcolumns sp_MShelpconflictpublications sp_MShelpcreatebeforetable sp_MShelpdestowner sp_MShelpdynamicsnapshotjobatdistributor sp_MShelpfulltextindex sp_MShelpfulltextscript sp_MShelpindex sp_MShelplogreader_agent sp_MShelpmergearticles sp_MShelpmergeconflictcounts sp_MShelpmergedynamicsnapshotjob sp_MShelpmergeidentity sp_MShelpmergeschemaarticles sp_MShelpobjectpublications sp_MShelpreplicationtriggers sp_MShelpsnapshot_agent sp_MShelpsummarypublication sp_MShelptracertokenhistory sp_MShelptracertokens sp_MShelptranconflictcounts sp_MShelptype sp_MShelpvalidationdate sp_MSIfExistsSubscription sp_MSindexspace sp_MSinit_publication_access sp_MSinit_subscription_agent sp_MSinitdynamicsubscriber sp_MSinsert_identity sp_MSinsertdeleteconflict sp_MSinserterrorlineage sp_MSinsertgenerationschemachanges sp_MSinsertgenhistory 
sp_MSinsertlightweightschemachange sp_MSinsertschemachange sp_MSinvalidate_snapshot sp_MSisnonpkukupdateinconflict sp_MSispeertopeeragent sp_MSispkupdateinconflict sp_MSispublicationqueued sp_MSisreplmergeagent sp_MSissnapshotitemapplied sp_MSkilldb sp_MSlock_auto_sub sp_MSlock_distribution_agent sp_MSlocktable sp_MSloginmappings sp_MSmakearticleprocs sp_MSmakebatchinsertproc sp_MSmakebatchupdateproc sp_MSmakeconflictinsertproc sp_MSmakectsview sp_MSmakedeleteproc sp_MSmakedynsnapshotvws sp_MSmakeexpandproc sp_MSmakegeneration sp_MSmakeinsertproc sp_MSmakemetadataselectproc sp_MSmakeselectproc sp_MSmakesystableviews sp_MSmakeupdateproc sp_MSmap_partitionid_to_generations sp_MSmarkreinit sp_MSmatchkey sp_MSmerge_alterschemaonly sp_MSmerge_altertrigger sp_MSmerge_alterview sp_MSmerge_ddldispatcher sp_MSmerge_getgencount sp_MSmerge_getgencur_public sp_MSmerge_is_snapshot_required sp_MSmerge_log_identity_range_allocations sp_MSmerge_parsegenlist sp_MSmerge_upgrade_subscriber sp_MSmergesubscribedb sp_MSmergeupdatelastsyncinfo sp_MSneedmergemetadataretentioncleanup sp_MSNonSQLDDL sp_MSNonSQLDDLForSchemaDDL sp_MSobjectprivs sp_MSpeerapplyresponse sp_MSpeerapplytopologyinfo sp_MSpeerconflictdetection_statuscollection_applyresponse sp_MSpeerconflictdetection_statuscollection_sendresponse sp_MSpeerconflictdetection_topology_applyresponse sp_MSpeerdbinfo sp_MSpeersendresponse sp_MSpeersendtopologyinfo sp_MSpeertopeerfwdingexec sp_MSpost_auto_proc sp_MSpostapplyscript_forsubscriberprocs sp_MSprep_exclusive sp_MSprepare_mergearticle sp_MSprofile_in_use sp_MSproxiedmetadata sp_MSproxiedmetadatabatch sp_MSproxiedmetadatalightweight sp_MSpub_adjust_identity sp_MSpublication_access sp_MSpublicationcleanup sp_MSpublicationview sp_MSquery_syncstates sp_MSquerysubtype sp_MSrecordsnapshotdeliveryprogress sp_MSreenable_check sp_MSrefresh_anonymous sp_MSrefresh_publisher_idrange sp_MSregenerate_mergetriggersprocs sp_MSregisterdynsnapseqno sp_MSregistermergesnappubid 
sp_MSregistersubscription sp_MSreinit_failed_subscriptions sp_MSreinit_hub sp_MSreinit_subscription sp_MSreinitoverlappingmergepublications sp_MSreleasedynamicsnapshotapplock sp_MSreleasemakegenerationapplock sp_MSreleasemergeadminapplock sp_MSreleaseSlotLock sp_MSreleasesnapshotdeliverysessionlock sp_MSremove_mergereplcommand sp_MSremoveoffloadparameter sp_MSrepl_agentstatussummary sp_MSrepl_backup_complete sp_MSrepl_backup_start sp_MSrepl_check_publisher sp_MSrepl_createdatatypemappings sp_MSrepl_distributionagentstatussummary sp_MSrepl_dropdatatypemappings sp_MSrepl_enumarticlecolumninfo sp_MSrepl_enumpublications sp_MSrepl_enumpublishertables sp_MSrepl_enumsubscriptions sp_MSrepl_enumtablecolumninfo sp_MSrepl_FixPALRole sp_MSrepl_getdistributorinfo sp_MSrepl_getpkfkrelation sp_MSrepl_gettype_mappings sp_MSrepl_helparticlermo sp_MSrepl_init_backup_lsns sp_MSrepl_isdbowner sp_MSrepl_IsLastPubInSharedSubscription sp_MSrepl_IsUserInAnyPAL sp_MSrepl_linkedservers_rowset sp_MSrepl_mergeagentstatussummary sp_MSrepl_PAL_rolecheck sp_MSrepl_raiserror sp_MSrepl_schema sp_MSrepl_setNFR sp_MSrepl_snapshot_helparticlecolumns sp_MSrepl_snapshot_helppublication sp_MSrepl_startup_internal sp_MSrepl_startup sp_MSrepl_subscription_rowset sp_MSrepl_testadminconnection sp_MSrepl_testconnection sp_MSreplagentjobexists sp_MSreplcheck_permission sp_MSreplcheck_pull sp_MSreplcheck_subscribe_withddladmin sp_MSreplcheck_subscribe sp_MSreplcheckoffloadserver sp_MSreplcopyscriptfile sp_MSreplraiserror sp_MSreplremoveuncdir sp_MSreplupdateschema sp_MSrequestreenumeration_lightweight sp_MSrequestreenumeration sp_MSreset_attach_state sp_MSreset_queued_reinit sp_MSreset_subscription_seqno sp_MSreset_subscription sp_MSreset_synctran_bit sp_MSreset_transaction sp_MSresetsnapshotdeliveryprogress sp_MSrestoresavedforeignkeys sp_MSretrieve_publication_attributes sp_MSscript_article_view sp_MSscript_dri sp_MSscript_pub_upd_trig sp_MSscript_sync_del_proc sp_MSscript_sync_del_trig 
sp_MSscript_sync_ins_proc sp_MSscript_sync_ins_trig sp_MSscript_sync_upd_proc sp_MSscript_sync_upd_trig sp_MSscriptcustomdelproc sp_MSscriptcustominsproc sp_MSscriptcustomupdproc sp_MSscriptdatabase sp_MSscriptdb_worker sp_MSscriptforeignkeyrestore sp_MSscriptsubscriberprocs sp_MSscriptviewproc sp_MSsendtosqlqueue sp_MSset_dynamic_filter_options sp_MSset_logicalrecord_metadata sp_MSset_new_identity_range sp_MSset_oledb_prop sp_MSset_snapshot_xact_seqno sp_MSset_sub_guid sp_MSset_subscription_properties sp_MSsetaccesslist sp_MSsetalertinfo sp_MSsetartprocs sp_MSsetbit sp_MSsetconflictscript sp_MSsetconflicttable sp_MSsetcontext_bypasswholeddleventbit sp_MSsetcontext_replagent sp_MSsetgentozero sp_MSsetlastrecgen sp_MSsetlastsentgen sp_MSsetreplicainfo sp_MSsetreplicaschemaversion sp_MSsetreplicastatus sp_MSsetrowmetadata sp_MSSetServerProperties sp_MSsetsubscriberinfo sp_MSsettopology sp_MSsetup_identity_range sp_MSsetup_partition_groups sp_MSsetup_use_partition_groups sp_MSsetupbelongs sp_MSsetupnosyncsubwithlsnatdist_cleanup sp_MSsetupnosyncsubwithlsnatdist_helper sp_MSsetupnosyncsubwithlsnatdist sp_MSSharedFixedDisk sp_MSSQLDMO70_version sp_MSSQLDMO80_version sp_MSSQLDMO90_version sp_MSSQLOLE_version sp_MSSQLOLE65_version sp_MSstartdistribution_agent sp_MSstartmerge_agent sp_MSstartsnapshot_agent sp_MSstopdistribution_agent sp_MSstopmerge_agent sp_MSstopsnapshot_agent sp_MSsub_check_identity sp_MSsub_set_identity sp_MSsubscription_status sp_MSsubscriptionvalidated sp_MStablechecks sp_MStablekeys sp_MStablerefs sp_MStablespace sp_MStestbit sp_MStran_ddlrepl sp_MStran_is_snapshot_required sp_MStrypurgingoldsnapshotdeliveryprogress sp_MSuniquename sp_MSunmarkifneeded sp_MSunmarkreplinfo sp_MSunmarkschemaobject sp_MSunregistersubscription sp_MSupdate_agenttype_default sp_MSupdate_singlelogicalrecordmetadata sp_MSupdate_subscriber_info sp_MSupdate_subscriber_schedule sp_MSupdate_subscriber_tracer_history sp_MSupdate_subscription sp_MSupdate_tracer_history 
sp_MSupdatecachedpeerlsn sp_MSupdategenerations_afterbcp sp_MSupdategenhistory sp_MSupdateinitiallightweightsubscription sp_MSupdatelastsyncinfo sp_MSupdatepeerlsn sp_MSupdaterecgen sp_MSupdatereplicastate sp_MSupdatesysmergearticles sp_MSuplineageversion sp_MSuploadsupportabilitydata sp_MSuselightweightreplication sp_MSvalidate_dest_recgen sp_MSvalidate_subscription sp_MSvalidate_wellpartitioned_articles sp_MSvalidatearticle sp_MSwritemergeperfcounter sp_new_parallel_nested_tran_id sp_OACreate sp_OADestroy sp_OAGetErrorInfo sp_OAGetProperty sp_OAMethod sp_OASetProperty sp_OAStop sp_objectfilegroup sp_oledb_database sp_oledb_defdb sp_oledb_deflang sp_oledb_language sp_oledb_ro_usrname sp_oledbinfo sp_ORbitmap sp_password sp_peerconflictdetection_tableaug sp_pkeys sp_polybase_join_group sp_polybase_leave_group sp_PostAgentInfo sp_posttracertoken sp_prepare sp_prepexec sp_prepexecrpc sp_primary_keys_rowset_rmt sp_primary_keys_rowset sp_primary_keys_rowset2 sp_primarykeys sp_procedure_params_100_managed sp_procedure_params_100_rowset sp_procedure_params_100_rowset2 sp_procedure_params_90_rowset sp_procedure_params_90_rowset2 sp_procedure_params_managed sp_procedure_params_rowset sp_procedure_params_rowset2 sp_procedures_rowset sp_procedures_rowset2 sp_processlogshippingmonitorhistory sp_processlogshippingmonitorprimary sp_processlogshippingmonitorsecondary sp_processlogshippingretentioncleanup sp_procoption sp_prop_oledb_provider sp_provider_types_100_rowset sp_provider_types_90_rowset sp_provider_types_rowset sp_publication_validation sp_publicationsummary sp_publishdb sp_publisherproperty sp_query_store_flush_db sp_query_store_force_plan sp_query_store_remove_plan sp_query_store_remove_query sp_query_store_reset_exec_stats sp_query_store_unforce_plan sp_rda_deauthorize_db sp_rda_get_rpo_duration sp_rda_reauthorize_db sp_rda_reconcile_batch sp_rda_reconcile_columns sp_rda_reconcile_indexes sp_rda_set_query_mode sp_rda_set_rpo_duration sp_rda_test_connection 
sp_readerrorlog sp_recompile sp_redirect_publisher sp_refresh_heterogeneous_publisher sp_refresh_log_shipping_monitor sp_refresh_parameter_encryption sp_refreshsqlmodule sp_refreshsubscriptions sp_refreshview sp_register_custom_scripting sp_registercustomresolver sp_reinitmergepullsubscription sp_reinitmergesubscription sp_reinitpullsubscription sp_reinitsubscription sp_releaseapplock sp_releaseschemalock sp_remote_data_archive_event sp_remoteoption sp_removedbreplication sp_removedistpublisherdbreplication sp_removesrvreplication sp_rename sp_renamedb sp_repl_generate_subscriber_event sp_repl_generateevent sp_repladdcolumn sp_replcleanupccsprocs sp_replcmds sp_replcounters sp_replddlparser sp_repldeletequeuedtran sp_repldone sp_repldropcolumn sp_replflush sp_replgetparsedddlcmd sp_replhelp sp_replica sp_replication_agent_checkup sp_replicationdboption sp_replincrementlsn sp_replmonitorchangepublicationthreshold sp_replmonitorhelpmergesession sp_replmonitorhelpmergesessiondetail sp_replmonitorhelpmergesubscriptionmoreinfo sp_replmonitorhelppublication sp_replmonitorhelppublicationthresholds sp_replmonitorhelppublisher sp_replmonitorhelpsubscription sp_replmonitorrefreshjob sp_replmonitorsubscriptionpendingcmds sp_replpostsyncstatus sp_replqueuemonitor sp_replrestart sp_replrethrow sp_replsendtoqueue sp_replsetoriginator sp_replsetsyncstatus sp_replshowcmds sp_replsqlqgetrows sp_replsync sp_repltrans sp_replwritetovarbin sp_requestpeerresponse sp_requestpeertopologyinfo sp_reserve_http_namespace sp_reset_connection sp_reset_session_context sp_resetsnapshotdeliveryprogress sp_resetstatus sp_resign_database sp_resolve_logins sp_restoredbreplication sp_restoremergeidentityrange sp_resyncexecute sp_resyncexecutesql sp_resyncmergesubscription sp_resyncprepare sp_resyncuniquetable sp_revoke_publication_access sp_revokedbaccess sp_revokelogin sp_rollback_parallel_nested_tran sp_schemafilter sp_schemata_rowset sp_script_reconciliation_delproc 
sp_script_reconciliation_insproc sp_script_reconciliation_sinsproc sp_script_reconciliation_vdelproc sp_script_reconciliation_xdelproc sp_script_synctran_commands sp_scriptdelproc sp_scriptdynamicupdproc sp_scriptinsproc sp_scriptmappedupdproc sp_scriptpublicationcustomprocs sp_scriptsinsproc sp_scriptsubconflicttable sp_scriptsupdproc sp_scriptupdproc sp_scriptvdelproc sp_scriptvupdproc sp_scriptxdelproc sp_scriptxupdproc sp_sequence_get_range sp_server_diagnostics sp_server_info sp_serveroption sp_set_session_context sp_setapprole sp_SetAutoSAPasswordAndDisable sp_setdefaultdatatypemapping sp_setnetname sp_SetOBDCertificate sp_setOraclepackageversion sp_setreplfailovermode sp_setsubscriptionxactseqno sp_settriggerorder sp_setuserbylogin sp_showcolv sp_showlineage sp_showmemo_xml sp_showpendingchanges sp_showrowreplicainfo sp_sm_detach sp_spaceused_remote_data_archive sp_spaceused sp_sparse_columns_100_rowset sp_special_columns_100 sp_special_columns_90 sp_special_columns sp_sproc_columns_100 sp_sproc_columns_90 sp_sproc_columns sp_sqlagent_add_job sp_sqlagent_add_jobstep sp_sqlagent_delete_job sp_sqlagent_help_jobstep sp_sqlagent_log_job_history sp_sqlagent_start_job sp_sqlagent_stop_job sp_sqlagent_verify_database_context sp_sqlagent_write_jobstep_log sp_sqlexec sp_srvrolepermission sp_start_user_instance sp_startmergepullsubscription_agent sp_startmergepushsubscription_agent sp_startpublication_snapshot sp_startpullsubscription_agent sp_startpushsubscription_agent sp_statistics_100 sp_statistics_rowset sp_statistics_rowset2 sp_statistics sp_stopmergepullsubscription_agent sp_stopmergepushsubscription_agent sp_stoppublication_snapshot sp_stoppullsubscription_agent sp_stoppushsubscription_agent sp_stored_procedures sp_subscribe sp_subscription_cleanup sp_subscriptionsummary sp_syspolicy_execute_policy sp_syspolicy_subscribe_to_policy_category sp_syspolicy_unsubscribe_from_policy_category sp_syspolicy_update_ddl_trigger sp_syspolicy_update_event_notification 
sp_table_constraints_rowset sp_table_constraints_rowset2 sp_table_privileges_ex sp_table_privileges_rowset_rmt sp_table_privileges_rowset sp_table_privileges_rowset2 sp_table_privileges sp_table_statistics_rowset sp_table_statistics2_rowset sp_table_type_columns_100_rowset sp_table_type_columns_100 sp_table_type_pkeys sp_table_type_primary_keys_rowset sp_table_types_rowset sp_table_types sp_table_validation sp_tablecollations_100 sp_tablecollations_90 sp_tablecollations sp_tableoption sp_tables_ex sp_tables_info_90_rowset_64 sp_tables_info_90_rowset sp_tables_info_90_rowset2_64 sp_tables_info_90_rowset2 sp_tables_info_rowset_64 sp_tables_info_rowset sp_tables_info_rowset2_64 sp_tables_info_rowset2 sp_tables_rowset_rmt sp_tables_rowset sp_tables_rowset2 sp_tables sp_tableswc sp_testlinkedserver sp_trace_create sp_trace_generateevent sp_trace_getdata sp_trace_setevent sp_trace_setfilter sp_trace_setstatus sp_try_set_session_context sp_unbindefault sp_unbindrule sp_unprepare sp_unregister_custom_scripting sp_unregistercustomresolver sp_unsetapprole sp_unsubscribe sp_update_agent_profile sp_update_user_instance sp_updateextendedproperty sp_updatestats sp_upgrade_log_shipping sp_user_counter1 sp_user_counter10 sp_user_counter2 sp_user_counter3 sp_user_counter4 sp_user_counter5 sp_user_counter6 sp_user_counter7 sp_user_counter8 sp_user_counter9 sp_usertypes_rowset_rmt sp_usertypes_rowset sp_usertypes_rowset2 sp_validate_redirected_publisher sp_validate_replica_hosts_as_publishers sp_validatecache sp_validatelogins sp_validatemergepublication sp_validatemergepullsubscription sp_validatemergesubscription sp_validlang sp_validname sp_verifypublisher sp_views_rowset sp_views_rowset2 sp_vupgrade_mergeobjects sp_vupgrade_mergetables sp_vupgrade_replication sp_vupgrade_replsecurity_metadata sp_who sp_who2 sp_xml_preparedocument sp_xml_removedocument sp_xml_schema_rowset sp_xml_schema_rowset2 sp_xp_cmdshell_proxy_account sp_xtp_bind_db_resource_pool 
sp_xtp_checkpoint_force_garbage_collection sp_xtp_control_proc_exec_stats sp_xtp_control_query_exec_stats sp_xtp_flush_temporal_history sp_xtp_kill_active_transactions sp_xtp_merge_checkpoint_files sp_xtp_objects_present sp_xtp_set_memory_quota sp_xtp_slo_can_downgrade sp_xtp_slo_downgrade_finished sp_xtp_slo_prepare_to_downgrade sp_xtp_unbind_db_resource_pool spatial_index_tessellations spatial_indexes spatial_reference_systems spt_values sql_dependencies sql_expression_dependencies sql_logins sql_modules sqlagent_job_history sqlagent_jobs sqlagent_jobsteps_logs sqlagent_jobsteps stats_columns stats symmetric_keys synonyms sys sysallocunits sysaltfiles sysasymkeys sysaudacts sysbinobjs sysbinsubobjs sysbrickfiles syscacheobjects syscerts syscharsets syschildinsts sysclones sysclsobjs syscolpars syscolumns syscomments syscommittab syscompfragments sysconfigures sysconstraints sysconvgroup syscscolsegments syscsdictionaries syscsrowgroups syscurconfigs syscursorcolumns syscursorrefs syscursors syscursortables sysdatabases sysdbfiles sysdbfrag sysdbreg sysdepends sysdercv sysdesend sysdevices sysendpts sysextendedrecoveryforks sysextfileformats sysextsources sysexttables sysfgfrag sysfilegroups sysfiles sysfiles1 sysfoqueues sysforeignkeys sysfos sysftinds sysftproperties sysftsemanticsdb sysftstops sysfulltextcatalogs sysguidrefs sysidxstats sysindexes sysindexkeys sysiscols syslanguages syslnklgns syslockinfo syslogins syslogshippers sysmatrixageforget sysmatrixages sysmatrixbricks sysmatrixconfig sysmatrixmanagers sysmembers sysmessages sysmultiobjrefs sysmultiobjvalues sysnsobjs sysobjects sysobjkeycrypts sysobjvalues sysoledbusers sysopentapes sysowners sysperfinfo syspermissions sysphfg syspriorities sysprivs sysprocesses sysprotects syspru sysprufiles sysqnames sysreferences sysremotelogins sysremsvcbinds sysrmtlgns sysrowsetrefs sysrowsets sysrscols sysrts sysscalartypes sysschobjs sysseobjvalues sysservers syssingleobjrefs syssoftobjrefs syssqlguides 
system_columns system_components_surface_area_configuration system_internals_allocation_units system_internals_partition_columns system_internals_partitions system_objects system_parameters system_sql_modules system_views systypedsubobjs systypes sysusermsgs sysusers syswebmethods sysxlgns sysxmitbody sysxmitqueue sysxmlcomponent sysxmlfacet sysxmlplacement sysxprops sysxsrvs TABLE_CONSTRAINTS TABLE_PRIVILEGES table_types tables TABLES tcp_endpoints time_zone_info trace_categories trace_columns trace_event_bindings trace_events trace_subclass_values traces transmission_queue trigger_event_types trigger_events triggers type_assembly_usages types user_token via_endpoints VIEW_COLUMN_USAGE VIEW_TABLE_USAGE VIEWS views xml_indexes xml_schema_attributes xml_schema_collections xml_schema_component_placements xml_schema_components xml_schema_elements xml_schema_facets xml_schema_model_groups xml_schema_namespaces xml_schema_types xml_schema_wildcard_namespaces xml_schema_wildcards xp_availablemedia xp_cmdshell xp_create_subdir xp_delete_file xp_dirtree xp_enum_oledb_providers xp_enumerrorlogs xp_enumgroups xp_fileexist xp_fixeddrives xp_get_tape_devices xp_getnetname xp_grantlogin xp_instance_regaddmultistring xp_instance_regdeletekey xp_instance_regdeletevalue xp_instance_regenumkeys xp_instance_regenumvalues xp_instance_regread xp_instance_regremovemultistring xp_instance_regwrite xp_logevent xp_loginconfig xp_logininfo xp_msver xp_msx_enlist xp_passAgentInfo xp_prop_oledb_provider xp_qv xp_readerrorlog xp_regaddmultistring xp_regdeletekey xp_regdeletevalue xp_regenumkeys xp_regenumvalues xp_regread xp_regremovemultistring xp_regwrite xp_repl_convert_encrypt_sysadmin_wrapper xp_replposteor xp_revokelogin xp_servicecontrol xp_sprintf xp_sqlagent_enum_jobs xp_sqlagent_is_starting xp_sqlagent_monitor xp_sqlagent_notify xp_sqlagent_param xp_sqlmaint xp_sscanf xp_subdirs xp_sysmail_activate xp_sysmail_attachment_load xp_sysmail_format_query ([\d\w]+)\.([\d\w]+)\.([\d\w]+) 
([\d\w]+)\.([\d\w]+) ([\d\w]+)\.([\d\w]+)\.([\d\w]+) \[([\d\w]+)\]\.\[([\d\w]+)\]\.\[([\d\w]+)\] \[([\d\w]+)\]\.\[([\d\w]+)\] [?,.;()\[\]{}+\-/%*<>^+~!|&]+ [\d\w_]+(?=(\s*\()) @@([\w]+) @([\w]+) \b0[xX][0-9a-fA-F]+ # hex number | ( \b\d+(\.[0-9]+)? #number with optional floating point | \.[0-9]+ #or just starting with floating point ) ([eE][+-]?[0-9]+)? # optional exponent ================================================ FILE: WorkloadViewer/Resources/WorkloadAnalysis.sql ================================================ WITH baseData AS ( SELECT DATEDIFF(minute, Base.end_time, bIn.end_time) AS offset_minutes, bWD.sql_hash, bWD.avg_cpu_us, bWD.min_cpu_us, bWD.max_cpu_us, bWD.sum_cpu_us, bWD.avg_reads, bWD.min_reads, bWD.max_reads, bWD.sum_reads, bWD.avg_writes, bWD.min_writes, bWD.max_writes, bWD.sum_writes, bWD.avg_duration_us, bWD.min_duration_us, bWD.max_duration_us, bWD.sum_duration_us, bWD.execution_count, bIn.duration_minutes, bNQ.normalized_text, bNQ.example_text, bAp.application_name, bDB.database_name, bHS.host_name, bLI.login_name FROM capture.WorkloadDetails AS bWD INNER JOIN capture.Intervals AS bIn ON bIn.interval_id = bWD.interval_id INNER JOIN capture.NormalizedQueries AS bNQ ON bNQ.sql_hash = bWD.sql_hash INNER JOIN capture.Applications AS bAp ON bAp.application_id = bWD.application_id INNER JOIN capture.Databases AS bDB ON bDB.database_id = bWD.database_id INNER JOIN capture.Hosts AS bHS ON bHS.host_id = bWD.host_id INNER JOIN capture.Logins AS bLI ON bLI.login_id = bWD.login_id CROSS APPLY ( SELECT TOP(1) base.end_time FROM capture.Intervals AS base ORDER BY interval_id ) AS Base ) SELECT (offset_minutes / preaggregation) * preaggregation AS offset_minutes, preaggregation AS duration_minutes, sql_hash, application_name, database_name, host_name, login_name, AVG(avg_cpu_us) AS avg_cpu_us, MIN(min_cpu_us) AS min_cpu_us, MAX(max_cpu_us) AS max_cpu_us, SUM(sum_cpu_us) AS sum_cpu_us, AVG(avg_reads) AS avg_reads, MIN(min_reads) AS min_reads, 
MAX(max_reads) AS max_reads, SUM(sum_reads) AS sum_reads, AVG(avg_writes) AS avg_writes, MIN(min_writes) AS min_writes, MAX(max_writes) AS max_writes, SUM(sum_writes) AS sum_writes, AVG(avg_duration_us) AS avg_duration_us, MIN(min_duration_us) AS min_duration_us, MAX(max_duration_us) AS max_duration_us, SUM(sum_duration_us) AS sum_duration_us, SUM(execution_count) AS execution_count FROM baseData GROUP BY (offset_minutes / preaggregation) * preaggregation, duration_minutes % preaggregation, sql_hash, application_name, database_name, host_name, login_name ORDER BY offset_minutes; ================================================ FILE: WorkloadViewer/View/ConnectionInfoDialog.xaml ================================================  ================================================ FILE: WorkloadViewer/View/ConnectionInfoDialog.xaml.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows; using System.Windows.Controls; using System.Windows.Data; using System.Windows.Documents; using System.Windows.Input; using System.Windows.Media; using System.Windows.Media.Imaging; using System.Windows.Shapes; namespace WorkloadViewer.View { /// /// Interaction logic for ConnectionInfoDialog.xaml /// public partial class ConnectionInfoDialog : MahApps.Metro.Controls.Dialogs.CustomDialog { public ConnectionInfoDialog() { InitializeComponent(); } } } ================================================ FILE: WorkloadViewer/View/ConnectionInfoDialogStyle.xaml ================================================  ================================================ FILE: WorkloadViewer/View/ConnectionInfoEditor.xaml ================================================  Baseline Benchmark ================================================ FILE: WorkloadViewer/View/ConnectionInfoEditor.xaml.cs ================================================ using GalaSoft.MvvmLight.Messaging; 
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows; using System.Windows.Automation.Peers; using System.Windows.Automation.Provider; using System.Windows.Controls; using System.Windows.Controls.Primitives; using System.Windows.Data; using System.Windows.Documents; using System.Windows.Input; using System.Windows.Media; using System.Windows.Media.Imaging; using System.Windows.Navigation; using System.Windows.Shapes; using WorkloadViewer.ViewModel; namespace WorkloadViewer.View { /// /// Interaction logic for ConnectionInfoEditor.xaml /// public partial class ConnectionInfoEditor : UserControl { public ConnectionInfoEditor() { InitializeComponent(); Messenger.Default.Register(this, (msg) => ReceiveMessage(msg)); } private void ReceiveMessage(Message msg) { if(msg.Text == "OK") { //Fist of all, remove focus from the current text control and set it to the button Keyboard.Focus(OKButton); // Then fire the click event and its associated command var peer = new ButtonAutomationPeer(OKButton); var invokeProv = peer.GetPattern(PatternInterface.Invoke) as IInvokeProvider; invokeProv.Invoke(); } } private void Baseline_PasswordChanged(object sender, RoutedEventArgs e) { if (DataContext != null) { ((dynamic)DataContext).BaselinePassword = ((PasswordBox)sender).Password; } } private void Benchmark_PasswordChanged(object sender, RoutedEventArgs e) { if (DataContext != null) { ((dynamic)DataContext).BenchmarkPassword = ((PasswordBox)sender).Password; } } } } ================================================ FILE: WorkloadViewer/View/MainWindow.xaml ================================================  ================================================ FILE: WorkloadViewer/View/MainWindow.xaml.cs ================================================ using GalaSoft.MvvmLight.Messaging; using ICSharpCode.AvalonEdit; using MahApps.Metro.Controls; using System; using System.Collections.Generic; using 
System.ComponentModel; using System.IO; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows; using System.Windows.Controls; using System.Windows.Data; using System.Windows.Documents; using System.Windows.Input; using System.Windows.Media; using System.Windows.Media.Imaging; using System.Windows.Navigation; using System.Windows.Shapes; using WorkloadViewer.ViewModel; using Path = System.IO.Path; namespace WorkloadViewer { /// /// Interaction logic for MainWindow.xaml /// public partial class MainWindow : MetroWindow { public MainWindow() { InitializeComponent(); Messenger.Default.Register(this, (msg) => ReceiveSortMessage(msg)); using (var stream = new MemoryStream(WorkloadViewer.Properties.Resources.TSQL)) { using (var reader = new System.Xml.XmlTextReader(stream)) { var highlighting = ICSharpCode.AvalonEdit.Highlighting.Xshd.HighlightingLoader.Load(reader, ICSharpCode.AvalonEdit.Highlighting.HighlightingManager.Instance); QueryText.SyntaxHighlighting = highlighting; QueryDetailText.SyntaxHighlighting = highlighting; } } } private void ReceiveSortMessage(SortColMessage msg) { try { var dgc = Queries.Columns.First(el => el.Header.ToString().Equals(msg.ColumnName)); if(dgc != null) { dgc.SortDirection = msg.Direction; var sd = new SortDescription(dgc.SortMemberPath, msg.Direction); var cvs = (CollectionViewSource)Resources["WorkloadQueries"]; cvs.SortDescriptions.Clear(); cvs.SortDescriptions.Add(new SortDescription(dgc.SortMemberPath, msg.Direction)); } } catch(Exception) { //swallow } } private void DataGridDoubleClick(object sender, MouseButtonEventArgs e) { if(((DataGrid)sender).SelectedItem == null) { return; } Dispatcher.BeginInvoke((Action)(() => MainTabControl.SelectedIndex = 2)); } private void QueryText_MouseDoubleClick(object sender, MouseButtonEventArgs e) { OpenFileWithDefaultApp(sender); } private void OpenFileWithDefaultApp(object sender) { // save text to a temp file and open with windows try { var editor = 
(TextEditor)sender; var docPath = Path.Combine(Path.GetTempPath(), editor.Tag + ".sql"); // Write the string array to a new file named "WriteLines.txt". using (var outputFile = new StreamWriter(docPath)) { outputFile.WriteLine(editor.Text); } System.Diagnostics.Process.Start(docPath); } catch (Exception) { // swallow } } } } ================================================ FILE: WorkloadViewer/ViewModel/ConnectionInfoEditorViewModel.cs ================================================ using GalaSoft.MvvmLight; using GalaSoft.MvvmLight.Command; using GalaSoft.MvvmLight.Messaging; using MahApps.Metro.Controls.Dialogs; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows; using System.Windows.Input; namespace WorkloadViewer.ViewModel { public class ConnectionInfoEditorViewModel : ViewModelBase { private string _baselineServer; private string _baselineDatabase; public string BaselineServer { get { return _baselineServer; } set { _baselineServer = value; if (String.IsNullOrEmpty(BenchmarkServer)) { BenchmarkServer = _baselineServer; RaisePropertyChanged("BenchmarkServer"); } } } public string BaselineDatabase { get { return _baselineDatabase; } set { _baselineDatabase = value; if (String.IsNullOrEmpty(BenchmarkDatabase)) { BenchmarkDatabase = _baselineDatabase; RaisePropertyChanged("BenchmarkDatabase"); } } } public string BaselineSchema { get; set; } public string BaselineUsername { get; set; } public string BaselinePassword { get; set; } public string BenchmarkServer { get; set; } public string BenchmarkDatabase { get; set; } public string BenchmarkSchema { get; set; } public string BenchmarkUsername { get; set; } public string BenchmarkPassword { get; set; } public ICommand CancelCommand { get; set; } public ICommand OKCommand { get; set; } public ICommand KeyDownCommand { get; set; } public bool Cancel = false; private IDialogCoordinator _dialogCoordinator; public Exception Exception; 
public MainViewModel Context; public BaseMetroDialog Dialog; public ConnectionInfoEditorViewModel() { CancelCommand = new RelayCommand(Cancel_Pressed); OKCommand = new RelayCommand(OK_Pressed); KeyDownCommand = new RelayCommand(KeyDown); _dialogCoordinator = DialogCoordinator.Instance; Cancel = false; Exception = null; } private void KeyDown(KeyEventArgs e) { if (e.Key == Key.Enter) { var msg = new Message("OK"); Messenger.Default.Send(msg); } } private async void Cancel_Pressed(RoutedEventArgs e) { Cancel = true; await _dialogCoordinator.HideMetroDialogAsync(Context, Dialog); //App.Current.Shutdown(); } private async void OK_Pressed(RoutedEventArgs e) { Cancel = false; try { Context.SetConnectionInfo(this); } catch (Exception ex) { Exception = ex; } finally { if(Dialog.IsVisible) { await _dialogCoordinator.HideMetroDialogAsync(Context, Dialog); } } } } } ================================================ FILE: WorkloadViewer/ViewModel/DictionaryExtensions.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace WorkloadViewer.ViewModel { public static class DictionaryExtensions { public static void AddOrUpdate( this IDictionary dict, TKey key, TValue addValue) { if (dict.ContainsKey(key)) { dict[key] = addValue; } else { dict.Add(key, addValue); } } public static TValue AddOrUpdate( this IDictionary dict, TKey key, TValue addValue, Func updateValueFactory) { TValue existing; if (dict.TryGetValue(key, out existing)) { addValue = updateValueFactory(key, existing); dict[key] = addValue; } else { dict.Add(key, addValue); } return addValue; } public static TValue AddOrUpdate( this IDictionary dict, TKey key, Func addValueFactory, Func updateValueFactory) { TValue existing; if (dict.TryGetValue(key, out existing)) { existing = updateValueFactory(key, existing); dict[key] = existing; } else { existing = addValueFactory(key); dict.Add(key, existing); } 
return existing; } } } ================================================ FILE: WorkloadViewer/ViewModel/FilterDefinition.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace WorkloadViewer.ViewModel { public class FilterDefinition : IComparable, IEquatable { public string Name { get; set; } public bool IsChecked { get; set; } public int CompareTo(object obj) { var result = -1; if(obj is FilterDefinition) { result = Name.CompareTo(((FilterDefinition)obj).Name); } return result; } public bool Equals(FilterDefinition other) { return CompareTo(other) == 0; } } } ================================================ FILE: WorkloadViewer/ViewModel/LinqExtensions.cs ================================================ using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using System.Text; using System.Threading.Tasks; namespace WorkloadTools.Util { public static class LinqExtensions { public static IEnumerable FullOuterJoin( this IEnumerable left, IEnumerable right, Func leftKeySelector, Func rightKeySelector, Func resultSelector, IEqualityComparer comparator = null, TLeft defaultLeft = default(TLeft), TRight defaultRight = default(TRight)) { if (left == null) { throw new ArgumentNullException("left"); } if (right == null) { throw new ArgumentNullException("right"); } if (leftKeySelector == null) { throw new ArgumentNullException("leftKeySelector"); } if (rightKeySelector == null) { throw new ArgumentNullException("rightKeySelector"); } if (resultSelector == null) { throw new ArgumentNullException("resultSelector"); } comparator = comparator ?? 
EqualityComparer.Default; return FullOuterJoinIterator(left, right, leftKeySelector, rightKeySelector, resultSelector, comparator, defaultLeft, defaultRight); } internal static IEnumerable FullOuterJoinIterator( this IEnumerable left, IEnumerable right, Func leftKeySelector, Func rightKeySelector, Func resultSelector, IEqualityComparer comparator, TLeft defaultLeft, TRight defaultRight) { var leftLookup = left.ToLookup(leftKeySelector, comparator); var rightLookup = right.ToLookup(rightKeySelector, comparator); var keys = leftLookup.Select(g => g.Key).Union(rightLookup.Select(g => g.Key), comparator); foreach (var key in keys) { foreach (var leftValue in leftLookup[key].DefaultIfEmpty(defaultLeft)) { foreach (var rightValue in rightLookup[key].DefaultIfEmpty(defaultRight)) { yield return resultSelector(leftValue, rightValue, key); } } } } } } ================================================ FILE: WorkloadViewer/ViewModel/MainViewModel.cs ================================================ using GalaSoft.MvvmLight; using GalaSoft.MvvmLight.Command; using MahApps.Metro.Controls.Dialogs; using OxyPlot; using OxyPlot.Axes; using OxyPlot.Series; using System; using System.Collections.Generic; using System.Windows.Input; using System.Linq; using WorkloadViewer.Model; using System.Data; using System.Windows; using GalaSoft.MvvmLight.Messaging; using NLog; using System.Threading.Tasks; using System.Threading; using WorkloadTools.Util; namespace WorkloadViewer.ViewModel { public class MainViewModel : ViewModelBase { private static Logger logger = LogManager.GetCurrentClassLogger(); public bool CompareMode { get { return _benchmarkWorkloadAnalysis != null; } } public Visibility CompareModeVisibility { get { if(CompareMode) { return Visibility.Visible; } else { return Visibility.Collapsed; } } } internal Options _options; internal bool _invalidOptions = false; private WorkloadAnalysis _baselineWorkloadAnalysis; private WorkloadAnalysis _benchmarkWorkloadAnalysis; public string 
StatusMessage { get; set; } private PlotModel[] PlotModels = new PlotModel[3]; public PlotModel CpuPlotModel { get; private set; } public PlotModel DurationPlotModel { get; private set; } public PlotModel BatchesPlotModel { get; private set; } public List HostList { get; set; } public List ApplicationList { get; set; } public List DatabaseList { get; set; } public List LoginList { get; set; } public ICommand LoadedCommand { get; set; } public ICommand RenderedCommand { get; set; } public ICommand KeyDownCommand { get; set; } public ICommand ApplyCommand { get; set; } public IEnumerable Queries { get; private set; } public bool Initialized { get; private set; } = false; private IDialogCoordinator _dialogCoordinator; private DateTime _lastAxisAdjust = DateTime.Now; public MainViewModel() { LoadedCommand = new RelayCommand(Loaded); RenderedCommand = new RelayCommand(Rendered); KeyDownCommand = new RelayCommand(KeyDown); ApplyCommand = new RelayCommand(ApplyFilters); _dialogCoordinator = DialogCoordinator.Instance; PlotModels = new PlotModel[3]; } private void ApplyFilters(EventArgs obj) { InitializeCharts(); InitializeQueries(); RefreshAllCharts(); } private void Rendered(EventArgs ev) { if (_invalidOptions) { ShowStatusMessage("ShowConnectionInfoDialog"); ShowConnectionInfoDialog(); } else { ShowStatusMessage("Initializing"); InitializeAll(); ShowStatusMessage("Initialized"); } } private async void InitializeAll() { var controller = await _dialogCoordinator.ShowProgressAsync(this, "Loading data", String.Empty, false); controller.SetIndeterminate(); try { Initialized = false; await Task.Run(() => { InitializeWorkloadAnalysis(); InitializeFilters(); InitializeCharts(); RefreshAllCharts(); }); // This cannot be run async due to threading errors // in AvalonEdit.TextEditor // "TextDocument can be accessed only from the thread that owns it" InitializeQueries(); Initialized = true; } catch (Exception e) { ShowStatusMessage($"Exception: {e.Message}"); await 
_dialogCoordinator.ShowMessageAsync(this, "WorkloadViewer", "Unable to load data: " + e.Message);
ShowConnectionInfoDialog();
}
finally
{
    // Close the progress dialog. CloseAsync can return before the dialog is
    // actually gone, so poll IsOpen and retry with a short sleep.
    await controller.CloseAsync();
    while (controller.IsOpen)
    {
        await controller.CloseAsync();
        Thread.Sleep(5);
    }
}
}

/// <summary>
/// Opens the connection-info metro dialog, pre-filling it from the current
/// baseline/benchmark command-line options so the user can edit them.
/// NOTE(review): async void — exceptions thrown here are unobservable by
/// callers; tolerated only because it is invoked fire-and-forget from UI
/// paths (e.g. the F8 key handler).
/// </summary>
private async void ShowConnectionInfoDialog()
{
    var editor = new View.ConnectionInfoDialog();
    var viewModel = new ConnectionInfoEditorViewModel() { Context = this, Dialog = editor };
    editor.DataContext = viewModel;
    // Baseline connection fields.
    viewModel.BaselineServer = _options.BaselineServer;
    viewModel.BaselineDatabase = _options.BaselineDatabase;
    viewModel.BaselineSchema = _options.BaselineSchema;
    viewModel.BaselineUsername = _options.BaselineUsername;
    viewModel.BaselinePassword = _options.BaselinePassword;
    // Benchmark connection fields.
    viewModel.BenchmarkServer = _options.BenchmarkServer;
    viewModel.BenchmarkDatabase = _options.BenchmarkDatabase;
    viewModel.BenchmarkSchema = _options.BenchmarkSchema;
    viewModel.BenchmarkUsername = _options.BenchmarkUsername;
    viewModel.BenchmarkPassword = _options.BenchmarkPassword;
    await _dialogCoordinator.ShowMetroDialogAsync(this, editor);
}

/// <summary>
/// Callback from the connection-info dialog: copies the edited values back
/// into the options, re-runs the whole initialization, then hides the dialog
/// if it is still showing.
/// NOTE(review): async void for the same fire-and-forget reason as above.
/// </summary>
public async void SetConnectionInfo(ConnectionInfoEditorViewModel viewModel)
{
    _options.BaselineServer = viewModel.BaselineServer;
    _options.BaselineDatabase = viewModel.BaselineDatabase;
    _options.BaselineSchema = viewModel.BaselineSchema;
    _options.BaselineUsername = viewModel.BaselineUsername;
    _options.BaselinePassword = viewModel.BaselinePassword;
    _options.BenchmarkServer = viewModel.BenchmarkServer;
    _options.BenchmarkDatabase = viewModel.BenchmarkDatabase;
    _options.BenchmarkSchema = viewModel.BenchmarkSchema;
    _options.BenchmarkUsername = viewModel.BenchmarkUsername;
    _options.BenchmarkPassword = viewModel.BenchmarkPassword;
    _invalidOptions = false;
    ShowStatusMessage("Pre InitializeAll");
    // now that the options are filled, I can invoke the initialization
    InitializeAll();
    BaseMetroDialog showingDialog = null;
    showingDialog = await _dialogCoordinator.GetCurrentDialogAsync(this);
    if(showingDialog != null)
    {
        await _dialogCoordinator.HideMetroDialogAsync(this, showingDialog);
    }
    ShowStatusMessage("Post InitializeAll");
}

/// <summary>
/// Window Loaded handler: parses command-line options and flags them as
/// invalid when required baseline parameters are missing.
/// </summary>
private void Loaded(EventArgs ev)
{
    if (!ParseOptions())
    {
        _invalidOptions = true;
    }
}

/// <summary>
/// Creates and loads the baseline WorkloadAnalysis, and the benchmark one
/// as well when a benchmark schema was supplied in the options.
/// </summary>
private void InitializeWorkloadAnalysis()
{
    _baselineWorkloadAnalysis = new WorkloadAnalysis() { Name = "Baseline" };
    _baselineWorkloadAnalysis.ConnectionInfo = new SqlConnectionInfo()
    {
        ServerName = _options.BaselineServer,
        DatabaseName = _options.BaselineDatabase,
        SchemaName = _options.BaselineSchema,
        UserName = _options.BaselineUsername,
        Password = _options.BaselinePassword
    };
    _baselineWorkloadAnalysis.Load();
    // BenchmarkSchema acts as the switch for "compare mode": no schema, no benchmark analysis.
    if(_options.BenchmarkSchema != null)
    {
        _benchmarkWorkloadAnalysis = new WorkloadAnalysis() { Name = "Benchmark" };
        _benchmarkWorkloadAnalysis.ConnectionInfo = new SqlConnectionInfo()
        {
            ServerName = _options.BenchmarkServer,
            DatabaseName = _options.BenchmarkDatabase,
            SchemaName = _options.BenchmarkSchema,
            UserName = _options.BenchmarkUsername,
            Password = _options.BenchmarkPassword
        };
        _benchmarkWorkloadAnalysis.Load();
    }
}

/// <summary>
/// Rebuilds the Queries grid data: aggregates the baseline (and benchmark)
/// points per normalized query, restricted by the active application/host/
/// database/login filters and by the current X-axis zoom of the charts.
/// </summary>
private void InitializeQueries()
{
    // Initialize the queries
    logger.Info("Entering baseline evaluation");
    // A non-null DefaultXAxis means the plots have been rendered and may be
    // zoomed; capture the visible X range so the grid matches the charts.
    var zoomIsSet = PlotModels[0].DefaultXAxis != null;
    double xstart = 0;
    double xend = 0;
    if (zoomIsSet)
    {
        xstart = PlotModels[0].DefaultXAxis.ActualMinimum;
        xend = PlotModels[0].DefaultXAxis.ActualMaximum;
        if (xstart < 0) { xstart = 0; }
    }
    var baseline = from t in _baselineWorkloadAnalysis.Points
                   where ApplicationList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.ApplicationName)
                       && HostList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.HostName)
                       && DatabaseList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.DatabaseName)
                       && LoginList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.LoginName)
                       && (!zoomIsSet || t.OffsetMinutes >= xstart )
                       && (!zoomIsSet || t.OffsetMinutes <= xend)
                   group t by new { query = t.NormalizedQuery } into grp
                   select new { query = grp.Key.query, sum_duration_us = grp.Sum(t =>
t.SumDurationUs), avg_duration_us = grp.Average(t => t.AvgDurationUs), sum_cpu_us = grp.Sum(t => t.SumCpuUs), avg_cpu_us = grp.Average(t => t.AvgCpuUs), sum_reads = grp.Sum(t => t.SumReads), avg_reads = grp.Average(t => t.AvgReads), execution_count = grp.Sum(t => t.ExecutionCount) }; logger.Info("Baseline evaluation completed"); logger.Info("Entering benchmark evaluation"); var benchmark = from t in baseline where false select new { t.query, t.sum_duration_us, t.avg_duration_us, t.sum_cpu_us, t.avg_cpu_us, t.sum_reads, t.avg_reads, t.execution_count }; if (_benchmarkWorkloadAnalysis != null) { benchmark = from t in _benchmarkWorkloadAnalysis.Points where ApplicationList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.ApplicationName) && HostList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.HostName) && DatabaseList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.DatabaseName) && LoginList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.LoginName) && (!zoomIsSet || t.OffsetMinutes >= xstart) && (!zoomIsSet || t.OffsetMinutes <= xend) group t by new { query = t.NormalizedQuery } into grp select new { query = grp.Key.query, sum_duration_us = grp.Sum(t => t.SumDurationUs), avg_duration_us = grp.Average(t => t.AvgDurationUs), sum_cpu_us = grp.Sum(t => t.SumCpuUs), avg_cpu_us = grp.Average(t => t.AvgCpuUs), sum_reads = grp.Sum(t => t.SumReads), avg_reads = grp.Average(t => t.AvgReads), execution_count = grp.Sum(t => t.ExecutionCount) }; } logger.Info("Benchmark evaluation completed"); logger.Info("Merging sets"); var leftOuterJoin = from b in baseline join k in benchmark on b.query.Hash equals k.query.Hash into joinedData from j in joinedData.DefaultIfEmpty() select new QueryResult { query_hash = b.query.Hash, query_text = b.query.ExampleText, query_normalized = b.query.NormalizedText, sum_duration_us = b.sum_duration_us, avg_duration_us = b.avg_duration_us, sum_cpu_us = b.sum_cpu_us, avg_cpu_us = b.avg_cpu_us, sum_reads = b.sum_reads, 
avg_reads = b.avg_reads, execution_count = b.execution_count, sum_duration_us2 = j == null ? 0 : j.sum_duration_us, diff_sum_duration_us = j == null ? 0 : j.sum_duration_us - b.sum_duration_us, avg_duration_us2 = j == null ? 0 : j.avg_duration_us, diff_avg_duration_us = j == null ? 0 : j.avg_duration_us - b.avg_duration_us, sum_cpu_us2 = j == null ? 0 : j.sum_cpu_us, diff_sum_cpu_us = j == null ? 0 : j.sum_cpu_us - b.sum_cpu_us, avg_cpu_us2 = j == null ? 0 : j.avg_cpu_us, diff_avg_cpu_us = j == null ? 0 : j.avg_cpu_us - b.avg_cpu_us, sum_reads2 = j == null ? 0 : j.sum_reads, diff_sum_reads = j == null ? 0 : j.sum_reads - b.sum_reads, avg_reads2 = j == null ? 0 : j.avg_reads, diff_avg_reads = j == null ? 0 : j.avg_reads - b.avg_reads, execution_count2 = j == null ? 0 : j.execution_count, diff_execution_count = j == null ? 0 : j.execution_count - b.execution_count, querydetails = new QueryDetails(b.query, _baselineWorkloadAnalysis, _benchmarkWorkloadAnalysis), document = new ICSharpCode.AvalonEdit.Document.TextDocument() { Text = b.query.ExampleText } }; var rightOuterJoin = from b in benchmark join k in baseline on b.query.Hash equals k.query.Hash into joinedData from j in joinedData.DefaultIfEmpty() select new QueryResult { query_hash = b.query.Hash, query_text = b.query.ExampleText, query_normalized = b.query.NormalizedText, sum_duration_us = b.sum_duration_us, avg_duration_us = b.avg_duration_us, sum_cpu_us = b.sum_cpu_us, avg_cpu_us = b.avg_cpu_us, sum_reads = b.sum_reads, avg_reads = b.avg_reads, execution_count = b.execution_count, sum_duration_us2 = j == null ? 0 : j.sum_duration_us, diff_sum_duration_us = j == null ? 0 : j.sum_duration_us - b.sum_duration_us, avg_duration_us2 = j == null ? 0 : j.avg_duration_us, diff_avg_duration_us = j == null ? 0 : j.avg_duration_us - b.avg_duration_us, sum_cpu_us2 = j == null ? 0 : j.sum_cpu_us, diff_sum_cpu_us = j == null ? 0 : j.sum_cpu_us - b.sum_cpu_us, avg_cpu_us2 = j == null ? 
0 : j.avg_cpu_us, diff_avg_cpu_us = j == null ? 0 : j.avg_cpu_us - b.avg_cpu_us, sum_reads2 = j == null ? 0 : j.sum_reads, diff_sum_reads = j == null ? 0 : j.sum_reads - b.sum_reads, avg_reads2 = j == null ? 0 : j.avg_reads, diff_avg_reads = j == null ? 0 : j.avg_reads - b.avg_reads, execution_count2 = j == null ? 0 : j.execution_count, diff_execution_count = j == null ? 0 : j.execution_count - b.execution_count, querydetails = new QueryDetails(b.query, _baselineWorkloadAnalysis, _benchmarkWorkloadAnalysis), document = new ICSharpCode.AvalonEdit.Document.TextDocument() { Text = b.query.ExampleText } }; var merged = leftOuterJoin .Union(rightOuterJoin, new QueryResultEqualityComparer()) .ToList(); Queries = merged; logger.Info("Sets merged"); RaisePropertyChanged("Queries"); RaisePropertyChanged("CompareModeVisibility"); RaisePropertyChanged("CompareMode"); var sortCol = CompareMode ? "diff_sum_duration_us" : "sum_duration_us"; var msg = new SortColMessage(sortCol, System.ComponentModel.ListSortDirection.Descending); Messenger.Default.Send(msg); } private bool ParseOptions() { _options = ((WorkloadViewer.App)App.Current).Options; if(_options.ConfigurationFile != null) { // TODO: read configuration from file } else { if(_options.BaselineServer == null || _options.BaselineDatabase == null) { return false; } } return true; } private void KeyDown(KeyEventArgs e) { if(e.Key == Key.F5) { // TODO: refreshing should keep zoom and filters InitializeAll(); } if (e.Key == Key.F8) { ShowConnectionInfoDialog(); } } private void RefreshAllCharts() { RaisePropertyChanged("CpuPlotModel"); RaisePropertyChanged("DurationPlotModel"); RaisePropertyChanged("BatchesPlotModel"); } private void InitializeCharts() { var useDateAxis = _options.BenchmarkSchema == null; var baseOffset = useDateAxis ? 
DateTimeAxis.ToDouble(_baselineWorkloadAnalysis.StartDate) : 0; CpuPlotModel = InitializePlotModel(useDateAxis); CpuPlotModel.Axes[1].Title = "Cpu (us)"; CpuPlotModel.Title = "Cpu"; CpuPlotModel.Series.Add(LoadCpuSeries(_baselineWorkloadAnalysis, OxyColor.Parse("#01B8AA"),baseOffset)); if(_options.BenchmarkSchema != null) { CpuPlotModel.Series.Add(LoadCpuSeries(_benchmarkWorkloadAnalysis, OxyColor.Parse("#000000"), baseOffset)); } CpuPlotModel.PlotAreaBorderThickness = new OxyThickness(1,0,0,1); PlotModels[0] = CpuPlotModel; DurationPlotModel = InitializePlotModel(useDateAxis); DurationPlotModel.Axes[1].Title = "Duration (us)"; DurationPlotModel.Title = "Duration"; DurationPlotModel.Series.Add(LoadDurationSeries(_baselineWorkloadAnalysis, OxyColor.Parse("#01B8AA"), baseOffset)); if (_options.BenchmarkSchema != null) { DurationPlotModel.Series.Add(LoadDurationSeries(_benchmarkWorkloadAnalysis, OxyColor.Parse("#000000"), baseOffset)); } DurationPlotModel.PlotAreaBorderThickness = new OxyThickness(1, 0, 0, 1); PlotModels[1] = DurationPlotModel; BatchesPlotModel = InitializePlotModel(useDateAxis); BatchesPlotModel.Axes[1].Title = "Batches/second"; BatchesPlotModel.Title = "Batches/second"; BatchesPlotModel.Series.Add(LoadBatchesSeries(_baselineWorkloadAnalysis, OxyColor.Parse("#01B8AA"), baseOffset)); if (_options.BenchmarkSchema != null) { BatchesPlotModel.Series.Add(LoadBatchesSeries(_benchmarkWorkloadAnalysis, OxyColor.Parse("#000000"), baseOffset)); } BatchesPlotModel.PlotAreaBorderThickness = new OxyThickness(1, 0, 0, 1); PlotModels[2] = BatchesPlotModel; } private PlotModel InitializePlotModel(bool dateXAxis) { var plotModel = new PlotModel(); plotModel.LegendOrientation = LegendOrientation.Horizontal; plotModel.LegendPlacement = LegendPlacement.Inside; plotModel.LegendPosition = LegendPosition.TopLeft; plotModel.LegendBackground = OxyColor.FromAColor(200, OxyColors.White); if (!dateXAxis) { var offsetAxis = new LinearAxis() { MajorGridlineStyle = LineStyle.Dot, 
MinorGridlineStyle = LineStyle.None, Position = AxisPosition.Bottom, Title = "Offset minutes", AbsoluteMinimum = 0, MinorTickSize = 0 }; plotModel.Axes.Add(offsetAxis); } else { LinearAxis offsetAxis = new DateTimeAxis() { MajorGridlineStyle = LineStyle.Dot, MinorGridlineStyle = LineStyle.None, Position = AxisPosition.Bottom, Title = "Time", StringFormat = "HH:mm", MinorIntervalType = DateTimeIntervalType.Minutes, IntervalType = DateTimeIntervalType.Minutes, }; plotModel.Axes.Add(offsetAxis); } var valueAxis1 = new LinearAxis() { MajorGridlineStyle = LineStyle.Dot, MinorGridlineStyle = LineStyle.None, Position = AxisPosition.Left, StringFormat = "N0", IsZoomEnabled = false, IsPanEnabled = false, AbsoluteMinimum = 0, MaximumPadding = 0.2, MinorTickSize = 0 }; plotModel.Axes.Add(valueAxis1); plotModel.PlotMargins = new OxyThickness(70, 0, 0, 30); plotModel.Series.Clear(); foreach (var ax in plotModel.Axes) { ax.AxisChanged += (sender, e) => SynchronizeCharts(plotModel, sender, e); } return plotModel; } private void SynchronizeCharts(PlotModel plotModel, object sender, AxisChangedEventArgs e) { if (DateTime.Now.Subtract(_lastAxisAdjust).TotalMilliseconds < 100) { return; } _lastAxisAdjust = DateTime.Now; try { var xstart = plotModel.DefaultXAxis.ActualMinimum; var xend = plotModel.DefaultXAxis.ActualMaximum; if (xstart < 0) { xstart = 0; } foreach (var pm in PlotModels) { // set x zoom only for the charts not being zoomed if (pm.Title != plotModel.Title) { pm.DefaultXAxis.Zoom(xstart, xend); } pm.InvalidatePlot(true); } InitializeQueries(); } finally { _lastAxisAdjust = DateTime.Now; } } private Series LoadCpuSeries(WorkloadAnalysis analysis, OxyColor color, double baseOffset) { if (analysis == null) { return null; } var cpuSeries = new LineSeries() { StrokeThickness = 2, MarkerSize = 3, MarkerStroke = OxyColor.Parse("#FF0000"), //Red MarkerType = MarkerType.None, CanTrackerInterpolatePoints = false, Title = analysis.Name, Color = color, Smooth = false }; 
if(baseOffset == 0) { cpuSeries.TrackerFormatString = "Offset: {2:0}\n{0}: {4:0}"; } var Table = from t in analysis.Points where ApplicationList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.ApplicationName) && HostList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.HostName) && DatabaseList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.DatabaseName) && LoginList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.LoginName) group t by new { offset = t.OffsetMinutes } into grp orderby grp.Key.offset select new { offset_minutes = grp.Key.offset, cpu = grp.Sum(t => t.SumCpuUs) }; foreach (var p in Table) { double xValue = 0; if (baseOffset > 0) { xValue = DateTimeAxis.ToDouble(DateTimeAxis.ToDateTime(baseOffset).AddMinutes(p.offset_minutes)); } else { xValue = p.offset_minutes; } cpuSeries.Points.Add(new DataPoint(xValue , p.cpu)); } return cpuSeries; } private Series LoadDurationSeries(WorkloadAnalysis analysis, OxyColor color, double baseOffset) { if (analysis == null) { return null; } var durationSeries = new LineSeries() { StrokeThickness = 2, MarkerSize = 3, MarkerStroke = OxyColor.Parse("#FF0000"), //Red MarkerType = MarkerType.None, CanTrackerInterpolatePoints = false, Title = analysis.Name, Color = color, Smooth = false }; if (baseOffset == 0) { durationSeries.TrackerFormatString = "Offset: {2:0}\n{0}: {4:0}"; } var Table = from t in analysis.Points where ApplicationList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.ApplicationName) && HostList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.HostName) && DatabaseList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.DatabaseName) && LoginList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.LoginName) group t by new { offset = t.OffsetMinutes } into grp orderby grp.Key.offset select new { offset_minutes = grp.Key.offset, duration = grp.Sum(t => t.SumDurationUs) }; foreach (var p in Table) { double xValue = 0; if (baseOffset > 0) { xValue = 
DateTimeAxis.ToDouble(DateTimeAxis.ToDateTime(baseOffset).AddMinutes(p.offset_minutes)); } else { xValue = p.offset_minutes; } durationSeries.Points.Add(new DataPoint(xValue, p.duration)); } return durationSeries; } private Series LoadBatchesSeries(WorkloadAnalysis analysis, OxyColor color, double baseOffset) { if (analysis == null) { return null; } var batchesSeries = new LineSeries() { StrokeThickness = 2, MarkerSize = 3, MarkerStroke = OxyColor.Parse("#FF0000"), //Red MarkerType = MarkerType.None, CanTrackerInterpolatePoints = false, Title = analysis.Name, Color = color, Smooth = false }; if (baseOffset == 0) { batchesSeries.TrackerFormatString = "Offset: {2:0}\n{0}: {4:0}"; } var Table = from t in analysis.Points where ApplicationList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.ApplicationName) && HostList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.HostName) && DatabaseList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.DatabaseName) && LoginList.Where(f => f.IsChecked).Select(f => f.Name).Contains(t.LoginName) group t by new { offset = t.OffsetMinutes } into grp orderby grp.Key.offset select new { offset_minutes = grp.Key.offset, execution_count = grp.Sum(t => t.ExecutionCount / ((t.DurationMinutes == 0 ? 
1 : t.DurationMinutes) * 60)) }; foreach (var p in Table) { double xValue = 0; if (baseOffset > 0) { xValue = DateTimeAxis.ToDouble(DateTimeAxis.ToDateTime(baseOffset).AddMinutes(p.offset_minutes)); } else { xValue = p.offset_minutes; } batchesSeries.Points.Add(new DataPoint(xValue, p.execution_count)); } return batchesSeries; } private void InitializeFilters() { var baseApplications = from t in _baselineWorkloadAnalysis.Points group t by new { application = t.ApplicationName } into grp select grp.Key.application ; if(_benchmarkWorkloadAnalysis != null) { baseApplications = baseApplications.Union( from t in _benchmarkWorkloadAnalysis.Points group t by new { application = t.ApplicationName } into grp select grp.Key.application ).Distinct(); } ApplicationList = new List( from name in baseApplications orderby name select new FilterDefinition() { Name = name, IsChecked = true } ); var baseHosts = from t in _baselineWorkloadAnalysis.Points group t by new { host = t.HostName } into grp select grp.Key.host ; if (_benchmarkWorkloadAnalysis != null) { baseHosts = baseHosts.Union( from t in _benchmarkWorkloadAnalysis.Points group t by new { host = t.HostName } into grp select grp.Key.host ).Distinct(); } HostList = new List( from name in baseHosts orderby name select new FilterDefinition() { Name = name, IsChecked = true } ); var baseDatabases = from t in _baselineWorkloadAnalysis.Points group t by new { database = t.DatabaseName } into grp select grp.Key.database ; if (_benchmarkWorkloadAnalysis != null) { baseDatabases = baseDatabases.Union( from t in _benchmarkWorkloadAnalysis.Points group t by new { database = t.DatabaseName } into grp select grp.Key.database ).Distinct(); } DatabaseList = new List( from name in baseDatabases orderby name select new FilterDefinition() { Name = name, IsChecked = true } ); var baseLogins = from t in _baselineWorkloadAnalysis.Points group t by new { login = t.LoginName } into grp select grp.Key.login ; if (_benchmarkWorkloadAnalysis != 
null)
{
    // Merge in the benchmark logins so the filter list covers both analyses.
    baseLogins = baseLogins.Union(
        from t in _benchmarkWorkloadAnalysis.Points
        group t by new { login = t.LoginName } into grp
        select grp.Key.login
    ).Distinct();
}
// NOTE(review): the generic type argument of List appears to have been
// stripped by the extraction (presumably List<FilterDefinition>) — confirm
// against the original source.
LoginList = new List(
    from name in baseLogins
    orderby name
    select new FilterDefinition() { Name = name, IsChecked = true }
);
RaisePropertyChanged("ApplicationList");
RaisePropertyChanged("HostList");
RaisePropertyChanged("DatabaseList");
RaisePropertyChanged("LoginList");
}

/// <summary>
/// Sets the status-bar text and raises the change notification for it.
/// </summary>
private void ShowStatusMessage(string message)
{
    StatusMessage = message;
    RaisePropertyChanged("StatusMessage");
}
}
}

================================================ FILE: WorkloadViewer/ViewModel/Message.cs ================================================

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace WorkloadViewer.ViewModel
{
    /// <summary>
    /// Minimal string-payload message type — presumably exchanged via the
    /// MvvmLight Messenger (TODO confirm against senders/receivers).
    /// </summary>
    public class Message
    {
        // The message payload.
        public string Text { get; set; }

        public Message(string name)
        {
            Text = name;
        }
    }
}

================================================ FILE: WorkloadViewer/ViewModel/QueryResult.cs ================================================

using WorkloadViewer.Model;

namespace WorkloadViewer.ViewModel
{
    /// <summary>
    /// One row of the query comparison grid. Un-suffixed columns hold the
    /// baseline aggregates; the "2"-suffixed columns hold the benchmark
    /// aggregates (0 when the query is absent from the benchmark) and the
    /// "diff_" columns hold benchmark minus baseline.
    /// Property names are snake_case on purpose: they are bound as grid
    /// column/sort keys (see SortColMessage usage).
    /// </summary>
    public class QueryResult
    {
        // Hash identifying the normalized query; used as the join key
        // between baseline and benchmark sets.
        public long query_hash { get; set; }
        public string query_text { get; set; }
        public string query_normalized { get; set; }

        // Baseline aggregates.
        public long sum_duration_us { get; set; }
        public double avg_duration_us { get; set; }
        public long sum_cpu_us { get; set; }
        public double avg_cpu_us { get; set; }
        public long sum_reads { get; set; }
        public double avg_reads { get; set; }
        public long execution_count { get; set; }

        // Benchmark aggregates and benchmark-minus-baseline deltas.
        public long sum_duration_us2 { get; set; }
        public long diff_sum_duration_us { get; set; }
        public double avg_duration_us2 { get; set; }
        public double diff_avg_duration_us { get; set; }
        public long sum_cpu_us2 { get; set; }
        public long diff_sum_cpu_us { get; set; }
        public double avg_cpu_us2 { get; set; }
        public double diff_avg_cpu_us { get; set; }
        public long sum_reads2 { get; set; }
        public long diff_sum_reads { get; set; }
        public double avg_reads2 { get; set; }
        public double diff_avg_reads { get; set; }
        public long execution_count2 { get; set; }
        public long diff_execution_count { get; set; }

        // Drill-down detail for this query across both analyses.
        public QueryDetails querydetails { get; set; }
        // AvalonEdit document holding the example query text for display.
        public ICSharpCode.AvalonEdit.Document.TextDocument document { get; set; }
    }
}

================================================ FILE: WorkloadViewer/ViewModel/SortColMessage.cs ================================================

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace WorkloadViewer.ViewModel
{
    /// <summary>
    /// Message asking the view to sort the query grid by the given column
    /// in the given direction (sent after the query sets are (re)built).
    /// </summary>
    public class SortColMessage
    {
        // Grid column to sort by (matches a QueryResult property name).
        public string ColumnName { get; set; }
        public ListSortDirection Direction { get; set; }

        public SortColMessage(string columnName, ListSortDirection direction)
        {
            ColumnName = columnName;
            Direction = direction;
        }
    }
}

================================================ FILE: WorkloadViewer/ViewModel/ViewModelLocator.cs ================================================

/*
  In App.xaml:

  In the View:
  DataContext="{Binding Source={StaticResource Locator}, Path=ViewModelName}"

  You can also use Blend to do all this with the tool's support.
  See http://www.galasoft.ch/mvvm
*/
using CommonServiceLocator;
using GalaSoft.MvvmLight;
using GalaSoft.MvvmLight.Ioc;

namespace WorkloadViewer.ViewModel
{
    /// <summary>
    /// This class contains static references to all the view models in the
    /// application and provides an entry point for the bindings.
    /// </summary>
    public class ViewModelLocator
    {
        /// <summary>
        /// Initializes a new instance of the ViewModelLocator class.
/// public ViewModelLocator() { ServiceLocator.SetLocatorProvider(() => SimpleIoc.Default); ////if (ViewModelBase.IsInDesignModeStatic) ////{ //// // Create design time view services and models //// SimpleIoc.Default.Register(); ////} ////else ////{ //// // Create run time view services and models //// SimpleIoc.Default.Register(); ////} SimpleIoc.Default.Register(); } public MainViewModel Main { get { return ServiceLocator.Current.GetInstance(); } } public static void Cleanup() { // TODO Clear the ViewModels } } } ================================================ FILE: WorkloadViewer/WorkloadViewer.csproj ================================================  Debug AnyCPU {6E10E31F-D04D-4CB7-8BB9-71ABD4B6B973} Exe WorkloadViewer WorkloadViewer v4.8 512 {60dc8134-eba5-43b8-bcc9-bb4bc16c2548};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC} 4 true true AnyCPU true full false bin\x64\Debug\ DEBUG;TRACE prompt 4 false AnyCPU pdbonly true bin\x64\Release\ TRACE prompt 4 false true bin\x86\Debug\ DEBUG;TRACE full x86 prompt MinimumRecommendedRules.ruleset true bin\x86\Release\ TRACE true pdbonly x86 prompt MinimumRecommendedRules.ruleset true Icon.ico ..\packages\CommandLineParser.1.9.71\lib\net45\CommandLine.dll ..\packages\CommonServiceLocator.2.0.2\lib\net45\CommonServiceLocator.dll ..\packages\ControlzEx.3.0.2.4\lib\net45\ControlzEx.dll ..\packages\MvvmLightLibs.5.4.1.1\lib\net45\GalaSoft.MvvmLight.dll ..\packages\MvvmLightLibs.5.4.1.1\lib\net45\GalaSoft.MvvmLight.Extras.dll ..\packages\MvvmLightLibs.5.4.1.1\lib\net45\GalaSoft.MvvmLight.Platform.dll ..\packages\AvalonEdit.5.0.4\lib\Net40\ICSharpCode.AvalonEdit.dll ..\packages\MahApps.Metro.1.6.5\lib\net46\MahApps.Metro.dll ..\packages\NLog.4.4.12\lib\net45\NLog.dll ..\packages\OxyPlot.Core.1.0.0\lib\net45\OxyPlot.dll ..\packages\OxyPlot.Wpf.1.0.0\lib\net45\OxyPlot.Wpf.dll ..\packages\System.Console.4.0.0\lib\net46\System.Console.dll ..\packages\System.Reflection.TypeExtensions.4.1.0\lib\net46\System.Reflection.TypeExtensions.dll 
..\packages\MvvmLightLibs.5.4.1.1\lib\net45\System.Windows.Interactivity.dll 4.0 MSBuild:Compile Designer Properties\SharedAssemblyInfo.cs Designer MSBuild:Compile Designer MSBuild:Compile Designer MSBuild:Compile MSBuild:Compile Designer App.xaml Code ConnectionInfoDialog.xaml ConnectionInfoEditor.xaml MainWindow.xaml Code Code True True Resources.resx True Settings.settings True PublicResXFileCodeGenerator Resources.Designer.cs Always SettingsSingleFileGenerator Settings.Designer.cs Designer

================================================ FILE: WorkloadViewer/packages.config ================================================

================================================ FILE: build.ps1 ================================================

# ---------------------------------------------------------------------------
# Locate MSBuild via vswhere (ships with Visual Studio 2017+)
# ---------------------------------------------------------------------------
$vswhere = "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe"
if (-not (Test-Path $vswhere)) {
    throw "vswhere.exe not found at '$vswhere'. Visual Studio 2017 or newer is required."
}

# Ask vswhere for the newest installation that carries the MSBuild component
# and take the first MSBuild.exe it reports.
$msbuild = & $vswhere -latest -requires Microsoft.Component.MSBuild `
    -find MSBuild\**\Bin\MSBuild.exe | Select-Object -First 1
if (-not $msbuild) {
    throw "MSBuild.exe not found. Please install Visual Studio with the MSBuild component."
}

# ---------------------------------------------------------------------------
# Build the .NET projects (SqlWorkload, WorkloadViewer, ConvertWorkload, etc.)
# The Setup and SetupBootstrapper WiX projects are excluded from the solution
# build and are handled separately by buildexe.ps1 below.
# ---------------------------------------------------------------------------
# Release/x64 build, then dot-source the bootstrapper build for that platform.
& $msbuild "$PSScriptRoot\WorkloadTools.sln" -t:Rebuild -p:Configuration=Release -p:Platform=x64
. $PSScriptRoot\SetupBootstrapper\buildexe.ps1 -Platform x64

# Release/x86 build, then the matching bootstrapper build.
& $msbuild "$PSScriptRoot\WorkloadTools.sln" -t:Rebuild -p:Configuration=Release -p:Platform=x86
. $PSScriptRoot\SetupBootstrapper\buildexe.ps1 -Platform x86