diff --git a/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks - Linux.csv b/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks - Linux.csv deleted file mode 100644 index a00c63309e2..00000000000 --- a/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks - Linux.csv +++ /dev/null @@ -1,9 +0,0 @@ -Legend,Base CommandLine -FortunesEf_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/database.benchmarks.yml --scenario fortunes_ef --application.options.collectCounters true --property os=windows --property arch=x64 --profile aspnet-citrine-win -JsonMin_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/json.benchmarks.yml --scenario mapaction --application.framework net8.0 --application.options.collectCounters true --property os=windows --property arch=x64 --profile aspnet-citrine-win -FortunesEf_Linux, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/database.benchmarks.yml --scenario fortunes_ef --application.framework net8.0 --application.options.collectCounters true --profile aspnet-citrine-lin --property os=linux --property arch=x64 -JsonMin_Linux, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/json.benchmarks.yml --scenario mapaction --application.framework net8.0 --application.options.collectCounters true --profile aspnet-citrine-lin --property os=linux --property arch=x64 -FortunesEf_Linux, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/database.benchmarks.yml --scenario fortunes_ef --application.framework net8.0 --application.options.collectCounters true --profile aspnet-citrine-lin --property os=linux --property arch=x64 -JsonMin_Linux, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/json.benchmarks.yml --scenario mapaction --application.framework net8.0 --application.options.collectCounters true --profile aspnet-citrine-lin --property os=linux --property arch=x64 -Stage1Grpc_Windows, crank --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcvanilla --profile intel-win-app --profile intel-lin-load --application.options.collectCounters true -Stage1Grpc_Linux, crank --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcvanilla --profile intel-load2-app --profile amd-lin2-load --profile amd-lin2-db --application.options.collectCounters true \ No newline at end of file diff --git a/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks - All.csv b/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks-All.csv similarity index 84% rename from src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks - All.csv rename to src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks-All.csv index 19d69fea4a8..92366107465 100644 --- a/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks - All.csv +++ b/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks-All.csv @@ -41,23 +41,14 @@ PlaintextPlatformInline_Windows, --config 
https://raw.githubusercontent.com/aspn JsonPlatform_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/platform.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/azure.profile.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario json --profile intel-win-app --profile intel-load2-load --application.collectDependencies true --application.options.collectCounters true PlaintextPlatform_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/platform.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/azure.profile.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario plaintext --profile intel-win-app --profile intel-load2-load --application.framework net8.0 --application.collectDependencies true --application.options.collectCounters true Stage1_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicminimalapivanilla --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.framework net8.0 --application.options.collectCounters true -Stage1Aot_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicminimalapipublishaot --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.packageReferences "Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion)" --application.framework net8.0 --application.options.collectCounters true -Stage1GrpcAotServerGC_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcpublishaot --profile intel-win-app --profile intel-lin-load --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.environmentVariables DOTNET_gcServer=1 --application.options.collectCounters true -Stage1GrpcAot_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcpublishaot --profile intel-win-app --profile intel-lin-load --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.options.collectCounters true Stage1GrpcTrimR2RSingleFile_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcpublishtrimr2rsinglefile --profile intel-win-app --profile intel-lin-load --application.options.collectCounters true Stage1GrpcServerGC_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcvanilla --profile intel-win-app --profile intel-lin-load --application.options.collectCounters true Stage1Grpc_Windows, --config 
https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcvanilla --profile intel-win-app --profile intel-lin-load --application.options.collectCounters true -Stage2AotServerGC_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario todosapipublishaot --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.buildArguments /p:ServerGarbageCollection=true --application.options.collectCounters true -Stage2Aot_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario todosapipublishaot --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.options.collectCounters true Stage2TrimR2RSingleFile_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario todosapipublishtrimr2rsinglefile --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.options.collectCounters true Stage2ServerGC_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario todosapivanilla --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.buildArguments /p:ServerGarbageCollection=true --application.options.collectCounters true Stage2_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario todosapivanilla --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.options.collectCounters true -Stage1AotSpeedOpt_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicminimalapipublishaot --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.buildArguments /p:OptimizationPreference=Speed --application.options.collectCounters true -Stage1AotServerGC_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicminimalapipublishaot --profile intel-win-app --profile intel-lin-load --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.environmentVariables DOTNET_gcServer=1 --application.options.collectCounters true Stage1TrimR2RSingleFile_Windows, --config 
https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicminimalapipublishtrimr2rsinglefile --profile intel-win-app --profile intel-lin-load --application.options.collectCounters true Stage1ServerGC_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicminimalapivanilla --profile intel-win-app --profile intel-lin-load --application.environmentVariables DOTNET_gcServer=1 --application.options.collectCounters true Stage1GrpcPgo_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcvanilla --profile intel-win-app --profile intel-lin-load --application.environmentVariables DOTNET_TieredPGO=1 --application.options.collectCounters true Stage2Pgo_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario todosapivanilla --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.environmentVariables DOTNET_TieredPGO=1 --application.options.collectCounters true -Stage1Pgo_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicminimalapivanilla --profile intel-win-app --profile intel-lin-load --application.environmentVariables DOTNET_TieredPGO=1 --application.options.collectCounters true -Stage1GrpcAotSpeedOpt_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcpublishaot --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.buildArguments /p:OptimizationPreference=Speed --application.options.collectCounters true -Stage2AotSpeedOpt_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario todosapipublishaot --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.buildArguments /p:OptimizationPreference=Speed --application.options.collectCounters true \ No newline at end of file +Stage1Pgo_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicminimalapivanilla --profile intel-win-app --profile intel-lin-load --application.environmentVariables DOTNET_TieredPGO=1 --application.options.collectCounters true \ No newline at end of file diff --git a/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks-Aot-Windows.csv 
b/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks-Aot-Windows.csv new file mode 100644 index 00000000000..c77eeb4ba1d --- /dev/null +++ b/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks-Aot-Windows.csv @@ -0,0 +1,7 @@ +Legend,Base CommandLine +Stage1Aot_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicminimalapipublishaot --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.packageReferences "Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion)" --application.framework net8.0 --application.options.collectCounters true +Stage1GrpcAot_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcpublishaot --profile intel-win-app --profile intel-lin-load --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.options.collectCounters true +Stage1AotSpeedOpt_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicminimalapipublishaot --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.buildArguments /p:OptimizationPreference=Speed --application.options.collectCounters true +Stage1GrpcAotSpeedOpt_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcpublishaot --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.buildArguments /p:OptimizationPreference=Speed --application.options.collectCounters true +Stage2Aot_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario todosapipublishaot --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.options.collectCounters true +Stage2AotSpeedOpt_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario todosapipublishaot --profile intel-win-app --profile intel-lin-load --profile amd-lin2-db --application.packageReferences Microsoft.Dotnet.ILCompiler=$(MicrosoftNETCoreAppPackageVersion) --application.buildArguments /p:OptimizationPreference=Speed --application.options.collectCounters true \ No newline at end of file diff --git a/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks-Linux.csv b/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks-Linux.csv new file mode 100644 index 00000000000..c487eb268ef --- 
/dev/null +++ b/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks-Linux.csv @@ -0,0 +1,4 @@ +Legend,Base CommandLine +FortunesEf_Linux, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/database.benchmarks.yml --scenario fortunes_ef --application.framework net8.0 --application.options.collectCounters true --profile aspnet-citrine-lin --property os=linux --property arch=x64 +JsonMin_Linux, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/json.benchmarks.yml --scenario mapaction --application.framework net8.0 --application.options.collectCounters true --profile aspnet-citrine-lin --property os=linux --property arch=x64 +Stage1Grpc_Linux, crank --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcvanilla --profile intel-load2-app --profile amd-lin2-load --profile amd-lin2-db --application.options.collectCounters true \ No newline at end of file diff --git a/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks.csv b/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks.csv index b0a9ec48e19..1024e3d1f73 100644 --- a/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks.csv +++ b/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks.csv @@ -1,2 +1,3 @@ Legend,Base CommandLine -Stage1Grpc_Linux, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/goldilocks.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario basicgrpcvanilla --profile intel-load2-app --profile amd-lin2-load --profile amd-lin2-db --application.options.collectCounters true \ No newline at end of file +JsonMvc_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/json.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/azure.profile.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario mvc --profile intel-win-app --profile intel-load2-load --application.collectDependencies true --application.options.collectCounters true +FortunesEf_Windows, --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/scenarios/database.benchmarks.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/azure.profile.yml --config https://raw.githubusercontent.com/aspnet/Benchmarks/main/build/ci.profile.yml --scenario fortunes_ef --profile intel-win-app --profile intel-load-load --profile intel-db-db --application.options.collectCounters true --application.collectDependencies true \ No newline at end of file diff --git a/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks.yaml b/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks.yaml index 7742453ec43..412206079fb 100644 --- a/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks.yaml +++ b/src/benchmarks/gc/GC.Infrastructure/Configurations/ASPNetBenchmarks/ASPNetBenchmarks.yaml @@ -3,6 +3,7 @@ runs: corerun: C:\CoreRuns\Empty\ environment_variables: COMPlus_GCServer: 1 + framework_version: net6.0 run: corerun: C:\CoreRuns\Empty\ environment_variables: @@ -10,8 +11,15 @@ runs: environment: environment_variables: {} 
default_max_seconds: 300 + framework_version: net8.0 benchmark_settings: benchmark_file: C:\InfraRuns\RunNew_All\Suites\ASPNETBenchmarks\ASPNetBenchmarks.csv + # To take a dump: --application.options.dumpType full --application.options.dumpOutput + # To fetch the build artifacts: --application.options.fetch true + additional_arguments: --chart --chart-type hex + # Can optionally filter for specific benchmarks with a list of regexes. + # benchmark_filters: + # - Platform* output: path: C:\InfraRuns\RunNew_All\ASPNetBenchmarks columns: diff --git a/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure.Core/CommandBuilders/ASPNetBenchmarks.CommandBuilder.cs b/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure.Core/CommandBuilders/ASPNetBenchmarks.CommandBuilder.cs index 8571eddc701..f1285d6cf24 100644 --- a/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure.Core/CommandBuilders/ASPNetBenchmarks.CommandBuilder.cs +++ b/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure.Core/CommandBuilders/ASPNetBenchmarks.CommandBuilder.cs @@ -6,13 +6,13 @@ namespace GC.Infrastructure.Core.CommandBuilders { public static class ASPNetBenchmarksCommandBuilder { - public static (string, string) Build(ASPNetBenchmarksConfiguration configuration, KeyValuePair run, KeyValuePair baseConfiguration, OS os) + public static (string, string) Build(ASPNetBenchmarksConfiguration configuration, KeyValuePair run, KeyValuePair benchmarkNameToCommand, OS os) { string processName = "crank"; StringBuilder commandStringBuilder = new(); // Load the base configuration. - commandStringBuilder.Append(baseConfiguration.Value); + commandStringBuilder.Append(benchmarkNameToCommand.Value); // Environment Variables. // Add the environment variables from the configuration. @@ -33,7 +33,20 @@ public static (string, string) Build(ASPNetBenchmarksConfiguration configuration foreach (var env in environmentVariables) { - commandStringBuilder.Append($" --application.environmentVariables {env.Key}={env.Value} "); + string variable = env.Value; + + // If a GC log file is specified, also add the option to retrieve that log file back. + // This log file should be named in accordance with the name of the run and the benchmark. + if (string.CompareOrdinal(env.Key, "DOTNET_GCLogFile") == 0 || + string.CompareOrdinal(env.Key, "COMPlus_GCLogFile") == 0) + { + string fileNameOfLog = Path.GetFileName(env.Value); + commandStringBuilder.Append( $" --application.options.downloadFiles \"*{fileNameOfLog}.log\" " ); + string fileName = Path.GetFileNameWithoutExtension(env.Value); + commandStringBuilder.Append( $" --application.options.downloadFilesOutput \"{Path.Combine(configuration.Output.Path, run.Key, $"{benchmarkNameToCommand.Key}_GCLog")}\" " ); + } + + commandStringBuilder.Append($" --application.environmentVariables {env.Key}={variable} "); } // Trace Collection. @@ -69,19 +82,42 @@ public static (string, string) Build(ASPNetBenchmarksConfiguration configuration } // Add name of output. - commandStringBuilder.Append($" --application.options.traceOutput {Path.Combine(configuration.Output.Path, run.Key, (baseConfiguration.Key + "." + collectType)) + traceFileSuffix}"); + commandStringBuilder.Append($" --application.options.traceOutput {Path.Combine(configuration.Output.Path, run.Key, (benchmarkNameToCommand.Key + "." + collectType)) + traceFileSuffix}"); + } + + // Add any additional arguments specified.
+ if (!string.IsNullOrEmpty(configuration.benchmark_settings.additional_arguments)) + { + commandStringBuilder.Append($" {configuration.benchmark_settings.additional_arguments} "); } - commandStringBuilder.Append($" --application.framework net8.0 "); + string frameworkVersion = configuration.Environment.framework_version; + // Override the framework version if it's specified at the level of the run. + if (!string.IsNullOrEmpty(run.Value.framework_version)) + { + frameworkVersion = run.Value.framework_version; + } + commandStringBuilder.Append($" --application.framework {frameworkVersion} "); - string corerunToSend = run.Value.corerun.EndsWith("\\") ? run.Value.corerun.Remove(run.Value.corerun.Length - 1) : run.Value.corerun; - commandStringBuilder.Append($" --application.options.outputFiles {Path.Combine(Path.GetDirectoryName(corerunToSend), "*.*" )}"); + string artifactsToUpload = run.Value.corerun!; - // Get the log. + // If the corerun specified is a directory, upload the entire directory. + // Else, we upload just the file. + if (Directory.Exists(run.Value.corerun!)) + { + artifactsToUpload = Path.Combine(artifactsToUpload, "*.*"); + } + commandStringBuilder.Append($" --application.options.outputFiles {artifactsToUpload} "); + + // Get the logs. commandStringBuilder.Append(" --application.options.downloadOutput true "); + commandStringBuilder.Append($" --application.options.downloadOutputOutput {Path.Combine(configuration.Output.Path, run.Key, $"{benchmarkNameToCommand.Key}_{run.Key}.output.log")} "); + commandStringBuilder.Append(" --application.options.downloadBuildLog true "); + commandStringBuilder.Append($" --application.options.downloadBuildLogOutput {Path.Combine(configuration.Output.Path, run.Key, $"{benchmarkNameToCommand.Key}_{run.Key}.build.log")} "); + - commandStringBuilder.Append($" --json {Path.Combine(configuration.Output.Path, run.Key, $"{baseConfiguration.Key}_{run.Key}.json")}"); + commandStringBuilder.Append($" --json {Path.Combine(configuration.Output.Path, run.Key, $"{benchmarkNameToCommand.Key}_{run.Key}.json")}"); return (processName, commandStringBuilder.ToString()); } } diff --git a/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure.Core/Configurations/ASPNetBenchmark.Configuration.cs b/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure.Core/Configurations/ASPNetBenchmark.Configuration.cs index f30e3ff4b04..2cb86f3acc4 100644 --- a/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure.Core/Configurations/ASPNetBenchmark.Configuration.cs +++ b/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure.Core/Configurations/ASPNetBenchmark.Configuration.cs @@ -2,26 +2,30 @@ { public sealed class ASPNetBenchmarksConfiguration : ConfigurationBase { - public Dictionary Runs { get; set; } - public Environment Environment { get; set; } - public BenchmarkSettings benchmark_settings { get; set; } - public Output Output { get; set; } + public Dictionary? Runs { get; set; } + public Environment? Environment { get; set; } + public BenchmarkSettings? benchmark_settings { get; set; } + public Output? Output { get; set; } } public sealed class Run : RunBase { public string? corerun { get; set; } + public string? framework_version { get; set; } } public class Environment { public Dictionary environment_variables { get; set; } = new(); public uint default_max_seconds { get; set; } = 300; + public string framework_version { get; set; } = "net8.0"; } public class BenchmarkSettings { - public string benchmark_file { get; set; } + public string? 
benchmark_file { get; set; } + public string additional_arguments { get; set; } = ""; + public List benchmarkFilters { get; set; } = new(); } public class Output : OutputBase { } @@ -32,16 +36,26 @@ public static ASPNetBenchmarksConfiguration Parse(string path) // Preconditions. if (string.IsNullOrEmpty(path) || !File.Exists(path)) { - throw new ArgumentNullException($"ASPNetBenchmarksConfigurationParser: {nameof(path)} is null/empty or doesn't exist. You must specify a valid path."); + throw new ArgumentNullException($"{nameof(ASPNetBenchmarksConfigurationParser)}: {nameof(path)} is null/empty or doesn't exist. You must specify a valid path."); } string serializedConfiguration = File.ReadAllText(path); - ASPNetBenchmarksConfiguration configuration = Common.Deserializer.Deserialize(serializedConfiguration); + + ASPNetBenchmarksConfiguration? configuration = null; + try + { + configuration = Common.Deserializer.Deserialize(serializedConfiguration); + } + + catch (Exception ex) + { + throw new ArgumentException($"{nameof(ASPNetBenchmarksConfiguration)}: Unable to parse the yaml file because of an error in the syntax. Please use the configurations under: Configuration/GCPerfSim/*.yaml as an example to ensure the file is formatted correctly. Exception: {ex.Message} \n Call Stack: {ex.StackTrace}"); + } // Checks if mandatory arguments are specified in the configuration. if (configuration == null) { - throw new ArgumentNullException($"ASPNetBnechmarksConfigurationParser: {nameof(configuration)} is null. Check the syntax of the configuration."); + throw new ArgumentNullException($"{nameof(ASPNetBenchmarksConfigurationParser)}: {nameof(configuration)} is null. Check the syntax of the configuration."); } return configuration; diff --git a/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure/Commands/ASPNetBenchmarks/AspNetBenchmarksAnalyzeCommand.cs b/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure/Commands/ASPNetBenchmarks/AspNetBenchmarksAnalyzeCommand.cs index 4bf8cd7c5b6..59caa5ad188 100644 --- a/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure/Commands/ASPNetBenchmarks/AspNetBenchmarksAnalyzeCommand.cs +++ b/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure/Commands/ASPNetBenchmarks/AspNetBenchmarksAnalyzeCommand.cs @@ -21,6 +21,9 @@ public sealed class AspNetBenchmarkAnalyzeSettings : CommandSettings public override int Execute([NotNull] CommandContext context, [NotNull] AspNetBenchmarkAnalyzeSettings settings) { + AnsiConsole.Write(new Rule("ASP.NET Benchmarks Analyzer")); + AnsiConsole.WriteLine(); + ConfigurationChecker.VerifyFile(settings.ConfigurationPath, nameof(AspNetBenchmarksCommand)); ASPNetBenchmarksConfiguration configuration = ASPNetBenchmarksConfigurationParser.Parse(settings.ConfigurationPath); // Parse the CSV file for the information. @@ -38,7 +41,17 @@ public override int Execute([NotNull] CommandContext context, [NotNull] AspNetBe configurationToCommand[line[0]] = line[1]; } - Dictionary> results = ExecuteAnalysis(configuration, configurationToCommand, new()); + Dictionary> result = ExecuteAnalysis(configuration, configurationToCommand, new()); + if (result.Count == 0) + { + AnsiConsole.MarkupLine($"[bold green] No report generated since there were no results to compare. [/]"); + } + + else + { + AnsiConsole.MarkupLine($"[bold green] Report generated at: {Path.Combine(configuration.Output.Path, "Results.md")} [/]"); + } + return 0; } @@ -47,6 +60,13 @@ public static Dictionary> ExecuteAnalysis(ASPNetBench // Benchmark to Run to Path.
Dictionary> benchmarkToRunToPaths = new(); + bool singleRun = configuration.Runs.Count == 1; + // Don't generate a report in the case of a single run. + if (singleRun) + { + return new(); + } + // For each Run, grab the paths of each of the benchmarks. string outputPath = configuration.Output.Path; foreach (var c in configuration.Runs) @@ -64,7 +84,6 @@ public static Dictionary> ExecuteAnalysis(ASPNetBench } } - // Launch new process. Dictionary benchmarkToComparisons = new(); Dictionary> metricResults = new(); @@ -102,6 +121,7 @@ public static Dictionary> ExecuteAnalysis(ASPNetBench using (StreamWriter sw = new StreamWriter(Path.Combine(configuration.Output.Path, "Results.md"))) { + // Ignore the summary section in case there is only one run. sw.WriteLine("# Summary"); var topLevelSummarySet = new HashSet(new List { "Working Set (MB)", "Private Memory (MB)", "Requests/sec", "Mean Latency (MSec)", "Latency 50th (MSec)", "Latency 75th (MSec)", "Latency 90th (MSec)", "Latency 99th (MSec)" }); @@ -142,7 +162,7 @@ public static Dictionary> ExecuteAnalysis(ASPNetBench foreach (var benchmark in benchmarkToComparisons) { - sw.WriteLine($"- [{benchmark.Key}](##{benchmark.Key})"); + sw.WriteLine($"- [{benchmark.Key}](#{benchmark.Key.ToLower().Replace(" ", "-")})"); } sw.WriteLine(); diff --git a/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure/Commands/ASPNetBenchmarks/AspNetBenchmarksCommand.cs b/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure/Commands/ASPNetBenchmarks/AspNetBenchmarksCommand.cs index 301d5e73e81..3aedc2168de 100644 --- a/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure/Commands/ASPNetBenchmarks/AspNetBenchmarksCommand.cs +++ b/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure/Commands/ASPNetBenchmarks/AspNetBenchmarksCommand.cs @@ -8,6 +8,7 @@ using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Text; +using System.Text.RegularExpressions; namespace GC.Infrastructure.Commands.ASPNetBenchmarks { @@ -31,12 +32,12 @@ public sealed class AspNetBenchmarkSettings : CommandSettings { [Description("Path to Configuration.")] [CommandOption("-c|--configuration")] - public string? ConfigurationPath { get; init; } + public required string ConfigurationPath { get; init; } } public override int Execute([NotNull] CommandContext context, [NotNull] AspNetBenchmarkSettings settings) { - AnsiConsole.Write(new Rule("ASPNet Benchmarks Orchestrator")); + AnsiConsole.Write(new Rule("ASP.NET Benchmarks Orchestrator")); AnsiConsole.WriteLine(); ConfigurationChecker.VerifyFile(settings.ConfigurationPath, nameof(AspNetBenchmarksCommand)); @@ -53,7 +54,7 @@ public static AspNetBenchmarkResults RunASPNetBenchmarks(ASPNetBenchmarksConfigu // Parse the CSV file for the information.
string[] lines = File.ReadAllLines(configuration.benchmark_settings.benchmark_file); - Dictionary configurationToCommand = new(StringComparer.OrdinalIgnoreCase); + Dictionary benchmarkNameToCommand = new(StringComparer.OrdinalIgnoreCase); for (int lineIdx = 0; lineIdx < lines.Length; lineIdx++) { @@ -64,15 +65,54 @@ public static AspNetBenchmarkResults RunASPNetBenchmarks(ASPNetBenchmarksConfigu string[] line = lines[lineIdx].Split(',', StringSplitOptions.TrimEntries); Debug.Assert(line.Length == 2); - configurationToCommand[line[0]] = line[1]; + + string benchmarkName = line[0]; + string benchmarkCommands = line[1]; + + benchmarkNameToCommand[benchmarkName] = benchmarkCommands; + } + + List> benchmarkToNameCommandAsKvpList = new(); + bool noBenchmarkFilters = + (configuration.benchmark_settings.benchmarkFilters == null || configuration.benchmark_settings.benchmarkFilters.Count == 0); + + // If the user has specified benchmark filters, retrieve them in that order. + if (!noBenchmarkFilters) + { + foreach (var filter in configuration.benchmark_settings.benchmarkFilters!) + { + foreach (var kvp in benchmarkNameToCommand) + { + // Check if we simply end with a "*", if so, match. + if (filter.EndsWith("*") && kvp.Key.StartsWith(filter.Replace("*", ""))) + { + benchmarkToNameCommandAsKvpList.Add(new KeyValuePair(kvp.Key, kvp.Value)); + } + + // Regular Regex check. + else if (Regex.IsMatch(kvp.Key, $"^{filter}$")) + { + benchmarkToNameCommandAsKvpList.Add(new KeyValuePair(kvp.Key, kvp.Value)); + } + } + } + } + + // Else, add all the benchmarks. + else + { + foreach (var kvp in benchmarkNameToCommand) + { + benchmarkToNameCommandAsKvpList.Add(new KeyValuePair(kvp.Key, kvp.Value)); + } } - foreach (var c in configurationToCommand) + // For each benchmark, iterate over all specified runs. + foreach (var c in benchmarkToNameCommandAsKvpList) { foreach (var run in configuration.Runs) { OS os = !c.Key.Contains("Win") ? OS.Linux : OS.Windows; - // Build Commandline. (string, string) commandLine = ASPNetBenchmarksCommandBuilder.Build(configuration, run, c, os); string outputPath = Path.Combine(configuration.Output.Path, run.Key); @@ -81,7 +121,14 @@ public static AspNetBenchmarkResults RunASPNetBenchmarks(ASPNetBenchmarksConfigu Directory.CreateDirectory(outputPath); } - // Launch new process. + // There are 3 main ASP.NET errors: + // 1. The server is unavailable - this could be because you aren't connected to CorpNet or the machine is down. + // 2. The crank commands are incorrect. + // 3. Test fails because of a test error. + + // Launch new crank process. 
+ int exitCode = -1; + string logfileOutput = Path.Combine(outputPath, $"{GetKey(c.Key, run.Key)}.log"); StringBuilder output = new(); StringBuilder error = new(); @@ -94,32 +141,42 @@ public static AspNetBenchmarkResults RunASPNetBenchmarks(ASPNetBenchmarksConfigu crankProcess.StartInfo.RedirectStandardOutput = true; crankProcess.StartInfo.CreateNoWindow = true; - AnsiConsole.MarkupLine($"[green bold] ({DateTime.Now}) Running ASPNetBenchmark for Configuration {configuration.Name} {run.Key} {c.Key} [/]"); + AnsiConsole.MarkupLine($"[green bold] ({DateTime.Now}) Running ASP.NET Benchmark for Configuration {configuration.Name} {run.Key} {c.Key} [/]"); crankProcess.OutputDataReceived += (s, d) => { - output.AppendLine(d.Data); + output.AppendLine(d?.Data); }; crankProcess.ErrorDataReceived += (s, d) => { - error.Append(d.Data); + error.AppendLine(d?.Data); }; crankProcess.Start(); crankProcess.BeginOutputReadLine(); crankProcess.BeginErrorReadLine(); - bool exited = crankProcess.WaitForExit((int)configuration.Environment.default_max_seconds * 1000); - } + bool exited = crankProcess.WaitForExit((int)configuration.Environment!.default_max_seconds * 1000); - int exitCode = -1; + // If the process still hasn't exited, it has timed out on the crank side and this benchmark will need to be rerun. + if (!crankProcess.HasExited) + { + AnsiConsole.MarkupLine($"[red bold] ASP.NET Benchmark timed out for: {configuration.Name} {run.Key} {c.Key} - skipping the results but writing stdout and stderr to {logfileOutput} [/]"); + File.WriteAllText(logfileOutput, "Output: \n" + output.ToString() + "\n Errors: \n" + error.ToString()); + continue; + } + + exitCode = crankProcess.ExitCode; + } string outputFile = Path.Combine(configuration.Output.Path, run.Key, $"{c.Key}_{run.Key}.json"); + string outputDetails = output.ToString(); + if (File.Exists(outputFile)) { string[] outputLines = File.ReadAllLines(outputFile); - // In a quick and dirty way check the returnCode from the file. + // In a quick and dirty way, check the returnCode from the file that'll tell us if the test failed. foreach (var o in outputLines) { if (o.Contains("returnCode")) @@ -132,27 +189,31 @@ public static AspNetBenchmarkResults RunASPNetBenchmarks(ASPNetBenchmarksConfigu } } - string outputDetails = output.ToString(); - File.WriteAllText(Path.Combine(outputPath, $"{GetKey(c.Key, run.Key)}.log"), "Output: \n" + outputDetails + "\n Errors: \n" + error.ToString()); + else + { + // If the output file doesn't exist, there was an issue connecting to the ASP.NET machines, i.e., error number 1. + // This case also applies to incorrect crank arguments, i.e., error number 2. + // Move the standard out to the standard error as the process failed. + error.AppendLine(outputDetails); + } if (exitCode != 0) { - StringBuilder errorLines = new(); - - errorLines.AppendLine(error.ToString()); string[] outputLines = outputDetails.Split("\n"); foreach (var o in outputLines) { // Crank provides the standard error from the test itself by this mechanism. + // Error #3: Issues with test run.
if (o.StartsWith("[STDERR]")) { - errorLines.AppendLine(o); + error.AppendLine(o.Replace("[STDERR]", "")); } } - AnsiConsole.Markup($"[red bold] Failed with the following errors:\n {Markup.Escape(errorLines.ToString())} [/]"); + AnsiConsole.Markup($"[red bold] Failed with the following errors:\n {Markup.Escape(error.ToString())} Check the log file for more information: {logfileOutput} \n[/]"); } + File.WriteAllText(logfileOutput, "Output: \n" + outputDetails + "\n Errors: \n" + error.ToString()); executionDetails[GetKey(c.Key, run.Key)] = new ProcessExecutionDetails(key: GetKey(c.Key, run.Key), commandlineArgs: commandLine.Item1 + " " + commandLine.Item2, environmentVariables: new(), @@ -162,7 +223,7 @@ public static AspNetBenchmarkResults RunASPNetBenchmarks(ASPNetBenchmarksConfigu } } - Dictionary> results = AspNetBenchmarksAnalyzeCommand.ExecuteAnalysis(configuration, configurationToCommand, executionDetails); + Dictionary> results = AspNetBenchmarksAnalyzeCommand.ExecuteAnalysis(configuration, benchmarkNameToCommand, executionDetails); return new AspNetBenchmarkResults(executionDetails, results); } } diff --git a/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure/Commands/RunCommand/CreateSuiteCommand.cs b/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure/Commands/RunCommand/CreateSuiteCommand.cs index 0f534275228..c565469d805 100644 --- a/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure/Commands/RunCommand/CreateSuiteCommand.cs +++ b/src/benchmarks/gc/GC.Infrastructure/GC.Infrastructure/Commands/RunCommand/CreateSuiteCommand.cs @@ -119,7 +119,7 @@ internal static string CreateASPNetBenchmarkSuite(InputConfiguration inputConfig { configuration.Runs.Add(r.Key, new Core.Configurations.ASPNetBenchmarks.Run { - corerun = r.Value.Path, + corerun = Directory.GetParent(r.Value.Path).FullName, environment_variables = r.Value.environment_variables, }); } diff --git a/src/benchmarks/gc/GC.Infrastructure/Notebooks/ASPNetBenchmarkAnalysis.ipynb b/src/benchmarks/gc/GC.Infrastructure/Notebooks/ASPNetBenchmarkAnalysis.ipynb new file mode 100644 index 00000000000..5e554d83d75 --- /dev/null +++ b/src/benchmarks/gc/GC.Infrastructure/Notebooks/ASPNetBenchmarkAnalysis.ipynb @@ -0,0 +1,1745 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# ASP.NET Benchmark Analysis \n", + "\n", + "This notebook highlights the steps associated with analyzing data from the ASP.NET benchmarks obtained using crank." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "#r \"nuget: Microsoft.Diagnostics.Tracing.TraceEvent, 3.0.1\"\n", + "#r \"nuget: YamlDotnet\"\n", + "#r \"nuget: XPlot.Plotly\"\n", + "#r \"nuget: XPlot.Plotly.Interactive\"\n", + "#r \"nuget: Microsoft.Data.Analysis, 0.19.1\"\n", + "#r \"nuget: Newtonsoft.Json\"\n", + "#r \"nuget: Microsoft.Playwright, 1.16.0\"\n", + "\n", + "using Etlx = Microsoft.Diagnostics.Tracing.Etlx;\n", + "using Microsoft.Data.Analysis;\n", + "using Microsoft.Diagnostics.Tracing.Analysis.GC;\n", + "using Microsoft.Diagnostics.Tracing.Analysis;\n", + "using Microsoft.Diagnostics.Tracing.Parsers.Clr;\n", + "using Microsoft.Diagnostics.Tracing;\n", + "using System.Diagnostics;\n", + "using XPlot.Plotly;\n", + "\n", + "using System.IO;\n", + "using Newtonsoft.Json;" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Building and Using The GC Analysis API" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "pwsh" + }, + "polyglot_notebook": { + "kernelName": "pwsh" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "dotnet build -c Release \"..\\GC.Analysis.API\"" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "// TODO: Ensure you are pointing to the right artifacts folder.\n", + "#r \"..\\..\\..\\..\\..\\artifacts\\bin\\GC.Analysis.API\\Release\\net6.0\\GC.Analysis.API.dll\"\n", + "\n", + "using GC.Analysis.API;" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Data Acquisition\n", + "\n", + "The next few cells detail how to retrieve the data from a base path. The run name below is the name of the folder generated from running the ``aspnetbenchmarks`` command from the GC.Infrastructure API. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "// The LoadInfo class consists of all the pertinent fields needed to represent both the result from a particular benchmark\n", + "// as well as the the comparison between two runs where the Data2 represents the GCProcessData of the comparand.\n", + "public sealed class LoadInfo\n", + "{\n", + " public double WorkingSetMB {get;set;} = double.NaN;\n", + " public double PrivateMemoryMB {get;set;} = double.NaN;\n", + " public double Latency50thMS {get; set;} = double.NaN;\n", + " public double Latency75thMS {get; set;} = double.NaN;\n", + " public double Latency90thMS {get; set;} = double.NaN;\n", + " public double Latency99thMS {get; set;} = double.NaN;\n", + " public double MeanLatencyMS {get; set;} = double.NaN;\n", + " public int ProcessId {get;set;}\n", + " public double RequestsPerMSec {get; set;} = double.NaN;\n", + " public string Run {get; set;}\n", + " public GCProcessData Data {get;set;}\n", + " public GCProcessData? 
Data2 {get;set;}\n", + " public string CommandLine {get;set;}\n", + " public double NumberOfHeapCountSwitches {get;set;} = 0;\n", + " public string Benchmark {get; set;}\n", + " public string Id {get; set;}\n", + " public double TotalSuspensionTimeMSec {get;set;} = double.NaN;\n", + " public double PercentPauseTimeInGC {get; set;} = double.NaN;\n", + " public double PercentTimeInGC {get; set;} = double.NaN;\n", + " public double MeanHeapSizeBeforeMB {get; set;} = double.NaN;\n", + " public double MaxHeapSizeMB {get; set;} = double.NaN;\n", + " public double TotalAllocationsMB {get;set;} = double.NaN;\n", + " public double GCScore {get;set;} = double.NaN;\n", + " public string TracePath {get; set;}\n", + " public string ProcessName {get;set;}\n", + "}\n", + "\n", + "public class BenchmarkVolatilityData\n", + "{\n", + " public string Benchmark {get; set;}\n", + " public double WorkingSetMB {get;set;} = double.NaN;\n", + " public double PrivateMemoryMB {get;set;} = double.NaN;\n", + " public double RequestsPerMSec {get;set;} = double.NaN;\n", + " public double MeanLatencyMS {get; set;} = double.NaN;\n", + " public double Latency50thMS {get; set;} = double.NaN;\n", + " public double Latency75thMS {get; set;} = double.NaN;\n", + " public double Latency90thMS {get; set;} = double.NaN;\n", + " public double Latency99thMS {get; set;} = double.NaN;\n", + " public double HeapCount { get; set; }= double.NaN;\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "// The DataManager is responsible for parsing all the data from ASP.NET results from a basepath.\n", + "public class DataManager \n", + "{\n", + " private readonly Dictionary> _benchmarkToRunData = new();\n", + " private readonly Dictionary> _runToBenchmarkData = new();\n", + " private readonly Dictionary _data; \n", + " private readonly string _basePath;\n", + "\n", + " public DataManager(string basePath)\n", + " {\n", + " _basePath = basePath;\n", + " _data = GetLoadInfoFromBasePath(basePath);\n", + " foreach (var d in _data)\n", + " {\n", + " if (!_benchmarkToRunData.TryGetValue(d.Value.Benchmark, out var runData))\n", + " {\n", + " _benchmarkToRunData[d.Value.Benchmark] = runData = new();\n", + " }\n", + " runData[d.Value.Run] = d.Value;\n", + "\n", + " if (!_runToBenchmarkData.TryGetValue(d.Value.Run, out var benchmarkData))\n", + " {\n", + " _runToBenchmarkData[d.Value.Run] = benchmarkData = new();\n", + " }\n", + "\n", + " benchmarkData[d.Value.Benchmark] = d.Value;\n", + " }\n", + " }\n", + "\n", + " public static double DeltaPercent (double baseline, double comparand) => Math.Round((comparand - baseline) / baseline * 100, 2);\n", + "\n", + " public LoadInfo GetComparison(LoadInfo baseline, LoadInfo comparand)\n", + " {\n", + " return new LoadInfo\n", + " {\n", + " WorkingSetMB = DeltaPercent(baseline.WorkingSetMB, comparand.WorkingSetMB),\n", + " PrivateMemoryMB = DeltaPercent(baseline.PrivateMemoryMB, comparand.PrivateMemoryMB),\n", + " Latency50thMS = DeltaPercent(baseline.Latency50thMS, comparand.Latency50thMS),\n", + " Latency75thMS = DeltaPercent(baseline.Latency75thMS, comparand.Latency75thMS),\n", + " Latency90thMS = DeltaPercent(baseline.Latency90thMS, comparand.Latency90thMS), \n", + " Latency99thMS = DeltaPercent(baseline.Latency99thMS, comparand.Latency99thMS), \n", + " MeanLatencyMS = 
DeltaPercent(baseline.MeanLatencyMS, comparand.MeanLatencyMS),\n", + " RequestsPerMSec = DeltaPercent(baseline.RequestsPerMSec, comparand.RequestsPerMSec),\n", + " GCScore = DeltaPercent(baseline.GCScore, comparand.GCScore),\n", + " NumberOfHeapCountSwitches = DeltaPercent(baseline.NumberOfHeapCountSwitches, comparand.NumberOfHeapCountSwitches),\n", + " Data = baseline.Data,\n", + " Data2 = comparand.Data,\n", + " Run = $\"{baseline.Run} vs. {comparand.Run}\",\n", + " Benchmark = baseline.Benchmark,\n", + " Id = $\"{baseline.Run} vs. {comparand.Run} for {baseline.Benchmark}\"\n", + " };\n", + " }\n", + "\n", + " public void SummarizeResults(Dictionary info = null)\n", + " {\n", + " if (info == null)\n", + " {\n", + " info = _data;\n", + " }\n", + "\n", + " Console.WriteLine(\"{0,10} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 10:0.00} | {14, 10:0.00} |\", \n", + " \"run\", \"benchmark\", \"gen0\", \"pause\", \"gen1\", \"pause\", \"ngc2\", \"pause\", \"bgc\", \"pause\", \"allocMB\", \"alloc/gc\", \"pct\", \"peakMB\", \"meanMB\");\n", + " Console.WriteLine(\"{0}\", new String('-', 174));\n", + "\n", + " foreach (var kvp in info)\n", + " {\n", + " List gcs = kvp.Value?.Data?.GCs;\n", + " if (gcs == null || gcs.Count == 0)\n", + " {\n", + " continue;\n", + " }\n", + "\n", + " int[] gc_counts = new int[4];\n", + " double[] gc_pauses = new double[4];\n", + " for (int i = 0; i < gcs.Count; i++)\n", + " {\n", + " TraceGC gc = gcs[i];\n", + " if (gc.Generation < 2)\n", + " {\n", + " gc_counts[gc.Generation]++;\n", + " gc_pauses[gc.Generation] += gc.PauseDurationMSec;\n", + " }\n", + " else\n", + " {\n", + " if (gc.Type == GCType.BackgroundGC)\n", + " {\n", + " gc_counts[3]++;\n", + " gc_pauses[3] += gc.PauseDurationMSec;\n", + " }\n", + " else\n", + " {\n", + " gc_counts[2]++;\n", + " gc_pauses[2] += gc.PauseDurationMSec;\n", + " }\n", + " }\n", + " }\n", + " \n", + " for (int i = 0; i < 4; i++)\n", + " {\n", + " if (gc_counts[i] > 0)\n", + " {\n", + " gc_pauses[i] /= gc_counts[i];\n", + " }\n", + " }\n", + " \n", + " Console.WriteLine(\"{0,10} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 10:0.00} | {14, 10:0.00} |\",\n", + " kvp.Value.Run, kvp.Value.Benchmark, gc_counts[0], gc_pauses[0], gc_counts[1], gc_pauses[1], gc_counts[2], gc_pauses[2], gc_counts[3], gc_pauses[3],\n", + " kvp.Value.Data.Stats.TotalAllocatedMB, (kvp.Value.Data.Stats.TotalAllocatedMB / gcs.Count), kvp.Value.Data.Stats.GetGCPauseTimePercentage(), kvp.Value.Data.Stats.MaxSizePeakMB, kvp.Value.Data.Stats.MeanSizePeakMB);\n", + " }\n", + " }\n", + "\n", + " public Dictionary? 
GetAllBenchmarksForRun(string run)\n", + " {\n", + " if (!_runToBenchmarkData.TryGetValue(run, out var benchmarksForRun))\n", + " {\n", + " Console.WriteLine($\"No benchmarks found for run: {run}\");\n", + " return null;\n", + " }\n", + "\n", + " return benchmarksForRun;\n", + " }\n", + "\n", + " public void SaveBenchmarkData(string outputPath = \"\")\n", + " {\n", + " if (string.IsNullOrEmpty(outputPath))\n", + " {\n", + " outputPath = _basePath;\n", + " }\n", + "\n", + " StringBuilder sb = new();\n", + " sb.AppendLine($\"Run,Benchmark,Working Set (MB), Private Memory (MB), Request/MSec, Mean Latency (MSec), Latency 50th Percentile MSec, Latency 75th Percentile MSec, Latency 90th Percentile MSec, Latency 99th Percentile MSec\");\n", + " foreach (var b in _data)\n", + " {\n", + " var val = b.Value; \n", + " sb.AppendLine($\"{val.Run},{val.Benchmark},{val.WorkingSetMB},{val.PrivateMemoryMB},{val.RequestsPerMSec},{val.MeanLatencyMS},{val.Latency50thMS},{val.Latency75thMS},{val.Latency90thMS},{val.Latency99thMS}\");\n", + " }\n", + "\n", + " File.WriteAllText(Path.Combine(outputPath, \"AllBenchmarks.csv\"), sb.ToString());\n", + " }\n", + "\n", + " public Dictionary? GetAllRunsForBenchmark(string benchmark)\n", + " {\n", + " if (!_benchmarkToRunData.TryGetValue(benchmark, out var runsForBenchmark))\n", + " {\n", + " Console.WriteLine($\"No runs found for benchmark: {benchmark}\");\n", + " return null;\n", + " }\n", + "\n", + " return runsForBenchmark;\n", + " }\n", + "\n", + " public LoadInfo? GetBenchmarkData(string benchmark, string run)\n", + " {\n", + " if (!_benchmarkToRunData.TryGetValue(benchmark, out var runData))\n", + " {\n", + " Console.WriteLine($\"Benchmark: {benchmark} not found!\");\n", + " return null;\n", + " }\n", + "\n", + " if (!runData.TryGetValue(run, out var loadInfo))\n", + " {\n", + " Console.WriteLine($\"Run: {run} not found!\");\n", + " return null;\n", + " }\n", + "\n", + " return loadInfo;\n", + " }\n", + "\n", + " public Dictionary Data => _data; \n", + "\n", + " private Dictionary GetLoadInfoFromBasePath(string basePath)\n", + " {\n", + " Dictionary flatLoadMap = new();\n", + " var files = Directory.GetFiles(basePath, \"*.log\", SearchOption.AllDirectories);\n", + "\n", + " foreach (var f in files)\n", + " {\n", + " if (f.Contains(\"build.log\") || f.Contains(\"output.log\") || f.Contains(\"_GCLog\"))\n", + " {\n", + " continue;\n", + " }\n", + "\n", + " LoadInfo info = new();\n", + "\n", + " string[] lines = File.ReadAllLines(f);\n", + " int idxOfApplication = Int32.MaxValue;\n", + " int idxOfLoad = Int32.MaxValue;\n", + " int idx = 0;\n", + "\n", + " foreach (var line in lines)\n", + " {\n", + " string[] sp = line.Split(\"|\", StringSplitOptions.TrimEntries);\n", + " if (line.Contains(\"| application\"))\n", + " {\n", + " idxOfApplication = idx;\n", + " }\n", + "\n", + " else if (line.Contains(\"| load\"))\n", + " {\n", + " idxOfLoad = idx;\n", + " }\n", + "\n", + " else if (line.Contains(\"| Latency 50th\"))\n", + " {\n", + " info.Latency50thMS = double.Parse(sp[2]);\n", + " }\n", + "\n", + " else if (line.Contains(\"| Latency 75th\"))\n", + " {\n", + " info.Latency75thMS = double.Parse(sp[2]);\n", + " }\n", + "\n", + " else if (line.Contains(\"| Latency 90th\"))\n", + " {\n", + " info.Latency90thMS = double.Parse(sp[2]);\n", + " }\n", + "\n", + " else if (line.Contains(\"| Latency 99th\"))\n", + " {\n", + " info.Latency99thMS = double.Parse(sp[2]);\n", + " }\n", + "\n", + " else if (line.Contains(\"Requests/sec\"))\n", + " {\n", + " info.RequestsPerMSec = 
double.Parse(sp[2]) / 1000;\n", + " }\n", + "\n", + " else if (line.Contains(\"Mean latency\"))\n", + " {\n", + " info.MeanLatencyMS = double.Parse(sp[2]);\n", + " }\n", + "\n", + " else if (line.Contains(\"Private Memory\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", + " {\n", + " info.PrivateMemoryMB = double.Parse(sp[2]);\n", + " }\n", + "\n", + " else if (line.Contains(\"Working Set\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", + " {\n", + " info.WorkingSetMB = double.Parse(sp[2]);\n", + " }\n", + "\n", + " ++idx;\n", + " }\n", + "\n", + " string[] split = f.Replace(\".5\", \"5\").Split(\".\");\n", + " string run = split[1];\n", + " string benchmark = Path.GetFileName( split[0] ).Replace(\"_Windows\", \"\").Replace(\"_Linux\", \"\").Replace(\".gc\", \"\").Replace(\".nettrace\", \"\");\n", + "\n", + " string key = $\"{run} | {benchmark}\";\n", + " info.Benchmark = benchmark;\n", + " info.Run = run;\n", + " info.Id = key;\n", + "\n", + " flatLoadMap[key] = info;\n", + " }\n", + "\n", + " var traceFiles = Directory.GetFiles(basePath, \"*.etl.zip\", SearchOption.AllDirectories).ToList();\n", + " var nettraceFiles = Directory.GetFiles(basePath, \"*.nettrace\", SearchOption.AllDirectories);\n", + " traceFiles.AddRange(nettraceFiles);\n", + "\n", + " HashSet pertinentProcesses = new HashSet\n", + " {\n", + " \"PlatformBenchmarks\",\n", + " \"Benchmarks\",\n", + " \"MapAction\",\n", + " \"TodosApi\",\n", + " \"BasicGrpc\",\n", + " \"BasicMinimalApi\",\n", + " };\n", + "\n", + " Parallel.ForEach(traceFiles, (t) => {\n", + " string[] sp = t.Split(\"\\\\\");\n", + " string benchmark = Path.GetFileNameWithoutExtension(sp[sp.Length - 1]).Replace(\"_Windows\", \"\").Replace(\".gc.etl\", \"\").Replace(\"_Linux\", \"\").Replace(\".nettrace\", \"\").Replace(\".gc\", \"\");\n", + " string name = sp[sp.Length - 2].Replace(\".5\", \"5\");\n", + " string key = $\"{name} | {benchmark}\";\n", + "\n", + " Analyzer analyzer = AnalyzerManager.GetAnalyzer(t);\n", + " GCProcessData? 
data = null;\n", + "\n", + " if (t.Contains(\".nettrace\"))\n", + " {\n", + " data = analyzer.AllGCProcessData.First().Value.First();\n", + " }\n", + "\n", + " else\n", + " {\n", + " foreach (var p in pertinentProcesses)\n", + " {\n", + " data = analyzer.GetProcessGCData(p).FirstOrDefault();\n", + " if (data != null)\n", + " {\n", + " break;\n", + " }\n", + " }\n", + " }\n", + "\n", + " if (data == null)\n", + " {\n", + " Console.WriteLine($\"The following key doesn't have the pertinent process {key} - {t}: {string.Join(\" , \", analyzer.TraceLog.Processes.Select(p => p.Name))}\");\n", + " }\n", + "\n", + " else\n", + " {\n", + " lock (flatLoadMap)\n", + " {\n", + " if (flatLoadMap.TryGetValue(key, out var f))\n", + " {\n", + " f.MeanHeapSizeBeforeMB = data.Stats.MeanSizePeakMB;\n", + " f.MaxHeapSizeMB = data.Stats.MaxSizePeakMB;\n", + " f.PercentTimeInGC = (data.GCs.Sum(gc => gc.PauseDurationMSec - gc.SuspendDurationMSec) / (data.Stats.ProcessDuration) ) * 100;\n", + " f.TracePath = data.Parent.TraceLogPath;\n", + " f.TotalAllocationsMB = data.Stats.TotalAllocatedMB;\n", + " f.CommandLine = data.CommandLine;\n", + " f.PercentPauseTimeInGC = data.Stats.GetGCPauseTimePercentage();\n", + " f.GCScore = (f.MaxHeapSizeMB / f.PercentPauseTimeInGC);\n", + " f.ProcessId = data.ProcessID;\n", + " f.Data = data;\n", + " f.ProcessName = data.ProcessName;\n", + " f.TotalSuspensionTimeMSec = data.GCs.Sum(gc => gc.SuspendDurationMSec);\n", + "\n", + " for (int i = 0; i < data.GCs.Count - 1; i++)\n", + " {\n", + " if ( data.GCs[i].GlobalHeapHistory?.NumHeaps != data.GCs[i + 1].GlobalHeapHistory?.NumHeaps)\n", + " {\n", + " ++f.NumberOfHeapCountSwitches;\n", + " }\n", + " }\n", + "\n", + " }\n", + "\n", + " else\n", + " {\n", + " Console.WriteLine($\"{key} not found - Check if the trace has any elements: {t}\");\n", + " }\n", + " }\n", + " }\n", + " });\n", + "\n", + " return flatLoadMap;\n", + " }\n", + "\n", + " public Dictionary GetBenchmarkToComparison(string baselineRun, string comparandRun)\n", + " {\n", + " Dictionary comparisons = new();\n", + "\n", + " Dictionary baselineData = new();\n", + " Dictionary comparandData = new();\n", + " HashSet allBenchmarks = new();\n", + "\n", + " foreach (var d in _data)\n", + " {\n", + " allBenchmarks.Add(d.Value.Benchmark);\n", + "\n", + " string run = d.Key.Split(\"|\", StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)[0];\n", + "\n", + " if (string.CompareOrdinal(run, baselineRun) == 0 && !baselineData.TryGetValue(d.Key, out var baselineInfo))\n", + " {\n", + " baselineInfo = baselineData[d.Value.Benchmark] = d.Value;\n", + " }\n", + "\n", + " else if (string.CompareOrdinal(run, comparandRun) == 0 && !comparandData.TryGetValue(d.Key, out var comparandInfo))\n", + " {\n", + " comparandInfo = comparandData[d.Value.Benchmark] = d.Value;\n", + " }\n", + " }\n", + "\n", + " foreach (var benchmark in allBenchmarks)\n", + " {\n", + " if (!baselineData.TryGetValue(benchmark, out var baselineBenchmarkInfo))\n", + " {\n", + " Console.WriteLine($\"Benchmark: {benchmark} not found on the baseline: {baselineRun}\");\n", + " continue;\n", + " }\n", + "\n", + " if (!comparandData.TryGetValue(benchmark, out var comparandBenchmarkInfo))\n", + " {\n", + " Console.WriteLine($\"Benchmark: {benchmark} not found on the comparand: {comparandRun}\");\n", + " continue;\n", + " }\n", + "\n", + " LoadInfo comparison = GetComparison(baselineBenchmarkInfo, comparandBenchmarkInfo);\n", + " comparisons[benchmark] = comparison;\n", + " }\n", + " \n", + " return 
comparisons;\n", + " }\n", + "\n", + " private static double ComputeVolatility(List data)\n", + " {\n", + " var max = data.Max();\n", + " var min = data.Min();\n", + " return Math.Round(((max - min) / min) * 100, 2);\n", + " }\n", + "\n", + " public void SaveVolatilityData(List namesOfBuilds, List sortingCriteria = null)\n", + " {\n", + " // Build Parent -> < Build Name -> < Benchmark -> Data >>>\n", + " Dictionary>> listOfData = new();\n", + "\n", + " foreach (var build in namesOfBuilds)\n", + " {\n", + " if (!listOfData.TryGetValue(build, out var b))\n", + " {\n", + " listOfData[build] = b = new();\n", + " }\n", + "\n", + " foreach (var run in _runToBenchmarkData)\n", + " {\n", + " if (run.Key.Contains(build))\n", + " {\n", + " b.Add(run.Key, run.Value);\n", + " }\n", + " }\n", + " }\n", + "\n", + " // At this point all the data has been categorized.\n", + " Dictionary> buildToBenchmarkVolatilityData = new();\n", + "\n", + " // Get the Volatility Data Per Build.\n", + " foreach (var b in listOfData)\n", + " {\n", + " if (!buildToBenchmarkVolatilityData.TryGetValue(b.Key, out var volData))\n", + " {\n", + " buildToBenchmarkVolatilityData[b.Key] = volData = new();\n", + " }\n", + "\n", + " foreach (var br in _benchmarkToRunData)\n", + " {\n", + " volData[br.Key] = new();\n", + " }\n", + "\n", + " Dictionary> benchmarkToData = new();\n", + " foreach (var run in b.Value)\n", + " {\n", + " foreach (var benchmark in run.Value)\n", + " {\n", + " if (!benchmarkToData.TryGetValue(benchmark.Key, out var d))\n", + " {\n", + " benchmarkToData[benchmark.Key] = d = new();\n", + " }\n", + "\n", + " d.Add(benchmark.Value);\n", + " }\n", + " }\n", + "\n", + " foreach (var benchmark in benchmarkToData)\n", + " {\n", + " volData[benchmark.Key] = new BenchmarkVolatilityData\n", + " {\n", + " WorkingSetMB = ComputeVolatility( benchmark.Value.Select(v => v.WorkingSetMB) ),\n", + " PrivateMemoryMB = ComputeVolatility( benchmark.Value.Select(v => v.PrivateMemoryMB) ),\n", + " RequestsPerMSec = ComputeVolatility( benchmark.Value.Select(v => v.RequestsPerMSec) ),\n", + " MeanLatencyMS = ComputeVolatility( benchmark.Value.Select(v => v.MeanLatencyMS) ),\n", + " Latency50thMS = ComputeVolatility( benchmark.Value.Select(v => v.Latency50thMS) ),\n", + " Latency75thMS = ComputeVolatility( benchmark.Value.Select(v => v.Latency75thMS) ),\n", + " Latency90thMS = ComputeVolatility( benchmark.Value.Select(v => v.Latency90thMS) ),\n", + " Latency99thMS = ComputeVolatility( benchmark.Value.Select(v => v.Latency99thMS) ),\n", + " HeapCount = ComputeVolatility( benchmark.Value.Select(v => (double)v.NumberOfHeapCountSwitches )),\n", + " Benchmark = benchmark.Key,\n", + " };\n", + " }\n", + " }\n", + " \n", + " Dictionary> sortedPerBuildVolatility = new();\n", + "\n", + " string DisplayDetailsForABenchmark(BenchmarkVolatilityData val) =>\n", + " $\"{val.Benchmark},{val.WorkingSetMB},{val.PrivateMemoryMB},{val.RequestsPerMSec},{val.MeanLatencyMS},{val.Latency50thMS},{val.Latency75thMS},{val.Latency90thMS},{val.Latency99thMS},{val.HeapCount}\";\n", + " if (sortingCriteria == null)\n", + " {\n", + " sortingCriteria = new() { nameof(LoadInfo.PrivateMemoryMB) };\n", + " }\n", + "\n", + " foreach (var s in sortingCriteria)\n", + " {\n", + " Func, double> sortingFunctor = null;\n", + " Func selectionFunctor = null;\n", + "\n", + " switch (s)\n", + " {\n", + " case nameof(BenchmarkVolatilityData.WorkingSetMB):\n", + " sortingFunctor = (data) => data.Value.WorkingSetMB;\n", + " selectionFunctor = (data) => data.WorkingSetMB;\n", + " 
break;\n", + " case nameof(BenchmarkVolatilityData.PrivateMemoryMB):\n", + " sortingFunctor = (data) => data.Value.PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.PrivateMemoryMB;\n", + " break;\n", + " case nameof(BenchmarkVolatilityData.RequestsPerMSec):\n", + " sortingFunctor = (data) => data.Value.RequestsPerMSec;\n", + " selectionFunctor = (data) => data.RequestsPerMSec;\n", + " break;\n", + " case nameof(BenchmarkVolatilityData.MeanLatencyMS):\n", + " sortingFunctor = (data) => data.Value.MeanLatencyMS;\n", + " selectionFunctor = (data) => data.MeanLatencyMS;\n", + " break;\n", + " case nameof(BenchmarkVolatilityData.Latency50thMS):\n", + " sortingFunctor = (data) => data.Value.Latency50thMS;\n", + " selectionFunctor = (data) => data.Latency50thMS;\n", + " break;\n", + " case nameof(BenchmarkVolatilityData.Latency75thMS):\n", + " sortingFunctor = (data) => data.Value.Latency75thMS;\n", + " selectionFunctor = (data) => data.Latency75thMS;\n", + " break;\n", + " case nameof(BenchmarkVolatilityData.Latency90thMS):\n", + " sortingFunctor = (data) => data.Value.Latency90thMS;\n", + " selectionFunctor = (data) => data.Latency90thMS;\n", + " break;\n", + " case nameof(BenchmarkVolatilityData.Latency99thMS):\n", + " sortingFunctor = (data) => data.Value.Latency99thMS;\n", + " selectionFunctor = (data) => data.Latency99thMS;\n", + " break;\n", + " default:\n", + " sortingFunctor = (data) => data.Value.PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.PrivateMemoryMB;\n", + " break;\n", + " }\n", + "\n", + " foreach (var b in buildToBenchmarkVolatilityData)\n", + " {\n", + " sortedPerBuildVolatility[b.Key] = b.Value.OrderByDescending(sortingFunctor).Select(k => k.Value).ToList();\n", + " }\n", + "\n", + " // Create CSV.\n", + " StringBuilder top = new();\n", + "\n", + " // Iterate over each of the runs.\n", + " const int singleBuildColumnSize = 10;\n", + " int numberOfBuilds = buildToBenchmarkVolatilityData.Count;\n", + " string columnHeader = \"Benchmark Name,WorkingSetMB,PrivateMemoryMB,RequestsPerMSec,MeanLatencyMS,Latency50thMS,Latency75thMS,Latency90thMS,Latency99thMS,HeapCount\";\n", + "\n", + " // Assumption: the same benchmarks are present for all runs.\n", + " int totalCountOfBenchmarks = buildToBenchmarkVolatilityData.First().Value.Count;\n", + "\n", + " string first = string.Join(\",\", namesOfBuilds.Select(build => build + string.Join(\"\", Enumerable.Repeat(\",\", singleBuildColumnSize))));\n", + " string second = string.Join(\",,\", Enumerable.Repeat(columnHeader, numberOfBuilds));\n", + "\n", + " top.AppendLine(first);\n", + " top.AppendLine(second);\n", + "\n", + " for (int benchmarkIdx = 0; benchmarkIdx < totalCountOfBenchmarks; benchmarkIdx++)\n", + " {\n", + " top.AppendLine(string.Join(\",,\", namesOfBuilds.Select(buildName => DisplayDetailsForABenchmark(sortedPerBuildVolatility[buildName][benchmarkIdx]))));\n", + " }\n", + "\n", + " File.WriteAllText(Path.Combine(_basePath, $\"Volatility_{s}.csv\"), top.ToString());\n", + "\n", + " // Chart the sorted % Vol Results.\n", + "\n", + " List scatters = new();\n", + "\n", + " var layout = new Layout.Layout\n", + " {\n", + " xaxis = new Xaxis { title = \"Benchmark Name\" },\n", + " yaxis = new Yaxis { title = \"Metric Volatility Score\" },\n", + " width = 1500,\n", + " title = $\"Volatility Scores Sorted by {s} for {sortedPerBuildVolatility.First().Key}\"\n", + " };\n", + "\n", + " foreach (var b in sortedPerBuildVolatility)\n", + " {\n", + " var scatter = new Scatter\n", + " {\n", + " x = b.Value.Select(s => 
s.Benchmark),\n", + " y = b.Value.Select(selectionFunctor),\n", + " mode = \"markers\",\n", + " name = b.Key,\n", + " };\n", + "\n", + " scatters.Add(scatter);\n", + " }\n", + "\n", + " Chart.Plot(scatters, layout).Display();\n", + "\n", + " scatters.Clear();\n", + " layout = new Layout.Layout\n", + " {\n", + " xaxis = new Xaxis { title = \"Volatility Index\" },\n", + " yaxis = new Yaxis { title = \"Metric Volatility Score\" },\n", + " width = 1500,\n", + " title = $\"Volatility Index Sorted by {s} for {sortedPerBuildVolatility.First().Key}\"\n", + " };\n", + "\n", + " foreach (var b in sortedPerBuildVolatility)\n", + " {\n", + " var sorted = b.Value.OrderByDescending(selectionFunctor);\n", + " var scatter = new Scatter\n", + " {\n", + " x = Enumerable.Range(0, sorted.Count()),\n", + " y = sorted.Select(selectionFunctor),\n", + " mode = \"markers\",\n", + " name = b.Key,\n", + " text = sorted.Select(ss => ss.Benchmark),\n", + " };\n", + "\n", + " scatters.Add(scatter);\n", + " }\n", + " \n", + " Chart.Plot(scatters, layout).Display();\n", + " }\n", + " }\n", + "\n", + " public void SaveDifferences(string baseline, string comparand, List sortingCriteria = null)\n", + " {\n", + " // This function assumes the runs are all in:\n", + " // {build}_{iteration} form.\n", + " // Else, it will except.\n", + "\n", + " // Iteration -> LoadInfos\n", + " Dictionary> iterationData = new();\n", + "\n", + " // Get the max iteration.\n", + " int maxIteration = -1;\n", + " foreach (var run in _runToBenchmarkData)\n", + " {\n", + " string runName = run.Key;\n", + " string[] split = run.Key.Split(\"_\");\n", + " Debug.Assert(split.Length == 2);\n", + " string build = split[0];\n", + " string iterationAsString = split[1];\n", + " int iteration = Convert.ToInt32(iterationAsString);\n", + " maxIteration = System.Math.Max(iteration, maxIteration);\n", + " }\n", + "\n", + " // Compute Average Diff\n", + " // Build to Benchmark -> Data\n", + " Dictionary> averageData = new();\n", + "\n", + " for (int i = 0; i <= maxIteration; i++)\n", + " {\n", + " string baselineIteration = baseline + \"_\" + i.ToString();\n", + " string comparandIteration = comparand + \"_\" + i.ToString();\n", + "\n", + " Dictionary baselineIterationRuns = _runToBenchmarkData[baselineIteration];\n", + " Dictionary comparandIterationRuns = _runToBenchmarkData[comparandIteration];\n", + "\n", + " foreach (var b in baselineIterationRuns)\n", + " {\n", + " if (!iterationData.TryGetValue(i, out var benchmarks))\n", + " {\n", + " iterationData[i] = benchmarks = new();\n", + " }\n", + "\n", + " benchmarks.Add(GetComparison(baselineIterationRuns[b.Key], comparandIterationRuns[b.Key]));\n", + " }\n", + "\n", + " if (!averageData.TryGetValue(baseline, out var bVal))\n", + " {\n", + " averageData[baseline] = bVal = new();\n", + " foreach (var benchmark in baselineIterationRuns)\n", + " {\n", + " bVal[benchmark.Key] = new LoadInfo\n", + " {\n", + " Benchmark = benchmark.Key,\n", + " WorkingSetMB = benchmark.Value.WorkingSetMB,\n", + " PrivateMemoryMB = benchmark.Value.PrivateMemoryMB,\n", + " RequestsPerMSec = benchmark.Value.RequestsPerMSec,\n", + " MeanLatencyMS = benchmark.Value.MeanLatencyMS,\n", + " Latency50thMS = benchmark.Value.Latency50thMS, \n", + " Latency75thMS = benchmark.Value.Latency75thMS,\n", + " Latency90thMS = benchmark.Value.Latency90thMS,\n", + " Latency99thMS = benchmark.Value.Latency99thMS,\n", + " NumberOfHeapCountSwitches = benchmark.Value.NumberOfHeapCountSwitches,\n", + " };\n", + " }\n", + " }\n", + "\n", + " else\n", + " {\n", + 
" foreach (var benchmark in baselineIterationRuns)\n", + " {\n", + " var data = bVal[benchmark.Key];\n", + " data.Benchmark = benchmark.Key;\n", + " data.WorkingSetMB += benchmark.Value.WorkingSetMB;\n", + " data.PrivateMemoryMB += benchmark.Value.PrivateMemoryMB;\n", + " data.RequestsPerMSec += benchmark.Value.RequestsPerMSec;\n", + " data.MeanLatencyMS += benchmark.Value.MeanLatencyMS;\n", + " data.Latency50thMS += benchmark.Value.Latency50thMS; \n", + " data.Latency75thMS += benchmark.Value.Latency75thMS;\n", + " data.Latency90thMS += benchmark.Value.Latency90thMS;\n", + " data.Latency99thMS += benchmark.Value.Latency99thMS;\n", + " data.NumberOfHeapCountSwitches += benchmark.Value.NumberOfHeapCountSwitches;\n", + " }\n", + " }\n", + "\n", + " if (!averageData.TryGetValue(comparand, out var cVal))\n", + " {\n", + " averageData[comparand] = cVal = new();\n", + " foreach (var benchmark in comparandIterationRuns)\n", + " {\n", + " cVal[benchmark.Key] = new LoadInfo\n", + " {\n", + " Benchmark = benchmark.Key,\n", + " WorkingSetMB = benchmark.Value.WorkingSetMB,\n", + " PrivateMemoryMB = benchmark.Value.PrivateMemoryMB,\n", + " RequestsPerMSec = benchmark.Value.RequestsPerMSec,\n", + " MeanLatencyMS = benchmark.Value.MeanLatencyMS,\n", + " Latency50thMS = benchmark.Value.Latency50thMS, \n", + " Latency75thMS = benchmark.Value.Latency75thMS,\n", + " Latency90thMS = benchmark.Value.Latency90thMS,\n", + " Latency99thMS = benchmark.Value.Latency99thMS,\n", + " NumberOfHeapCountSwitches = benchmark.Value.NumberOfHeapCountSwitches,\n", + " };\n", + " }\n", + " }\n", + "\n", + " else\n", + " {\n", + " foreach (var benchmark in comparandIterationRuns)\n", + " {\n", + " var data = cVal[benchmark.Key];\n", + " data.Benchmark = benchmark.Key;\n", + " data.WorkingSetMB += benchmark.Value.WorkingSetMB;\n", + " data.PrivateMemoryMB += benchmark.Value.PrivateMemoryMB;\n", + " data.RequestsPerMSec += benchmark.Value.RequestsPerMSec;\n", + " data.MeanLatencyMS += benchmark.Value.MeanLatencyMS;\n", + " data.Latency50thMS += benchmark.Value.Latency50thMS; \n", + " data.Latency75thMS += benchmark.Value.Latency75thMS;\n", + " data.Latency90thMS += benchmark.Value.Latency90thMS;\n", + " data.Latency99thMS += benchmark.Value.Latency99thMS;\n", + " data.NumberOfHeapCountSwitches += benchmark.Value.NumberOfHeapCountSwitches;\n", + " }\n", + " }\n", + " }\n", + "\n", + " foreach (var benchmark in _benchmarkToRunData)\n", + " {\n", + " foreach (var build in averageData)\n", + " {\n", + " var data = build.Value[benchmark.Key];\n", + " data.Benchmark = benchmark.Key;\n", + " data.WorkingSetMB /= (maxIteration + 1); \n", + " data.PrivateMemoryMB /= (maxIteration + 1);\n", + " data.RequestsPerMSec /= (maxIteration + 1);\n", + " data.MeanLatencyMS /= (maxIteration + 1);\n", + " data.Latency50thMS /= (maxIteration + 1);\n", + " data.Latency75thMS /= (maxIteration + 1);\n", + " data.Latency90thMS /= (maxIteration + 1);\n", + " data.Latency99thMS /= (maxIteration + 1);\n", + " data.NumberOfHeapCountSwitches /= (maxIteration + 1);\n", + " }\n", + " }\n", + "\n", + " string DisplayDetailsForABenchmark(LoadInfo val) =>\n", + " $\"{val.Benchmark},{val.WorkingSetMB},{val.PrivateMemoryMB},{val.RequestsPerMSec},{val.MeanLatencyMS},{val.Latency50thMS},{val.Latency75thMS},{val.Latency90thMS},{val.Latency99thMS},{val.NumberOfHeapCountSwitches}\";\n", + "\n", + " if (sortingCriteria == null)\n", + " {\n", + " sortingCriteria = new() { nameof(LoadInfo.PrivateMemoryMB) };\n", + " }\n", + "\n", + " foreach (var s in sortingCriteria)\n", + 
" {\n", + " Func sortingFunctor = null;\n", + " Func, double> selectionFunctor = null;\n", + "\n", + " switch (s)\n", + " {\n", + " case nameof(LoadInfo.WorkingSetMB):\n", + " sortingFunctor = (data) => data.WorkingSetMB;\n", + " selectionFunctor = (data) => data.Value.WorkingSetMB;\n", + " break;\n", + " case nameof(LoadInfo.PrivateMemoryMB):\n", + " sortingFunctor = (data) => data.PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.Value.PrivateMemoryMB;\n", + " break;\n", + " case nameof(LoadInfo.RequestsPerMSec):\n", + " sortingFunctor = (data) => data.RequestsPerMSec;\n", + " selectionFunctor = (data) => data.Value.RequestsPerMSec;\n", + " break;\n", + " case nameof(LoadInfo.MeanLatencyMS):\n", + " sortingFunctor = (data) => data.MeanLatencyMS;\n", + " selectionFunctor = (data) => data.Value.MeanLatencyMS;\n", + " break;\n", + " case nameof(LoadInfo.Latency50thMS):\n", + " sortingFunctor = (data) => data.Latency50thMS;\n", + " selectionFunctor = (data) => data.Value.Latency50thMS;\n", + " break;\n", + " case nameof(LoadInfo.Latency75thMS):\n", + " sortingFunctor = (data) => data.Latency75thMS;\n", + " selectionFunctor = (data) => data.Value.Latency75thMS;\n", + " break;\n", + " case nameof(LoadInfo.Latency90thMS):\n", + " sortingFunctor = (data) => data.Latency90thMS;\n", + " selectionFunctor = (data) => data.Value.Latency90thMS;\n", + " break;\n", + " case nameof(LoadInfo.Latency99thMS):\n", + " sortingFunctor = (data) => data.Latency99thMS;\n", + " selectionFunctor = (data) => data.Value.Latency99thMS;\n", + " break;\n", + " case nameof(LoadInfo.NumberOfHeapCountSwitches):\n", + " sortingFunctor = (data) => data.NumberOfHeapCountSwitches;\n", + " selectionFunctor = (data) => data.Value.NumberOfHeapCountSwitches;\n", + " break;\n", + " default:\n", + " sortingFunctor = (data) => data.PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.Value.PrivateMemoryMB;\n", + " break;\n", + " }\n", + "\n", + " List> sortedLoadInfo = new(); \n", + " foreach (var iteration in iterationData)\n", + " {\n", + " sortedLoadInfo.Add(iteration.Value.OrderByDescending(sortingFunctor).ToList());\n", + " }\n", + "\n", + " List sortedAverages = new();\n", + "\n", + " foreach (var benchmark in averageData[baseline])\n", + " {\n", + " LoadInfo baselineInfo = benchmark.Value;\n", + " LoadInfo comparandInfo = averageData[comparand][benchmark.Key];\n", + " LoadInfo comparisonInfo = GetComparison(baselineInfo, comparandInfo);\n", + " sortedAverages.Add(comparisonInfo);\n", + " }\n", + " sortedAverages = sortedAverages.OrderByDescending(sortingFunctor).ToList();\n", + "\n", + " // Create CSV.\n", + " StringBuilder top = new();\n", + "\n", + " // Iterate over each of the runs.\n", + " const int singleBuildColumnSize = 11;\n", + " int numberOfIterations = maxIteration + 1;\n", + " string columnHeader = \"Benchmark Name,WorkingSetMB,PrivateMemoryMB,RequestsPerMSec,MeanLatencyMS,Latency50thMS,Latency75thMS,Latency90thMS,Latency99thMS,HeapCount\";\n", + "\n", + " int totalCountOfBenchmarks = sortedLoadInfo.First().Count;\n", + "\n", + " string first = string.Join(\"\", Enumerable.Range(0, numberOfIterations).Select(build => build + string.Join(\"\", Enumerable.Repeat(\",\", singleBuildColumnSize))));\n", + " string second = string.Join(\",,\", Enumerable.Repeat(columnHeader, numberOfIterations));\n", + "\n", + " // Add the average diff.\n", + " first += \"Average Diff %\" + string.Join(\"\", Enumerable.Repeat(\",\", singleBuildColumnSize));\n", + " second += \",,\" + string.Join(\",,\", columnHeader);\n", + 
"\n", + " top.AppendLine(first);\n", + " top.AppendLine(second);\n", + "\n", + " for (int benchmarkIdx = 0; benchmarkIdx < totalCountOfBenchmarks; benchmarkIdx++)\n", + " {\n", + " string benchmarkData = string.Join(\",,\", Enumerable.Range(0, numberOfIterations).Select(iteration => DisplayDetailsForABenchmark(sortedLoadInfo[iteration][benchmarkIdx])));\n", + " benchmarkData += $\",,{DisplayDetailsForABenchmark(sortedAverages[benchmarkIdx])}\";\n", + "\n", + " top.AppendLine(benchmarkData);\n", + " }\n", + "\n", + " File.WriteAllText(Path.Combine(_basePath, $\"Difference_{s}.csv\"), top.ToString());\n", + "\n", + " var layout = new Layout.Layout\n", + " {\n", + " xaxis = new Xaxis { title = \"Benchmark Name\" },\n", + " yaxis = new Yaxis { title = $\"{s}\" },\n", + " width = 1500,\n", + " title = $\"Raw values of {s} for Runs\"\n", + " };\n", + "\n", + " List scatters = new();\n", + "\n", + " const int baseColor = 150;\n", + "\n", + " for (int iterationIdx = 0; iterationIdx <= maxIteration; iterationIdx++)\n", + " {\n", + " string baselineIteration = baseline + \"_\" + iterationIdx.ToString();\n", + " string comparandIteration = comparand + \"_\" + iterationIdx.ToString();\n", + "\n", + " Dictionary baselineData = _runToBenchmarkData[baselineIteration];\n", + " Dictionary comparandData = _runToBenchmarkData[comparandIteration];\n", + "\n", + " if (iterationIdx == 0)\n", + " {\n", + " var sortedBaseline = baselineData.Values.OrderByDescending(sortingFunctor);\n", + " baselineData = sortedBaseline.ToDictionary(d => d.Benchmark);\n", + " }\n", + "\n", + " Scatter baselineScatter = new()\n", + " {\n", + " x = baselineData.Select(b => b.Key),\n", + " y = baselineData.Select(selectionFunctor),\n", + " name = $\"{baselineIteration} - {s}\",\n", + " mode = \"markers\",\n", + " marker = new Marker { color = $\"rgb({baseColor + iterationIdx * 50}, 0, 0)\" } \n", + " };\n", + "\n", + " Scatter comparandScatter = new()\n", + " {\n", + " x = comparandData.Select(b => b.Key),\n", + " y = comparandData.Select(selectionFunctor),\n", + " name = $\"{comparandIteration} - {s}\",\n", + " mode = \"markers\",\n", + " marker = new Marker { color = $\"rgb(0, 0, {baseColor + iterationIdx * 50})\" } \n", + " };\n", + "\n", + " scatters.Add(baselineScatter);\n", + " scatters.Add(comparandScatter);\n", + " }\n", + "\n", + " Chart.Plot(scatters, layout).Display();\n", + " }\n", + " }\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "string basePath = @\"C:\\Traces\\GCTraces\\DATAS_5_Fixed\";\n", + "var dataManager = new DataManager(basePath);" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "dataManager.SummarizeResults()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "dataManager.SaveVolatilityData(new List { \"baseline\", \"run\" }, new List { nameof(LoadInfo.PrivateMemoryMB), nameof(LoadInfo.RequestsPerMSec )});" + ] + }, 
+ { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "dataManager.SaveDifferences(\"baseline\", \"run\", new List { nameof(LoadInfo.PrivateMemoryMB), nameof(LoadInfo.RequestsPerMSec) });" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "dataManager.Data" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Using the DataManager\n", + "\n", + "The following cells demonstrates how to make use of the ``DataManager``. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "// The name of the run from the yaml file for which the ASP.NET run is created for.\n", + "string runName = \"run\";\n", + "\n", + "Dictionary run = dataManager.GetAllBenchmarksForRun(runName);\n", + "dataManager.Data.Display();\n", + "List> runsWithGCData = dataManager.GetAllBenchmarksForRun(runName).Where(gc => gc.Value.Data != null);" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "string benchmarkName = \"Name of the specific benchmark\";\n", + "LoadInfo benchmarkData = dataManager.GetBenchmarkData(benchmark: benchmarkName, run: runName);\n", + "benchmarkData.Id" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "Dictionary allRunsForBenchmark = dataManager.GetAllRunsForBenchmark(benchmark: benchmarkName);\n", + "allRunsForBenchmark.Keys" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Saving The Benchmark Results\n", + "\n", + "The following call will persist a flat list of all the results." 
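If you want the flat CSV written somewhere other than the trace folder, `SaveBenchmarkData` also accepts an output directory (it falls back to the base path given to the `DataManager` when the argument is omitted). A small sketch; the `Reports` path below is only an example:

```csharp
// Write AllBenchmarks.csv (one row per "run | benchmark" pair) to a custom folder.
string reportDirectory = @"C:\Traces\GCTraces\Reports"; // example path, not part of the run layout
Directory.CreateDirectory(reportDirectory);
dataManager.SaveBenchmarkData(reportDirectory);
```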
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "dataManager.SaveBenchmarkData()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Build to Build Comparison and Volatility Analysis" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var run1_vs_run2 = dataManager.GetBenchmarkToComparison(\"datas_2\", \"datas_3\");" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "static bool IsNotInvalidDouble(double val) => \n", + " !double.IsNaN(val) && \n", + " !double.IsInfinity(val) && \n", + " !double.IsPositiveInfinity(val) && \n", + " !double.IsNegativeInfinity(val);\n", + "\n", + "public class SummaryTable\n", + "{\n", + " public SummaryTable(Dictionary> comparisons)\n", + " {\n", + " Comparisons = comparisons;\n", + " }\n", + "\n", + " private string GenerateSummaryForComparison(string comparisonKey, Dictionary comparison)\n", + " {\n", + " double averageWorkingSet = comparison.Where(a => IsNotInvalidDouble(a.Value.WorkingSetMB)).Average(a => a.Value.WorkingSetMB);\n", + " double privateMemory = comparison.Where(a => IsNotInvalidDouble(a.Value.PrivateMemoryMB)).Average(a => a.Value.PrivateMemoryMB);\n", + " double throughput = comparison.Where(a => IsNotInvalidDouble(a.Value.RequestsPerMSec)).Average(a => a.Value.RequestsPerMSec);\n", + " double meanLatency = comparison.Where(a => IsNotInvalidDouble(a.Value.MeanLatencyMS)).Average(a => a.Value.MeanLatencyMS);\n", + "\n", + " double p50Latency = comparison.Where(a => IsNotInvalidDouble(a.Value.Latency50thMS)).Average(a => a.Value.Latency50thMS);\n", + " double p75Latency = comparison.Where(a => IsNotInvalidDouble(a.Value.Latency75thMS)).Average(a => a.Value.Latency75thMS);\n", + " double p90Latency = comparison.Where(a => IsNotInvalidDouble(a.Value.Latency90thMS)).Average(a => a.Value.Latency90thMS);\n", + " double p99Latency = comparison.Where(a => IsNotInvalidDouble(a.Value.Latency99thMS)).Average(a => a.Value.Latency99thMS);\n", + "\n", + " return $\"{comparisonKey},{averageWorkingSet},{privateMemory},{throughput},{meanLatency},{p50Latency},{p75Latency},{p90Latency},{p99Latency}\";\n", + " }\n", + "\n", + " public string GenerateSummaryForComparisons()\n", + " {\n", + " StringBuilder sb = new();\n", + " sb.AppendLine(\"Build to Build,Average Working Set (MB) %, Average Private Memory (MB) %, Average Request/MSec %, Average Mean Latency (MSec), Average P50 Latency (MSec) %, Average P75 Latency (MSec) %, Average P90 Latency (MSec) %, Average P99 Latency (MSec) %\");\n", + " foreach (var comparison in Comparisons)\n", + " {\n", + " sb.AppendLine(GenerateSummaryForComparison(comparison.Key, comparison.Value));\n", + " }\n", + "\n", + " return sb.ToString();\n", + " }\n", + "\n", + " private int GetCountOfRegressions(List selected, double thresholdPercentage, bool lessIsBetter = true)\n", + " {\n", + " // If throughput, 
less is worse => threshold <= -5%.\n", + " var comparison = selected.Where(d => IsNotInvalidDouble(d) && ( (lessIsBetter) ? (d >= thresholdPercentage) : (d <= -thresholdPercentage)));\n", + " return comparison.Count;\n", + " }\n", + "\n", + " private int GetCountOfAbsRegressions(List selected, double thresholdPercentage)\n", + " {\n", + " var comparison = selected.Where(d => IsNotInvalidDouble(d) && Math.Abs(d) >= thresholdPercentage);\n", + " return comparison.Count;\n", + " }\n", + "\n", + " // # of benchmarks with throughput regressed by >= 5% and 10%\n", + " private string GenerateRegressionSummary(string comparisonKey, Dictionary comparison)\n", + " {\n", + " List workingSet = comparison.Select(c => c.Value.WorkingSetMB);\n", + " int workingSetCountGT_5 = GetCountOfRegressions(workingSet, 5);\n", + " int workingSetCountGT_10 = GetCountOfRegressions(workingSet, 10);\n", + "\n", + " List privateMemory = comparison.Select(c => c.Value.PrivateMemoryMB);\n", + " int privateMemoryCountGT_5 = GetCountOfRegressions(privateMemory, 5);\n", + " int privateMemoryCountGT_10 = GetCountOfRegressions(privateMemory, 10);\n", + "\n", + " List throughput = comparison.Select(a => a.Value.RequestsPerMSec);\n", + " int throughputCountGT_5 = GetCountOfRegressions(throughput, 5, false);\n", + " int throughputCountGT_10 = GetCountOfRegressions(throughput, 10, false);\n", + "\n", + " List meanLatency = comparison.Select(a => a.Value.MeanLatencyMS);\n", + " int meanLatencyCountGT_5 = GetCountOfRegressions(meanLatency, 5);\n", + " int meanLatencyCountGT_10 = GetCountOfRegressions(meanLatency, 10);\n", + "\n", + " List p50Latency = comparison.Select(a => a.Value.Latency50thMS);\n", + " int p50LatencyCountGT_5 = GetCountOfRegressions(p50Latency, 5);\n", + " int p50LatencyCountGT_10 = GetCountOfRegressions(p50Latency, 10);\n", + "\n", + " List p75Latency = comparison.Select(a => a.Value.Latency75thMS);\n", + " int p75LatencyCountGT_5 = GetCountOfRegressions(p75Latency, 5);\n", + " int p75LatencyCountGT_10 = GetCountOfRegressions(p75Latency, 10);\n", + "\n", + " List p90Latency = comparison.Select(a => a.Value.Latency90thMS);\n", + " int p90LatencyCountGT_5 = GetCountOfRegressions(p90Latency, 5);\n", + " int p90LatencyCountGT_10 = GetCountOfRegressions(p90Latency, 10);\n", + " \n", + " List p99Latency = comparison.Select(a => a.Value.Latency99thMS);\n", + " int p99LatencyCountGT_5 = GetCountOfRegressions(p99Latency, 5);\n", + " int p99LatencyCountGT_10 = GetCountOfRegressions(p99Latency, 10);\n", + "\n", + " return $\"{comparisonKey},{workingSetCountGT_5},{workingSetCountGT_10},{privateMemoryCountGT_5},{privateMemoryCountGT_10},{throughputCountGT_5},{throughputCountGT_10},{meanLatencyCountGT_5},{meanLatencyCountGT_10},{p50LatencyCountGT_5},{p50LatencyCountGT_10},{p75LatencyCountGT_5},{p75LatencyCountGT_10},{p90LatencyCountGT_5},{p90LatencyCountGT_10},{p99LatencyCountGT_5},{p99LatencyCountGT_10}\";\n", + " }\n", + "\n", + " public string GenerateRegressionSummaryForComparisons()\n", + " {\n", + " StringBuilder sb = new();\n", + " sb.AppendLine(\"Build to Build,Reg. Count - Working Set (MB),Large Reg. Count - Working Set (MB),Reg. Count - Private Memory (MB),Large Reg. Count - Private Memory (MB),Reg. Count - Throughput, Large Reg. Count - Throughput,Reg. Count - Mean Latency,Large Reg. Count - Mean Latency,Reg. Count - P50 Latency, Large Reg. Count - P50 Latency, Reg. Count - P75 Latency, Large Reg. Count - P75 Latency,Reg. Count - P90 Latency, Large Reg. Count - P90 Latency,Reg. Count - P99 Latency, Large Reg. 
Count - P99 Latency\");\n", + " foreach (var comparison in Comparisons)\n", + " {\n", + " sb.AppendLine(GenerateRegressionSummary(comparison.Key, comparison.Value));\n", + " }\n", + "\n", + " return sb.ToString();\n", + " }\n", + "\n", + " public Dictionary GenerateRegressionAnalysisForComparison(string comparisonKey)\n", + " {\n", + " StringBuilder sb = new();\n", + " Dictionary csvData = new();\n", + " Dictionary comparison = Comparisons[comparisonKey];\n", + "\n", + " string header = \"Benchmark,WorkingSetMB,PrivateMemoryMB,RequestsPerMSec,MeanLatencyMS,Latency50thMS,Latency75thMS,Latency90thMS,Latency99thMS\";\n", + "\n", + " // Generate Memory Regressions.\n", + " StringBuilder memRegressions = new();\n", + " memRegressions.AppendLine(header);\n", + " foreach (var benchmark in comparison.Where(c => c.Value.WorkingSetMB >= 10 || c.Value.PrivateMemoryMB >= 10 ))\n", + " {\n", + " memRegressions.AppendLine($\"{benchmark.Key},{benchmark.Value.WorkingSetMB},{benchmark.Value.PrivateMemoryMB},{benchmark.Value.RequestsPerMSec},{benchmark.Value.MeanLatencyMS},{benchmark.Value.Latency50thMS},{benchmark.Value.Latency75thMS},{benchmark.Value.Latency90thMS},{benchmark.Value.Latency99thMS}\");\n", + " }\n", + " csvData[\"memory\"] = memRegressions.ToString();\n", + "\n", + " // Generate Throughput Regressions.\n", + " StringBuilder throughputRegressions = new();\n", + " throughputRegressions.AppendLine(header);\n", + " foreach (var benchmark in comparison.Where(c => c.Value.RequestsPerMSec <= -10))\n", + " {\n", + " throughputRegressions.AppendLine($\"{benchmark.Key},{benchmark.Value.WorkingSetMB},{benchmark.Value.PrivateMemoryMB},{benchmark.Value.RequestsPerMSec},{benchmark.Value.MeanLatencyMS},{benchmark.Value.Latency50thMS},{benchmark.Value.Latency75thMS},{benchmark.Value.Latency90thMS},{benchmark.Value.Latency99thMS}\");\n", + " }\n", + " csvData[\"throughput\"] = throughputRegressions.ToString();\n", + "\n", + " // Generate Latency Regressions.\n", + " StringBuilder latencyRegressions = new();\n", + " latencyRegressions.AppendLine(header);\n", + " foreach (var benchmark in comparison.Where(c => c.Value.MeanLatencyMS >= 10 || \n", + " c.Value.Latency50thMS >= 10 || \n", + " c.Value.Latency75thMS >= 10 || \n", + " c.Value.Latency90thMS >= 10 || \n", + " c.Value.Latency99thMS >= 10 ))\n", + " {\n", + " latencyRegressions.AppendLine($\"{benchmark.Key},{benchmark.Value.WorkingSetMB},{benchmark.Value.PrivateMemoryMB},{benchmark.Value.RequestsPerMSec},{benchmark.Value.MeanLatencyMS},{benchmark.Value.Latency50thMS},{benchmark.Value.Latency75thMS},{benchmark.Value.Latency90thMS},{benchmark.Value.Latency99thMS}\");\n", + " }\n", + " csvData[\"latency\"] = latencyRegressions.ToString();\n", + "\n", + " // All.\n", + " StringBuilder all = new();\n", + " all.AppendLine(header);\n", + " foreach (var benchmark in comparison)\n", + " {\n", + " all.AppendLine($\"{benchmark.Key},{benchmark.Value.WorkingSetMB},{benchmark.Value.PrivateMemoryMB},{benchmark.Value.RequestsPerMSec},{benchmark.Value.MeanLatencyMS},{benchmark.Value.Latency50thMS},{benchmark.Value.Latency75thMS},{benchmark.Value.Latency90thMS},{benchmark.Value.Latency99thMS}\");\n", + " }\n", + " csvData[\"all\"] = all.ToString();\n", + "\n", + " return csvData;\n", + " }\n", + "\n", + " public void SaveComparisons(string basePath)\n", + " {\n", + " // Add Summary for Comparisons.\n", + " string summaryOfComparisons = GenerateSummaryForComparisons();\n", + " File.WriteAllText(Path.Combine(basePath, \"SummaryOfComparisons.csv\"), 
summaryOfComparisons);\n", + "\n", + " // Add Regression Summary for Comparisons.\n", + " string regressionSummary = GenerateRegressionSummaryForComparisons();\n", + " File.WriteAllText(Path.Combine(basePath, \"RegressionSummary.csv\"), regressionSummary);\n", + "\n", + " // Add Large Regression Analysis for Comparison.\n", + " string perComparisonDataPath = Path.Combine(basePath, \"PerComparisonData\");\n", + " if (!Directory.Exists(perComparisonDataPath))\n", + " {\n", + " Directory.CreateDirectory(perComparisonDataPath);\n", + " }\n", + "\n", + " foreach (var comparison in Comparisons)\n", + " {\n", + " string comparisonPath = Path.Combine(perComparisonDataPath, comparison.Key);\n", + " Directory.CreateDirectory(comparisonPath);\n", + "\n", + " Dictionary regressionComparisons = GenerateRegressionAnalysisForComparison(comparison.Key);\n", + "\n", + " // Memory\n", + " File.WriteAllText(Path.Combine(comparisonPath, \"MemoryRegressions.csv\"), regressionComparisons[\"memory\"]);\n", + "\n", + " // Throughput\n", + " File.WriteAllText(Path.Combine(comparisonPath, \"ThroughputRegressions.csv\"), regressionComparisons[\"throughput\"]);\n", + "\n", + " // Latency\n", + " File.WriteAllText(Path.Combine(comparisonPath, \"LatencyRegressions.csv\"), regressionComparisons[\"latency\"]);\n", + "\n", + " // All\n", + " File.WriteAllText(Path.Combine(comparisonPath, \"All.csv\"), regressionComparisons[\"all\"]);\n", + " }\n", + " }\n", + "\n", + " public Dictionary> Comparisons { get; }\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var datas3_vs_datas_4 = dataManager.GetBenchmarkToComparison(\"datas_3\", \"datas_4\");\n", + "\n", + "Dictionary> comparisons = new()\n", + "{\n", + " { nameof(run1_vs_run2), run1_vs_run2 },\n", + "};\n", + "\n", + "SummaryTable summaryTable = new(comparisons);\n", + "summaryTable.SaveComparisons(\"./\");" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Charting Helpers\n", + "\n", + "The following cells highlight how to chart certain properties of the LoadInfo class." 
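As a usage sketch for the comparison-based overload defined in the next cell: assuming `GetComparison` carries both traces through `Data` and `Data2` (which that overload expects), a single entry of a comparison dictionary can be charted directly. `datas_2`, `datas_3`, and `CachingPlatform` are just the example run and benchmark names used elsewhere in this notebook:

```csharp
// Chart per-GC pause durations for one benchmark straight from a build-to-build comparison.
var cachingComparison = dataManager.GetBenchmarkToComparison("datas_2", "datas_3")["CachingPlatform"];
ChartProperty(cachingComparison, nameof(TraceGC.PauseDurationMSec));
```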
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "void ChartProperty(LoadInfo baseline, LoadInfo comparand, string nameOfProperty)\n", + "{\n", + " GCProcessData baselineGC = baseline.Data;\n", + " GCProcessData comparandGC = comparand.Data;\n", + "\n", + " List<(string scatterName, List gcs)> gcData = \n", + " new()\n", + " {\n", + " { ( scatterName : $\"{nameOfProperty} for {baseline.Id}\" , gcs : baselineGC.GCs )},\n", + " { ( scatterName : $\"{nameOfProperty} for {comparand.Id}\" , gcs : comparandGC.GCs )}\n", + " };\n", + "\n", + " GCCharting.ChartGCData(gcData : gcData, \n", + " title : $\"{nameOfProperty} Comparison Between {baseline.Run} and {comparand.Run}\", \n", + " isXAxisRelative : false,\n", + " fieldName : nameOfProperty).Display();\n", + "\n", + "}\n", + "\n", + "void ChartProperty(LoadInfo comparison, string nameOfProperty)\n", + "{\n", + " GCProcessData baselineGC = comparison.Data;\n", + " GCProcessData comparandGC = comparison.Data2;\n", + "\n", + " List<(string scatterName, List gcs)> gcData = \n", + " new()\n", + " {\n", + " { ( scatterName : $\"{nameOfProperty} for Baseline\" , gcs : baselineGC.GCs )},\n", + " { ( scatterName : $\"{nameOfProperty} for Comparand\" , gcs : comparandGC.GCs )}\n", + " };\n", + "\n", + " GCCharting.ChartGCData(gcData : gcData, \n", + " title : $\"{nameOfProperty} Comparison\", \n", + " isXAxisRelative : false,\n", + " fieldName : nameOfProperty).Display();\n", + "\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "dataManager.Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var run1_Benchmark = dataManager.GetBenchmarkData(benchmark: \"CachingPlatform\", \"datas_2\");\n", + "var run2_Benchmark = dataManager.GetBenchmarkData(benchmark: \"CachingPlatform\", \"datas_3\");\n", + "\n", + "// Chart the PauseDurationMSec for the run1 vs. 
run2.\n", + "ChartProperty(baseline: run1_Benchmark, comparand: run2_Benchmark, nameof(TraceGC.PauseDurationMSec))" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Debugging" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "System.Diagnostics.Process.GetCurrentProcess().Id" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "#!about" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".NET (C#)", + "language": "C#", + "name": ".net-csharp" + }, + "language_info": { + "name": "python" + }, + "orig_nbformat": 4, + "polyglot_notebook": { + "kernelInfo": { + "defaultKernelName": "csharp", + "items": [ + { + "aliases": [], + "name": "csharp" + } + ] + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/src/benchmarks/gc/GC.Infrastructure/Notebooks/README.md b/src/benchmarks/gc/GC.Infrastructure/Notebooks/README.md index 1b42f9d8a21..079608f07d9 100644 --- a/src/benchmarks/gc/GC.Infrastructure/Notebooks/README.md +++ b/src/benchmarks/gc/GC.Infrastructure/Notebooks/README.md @@ -17,3 +17,4 @@ Each of these examples assume you have cloned the performance repository to C:\p ## Notebooks 1. [Volatility Reporting](./VolatilityReport.ipynb). +2. [ASP.NET Benchmark Analysis](./ASPNetBenchmarkAnalysis.ipynb) diff --git a/src/benchmarks/gc/GC.Infrastructure/README.md b/src/benchmarks/gc/GC.Infrastructure/README.md index cd53080c50b..7de664d1d1e 100644 --- a/src/benchmarks/gc/GC.Infrastructure/README.md +++ b/src/benchmarks/gc/GC.Infrastructure/README.md @@ -144,39 +144,51 @@ The path to this file can be passed in as an optional argument for the ``microbe ##### ASP.NET Benchmarks -To run the infrastructure on a specific set of ASP.NET Benchmarks such as the suite comprising of The Json Min, Fortunes ETF and the Stage1gRPC run the following: +To run the infrastructure on a specific set of ASP.NET Benchmarks, do the following: 1. ``cd C:\performance\artifacts\bin\GC.Infrastructure\Release\net7.0\``. -2. ``.\GC.Infrastructure.exe aspnetbenchmarks --configuration C:\GC.Analysis.API\Configurations\ASPNetBenchmarks\ASPNetBenchmarks.yaml``. +2. ``.\GC.Infrastructure.exe aspnetbenchmarks --configuration C:\performance\src\benchmarks\gc\GC.Infrastructure\Configurations\ASPNetBenchmarks\ASPNetBenchmarks.csv``. -###### Uploading Only A Subset of the Binaries +More details about running and troubleshooting ASP.NET benchmarks can be found [here](./docs/ASPNETBenchmarks.md). -The ASP.NET benchmarks can be run without any of the users changes however, if the user wants to upload modified binaries with their changes, it is advisable to only upload those as long as they are compatible with the version of .NET runtime you wish to test against. Currently, the default framework to run these tests is net8.0. 
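For example, staging just a locally built ``clrgc.dll`` for upload can look like the following (the paths are illustrative; substitute your own runtime build output). The section below explains how to point the run's ``corerun`` at this file or folder:

```powershell
# Illustrative only: stage the rebuilt standalone GC into an otherwise empty upload folder.
New-Item -ItemType Directory -Force -Path C:\ASPNETUpload | Out-Null
Copy-Item C:\runtime\artifacts\bin\coreclr\windows.x64.Release\clrgc.dll -Destination C:\ASPNETUpload
```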
+###### Uploading Your Own Binaries -This can be accomplished in 2 steps: +The ASP.NET benchmarks can be run without any of the users changes however, if the user wants to upload modified binaries with their changes, it is advisable to only upload those as long as they are compatible with the version of .NET runtime you wish to test against. The infrastructure allows you to either upload a single binary or a directory with one or more binaries. -1. Copy over the binaries you want to change to a new empty folder. -2. Set the run's corerun path in the configuration to that of a folder with just the copied over binaries. +This can be accomplished by specifying either a file or a directory as the corerun path of a particular run: -As an example, if I were to only update ``gc.cpp`` and build a standalone ``clrgc.dll``, I would copy the binary to a folder such as the following, update the ``runs`` section of the configuration and point to the folder containing the binary; NOTE: the environment variable ``COMPlus_GCName`` must be set in this case: +As an example, if I were to only update ``gc.cpp`` and build a standalone ``clrgc.dll``, specifically set the ``corerun`` field of the said run to the path of the ``clrgc.dll``. +NOTE: the environment variable ``COMPlus_GCName`` must be set in this case: -1. Copy the clrgc.dll to a new and empty folder. +1. Assume your ``clrgc.dll`` is placed in ``C:\ASPNETUpload``: ```powershell C:\ASPNETUPLOAD |-----> clrgc.dll ``` -2. Adjust the corerun to point to the new folder: +2. Adjust the corerun to point to the path of clrgc.dll: ```yaml runs: run: - corerun: C:\ASPNetUpload\ # This was updated. + corerun: C:\ASPNetUpload\clrgc.dll environment_variables: COMPlus_GCName: clrgc.dll # This environment variable was set. ``` +NOTE: For this case, ensure the environment variable ``COMPlus_GCName`` or ``DOTNET_GCName`` is set to clrgc.dll. + +On the other hand, if you want upload the entire directory, say ``C:\ASPNETUpload2``, simply set the path to the directory in the corerun of a corerun: + +```yaml +runs: + run: + corerun: C:\ASPNetUpload2 + environment_variables: + COMPlus_GCName: clrgc.dll +``` + ###### Updating Which Benchmarks to Run The file that dictates which ASP.NET benchmarks to run is a CSV file and can be configured based on what test you need to run; an example of this file can be found [here](./Configurations/ASPNetBenchmarks/ASPNetBenchmarks.csv). @@ -185,7 +197,7 @@ You can update this file by changing the following field: ```yaml benchmark_settings: - benchmark_file: C:\InfraRuns\RunNew_All\Suites\ASPNETBenchmarks\ASPNetBenchmarks.csv # Change this. + benchmark_file: C:\InfraRuns\RunNew_All\Suites\ASPNETBenchmarks\ASPNetBenchmarks.csv ``` The format of this file is: @@ -202,7 +214,7 @@ It's worth noting that if you have specified Linux based binaries in the corerun ###### How To Add New Benchmarks 1. If you are collecting traces, make sure to include Linux (_Linux) or Windows (_Windows) suffix in the Legend column because we run PerfView to collect traces for Windows and dotnet-trace for `gc` trace; currently not working for other types of traces on Linux. -2. Find the base commandline for the benchmark to run by choosing the appropriate test and configuration from the [ASP.NET Dashboard](https://msit.powerbi.com/groups/me/reports/10265790-7e2e-41d3-9388-86ab72be3fe9/ReportSection30725cd056a647733762?experience=power-bi) +2. 
Find the base command line for the benchmark to run by choosing the appropriate test and configuration from the [ASP.NET Dashboard](https://msit.powerbi.com/groups/me/reports/10265790-7e2e-41d3-9388-86ab72be3fe9/ReportSection30725cd056a647733762?experience=power-bi) 3. Copy over the command line from the table to the Base CommandLine column after: 1. Remove the ``crank`` prefix from the command line. 2. Remove the ``--application.aspNetCoreVersion``, ``--application.runtimeVersion`` and ``--application.sdkVersion`` command args from the command line that you paste in the CSV as the versions are set by the infrastructure itself. diff --git a/src/benchmarks/gc/GC.Infrastructure/docs/ASPNETBenchmarks.md b/src/benchmarks/gc/GC.Infrastructure/docs/ASPNETBenchmarks.md new file mode 100644 index 00000000000..14033f2716b --- /dev/null +++ b/src/benchmarks/gc/GC.Infrastructure/docs/ASPNETBenchmarks.md @@ -0,0 +1,49 @@ +# ASP.NET Benchmarks + +## Troubleshooting + +There are four main types of errors while running ASP.NET Benchmarks using crank and these are: + +### 1. Inability To Connect To The Server + +The typical error message associated with this is of the following form: + +```cmd +The specified endpoint url 'http://asp-citrine-win:5001' for 'application' is invalid or not responsive: "No such host is known. (asp-citrine-win:5001)" +``` + +This is as a result of not being connected to CorpNet. To troubleshoot this issue, ensure you are connected to CorpNet by making sure your VPN is appropriately set: +![image](./images/CorpNetConnected.png) + +Additionally, the reason for the error could be because the associated machine is down. Reaching out to the appropriate ASP.NET machine owners is the best option here. + +### 2. Incorrect Crank Arguments + +Fix arguments by referring to [this](https://github.com/dotnet/crank/blob/main/src/Microsoft.Crank.Controller/README.md) document. If you are still experiencing issues even though you have checked that the crank commands are correct, ensure that you have the latest version of crank. + +### 3. Test Failures + +The test failures could be one of the following: + +#### 1. Build Failures + +To confirm this is the case, check the ``*build.log`` file associated with the run. The resolution here is to check with the test owners. + +#### 2. Runtime Test Failures + +These will show up in the following form: + +```psh +[STDERR] GC initialization failed with error 0x8007007E +[STDERR] Failed to create CoreCLR, HRESULT: 0x8007007E +``` + +This issue specifically indicates a version mismatch between the uploaded binaries and the test binaries. If you are connected to CorpNet, ``errors/`` will shed light on the meaning of errors. + +#### 3. Test Failures from the Managed Side of Things + +To get more details, check the ``*output.log`` file associated with the run. The resolution is usually to check if the framework version you are trying to run matches with the run and if that doesn't turn out to be the case, reach out to the test owners. + +### 4. Missing Artifacts + +For the case of missing artifacts such as missing traces, examine the log file for the exception reasons. diff --git a/src/benchmarks/gc/GC.Infrastructure/docs/images/CorpNetConnected.png b/src/benchmarks/gc/GC.Infrastructure/docs/images/CorpNetConnected.png new file mode 100644 index 00000000000..d21b0653b7f Binary files /dev/null and b/src/benchmarks/gc/GC.Infrastructure/docs/images/CorpNetConnected.png differ
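A note on the crank version check mentioned in section 2: the crank controller ships as a .NET global tool, so refreshing it is a one-liner (the ``0.2.0-*`` floating version below is the one the dotnet/crank README documents; adjust if that changes):

```powershell
# Update the crank controller to the latest published build.
dotnet tool update Microsoft.Crank.Controller --version "0.2.0-*" --global
```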