mirror of
https://github.com/dart-lang/sdk
synced 2024-10-14 11:31:57 +00:00
Add upload tool for analysis_server benchmarks.
Bug: b/74375756 Change-Id: I480188f7f021e571cb276bddaf1da2eef4991b08 Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/283660 Commit-Queue: William Hesse <whesse@google.com> Reviewed-by: Jonas Termansen <sortie@google.com>
This commit is contained in:
parent
d8c1da97b5
commit
4b3bd7564c
124
pkg/analysis_server/benchmark/perf/benchmark_uploader.dart
Normal file
124
pkg/analysis_server/benchmark/perf/benchmark_uploader.dart
Normal file
|
@ -0,0 +1,124 @@
|
|||
// Copyright (c) 2023, the Dart project authors. Please see the AUTHORS file
|
||||
// for details. All rights reserved. Use of this source code is governed by a
|
||||
// BSD-style license that can be found in the LICENSE file.
|
||||
|
||||
// This script runs the benchmarks in this directory and uploads the
|
||||
// results to cloud storage. These results are then ingested by our
|
||||
// performance measurement system.
|
||||
//
|
||||
// The script only works when run on a LUCI builder in the dart-ci project,
|
||||
// and uploads results to paths within gs://dart-test-results/benchmark-results.
|
||||
//
|
||||
// This script is needed to run benchmarks on platforms that we only have
|
||||
// in our LUCI CI system, not in our performance lab, such as Windows.
|
||||
// The script is currently only used and tested on Windows.
|
||||
|
||||
import 'dart:convert';
|
||||
import 'dart:io';
|
||||
|
||||
/// Entry point: runs the analysis server benchmarks (warm and cold) and
/// uploads the combined results to cloud storage.
///
/// Exits non-zero when not running on Windows, or when any benchmark or
/// upload step fails, so the CI step surfaces the failure.
void main() async {
  // This uploader is only supported on the Windows CI builders. Check
  // before doing any expensive benchmark work (the original checked after
  // running both benchmark passes).
  if (!Platform.isWindows) {
    print('Analyzer benchmark uploads only run on Windows');
    exit(1);
  }
  try {
    final results = <Map<String, dynamic>>[
      ...await runBenchmarks(warm: true),
      ...await runBenchmarks(warm: false),
    ];
    // Tag every result with the machine metadata that the performance
    // measurement system expects alongside the benchmark scores.
    final targetResults = [
      for (final result in results)
        {
          'cpu': 'Windows VM',
          'machineType': 'windows-x64',
          'target': 'dart-analysis-server-external',
          ...result,
        }
    ];
    await uploadResults(targetResults);
  } catch (e, st) {
    print('$e\n$st');
    // Propagate the failure to the CI harness instead of exiting 0 after
    // merely printing the error.
    exitCode = 1;
  }
}
|
||||
|
||||
/// Runs the analysis server benchmarks and parses their results.
///
/// When [warm] is true runs the 'analysis-server' benchmark, otherwise the
/// 'analysis-server-cold' benchmark. Returns one result map per measured
/// metric, in the shape expected by the uploader.
///
/// Throws a [StateError] if the benchmark process exits with a non-zero
/// exit code.
Future<List<Map<String, dynamic>>> runBenchmarks({required bool warm}) async {
  final temperature = warm ? 'warm' : 'cold';
  final benchmarkResults = await Process.run(Platform.resolvedExecutable, [
    'pkg/analysis_server/benchmark/benchmarks.dart',
    'run',
    if (warm) 'analysis-server' else 'analysis-server-cold',
  ]);

  print(benchmarkResults.stdout);
  print(benchmarkResults.stderr);
  if (benchmarkResults.exitCode != 0) {
    // Throw a real error type rather than a raw string so stack traces and
    // typed handlers behave as expected.
    throw StateError('Failed to run $temperature benchmarks');
  }
  // The benchmark runner prints a single JSON line starting with
  // '{"benchmark":' among other human-readable output; extract and parse
  // exactly that one line.
  final result = jsonDecode(const LineSplitter()
      .convert(benchmarkResults.stdout as String)
      .where((line) => line.startsWith('{"benchmark":'))
      .single);

  return <Map<String, dynamic>>[
    {
      'benchmark': 'analysis-server-$temperature-memory',
      'metric': 'MemoryUse',
      'score': result['result']['analysis-server-$temperature-memory']['bytes'],
    },
    {
      'benchmark': 'analysis-server-$temperature-analysis',
      'metric': 'RunTimeRaw',
      'score': result['result']['analysis-server-$temperature-analysis']
          ['micros'],
    },
    // The edit and completion benchmarks are only produced by the warm run.
    if (warm)
      {
        'benchmark': 'analysis-server-edit',
        'metric': 'RunTimeRaw',
        'score': result['result']['analysis-server-edit']['micros'],
      },
    if (warm)
      {
        'benchmark': 'analysis-server-completion',
        'metric': 'RunTimeRaw',
        'score': result['result']['analysis-server-completion']['micros'],
      }
  ];
}
|
||||
|
||||
/// Encodes [results] as JSON and copies the file to cloud storage using
/// gsutil, keyed by the swarming task id.
///
/// Throws if the gsutil invocation exits with a non-zero code. The
/// temporary staging directory is always cleaned up.
Future<void> uploadResults(List<Map<String, dynamic>> results) async {
  // Stage the results file in a throwaway directory.
  final workDir =
      await Directory.systemTemp.createTemp('analysis-server-benchmarks');
  try {
    final jsonFile = File.fromUri(workDir.uri.resolve('results.json'));
    jsonFile.writeAsStringSync(jsonEncode(results), flush: true);

    // The swarming task id determines the destination path; warn when it
    // is absent (e.g. a local run) but continue with the placeholder.
    final taskId = Platform.environment['SWARMING_TASK_ID'] ?? 'test_task_id';
    if (taskId == 'test_task_id') {
      print('Benchmark_uploader requires SWARMING_TASK_ID in the environment.');
    }
    final destination =
        'gs://dart-test-results/benchmarks/$taskId/results.json';

    final python = 'python3.exe';
    final gsutilArgs = [
      'third_party/gsutil/gsutil',
      'cp',
      jsonFile.path,
      destination,
    ];
    print('Running $python ${gsutilArgs.join(' ')}');
    final upload = await Process.run(python, gsutilArgs);
    final code = upload.exitCode;
    print(upload.stdout);
    print(upload.stderr);
    print('exit code: $code');
    if (code != 0) {
      throw 'Gsutil upload failed. Exit code $code';
    }
  } finally {
    await workDir.delete(recursive: true);
  }
}
|
|
@ -2976,8 +2976,7 @@
|
|||
{
|
||||
"builders": [
|
||||
"analyzer-linux-release",
|
||||
"analyzer-mac-release",
|
||||
"analyzer-win-release"
|
||||
"analyzer-mac-release"
|
||||
],
|
||||
"meta": {
|
||||
"description": "This configuration is used by the analyzer builders."
|
||||
|
@ -3066,6 +3065,96 @@
|
|||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"builders": [
|
||||
"analyzer-win-release"
|
||||
],
|
||||
"meta": {
|
||||
"description": "This configuration is used by the analyzer windows builder."
|
||||
},
|
||||
"steps": [
|
||||
{
|
||||
"name": "build dart",
|
||||
"script": "tools/build.py",
|
||||
"arguments": [
|
||||
"create_sdk",
|
||||
"utils/dartanalyzer"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "analyzer unit tests",
|
||||
"arguments": [
|
||||
"-nanalyzer-unittest-asserts-${mode}-${system}",
|
||||
"pkg/analyzer"
|
||||
],
|
||||
"shards": 4,
|
||||
"fileset": "analyzer_unit_tests"
|
||||
},
|
||||
{
|
||||
"name": "analysis_server unit tests",
|
||||
"arguments": [
|
||||
"-nanalyzer-unittest-asserts-${mode}-${system}",
|
||||
"pkg/analysis_server"
|
||||
],
|
||||
"shards": 4,
|
||||
"fileset": "analyzer_unit_tests"
|
||||
},
|
||||
{
|
||||
"name": "nnbd_migration unit tests",
|
||||
"arguments": [
|
||||
"-nanalyzer-unittest-asserts-${mode}-${system}",
|
||||
"pkg/nnbd_migration"
|
||||
],
|
||||
"shards": 1,
|
||||
"fileset": "analyzer_unit_tests"
|
||||
},
|
||||
{
|
||||
"name": "analyze tests enable-asserts",
|
||||
"arguments": [
|
||||
"-nanalyzer-asserts-${system}"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "analyze migrated tests enable-asserts",
|
||||
"arguments": [
|
||||
"-nanalyzer-asserts-${system}",
|
||||
"corelib",
|
||||
"ffi",
|
||||
"language",
|
||||
"lib",
|
||||
"standalone"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "analyze pkg tests enable-asserts",
|
||||
"arguments": [
|
||||
"-nanalyzer-asserts-${system}",
|
||||
"pkg"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "analyzer_cli unit tests",
|
||||
"arguments": [
|
||||
"-nanalyzer-unittest-asserts-${mode}-${system}",
|
||||
"pkg/analyzer_cli"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "analyzer_plugin unit tests",
|
||||
"arguments": [
|
||||
"-nanalyzer-unittest-asserts-${mode}-${system}",
|
||||
"pkg/analyzer_plugin"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "benchmark analysis server",
|
||||
"script": "out/ReleaseX64/dart-sdk/bin/dart",
|
||||
"arguments": [
|
||||
"pkg/analysis_server/benchmark/perf/benchmark_uploader.dart"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"builders": [
|
||||
"analyzer-analysis-server-linux"
|
||||
|
|
Loading…
Reference in a new issue