Record/replay support to test.py

R=ricow@google.com

Review URL: https://codereview.chromium.org//15944005

git-svn-id: https://dart.googlecode.com/svn/branches/bleeding_edge/dart@23216 260f80e4-7a28-3924-810f-c04153c831b5
kustermann@google.com 2013-05-27 12:23:39 +00:00
parent a8da79c004
commit 4353bead01
5 changed files with 289 additions and 11 deletions
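For context, the end-to-end workflow this change enables looks roughly as follows. The flag spellings come from the new test_options.dart entries below; the file paths and the name of the standalone runner script are illustrative placeholders, not taken from this diff:

    tools/test.py --record_to_file=recording.json            # only record the commands, run nothing
    python <standalone_runner>.py recording.json result.json # execute the recorded commands
    tools/test.py --replay_from_file=result.json              # replay the captured command output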

View file

@ -0,0 +1,67 @@
#!/usr/bin/env python

import sys
import json
import subprocess
import time
import threading


def run_command(name, executable, arguments, timeout_in_seconds):
  print "Running %s: '%s'" % (name, [executable] + arguments)

  # The timeout_handler will set this to True if the command times out.
  timeout_value = {'did_timeout' : False}

  start = time.time()
  process = subprocess.Popen([executable] + arguments,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)

  def timeout_handler():
    timeout_value['did_timeout'] = True
    process.kill()
  timer = threading.Timer(timeout_in_seconds, timeout_handler)
  timer.start()

  stdout, stderr = process.communicate()
  exit_code = process.wait()
  timer.cancel()
  end = time.time()

  return (exit_code, stdout, stderr, end - start, timeout_value['did_timeout'])


def main(args):
  recording_file = args[0]
  result_file = args[1]

  with open(recording_file) as fd:
    test_cases = json.load(fd)

  for test_case in test_cases:
    name = test_case['name']
    command = test_case['command']
    executable = command['executable']
    arguments = command['arguments']
    timeout_limit = command['timeout_limit']

    exit_code, stdout, stderr, duration, did_timeout = (
        run_command(name, executable, arguments, timeout_limit))

    test_case['command_output'] = {
      'exit_code' : exit_code,
      'stdout' : stdout,
      'stderr' : stderr,
      'duration' : duration,
      'did_timeout' : did_timeout,
    }

  with open(result_file, 'w') as fd:
    json.dump(test_cases, fd)


if __name__ == '__main__':
  if len(sys.argv) != 3:
    print >> sys.stderr, ("Usage: %s <input-file.json> <output-file.json>"
                          % sys.argv[0])
    sys.exit(1)
  sys.exit(main(sys.argv[1:]))
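As a rough illustration of the input format the runner above expects, the following sketch writes a single-entry recording file; the test name, configuration string and command are made-up values, only the key layout is taken from the code above and from record_and_replay.dart:

import json

# A minimal recording with one test case (all values are illustrative).
recording = [
  {
    'name' : 'example/smoke_test',
    'configuration' : 'release-vm',
    'command' : {
      'timeout_limit' : 60,
      'executable' : '/bin/echo',
      'arguments' : ['hello'],
    },
  },
]

with open('recording.json', 'w') as fd:
  json.dump(recording, fd)

# After the runner has processed 'recording.json', each entry in the result
# file additionally carries a 'command_output' map with 'exit_code',
# 'stdout', 'stderr', 'duration' and 'did_timeout'.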

View file

@ -26,11 +26,12 @@ library test;
import "dart:async";
import "dart:io";
import "testing/dart/test_runner.dart";
import "testing/dart/test_options.dart";
import "testing/dart/test_suite.dart";
import "testing/dart/test_progress.dart";
import "testing/dart/http_server.dart";
import "testing/dart/record_and_replay.dart";
import "testing/dart/test_options.dart";
import "testing/dart/test_progress.dart";
import "testing/dart/test_runner.dart";
import "testing/dart/test_suite.dart";
import "testing/dart/utils.dart";
import "../compiler/tests/dartc/test_config.dart";
@ -87,6 +88,27 @@ void testConfigurations(List<Map> configurations) {
  var printTiming = firstConf['time'];
  var listTests = firstConf['list'];

  var recordingPath = firstConf['record_to_file'];
  var recordingOutputPath = firstConf['replay_from_file'];

  if (recordingPath != null && recordingOutputPath != null) {
    print("Fatal: Can't have both '--record_to_file' and '--replay_from_file' "
          "at the same time. Exiting ...");
    exit(1);
  }

  var testCaseRecorder;
  if (recordingPath != null) {
    testCaseRecorder = new TestCaseRecorder(new Path(recordingPath));
  }

  var testCaseOutputArchive;
  if (recordingOutputPath != null) {
    testCaseOutputArchive = new TestCaseOutputArchive();
    testCaseOutputArchive.loadFromPath(new Path(recordingOutputPath));
  }

  if (!firstConf['append_logs']) {
    var file = new File(TestUtils.flakyFileName());
    if (file.existsSync()) {
@ -216,6 +238,7 @@ void testConfigurations(List<Map> configurations) {
  }
  eventListener.add(new ExitCodeSetter());

  void startProcessQueue() {
    // Start process queue.
    new ProcessQueue(maxProcesses,
@ -225,7 +248,9 @@ void testConfigurations(List<Map> configurations) {
                     eventListener,
                     allTestsFinished,
                     verbose,
                     listTests);
                     listTests,
                     testCaseRecorder,
                     testCaseOutputArchive);
  }

  // Start all the HTTP servers required before starting the process queue.

View file

@ -0,0 +1,126 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

library record_and_replay;

import 'dart:io';
import 'dart:json' as json;
import 'dart:utf';

import 'test_runner.dart';
/*
 * Json files look like this:
 *
 * [
 *   {
 *     'name' : '...',
 *     'configuration' : '...',
 *     'command' : {
 *       'timeout_limit' : 60,
 *       'executable' : '...',
 *       'arguments' : ['arg1', 'arg2', '...'],
 *     },
 *     'command_output' : {
 *       'exit_code' : 42,
 *       'stdout' : '...',
 *       'stderr' : '...',
 *       'duration' : 1.5,
 *       'did_timeout' : false,
 *     },
 *   },
 *   ....
 * ]
 */
class TestCaseRecorder {
  Path _outputPath;
  List<Map> _recordedCommandInvocations = [];
  var _cwd;

  TestCaseRecorder(this._outputPath) {
    _cwd = new Directory.current().path;
  }

  void nextTestCase(TestCase testCase) {
    assert(testCase.commands.length == 1);
    var command = testCase.commands[0];
    assert(command.environment == null);

    var arguments = [];
    for (var rawArgument in command.arguments) {
      if (rawArgument.startsWith(_cwd)) {
        var relative = new Path(rawArgument).relativeTo(new Path(_cwd));
        arguments.add(relative.toNativePath());
      } else {
        arguments.add(rawArgument);
      }
    }

    var commandExecution = {
      'name' : testCase.displayName,
      'configuration' : testCase.configurationString,
      'command' : {
        'timeout_limit' : testCase.timeout,
        'executable' : command.executable,
        'arguments' : arguments,
      },
    };
    _recordedCommandInvocations.add(commandExecution);
  }

  void finish() {
    var file = new File.fromPath(_outputPath);
    var jsonString = json.stringify(_recordedCommandInvocations);
    file.writeAsStringSync(jsonString);
    print("TestCaseRecorder: wrote all TestCases to ${_outputPath}");
  }
}
class TestCaseOutputArchive {
  Map<String, Map> _testCaseOutputRecords;

  void loadFromPath(Path recordingPath) {
    var file = new File.fromPath(recordingPath);
    var testCases = json.parse(file.readAsStringSync());
    _testCaseOutputRecords = {};
    for (var testCase in testCases) {
      var key = _indexKey(testCase['configuration'], testCase['name']);
      _testCaseOutputRecords[key] = testCase['command_output'];
    }
  }

  CommandOutput outputOf(TestCase testCase) {
    var key = _indexKey(testCase.configurationString, testCase.displayName);
    var command_output = _testCaseOutputRecords[key];
    if (command_output == null) {
      print("Sorry, but there is no command output for "
            "${testCase.displayName}");
      exit(42);
    }

    // 'duration' is recorded in fractional seconds.
    double seconds = command_output['duration'];
    var duration = new Duration(milliseconds: (seconds * 1000).round());

    var commandOutput = new CommandOutput.fromCase(
        testCase,
        testCase.commands.first,
        command_output['exit_code'],
        false,
        command_output['did_timeout'],
        encodeUtf8(command_output['stdout']),
        encodeUtf8(command_output['stderr']),
        duration,
        false);
    return commandOutput;
  }

  String _indexKey(String configuration, String name) {
    return "${configuration}__$name";
  }
}

View file

@ -318,7 +318,21 @@ Note: currently only implemented for dart2js.''',
          'This address is also used for browsers to connect.',
          ['--local_ip'],
          [],
          '127.0.0.1'),];
          '127.0.0.1'),
      new _TestOptionSpecification(
          'record_to_file',
          'Records all the commands that need to be executed and writes them '
          'out to a file.',
          ['--record_to_file'],
          [],
          null),
      new _TestOptionSpecification(
          'replay_from_file',
          'Replays the command output that was previously recorded to the '
          'given file, instead of executing the commands.',
          ['--replay_from_file'],
          [],
          null),];
}

View file

@ -24,6 +24,7 @@ import "status_file_parser.dart";
import "test_progress.dart";
import "test_suite.dart";
import "utils.dart";
import 'record_and_replay.dart';
const int NO_TIMEOUT = 0;
const int SLOW_TIMEOUT_MULTIPLIER = 4;
@ -1337,6 +1338,10 @@ class ProcessQueue {
  int _numFailedTests = 0;
  bool _allTestsWereEnqueued = false;

  // Support for recording and replaying test commands.
  TestCaseRecorder _testCaseRecorder;
  TestCaseOutputArchive _testCaseOutputArchive;

  /** The number of tests we allow to actually fail before we stop retrying. */
  int _MAX_FAILED_NO_RETRY = 4;
  bool _verbose;
@ -1380,7 +1385,9 @@ class ProcessQueue {
               this._eventListener,
               this._allDone,
               [bool verbose = false,
                bool listTests = false])
                bool listTests = false,
                this._testCaseRecorder,
                this._testCaseOutputArchive])
      : _verbose = verbose,
        _listTests = listTests,
        _tests = new Queue<TestCase>(),
@ -1411,6 +1418,47 @@ class ProcessQueue {
  }

  void _runTests(List<TestSuite> testSuites) {
    var newTest;
    var allTestsKnown;

    if (_testCaseRecorder != null) {
      // Mode: recording.
      newTest = _testCaseRecorder.nextTestCase;
      allTestsKnown = () {
        // We don't call any event*() methods, so test_progress.dart will not be
        // notified (that's fine, since we're not running any tests).
        _testCaseRecorder.finish();
        _allDone();
      };
    } else {
      if (_testCaseOutputArchive != null) {
        // Mode: replaying.
        newTest = (TestCase testCase) {
          // We're doing this asynchronously to emulate the normal behaviour.
          eventTestAdded(testCase);
          Timer.run(() {
            var output = _testCaseOutputArchive.outputOf(testCase);
            testCase.completed();
            eventFinishedTestCase(testCase);
          });
        };
        allTestsKnown = () {
          // If we're replaying commands, we need to call [_cleanupAndMarkDone]
          // manually. We're putting it at the end of the event queue to make
          // sure all the previous events were fired.
          Timer.run(() => _cleanupAndMarkDone());
        };
      } else {
        // Mode: none (we're not recording/replaying).
        newTest = (TestCase testCase) {
          _tests.add(testCase);
          eventTestAdded(testCase);
          _runTest(testCase);
        };
        allTestsKnown = _checkDone;
      }
    }

    // FIXME: For some reason we cannot call this method on all test suites
    // in parallel.
    // If we do, not all tests get enqueued (if --arch=all was specified,
@ -1420,10 +1468,10 @@ class ProcessQueue {
    void enqueueNextSuite() {
      if (!iterator.moveNext()) {
        _allTestsWereEnqueued = true;
        allTestsKnown();
        eventAllTestsKnown();
        _checkDone();
      } else {
        iterator.current.forEachTest(_runTest, _testCache, enqueueNextSuite);
        iterator.current.forEachTest(newTest, _testCache, enqueueNextSuite);
      }
    }

    enqueueNextSuite();
@ -1495,8 +1543,6 @@ class ProcessQueue {
      browserUsed = test.configuration['runtime'];
      if (_needsSelenium) _ensureSeleniumServerRunning();
    }
    eventTestAdded(test);
    _tests.add(test);
    _tryRunTest();
  }