Use native async/await support in pub.

R=rnystrom@google.com

Review URL: https://codereview.chromium.org//896623005

git-svn-id: https://dart.googlecode.com/svn/branches/bleeding_edge/dart@43483 260f80e4-7a28-3924-810f-c04153c831b5
Author: nweiz@google.com
Date:   2015-02-04 20:45:49 +00:00
Commit: 1835c63d0a (parent b744bed3db)
574 changed files with 14 additions and 49751 deletions
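For context, the deleted async_compile.dart below existed only to rewrite pub's await-using source into vanilla Dart before the VM could run it. With native support, code in the following style (a hypothetical pub-style helper, illustrative only and not taken from this commit) runs directly on the VM:

import 'dart:async';
import 'dart:io';

// Hypothetical helper, shown only to illustrate the style of code the VM can
// now execute without the async_await pre-compilation step.
Future<String> readConfig(String path) async {
  var file = new File(path);
  // `await` suspends here natively instead of being rewritten into a chain
  // of .then() callbacks by the async_await compiler.
  if (!await file.exists()) return "";
  return file.readAsString();
}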


@@ -24,9 +24,9 @@
 # the pub directory instead of in tests/pub. Xcode can only handle
 # a certain amount of files in one list (also depending on the
 # length of the path from where you run). This regexp excludes
-# pub/test and pub_generated/test
+# pub/test
 '<!@(["python", "tools/list_files.py",'
-'"^(?!.*pub/test)(?!.*pub_generated/test).*dart$",'
+'"^(?!.*pub/test).*dart$",'
 '"sdk/lib"])',
 '<!@(["python", "tools/list_files.py", "", '
 '"sdk/lib/_internal/compiler/js_lib/preambles"])',


@@ -56,6 +56,6 @@ fi
 DART="$BUILD_DIR/dart-sdk/bin/dart"
 PACKAGES_DIR="$BUILD_DIR/packages/"
-# Run the async/await compiled pub.
-PUB="$SDK_DIR/lib/_internal/pub_generated/bin/pub.dart"
+# Run pub.
+PUB="$SDK_DIR/lib/_internal/pub/bin/pub.dart"
 exec "$DART" "${VM_OPTIONS[@]}" "--package-root=$PACKAGES_DIR" "$PUB" "$@"


@@ -34,8 +34,8 @@ set BUILD_DIR=%SDK_DIR%\..\build\ReleaseIA32
 set PACKAGES_DIR=%BUILD_DIR%\packages
 set DART=%BUILD_DIR%\dart-sdk\bin\dart
-rem Run the async/await compiled pub.
-set PUB="%SDK_DIR%\lib\_internal\pub_generated\bin\pub.dart"
+rem Run pub.
+set PUB="%SDK_DIR%\lib\_internal\pub.dart"
 "%DART%" %VM_OPTIONS% --package-root="%PACKAGES_DIR%" "%PUB%" %*
 endlocal


@@ -1,294 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import 'dart:io';
import 'package:args/args.dart';
import 'package:analyzer/src/services/formatter_impl.dart';
import 'package:async_await/async_await.dart' as async_await;
import 'package:stack_trace/stack_trace.dart';
import 'package:path/path.dart' as p;
/// The path to pub's root directory (sdk/lib/_internal/pub) in the Dart repo.
///
/// This assumes this script is itself being run from within the repo.
final sourceDir = p.dirname(p.dirname(p.fromUri(Platform.script)));
/// The [sourceDir] as a URL, for use in import strings.
final sourceUrl = p.toUri(sourceDir).toString();
/// The directory that compiler output should be written to.
String generatedDir;
/// `true` if any file failed to compile.
bool hadFailure = false;
bool verbose = false;
/// Prefix for imports in pub that import dart2js libraries.
final _compilerPattern = new RegExp(r"import '(\.\./)+compiler");
/// Matches the Git commit hash of the compiler stored in the README.md file.
///
/// This is used both to find the current commit and replace it with the new
/// one.
final _commitPattern = new RegExp(r"[a-f0-9]{40}");
/// The template for the README that's added to the generated source.
///
/// This is used to store the current commit of the async_await compiler.
const _README = """
Pub is currently dogfooding the new Dart async/await syntax. Since the Dart VM
doesn't natively support it yet, we are using the [async-await][] compiler
package.
[async-await]: https://github.com/dart-lang/async_await
We run that to compile pub-using-await from sdk/lib/_internal/pub down to
vanilla Dart code which is what you see here. To interoperate more easily with
the rest of the repository, we check in that generated code.
When bug #104 is fixed, we can remove this entirely.
The code here was compiled using the async-await compiler at commit:
<<COMMIT>>
(Note: this file is also parsed by a tool to update the above commit, so be
careful not to reformat it.)
""";
/// This runs the async/await compiler on all of the pub source code.
///
/// It reads from the repo and writes the compiled output into the given build
/// directory (using the same file names and relative layout). Does not
/// compile files that haven't changed since the last time they were compiled.
// TODO(rnystrom): Remove this when #104 is fixed.
void main(List<String> arguments) {
var parser = new ArgParser(allowTrailingOptions: true);
parser.addFlag("verbose", callback: (value) => verbose = value);
var force = false;
parser.addFlag("force", callback: (value) => force = value);
var buildDir;
parser.addOption("snapshot-build-dir", callback: (value) => buildDir = value);
try {
var rest = parser.parse(arguments).rest;
if (rest.isEmpty) {
throw new FormatException('Missing generated directory.');
} else if (rest.length > 1) {
throw new FormatException(
'Unexpected arguments: ${rest.skip(1).join(" ")}.');
}
generatedDir = rest.first;
} on FormatException catch(ex) {
stderr.writeln(ex);
stderr.writeln();
stderr.writeln(
"Usage: dart async_compile.dart [--verbose] [--force] "
"[--snapshot-build-dir <dir>] <generated dir>");
exit(64);
}
// See what version (i.e. Git commit) of the async-await compiler we
// currently have. If this is different from the version that was used to
// compile the sources, recompile everything.
var currentCommit = _getCurrentCommit();
var readmePath = p.join(generatedDir, "README.md");
var lastCommit;
try {
var readme = new File(readmePath).readAsStringSync();
var match = _commitPattern.firstMatch(readme);
if (match == null) {
stderr.writeln("Could not find compiler commit hash in README.md.");
exit(1);
}
lastCommit = match[0];
} on IOException catch (error, stackTrace) {
if (verbose) {
stderr.writeln("Failed to load $readmePath: $error\n"
"${new Trace.from(stackTrace)}");
}
}
var numFiles = 0;
var numCompiled = 0;
// Compile any modified or missing files.
var sources = new Set();
for (var entry in new Directory(sourceDir).listSync(recursive: true)) {
if (p.extension(entry.path) != ".dart") continue;
numFiles++;
var relative = p.relative(entry.path, from: sourceDir);
sources.add(relative);
var sourceFile = entry as File;
var destPath = p.join(generatedDir, relative);
var destFile = new File(destPath);
if (force ||
currentCommit != lastCommit ||
!destFile.existsSync() ||
entry.lastModifiedSync().isAfter(destFile.lastModifiedSync())) {
_compile(sourceFile.path, sourceFile.readAsStringSync(), destPath);
numCompiled++;
if (verbose) print("Compiled $relative");
}
}
// Delete any previously compiled files whose source no longer exists.
for (var entry in new Directory(generatedDir).listSync(recursive: true)) {
if (p.extension(entry.path) != ".dart") continue;
var relative = p.relative(entry.path, from: generatedDir);
if (!sources.contains(relative)) {
_deleteFile(entry.path);
if (verbose) print("Deleted $relative");
}
}
// Update the README.
if (currentCommit != lastCommit) {
_writeFile(readmePath, _README.replaceAll("<<COMMIT>>", currentCommit));
if (verbose) print("Updated README.md");
}
if (numCompiled > 0 && buildDir != null) _generateSnapshot(buildDir);
if (verbose) print("Compiled $numCompiled out of $numFiles files");
if (hadFailure) exit(1);
}
String _getCurrentCommit() {
var command = "git";
var args = ["rev-parse", "HEAD"];
// Spawning a process on Windows will not look for the executable in the
// system path so spawn git through a shell to find it.
if (Platform.operatingSystem == "windows") {
command = "cmd";
args = ["/c", "git"]..addAll(args);
}
var result = Process.runSync(command, args, workingDirectory:
p.join(sourceDir, "../../../../third_party/pkg/async_await"));
if (result.exitCode != 0) {
stderr.writeln("Could not get Git revision of async_await compiler.");
exit(1);
}
return result.stdout.trim();
}
void _compile(String sourcePath, String source, String destPath) {
var destDir = new Directory(p.dirname(destPath));
destDir.createSync(recursive: true);
source = _translateAsyncAwait(sourcePath, source);
if (source != null) source = _fixDart2jsImports(sourcePath, source, destPath);
if (source == null) {
// If the async compile fails, delete the file so that we don't try to
// run the stale previous output and so that we try to recompile it later.
_deleteFile(destPath);
} else {
_writeFile(destPath, source);
}
}
/// Runs the async/await compiler on [source].
///
/// Returns the translated Dart code or `null` if the compiler failed.
String _translateAsyncAwait(String sourcePath, String source) {
if (p.isWithin(p.join(sourceDir, "asset"), sourcePath)) {
// Don't run the async compiler on the special "asset" source files. These
// have preprocessor comments that get discarded by the compiler.
return source;
}
try {
source = async_await.compile(source);
// Reformat the result since the compiler ditches all whitespace.
// TODO(rnystrom): Remove when this is fixed:
// https://github.com/dart-lang/async_await/issues/12
var result = new CodeFormatter().format(CodeKind.COMPILATION_UNIT, source);
return result.source;
} catch (ex) {
stderr.writeln("Async compile failed on $sourcePath:\n$ex");
hadFailure = true;
return null;
}
}
/// Fix relative imports to dart2js libraries.
///
/// Pub imports dart2js using relative imports that reach outside of pub's
/// source tree. Since the build directory is in a different location, we need
/// to fix those to be valid relative imports from the build directory.
String _fixDart2jsImports(String sourcePath, String source, String destPath) {
var compilerDir = p.url.join(sourceUrl, "../compiler");
var relative = p.url.relative(compilerDir,
from: p.url.dirname(p.toUri(destPath).toString()));
return source.replaceAll(_compilerPattern, "import '$relative");
}
/// Regenerate the pub snapshot from the async/await-compiled output. We do
/// this here since the tests need it and it's faster than doing a full SDK
/// build.
void _generateSnapshot(String buildDir) {
buildDir = p.normalize(buildDir);
new Directory(buildDir).createSync(recursive: true);
var entrypoint = p.join(generatedDir, 'bin/pub.dart');
var packageRoot = p.join(buildDir, 'packages');
var snapshot = p.join(buildDir, 'dart-sdk/bin/snapshots/pub.dart.snapshot');
var result = Process.runSync(Platform.executable, [
"--package-root=$packageRoot",
"--snapshot=$snapshot",
entrypoint
]);
if (result.exitCode != 0) {
stderr.writeln("Failed to generate snapshot:");
if (result.stderr.trim().isNotEmpty) stderr.writeln(result.stderr);
if (result.stdout.trim().isNotEmpty) stderr.writeln(result.stdout);
exit(result.exitCode);
}
if (verbose) print("Created pub snapshot");
}
/// Deletes the file at [path], ignoring any IO errors that occur.
///
/// This swallows errors to accommodate multiple compilers running concurrently.
/// Since they will produce the same output anyway, a failure of one is fine.
void _deleteFile(String path) {
try {
new File(path).deleteSync();
} on IOException catch (ex) {
// Do nothing.
}
}
/// Writes [contents] to [path], ignoring any IO errors that occur.
///
/// This swallows errors to accommodate multiple compilers running concurrently.
/// Since they will produce the same output anyway, a failure of one is fine.
void _writeFile(String path, String contents) {
try {
new File(path).writeAsStringSync(contents);
} on IOException catch (ex) {
// Do nothing.
}
}


@@ -1,28 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import 'dart:io';
import 'package:path/path.dart' as p;
import 'package:scheduled_test/descriptor.dart' as d;
import 'package:scheduled_test/scheduled_test.dart';
import 'package:scheduled_test/scheduled_process.dart';
import 'test_pub.dart';
import '../lib/src/io.dart';
void main() {
integration("the generated pub source is up to date", () {
var compilerArgs = Platform.executableArguments.toList()..addAll([
p.join(pubRoot, 'bin', 'async_compile.dart'),
'--force', '--verbose',
p.join(sandboxDir, "pub_generated")
]);
new ScheduledProcess.start(Platform.executable, compilerArgs).shouldExit(0);
new d.DirectoryDescriptor.fromFilesystem("pub_generated",
p.join(pubRoot, "..", "pub_generated")).validate();
});
}


@@ -809,16 +809,8 @@ Map packageMap(String name, String version, [Map dependencies]) {
 }
 /// Resolves [target] relative to the path to pub's `test/asset` directory.
-String testAssetPath(String target) {
-  var libPath = libraryPath('test_pub');
-  // We are running from the generated directory, but non-dart assets are only
-  // in the canonical directory.
-  // TODO(rnystrom): Remove this when #104 is fixed.
-  libPath = libPath.replaceAll('pub_generated', 'pub');
-  return p.join(p.dirname(libPath), 'asset', target);
-}
+String testAssetPath(String target) =>
+    p.join(p.dirname(libraryPath('test_pub')), 'asset', target);
 /// Returns a Map in the format used by the pub.dartlang.org API to represent a
 /// package version.


@@ -1,18 +0,0 @@
Pub is currently dogfooding the new Dart async/await syntax. Since the Dart VM
doesn't natively support it yet, we are using the [async-await][] compiler
package.
[async-await]: https://github.com/dart-lang/async_await
We run that to compile pub-using-await from sdk/lib/_internal/pub down to
vanilla Dart code which is what you see here. To interoperate more easily with
the rest of the repository, we check in that generated code.
When bug #104 is fixed, we can remove this entirely.
The code here was compiled using the async-await compiler at commit:
8b401a9f2e5e81dca5f70dbe7564112a0823dee6
(Note: this file is also parsed by a tool to update the above commit, so be
careful not to reformat it.)


@@ -1,184 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.serialize;
import 'dart:async';
import 'dart:isolate';
import 'package:barback/barback.dart';
//# if source_maps >=0.9.0 <0.10.0
//> import 'package:source_maps/span.dart';
//# end
//# if source_span
import 'package:source_span/source_span.dart';
//# end
import 'serialize/exception.dart';
import 'utils.dart';
export 'serialize/aggregate_transform.dart';
export 'serialize/exception.dart';
export 'serialize/transform.dart';
export 'serialize/transformer.dart';
/// Converts [id] into a serializable map.
Map serializeId(AssetId id) => {'package': id.package, 'path': id.path};
/// Converts a serializable map into an [AssetId].
AssetId deserializeId(Map id) => new AssetId(id['package'], id['path']);
/// Converts [span] into a serializable map.
///
/// [span] may be a [SourceSpan] or a [Span].
Map serializeSpan(span) {
// TODO(nweiz): convert FileSpans to FileSpans.
// Handily, this code works for both source_map and source_span spans.
return {
'sourceUrl': span.sourceUrl.toString(),
'start': serializeLocation(span.start),
'end': serializeLocation(span.end),
'text': span.text,
};
}
/// Converts a serializable map into a [SourceSpan].
SourceSpan deserializeSpan(Map span) {
return new SourceSpan(
deserializeLocation(span['start']),
deserializeLocation(span['end']),
span['text']);
}
/// Converts [location] into a serializable map.
///
/// [location] may be a [SourceLocation] or a [Location].
Map serializeLocation(location) {
//# if source_maps >=0.9.0 <0.10.0
//> if (location is Location) {
//> return {
//> 'sourceUrl': location.sourceUrl,
//> 'offset': location.offset,
//> 'line': location.line,
//> 'column': location.column
//> };
//> }
//# end
//# if source_span
// TODO(nweiz): convert FileLocations to FileLocations.
if (location is SourceLocation) {
return {
'sourceUrl': location.sourceUrl.toString(),
'offset': location.offset,
'line': location.line,
'column': location.column
};
}
//# end
throw new ArgumentError("Unknown type ${location.runtimeType} for location.");
}
/// Converts a serializable map into a [Location].
SourceLocation deserializeLocation(Map location) {
return new SourceLocation(location['offset'],
sourceUrl: location['sourceUrl'],
line: location['line'],
column: location['column']);
}
/// Converts [stream] into a serializable map.
///
/// [serializeEvent] is used to serialize each event from the stream.
Map serializeStream(Stream stream, serializeEvent(event)) {
var receivePort = new ReceivePort();
var map = {'replyTo': receivePort.sendPort};
receivePort.first.then((message) {
var sendPort = message['replyTo'];
stream.listen((event) {
sendPort.send({
'type': 'event',
'value': serializeEvent(event)
});
}, onError: (error, stackTrace) {
sendPort.send({
'type': 'error',
'error': serializeException(error, stackTrace)
});
}, onDone: () => sendPort.send({'type': 'done'}));
});
return map;
}
/// Converts a serializable map into a [Stream].
///
/// [deserializeEvent] is used to deserialize each event from the stream.
Stream deserializeStream(Map stream, deserializeEvent(event)) {
return callbackStream(() {
var receivePort = new ReceivePort();
stream['replyTo'].send({'replyTo': receivePort.sendPort});
var controller = new StreamController(sync: true);
receivePort.listen((event) {
switch (event['type']) {
case 'event':
controller.add(deserializeEvent(event['value']));
break;
case 'error':
var exception = deserializeException(event['error']);
controller.addError(exception, exception.stackTrace);
break;
case 'done':
controller.close();
receivePort.close();
break;
}
});
return controller.stream;
});
}
/// Wraps [message] and sends it across [port], then waits for a response which
/// should be sent using [respond].
///
/// The returned Future will complete to the value or error returned by
/// [respond].
Future call(SendPort port, message) {
var receivePort = new ReceivePort();
port.send({
'message': message,
'replyTo': receivePort.sendPort
});
return receivePort.first.then((response) {
if (response['type'] == 'success') return response['value'];
assert(response['type'] == 'error');
var exception = deserializeException(response['error']);
return new Future.error(exception, exception.stackTrace);
});
}
/// Responds to a message sent by [call].
///
/// [wrappedMessage] is the raw message sent by [call]. This unwraps it and
/// passes the contents of the message to [callback], then sends the return
/// value of [callback] back to [call]. If [callback] returns a Future or
/// throws an error, that will also be sent.
void respond(wrappedMessage, callback(message)) {
var replyTo = wrappedMessage['replyTo'];
new Future.sync(() => callback(wrappedMessage['message']))
.then((result) => replyTo.send({'type': 'success', 'value': result}))
.catchError((error, stackTrace) {
replyTo.send({
'type': 'error',
'error': serializeException(error, stackTrace)
});
});
}
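As a rough usage sketch (not part of pub; it assumes the call() and respond() helpers above are importable from this library), one isolate can answer requests while another awaits the reply:

import 'dart:async';
import 'dart:isolate';

void echoIsolate(SendPort handshake) {
  var requests = new ReceivePort();
  handshake.send(requests.sendPort);
  requests.listen((wrapped) {
    // `wrapped` is the {'message': ..., 'replyTo': ...} map built by call();
    // respond() unwraps it and sends the callback's return value back.
    respond(wrapped, (message) => "echo: $message");
  });
}

Future main() async {
  var handshake = new ReceivePort();
  await Isolate.spawn(echoIsolate, handshake.sendPort);
  var remote = await handshake.first;
  print(await call(remote, "hello")); // prints "echo: hello"
}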


@@ -1,173 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.serialize.aggregate_transform;
import 'dart:async';
import 'dart:isolate';
import 'package:barback/barback.dart';
// TODO(nweiz): don't import from "src" once issue 14966 is fixed.
import 'package:barback/src/internal_asset.dart';
import '../serialize.dart';
import 'get_input_transform.dart';
/// Serialize the methods shared between [AggregateTransform] and
/// [DeclaringAggregateTransform].
///
/// [additionalFields] contains additional serialized fields to add to the
/// serialized transform. [methodHandlers] is a set of additional methods. Each
/// value should take a JSON message and return the response (which may be a
/// Future).
Map _serializeBaseAggregateTransform(transform, Map additionalFields,
Map<String, Function> methodHandlers) {
var receivePort = new ReceivePort();
receivePort.listen((wrappedMessage) {
respond(wrappedMessage, (message) {
var handler = methodHandlers[message['type']];
if (handler != null) return handler(message);
if (message['type'] == 'consumePrimary') {
transform.consumePrimary(deserializeId(message['assetId']));
return null;
}
assert(message['type'] == 'log');
var method = {
'Info': transform.logger.info,
'Fine': transform.logger.fine,
'Warning': transform.logger.warning,
'Error': transform.logger.error
}[message['level']];
assert(method != null);
var assetId = message['assetId'] == null ? null :
deserializeId(message['assetId']);
var span = message['span'] == null ? null :
deserializeSpan(message['span']);
method(message['message'], asset: assetId, span: span);
});
});
return {
'port': receivePort.sendPort,
'key': transform.key,
'package': transform.package
}..addAll(additionalFields);
}
/// Converts [transform] into a serializable map.
Map serializeAggregateTransform(AggregateTransform transform) {
return _serializeBaseAggregateTransform(transform, {
'primaryInputs': serializeStream(transform.primaryInputs, serializeAsset)
}, {
'getInput': (message) => transform.getInput(deserializeId(message['id']))
.then((asset) => serializeAsset(asset)),
'addOutput': (message) =>
transform.addOutput(deserializeAsset(message['output']))
});
}
/// Converts [transform] into a serializable map.
Map serializeDeclaringAggregateTransform(
DeclaringAggregateTransform transform) {
return _serializeBaseAggregateTransform(transform, {
'primaryIds': serializeStream(transform.primaryIds, serializeId)
}, {
'declareOutput': (message) =>
transform.declareOutput(deserializeId(message['output']))
});
}
/// The base class for wrappers for [AggregateTransform]s that are in the host
/// isolate.
class _ForeignBaseAggregateTransform {
/// The port with which we communicate with the host isolate.
///
/// This port and all messages sent across it are specific to this transform.
final SendPort _port;
final String key;
final String package;
TransformLogger get logger => _logger;
TransformLogger _logger;
_ForeignBaseAggregateTransform(Map transform)
: _port = transform['port'],
key = transform['key'],
package = transform['package'] {
_logger = new TransformLogger((assetId, level, message, span) {
call(_port, {
'type': 'log',
'level': level.name,
'message': message,
'assetId': assetId == null ? null : serializeId(assetId),
'span': span == null ? null : serializeSpan(span)
});
});
}
void consumePrimary(AssetId id) {
call(_port, {'type': 'consumePrimary', 'assetId': serializeId(id)});
}
}
// We can get away with only removing the class declarations in incompatible
// barback versions because merely referencing undefined types in type
// annotations isn't a static error. Only implementing an undefined interface is
// a static error.
//# if barback >=0.14.1
/// A wrapper for an [AggregateTransform] that's in the host isolate.
///
/// This retrieves inputs from and sends outputs and logs to the host isolate.
class ForeignAggregateTransform extends _ForeignBaseAggregateTransform
with GetInputTransform implements AggregateTransform {
final Stream<Asset> primaryInputs;
/// Creates a transform from a serialized map sent from the host isolate.
ForeignAggregateTransform(Map transform)
: primaryInputs = deserializeStream(
transform['primaryInputs'], deserializeAsset),
super(transform);
Future<Asset> getInput(AssetId id) {
return call(_port, {
'type': 'getInput',
'id': serializeId(id)
}).then(deserializeAsset);
}
void addOutput(Asset output) {
call(_port, {
'type': 'addOutput',
'output': serializeAsset(output)
});
}
}
/// A wrapper for a [DeclaringAggregateTransform] that's in the host isolate.
class ForeignDeclaringAggregateTransform
extends _ForeignBaseAggregateTransform
implements DeclaringAggregateTransform {
final Stream<AssetId> primaryIds;
/// Creates a transform from a serializable map sent from the host isolate.
ForeignDeclaringAggregateTransform(Map transform)
: primaryIds = deserializeStream(
transform['primaryIds'], deserializeId),
super(transform);
void declareOutput(AssetId id) {
call(_port, {
'type': 'declareOutput',
'output': serializeId(id)
});
}
}
//# end


@@ -1,102 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.serialize.exception;
import 'package:barback/barback.dart';
import 'package:stack_trace/stack_trace.dart';
import '../utils.dart';
/// An exception that was originally raised in another isolate.
///
/// Exception objects can't cross isolate boundaries in general, so this class
/// wraps as much information as can be consistently serialized.
class CrossIsolateException implements Exception {
/// The name of the type of exception thrown.
///
/// This is the return value of [error.runtimeType.toString()]. Keep in mind
/// that objects in different libraries may have the same type name.
final String type;
/// The exception's message, or its [toString] if it didn't expose a `message`
/// property.
final String message;
/// The exception's stack chain, or `null` if no stack chain was available.
final Chain stackTrace;
/// Loads a [CrossIsolateException] from a serialized representation.
///
/// [error] should be the result of [CrossIsolateException.serialize].
CrossIsolateException.deserialize(Map error)
: type = error['type'],
message = error['message'],
stackTrace = error['stack'] == null ? null :
new Chain.parse(error['stack']);
/// Serializes [error] to an object that can safely be passed across isolate
/// boundaries.
static Map serialize(error, [StackTrace stack]) {
if (stack == null && error is Error) stack = error.stackTrace;
return {
'type': error.runtimeType.toString(),
'message': getErrorMessage(error),
'stack': stack == null ? null : new Chain.forTrace(stack).toString()
};
}
String toString() => "$message\n$stackTrace";
}
/// An [AssetNotFoundException] that was originally raised in another isolate.
class _CrossIsolateAssetNotFoundException extends CrossIsolateException
implements AssetNotFoundException {
final AssetId id;
String get message => "Could not find asset $id.";
/// Loads a [_CrossIsolateAssetNotFoundException] from a serialized
/// representation.
///
/// [error] should be the result of
/// [_CrossIsolateAssetNotFoundException.serialize].
_CrossIsolateAssetNotFoundException.deserialize(Map error)
: id = new AssetId(error['package'], error['path']),
super.deserialize(error);
/// Serializes [error] to an object that can safely be passed across isolate
/// boundaries.
static Map serialize(AssetNotFoundException error, [StackTrace stack]) {
var map = CrossIsolateException.serialize(error);
map['package'] = error.id.package;
map['path'] = error.id.path;
return map;
}
}
/// Serializes [error] to an object that can safely be passed across isolate
/// boundaries.
///
/// This handles [AssetNotFoundException]s specially, ensuring that their
/// metadata is preserved.
Map serializeException(error, [StackTrace stack]) {
if (error is AssetNotFoundException) {
return _CrossIsolateAssetNotFoundException.serialize(error, stack);
} else {
return CrossIsolateException.serialize(error, stack);
}
}
/// Loads an exception from a serialized representation.
///
/// This handles [AssetNotFoundException]s specially, ensuring that their
/// metadata is preserved.
CrossIsolateException deserializeException(Map error) {
if (error['type'] == 'AssetNotFoundException') {
return new _CrossIsolateAssetNotFoundException.deserialize(error);
} else {
return new CrossIsolateException.deserialize(error);
}
}
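A minimal sketch of the round trip these helpers provide (assuming a throwaway script that imports this library; the FormatException is just an example error):

void main() {
  try {
    throw new FormatException("bad pubspec");
  } catch (error, stackTrace) {
    var wire = serializeException(error, stackTrace); // a plain Map, isolate-safe
    var rebuilt = deserializeException(wire);         // a CrossIsolateException
    print(rebuilt.type);    // FormatException
    print(rebuilt.message); // bad pubspec
  }
}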


@@ -1,34 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.serialize.get_input_transform;
import 'dart:async';
import 'dart:convert';
import 'package:barback/barback.dart';
import '../utils.dart';
/// A mixin for transforms that support [getInput] and the associated suite of
/// methods.
abstract class GetInputTransform {
Future<Asset> getInput(AssetId id);
Future<String> readInputAsString(AssetId id, {Encoding encoding}) {
if (encoding == null) encoding = UTF8;
return getInput(id).then((input) =>
input.readAsString(encoding: encoding));
}
Stream<List<int>> readInput(AssetId id) =>
futureStream(getInput(id).then((input) => input.read()));
Future<bool> hasInput(AssetId id) {
return getInput(id).then((_) => true).catchError((error) {
if (error is AssetNotFoundException && error.id == id) return false;
throw error;
});
}
}


@@ -1,149 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.serialize.transform;
import 'dart:async';
import 'dart:isolate';
import 'package:barback/barback.dart';
// TODO(nweiz): don't import from "src" once issue 14966 is fixed.
import 'package:barback/src/internal_asset.dart';
import '../serialize.dart';
import 'get_input_transform.dart';
/// Serialize the methods shared between [Transform] and [DeclaringTransform].
///
/// [additionalFields] contains additional serialized fields to add to the
/// serialized transform. [methodHandlers] is a set of additional methods. Each
/// value should take a JSON message and return the response (which may be a
/// Future).
Map _serializeBaseTransform(transform, Map additionalFields,
Map<String, Function> methodHandlers) {
var receivePort = new ReceivePort();
receivePort.listen((wrappedMessage) {
respond(wrappedMessage, (message) {
var handler = methodHandlers[message['type']];
if (handler != null) return handler(message);
if (message['type'] == 'consumePrimary') {
transform.consumePrimary();
return null;
}
assert(message['type'] == 'log');
var method = {
'Info': transform.logger.info,
'Fine': transform.logger.fine,
'Warning': transform.logger.warning,
'Error': transform.logger.error
}[message['level']];
assert(method != null);
var assetId = message['assetId'] == null ? null :
deserializeId(message['assetId']);
var span = message['span'] == null ? null :
deserializeSpan(message['span']);
method(message['message'], asset: assetId, span: span);
});
});
return {'port': receivePort.sendPort}..addAll(additionalFields);
}
/// Converts [transform] into a serializable map.
Map serializeTransform(Transform transform) {
return _serializeBaseTransform(transform, {
'primaryInput': serializeAsset(transform.primaryInput)
}, {
'getInput': (message) => transform.getInput(deserializeId(message['id']))
.then((asset) => serializeAsset(asset)),
'addOutput': (message) =>
transform.addOutput(deserializeAsset(message['output']))
});
}
/// Converts [transform] into a serializable map.
Map serializeDeclaringTransform(DeclaringTransform transform) {
return _serializeBaseTransform(transform, {
'primaryId': serializeId(transform.primaryId)
}, {
'declareOutput': (message) =>
transform.declareOutput(deserializeId(message['output']))
});
}
/// The base class for wrappers for [Transform]s that are in the host isolate.
class _ForeignBaseTransform {
/// The port with which we communicate with the host isolate.
///
/// This port and all messages sent across it are specific to this transform.
final SendPort _port;
TransformLogger get logger => _logger;
TransformLogger _logger;
_ForeignBaseTransform(Map transform)
: _port = transform['port'] {
_logger = new TransformLogger((assetId, level, message, span) {
call(_port, {
'type': 'log',
'level': level.name,
'message': message,
'assetId': assetId == null ? null : serializeId(assetId),
'span': span == null ? null : serializeSpan(span)
});
});
}
void consumePrimary() {
call(_port, {'type': 'consumePrimary'});
}
}
/// A wrapper for a [Transform] that's in the host isolate.
///
/// This retrieves inputs from and sends outputs and logs to the host isolate.
class ForeignTransform extends _ForeignBaseTransform
with GetInputTransform implements Transform {
final Asset primaryInput;
/// Creates a transform from a serialized map sent from the host isolate.
ForeignTransform(Map transform)
: primaryInput = deserializeAsset(transform['primaryInput']),
super(transform);
Future<Asset> getInput(AssetId id) {
return call(_port, {
'type': 'getInput',
'id': serializeId(id)
}).then(deserializeAsset);
}
void addOutput(Asset output) {
call(_port, {
'type': 'addOutput',
'output': serializeAsset(output)
});
}
}
/// A wrapper for a [DeclaringTransform] that's in the host isolate.
class ForeignDeclaringTransform extends _ForeignBaseTransform
implements DeclaringTransform {
final AssetId primaryId;
/// Creates a transform from a serializable map sent from the host isolate.
ForeignDeclaringTransform(Map transform)
: primaryId = deserializeId(transform['primaryId']),
super(transform);
void declareOutput(AssetId id) {
call(_port, {
'type': 'declareOutput',
'output': serializeId(id)
});
}
}


@@ -1,126 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.serialize.transformer;
import 'dart:async';
import 'dart:isolate';
import 'package:barback/barback.dart';
import '../serialize.dart';
import 'transform.dart';
/// Converts [transformer] into a serializable map.
Map _serializeTransformer(Transformer transformer) {
var port = new ReceivePort();
port.listen((wrappedMessage) {
respond(wrappedMessage, (message) {
if (message['type'] == 'isPrimary') {
return transformer.isPrimary(deserializeId(message['id']));
} else if (message['type'] == 'declareOutputs') {
return new Future.sync(() {
return (transformer as DeclaringTransformer).declareOutputs(
new ForeignDeclaringTransform(message['transform']));
}).then((_) => null);
} else {
assert(message['type'] == 'apply');
// Make sure we return null so that if the transformer's [apply] returns
// a non-serializable value it doesn't cause problems.
return new Future.sync(() {
return transformer.apply(new ForeignTransform(message['transform']));
}).then((_) => null);
}
});
});
var type;
if (transformer is LazyTransformer) {
type = 'LazyTransformer';
} else if (transformer is DeclaringTransformer) {
type = 'DeclaringTransformer';
} else {
type = 'Transformer';
}
return {
'type': type,
'toString': transformer.toString(),
'port': port.sendPort
};
}
/// Converts [transformer] into a serializable map.
Map _serializeAggregateTransformer(AggregateTransformer transformer) {
var port = new ReceivePort();
port.listen((wrappedMessage) {
respond(wrappedMessage, (message) {
if (message['type'] == 'classifyPrimary') {
return transformer.classifyPrimary(deserializeId(message['id']));
} else if (message['type'] == 'declareOutputs') {
return new Future.sync(() {
return (transformer as DeclaringAggregateTransformer).declareOutputs(
new ForeignDeclaringAggregateTransform(message['transform']));
}).then((_) => null);
} else {
assert(message['type'] == 'apply');
// Make sure we return null so that if the transformer's [apply] returns
// a non-serializable value it doesn't cause problems.
return new Future.sync(() {
return transformer.apply(
new ForeignAggregateTransform(message['transform']));
}).then((_) => null);
}
});
});
var type;
if (transformer is LazyAggregateTransformer) {
type = 'LazyAggregateTransformer';
} else if (transformer is DeclaringAggregateTransformer) {
type = 'DeclaringAggregateTransformer';
} else {
type = 'AggregateTransformer';
}
return {
'type': type,
'toString': transformer.toString(),
'port': port.sendPort
};
}
// Converts [group] into a serializable map.
Map _serializeTransformerGroup(TransformerGroup group) {
if (group.phases == null) {
throw "TransformerGroup $group phases cannot be null.";
}
return {
'type': 'TransformerGroup',
'toString': group.toString(),
'phases': group.phases.map((phase) {
return phase.map(serializeTransformerLike).toList();
}).toList()
};
}
/// Converts [transformerLike] into a serializable map.
///
/// [transformerLike] can be a [Transformer], an [AggregateTransformer], or a
/// [TransformerGroup].
Map serializeTransformerLike(transformerLike) {
if (transformerLike is Transformer) {
return _serializeTransformer(transformerLike);
} else if (transformerLike is TransformerGroup) {
return _serializeTransformerGroup(transformerLike);
} else {
// This has to be last, since "transformerLike is AggregateTransformer" will
// throw on older versions of barback.
assert(transformerLike is AggregateTransformer);
return _serializeAggregateTransformer(transformerLike);
}
}


@@ -1,112 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.transformer_isolate;
import 'dart:convert';
import 'dart:isolate';
import 'dart:mirrors';
import 'package:barback/barback.dart';
import 'serialize.dart';
/// The mirror system.
///
/// Cached to avoid re-instantiating each time a transformer is initialized.
final _mirrors = currentMirrorSystem();
/// Sets up the initial communication with the host isolate.
void loadTransformers(SendPort replyTo) {
var port = new ReceivePort();
replyTo.send(port.sendPort);
port.listen((wrappedMessage) {
// TODO(nweiz): When issue 19228 is fixed, spin up a separate isolate for
// libraries loaded beyond the first so they can run in parallel.
respond(wrappedMessage, (message) {
var configuration = JSON.decode(message['configuration']);
var mode = new BarbackMode(message['mode']);
return _initialize(message['library'], configuration, mode).
map(serializeTransformerLike).toList();
});
});
}
/// Loads all the transformers and groups defined in [uri].
///
/// Loads the library, finds any [Transformer] or [TransformerGroup] subclasses
/// in it, instantiates them with [configuration] and [mode], and returns them.
List _initialize(String uri, Map configuration, BarbackMode mode) {
var transformerClass = reflectClass(Transformer);
var aggregateClass = _aggregateTransformerClass;
var groupClass = reflectClass(TransformerGroup);
var seen = new Set();
var transformers = [];
loadFromLibrary(library) {
if (seen.contains(library)) return;
seen.add(library);
// Load transformers from libraries exported by [library].
for (var dependency in library.libraryDependencies) {
if (!dependency.isExport) continue;
loadFromLibrary(dependency.targetLibrary);
}
// TODO(nweiz): if no valid transformers are found, throw an error message
// describing candidates and why they were rejected.
transformers.addAll(library.declarations.values.map((declaration) {
if (declaration is! ClassMirror) return null;
var classMirror = declaration;
if (classMirror.isPrivate) return null;
if (classMirror.isAbstract) return null;
if (!classMirror.isSubtypeOf(transformerClass) &&
!classMirror.isSubtypeOf(groupClass) &&
(aggregateClass == null ||
!classMirror.isSubtypeOf(aggregateClass))) {
return null;
}
var constructor = _getConstructor(classMirror, 'asPlugin');
if (constructor == null) return null;
if (constructor.parameters.isEmpty) {
if (configuration.isNotEmpty) return null;
return classMirror.newInstance(const Symbol('asPlugin'), []).reflectee;
}
if (constructor.parameters.length != 1) return null;
return classMirror.newInstance(const Symbol('asPlugin'),
[new BarbackSettings(configuration, mode)]).reflectee;
}).where((classMirror) => classMirror != null));
}
var library = _mirrors.libraries[Uri.parse(uri)];
// This should only happen if something's wrong with the logic in pub itself.
// If it were user error, the entire isolate would fail to load.
if (library == null) throw "Couldn't find library at $uri.";
loadFromLibrary(library);
return transformers;
}
// TODO(nweiz): clean this up when issue 13248 is fixed.
MethodMirror _getConstructor(ClassMirror classMirror, String constructor) {
var name = new Symbol("${MirrorSystem.getName(classMirror.simpleName)}"
".$constructor");
var candidate = classMirror.declarations[name];
if (candidate is MethodMirror && candidate.isConstructor) return candidate;
return null;
}
// Older barbacks don't support [AggregateTransformer], and calling
// [reflectClass] on an undefined class will throw an error, so we just define a
// null getter for them.
//# if barback >=0.14.1
ClassMirror get _aggregateTransformerClass =>
reflectClass(AggregateTransformer);
//# else
//> ClassMirror get _aggregateTransformerClass => null;
//# end
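For reference, a hypothetical transformer that the reflection logic above would discover and instantiate looks like the sketch below: a public, concrete subtype of Transformer exposing an `asPlugin` constructor (UppercaseTransformer is made up for illustration):

import 'dart:async';
import 'package:barback/barback.dart';

class UppercaseTransformer extends Transformer {
  final BarbackSettings settings;

  // _getConstructor looks for exactly this name; a zero-argument asPlugin()
  // would also be accepted when no configuration is supplied.
  UppercaseTransformer.asPlugin(this.settings);

  String get allowedExtensions => ".txt";

  Future apply(Transform transform) {
    return transform.primaryInput.readAsString().then((contents) {
      transform.addOutput(new Asset.fromString(
          transform.primaryInput.id, contents.toUpperCase()));
    });
  }
}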


@@ -1,86 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Functions go in this file as opposed to lib/src/utils.dart if they need to
/// be accessible to the transformer-loading isolate.
library pub.asset.utils;
import 'dart:async';
/// A regular expression to match the exception prefix that some exceptions'
/// [Object.toString] values contain.
final _exceptionPrefix = new RegExp(r'^([A-Z][a-zA-Z]*)?(Exception|Error): ');
/// Get a string description of an exception.
///
/// Many exceptions include the exception class name at the beginning of their
/// [toString], so we remove that if it exists.
String getErrorMessage(error) =>
error.toString().replaceFirst(_exceptionPrefix, '');
/// Returns a buffered stream that will emit the same values as the stream
/// returned by [future] once [future] completes.
///
/// If [future] completes to an error, the return value will emit that error and
/// then close.
///
/// If [broadcast] is true, a broadcast stream is returned. This assumes that
/// the stream returned by [future] will be a broadcast stream as well.
/// [broadcast] defaults to false.
Stream futureStream(Future<Stream> future, {bool broadcast: false}) {
var subscription;
var controller;
future = future.catchError((e, stackTrace) {
// Since [controller] is synchronous, it's likely that emitting an error
// will cause it to be cancelled before we call close.
if (controller != null) controller.addError(e, stackTrace);
if (controller != null) controller.close();
controller = null;
});
onListen() {
future.then((stream) {
if (controller == null) return;
subscription = stream.listen(
controller.add,
onError: controller.addError,
onDone: controller.close);
});
}
onCancel() {
if (subscription != null) subscription.cancel();
subscription = null;
controller = null;
}
if (broadcast) {
controller = new StreamController.broadcast(
sync: true, onListen: onListen, onCancel: onCancel);
} else {
controller = new StreamController(
sync: true, onListen: onListen, onCancel: onCancel);
}
return controller.stream;
}
/// Returns a [Stream] that will emit the same values as the stream returned by
/// [callback].
///
/// [callback] will only be called when the returned [Stream] gets a subscriber.
Stream callbackStream(Stream callback()) {
var subscription;
var controller;
controller = new StreamController(onListen: () {
subscription = callback().listen(controller.add,
onError: controller.addError,
onDone: controller.close);
},
onCancel: () => subscription.cancel(),
onPause: () => subscription.pause(),
onResume: () => subscription.resume(),
sync: true);
return controller.stream;
}
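Two illustrative uses of these helpers (the functions below are invented for the example and are not part of pub):

import 'dart:async';
import 'dart:io';

// futureStream: callers can subscribe immediately; events are forwarded once
// the path lookup completes and the file stream exists.
Stream<List<int>> readLater(Future<String> pathFuture) =>
    futureStream(pathFuture.then((path) => new File(path).openRead()));

// callbackStream: defer the directory listing until someone actually listens.
Stream<FileSystemEntity> lazyList(String dir) =>
    callbackStream(() => new Directory(dir).list());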


@@ -1,294 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import 'dart:io';
import 'package:args/args.dart';
import 'package:analyzer/src/services/formatter_impl.dart';
import 'package:async_await/async_await.dart' as async_await;
import 'package:stack_trace/stack_trace.dart';
import 'package:path/path.dart' as p;
/// The path to pub's root directory (sdk/lib/_internal/pub) in the Dart repo.
///
/// This assumes this script is itself being run from within the repo.
final sourceDir = p.dirname(p.dirname(p.fromUri(Platform.script)));
/// The [sourceDir] as a URL, for use in import strings.
final sourceUrl = p.toUri(sourceDir).toString();
/// The directory that compiler output should be written to.
String generatedDir;
/// `true` if any file failed to compile.
bool hadFailure = false;
bool verbose = false;
/// Prefix for imports in pub that import dart2js libraries.
final _compilerPattern = new RegExp(r"import '(\.\./)+compiler");
/// Matches the Git commit hash of the compiler stored in the README.md file.
///
/// This is used both to find the current commit and replace it with the new
/// one.
final _commitPattern = new RegExp(r"[a-f0-9]{40}");
/// The template for the README that's added to the generated source.
///
/// This is used to store the current commit of the async_await compiler.
const _README = """
Pub is currently dogfooding the new Dart async/await syntax. Since the Dart VM
doesn't natively support it yet, we are using the [async-await][] compiler
package.
[async-await]: https://github.com/dart-lang/async_await
We run that to compile pub-using-await from sdk/lib/_internal/pub down to
vanilla Dart code which is what you see here. To interoperate more easily with
the rest of the repository, we check in that generated code.
When bug #104 is fixed, we can remove this entirely.
The code here was compiled using the async-await compiler at commit:
<<COMMIT>>
(Note: this file is also parsed by a tool to update the above commit, so be
careful not to reformat it.)
""";
/// This runs the async/await compiler on all of the pub source code.
///
/// It reads from the repo and writes the compiled output into the given build
/// directory (using the same file names and relative layout). Does not
/// compile files that haven't changed since the last time they were compiled.
// TODO(rnystrom): Remove this when #104 is fixed.
void main(List<String> arguments) {
var parser = new ArgParser(allowTrailingOptions: true);
parser.addFlag("verbose", callback: (value) => verbose = value);
var force = false;
parser.addFlag("force", callback: (value) => force = value);
var buildDir;
parser.addOption("snapshot-build-dir", callback: (value) => buildDir = value);
try {
var rest = parser.parse(arguments).rest;
if (rest.isEmpty) {
throw new FormatException('Missing generated directory.');
} else if (rest.length > 1) {
throw new FormatException(
'Unexpected arguments: ${rest.skip(1).join(" ")}.');
}
generatedDir = rest.first;
} on FormatException catch (ex) {
stderr.writeln(ex);
stderr.writeln();
stderr.writeln(
"Usage: dart async_compile.dart [--verbose] [--force] "
"[--snapshot-build-dir <dir>] <generated dir>");
exit(64);
}
// See what version (i.e. Git commit) of the async-await compiler we
// currently have. If this is different from the version that was used to
// compile the sources, recompile everything.
var currentCommit = _getCurrentCommit();
var readmePath = p.join(generatedDir, "README.md");
var lastCommit;
try {
var readme = new File(readmePath).readAsStringSync();
var match = _commitPattern.firstMatch(readme);
if (match == null) {
stderr.writeln("Could not find compiler commit hash in README.md.");
exit(1);
}
lastCommit = match[0];
} on IOException catch (error, stackTrace) {
if (verbose) {
stderr.writeln(
"Failed to load $readmePath: $error\n" "${new Trace.from(stackTrace)}");
}
}
var numFiles = 0;
var numCompiled = 0;
// Compile any modified or missing files.
var sources = new Set();
for (var entry in new Directory(sourceDir).listSync(recursive: true)) {
if (p.extension(entry.path) != ".dart") continue;
numFiles++;
var relative = p.relative(entry.path, from: sourceDir);
sources.add(relative);
var sourceFile = entry as File;
var destPath = p.join(generatedDir, relative);
var destFile = new File(destPath);
if (force ||
currentCommit != lastCommit ||
!destFile.existsSync() ||
entry.lastModifiedSync().isAfter(destFile.lastModifiedSync())) {
_compile(sourceFile.path, sourceFile.readAsStringSync(), destPath);
numCompiled++;
if (verbose) print("Compiled $relative");
}
}
// Delete any previously compiled files whose source no longer exists.
for (var entry in new Directory(generatedDir).listSync(recursive: true)) {
if (p.extension(entry.path) != ".dart") continue;
var relative = p.relative(entry.path, from: generatedDir);
if (!sources.contains(relative)) {
_deleteFile(entry.path);
if (verbose) print("Deleted $relative");
}
}
// Update the README.
if (currentCommit != lastCommit) {
_writeFile(readmePath, _README.replaceAll("<<COMMIT>>", currentCommit));
if (verbose) print("Updated README.md");
}
if (numCompiled > 0 && buildDir != null) _generateSnapshot(buildDir);
if (verbose) print("Compiled $numCompiled out of $numFiles files");
if (hadFailure) exit(1);
}
String _getCurrentCommit() {
var command = "git";
var args = ["rev-parse", "HEAD"];
// Spawning a process on Windows will not look for the executable in the
// system path so spawn git through a shell to find it.
if (Platform.operatingSystem == "windows") {
command = "cmd";
args = ["/c", "git"]..addAll(args);
}
var result = Process.runSync(
command,
args,
workingDirectory: p.join(sourceDir, "../../../../third_party/pkg/async_await"));
if (result.exitCode != 0) {
stderr.writeln("Could not get Git revision of async_await compiler.");
exit(1);
}
return result.stdout.trim();
}
void _compile(String sourcePath, String source, String destPath) {
var destDir = new Directory(p.dirname(destPath));
destDir.createSync(recursive: true);
source = _translateAsyncAwait(sourcePath, source);
if (source != null) source = _fixDart2jsImports(sourcePath, source, destPath);
if (source == null) {
// If the async compile fails, delete the file so that we don't try to
// run the stale previous output and so that we try to recompile it later.
_deleteFile(destPath);
} else {
_writeFile(destPath, source);
}
}
/// Runs the async/await compiler on [source].
///
/// Returns the translated Dart code or `null` if the compiler failed.
String _translateAsyncAwait(String sourcePath, String source) {
if (p.isWithin(p.join(sourceDir, "asset"), sourcePath)) {
// Don't run the async compiler on the special "asset" source files. These
// have preprocessor comments that get discarded by the compiler.
return source;
}
try {
source = async_await.compile(source);
// Reformat the result since the compiler ditches all whitespace.
// TODO(rnystrom): Remove when this is fixed:
// https://github.com/dart-lang/async_await/issues/12
var result = new CodeFormatter().format(CodeKind.COMPILATION_UNIT, source);
return result.source;
} catch (ex) {
stderr.writeln("Async compile failed on $sourcePath:\n$ex");
hadFailure = true;
return null;
}
}
/// Fix relative imports to dart2js libraries.
///
/// Pub imports dart2js using relative imports that reach outside of pub's
/// source tree. Since the build directory is in a different location, we need
/// to fix those to be valid relative imports from the build directory.
String _fixDart2jsImports(String sourcePath, String source, String destPath) {
var compilerDir = p.url.join(sourceUrl, "../compiler");
var relative =
p.url.relative(compilerDir, from: p.url.dirname(p.toUri(destPath).toString()));
return source.replaceAll(_compilerPattern, "import '$relative");
}
/// Regenerate the pub snapshot from the async/await-compiled output. We do
/// this here since the tests need it and it's faster than doing a full SDK
/// build.
void _generateSnapshot(String buildDir) {
buildDir = p.normalize(buildDir);
new Directory(buildDir).createSync(recursive: true);
var entrypoint = p.join(generatedDir, 'bin/pub.dart');
var packageRoot = p.join(buildDir, 'packages');
var snapshot = p.join(buildDir, 'dart-sdk/bin/snapshots/pub.dart.snapshot');
var result = Process.runSync(
Platform.executable,
["--package-root=$packageRoot", "--snapshot=$snapshot", entrypoint]);
if (result.exitCode != 0) {
stderr.writeln("Failed to generate snapshot:");
if (result.stderr.trim().isNotEmpty) stderr.writeln(result.stderr);
if (result.stdout.trim().isNotEmpty) stderr.writeln(result.stdout);
exit(result.exitCode);
}
if (verbose) print("Created pub snapshot");
}
/// Deletes the file at [path], ignoring any IO errors that occur.
///
/// This swallows errors to accommodate multiple compilers running concurrently.
/// Since they will produce the same output anyway, a failure of one is fine.
void _deleteFile(String path) {
try {
new File(path).deleteSync();
} on IOException catch (ex) {
// Do nothing.
}
}
/// Writes [contents] to [path], ignoring any IO errors that occur.
///
/// This swallows errors to accommodate multiple compilers running concurrently.
/// Since they will produce the same output anyway, a failure of one is fine.
void _writeFile(String path, String contents) {
try {
new File(path).writeAsStringSync(contents);
} on IOException catch (ex) {
// Do nothing.
}
}


@@ -1,9 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import '../lib/src/command_runner.dart';
void main(List<String> arguments) {
new PubCommandRunner().run(arguments);
}


@@ -1,173 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// A simple library for rendering tree-like structures in ASCII.
library pub.ascii_tree;
import 'package:path/path.dart' as path;
import 'log.dart' as log;
import 'utils.dart';
/// Draws a tree for the given list of files. Given files like:
///
/// TODO
/// example/console_example.dart
/// example/main.dart
/// example/web copy/web_example.dart
/// test/absolute_test.dart
/// test/basename_test.dart
/// test/dirname_test.dart
/// test/extension_test.dart
/// test/is_absolute_test.dart
/// test/is_relative_test.dart
/// test/join_test.dart
/// test/normalize_test.dart
/// test/relative_test.dart
/// test/split_test.dart
/// .gitignore
/// README.md
/// lib/path.dart
/// pubspec.yaml
/// test/all_test.dart
/// test/path_posix_test.dart
/// test/path_windows_test.dart
///
/// this renders:
///
/// |-- .gitignore
/// |-- README.md
/// |-- TODO
/// |-- example
/// | |-- console_example.dart
/// | |-- main.dart
/// | '-- web copy
/// | '-- web_example.dart
/// |-- lib
/// | '-- path.dart
/// |-- pubspec.yaml
/// '-- test
/// |-- absolute_test.dart
/// |-- all_test.dart
/// |-- basename_test.dart
/// | (7 more...)
/// |-- path_windows_test.dart
/// |-- relative_test.dart
/// '-- split_test.dart
///
/// If [baseDir] is passed, it will be used as the root of the tree.
///
/// If [showAllChildren] is `false`, then directories with more than ten items
/// will have their contents truncated. Defaults to `false`.
String fromFiles(List<String> files, {String baseDir, bool showAllChildren}) {
// Parse out the files into a tree of nested maps.
var root = {};
for (var file in files) {
if (baseDir != null) file = path.relative(file, from: baseDir);
var parts = path.split(file);
var directory = root;
for (var part in path.split(file)) {
directory = directory.putIfAbsent(part, () => {});
}
}
// Walk the map recursively and render to a string.
return fromMap(root, showAllChildren: showAllChildren);
}
/// Draws a tree from a nested map. Given a map like:
///
/// {
/// "analyzer": {
/// "args": {
/// "collection": ""
/// },
/// "logging": {}
/// },
/// "barback": {}
/// }
///
/// this renders:
///
/// analyzer
/// |-- args
/// | '-- collection
/// '-- logging
/// barback
///
/// Items with no children should have an empty map as the value.
///
/// If [showAllChildren] is `false`, then directories with more than ten items
/// will have their contents truncated. Defaults to `false`.
String fromMap(Map map, {bool showAllChildren}) {
var buffer = new StringBuffer();
_draw(buffer, "", null, map, showAllChildren: showAllChildren);
return buffer.toString();
}
void _drawLine(StringBuffer buffer, String prefix, bool isLastChild,
String name) {
// Print lines.
buffer.write(prefix);
if (name != null) {
if (isLastChild) {
buffer.write(log.gray("'-- "));
} else {
buffer.write(log.gray("|-- "));
}
}
// Print name.
buffer.writeln(name);
}
String _getPrefix(bool isRoot, bool isLast) {
if (isRoot) return "";
if (isLast) return " ";
return log.gray("| ");
}
void _draw(StringBuffer buffer, String prefix, String name, Map children,
{bool showAllChildren, bool isLast: false}) {
if (showAllChildren == null) showAllChildren = false;
// Don't draw a line for the root node.
if (name != null) _drawLine(buffer, prefix, isLast, name);
// Recurse to the children.
var childNames = ordered(children.keys);
drawChild(bool isLastChild, String child) {
var childPrefix = _getPrefix(name == null, isLast);
_draw(
buffer,
'$prefix$childPrefix',
child,
children[child],
showAllChildren: showAllChildren,
isLast: isLastChild);
}
if (name == null || showAllChildren || childNames.length <= 10) {
// Not too many, so show all the children.
for (var i = 0; i < childNames.length; i++) {
drawChild(i == childNames.length - 1, childNames[i]);
}
} else {
// Show the first few.
drawChild(false, childNames[0]);
drawChild(false, childNames[1]);
drawChild(false, childNames[2]);
// Elide the middle ones.
buffer.write(prefix);
buffer.write(_getPrefix(name == null, isLast));
buffer.writeln(log.gray('| (${childNames.length - 6} more...)'));
// Show the last few.
drawChild(false, childNames[childNames.length - 3]);
drawChild(false, childNames[childNames.length - 2]);
drawChild(true, childNames[childNames.length - 1]);
}
}

View file

@ -1,89 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback;
import 'package:barback/barback.dart';
import 'package:path/path.dart' as p;
import 'package:pub_semver/pub_semver.dart';
/// The currently supported versions of packages that this version of pub works
/// with.
///
/// Pub implicitly constrains these packages to these versions as long as
/// barback is a dependency.
///
/// Users' transformers are loaded in an isolate that uses the entrypoint
/// package's dependency versions. However, that isolate also loads code
/// provided by pub (`asset/dart/transformer_isolate.dart` and associated
/// files). This code uses these packages as well, so these constraints exist to
/// ensure that its usage of the packages remains valid.
///
/// Most constraints here are like normal version constraints in that their
/// upper bound is the next major version of the package (or minor version for
/// pre-1.0.0 packages). If a new major version of the package is released,
/// these *must* be incremented to synchronize with that.
///
/// The constraint on barback is different. Its upper bound is the next *patch*
/// version of barback; that is, the next version with new features. This is
/// because most barback features need additional serialization code to be fully
/// supported in pub, even if they're otherwise backwards-compatible.
///
/// Whenever a new minor or patch version of barback is published, this *must*
/// be incremented to synchronize with that. See the barback [compatibility
/// documentation][compat] for details on the relationship between this
/// constraint and barback's version.
///
/// [compat]: https://gist.github.com/nex3/10942218
final pubConstraints = {
"barback": new VersionConstraint.parse(">=0.13.0 <0.15.3"),
"source_span": new VersionConstraint.parse(">=1.0.0 <2.0.0"),
"stack_trace": new VersionConstraint.parse(">=0.9.1 <2.0.0")
};
/// Converts [id] to a "package:" URI.
///
/// This will throw an [ArgumentError] if [id] doesn't represent a library in
/// `lib/`.
Uri idToPackageUri(AssetId id) {
if (!id.path.startsWith('lib/')) {
throw new ArgumentError("Asset id $id doesn't identify a library.");
}
return new Uri(
scheme: 'package',
path: p.url.join(id.package, id.path.replaceFirst('lib/', '')));
}
/// Converts [uri] into an [AssetId] if its path is within "packages".
///
/// If the URL contains a special directory, but lacks a following package name,
/// throws a [FormatException].
///
/// If the URI doesn't contain one of those special directories, returns null.
AssetId packagesUrlToId(Uri url) {
var parts = p.url.split(url.path);
// Strip the leading "/" from the URL.
if (parts.isNotEmpty && parts.first == "/") parts = parts.skip(1).toList();
if (parts.isEmpty) return null;
// Check for "packages" in the URL.
// TODO(rnystrom): If we rewrite "package:" imports to relative imports that
// point to a canonical "packages" directory, we can limit "packages" to the
// root of the URL as well. See: #16649.
var index = parts.indexOf("packages");
if (index == -1) return null;
// There should be a package name after "packages".
if (parts.length <= index + 1) {
throw new FormatException(
'Invalid URL path "${url.path}". Expected package name ' 'after "packages".');
}
var package = parts[index + 1];
var assetPath = p.url.join("lib", p.url.joinAll(parts.skip(index + 2)));
return new AssetId(package, assetPath);
}
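// Illustrative sketch of how the two helpers above relate, using a
// hypothetical package "foo":
//
//     idToPackageUri(new AssetId("foo", "lib/src/util.dart"))
//         // => Uri.parse("package:foo/src/util.dart")
//     packagesUrlToId(Uri.parse("/packages/foo/src/util.dart"))
//         // => new AssetId("foo", "lib/src/util.dart")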

View file

@ -1,70 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.admin_server;
import 'dart:async';
import 'dart:io';
import 'package:http_parser/http_parser.dart';
import 'package:shelf/shelf.dart' as shelf;
import 'package:shelf_web_socket/shelf_web_socket.dart';
import '../io.dart';
import '../log.dart' as log;
import 'asset_environment.dart';
import 'base_server.dart';
import 'web_socket_api.dart';
/// The web admin interface to pub serve.
// TODO(rnystrom): Currently this just provides access to the Web Socket API.
// See #16954.
class AdminServer extends BaseServer {
/// All currently open [WebSocket] connections.
final _webSockets = new Set<CompatibleWebSocket>();
shelf.Handler _handler;
/// Creates a new server and binds it to [port] of [host].
static Future<AdminServer> bind(AssetEnvironment environment, String host,
int port) {
return bindServer(host, port).then((server) {
log.fine('Bound admin server to $host:$port.');
return new AdminServer._(environment, server);
});
}
AdminServer._(AssetEnvironment environment, HttpServer server)
: super(environment, server) {
_handler = new shelf.Cascade().add(
webSocketHandler(_handleWebSocket)).add(_handleHttp).handler;
}
/// Closes the server and all Web Socket connections.
Future close() {
var futures = [super.close()];
futures.addAll(_webSockets.map((socket) => socket.close()));
return Future.wait(futures);
}
handleRequest(shelf.Request request) => _handler(request);
/// Handles an HTTP request.
_handleHttp(shelf.Request request) {
// TODO(rnystrom): Actually respond to requests once there is an admin
// interface. See #16954.
logRequest(request, "501 Not Implemented");
return new shelf.Response(
501,
body: "Currently this server only accepts Web Socket connections.");
}
/// Creates a web socket for [request] which should be an upgrade request.
void _handleWebSocket(CompatibleWebSocket socket) {
_webSockets.add(socket);
var api = new WebSocketApi(socket, environment);
api.listen().whenComplete(
() => _webSockets.remove(api)).catchError(addError);
}
}

View file

@ -1,960 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.asset_environment;
import 'dart:async';
import 'dart:io';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as path;
import 'package:watcher/watcher.dart';
import '../cached_package.dart';
import '../entrypoint.dart';
import '../exceptions.dart';
import '../io.dart';
import '../log.dart' as log;
import '../package.dart';
import '../package_graph.dart';
import '../sdk.dart' as sdk;
import '../source/cached.dart';
import '../utils.dart';
import 'admin_server.dart';
import 'barback_server.dart';
import 'dart_forwarding_transformer.dart';
import 'dart2js_transformer.dart';
import 'load_all_transformers.dart';
import 'pub_package_provider.dart';
import 'source_directory.dart';
/// The entire "visible" state of the assets of a package and all of its
/// dependencies, taking into account the user's configuration when running pub.
///
/// Where [PackageGraph] just describes the entrypoint's dependencies as
/// specified by pubspecs, this includes "transient" information like the mode
/// that the user is running pub in, or which directories they want to
/// transform.
class AssetEnvironment {
/// Creates a new build environment for working with the assets used by
/// [entrypoint] and its dependencies.
///
/// HTTP servers that serve directories from this environment will be bound
/// to [hostname] and have ports based on [basePort]. If omitted, they
/// default to "localhost" and "0" (use ephemeral ports), respectively.
///
/// Loads all used transformers using [mode] (including dart2js if
/// [useDart2JS] is true).
///
/// This will only add the root package's "lib" directory to the environment.
/// Other directories can be added to the environment using [serveDirectory].
///
/// If [watcherType] is not [WatcherType.NONE] (the default), watches source
/// assets for modification.
///
/// If [packages] is passed, only those packages' assets are loaded and
/// served.
///
/// If [entrypoints] is passed, only transformers necessary to run those
/// entrypoints are loaded. Each entrypoint is expected to refer to a Dart
/// library.
///
/// Returns a [Future] that completes to the environment once the inputs,
/// transformers, and server are loaded and ready.
static Future<AssetEnvironment> create(Entrypoint entrypoint,
BarbackMode mode, {WatcherType watcherType, String hostname, int basePort,
Iterable<String> packages, Iterable<AssetId> entrypoints, bool useDart2JS:
true}) {
if (watcherType == null) watcherType = WatcherType.NONE;
if (hostname == null) hostname = "localhost";
if (basePort == null) basePort = 0;
return log.progress("Loading asset environment", () {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
new Future.value(entrypoint.loadPackageGraph()).then((x0) {
try {
var graph = x0;
graph = _adjustPackageGraph(graph, mode, packages);
var barback = new Barback(new PubPackageProvider(graph));
barback.log.listen(_log);
var environment =
new AssetEnvironment._(graph, barback, mode, watcherType, hostname, basePort);
new Future.value(
environment._load(entrypoints: entrypoints, useDart2JS: useDart2JS)).then((x1) {
try {
x1;
completer0.complete(environment);
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}, fine: true);
}
/// Return a version of [graph] that's restricted to [packages] (if passed)
/// and loads cached packages (if [mode] is [BarbackMode.DEBUG]).
static PackageGraph _adjustPackageGraph(PackageGraph graph, BarbackMode mode,
Iterable<String> packages) {
if (mode != BarbackMode.DEBUG && packages == null) return graph;
packages = (packages == null ? graph.packages.keys : packages).toSet();
return new PackageGraph(
graph.entrypoint,
graph.lockFile,
new Map.fromIterable(packages, value: (packageName) {
var package = graph.packages[packageName];
if (mode != BarbackMode.DEBUG) return package;
var cache = path.join('.pub/deps/debug', packageName);
if (!dirExists(cache)) return package;
return new CachedPackage(package, cache);
}));
}
/// The server for the Web Socket API and admin interface.
AdminServer _adminServer;
/// The public directories in the root package that are included in the asset
/// environment, keyed by their root directory.
final _directories = new Map<String, SourceDirectory>();
/// The [Barback] instance used to process assets in this environment.
final Barback barback;
/// The root package being built.
Package get rootPackage => graph.entrypoint.root;
/// The graph of packages whose assets and transformers are loaded in this
/// environment.
///
/// This isn't necessarily identical to the graph that's passed in to the
/// environment. It may expose fewer packages if some packages' assets don't
/// need to be loaded, and it may expose some [CachedPackage]s.
final PackageGraph graph;
/// The mode to run the transformers in.
final BarbackMode mode;
/// The [Transformer]s that should be appended by default to the root
/// package's transformer cascade. Will be empty if there are none.
final _builtInTransformers = <Transformer>[];
/// How source files should be watched.
final WatcherType _watcherType;
/// The hostname that servers are bound to.
final String _hostname;
/// The starting number for ports that servers will be bound to.
///
/// Servers will be bound to ports starting at this number and then
/// incrementing from there. However, if this is zero, then ephemeral port
/// numbers will be selected for each server.
final int _basePort;
/// The modified source assets that have not been sent to barback yet.
///
/// The build environment can be paused (by calling [pauseUpdates]) and
/// resumed ([resumeUpdates]). While paused, all source asset updates that
/// come from watching or adding new directories are not sent to barback.
/// When resumed, all pending source updates are sent to barback.
///
/// This lets pub serve and pub build create an environment and bind several
/// servers before barback starts building and producing results
/// asynchronously.
///
/// If this is `null`, then the environment is "live" and all updates will
/// go to barback immediately.
Set<AssetId> _modifiedSources;
AssetEnvironment._(this.graph, this.barback, this.mode, this._watcherType,
this._hostname, this._basePort);
/// Gets the built-in [Transformer]s that should be added to [package].
///
/// Returns `null` if there are none.
Iterable<Transformer> getBuiltInTransformers(Package package) {
// Built-in transformers only apply to the root package.
if (package.name != rootPackage.name) return null;
// The built-in transformers are for dart2js and forwarding assets around
// dart2js.
if (_builtInTransformers.isEmpty) return null;
return _builtInTransformers;
}
/// Starts up the admin server on an appropriate port and returns it.
///
/// This may only be called once on the build environment.
Future<AdminServer> startAdminServer([int port]) {
// Can only start once.
assert(_adminServer == null);
    // By default the admin server is bound to the port just below the base
    // port; if the base port is ephemeral, the admin port is ephemeral too.
if (port == null) port = _basePort == 0 ? 0 : _basePort - 1;
return AdminServer.bind(this, _hostname, port).then((server) => _adminServer =
server);
}
/// Binds a new port to serve assets from within [rootDirectory] in the
/// entrypoint package.
///
/// Adds and watches the sources within that directory. Returns a [Future]
/// that completes to the bound server.
///
/// If [rootDirectory] is already being served, returns that existing server.
Future<BarbackServer> serveDirectory(String rootDirectory) {
// See if there is already a server bound to the directory.
var directory = _directories[rootDirectory];
if (directory != null) {
return directory.server.then((server) {
log.fine('Already serving $rootDirectory on ${server.url}.');
return server;
});
}
// See if the new directory overlaps any existing servers.
var overlapping = _directories.keys.where(
(directory) =>
path.isWithin(directory, rootDirectory) ||
path.isWithin(rootDirectory, directory)).toList();
if (overlapping.isNotEmpty) {
return new Future.error(
new OverlappingSourceDirectoryException(overlapping));
}
var port = _basePort;
// If not using an ephemeral port, find the lowest-numbered available one.
if (port != 0) {
var boundPorts =
_directories.values.map((directory) => directory.port).toSet();
while (boundPorts.contains(port)) {
port++;
}
}
var sourceDirectory =
new SourceDirectory(this, rootDirectory, _hostname, port);
_directories[rootDirectory] = sourceDirectory;
return _provideDirectorySources(
rootPackage,
rootDirectory).then((subscription) {
sourceDirectory.watchSubscription = subscription;
return sourceDirectory.serve();
});
}
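  // Illustrative sketch (hypothetical directories, base port 8080):
  // directories get the lowest free ports in order, and nested directories
  // are rejected.
  //
  //     environment.serveDirectory("web");     // binds port 8080
  //     environment.serveDirectory("test");    // binds port 8081
  //     environment.serveDirectory("web/sub"); // future completes with an
  //                                            // OverlappingSourceDirectoryException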
/// Binds a new port to serve assets from within the "bin" directory of
/// [package].
///
/// Adds the sources within that directory and then binds a server to it.
/// Unlike [serveDirectory], this works with packages that are not the
/// entrypoint.
///
/// Returns a [Future] that completes to the bound server.
Future<BarbackServer> servePackageBinDirectory(String package) {
return _provideDirectorySources(
graph.packages[package],
"bin").then(
(_) =>
BarbackServer.bind(this, _hostname, 0, package: package, rootDirectory: "bin"));
}
/// Precompiles all of [packageName]'s executables to snapshots in
/// [directory].
///
/// If [executableIds] is passed, only those executables are precompiled.
///
/// Returns a map from executable name to path for the snapshots that were
/// successfully precompiled.
Future<Map<String, String>> precompileExecutables(String packageName,
String directory, {Iterable<AssetId> executableIds}) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
log.fine("Executables for ${packageName}: ${executableIds}");
join1() {
var package = graph.packages[packageName];
new Future.value(servePackageBinDirectory(packageName)).then((x0) {
try {
var server = x0;
join2() {
completer0.complete();
}
finally0(cont0) {
server.close();
cont0();
}
catch0(e1, s1) {
finally0(() => completer0.completeError(e1, s1));
}
try {
var precompiled = {};
new Future.value(waitAndPrintErrors(executableIds.map(((id) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var basename = path.url.basename(id.path);
var snapshotPath =
path.join(directory, "${basename}.snapshot");
new Future.value(
runProcess(
Platform.executable,
[
'--snapshot=${snapshotPath}',
server.url.resolve(basename).toString()])).then((x0) {
try {
var result = x0;
join0() {
completer0.complete();
}
if (result.success) {
log.message(
"Precompiled ${_formatExecutable(id)}.");
precompiled[path.withoutExtension(basename)] =
snapshotPath;
join0();
} else {
throw new ApplicationException(
log.yellow("Failed to precompile ${_formatExecutable(id)}:\n") +
result.stderr.join('\n'));
join0();
}
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
})))).then((x1) {
try {
x1;
final v0 = precompiled;
finally0(() {
completer0.complete(v0);
});
} catch (e2, s2) {
catch0(e2, s2);
}
}, onError: catch0);
} catch (e3, s3) {
catch0(e3, s3);
}
} catch (e4, s4) {
completer0.completeError(e4, s4);
}
}, onError: completer0.completeError);
}
if (executableIds.isEmpty) {
completer0.complete([]);
} else {
join1();
}
}
if (executableIds == null) {
executableIds = graph.packages[packageName].executableIds;
join0();
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
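  // Illustrative sketch (hypothetical package and paths): precompiling a
  // package "foo" with one executable "bin/foo_tool.dart" into ".pub/bin/foo"
  // would complete with a map like
  //
  //     {"foo_tool": ".pub/bin/foo/foo_tool.dart.snapshot"}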
/// Returns the executable name for [id].
///
/// [id] is assumed to be an executable in a bin directory. The return value
/// is intended for log output and may contain formatting.
String _formatExecutable(AssetId id) =>
log.bold("${id.package}:${path.basenameWithoutExtension(id.path)}");
/// Stops the server bound to [rootDirectory].
///
/// Also removes any source files within that directory from barback. Returns
  /// the URL of the unbound server, or `null` if [rootDirectory] was not
/// bound to a server.
Future<Uri> unserveDirectory(String rootDirectory) {
log.fine("Unserving $rootDirectory.");
var directory = _directories.remove(rootDirectory);
if (directory == null) return new Future.value();
return directory.server.then((server) {
var url = server.url;
return directory.close().then((_) {
_removeDirectorySources(rootDirectory);
return url;
});
});
}
/// Gets the source directory that contains [assetPath] within the entrypoint
/// package.
///
/// If [assetPath] is not contained within a source directory, this throws
/// an exception.
String getSourceDirectoryContaining(String assetPath) =>
_directories.values.firstWhere(
(dir) => path.isWithin(dir.directory, assetPath)).directory;
/// Return all URLs serving [assetPath] in this environment.
Future<List<Uri>> getUrlsForAssetPath(String assetPath) {
// Check the three (mutually-exclusive) places the path could be pointing.
return _lookUpPathInServerRoot(assetPath).then((urls) {
if (urls.isNotEmpty) return urls;
return _lookUpPathInPackagesDirectory(assetPath);
}).then((urls) {
if (urls.isNotEmpty) return urls;
return _lookUpPathInDependency(assetPath);
});
}
/// Look up [assetPath] in the root directories of servers running in the
/// entrypoint package.
Future<List<Uri>> _lookUpPathInServerRoot(String assetPath) {
// Find all of the servers whose root directories contain the asset and
// generate appropriate URLs for each.
return Future.wait(
_directories.values.where(
(dir) => path.isWithin(dir.directory, assetPath)).map((dir) {
var relativePath = path.relative(assetPath, from: dir.directory);
return dir.server.then(
(server) => server.url.resolveUri(path.toUri(relativePath)));
}));
}
/// Look up [assetPath] in the "packages" directory in the entrypoint package.
Future<List<Uri>> _lookUpPathInPackagesDirectory(String assetPath) {
var components = path.split(path.relative(assetPath));
if (components.first != "packages") return new Future.value([]);
if (!graph.packages.containsKey(components[1])) return new Future.value([]);
return Future.wait(_directories.values.map((dir) {
return dir.server.then(
(server) => server.url.resolveUri(path.toUri(assetPath)));
}));
}
/// Look up [assetPath] in the "lib" or "asset" directory of a dependency
/// package.
Future<List<Uri>> _lookUpPathInDependency(String assetPath) {
for (var packageName in graph.packages.keys) {
var package = graph.packages[packageName];
var libDir = package.path('lib');
var assetDir = package.path('asset');
var uri;
if (path.isWithin(libDir, assetPath)) {
uri = path.toUri(
path.join('packages', package.name, path.relative(assetPath, from: libDir)));
} else if (path.isWithin(assetDir, assetPath)) {
uri = path.toUri(
path.join('assets', package.name, path.relative(assetPath, from: assetDir)));
} else {
continue;
}
return Future.wait(_directories.values.map((dir) {
return dir.server.then((server) => server.url.resolveUri(uri));
}));
}
return new Future.value([]);
}
/// Given a URL to an asset served by this environment, returns the ID of the
/// asset that would be accessed by that URL.
///
/// If no server can serve [url], completes to `null`.
Future<AssetId> getAssetIdForUrl(Uri url) {
return Future.wait(
_directories.values.map((dir) => dir.server)).then((servers) {
var server = servers.firstWhere((server) {
if (server.port != url.port) return false;
return isLoopback(server.address.host) == isLoopback(url.host) ||
server.address.host == url.host;
}, orElse: () => null);
if (server == null) return null;
return server.urlToId(url);
});
}
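  // Illustrative sketch (hypothetical root package "myapp", "web" served on
  // http://localhost:8080):
  //
  //     getAssetIdForUrl(Uri.parse("http://localhost:8080/index.html"))
  //         // completes with the asset id myapp|web/index.html
  //     getUrlsForAssetPath("web/index.html")
  //         // completes with [http://localhost:8080/index.html]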
/// Determines if [sourcePath] is contained within any of the directories in
/// the root package that are visible to this build environment.
bool containsPath(String sourcePath) {
var directories = ["lib"];
directories.addAll(_directories.keys);
return directories.any((dir) => path.isWithin(dir, sourcePath));
}
/// Pauses sending source asset updates to barback.
void pauseUpdates() {
// Cannot pause while already paused.
assert(_modifiedSources == null);
_modifiedSources = new Set<AssetId>();
}
/// Sends any pending source updates to barback and begins the asynchronous
/// build process.
void resumeUpdates() {
// Cannot resume while not paused.
assert(_modifiedSources != null);
barback.updateSources(_modifiedSources);
_modifiedSources = null;
}
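  // Illustrative sketch of the intended ordering when binding several servers
  // before the first build (directory names are hypothetical):
  //
  //     environment.pauseUpdates();
  //     environment.serveDirectory("web")
  //         .then((_) => environment.serveDirectory("example"))
  //         .then((_) {
  //       environment.resumeUpdates();
  //     });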
/// Loads the assets and transformers for this environment.
///
/// This transforms and serves all library and asset files in all packages in
/// the environment's package graph. It loads any transformer plugins defined
/// in packages in [graph] and re-runs them as necessary when any input files
/// change.
///
/// If [useDart2JS] is `true`, then the [Dart2JSTransformer] is implicitly
/// added to end of the root package's transformer phases.
///
/// If [entrypoints] is passed, only transformers necessary to run those
/// entrypoints will be loaded.
///
/// Returns a [Future] that completes once all inputs and transformers are
/// loaded.
Future _load({Iterable<AssetId> entrypoints, bool useDart2JS}) {
return log.progress("Initializing barback", () {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var containsDart2JS =
graph.entrypoint.root.pubspec.transformers.any(((transformers) {
return transformers.any(
(config) => config.id.package == '\$dart2js');
}));
join0() {
new Future.value(BarbackServer.bind(this, _hostname, 0)).then((x0) {
try {
var transformerServer = x0;
var errorStream = barback.errors.map(((error) {
if (error is! AssetLoadException) throw error;
log.error(log.red(error.message));
log.fine(error.stackTrace.terse);
}));
new Future.value(_withStreamErrors((() {
return log.progress("Loading source assets", _provideSources);
}), [errorStream, barback.results])).then((x1) {
try {
x1;
log.fine("Provided sources.");
errorStream = barback.errors.map(((error) {
if (error is! TransformerException) throw error;
var message = error.error.toString();
if (error.stackTrace != null) {
message += "\n" + error.stackTrace.terse.toString();
}
_log(
new LogEntry(
error.transform,
error.transform.primaryId,
LogLevel.ERROR,
message,
null));
}));
new Future.value(_withStreamErrors((() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
completer0.complete(
log.progress("Loading transformers", (() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
new Future.value(
loadAllTransformers(
this,
transformerServer,
entrypoints: entrypoints)).then((x0) {
try {
x0;
transformerServer.close();
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}), fine: true));
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}),
[errorStream, barback.results, transformerServer.results])).then((x2) {
try {
x2;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
} catch (e2, s2) {
completer0.completeError(e2, s2);
}
}, onError: completer0.completeError);
}
if (!containsDart2JS && useDart2JS) {
_builtInTransformers.addAll(
[new Dart2JSTransformer(this, mode), new DartForwardingTransformer(mode)]);
join0();
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}, fine: true);
}
/// Provides the public source assets in the environment to barback.
///
/// If [watcherType] is not [WatcherType.NONE], enables watching on them.
Future _provideSources() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
new Future.value(Future.wait(graph.packages.values.map(((package) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
new Future.value(
_provideDirectorySources(package, "lib")).then((x0) {
try {
x0;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
if (graph.isPackageStatic(package.name)) {
completer0.complete(null);
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
})))).then((x0) {
try {
x0;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Provides all of the source assets within [dir] in [package] to barback.
///
/// If [watcherType] is not [WatcherType.NONE], enables watching on them.
/// Returns the subscription to the watcher, or `null` if none was created.
Future<StreamSubscription<WatchEvent>>
_provideDirectorySources(Package package, String dir) {
log.fine("Providing sources for ${package.name}|$dir.");
// TODO(rnystrom): Handle overlapping directories. If two served
// directories overlap like so:
//
// $ pub serve example example/subdir
//
// Then the sources of the subdirectory will be updated and watched twice.
// See: #17454
if (_watcherType == WatcherType.NONE) {
_updateDirectorySources(package, dir);
return new Future.value();
}
    // Watch the directory before listing it so we don't miss files that
// are added between the initial list and registering the watcher.
return _watchDirectorySources(package, dir).then((_) {
_updateDirectorySources(package, dir);
});
}
/// Updates barback with all of the files in [dir] inside [package].
void _updateDirectorySources(Package package, String dir) {
var ids = _listDirectorySources(package, dir);
if (_modifiedSources == null) {
barback.updateSources(ids);
} else {
_modifiedSources.addAll(ids);
}
}
/// Removes all of the files in [dir] in the root package from barback.
void _removeDirectorySources(String dir) {
var ids = _listDirectorySources(rootPackage, dir);
if (_modifiedSources == null) {
barback.removeSources(ids);
} else {
_modifiedSources.removeAll(ids);
}
}
/// Lists all of the source assets in [dir] inside [package].
///
/// For large packages, listing the contents is a performance bottleneck, so
  /// this is optimized for our needs here instead of using the more general
/// but slower [listDir].
Iterable<AssetId> _listDirectorySources(Package package, String dir) {
// This is used in some performance-sensitive paths and can list many, many
    // files. As such, it leans more heavily toward optimization, at the expense
    // of readability, than most code in pub. In particular, it avoids using the
// path package, since re-parsing a path is very expensive relative to
// string operations.
return package.listFiles(beneath: dir).map((file) {
// From profiling, path.relative here is just as fast as a raw substring
// and is correct in the case where package.dir has a trailing slash.
var relative = package.relative(file);
if (Platform.operatingSystem == 'windows') {
relative = relative.replaceAll("\\", "/");
}
var uri = new Uri(pathSegments: relative.split("/"));
return new AssetId(package.name, uri.toString());
});
}
/// Adds a file watcher for [dir] within [package], if the directory exists
/// and the package needs watching.
Future<StreamSubscription<WatchEvent>> _watchDirectorySources(Package package,
String dir) {
// If this package comes from a cached source, its contents won't change so
// we don't need to monitor it. `packageId` will be null for the
// application package, since that's not locked.
var packageId = graph.lockFile.packages[package.name];
if (packageId != null &&
graph.entrypoint.cache.sources[packageId.source] is CachedSource) {
return new Future.value();
}
var subdirectory = package.path(dir);
if (!dirExists(subdirectory)) return new Future.value();
// TODO(nweiz): close this watcher when [barback] is closed.
var watcher = _watcherType.create(subdirectory);
var subscription = watcher.events.listen((event) {
// Don't watch files symlinked into these directories.
// TODO(rnystrom): If pub gets rid of symlinks, remove this.
var parts = path.split(event.path);
if (parts.contains("packages")) return;
// Skip files that were (most likely) compiled from nearby ".dart"
// files. These are created by the Editor's "Run as JavaScript"
// command and are written directly into the package's directory.
// When pub's dart2js transformer then tries to create the same file
// name, we get a build error. To avoid that, just don't consider
// that file to be a source.
// TODO(rnystrom): Remove these when the Editor no longer generates
// .js files and users have had enough time that they no longer have
// these files laying around. See #15859.
if (event.path.endsWith(".dart.js")) return;
if (event.path.endsWith(".dart.js.map")) return;
if (event.path.endsWith(".dart.precompiled.js")) return;
var idPath = package.relative(event.path);
var id = new AssetId(package.name, path.toUri(idPath).toString());
if (event.type == ChangeType.REMOVE) {
if (_modifiedSources != null) {
_modifiedSources.remove(id);
} else {
barback.removeSources([id]);
}
} else if (_modifiedSources != null) {
_modifiedSources.add(id);
} else {
barback.updateSources([id]);
}
});
return watcher.ready.then((_) => subscription);
}
/// Returns the result of [futureCallback] unless any stream in [streams]
/// emits an error before it's done.
///
/// If a stream does emit an error, that error is thrown instead.
/// [futureCallback] is a callback rather than a plain future to ensure that
/// [streams] are listened to before any code that might cause an error starts
/// running.
Future _withStreamErrors(Future futureCallback(), List<Stream> streams) {
var completer = new Completer.sync();
var subscriptions = streams.map(
(stream) => stream.listen((_) {}, onError: completer.completeError)).toList();
new Future.sync(futureCallback).then((_) {
if (!completer.isCompleted) completer.complete();
}).catchError((error, stackTrace) {
if (!completer.isCompleted) completer.completeError(error, stackTrace);
});
return completer.future.whenComplete(() {
for (var subscription in subscriptions) {
subscription.cancel();
}
});
}
}
/// Log [entry] using Pub's logging infrastructure.
///
/// Since both [LogEntry] objects and the message itself often redundantly
/// show the same context like the file where an error occurred, this tries
/// to avoid showing redundant data in the entry.
void _log(LogEntry entry) {
messageMentions(text) =>
entry.message.toLowerCase().contains(text.toLowerCase());
messageMentionsAsset(id) =>
messageMentions(id.toString()) ||
messageMentions(path.fromUri(entry.assetId.path));
var prefixParts = [];
// Show the level (unless the message mentions it).
if (!messageMentions(entry.level.name)) {
prefixParts.add("${entry.level} from");
}
// Show the transformer.
prefixParts.add(entry.transform.transformer);
// Mention the primary input of the transform unless the message seems to.
if (!messageMentionsAsset(entry.transform.primaryId)) {
prefixParts.add("on ${entry.transform.primaryId}");
}
// If the relevant asset isn't the primary input, mention it unless the
// message already does.
if (entry.assetId != entry.transform.primaryId &&
!messageMentionsAsset(entry.assetId)) {
prefixParts.add("with input ${entry.assetId}");
}
var prefix = "[${prefixParts.join(' ')}]:";
var message = entry.message;
if (entry.span != null) {
message = entry.span.message(entry.message);
}
switch (entry.level) {
case LogLevel.ERROR:
log.error("${log.red(prefix)}\n$message");
break;
case LogLevel.WARNING:
log.warning("${log.yellow(prefix)}\n$message");
break;
case LogLevel.INFO:
log.message("${log.cyan(prefix)}\n$message");
break;
case LogLevel.FINE:
log.fine("${log.gray(prefix)}\n$message");
break;
}
}
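// Illustrative sketch (hypothetical transformer and asset): for an error
// entry whose message doesn't already mention its context, the logged output
// looks roughly like
//
//     [Error from Dart2JS on myapp|web/main.dart]:
//     <the transformer's message>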
/// Exception thrown when trying to serve a new directory that overlaps one or
/// more directories already being served.
class OverlappingSourceDirectoryException implements Exception {
/// The relative paths of the directories that overlap the one that could not
/// be served.
final List<String> overlappingDirectories;
OverlappingSourceDirectoryException(this.overlappingDirectories);
}
/// An enum describing different modes of constructing a [DirectoryWatcher].
abstract class WatcherType {
/// A watcher that automatically chooses its type based on the operating
/// system.
static const AUTO = const _AutoWatcherType();
/// A watcher that always polls the filesystem for changes.
static const POLLING = const _PollingWatcherType();
/// No directory watcher at all.
static const NONE = const _NoneWatcherType();
/// Creates a new DirectoryWatcher.
DirectoryWatcher create(String directory);
String toString();
}
class _AutoWatcherType implements WatcherType {
const _AutoWatcherType();
DirectoryWatcher create(String directory) => new DirectoryWatcher(directory);
String toString() => "auto";
}
class _PollingWatcherType implements WatcherType {
const _PollingWatcherType();
DirectoryWatcher create(String directory) =>
new PollingDirectoryWatcher(directory);
String toString() => "polling";
}
class _NoneWatcherType implements WatcherType {
const _NoneWatcherType();
DirectoryWatcher create(String directory) => null;
String toString() => "none";
}

View file

@ -1,211 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.server;
import 'dart:async';
import 'dart:io';
import 'package:barback/barback.dart';
import 'package:mime/mime.dart';
import 'package:path/path.dart' as path;
import 'package:shelf/shelf.dart' as shelf;
import 'package:stack_trace/stack_trace.dart';
import '../barback.dart';
import '../io.dart';
import '../log.dart' as log;
import '../utils.dart';
import 'base_server.dart';
import 'asset_environment.dart';
/// Callback for determining if an asset with [id] should be served or not.
typedef bool AllowAsset(AssetId id);
/// A server that serves assets transformed by barback.
class BarbackServer extends BaseServer<BarbackServerResult> {
/// The package whose assets are being served.
final String package;
/// The directory in the root which will serve as the root of this server as
/// a native platform path.
///
/// This may be `null` in which case no files in the root package can be
/// served and only assets in "lib" directories are available.
final String rootDirectory;
/// Optional callback to determine if an asset should be served.
///
/// This can be set to allow outside code to filter out assets. Pub serve
/// uses this after plug-ins are loaded to avoid serving ".dart" files in
/// release mode.
///
/// If this is `null`, all assets may be served.
AllowAsset allowAsset;
/// Creates a new server and binds it to [port] of [host].
///
/// This server serves assets from [barback], and uses [rootDirectory]
/// (which is relative to the root directory of [package]) as the root
/// directory. If [rootDirectory] is omitted, the bound server can only be
/// used to serve assets from packages' lib directories (i.e. "packages/..."
/// URLs). If [package] is omitted, it defaults to the entrypoint package.
static Future<BarbackServer> bind(AssetEnvironment environment, String host,
int port, {String package, String rootDirectory}) {
if (package == null) package = environment.rootPackage.name;
return bindServer(host, port).then((server) {
if (rootDirectory == null) {
log.fine('Serving packages on $host:$port.');
} else {
log.fine('Bound "$rootDirectory" to $host:$port.');
}
return new BarbackServer._(environment, server, package, rootDirectory);
});
}
BarbackServer._(AssetEnvironment environment, HttpServer server, this.package,
this.rootDirectory)
: super(environment, server);
/// Converts a [url] served by this server into an [AssetId] that can be
/// requested from barback.
AssetId urlToId(Uri url) {
// See if it's a URL to a public directory in a dependency.
var id = packagesUrlToId(url);
if (id != null) return id;
if (rootDirectory == null) {
throw new FormatException(
"This server cannot serve out of the root directory. Got $url.");
}
// Otherwise, it's a path in current package's [rootDirectory].
var parts = path.url.split(url.path);
// Strip the leading "/" from the URL.
if (parts.isNotEmpty && parts.first == "/") parts = parts.skip(1);
var relativePath = path.url.join(rootDirectory, path.url.joinAll(parts));
return new AssetId(package, relativePath);
}
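  // Illustrative sketch for a hypothetical server with [package] "myapp" and
  // [rootDirectory] "web":
  //
  //     urlToId(Uri.parse("/index.html"));            // myapp|web/index.html
  //     urlToId(Uri.parse("/packages/foo/foo.dart")); // foo|lib/foo.dart
  //     urlToId(Uri.parse("/packages"));              // throws FormatException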
/// Handles an HTTP request.
handleRequest(shelf.Request request) {
if (request.method != "GET" && request.method != "HEAD") {
return methodNotAllowed(request);
}
var id;
try {
id = urlToId(request.url);
} on FormatException catch (ex) {
// If we got here, we had a path like "/packages" which is a special
// directory, but not a valid path since it lacks a following package
// name.
return notFound(request, error: ex.message);
}
// See if the asset should be blocked.
if (allowAsset != null && !allowAsset(id)) {
return notFound(
request,
error: "Asset $id is not available in this configuration.",
asset: id);
}
return environment.barback.getAssetById(id).then((result) {
return result;
}).then((asset) => _serveAsset(request, asset)).catchError((error, trace) {
if (error is! AssetNotFoundException) throw error;
return environment.barback.getAssetById(
id.addExtension("/index.html")).then((asset) {
if (request.url.path.endsWith('/')) return _serveAsset(request, asset);
// We only want to serve index.html if the URL explicitly ends in a
// slash. For other URLs, we redirect to one with the slash added to
// implicitly support that too. This follows Apache's behavior.
logRequest(request, "302 Redirect to ${request.url}/");
return new shelf.Response.found('${request.url}/');
}).catchError((newError, newTrace) {
// If we find neither the original file or the index, we should report
// the error about the original to the user.
throw newError is AssetNotFoundException ? error : newError;
});
}).catchError((error, trace) {
if (error is! AssetNotFoundException) {
trace = new Chain.forTrace(trace);
logRequest(request, "$error\n$trace");
addError(error, trace);
close();
return new shelf.Response.internalServerError();
}
addResult(new BarbackServerResult._failure(request.url, id, error));
return notFound(request, asset: id);
}).then((response) {
// Allow requests of any origin to access "pub serve". This is useful for
// running "pub serve" in parallel with another development server. Since
// "pub serve" is only used as a development server and doesn't require
// any sort of credentials anyway, this is secure.
return response.change(headers: const {
"Access-Control-Allow-Origin": "*"
});
});
}
/// Returns the body of [asset] as a response to [request].
Future<shelf.Response> _serveAsset(shelf.Request request, Asset asset) {
return validateStream(asset.read()).then((stream) {
addResult(new BarbackServerResult._success(request.url, asset.id));
var headers = {};
var mimeType = lookupMimeType(asset.id.path);
if (mimeType != null) headers['Content-Type'] = mimeType;
return new shelf.Response.ok(stream, headers: headers);
}).catchError((error, trace) {
addResult(new BarbackServerResult._failure(request.url, asset.id, error));
// If we couldn't read the asset, handle the error gracefully.
if (error is FileSystemException) {
// Assume this means the asset was a file-backed source asset
// and we couldn't read it, so treat it like a missing asset.
return notFound(request, error: error.toString(), asset: asset.id);
}
trace = new Chain.forTrace(trace);
logRequest(request, "$error\n$trace");
// Otherwise, it's some internal error.
return new shelf.Response.internalServerError(body: error.toString());
});
}
}
/// The result of the server handling a URL.
///
/// Only requests for which an asset was requested from barback will emit a
/// result. Malformed requests will be handled internally.
class BarbackServerResult {
/// The requested url.
final Uri url;
/// The id that [url] identifies.
final AssetId id;
/// The error thrown by barback.
///
/// If the request was served successfully, this will be null.
final error;
/// Whether the request was served successfully.
bool get isSuccess => error == null;
/// Whether the request was served unsuccessfully.
bool get isFailure => !isSuccess;
BarbackServerResult._success(this.url, this.id)
: error = null;
BarbackServerResult._failure(this.url, this.id, this.error);
}

View file

@ -1,129 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.base_server;
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'package:barback/barback.dart';
import 'package:shelf/shelf.dart' as shelf;
import 'package:shelf/shelf_io.dart' as shelf_io;
import '../log.dart' as log;
import '../utils.dart';
import 'asset_environment.dart';
/// Base class for a pub-controlled server.
abstract class BaseServer<T> {
  /// The [AssetEnvironment] being served.
final AssetEnvironment environment;
/// The underlying HTTP server.
final HttpServer _server;
/// The server's port.
int get port => _server.port;
/// The servers's address.
InternetAddress get address => _server.address;
/// The server's base URL.
Uri get url => baseUrlForAddress(_server.address, port);
/// The results of requests handled by the server.
///
/// These can be used to provide visual feedback for the server's processing.
/// This stream is also used to emit any programmatic errors that occur in the
/// server.
Stream<T> get results => _resultsController.stream;
final _resultsController = new StreamController<T>.broadcast();
BaseServer(this.environment, this._server) {
shelf_io.serveRequests(
_server,
const shelf.Pipeline().addMiddleware(
shelf.createMiddleware(errorHandler: _handleError)).addHandler(handleRequest));
}
/// Closes this server.
Future close() {
return Future.wait([_server.close(), _resultsController.close()]);
}
/// Handles an HTTP request.
handleRequest(shelf.Request request);
/// Returns a 405 response to [request].
shelf.Response methodNotAllowed(shelf.Request request) {
logRequest(request, "405 Method Not Allowed");
return new shelf.Response(
405,
body: "The ${request.method} method is not allowed for ${request.url}.",
headers: {
'Allow': 'GET, HEAD'
});
}
/// Returns a 404 response to [request].
///
/// If [asset] is given, it is the ID of the asset that couldn't be found.
shelf.Response notFound(shelf.Request request, {String error,
AssetId asset}) {
logRequest(request, "Not Found");
// TODO(rnystrom): Apply some styling to make it visually clear that this
// error is coming from pub serve itself.
var body = new StringBuffer();
body.writeln("""
<!DOCTYPE html>
<head>
<title>404 Not Found</title>
</head>
<body>
<h1>404 Not Found</h1>""");
if (asset != null) {
body.writeln(
"<p>Could not find asset "
"<code>${HTML_ESCAPE.convert(asset.path)}</code> in package "
"<code>${HTML_ESCAPE.convert(asset.package)}</code>.</p>");
}
if (error != null) {
body.writeln("<p>Error: ${HTML_ESCAPE.convert(error)}</p>");
}
body.writeln("""
</body>""");
// Force a UTF-8 encoding so that error messages in non-English locales are
// sent correctly.
return new shelf.Response.notFound(body.toString(), headers: {
'Content-Type': 'text/html; charset=utf-8'
});
}
/// Log [message] at [log.Level.FINE] with metadata about [request].
void logRequest(shelf.Request request, String message) =>
log.fine("$this ${request.method} ${request.url}\n$message");
/// Adds [result] to the server's [results] stream.
void addResult(T result) {
_resultsController.add(result);
}
/// Adds [error] as an error to the server's [results] stream.
void addError(error, [stackTrace]) {
_resultsController.addError(error, stackTrace);
}
/// Handles an error thrown by [handleRequest].
_handleError(error, StackTrace stackTrace) {
_resultsController.addError(error, stackTrace);
close();
return new shelf.Response.internalServerError();
}
}

View file

@ -1,60 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.cycle_exception;
import '../exceptions.dart';
/// An exception thrown when a transformer dependency cycle is detected.
///
/// A cycle exception is usually produced within a deeply-nested series of
/// calls. The API is designed to make it easy for each of these calls to add to
/// the message so that the full reasoning for the cycle is made visible to the
/// user.
///
/// Each call's individual message is called a "step". A [CycleException] is
/// represented internally as a linked list of steps.
class CycleException implements ApplicationException {
/// The step for this exception.
final String _step;
/// The next exception in the linked list.
///
/// [_next]'s steps come after [_step].
final CycleException _next;
/// A list of all steps in the cycle.
List<String> get steps {
if (_step == null) return [];
var exception = this;
var steps = [];
while (exception != null) {
steps.add(exception._step);
exception = exception._next;
}
return steps;
}
String get message {
var steps = this.steps;
if (steps.isEmpty) return "Transformer cycle detected.";
return "Transformer cycle detected:\n" +
steps.map((step) => " $step").join("\n");
}
/// Creates a new [CycleException] with zero or one steps.
CycleException([this._step])
: _next = null;
CycleException._(this._step, this._next);
/// Returns a copy of [this] with [step] added to the beginning of [steps].
CycleException prependStep(String step) {
if (_step == null) return new CycleException(step);
return new CycleException._(step, this);
}
String toString() => message;
}
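// Illustrative sketch (hypothetical transformer names): building up a cycle
// message from the innermost step outward.
//
//     var error = new CycleException("foo is transformed by bar")
//         .prependStep("bar is transformed by baz")
//         .prependStep("baz is transformed by foo");
//     print(error.message);
//     // Transformer cycle detected:
//     //   baz is transformed by foo
//     //   bar is transformed by baz
//     //   foo is transformed by bar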

View file

@ -1,431 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.dart2js_transformer;
import 'dart:async';
import 'dart:convert';
import 'package:analyzer/analyzer.dart';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as path;
import 'package:pool/pool.dart';
import '../../../../../../../pkg/compiler/lib/compiler.dart' as compiler;
import '../../../../../../../pkg/compiler/lib/src/dart2js.dart' show AbortLeg;
import '../../../../../../../pkg/compiler/lib/src/io/source_file.dart';
import '../barback.dart';
import '../dart.dart' as dart;
import '../utils.dart';
import 'asset_environment.dart';
/// The set of all valid configuration options for this transformer.
final _validOptions = new Set<String>.from(
[
'commandLineOptions',
'checked',
'csp',
'minify',
'verbose',
'environment',
'preserveUris',
'suppressWarnings',
'suppressHints',
'suppressPackageWarnings',
'terse']);
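// Illustrative sketch: these options are typically set on the transformer in
// a package's pubspec.yaml; the values below are hypothetical.
//
//     transformers:
//     - $dart2js:
//         minify: true
//         checked: false
//         environment: {DEBUG_FLAG: "true"}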
/// A [Transformer] that uses dart2js's library API to transform Dart
/// entrypoints in "web" to JavaScript.
class Dart2JSTransformer extends Transformer implements LazyTransformer {
/// We use this to ensure that only one compilation is in progress at a time.
///
/// Dart2js uses lots of memory, so if we try to actually run compiles in
/// parallel, it takes down the VM. The tracking bug to do something better
/// is here: https://code.google.com/p/dart/issues/detail?id=14730.
static final _pool = new Pool(1);
final AssetEnvironment _environment;
final BarbackSettings _settings;
/// Whether source maps should be generated for the compiled JS.
bool get _generateSourceMaps => _settings.mode != BarbackMode.RELEASE;
Dart2JSTransformer.withSettings(this._environment, this._settings) {
var invalidOptions =
_settings.configuration.keys.toSet().difference(_validOptions);
if (invalidOptions.isEmpty) return;
throw new FormatException(
"Unrecognized dart2js " "${pluralize('option', invalidOptions.length)} "
"${toSentence(invalidOptions.map((option) => '"$option"'))}.");
}
Dart2JSTransformer(AssetEnvironment environment, BarbackMode mode)
: this.withSettings(environment, new BarbackSettings({}, mode));
/// Only ".dart" entrypoint files within a buildable directory are processed.
bool isPrimary(AssetId id) {
if (id.extension != ".dart") return false;
// "lib" should only contain libraries. For efficiency's sake, we don't
// look for entrypoints in there.
return !id.path.startsWith("lib/");
}
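  // Illustrative sketch of which assets are considered primary, using a
  // hypothetical package "myapp":
  //
  //     isPrimary(new AssetId("myapp", "web/main.dart"));  // true
  //     isPrimary(new AssetId("myapp", "lib/main.dart"));  // false ("lib/" is skipped)
  //     isPrimary(new AssetId("myapp", "web/index.html")); // false (not ".dart")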
Future apply(Transform transform) {
// TODO(nweiz): If/when barback starts reporting what assets were modified,
// don't re-run the entrypoint detection logic unless the primary input was
// actually modified. See issue 16817.
return _isEntrypoint(transform.primaryInput).then((isEntrypoint) {
if (!isEntrypoint) return null;
// Wait for any ongoing apply to finish first.
return _pool.withResource(() {
transform.logger.info("Compiling ${transform.primaryInput.id}...");
var stopwatch = new Stopwatch()..start();
return _doCompilation(transform).then((_) {
stopwatch.stop();
transform.logger.info(
"Took ${stopwatch.elapsed} to compile " "${transform.primaryInput.id}.");
});
});
});
}
void declareOutputs(DeclaringTransform transform) {
var primaryId = transform.primaryId;
transform.declareOutput(primaryId.addExtension(".js"));
if (_generateSourceMaps) {
transform.declareOutput(primaryId.addExtension(".js.map"));
}
}
/// Returns whether or not [asset] might be an entrypoint.
Future<bool> _isEntrypoint(Asset asset) {
return asset.readAsString().then((code) {
try {
var name = asset.id.path;
if (asset.id.package != _environment.rootPackage.name) {
name += " in ${asset.id.package}";
}
var parsed = parseCompilationUnit(code, name: name);
return dart.isEntrypoint(parsed);
} on AnalyzerErrorGroup catch (e) {
// If we get a parse error, consider the asset primary so we report
// dart2js's more detailed error message instead.
return true;
}
});
}
/// Run the dart2js compiler.
Future _doCompilation(Transform transform) {
var provider = new _BarbackCompilerProvider(
_environment,
transform,
generateSourceMaps: _generateSourceMaps);
// Create a "path" to the entrypoint script. The entrypoint may not actually
// be on disk, but this gives dart2js a root to resolve relative paths
// against.
var id = transform.primaryInput.id;
var entrypoint = _environment.graph.packages[id.package].path(id.path);
// TODO(rnystrom): Should have more sophisticated error-handling here. Need
// to report compile errors to the user in an easily visible way. Need to
// make sure paths in errors are mapped to the original source path so they
// can understand them.
return dart.compile(
entrypoint,
provider,
commandLineOptions: _configCommandLineOptions,
csp: _configBool('csp'),
checked: _configBool('checked'),
minify: _configBool(
'minify',
defaultsTo: _settings.mode == BarbackMode.RELEASE),
verbose: _configBool('verbose'),
environment: _configEnvironment,
packageRoot: _environment.rootPackage.path("packages"),
analyzeAll: _configBool('analyzeAll'),
preserveUris: _configBool('preserveUris'),
suppressWarnings: _configBool('suppressWarnings'),
suppressHints: _configBool('suppressHints'),
suppressPackageWarnings: _configBool(
'suppressPackageWarnings',
defaultsTo: true),
terse: _configBool('terse'),
includeSourceMapUrls: _settings.mode != BarbackMode.RELEASE);
}
/// Parses and returns the "commandLineOptions" configuration option.
List<String> get _configCommandLineOptions {
if (!_settings.configuration.containsKey('commandLineOptions')) return null;
var options = _settings.configuration['commandLineOptions'];
if (options is List && options.every((option) => option is String)) {
return options;
}
throw new FormatException(
'Invalid value for '
'\$dart2js.commandLineOptions: ${JSON.encode(options)} (expected list '
'of strings).');
}
/// Parses and returns the "environment" configuration option.
Map<String, String> get _configEnvironment {
if (!_settings.configuration.containsKey('environment')) return null;
var environment = _settings.configuration['environment'];
if (environment is Map &&
environment.keys.every((key) => key is String) &&
environment.values.every((key) => key is String)) {
return environment;
}
throw new FormatException(
'Invalid value for \$dart2js.environment: '
'${JSON.encode(environment)} (expected map from strings to strings).');
}
/// Parses and returns a boolean configuration option.
///
/// [defaultsTo] is the default value of the option.
bool _configBool(String name, {bool defaultsTo: false}) {
if (!_settings.configuration.containsKey(name)) return defaultsTo;
var value = _settings.configuration[name];
if (value is bool) return value;
throw new FormatException(
'Invalid value for \$dart2js.$name: '
'${JSON.encode(value)} (expected true or false).');
}
}
/// Defines an interface for dart2js to communicate with barback and pub.
///
/// Note that most of the implementation of diagnostic handling here was
/// copied from [FormattingDiagnosticHandler] in dart2js. The primary
/// difference is that it uses barback's logging code and, more importantly, it
/// handles missing source files more gracefully.
class _BarbackCompilerProvider implements dart.CompilerProvider {
Uri get libraryRoot => Uri.parse("${path.toUri(_libraryRootPath)}/");
final AssetEnvironment _environment;
final Transform _transform;
String _libraryRootPath;
/// The map of previously loaded files.
///
/// Used to show where an error occurred in a source file.
final _sourceFiles = new Map<String, SourceFile>();
// TODO(rnystrom): Make these configurable.
/// Whether or not warnings should be logged.
var _showWarnings = true;
/// Whether or not hints should be logged.
var _showHints = true;
/// Whether or not verbose info messages should be logged.
var _verbose = false;
/// Whether an exception should be thrown on an error to stop compilation.
var _throwOnError = false;
/// This gets set after a fatal error is reported to quash any subsequent
/// errors.
var _isAborting = false;
final bool generateSourceMaps;
compiler.Diagnostic _lastKind = null;
static final int _FATAL =
compiler.Diagnostic.CRASH.ordinal |
compiler.Diagnostic.ERROR.ordinal;
static final int _INFO =
compiler.Diagnostic.INFO.ordinal |
compiler.Diagnostic.VERBOSE_INFO.ordinal;
_BarbackCompilerProvider(this._environment, this._transform,
{this.generateSourceMaps: true}) {
// Dart2js outputs source maps that reference the Dart SDK sources. For
// that to work, those sources need to be inside the build environment. We
// do that by placing them in a special "$sdk" pseudo-package. In order for
// dart2js to generate the right URLs to point to that package, we give it
// a library root that corresponds to where that package can be found
// relative to the public source directory containing that entrypoint.
//
// For example, say the package being compiled is "/dev/myapp", the
// entrypoint is "web/sub/foo/bar.dart", and the source directory is
// "web/sub". This means the SDK sources will be (conceptually) at:
//
// /dev/myapp/web/sub/packages/$sdk/lib/
//
// This implies that the asset path for a file in the SDK is:
//
// $sdk|lib/lib/...
//
// TODO(rnystrom): Fix this if #17751 is fixed.
var buildDir =
_environment.getSourceDirectoryContaining(_transform.primaryInput.id.path);
_libraryRootPath =
_environment.rootPackage.path(buildDir, "packages", r"$sdk");
}
/// A [CompilerInputProvider] for dart2js.
Future<String> provideInput(Uri resourceUri) {
// We only expect to get absolute "file:" URLs from dart2js.
assert(resourceUri.isAbsolute);
assert(resourceUri.scheme == "file");
var sourcePath = path.fromUri(resourceUri);
return _readResource(resourceUri).then((source) {
_sourceFiles[resourceUri.toString()] =
new StringSourceFile(path.relative(sourcePath), source);
return source;
});
}
/// A [CompilerOutputProvider] for dart2js.
EventSink<String> provideOutput(String name, String extension) {
// TODO(rnystrom): Do this more cleanly. See: #17403.
if (!generateSourceMaps && extension.endsWith(".map")) {
return new NullSink<String>();
}
// TODO(nweiz): remove this special case when dart2js stops generating these
// files.
if (extension.endsWith(".precompiled.js")) return new NullSink<String>();
var primaryId = _transform.primaryInput.id;
// Dart2js uses an empty string for the name of the entrypoint library.
// Otherwise, it's the name of a deferred library.
var outPath;
if (name == "") {
outPath = _transform.primaryInput.id.path;
} else {
var dirname = path.url.dirname(_transform.primaryInput.id.path);
outPath = path.url.join(dirname, name);
}
var id = new AssetId(primaryId.package, "$outPath.$extension");
// Make a sink that dart2js can write to.
var sink = new StreamController<String>();
// dart2js gives us strings, but stream assets expect byte lists.
var stream = UTF8.encoder.bind(sink.stream);
// And give it to barback as a stream it can read from.
_transform.addOutput(new Asset.fromStream(id, stream));
return sink;
}
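// A hedged illustration of the name-to-path mapping above (asset names are
// made up): with primary input "myapp|web/main.dart" and extension "js", the
// entrypoint output (name == "") becomes "myapp|web/main.dart.js", while a
// deferred library that dart2js names "foo" would become "myapp|web/foo.js".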
/// A [DiagnosticHandler] for dart2js, loosely based on
/// [FormattingDiagnosticHandler].
void handleDiagnostic(Uri uri, int begin, int end, String message,
compiler.Diagnostic kind) {
// TODO(ahe): Remove this when source map is handled differently.
if (kind.name == "source map") return;
if (_isAborting) return;
_isAborting = (kind == compiler.Diagnostic.CRASH);
var isInfo = (kind.ordinal & _INFO) != 0;
if (isInfo && uri == null && kind != compiler.Diagnostic.INFO) {
if (!_verbose && kind == compiler.Diagnostic.VERBOSE_INFO) return;
_transform.logger.info(message);
return;
}
// [_lastKind] records the previous non-INFO kind we saw.
// This is used to suppress info about a warning when warnings are
// suppressed, and similar for hints.
if (kind != compiler.Diagnostic.INFO) _lastKind = kind;
var logFn;
if (kind == compiler.Diagnostic.ERROR) {
logFn = _transform.logger.error;
} else if (kind == compiler.Diagnostic.WARNING) {
if (!_showWarnings) return;
logFn = _transform.logger.warning;
} else if (kind == compiler.Diagnostic.HINT) {
if (!_showHints) return;
logFn = _transform.logger.warning;
} else if (kind == compiler.Diagnostic.CRASH) {
logFn = _transform.logger.error;
} else if (kind == compiler.Diagnostic.INFO) {
if (_lastKind == compiler.Diagnostic.WARNING && !_showWarnings) return;
if (_lastKind == compiler.Diagnostic.HINT && !_showHints) return;
logFn = _transform.logger.info;
} else {
throw new Exception('Unknown kind: $kind (${kind.ordinal})');
}
var fatal = (kind.ordinal & _FATAL) != 0;
if (uri == null) {
logFn(message);
} else {
SourceFile file = _sourceFiles[uri.toString()];
if (file == null) {
// We got a message before loading the file, so just report the message
// itself.
logFn('$uri: $message');
} else {
logFn(file.getLocationMessage(message, begin, end));
}
}
if (fatal && _throwOnError) {
_isAborting = true;
throw new AbortLeg(message);
}
}
Future<String> _readResource(Uri url) {
return new Future.sync(() {
// Find the corresponding asset in barback.
var id = _sourceUrlToId(url);
if (id != null) return _transform.readInputAsString(id);
// Don't allow arbitrary file paths that point to things not in packages.
// Doing so won't work in Dartium.
throw new Exception(
"Cannot read $url because it is outside of the build environment.");
});
}
AssetId _sourceUrlToId(Uri url) {
// See if it's a package path.
var id = packagesUrlToId(url);
if (id != null) return id;
// See if it's a path to a "public" asset within the root package. All
// other files in the root package are not visible to transformers, so
// should be loaded directly from disk.
var sourcePath = path.fromUri(url);
if (_environment.containsPath(sourcePath)) {
var relative =
path.toUri(_environment.rootPackage.relative(sourcePath)).toString();
return new AssetId(_environment.rootPackage.name, relative);
}
return null;
}
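// For illustration (hypothetical layout): a served URL ending in
// "packages/foo/bar.dart" maps to the asset "foo|lib/bar.dart" via
// [packagesUrlToId]; a file path under the root package's public directories,
// say "<root>/web/main.dart", maps to "<root name>|web/main.dart"; anything
// else returns null and [_readResource] refuses to read it.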
}
/// An [EventSink] that discards all data. Provided to dart2js when we don't
/// want an actual output.
class NullSink<T> implements EventSink<T> {
void add(T event) {}
void addError(errorEvent, [StackTrace stackTrace]) {}
void close() {}
}

View file

@@ -1,32 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.dart_forwarding_transformer;
import 'dart:async';
import 'package:barback/barback.dart';
import '../utils.dart';
/// A single transformer that just forwards any ".dart" file as an output when
/// not in release mode.
///
/// Since the [Dart2JSTransformer] consumes its inputs, this is used in
/// parallel to make sure the original Dart file is still available for use by
/// Dartium.
class DartForwardingTransformer extends Transformer {
/// The mode that the transformer is running in.
final BarbackMode _mode;
DartForwardingTransformer(this._mode);
String get allowedExtensions => ".dart";
Future apply(Transform transform) {
return newFuture(() {
transform.addOutput(transform.primaryInput);
});
}
}

View file

@@ -1,429 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.dependency_computer;
import 'package:barback/barback.dart';
import 'package:path/path.dart' as p;
import '../dart.dart';
import '../io.dart';
import '../package.dart';
import '../package_graph.dart';
import '../utils.dart';
import 'cycle_exception.dart';
import 'transformer_config.dart';
import 'transformer_id.dart';
/// A class for determining dependencies between transformers and from Dart
/// libraries onto transformers.
class DependencyComputer {
/// The package graph being analyzed.
final PackageGraph _graph;
/// The names of packages for which [_PackageDependencyComputer]s are
/// currently loading.
///
/// This is used to detect transformer cycles. If a package's libraries or
/// transformers are referenced while the transformers that apply to it are
/// being processed, that indicates an unresolvable cycle.
final _loadingPackageComputers = new Set<String>();
/// [_PackageDependencyComputer]s that have been loaded.
final _packageComputers = new Map<String, _PackageDependencyComputer>();
/// A cache of the results of [transformersNeededByPackage].
final _transformersNeededByPackages = new Map<String, Set<TransformerId>>();
/// The set of all packages that neither use transformers themselves nor
/// import packages that use transformers.
///
/// This is precomputed before any package computers are loaded.
final _untransformedPackages = new Set<String>();
DependencyComputer(this._graph) {
for (var package in ordered(_graph.packages.keys)) {
if (_graph.transitiveDependencies(
package).every((dependency) => dependency.pubspec.transformers.isEmpty)) {
_untransformedPackages.add(package);
}
}
ordered(_graph.packages.keys).forEach(_loadPackageComputer);
}
/// Returns a dependency graph for [transformers], or for all transformers if
/// [transformers] is `null`.
///
/// This graph is represented by a map whose keys are the vertices and whose
/// values are sets representing edges from the given vertex. Each vertex is a
/// [TransformerId]. If there's an edge from `T1` to `T2`, then `T2` must be
/// loaded before `T1` can be loaded.
///
/// The returned graph is transitively closed. That is, if there's an edge
/// from `T1` to `T2` and an edge from `T2` to `T3`, there's also an edge from
/// `T1` to `T3`.
Map<TransformerId, Set<TransformerId>>
transformersNeededByTransformers([Iterable<TransformerId> transformers]) {
var result = {};
if (transformers == null) {
transformers = ordered(_graph.packages.keys).expand((packageName) {
var package = _graph.packages[packageName];
return package.pubspec.transformers.expand((phase) {
return phase.expand((config) {
var id = config.id;
if (id.isBuiltInTransformer) return [];
if (id.package != _graph.entrypoint.root.name &&
!config.canTransformPublicFiles) {
return [];
}
return [id];
});
});
});
}
for (var id in transformers) {
result[id] = _transformersNeededByTransformer(id);
}
return result;
}
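// A small worked sketch with hypothetical ids: if transformer "b/b" imports a
// library that "a/a" transforms, while "a/a" imports nothing transformed, the
// returned (transitively closed) graph would be
//
//     {a/a: {}, b/b: {a/a}}
//
// meaning "a/a" must be fully loaded before "b/b" can be loaded.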
/// Returns the set of all transformers needed to load the library identified
/// by [id].
Set<TransformerId> transformersNeededByLibrary(AssetId id) {
var library = _graph.packages[id.package].path(p.fromUri(id.path));
_loadPackageComputer(id.package);
return _packageComputers[id.package].transformersNeededByLibrary(
library).where((id) => !id.isBuiltInTransformer).toSet();
}
/// Returns the set of all transformers that need to be loaded before [id] is
/// loaded.
Set<TransformerId> _transformersNeededByTransformer(TransformerId id) {
if (id.isBuiltInTransformer) return new Set();
_loadPackageComputer(id.package);
return _packageComputers[id.package]._transformersNeededByTransformer(id);
}
/// Returns the set of all transformers that need to be loaded before
/// [packageUri] (a "package:" URI) can be safely imported from an external
/// package.
Set<TransformerId> _transformersNeededByPackageUri(Uri packageUri) {
var components = p.split(p.fromUri(packageUri.path));
var packageName = components.first;
if (_untransformedPackages.contains(packageName)) return new Set();
var package = _graph.packages[packageName];
if (package == null) {
// TODO(nweiz): include source range information here.
fail(
'A transformer imported unknown package "$packageName" (in ' '"$packageUri").');
}
var library = package.path('lib', p.joinAll(components.skip(1)));
_loadPackageComputer(packageName);
return _packageComputers[packageName].transformersNeededByLibrary(library);
}
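// For example (hypothetical URI): "package:foo/src/bar.dart" splits into the
// package name "foo" and the on-disk library "<foo>/lib/src/bar.dart", and the
// result is whatever foo's package computer reports for that library.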
/// Returns the set of all transformers that need to be loaded before
/// everything in [rootPackage] can be used.
///
/// This is conservative in that it returns all transformers that could
/// theoretically affect [rootPackage]. It only looks at which transformers
/// packages use and which packages they depend on; it ignores imports
/// entirely.
///
/// We fall back on this conservative analysis when a transformer
/// (transitively) imports a transformed library. The result of the
/// transformation may import any dependency or hit any transformer, so we
/// have to assume that it will.
Set<TransformerId> _transformersNeededByPackage(String rootPackage) {
if (_untransformedPackages.contains(rootPackage)) return new Set();
if (_transformersNeededByPackages.containsKey(rootPackage)) {
return _transformersNeededByPackages[rootPackage];
}
var results = new Set();
var seen = new Set();
traversePackage(packageName) {
if (seen.contains(packageName)) return;
seen.add(packageName);
var package = _graph.packages[packageName];
for (var phase in package.pubspec.transformers) {
for (var config in phase) {
var id = config.id;
if (id.isBuiltInTransformer) continue;
if (_loadingPackageComputers.contains(id.package)) {
throw new CycleException("$packageName is transformed by $id");
}
results.add(id);
}
}
var dependencies = packageName == _graph.entrypoint.root.name ?
package.immediateDependencies :
package.dependencies;
for (var dep in dependencies) {
try {
traversePackage(dep.name);
} on CycleException catch (error) {
throw error.prependStep("$packageName depends on ${dep.name}");
}
}
}
traversePackage(rootPackage);
_transformersNeededByPackages[rootPackage] = results;
return results;
}
/// Ensure that a [_PackageDependencyComputer] for [packageName] is loaded.
///
/// If the computer has already been loaded, this does nothing. If the
/// computer is in the process of being loaded, this throws a
/// [CycleException].
void _loadPackageComputer(String packageName) {
if (_loadingPackageComputers.contains(packageName)) {
throw new CycleException();
}
if (_packageComputers.containsKey(packageName)) return;
_loadingPackageComputers.add(packageName);
_packageComputers[packageName] =
new _PackageDependencyComputer(this, packageName);
_loadingPackageComputers.remove(packageName);
}
}
/// A helper class for [DependencyComputer.transformersNeededByTransformers]
/// that keeps
/// package-specific state and caches over the course of the computation.
class _PackageDependencyComputer {
/// The parent [DependencyComputer].
final DependencyComputer _dependencyComputer;
/// The package whose dependencies [this] is computing.
final Package _package;
/// The set of transformers that currently apply to [this].
///
/// This is added to phase-by-phase while [this] is being initialized. This is
/// necessary to model the dependencies of a transformer that's applied to its
/// own package.
final _applicableTransformers = new Set<TransformerConfig>();
/// A cache of imports and exports parsed from libraries in this package.
final _directives = new Map<Uri, Set<Uri>>();
/// The set of libraries for which there are currently active
/// [transformersNeededByLibrary] calls.
///
/// This is used to guard against infinite loops caused by libraries in
/// different packages importing one another circularly.
/// [transformersNeededByLibrary] will return an empty set for any active
/// libraries.
final _activeLibraries = new Set<String>();
/// A cache of the results of [_transformersNeededByTransformer].
final _transformersNeededByTransformers =
new Map<TransformerId, Set<TransformerId>>();
/// A cache of the results of [_getTransitiveExternalDirectives].
///
/// This is invalidated whenever [_applicableTransformers] changes.
final _transitiveExternalDirectives = new Map<String, Set<Uri>>();
_PackageDependencyComputer(DependencyComputer dependencyComputer,
String packageName)
: _dependencyComputer = dependencyComputer,
_package = dependencyComputer._graph.packages[packageName] {
// If [_package] uses its own transformers, there will be fewer transformers
// running on [_package] while its own transformers are loading than there
// will be once all its transformers are finished loading. To handle this,
// we run [_transformersNeededByTransformer] to pre-populate
// [_transformersNeededByTransformers] while [_applicableTransformers] is
// smaller.
for (var phase in _package.pubspec.transformers) {
for (var config in phase) {
var id = config.id;
try {
if (id.package != _package.name) {
// Probe [id]'s transformer dependencies to ensure that it doesn't
// depend on this package. If it does, a CycleError will be thrown.
_dependencyComputer._transformersNeededByTransformer(id);
} else {
// Store the transformers needed specifically with the current set
// of [_applicableTransformers]. When reporting this transformer's
// dependencies, [DependencyComputer.transformersNeededByTransformers] will use
// this stored set of dependencies rather than the potentially wider
// set that would be recomputed if [transformersNeededByLibrary]
// were called anew.
_transformersNeededByTransformers[id] =
transformersNeededByLibrary(_package.transformerPath(id));
}
} on CycleException catch (error) {
throw error.prependStep("$packageName is transformed by $id");
}
}
// Clear the cached imports and exports because the new transformers may
// start transforming a library whose directives were previously
// statically analyzable.
_transitiveExternalDirectives.clear();
_applicableTransformers.addAll(phase);
}
}
/// Returns the set of all transformers that need to be loaded before [id] is
/// loaded.
///
/// [id] must refer to a transformer in [_package].
Set<TransformerId> _transformersNeededByTransformer(TransformerId id) {
assert(id.package == _package.name);
if (_transformersNeededByTransformers.containsKey(id)) {
return _transformersNeededByTransformers[id];
}
_transformersNeededByTransformers[id] =
transformersNeededByLibrary(_package.transformerPath(id));
return _transformersNeededByTransformers[id];
}
/// Returns the set of all transformers that need to be loaded before
/// [library] is imported.
///
/// If [library] or anything it imports/exports within this package is
/// transformed by [_applicableTransformers], this will return a conservative
/// set of transformers (see also
/// [DependencyComputer._transformersNeededByPackage]).
Set<TransformerId> transformersNeededByLibrary(String library) {
library = p.normalize(library);
if (_activeLibraries.contains(library)) return new Set();
_activeLibraries.add(library);
try {
var externalDirectives = _getTransitiveExternalDirectives(library);
if (externalDirectives == null) {
var rootName = _dependencyComputer._graph.entrypoint.root.name;
var dependencies = _package.name == rootName ?
_package.immediateDependencies :
_package.dependencies;
// If anything transitively imported/exported by [library] within this
// package is modified by a transformer, we don't know what it will
// load, so we take the conservative approach and say it depends on
// everything.
return _applicableTransformers.map(
(config) => config.id).toSet().union(unionAll(dependencies.map((dep) {
try {
return _dependencyComputer._transformersNeededByPackage(dep.name);
} on CycleException catch (error) {
throw error.prependStep("${_package.name} depends on ${dep.name}");
}
})));
} else {
// If nothing's transformed, then we only depend on the transformers
// used by the external packages' libraries that we import or export.
return unionAll(externalDirectives.map((uri) {
try {
return _dependencyComputer._transformersNeededByPackageUri(uri);
} on CycleException catch (error) {
var packageName = p.url.split(uri.path).first;
throw error.prependStep("${_package.name} depends on $packageName");
}
}));
}
} finally {
_activeLibraries.remove(library);
}
}
/// Returns the set of all external package libraries transitively imported or
/// exported by [rootLibrary].
///
/// All of the returned URIs will have the "package:" scheme. None of them
/// will be URIs for this package.
///
/// If [rootLibrary] transitively imports or exports a library that's modified
/// by a transformer, this will return `null`.
Set<Uri> _getTransitiveExternalDirectives(String rootLibrary) {
rootLibrary = p.normalize(rootLibrary);
if (_transitiveExternalDirectives.containsKey(rootLibrary)) {
return _transitiveExternalDirectives[rootLibrary];
}
var results = new Set();
var seen = new Set();
traverseLibrary(library) {
library = p.normalize(library);
if (seen.contains(library)) return true;
seen.add(library);
var directives = _getDirectives(library);
if (directives == null) return false;
for (var uri in directives) {
var path;
if (uri.scheme == 'package') {
var components = p.split(p.fromUri(uri.path));
if (components.first != _package.name) {
results.add(uri);
continue;
}
path = _package.path('lib', p.joinAll(components.skip(1)));
} else if (uri.scheme == '' || uri.scheme == 'file') {
path = p.join(p.dirname(library), p.fromUri(uri));
} else {
// Ignore "dart:" URIs and theoretically-possible "http:" URIs.
continue;
}
if (!traverseLibrary(path)) return false;
}
return true;
}
_transitiveExternalDirectives[rootLibrary] =
traverseLibrary(rootLibrary) ? results : null;
return _transitiveExternalDirectives[rootLibrary];
}
/// Returns the set of all imports or exports in [library].
///
/// If [library] is modified by a transformer, this will return `null`.
Set<Uri> _getDirectives(String library) {
var libraryUri = p.toUri(p.normalize(library));
var relative = p.toUri(_package.relative(library)).path;
if (_applicableTransformers.any(
(config) => config.canTransform(relative))) {
_directives[libraryUri] = null;
return null;
}
// Check the cache *after* checking [_applicableTransformers] because
// [_applicableTransformers] changes over time so the directives may be
// invalidated.
if (_directives.containsKey(libraryUri)) return _directives[libraryUri];
// If a nonexistent library is imported, it will probably be generated by a
// transformer.
if (!fileExists(library)) {
_directives[libraryUri] = null;
return null;
}
_directives[libraryUri] = parseImportsAndExports(
readTextFile(library),
name: library).map((directive) => Uri.parse(directive.uri.stringValue)).toSet();
return _directives[libraryUri];
}
}

View file

@@ -1,69 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.excluding_aggregate_transformer;
import 'dart:async';
import 'package:barback/barback.dart';
import 'transformer_config.dart';
/// Decorates an inner [AggregateTransformer] and handles including and
/// excluding primary inputs.
class ExcludingAggregateTransformer extends AggregateTransformer {
/// If [config] defines includes or excludes, wraps [inner] in an
/// [ExcludingAggregateTransformer] that handles those.
///
/// Otherwise, just returns [inner] unmodified.
static AggregateTransformer wrap(AggregateTransformer inner,
TransformerConfig config) {
if (!config.hasExclusions) return inner;
if (inner is LazyAggregateTransformer) {
return new _LazyExcludingAggregateTransformer(
inner as LazyAggregateTransformer,
config);
} else if (inner is DeclaringAggregateTransformer) {
return new _DeclaringExcludingAggregateTransformer(
inner as DeclaringAggregateTransformer,
config);
} else {
return new ExcludingAggregateTransformer._(inner, config);
}
}
final AggregateTransformer _inner;
/// The config containing rules for which assets to include or exclude.
final TransformerConfig _config;
ExcludingAggregateTransformer._(this._inner, this._config);
classifyPrimary(AssetId id) {
if (!_config.canTransform(id.path)) return null;
return _inner.classifyPrimary(id);
}
Future apply(AggregateTransform transform) => _inner.apply(transform);
String toString() => _inner.toString();
}
class _DeclaringExcludingAggregateTransformer extends
ExcludingAggregateTransformer implements DeclaringAggregateTransformer {
_DeclaringExcludingAggregateTransformer(DeclaringAggregateTransformer inner,
TransformerConfig config)
: super._(inner as AggregateTransformer, config);
Future declareOutputs(DeclaringAggregateTransform transform) =>
(_inner as DeclaringAggregateTransformer).declareOutputs(transform);
}
class _LazyExcludingAggregateTransformer extends
_DeclaringExcludingAggregateTransformer implements LazyAggregateTransformer {
_LazyExcludingAggregateTransformer(DeclaringAggregateTransformer inner,
TransformerConfig config)
: super(inner, config);
}

View file

@@ -1,67 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.excluding_transformer;
import 'dart:async';
import 'package:barback/barback.dart';
import 'transformer_config.dart';
/// Decorates an inner [Transformer] and handles including and excluding
/// primary inputs.
class ExcludingTransformer extends Transformer {
/// If [config] defines includes or excludes, wraps [inner] in an
/// [ExcludingTransformer] that handles those.
///
/// Otherwise, just returns [inner] unmodified.
static Transformer wrap(Transformer inner, TransformerConfig config) {
if (!config.hasExclusions) return inner;
if (inner is LazyTransformer) {
// TODO(nweiz): Remove these unnecessary "as"es when issue 19046 is fixed.
return new _LazyExcludingTransformer(inner as LazyTransformer, config);
} else if (inner is DeclaringTransformer) {
return new _DeclaringExcludingTransformer(
inner as DeclaringTransformer,
config);
} else {
return new ExcludingTransformer._(inner, config);
}
}
final Transformer _inner;
/// The config containing rules for which assets to include or exclude.
final TransformerConfig _config;
ExcludingTransformer._(this._inner, this._config);
isPrimary(AssetId id) {
if (!_config.canTransform(id.path)) return false;
return _inner.isPrimary(id);
}
Future apply(Transform transform) => _inner.apply(transform);
String toString() => _inner.toString();
}
class _DeclaringExcludingTransformer extends ExcludingTransformer implements
DeclaringTransformer {
_DeclaringExcludingTransformer(DeclaringTransformer inner,
TransformerConfig config)
: super._(inner as Transformer, config);
Future declareOutputs(DeclaringTransform transform) =>
(_inner as DeclaringTransformer).declareOutputs(transform);
}
class _LazyExcludingTransformer extends _DeclaringExcludingTransformer
implements LazyTransformer {
_LazyExcludingTransformer(DeclaringTransformer inner,
TransformerConfig config)
: super(inner, config);
}

View file

@@ -1,171 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.foreign_transformer;
import 'dart:async';
import 'dart:isolate';
import 'package:barback/barback.dart';
import '../../../asset/dart/serialize.dart';
import 'excluding_transformer.dart';
import 'excluding_aggregate_transformer.dart';
import 'transformer_config.dart';
/// A wrapper for a transformer that's in a different isolate.
class _ForeignTransformer extends Transformer {
/// The port with which we communicate with the child isolate.
///
/// This port and all messages sent across it are specific to this
/// transformer.
final SendPort _port;
/// The result of calling [toString] on the transformer in the isolate.
final String _toString;
_ForeignTransformer(Map map)
: _port = map['port'],
_toString = map['toString'];
Future<bool> isPrimary(AssetId id) {
return call(_port, {
'type': 'isPrimary',
'id': serializeId(id)
});
}
Future apply(Transform transform) {
return call(_port, {
'type': 'apply',
'transform': serializeTransform(transform)
});
}
String toString() => _toString;
}
class _ForeignDeclaringTransformer extends _ForeignTransformer implements
DeclaringTransformer {
_ForeignDeclaringTransformer(Map map)
: super(map);
Future declareOutputs(DeclaringTransform transform) {
return call(_port, {
'type': 'declareOutputs',
'transform': serializeDeclaringTransform(transform)
});
}
}
class _ForeignLazyTransformer extends _ForeignDeclaringTransformer implements
LazyTransformer {
_ForeignLazyTransformer(Map map)
: super(map);
}
/// A wrapper for an aggregate transformer that's in a different isolate.
class _ForeignAggregateTransformer extends AggregateTransformer {
/// The port with which we communicate with the child isolate.
///
/// This port and all messages sent across it are specific to this
/// transformer.
final SendPort _port;
/// The result of calling [toString] on the transformer in the isolate.
final String _toString;
_ForeignAggregateTransformer(Map map)
: _port = map['port'],
_toString = map['toString'];
Future<String> classifyPrimary(AssetId id) {
return call(_port, {
'type': 'classifyPrimary',
'id': serializeId(id)
});
}
Future apply(AggregateTransform transform) {
return call(_port, {
'type': 'apply',
'transform': serializeAggregateTransform(transform)
});
}
String toString() => _toString;
}
class _ForeignDeclaringAggregateTransformer extends _ForeignAggregateTransformer
implements DeclaringAggregateTransformer {
_ForeignDeclaringAggregateTransformer(Map map)
: super(map);
Future declareOutputs(DeclaringAggregateTransform transform) {
return call(_port, {
'type': 'declareOutputs',
'transform': serializeDeclaringAggregateTransform(transform)
});
}
}
class _ForeignLazyAggregateTransformer extends
_ForeignDeclaringAggregateTransformer implements LazyAggregateTransformer {
_ForeignLazyAggregateTransformer(Map map)
: super(map);
}
/// A wrapper for a transformer group that's in a different isolate.
class _ForeignGroup implements TransformerGroup {
final Iterable<Iterable> phases;
/// The result of calling [toString] on the transformer group in the isolate.
final String _toString;
_ForeignGroup(TransformerConfig config, Map map)
: phases = map['phases'].map((phase) {
return phase.map(
(transformer) => deserializeTransformerLike(transformer, config)).toList();
}).toList(),
_toString = map['toString'];
String toString() => _toString;
}
/// Converts a serializable map into a [Transformer], an [AggregateTransformer],
/// or a [TransformerGroup].
deserializeTransformerLike(Map map, TransformerConfig config) {
var transformer;
switch (map['type']) {
case 'TransformerGroup':
return new _ForeignGroup(config, map);
case 'Transformer':
transformer = new _ForeignTransformer(map);
break;
case 'DeclaringTransformer':
transformer = new _ForeignDeclaringTransformer(map);
break;
case 'LazyTransformer':
transformer = new _ForeignLazyTransformer(map);
break;
case 'AggregateTransformer':
transformer = new _ForeignAggregateTransformer(map);
break;
case 'DeclaringAggregateTransformer':
transformer = new _ForeignDeclaringAggregateTransformer(map);
break;
case 'LazyAggregateTransformer':
transformer = new _ForeignLazyAggregateTransformer(map);
break;
default:
assert(false);
}
if (transformer is Transformer) {
return ExcludingTransformer.wrap(transformer, config);
} else {
assert(transformer is AggregateTransformer);
return ExcludingAggregateTransformer.wrap(transformer, config);
}
}

View file

@@ -1,276 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.load_all_transformers;
import 'dart:async';
import 'package:barback/barback.dart';
import '../log.dart' as log;
import '../package_graph.dart';
import '../utils.dart';
import 'asset_environment.dart';
import 'barback_server.dart';
import 'dependency_computer.dart';
import 'transformer_id.dart';
import 'transformer_loader.dart';
/// Loads all transformers depended on by packages in [environment].
///
/// This uses [environment]'s primary server to serve the Dart files from which
/// transformers are loaded, then adds the transformers to
/// `environment.barback`.
///
/// Any built-in transformers that are provided by the environment will
/// automatically be added to the end of the root package's cascade.
///
/// If [entrypoints] is passed, only transformers necessary to run those
/// entrypoints will be loaded.
Future loadAllTransformers(AssetEnvironment environment,
BarbackServer transformerServer, {Iterable<AssetId> entrypoints}) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var dependencyComputer = new DependencyComputer(environment.graph);
var necessaryTransformers;
join0() {
var transformersNeededByTransformers =
dependencyComputer.transformersNeededByTransformers(necessaryTransformers);
var buffer = new StringBuffer();
buffer.writeln("Transformer dependencies:");
transformersNeededByTransformers.forEach(((id, dependencies) {
if (dependencies.isEmpty) {
buffer.writeln("$id: -");
} else {
buffer.writeln("$id: ${toSentence(dependencies)}");
}
}));
log.fine(buffer);
var stagedTransformers =
_stageTransformers(transformersNeededByTransformers);
var packagesThatUseTransformers =
_packagesThatUseTransformers(environment.graph);
var loader = new TransformerLoader(environment, transformerServer);
join1(x0) {
var cache = x0;
var first = true;
var it0 = stagedTransformers.iterator;
break0() {
join2() {
new Future.value(
Future.wait(environment.graph.packages.values.map(((package) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
new Future.value(
loader.transformersForPhases(package.pubspec.transformers)).then((x0) {
try {
var phases = x0;
var transformers =
environment.getBuiltInTransformers(package);
join0() {
join1() {
newFuture((() {
return environment.barback.updateTransformers(
package.name,
phases);
}));
completer0.complete();
}
if (phases.isEmpty) {
completer0.complete(null);
} else {
join1();
}
}
if (transformers != null) {
phases.add(transformers);
join0();
} else {
join0();
}
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
})))).then((x1) {
try {
x1;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
if (cache != null) {
cache.save();
join2();
} else {
join2();
}
}
var trampoline0;
continue0() {
trampoline0 = null;
if (it0.moveNext()) {
var stage = it0.current;
join3(x2) {
var snapshotPath = x2;
first = false;
new Future.value(
loader.load(stage, snapshot: snapshotPath)).then((x3) {
trampoline0 = () {
trampoline0 = null;
try {
x3;
var packagesToUpdate = unionAll(stage.map(((id) {
return packagesThatUseTransformers[id];
})));
new Future.value(
Future.wait(packagesToUpdate.map(((packageName) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var package =
environment.graph.packages[packageName];
new Future.value(
loader.transformersForPhases(package.pubspec.transformers)).then((x0) {
try {
var phases = x0;
environment.barback.updateTransformers(
packageName,
phases);
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
})))).then((x4) {
trampoline0 = () {
trampoline0 = null;
try {
x4;
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
};
do trampoline0(); while (trampoline0 != null);
}, onError: completer0.completeError);
} catch (e2, s2) {
completer0.completeError(e2, s2);
}
};
do trampoline0(); while (trampoline0 != null);
}, onError: completer0.completeError);
}
if (cache == null || !first) {
join3(null);
} else {
join3(cache.snapshotPath(stage));
}
} else {
break0();
}
}
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
}
if (environment.rootPackage.dir == null) {
join1(null);
} else {
join1(environment.graph.loadTransformerCache());
}
}
if (entrypoints != null) {
join4() {
necessaryTransformers =
unionAll(entrypoints.map(dependencyComputer.transformersNeededByLibrary));
join5() {
join0();
}
if (necessaryTransformers.isEmpty) {
log.fine(
"No transformers are needed for ${toSentence(entrypoints)}.");
completer0.complete(null);
} else {
join5();
}
}
if (entrypoints.isEmpty) {
completer0.complete(null);
} else {
join4();
}
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Given [transformerDependencies], a directed acyclic graph, returns a list of
/// "stages" (sets of transformers).
///
/// Each stage must be fully loaded and passed to barback before the next stage
/// can be safely loaded. However, transformers within a stage can be safely
/// loaded in parallel.
List<Set<TransformerId>> _stageTransformers(Map<TransformerId,
Set<TransformerId>> transformerDependencies) {
// A map from transformer ids to the indices of the stages that those
// transformer ids should end up in. Populated by [stageNumberFor].
var stageNumbers = {};
var stages = [];
stageNumberFor(id) {
// Built-in transformers don't have to be loaded in stages, since they're
// run from pub's source. Return -1 so that the "next stage" is 0.
if (id.isBuiltInTransformer) return -1;
if (stageNumbers.containsKey(id)) return stageNumbers[id];
var dependencies = transformerDependencies[id];
stageNumbers[id] =
dependencies.isEmpty ? 0 : maxAll(dependencies.map(stageNumberFor)) + 1;
return stageNumbers[id];
}
for (var id in transformerDependencies.keys) {
var stageNumber = stageNumberFor(id);
if (stages.length <= stageNumber) stages.length = stageNumber + 1;
if (stages[stageNumber] == null) stages[stageNumber] = new Set();
stages[stageNumber].add(id);
}
return stages;
}
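// A worked sketch with hypothetical ids: given the dependency map
//
//     {t1: {}, t2: {t1}, t3: {t1}, t4: {t2, t3}}
//
// stageNumberFor assigns t1 -> 0, t2 -> 1, t3 -> 1, and t4 -> 2, so the result
// is [{t1}, {t2, t3}, {t4}]: t1 loads first, t2 and t3 can load in parallel,
// and t4 loads last.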
/// Returns a map from transformer ids to all packages in [graph] that use each
/// transformer.
Map<TransformerId, Set<String>> _packagesThatUseTransformers(PackageGraph graph)
{
var results = {};
for (var package in graph.packages.values) {
for (var phase in package.pubspec.transformers) {
for (var config in phase) {
results.putIfAbsent(config.id, () => new Set()).add(package.name);
}
}
}
return results;
}

View file

@@ -1,125 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.pub_package_provider;
import 'dart:async';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as path;
import '../io.dart';
import '../package_graph.dart';
import '../preprocess.dart';
import '../sdk.dart' as sdk;
import '../utils.dart';
/// An implementation of barback's [PackageProvider] interface so that barback
/// can find assets within pub packages.
class PubPackageProvider implements StaticPackageProvider {
final PackageGraph _graph;
final List<String> staticPackages;
Iterable<String> get packages =>
_graph.packages.keys.toSet().difference(staticPackages.toSet());
PubPackageProvider(PackageGraph graph)
: _graph = graph,
staticPackages = [
r"$pub",
r"$sdk"]..addAll(graph.packages.keys.where(graph.isPackageStatic));
Future<Asset> getAsset(AssetId id) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
join1() {
var nativePath = path.fromUri(id.path);
var file = _graph.packages[id.package].path(nativePath);
_assertExists(file, id);
completer0.complete(new Asset.fromPath(id, file));
}
if (id.package == r'$sdk') {
var parts = path.split(path.fromUri(id.path));
assert(parts.isNotEmpty && parts[0] == 'lib');
parts = parts.skip(1);
var file = path.join(sdk.rootDirectory, path.joinAll(parts));
_assertExists(file, id);
completer0.complete(new Asset.fromPath(id, file));
} else {
join1();
}
}
if (id.package == r'$pub') {
var components = path.url.split(id.path);
assert(components.isNotEmpty);
assert(components.first == 'lib');
components[0] = 'dart';
var file = assetPath(path.joinAll(components));
_assertExists(file, id);
join2() {
var versions = mapMap(_graph.packages, value: ((_, package) {
return package.version;
}));
var contents = readTextFile(file);
contents = preprocess(contents, versions, path.toUri(file));
completer0.complete(new Asset.fromString(id, contents));
}
if (!_graph.packages.containsKey("barback")) {
completer0.complete(new Asset.fromPath(id, file));
} else {
join2();
}
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Throw an [AssetNotFoundException] for [id] if [path] doesn't exist.
void _assertExists(String path, AssetId id) {
if (!fileExists(path)) throw new AssetNotFoundException(id);
}
Stream<AssetId> getAllAssetIds(String packageName) {
if (packageName == r'$pub') {
// "$pub" is a pseudo-package that allows pub's transformer-loading
// infrastructure to share code with pub proper. We provide it only during
// the initial transformer loading process.
var dartPath = assetPath('dart');
return new Stream.fromIterable(
listDir(dartPath, recursive: true)
    // Don't include directories.
    .where((file) => path.extension(file) == ".dart").map((library) {
var idPath = path.join('lib', path.relative(library, from: dartPath));
return new AssetId('\$pub', path.toUri(idPath).toString());
}));
} else if (packageName == r'$sdk') {
// "$sdk" is a pseudo-package that allows the dart2js transformer to find
// the Dart core libraries without hitting the file system directly. This
// ensures they work with source maps.
var libPath = path.join(sdk.rootDirectory, "lib");
return new Stream.fromIterable(
listDir(
libPath,
recursive: true).where((file) => path.extension(file) == ".dart").map((file) {
var idPath =
path.join("lib", path.relative(file, from: sdk.rootDirectory));
return new AssetId('\$sdk', path.toUri(idPath).toString());
}));
} else {
var package = _graph.packages[packageName];
return new Stream.fromIterable(
package.listFiles(beneath: 'lib').map((file) {
return new AssetId(
packageName,
path.toUri(package.relative(file)).toString());
}));
}
}
}

View file

@@ -1,71 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.source_directory;
import 'dart:async';
import 'package:watcher/watcher.dart';
import 'asset_environment.dart';
import 'barback_server.dart';
/// A directory in the entrypoint package whose contents have been made
/// available to barback and that are bound to a server.
class SourceDirectory {
final AssetEnvironment _environment;
/// The relative directory path within the package.
final String directory;
/// The hostname to serve this directory on.
final String hostname;
/// The port to serve this directory on.
final int port;
/// The server bound to this directory.
///
/// This is a future that will complete once [serve] has been called and the
/// server has been successfully spun up.
Future<BarbackServer> get server => _serverCompleter.future;
final _serverCompleter = new Completer<BarbackServer>();
/// The subscription to the [DirectoryWatcher] used to watch this directory
/// for changes.
///
/// If the directory is not being watched, this will be `null`.
StreamSubscription<WatchEvent> watchSubscription;
SourceDirectory(this._environment, this.directory, this.hostname, this.port);
/// Binds a server running on [hostname]:[port] to this directory.
Future<BarbackServer> serve() {
return BarbackServer.bind(
_environment,
hostname,
port,
rootDirectory: directory).then((server) {
_serverCompleter.complete(server);
return server;
});
}
/// Removes the source directory from the build environment.
///
/// Closes the server, removes the assets from barback, and stops watching it.
Future close() {
return server.then((server) {
var futures = [server.close()];
// Stop watching the directory.
if (watchSubscription != null) {
var cancel = watchSubscription.cancel();
if (cancel != null) futures.add(cancel);
}
return Future.wait(futures);
});
}
}

View file

@@ -1,144 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.transformer_cache;
import 'package:path/path.dart' as p;
import '../io.dart';
import '../log.dart' as log;
import '../package_graph.dart';
import '../sdk.dart' as sdk;
import '../utils.dart';
import 'transformer_id.dart';
/// A cache for managing a snapshot of the first "stage" of transformers to
/// load.
///
/// This uses the [_stageTransformers] notion of a stage. Transformers are
/// divided into stages for loading based on which transformers are needed to
/// load one another. For example, if a transformer T1 produces a file that's
/// imported by another transformer T2, T2 must be put in a stage after T1.
///
/// We only cache the first stage because it's the only stage whose contents are
/// independent of any configuration. Since most transformers don't import the
/// output of other transformers, many packages will only have one stage.
class TransformerCache {
final PackageGraph _graph;
/// The set of transformer ids that were previously cached.
///
/// If there was no previous cache, this will be empty.
Set<TransformerId> _oldTransformers;
/// The set of transformer ids that are newly cached or re-used from the
/// previous cache.
Set<TransformerId> _newTransformers;
/// The directory in which transformers are cached.
///
/// This may be `null` if there's no physical entrypoint directory.
String _dir;
/// The directory of the manifest listing which transformers were cached.
String get _manifestPath => p.join(_dir, "manifest.txt");
/// Loads the transformer cache for [environment].
///
/// This may modify the cache.
TransformerCache.load(PackageGraph graph)
: _graph = graph,
_dir = graph.entrypoint.root.path(".pub/transformers") {
_oldTransformers = _parseManifest();
}
/// Clear the cache if it depends on any package in [changedPackages].
void clearIfOutdated(Set<String> changedPackages) {
var snapshotDependencies = unionAll(_oldTransformers.map((id) {
// If the transformer cache contains transformers we don't know about,
// that's fine; we just won't load them.
if (!_graph.packages.containsKey(id.package)) return new Set();
return _graph.transitiveDependencies(
id.package).map((package) => package.name).toSet();
}));
// If none of the snapshot's dependencies have changed, then we can reuse
// it.
if (!overlaps(changedPackages, snapshotDependencies)) return;
// Otherwise, delete it.
deleteEntry(_dir);
_oldTransformers = new Set();
}
/// Returns the path for the transformer snapshot for [transformers], or
/// `null` if the transformers shouldn't be cached.
///
/// There may or may not exist a file at the returned path. If one does exist,
/// it can safely be used to load the stage. Otherwise, a snapshot of the
/// stage should be written there.
String snapshotPath(Set<TransformerId> transformers) {
var path = p.join(_dir, "transformers.snapshot");
if (_newTransformers != null) return path;
if (transformers.any((id) => _graph.isPackageMutable(id.package))) {
log.fine("Not caching mutable transformers.");
deleteEntry(_dir);
return null;
}
if (!_oldTransformers.containsAll(transformers)) {
log.fine("Cached transformer snapshot is out-of-date, deleting.");
deleteEntry(path);
} else {
log.fine("Using cached transformer snapshot.");
}
_newTransformers = transformers;
return path;
}
/// Saves the manifest to the transformer cache.
void save() {
// If we didn't write any snapshots, there's no need to write a manifest.
if (_newTransformers == null) {
if (_dir != null) deleteEntry(_dir);
return;
}
// We only need to rewrite the manifest if we created a new snapshot.
if (_oldTransformers.containsAll(_newTransformers)) return;
ensureDir(_dir);
writeTextFile(
_manifestPath,
"${sdk.version}\n" +
ordered(_newTransformers.map((id) => id.serialize())).join(","));
}
/// Parses the cache manifest and returns the set of previously-cached
/// transformers.
///
/// If the manifest indicates that the SDK version is out-of-date, this
/// deletes the existing cache. Otherwise, it returns the transformer ids
/// listed in the manifest.
Set<TransformerId> _parseManifest() {
if (!fileExists(_manifestPath)) return new Set();
var manifest = readTextFile(_manifestPath).split("\n");
// The first line of the manifest is the SDK version. We want to clear out
// the snapshots even if they're VM-compatible, since pub's transformer
// isolate scaffolding may have changed.
if (manifest.removeAt(0) != sdk.version.toString()) {
deleteEntry(_dir);
return new Set();
}
/// The second line of the manifest is a list of transformer ids used to
/// create the existing snapshot.
return manifest.single.split(
",").map((id) => new TransformerId.parse(id, null)).toSet();
}
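// A sketch of the manifest format parsed above (version and ids are
// hypothetical):
//
//     1.9.0-dev.7.0
//     bar,foo/src/my_transformer
//
// The first line is the SDK version; the second is the comma-separated,
// sorted list of serialized transformer ids covered by the cached snapshot.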
}

View file

@@ -1,166 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.transformer_config;
import 'package:glob/glob.dart';
import 'package:path/path.dart' as p;
import 'package:source_span/source_span.dart';
import 'package:yaml/yaml.dart';
import 'transformer_id.dart';
/// The configuration for a transformer.
///
/// This corresponds to the transformers listed in a pubspec, which have both an
/// [id] indicating the location of the transformer and configuration specific
/// to that use of the transformer.
class TransformerConfig {
/// The [id] of the transformer [this] is configuring.
final TransformerId id;
/// The configuration to pass to the transformer.
///
/// Any pub-specific configuration (i.e. keys starting with "$") will have
/// been stripped out of this and handled separately. This will be an empty
/// map if no configuration was provided.
final Map configuration;
/// The source span from which this configuration was parsed.
final SourceSpan span;
/// The primary input inclusions.
///
/// Each inclusion is an asset path. If this set is non-empty, then *only*
/// matching assets are allowed as a primary input by this transformer. If
/// `null`, all assets are included.
///
/// This is processed before [excludes]. If a transformer has both includes
/// and excludes, then the set of included assets is determined and assets
/// are excluded from that resulting set.
final Set<Glob> includes;
/// The primary input exclusions.
///
/// Any asset whose path matches one of these is not allowed as a primary input by
/// this transformer.
///
/// This is processed after [includes]. If a transformer has both includes
/// and excludes, then the set of included assets is determined and assets
/// are excluded from that resulting set.
final Set<Glob> excludes;
/// Returns whether this config excludes certain asset ids from being
/// processed.
bool get hasExclusions => includes != null || excludes != null;
/// Returns whether this transformer might transform a file that's visible to
/// the package's dependers.
bool get canTransformPublicFiles {
if (includes == null) return true;
return includes.any((glob) {
// Check whether the first path component of the glob is "lib", "bin", or
// contains wildcards that may cause it to match "lib" or "bin".
var first = p.posix.split(glob.toString()).first;
if (first.contains('{') ||
first.contains('*') ||
first.contains('[') ||
first.contains('?')) {
return true;
}
return first == 'lib' || first == 'bin';
});
}
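// Hedged examples of the check above (globs are made up): an includes set of
// {"web/**"} yields false, {"lib/**"} or {"bin/**"} yields true, and
// {"{lib,web}/**"} or {"*/foo.dart"} yields true because the first path
// component contains wildcard characters.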
/// Parses [identifier] as a [TransformerId] with [configuration].
///
/// [identifierSpan] is the source span for [identifier].
factory TransformerConfig.parse(String identifier, SourceSpan identifierSpan,
YamlMap configuration) =>
new TransformerConfig(
new TransformerId.parse(identifier, identifierSpan),
configuration);
factory TransformerConfig(TransformerId id, YamlMap configurationNode) {
parseField(key) {
if (!configurationNode.containsKey(key)) return null;
var fieldNode = configurationNode.nodes[key];
var field = fieldNode.value;
if (field is String) {
return new Set.from([new Glob(field, context: p.url, recursive: true)]);
}
if (field is! List) {
throw new SourceSpanFormatException(
'"$key" field must be a string or list.',
fieldNode.span);
}
return new Set.from(field.nodes.map((node) {
if (node.value is String) {
return new Glob(node.value, context: p.url, recursive: true);
}
throw new SourceSpanFormatException(
'"$key" field may contain only strings.',
node.span);
}));
}
var includes = null;
var excludes = null;
var configuration;
var span;
if (configurationNode == null) {
configuration = {};
span = id.span;
} else {
// Don't write to the immutable YAML map.
configuration = new Map.from(configurationNode);
span = configurationNode.span;
// Pull out the exclusions/inclusions.
includes = parseField("\$include");
configuration.remove("\$include");
excludes = parseField("\$exclude");
configuration.remove("\$exclude");
// All other keys starting with "$" are unexpected.
for (var key in configuration.keys) {
if (key is! String || !key.startsWith(r'$')) continue;
throw new SourceSpanFormatException(
'Unknown reserved field.',
configurationNode.nodes[key].span);
}
}
return new TransformerConfig._(id, configuration, span, includes, excludes);
}
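// A sketch of the pubspec syntax this factory accepts (the transformer name,
// globs, and extra option are hypothetical):
//
//     transformers:
//     - polymer:
//         $include: ["web/**.html", "web/index.dart"]
//         $exclude: "web/test/**"
//         entry_points: web/index.html
//
// "$include" and "$exclude" become [includes] and [excludes]; the remaining
// keys are passed through to the transformer as [configuration].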
TransformerConfig._(this.id, this.configuration, this.span, this.includes,
this.excludes);
String toString() => id.toString();
/// Returns whether the include/exclude rules allow the transformer to run on
/// [pathWithinPackage].
///
/// [pathWithinPackage] must be a URL-style path relative to the containing
/// package's root directory.
bool canTransform(String pathWithinPackage) {
if (excludes != null) {
// If there are any excludes, it must not match any of them.
for (var exclude in excludes) {
if (exclude.matches(pathWithinPackage)) return false;
}
}
// If there are any includes, it must match one of them.
return includes == null ||
includes.any((include) => include.matches(pathWithinPackage));
}
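// With the hypothetical config sketched above (includes "web/**.html" and
// "web/index.dart", exclude "web/test/**"): canTransform("web/index.dart")
// should be true, canTransform("web/test/x.html") false because the exclude
// matches, and canTransform("lib/foo.dart") false because no include matches.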
}

View file

@@ -1,99 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.transformer_id;
import 'dart:async';
import 'package:barback/barback.dart';
import 'package:source_span/source_span.dart';
import '../utils.dart';
/// A list of the names of all built-in transformers that pub exposes.
const _BUILT_IN_TRANSFORMERS = const ['\$dart2js'];
/// An identifier that indicates the library that contains a transformer.
///
/// It's possible that the library identified by [this] defines multiple
/// transformers. If so, they're all always loaded in the same phase.
class TransformerId {
/// The package containing the library where the transformer is defined.
final String package;
/// The `/`-separated path to the library that contains this transformer.
///
/// This is relative to the `lib/` directory in [package], and doesn't end in
/// `.dart`.
///
/// This can be null; if so, it indicates that the transformer(s) should be
/// loaded from `lib/transformer.dart` if that exists, and `lib/$package.dart`
/// otherwise.
final String path;
/// The source span from which this id was parsed.
final SourceSpan span;
/// Whether this ID points to a built-in transformer exposed by pub.
bool get isBuiltInTransformer => package.startsWith('\$');
/// Parses a transformer identifier.
///
/// A transformer identifier is a string of the form "package_name" or
/// "package_name/path/to/library". It does not have a trailing extension. If
/// it just has a package name, it expands to lib/transformer.dart if that
/// exists, or lib/${package}.dart otherwise. Otherwise, it expands to
/// lib/${path}.dart. In either case it's located in the given package.
factory TransformerId.parse(String identifier, SourceSpan span) {
if (identifier.isEmpty) {
throw new FormatException('Invalid library identifier: "".');
}
var parts = split1(identifier, "/");
if (parts.length == 1) {
return new TransformerId(parts.single, null, span);
}
return new TransformerId(parts.first, parts.last, span);
}
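// Hedged examples of the parse above (package names are made up):
//
//     new TransformerId.parse("polymer", span)
//         // => package "polymer", path null; getAssetId later resolves this
//         //    to lib/transformer.dart or lib/polymer.dart.
//     new TransformerId.parse("polymer/src/build/common", span)
//         // => package "polymer", path "src/build/common".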
TransformerId(this.package, this.path, this.span) {
if (!package.startsWith('\$')) return;
if (_BUILT_IN_TRANSFORMERS.contains(package)) return;
throw new SourceSpanFormatException(
'Unsupported built-in transformer $package.',
span);
}
bool operator ==(other) =>
other is TransformerId && other.package == package && other.path == path;
int get hashCode => package.hashCode ^ path.hashCode;
/// Returns a serialized form of [this] that can be passed to
/// [new TransformerId.parse].
String serialize() => path == null ? package : '$package/$path';
String toString() => serialize();
/// Returns the asset id for the library identified by this transformer id.
///
/// If `path` is null, this will determine which library to load. Unlike
/// [getAssetId], this doesn't take generated assets into account; it's used
/// to determine transformers' dependencies, which requires looking at files
/// on disk.
Future<AssetId> getAssetId(Barback barback) {
if (path != null) {
return new Future.value(new AssetId(package, 'lib/$path.dart'));
}
var transformerAsset = new AssetId(package, 'lib/transformer.dart');
return barback.getAssetById(
transformerAsset).then(
(_) =>
transformerAsset).catchError(
(e) => new AssetId(package, 'lib/$package.dart'),
test: (e) => e is AssetNotFoundException);
}
}
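
A short sketch of how the two identifier forms described above expand, assuming this (now removed) library is imported; the package name is hypothetical and the span argument is passed as null purely for illustration:

var simple = new TransformerId.parse("polymer", null);
// simple.package == "polymer", simple.path == null:
// loads lib/transformer.dart if it exists, otherwise lib/polymer.dart.

var scoped = new TransformerId.parse("polymer/src/build/common", null);
// scoped.package == "polymer", scoped.path == "src/build/common":
// loads lib/src/build/common.dart from the polymer package.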

View file

@ -1,141 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.transformer_isolate;
import 'dart:async';
import 'dart:convert';
import 'dart:isolate';
import 'package:barback/barback.dart';
import 'package:source_span/source_span.dart';
import 'package:stack_trace/stack_trace.dart';
import '../../../asset/dart/serialize.dart';
import '../barback.dart';
import '../exceptions.dart';
import '../dart.dart' as dart;
import '../log.dart' as log;
import '../utils.dart';
import 'asset_environment.dart';
import 'barback_server.dart';
import 'foreign_transformer.dart';
import 'transformer_config.dart';
import 'transformer_id.dart';
/// A wrapper for an isolate from which transformer plugins can be instantiated.
class TransformerIsolate {
/// The port used to communicate with the wrapped isolate.
final SendPort _port;
/// A map indicating the barback server URLs for each [TransformerId] that's
/// loaded in the wrapped isolate.
///
/// A barback server URL is the URL for the library that the given id
/// identifies. For example, the URL for "polymer/src/mirrors_remover" might
/// be "http://localhost:56234/packages/polymer/src/mirrors_remover.dart".
final Map<TransformerId, Uri> _idsToUrls;
/// The barback mode for this run of pub.
final BarbackMode _mode;
/// Spawns an isolate that loads all transformer libraries defined by [ids].
///
/// This doesn't actually instantiate any transformers, since a
/// [TransformerId] doesn't define the transformers' configuration. The
/// transformers can be constructed using [create].
///
/// If [snapshot] is passed, the isolate will be loaded from that path if it
/// exists. Otherwise, a snapshot of the isolate's code will be saved to that
/// path once the isolate is loaded.
static Future<TransformerIsolate> spawn(AssetEnvironment environment,
BarbackServer transformerServer, List<TransformerId> ids, {String snapshot}) {
return mapFromIterableAsync(ids, value: (id) {
return id.getAssetId(environment.barback);
}).then((idsToAssetIds) {
var baseUrl = transformerServer.url;
var idsToUrls = mapMap(idsToAssetIds, value: (id, assetId) {
var path = assetId.path.replaceFirst('lib/', '');
return Uri.parse('package:${id.package}/$path');
});
var code = new StringBuffer();
code.writeln("import 'dart:isolate';");
for (var url in idsToUrls.values) {
code.writeln("import '$url';");
}
code.writeln("import r'package:\$pub/transformer_isolate.dart';");
code.writeln(
"void main(_, SendPort replyTo) => loadTransformers(replyTo);");
log.fine("Loading transformers from $ids");
var port = new ReceivePort();
return dart.runInIsolate(
code.toString(),
port.sendPort,
packageRoot: baseUrl.resolve('packages'),
snapshot: snapshot).then((_) => port.first).then((sendPort) {
return new TransformerIsolate._(sendPort, environment.mode, idsToUrls);
}).catchError((error, stackTrace) {
if (error is! CrossIsolateException) throw error;
if (error.type != 'IsolateSpawnException') throw error;
// TODO(nweiz): don't parse this as a string once issues 12617 and 12689
// are fixed.
var firstErrorLine = error.message.split('\n')[1];
// The isolate error message contains the fully expanded path, not the
// "package:" URI, so we have to be liberal in what we look for in the
// error message.
var missingTransformer = idsToUrls.keys.firstWhere(
(id) =>
firstErrorLine.startsWith("Uncaught Error: Load Error: Failure getting ") &&
firstErrorLine.contains(idsToUrls[id].path),
orElse: () => throw error);
var packageUri = idToPackageUri(idsToAssetIds[missingTransformer]);
// If there was an IsolateSpawnException and the import that actually
// failed was the one we were loading transformers from, throw an
// application exception with a more user-friendly message.
fail('Transformer library "$packageUri" not found.', error, stackTrace);
});
});
}
TransformerIsolate._(this._port, this._mode, this._idsToUrls);
/// Instantiate the transformers in the [config.id] with
/// [config.configuration].
///
/// If there are no transformers defined in the given library, this will
/// return an empty set.
Future<Set<Transformer>> create(TransformerConfig config) {
return call(_port, {
'library': _idsToUrls[config.id].toString(),
'mode': _mode.name,
'configuration': JSON.encode(config.configuration)
}).then((transformers) {
transformers = transformers.map(
(transformer) => deserializeTransformerLike(transformer, config)).toSet();
log.fine("Transformers from $config: $transformers");
return transformers;
}).catchError((error, stackTrace) {
throw new TransformerLoadError(error, config.span);
});
}
}
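
To make the string-building in spawn() above concrete, this is the shape of the bootstrap source it generates for two hypothetical transformer ids, "polymer" and "observe/transformer" (illustrative only; the exact URLs depend on each id's resolved asset):

import 'dart:isolate';
import 'package:polymer/transformer.dart';
import 'package:observe/transformer.dart';
import r'package:$pub/transformer_isolate.dart';
void main(_, SendPort replyTo) => loadTransformers(replyTo);

The resulting string is handed to dart.runInIsolate() along with a ReceivePort's SendPort, and the spawned isolate replies with the port that later create() calls communicate over.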
/// An error thrown when a transformer fails to load.
class TransformerLoadError extends SourceSpanException implements
WrappedException {
final CrossIsolateException innerError;
Chain get innerChain => innerError.stackTrace;
TransformerLoadError(CrossIsolateException error, SourceSpan span)
: innerError = error,
super("Error loading transformer: ${error.message}", span);
}

View file

@ -1,249 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.transformer_loader;
import 'dart:async';
import 'package:barback/barback.dart';
import '../log.dart' as log;
import '../utils.dart';
import 'asset_environment.dart';
import 'barback_server.dart';
import 'dart2js_transformer.dart';
import 'excluding_transformer.dart';
import 'transformer_config.dart';
import 'transformer_id.dart';
import 'transformer_isolate.dart';
/// A class that loads transformers defined in specific files.
class TransformerLoader {
final AssetEnvironment _environment;
final BarbackServer _transformerServer;
final _isolates = new Map<TransformerId, TransformerIsolate>();
final _transformers = new Map<TransformerConfig, Set<Transformer>>();
/// The packages that use each transformer id.
///
/// Used for error reporting.
final _transformerUsers = new Map<TransformerId, Set<String>>();
TransformerLoader(this._environment, this._transformerServer) {
for (var package in _environment.graph.packages.values) {
for (var config in unionAll(package.pubspec.transformers)) {
_transformerUsers.putIfAbsent(
config.id,
() => new Set<String>()).add(package.name);
}
}
}
/// Loads a transformer plugin isolate that imports the transformer libraries
/// indicated by [ids].
///
/// Once the returned future completes, transformer instances from this
/// isolate can be created using [transformersFor] or [transformersForPhase].
///
/// This skips any ids that have already been loaded.
Future load(Iterable<TransformerId> ids, {String snapshot}) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
ids = ids.where(((id) {
return !_isolates.containsKey(id);
})).toList();
join0() {
new Future.value(
log.progress("Loading ${toSentence(ids)} transformers", (() {
return TransformerIsolate.spawn(
_environment,
_transformerServer,
ids,
snapshot: snapshot);
}))).then((x0) {
try {
var isolate = x0;
var it0 = ids.iterator;
break0() {
completer0.complete();
}
var trampoline0;
continue0() {
trampoline0 = null;
if (it0.moveNext()) {
var id = it0.current;
_isolates[id] = isolate;
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
} else {
break0();
}
}
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
if (ids.isEmpty) {
completer0.complete(null);
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Instantiates and returns all transformers in the library indicated by
/// [config] with the given configuration.
///
/// If this is called before the library has been loaded into an isolate via
/// [load], it will return an empty set.
Future<Set<Transformer>> transformersFor(TransformerConfig config) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
join1() {
var transformer;
join2() {
_transformers[config] =
new Set.from([ExcludingTransformer.wrap(transformer, config)]);
completer0.complete(_transformers[config]);
}
catch0(error, stackTrace) {
try {
if (error is FormatException) {
fail(error.message, error, stackTrace);
join2();
} else {
throw error;
}
} catch (error, stackTrace) {
completer0.completeError(error, stackTrace);
}
}
try {
transformer = new Dart2JSTransformer.withSettings(
_environment,
new BarbackSettings(config.configuration, _environment.mode));
join2();
} catch (e0, s0) {
catch0(e0, s0);
}
}
if (_isolates.containsKey(config.id)) {
new Future.value(_isolates[config.id].create(config)).then((x0) {
try {
var transformers = x0;
join3() {
var message = "No transformers";
join4() {
var location;
join5() {
var users =
toSentence(ordered(_transformerUsers[config.id]));
fail(
"${message} were defined in ${location},\n" "required by ${users}.");
join1();
}
if (config.id.path == null) {
location =
'package:${config.id.package}/transformer.dart or '
'package:${config.id.package}/${config.id.package}.dart';
join5();
} else {
location = 'package:${config}.dart';
join5();
}
}
if (config.configuration.isNotEmpty) {
message += " that accept configuration";
join4();
} else {
join4();
}
}
if (transformers.isNotEmpty) {
_transformers[config] = transformers;
completer0.complete(transformers);
} else {
join3();
}
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
} else {
join6() {
join1();
}
if (config.id.package != '\$dart2js') {
completer0.complete(new Future.value(new Set()));
} else {
join6();
}
}
}
if (_transformers.containsKey(config)) {
completer0.complete(_transformers[config]);
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Loads all transformers defined in each phase of [phases].
///
/// If any library hasn't yet been loaded via [load], it will be ignored.
Future<List<Set<Transformer>>>
transformersForPhases(Iterable<Set<TransformerConfig>> phases) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
new Future.value(Future.wait(phases.map(((phase) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
new Future.value(
waitAndPrintErrors(phase.map(transformersFor))).then((x0) {
try {
var transformers = x0;
completer0.complete(unionAll(transformers));
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
})))).then((x0) {
try {
var result = x0;
completer0.complete(result.toList());
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
}
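
Since this commit switches pub to native async/await, the continuation-passing code above collapses considerably when written by hand. A rough sketch of the equivalent load() (an approximation, not the literal new source):

Future load(Iterable<TransformerId> ids, {String snapshot}) async {
  // Skip any ids that already have an isolate.
  ids = ids.where((id) => !_isolates.containsKey(id)).toList();
  if (ids.isEmpty) return;

  var isolate = await log.progress(
      "Loading ${toSentence(ids)} transformers",
      () => TransformerIsolate.spawn(_environment, _transformerServer, ids,
          snapshot: snapshot));

  for (var id in ids) {
    _isolates[id] = isolate;
  }
}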

View file

@ -1,321 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.web_socket_api;
import 'dart:async';
import 'dart:io';
import 'package:http_parser/http_parser.dart';
import 'package:path/path.dart' as path;
import 'package:json_rpc_2/json_rpc_2.dart' as json_rpc;
import '../exit_codes.dart' as exit_codes;
import '../io.dart';
import '../log.dart' as log;
import '../utils.dart';
import 'asset_environment.dart';
/// Implements the [WebSocket] API for communicating with a running pub serve
/// process, mainly for use by the Editor.
///
/// This is a [JSON-RPC 2.0](http://www.jsonrpc.org/specification) server. Its
/// methods are described in the method-level documentation below.
class WebSocketApi {
final AssetEnvironment _environment;
final json_rpc.Server _server;
/// Whether the application should exit when this connection closes.
bool _exitOnClose = false;
WebSocketApi(CompatibleWebSocket socket, this._environment)
: _server = new json_rpc.Server(socket) {
_server.registerMethod("urlToAssetId", _urlToAssetId);
_server.registerMethod("pathToUrls", _pathToUrls);
_server.registerMethod("serveDirectory", _serveDirectory);
_server.registerMethod("unserveDirectory", _unserveDirectory);
/// Tells the server to exit as soon as this WebSocket connection is closed.
///
/// This takes no arguments and returns no results. It can safely be called
/// as a JSON-RPC notification.
_server.registerMethod("exitOnClose", () {
_exitOnClose = true;
});
}
/// Listens on the socket.
///
/// Returns a future that completes when the socket has closed. It will
/// complete with an error if the socket had an error, otherwise it will
/// complete to `null`.
Future listen() {
return _server.listen().then((_) {
if (!_exitOnClose) return;
log.message("WebSocket connection closed, terminating.");
flushThenExit(exit_codes.SUCCESS);
});
}
/// Given a URL to an asset that is served by pub, returns the ID of the
/// asset that would be accessed by that URL.
///
/// The method name is "urlToAssetId" and it takes a "url" parameter for the
/// URL being mapped:
///
/// "params": {
/// "url": "http://localhost:8080/index.html"
/// }
///
/// If successful, it returns a map containing the asset ID's package and
/// path:
///
/// "result": {
/// "package": "myapp",
/// "path": "web/index.html"
/// }
///
/// The "path" key in the result is a URL path that's relative to the root
/// directory of the package identified by "package". The location of this
/// package may vary depending on which source it was installed from.
///
/// An optional "line" key may be provided whose value must be an integer. If
/// given, the result will also include a "line" key that maps the line in
/// the served final file back to the corresponding source line in the asset
/// that was used to generate that file.
///
/// Examples (where "myapp" is the root package and pub serve is being run
/// normally with "web" bound to port 8080 and "test" to 8081):
///
/// http://localhost:8080/index.html -> myapp|web/index.html
/// http://localhost:8081/sub/main.dart -> myapp|test/sub/main.dart
///
/// If the URL is not a domain being served by pub, this returns an error:
///
/// http://localhost:1234/index.html -> NOT_SERVED error
///
/// This does *not* currently support the implicit index.html behavior that
/// pub serve provides for user-friendliness:
///
/// http://localhost:1234 -> NOT_SERVED error
///
/// This does *not* currently check to ensure the asset actually exists. It
/// only maps what the corresponding asset *should* be for that URL.
Future<Map> _urlToAssetId(json_rpc.Parameters params) {
var url = params["url"].asUri;
// If a line number was given, map it to the output line.
var line = params["line"].asIntOr(null);
return _environment.getAssetIdForUrl(url).then((id) {
if (id == null) {
throw new json_rpc.RpcException(
_Error.NOT_SERVED,
'"${url.host}:${url.port}" is not being served by pub.');
}
// TODO(rnystrom): When this is hooked up to actually talk to barback to
// see if assets exist, consider supporting implicit index.html at that
// point.
var result = {
"package": id.package,
"path": id.path
};
// Map the line.
// TODO(rnystrom): Right now, source maps are not supported and it just
// passes through the original line. This lets the editor start using
// this API before we've fully implemented it. See #12339 and #16061.
if (line != null) result["line"] = line;
return result;
});
}
/// Given a path on the filesystem, returns the URLs served by pub that can be
/// used to access the asset found at that path.
///
/// The method name is "pathToUrls" and it takes a "path" key (a native OS
/// path which may be absolute or relative to the root directory of the
/// entrypoint package) for the path being mapped:
///
/// "params": {
/// "path": "web/index.html"
/// }
///
/// If successful, it returns a map containing the list of URLs that can be
/// used to access that asset.
///
/// "result": {
/// "urls": ["http://localhost:8080/index.html"]
/// }
///
/// The "path" key may refer to a path in another package, either by referring
/// to its location within the top-level "packages" directory or by referring
/// to its location on disk. Only the "lib" directory is visible in other
/// packages:
///
/// "params": {
/// "path": "packages/http/http.dart"
/// }
///
/// Assets in the "lib" directory will usually have one URL for each server:
///
/// "result": {
/// "urls": [
/// "http://localhost:8080/packages/http/http.dart",
/// "http://localhost:8081/packages/http/http.dart"
/// ]
/// }
///
/// An optional "line" key may be provided whose value must be an integer. If
/// given, the result will also include a "line" key that maps the line in
/// the source file to the corresponding output line in the resulting asset
/// served at the URL.
///
/// Examples (where "myapp" is the root package and pub serve is being run
/// normally with "web" bound to port 8080 and "test" to 8081):
///
/// web/index.html -> http://localhost:8080/index.html
/// test/sub/main.dart -> http://localhost:8081/sub/main.dart
///
/// If the asset is not in a directory being served by pub, returns an error:
///
/// example/index.html -> NOT_SERVED error
Future<Map> _pathToUrls(json_rpc.Parameters params) {
var assetPath = params["path"].asString;
var line = params["line"].asIntOr(null);
return _environment.getUrlsForAssetPath(assetPath).then((urls) {
if (urls.isEmpty) {
throw new json_rpc.RpcException(
_Error.NOT_SERVED,
'Asset path "$assetPath" is not currently being served.');
}
var result = {
"urls": urls.map((url) => url.toString()).toList()
};
// Map the line.
// TODO(rnystrom): Right now, source maps are not supported and it just
// passes through the original line. This lets the editor start using
// this API before we've fully implemented it. See #12339 and #16061.
if (line != null) result["line"] = line;
return result;
});
}
/// Given a relative directory path within the entrypoint package, binds a
/// new port to serve from that path and returns its URL.
///
/// The method name is "serveDirectory" and it takes a "path" key (a native
/// OS path relative to the root of the entrypoint package) for the directory
/// being served:
///
/// "params": {
/// "path": "example/awesome"
/// }
///
/// If successful, it returns a map containing the URL that can be used to
/// access the directory.
///
/// "result": {
/// "url": "http://localhost:8083"
/// }
///
/// If the directory is already being served, returns the previous URL.
Future<Map> _serveDirectory(json_rpc.Parameters params) {
var rootDirectory = _validateRelativePath(params, "path");
return _environment.serveDirectory(rootDirectory).then((server) {
return {
"url": server.url.toString()
};
}).catchError((error) {
if (error is! OverlappingSourceDirectoryException) throw error;
var dir = pluralize(
"directory",
error.overlappingDirectories.length,
plural: "directories");
var overlapping =
toSentence(error.overlappingDirectories.map((dir) => '"$dir"'));
print("data: ${error.overlappingDirectories}");
throw new json_rpc.RpcException(
_Error.OVERLAPPING,
'Path "$rootDirectory" overlaps already served $dir $overlapping.',
data: {
"directories": error.overlappingDirectories
});
});
}
/// Given a relative directory path within the entrypoint package, unbinds
/// the server previously bound to that directory and returns its (now
/// unreachable) URL.
///
/// The method name is "unserveDirectory" and it takes a "path" key (a
/// native OS path relative to the root of the entrypoint package) for the
/// directory being unserved:
///
/// "params": {
/// "path": "example/awesome"
/// }
///
/// If successful, it returns a map containing the URL that used to be used
/// to access the directory.
///
/// "result": {
/// "url": "http://localhost:8083"
/// }
///
/// If no server is bound to that directory, it returns a `NOT_SERVED` error.
Future<Map> _unserveDirectory(json_rpc.Parameters params) {
var rootDirectory = _validateRelativePath(params, "path");
return _environment.unserveDirectory(rootDirectory).then((url) {
if (url == null) {
throw new json_rpc.RpcException(
_Error.NOT_SERVED,
'Directory "$rootDirectory" is not bound to a server.');
}
return {
"url": url.toString()
};
});
}
/// Validates that [params] has a field named [key] whose value is a string
/// containing a relative path that doesn't reach out of the entrypoint
/// package's root directory.
///
/// Returns the path if found, or throws a [json_rpc.RpcException] if
/// validation fails.
String _validateRelativePath(json_rpc.Parameters params, String key) {
var pathString = params[key].asString;
if (!path.isRelative(pathString)) {
throw new json_rpc.RpcException.invalidParams(
'"$key" must be a relative path. Got "$pathString".');
}
if (!path.isWithin(".", pathString)) {
throw new json_rpc.RpcException.invalidParams(
'"$key" cannot reach out of its containing directory. ' 'Got "$pathString".');
}
return pathString;
}
}
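
As a concrete illustration of the protocol documented above, one complete "urlToAssetId" exchange, written here as Dart map literals (the id and values are invented; the wire framing is handled by package:json_rpc_2):

var request = {
  "jsonrpc": "2.0",
  "id": 1,
  "method": "urlToAssetId",
  "params": {"url": "http://localhost:8080/index.html", "line": 12}
};
var response = {
  "jsonrpc": "2.0",
  "id": 1,
  "result": {"package": "myapp", "path": "web/index.html", "line": 12}
};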
/// The pub-specific JSON RPC error codes.
class _Error {
/// The specified directory is not being served.
static const NOT_SERVED = 1;
/// The specified directory overlaps one or more ones already being served.
static const OVERLAPPING = 2;
}

View file

@ -1,88 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.cached_package;
import 'package:path/path.dart' as p;
import 'package:pub_semver/pub_semver.dart';
import 'package:yaml/yaml.dart';
import 'barback/transformer_config.dart';
import 'io.dart';
import 'package.dart';
import 'pubspec.dart';
/// A [Package] whose `lib` directory has been precompiled and cached.
///
/// When users of this class request path information about files that are
/// cached, this returns the cached information. It also wraps the package's
/// pubspec to report no transformers, since the transformations have all been
/// applied already.
class CachedPackage extends Package {
/// The directory containing the cached assets from this package.
///
/// Although only `lib` is cached, this directory corresponds to the root of
/// the package. The actual cached assets exist in `$_cacheDir/lib`.
final String _cacheDir;
/// Creates a new cached package wrapping [inner] with the cache at
/// [_cacheDir].
CachedPackage(Package inner, this._cacheDir)
: super(new _CachedPubspec(inner.pubspec), inner.dir);
String path(String part1, [String part2, String part3, String part4,
String part5, String part6, String part7]) {
if (_pathInCache(part1)) {
return p.join(_cacheDir, part1, part2, part3, part4, part5, part6, part7);
} else {
return super.path(part1, part2, part3, part4, part5, part6, part7);
}
}
String relative(String path) {
if (p.isWithin(path, _cacheDir)) return p.relative(path, from: _cacheDir);
return super.relative(path);
}
/// This will include the cached, transformed versions of files if [beneath]
/// is within a cached directory, but not otherwise.
List<String> listFiles({String beneath, recursive: true, bool useGitIgnore:
false}) {
if (beneath == null) {
return super.listFiles(recursive: recursive, useGitIgnore: useGitIgnore);
}
if (_pathInCache(beneath)) return listDir(p.join(_cacheDir, beneath));
return super.listFiles(
beneath: beneath,
recursive: recursive,
useGitIgnore: useGitIgnore);
}
/// Returns whether [relativePath], a path relative to the package's root,
/// is in a cached directory.
bool _pathInCache(String relativePath) => p.isWithin('lib', relativePath);
}
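
A small sketch of how the remapping above behaves, where `foo` is a hypothetical Package and the cache location is invented:

var cached = new CachedPackage(foo, '/home/user/.pub-cache/precompiled/foo');

cached.path('lib', 'foo.dart');
// -> '/home/user/.pub-cache/precompiled/foo/lib/foo.dart' (served from the cache)

cached.path('bin', 'foo.dart');
// -> delegates to the wrapped package, e.g. '<foo.dir>/bin/foo.dart'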
/// A pubspec wrapper that reports no transformers.
class _CachedPubspec implements Pubspec {
final Pubspec _inner;
YamlMap get fields => _inner.fields;
String get name => _inner.name;
Version get version => _inner.version;
List<PackageDep> get dependencies => _inner.dependencies;
List<PackageDep> get devDependencies => _inner.devDependencies;
List<PackageDep> get dependencyOverrides => _inner.dependencyOverrides;
PubspecEnvironment get environment => _inner.environment;
String get publishTo => _inner.publishTo;
Map<String, String> get executables => _inner.executables;
bool get isPrivate => _inner.isPrivate;
bool get isEmpty => _inner.isEmpty;
List<PubspecException> get allErrors => _inner.allErrors;
List<Set<TransformerConfig>> get transformers => const [];
_CachedPubspec(this._inner);
}

View file

@ -1,96 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command;
import 'package:args/args.dart';
import 'package:args/command_runner.dart';
import 'package:path/path.dart' as path;
import 'entrypoint.dart';
import 'log.dart' as log;
import 'global_packages.dart';
import 'system_cache.dart';
/// The base class for commands for the pub executable.
///
/// A command may either be a "leaf" command or it may be a parent for a set
/// of subcommands. Only leaf commands are ever actually invoked. If a command
/// has subcommands, then one of those must always be chosen.
abstract class PubCommand extends Command {
SystemCache get cache {
if (_cache == null) {
_cache = new SystemCache.withSources(isOffline: isOffline);
}
return _cache;
}
SystemCache _cache;
GlobalPackages get globals {
if (_globals == null) {
_globals = new GlobalPackages(cache);
}
return _globals;
}
GlobalPackages _globals;
/// Gets the [Entrypoint] package for the current working directory.
///
/// This will load the pubspec and fail with an error if the current directory
/// is not a package.
Entrypoint get entrypoint {
// Lazy load it.
if (_entrypoint == null) {
_entrypoint = new Entrypoint(
path.current,
cache,
packageSymlinks: globalResults['package-symlinks']);
}
return _entrypoint;
}
Entrypoint _entrypoint;
/// The URL for web documentation for this command.
String get docUrl => null;
/// Override this and return `false` to disallow trailing options from being
/// parsed after a non-option argument is parsed.
bool get allowTrailingOptions => true;
ArgParser get argParser {
// Lazily initialize the parser because the superclass constructor requires
// it but we want to initialize it based on [allowTrailingOptions].
if (_argParser == null) {
_argParser = new ArgParser(allowTrailingOptions: allowTrailingOptions);
}
return _argParser;
}
ArgParser _argParser;
/// Override this to use offline-only sources instead of hitting the network.
///
/// This will only be called before the [SystemCache] is created. After that,
/// it has no effect. This only needs to be set in leaf commands.
bool get isOffline => false;
String get usageFooter {
if (docUrl == null) return null;
return "See $docUrl for detailed documentation.";
}
void printUsage() {
log.message(usage);
}
/// Parses a user-supplied integer [intString] named [name].
///
/// If the parsing fails, prints a usage message and exits.
int parseInt(String intString, String name) {
try {
return int.parse(intString);
} on FormatException catch (_) {
usageException('Could not parse $name "$intString".');
}
}
}

View file

@ -1,204 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.barback;
import 'dart:async';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as path;
import '../command.dart';
import '../io.dart';
import '../log.dart' as log;
import '../utils.dart';
final _arrow = getSpecial('\u2192', '=>');
/// The set of top level directories in the entrypoint package that are built
/// when the user does "--all".
final _allSourceDirectories =
new Set<String>.from(["benchmark", "bin", "example", "test", "web"]);
/// Shared base class for [BuildCommand] and [ServeCommand].
abstract class BarbackCommand extends PubCommand {
/// The build mode.
BarbackMode get mode => new BarbackMode(argResults["mode"]);
/// The directories in the entrypoint package that should be added to the
/// build environment.
final sourceDirectories = new Set<String>();
/// The default build mode.
BarbackMode get defaultMode => BarbackMode.RELEASE;
/// Override this to specify the default source directories if none are
/// provided on the command line.
List<String> get defaultSourceDirectories;
BarbackCommand() {
argParser.addOption(
"mode",
defaultsTo: defaultMode.toString(),
help: "Mode to run transformers in.");
argParser.addFlag(
"all",
help: "Use all default source directories.",
defaultsTo: false,
negatable: false);
}
Future run() {
// Switch to JSON output if specified. We need to do this before parsing
// the source directories so an error will be correctly reported in JSON
// format.
log.json.enabled =
argResults.options.contains("format") && argResults["format"] == "json";
_parseSourceDirectories();
return onRunTransformerCommand();
}
/// Override this to run the actual command.
Future onRunTransformerCommand();
/// Parses the command-line arguments to determine the set of source
/// directories to add to the build environment.
///
/// If there are no arguments, this will just be [defaultSourceDirectories].
///
/// If the `--all` flag is set, then it will be all default directories
/// that exist.
///
/// Otherwise, all arguments should be the paths of directories to include.
///
/// Throws an exception if the arguments are invalid.
void _parseSourceDirectories() {
if (argResults["all"]) {
_addAllDefaultSources();
return;
}
// If no directories were specified, use the defaults.
if (argResults.rest.isEmpty) {
_addDefaultSources();
return;
}
sourceDirectories.addAll(argResults.rest);
// Prohibit "lib".
var disallowed = sourceDirectories.where((dir) {
var parts = path.split(path.normalize(dir));
return parts.isNotEmpty && parts.first == "lib";
});
if (disallowed.isNotEmpty) {
usageException(
_directorySentence(disallowed, "is", "are", "not allowed"));
}
// Make sure the source directories don't reach out of the package.
var invalid = sourceDirectories.where((dir) => !path.isWithin('.', dir));
if (invalid.isNotEmpty) {
usageException(
_directorySentence(invalid, "isn't", "aren't", "in this package"));
}
// Make sure all of the source directories exist.
var missing =
sourceDirectories.where((dir) => !dirExists(entrypoint.root.path(dir)));
if (missing.isNotEmpty) {
dataError(_directorySentence(missing, "does", "do", "not exist"));
}
// Make sure the directories don't overlap.
var sources = sourceDirectories.toList();
var overlapping = new Set();
for (var i = 0; i < sources.length; i++) {
for (var j = i + 1; j < sources.length; j++) {
if (path.isWithin(sources[i], sources[j]) ||
path.isWithin(sources[j], sources[i])) {
overlapping.add(sources[i]);
overlapping.add(sources[j]);
}
}
}
if (overlapping.isNotEmpty) {
usageException(
_directorySentence(overlapping, "cannot", "cannot", "overlap"));
}
}
/// Handles "--all" by adding all default source directories that are
/// present.
void _addAllDefaultSources() {
if (argResults.rest.isNotEmpty) {
usageException('Directory names are not allowed if "--all" is passed.');
}
// Include every build directory that exists in the package.
var dirs =
_allSourceDirectories.where((dir) => dirExists(entrypoint.root.path(dir)));
if (dirs.isEmpty) {
var defaultDirs =
toSentence(_allSourceDirectories.map((name) => '"$name"'));
dataError(
'There are no source directories present.\n'
'The default directories are $defaultDirs.');
}
sourceDirectories.addAll(dirs);
}
/// Adds the default sources that should be used if no directories are passed
/// on the command line.
void _addDefaultSources() {
sourceDirectories.addAll(
defaultSourceDirectories.where((dir) => dirExists(entrypoint.root.path(dir))));
// TODO(rnystrom): Hackish. Assumes there will only be one or two
// default sources. That's true for pub build and serve, but isn't as
// general as it could be.
if (sourceDirectories.isEmpty) {
var defaults;
if (defaultSourceDirectories.length == 1) {
defaults = 'a "${defaultSourceDirectories.first}" directory';
} else {
defaults =
'"${defaultSourceDirectories[0]}" and/or '
'"${defaultSourceDirectories[1]}" directories';
}
dataError(
"Your package must have $defaults,\n"
"or you must specify the source directories.");
}
}
/// Converts a list of [directoryNames] to a sentence.
///
/// After the list of directories, [singularVerb] will be used if there is
/// only one directory and [pluralVerb] will be used if there are more than
/// one. Then [suffix] is added to the end of the sentence, and, finally, a
/// period is added.
String _directorySentence(Iterable<String> directoryNames,
String singularVerb, String pluralVerb, String suffix) {
var directories =
pluralize('Directory', directoryNames.length, plural: 'Directories');
var names = toSentence(directoryNames.map((dir) => '"$dir"'));
var verb =
pluralize(singularVerb, directoryNames.length, plural: pluralVerb);
var result = "$directories $names $verb";
if (suffix != null) result += " $suffix";
result += ".";
return result;
}
}
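
Taken together, the directory rules above translate into command-line behavior like the following (all paths are hypothetical):

  pub build web test       -> accepted, provided both directories exist
  pub build lib            -> rejected: "lib" is not an allowed source directory
  pub build ../elsewhere   -> rejected: the path isn't inside this package
  pub build web web/sub    -> rejected: the directories overlap
  pub build --all web      -> rejected: directory names aren't allowed with "--all"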

View file

@ -1,275 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.build;
import 'dart:async';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as path;
import '../barback/asset_environment.dart';
import '../exit_codes.dart' as exit_codes;
import '../io.dart';
import '../log.dart' as log;
import '../utils.dart';
import 'barback.dart';
final _arrow = getSpecial('\u2192', '=>');
/// Handles the `build` pub command.
class BuildCommand extends BarbackCommand {
String get name => "build";
String get description => "Apply transformers to build a package.";
String get invocation => "pub build [options] [directories...]";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-build.html";
List<String> get aliases => const ["deploy", "settle-up"];
/// The path to the application's build output directory.
String get outputDirectory => argResults["output"];
List<String> get defaultSourceDirectories => ["web"];
/// The number of files that have been built and written to disc so far.
int builtFiles = 0;
BuildCommand() {
argParser.addOption(
"format",
help: "How output should be displayed.",
allowed: ["text", "json"],
defaultsTo: "text");
argParser.addOption(
"output",
abbr: "o",
help: "Directory to write build outputs to.",
defaultsTo: "build");
}
Future onRunTransformerCommand() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
cleanDir(outputDirectory);
var errorsJson = [];
var logJson = [];
completer0.complete(
AssetEnvironment.create(
entrypoint,
mode,
useDart2JS: true).then(((environment) {
environment.barback.errors.listen((error) {
log.error(log.red("Build error:\n$error"));
if (log.json.enabled) {
errorsJson.add({
"error": error.toString()
});
}
});
if (log.json.enabled) {
environment.barback.log.listen(
(entry) => logJson.add(_logEntryToJson(entry)));
}
return log.progress("Building ${entrypoint.root.name}", () {
return Future.wait(
sourceDirectories.map((dir) => environment.serveDirectory(dir))).then((_) {
return environment.barback.getAllAssets();
});
}).then((assets) {
var dart2JSEntrypoints = assets.where(
(asset) => asset.id.path.endsWith(".dart.js")).map((asset) => asset.id);
return Future.wait(assets.map(_writeAsset)).then((_) {
builtFiles += _copyBrowserJsFiles(dart2JSEntrypoints);
log.message(
'Built $builtFiles ${pluralize('file', builtFiles)} ' 'to "$outputDirectory".');
log.json.message({
"buildResult": "success",
"outputDirectory": outputDirectory,
"numFiles": builtFiles,
"log": logJson
});
});
});
})).catchError(((error) {
if (error is! BarbackException) throw error;
log.error(log.red("Build failed."));
log.json.message({
"buildResult": "failure",
"errors": errorsJson,
"log": logJson
});
return flushThenExit(exit_codes.DATA);
})));
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Writes [asset] to the appropriate build directory.
///
/// If [asset] is in the special "packages" directory, writes it to every
/// build directory.
Future _writeAsset(Asset asset) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
var destPath = _idToPath(asset.id);
join1() {
completer0.complete(_writeOutputFile(asset, destPath));
}
if (path.isWithin("packages", destPath)) {
completer0.complete(Future.wait(sourceDirectories.map(((buildDir) {
return _writeOutputFile(asset, path.join(buildDir, destPath));
}))));
} else {
join1();
}
}
if (mode == BarbackMode.RELEASE && asset.id.extension == ".dart") {
completer0.complete(null);
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Converts [id] to a relative path in the output directory for that asset.
///
/// This corresponds to the URL that could be used to request that asset from
/// pub serve.
///
/// Examples (where entrypoint is "myapp"):
///
/// myapp|web/index.html -> web/index.html
/// myapp|lib/lib.dart -> packages/myapp/lib.dart
/// foo|lib/foo.dart -> packages/foo/foo.dart
/// myapp|test/main.dart -> test/main.dart
/// foo|test/main.dart -> ERROR
///
/// Throws a [FormatException] if [id] is not a valid public asset.
String _idToPath(AssetId id) {
var parts = path.split(path.fromUri(id.path));
if (parts.length < 2) {
throw new FormatException(
"Can not build assets from top-level directory.");
}
// Map "lib" to the "packages" directory.
if (parts[0] == "lib") {
return path.join("packages", id.package, path.joinAll(parts.skip(1)));
}
// Shouldn't be trying to access non-public directories of other packages.
assert(id.package == entrypoint.root.name);
// Allow any path in the entrypoint package.
return path.joinAll(parts);
}
/// Writes the contents of [asset] to [relativePath] within the build
/// directory.
Future _writeOutputFile(Asset asset, String relativePath) {
builtFiles++;
var destPath = path.join(outputDirectory, relativePath);
ensureDir(path.dirname(destPath));
return createFileFromStream(asset.read(), destPath);
}
/// If this package depends directly on the `browser` package, this ensures
/// that the JavaScript bootstrap files are copied into `packages/browser/`
/// directories next to each entrypoint in [entrypoints].
///
/// Returns the number of files it copied.
int _copyBrowserJsFiles(Iterable<AssetId> entrypoints) {
// Must depend on the browser package.
if (!entrypoint.root.immediateDependencies.any(
(dep) => dep.name == 'browser' && dep.source == 'hosted')) {
return 0;
}
// Get all of the subdirectories that contain Dart entrypoints.
    var entrypointDirs = entrypoints
        // Convert the asset path to a native-separated one and get the
        // directory containing the entrypoint.
        .map((id) => path.dirname(path.fromUri(id.path)))
        // Don't copy files to the top levels of the build directories since
        // the normal lib asset copying will take care of that.
        .where((dir) => path.split(dir).length > 1)
        .toSet();
for (var dir in entrypointDirs) {
// TODO(nweiz): we should put browser JS files next to any HTML file
// rather than any entrypoint. An HTML file could import an entrypoint
// that's not adjacent.
_addBrowserJs(dir, "dart");
_addBrowserJs(dir, "interop");
}
return entrypointDirs.length * 2;
}
// TODO(nweiz): do something more principled when issue 6101 is fixed.
/// Ensures that the [name].js file is copied into [directory] in the output
/// directory, under `packages/browser/`.
void _addBrowserJs(String directory, String name) {
var jsPath = entrypoint.root.path(
outputDirectory,
directory,
'packages',
'browser',
'$name.js');
ensureDir(path.dirname(jsPath));
// TODO(rnystrom): This won't work if we get rid of symlinks and the top
// level "packages" directory. Will need to copy from the browser
// directory.
copyFile(path.join(entrypoint.packagesDir, 'browser', '$name.js'), jsPath);
}
/// Converts [entry] to a JSON object for use with JSON-formatted output.
Map _logEntryToJson(LogEntry entry) {
var data = {
"level": entry.level.name,
"transformer": {
"name": entry.transform.transformer.toString(),
"primaryInput": {
"package": entry.transform.primaryId.package,
"path": entry.transform.primaryId.path
},
},
"assetId": {
"package": entry.assetId.package,
"path": entry.assetId.path
},
"message": entry.message
};
if (entry.span != null) {
data["span"] = {
"url": entry.span.sourceUrl,
"start": {
"line": entry.span.start.line,
"column": entry.span.start.column
},
"end": {
"line": entry.span.end.line,
"column": entry.span.end.column
},
};
}
return data;
}
}

View file

@ -1,24 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.cache;
import '../command.dart';
import 'cache_add.dart';
import 'cache_list.dart';
import 'cache_repair.dart';
/// Handles the `cache` pub command.
class CacheCommand extends PubCommand {
String get name => "cache";
String get description => "Work with the system cache.";
String get invocation => "pub cache <subcommand>";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-cache.html";
CacheCommand() {
addSubcommand(new CacheAddCommand());
addSubcommand(new CacheListCommand());
addSubcommand(new CacheRepairCommand());
}
}

View file

@ -1,96 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.cache_add;
import 'dart:async';
import 'package:pub_semver/pub_semver.dart';
import '../command.dart';
import '../log.dart' as log;
import '../package.dart';
import '../utils.dart';
/// Handles the `cache add` pub command.
class CacheAddCommand extends PubCommand {
String get name => "add";
String get description => "Install a package.";
String get invocation =>
"pub cache add <package> [--version <constraint>] [--all]";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-cache.html";
CacheAddCommand() {
argParser.addFlag(
"all",
help: "Install all matching versions.",
negatable: false);
argParser.addOption("version", abbr: "v", help: "Version constraint.");
}
Future run() {
// Make sure there is a package.
if (argResults.rest.isEmpty) {
usageException("No package to add given.");
}
// Don't allow extra arguments.
if (argResults.rest.length > 1) {
var unexpected = argResults.rest.skip(1).map((arg) => '"$arg"');
var arguments = pluralize("argument", unexpected.length);
usageException("Unexpected $arguments ${toSentence(unexpected)}.");
}
var package = argResults.rest.single;
// Parse the version constraint, if there is one.
var constraint = VersionConstraint.any;
if (argResults["version"] != null) {
try {
constraint = new VersionConstraint.parse(argResults["version"]);
} on FormatException catch (error) {
usageException(error.message);
}
}
// TODO(rnystrom): Support installing from git too.
var source = cache.sources["hosted"];
// TODO(rnystrom): Allow specifying the server.
return source.getVersions(package, package).then((versions) {
versions = versions.where(constraint.allows).toList();
if (versions.isEmpty) {
// TODO(rnystrom): Show most recent unmatching version?
fail("Package $package has no versions that match $constraint.");
}
downloadVersion(Version version) {
var id = new PackageId(package, source.name, version, package);
return cache.contains(id).then((contained) {
if (contained) {
// TODO(rnystrom): Include source and description if not hosted.
// See solve_report.dart for code to harvest.
log.message("Already cached ${id.name} ${id.version}.");
return null;
}
// Download it.
return source.downloadToSystemCache(id);
});
}
if (argResults["all"]) {
// Install them in ascending order.
versions.sort();
return Future.forEach(versions, downloadVersion);
} else {
// Pick the best matching version.
versions.sort(Version.prioritize);
return downloadVersion(versions.last);
}
});
}
}
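
Illustrative invocations for the argument handling above (the package name and constraints are made up):

  pub cache add barback                              -> caches the best matching version
  pub cache add barback --version ">=0.14.0 <0.15.0" -> restricts candidates to that constraint
  pub cache add barback --version ">=0.14.0" --all   -> caches every matching version, oldest first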

View file

@ -1,39 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.cache_list;
import 'dart:convert';
import '../command.dart';
import '../log.dart' as log;
import '../source/cached.dart';
/// Handles the `cache list` pub command.
class CacheListCommand extends PubCommand {
String get name => "list";
String get description => "List packages in the system cache.";
String get invocation => "pub cache list";
bool get hidden => true;
bool get takesArguments => false;
void run() {
// TODO(keertip): Add flag to list packages from non default sources.
var packagesObj = <String, Map>{};
var source = cache.sources.defaultSource as CachedSource;
for (var package in source.getCachedPackages()) {
var packageInfo = packagesObj.putIfAbsent(package.name, () => {});
packageInfo[package.version.toString()] = {
'location': package.dir
};
}
// TODO(keertip): Add support for non-JSON format and check for --format
// flag.
log.message(JSON.encode({
'packages': packagesObj
}));
}
}

View file

@ -1,143 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.cache_repair;
import 'dart:async';
import '../command.dart';
import '../exit_codes.dart' as exit_codes;
import '../io.dart';
import '../log.dart' as log;
import '../source/cached.dart';
import '../utils.dart';
/// Handles the `cache repair` pub command.
class CacheRepairCommand extends PubCommand {
String get name => "repair";
String get description => "Reinstall cached packages.";
String get invocation => "pub cache repair";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-cache.html";
bool get takesArguments => false;
Future run() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var successes = 0;
var failures = 0;
var it0 = cache.sources.iterator;
break0() {
join0() {
join1() {
new Future.value(globals.repairActivatedPackages()).then((x0) {
try {
var results = x0;
join2() {
join3() {
join4() {
join5() {
completer0.complete();
}
if (failures > 0) {
new Future.value(
flushThenExit(exit_codes.UNAVAILABLE)).then((x1) {
try {
x1;
join5();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} else {
join5();
}
}
if (successes == 0 && failures == 0) {
log.message(
"No packages in cache, so nothing to repair.");
join4();
} else {
join4();
}
}
if (results.last > 0) {
var packages = pluralize("package", results.last);
log.message(
"Failed to reactivate ${log.red(results.last)} ${packages}.");
join3();
} else {
join3();
}
}
if (results.first > 0) {
var packages = pluralize("package", results.first);
log.message(
"Reactivated ${log.green(results.first)} ${packages}.");
join2();
} else {
join2();
}
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
}
if (failures > 0) {
var packages = pluralize("package", failures);
log.message(
"Failed to reinstall ${log.red(failures)} ${packages}.");
join1();
} else {
join1();
}
}
if (successes > 0) {
var packages = pluralize("package", successes);
log.message("Reinstalled ${log.green(successes)} ${packages}.");
join0();
} else {
join0();
}
}
var trampoline0;
continue0() {
trampoline0 = null;
if (it0.moveNext()) {
var source = it0.current;
join6() {
new Future.value(source.repairCachedPackages()).then((x2) {
trampoline0 = () {
trampoline0 = null;
try {
var results = x2;
successes += results.first;
failures += results.last;
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
} catch (e2, s2) {
completer0.completeError(e2, s2);
}
};
do trampoline0(); while (trampoline0 != null);
}, onError: completer0.completeError);
}
if (source is! CachedSource) {
continue0();
} else {
join6();
}
} else {
break0();
}
}
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
}
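
For comparison with the generated form above, a sketch of the same control flow using native async/await (an approximation, not the literal post-commit source):

Future run() async {
  var successes = 0;
  var failures = 0;
  for (var source in cache.sources) {
    if (source is! CachedSource) continue;
    var results = await source.repairCachedPackages();
    successes += results.first;
    failures += results.last;
  }
  if (successes > 0) {
    log.message("Reinstalled ${log.green(successes)} "
        "${pluralize('package', successes)}.");
  }
  if (failures > 0) {
    log.message("Failed to reinstall ${log.red(failures)} "
        "${pluralize('package', failures)}.");
  }
  var results = await globals.repairActivatedPackages();
  if (results.first > 0) {
    log.message("Reactivated ${log.green(results.first)} "
        "${pluralize('package', results.first)}.");
  }
  if (results.last > 0) {
    log.message("Failed to reactivate ${log.red(results.last)} "
        "${pluralize('package', results.last)}.");
  }
  if (successes == 0 && failures == 0) {
    log.message("No packages in cache, so nothing to repair.");
  }
  if (failures > 0) await flushThenExit(exit_codes.UNAVAILABLE);
}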

View file

@ -1,204 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.list;
import 'dart:async';
import 'dart:collection';
import '../ascii_tree.dart' as tree;
import '../command.dart';
import '../log.dart' as log;
import '../package.dart';
import '../package_graph.dart';
import '../utils.dart';
/// Handles the `deps` pub command.
class DepsCommand extends PubCommand {
String get name => "deps";
String get description => "Print package dependencies.";
List<String> get aliases => const ["dependencies", "tab"];
String get invocation => "pub deps";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-deps.html";
bool get takesArguments => false;
/// The loaded package graph.
PackageGraph _graph;
/// The [StringBuffer] used to accumulate the output.
StringBuffer _buffer;
DepsCommand() {
argParser.addOption(
"style",
abbr: "s",
help: "How output should be displayed.",
allowed: ["compact", "tree", "list"],
defaultsTo: "tree");
}
Future run() {
return entrypoint.loadPackageGraph().then((graph) {
_graph = graph;
_buffer = new StringBuffer();
_buffer.writeln(_labelPackage(entrypoint.root));
switch (argResults["style"]) {
case "compact":
_outputCompact();
break;
case "list":
_outputList();
break;
case "tree":
_outputTree();
break;
}
log.message(_buffer);
});
}
/// Outputs a list of all of the package's immediate, dev, override, and
/// transitive dependencies.
///
/// For each dependency listed, *that* package's immediate dependencies are
/// shown. Unlike [_outputList], this prints all of these dependencies on one
/// line.
void _outputCompact() {
var root = entrypoint.root;
_outputCompactPackages(
"dependencies",
root.dependencies.map((dep) => dep.name));
_outputCompactPackages(
"dev dependencies",
root.devDependencies.map((dep) => dep.name));
_outputCompactPackages(
"dependency overrides",
root.dependencyOverrides.map((dep) => dep.name));
var transitive = _getTransitiveDependencies();
_outputCompactPackages("transitive dependencies", transitive);
}
/// Outputs one section of packages in the compact output.
_outputCompactPackages(String section, Iterable<String> names) {
if (names.isEmpty) return;
_buffer.writeln();
_buffer.writeln("$section:");
for (var name in ordered(names)) {
var package = _graph.packages[name];
_buffer.write("- ${_labelPackage(package)}");
if (package.dependencies.isEmpty) {
_buffer.writeln();
} else {
var depNames = package.dependencies.map((dep) => dep.name);
var depsList = "[${depNames.join(' ')}]";
_buffer.writeln(" ${log.gray(depsList)}");
}
}
}
/// Outputs a list of all of the package's immediate, dev, override, and
/// transitive dependencies.
///
/// For each dependency listed, *that* package's immediate dependencies are
/// shown.
void _outputList() {
var root = entrypoint.root;
_outputListSection(
"dependencies",
root.dependencies.map((dep) => dep.name));
_outputListSection(
"dev dependencies",
root.devDependencies.map((dep) => dep.name));
_outputListSection(
"dependency overrides",
root.dependencyOverrides.map((dep) => dep.name));
var transitive = _getTransitiveDependencies();
if (transitive.isEmpty) return;
_outputListSection("transitive dependencies", ordered(transitive));
}
/// Outputs one section of packages in the list output.
_outputListSection(String name, Iterable<String> deps) {
if (deps.isEmpty) return;
_buffer.writeln();
_buffer.writeln("$name:");
for (var name in deps) {
var package = _graph.packages[name];
_buffer.writeln("- ${_labelPackage(package)}");
for (var dep in package.dependencies) {
_buffer.writeln(
" - ${log.bold(dep.name)} ${log.gray(dep.constraint)}");
}
}
}
/// Generates a dependency tree for the root package.
///
/// If a package is encountered more than once (i.e. a shared or circular
/// dependency), later ones are not traversed. This is done in breadth-first
/// fashion so that a package will always be expanded at the shallowest
/// depth that it appears at.
void _outputTree() {
// The work list for the breadth-first traversal. It contains the package
// being added to the tree, and the parent map that will receive that
// package.
var toWalk = new Queue<Pair<Package, Map>>();
var visited = new Set<String>();
// Start with the root dependencies.
var packageTree = {};
for (var dep in entrypoint.root.immediateDependencies) {
toWalk.add(new Pair(_graph.packages[dep.name], packageTree));
}
// Do a breadth-first walk of the dependency graph.
while (toWalk.isNotEmpty) {
var pair = toWalk.removeFirst();
var package = pair.first;
var map = pair.last;
if (visited.contains(package.name)) {
map[log.gray('${package.name}...')] = {};
continue;
}
visited.add(package.name);
// Populate the map with this package's dependencies.
var childMap = {};
map[_labelPackage(package)] = childMap;
for (var dep in package.dependencies) {
toWalk.add(new Pair(_graph.packages[dep.name], childMap));
}
}
_buffer.write(tree.fromMap(packageTree, showAllChildren: true));
}
String _labelPackage(Package package) =>
"${log.bold(package.name)} ${package.version}";
/// Gets the names of the non-immediate dependencies of the root package.
Set<String> _getTransitiveDependencies() {
var transitive = _graph.packages.keys.toSet();
var root = entrypoint.root;
transitive.remove(root.name);
transitive.removeAll(root.dependencies.map((dep) => dep.name));
transitive.removeAll(root.devDependencies.map((dep) => dep.name));
transitive.removeAll(root.dependencyOverrides.map((dep) => dep.name));
return transitive;
}
}

View file

@ -1,68 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.downgrade;
import 'dart:async';
import '../command.dart';
import '../log.dart' as log;
import '../solver/version_solver.dart';
/// Handles the `downgrade` pub command.
class DowngradeCommand extends PubCommand {
String get name => "downgrade";
String get description =>
"Downgrade the current package's dependencies to oldest versions.\n\n"
"This doesn't modify the lockfile, so it can be reset with \"pub get\".";
String get invocation => "pub downgrade [dependencies...]";
bool get isOffline => argResults['offline'];
DowngradeCommand() {
argParser.addFlag(
'offline',
help: 'Use cached packages instead of accessing the network.');
argParser.addFlag(
'dry-run',
abbr: 'n',
negatable: false,
help: "Report what dependencies would change but don't change any.");
}
Future run() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var dryRun = argResults['dry-run'];
new Future.value(
entrypoint.acquireDependencies(
SolveType.DOWNGRADE,
useLatest: argResults.rest,
dryRun: dryRun)).then((x0) {
try {
x0;
join0() {
completer0.complete();
}
if (isOffline) {
log.warning(
"Warning: Downgrading when offline may not update you to "
"the oldest versions of your dependencies.");
join0();
} else {
join0();
}
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
}
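
The same method written with native async/await, which is the direction this commit takes pub (a sketch of the equivalent logic, not necessarily the exact new source):

Future run() async {
  var dryRun = argResults['dry-run'];
  await entrypoint.acquireDependencies(SolveType.DOWNGRADE,
      useLatest: argResults.rest, dryRun: dryRun);
  if (isOffline) {
    log.warning("Warning: Downgrading when offline may not update you to "
        "the oldest versions of your dependencies.");
  }
}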

View file

@ -1,38 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.get;
import 'dart:async';
import '../command.dart';
import '../solver/version_solver.dart';
/// Handles the `get` pub command.
class GetCommand extends PubCommand {
String get name => "get";
String get description => "Get the current package's dependencies.";
String get invocation => "pub get";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-get.html";
List<String> get aliases => const ["install"];
bool get isOffline => argResults["offline"];
GetCommand() {
argParser.addFlag(
'offline',
help: 'Use cached packages instead of accessing the network.');
argParser.addFlag(
'dry-run',
abbr: 'n',
negatable: false,
help: "Report what dependencies would change but don't change any.");
}
Future run() {
return entrypoint.acquireDependencies(
SolveType.GET,
dryRun: argResults['dry-run']);
}
}

View file

@ -1,25 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.global;
import '../command.dart';
import 'global_activate.dart';
import 'global_deactivate.dart';
import 'global_list.dart';
import 'global_run.dart';
/// Handles the `global` pub command.
class GlobalCommand extends PubCommand {
String get name => "global";
String get description => "Work with global packages.";
String get invocation => "pub global <subcommand>";
GlobalCommand() {
addSubcommand(new GlobalActivateCommand());
addSubcommand(new GlobalDeactivateCommand());
addSubcommand(new GlobalListCommand());
addSubcommand(new GlobalRunCommand());
}
}

View file

@ -1,117 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.global_activate;
import 'dart:async';
import 'package:pub_semver/pub_semver.dart';
import '../command.dart';
import '../utils.dart';
/// Handles the `global activate` pub command.
class GlobalActivateCommand extends PubCommand {
String get name => "activate";
String get description => "Make a package's executables globally available.";
String get invocation => "pub global activate <package...>";
GlobalActivateCommand() {
argParser.addOption(
"source",
abbr: "s",
help: "The source used to find the package.",
allowed: ["git", "hosted", "path"],
defaultsTo: "hosted");
argParser.addFlag(
"no-executables",
negatable: false,
help: "Do not put executables on PATH.");
argParser.addOption(
"executable",
abbr: "x",
help: "Executable(s) to place on PATH.",
allowMultiple: true);
argParser.addFlag(
"overwrite",
negatable: false,
help: "Overwrite executables from other packages with the same name.");
}
Future run() {
// Default to `null`, which means all executables.
var executables;
if (argResults.wasParsed("executable")) {
if (argResults.wasParsed("no-executables")) {
usageException("Cannot pass both --no-executables and --executable.");
}
executables = argResults["executable"];
} else if (argResults["no-executables"]) {
// An empty list means no executables.
executables = [];
}
var overwrite = argResults["overwrite"];
var args = argResults.rest;
readArg([String error]) {
if (args.isEmpty) usageException(error);
var arg = args.first;
args = args.skip(1);
return arg;
}
validateNoExtraArgs() {
if (args.isEmpty) return;
var unexpected = args.map((arg) => '"$arg"');
var arguments = pluralize("argument", unexpected.length);
usageException("Unexpected $arguments ${toSentence(unexpected)}.");
}
switch (argResults["source"]) {
case "git":
var repo = readArg("No Git repository given.");
// TODO(rnystrom): Allow passing in a Git ref too.
validateNoExtraArgs();
return globals.activateGit(
repo,
executables,
overwriteBinStubs: overwrite);
case "hosted":
var package = readArg("No package to activate given.");
// Parse the version constraint, if there is one.
var constraint = VersionConstraint.any;
if (args.isNotEmpty) {
try {
constraint = new VersionConstraint.parse(readArg());
} on FormatException catch (error) {
usageException(error.message);
}
}
validateNoExtraArgs();
return globals.activateHosted(
package,
constraint,
executables,
overwriteBinStubs: overwrite);
case "path":
var path = readArg("No package to activate given.");
validateNoExtraArgs();
return globals.activatePath(
path,
executables,
overwriteBinStubs: overwrite);
}
throw "unreachable";
}
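  // Example invocations for each supported source (package names, constraints,
  // and URLs are illustrative):
  //
  //   pub global activate markdown             # hosted, any version
  //   pub global activate markdown ">=0.6.0"   # hosted, with a constraint
  //   pub global activate -s git https://github.com/user/repo.git
  //   pub global activate -s path ./foo --executable bar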
}

View file

@ -1,36 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.global_deactivate;
import 'dart:async';
import '../command.dart';
import '../log.dart' as log;
import '../utils.dart';
/// Handles the `global deactivate` pub command.
class GlobalDeactivateCommand extends PubCommand {
String get name => "deactivate";
String get description => "Remove a previously activated package.";
String get invocation => "pub global deactivate <package>";
void run() {
// Make sure there is a package.
if (argResults.rest.isEmpty) {
usageException("No package to deactivate given.");
}
// Don't allow extra arguments.
if (argResults.rest.length > 1) {
var unexpected = argResults.rest.skip(1).map((arg) => '"$arg"');
var arguments = pluralize("argument", unexpected.length);
usageException("Unexpected $arguments ${toSentence(unexpected)}.");
}
if (!globals.deactivate(argResults.rest.first)) {
dataError("No active package ${log.bold(argResults.rest.first)}.");
}
}
}

View file

@ -1,22 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.global_list;
import 'dart:async';
import '../command.dart';
/// Handles the `global list` pub command.
class GlobalListCommand extends PubCommand {
String get name => "list";
String get description => 'List globally activated packages.';
String get invocation => 'pub global list';
bool get allowTrailingOptions => false;
bool get takesArguments => false;
void run() {
globals.listActivePackages();
}
}

View file

@ -1,92 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.global_run;
import 'dart:async';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as p;
import '../command.dart';
import '../io.dart';
import '../utils.dart';
/// Handles the `global run` pub command.
class GlobalRunCommand extends PubCommand {
String get name => "run";
String get description =>
"Run an executable from a globally activated package.\n"
"NOTE: We are currently optimizing this command's startup time.";
String get invocation => "pub global run <package>:<executable> [args...]";
bool get allowTrailingOptions => false;
/// The mode for barback transformers.
BarbackMode get mode => new BarbackMode(argResults["mode"]);
GlobalRunCommand() {
argParser.addOption(
"mode",
defaultsTo: "release",
help: 'Mode to run transformers in.');
}
Future run() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
var package;
var executable = argResults.rest[0];
join1() {
var args = argResults.rest.skip(1).toList();
join2() {
new Future.value(
globals.runExecutable(package, executable, args, mode: mode)).then((x0) {
try {
var exitCode = x0;
new Future.value(flushThenExit(exitCode)).then((x1) {
try {
x1;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
}
if (p.split(executable).length > 1) {
usageException(
'Cannot run an executable in a subdirectory of a global ' + 'package.');
join2();
} else {
join2();
}
}
if (executable.contains(":")) {
var parts = split1(executable, ":");
package = parts[0];
executable = parts[1];
join1();
} else {
package = executable;
join1();
}
}
if (argResults.rest.isEmpty) {
usageException("Must specify an executable to run.");
join0();
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
}

View file

@ -1,200 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.lish;
import 'dart:async';
import 'package:http/http.dart' as http;
import '../command.dart';
import '../ascii_tree.dart' as tree;
import '../http.dart';
import '../io.dart';
import '../log.dart' as log;
import '../oauth2.dart' as oauth2;
import '../source/hosted.dart';
import '../utils.dart';
import '../validator.dart';
/// Handles the `lish` and `publish` pub commands.
class LishCommand extends PubCommand {
String get name => "publish";
String get description => "Publish the current package to pub.dartlang.org.";
String get invocation => "pub publish [options]";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-lish.html";
List<String> get aliases => const ["lish", "lush"];
bool get takesArguments => false;
/// The URL of the server to which to upload the package.
Uri get server {
// An explicit argument takes precedence.
if (argResults.wasParsed('server')) {
return Uri.parse(argResults['server']);
}
// Otherwise, use the one specified in the pubspec.
if (entrypoint.root.pubspec.publishTo != null) {
return Uri.parse(entrypoint.root.pubspec.publishTo);
}
// Otherwise, use the default.
return Uri.parse(HostedSource.defaultUrl);
}
/// Whether the publish is just a preview.
bool get dryRun => argResults['dry-run'];
/// Whether the publish requires confirmation.
bool get force => argResults['force'];
LishCommand() {
argParser.addFlag(
'dry-run',
abbr: 'n',
negatable: false,
help: 'Validate but do not publish the package.');
argParser.addFlag(
'force',
abbr: 'f',
negatable: false,
help: 'Publish without confirmation if there are no errors.');
argParser.addOption(
'server',
defaultsTo: HostedSource.defaultUrl,
help: 'The package server to which to upload this package.');
}
Future _publish(packageBytes) {
var cloudStorageUrl;
return oauth2.withClient(cache, (client) {
return log.progress('Uploading', () {
// TODO(nweiz): Cloud Storage can provide an XML-formatted error. We
// should report that error and exit.
var newUri = server.resolve("/api/packages/versions/new");
return client.get(newUri, headers: PUB_API_HEADERS).then((response) {
var parameters = parseJsonResponse(response);
var url = _expectField(parameters, 'url', response);
if (url is! String) invalidServerResponse(response);
cloudStorageUrl = Uri.parse(url);
var request = new http.MultipartRequest('POST', cloudStorageUrl);
request.headers['Pub-Request-Timeout'] = 'None';
var fields = _expectField(parameters, 'fields', response);
if (fields is! Map) invalidServerResponse(response);
fields.forEach((key, value) {
if (value is! String) invalidServerResponse(response);
request.fields[key] = value;
});
request.followRedirects = false;
request.files.add(
new http.MultipartFile.fromBytes(
'file',
packageBytes,
filename: 'package.tar.gz'));
return client.send(request);
}).then(http.Response.fromStream).then((response) {
var location = response.headers['location'];
if (location == null) throw new PubHttpException(response);
return location;
}).then(
(location) =>
client.get(location, headers: PUB_API_HEADERS)).then(handleJsonSuccess);
});
}).catchError((error) {
if (error is! PubHttpException) throw error;
var url = error.response.request.url;
if (urisEqual(url, cloudStorageUrl)) {
// TODO(nweiz): the response may have XML-formatted information about
// the error. Try to parse that out once we have an easily-accessible
// XML parser.
fail('Failed to upload the package.');
} else if (urisEqual(Uri.parse(url.origin), Uri.parse(server.origin))) {
handleJsonError(error.response);
} else {
throw error;
}
});
}
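  // The upload handshake above, in outline (a sketch of the flow as
  // implemented here, not a specification of the server API):
  //
  //   1. GET  <server>/api/packages/versions/new        -> {url, fields}
  //   2. POST <url> as multipart: the returned fields plus package.tar.gz,
  //      with redirects disabled
  //   3. GET  the Location header returned by step 2    -> JSON success/error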
Future run() {
if (force && dryRun) {
usageException('Cannot use both --force and --dry-run.');
}
if (entrypoint.root.pubspec.isPrivate) {
dataError(
'A private package cannot be published.\n'
'You can enable this by changing the "publish_to" field in your ' 'pubspec.');
}
var files = entrypoint.root.listFiles(useGitIgnore: true);
log.fine('Archiving and publishing ${entrypoint.root}.');
// Show the package contents so the user can verify they look OK.
var package = entrypoint.root;
log.message(
'Publishing ${package.name} ${package.version} to $server:\n'
'${tree.fromFiles(files, baseDir: entrypoint.root.dir)}');
var packageBytesFuture =
createTarGz(files, baseDir: entrypoint.root.dir).toBytes();
// Validate the package.
return _validate(
packageBytesFuture.then((bytes) => bytes.length)).then((isValid) {
if (isValid) return packageBytesFuture.then(_publish);
});
}
/// Returns the value associated with [key] in [map]. Throws a user-friendly
  /// error if [map] doesn't contain [key].
_expectField(Map map, String key, http.Response response) {
if (map.containsKey(key)) return map[key];
invalidServerResponse(response);
}
/// Validates the package. Completes to false if the upload should not
/// proceed.
Future<bool> _validate(Future<int> packageSize) {
return Validator.runAll(entrypoint, packageSize).then((pair) {
var errors = pair.first;
var warnings = pair.last;
if (!errors.isEmpty) {
log.error(
"Sorry, your package is missing "
"${(errors.length > 1) ? 'some requirements' : 'a requirement'} "
"and can't be published yet.\nFor more information, see: "
"http://pub.dartlang.org/doc/pub-lish.html.\n");
return false;
}
if (force) return true;
if (dryRun) {
var s = warnings.length == 1 ? '' : 's';
log.warning("\nPackage has ${warnings.length} warning$s.");
return false;
}
var message = '\nLooks great! Are you ready to upload your package';
if (!warnings.isEmpty) {
var s = warnings.length == 1 ? '' : 's';
message = "\nPackage has ${warnings.length} warning$s. Upload anyway";
}
return confirm(message).then((confirmed) {
if (!confirmed) {
log.error("Package upload canceled.");
return false;
}
return true;
});
});
}
}

View file

@ -1,63 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.list_package_dirs;
import 'dart:async';
import 'package:path/path.dart' as path;
import '../command.dart';
import '../log.dart' as log;
import '../utils.dart';
/// Handles the `list-package-dirs` pub command.
class ListPackageDirsCommand extends PubCommand {
String get name => "list-package-dirs";
String get description => "Print local paths to dependencies.";
String get invocation => "pub list-package-dirs";
bool get takesArguments => false;
bool get hidden => true;
ListPackageDirsCommand() {
argParser.addOption(
"format",
help: "How output should be displayed.",
allowed: ["json"]);
}
Future run() {
log.json.enabled = true;
if (!entrypoint.lockFileExists) {
dataError('Package "myapp" has no lockfile. Please run "pub get" first.');
}
var output = {};
// Include the local paths to all locked packages.
var packages = {};
var futures = [];
entrypoint.lockFile.packages.forEach((name, package) {
var source = entrypoint.cache.sources[package.source];
futures.add(source.getDirectory(package).then((packageDir) {
packages[name] = path.join(packageDir, "lib");
}));
});
output["packages"] = packages;
// Include the self link.
packages[entrypoint.root.name] = entrypoint.root.path("lib");
// Include the file(s) which when modified will affect the results. For pub,
// that's just the pubspec and lockfile.
output["input_files"] = [entrypoint.lockFilePath, entrypoint.pubspecPath];
return Future.wait(futures).then((_) {
log.json.message(output);
});
}
}

View file

@ -1,107 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.run;
import 'dart:async';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as p;
import '../command.dart';
import '../executable.dart';
import '../io.dart';
import '../utils.dart';
/// Handles the `run` pub command.
class RunCommand extends PubCommand {
String get name => "run";
String get description =>
"Run an executable from a package.\n"
"NOTE: We are currently optimizing this command's startup time.";
String get invocation => "pub run <executable> [args...]";
bool get allowTrailingOptions => false;
RunCommand() {
argParser.addOption(
"mode",
help: 'Mode to run transformers in.\n'
'(defaults to "release" for dependencies, "debug" for ' 'entrypoint)');
}
Future run() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
var package = entrypoint.root.name;
var executable = argResults.rest[0];
var args = argResults.rest.skip(1).toList();
join1() {
var mode;
join2() {
new Future.value(
runExecutable(entrypoint, package, executable, args, mode: mode)).then((x0) {
try {
var exitCode = x0;
new Future.value(flushThenExit(exitCode)).then((x1) {
try {
x1;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
}
if (argResults['mode'] != null) {
mode = new BarbackMode(argResults['mode']);
join2();
} else {
join3() {
join2();
}
if (package == entrypoint.root.name) {
mode = BarbackMode.DEBUG;
join3();
} else {
mode = BarbackMode.RELEASE;
join3();
}
}
}
if (executable.contains(":")) {
var components = split1(executable, ":");
package = components[0];
executable = components[1];
join4() {
join1();
}
if (p.split(executable).length > 1) {
usageException(
"Cannot run an executable in a subdirectory of a " + "dependency.");
join4();
} else {
join4();
}
} else {
join1();
}
}
if (argResults.rest.isEmpty) {
usageException("Must specify an executable to run.");
join0();
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
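  // The nested join/continuation blocks above encode straightforward control
  // flow. In particular, the barback mode selection (a sketch, assuming
  // native async/await) is simply:
  //
  //   var mode;
  //   if (argResults['mode'] != null) {
  //     mode = new BarbackMode(argResults['mode']);
  //   } else if (package == entrypoint.root.name) {
  //     mode = BarbackMode.DEBUG;   // running the entrypoint's own executable
  //   } else {
  //     mode = BarbackMode.RELEASE; // running an executable from a dependency
  //   }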
}

View file

@ -1,264 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.serve;
import 'dart:async';
import 'dart:math' as math;
import 'package:barback/barback.dart';
import '../barback/asset_environment.dart';
import '../barback/pub_package_provider.dart';
import '../log.dart' as log;
import '../utils.dart';
import 'barback.dart';
final _arrow = getSpecial('\u2192', '=>');
/// Handles the `serve` pub command.
class ServeCommand extends BarbackCommand {
String get name => "serve";
String get description =>
'Run a local web development server.\n\n'
'By default, this serves "web/" and "test/", but an explicit list of \n'
'directories to serve can be provided as well.';
String get invocation => "pub serve [directories...]";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-serve.html";
PubPackageProvider _provider;
String get hostname => argResults['hostname'];
/// The base port for the servers.
///
/// This will print a usage error and exit if the specified port is invalid.
int get port => parseInt(argResults['port'], 'port');
/// The port for the admin UI.
///
/// This will print a usage error and exit if the specified port is invalid.
int get adminPort {
var adminPort = argResults['admin-port'];
return adminPort == null ? null : parseInt(adminPort, 'admin port');
}
/// `true` if Dart entrypoints should be compiled to JavaScript.
bool get useDart2JS => argResults['dart2js'];
/// `true` if the admin server URL should be displayed on startup.
bool get logAdminUrl => argResults['log-admin-url'];
BarbackMode get defaultMode => BarbackMode.DEBUG;
List<String> get defaultSourceDirectories => ["web", "test"];
/// This completer is used to keep pub running (by not completing) and to
/// pipe fatal errors to pub's top-level error-handling machinery.
final _completer = new Completer();
ServeCommand() {
argParser.addOption(
'hostname',
defaultsTo: 'localhost',
help: 'The hostname to listen on.');
argParser.addOption(
'port',
defaultsTo: '8080',
help: 'The base port to listen on.');
// TODO(rnystrom): A hidden option to print the URL that the admin server
// is bound to on startup. Since this is currently only used for the Web
// Socket interface, we don't want to show it to users, but the tests and
// Editor need this logged to know what port to bind to.
// Remove this (and always log) when #16954 is fixed.
argParser.addFlag('log-admin-url', defaultsTo: false, hide: true);
// TODO(nweiz): Make this public when issue 16954 is fixed.
argParser.addOption('admin-port', hide: true);
argParser.addFlag(
'dart2js',
defaultsTo: true,
help: 'Compile Dart to JavaScript.');
argParser.addFlag(
'force-poll',
defaultsTo: false,
help: 'Force the use of a polling filesystem watcher.');
}
Future onRunTransformerCommand() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var port = parseInt(argResults['port'], 'port');
join0(x0) {
var adminPort = x0;
join1(x1) {
var watcherType = x1;
new Future.value(
AssetEnvironment.create(
entrypoint,
mode,
watcherType: watcherType,
hostname: hostname,
basePort: port,
useDart2JS: useDart2JS)).then((x2) {
try {
var environment = x2;
var directoryLength = sourceDirectories.map(((dir) {
return dir.length;
})).reduce(math.max);
new Future.value(
environment.startAdminServer(adminPort)).then((x3) {
try {
var server = x3;
server.results.listen(((_) {
assert(false);
}), onError: _fatalError);
join2() {
environment.pauseUpdates();
var it0 = sourceDirectories.iterator;
break0() {
environment.barback.errors.listen(((error) {
log.error(log.red("Build error:\n$error"));
}));
environment.barback.results.listen(((result) {
if (result.succeeded) {
log.message(
"Build completed ${log.green('successfully')}");
} else {
log.message(
"Build completed with " "${log.red(result.errors.length)} errors.");
}
}), onError: _fatalError);
environment.resumeUpdates();
new Future.value(_completer.future).then((x4) {
try {
x4;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
var trampoline0;
continue0() {
trampoline0 = null;
if (it0.moveNext()) {
var directory = it0.current;
new Future.value(
_startServer(environment, directory, directoryLength)).then((x5) {
trampoline0 = () {
trampoline0 = null;
try {
x5;
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
};
do trampoline0(); while (trampoline0 != null);
}, onError: completer0.completeError);
} else {
break0();
}
}
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
}
if (logAdminUrl) {
log.message(
"Running admin server on " "${log.bold('http://${hostname}:${server.port}')}");
join2();
} else {
join2();
}
} catch (e2, s2) {
completer0.completeError(e2, s2);
}
}, onError: completer0.completeError);
} catch (e3, s3) {
completer0.completeError(e3, s3);
}
}, onError: completer0.completeError);
}
if (argResults['force-poll']) {
join1(WatcherType.POLLING);
} else {
join1(WatcherType.AUTO);
}
}
if (argResults['admin-port'] == null) {
join0(null);
} else {
join0(parseInt(argResults['admin-port'], 'admin port'));
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
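  // The trampoline0/continue0 machinery above is the compiled form of a plain
  // loop over the source directories. A hand-written sketch (assuming native
  // async/await, not the actual replacement source):
  //
  //   environment.pauseUpdates();
  //   for (var directory in sourceDirectories) {
  //     await _startServer(environment, directory, directoryLength);
  //   }
  //   // ...subscribe to barback errors and build results...
  //   environment.resumeUpdates();
  //   await _completer.future;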
Future _startServer(AssetEnvironment environment, String rootDirectory,
int directoryLength) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
new Future.value(environment.serveDirectory(rootDirectory)).then((x0) {
try {
var server = x0;
join0() {
var prefix =
log.gray(padRight("[${server.rootDirectory}]", directoryLength + 2));
server.results.listen(((result) {
var buffer = new StringBuffer();
buffer.write("$prefix ");
if (result.isSuccess) {
buffer.write(
"${log.green('GET')} ${result.url.path} $_arrow ${result.id}");
} else {
buffer.write("${log.red('GET')} ${result.url.path} $_arrow");
var error = result.error.toString();
if (error.contains("\n")) {
buffer.write("\n${prefixLines(error)}");
} else {
buffer.write(" $error");
}
}
log.message(buffer);
}), onError: _fatalError);
log.message(
"Serving ${entrypoint.root.name} "
"${padRight(server.rootDirectory, directoryLength)} "
"on ${log.bold('http://${hostname}:${server.port}')}");
completer0.complete();
}
if (mode == BarbackMode.RELEASE) {
server.allowAsset = ((url) {
return !url.path.endsWith(".dart");
});
join0();
} else {
join0();
}
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Reports [error] and exits the server.
void _fatalError(error, [stackTrace]) {
if (_completer.isCompleted) return;
_completer.completeError(error, stackTrace);
}
}

View file

@ -1,69 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.upgrade;
import 'dart:async';
import '../command.dart';
import '../log.dart' as log;
import '../solver/version_solver.dart';
/// Handles the `upgrade` pub command.
class UpgradeCommand extends PubCommand {
String get name => "upgrade";
String get description =>
"Upgrade the current package's dependencies to latest versions.";
String get invocation => "pub upgrade [dependencies...]";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-upgrade.html";
List<String> get aliases => const ["update"];
bool get isOffline => argResults['offline'];
UpgradeCommand() {
argParser.addFlag(
'offline',
help: 'Use cached packages instead of accessing the network.');
argParser.addFlag(
'dry-run',
abbr: 'n',
negatable: false,
help: "Report what dependencies would change but don't change any.");
}
Future run() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var dryRun = argResults['dry-run'];
new Future.value(
entrypoint.acquireDependencies(
SolveType.UPGRADE,
useLatest: argResults.rest,
dryRun: dryRun)).then((x0) {
try {
x0;
join0() {
completer0.complete();
}
if (isOffline) {
log.warning(
"Warning: Upgrading when offline may not update you to the "
"latest versions of your dependencies.");
join0();
} else {
join0();
}
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
}

View file

@ -1,88 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.uploader;
import 'dart:async';
import 'package:path/path.dart' as path;
import '../command.dart';
import '../entrypoint.dart';
import '../exit_codes.dart' as exit_codes;
import '../http.dart';
import '../io.dart';
import '../log.dart' as log;
import '../oauth2.dart' as oauth2;
import '../source/hosted.dart';
/// Handles the `uploader` pub command.
class UploaderCommand extends PubCommand {
String get name => "uploader";
String get description =>
"Manage uploaders for a package on pub.dartlang.org.";
String get invocation => "pub uploader [options] {add/remove} <email>";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-uploader.html";
/// The URL of the package hosting server.
Uri get server => Uri.parse(argResults['server']);
UploaderCommand() {
argParser.addOption(
'server',
defaultsTo: HostedSource.defaultUrl,
help: 'The package server on which the package is hosted.');
argParser.addOption(
'package',
help: 'The package whose uploaders will be modified.\n'
'(defaults to the current package)');
}
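  // Example invocations (the email addresses and package name are
  // illustrative):
  //
  //   pub uploader add alice@example.com
  //   pub uploader --package foo remove bob@example.com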
Future run() {
if (argResults.rest.isEmpty) {
log.error('No uploader command given.');
this.printUsage();
return flushThenExit(exit_codes.USAGE);
}
var rest = argResults.rest.toList();
// TODO(rnystrom): Use subcommands for these.
var command = rest.removeAt(0);
if (!['add', 'remove'].contains(command)) {
log.error('Unknown uploader command "$command".');
this.printUsage();
return flushThenExit(exit_codes.USAGE);
} else if (rest.isEmpty) {
log.error('No uploader given for "pub uploader $command".');
this.printUsage();
return flushThenExit(exit_codes.USAGE);
}
return new Future.sync(() {
var package = argResults['package'];
if (package != null) return package;
return new Entrypoint(path.current, cache).root.name;
}).then((package) {
var uploader = rest[0];
return oauth2.withClient(cache, (client) {
if (command == 'add') {
var url =
server.resolve("/api/packages/" "${Uri.encodeComponent(package)}/uploaders");
return client.post(url, headers: PUB_API_HEADERS, body: {
"email": uploader
});
} else { // command == 'remove'
var url = server.resolve(
"/api/packages/" "${Uri.encodeComponent(package)}/uploaders/"
"${Uri.encodeComponent(uploader)}");
return client.delete(url, headers: PUB_API_HEADERS);
}
});
}).then(
handleJsonSuccess).catchError(
(error) => handleJsonError(error.response),
test: (e) => e is PubHttpException);
}
}

View file

@ -1,20 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.version;
import '../command.dart';
import '../log.dart' as log;
import '../sdk.dart' as sdk;
/// Handles the `version` pub command.
class VersionCommand extends PubCommand {
String get name => "version";
String get description => "Print pub version.";
String get invocation => "pub version";
void run() {
log.message("Pub ${sdk.version}");
}
}

View file

@ -1,339 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command_runner;
import 'dart:async';
import 'dart:io';
import 'package:args/args.dart';
import 'package:args/command_runner.dart';
import 'package:http/http.dart' as http;
import 'package:stack_trace/stack_trace.dart';
import 'command/build.dart';
import 'command/cache.dart';
import 'command/deps.dart';
import 'command/downgrade.dart';
import 'command/get.dart';
import 'command/global.dart';
import 'command/lish.dart';
import 'command/list_package_dirs.dart';
import 'command/run.dart';
import 'command/serve.dart';
import 'command/upgrade.dart';
import 'command/uploader.dart';
import 'command/version.dart';
import 'exceptions.dart';
import 'exit_codes.dart' as exit_codes;
import 'http.dart';
import 'io.dart';
import 'log.dart' as log;
import 'sdk.dart' as sdk;
import 'solver/version_solver.dart';
import 'utils.dart';
class PubCommandRunner extends CommandRunner {
String get usageFooter =>
"See http://dartlang.org/tools/pub for detailed " "documentation.";
PubCommandRunner()
: super("pub", "Pub is a package manager for Dart.") {
argParser.addFlag('version', negatable: false, help: 'Print pub version.');
argParser.addFlag(
'trace',
help: 'Print debugging information when an error occurs.');
argParser.addOption(
'verbosity',
help: 'Control output verbosity.',
allowed: ['normal', 'io', 'solver', 'all'],
allowedHelp: {
'normal': 'Show errors, warnings, and user messages.',
'io': 'Also show IO operations.',
'solver': 'Show steps during version resolution.',
'all': 'Show all output including internal tracing messages.'
});
argParser.addFlag(
'verbose',
abbr: 'v',
negatable: false,
help: 'Shortcut for "--verbosity=all".');
argParser.addFlag(
'with-prejudice',
hide: !isAprilFools,
negatable: false,
help: 'Execute commands with prejudice.');
argParser.addFlag(
'package-symlinks',
hide: true,
negatable: true,
defaultsTo: true);
addCommand(new BuildCommand());
addCommand(new CacheCommand());
addCommand(new DepsCommand());
addCommand(new DowngradeCommand());
addCommand(new GlobalCommand());
addCommand(new GetCommand());
addCommand(new ListPackageDirsCommand());
addCommand(new LishCommand());
addCommand(new RunCommand());
addCommand(new ServeCommand());
addCommand(new UpgradeCommand());
addCommand(new UploaderCommand());
addCommand(new VersionCommand());
}
Future run(List<String> arguments) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var options;
join0() {
new Future.value(runCommand(options)).then((x0) {
try {
x0;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
catch0(error, s1) {
try {
if (error is UsageException) {
log.error(error.message);
new Future.value(flushThenExit(exit_codes.USAGE)).then((x1) {
try {
x1;
join0();
} catch (e1, s2) {
completer0.completeError(e1, s2);
}
}, onError: completer0.completeError);
} else {
throw error;
}
} catch (error, s1) {
completer0.completeError(error, s1);
}
}
try {
options = super.parse(arguments);
join0();
} catch (e2, s3) {
catch0(e2, s3);
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
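  // A hand-written sketch of the generated body above (assuming native
  // async/await, not the actual replacement source):
  //
  //   Future run(List<String> arguments) async {
  //     var options;
  //     try {
  //       options = super.parse(arguments);
  //     } on UsageException catch (error) {
  //       log.error(error.message);
  //       await flushThenExit(exit_codes.USAGE);
  //     }
  //     await runCommand(options);
  //   }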
Future runCommand(ArgResults options) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
log.withPrejudice = options['with-prejudice'];
join0() {
join1() {
break0() {
log.fine('Pub ${sdk.version}');
new Future.value(_validatePlatform()).then((x0) {
try {
x0;
var captureStackChains =
options['trace'] ||
options['verbose'] ||
options['verbosity'] == 'all';
join2() {
completer0.complete();
}
catch0(error, chain) {
try {
log.exception(error, chain);
join3() {
new Future.value(
flushThenExit(_chooseExitCode(error))).then((x1) {
try {
x1;
join2();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
if (options['trace']) {
log.dumpTranscript();
join3();
} else {
join4() {
join3();
}
if (!isUserFacingException(error)) {
log.error("""
This is an unexpected error. Please run
pub --trace ${options.arguments.map(((arg) {
return "'$arg'";
})).join(' ')}
and include the results in a bug report on http://dartbug.com/new.
""");
join4();
} else {
join4();
}
}
} catch (error, chain) {
completer0.completeError(error, chain);
}
}
try {
new Future.value(captureErrors((() {
return super.runCommand(options);
}), captureStackChains: captureStackChains)).then((x2) {
try {
x2;
new Future.value(
flushThenExit(exit_codes.SUCCESS)).then((x3) {
try {
x3;
join2();
} catch (e1, s1) {
catch0(e1, s1);
}
}, onError: catch0);
} catch (e2, s2) {
catch0(e2, s2);
}
}, onError: catch0);
} catch (e3, s3) {
catch0(e3, s3);
}
} catch (e4, s4) {
completer0.completeError(e4, s4);
}
}, onError: completer0.completeError);
}
switch (options['verbosity']) {
case 'normal':
log.verbosity = log.Verbosity.NORMAL;
break0();
break;
case 'io':
log.verbosity = log.Verbosity.IO;
break0();
break;
case 'solver':
log.verbosity = log.Verbosity.SOLVER;
break0();
break;
case 'all':
log.verbosity = log.Verbosity.ALL;
break0();
break;
default:
join5() {
break0();
}
if (options['verbose']) {
log.verbosity = log.Verbosity.ALL;
join5();
} else {
join5();
}
break;
}
}
if (options['trace']) {
log.recordTranscript();
join1();
} else {
join1();
}
}
if (options['version']) {
log.message('Pub ${sdk.version}');
completer0.complete(null);
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
void printUsage() {
log.message(usage);
}
/// Returns the appropriate exit code for [exception], falling back on 1 if no
/// appropriate exit code could be found.
int _chooseExitCode(exception) {
while (exception is WrappedException) exception = exception.innerError;
if (exception is HttpException ||
exception is http.ClientException ||
exception is SocketException ||
exception is PubHttpException ||
exception is DependencyNotFoundException) {
return exit_codes.UNAVAILABLE;
} else if (exception is FormatException || exception is DataException) {
return exit_codes.DATA;
} else if (exception is UsageException) {
return exit_codes.USAGE;
} else {
return 1;
}
}
/// Checks that pub is running on a supported platform.
///
/// If it isn't, it prints an error message and exits. Completes when the
/// validation is done.
Future _validatePlatform() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
new Future.value(runProcess('ver', [])).then((x0) {
try {
var result = x0;
join1() {
completer0.complete();
}
if (result.stdout.join('\n').contains('XP')) {
log.error('Sorry, but pub is not supported on Windows XP.');
new Future.value(flushThenExit(exit_codes.USAGE)).then((x1) {
try {
x1;
join1();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} else {
join1();
}
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
}
if (Platform.operatingSystem != 'windows') {
completer0.complete(null);
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
}

View file

@ -1,323 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// A library for compiling Dart code and manipulating analyzer parse trees.
library pub.dart;
import 'dart:async';
import 'dart:io';
import 'dart:isolate';
import 'package:analyzer/analyzer.dart';
import 'package:path/path.dart' as path;
import '../../../../../../pkg/compiler/lib/compiler.dart' as compiler;
import '../../../../../../pkg/compiler/lib/src/filenames.dart' show appendSlash;
import '../../asset/dart/serialize.dart';
import 'io.dart';
import 'log.dart' as log;
/// Interface to communicate with dart2js.
///
/// This is basically an amalgamation of dart2js's
/// [compiler.CompilerInputProvider], [compiler.CompilerOutputProvider], and
/// [compiler.DiagnosticHandler] function types so that we can provide them
/// as a single unit.
abstract class CompilerProvider {
/// The URI to the root directory where "dart:" libraries can be found.
///
/// This is used as the base URL to generate library URLs that are then sent
/// back to [provideInput].
Uri get libraryRoot;
/// Given [uri], responds with a future that completes to the contents of
/// the input file at that URI.
///
/// The future can complete to a string or a list of bytes.
Future /*<String | List<int>>*/ provideInput(Uri uri);
/// Reports a diagnostic message from dart2js to the user.
void handleDiagnostic(Uri uri, int begin, int end, String message,
compiler.Diagnostic kind);
/// Given a [name] (which will be "" for the entrypoint) and a file extension,
/// returns an [EventSink] that dart2js can write to to emit an output file.
EventSink<String> provideOutput(String name, String extension);
}
/// Compiles [entrypoint] to JavaScript (or to Dart if [toDart] is true) as
/// well as any ancillary outputs dart2js creates.
///
/// Uses [provider] to communicate between dart2js and the caller. Returns a
/// future that completes when compilation is done.
///
/// By default, the package root is assumed to be adjacent to [entrypoint], but
/// if [packageRoot] is passed that will be used instead.
Future compile(String entrypoint, CompilerProvider provider,
{Iterable<String> commandLineOptions, bool checked: false, bool csp: false,
bool minify: true, bool verbose: false, Map<String, String> environment,
String packageRoot, bool analyzeAll: false, bool preserveUris: false,
bool suppressWarnings: false, bool suppressHints: false,
bool suppressPackageWarnings: true, bool terse: false,
bool includeSourceMapUrls: false, bool toDart: false}) {
return new Future.sync(() {
var options = <String>['--categories=Client,Server'];
if (checked) options.add('--enable-checked-mode');
if (csp) options.add('--csp');
if (minify) options.add('--minify');
if (verbose) options.add('--verbose');
if (analyzeAll) options.add('--analyze-all');
if (preserveUris) options.add('--preserve-uris');
if (suppressWarnings) options.add('--suppress-warnings');
if (suppressHints) options.add('--suppress-hints');
if (!suppressPackageWarnings) options.add('--show-package-warnings');
if (terse) options.add('--terse');
if (toDart) options.add('--output-type=dart');
var sourceUrl = path.toUri(entrypoint);
options.add("--out=$sourceUrl.js");
// Add the source map URLs.
if (includeSourceMapUrls) {
options.add("--source-map=$sourceUrl.js.map");
}
if (environment == null) environment = {};
if (commandLineOptions != null) options.addAll(commandLineOptions);
if (packageRoot == null) {
packageRoot = path.join(path.dirname(entrypoint), 'packages');
}
return compiler.compile(
path.toUri(entrypoint),
provider.libraryRoot,
path.toUri(appendSlash(packageRoot)),
provider.provideInput,
provider.handleDiagnostic,
options,
provider.provideOutput,
environment);
});
}
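// A minimal usage sketch. `ServerCompilerProvider` stands in for whatever
// [CompilerProvider] implementation the caller supplies; it is not part of
// this library:
//
//   var provider = new ServerCompilerProvider();
//   compile(path.join('web', 'main.dart'), provider,
//       checked: true, minify: false, includeSourceMapUrls: true)
//       .then((_) => log.message("dart2js finished."));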
/// Returns whether [dart] looks like an entrypoint file.
bool isEntrypoint(CompilationUnit dart) {
// Allow two or fewer arguments so that entrypoints intended for use with
// [spawnUri] get counted.
//
// TODO(nweiz): this misses the case where a Dart file doesn't contain main(),
// but it includes (via a `part` directive) another file that does.
return dart.declarations.any((node) {
return node is FunctionDeclaration &&
node.name.name == "main" &&
node.functionExpression.parameters.parameters.length <= 2;
});
}
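// Usage sketch (assumes the analyzer package's top-level
// `parseCompilationUnit` helper; variable names are illustrative):
//
//   var unit = parseCompilationUnit(contents, name: sourcePath);
//   if (isEntrypoint(unit)) {
//     // `contents` declares a main() that takes at most two arguments.
//   }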
/// Efficiently parses the import and export directives in [contents].
///
/// If [name] is passed, it's used as the filename for error reporting.
List<UriBasedDirective> parseImportsAndExports(String contents, {String name}) {
var collector = new _DirectiveCollector();
parseDirectives(contents, name: name).accept(collector);
return collector.directives;
}
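// Usage sketch (illustrative only):
//
//   for (var directive in parseImportsAndExports(contents, name: sourcePath)) {
//     log.fine("depends on ${directive.uri.stringValue}");
//   }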
/// A simple visitor that collects import and export nodes.
class _DirectiveCollector extends GeneralizingAstVisitor {
final directives = <UriBasedDirective>[];
visitUriBasedDirective(UriBasedDirective node) => directives.add(node);
}
/// Runs [code] in an isolate.
///
/// [code] should be the contents of a Dart entrypoint. It may contain imports;
/// they will be resolved in the same context as the host isolate. [message] is
/// passed to the [main] method of the code being run; the caller is responsible
/// for using this to establish communication with the isolate.
///
/// [packageRoot] controls the package root of the isolate. It may be either a
/// [String] or a [Uri].
///
/// If [snapshot] is passed, the isolate will be loaded from that path if it
/// exists. Otherwise, a snapshot of the isolate's code will be saved to that
/// path once the isolate is loaded.
Future runInIsolate(String code, message, {packageRoot, String snapshot}) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
new Future.value(withTempDir(((dir) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var dartPath = path.join(dir, 'runInIsolate.dart');
writeTextFile(dartPath, code, dontLogContents: true);
var port = new ReceivePort();
join0(x0) {
new Future.value(Isolate.spawn(_isolateBuffer, {
'replyTo': port.sendPort,
'uri': path.toUri(dartPath).toString(),
'packageRoot': x0,
'message': message
})).then((x1) {
try {
x1;
new Future.value(port.first).then((x2) {
try {
var response = x2;
join1() {
join2() {
ensureDir(path.dirname(snapshot));
var snapshotArgs = [];
join3() {
snapshotArgs.addAll(
['--snapshot=${snapshot}', dartPath]);
new Future.value(
runProcess(Platform.executable, snapshotArgs)).then((x3) {
try {
var result = x3;
join4() {
log.warning(
"Failed to compile a snapshot to " "${path.relative(snapshot)}:\n" +
result.stderr.join("\n"));
completer0.complete();
}
if (result.success) {
completer0.complete(null);
} else {
join4();
}
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
if (packageRoot != null) {
snapshotArgs.add('--package-root=${packageRoot}');
join3();
} else {
join3();
}
}
if (snapshot == null) {
completer0.complete(null);
} else {
join2();
}
}
if (response['type'] == 'error') {
throw new CrossIsolateException.deserialize(
response['error']);
join1();
} else {
join1();
}
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
} catch (e2, s2) {
completer0.completeError(e2, s2);
}
}, onError: completer0.completeError);
}
if (packageRoot == null) {
join0(null);
} else {
join0(packageRoot.toString());
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}))).then((x0) {
try {
x0;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
if (snapshot != null && fileExists(snapshot)) {
log.fine("Spawning isolate from ${snapshot}.");
join1() {
join2() {
join0();
}
catch0(error, s1) {
try {
if (error is IsolateSpawnException) {
log.fine(
"Couldn't load existing snapshot ${snapshot}:\n${error}");
join2();
} else {
throw error;
}
} catch (error, s1) {
completer0.completeError(error, s1);
}
}
try {
new Future.value(
Isolate.spawnUri(
path.toUri(snapshot),
[],
message,
packageRoot: packageRoot)).then((x1) {
try {
x1;
completer0.complete(null);
} catch (e1, s2) {
catch0(e1, s2);
}
}, onError: catch0);
} catch (e2, s3) {
catch0(e2, s3);
}
}
if (packageRoot != null) {
packageRoot = packageRoot.toString();
join1();
} else {
join1();
}
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
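// Usage sketch (the generated source, port wiring, and snapshot path are
// illustrative):
//
//   var port = new ReceivePort();
//   runInIsolate(generatedEntrypointCode, port.sendPort,
//       packageRoot: path.toUri(packagesDir),
//       snapshot: path.join('.pub', 'bin', 'entrypoint.snapshot')).then((_) {
//     // The spawned code reports back over `port`.
//     return port.first;
//   }).then(print);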
// TODO(nweiz): remove this when issue 12617 is fixed.
/// A function used as a buffer between the host isolate and [spawnUri].
///
/// [spawnUri] synchronously loads the file and its imports, which can deadlock
/// the host isolate if there's an HTTP import pointing at a server in the host.
/// Adding an additional isolate in the middle works around this.
void _isolateBuffer(message) {
var replyTo = message['replyTo'];
var packageRoot = message['packageRoot'];
if (packageRoot != null) packageRoot = Uri.parse(packageRoot);
Isolate.spawnUri(
Uri.parse(message['uri']),
[],
message['message'],
packageRoot: packageRoot).then((_) => replyTo.send({
'type': 'success'
})).catchError((e, stack) {
replyTo.send({
'type': 'error',
'error': CrossIsolateException.serialize(e, stack)
});
});
}

View file

@ -1,951 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.entrypoint;
import 'dart:async';
import 'package:path/path.dart' as path;
import 'package:barback/barback.dart';
import 'barback/asset_environment.dart';
import 'io.dart';
import 'lock_file.dart';
import 'log.dart' as log;
import 'package.dart';
import 'package_graph.dart';
import 'sdk.dart' as sdk;
import 'solver/version_solver.dart';
import 'source/cached.dart';
import 'system_cache.dart';
import 'utils.dart';
/// The context surrounding the root package pub is operating on.
///
/// Pub operates over a directed graph of dependencies that starts at a root
/// "entrypoint" package. This is typically the package where the current
/// working directory is located. An entrypoint knows the [root] package it is
/// associated with and is responsible for managing the "packages" directory
/// for it.
///
/// That directory contains symlinks to all packages used by an app. These links
/// point either to the [SystemCache] or to some other location on the local
/// filesystem.
///
/// While entrypoints are typically applications, a pure library package may end
/// up being used as an entrypoint. Also, a single package may be used as an
/// entrypoint in one context but not in another. For example, a package that
/// contains a reusable library may not be the entrypoint when used by an app,
/// but may be the entrypoint when you're running its tests.
class Entrypoint {
/// The root package this entrypoint is associated with.
final Package root;
/// The system-wide cache which caches packages that need to be fetched over
/// the network.
final SystemCache cache;
/// Whether to create and symlink a "packages" directory containing links to
/// the installed packages.
final bool _packageSymlinks;
/// The lockfile for the entrypoint.
///
  /// If not provided to the entrypoint, it will be loaded lazily from disk.
LockFile _lockFile;
/// The graph of all packages reachable from the entrypoint.
PackageGraph _packageGraph;
/// Loads the entrypoint from a package at [rootDir].
///
/// If [packageSymlinks] is `true`, this will create a "packages" directory
/// with symlinks to the installed packages. This directory will be symlinked
/// into any directory that might contain an entrypoint.
Entrypoint(String rootDir, SystemCache cache, {bool packageSymlinks: true})
: root = new Package.load(null, rootDir, cache.sources),
cache = cache,
_packageSymlinks = packageSymlinks;
/// Creates an entrypoint given package and lockfile objects.
Entrypoint.inMemory(this.root, this._lockFile, this.cache)
: _packageSymlinks = false;
/// The path to the entrypoint's "packages" directory.
String get packagesDir => root.path('packages');
/// `true` if the entrypoint package currently has a lock file.
bool get lockFileExists => _lockFile != null || entryExists(lockFilePath);
LockFile get lockFile {
if (_lockFile != null) return _lockFile;
if (!lockFileExists) {
_lockFile = new LockFile.empty();
} else {
_lockFile = new LockFile.load(lockFilePath, cache.sources);
}
return _lockFile;
}
/// The path to the entrypoint package's pubspec.
String get pubspecPath => root.path('pubspec.yaml');
/// The path to the entrypoint package's lockfile.
String get lockFilePath => root.path('pubspec.lock');
/// Gets all dependencies of the [root] package.
///
/// Performs version resolution according to [SolveType].
///
/// [useLatest], if provided, defines a list of packages that will be
/// unlocked and forced to their latest versions. If [upgradeAll] is
/// true, the previous lockfile is ignored and all packages are re-resolved
/// from scratch. Otherwise, it will attempt to preserve the versions of all
/// previously locked packages.
///
/// Shows a report of the changes made relative to the previous lockfile. If
/// this is an upgrade or downgrade, all transitive dependencies are shown in
/// the report. Otherwise, only dependencies that were changed are shown. If
/// [dryRun] is `true`, no physical changes are made.
Future acquireDependencies(SolveType type, {List<String> useLatest,
bool dryRun: false}) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
new Future.value(
resolveVersions(
type,
cache.sources,
root,
lockFile: lockFile,
useLatest: useLatest)).then((x0) {
try {
var result = x0;
join0() {
result.showReport(type);
join1() {
join2() {
new Future.value(
Future.wait(result.packages.map(_get))).then((x1) {
try {
var ids = x1;
_saveLockFile(ids);
join3() {
_linkOrDeleteSecondaryPackageDirs();
result.summarizeChanges(type, dryRun: dryRun);
new Future.value(loadPackageGraph(result)).then((x2) {
try {
var packageGraph = x2;
packageGraph.loadTransformerCache().clearIfOutdated(
result.changedPackages);
join4() {
completer0.complete();
}
catch0(error, stackTrace) {
try {
log.exception(error, stackTrace);
join4();
} catch (error, stackTrace) {
completer0.completeError(error, stackTrace);
}
}
try {
new Future.value(
precompileDependencies(changed: result.changedPackages)).then((x3) {
try {
x3;
new Future.value(
precompileExecutables(changed: result.changedPackages)).then((x4) {
try {
x4;
join4();
} catch (e0, s0) {
catch0(e0, s0);
}
}, onError: catch0);
} catch (e1, s1) {
catch0(e1, s1);
}
}, onError: catch0);
} catch (e2, s2) {
catch0(e2, s2);
}
} catch (e3, s3) {
completer0.completeError(e3, s3);
}
}, onError: completer0.completeError);
}
if (_packageSymlinks) {
_linkSelf();
join3();
} else {
join3();
}
} catch (e4, s4) {
completer0.completeError(e4, s4);
}
}, onError: completer0.completeError);
}
if (_packageSymlinks) {
cleanDir(packagesDir);
join2();
} else {
deleteEntry(packagesDir);
join2();
}
}
if (dryRun) {
result.summarizeChanges(type, dryRun: dryRun);
completer0.complete(null);
} else {
join1();
}
}
if (!result.succeeded) {
throw result.error;
join0();
} else {
join0();
}
} catch (e5, s5) {
completer0.completeError(e5, s5);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
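  // The method above is async_await-compiler output. A hand-written sketch of
  // the same flow using native async/await (not the actual replacement
  // source) is roughly:
  //
  //   Future acquireDependencies(SolveType type,
  //       {List<String> useLatest, bool dryRun: false}) async {
  //     var result = await resolveVersions(type, cache.sources, root,
  //         lockFile: lockFile, useLatest: useLatest);
  //     if (!result.succeeded) throw result.error;
  //     result.showReport(type);
  //     if (dryRun) {
  //       result.summarizeChanges(type, dryRun: dryRun);
  //       return;
  //     }
  //     if (_packageSymlinks) {
  //       cleanDir(packagesDir);
  //     } else {
  //       deleteEntry(packagesDir);
  //     }
  //     var ids = await Future.wait(result.packages.map(_get));
  //     _saveLockFile(ids);
  //     if (_packageSymlinks) _linkSelf();
  //     _linkOrDeleteSecondaryPackageDirs();
  //     result.summarizeChanges(type, dryRun: dryRun);
  //     var packageGraph = await loadPackageGraph(result);
  //     packageGraph.loadTransformerCache()
  //         .clearIfOutdated(result.changedPackages);
  //     try {
  //       await precompileDependencies(changed: result.changedPackages);
  //       await precompileExecutables(changed: result.changedPackages);
  //     } catch (error, stackTrace) {
  //       log.exception(error, stackTrace);
  //     }
  //   }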
/// Precompile any transformed dependencies of the entrypoint.
///
/// If [changed] is passed, only dependencies whose contents might be changed
/// if one of the given packages changes will be recompiled.
Future precompileDependencies({Iterable<String> changed}) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
new Future.value(loadPackageGraph()).then((x0) {
try {
var graph = x0;
var depsDir = path.join('.pub', 'deps', 'debug');
var dependenciesToPrecompile =
graph.packages.values.where(((package) {
if (package.pubspec.transformers.isEmpty) return false;
if (graph.isPackageMutable(package.name)) return false;
if (!dirExists(path.join(depsDir, package.name))) return true;
if (changed == null) return true;
return overlaps(
graph.transitiveDependencies(
package.name).map((package) => package.name).toSet(),
changed);
})).map(((package) {
return package.name;
})).toSet();
join1() {
join2() {
join3() {
completer0.complete();
}
catch0(_, s0) {
try {
var it0 = dependenciesToPrecompile.iterator;
break0() {
completer0.completeError(_, s0);
}
var trampoline0;
continue0() {
trampoline0 = null;
if (it0.moveNext()) {
var package = it0.current;
deleteEntry(path.join(depsDir, package));
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
} else {
break0();
}
}
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
} catch (_, s0) {
completer0.completeError(_, s0);
}
}
try {
new Future.value(
log.progress("Precompiling dependencies", (() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var packagesToLoad = unionAll(
dependenciesToPrecompile.map(graph.transitiveDependencies)).map(((package) {
return package.name;
})).toSet();
new Future.value(
AssetEnvironment.create(
this,
BarbackMode.DEBUG,
packages: packagesToLoad,
useDart2JS: false)).then((x0) {
try {
var environment = x0;
environment.barback.errors.listen(((_) {
}));
new Future.value(
environment.barback.getAllAssets()).then((x1) {
try {
var assets = x1;
new Future.value(
waitAndPrintErrors(assets.map(((asset) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
var destPath =
path.join(depsDir, asset.id.package, path.fromUri(asset.id.path));
ensureDir(path.dirname(destPath));
new Future.value(
createFileFromStream(asset.read(), destPath)).then((x0) {
try {
x0;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
if (!dependenciesToPrecompile.contains(
asset.id.package)) {
completer0.complete(null);
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
})))).then((x2) {
try {
x2;
log.message(
"Precompiled " +
toSentence(ordered(dependenciesToPrecompile).map(log.bold)) +
".");
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
} catch (e2, s2) {
completer0.completeError(e2, s2);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}))).then((x1) {
try {
x1;
join3();
} catch (e0, s1) {
catch0(e0, s1);
}
}, onError: catch0);
} catch (e1, s2) {
catch0(e1, s2);
}
}
if (dependenciesToPrecompile.isEmpty) {
completer0.complete(null);
} else {
join2();
}
}
if (dirExists(depsDir)) {
var it1 = dependenciesToPrecompile.iterator;
break1() {
var it2 = listDir(depsDir).iterator;
break2() {
join1();
}
var trampoline2;
continue2() {
trampoline2 = null;
if (it2.moveNext()) {
var subdir = it2.current;
var package = graph.packages[path.basename(subdir)];
join4() {
trampoline2 = continue2;
do trampoline2(); while (trampoline2 != null);
}
if (package == null ||
package.pubspec.transformers.isEmpty ||
graph.isPackageMutable(package.name)) {
deleteEntry(subdir);
join4();
} else {
join4();
}
} else {
break2();
}
}
trampoline2 = continue2;
do trampoline2(); while (trampoline2 != null);
}
var trampoline1;
continue1() {
trampoline1 = null;
if (it1.moveNext()) {
var package = it1.current;
deleteEntry(path.join(depsDir, package));
trampoline1 = continue1;
do trampoline1(); while (trampoline1 != null);
} else {
break1();
}
}
trampoline1 = continue1;
do trampoline1(); while (trampoline1 != null);
} else {
join1();
}
} catch (e2, s3) {
completer0.completeError(e2, s3);
}
}, onError: completer0.completeError);
}
if (changed != null) {
changed = changed.toSet();
join0();
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
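  // Outline of the generated body above (a sketch, assuming native
  // async/await):
  //
  //   var graph = await loadPackageGraph();
  //   var depsDir = path.join('.pub', 'deps', 'debug');
  //   // Select immutable, transformed packages whose precompiled output is
  //   // missing or whose transitive dependencies overlap with [changed];
  //   // delete any stale output directories; then run a DEBUG-mode barback
  //   // build and copy each selected package's assets into
  //   // .pub/deps/debug/<package>.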
/// Precompiles all executables from dependencies that don't transitively
/// depend on [this] or on a path dependency.
Future precompileExecutables({Iterable<String> changed}) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
var binDir = path.join('.pub', 'bin');
var sdkVersionPath = path.join(binDir, 'sdk-version');
var sdkMatches =
fileExists(sdkVersionPath) &&
readTextFile(sdkVersionPath) == "${sdk.version}\n";
join1() {
new Future.value(loadPackageGraph()).then((x0) {
try {
var graph = x0;
var executables =
new Map.fromIterable(root.immediateDependencies, key: ((dep) {
return dep.name;
}), value: ((dep) {
return _executablesForPackage(graph, dep.name, changed);
}));
var it0 = executables.keys.toList().iterator;
break0() {
join2() {
join3() {
new Future.value(
log.progress("Precompiling executables", (() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
ensureDir(binDir);
writeTextFile(sdkVersionPath, "${sdk.version}\n");
var packagesToLoad =
unionAll(executables.keys.map(graph.transitiveDependencies)).map(((package) {
return package.name;
})).toSet();
var executableIds =
unionAll(executables.values.map(((ids) {
return ids.toSet();
})));
new Future.value(
AssetEnvironment.create(
this,
BarbackMode.RELEASE,
packages: packagesToLoad,
entrypoints: executableIds,
useDart2JS: false)).then((x0) {
try {
var environment = x0;
environment.barback.errors.listen(((error) {
log.error(log.red("Build error:\n$error"));
}));
new Future.value(
waitAndPrintErrors(executables.keys.map(((package) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var dir = path.join(binDir, package);
cleanDir(dir);
new Future.value(
environment.precompileExecutables(
package,
dir,
executableIds: executables[package])).then((x0) {
try {
x0;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
})))).then((x1) {
try {
x1;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}))).then((x1) {
try {
x1;
completer0.complete();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
if (executables.isEmpty) {
completer0.complete(null);
} else {
join3();
}
}
if (!sdkMatches) {
deleteEntry(binDir);
join2();
} else {
join2();
}
}
var trampoline0;
continue0() {
trampoline0 = null;
if (it0.moveNext()) {
var package = it0.current;
join4() {
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
}
if (executables[package].isEmpty) {
executables.remove(package);
join4();
} else {
join4();
}
} else {
break0();
}
}
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
}
if (!sdkMatches) {
changed = null;
join1();
} else {
join1();
}
}
if (changed != null) {
changed = changed.toSet();
join0();
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
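// Illustrative sketch (not part of the original file): roughly what the
// continuation-passing code above expresses when written with native
// async/await. It reuses only names that appear in the surrounding code
// (loadPackageGraph, AssetEnvironment.create, waitAndPrintErrors, etc.);
// the actual hand-written source may differ in detail.
Future _precompileExecutablesSketch({Iterable<String> changed}) async {
  if (changed != null) changed = changed.toSet();

  var binDir = path.join('.pub', 'bin');
  var sdkVersionPath = path.join(binDir, 'sdk-version');
  var sdkMatches = fileExists(sdkVersionPath) &&
      readTextFile(sdkVersionPath) == "${sdk.version}\n";
  // If the SDK version changed, ignore [changed] and recompile everything.
  if (!sdkMatches) changed = null;

  var graph = await loadPackageGraph();
  var executables = new Map.fromIterable(root.immediateDependencies,
      key: (dep) => dep.name,
      value: (dep) => _executablesForPackage(graph, dep.name, changed));
  for (var package in executables.keys.toList()) {
    if (executables[package].isEmpty) executables.remove(package);
  }

  if (!sdkMatches) deleteEntry(binDir);
  if (executables.isEmpty) return;

  await log.progress("Precompiling executables", () async {
    ensureDir(binDir);
    writeTextFile(sdkVersionPath, "${sdk.version}\n");

    var packagesToLoad = unionAll(
            executables.keys.map(graph.transitiveDependencies))
        .map((package) => package.name)
        .toSet();
    var executableIds =
        unionAll(executables.values.map((ids) => ids.toSet()));
    var environment = await AssetEnvironment.create(this, BarbackMode.RELEASE,
        packages: packagesToLoad,
        entrypoints: executableIds,
        useDart2JS: false);
    environment.barback.errors.listen((error) {
      log.error(log.red("Build error:\n$error"));
    });

    await waitAndPrintErrors(executables.keys.map((package) async {
      var dir = path.join(binDir, package);
      cleanDir(dir);
      await environment.precompileExecutables(package, dir,
          executableIds: executables[package]);
    }));
  });
}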
/// Returns the list of all executable assets for [packageName] that should be
/// precompiled.
///
/// If [changed] isn't `null`, executables for [packageName] will only be
/// compiled if they might depend on a package in [changed].
List<AssetId> _executablesForPackage(PackageGraph graph, String packageName,
Set<String> changed) {
var package = graph.packages[packageName];
var binDir = package.path('bin');
if (!dirExists(binDir)) return [];
if (graph.isPackageMutable(packageName)) return [];
var executables = package.executableIds;
// If we don't know which packages were changed, always precompile the
// executables.
if (changed == null) return executables;
// If any of the package's dependencies changed, recompile the executables.
if (graph.transitiveDependencies(
packageName).any((package) => changed.contains(package.name))) {
return executables;
}
// If any executable doesn't exist, precompile them all regardless of what
// changed. Since we delete the bin directory before recompiling, we need to
// recompile all executables.
var executablesExist = executables.every(
(executable) =>
fileExists(
path.join(
'.pub',
'bin',
packageName,
"${path.url.basename(executable.path)}.snapshot")));
if (!executablesExist) return executables;
// Otherwise, we don't need to recompile.
return [];
}
/// Makes sure the package at [id] is locally available.
///
/// This automatically downloads the package to the system-wide cache as well
/// if it requires network access to retrieve (specifically, if the package's
/// source is a [CachedSource]).
Future<PackageId> _get(PackageId id) {
if (id.isRoot) return new Future.value(id);
var source = cache.sources[id.source];
return new Future.sync(() {
if (!_packageSymlinks) {
if (source is! CachedSource) return null;
return source.downloadToSystemCache(id);
}
var packageDir = path.join(packagesDir, id.name);
if (entryExists(packageDir)) deleteEntry(packageDir);
return source.get(id, packageDir);
}).then((_) => source.resolveId(id));
}
/// Determines whether or not the lockfile is out of date with respect to the
/// pubspec.
///
/// This will be `false` if there is no lockfile at all, or if the pubspec
/// contains dependencies that are not in the lockfile or that don't match
/// what's in there.
bool _isLockFileUpToDate(LockFile lockFile) {
/// If this is an entrypoint for an in-memory package, trust the in-memory
/// lockfile provided for it.
if (root.dir == null) return true;
return root.immediateDependencies.every((package) {
var locked = lockFile.packages[package.name];
if (locked == null) return false;
if (package.source != locked.source) return false;
if (!package.constraint.allows(locked.version)) return false;
var source = cache.sources[package.source];
if (source == null) return false;
return source.descriptionsEqual(package.description, locked.description);
});
}
/// Determines whether all of the packages in the lockfile are already
/// installed and available.
///
/// Note: this assumes [isLockFileUpToDate] has already been called and
/// returned `true`.
Future<bool> _arePackagesAvailable(LockFile lockFile) {
return Future.wait(lockFile.packages.values.map((package) {
var source = cache.sources[package.source];
// This should only be called after [_isLockFileUpToDate] has returned
// `true`, which ensures all of the sources in the lock file are valid.
assert(source != null);
// We only care about cached sources. Uncached sources aren't "installed".
// If one of those is missing, we want to show the user the file not
// found error later since installing won't accomplish anything.
if (source is! CachedSource) return new Future.value(true);
// Get the directory.
return source.getDirectory(package).then((dir) {
// See if the directory is there and looks like a package.
return dirExists(dir) || fileExists(path.join(dir, "pubspec.yaml"));
});
})).then((results) {
// Make sure they are all true.
return results.every((result) => result);
});
}
/// Gets dependencies if the lockfile is out of date with respect to the
/// pubspec.
Future ensureLockFileIsUpToDate() {
return new Future.sync(() {
// If we don't have a current lock file, we definitely need to install.
if (!_isLockFileUpToDate(lockFile)) {
if (lockFileExists) {
log.message(
"Your pubspec has changed, so we need to update your lockfile:");
} else {
log.message(
"You don't have a lockfile, so we need to generate that:");
}
return false;
}
// If we do have a lock file, we still need to make sure the packages
// are actually installed. The user may have just gotten a package that
// includes a lockfile.
return _arePackagesAvailable(lockFile).then((available) {
if (!available) {
log.message(
"You are missing some dependencies, so we need to install them " "first:");
}
return available;
});
}).then((upToDate) {
if (upToDate) return null;
return acquireDependencies(SolveType.GET);
});
}
/// Loads the package graph for the application and all of its transitive
/// dependencies.
///
/// If [result] is passed, this loads the graph from it without re-parsing the
/// lockfile or any pubspecs. Otherwise, before loading, this makes sure the
/// lockfile and dependencies are installed and up to date.
Future<PackageGraph> loadPackageGraph([SolveResult result]) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
new Future.value(log.progress("Loading package graph", (() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
new Future.value(ensureLockFileIsUpToDate()).then((x0) {
try {
x0;
new Future.value(
Future.wait(lockFile.packages.values.map(((id) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var source = cache.sources[id.source];
new Future.value(
source.getDirectory(id)).then((x0) {
try {
var dir = x0;
completer0.complete(
new Package.load(id.name, dir, cache.sources));
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
})))).then((x1) {
try {
var packages = x1;
var packageMap =
new Map.fromIterable(packages, key: ((p) {
return p.name;
}));
packageMap[root.name] = root;
completer0.complete(
new PackageGraph(this, lockFile, packageMap));
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
}
if (result != null) {
new Future.value(Future.wait(result.packages.map(((id) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
new Future.value(
cache.sources[id.source].getDirectory(id)).then((x0) {
try {
var dir = x0;
completer0.complete(
new Package(result.pubspecs[id.name], dir));
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
})))).then((x2) {
try {
var packages = x2;
completer0.complete(
new PackageGraph(
this,
new LockFile(result.packages),
new Map.fromIterable(packages, key: ((package) {
return package.name;
}))));
} catch (e2, s2) {
completer0.completeError(e2, s2);
}
}, onError: completer0.completeError);
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}), fine: true)).then((x0) {
try {
var graph = x0;
_packageGraph = graph;
completer0.complete(graph);
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
if (_packageGraph != null) {
completer0.complete(_packageGraph);
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Saves a list of concrete package versions to the `pubspec.lock` file.
void _saveLockFile(List<PackageId> packageIds) {
_lockFile = new LockFile(packageIds);
var lockFilePath = root.path('pubspec.lock');
writeTextFile(lockFilePath, _lockFile.serialize(root.dir, cache.sources));
}
/// Creates a self-referential symlink in the `packages` directory that allows
/// a package to import its own files using `package:`.
void _linkSelf() {
var linkPath = path.join(packagesDir, root.name);
// Create the symlink if it doesn't exist.
if (entryExists(linkPath)) return;
ensureDir(packagesDir);
createPackageSymlink(
root.name,
root.dir,
linkPath,
isSelfLink: true,
relative: true);
}
/// If [packageSymlinks] is true, add "packages" directories to the whitelist
/// of directories that may contain Dart entrypoints.
///
/// Otherwise, delete any "packages" directories in the whitelist of
/// directories that may contain Dart entrypoints.
void _linkOrDeleteSecondaryPackageDirs() {
// Only the main "bin" directory gets a "packages" directory, not its
// subdirectories.
var binDir = root.path('bin');
if (dirExists(binDir)) _linkOrDeleteSecondaryPackageDir(binDir);
// The others get "packages" directories in subdirectories too.
for (var dir in ['benchmark', 'example', 'test', 'tool', 'web']) {
_linkOrDeleteSecondaryPackageDirsRecursively(root.path(dir));
}
}
/// If [packageSymlinks] is true, creates a symlink to the "packages"
/// directory in [dir] and all its subdirectories.
///
/// Otherwise, deletes any "packages" directories in [dir] and all its
/// subdirectories.
void _linkOrDeleteSecondaryPackageDirsRecursively(String dir) {
if (!dirExists(dir)) return;
_linkOrDeleteSecondaryPackageDir(dir);
_listDirWithoutPackages(
dir).where(dirExists).forEach(_linkOrDeleteSecondaryPackageDir);
}
// TODO(nweiz): roll this into [listDir] in io.dart once issue 4775 is fixed.
/// Recursively lists the contents of [dir], excluding hidden `.DS_Store`
/// files and `packages` entries.
List<String> _listDirWithoutPackages(dir) {
return flatten(listDir(dir).map((file) {
if (path.basename(file) == 'packages') return [];
if (!dirExists(file)) return [];
var fileAndSubfiles = [file];
fileAndSubfiles.addAll(_listDirWithoutPackages(file));
return fileAndSubfiles;
}));
}
/// If [packageSymlinks] is true, creates a symlink to the "packages"
/// directory in [dir].
///
/// Otherwise, deletes the "packages" directory in [dir] if one exists.
void _linkOrDeleteSecondaryPackageDir(String dir) {
var symlink = path.join(dir, 'packages');
if (entryExists(symlink)) deleteEntry(symlink);
if (_packageSymlinks) createSymlink(packagesDir, symlink, relative: true);
}
}


@@ -1,299 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.error_group;
import 'dart:async';
/// An [ErrorGroup] entangles the errors of multiple [Future]s and [Stream]s
/// with one another.
///
/// This allows APIs to expose multiple [Future]s and [Stream]s that have
/// identical error conditions without forcing API consumers to attach error
/// handling to objects they don't care about.
///
/// To use an [ErrorGroup], register [Future]s and [Stream]s with it using
/// [registerFuture] and [registerStream]. These methods return wrapped versions
/// of the [Future]s and [Stream]s, which should then be used in place of the
/// originals. For example:
///
/// var errorGroup = new ErrorGroup();
/// future = errorGroup.registerFuture(future);
/// stream = errorGroup.registerStream(stream);
///
/// An [ErrorGroup] has two major effects on its wrapped members:
///
/// * An error in any member of the group will be propagated to every member
/// that hasn't already completed. If those members later complete, their
/// values will be ignored.
/// * If any member of this group has a listener, errors on members without
/// listeners won't get passed to the top-level error handler.
class ErrorGroup {
/// The [Future]s that are members of [this].
final _futures = <_ErrorGroupFuture>[];
/// The [Stream]s that are members of [this].
final _streams = <_ErrorGroupStream>[];
/// Whether [this] has completed, either successfully or with an error.
var _isDone = false;
/// The [Completer] for [done].
final _doneCompleter = new Completer();
/// The underlying [Future] for [done].
///
/// We need to be able to access it internally as an [_ErrorGroupFuture] so
/// we can check if it has listeners and signal errors on it.
_ErrorGroupFuture _done;
/// Returns a [Future] that completes successfully when all members of [this]
/// are complete, or with an error if any member receives an error.
///
/// This [Future] is effectively in the group in that an error on it won't be
/// passed to the top-level error handler unless no members of the group have
/// listeners attached.
Future get done => _done;
/// Creates a new group with no members.
ErrorGroup() {
this._done = new _ErrorGroupFuture(this, _doneCompleter.future);
}
/// Registers a [Future] as a member of [this].
///
/// Returns a wrapped version of [future] that should be used in its place.
///
/// If all members of [this] have already completed successfully or with an
/// error, it's a [StateError] to try to register a new [Future].
Future registerFuture(Future future) {
if (_isDone) {
throw new StateError(
"Can't register new members on a complete " "ErrorGroup.");
}
var wrapped = new _ErrorGroupFuture(this, future);
_futures.add(wrapped);
return wrapped;
}
/// Registers a [Stream] as a member of [this].
///
/// Returns a wrapped version of [stream] that should be used in its place.
/// The returned [Stream] will be multi-subscription if and only if [stream]
/// is.
///
/// Since all errors in a group are passed to all members, the returned
/// [Stream] will automatically unsubscribe all its listeners when it
/// encounters an error.
///
/// If all members of [this] have already completed successfully or with an
/// error, it's a [StateError] to try to register a new [Stream].
Stream registerStream(Stream stream) {
if (_isDone) {
throw new StateError(
"Can't register new members on a complete " "ErrorGroup.");
}
var wrapped = new _ErrorGroupStream(this, stream);
_streams.add(wrapped);
return wrapped;
}
/// Sends [error] to all members of [this].
///
/// Like errors that come from members, this will only be passed to the
/// top-level error handler if no members have listeners.
///
/// If all members of [this] have already completed successfully or with an
/// error, it's a [StateError] to try to signal an error.
void signalError(var error, [StackTrace stackTrace]) {
if (_isDone) {
throw new StateError("Can't signal errors on a complete ErrorGroup.");
}
_signalError(error, stackTrace);
}
/// Signal an error internally.
///
/// This is just like [signalError], but instead of throwing an error if
/// [this] is complete, it just does nothing.
void _signalError(var error, [StackTrace stackTrace]) {
if (_isDone) return;
var caught = false;
for (var future in _futures) {
if (future._isDone || future._hasListeners) caught = true;
future._signalError(error, stackTrace);
}
for (var stream in _streams) {
if (stream._isDone || stream._hasListeners) caught = true;
stream._signalError(error, stackTrace);
}
_isDone = true;
_done._signalError(error, stackTrace);
if (!caught && !_done._hasListeners) scheduleMicrotask(() {
throw error;
});
}
/// Notifies [this] that one of its member [Future]s is complete.
void _signalFutureComplete(_ErrorGroupFuture future) {
if (_isDone) return;
_isDone = _futures.every((future) => future._isDone) &&
_streams.every((stream) => stream._isDone);
if (_isDone) _doneCompleter.complete();
}
/// Notifies [this] that one of its member [Stream]s is complete.
void _signalStreamComplete(_ErrorGroupStream stream) {
if (_isDone) return;
_isDone = _futures.every((future) => future._isDone) &&
_streams.every((stream) => stream._isDone);
if (_isDone) _doneCompleter.complete();
}
}
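// Illustrative sketch (not part of the original file): a minimal use of
// [ErrorGroup], expanding the example in the class documentation. The
// future and stream here are placeholders; any error signaled on the group
// reaches every member that has not yet completed.
void errorGroupExample() {
  var group = new ErrorGroup();
  var future = group.registerFuture(new Completer().future);
  var stream = group.registerStream(new StreamController().stream);

  // As long as at least one member has a listener, errors won't escape to
  // the top-level handler.
  future.catchError((error) => print("future saw: $error"));
  stream.listen(null, onError: (error) => print("stream saw: $error"));
  group.done.catchError((error) => print("done saw: $error"));

  // Propagated to the future, the stream, and [done] above.
  group.signalError(new StateError("something went wrong"));
}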
/// A [Future] wrapper that keeps track of whether it's been completed and
/// whether it has any listeners.
///
/// It also notifies its parent [ErrorGroup] when it completes successfully or
/// receives an error.
class _ErrorGroupFuture implements Future {
/// The parent [ErrorGroup].
final ErrorGroup _group;
/// Whether [this] has completed, either successfully or with an error.
var _isDone = false;
/// The underlying [Completer] for [this].
final _completer = new Completer();
/// Whether [this] has any listeners.
bool _hasListeners = false;
/// Creates a new [_ErrorGroupFuture] that's a child of [_group] and wraps
/// [inner].
_ErrorGroupFuture(this._group, Future inner) {
inner.then((value) {
if (!_isDone) _completer.complete(value);
_isDone = true;
_group._signalFutureComplete(this);
}).catchError(_group._signalError);
// Make sure _completer.future doesn't automatically send errors to the
// top-level.
_completer.future.catchError((_) {});
}
Future then(onValue(value), {Function onError}) {
_hasListeners = true;
return _completer.future.then(onValue, onError: onError);
}
Future catchError(Function onError, {bool test(Object error)}) {
_hasListeners = true;
return _completer.future.catchError(onError, test: test);
}
Future whenComplete(void action()) {
_hasListeners = true;
return _completer.future.whenComplete(action);
}
Future timeout(Duration timeLimit, {void onTimeout()}) {
_hasListeners = true;
return _completer.future.timeout(timeLimit, onTimeout: onTimeout);
}
Stream asStream() {
_hasListeners = true;
return _completer.future.asStream();
}
/// Signal that an error from [_group] should be propagated through [this],
/// unless it's already complete.
void _signalError(var error, [StackTrace stackTrace]) {
if (!_isDone) _completer.completeError(error, stackTrace);
_isDone = true;
}
}
// TODO(nweiz): currently streams never top-level unhandled errors (issue 7843).
// When this is fixed, this class will need to prevent such errors from being
// top-leveled.
/// A [Stream] wrapper that keeps track of whether it's been completed and
/// whether it has any listeners.
///
/// It also notifies its parent [ErrorGroup] when it completes successfully or
/// receives an error.
class _ErrorGroupStream extends Stream {
/// The parent [ErrorGroup].
final ErrorGroup _group;
/// Whether [this] has completed, either successfully or with an error.
var _isDone = false;
/// The underlying [StreamController] for [this].
final StreamController _controller;
/// The controller's [Stream].
///
/// May be different than `_controller.stream` if the wrapped stream is a
/// broadcasting stream.
Stream _stream;
/// The [StreamSubscription] that connects the wrapped [Stream] to
/// [_controller].
StreamSubscription _subscription;
/// Whether [this] has any listeners.
bool get _hasListeners => _controller.hasListener;
/// Creates a new [_ErrorGroupStream] that's a child of [_group] and wraps
/// [inner].
_ErrorGroupStream(this._group, Stream inner)
: _controller = new StreamController(sync: true) {
// Use old-style asBroadcastStream behavior - cancel source _subscription
// the first time the stream has no listeners.
_stream = inner.isBroadcast ?
_controller.stream.asBroadcastStream(onCancel: (sub) => sub.cancel()) :
_controller.stream;
_subscription = inner.listen((v) {
_controller.add(v);
}, onError: (e, [stackTrace]) {
_group._signalError(e, stackTrace);
}, onDone: () {
_isDone = true;
_group._signalStreamComplete(this);
_controller.close();
});
}
StreamSubscription listen(void onData(value), {Function onError, void
onDone(), bool cancelOnError}) {
return _stream.listen(
onData,
onError: onError,
onDone: onDone,
cancelOnError: true);
}
/// Signal that an error from [_group] should be propagated through [this],
/// unless it's already complete.
void _signalError(var e, [StackTrace stackTrace]) {
if (_isDone) return;
_subscription.cancel();
// Call these asynchronously to work around issue 7913.
new Future.value().then((_) {
_controller.addError(e, stackTrace);
_controller.close();
});
}
}


@@ -1,125 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.exceptions;
import 'dart:io';
import 'dart:isolate';
import "package:analyzer/analyzer.dart";
import 'package:args/command_runner.dart';
import "package:http/http.dart" as http;
import "package:stack_trace/stack_trace.dart";
import "package:yaml/yaml.dart";
import '../../asset/dart/serialize.dart';
/// An exception class for exceptions that are intended to be seen by the user.
///
/// These exceptions won't have any debugging information printed when they're
/// thrown.
class ApplicationException implements Exception {
final String message;
ApplicationException(this.message);
String toString() => message;
}
/// An exception class for exceptions that are intended to be seen by the user
/// and are associated with a problem in a file at some path.
class FileException implements ApplicationException {
final String message;
/// The path to the file that was missing or erroneous.
final String path;
FileException(this.message, this.path);
String toString() => message;
}
/// A class for exceptions that wrap other exceptions.
class WrappedException extends ApplicationException {
/// The underlying exception that [this] is wrapping, if any.
final innerError;
/// The stack chain for [innerError] if it exists.
final Chain innerChain;
WrappedException(String message, this.innerError, [StackTrace innerTrace])
: innerChain = innerTrace == null ? null : new Chain.forTrace(innerTrace),
super(message);
}
/// A class for exceptions that shouldn't be printed at the top level.
///
/// This is usually used when an exception has already been printed using
/// [log.exception].
class SilentException extends WrappedException {
SilentException(innerError, [StackTrace innerTrace])
: super(innerError.toString(), innerError, innerTrace);
}
/// A class for errors in a command's input data.
///
/// This corresponds to the [exit_codes.DATA] exit code.
class DataException extends ApplicationException {
DataException(String message)
: super(message);
}
/// A class for exceptions where a package could not be found in a [Source].
///
/// The source is responsible for wrapping its internal exceptions in this so
/// that other code in pub can use this to show a more detailed explanation of
/// why the package was being requested.
class PackageNotFoundException extends WrappedException {
PackageNotFoundException(String message, [innerError, StackTrace innerTrace])
: super(message, innerError, innerTrace);
}
/// All the names of user-facing exceptions.
final _userFacingExceptions = new Set<String>.from(
['ApplicationException', 'GitException',
// This refers to http.ClientException.
'ClientException',
// Errors coming from the Dart analyzer are probably caused by syntax errors
// in user code, so they're user-facing.
'AnalyzerError',
'AnalyzerErrorGroup',
// An error spawning an isolate probably indicates a transformer with an
// invalid import.
'IsolateSpawnException',
// IOException and subclasses.
'CertificateException',
'FileSystemException',
'HandshakeException',
'HttpException',
'IOException',
'ProcessException',
'RedirectException',
'SignalException',
'SocketException',
'StdoutException',
'TlsException',
'WebSocketException']);
/// Returns whether [error] is a user-facing error object.
///
/// This includes both [ApplicationException] and any dart:io errors.
bool isUserFacingException(error) {
if (error is CrossIsolateException) {
return _userFacingExceptions.contains(error.type);
}
// TODO(nweiz): unify this list with _userFacingExceptions when issue 5897 is
// fixed.
return error is ApplicationException ||
error is AnalyzerError ||
error is AnalyzerErrorGroup ||
error is IsolateSpawnException ||
error is IOException ||
error is http.ClientException ||
error is YamlException ||
error is UsageException;
}
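// Illustrative sketch (not part of the original file): how a top-level
// error handler might use [isUserFacingException], assuming errors are
// funneled through a single reporting function as they are in pub's
// command-line entrypoint.
void reportError(error, StackTrace stackTrace) {
  if (isUserFacingException(error)) {
    // The message alone is meant for the user; no debugging noise.
    stderr.writeln(error);
  } else {
    // Anything else is likely a bug in pub itself, so keep the stack chain.
    stderr.writeln(error);
    stderr.writeln(new Chain.forTrace(stackTrace).terse);
  }
}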


@@ -1,320 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.executable;
import 'dart:async';
import 'dart:io';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as p;
import 'package:stack_trace/stack_trace.dart';
import 'barback/asset_environment.dart';
import 'entrypoint.dart';
import 'exit_codes.dart' as exit_codes;
import 'io.dart';
import 'log.dart' as log;
import 'utils.dart';
/// Runs [executable] from [package] reachable from [entrypoint].
///
/// The executable string is a relative Dart file path using native path
/// separators with or without a trailing ".dart" extension. It is contained
/// within [package], which should either be the entrypoint package or an
/// immediate dependency of it.
///
/// Arguments from [args] will be passed to the spawned Dart application.
///
/// If [mode] is passed, it's used as the barback mode; it defaults to
/// [BarbackMode.RELEASE].
///
/// Returns the exit code of the spawned app.
Future<int> runExecutable(Entrypoint entrypoint, String package,
String executable, Iterable<String> args, {bool isGlobal: false,
BarbackMode mode}) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
join1() {
join2() {
join3() {
var localSnapshotPath =
p.join(".pub", "bin", package, "${executable}.dart.snapshot");
join4() {
var rootDir = "bin";
var parts = p.split(executable);
join5() {
var assetPath = "${p.url.joinAll(p.split(executable))}.dart";
var id = new AssetId(package, assetPath);
new Future.value(
AssetEnvironment.create(
entrypoint,
mode,
useDart2JS: false,
entrypoints: [id])).then((x0) {
try {
var environment = x0;
environment.barback.errors.listen(((error) {
log.error(log.red("Build error:\n$error"));
}));
var server;
join6() {
join7() {
var vmArgs = [];
vmArgs.add("--checked");
var relativePath =
p.url.relative(assetPath, from: p.url.joinAll(p.split(server.rootDirectory)));
vmArgs.add(
server.url.resolve(relativePath).toString());
vmArgs.addAll(args);
new Future.value(
Process.start(Platform.executable, vmArgs)).then((x1) {
try {
var process = x1;
process.stderr.listen(stderr.add);
process.stdout.listen(stdout.add);
stdin.listen(process.stdin.add);
completer0.complete(process.exitCode);
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
catch0(error, stackTrace) {
try {
if (error is AssetNotFoundException) {
var message =
"Could not find ${log.bold(executable + ".dart")}";
join8() {
log.error("${message}.");
log.fine(new Chain.forTrace(stackTrace));
completer0.complete(exit_codes.NO_INPUT);
}
if (package != entrypoint.root.name) {
message +=
" in package ${log.bold(server.package)}";
join8();
} else {
join8();
}
} else {
throw error;
}
} catch (error, stackTrace) {
completer0.completeError(error, stackTrace);
}
}
try {
new Future.value(
environment.barback.getAssetById(id)).then((x2) {
try {
x2;
join7();
} catch (e1, s1) {
catch0(e1, s1);
}
}, onError: catch0);
} catch (e2, s2) {
catch0(e2, s2);
}
}
if (package == entrypoint.root.name) {
new Future.value(
environment.serveDirectory(rootDir)).then((x3) {
try {
server = x3;
join6();
} catch (e3, s3) {
completer0.completeError(e3, s3);
}
}, onError: completer0.completeError);
} else {
new Future.value(
environment.servePackageBinDirectory(package)).then((x4) {
try {
server = x4;
join6();
} catch (e4, s4) {
completer0.completeError(e4, s4);
}
}, onError: completer0.completeError);
}
} catch (e5, s5) {
completer0.completeError(e5, s5);
}
}, onError: completer0.completeError);
}
if (parts.length > 1) {
assert(!isGlobal && package == entrypoint.root.name);
rootDir = parts.first;
join5();
} else {
executable = p.join("bin", executable);
join5();
}
}
if (!isGlobal &&
fileExists(localSnapshotPath) &&
mode == BarbackMode.RELEASE) {
completer0.complete(
_runCachedExecutable(entrypoint, localSnapshotPath, args));
} else {
join4();
}
}
if (p.extension(executable) == ".dart") {
executable = p.withoutExtension(executable);
join3();
} else {
join3();
}
}
if (log.verbosity == log.Verbosity.NORMAL) {
log.verbosity = log.Verbosity.WARNING;
join2();
} else {
join2();
}
}
if (entrypoint.root.name != package &&
!entrypoint.root.immediateDependencies.any(((dep) {
return dep.name == package;
}))) {
new Future.value(entrypoint.loadPackageGraph()).then((x5) {
try {
var graph = x5;
join9() {
join1();
}
if (graph.packages.containsKey(package)) {
dataError(
'Package "${package}" is not an immediate dependency.\n'
'Cannot run executables in transitive dependencies.');
join9();
} else {
dataError(
'Could not find package "${package}". Did you forget to add a ' 'dependency?');
join9();
}
} catch (e6, s6) {
completer0.completeError(e6, s6);
}
}, onError: completer0.completeError);
} else {
join1();
}
}
if (mode == null) {
mode = BarbackMode.RELEASE;
join0();
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Runs the snapshot at [path] with [args] and hooks its stdout, stderr, and
/// stdin to this process's.
///
/// If [recompile] is passed, it's called if the snapshot is out-of-date. It's
/// expected to regenerate a snapshot at [path], after which the snapshot will
/// be re-run. It may return a Future.
///
/// If [checked] is set, runs the snapshot in checked mode.
///
/// Returns the snapshot's exit code.
///
/// This doesn't do any validation of the snapshot's SDK version.
Future<int> runSnapshot(String path, Iterable<String> args, {recompile(),
bool checked: false}) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var vmArgs = [path]..addAll(args);
join0() {
var stdin1;
var stdin2;
join1() {
runProcess(input) {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
new Future.value(
Process.start(Platform.executable, vmArgs)).then((x0) {
try {
var process = x0;
process.stderr.listen(stderr.add);
process.stdout.listen(stdout.add);
input.listen(process.stdin.add);
completer0.complete(process.exitCode);
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
new Future.value(runProcess(stdin1)).then((x0) {
try {
var exitCode = x0;
join2() {
new Future.value(recompile()).then((x1) {
try {
x1;
completer0.complete(runProcess(stdin2));
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
}, onError: completer0.completeError);
}
if (recompile == null || exitCode != 255) {
completer0.complete(exitCode);
} else {
join2();
}
} catch (e1, s1) {
completer0.completeError(e1, s1);
}
}, onError: completer0.completeError);
}
if (recompile == null) {
stdin1 = stdin;
join1();
} else {
var pair = tee(stdin);
stdin1 = pair.first;
stdin2 = pair.last;
join1();
}
}
if (checked) {
vmArgs.insert(0, "--checked");
join0();
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Runs the executable snapshot at [snapshotPath].
Future<int> _runCachedExecutable(Entrypoint entrypoint, String snapshotPath,
List<String> args) {
return runSnapshot(snapshotPath, args, checked: true, recompile: () {
log.fine("Precompiled executable is out of date.");
return entrypoint.precompileExecutables();
});
}
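// Illustrative sketch (not part of the original file): invoking
// [runExecutable] from a command. The executable name "hello" is
// hypothetical; it stands for a Dart file under the entrypoint's bin/
// directory.
Future<int> runHello(Entrypoint entrypoint, List<String> args) {
  return runExecutable(entrypoint, entrypoint.root.name, "hello", args,
      mode: BarbackMode.DEBUG);
}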


@@ -1,60 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Exit code constants.
///
/// From [the BSD sysexits manpage][manpage]. Not every constant here is used,
/// even though some of the unused ones may be appropriate for errors
/// encountered by pub.
///
/// [manpage]: http://www.freebsd.org/cgi/man.cgi?query=sysexits
library pub.exit_codes;
/// The command completed successfully.
const SUCCESS = 0;
/// The command was used incorrectly.
const USAGE = 64;
/// The input data was incorrect.
const DATA = 65;
/// An input file did not exist or was unreadable.
const NO_INPUT = 66;
/// The user specified did not exist.
const NO_USER = 67;
/// The host specified did not exist.
const NO_HOST = 68;
/// A service is unavailable.
const UNAVAILABLE = 69;
/// An internal software error has been detected.
const SOFTWARE = 70;
/// An operating system error has been detected.
const OS = 71;
/// Some system file did not exist or was unreadable.
const OS_FILE = 72;
/// A user-specified output file cannot be created.
const CANT_CREATE = 73;
/// An error occurred while doing I/O on some file.
const IO = 74;
/// Temporary failure, indicating something that is not really an error.
const TEMP_FAIL = 75;
/// The remote system returned something invalid during a protocol exchange.
const PROTOCOL = 76;
/// The user did not have sufficient permissions.
const NO_PERM = 77;
/// Something was unconfigured or mis-configured.
const CONFIG = 78;
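// Illustrative sketch (not part of the original file): using these codes
// with dart:io when a command is invoked incorrectly. Assumes dart:io and
// this library (imported with an `exit_codes` prefix) are in scope.
void exitWithUsageError(String usage) {
  stderr.writeln(usage);
  exit(exit_codes.USAGE);
}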


@@ -1,116 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Helper functionality for invoking Git.
library pub.git;
import 'dart:async';
import 'dart:io';
import 'package:stack_trace/stack_trace.dart';
import 'exceptions.dart';
import 'io.dart';
import 'log.dart' as log;
import 'utils.dart';
/// An exception thrown because a git command failed.
class GitException implements ApplicationException {
/// The arguments to the git command.
final List<String> args;
/// The standard error emitted by git.
final String stderr;
String get message => 'Git error. Command: git ${args.join(" ")}\n$stderr';
GitException(Iterable<String> args, this.stderr)
: args = args.toList();
String toString() => message;
}
/// Tests whether or not the git command-line app is available for use.
bool get isInstalled {
if (_isInstalledCache != null) return _isInstalledCache;
_isInstalledCache = _gitCommand != null;
return _isInstalledCache;
}
bool _isInstalledCache;
/// Run a git process with [args] from [workingDir].
///
/// Returns the stdout as a list of strings if it succeeded. Completes to an
/// exception if it failed.
Future<List<String>> run(List<String> args, {String workingDir, Map<String,
String> environment}) {
if (!isInstalled) {
fail(
"Cannot find a Git executable.\n" "Please ensure Git is correctly installed.");
}
log.muteProgress();
return runProcess(
_gitCommand,
args,
workingDir: workingDir,
environment: environment).then((result) {
if (!result.success) throw new GitException(args, result.stderr.join("\n"));
return result.stdout;
}).whenComplete(() {
log.unmuteProgress();
});
}
/// Like [run], but synchronous.
List<String> runSync(List<String> args, {String workingDir, Map<String,
String> environment}) {
if (!isInstalled) {
fail(
"Cannot find a Git executable.\n" "Please ensure Git is correctly installed.");
}
var result = runProcessSync(
_gitCommand,
args,
workingDir: workingDir,
environment: environment);
if (!result.success) throw new GitException(args, result.stderr.join("\n"));
return result.stdout;
}
/// Returns the name of the git command-line app, or null if Git could not be
/// found on the user's PATH.
String get _gitCommand {
if (_commandCache != null) return _commandCache;
if (_tryGitCommand("git")) {
_commandCache = "git";
} else if (_tryGitCommand("git.cmd")) {
_commandCache = "git.cmd";
} else {
return null;
}
log.fine('Determined git command $_commandCache.');
return _commandCache;
}
String _commandCache;
/// Checks whether [command] is the Git command for this computer.
bool _tryGitCommand(String command) {
// If "git --version" prints something familiar, git is working.
try {
var result = runProcessSync(command, ["--version"]);
var regexp = new RegExp("^git version");
return result.stdout.length == 1 && regexp.hasMatch(result.stdout.single);
} on ProcessException catch (error, stackTrace) {
var chain = new Chain.forTrace(stackTrace);
// If the process failed, they probably don't have it.
log.message('Git command is not "$command": $error\n$chain');
return false;
}
}
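// Illustrative sketch (not part of the original file): a typical call into
// this library, resolving the current commit of a repository. The directory
// argument is hypothetical.
Future<String> currentCommit(String repoDir) async {
  var lines = await run(["rev-parse", "HEAD"], workingDir: repoDir);
  return lines.first;
}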

File diff suppressed because it is too large.


@@ -1,270 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Helpers for dealing with HTTP.
library pub.http;
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'package:http/http.dart' as http;
import 'package:http_throttle/http_throttle.dart';
import 'io.dart';
import 'log.dart' as log;
import 'oauth2.dart' as oauth2;
import 'sdk.dart' as sdk;
import 'utils.dart';
// TODO(nweiz): make this configurable
/// The amount of time in milliseconds to allow HTTP requests before assuming
/// they've failed.
final HTTP_TIMEOUT = 30 * 1000;
/// Headers and field names that should be censored in the log output.
final _CENSORED_FIELDS = const ['refresh_token', 'authorization'];
/// Headers required for pub.dartlang.org API requests.
///
/// The Accept header tells pub.dartlang.org which version of the API we're
/// expecting, so it can either serve that version or give us a 406 error if
/// it's not supported.
final PUB_API_HEADERS = const {
'Accept': 'application/vnd.pub.v2+json'
};
/// An HTTP client that transforms 40* errors and socket exceptions into more
/// user-friendly error messages.
///
/// This also adds a 30-second timeout to every request. This can be configured
/// on a per-request basis by setting the 'Pub-Request-Timeout' header to the
/// desired number of milliseconds, or to "None" to disable the timeout.
class _PubHttpClient extends http.BaseClient {
final _requestStopwatches = new Map<http.BaseRequest, Stopwatch>();
http.Client _inner;
_PubHttpClient([http.Client inner])
: this._inner = inner == null ? new http.Client() : inner;
Future<http.StreamedResponse> send(http.BaseRequest request) {
_requestStopwatches[request] = new Stopwatch()..start();
request.headers[HttpHeaders.USER_AGENT] = "Dart pub ${sdk.version}";
_logRequest(request);
// TODO(nweiz): remove this when issue 4061 is fixed.
var stackTrace;
try {
throw null;
} catch (_, localStackTrace) {
stackTrace = localStackTrace;
}
var timeoutLength = HTTP_TIMEOUT;
var timeoutString = request.headers.remove('Pub-Request-Timeout');
if (timeoutString == 'None') {
timeoutLength = null;
} else if (timeoutString != null) {
timeoutLength = int.parse(timeoutString);
}
var future = _inner.send(request).then((streamedResponse) {
_logResponse(streamedResponse);
var status = streamedResponse.statusCode;
// 401 responses should be handled by the OAuth2 client. It's very
// unlikely that they'll be returned by non-OAuth2 requests. We also want
// to pass along 400 responses from the token endpoint.
var tokenRequest =
urisEqual(streamedResponse.request.url, oauth2.tokenEndpoint);
if (status < 400 || status == 401 || (status == 400 && tokenRequest)) {
return streamedResponse;
}
if (status == 406 &&
request.headers['Accept'] == PUB_API_HEADERS['Accept']) {
fail(
"Pub ${sdk.version} is incompatible with the current version of "
"${request.url.host}.\n" "Upgrade pub to the latest version and try again.");
}
if (status == 500 &&
(request.url.host == "pub.dartlang.org" ||
request.url.host == "storage.googleapis.com")) {
var message =
"HTTP error 500: Internal Server Error at " "${request.url}.";
if (request.url.host == "pub.dartlang.org" ||
request.url.host == "storage.googleapis.com") {
message +=
"\nThis is likely a transient error. Please try again " "later.";
}
fail(message);
}
return http.Response.fromStream(streamedResponse).then((response) {
throw new PubHttpException(response);
});
}).catchError((error, stackTrace) {
if (error is SocketException && error.osError != null) {
if (error.osError.errorCode == 8 ||
error.osError.errorCode == -2 ||
error.osError.errorCode == -5 ||
error.osError.errorCode == 11001 ||
error.osError.errorCode == 11004) {
fail(
'Could not resolve URL "${request.url.origin}".',
error,
stackTrace);
} else if (error.osError.errorCode == -12276) {
fail(
'Unable to validate SSL certificate for ' '"${request.url.origin}".',
error,
stackTrace);
}
}
throw error;
});
if (timeoutLength == null) return future;
return timeout(
future,
timeoutLength,
request.url,
'fetching URL "${request.url}"');
}
/// Logs the fact that [request] was sent, and information about it.
void _logRequest(http.BaseRequest request) {
var requestLog = new StringBuffer();
requestLog.writeln("HTTP ${request.method} ${request.url}");
request.headers.forEach(
(name, value) => requestLog.writeln(_logField(name, value)));
if (request.method == 'POST') {
var contentTypeString = request.headers[HttpHeaders.CONTENT_TYPE];
if (contentTypeString == null) contentTypeString = '';
var contentType = ContentType.parse(contentTypeString);
if (request is http.MultipartRequest) {
requestLog.writeln();
requestLog.writeln("Body fields:");
request.fields.forEach(
(name, value) => requestLog.writeln(_logField(name, value)));
// TODO(nweiz): make MultipartRequest.files readable, and log them?
} else if (request is http.Request) {
if (contentType.value == 'application/x-www-form-urlencoded') {
requestLog.writeln();
requestLog.writeln("Body fields:");
request.bodyFields.forEach(
(name, value) => requestLog.writeln(_logField(name, value)));
} else if (contentType.value == 'text/plain' ||
contentType.value == 'application/json') {
requestLog.write(request.body);
}
}
}
log.fine(requestLog.toString().trim());
}
/// Logs the fact that [response] was received, and information about it.
void _logResponse(http.StreamedResponse response) {
// TODO(nweiz): Fork the response stream and log the response body. Be
// careful not to log OAuth2 private data, though.
var responseLog = new StringBuffer();
var request = response.request;
var stopwatch = _requestStopwatches.remove(request)..stop();
responseLog.writeln(
"HTTP response ${response.statusCode} "
"${response.reasonPhrase} for ${request.method} ${request.url}");
responseLog.writeln("took ${stopwatch.elapsed}");
response.headers.forEach(
(name, value) => responseLog.writeln(_logField(name, value)));
log.fine(responseLog.toString().trim());
}
/// Returns a log-formatted string for the HTTP field or header with the given
/// [name] and [value].
String _logField(String name, String value) {
if (_CENSORED_FIELDS.contains(name.toLowerCase())) {
return "$name: <censored>";
} else {
return "$name: $value";
}
}
}
/// The [_PubHttpClient] wrapped by [httpClient].
final _pubClient = new _PubHttpClient();
/// The HTTP client to use for all HTTP requests.
final httpClient = new ThrottleClient(16, _pubClient);
/// The underlying HTTP client wrapped by [httpClient].
http.Client get innerHttpClient => _pubClient._inner;
set innerHttpClient(http.Client client) => _pubClient._inner = client;
/// Handles a successful JSON-formatted response from pub.dartlang.org.
///
/// These responses are expected to be of the form `{"success": {"message":
/// "some message"}}`. If the format is correct, the message will be printed;
/// otherwise an error will be raised.
void handleJsonSuccess(http.Response response) {
var parsed = parseJsonResponse(response);
if (parsed['success'] is! Map ||
!parsed['success'].containsKey('message') ||
parsed['success']['message'] is! String) {
invalidServerResponse(response);
}
log.message(parsed['success']['message']);
}
/// Handles an unsuccessful JSON-formatted response from pub.dartlang.org.
///
/// These responses are expected to be of the form `{"error": {"message": "some
/// message"}}`. If the format is correct, the message will be raised as an
/// error; otherwise an [invalidServerResponse] error will be raised.
void handleJsonError(http.Response response) {
var errorMap = parseJsonResponse(response);
if (errorMap['error'] is! Map ||
!errorMap['error'].containsKey('message') ||
errorMap['error']['message'] is! String) {
invalidServerResponse(response);
}
fail(errorMap['error']['message']);
}
/// Parses a response body, assuming it's JSON-formatted.
///
/// Throws a user-friendly error if the response body is invalid JSON, or if
/// it's not a map.
Map parseJsonResponse(http.Response response) {
var value;
try {
value = JSON.decode(response.body);
} on FormatException catch (e) {
invalidServerResponse(response);
}
if (value is! Map) invalidServerResponse(response);
return value;
}
/// Throws an error describing an invalid response from the server.
void invalidServerResponse(http.Response response) =>
fail('Invalid server response:\n${response.body}');
/// Exception thrown when an HTTP operation fails.
class PubHttpException implements Exception {
final http.Response response;
const PubHttpException(this.response);
String toString() =>
'HTTP error ${response.statusCode}: ' '${response.reasonPhrase}';
}
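// Illustrative sketch (not part of the original file): sending a request
// through [httpClient] with the per-request timeout disabled via the
// 'Pub-Request-Timeout' header described above. The URL is whatever the
// caller needs; nothing here is pub-specific.
Future<String> fetchWithoutTimeout(Uri url) async {
  var request = new http.Request("GET", url);
  request.headers["Pub-Request-Timeout"] = "None";
  var streamedResponse = await httpClient.send(request);
  var response = await http.Response.fromStream(streamedResponse);
  return response.body;
}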

File diff suppressed because it is too large.


@@ -1,147 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.lock_file;
import 'package:path/path.dart' as p;
import 'package:pub_semver/pub_semver.dart';
import 'package:source_span/source_span.dart';
import 'package:yaml/yaml.dart';
import 'io.dart';
import 'package.dart';
import 'source_registry.dart';
import 'utils.dart';
/// A parsed and validated `pubspec.lock` file.
class LockFile {
/// The packages this lockfile pins.
Map<String, PackageId> packages;
/// Creates a new lockfile containing [ids].
factory LockFile(List<PackageId> ids) {
var lockFile = new LockFile.empty();
for (var id in ids) {
if (!id.isRoot) lockFile.packages[id.name] = id;
}
return lockFile;
}
LockFile._(this.packages);
LockFile.empty()
: packages = <String, PackageId>{};
/// Loads a lockfile from [filePath].
factory LockFile.load(String filePath, SourceRegistry sources) {
return LockFile._parse(filePath, readTextFile(filePath), sources);
}
/// Parses a lockfile whose text is [contents].
factory LockFile.parse(String contents, SourceRegistry sources) {
return LockFile._parse(null, contents, sources);
}
/// Parses the lockfile whose text is [contents].
///
/// [filePath] is the system-native path to the lockfile on disc. It may be
/// `null`.
static LockFile _parse(String filePath, String contents,
SourceRegistry sources) {
var packages = <String, PackageId>{};
if (contents.trim() == '') return new LockFile.empty();
var sourceUrl;
if (filePath != null) sourceUrl = p.toUri(filePath);
var parsed = loadYamlNode(contents, sourceUrl: sourceUrl);
_validate(parsed is Map, 'The lockfile must be a YAML mapping.', parsed);
var packageEntries = parsed['packages'];
if (packageEntries != null) {
_validate(
packageEntries is Map,
'The "packages" field must be a map.',
parsed.nodes['packages']);
packageEntries.forEach((name, spec) {
// Parse the version.
_validate(
spec.containsKey('version'),
'Package $name is missing a version.',
spec);
var version = new Version.parse(spec['version']);
// Parse the source.
_validate(
spec.containsKey('source'),
'Package $name is missing a source.',
spec);
var sourceName = spec['source'];
_validate(
spec.containsKey('description'),
'Package $name is missing a description.',
spec);
var description = spec['description'];
// Let the source parse the description.
var source = sources[sourceName];
try {
description =
source.parseDescription(filePath, description, fromLockFile: true);
} on FormatException catch (ex) {
throw new SourceSpanFormatException(
ex.message,
spec.nodes['source'].span);
}
var id = new PackageId(name, sourceName, version, description);
// Validate the name.
_validate(
name == id.name,
"Package name $name doesn't match ${id.name}.",
spec);
packages[name] = id;
});
}
return new LockFile._(packages);
}
/// If [condition] is `false` throws a format error with [message] for [node].
static void _validate(bool condition, String message, YamlNode node) {
if (condition) return;
throw new SourceSpanFormatException(message, node.span);
}
/// Returns the serialized YAML text of the lock file.
///
/// [packageDir] is the containing directory of the root package, used to
/// properly serialize package descriptions.
String serialize(String packageDir, SourceRegistry sources) {
// Convert the dependencies to a simple object.
var data = {};
packages.forEach((name, package) {
var description =
sources[package.source].serializeDescription(packageDir, package.description);
data[name] = {
'version': package.version.toString(),
'source': package.source,
'description': description
};
});
return """
# Generated by pub
# See http://pub.dartlang.org/doc/glossary.html#lockfile
${yamlToString({'packages': data})}
""";
}
}
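// Illustrative sketch (not part of the original file): round-tripping a
// lockfile through this class. The package directory and [SourceRegistry]
// come from the caller; `writeTextFile` is assumed to live in io.dart
// alongside `readTextFile`.
void rewriteLockFile(String packageDir, SourceRegistry sources) {
  var lockFilePath = p.join(packageDir, 'pubspec.lock');
  var lockFile = new LockFile.load(lockFilePath, sources);
  writeTextFile(lockFilePath, lockFile.serialize(packageDir, sources));
}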


@@ -1,551 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Message logging.
library pub.log;
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'package:args/command_runner.dart';
import 'package:path/path.dart' as p;
import 'package:source_span/source_span.dart';
import 'package:stack_trace/stack_trace.dart';
import 'exceptions.dart';
import 'io.dart';
import 'progress.dart';
import 'transcript.dart';
import 'utils.dart';
/// The singleton instance so that we can have a nice api like:
///
/// log.json.error(...);
final json = new _JsonLogger();
/// The current logging verbosity.
Verbosity verbosity = Verbosity.NORMAL;
/// Whether or not to log entries with prejudice.
bool withPrejudice = false;
/// In cases where there's a ton of log spew, make sure we don't eat infinite
/// memory.
///
/// This can occur when the backtracking solver stumbles into a pathological
/// dependency graph. It generally will find a solution, but it may log
/// thousands and thousands of entries to get there.
const _MAX_TRANSCRIPT = 10000;
/// The list of recorded log messages. Will only be recorded if
/// [recordTranscript()] is called.
Transcript<Entry> _transcript;
/// The currently-animated progress indicator, if any.
///
/// This will also be in [_progresses].
Progress _animatedProgress;
final _cyan = getSpecial('\u001b[36m');
final _green = getSpecial('\u001b[32m');
final _magenta = getSpecial('\u001b[35m');
final _red = getSpecial('\u001b[31m');
final _yellow = getSpecial('\u001b[33m');
final _gray = getSpecial('\u001b[1;30m');
final _none = getSpecial('\u001b[0m');
final _noColor = getSpecial('\u001b[39m');
final _bold = getSpecial('\u001b[1m');
/// An enum type for defining the different logging levels a given message can
/// be associated with.
///
/// By default, [ERROR] and [WARNING] messages are printed to stderr. [MESSAGE]
/// messages are printed to stdout, and others are ignored.
class Level {
/// An error occurred and an operation could not be completed.
///
/// Usually shown to the user on stderr.
static const ERROR = const Level._("ERR ");
/// Something unexpected happened, but the program was able to continue,
/// though possibly in a degraded fashion.
static const WARNING = const Level._("WARN");
/// A message intended specifically to be shown to the user.
static const MESSAGE = const Level._("MSG ");
/// Some interaction with the external world occurred, such as a network
/// operation, process spawning, or file IO.
static const IO = const Level._("IO ");
/// Incremental output during pub's version constraint solver.
static const SOLVER = const Level._("SLVR");
/// Fine-grained and verbose additional information.
///
/// Used to provide program state context for other logs (such as what pub
/// was doing when an IO operation occurred) or just more detail for an
/// operation.
static const FINE = const Level._("FINE");
const Level._(this.name);
final String name;
String toString() => name;
}
typedef _LogFn(Entry entry);
/// An enum type to control which log levels are displayed and how they are
/// displayed.
class Verbosity {
/// Silence all logging.
static const NONE = const Verbosity._("none", const {
Level.ERROR: null,
Level.WARNING: null,
Level.MESSAGE: null,
Level.IO: null,
Level.SOLVER: null,
Level.FINE: null
});
/// Shows only errors and warnings.
static const WARNING = const Verbosity._("warning", const {
Level.ERROR: _logToStderr,
Level.WARNING: _logToStderr,
Level.MESSAGE: null,
Level.IO: null,
Level.SOLVER: null,
Level.FINE: null
});
/// The default verbosity which shows errors, warnings, and messages.
static const NORMAL = const Verbosity._("normal", const {
Level.ERROR: _logToStderr,
Level.WARNING: _logToStderr,
Level.MESSAGE: _logToStdout,
Level.IO: null,
Level.SOLVER: null,
Level.FINE: null
});
/// Shows errors, warnings, messages, and IO event logs.
static const IO = const Verbosity._("io", const {
Level.ERROR: _logToStderrWithLabel,
Level.WARNING: _logToStderrWithLabel,
Level.MESSAGE: _logToStdoutWithLabel,
Level.IO: _logToStderrWithLabel,
Level.SOLVER: null,
Level.FINE: null
});
/// Shows errors, warnings, messages, and version solver logs.
static const SOLVER = const Verbosity._("solver", const {
Level.ERROR: _logToStderr,
Level.WARNING: _logToStderr,
Level.MESSAGE: _logToStdout,
Level.IO: null,
Level.SOLVER: _logToStdout,
Level.FINE: null
});
/// Shows all logs.
static const ALL = const Verbosity._("all", const {
Level.ERROR: _logToStderrWithLabel,
Level.WARNING: _logToStderrWithLabel,
Level.MESSAGE: _logToStdoutWithLabel,
Level.IO: _logToStderrWithLabel,
Level.SOLVER: _logToStderrWithLabel,
Level.FINE: _logToStderrWithLabel
});
const Verbosity._(this.name, this._loggers);
final String name;
final Map<Level, _LogFn> _loggers;
/// Returns whether or not logs at [level] will be printed.
bool isLevelVisible(Level level) => _loggers[level] != null;
String toString() => name;
}
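// Illustrative sketch, not part of the original library: the level-to-logger
// maps above drive [isLevelVisible], so a verbosity shows a level exactly when
// its map entry is non-null. The function name is hypothetical.
void _demoVerbosity() {
  assert(Verbosity.NORMAL.isLevelVisible(Level.MESSAGE)); // logged to stdout
  assert(!Verbosity.NORMAL.isLevelVisible(Level.IO)); // ignored at NORMAL
  assert(Verbosity.ALL.isLevelVisible(Level.FINE)); // shown with a label
}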
/// A single log entry.
class Entry {
final Level level;
final List<String> lines;
Entry(this.level, this.lines);
}
/// Logs [message] at [Level.ERROR].
///
/// If [error] is passed, it's appended to [message]. If [trace] is passed, it's
/// printed at log level fine.
void error(message, [error, StackTrace trace]) {
if (error != null) {
message = "$message: $error";
if (error is Error && trace == null) trace = error.stackTrace;
}
write(Level.ERROR, message);
if (trace != null) write(Level.FINE, new Chain.forTrace(trace));
}
/// Logs [message] at [Level.WARNING].
void warning(message) => write(Level.WARNING, message);
/// Logs [message] at [Level.MESSAGE].
void message(message) => write(Level.MESSAGE, message);
/// Logs [message] at [Level.IO].
void io(message) => write(Level.IO, message);
/// Logs [message] at [Level.SOLVER].
void solver(message) => write(Level.SOLVER, message);
/// Logs [message] at [Level.FINE].
void fine(message) => write(Level.FINE, message);
/// Logs [message] at [level].
void write(Level level, message) {
message = message.toString();
var lines = splitLines(message);
// Discard a trailing newline. This is useful since StringBuffers often end
// up with an extra newline at the end from using [writeln].
if (lines.isNotEmpty && lines.last == "") {
lines.removeLast();
}
var entry = new Entry(level, lines.map(format).toList());
var logFn = verbosity._loggers[level];
if (logFn != null) logFn(entry);
if (_transcript != null) _transcript.add(entry);
}
final _capitalizedAnsiEscape = new RegExp(r'\u001b\[\d+(;\d+)?M');
/// Returns [string] formatted as it would be if it were logged.
String format(String string) {
if (!withPrejudice) return string;
// [toUpperCase] can corrupt terminal colorings, so fix them up using
// [replaceAllMapped].
string = string.toUpperCase().replaceAllMapped(
_capitalizedAnsiEscape,
(match) => match[0].toLowerCase());
// Don't use [bold] because it's disabled under [withPrejudice].
return "$_bold$string$_none";
}
/// Logs an asynchronous IO operation.
///
/// Logs [startMessage] before the operation starts, then when [operation]
/// completes, invokes [endMessage] with the completion value and logs the
/// result of that. Returns a future that completes after the logging is done.
///
/// If [endMessage] is omitted, then logs "Begin [startMessage]" before the
/// operation and "End [startMessage]" after it.
Future ioAsync(String startMessage, Future operation, [String
endMessage(value)]) {
if (endMessage == null) {
io("Begin $startMessage.");
} else {
io(startMessage);
}
return operation.then((result) {
if (endMessage == null) {
io("End $startMessage.");
} else {
io(endMessage(result));
}
return result;
});
}
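// Illustrative usage sketch, not part of the original library: logging the
// start and end of a file read at [Level.IO]. The file name is a stand-in;
// only the [ioAsync] wiring reflects the function above.
Future _exampleIoAsync() {
  return ioAsync(
      "read pubspec.yaml",
      new File("pubspec.yaml").readAsString(),
      (contents) => "Read ${contents.length} characters.");
}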
/// Logs the spawning of an [executable] process with [arguments] at [IO]
/// level.
void process(String executable, List<String> arguments, String workingDirectory)
{
io(
"Spawning \"$executable ${arguments.join(' ')}\" in "
"${p.absolute(workingDirectory)}");
}
/// Logs the results of running [executable].
void processResult(String executable, PubProcessResult result) {
// Log it all as one message so that it shows up as a single unit in the logs.
var buffer = new StringBuffer();
buffer.writeln("Finished $executable. Exit code ${result.exitCode}.");
dumpOutput(String name, List<String> output) {
if (output.length == 0) {
buffer.writeln("Nothing output on $name.");
} else {
buffer.writeln("$name:");
var numLines = 0;
for (var line in output) {
if (++numLines > 1000) {
buffer.writeln(
    '[${output.length - 1000} more lines of output truncated...]');
break;
}
buffer.writeln("| $line");
}
}
}
dumpOutput("stdout", result.stdout);
dumpOutput("stderr", result.stderr);
io(buffer.toString().trim());
}
/// Logs an exception.
void exception(exception, [StackTrace trace]) {
if (exception is SilentException) return;
var chain = trace == null ? new Chain.current() : new Chain.forTrace(trace);
// This is basically the top-level exception handler so that we don't
// spew a stack trace on our users.
if (exception is SourceSpanException) {
error(exception.toString(color: canUseSpecialChars));
} else {
error(getErrorMessage(exception));
}
fine("Exception type: ${exception.runtimeType}");
if (json.enabled) {
if (exception is UsageException) {
// Don't print usage info in JSON output.
json.error(exception.message);
} else {
json.error(exception);
}
}
if (!isUserFacingException(exception)) {
error(chain.terse);
} else {
fine(chain.terse);
}
if (exception is WrappedException && exception.innerError != null) {
var message = "Wrapped exception: ${exception.innerError}";
if (exception.innerChain != null) {
message = "$message\n${exception.innerChain}";
}
fine(message);
}
}
/// Enables recording of log entries.
void recordTranscript() {
_transcript = new Transcript<Entry>(_MAX_TRANSCRIPT);
}
/// If [recordTranscript()] was called, then prints the previously recorded log
/// transcript to stderr.
void dumpTranscript() {
if (_transcript == null) return;
stderr.writeln('---- Log transcript ----');
_transcript.forEach((entry) {
_printToStream(stderr, entry, showLabel: true);
}, (discarded) {
stderr.writeln('---- ($discarded discarded) ----');
});
stderr.writeln('---- End log transcript ----');
}
/// Prints [message] then displays an updated elapsed time until the future
/// returned by [callback] completes.
///
/// If anything else is logged during this (including another call to
/// [progress]), the progress animation is cancelled, although the total time
/// will still be printed once it finishes. If [fine] is passed, the progress
/// information will only be visible at [Level.FINE].
Future progress(String message, Future callback(), {bool fine: false}) {
_stopProgress();
var progress = new Progress(message, fine: fine);
_animatedProgress = progress;
return callback().whenComplete(progress.stop);
}
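// Illustrative usage sketch, not part of the original library: showing an
// animated "Resolving dependencies..." line while a caller-supplied future
// completes. [doResolve] is a hypothetical stand-in for the real work.
Future _exampleProgress(Future doResolve()) {
  return progress("Resolving dependencies", () => doResolve());
}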
/// Stops animating the running progress indicator, if currently running.
void _stopProgress() {
if (_animatedProgress != null) _animatedProgress.stopAnimating();
_animatedProgress = null;
}
/// The number of outstanding calls to [muteProgress] that have not been unmuted
/// yet.
int _numMutes = 0;
/// Whether progress animation should be muted or not.
bool get isMuted => _numMutes > 0;
/// Stops animating any ongoing progress.
///
/// This is called before spawning Git since Git sometimes writes directly to
/// the terminal to ask for login credentials, which would then get overwritten
/// by the progress animation.
///
/// Each call to this must be paired with a call to [unmuteProgress].
void muteProgress() {
_numMutes++;
}
/// Resumes animating any ongoing progress once all calls to [muteProgress]
/// have made their matching [unmuteProgress].
void unmuteProgress() {
assert(_numMutes > 0);
_numMutes--;
}
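// Illustrative sketch, not part of the original library: the pairing the doc
// comments above require, wrapped in a helper. [interact] stands in for
// something like spawning Git that may write directly to the terminal.
Future _withMutedProgress(Future interact()) {
  muteProgress();
  return new Future.sync(interact).whenComplete(unmuteProgress);
}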
/// Wraps [text] in the ANSI escape codes to make it bold when on a platform
/// that supports that.
///
/// Use this to highlight the most important piece of a long chunk of text.
///
/// This is disabled under [withPrejudice] since all text is bold with
/// prejudice.
String bold(text) => withPrejudice ? text : "$_bold$text$_none";
/// Wraps [text] in the ANSI escape codes to make it gray when on a platform
/// that supports that.
///
/// Use this for text that's less important than the text around it.
///
/// The gray marker also enables bold, so it needs to be handled specially with
/// [withPrejudice] to avoid disabling bolding entirely.
String gray(text) =>
withPrejudice ? "$_gray$text$_noColor" : "$_gray$text$_none";
/// Wraps [text] in the ANSI escape codes to color it cyan when on a platform
/// that supports that.
///
/// Use this to highlight something interesting but neither good nor bad.
String cyan(text) => "$_cyan$text$_noColor";
/// Wraps [text] in the ANSI escape codes to color it green when on a platform
/// that supports that.
///
/// Use this to highlight something successful or otherwise positive.
String green(text) => "$_green$text$_noColor";
/// Wraps [text] in the ANSI escape codes to color it magenta when on a
/// platform that supports that.
///
/// Use this to highlight something risky that the user should be aware of but
/// may intend to do.
String magenta(text) => "$_magenta$text$_noColor";
/// Wraps [text] in the ANSI escape codes to color it red when on a platform
/// that supports that.
///
/// Use this to highlight unequivocal errors, problems, or failures.
String red(text) => "$_red$text$_noColor";
/// Wraps [text] in the ANSI escape codes to color it yellow when on a platform
/// that supports that.
///
/// Use this to highlight warnings, cautions or other things that are bad but
/// do not prevent the user's goal from being reached.
String yellow(text) => "$_yellow$text$_noColor";
/// Log function that prints the message to stdout.
void _logToStdout(Entry entry) {
_logToStream(stdout, entry, showLabel: false);
}
/// Log function that prints the message to stdout with the level name.
void _logToStdoutWithLabel(Entry entry) {
_logToStream(stdout, entry, showLabel: true);
}
/// Log function that prints the message to stderr.
void _logToStderr(Entry entry) {
_logToStream(stderr, entry, showLabel: false);
}
/// Log function that prints the message to stderr with the level name.
void _logToStderrWithLabel(Entry entry) {
_logToStream(stderr, entry, showLabel: true);
}
void _logToStream(IOSink sink, Entry entry, {bool showLabel}) {
if (json.enabled) return;
_printToStream(sink, entry, showLabel: showLabel);
}
void _printToStream(IOSink sink, Entry entry, {bool showLabel}) {
_stopProgress();
bool firstLine = true;
for (var line in entry.lines) {
if (showLabel) {
if (firstLine) {
sink.write('${entry.level.name}: ');
} else {
sink.write(' | ');
}
}
sink.writeln(line);
firstLine = false;
}
}
/// Namespace-like class for collecting the methods for JSON logging.
class _JsonLogger {
/// Whether logging should use machine-friendly JSON output or human-friendly
/// text.
///
/// If set to `true`, then no regular logging is printed. Logged messages
/// will still be recorded and displayed if the transcript is printed.
bool enabled = false;
/// Creates an error JSON object for [error] and prints it if JSON output
/// is enabled.
///
/// Always prints to stdout.
void error(error, [stackTrace]) {
var errorJson = {
"error": error.toString()
};
if (stackTrace == null && error is Error) stackTrace = error.stackTrace;
if (stackTrace != null) {
errorJson["stackTrace"] = new Chain.forTrace(stackTrace).toString();
}
// If the error came from a file, include the path.
if (error is SourceSpanException && error.span.sourceUrl != null) {
errorJson["path"] = p.fromUri(error.span.sourceUrl);
}
if (error is FileException) {
errorJson["path"] = error.path;
}
this.message(errorJson);
}
/// Encodes [message] to JSON and prints it if JSON output is enabled.
void message(message) {
if (!enabled) return;
print(JSON.encode(message));
}
}
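// Illustrative sketch, not part of the original library: with JSON output
// enabled on this library's top-level [json] logger (declared earlier in this
// file), an error is emitted as a single machine-readable line on stdout. The
// message text is a stand-in.
void _exampleJsonLogging() {
  json.enabled = true;
  json.error("Package validation failed.");
  // Prints: {"error":"Package validation failed."}
}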

View file

@ -1,222 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.oauth2;
import 'dart:async';
import 'dart:io';
import 'package:oauth2/oauth2.dart';
import 'package:path/path.dart' as path;
import 'package:shelf/shelf.dart' as shelf;
import 'package:shelf/shelf_io.dart' as shelf_io;
import 'http.dart';
import 'io.dart';
import 'log.dart' as log;
import 'system_cache.dart';
import 'utils.dart';
export 'package:oauth2/oauth2.dart';
/// The pub client's OAuth2 identifier.
final _identifier =
'818368855108-8grd2eg9tj9f38os6f1urbcvsq399u8n.apps.' 'googleusercontent.com';
/// The pub client's OAuth2 secret.
///
/// This isn't actually meant to be kept a secret.
final _secret = 'SWeqj8seoJW0w7_CpEPFLX0K';
/// The URL to which the user will be directed to authorize the pub client to
/// get an OAuth2 access token.
///
/// `access_type=offline` and `approval_prompt=force` ensure that we always get
/// a refresh token from the server. See the [Google OAuth2 documentation][].
///
/// [Google OAuth2 documentation]: https://developers.google.com/accounts/docs/OAuth2WebServer#offline
final authorizationEndpoint = Uri.parse(
'https://accounts.google.com/o/oauth2/auth?access_type=offline'
'&approval_prompt=force');
/// The URL from which the pub client will request an access token once it's
/// been authorized by the user.
///
/// This can be controlled externally by setting the `_PUB_TEST_TOKEN_ENDPOINT`
/// environment variable.
Uri get tokenEndpoint {
var tokenEndpoint = Platform.environment['_PUB_TEST_TOKEN_ENDPOINT'];
if (tokenEndpoint != null) {
return Uri.parse(tokenEndpoint);
} else {
return _tokenEndpoint;
}
}
final _tokenEndpoint = Uri.parse('https://accounts.google.com/o/oauth2/token');
/// The OAuth2 scopes that the pub client needs.
///
/// Currently the client only needs the user's email so that the server can
/// verify their identity.
final _scopes = ['https://www.googleapis.com/auth/userinfo.email'];
/// An in-memory cache of the user's OAuth2 credentials.
///
/// This should always be the same as the credentials file stored in the system
/// cache.
Credentials _credentials;
/// Delete the cached credentials, if they exist.
void clearCredentials(SystemCache cache) {
_credentials = null;
var credentialsFile = _credentialsFile(cache);
if (entryExists(credentialsFile)) deleteEntry(credentialsFile);
}
/// Asynchronously passes an OAuth2 [Client] to [fn], and closes the client when
/// the [Future] returned by [fn] completes.
///
/// This takes care of loading and saving the client's credentials, as well as
/// prompting the user for their authorization. It will also re-authorize and
/// re-run [fn] if a recoverable authorization error is detected.
Future withClient(SystemCache cache, Future fn(Client client)) {
return _getClient(cache).then((client) {
return fn(client).whenComplete(() {
client.close();
// Be sure to save the credentials even when an error happens.
_saveCredentials(cache, client.credentials);
});
}).catchError((error) {
if (error is ExpirationException) {
log.error(
"Pub's authorization to upload packages has expired and "
"can't be automatically refreshed.");
return withClient(cache, fn);
} else if (error is AuthorizationException) {
var message = "OAuth2 authorization failed";
if (error.description != null) {
message = "$message (${error.description})";
}
log.error("$message.");
clearCredentials(cache);
return withClient(cache, fn);
} else {
throw error;
}
});
}
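// Illustrative usage sketch, not part of the original library: fetching a
// package listing with an authorized client. The URL is a stand-in; only the
// [withClient] wiring reflects the function above.
Future _exampleWithClient(SystemCache cache) {
  return withClient(cache, (client) {
    return client.get(Uri.parse("https://pub.dartlang.org/api/packages/foo"));
  });
}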
/// Gets a new OAuth2 client.
///
/// If saved credentials are available, those are used; otherwise, the user is
/// prompted to authorize the pub client.
Future<Client> _getClient(SystemCache cache) {
return new Future.sync(() {
var credentials = _loadCredentials(cache);
if (credentials == null) return _authorize();
var client =
new Client(_identifier, _secret, credentials, httpClient: httpClient);
_saveCredentials(cache, client.credentials);
return client;
});
}
/// Loads the user's OAuth2 credentials from the in-memory cache or the
/// filesystem if possible.
///
/// If the credentials can't be loaded for any reason, this returns `null`.
Credentials _loadCredentials(SystemCache cache) {
log.fine('Loading OAuth2 credentials.');
try {
if (_credentials != null) return _credentials;
var path = _credentialsFile(cache);
if (!fileExists(path)) return null;
var credentials = new Credentials.fromJson(readTextFile(path));
if (credentials.isExpired && !credentials.canRefresh) {
log.error(
"Pub's authorization to upload packages has expired and "
"can't be automatically refreshed.");
return null; // null means re-authorize.
}
return credentials;
} catch (e) {
log.error(
'Warning: could not load the saved OAuth2 credentials: $e\n'
'Obtaining new credentials...');
return null; // null means re-authorize.
}
}
/// Save the user's OAuth2 credentials to the in-memory cache and the
/// filesystem.
void _saveCredentials(SystemCache cache, Credentials credentials) {
log.fine('Saving OAuth2 credentials.');
_credentials = credentials;
var credentialsPath = _credentialsFile(cache);
ensureDir(path.dirname(credentialsPath));
writeTextFile(credentialsPath, credentials.toJson(), dontLogContents: true);
}
/// The path to the file in which the user's OAuth2 credentials are stored.
String _credentialsFile(SystemCache cache) =>
path.join(cache.rootDir, 'credentials.json');
/// Gets the user to authorize pub as a client of pub.dartlang.org via oauth2.
///
/// Returns a Future that completes to a fully-authorized [Client].
Future<Client> _authorize() {
var grant = new AuthorizationCodeGrant(
_identifier,
_secret,
authorizationEndpoint,
tokenEndpoint,
httpClient: httpClient);
// Spin up a one-shot HTTP server to receive the authorization code from the
// Google OAuth2 server via redirect. This server will close itself as soon as
// the code is received.
var completer = new Completer();
bindServer('localhost', 0).then((server) {
shelf_io.serveRequests(server, (request) {
if (request.url.path != "/") {
return new shelf.Response.notFound('Invalid URI.');
}
log.message('Authorization received, processing...');
var queryString = request.url.query;
if (queryString == null) queryString = '';
// Closing the server here is safe, since it will wait until the response
// is sent to actually shut down.
server.close();
chainToCompleter(
grant.handleAuthorizationResponse(queryToMap(queryString)),
completer);
return new shelf.Response.found('http://pub.dartlang.org/authorized');
});
var authUrl = grant.getAuthorizationUrl(
Uri.parse('http://localhost:${server.port}'),
scopes: _scopes);
log.message(
'Pub needs your authorization to upload packages on your behalf.\n'
'In a web browser, go to $authUrl\n' 'Then click "Allow access".\n\n'
'Waiting for your authorization...');
});
return completer.future.then((client) {
log.message('Successfully authorized.\n');
return client;
});
}

View file

@ -1,393 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.package;
import 'dart:io';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as p;
import 'package:pub_semver/pub_semver.dart';
import 'barback/transformer_id.dart';
import 'io.dart';
import 'git.dart' as git;
import 'pubspec.dart';
import 'source_registry.dart';
import 'utils.dart';
final _README_REGEXP = new RegExp(r"^README($|\.)", caseSensitive: false);
/// A named, versioned, unit of code and resource reuse.
class Package {
/// Compares [a] and [b], ordering them by name then version number.
///
/// This is normally used as a [Comparator] to pass to sort. This does not
/// take a package's description or root directory into account, so multiple
/// distinct packages may order the same.
static int orderByNameAndVersion(Package a, Package b) {
var name = a.name.compareTo(b.name);
if (name != 0) return name;
return a.version.compareTo(b.version);
}
/// The path to the directory containing the package.
final String dir;
/// The name of the package.
String get name {
if (pubspec.name != null) return pubspec.name;
if (dir != null) return p.basename(dir);
return null;
}
/// The package's version.
Version get version => pubspec.version;
/// The parsed pubspec associated with this package.
final Pubspec pubspec;
/// The immediate dependencies this package specifies in its pubspec.
List<PackageDep> get dependencies => pubspec.dependencies;
/// The immediate dev dependencies this package specifies in its pubspec.
List<PackageDep> get devDependencies => pubspec.devDependencies;
/// The dependency overrides this package specifies in its pubspec.
List<PackageDep> get dependencyOverrides => pubspec.dependencyOverrides;
/// All immediate dependencies this package specifies.
///
/// This includes regular dependencies, dev dependencies, and overrides.
Set<PackageDep> get immediateDependencies {
var deps = {};
addToMap(dep) {
deps[dep.name] = dep;
}
dependencies.forEach(addToMap);
devDependencies.forEach(addToMap);
// Make sure to add these last so they replace normal dependencies.
dependencyOverrides.forEach(addToMap);
return deps.values.toSet();
}
/// Returns a list of asset ids for all Dart executables in this package's bin
/// directory.
List<AssetId> get executableIds {
return ordered(
listFiles(
beneath: "bin",
recursive: false)).where(
(executable) => p.extension(executable) == '.dart').map((executable) {
return new AssetId(
name,
p.toUri(p.relative(executable, from: dir)).toString());
}).toList();
}
/// Returns the path to the README file at the root of the entrypoint, or null
/// if no README file is found.
///
/// If multiple READMEs are found, this uses the same conventions as
/// pub.dartlang.org for choosing the primary one: the README with the fewest
/// extensions that is lexically ordered first is chosen.
String get readmePath {
var readmes = listFiles(
recursive: false).map(
p.basename).where((entry) => entry.contains(_README_REGEXP));
if (readmes.isEmpty) return null;
return p.join(dir, readmes.reduce((readme1, readme2) {
var extensions1 = ".".allMatches(readme1).length;
var extensions2 = ".".allMatches(readme2).length;
var comparison = extensions1.compareTo(extensions2);
if (comparison == 0) comparison = readme1.compareTo(readme2);
return (comparison <= 0) ? readme1 : readme2;
}));
}
/// Loads the package whose root directory is [packageDir].
///
/// [name] is the expected name of that package (e.g. the name given in the
/// dependency), or `null` if the package being loaded is the entrypoint
/// package.
Package.load(String name, String packageDir, SourceRegistry sources)
: dir = packageDir,
pubspec = new Pubspec.load(packageDir, sources, expectedName: name);
/// Constructs a package with the given pubspec.
///
/// The package will have no directory associated with it.
Package.inMemory(this.pubspec)
: dir = null;
/// Creates a package with [pubspec] located at [dir].
Package(this.pubspec, this.dir);
/// Given a relative path within this package, returns its absolute path.
///
/// This is similar to `p.join(dir, part1, ...)`, except that subclasses may
/// override it to report that certain paths exist elsewhere than within
/// [dir]. For example, a [CachedPackage]'s `lib` directory is in the
/// `.pub/deps` directory.
String path(String part1, [String part2, String part3, String part4,
String part5, String part6, String part7]) {
if (dir == null) {
throw new StateError(
"Package $name is in-memory and doesn't have paths " "on disk.");
}
return p.join(dir, part1, part2, part3, part4, part5, part6, part7);
}
/// Given an absolute path within this package (such as that returned by
/// [path] or [listFiles]), returns it relative to the package root.
String relative(String path) {
if (dir == null) {
throw new StateError(
"Package $name is in-memory and doesn't have paths " "on disk.");
}
return p.relative(path, from: dir);
}
/// Returns the path to the library identified by [id] within [this].
String transformerPath(TransformerId id) {
if (id.package != name) {
throw new ArgumentError("Transformer $id isn't in package $name.");
}
if (id.path != null) return path('lib', p.fromUri('${id.path}.dart'));
var transformerPath = path('lib/transformer.dart');
if (fileExists(transformerPath)) return transformerPath;
return path('lib/$name.dart');
}
/// The basenames of files that are included in [list] despite being hidden.
static final _WHITELISTED_FILES = const ['.htaccess'];
/// A set of patterns that match paths to blacklisted files.
static final _blacklistedFiles = createFileFilter(['pubspec.lock']);
/// A set of patterns that match paths to blacklisted directories.
static final _blacklistedDirs = createDirectoryFilter(['packages']);
/// Returns a list of files that are considered to be part of this package.
///
/// If this is a Git repository, this will respect .gitignore; otherwise, it
/// will return all non-hidden, non-blacklisted files.
///
/// If [beneath] is passed, this will only return files beneath that path,
/// which is expected to be relative to the package's root directory. If
/// [recursive] is true, this will return all files beneath that path;
/// otherwise, it will only return files one level beneath it.
///
/// If [useGitIgnore] is passed, this will take the .gitignore rules into
/// account if the package's root directory is a Git repository.
///
/// Note that the returned paths won't always be beneath [dir]. To safely
/// convert them to paths relative to the package root, use [relative].
List<String> listFiles({String beneath, bool recursive: true,
bool useGitIgnore: false}) {
if (beneath == null) {
beneath = dir;
} else {
beneath = p.join(dir, beneath);
}
if (!dirExists(beneath)) return [];
// This is used in some performance-sensitive paths and can list many, many
// files. As such, it leans more heavily towards optimization as opposed to
// readability than most code in pub. In particular, it avoids using the
// path package, since re-parsing a path is very expensive relative to
// string operations.
var files;
if (useGitIgnore && git.isInstalled && dirExists(path('.git'))) {
// Later versions of git do not allow a path for ls-files that appears to
// be outside of the repo, so make sure we give it a relative path.
var relativeBeneath = p.relative(beneath, from: dir);
// List all files that aren't gitignored, including those not checked in
// to Git.
files = git.runSync(
["ls-files", "--cached", "--others", "--exclude-standard", relativeBeneath],
workingDir: dir);
// If we're not listing recursively, strip out paths that contain
// separators. Since git always prints forward slashes, we always detect
// them.
if (!recursive) {
// If we're listing a subdirectory, we only want to look for slashes
// after the subdirectory prefix.
var relativeStart =
relativeBeneath == '.' ? 0 : relativeBeneath.length + 1;
files = files.where((file) => !file.contains('/', relativeStart));
}
// Git always prints files relative to the repository root, but we want
// them relative to the working directory. It also prints forward slashes
// on Windows which we normalize away for easier testing.
files = files.map((file) {
if (Platform.operatingSystem != 'windows') return "$dir/$file";
return "$dir\\${file.replaceAll("/", "\\")}";
}).where((file) {
// Filter out broken symlinks, since git doesn't do so automatically.
return fileExists(file);
});
} else {
files = listDir(
beneath,
recursive: recursive,
includeDirs: false,
whitelist: _WHITELISTED_FILES);
}
return files.where((file) {
// Using substring here is generally problematic in cases where dir has
// one or more trailing slashes. If you do listDir("foo"), you'll get back
// paths like "foo/bar". If you do listDir("foo/"), you'll get "foo/bar"
// (note the trailing slash was dropped). If you do listDir("foo//"),
// you'll get "foo//bar".
//
// This means if you strip off the prefix, the resulting string may have a
// leading separator (if the prefix did not have a trailing one) or it may
// not. However, since we are only using the results of that to call
// contains() on, the leading separator is harmless.
assert(file.startsWith(beneath));
file = file.substring(beneath.length);
return !_blacklistedFiles.any(file.endsWith) &&
!_blacklistedDirs.any(file.contains);
}).toList();
}
/// Returns a debug string for the package.
String toString() => '$name $version ($dir)';
}
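// Illustrative usage sketch, not part of the original library: loading a
// package from disk and exercising a few of the members above. The directory
// path is a stand-in and [sources] is assumed to come from the caller.
void _examplePackage(SourceRegistry sources) {
  var package = new Package.load(null, "/path/to/my_app", sources);
  print("${package.name} ${package.version}");
  print("README: ${package.readmePath}");
  for (var file in package.listFiles(beneath: "bin", recursive: false)) {
    print("bin file: ${package.relative(file)}");
  }
}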
/// This is the private base class of [PackageRef], [PackageID], and
/// [PackageDep].
///
/// It contains functionality and state that those classes share but is private
/// so that from outside of this library, there is no type relationship between
/// those three types.
class _PackageName {
_PackageName(this.name, this.source, this.description);
/// The name of the package being identified.
final String name;
/// The name of the [Source] used to look up this package given its
/// [description].
///
/// If this is a root package, this will be `null`.
final String source;
/// The metadata used by the package's [source] to identify and locate it.
///
/// It contains whatever [Source]-specific data it needs to be able to get
/// the package. For example, the description of a git sourced package might
/// be the URL "git://github.com/dart/uilib.git".
final description;
/// Whether this package is the root package.
bool get isRoot => source == null;
String toString() {
if (isRoot) return "$name (root)";
return "$name from $source";
}
/// Returns a [PackageRef] with this one's [name], [source], and
/// [description].
PackageRef toRef() => new PackageRef(name, source, description);
/// Returns a [PackageId] for this package with the given concrete version.
PackageId atVersion(Version version) =>
new PackageId(name, source, version, description);
/// Returns a [PackageDep] for this package with the given version constraint.
PackageDep withConstraint(VersionConstraint constraint) =>
new PackageDep(name, source, constraint, description);
}
/// A reference to a [Package], but not any particular version(s) of it.
class PackageRef extends _PackageName {
PackageRef(String name, String source, description)
: super(name, source, description);
int get hashCode => name.hashCode ^ source.hashCode;
bool operator ==(other) {
// TODO(rnystrom): We're assuming here that we don't need to delve into the
// description.
return other is PackageRef && other.name == name && other.source == source;
}
}
/// A reference to a specific version of a package.
///
/// A package ID contains enough information to correctly get the package.
///
/// Note that it's possible for multiple distinct package IDs to point to
/// different packages that have identical contents. For example, the same
/// package may be available from multiple sources. As far as Pub is concerned,
/// those packages are different.
class PackageId extends _PackageName {
/// The package's version.
final Version version;
PackageId(String name, String source, this.version, description)
: super(name, source, description);
/// Creates an ID for the given root package.
PackageId.root(Package package)
: version = package.version,
super(package.name, null, package.name);
int get hashCode => name.hashCode ^ source.hashCode ^ version.hashCode;
bool operator ==(other) {
// TODO(rnystrom): We're assuming here that we don't need to delve into the
// description.
return other is PackageId &&
other.name == name &&
other.source == source &&
other.version == version;
}
String toString() {
if (isRoot) return "$name $version (root)";
return "$name $version from $source";
}
}
/// A reference to a constrained range of versions of one package.
class PackageDep extends _PackageName {
/// The allowed package versions.
final VersionConstraint constraint;
PackageDep(String name, String source, this.constraint, description)
: super(name, source, description);
String toString() {
if (isRoot) return "$name $constraint (root)";
return "$name $constraint from $source ($description)";
}
int get hashCode => name.hashCode ^ source.hashCode;
bool operator ==(other) {
// TODO(rnystrom): We're assuming here that we don't need to delve into the
// description.
return other is PackageDep &&
other.name == name &&
other.source == source &&
other.constraint == constraint;
}
}
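// Illustrative sketch, not part of the original library: how the three name
// types relate through the conversion helpers on their shared base class.
// The "hosted" source name and the description are assumptions.
void _examplePackageNames() {
  var ref = new PackageRef("barback", "hosted", "barback");
  var id = ref.atVersion(new Version(0, 15, 0));
  var dep = ref.withConstraint(new VersionConstraint.parse(">=0.14.1 <0.16.0"));
  print(id); // "barback 0.15.0 from hosted"
  print(dep); // "barback >=0.14.1 <0.16.0 from hosted (barback)"
}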

View file

@ -1,120 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.package_graph;
import 'barback/transformer_cache.dart';
import 'entrypoint.dart';
import 'lock_file.dart';
import 'package.dart';
import 'source/cached.dart';
import 'utils.dart';
/// A holistic view of the entire transitive dependency graph for an entrypoint.
///
/// A package graph can be loaded using [Entrypoint.loadPackageGraph].
class PackageGraph {
/// The entrypoint.
final Entrypoint entrypoint;
/// The entrypoint's lockfile.
///
/// This describes the sources and resolved descriptions of everything in
/// [packages].
final LockFile lockFile;
/// The transitive dependencies of the entrypoint (including itself).
///
/// This may not include all transitive dependencies of the entrypoint if the
/// creator of the package graph knows only a subset of the packages are
/// relevant in the current context.
final Map<String, Package> packages;
/// A map of transitive dependencies for each package.
Map<String, Set<Package>> _transitiveDependencies;
/// The transformer cache, if it's been loaded.
TransformerCache _transformerCache;
PackageGraph(this.entrypoint, this.lockFile, this.packages);
/// Loads the transformer cache for this graph.
///
/// This may only be called if [entrypoint] represents a physical package.
/// This may modify the cache.
TransformerCache loadTransformerCache() {
if (_transformerCache == null) {
if (entrypoint.root.dir == null) {
throw new StateError(
"Can't load the transformer cache for virtual "
"entrypoint ${entrypoint.root.name}.");
}
_transformerCache = new TransformerCache.load(this);
}
return _transformerCache;
}
/// Returns all transitive dependencies of [package].
///
/// For the entrypoint this returns all packages in [packages], which includes
/// dev and override. For any other package, it ignores dev and override
/// dependencies.
Set<Package> transitiveDependencies(String package) {
if (package == entrypoint.root.name) return packages.values.toSet();
if (_transitiveDependencies == null) {
var closure = transitiveClosure(
mapMap(
packages,
value: (_, package) => package.dependencies.map((dep) => dep.name)));
_transitiveDependencies = mapMap(
closure,
value: (_, names) => names.map((name) => packages[name]).toSet());
}
return _transitiveDependencies[package];
}
/// Returns whether [package] is mutable.
///
/// A package is considered to be mutable if it or any of its dependencies
/// don't come from a cached source, since the user can change its contents
/// without modifying the pub cache. Information generated from mutable
/// packages is generally not safe to cache, since it may change frequently.
bool isPackageMutable(String package) {
var id = lockFile.packages[package];
if (id == null) return true;
var source = entrypoint.cache.sources[id.source];
if (source is! CachedSource) return true;
return transitiveDependencies(package).any((dep) {
var depId = lockFile.packages[dep.name];
// The entrypoint package doesn't have a lockfile entry. It's always
// mutable.
if (depId == null) return true;
return entrypoint.cache.sources[depId.source] is! CachedSource;
});
}
/// Returns whether [package] is static.
///
/// A package is considered to be static if it's not transformed and it came
/// from a cached source. Static packages don't need to be fully processed by
/// barback.
///
/// Note that a static package isn't the same as an immutable package (see
/// [isPackageMutable]).
bool isPackageStatic(String package) {
var id = lockFile.packages[package];
if (id == null) return false;
var source = entrypoint.cache.sources[id.source];
if (source is! CachedSource) return false;
return packages[package].pubspec.transformers.isEmpty;
}
}
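// Illustrative usage sketch, not part of the original library: walking a
// loaded graph to decide which packages are safe to cache, using the
// mutability check defined above. [graph] is assumed to come from
// [Entrypoint.loadPackageGraph].
void _exampleGraph(PackageGraph graph) {
  for (var name in graph.packages.keys) {
    var mutable = graph.isPackageMutable(name);
    print("$name: ${mutable ? 'mutable, do not cache' : 'safe to cache'}");
  }
}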

View file

@ -1,144 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.preprocess;
import 'package:pub_semver/pub_semver.dart';
import 'package:string_scanner/string_scanner.dart';
/// Runs a simple preprocessor over [input] to remove sections that are
/// incompatible with the available barback version.
///
/// [versions] are the available versions of each installed package, and
/// [sourceUrl] is a [String] or [Uri] indicating where [input] came from. It's
/// used for error reporting.
///
/// For the most part, the preprocessor leaves text in the source document
/// alone. However, it handles two types of lines specially. Lines that begin
/// with `//>` are uncommented by the preprocessor, and lines that begin with
/// `//#` are operators.
///
/// The preprocessor currently supports one top-level operator, "if":
///
/// //# if barback >=0.14.1
/// ...
/// //# else
/// ...
/// //# end
///
/// The "if" operator can check against any package installed in the current package. It can
/// check the version of the package, as above, or (if the version range is
/// omitted) whether the package exists at all. If the condition is true,
/// everything within the first block is included in the output and everything
/// within the second block is removed; otherwise, the first block is removed
/// and the second block is included. The `else` block is optional.
///
/// It's important that the preprocessor syntax also be valid Dart code, because
/// pub loads the source files before preprocessing and runs them against the
/// version of barback that was compiled into pub. This is why the `//>` syntax
/// exists: so that code can be hidden from the running pub process but still be
/// visible to the barback isolate. For example:
///
/// //# if barback >= 0.14.1
/// ClassMirror get aggregateClass => reflectClass(AggregateTransformer);
/// //# else
/// //> ClassMirror get aggregateClass => null;
/// //# end
String preprocess(String input, Map<String, Version> versions, sourceUrl) {
// Short-circuit if there are no preprocessor directives in the file.
if (!input.contains(new RegExp(r"^//[>#]", multiLine: true))) return input;
return new _Preprocessor(input, versions, sourceUrl).run();
}
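// Illustrative sketch, not part of the original library: what the directives
// described above produce for a small input when barback 0.14.1 is available.
// The version map and source URL are assumptions.
void _examplePreprocess() {
  var input = '//# if barback >=0.14.1\n'
      'var supported = true;\n'
      '//# else\n'
      '//> var supported = false;\n'
      '//# end\n';
  var output =
      preprocess(input, {'barback': new Version(0, 14, 1)}, 'example.dart');
  print(output); // "var supported = true;\n"
}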
/// The preprocessor class.
class _Preprocessor {
/// The scanner over the input string.
final StringScanner _scanner;
final Map<String, Version> _versions;
/// The buffer to which the output is written.
final _buffer = new StringBuffer();
_Preprocessor(String input, this._versions, sourceUrl)
: _scanner = new StringScanner(input, sourceUrl: sourceUrl);
/// Run the preprocessor and return the processed output.
String run() {
while (!_scanner.isDone) {
if (_scanner.scan(new RegExp(r"//#[ \t]*"))) {
_if();
} else {
_emitText();
}
}
_scanner.expectDone();
return _buffer.toString();
}
/// Emit lines of the input document directly until an operator is
/// encountered.
void _emitText() {
while (!_scanner.isDone && !_scanner.matches("//#")) {
if (_scanner.scan("//>")) {
if (!_scanner.matches("\n")) _scanner.expect(" ");
}
_scanner.scan(new RegExp(r"[^\n]*\n?"));
_buffer.write(_scanner.lastMatch[0]);
}
}
/// Move through lines of the input document without emitting them until an
/// operator is encountered.
void _ignoreText() {
while (!_scanner.isDone && !_scanner.matches("//#")) {
_scanner.scan(new RegExp(r"[^\n]*\n?"));
}
}
/// Handle an `if` operator.
void _if() {
_scanner.expect(new RegExp(r"if[ \t]+"), name: "if statement");
_scanner.expect(new RegExp(r"[a-zA-Z0-9_]+"), name: "package name");
var package = _scanner.lastMatch[0];
_scanner.scan(new RegExp(r"[ \t]*"));
var constraint = VersionConstraint.any;
if (_scanner.scan(new RegExp(r"[^\n]+"))) {
try {
constraint = new VersionConstraint.parse(_scanner.lastMatch[0]);
} on FormatException catch (error) {
_scanner.error("Invalid version constraint: ${error.message}");
}
}
_scanner.expect("\n");
var allowed =
_versions.containsKey(package) &&
constraint.allows(_versions[package]);
if (allowed) {
_emitText();
} else {
_ignoreText();
}
_scanner.expect("//#");
_scanner.scan(new RegExp(r"[ \t]*"));
if (_scanner.scan("else")) {
_scanner.expect("\n");
if (allowed) {
_ignoreText();
} else {
_emitText();
}
_scanner.expect("//#");
_scanner.scan(new RegExp(r"[ \t]*"));
}
_scanner.expect("end");
if (!_scanner.isDone) _scanner.expect("\n");
}
}

View file

@ -1,100 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.progress;
import 'dart:async';
import 'dart:io';
import 'log.dart' as log;
import 'utils.dart';
/// A live-updating progress indicator for long-running log entries.
class Progress {
/// The timer used to write "..." during a progress log.
Timer _timer;
/// The [Stopwatch] used to track how long a progress log has been running.
final _stopwatch = new Stopwatch();
/// The progress message as it's being incrementally appended.
///
/// When the progress is done, a single entry will be added to the log for it.
final String _message;
/// Gets the current progress time as a parenthesized, formatted string.
String get _time => "(${niceDuration(_stopwatch.elapsed)})";
/// Creates a new progress indicator.
///
/// If [fine] is passed, this will log progress messages on [log.Level.FINE]
/// as opposed to [log.Level.MESSAGE].
Progress(this._message, {bool fine: false}) {
_stopwatch.start();
var level = fine ? log.Level.FINE : log.Level.MESSAGE;
// The animation is only shown when it would be meaningful to a human.
// That means we're writing a visible message to a TTY at normal log levels
// with non-JSON output.
if (stdioType(stdout) != StdioType.TERMINAL ||
!log.verbosity.isLevelVisible(level) ||
log.json.enabled ||
fine ||
log.verbosity.isLevelVisible(log.Level.FINE)) {
// Not animating, so just log the start and wait until the task is
// completed.
log.write(level, "$_message...");
return;
}
_timer = new Timer.periodic(new Duration(milliseconds: 100), (_) {
_update();
});
_update();
}
/// Stops the progress indicator.
void stop() {
_stopwatch.stop();
// Always log the final time as [log.fine] because for the most part normal
// users don't care about the precise time information beyond what's shown
// in the animation.
log.fine("$_message finished $_time.");
// If we were animating, print one final update to show the user the final
// time.
if (_timer == null) return;
_timer.cancel();
_timer = null;
_update();
stdout.writeln();
}
/// Stop animating the progress indicator.
///
/// This will continue running the stopwatch so that the full time can be
/// logged in [stop].
void stopAnimating() {
if (_timer == null) return;
// Print a final message without a time indicator so that we don't leave a
// misleading half-complete time indicator on the console.
stdout.writeln(log.format("\r$_message..."));
_timer.cancel();
_timer = null;
}
/// Refreshes the progress line.
void _update() {
if (log.isMuted) return;
stdout.write(log.format("\r$_message... "));
// Show the time only once it gets noticeably long.
if (_stopwatch.elapsed.inSeconds > 0) stdout.write("${log.gray(_time)} ");
}
}

View file

@ -1,628 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.pubspec;
import 'package:path/path.dart' as path;
import 'package:pub_semver/pub_semver.dart';
import 'package:source_span/source_span.dart';
import 'package:yaml/yaml.dart';
import 'barback/transformer_config.dart';
import 'exceptions.dart';
import 'io.dart';
import 'package.dart';
import 'source_registry.dart';
import 'utils.dart';
/// The parsed contents of a pubspec file.
///
/// The fields of a pubspec are, for the most part, validated when they're first
/// accessed. This allows a partially-invalid pubspec to be used if only the
/// valid portions are relevant. To get a list of all errors in the pubspec, use
/// [allErrors].
class Pubspec {
// If a new lazily-initialized field is added to this class and the
// initialization can throw a [PubspecException], that error should also be
// exposed through [allErrors].
/// The registry of sources to use when parsing [dependencies] and
/// [devDependencies].
///
/// This will be null if this was created using [new Pubspec] or [new
/// Pubspec.empty].
final SourceRegistry _sources;
/// The location from which the pubspec was loaded.
///
/// This can be null if the pubspec was created in-memory or if its location
/// is unknown.
Uri get _location => fields.span.sourceUrl;
/// All pubspec fields.
///
/// This includes the fields from which other properties are derived.
final YamlMap fields;
/// The package's name.
String get name {
if (_name != null) return _name;
var name = fields['name'];
if (name == null) {
throw new PubspecException(
'Missing the required "name" field.',
fields.span);
} else if (name is! String) {
throw new PubspecException(
'"name" field must be a string.',
fields.nodes['name'].span);
}
_name = name;
return _name;
}
String _name;
/// The package's version.
Version get version {
if (_version != null) return _version;
var version = fields['version'];
if (version == null) {
_version = Version.none;
return _version;
}
var span = fields.nodes['version'].span;
if (version is num) {
var fixed = '$version.0';
if (version is int) {
fixed = '$fixed.0';
}
_error(
'"version" field must have three numeric components: major, '
'minor, and patch. Instead of "$version", consider "$fixed".',
span);
}
if (version is! String) {
_error('"version" field must be a string.', span);
}
_version =
_wrapFormatException('version number', span, () => new Version.parse(version));
return _version;
}
Version _version;
/// The additional packages this package depends on.
List<PackageDep> get dependencies {
if (_dependencies != null) return _dependencies;
_dependencies = _parseDependencies('dependencies');
_checkDependencyOverlap(_dependencies, _devDependencies);
return _dependencies;
}
List<PackageDep> _dependencies;
/// The packages this package depends on when it is the root package.
List<PackageDep> get devDependencies {
if (_devDependencies != null) return _devDependencies;
_devDependencies = _parseDependencies('dev_dependencies');
_checkDependencyOverlap(_dependencies, _devDependencies);
return _devDependencies;
}
List<PackageDep> _devDependencies;
/// The dependency constraints that this package overrides when it is the
/// root package.
///
/// Dependencies here will replace any dependency on a package with the same
/// name anywhere in the dependency graph.
List<PackageDep> get dependencyOverrides {
if (_dependencyOverrides != null) return _dependencyOverrides;
_dependencyOverrides = _parseDependencies('dependency_overrides');
return _dependencyOverrides;
}
List<PackageDep> _dependencyOverrides;
/// The configurations of the transformers to use for this package.
List<Set<TransformerConfig>> get transformers {
if (_transformers != null) return _transformers;
var transformers = fields['transformers'];
if (transformers == null) {
_transformers = [];
return _transformers;
}
if (transformers is! List) {
_error(
'"transformers" field must be a list.',
fields.nodes['transformers'].span);
}
var i = 0;
_transformers = transformers.nodes.map((phase) {
var phaseNodes = phase is YamlList ? phase.nodes : [phase];
return phaseNodes.map((transformerNode) {
var transformer = transformerNode.value;
if (transformer is! String && transformer is! Map) {
_error(
'A transformer must be a string or map.',
transformerNode.span);
}
var libraryNode;
var configurationNode;
if (transformer is String) {
libraryNode = transformerNode;
} else {
if (transformer.length != 1) {
_error(
'A transformer map must have a single key: the transformer ' 'identifier.',
transformerNode.span);
} else if (transformer.keys.single is! String) {
_error(
'A transformer identifier must be a string.',
transformer.nodes.keys.single.span);
}
libraryNode = transformer.nodes.keys.single;
configurationNode = transformer.nodes.values.single;
if (configurationNode is! YamlMap) {
_error(
"A transformer's configuration must be a map.",
configurationNode.span);
}
}
var config = _wrapSpanFormatException('transformer config', () {
return new TransformerConfig.parse(
libraryNode.value,
libraryNode.span,
configurationNode);
});
var package = config.id.package;
if (package != name &&
!config.id.isBuiltInTransformer &&
!dependencies.any((ref) => ref.name == package) &&
!devDependencies.any((ref) => ref.name == package) &&
!dependencyOverrides.any((ref) => ref.name == package)) {
_error('"$package" is not a dependency.', libraryNode.span);
}
return config;
}).toSet();
}).toList();
return _transformers;
}
List<Set<TransformerConfig>> _transformers;
/// The environment-related metadata.
PubspecEnvironment get environment {
if (_environment != null) return _environment;
var yaml = fields['environment'];
if (yaml == null) {
_environment = new PubspecEnvironment(VersionConstraint.any);
return _environment;
}
if (yaml is! Map) {
_error(
'"environment" field must be a map.',
fields.nodes['environment'].span);
}
_environment =
new PubspecEnvironment(_parseVersionConstraint(yaml.nodes['sdk']));
return _environment;
}
PubspecEnvironment _environment;
/// The URL of the server that the package should default to being published
/// to, "none" if the package should not be published, or `null` if it should
/// be published to the default server.
///
/// If this does return a URL string, it will be a valid parseable URL.
String get publishTo {
if (_parsedPublishTo) return _publishTo;
var publishTo = fields['publish_to'];
if (publishTo != null) {
var span = fields.nodes['publish_to'].span;
if (publishTo is! String) {
_error('"publish_to" field must be a string.', span);
}
// It must be "none" or a valid URL.
if (publishTo != "none") {
_wrapFormatException(
'"publish_to" field',
span,
() => Uri.parse(publishTo));
}
}
_parsedPublishTo = true;
_publishTo = publishTo;
return _publishTo;
}
bool _parsedPublishTo = false;
String _publishTo;
/// The executables that should be placed on the user's PATH when this
/// package is globally activated.
///
/// It is a map of strings to string. Each key is the name of the command
/// that will be placed on the user's PATH. The value is the name of the
/// .dart script (without extension) in the package's `bin` directory that
/// should be run for that command. Both key and value must be "simple"
/// strings: alphanumerics, underscores and hyphens only. If a value is
/// omitted, it is inferred to use the same name as the key.
Map<String, String> get executables {
if (_executables != null) return _executables;
_executables = {};
var yaml = fields['executables'];
if (yaml == null) return _executables;
if (yaml is! Map) {
_error(
'"executables" field must be a map.',
fields.nodes['executables'].span);
}
yaml.nodes.forEach((key, value) {
// Don't allow path separators or other stuff meaningful to the shell.
if (key.value is! String) {
_error('"executables" keys must be strings.', key.span);
}
final keyPattern = new RegExp(r"^[a-zA-Z0-9_-]+$");
if (!keyPattern.hasMatch(key.value)) {
_error(
'"executables" keys may only contain letters, '
'numbers, hyphens and underscores.',
key.span);
}
if (value.value == null) {
value = key;
} else if (value.value is! String) {
_error('"executables" values must be strings or null.', value.span);
}
final valuePattern = new RegExp(r"[/\\]");
if (valuePattern.hasMatch(value.value)) {
_error(
'"executables" values may not contain path separators.',
value.span);
}
_executables[key.value] = value.value;
});
return _executables;
}
Map<String, String> _executables;
/// Whether the package is private and cannot be published.
///
/// This is specified in the pubspec by setting "publish_to" to "none".
bool get isPrivate => publishTo == "none";
/// Whether or not the pubspec has no contents.
bool get isEmpty =>
name == null && version == Version.none && dependencies.isEmpty;
/// Loads the pubspec for a package located in [packageDir].
///
/// If [expectedName] is passed and the pubspec doesn't have a matching name
/// field, this will throw a [PubspecException].
factory Pubspec.load(String packageDir, SourceRegistry sources,
{String expectedName}) {
var pubspecPath = path.join(packageDir, 'pubspec.yaml');
var pubspecUri = path.toUri(pubspecPath);
if (!fileExists(pubspecPath)) {
throw new FileException(
'Could not find a file named "pubspec.yaml" in "$packageDir".',
pubspecPath);
}
return new Pubspec.parse(
readTextFile(pubspecPath),
sources,
expectedName: expectedName,
location: pubspecUri);
}
Pubspec(this._name, {Version version, Iterable<PackageDep> dependencies,
Iterable<PackageDep> devDependencies, Iterable<PackageDep> dependencyOverrides,
VersionConstraint sdkConstraint,
Iterable<Iterable<TransformerConfig>> transformers, Map fields,
SourceRegistry sources})
: _version = version,
_dependencies = dependencies == null ? null : dependencies.toList(),
_devDependencies = devDependencies == null ?
null :
devDependencies.toList(),
_dependencyOverrides = dependencyOverrides == null ?
null :
dependencyOverrides.toList(),
_environment = new PubspecEnvironment(sdkConstraint),
_transformers = transformers == null ?
[] :
transformers.map((phase) => phase.toSet()).toList(),
fields = fields == null ? new YamlMap() : new YamlMap.wrap(fields),
_sources = sources;
Pubspec.empty()
: _sources = null,
_name = null,
_version = Version.none,
_dependencies = <PackageDep>[],
_devDependencies = <PackageDep>[],
_environment = new PubspecEnvironment(),
_transformers = <Set<TransformerConfig>>[],
fields = new YamlMap();
/// Returns a Pubspec object for an already-parsed map representing its
/// contents.
///
/// If [expectedName] is passed and the pubspec doesn't have a matching name
/// field, this will throw a [PubspecException].
///
/// [location] is the location from which this pubspec was loaded.
Pubspec.fromMap(Map fields, this._sources, {String expectedName,
Uri location})
: fields = fields is YamlMap ?
fields :
new YamlMap.wrap(fields, sourceUrl: location) {
// If [expectedName] is passed, ensure that the actual 'name' field exists
// and matches the expectation.
if (expectedName == null) return;
if (name == expectedName) return;
throw new PubspecException(
'"name" field doesn\'t match expected name ' '"$expectedName".',
this.fields.nodes["name"].span);
}
/// Parses the pubspec whose text is [contents].
///
/// If the pubspec doesn't define a version for itself, it defaults to
/// [Version.none].
factory Pubspec.parse(String contents, SourceRegistry sources,
{String expectedName, Uri location}) {
var pubspecNode = loadYamlNode(contents, sourceUrl: location);
if (pubspecNode is YamlScalar && pubspecNode.value == null) {
pubspecNode = new YamlMap(sourceUrl: location);
} else if (pubspecNode is! YamlMap) {
throw new PubspecException(
'The pubspec must be a YAML mapping.',
pubspecNode.span);
}
return new Pubspec.fromMap(
pubspecNode,
sources,
expectedName: expectedName,
location: location);
}
/// Returns a list of most errors in this pubspec.
///
/// This will return at most one error for each field.
List<PubspecException> get allErrors {
var errors = <PubspecException>[];
_getError(fn()) {
try {
fn();
} on PubspecException catch (e) {
errors.add(e);
}
}
_getError(() => this.name);
_getError(() => this.version);
_getError(() => this.dependencies);
_getError(() => this.devDependencies);
_getError(() => this.transformers);
_getError(() => this.environment);
_getError(() => this.publishTo);
return errors;
}
/// Parses the dependency field named [field], and returns the corresponding
/// list of dependencies.
List<PackageDep> _parseDependencies(String field) {
var dependencies = <PackageDep>[];
var yaml = fields[field];
// Allow an empty dependencies key.
if (yaml == null) return dependencies;
if (yaml is! Map) {
_error('"$field" field must be a map.', fields.nodes[field].span);
}
var nonStringNode =
yaml.nodes.keys.firstWhere((e) => e.value is! String, orElse: () => null);
if (nonStringNode != null) {
_error('A dependency name must be a string.', nonStringNode.span);
}
yaml.nodes.forEach((nameNode, specNode) {
var name = nameNode.value;
var spec = specNode.value;
if (fields['name'] != null && name == this.name) {
_error('A package may not list itself as a dependency.', nameNode.span);
}
var descriptionNode;
var sourceName;
var versionConstraint = new VersionRange();
if (spec == null) {
descriptionNode = nameNode;
sourceName = _sources.defaultSource.name;
} else if (spec is String) {
descriptionNode = nameNode;
sourceName = _sources.defaultSource.name;
versionConstraint = _parseVersionConstraint(specNode);
} else if (spec is Map) {
// Don't write to the immutable YAML map.
spec = new Map.from(spec);
if (spec.containsKey('version')) {
spec.remove('version');
versionConstraint =
_parseVersionConstraint(specNode.nodes['version']);
}
var sourceNames = spec.keys.toList();
if (sourceNames.length > 1) {
_error('A dependency may only have one source.', specNode.span);
}
sourceName = sourceNames.single;
if (sourceName is! String) {
_error(
'A source name must be a string.',
specNode.nodes.keys.single.span);
}
descriptionNode = specNode.nodes[sourceName];
} else {
_error(
'A dependency specification must be a string or a mapping.',
specNode.span);
}
// Let the source validate the description.
var description =
_wrapFormatException('description', descriptionNode.span, () {
var pubspecPath;
if (_location != null && _isFileUri(_location)) {
pubspecPath = path.fromUri(_location);
}
return _sources[sourceName].parseDescription(
pubspecPath,
descriptionNode.value,
fromLockFile: false);
});
dependencies.add(
new PackageDep(name, sourceName, versionConstraint, description));
});
return dependencies;
}
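// For reference, the three dependency forms handled above (illustrative
// YAML, not from the original source):
//
//     dependencies:
//       foo:                    # null spec: any version, default source
//       bar: ">=1.0.0 <2.0.0"   # string spec: constraint, default source
//       baz:                    # map spec: one source key, optional version
//         hosted: baz
//         version: ">=0.5.0 <0.6.0"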
/// Parses [node] to a [VersionConstraint].
VersionConstraint _parseVersionConstraint(YamlNode node) {
if (node.value == null) return VersionConstraint.any;
if (node.value is! String) {
_error('A version constraint must be a string.', node.span);
}
return _wrapFormatException(
'version constraint',
node.span,
() => new VersionConstraint.parse(node.value));
}
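// For example (illustrative values): a node holding ">=1.2.3 <2.0.0" parses
// to a range that allows 1.5.0 but not 2.0.0, while a null node is treated
// as VersionConstraint.any by the code above.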
/// Makes sure the same package doesn't appear as both a regular and dev
/// dependency.
void _checkDependencyOverlap(List<PackageDep> dependencies,
List<PackageDep> devDependencies) {
if (dependencies == null) return;
if (devDependencies == null) return;
var dependencyNames = dependencies.map((dep) => dep.name).toSet();
var collisions =
dependencyNames.intersection(devDependencies.map((dep) => dep.name).toSet());
if (collisions.isEmpty) return;
var span = fields["dependencies"].nodes.keys.firstWhere(
(key) => collisions.contains(key.value)).span;
// TODO(nweiz): associate source range info with PackageDeps and use it
// here.
_error(
'${pluralize('Package', collisions.length)} '
'${toSentence(collisions.map((package) => '"$package"'))} cannot '
'appear in both "dependencies" and "dev_dependencies".',
span);
}
/// Runs [fn] and wraps any [FormatException] it throws in a
/// [PubspecException].
///
/// [description] should be a noun phrase that describes whatever's being
/// parsed or processed by [fn]. [span] should be the location of whatever's
/// being processed within the pubspec.
_wrapFormatException(String description, SourceSpan span, fn()) {
try {
return fn();
} on FormatException catch (e) {
_error('Invalid $description: ${e.message}', span);
}
}
_wrapSpanFormatException(String description, fn()) {
try {
return fn();
} on SourceSpanFormatException catch (e) {
_error('Invalid $description: ${e.message}', e.span);
}
}
/// Throws a [PubspecException] with the given message.
void _error(String message, SourceSpan span) {
var name;
try {
name = this.name;
} on PubspecException catch (_) {
// [name] is null.
}
throw new PubspecException(message, span);
}
}
/// The environment-related metadata in the pubspec.
///
/// Corresponds to the data under the "environment:" key in the pubspec.
class PubspecEnvironment {
/// The version constraint specifying which SDK versions this package works
/// with.
final VersionConstraint sdkVersion;
PubspecEnvironment([VersionConstraint sdk])
: sdkVersion = sdk != null ? sdk : VersionConstraint.any;
}
/// An exception thrown when parsing a pubspec.
///
/// These exceptions are often thrown lazily while accessing pubspec properties.
class PubspecException extends SourceSpanFormatException implements
ApplicationException {
PubspecException(String message, SourceSpan span)
: super(message, span);
}
/// Returns whether [uri] is a file URI.
///
/// This is slightly more complicated than just checking if the scheme is
/// 'file', since relative URIs also refer to the filesystem on the VM.
bool _isFileUri(Uri uri) => uri.scheme == 'file' || uri.scheme == '';

View file

@ -1,73 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Operations relative to the user's installed Dart SDK.
library pub.sdk;
import 'dart:io';
import 'package:path/path.dart' as path;
import 'package:pub_semver/pub_semver.dart';
import 'io.dart';
/// Gets the path to the root directory of the SDK.
///
/// When running from the actual built SDK, this will be the SDK that contains
/// the running Dart executable. When running from the repo, it will be the
/// "sdk" directory in the Dart repository itself.
final String rootDirectory =
runningFromSdk ? _rootDirectory : path.join(repoRoot, "sdk");
/// Gets the path to the root directory of the SDK, assuming that the currently
/// running Dart executable is within it.
final String _rootDirectory = path.dirname(path.dirname(Platform.executable));
/// The SDK's revision number formatted to be a semantic version.
///
/// This can be set so that the version solver tests can artificially select
/// different SDK versions.
Version version = _getVersion();
/// Determine the SDK's version number.
Version _getVersion() {
// Some of the pub integration tests require an SDK version number, but the
// tests on the bots are not run from a built SDK so this lets us avoid
// parsing the missing version file.
var sdkVersion = Platform.environment["_PUB_TEST_SDK_VERSION"];
if (sdkVersion != null) return new Version.parse(sdkVersion);
if (runningFromSdk) {
// Read the "version" file.
var version = readTextFile(path.join(_rootDirectory, "version")).trim();
return new Version.parse(version);
}
// When running from the repo, read the canonical VERSION file in tools/.
// This makes it possible to run pub without having built the SDK first.
var contents = readTextFile(path.join(repoRoot, "tools/VERSION"));
parseField(name) {
var pattern = new RegExp("^$name ([a-z0-9]+)", multiLine: true);
var match = pattern.firstMatch(contents);
return match[1];
}
var channel = parseField("CHANNEL");
var major = parseField("MAJOR");
var minor = parseField("MINOR");
var patch = parseField("PATCH");
var prerelease = parseField("PRERELEASE");
var prereleasePatch = parseField("PRERELEASE_PATCH");
var version = "$major.$minor.$patch";
if (channel == "be") {
// TODO(rnystrom): tools/utils.py includes the svn commit here. Should we?
version += "-edge";
} else if (channel == "dev") {
version += "-dev.$prerelease.$prereleasePatch";
}
return new Version.parse(version);
}
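// Worked example (hypothetical VERSION field values): CHANNEL "dev", MAJOR 1,
// MINOR 9, PATCH 0, PRERELEASE 2, PRERELEASE_PATCH 1 yield "1.9.0-dev.2.1";
// the same numbers on the "be" channel yield "1.9.0-edge".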

View file

@ -1,810 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// A back-tracking depth-first solver.
///
/// Attempts to find the best solution for a root package's transitive
/// dependency graph, where a "solution" is a set of concrete package versions.
/// A valid solution will select concrete versions for every package reached
/// from the root package's dependency graph, and each of those packages will
/// fit the version constraints placed on it.
///
/// The solver builds up a solution incrementally by traversing the dependency
/// graph starting at the root package. When it reaches a new package, it gets
/// the set of versions that meet the current constraint placed on it. It
/// *speculatively* selects one version from that set and adds it to the
/// current solution and then proceeds. If it fully traverses the dependency
/// graph, the solution is valid and it stops.
///
/// If it reaches an error because:
///
/// - A new dependency is placed on a package that's already been selected in
/// the solution and the selected version doesn't match the new constraint.
///
/// - There are no versions available that meet the constraint placed on a
/// package.
///
/// - etc.
///
/// then the current solution is invalid. It will then backtrack to the most
/// recent speculative version choice and try the next one. That becomes the
/// new in-progress solution and it tries to proceed from there. It will keep
/// doing this, traversing and then backtracking when it meets a failure until
/// a valid solution has been found or until all possible options for all
/// speculative choices have been exhausted.
library pub.solver.backtracking_solver;
import 'dart:async';
import 'dart:collection' show Queue;
import 'package:pub_semver/pub_semver.dart';
import '../barback.dart' as barback;
import '../exceptions.dart';
import '../lock_file.dart';
import '../log.dart' as log;
import '../package.dart';
import '../pubspec.dart';
import '../sdk.dart' as sdk;
import '../source_registry.dart';
import '../source/unknown.dart';
import '../utils.dart';
import 'dependency_queue.dart';
import 'version_queue.dart';
import 'version_solver.dart';
/// The top-level solver.
///
/// Keeps track of the current potential solution, and the other possible
/// versions for speculative package selections. Backtracks and advances to the
/// next potential solution in the case of a failure.
class BacktrackingSolver {
final SolveType type;
final SourceRegistry sources;
final Package root;
/// The lockfile that was present before solving.
final LockFile lockFile;
final PubspecCache cache;
/// The set of packages that are being explicitly upgraded.
///
/// The solver will only allow the very latest version for each of these
/// packages.
final _forceLatest = new Set<String>();
/// The set of packages whose dependency is being overridden by the root
/// package, keyed by the name of the package.
///
/// Any dependency on a package that appears in this map will be overridden
/// to use the one here.
final _overrides = new Map<String, PackageDep>();
/// The package versions currently selected by the solver, along with the
/// versions which are remaining to be tried.
///
/// Every time a package is encountered when traversing the dependency graph,
/// the solver must select a version for it, sometimes when multiple versions
/// are valid. This keeps track of which versions have been selected so far
/// and which remain to be tried.
///
/// Each entry in the list is a [VersionQueue], which is an ordered queue of
/// versions to try for a single package. It maintains the currently selected
/// version for that package. When a new dependency is encountered, a queue
/// of versions of that dependency is pushed onto the end of the list. A
/// queue is removed from the list once it's empty, indicating that none of
/// the versions provided a solution.
///
/// The solver tries versions in depth-first order, so only the last queue in
/// the list will have items removed from it. When a new constraint is placed
/// on an already-selected package, and that constraint doesn't match the
/// selected version, that will cause the current solution to fail and
/// trigger backtracking.
final _selected = <VersionQueue>[];
/// The number of solutions the solver has tried so far.
int get attemptedSolutions => _attemptedSolutions;
var _attemptedSolutions = 1;
BacktrackingSolver(SolveType type, SourceRegistry sources, this.root,
this.lockFile, List<String> useLatest)
: type = type,
sources = sources,
cache = new PubspecCache(type, sources) {
for (var package in useLatest) {
_forceLatest.add(package);
}
for (var override in root.dependencyOverrides) {
_overrides[override.name] = override;
}
}
/// Run the solver.
///
/// Completes with a list of specific package versions if successful or an
/// error if it failed to find a solution.
Future<SolveResult> solve() {
var stopwatch = new Stopwatch();
_logParameters();
// Sort the overrides by package name to make sure they're deterministic.
var overrides = _overrides.values.toList();
overrides.sort((a, b) => a.name.compareTo(b.name));
// TODO(nweiz): Use async/await here once
// https://github.com/dart-lang/async_await/issues/79 is fixed.
return new Future.sync(() {
stopwatch.start();
// Pre-cache the root package's known pubspec.
cache.cache(new PackageId.root(root), root.pubspec);
_validateSdkConstraint(root.pubspec);
return _traverseSolution();
}).then((packages) {
var pubspecs = new Map.fromIterable(
packages,
key: (id) => id.name,
value: (id) => cache.getCachedPubspec(id));
return Future.wait(
packages.map((id) => sources[id.source].resolveId(id))).then((packages) {
return new SolveResult.success(
sources,
root,
lockFile,
packages,
overrides,
pubspecs,
_getAvailableVersions(packages),
attemptedSolutions);
});
}).catchError((error) {
if (error is! SolveFailure) throw error;
// Wrap a failure in a result so we can attach some other data.
return new SolveResult.failure(
sources,
root,
lockFile,
overrides,
error,
attemptedSolutions);
}).whenComplete(() {
// Gather some solving metrics.
var buffer = new StringBuffer();
buffer.writeln('${runtimeType} took ${stopwatch.elapsed} seconds.');
buffer.writeln(cache.describeResults());
log.solver(buffer);
});
}
/// Generates a map containing all of the known available versions for each
/// package in [packages].
///
/// The version list may not always be complete. If the package is the root
/// package, or it's a package that we didn't unlock while solving because we
/// weren't trying to upgrade it, we will just know the current version.
Map<String, List<Version>> _getAvailableVersions(List<PackageId> packages) {
var availableVersions = new Map<String, List<Version>>();
for (var package in packages) {
var cached = cache.getCachedVersions(package.toRef());
var versions;
if (cached != null) {
versions = cached.map((id) => id.version).toList();
} else {
// If the version list was never requested, just use the one known
// version.
versions = [package.version];
}
availableVersions[package.name] = versions;
}
return availableVersions;
}
/// Adds [versions], which is the list of all allowed versions of a given
/// package, to the set of versions to consider for solutions.
///
/// The first item in the list will be the currently selected version of that
/// package. Subsequent items will be tried if the current selection fails.
/// Returns the first selected version.
PackageId select(VersionQueue versions) {
_selected.add(versions);
logSolve();
return versions.current;
}
/// Returns the currently selected id for the package [name] or `null` if
/// no concrete version has been selected for that package yet.
PackageId getSelected(String name) {
// Always prefer the root package.
if (root.name == name) return new PackageId.root(root);
// Look through the current selections.
for (var i = _selected.length - 1; i >= 0; i--) {
if (_selected[i].current.name == name) return _selected[i].current;
}
return null;
}
/// Gets the version of [package] currently locked in the lock file.
///
/// Returns `null` if it isn't in the lockfile (or has been unlocked).
PackageId getLocked(String package) {
if (type == SolveType.GET) return lockFile.packages[package];
// When downgrading, we don't want to force the latest versions of
// non-hosted packages, since they don't support multiple versions and thus
// can't be downgraded.
if (type == SolveType.DOWNGRADE) {
var locked = lockFile.packages[package];
if (locked != null && !sources[locked.source].hasMultipleVersions) {
return locked;
}
}
if (_forceLatest.isEmpty || _forceLatest.contains(package)) return null;
return lockFile.packages[package];
}
/// Traverses the root package's dependency graph using the current potential
/// solution.
///
/// If successful, completes to the solution. If not, backtracks to the most
/// recently selected version of a package and tries the next version of it.
/// If there are no more versions, continues to backtrack to previous
/// selections, and so on. If there is nothing left to backtrack to,
/// completes to the last failure that occurred.
Future<List<PackageId>> _traverseSolution() => resetStack(() {
return new Traverser(this).traverse().catchError((error) {
if (error is! SolveFailure) throw error;
return _backtrack(error).then((canTry) {
if (canTry) {
_attemptedSolutions++;
return _traverseSolution();
}
// All out of solutions, so fail.
throw error;
});
});
});
/// Backtracks from the current failed solution and determines the next
/// solution to try.
///
/// If possible, it will backjump based on the cause of the [failure] to
/// minimize backtracking. Otherwise, it will simply backtrack to the next
/// possible solution.
///
/// Returns `true` if there is a new solution to try.
Future<bool> _backtrack(SolveFailure failure) {
// Bail if there is nothing to backtrack to.
if (_selected.isEmpty) return new Future.value(false);
// Mark any packages that may have led to this failure so that we know to
// consider them when backtracking.
var dependers = _getTransitiveDependers(failure.package);
for (var selected in _selected) {
if (dependers.contains(selected.current.name)) {
selected.fail();
}
}
// Advance past the current version of the leaf-most package.
advanceVersion() {
_backjump(failure);
var previous = _selected.last.current;
return _selected.last.advance().then((success) {
if (success) {
logSolve();
return true;
}
logSolve('$previous is last version, backtracking');
// That package has no more versions, so pop it and try the next one.
_selected.removeLast();
if (_selected.isEmpty) return false;
// If we got here, the leafmost package was discarded so we need to
// advance the next one.
return advanceVersion();
});
}
return advanceVersion();
}
/// Walks the selected packages from most to least recent to determine which
/// ones can be ignored and jumped over by the backtracker.
///
/// The only packages we need to backtrack to are ones that led (possibly
/// indirectly) to the failure. Everything else can be skipped.
void _backjump(SolveFailure failure) {
for (var i = _selected.length - 1; i >= 0; i--) {
// Each queue will never be empty since it gets discarded by _backtrack()
// when that happens.
var selected = _selected[i].current;
// If the failure is a disjoint version range, then no possible versions
// for that package can match and there's no reason to try them. Instead,
// just backjump past it.
if (failure is DisjointConstraintException &&
selected.name == failure.package) {
logSolve("skipping past disjoint selected ${selected.name}");
continue;
}
if (_selected[i].hasFailed) {
logSolve('backjump to ${selected.name}');
_selected.removeRange(i + 1, _selected.length);
return;
}
}
// If we got here, we walked the entire list without finding a package that
// could lead to another solution, so discard everything. This will happen
// if every package that led to the failure has no other versions that it
// can try to select.
_selected.removeRange(1, _selected.length);
}
/// Gets the set of currently selected packages that depend on [dependency]
/// either directly or indirectly.
///
/// When backtracking, it's only useful to consider changing the version of
/// packages that have a dependency on the failed package that triggered
/// backtracking. This is used to determine those packages.
///
/// We calculate the full set up front before backtracking because during
/// backtracking, we will unselect packages and start to lose this
/// information in the middle of the process.
///
/// For example, consider dependencies A -> B -> C. We've selected A and B
/// then encounter a problem with C. We start backtracking. B has no more
/// versions so we discard it and keep backtracking to A. When we get there,
/// since we've unselected B, we no longer realize that A had a transitive
/// dependency on C. We would end up backjumping over A and failing.
///
/// Calculating the dependency set up front before we start backtracking
/// solves that.
Set<String> _getTransitiveDependers(String dependency) {
// Generate a reverse dependency graph. For each package, create edges to
// each package that depends on it.
var dependers = new Map<String, Set<String>>();
addDependencies(name, deps) {
dependers.putIfAbsent(name, () => new Set<String>());
for (var dep in deps) {
dependers.putIfAbsent(dep.name, () => new Set<String>()).add(name);
}
}
for (var i = 0; i < _selected.length; i++) {
var id = _selected[i].current;
var pubspec = cache.getCachedPubspec(id);
if (pubspec != null) addDependencies(id.name, pubspec.dependencies);
}
// Include the root package's dependencies.
addDependencies(root.name, root.immediateDependencies);
// Now walk the depending graph to see which packages transitively depend
// on [dependency].
var visited = new Set<String>();
walk(String package) {
// Don't get stuck in cycles.
if (visited.contains(package)) return;
visited.add(package);
dependers[package].forEach(walk);
}
walk(dependency);
return visited;
}
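// Worked example (hypothetical packages, ignoring the root's own edges):
// with selections A -> B -> C plus an unrelated D, the reverse graph is
// {A: {}, B: {A}, C: {B}, D: {}}, so _getTransitiveDependers("C") walks
// C -> B -> A and returns {C, B, A}, letting the backtracker skip D.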
/// Logs the initial parameters to the solver.
void _logParameters() {
var buffer = new StringBuffer();
buffer.writeln("Solving dependencies:");
for (var package in root.dependencies) {
buffer.write("- $package");
var locked = getLocked(package.name);
if (_forceLatest.contains(package.name)) {
buffer.write(" (use latest)");
} else if (locked != null) {
var version = locked.version;
buffer.write(" (locked to $version)");
}
buffer.writeln();
}
log.solver(buffer.toString().trim());
}
/// Logs [message] in the context of the current selected packages.
///
/// If [message] is omitted, just logs a description of leaf-most selection.
void logSolve([String message]) {
if (message == null) {
if (_selected.isEmpty) {
message = "* start at root";
} else {
message = "* select ${_selected.last.current}";
}
} else {
// Otherwise, indent it under the current selected package.
message = prefixLines(message);
}
// Indent for the previous selections.
var prefix = _selected.skip(1).map((_) => '| ').join();
log.solver(prefixLines(message, prefix: prefix));
}
}
/// Given the solver's current set of selected package versions, this tries to
/// traverse the dependency graph and see if a complete set of valid versions
/// has been chosen.
///
/// If it reaches a conflict, it fails and stops traversing. If it reaches a
/// package that isn't selected, it refines the solution by adding that
/// package's set of allowed versions to the solver and then selecting the
/// best one and continuing.
class Traverser {
final BacktrackingSolver _solver;
/// The queue of packages left to traverse.
///
/// We do a breadth-first traversal using an explicit queue just to avoid the
/// code complexity of a recursive asynchronous traversal.
final _packages = new Queue<PackageId>();
/// The packages we have already traversed.
///
/// Used to avoid traversing the same package multiple times, and to build
/// the complete solution results.
final _visited = new Set<PackageId>();
/// The dependencies visited so far in the traversal.
///
/// For each package name (the map key) we track the list of dependencies
/// that other packages have placed on it so that we can calculate the
/// complete constraint for shared dependencies.
final _dependencies = <String, List<Dependency>>{};
Traverser(this._solver);
/// Walks the dependency graph starting at the root package and validates
/// that each reached package has a valid version selected.
Future<List<PackageId>> traverse() {
// Start at the root.
_packages.add(new PackageId.root(_solver.root));
return _traversePackage();
}
/// Traverses the next package in the queue.
///
/// Completes to a list of package IDs if the traversal completed
/// successfully and found a solution. Completes to an error if the traversal
/// failed. Otherwise, recurses to the next package in the queue, etc.
Future<List<PackageId>> _traversePackage() {
if (_packages.isEmpty) {
// We traversed the whole graph. If we got here, we successfully found
// a solution.
return new Future<List<PackageId>>.value(_visited.toList());
}
var id = _packages.removeFirst();
// Don't visit the same package twice.
if (_visited.contains(id)) {
return _traversePackage();
}
_visited.add(id);
return _solver.cache.getPubspec(id).then((pubspec) {
_validateSdkConstraint(pubspec);
var deps = pubspec.dependencies.toSet();
if (id.isRoot) {
// Include dev dependencies of the root package.
deps.addAll(pubspec.devDependencies);
// Add all overrides. This ensures a dependency only present as an
// override is still included.
deps.addAll(_solver._overrides.values);
}
// Replace any overridden dependencies.
deps = deps.map((dep) {
var override = _solver._overrides[dep.name];
if (override != null) return override;
// Not overridden.
return dep;
}).toSet();
// Make sure the package doesn't have any bad dependencies.
for (var dep in deps) {
if (!dep.isRoot && _solver.sources[dep.source] is UnknownSource) {
throw new UnknownSourceException(
id.name,
[new Dependency(id.name, id.version, dep)]);
}
}
return _traverseDeps(id, new DependencyQueue(_solver, deps));
}).catchError((error) {
if (error is! PackageNotFoundException) throw error;
// We can only get here if the lockfile refers to a specific package
// version that doesn't exist (probably because it was yanked).
throw new NoVersionException(id.name, null, id.version, []);
});
}
/// Traverses the references that [depender] depends on, stored in [deps].
///
/// Destructively modifies [deps]. Completes to a list of packages if the
/// traversal is complete. Completes it to an error if a failure occurred.
/// Otherwise, recurses.
Future<List<PackageId>> _traverseDeps(PackageId depender,
DependencyQueue deps) {
// Move onto the next package if we've traversed all of these references.
if (deps.isEmpty) return _traversePackage();
return resetStack(() {
return deps.advance().then((dep) {
var dependency = new Dependency(depender.name, depender.version, dep);
return _registerDependency(dependency).then((_) {
if (dep.name == "barback") return _addImplicitDependencies();
});
}).then((_) => _traverseDeps(depender, deps));
});
}
/// Register [dependency]'s constraints on the package it depends on and
/// enqueues the package for processing if necessary.
Future _registerDependency(Dependency dependency) {
return new Future.sync(() {
_validateDependency(dependency);
var dep = dependency.dep;
var dependencies = _getDependencies(dep.name);
dependencies.add(dependency);
var constraint = _getConstraint(dep.name);
// See if it's possible for a package to match that constraint.
if (constraint.isEmpty) {
var constraints = dependencies.map(
(dep) => " ${dep.dep.constraint} from ${dep.depender}").join('\n');
_solver.logSolve('disjoint constraints on ${dep.name}:\n$constraints');
throw new DisjointConstraintException(dep.name, dependencies);
}
var selected = _validateSelected(dep, constraint);
if (selected != null) {
// The selected package version is good, so enqueue it to traverse
// into it.
_packages.add(selected);
return null;
}
// We haven't selected a version. Try all of the versions that match
// the constraints we currently have for this package.
var locked = _getValidLocked(dep.name);
return VersionQueue.create(locked, () {
return _getAllowedVersions(dep);
}).then((versions) => _packages.add(_solver.select(versions)));
});
}
/// Gets all versions of [dep] that match the current constraints placed on
/// it.
Future<Iterable<PackageId>> _getAllowedVersions(PackageDep dep) {
var constraint = _getConstraint(dep.name);
return _solver.cache.getVersions(dep.toRef()).then((versions) {
var allowed = versions.where((id) => constraint.allows(id.version));
if (allowed.isEmpty) {
_solver.logSolve('no versions for ${dep.name} match $constraint');
throw new NoVersionException(
dep.name,
null,
constraint,
_getDependencies(dep.name));
}
// If we're doing an upgrade on this package, only allow the latest
// version.
if (_solver._forceLatest.contains(dep.name)) allowed = [allowed.first];
// Remove the locked version, if any, since that was already handled.
var locked = _getValidLocked(dep.name);
if (locked != null) {
allowed = allowed.where((dep) => dep.version != locked.version);
}
return allowed;
}).catchError((error, stackTrace) {
if (error is PackageNotFoundException) {
// Show the user why the package was being requested.
throw new DependencyNotFoundException(
dep.name,
error,
_getDependencies(dep.name));
}
throw error;
});
}
/// Ensures that dependency [dep] from [depender] is consistent with the
/// other dependencies on the same package.
///
/// Throws a [SolveFailure] exception if not. Only validates sources and
/// descriptions, not the version.
void _validateDependency(Dependency dependency) {
var dep = dependency.dep;
// Make sure the dependencies agree on source and description.
var required = _getRequired(dep.name);
if (required == null) return;
// Make sure all of the existing sources match the new reference.
if (required.dep.source != dep.source) {
_solver.logSolve(
'source mismatch on ${dep.name}: ${required.dep.source} ' '!= ${dep.source}');
throw new SourceMismatchException(dep.name, [required, dependency]);
}
// Make sure all of the existing descriptions match the new reference.
var source = _solver.sources[dep.source];
if (!source.descriptionsEqual(dep.description, required.dep.description)) {
_solver.logSolve(
'description mismatch on ${dep.name}: '
'${required.dep.description} != ${dep.description}');
throw new DescriptionMismatchException(dep.name, [required, dependency]);
}
}
/// Validates the currently selected package against the new dependency that
/// [dep] and [constraint] place on it.
///
/// Returns `null` if there is no currently selected package, throws a
/// [SolveFailure] if the new reference does not allow the previously
/// selected version, or returns the selected package if successful.
PackageId _validateSelected(PackageDep dep, VersionConstraint constraint) {
var selected = _solver.getSelected(dep.name);
if (selected == null) return null;
// Make sure it meets the constraint.
if (!dep.constraint.allows(selected.version)) {
_solver.logSolve('selection $selected does not match $constraint');
throw new NoVersionException(
dep.name,
selected.version,
constraint,
_getDependencies(dep.name));
}
return selected;
}
/// Register pub's implicit dependencies.
///
/// Pub has an implicit version constraint on barback and various other
/// packages used in barback's plugin isolate.
Future _addImplicitDependencies() {
/// Ensure we only add the barback dependency once.
if (_getDependencies("barback").length != 1) return new Future.value();
return Future.wait(barback.pubConstraints.keys.map((depName) {
var constraint = barback.pubConstraints[depName];
_solver.logSolve(
'add implicit $constraint pub dependency on ' '$depName');
var override = _solver._overrides[depName];
// Use the same source and description as the dependency override if one
// exists. This is mainly used by the pkgbuild tests, which use dependency
// overrides for all repo packages.
var pubDep = override == null ?
new PackageDep(depName, "hosted", constraint, depName) :
override.withConstraint(constraint);
return _registerDependency(
new Dependency("pub itself", Version.none, pubDep));
}));
}
/// Gets the list of dependencies for package [name].
///
/// Creates an empty list if needed.
List<Dependency> _getDependencies(String name) {
return _dependencies.putIfAbsent(name, () => <Dependency>[]);
}
/// Gets a "required" reference to the package [name].
///
/// This is the first non-root dependency on that package. All dependencies
/// on a package must agree on source and description, except for references
/// to the root package. This will return a reference to that "canonical"
/// source and description, or `null` if there is no required reference yet.
///
/// This is required because you may have a circular dependency back onto the
/// root package. That second dependency won't be a root dependency and it's
/// *that* one that other dependencies need to agree on. In other words, you
/// can have a bunch of dependencies back onto the root package as long as
/// they all agree with each other.
Dependency _getRequired(String name) {
return _getDependencies(
name).firstWhere((dep) => !dep.dep.isRoot, orElse: () => null);
}
/// Gets the combined [VersionConstraint] currently being placed on package
/// [name].
VersionConstraint _getConstraint(String name) {
var constraint = _getDependencies(
name).map(
(dep) =>
dep.dep.constraint).fold(VersionConstraint.any, (a, b) => a.intersect(b));
return constraint;
}
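// For example (hypothetical constraints): if one depender requires
// ">=1.0.0 <3.0.0" and another requires ">=2.0.0", the combined constraint is
// ">=2.0.0 <3.0.0". If the intersection is empty, _registerDependency reports
// a DisjointConstraintException.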
/// Gets the package [name] that's currently contained in the lockfile if it
/// meets [constraint] and has the same source and description as other
/// references to that package.
///
/// Returns `null` otherwise.
PackageId _getValidLocked(String name) {
var package = _solver.getLocked(name);
if (package == null) return null;
var constraint = _getConstraint(name);
if (!constraint.allows(package.version)) {
_solver.logSolve('$package is locked but does not match $constraint');
return null;
} else {
_solver.logSolve('$package is locked');
}
var required = _getRequired(name);
if (required != null) {
if (package.source != required.dep.source) return null;
var source = _solver.sources[package.source];
if (!source.descriptionsEqual(
package.description,
required.dep.description)) return null;
}
return package;
}
}
/// Ensures that if [pubspec] has an SDK constraint, then it is compatible
/// with the current SDK.
///
/// Throws a [SolveFailure] if not.
void _validateSdkConstraint(Pubspec pubspec) {
if (pubspec.environment.sdkVersion.allows(sdk.version)) return;
throw new BadSdkVersionException(
pubspec.name,
'Package ${pubspec.name} requires SDK version '
'${pubspec.environment.sdkVersion} but the current SDK is ' '${sdk.version}.');
}

View file

@ -1,156 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.solver.dependency_queue;
import 'dart:async';
import 'dart:collection' show Queue;
import '../log.dart' as log;
import '../package.dart';
import 'backtracking_solver.dart';
/// A queue of one package's dependencies, ordered by how the solver should
/// traverse them.
///
/// It prefers locked versions so that they stay locked if possible. Then it
/// prefers a currently selected package so that it only has to consider a
/// single version.
///
/// After that, it orders the remaining packages by the number of versions they
/// have so that packages with fewer versions are solved first. (If two
/// packages have the same number of versions, they are sorted alphabetically
/// just to be deterministic.)
///
/// Critically, this queue will *not* sort the dependencies by number of
/// versions until actually needed. This ensures we don't do any network
/// requests until we actually need to. In particular, it means that solving
/// a package graph with an already up-to-date lockfile will do no network
/// requests.
class DependencyQueue {
final BacktrackingSolver _solver;
/// The dependencies for packages that have already been selected.
final Queue<PackageDep> _presorted;
/// The dependencies on the remaining packages.
///
/// This is lazily sorted right before the first item is requested.
final List<PackageDep> _remaining;
bool _isSorted = false;
/// Gets whether there are any dependencies left to iterate over.
bool get isEmpty => _presorted.isEmpty && _remaining.isEmpty;
/// The pending [Future] while the remaining dependencies are being sorted.
///
/// This will only be non-null while a sort is in progress.
Future _sortFuture;
factory DependencyQueue(BacktrackingSolver solver, Iterable<PackageDep> deps)
{
// Separate out the presorted ones.
var presorted = <PackageDep>[];
var remaining = <PackageDep>[];
for (var dep in deps) {
// Selected or locked packages come first.
if (solver.getSelected(dep.name) != null ||
solver.getLocked(dep.name) != null) {
presorted.add(dep);
} else {
remaining.add(dep);
}
}
// Sort the selected/locked packages by name just to ensure the solver is
// deterministic.
presorted.sort((a, b) => a.name.compareTo(b.name));
return new DependencyQueue._(
solver,
new Queue<PackageDep>.from(presorted),
remaining);
}
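// Illustrative ordering (hypothetical dependencies): given deps on
// "locked_pkg" (already in the lockfile), "big" (50 versions) and "small"
// (2 versions), the queue emits locked_pkg first, then small, then big:
// locked/selected packages come up front and the rest are lazily sorted by
// version count, then name.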
DependencyQueue._(this._solver, this._presorted, this._remaining);
/// Emits the next dependency in priority order.
///
/// It is an error to call this if [isEmpty] returns `true`. Note that this
/// function is *not* re-entrant. You should only advance after the previous
/// advance has completed.
Future<PackageDep> advance() {
// Emit the sorted ones first.
if (_presorted.isNotEmpty) {
return new Future.value(_presorted.removeFirst());
}
// Sort the remaining packages when we need the first one.
if (!_isSorted) return _sort().then((_) => _remaining.removeAt(0));
return new Future.value(_remaining.removeAt(0));
}
/// Sorts the unselected packages by number of versions and name.
Future _sort() {
// Sorting is not re-entrant.
assert(_sortFuture == null);
_sortFuture = Future.wait(_remaining.map(_getNumVersions)).then((versions) {
_sortFuture = null;
// Map deps to the number of versions they have.
var versionMap = new Map.fromIterables(_remaining, versions);
// Sort in best-first order to minimize backtracking.
_remaining.sort((a, b) {
// Traverse into packages with fewer versions since they will lead to
// less backtracking.
if (versionMap[a] != versionMap[b]) {
return versionMap[a].compareTo(versionMap[b]);
}
// Otherwise, just sort by name so that it's deterministic.
return a.name.compareTo(b.name);
});
_isSorted = true;
});
return _sortFuture;
}
/// Given a dependency, returns a future that completes to the number of
/// versions available for it.
Future<int> _getNumVersions(PackageDep dep) {
// There is only ever one version of the root package.
if (dep.isRoot) {
return new Future.value(1);
}
return _solver.cache.getVersions(dep.toRef()).then((versions) {
// If the root package depends on this one, ignore versions that don't
// match that constraint. Since the root package's dependency constraints
// won't change during solving, we can safely filter out packages that
// don't meet it.
for (var rootDep in _solver.root.immediateDependencies) {
if (rootDep.name == dep.name) {
versions =
versions.where((id) => rootDep.constraint.allows(id.version));
break;
}
}
return versions.length;
}).catchError((error, trace) {
// If it fails for any reason, just treat that as no versions. This
// will sort this reference higher so that we can traverse into it
// and report the error more properly.
log.solver("Could not get versions for $dep:\n$error\n\n$trace");
return 0;
});
}
}

View file

@ -1,248 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.solver.solve_report;
import 'package:pub_semver/pub_semver.dart';
import '../lock_file.dart';
import '../log.dart' as log;
import '../package.dart';
import '../source_registry.dart';
import '../utils.dart';
import 'version_solver.dart';
/// Unlike [SolveResult], which is the static data describing a resolution,
/// this class contains the mutable state used while generating the report
/// itself.
///
/// It's a report builder.
class SolveReport {
final SolveType _type;
final SourceRegistry _sources;
final Package _root;
final LockFile _previousLockFile;
final SolveResult _result;
/// The dependencies in [_result], keyed by package name.
final _dependencies = new Map<String, PackageId>();
final _output = new StringBuffer();
SolveReport(this._type, this._sources, this._root, this._previousLockFile,
this._result) {
// Fill the map so we can use it later.
for (var id in _result.packages) {
_dependencies[id.name] = id;
}
}
/// Displays a report of the results of the version resolution relative to
/// the previous lock file.
void show() {
_reportChanges();
_reportOverrides();
}
/// Displays a one-line message summarizing what changes were made (or would
/// be made) to the lockfile.
///
/// If [dryRun] is true, describes it in terms of what would be done.
void summarize({bool dryRun: false}) {
// Count how many dependencies actually changed.
var dependencies = _dependencies.keys.toSet();
dependencies.addAll(_previousLockFile.packages.keys);
dependencies.remove(_root.name);
var numChanged = dependencies.where((name) {
var oldId = _previousLockFile.packages[name];
var newId = _dependencies[name];
// Added or removed dependencies count.
if (oldId == null) return true;
if (newId == null) return true;
// The dependency existed before, so see if it was modified.
return !_sources.idsEqual(oldId, newId);
}).length;
if (dryRun) {
if (numChanged == 0) {
log.message("No dependencies would change.");
} else if (numChanged == 1) {
log.message("Would change $numChanged dependency.");
} else {
log.message("Would change $numChanged dependencies.");
}
} else {
if (numChanged == 0) {
if (_type == SolveType.GET) {
log.message("Got dependencies!");
} else {
log.message("No dependencies changed.");
}
} else if (numChanged == 1) {
log.message("Changed $numChanged dependency!");
} else {
log.message("Changed $numChanged dependencies!");
}
}
}
/// Displays a report of all of the previous and current dependencies and
/// how they have changed.
void _reportChanges() {
_output.clear();
// Show the new set of dependencies ordered by name.
var names = _result.packages.map((id) => id.name).toList();
names.remove(_root.name);
names.sort();
names.forEach(_reportPackage);
// Show any removed ones.
var removed = _previousLockFile.packages.keys.toSet();
removed.removeAll(names);
if (removed.isNotEmpty) {
_output.writeln("These packages are no longer being depended on:");
removed = removed.toList();
removed.sort();
removed.forEach((name) => _reportPackage(name, alwaysShow: true));
}
log.message(_output);
}
/// Displays a warning about the overrides currently in effect.
void _reportOverrides() {
_output.clear();
if (_result.overrides.isNotEmpty) {
_output.writeln("Warning: You are using these overridden dependencies:");
var overrides = _result.overrides.map((dep) => dep.name).toList();
overrides.sort((a, b) => a.compareTo(b));
overrides.forEach(
(name) => _reportPackage(name, alwaysShow: true, highlightOverride: false));
log.warning(_output);
}
}
/// Reports the results of the upgrade on the package named [name].
///
/// If [alwaysShow] is true, the package is reported even if it didn't change,
/// regardless of [_type]. If [highlightOverride] is true (or absent), writes
/// "(override)" next to overridden packages.
void _reportPackage(String name, {bool alwaysShow: false,
bool highlightOverride: true}) {
var newId = _dependencies[name];
var oldId = _previousLockFile.packages[name];
var id = newId != null ? newId : oldId;
var isOverridden =
_result.overrides.map((dep) => dep.name).contains(id.name);
// If the package was previously a dependency but the dependency has
// changed in some way.
var changed = false;
// If the dependency was added or removed.
var addedOrRemoved = false;
// Show a one-character "icon" describing the change. They are:
//
// ! The package is being overridden.
// - The package was removed.
// + The package was added.
// > The package was upgraded from a lower version.
// < The package was downgraded from a higher version.
// * Any other change between the old and new package.
var icon;
if (isOverridden) {
icon = log.magenta("! ");
} else if (newId == null) {
icon = log.red("- ");
addedOrRemoved = true;
} else if (oldId == null) {
icon = log.green("+ ");
addedOrRemoved = true;
} else if (!_sources.idDescriptionsEqual(oldId, newId)) {
icon = log.cyan("* ");
changed = true;
} else if (oldId.version < newId.version) {
icon = log.green("> ");
changed = true;
} else if (oldId.version > newId.version) {
icon = log.cyan("< ");
changed = true;
} else {
// Unchanged.
icon = " ";
}
if (_type == SolveType.GET && !(alwaysShow || changed || addedOrRemoved)) {
return;
}
_output.write(icon);
_output.write(log.bold(id.name));
_output.write(" ");
_writeId(id);
// If the package was upgraded, show what it was upgraded from.
if (changed) {
_output.write(" (was ");
_writeId(oldId);
_output.write(")");
}
// Highlight overridden packages.
if (isOverridden && highlightOverride) {
_output.write(" ${log.magenta('(overridden)')}");
}
// See if there are any newer versions of the package that we were
// unable to upgrade to.
if (newId != null && _type != SolveType.DOWNGRADE) {
var versions = _result.availableVersions[newId.name];
var newerStable = false;
var newerUnstable = false;
for (var version in versions) {
if (version > newId.version) {
if (version.isPreRelease) {
newerUnstable = true;
} else {
newerStable = true;
}
}
}
// If there are newer stable versions, only show those.
var message;
if (newerStable) {
message = "(${maxAll(versions, Version.prioritize)} available)";
} else if (newerUnstable) {
message = "(${maxAll(versions)} available)";
}
if (message != null) _output.write(" ${log.cyan(message)}");
}
_output.writeln();
}
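// Example of the rendered report lines (hypothetical packages and versions):
//
//     + args 0.12.1
//     > barback 0.15.2+2 (was 0.15.2) (0.15.3 available)
//     - unittest 0.11.0
//
// "+" marks an added package, ">" an upgrade (with the previous version and
// any newer version that couldn't be selected), and "-" a removed package.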
/// Writes a terse description of [id] (not including its name) to the output.
void _writeId(PackageId id) {
_output.write(id.version);
var source = _sources[id.source];
if (source != _sources.defaultSource) {
var description = source.formatDescription(_root.dir, id.description);
_output.write(" from ${id.source} $description");
}
}
}

View file

@ -1,110 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.solver.version_queue;
import 'dart:async';
import 'dart:collection' show Queue;
import '../package.dart';
/// A function that asynchronously returns a sequence of package IDs.
typedef Future<Iterable<PackageId>> PackageIdGenerator();
/// A prioritized, asynchronous queue of the possible versions that can be
/// selected for one package.
///
/// If there is a locked version, that comes first, followed by other versions
/// in descending order. This avoids requesting the list of versions until
/// needed (i.e. after any locked version has been consumed) to avoid unneeded
/// network requests.
class VersionQueue {
/// The set of allowed versions that match [_constraint].
///
/// If [_locked] is not `null`, this will initially be `null` until we
/// advance past the locked version.
Queue<PackageId> _allowed;
/// The callback that will generate the sequence of packages. This will be
/// called as lazily as possible.
final PackageIdGenerator _allowedGenerator;
/// The currently locked version of the package, or `null` if there is none,
/// or we have advanced past it.
PackageId _locked;
/// Gets the currently selected version.
PackageId get current {
if (_locked != null) return _locked;
return _allowed.first;
}
/// Whether the currently selected version has been responsible for a solve
/// failure, or depends on a package that has.
///
/// The solver uses this to determine which packages to backtrack to after a
/// failure occurs. Any selected package that did *not* cause the failure can
/// be skipped by the backtracker.
bool get hasFailed => _hasFailed;
bool _hasFailed = false;
/// Creates a new [VersionQueue] starting with the optional
/// [locked] package followed by the results of calling [allowedGenerator].
///
/// This is asynchronous so that [current] can always be accessed
/// synchronously. If there is no locked version, we need to get the list of
/// versions asynchronously before we can determine what the first one is.
static Future<VersionQueue> create(PackageId locked,
PackageIdGenerator allowedGenerator) {
var versions = new VersionQueue._(locked, allowedGenerator);
// If there is a locked version, it's the current one so it's synchronously
// available now.
if (locked != null) return new Future.value(versions);
// Otherwise, the current version needs to be calculated before we can
// return.
return versions._calculateAllowed().then((_) => versions);
}
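// Illustrative usage (hypothetical callback): the solver builds queues like
//
//     VersionQueue.create(lockedId, () => cache.getVersions(dep.toRef()))
//         .then((queue) => print(queue.current));
//
// When a locked version is present, the potentially expensive version listing
// is deferred until advance() moves past it.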
VersionQueue._(this._locked, this._allowedGenerator);
/// Tries to advance to the next possible version.
///
/// Returns `true` if it moved to a new version (which can be accessed from
/// [current]). Returns `false` if there are no more versions.
Future<bool> advance() {
// Any failure was the fault of the previous version, not necessarily the
// new one.
_hasFailed = false;
// If we have a locked version, consume it first.
if (_locked != null) {
// Advancing past the locked version, so need to load the others now
// so that [current] is available.
return _calculateAllowed().then((_) {
_locked = null;
return _allowed.isNotEmpty;
});
}
// Move to the next allowed version.
_allowed.removeFirst();
return new Future.value(_allowed.isNotEmpty);
}
/// Marks the selected version as being directly or indirectly responsible
/// for a solve failure.
void fail() {
_hasFailed = true;
}
/// Determines the list of allowed versions matching its constraint and places
/// them in [_allowed].
Future _calculateAllowed() {
return _allowedGenerator().then((allowed) {
_allowed = new Queue<PackageId>.from(allowed);
});
}
}

View file

@ -1,510 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.solver.version_solver;
import 'dart:async';
import "dart:convert";
import 'package:pub_semver/pub_semver.dart';
import 'package:stack_trace/stack_trace.dart';
import '../exceptions.dart';
import '../lock_file.dart';
import '../log.dart' as log;
import '../package.dart';
import '../pubspec.dart';
import '../source_registry.dart';
import '../utils.dart';
import 'backtracking_solver.dart';
import 'solve_report.dart';
/// Attempts to select the best concrete versions for all of the transitive
/// dependencies of [root] taking into account all of the [VersionConstraint]s
/// that those dependencies place on each other and the requirements imposed by
/// [lockFile].
///
/// If [useLatest] is given, then only the latest versions of the referenced
/// packages will be used. This is for forcing an upgrade to one or more
/// packages.
///
/// For an upgrade of all packages ([SolveType.UPGRADE] with an empty
/// [useLatest]), the contents of [lockFile] are ignored.
Future<SolveResult> resolveVersions(SolveType type, SourceRegistry sources,
Package root, {LockFile lockFile, List<String> useLatest}) {
if (lockFile == null) lockFile = new LockFile.empty();
if (useLatest == null) useLatest = [];
return log.progress('Resolving dependencies', () {
return new BacktrackingSolver(
type,
sources,
root,
lockFile,
useLatest).solve();
});
}
/// The result of a version resolution.
class SolveResult {
/// Whether the solver found a complete solution or failed.
bool get succeeded => error == null;
/// The list of concrete package versions that were selected for each package
/// reachable from the root, or `null` if the solver failed.
final List<PackageId> packages;
/// The dependency overrides that were used in the solution.
final List<PackageDep> overrides;
/// A map from package names to the pubspecs for the versions of those
/// packages that were installed, or `null` if the solver failed.
final Map<String, Pubspec> pubspecs;
/// The available versions of all selected packages from their source.
///
/// Will be empty if the solve failed. An entry here may not include the full
/// list of versions available if the given package was locked and did not
/// need to be unlocked during the solve.
final Map<String, List<Version>> availableVersions;
/// The error that prevented the solver from finding a solution or `null` if
/// it was successful.
final SolveFailure error;
/// The number of solutions that were attempted before either finding a
/// successful solution or exhausting all options.
///
/// In other words, one more than the number of times it had to backtrack
/// because it found an invalid solution.
final int attemptedSolutions;
final SourceRegistry _sources;
final Package _root;
final LockFile _previousLockFile;
/// Returns the names of all packages that were changed.
///
/// This includes packages that were added or removed.
Set<String> get changedPackages {
if (packages == null) return null;
var changed = packages.where(
(id) =>
!_sources.idsEqual(
_previousLockFile.packages[id.name],
id)).map((id) => id.name).toSet();
return changed.union(
_previousLockFile.packages.keys.where(
(package) => !availableVersions.containsKey(package)).toSet());
}
SolveResult.success(this._sources, this._root, this._previousLockFile,
this.packages, this.overrides, this.pubspecs, this.availableVersions,
this.attemptedSolutions)
: error = null;
SolveResult.failure(this._sources, this._root, this._previousLockFile,
this.overrides, this.error, this.attemptedSolutions)
: this.packages = null,
this.pubspecs = null,
this.availableVersions = {};
/// Displays a report of what changes were made to the lockfile.
///
/// [type] is the type of version resolution that was run.
void showReport(SolveType type) {
new SolveReport(type, _sources, _root, _previousLockFile, this).show();
}
/// Displays a one-line message summarizing what changes were made (or would
/// be made) to the lockfile.
///
/// [type] is the type of version resolution that was run.
void summarizeChanges(SolveType type, {bool dryRun: false}) {
new SolveReport(
type,
_sources,
_root,
_previousLockFile,
this).summarize(dryRun: dryRun);
}
String toString() {
if (!succeeded) {
return 'Failed to solve after $attemptedSolutions attempts:\n' '$error';
}
return 'Took $attemptedSolutions tries to resolve to\n'
'- ${packages.join("\n- ")}';
}
}
/// Maintains a cache of previously-requested data: pubspecs and version lists.
///
/// Used to avoid requesting the same pubspec from the server repeatedly.
class PubspecCache {
final SourceRegistry _sources;
/// The already-requested cached version lists.
final _versions = new Map<PackageRef, List<PackageId>>();
/// The errors from failed version list requests.
final _versionErrors = new Map<PackageRef, Pair<Object, Chain>>();
/// The already-requested cached pubspecs.
final _pubspecs = new Map<PackageId, Pubspec>();
/// The type of version resolution that was run.
final SolveType _type;
/// The number of times a version list was requested and it wasn't cached and
/// had to be requested from the source.
int _versionCacheMisses = 0;
/// The number of times a version list was requested and the cached version
/// was returned.
int _versionCacheHits = 0;
/// The number of times a pubspec was requested and it wasn't cached and had
/// to be requested from the source.
int _pubspecCacheMisses = 0;
/// The number of times a pubspec was requested and the cached version was
/// returned.
int _pubspecCacheHits = 0;
PubspecCache(this._type, this._sources);
/// Caches [pubspec] as the [Pubspec] for the package identified by [id].
void cache(PackageId id, Pubspec pubspec) {
_pubspecs[id] = pubspec;
}
/// Loads the pubspec for the package identified by [id].
Future<Pubspec> getPubspec(PackageId id) {
// Complete immediately if it's already cached.
if (_pubspecs.containsKey(id)) {
_pubspecCacheHits++;
return new Future<Pubspec>.value(_pubspecs[id]);
}
_pubspecCacheMisses++;
var source = _sources[id.source];
return source.describe(id).then((pubspec) {
_pubspecs[id] = pubspec;
return pubspec;
});
}
/// Returns the previously cached pubspec for the package identified by [id]
/// or returns `null` if not in the cache.
Pubspec getCachedPubspec(PackageId id) => _pubspecs[id];
/// Gets the list of versions for [package].
///
/// Packages are sorted in descending version order with all "stable"
/// versions (i.e. ones without a prerelease suffix) before pre-release
/// versions. This ensures that the solver prefers stable packages over
/// unstable ones.
Future<List<PackageId>> getVersions(PackageRef package) {
if (package.isRoot) {
throw new StateError("Cannot get versions for root package $package.");
}
// See if we have it cached.
var versions = _versions[package];
if (versions != null) {
_versionCacheHits++;
return new Future.value(versions);
}
// See if we cached a failure.
var error = _versionErrors[package];
if (error != null) {
_versionCacheHits++;
return new Future.error(error.first, error.last);
}
_versionCacheMisses++;
var source = _sources[package.source];
return source.getVersions(
package.name,
package.description).then((versions) {
// Sort by priority so we try preferred versions first.
versions.sort(
_type == SolveType.DOWNGRADE ? Version.antiprioritize : Version.prioritize);
var ids =
versions.reversed.map((version) => package.atVersion(version)).toList();
_versions[package] = ids;
return ids;
}).catchError((error, trace) {
// If an error occurs, cache that too. We only want to do one request
// for any given package, successful or not.
log.solver("Could not get versions for $package:\n$error\n\n$trace");
_versionErrors[package] = new Pair(error, new Chain.forTrace(trace));
throw error;
});
}
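// For example (hypothetical versions): for a GET or UPGRADE solve, available
// versions [1.0.0, 2.0.0-dev, 1.5.0] come back ordered 1.5.0, 1.0.0,
// 2.0.0-dev: stable versions (highest first) before pre-releases, as
// described above.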
/// Returns the previously cached list of versions for the package identified
/// by [package] or returns `null` if not in the cache.
List<PackageId> getCachedVersions(PackageRef package) => _versions[package];
/// Returns a user-friendly output string describing metrics of the solve.
String describeResults() {
var results = '''- Requested $_versionCacheMisses version lists
- Looked up $_versionCacheHits cached version lists
- Requested $_pubspecCacheMisses pubspecs
- Looked up $_pubspecCacheHits cached pubspecs
''';
// Uncomment this to dump the visited package graph to JSON.
//results += _debugDescribePackageGraph();
return results;
}
/// This dumps the set of packages that were looked at by the solver to a
/// JSON map whose format matches the map passed to [testResolve] in the
/// version solver unit tests.
///
/// If a real-world version solve is failing, this can be used to mirror that
/// data to build a regression test using mock packages.
String _debugDescribePackageGraph() {
var packages = {};
_pubspecs.forEach((id, pubspec) {
var deps = {};
packages["${id.name} ${id.version}"] = deps;
for (var dep in pubspec.dependencies) {
deps[dep.name] = dep.constraint.toString();
}
});
// Add in the packages that we know of but didn't need their pubspecs.
_versions.forEach((ref, versions) {
for (var id in versions) {
packages.putIfAbsent("${id.name} ${id.version}", () => {});
}
});
// TODO(rnystrom): Include dev dependencies and dependency overrides.
return JSON.encode(packages);
}
}
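// --- Illustrative sketch, not part of the original file --------------------
// Shows the preference order [PubspecCache.getVersions] produces: versions
// are sorted with [Version.prioritize] (pre-releases sort below all stable
// releases) and the list is then reversed, so the most preferred version
// comes first. The version numbers and the function name are made up.
void _demoVersionOrdering() {
  var versions = [
    new Version.parse("1.2.0-dev.1"),
    new Version.parse("1.1.0"),
    new Version.parse("1.2.0")];
  versions.sort(Version.prioritize);
  // The stable 1.2.0 is preferred over 1.2.0-dev.1 even though the
  // pre-release has higher semver precedence than 1.1.0.
  print(versions.reversed.toList()); // [1.2.0, 1.1.0, 1.2.0-dev.1]
}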
/// A reference from a depending package to a package that it depends on.
class Dependency {
/// The name of the package that has this dependency.
final String depender;
/// The version of the depender that has this dependency.
final Version dependerVersion;
/// The package being depended on.
final PackageDep dep;
/// Whether [depender] is a magic dependency (e.g. "pub itself" or "pub global
/// activate").
bool get isMagic => depender.contains(" ");
Dependency(this.depender, this.dependerVersion, this.dep);
String toString() => '$depender $dependerVersion -> $dep';
}
/// An enum for types of version resolution.
class SolveType {
/// As few changes to the lockfile as possible to be consistent with the
/// pubspec.
static const GET = const SolveType._("get");
/// Upgrade all packages or specific packages to the highest versions
/// possible, regardless of the lockfile.
static const UPGRADE = const SolveType._("upgrade");
/// Downgrade all packages or specific packages to the lowest versions
/// possible, regardless of the lockfile.
static const DOWNGRADE = const SolveType._("downgrade");
final String _name;
const SolveType._(this._name);
String toString() => _name;
}
/// Base class for all failures that can occur while trying to resolve versions.
abstract class SolveFailure implements ApplicationException {
/// The name of the package whose version could not be solved.
///
/// Will be `null` if the failure is not specific to one package.
final String package;
/// The known dependencies on [package] at the time of the failure.
///
/// Will be an empty collection if the failure is not specific to one package.
final Iterable<Dependency> dependencies;
String get message => toString();
/// A message describing the specific kind of solve failure.
String get _message {
throw new UnimplementedError("Must override _message or toString().");
}
SolveFailure(this.package, Iterable<Dependency> dependencies)
: dependencies = dependencies != null ? dependencies : <Dependency>[];
String toString() {
if (dependencies.isEmpty) return _message;
var buffer = new StringBuffer();
buffer.write("$_message:");
var sorted = dependencies.toList();
sorted.sort((a, b) => a.depender.compareTo(b.depender));
for (var dep in sorted) {
buffer.writeln();
buffer.write("- ${log.bold(dep.depender)}");
if (!dep.isMagic) buffer.write(" ${dep.dependerVersion}");
buffer.write(" ${_describeDependency(dep.dep)}");
}
return buffer.toString();
}
/// Describes a dependency's reference in the output message.
///
/// Override this to highlight which aspect of [dep] led to the failure.
String _describeDependency(PackageDep dep) =>
"depends on version ${dep.constraint}";
}
/// Exception thrown when the current SDK's version does not match a package's
/// constraint on it.
class BadSdkVersionException extends SolveFailure {
final String _message;
BadSdkVersionException(String package, String message)
: super(package, null),
_message = message;
}
/// Exception thrown when the [VersionConstraint] used to match a package is
/// valid (i.e. non-empty), but there are no available versions of the package
/// that fit that constraint.
class NoVersionException extends SolveFailure {
final VersionConstraint constraint;
/// The last selected version of the package that failed to meet the new
/// constraint.
///
/// This will be `null` when the failure occurred because there are no
/// versions of the package *at all* that match the constraint. It will be
/// non-`null` when a version was selected, but then the solver tightened a
/// constraint such that that version was no longer allowed.
final Version version;
NoVersionException(String package, this.version, this.constraint,
Iterable<Dependency> dependencies)
: super(package, dependencies);
String get _message {
if (version == null) {
return "Package $package has no versions that match $constraint derived "
"from";
}
return "Package $package $version does not match $constraint derived from";
}
}
// TODO(rnystrom): Report the list of depending packages and their constraints.
/// Exception thrown when the most recent version of [package] must be selected,
/// but doesn't match the [VersionConstraint] imposed on the package.
class CouldNotUpgradeException extends SolveFailure {
final VersionConstraint constraint;
final Version best;
CouldNotUpgradeException(String package, this.constraint, this.best)
: super(package, null);
String get _message =>
"The latest version of $package, $best, does not match $constraint.";
}
/// Exception thrown when the [VersionConstraint] used to match a package is
/// the empty set: in other words, multiple packages depend on it and have
/// conflicting constraints that have no overlap.
class DisjointConstraintException extends SolveFailure {
DisjointConstraintException(String package, Iterable<Dependency> dependencies)
: super(package, dependencies);
String get _message => "Incompatible version constraints on $package";
}
/// Exception thrown when two packages with the same name but different sources
/// are depended upon.
class SourceMismatchException extends SolveFailure {
String get _message => "Incompatible dependencies on $package";
SourceMismatchException(String package, Iterable<Dependency> dependencies)
: super(package, dependencies);
String _describeDependency(PackageDep dep) =>
"depends on it from source ${dep.source}";
}
/// Exception thrown when a dependency on an unknown source name is found.
class UnknownSourceException extends SolveFailure {
UnknownSourceException(String package, Iterable<Dependency> dependencies)
: super(package, dependencies);
String toString() {
var dep = dependencies.single;
return 'Package ${dep.depender} depends on ${dep.dep.name} from unknown '
'source "${dep.dep.source}".';
}
}
/// Exception thrown when two packages with the same name and source but
/// different descriptions are depended upon.
class DescriptionMismatchException extends SolveFailure {
String get _message => "Incompatible dependencies on $package";
DescriptionMismatchException(String package,
Iterable<Dependency> dependencies)
: super(package, dependencies);
String _describeDependency(PackageDep dep) {
// TODO(nweiz): Dump descriptions to YAML when that's supported.
return "depends on it with description ${JSON.encode(dep.description)}";
}
}
/// Exception thrown when a dependency could not be found in its source.
///
/// Unlike [PackageNotFoundException], this includes information about the
/// dependent packages requesting the missing one.
class DependencyNotFoundException extends SolveFailure {
final PackageNotFoundException _innerException;
String get _message => "${_innerException.message}\nDepended on by";
DependencyNotFoundException(String package, this._innerException,
Iterable<Dependency> dependencies)
: super(package, dependencies);
/// The failure isn't because of the version or description of the package,
/// it's the package itself that can't be found, so just show the name and no
/// descriptive details.
String _describeDependency(PackageDep dep) => "";
}
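// --- Illustrative sketch, not part of the original file --------------------
// What [SolveFailure.toString] renders for a typical [NoVersionException];
// the package names, versions, and constraints below are made up:
//
//   Package collection has no versions that match >=2.0.0 <3.0.0 derived from:
//   - myapp 1.0.0 depends on version >=2.0.0 <3.0.0
//   - widgets 0.3.1 depends on version >=2.1.0 <3.0.0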

View file

@ -1,187 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source;
import 'dart:async';
import 'package:pub_semver/pub_semver.dart';
import 'package.dart';
import 'pubspec.dart';
import 'system_cache.dart';
/// A source from which to get packages.
///
/// Each source has many packages that it looks up using [PackageId]s. Sources
/// that inherit this directly (currently just [PathSource]) are *uncached*
/// sources. They deliver a package directly to the package that depends on it.
///
/// Other sources are *cached* sources. These extend [CachedSource]. When a
/// package needs a dependency from a cached source, it is first installed in
/// the [SystemCache] and then acquired from there.
abstract class Source {
/// The name of the source.
///
/// Should be lower-case, suitable for use in a filename, and unique across
/// all sources.
String get name;
/// Whether this source can choose between multiple versions of the same
/// package during version solving.
///
/// Defaults to `false`.
final bool hasMultipleVersions = false;
/// Whether or not this source is the default source.
bool get isDefault => systemCache.sources.defaultSource == this;
/// The system cache with which this source is registered.
SystemCache get systemCache {
assert(_systemCache != null);
return _systemCache;
}
/// The system cache variable.
///
/// Set by [bind].
SystemCache _systemCache;
/// Records the system cache to which this source belongs.
///
/// This should only be called once for each source, by
/// [SystemCache.register]. It should not be overridden by base classes.
void bind(SystemCache systemCache) {
assert(_systemCache == null);
this._systemCache = systemCache;
}
/// Get the list of all versions that exist for the package described by
/// [description].
///
/// [name] is the expected name of the package.
///
/// Note that this does *not* require the packages to be downloaded locally,
/// which is the point. This is used during version resolution to determine
/// which package versions are available to be downloaded (or already
/// downloaded).
///
/// By default, this assumes that each description has a single version and
/// uses [describe] to get that version.
Future<List<Version>> getVersions(String name, description) {
var id = new PackageId(name, this.name, Version.none, description);
return describe(id).then((pubspec) => [pubspec.version]);
}
/// Loads the (possibly remote) pubspec for the package version identified by
/// [id].
///
/// This may be called for packages that have not yet been downloaded during
/// the version resolution process.
///
/// Sources should not override this. Instead, they implement [doDescribe].
Future<Pubspec> describe(PackageId id) {
if (id.isRoot) throw new ArgumentError("Cannot describe the root package.");
if (id.source != name) {
throw new ArgumentError("Package $id does not use source $name.");
}
// Delegate to the overridden one.
return doDescribe(id);
}
/// Loads the (possibly remote) pubspec for the package version identified by
/// [id].
///
/// This may be called for packages that have not yet been downloaded during
/// the version resolution process.
///
/// This method is effectively protected: subclasses must implement it, but
/// external code should not call this. Instead, call [describe].
Future<Pubspec> doDescribe(PackageId id);
/// Ensures [id] is available locally and creates a symlink at [symlink]
/// pointing to it.
Future get(PackageId id, String symlink);
/// Returns the directory where this package can (or could) be found locally.
///
/// If the source is cached, this will be a path in the system cache. In that
/// case, this will return a directory even if the package has not been
/// installed into the cache yet.
Future<String> getDirectory(PackageId id);
/// Gives the source a chance to interpret and validate the description for
/// a package coming from this source.
///
/// When a [Pubspec] or [LockFile] is parsed, it reads in the description for
/// each dependency. It is up to the dependency's [Source] to determine how
/// that should be interpreted. This will be called during parsing to validate
/// that the given [description] is well-formed according to this source, and
/// to give the source a chance to canonicalize the description.
///
/// [containingPath] is the path to the local file (pubspec or lockfile)
/// where this description appears. It may be `null` if the description is
/// coming from some in-memory source (such as pulling down a pubspec from
/// pub.dartlang.org).
///
/// It should return a (possibly modified) valid description, or throw a
/// [FormatException] if not valid.
///
/// [fromLockFile] is true when the description comes from a [LockFile], to
/// allow the source to use lockfile-specific descriptions via [resolveId].
dynamic parseDescription(String containingPath, description,
{bool fromLockFile: false});
/// When a [LockFile] is serialized, it uses this method to get the
/// [description] in the right format.
///
/// [containingPath] is the containing directory of the root package.
dynamic serializeDescription(String containingPath, description) {
return description;
}
/// When a package [description] is shown to the user, this is called to
/// convert it into a human-friendly form.
///
/// By default, it just converts the description to a string, but sources
/// may customize this. [containingPath] is the containing directory of the
/// root package.
String formatDescription(String containingPath, description) {
return description.toString();
}
/// Returns whether or not [description1] describes the same package as
/// [description2] for this source.
///
/// This method should be light-weight. It doesn't need to validate that
/// either package exists.
bool descriptionsEqual(description1, description2);
/// Resolves [id] to a possibly more precise [PackageId] that will uniquely identify
/// a package regardless of when the package is requested.
///
/// For some sources, [PackageId]s can point to different chunks of code at
/// different times. This takes such an [id] and returns a future that
/// completes to a [PackageId] that will uniquely specify a single chunk of
/// code forever.
///
/// For example, [GitSource] might take an [id] with description
/// `http://github.com/dart-lang/some-lib.git` and return an id with a
/// description that includes the current commit of the Git repository.
///
/// Pub calls this after getting a package, so the source can use the local
/// package to determine information about the resolved id.
///
/// The returned [PackageId] may have a description field that's invalid
/// according to [parseDescription], although it must still be serializable
/// to JSON and YAML. It must also be equal to [id] according to
/// [descriptionsEqual].
///
/// By default, this just returns [id].
Future<PackageId> resolveId(PackageId id) => new Future.value(id);
/// Returns the source's name.
String toString() => name;
}

View file

@ -1,75 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source.cached;
import 'dart:async';
import 'package:path/path.dart' as path;
import '../io.dart';
import '../package.dart';
import '../pubspec.dart';
import '../source.dart';
import '../utils.dart';
/// Base class for a [Source] that installs packages into pub's [SystemCache].
///
/// A source should be cached if it requires network access to retrieve
/// packages or the package needs to be "frozen" at the point in time that it's
/// installed. (For example, Git packages are cached because installing from
/// the same repo over time may yield different commits.)
abstract class CachedSource extends Source {
/// The root directory of this source's cache within the system cache.
///
/// This shouldn't be overridden by subclasses.
String get systemCacheRoot => path.join(systemCache.rootDir, name);
/// If [id] is already in the system cache, just loads it from there.
///
/// Otherwise, defers to the subclass.
Future<Pubspec> doDescribe(PackageId id) {
return getDirectory(id).then((packageDir) {
if (fileExists(path.join(packageDir, "pubspec.yaml"))) {
return new Pubspec.load(
packageDir,
systemCache.sources,
expectedName: id.name);
}
return describeUncached(id);
});
}
/// Loads the (possibly remote) pubspec for the package version identified by
/// [id].
///
/// This will only be called for packages that have not yet been installed in
/// the system cache.
Future<Pubspec> describeUncached(PackageId id);
Future get(PackageId id, String symlink) {
return downloadToSystemCache(id).then((pkg) {
createPackageSymlink(id.name, pkg.dir, symlink);
});
}
/// Determines if the package with [id] is already downloaded to the system
/// cache.
Future<bool> isInSystemCache(PackageId id) =>
getDirectory(id).then(dirExists);
/// Downloads the package identified by [id] to the system cache.
Future<Package> downloadToSystemCache(PackageId id);
/// Returns the [Package]s that have been downloaded to the system cache.
List<Package> getCachedPackages();
/// Reinstalls all packages that have been previously installed into the
/// system cache by this source.
///
/// Returns a [Pair] whose first element is the number of packages
/// successfully repaired and the second is the number of failures.
Future<Pair<int, int>> repairCachedPackages();
}

View file

@ -1,400 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source.git;
import 'dart:async';
import 'package:path/path.dart' as path;
import '../git.dart' as git;
import '../io.dart';
import '../log.dart' as log;
import '../package.dart';
import '../pubspec.dart';
import '../utils.dart';
import 'cached.dart';
/// A package source that gets packages from Git repos.
class GitSource extends CachedSource {
/// Given a valid git package description, returns the URL of the repository
/// it pulls from.
static String urlFromDescription(description) => description["url"];
final name = "git";
/// The paths to the canonical clones of repositories for which "git fetch"
/// has already been run during this run of pub.
final _updatedRepos = new Set<String>();
/// Given a Git repo that contains a pub package, gets the name of the pub
/// package.
Future<String> getPackageNameFromRepo(String repo) {
// Clone the repo to a temp directory.
return withTempDir((tempDir) {
return _clone(repo, tempDir, shallow: true).then((_) {
var pubspec = new Pubspec.load(tempDir, systemCache.sources);
return pubspec.name;
});
});
}
/// Since we don't have an easy way to read from a remote Git repo, this
/// just installs [id] into the system cache, then describes it from there.
Future<Pubspec> describeUncached(PackageId id) {
return downloadToSystemCache(id).then((package) => package.pubspec);
}
/// Clones a Git repo to the local filesystem.
///
/// The Git cache directory is a little idiosyncratic. At the top level, it
/// contains a directory for each commit of each repository, named `<package
/// name>-<commit hash>`. These are the canonical package directories that are
/// linked to from the `packages/` directory.
///
/// In addition, the Git system cache contains a subdirectory named `cache/`
/// which contains a directory for each separate repository URL, named
/// `<package name>-<url hash>`. These are used to check out the repository
/// itself; each of the commit-specific directories are clones of a directory
/// in `cache/`.
Future<Package> downloadToSystemCache(PackageId id) {
var revisionCachePath;
if (!git.isInstalled) {
fail(
"Cannot get ${id.name} from Git (${_getUrl(id)}).\n"
"Please ensure Git is correctly installed.");
}
ensureDir(path.join(systemCacheRoot, 'cache'));
return _ensureRevision(id).then((_) => getDirectory(id)).then((path) {
revisionCachePath = path;
if (entryExists(revisionCachePath)) return null;
return _clone(_repoCachePath(id), revisionCachePath, mirror: false);
}).then((_) {
var ref = _getEffectiveRef(id);
if (ref == 'HEAD') return null;
return _checkOut(revisionCachePath, ref);
}).then((_) {
return new Package.load(id.name, revisionCachePath, systemCache.sources);
});
}
/// Returns the path to the revision-specific cache of [id].
Future<String> getDirectory(PackageId id) {
return _ensureRevision(id).then((rev) {
var revisionCacheName = '${id.name}-$rev';
return path.join(systemCacheRoot, revisionCacheName);
});
}
/// Ensures [description] is a Git URL.
dynamic parseDescription(String containingPath, description,
{bool fromLockFile: false}) {
// TODO(rnystrom): Handle git URLs that are relative file paths (#8570).
// TODO(rnystrom): Now that this function can modify the description, it
// may as well canonicalize it to a map so that other code in the source
// can assume that.
// A single string is assumed to be a Git URL.
if (description is String) return description;
if (description is! Map || !description.containsKey('url')) {
throw new FormatException(
"The description must be a Git URL or a map " "with a 'url' key.");
}
var parsed = new Map.from(description);
parsed.remove('url');
parsed.remove('ref');
if (fromLockFile) parsed.remove('resolved-ref');
if (!parsed.isEmpty) {
var plural = parsed.length > 1;
var keys = parsed.keys.join(', ');
throw new FormatException("Invalid key${plural ? 's' : ''}: $keys.");
}
return description;
}
/// If [description] has a resolved ref, print it out in short-form.
///
/// This helps distinguish different git commits with the same pubspec
/// version.
String formatDescription(String containingPath, description) {
if (description is Map && description.containsKey('resolved-ref')) {
return "${description['url']} at "
"${description['resolved-ref'].substring(0, 6)}";
} else {
return super.formatDescription(containingPath, description);
}
}
/// Two Git descriptions are equal if both their URLs and their refs are
/// equal.
bool descriptionsEqual(description1, description2) {
// TODO(nweiz): Do we really want to throw an error if you have two
// dependencies on some repo, one of which specifies a ref and one of which
// doesn't? If not, how do we handle that case in the version solver?
if (_getUrl(description1) != _getUrl(description2)) return false;
if (_getRef(description1) != _getRef(description2)) return false;
if (description1 is Map &&
description1.containsKey('resolved-ref') &&
description2 is Map &&
description2.containsKey('resolved-ref')) {
return description1['resolved-ref'] == description2['resolved-ref'];
}
return true;
}
/// Attaches a specific commit to [id] to disambiguate it.
Future<PackageId> resolveId(PackageId id) {
return _ensureRevision(id).then((revision) {
var description = {
'url': _getUrl(id),
'ref': _getRef(id)
};
description['resolved-ref'] = revision;
return new PackageId(id.name, name, id.version, description);
});
}
List<Package> getCachedPackages() {
// TODO(keertip): Implement getCachedPackages().
throw new UnimplementedError(
"The git source doesn't support listing its cached packages yet.");
}
/// Resets all cached packages back to the pristine state of the Git
/// repository at the revision they are pinned to.
Future<Pair<int, int>> repairCachedPackages() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
var successes = 0;
var failures = 0;
var packages = listDir(systemCacheRoot).where(((entry) {
return dirExists(path.join(entry, ".git"));
})).map(((packageDir) {
return new Package.load(null, packageDir, systemCache.sources);
})).toList();
packages.sort(Package.orderByNameAndVersion);
var it0 = packages.iterator;
break0() {
completer0.complete(new Pair(successes, failures));
}
var trampoline0;
continue0() {
trampoline0 = null;
if (it0.moveNext()) {
var package = it0.current;
log.message(
"Resetting Git repository for "
"${log.bold(package.name)} ${package.version}...");
join1() {
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
}
catch0(error, stackTrace) {
try {
if (error is git.GitException) {
log.error(
"Failed to reset ${log.bold(package.name)} "
"${package.version}. Error:\n${error}");
log.fine(stackTrace);
failures++;
tryDeleteEntry(package.dir);
join1();
} else {
throw error;
}
} catch (error, stackTrace) {
completer0.completeError(error, stackTrace);
}
}
try {
new Future.value(
git.run(["clean", "-d", "--force", "-x"], workingDir: package.dir)).then((x0) {
trampoline0 = () {
trampoline0 = null;
try {
x0;
new Future.value(
git.run(["reset", "--hard", "HEAD"], workingDir: package.dir)).then((x1) {
trampoline0 = () {
trampoline0 = null;
try {
x1;
successes++;
join1();
} catch (e0, s0) {
catch0(e0, s0);
}
};
do trampoline0(); while (trampoline0 != null);
}, onError: catch0);
} catch (e1, s1) {
catch0(e1, s1);
}
};
do trampoline0(); while (trampoline0 != null);
}, onError: catch0);
} catch (e2, s2) {
catch0(e2, s2);
}
} else {
break0();
}
}
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
}
if (!dirExists(systemCacheRoot)) {
completer0.complete(new Pair(0, 0));
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Ensure that the canonical clone of the repository referred to by [id] (the
/// one in `<system cache>/git/cache`) exists and contains the revision
/// referred to by [id].
///
/// Returns a future that completes to the hash of the revision identified by
/// [id].
Future<String> _ensureRevision(PackageId id) {
return new Future.sync(() {
var path = _repoCachePath(id);
if (!entryExists(path)) {
return _clone(_getUrl(id), path, mirror: true).then((_) => _getRev(id));
}
// If [id] didn't come from a lockfile, it may be using a symbolic
// reference. We want to get the latest version of that reference.
var description = id.description;
if (description is! Map || !description.containsKey('resolved-ref')) {
return _updateRepoCache(id).then((_) => _getRev(id));
}
// If [id] did come from a lockfile, then we want to avoid running "git
// fetch" if possible to avoid networking time and errors. See if the
// revision exists in the repo cache before updating it.
return _getRev(id).catchError((error) {
if (error is! git.GitException) throw error;
return _updateRepoCache(id).then((_) => _getRev(id));
});
});
}
/// Runs "git fetch" in the canonical clone of the repository referred to by
/// [id].
///
/// This assumes that the canonical clone already exists.
Future _updateRepoCache(PackageId id) {
var path = _repoCachePath(id);
if (_updatedRepos.contains(path)) return new Future.value();
return git.run(["fetch"], workingDir: path).then((_) {
_updatedRepos.add(path);
});
}
/// Runs "git rev-list" in the canonical clone of the repository referred to
/// by [id] on the effective ref of [id].
///
/// This assumes that the canonical clone already exists.
Future<String> _getRev(PackageId id) {
return git.run(
["rev-list", "--max-count=1", _getEffectiveRef(id)],
workingDir: _repoCachePath(id)).then((result) => result.first);
}
/// Clones the repo at the URI [from] to the path [to] on the local
/// filesystem.
///
/// If [mirror] is true, creates a bare, mirrored clone. This doesn't check
/// out the working tree, but instead makes the repository a local mirror of
/// the remote repository. See the manpage for `git clone` for more
/// information.
///
/// If [shallow] is true, creates a shallow clone that contains no history
/// for the repository.
Future _clone(String from, String to, {bool mirror: false, bool shallow:
false}) {
return new Future.sync(() {
// Git on Windows does not seem to automatically create the destination
// directory.
ensureDir(to);
var args = ["clone", from, to];
if (mirror) args.insert(1, "--mirror");
if (shallow) args.insertAll(1, ["--depth", "1"]);
return git.run(args);
}).then((result) => null);
}
/// Checks out the reference [ref] in [repoPath].
Future _checkOut(String repoPath, String ref) {
return git.run(
["checkout", ref],
workingDir: repoPath).then((result) => null);
}
/// Returns the path to the canonical clone of the repository referred to by
/// [id] (the one in `<system cache>/git/cache`).
String _repoCachePath(PackageId id) {
var repoCacheName = '${id.name}-${sha1(_getUrl(id))}';
return path.join(systemCacheRoot, 'cache', repoCacheName);
}
/// Returns the repository URL for [id].
///
/// [description] may be a description or a [PackageId].
String _getUrl(description) {
description = _getDescription(description);
if (description is String) return description;
return description['url'];
}
/// Returns the commit ref that should be checked out for [description].
///
/// This differs from [_getRef] in that it doesn't just return the ref in
/// [description]. It will return a sensible default if that ref doesn't
/// exist, and it will respect the "resolved-ref" parameter set by
/// [resolveId].
///
/// [description] may be a description or a [PackageId].
String _getEffectiveRef(description) {
description = _getDescription(description);
if (description is Map && description.containsKey('resolved-ref')) {
return description['resolved-ref'];
}
var ref = _getRef(description);
return ref == null ? 'HEAD' : ref;
}
/// Returns the commit ref for [description], or null if none is given.
///
/// [description] may be a description or a [PackageId].
String _getRef(description) {
description = _getDescription(description);
if (description is String) return null;
return description['ref'];
}
/// Returns [description] if it's a description, or [PackageId.description] if
/// it's a [PackageId].
_getDescription(description) {
if (description is PackageId) return description.description;
return description;
}
}
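// --- Illustrative sketch, not part of the original file --------------------
// The continuation-passing body of [GitSource.repairCachedPackages] above is
// compiler output. Hand-written with native async/await (on an SDK that
// supports it), the same logic would read roughly like this. Only helpers
// already imported by this library are used; the function name is made up.
Future<Pair<int, int>> _repairCachedGitPackagesSketch(GitSource source) async {
  if (!dirExists(source.systemCacheRoot)) return new Pair(0, 0);
  var successes = 0;
  var failures = 0;
  var packages = listDir(source.systemCacheRoot)
      .where((entry) => dirExists(path.join(entry, ".git")))
      .map((packageDir) =>
          new Package.load(null, packageDir, source.systemCache.sources))
      .toList();
  packages.sort(Package.orderByNameAndVersion);
  for (var package in packages) {
    log.message("Resetting Git repository for "
        "${log.bold(package.name)} ${package.version}...");
    try {
      // Discard added and modified files, then reset to the pinned revision.
      await git.run(["clean", "-d", "--force", "-x"], workingDir: package.dir);
      await git.run(["reset", "--hard", "HEAD"], workingDir: package.dir);
      successes++;
    } on git.GitException catch (error, stackTrace) {
      log.error("Failed to reset ${log.bold(package.name)} "
          "${package.version}. Error:\n$error");
      log.fine(stackTrace);
      failures++;
      tryDeleteEntry(package.dir);
    }
  }
  return new Pair(successes, failures);
}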

View file

@ -1,450 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source.hosted;
import 'dart:async';
import 'dart:io' as io;
import "dart:convert";
import 'package:http/http.dart' as http;
import 'package:path/path.dart' as path;
import 'package:pub_semver/pub_semver.dart';
import '../exceptions.dart';
import '../http.dart';
import '../io.dart';
import '../log.dart' as log;
import '../package.dart';
import '../pubspec.dart';
import '../utils.dart';
import 'cached.dart';
/// A package source that gets packages from a package hosting site that uses
/// the same API as pub.dartlang.org.
class HostedSource extends CachedSource {
final name = "hosted";
final hasMultipleVersions = true;
/// Gets the default URL for the package server for hosted dependencies.
static String get defaultUrl {
var url = io.Platform.environment["PUB_HOSTED_URL"];
if (url != null) return url;
return "https://pub.dartlang.org";
}
/// Downloads a list of all versions of a package that are available from the
/// site.
Future<List<Version>> getVersions(String name, description) {
var url =
_makeUrl(description, (server, package) => "$server/api/packages/$package");
log.io("Get versions from $url.");
return httpClient.read(url, headers: PUB_API_HEADERS).then((body) {
var doc = JSON.decode(body);
return doc['versions'].map(
(version) => new Version.parse(version['version'])).toList();
}).catchError((ex, stackTrace) {
var parsed = _parseDescription(description);
_throwFriendlyError(ex, stackTrace, parsed.first, parsed.last);
});
}
/// Downloads and parses the pubspec for a specific version of a package that
/// is available from the site.
Future<Pubspec> describeUncached(PackageId id) {
// Request it from the server.
var url = _makeVersionUrl(
id,
(server, package, version) =>
"$server/api/packages/$package/versions/$version");
log.io("Describe package at $url.");
return httpClient.read(url, headers: PUB_API_HEADERS).then((version) {
version = JSON.decode(version);
// TODO(rnystrom): After this is pulled down, we could place it in
// a secondary cache of just pubspecs. This would let us have a
// persistent cache for pubspecs for packages that haven't actually
// been downloaded.
return new Pubspec.fromMap(
version['pubspec'],
systemCache.sources,
expectedName: id.name,
location: url);
}).catchError((ex, stackTrace) {
var parsed = _parseDescription(id.description);
_throwFriendlyError(ex, stackTrace, id.name, parsed.last);
});
}
/// Downloads the package identified by [id] to the system cache.
Future<Package> downloadToSystemCache(PackageId id) {
return isInSystemCache(id).then((inCache) {
// Already cached so don't download it.
if (inCache) return true;
var packageDir = _getDirectory(id);
ensureDir(path.dirname(packageDir));
var parsed = _parseDescription(id.description);
return _download(parsed.last, parsed.first, id.version, packageDir);
}).then((found) {
if (!found) fail('Package $id not found.');
return new Package.load(id.name, _getDirectory(id), systemCache.sources);
});
}
/// The system cache directory for the hosted source contains subdirectories
/// for each separate repository URL that's used on the system.
///
/// Each of these subdirectories then contains a subdirectory for each
/// package downloaded from that site.
Future<String> getDirectory(PackageId id) =>
new Future.value(_getDirectory(id));
String _getDirectory(PackageId id) {
var parsed = _parseDescription(id.description);
var dir = _urlToDirectory(parsed.last);
return path.join(systemCacheRoot, dir, "${parsed.first}-${id.version}");
}
String packageName(description) => _parseDescription(description).first;
bool descriptionsEqual(description1, description2) =>
_parseDescription(description1) == _parseDescription(description2);
/// Ensures that [description] is a valid hosted package description.
///
/// There are two valid formats. A plain string refers to a package with the
/// given name from the default host, while a map with keys "name" and "url"
/// refers to a package with the given name from the host at the given URL.
dynamic parseDescription(String containingPath, description,
{bool fromLockFile: false}) {
_parseDescription(description);
return description;
}
/// Re-downloads all packages that have been previously downloaded into the
/// system cache from any server.
Future<Pair<int, int>> repairCachedPackages() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
join0() {
var successes = 0;
var failures = 0;
var it0 = listDir(systemCacheRoot).iterator;
break0() {
completer0.complete(new Pair(successes, failures));
}
var trampoline0;
continue0() {
trampoline0 = null;
if (it0.moveNext()) {
var serverDir = it0.current;
var url = _directoryToUrl(path.basename(serverDir));
var packages =
_getCachedPackagesInDirectory(path.basename(serverDir));
packages.sort(Package.orderByNameAndVersion);
var it1 = packages.iterator;
break1() {
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
}
var trampoline1;
continue1() {
trampoline1 = null;
if (it1.moveNext()) {
var package = it1.current;
join1() {
trampoline1 = continue1;
do trampoline1(); while (trampoline1 != null);
}
catch0(error, stackTrace) {
try {
failures++;
var message =
"Failed to repair ${log.bold(package.name)} " "${package.version}";
join2() {
log.error("${message}. Error:\n${error}");
log.fine(stackTrace);
tryDeleteEntry(package.dir);
join1();
}
if (url != defaultUrl) {
message += " from ${url}";
join2();
} else {
join2();
}
} catch (error, stackTrace) {
completer0.completeError(error, stackTrace);
}
}
try {
new Future.value(
_download(url, package.name, package.version, package.dir)).then((x0) {
trampoline1 = () {
trampoline1 = null;
try {
x0;
successes++;
join1();
} catch (e0, s0) {
catch0(e0, s0);
}
};
do trampoline1(); while (trampoline1 != null);
}, onError: catch0);
} catch (e1, s1) {
catch0(e1, s1);
}
} else {
break1();
}
}
trampoline1 = continue1;
do trampoline1(); while (trampoline1 != null);
} else {
break0();
}
}
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
}
if (!dirExists(systemCacheRoot)) {
completer0.complete(new Pair(0, 0));
} else {
join0();
}
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
/// Gets all of the packages that have been downloaded into the system cache
/// from the default server.
List<Package> getCachedPackages() {
return _getCachedPackagesInDirectory(_urlToDirectory(defaultUrl));
}
/// Gets all of the packages that have been downloaded into the system cache
/// into [dir].
List<Package> _getCachedPackagesInDirectory(String dir) {
var cacheDir = path.join(systemCacheRoot, dir);
if (!dirExists(cacheDir)) return [];
return listDir(
cacheDir).map(
(entry) => new Package.load(null, entry, systemCache.sources)).toList();
}
/// Downloads package [package] at [version] from [server], and unpacks it
/// into [destPath].
Future<bool> _download(String server, String package, Version version,
String destPath) {
return new Future.sync(() {
var url = Uri.parse("$server/packages/$package/versions/$version.tar.gz");
log.io("Get package from $url.");
log.message('Downloading ${log.bold(package)} ${version}...');
// Download and extract the archive to a temp directory.
var tempDir = systemCache.createTempDir();
return httpClient.send(
new http.Request(
"GET",
url)).then((response) => response.stream).then((stream) {
return timeout(
extractTarGz(stream, tempDir),
HTTP_TIMEOUT,
url,
'downloading $url');
}).then((_) {
// Remove the existing directory if it exists. This will happen if
// we're forcing a download to repair the cache.
if (dirExists(destPath)) deleteEntry(destPath);
// Now that the get has succeeded, move it to the real location in the
// cache. This ensures that we don't leave half-busted ghost
// directories in the user's pub cache if a get fails.
renameDir(tempDir, destPath);
return true;
});
});
}
/// When an error occurs trying to read something about [package] from [url],
/// this tries to translate into a more user friendly error message.
///
/// Always throws an error, either the original one or a better one.
void _throwFriendlyError(error, StackTrace stackTrace, String package,
String url) {
if (error is PubHttpException && error.response.statusCode == 404) {
throw new PackageNotFoundException(
"Could not find package $package at $url.",
error,
stackTrace);
}
if (error is TimeoutException) {
fail(
"Timed out trying to find package $package at $url.",
error,
stackTrace);
}
if (error is io.SocketException) {
fail(
"Got socket error trying to find package $package at $url.",
error,
stackTrace);
}
// Otherwise re-throw the original exception.
throw error;
}
}
/// This is the modified hosted source used when pub get or upgrade are run
/// with "--offline".
///
/// This uses the system cache to get the list of available packages and does
/// no network access.
class OfflineHostedSource extends HostedSource {
/// Gets the list of all versions of [name] that are in the system cache.
Future<List<Version>> getVersions(String name, description) {
return newFuture(() {
var parsed = _parseDescription(description);
var server = parsed.last;
log.io(
"Finding versions of $name in " "$systemCacheRoot/${_urlToDirectory(server)}");
return _getCachedPackagesInDirectory(
_urlToDirectory(
server)).where(
(package) => package.name == name).map((package) => package.version).toList();
}).then((versions) {
// If there are no versions in the cache, report a clearer error.
if (versions.isEmpty) fail("Could not find package $name in cache.");
return versions;
});
}
Future<bool> _download(String server, String package, Version version,
String destPath) {
// Since HostedSource is cached, this will only be called for uncached
// packages.
throw new UnsupportedError("Cannot download packages when offline.");
}
Future<Pubspec> describeUncached(PackageId id) {
// [getVersions()] will only return packages that are already cached.
// [CachedSource] will only call [describeUncached()] on a package after
// it has failed to find it in the cache, so this code should not be
// reached.
throw new UnsupportedError("Cannot describe packages when offline.");
}
}
/// Given a URL, returns a "normalized" string to be used as a directory name
/// for packages downloaded from the server at that URL.
///
/// This normalization strips off the scheme (which is presumed to be HTTP or
/// HTTPS) and *sort of* URL-encodes it. I say "sort of" because it does it
/// incorrectly: it uses the character's *decimal* ASCII value instead of hex.
///
/// This could cause an ambiguity since some characters get encoded as three
/// digits and others two. It's possible for one to be a prefix of the other.
/// In practice, the set of characters that are encoded don't happen to have
/// any collisions, so the encoding is reversible.
///
/// This behavior is a bug, but is being preserved for compatibility.
String _urlToDirectory(String url) {
// Normalize all loopback URLs to "localhost".
url = url.replaceAllMapped(
new RegExp(r"^https?://(127\.0\.0\.1|\[::1\])?"),
(match) => match[1] == null ? '' : 'localhost');
return replace(
url,
new RegExp(r'[<>:"\\/|?*%]'),
(match) => '%${match[0].codeUnitAt(0)}');
}
/// Given a directory name in the system cache, returns the URL of the server
/// whose packages it contains.
///
/// See [_urlToDirectory] for details on the mapping. Note that because the
/// directory name does not preserve the scheme, this has to guess at it. It
/// chooses "http" for loopback URLs (mainly to support the pub tests) and
/// "https" for all others.
String _directoryToUrl(String url) {
// Decode the pseudo-URL-encoded characters.
var chars = '<>:"\\/|?*%';
for (var i = 0; i < chars.length; i++) {
var c = chars.substring(i, i + 1);
url = url.replaceAll("%${c.codeUnitAt(0)}", c);
}
// Figure out the scheme.
var scheme = "https";
// See if it's a loopback IP address.
if (isLoopback(url.replaceAll(new RegExp(":.*"), ""))) scheme = "http";
return "$scheme://$url";
}
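// --- Illustrative sketch, not part of the original file --------------------
// The "decimal ASCII" encoding described above in action. The URLs and the
// function name are made up.
void _demoUrlDirectoryMapping() {
  // ':' has decimal code unit 58, so it becomes "%58" rather than "%3A".
  print(_urlToDirectory("http://localhost:8080")); // localhost%588080
  // Loopback IP addresses are normalized to "localhost" before encoding.
  print(_urlToDirectory("http://127.0.0.1:8080")); // localhost%588080
  // The scheme isn't preserved, so decoding guesses: loopback hosts get
  // "http" and everything else gets "https".
  print(_directoryToUrl("localhost%588080")); // http://localhost:8080
  print(_directoryToUrl("pub.dartlang.org")); // https://pub.dartlang.org
}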
/// Parses [description] into its server and package name components, then
/// converts that to a Uri given [pattern].
///
/// Ensures the package name is properly URL encoded.
Uri _makeUrl(description, String pattern(String server, String package)) {
var parsed = _parseDescription(description);
var server = parsed.last;
var package = Uri.encodeComponent(parsed.first);
return Uri.parse(pattern(server, package));
}
/// Parses [id] into its server, package name, and version components, then
/// converts that to a Uri given [pattern].
///
/// Ensures the package name is properly URL encoded.
Uri _makeVersionUrl(PackageId id, String pattern(String server, String package,
String version)) {
var parsed = _parseDescription(id.description);
var server = parsed.last;
var package = Uri.encodeComponent(parsed.first);
var version = Uri.encodeComponent(id.version.toString());
return Uri.parse(pattern(server, package, version));
}
/// Parses the description for a package.
///
/// If the package parses correctly, this returns a (name, url) pair. If not,
/// this throws a descriptive FormatException.
Pair<String, String> _parseDescription(description) {
if (description is String) {
return new Pair<String, String>(description, HostedSource.defaultUrl);
}
if (description is! Map) {
throw new FormatException("The description must be a package name or map.");
}
if (!description.containsKey("name")) {
throw new FormatException("The description map must contain a 'name' key.");
}
var name = description["name"];
if (name is! String) {
throw new FormatException("The 'name' key must have a string value.");
}
var url = description["url"];
if (url == null) url = HostedSource.defaultUrl;
return new Pair<String, String>(name, url);
}

View file

@ -1,162 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source.path;
import 'dart:async';
import 'package:path/path.dart' as p;
import '../exceptions.dart';
import '../io.dart';
import '../package.dart';
import '../pubspec.dart';
import '../source.dart';
import '../utils.dart';
/// A package [Source] that gets packages from a given local file path.
class PathSource extends Source {
/// Returns a valid description for a reference to a package at [path].
static describePath(String path) {
return {
"path": path,
"relative": p.isRelative(path)
};
}
/// Given a valid path reference description, returns the file path it
/// describes.
///
/// This returned path may be relative or absolute and it is up to the caller
/// to know how to interpret a relative path.
static String pathFromDescription(description) => description["path"];
final name = 'path';
Future<Pubspec> doDescribe(PackageId id) {
return new Future.sync(() {
var dir = _validatePath(id.name, id.description);
return new Pubspec.load(dir, systemCache.sources, expectedName: id.name);
});
}
bool descriptionsEqual(description1, description2) {
// Compare real paths after normalizing and resolving symlinks.
var path1 = canonicalize(description1["path"]);
var path2 = canonicalize(description2["path"]);
return path1 == path2;
}
Future get(PackageId id, String symlink) {
return new Future.sync(() {
var dir = _validatePath(id.name, id.description);
createPackageSymlink(
id.name,
dir,
symlink,
relative: id.description["relative"]);
});
}
Future<String> getDirectory(PackageId id) =>
newFuture(() => _validatePath(id.name, id.description));
/// Parses a path dependency.
///
/// This takes in a path string and returns a map. The "path" key will be the
/// original path but resolved relative to the containing path. The
/// "relative" key will be `true` if the original path was relative.
///
/// A path coming from a pubspec is a simple string. From a lock file, it's
/// an expanded {"path": ..., "relative": ...} map.
dynamic parseDescription(String containingPath, description,
{bool fromLockFile: false}) {
if (fromLockFile) {
if (description is! Map) {
throw new FormatException("The description must be a map.");
}
if (description["path"] is! String) {
throw new FormatException(
"The 'path' field of the description must " "be a string.");
}
if (description["relative"] is! bool) {
throw new FormatException(
"The 'relative' field of the description " "must be a boolean.");
}
return description;
}
if (description is! String) {
throw new FormatException("The description must be a path string.");
}
// Resolve the path relative to the containing file path, and remember
// whether the original path was relative or absolute.
var isRelative = p.isRelative(description);
if (isRelative) {
// Relative paths coming from pubspecs that are not on the local file
// system aren't allowed. This can happen if a hosted or git dependency
// has a path dependency.
if (containingPath == null) {
throw new FormatException(
'"$description" is a relative path, but this ' 'isn\'t a local pubspec.');
}
description = p.normalize(p.join(p.dirname(containingPath), description));
}
return {
"path": description,
"relative": isRelative
};
}
/// Serializes path dependency's [description].
///
/// For descriptions where the `relative` attribute is `true`, tries to make
/// `path` relative to the specified [containingPath].
dynamic serializeDescription(String containingPath, description) {
if (description["relative"]) {
return {
"path": p.relative(description['path'], from: containingPath),
"relative": true
};
}
return description;
}
/// Converts a parsed relative path to its original relative form.
String formatDescription(String containingPath, description) {
var sourcePath = description["path"];
if (description["relative"]) {
sourcePath = p.relative(description['path'], from: containingPath);
}
return sourcePath;
}
/// Ensures that [description] is a valid path description and returns a
/// normalized path to the package.
///
/// It must be a map, with a "path" key containing a path that points to an
/// existing directory. Throws an [ApplicationException] if the path is
/// invalid.
String _validatePath(String name, description) {
var dir = description["path"];
if (dirExists(dir)) return dir;
if (fileExists(dir)) {
fail(
'Path dependency for package $name must refer to a directory, '
'not a file. Was "$dir".');
}
throw new PackageNotFoundException(
'Could not find package $name at "$dir".');
}
}
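// --- Illustrative sketch, not part of the original file --------------------
// How [PathSource.parseDescription] resolves a description against the
// pubspec that declares it. The paths below are made up and POSIX-style:
//
//   var source = new PathSource();
//   source.parseDescription("/home/me/app/pubspec.yaml", "../my_lib");
//   // => {"path": "/home/me/my_lib", "relative": true}
//   source.parseDescription("/home/me/app/pubspec.yaml", "/opt/my_lib");
//   // => {"path": "/opt/my_lib", "relative": false}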

View file

@ -1,48 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source.unknown;
import 'dart:async';
import '../package.dart';
import '../pubspec.dart';
import '../source.dart';
/// A [Null Object] that represents a source not recognized by pub.
///
/// It provides some default behavior so that pub can work with sources it
/// doesn't recognize.
///
/// [null object]: http://en.wikipedia.org/wiki/Null_Object_pattern
class UnknownSource extends Source {
final String name;
UnknownSource(this.name);
/// Two unknown sources are the same if their names are the same.
bool operator ==(other) => other is UnknownSource && other.name == name;
int get hashCode => name.hashCode;
Future<Pubspec> doDescribe(PackageId id) =>
throw new UnsupportedError(
"Cannot describe a package from unknown source '$name'.");
Future get(PackageId id, String symlink) =>
throw new UnsupportedError("Cannot get an unknown source '$name'.");
/// Returns the directory where this package can be found locally.
Future<String> getDirectory(PackageId id) =>
throw new UnsupportedError(
"Cannot find a package from an unknown source '$name'.");
bool descriptionsEqual(description1, description2) =>
description1 == description2;
/// Unknown sources do no validation.
dynamic parseDescription(String containingPath, description,
{bool fromLockFile: false}) =>
description;
}

View file

@ -1,81 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source_registry;
import 'dart:collection';
import 'package.dart';
import 'source.dart';
import 'source/unknown.dart';
/// A class that keeps track of [Source]s used for getting packages.
class SourceRegistry extends IterableBase<Source> {
final _sources = new Map<String, Source>();
Source _default;
/// Returns the default source, which is used when no source is specified.
Source get defaultSource => _default;
/// Iterates over the registered sources in name order.
Iterator<Source> get iterator {
var sources = _sources.values.toList();
sources.sort((a, b) => a.name.compareTo(b.name));
return sources.iterator;
}
/// Returns whether [id1] and [id2] refer to the same package, including
/// validating that their descriptions are equivalent.
bool idsEqual(PackageId id1, PackageId id2) {
if (id1 != id2) return false;
if (id1 == null && id2 == null) return true;
return idDescriptionsEqual(id1, id2);
}
/// Returns whether [id1] and [id2] have the same source and description.
///
/// This doesn't check whether the name or versions are equal.
bool idDescriptionsEqual(PackageId id1, PackageId id2) {
if (id1.source != id2.source) return false;
return this[id1.source].descriptionsEqual(id1.description, id2.description);
}
/// Sets the default source.
///
/// This takes a string, which must be the name of a registered source.
void setDefault(String name) {
if (!_sources.containsKey(name)) {
throw new StateError('Default source $name is not in the registry');
}
_default = _sources[name];
}
/// Registers a new source.
///
/// This source may not have the same name as a source that's already been
/// registered.
void register(Source source) {
if (_sources.containsKey(source.name)) {
throw new StateError(
'Source registry already has a source named ' '${source.name}');
}
_sources[source.name] = source;
}
/// Returns the source named [name].
///
/// Returns an [UnknownSource] if no source with that name has been
/// registered. If [name] is null, returns the default source.
Source operator [](String name) {
if (name == null) {
if (defaultSource != null) return defaultSource;
throw new StateError('No default source has been registered');
}
if (_sources.containsKey(name)) return _sources[name];
return new UnknownSource(name);
}
}

View file

@ -1,108 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.system_cache;
import 'dart:async';
import 'dart:io';
import 'package:path/path.dart' as path;
import 'io.dart';
import 'io.dart' as io show createTempDir;
import 'log.dart' as log;
import 'package.dart';
import 'source/cached.dart';
import 'source/git.dart';
import 'source/hosted.dart';
import 'source/path.dart';
import 'source.dart';
import 'source_registry.dart';
/// The system-wide cache of downloaded packages.
///
/// This cache contains all packages that are downloaded from the internet.
/// Packages that are available locally (e.g. path dependencies) don't use this
/// cache.
class SystemCache {
/// The root directory where this package cache is located.
final String rootDir;
String get tempDir => path.join(rootDir, '_temp');
/// The sources from which to get packages.
final sources = new SourceRegistry();
static String defaultDir = (() {
if (Platform.environment.containsKey('PUB_CACHE')) {
return Platform.environment['PUB_CACHE'];
} else if (Platform.operatingSystem == 'windows') {
var appData = Platform.environment['APPDATA'];
return path.join(appData, 'Pub', 'Cache');
} else {
return '${Platform.environment['HOME']}/.pub-cache';
}
})();
/// Creates a new package cache which is backed by the given directory on the
/// user's file system.
SystemCache([String rootDir])
: rootDir = rootDir == null ? SystemCache.defaultDir : rootDir;
/// Creates a system cache and registers the standard set of sources.
///
/// If [isOffline] is `true`, then the offline hosted source will be used.
/// Defaults to `false`.
factory SystemCache.withSources({String rootDir, bool isOffline: false}) {
var cache = new SystemCache(rootDir);
cache.register(new GitSource());
if (isOffline) {
cache.register(new OfflineHostedSource());
} else {
cache.register(new HostedSource());
}
cache.register(new PathSource());
cache.sources.setDefault('hosted');
return cache;
}
/// Registers a new source.
///
/// This source must not have the same name as a source that's already been
/// registered.
void register(Source source) {
source.bind(this);
sources.register(source);
}
/// Determines if the system cache contains the package identified by [id].
Future<bool> contains(PackageId id) {
var source = sources[id.source];
if (source is! CachedSource) {
throw new ArgumentError("Package $id is not cacheable.");
}
return source.isInSystemCache(id);
}
/// Create a new temporary directory within the system cache.
///
/// The system cache maintains its own temporary directory that it uses to
/// stage packages into while downloading. It uses this instead of the OS's
/// system temp directory to ensure that it's on the same volume as the pub
/// system cache, so a completed download can be moved into the cache with a
/// cheap rename rather than a copy across volumes.
String createTempDir() {
var temp = ensureDir(tempDir);
return io.createTempDir(temp, 'dir');
}
/// Deletes the system cache's internal temp directory.
void deleteTempDir() {
log.fine('Clean up system cache temp directory $tempDir.');
if (dirExists(tempDir)) deleteEntry(tempDir);
}
}
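// --- Illustrative sketch, not part of the original file --------------------
// Typical construction, similar to what pub's entrypoints do elsewhere; the
// flag value and the function name are made up.
void _demoSystemCache() {
  var cache = new SystemCache.withSources(isOffline: false);
  // rootDir honors PUB_CACHE, otherwise a platform default such as
  // ~/.pub-cache on Linux and Mac OS.
  print(cache.rootDir);
  print(cache.sources['hosted']); // prints "hosted"
}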

View file

@ -1,73 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.transcript;
import 'dart:collection';
/// A rolling transcript of entries of type [T].
///
/// It has a maximum number of entries. If more entries than that are added, it
/// discards entries from the *middle* of the transcript. Generally, in logs,
/// the first and last entries are the most important, so it maintains those.
class Transcript<T> {
/// The maximum number of transcript entries.
final int max;
/// The number of entries that were discarded after reaching [max].
int get discarded => _discarded;
int _discarded = 0;
/// The earliest half of the entries.
///
/// This will be empty until the maximum number of entries is hit at which
/// point the oldest half of the entries will be moved from [_newest] to
/// here.
final _oldest = new List<T>();
/// The most recent half of the entries.
final _newest = new Queue<T>();
/// Creates a new [Transcript] that can hold up to [max] entries.
Transcript(this.max);
/// Adds [entry] to the transcript.
///
/// If the transcript already has the maximum number of entries, discards one
/// from the middle.
void add(T entry) {
if (discarded > 0) {
// We're already in "rolling" mode.
_newest.removeFirst();
_discarded++;
} else if (_newest.length == max) {
// We are crossing the threshold where we have to discard items. Copy
// the first half over to the oldest list.
while (_newest.length > max ~/ 2) {
_oldest.add(_newest.removeFirst());
}
// Discard the middle item.
_newest.removeFirst();
_discarded++;
}
_newest.add(entry);
}
/// Traverses the entries in the transcript from oldest to newest.
///
/// Invokes [onEntry] for each item. When it reaches the point in the middle
/// where excess entries were dropped, invokes [onGap] with the number of
/// dropped entries. If no more than [max] entries were added, does not
/// invoke [onGap].
void forEach(void onEntry(T entry), [void onGap(int)]) {
if (_oldest.isNotEmpty) {
_oldest.forEach(onEntry);
if (onGap != null) onGap(discarded);
}
_newest.forEach(onEntry);
}
}
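// A minimal usage sketch (hypothetical values, not part of the original
// file): with a maximum of four entries, adding a fifth drops one from the
// middle while the oldest and newest entries are kept.
void exampleTranscript() {
  var transcript = new Transcript<String>(4);
  ["a", "b", "c", "d", "e"].forEach(transcript.add);
  transcript.forEach((entry) => print(entry),
      (gap) => print("... ($gap entry omitted) ..."));
  // Prints "a", "b", the gap marker (1 entry omitted), then "d" and "e".
}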

View file

@ -1,924 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Generic utility functions. Stuff that should possibly be in core.
library pub.utils;
import 'dart:async';
import "dart:convert";
import 'dart:io';
// This is used by [libraryPath]. It must be kept up-to-date with all libraries
// whose paths are looked up using that function.
@MirrorsUsed(targets: const ['pub.io', 'test_pub'])
import 'dart:mirrors';
import "package:crypto/crypto.dart";
import 'package:path/path.dart' as path;
import "package:stack_trace/stack_trace.dart";
import 'exceptions.dart';
import 'log.dart' as log;
export '../../asset/dart/utils.dart';
/// A pair of values.
class Pair<E, F> {
E first;
F last;
Pair(this.first, this.last);
String toString() => '($first, $last)';
bool operator ==(other) {
if (other is! Pair) return false;
return other.first == first && other.last == last;
}
int get hashCode => first.hashCode ^ last.hashCode;
}
/// A completer that waits until all added [Future]s complete.
// TODO(rnystrom): Copied from web_components. Remove from here when it gets
// added to dart:core. (See #6626.)
class FutureGroup<T> {
int _pending = 0;
Completer<List<T>> _completer = new Completer<List<T>>();
final List<Future<T>> futures = <Future<T>>[];
bool completed = false;
final List<T> _values = <T>[];
/// Wait for [task] to complete.
Future<T> add(Future<T> task) {
if (completed) {
throw new StateError("The FutureGroup has already completed.");
}
_pending++;
futures.add(task.then((value) {
if (completed) return;
_pending--;
_values.add(value);
if (_pending <= 0) {
completed = true;
_completer.complete(_values);
}
}).catchError((e, stackTrace) {
if (completed) return;
completed = true;
_completer.completeError(e, stackTrace);
}));
return task;
}
Future<List> get future => _completer.future;
}
/// Like [new Future], but works around issue 11911 by using [new Future.value]
/// under the covers.
Future newFuture(callback()) => new Future.value().then((_) => callback());
/// Runs [callback] in an error zone and pipes any unhandled error to the
/// returned [Future].
///
/// If the returned [Future] produces an error, its stack trace will always be a
/// [Chain]. By default, this chain will contain only the local stack trace, but
/// if [captureStackChains] is passed, it will contain the full stack chain for
/// the error.
Future captureErrors(Future callback(), {bool captureStackChains: false}) {
var completer = new Completer();
var wrappedCallback = () {
new Future.sync(
callback).then(completer.complete).catchError((e, stackTrace) {
// [stackTrace] can be null if we're running without [captureStackChains],
// since dart:io will often throw errors without stack traces.
if (stackTrace != null) {
stackTrace = new Chain.forTrace(stackTrace);
} else {
stackTrace = new Chain([]);
}
if (!completer.isCompleted) completer.completeError(e, stackTrace);
});
};
if (captureStackChains) {
Chain.capture(wrappedCallback, onError: (error, stackTrace) {
if (!completer.isCompleted) completer.completeError(error, stackTrace);
});
} else {
runZoned(wrappedCallback, onError: (e, stackTrace) {
if (stackTrace == null) {
stackTrace = new Chain.current();
} else {
stackTrace = new Chain([new Trace.from(stackTrace)]);
}
if (!completer.isCompleted) completer.completeError(e, stackTrace);
});
}
return completer.future;
}
/// Like [Future.wait], but prints all errors from the futures as they occur and
/// only returns once all Futures have completed, successfully or not.
///
/// This will wrap the first error thrown in a [SilentException] and rethrow it.
Future waitAndPrintErrors(Iterable<Future> futures) {
return Future.wait(futures.map((future) {
return future.catchError((error, stackTrace) {
log.exception(error, stackTrace);
throw error;
});
})).catchError((error, stackTrace) {
throw new SilentException(error, stackTrace);
});
}
/// Returns a [StreamTransformer] that will call [onDone] when the stream
/// completes.
///
/// The stream will be passed through unchanged.
StreamTransformer onDoneTransformer(void onDone()) {
return new StreamTransformer.fromHandlers(handleDone: (sink) {
onDone();
sink.close();
});
}
// TODO(rnystrom): Move into String?
/// Pads [source] to [length] by adding spaces at the end.
String padRight(String source, int length) {
final result = new StringBuffer();
result.write(source);
while (result.length < length) {
result.write(' ');
}
return result.toString();
}
/// Pads [source] to [length] by adding [char]s at the beginning.
///
/// If [char] is `null`, it defaults to a space.
String padLeft(String source, int length, [String char]) {
if (char == null) char = ' ';
if (source.length >= length) return source;
return char * (length - source.length) + source;
}
/// Returns a labelled sentence fragment starting with [name] listing the
/// elements [iter].
///
/// If [iter] does not have exactly one item, [name] will be pluralized by adding "s" or
/// using [plural], if given.
String namedSequence(String name, Iterable iter, [String plural]) {
if (iter.length == 1) return "$name ${iter.single}";
if (plural == null) plural = "${name}s";
return "$plural ${toSentence(iter)}";
}
/// Returns a sentence fragment listing the elements of [iter].
///
/// This converts each element of [iter] to a string and separates them with
/// commas and/or "and" where appropriate.
String toSentence(Iterable iter) {
if (iter.length == 1) return iter.first.toString();
return iter.take(iter.length - 1).join(", ") + " and ${iter.last}";
}
/// Returns [name] if [number] is 1, or the plural of [name] otherwise.
///
/// By default, this just adds "s" to the end of [name] to get the plural. If
/// [plural] is passed, that's used instead.
String pluralize(String name, int number, {String plural}) {
if (number == 1) return name;
if (plural != null) return plural;
return '${name}s';
}
/// Escapes any regex metacharacters in [string] so that using it as a [RegExp]
/// pattern will match the string literally.
// TODO(rnystrom): Remove when #4706 is fixed.
String quoteRegExp(String string) {
// Note: make sure "\" is done first so that we don't escape the other
// escaped characters. We could do all of the replaces at once with a regexp
// but the string literal for a regex that matches all regex metacharacters would
// be a bit hard to read.
for (var metacharacter in r"\^$.*+?()[]{}|".split("")) {
string = string.replaceAll(metacharacter, "\\$metacharacter");
}
return string;
}
/// Creates a URL string for [address]:[port].
///
/// Handles properly formatting IPv6 addresses.
Uri baseUrlForAddress(InternetAddress address, int port) {
if (address.isLoopback) {
return new Uri(scheme: "http", host: "localhost", port: port);
}
// IPv6 addresses in URLs need to be enclosed in square brackets to avoid
// URL ambiguity with the ":" in the address.
if (address.type == InternetAddressType.IP_V6) {
return new Uri(scheme: "http", host: "[${address.address}]", port: port);
}
return new Uri(scheme: "http", host: address.address, port: port);
}
/// Returns whether [host] is a host for a localhost or loopback URL.
///
/// Unlike [InternetAddress.isLoopback], this accepts hostnames from URLs as well as
/// from [InternetAddress]es, including "localhost".
bool isLoopback(String host) {
if (host == 'localhost') return true;
// IPv6 hosts in URLs are surrounded by square brackets.
if (host.startsWith("[") && host.endsWith("]")) {
host = host.substring(1, host.length - 1);
}
try {
return new InternetAddress(host).isLoopback;
} on ArgumentError catch (_) {
// The host isn't an IP address and isn't "localhost", so it's almost
// certainly not a loopback host.
return false;
}
}
/// Flattens nested lists inside an iterable into a single list containing only
/// non-list elements.
List flatten(Iterable nested) {
var result = [];
helper(list) {
for (var element in list) {
if (element is List) {
helper(element);
} else {
result.add(element);
}
}
}
helper(nested);
return result;
}
/// Returns a set containing all elements in [minuend] that are not in
/// [subtrahend].
Set setMinus(Iterable minuend, Iterable subtrahend) {
var minuendSet = new Set.from(minuend);
minuendSet.removeAll(subtrahend);
return minuendSet;
}
/// Returns whether there's any overlap between [set1] and [set2].
bool overlaps(Set set1, Set set2) {
// Iterate through the smaller set.
var smaller = set1.length > set2.length ? set1 : set2;
var larger = smaller == set1 ? set2 : set1;
return smaller.any(larger.contains);
}
/// Returns a list containing the sorted elements of [iter].
List ordered(Iterable<Comparable> iter) {
var list = iter.toList();
list.sort();
return list;
}
/// Returns the element of [iter] for which [f] returns the minimum value.
minBy(Iterable iter, Comparable f(element)) {
var min = null;
var minComparable = null;
for (var element in iter) {
var comparable = f(element);
if (minComparable == null || comparable.compareTo(minComparable) < 0) {
min = element;
minComparable = comparable;
}
}
return min;
}
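// A minimal usage sketch (hypothetical values, not part of the original
// file): [minBy] picks the element whose projection compares smallest.
void exampleMinBy() {
  var shortest = minBy(["hosted", "git", "path"], (name) => name.length);
  print(shortest); // "git"
}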
/// Returns every pair of consecutive elements in [iter].
///
/// For example, if [iter] is `[1, 2, 3, 4]`, this will return `[(1, 2), (2, 3),
/// (3, 4)]`.
Iterable<Pair> pairs(Iterable iter) {
var previous = iter.first;
return iter.skip(1).map((element) {
var oldPrevious = previous;
previous = element;
return new Pair(oldPrevious, element);
});
}
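// A minimal usage sketch (hypothetical values, not part of the original
// file): consecutive elements are wrapped in [Pair]s.
void examplePairs() {
  print(pairs([1, 2, 3, 4]).join(", ")); // (1, 2), (2, 3), (3, 4)
}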
/// Creates a new map from [map] with new keys and values.
///
/// The return values of [key] are used as the keys and the return values of
/// [value] are used as the values for the new map.
///
/// [key] defaults to returning the original key and [value] defaults to
/// returning the original value.
Map mapMap(Map map, {key(key, value), value(key, value)}) {
if (key == null) key = (key, _) => key;
if (value == null) value = (_, value) => value;
var result = {};
map.forEach((mapKey, mapValue) {
result[key(mapKey, mapValue)] = value(mapKey, mapValue);
});
return result;
}
/// Like [Map.fromIterable], but [key] and [value] may return [Future]s.
Future<Map> mapFromIterableAsync(Iterable iter, {key(element), value(element)})
{
if (key == null) key = (element) => element;
if (value == null) value = (element) => element;
var map = new Map();
return Future.wait(iter.map((element) {
return Future.wait(
[
new Future.sync(() => key(element)),
new Future.sync(() => value(element))]).then((results) {
map[results[0]] = results[1];
});
})).then((_) => map);
}
/// Returns the transitive closure of [graph].
///
/// This assumes [graph] represents a graph with a vertex for each key and an
/// edge between each key and the values for that key.
Map<dynamic, Set> transitiveClosure(Map<dynamic, Iterable> graph) {
// This uses the Floyd-Warshall algorithm
// (https://en.wikipedia.org/wiki/Floyd%E2%80%93Warshall_algorithm).
var result = {};
graph.forEach((vertex, edges) {
result[vertex] = new Set.from(edges)..add(vertex);
});
for (var vertex1 in graph.keys) {
for (var vertex2 in graph.keys) {
for (var vertex3 in graph.keys) {
if (result[vertex2].contains(vertex1) &&
result[vertex1].contains(vertex3)) {
result[vertex2].add(vertex3);
}
}
}
}
return result;
}
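// A minimal usage sketch (hypothetical graph, not part of the original
// file): vertex 1 reaches 3 through 2, and every vertex reaches itself.
void exampleTransitiveClosure() {
  var closure = transitiveClosure({
    1: [2],
    2: [3],
    3: []
  });
  print(closure[1]); // {1, 2, 3}
  print(closure[3]); // {3}
}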
/// Given a list of filenames, returns a set of patterns that can be used to
/// filter for those filenames.
///
/// For a given path, that path ends with some string in the returned set if
/// and only if that path's basename is in [files].
Set<String> createFileFilter(Iterable<String> files) {
return files.expand((file) {
var result = ["/$file"];
if (Platform.operatingSystem == 'windows') result.add("\\$file");
return result;
}).toSet();
}
/// Given a blacklist of directory names, returns a set of patterns that can
/// be used to filter for those directory names.
///
/// For a given path, that path contains some string in the returned set if
/// and only if one of that path's components is in [dirs].
Set<String> createDirectoryFilter(Iterable<String> dirs) {
return dirs.expand((dir) {
var result = ["/$dir/"];
if (Platform.operatingSystem == 'windows') {
result
..add("/$dir\\")
..add("\\$dir/")
..add("\\$dir\\");
}
return result;
}).toSet();
}
/// Returns the maximum value in [iter] by [compare].
///
/// [compare] defaults to [Comparable.compare].
maxAll(Iterable iter, [int compare(element1, element2)]) {
if (compare == null) compare = Comparable.compare;
return iter.reduce(
(max, element) => compare(element, max) > 0 ? element : max);
}
/// Returns the minimum value in [iter] by [compare].
///
/// [compare] defaults to [Comparable.compare].
minAll(Iterable iter, [int compare(element1, element2)]) {
if (compare == null) compare = Comparable.compare;
return iter.reduce(
(max, element) => compare(element, max) < 0 ? element : max);
}
/// Replace each instance of [matcher] in [source] with the return value of
/// [fn].
String replace(String source, Pattern matcher, String fn(Match)) {
var buffer = new StringBuffer();
var start = 0;
for (var match in matcher.allMatches(source)) {
buffer.write(source.substring(start, match.start));
start = match.end;
buffer.write(fn(match));
}
buffer.write(source.substring(start));
return buffer.toString();
}
/// Returns whether or not [str] ends with [matcher].
bool endsWithPattern(String str, Pattern matcher) {
for (var match in matcher.allMatches(str)) {
if (match.end == str.length) return true;
}
return false;
}
/// Returns the hex-encoded sha1 hash of [source].
String sha1(String source) {
var sha = new SHA1();
sha.add(source.codeUnits);
return CryptoUtils.bytesToHex(sha.close());
}
/// Configures [future] so that its result (success or exception) is passed on
/// to [completer].
void chainToCompleter(Future future, Completer completer) {
future.then(completer.complete, onError: completer.completeError);
}
/// Ensures that [stream] can emit at least one value successfully (or close
/// without any values).
///
/// For example, reading asynchronously from a non-existent file will return a
/// stream that fails on the first chunk. In order to handle that more
/// gracefully, you may want to check that the stream looks like it's working
/// before you pipe the stream to something else.
///
/// This lets you do that. It returns a [Future] that completes to a [Stream]
/// emitting the same values and errors as [stream], but only if at least one
/// value can be read successfully. If an error occurs before any values are
/// emitted, the returned Future completes to that error.
Future<Stream> validateStream(Stream stream) {
var completer = new Completer<Stream>();
var controller = new StreamController(sync: true);
StreamSubscription subscription;
subscription = stream.listen((value) {
// We got a value, so the stream is valid.
if (!completer.isCompleted) completer.complete(controller.stream);
controller.add(value);
}, onError: (error, [stackTrace]) {
// If the error came after values, it's OK.
if (completer.isCompleted) {
controller.addError(error, stackTrace);
return;
}
// Otherwise, the error came first and the stream is invalid.
completer.completeError(error, stackTrace);
// We won't be returning the stream at all in this case, so unsubscribe
// and swallow the error.
subscription.cancel();
}, onDone: () {
// It closed with no errors, so the stream is valid.
if (!completer.isCompleted) completer.complete(controller.stream);
controller.close();
});
return completer.future;
}
// TODO(nweiz): remove this when issue 7964 is fixed.
/// Returns a [Future] that will complete to the first element of [stream].
///
/// Unlike [Stream.first], this is safe to use with single-subscription streams.
Future streamFirst(Stream stream) {
var completer = new Completer();
var subscription;
subscription = stream.listen((value) {
subscription.cancel();
completer.complete(value);
}, onError: (e, [stackTrace]) {
completer.completeError(e, stackTrace);
}, onDone: () {
completer.completeError(new StateError("No elements"), new Chain.current());
}, cancelOnError: true);
return completer.future;
}
/// Returns a wrapped version of [stream] along with a [StreamSubscription] that
/// can be used to control the wrapped stream.
Pair<Stream, StreamSubscription> streamWithSubscription(Stream stream) {
var controller = stream.isBroadcast ?
new StreamController.broadcast(sync: true) :
new StreamController(sync: true);
var subscription = stream.listen(
controller.add,
onError: controller.addError,
onDone: controller.close);
return new Pair<Stream, StreamSubscription>(controller.stream, subscription);
}
// TODO(nweiz): remove this when issue 7787 is fixed.
/// Creates two single-subscription [Stream]s that each emit all values and
/// errors from [stream].
///
/// This is useful if [stream] is single-subscription but multiple subscribers
/// are necessary.
Pair<Stream, Stream> tee(Stream stream) {
var controller1 = new StreamController(sync: true);
var controller2 = new StreamController(sync: true);
stream.listen((value) {
controller1.add(value);
controller2.add(value);
}, onError: (error, [stackTrace]) {
controller1.addError(error, stackTrace);
controller2.addError(error, stackTrace);
}, onDone: () {
controller1.close();
controller2.close();
});
return new Pair<Stream, Stream>(controller1.stream, controller2.stream);
}
/// Merges [stream1] and [stream2] into a single stream that emits events from
/// both sources.
Stream mergeStreams(Stream stream1, Stream stream2) {
var doneCount = 0;
var controller = new StreamController(sync: true);
for (var stream in [stream1, stream2]) {
stream.listen(controller.add, onError: controller.addError, onDone: () {
doneCount++;
if (doneCount == 2) controller.close();
});
}
return controller.stream;
}
/// A regular expression matching a trailing CR character.
final _trailingCR = new RegExp(r"\r$");
// TODO(nweiz): Use `text.split(new RegExp("\r\n?|\n\r?"))` when issue 9360 is
// fixed.
/// Splits [text] on its line breaks in a Windows-line-break-friendly way.
List<String> splitLines(String text) =>
text.split("\n").map((line) => line.replaceFirst(_trailingCR, "")).toList();
/// Converts a stream of arbitrarily chunked strings into a line-by-line stream.
///
/// The lines don't include line termination characters. A single trailing
/// newline is ignored.
Stream<String> streamToLines(Stream<String> stream) {
var buffer = new StringBuffer();
return stream.transform(
new StreamTransformer.fromHandlers(handleData: (chunk, sink) {
var lines = splitLines(chunk);
var leftover = lines.removeLast();
for (var line in lines) {
if (!buffer.isEmpty) {
buffer.write(line);
line = buffer.toString();
buffer = new StringBuffer();
}
sink.add(line);
}
buffer.write(leftover);
}, handleDone: (sink) {
if (!buffer.isEmpty) sink.add(buffer.toString());
sink.close();
}));
}
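// A minimal usage sketch (hypothetical chunks, not part of the original
// file): chunk boundaries don't need to line up with line breaks.
Future exampleStreamToLines() {
  var chunks = new Stream.fromIterable(["one\ntw", "o\nthree\n"]);
  return streamToLines(chunks).toList().then(print); // [one, two, three]
}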
/// Like [Iterable.where], but allows [test] to return [Future]s and uses the
/// results of those [Future]s as the test.
Future<Iterable> futureWhere(Iterable iter, test(value)) {
return Future.wait(iter.map((e) {
var result = test(e);
if (result is! Future) result = new Future.value(result);
return result.then((result) => new Pair(e, result));
})).then(
(pairs) =>
pairs.where(
(pair) => pair.last)).then((pairs) => pairs.map((pair) => pair.first));
}
// TODO(nweiz): unify the following functions with the utility functions in
// pkg/http.
/// Like [String.split], but only splits on the first occurrence of the pattern.
///
/// This always returns an array of two elements or fewer.
List<String> split1(String toSplit, String pattern) {
if (toSplit.isEmpty) return <String>[];
var index = toSplit.indexOf(pattern);
if (index == -1) return [toSplit];
return [
toSplit.substring(0, index),
toSplit.substring(index + pattern.length)];
}
/// Adds additional query parameters to [url], overwriting the original
/// parameters if a name conflict occurs.
Uri addQueryParameters(Uri url, Map<String, String> parameters) {
var queryMap = queryToMap(url.query);
queryMap.addAll(parameters);
return url.resolve("?${mapToQuery(queryMap)}");
}
/// Convert a URL query string (or `application/x-www-form-urlencoded` body)
/// into a [Map] from parameter names to values.
Map<String, String> queryToMap(String queryList) {
var map = {};
for (var pair in queryList.split("&")) {
var split = split1(pair, "=");
if (split.isEmpty) continue;
var key = urlDecode(split[0]);
var value = split.length > 1 ? urlDecode(split[1]) : "";
map[key] = value;
}
return map;
}
/// Convert a [Map] from parameter names to values to a URL query string.
String mapToQuery(Map<String, String> map) {
var pairs = <List<String>>[];
map.forEach((key, value) {
key = Uri.encodeQueryComponent(key);
value =
(value == null || value.isEmpty) ? null : Uri.encodeQueryComponent(value);
pairs.add([key, value]);
});
return pairs.map((pair) {
if (pair[1] == null) return pair[0];
return "${pair[0]}=${pair[1]}";
}).join("&");
}
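// A minimal usage sketch (hypothetical URL, not part of the original file):
// [addQueryParameters] overwrites the conflicting parameter and keeps the
// rest.
void exampleAddQueryParameters() {
  var url = Uri.parse("http://example.com/search?q=pub&page=1");
  print(addQueryParameters(url, {"page": "2"}));
  // http://example.com/search?q=pub&page=2
}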
/// Returns the union of all elements in each set in [sets].
Set unionAll(Iterable<Set> sets) =>
sets.fold(new Set(), (union, set) => union.union(set));
// TODO(nweiz): remove this when issue 9068 has been fixed.
/// Whether [uri1] and [uri2] are equal.
///
/// This considers HTTP URIs to default to port 80, and HTTPS URIs to default to
/// port 443.
bool urisEqual(Uri uri1, Uri uri2) =>
canonicalizeUri(uri1) == canonicalizeUri(uri2);
/// Return [uri] with redundant port information removed.
Uri canonicalizeUri(Uri uri) {
return uri;
}
/// Returns a human-friendly representation of [inputPath].
///
/// If [inputPath] isn't too distant from the current working directory, this
/// will return the relative path to it. Otherwise, it will return the absolute
/// path.
String nicePath(String inputPath) {
var relative = path.relative(inputPath);
var split = path.split(relative);
if (split.length > 1 && split[0] == '..' && split[1] == '..') {
return path.absolute(inputPath);
}
return relative;
}
/// Returns a human-friendly representation of [duration].
String niceDuration(Duration duration) {
var result = duration.inMinutes > 0 ? "${duration.inMinutes}:" : "";
var s = duration.inSeconds % 60;
var ms = duration.inMilliseconds % 1000;
// If we're using verbose logging, be more verbose but more accurate when
// reporting timing information.
if (log.verbosity.isLevelVisible(log.Level.FINE)) {
ms = padLeft(ms.toString(), 3, '0');
} else {
ms ~/= 100;
}
return "$result$s.${ms}s";
}
/// Decodes a URL-encoded string.
///
/// Unlike [Uri.decodeComponent], this includes replacing `+` with ` `.
String urlDecode(String encoded) =>
Uri.decodeComponent(encoded.replaceAll("+", " "));
/// Takes a simple data structure (composed of [Map]s, [Iterable]s, scalar
/// objects, and [Future]s) and recursively resolves all the [Future]s contained
/// within.
///
/// Completes with the fully resolved structure.
Future awaitObject(object) {
// Unroll nested futures.
if (object is Future) return object.then(awaitObject);
if (object is Iterable) {
return Future.wait(object.map(awaitObject).toList());
}
if (object is! Map) return new Future.value(object);
var pairs = <Future<Pair>>[];
object.forEach((key, value) {
pairs.add(awaitObject(value).then((resolved) => new Pair(key, resolved)));
});
return Future.wait(pairs).then((resolvedPairs) {
var map = {};
for (var pair in resolvedPairs) {
map[pair.first] = pair.last;
}
return map;
});
}
/// Returns the path to the library named [libraryName].
///
/// The library name must be globally unique, or the wrong library path may be
/// returned. Any libraries accessed must be added to the [MirrorsUsed]
/// declaration in the import above.
String libraryPath(String libraryName) {
var lib = currentMirrorSystem().findLibrary(new Symbol(libraryName));
return path.fromUri(lib.uri);
}
/// Whether "special" strings such as Unicode characters or color escapes are
/// safe to use.
///
/// On Windows or when not printing to a terminal, only printable ASCII
/// characters should be used.
bool get canUseSpecialChars =>
!runningAsTest &&
Platform.operatingSystem != 'windows' &&
stdioType(stdout) == StdioType.TERMINAL;
/// Gets a "special" string (ANSI escape or Unicode).
///
/// On Windows or when not printing to a terminal, returns something else since
/// those aren't supported.
String getSpecial(String special, [String onWindows = '']) =>
canUseSpecialChars ? special : onWindows;
/// Prepends each line in [text] with [prefix].
///
/// If [firstPrefix] is passed, the first line is prefixed with that instead.
String prefixLines(String text, {String prefix: '| ', String firstPrefix}) {
var lines = text.split('\n');
if (firstPrefix == null) {
return lines.map((line) => '$prefix$line').join('\n');
}
var firstLine = "$firstPrefix${lines.first}";
lines = lines.skip(1).map((line) => '$prefix$line').toList();
lines.insert(0, firstLine);
return lines.join('\n');
}
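// A minimal usage sketch (hypothetical text, not part of the original file):
// the first line gets [firstPrefix] while the rest get [prefix].
void examplePrefixLines() {
  print(prefixLines("first\nsecond", firstPrefix: "* "));
  // * first
  // | second
}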
/// Whether pub is running as a subprocess in an integration test or in a unit
/// test that has explicitly set this.
bool runningAsTest = Platform.environment.containsKey('_PUB_TESTING');
/// Whether today is April Fools' day.
bool get isAprilFools {
// Tests should never see April Fools' output.
if (runningAsTest) return false;
var date = new DateTime.now();
return date.month == 4 && date.day == 1;
}
/// Wraps [fn] to guard against several different kinds of stack overflow
/// exceptions:
///
/// * A sufficiently long [Future] chain can cause a stack overflow if there are
/// no asynchronous operations in it (issue 9583).
/// * A recursive function that recurses too deeply without an asynchronous
/// operation can cause a stack overflow.
/// * Even if the former is guarded against by adding asynchronous operations,
/// returning a value through the [Future] chain can still cause a stack
/// overflow.
Future resetStack(fn()) {
// Using a [Completer] breaks the [Future] chain for the return value and
// avoids the third case described above.
var completer = new Completer();
// Using [new Future] adds an asynchronous operation that works around the
// first and second cases described above.
newFuture(fn).then((val) {
scheduleMicrotask(() => completer.complete(val));
}).catchError((err, stackTrace) {
scheduleMicrotask(() => completer.completeError(err, stackTrace));
});
return completer.future;
}
/// The subset of strings that don't need quoting in YAML.
///
/// This pattern does not strictly follow the plain scalar grammar of YAML,
/// which means some strings may be unnecessarily quoted, but it's much simpler.
final _unquotableYamlString = new RegExp(r"^[a-zA-Z_-][a-zA-Z_0-9-]*$");
/// Converts [data], which is a parsed YAML object, to a pretty-printed string,
/// using indentation for maps.
String yamlToString(data) {
var buffer = new StringBuffer();
_stringify(bool isMapValue, String indent, data) {
// TODO(nweiz): Serialize using the YAML library once it supports
// serialization.
// Use indentation for (non-empty) maps.
if (data is Map && !data.isEmpty) {
if (isMapValue) {
buffer.writeln();
indent += ' ';
}
// Sort the keys. This minimizes deltas in diffs.
var keys = data.keys.toList();
keys.sort((a, b) => a.toString().compareTo(b.toString()));
var first = true;
for (var key in keys) {
if (!first) buffer.writeln();
first = false;
var keyString = key;
if (key is! String || !_unquotableYamlString.hasMatch(key)) {
keyString = JSON.encode(key);
}
buffer.write('$indent$keyString:');
_stringify(true, indent, data[key]);
}
return;
}
// Everything else we just stringify using JSON to handle escapes in
// strings and number formatting.
var string = data;
// Don't quote plain strings if not needed.
if (data is! String || !_unquotableYamlString.hasMatch(data)) {
string = JSON.encode(data);
}
if (isMapValue) {
buffer.write(' $string');
} else {
buffer.write('$indent$string');
}
}
_stringify(false, '', data);
return buffer.toString();
}
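// A minimal usage sketch (hypothetical data, not part of the original file):
// map keys are sorted and indented; values that need it are JSON-quoted.
void exampleYamlToString() {
  print(yamlToString({
    "name": "my_package",
    "dependencies": {
      "path": ">=1.0.0 <2.0.0"
    }
  }));
  // dependencies:
  //   path: ">=1.0.0 <2.0.0"
  // name: my_package
}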
/// Throw a [ApplicationException] with [message].
void fail(String message, [innerError, StackTrace innerTrace]) {
if (innerError != null) {
throw new WrappedException(message, innerError, innerTrace);
} else {
throw new ApplicationException(message);
}
}
/// Throw a [DataException] with [message] to indicate that the command has
/// failed because of invalid input data.
///
/// This will report the error and cause pub to exit with [exit_codes.DATA].
void dataError(String message) => throw new DataException(message);

View file

@ -1,99 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.validator;
import 'dart:async';
import 'entrypoint.dart';
import 'log.dart' as log;
import 'utils.dart';
import 'validator/compiled_dartdoc.dart';
import 'validator/dependency.dart';
import 'validator/dependency_override.dart';
import 'validator/directory.dart';
import 'validator/executable.dart';
import 'validator/license.dart';
import 'validator/name.dart';
import 'validator/pubspec_field.dart';
import 'validator/sdk_constraint.dart';
import 'validator/size.dart';
import 'validator/utf8_readme.dart';
/// The base class for validators that check whether a package is fit for
/// uploading.
///
/// Each validator should override [errors], [warnings], or both to return
/// lists of errors or warnings to display to the user. Errors will cause the
/// package not to be uploaded; warnings will require the user to confirm the
/// upload.
abstract class Validator {
/// The entrypoint that's being validated.
final Entrypoint entrypoint;
/// The accumulated errors for this validator.
///
/// Filled by calling [validate].
final errors = <String>[];
/// The accumulated warnings for this validator.
///
/// Filled by calling [validate].
final warnings = <String>[];
Validator(this.entrypoint);
/// Validates the entrypoint, adding any errors and warnings to [errors] and
/// [warnings], respectively.
Future validate();
/// Run all validators on the [entrypoint] package and print their results.
///
/// The future completes with the error and warning messages, respectively.
///
/// [packageSize], if passed, should complete to the size of the tarred
/// package, in bytes. This is used to validate that it's not too big to
/// upload to the server.
static Future<Pair<List<String>, List<String>>> runAll(Entrypoint entrypoint,
[Future<int> packageSize]) {
var validators = [
new LicenseValidator(entrypoint),
new NameValidator(entrypoint),
new PubspecFieldValidator(entrypoint),
new DependencyValidator(entrypoint),
new DependencyOverrideValidator(entrypoint),
new DirectoryValidator(entrypoint),
new ExecutableValidator(entrypoint),
new CompiledDartdocValidator(entrypoint),
new Utf8ReadmeValidator(entrypoint),
new SdkConstraintValidator(entrypoint)];
if (packageSize != null) {
validators.add(new SizeValidator(entrypoint, packageSize));
}
return Future.wait(
validators.map((validator) => validator.validate())).then((_) {
var errors = flatten(validators.map((validator) => validator.errors));
var warnings = flatten(validators.map((validator) => validator.warnings));
if (!errors.isEmpty) {
log.error("Missing requirements:");
for (var error in errors) {
log.error("* ${error.split('\n').join('\n ')}");
}
log.error("");
}
if (!warnings.isEmpty) {
log.warning("Suggestions:");
for (var warning in warnings) {
log.warning("* ${warning.split('\n').join('\n ')}");
}
log.warning("");
}
return new Pair<List<String>, List<String>>(errors, warnings);
});
}
}
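// A hedged sketch of a hypothetical validator (not part of the original
// file): concrete validators override [validate] and append messages to
// [errors] or [warnings].
class ChangelogValidator extends Validator {
  ChangelogValidator(Entrypoint entrypoint)
      : super(entrypoint);

  Future validate() {
    return new Future.sync(() {
      // A real implementation would inspect the entrypoint's files here.
      warnings.add("Consider adding a CHANGELOG file to your package.");
    });
  }
}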

View file

@ -1,43 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.validator.compiled_dartdoc;
import 'dart:async';
import 'package:path/path.dart' as path;
import '../entrypoint.dart';
import '../io.dart';
import '../validator.dart';
/// Validates that a package doesn't contain compiled Dartdoc
/// output.
class CompiledDartdocValidator extends Validator {
CompiledDartdocValidator(Entrypoint entrypoint)
: super(entrypoint);
Future validate() {
return new Future.sync(() {
for (var entry in entrypoint.root.listFiles(useGitIgnore: true)) {
if (path.basename(entry) != "nav.json") continue;
var dir = path.dirname(entry);
// Look for tell-tale Dartdoc output files all in the same directory.
var files = [
entry,
path.join(dir, "index.html"),
path.join(dir, "styles.css"),
path.join(dir, "dart-logo-small.png"),
path.join(dir, "client-live-nav.js")];
if (files.every((val) => fileExists(val))) {
warnings.add(
"Avoid putting generated documentation in " "${path.relative(dir)}.\n"
"Generated documentation bloats the package with redundant " "data.");
}
}
});
}
}

View file

@ -1,273 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.validator.dependency;
import 'dart:async';
import 'package:pub_semver/pub_semver.dart';
import '../entrypoint.dart';
import '../log.dart' as log;
import '../package.dart';
import '../utils.dart';
import '../validator.dart';
/// The range of all pub versions that don't support `^` version constraints.
final _preCaretPubVersions = new VersionConstraint.parse("<1.8.0-dev.3.0");
// TODO(nweiz): replace this with "^1.8.0" for the 1.8 release.
/// The range of all pub versions that do support `^` version constraints.
///
/// This is intersected with the user's SDK constraint to provide a suggested
/// constraint.
final _postCaretPubVersions = new VersionConstraint.parse("^1.8.0-dev.3.0");
/// A validator that validates a package's dependencies.
class DependencyValidator extends Validator {
/// Whether the SDK constraint guarantees that `^` version constraints are
/// safe.
bool get _caretAllowed =>
entrypoint.root.pubspec.environment.sdkVersion.intersect(
_preCaretPubVersions).isEmpty;
DependencyValidator(Entrypoint entrypoint)
: super(entrypoint);
Future validate() {
final completer0 = new Completer();
scheduleMicrotask(() {
try {
var caretDeps = [];
var it0 = entrypoint.root.pubspec.dependencies.iterator;
break0() {
join0() {
completer0.complete();
}
if (caretDeps.isNotEmpty && !_caretAllowed) {
_errorAboutCaretConstraints(caretDeps);
join0();
} else {
join0();
}
}
var trampoline0;
continue0() {
trampoline0 = null;
if (it0.moveNext()) {
var dependency = it0.current;
join1() {
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
}
if (dependency.source != "hosted") {
new Future.value(_warnAboutSource(dependency)).then((x0) {
trampoline0 = () {
trampoline0 = null;
try {
x0;
join1();
} catch (e0, s0) {
completer0.completeError(e0, s0);
}
};
do trampoline0(); while (trampoline0 != null);
}, onError: completer0.completeError);
} else {
join2() {
join1();
}
if (dependency.constraint.isAny) {
_warnAboutNoConstraint(dependency);
join2();
} else {
join3() {
join2();
}
if (dependency.constraint is Version) {
_warnAboutSingleVersionConstraint(dependency);
join3();
} else {
join4() {
join3();
}
if (dependency.constraint is VersionRange) {
join5() {
join6() {
join4();
}
if (dependency.constraint.toString().startsWith("^")) {
caretDeps.add(dependency);
join6();
} else {
join6();
}
}
if (dependency.constraint.min == null) {
_warnAboutNoConstraintLowerBound(dependency);
join5();
} else {
join7() {
join5();
}
if (dependency.constraint.max == null) {
_warnAboutNoConstraintUpperBound(dependency);
join7();
} else {
join7();
}
}
} else {
join4();
}
}
}
}
} else {
break0();
}
}
trampoline0 = continue0;
do trampoline0(); while (trampoline0 != null);
} catch (e, s) {
completer0.completeError(e, s);
}
});
return completer0.future;
}
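  // For reference, a hedged sketch (not the original source) of what the
  // machine-generated continuation-passing code above corresponds to when
  // written with native async/await:
  //
  //   Future validate() async {
  //     var caretDeps = [];
  //     for (var dependency in entrypoint.root.pubspec.dependencies) {
  //       if (dependency.source != "hosted") {
  //         await _warnAboutSource(dependency);
  //       } else if (dependency.constraint.isAny) {
  //         _warnAboutNoConstraint(dependency);
  //       } else if (dependency.constraint is Version) {
  //         _warnAboutSingleVersionConstraint(dependency);
  //       } else if (dependency.constraint is VersionRange) {
  //         if (dependency.constraint.min == null) {
  //           _warnAboutNoConstraintLowerBound(dependency);
  //         } else if (dependency.constraint.max == null) {
  //           _warnAboutNoConstraintUpperBound(dependency);
  //         }
  //         if (dependency.constraint.toString().startsWith("^")) {
  //           caretDeps.add(dependency);
  //         }
  //       }
  //     }
  //     if (caretDeps.isNotEmpty && !_caretAllowed) {
  //       _errorAboutCaretConstraints(caretDeps);
  //     }
  //   }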
/// Warn that dependencies should use the hosted source.
Future _warnAboutSource(PackageDep dep) {
return entrypoint.cache.sources['hosted'].getVersions(
dep.name,
dep.name).catchError((e) => <Version>[]).then((versions) {
var constraint;
var primary = Version.primary(versions);
if (primary != null) {
constraint = _constraintForVersion(primary);
} else {
constraint = dep.constraint.toString();
if (!dep.constraint.isAny && dep.constraint is! Version) {
constraint = '"$constraint"';
}
}
// Path sources are errors. Other sources are just warnings.
var messages = warnings;
if (dep.source == "path") {
messages = errors;
}
messages.add(
'Don\'t depend on "${dep.name}" from the ${dep.source} '
'source. Use the hosted source instead. For example:\n' '\n' 'dependencies:\n'
' ${dep.name}: $constraint\n' '\n'
'Using the hosted source ensures that everyone can download your '
'package\'s dependencies along with your package.');
});
}
/// Warn that dependencies should have version constraints.
void _warnAboutNoConstraint(PackageDep dep) {
var message =
'Your dependency on "${dep.name}" should have a version ' 'constraint.';
var locked = entrypoint.lockFile.packages[dep.name];
if (locked != null) {
message =
'$message For example:\n' '\n' 'dependencies:\n'
' ${dep.name}: ${_constraintForVersion(locked.version)}\n';
}
warnings.add(
"$message\n"
'Without a constraint, you\'re promising to support ${log.bold("all")} '
'future versions of "${dep.name}".');
}
/// Warn that dependencies should allow more than a single version.
void _warnAboutSingleVersionConstraint(PackageDep dep) {
warnings.add(
'Your dependency on "${dep.name}" should allow more than one version. '
'For example:\n' '\n' 'dependencies:\n'
' ${dep.name}: ${_constraintForVersion(dep.constraint)}\n' '\n'
'Constraints that are too tight will make it difficult for people to '
'use your package\n'
'along with other packages that also depend on "${dep.name}".');
}
/// Warn that dependencies should have lower bounds on their constraints.
void _warnAboutNoConstraintLowerBound(PackageDep dep) {
var message = 'Your dependency on "${dep.name}" should have a lower bound.';
var locked = entrypoint.lockFile.packages[dep.name];
if (locked != null) {
var constraint;
if (locked.version == (dep.constraint as VersionRange).max) {
constraint = _constraintForVersion(locked.version);
} else {
constraint = '">=${locked.version} ${dep.constraint}"';
}
message =
'$message For example:\n' '\n' 'dependencies:\n' ' ${dep.name}: $constraint\n';
}
warnings.add(
"$message\n"
'Without a constraint, you\'re promising to support ${log.bold("all")} '
'previous versions of "${dep.name}".');
}
/// Warn that dependencies should have upper bounds on their constraints.
void _warnAboutNoConstraintUpperBound(PackageDep dep) {
var constraint;
if ((dep.constraint as VersionRange).includeMin) {
constraint = _constraintForVersion((dep.constraint as VersionRange).min);
} else {
constraint =
'"${dep.constraint} ' '<${(dep.constraint as VersionRange).min.nextBreaking}"';
}
warnings.add(
'Your dependency on "${dep.name}" should have an upper bound. For ' 'example:\n'
'\n' 'dependencies:\n' ' ${dep.name}: $constraint\n' '\n'
'Without an upper bound, you\'re promising to support '
'${log.bold("all")} future versions of ${dep.name}.');
}
/// Emits an error for any version constraints that use `^` without an
/// appropriate SDK constraint.
void _errorAboutCaretConstraints(List<PackageDep> caretDeps) {
var newSdkConstraint =
entrypoint.root.pubspec.environment.sdkVersion.intersect(_postCaretPubVersions);
if (newSdkConstraint.isEmpty) newSdkConstraint = _postCaretPubVersions;
var buffer = new StringBuffer(
"Older versions of pub don't support ^ version constraints.\n"
"Make sure your SDK constraint excludes those old versions:\n" "\n"
"environment:\n" " sdk: \"$newSdkConstraint\"\n" "\n");
if (caretDeps.length == 1) {
buffer.writeln("Or use a fully-expanded constraint:");
} else {
buffer.writeln("Or use fully-expanded constraints:");
}
buffer.writeln();
buffer.writeln("dependencies:");
caretDeps.forEach((dep) {
VersionRange constraint = dep.constraint;
buffer.writeln(
" ${dep.name}: \">=${constraint.min} <${constraint.max}\"");
});
errors.add(buffer.toString().trim());
}
/// Returns the suggested version constraint for a dependency that was tested
/// against [version].
String _constraintForVersion(Version version) {
if (_caretAllowed) return "^$version";
return '">=$version <${version.nextBreaking}"';
}
}

Some files were not shown because too many files have changed in this diff.