Start pulling pub from its own repo.

Pub now lives in third_party/pkg/pub (it's in pkg to make it possible to import
using "package:" imports).

R=ricow@google.com, rnystrom@google.com

Review URL: https://codereview.chromium.org//1165473002
Natalie Weizenbaum 2015-05-29 14:50:18 -07:00
parent 8f1506e817
commit 3d8c06c4e9
584 changed files with 26 additions and 46883 deletions

DEPS

@@ -82,6 +82,7 @@ vars = {
   "ply_rev": "@604b32590ffad5cbb82e4afef1d305512d06ae93",
   "plugin_tag": "@0.1.0",
   "pool_rev": "@22e12aeb16ad0b626900dbe79e4a25391ddfb28c",
+  "pub_rev": "@6f2a1b90b8210a85a38aab1af479c047681c29e6",
   "pub_semver_tag": "@1.2.1",
   "scheduled_test_tag": "@0.11.8+1",
   "shelf_rev": "@1e87b79b21ac5e6fa2f93576d6c06eaa65285ef4",
@@ -238,6 +239,8 @@ deps = {
       (Var("github_mirror") % "pool") + Var("pool_rev"),
   Var("dart_root") + "/third_party/pkg/pub_semver":
       (Var("github_mirror") % "pub_semver") + Var("pub_semver_tag"),
+  Var("dart_root") + "/third_party/pkg_tested/pub":
+      ("https://github.com/dart-lang/pub.git") + Var("pub_rev"),
   Var("dart_root") + "/third_party/pkg/scheduled_test":
       (Var("github_mirror") % "scheduled_test") +
       Var("scheduled_test_tag"),


@@ -30,6 +30,9 @@
 '--timestamp_file=<(SHARED_INTERMEDIATE_DIR)/packages.stamp',
 '<(PRODUCT_DIR)/packages',
 '<@(_inputs)',
+# Pub imports dart2js as compiler_unsupported so it can work outside
+# the SDK. Map that to the compiler package.
+'compiler/lib:compiler_unsupported'
 ],
 },
 ],
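
For context, the entry added above exists because pub refers to dart2js under the compiler_unsupported name so it can run outside the SDK; the mapping makes that name resolve to the in-repo compiler package. A hypothetical import of that shape (the exact path isn't shown in this diff):

    // Hypothetical: how pub-side code refers to dart2js when built against the
    // SDK checkout; the packages entry above maps this name to the compiler
    // package's lib directory.
    import 'package:compiler_unsupported/compiler.dart' as dart2js;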


@@ -53,5 +53,5 @@ DART="$BUILD_DIR/dart-sdk/bin/dart"
 PACKAGES_DIR="$BUILD_DIR/packages/"
 # Run pub.
-PUB="$SDK_DIR/lib/_internal/pub/bin/pub.dart"
+PUB="$SDK_DIR/../third_party/pkg_tested/pub/bin/pub.dart"
 exec "$DART" "${VM_OPTIONS[@]}" "--package-root=$PACKAGES_DIR" "$PUB" "$@"


@@ -35,7 +35,7 @@ set PACKAGES_DIR=%BUILD_DIR%\packages
 set DART=%BUILD_DIR%\dart-sdk\bin\dart
 rem Run pub.
-set PUB="%SDK_DIR%\lib\_internal\pub\bin\pub.dart"
+set PUB="%SDK_DIR%\..\third_party\pkg_tested\pub\bin\pub.dart"
 "%DART%" %VM_OPTIONS% --package-root="%PACKAGES_DIR%" "%PUB%" %*
 endlocal


@@ -1,90 +0,0 @@
# Contributing to pub

Thanks for being interested in contributing to pub! Contributing to a new
project can be hard: there's a lot of new code and practices to learn. This
document is intended to get you up and running as quickly as possible. If you're
looking for documentation on using pub, try
[pub.dartlang.org](http://pub.dartlang.org/doc).

The first step towards contributing is to contact the pub dev team and let us
know what you're working on, so we can be sure not to start working on the same
thing at the same time. Just send an email to [misc@dartlang.org] letting us
know that you're interested in contributing and what you plan on working on.
This will also let us give you specific advice about where to start.

[misc@dartlang.org]: mailto:misc@dartlang.org

## Organization

Pub isn't a package, but it's organized like one. It has four top-level
directories:

* `lib/` contains the implementation of pub. Currently, it's all in `lib/src/`,
  since there are no libraries intended for public consumption.

* `test/` contains the tests for pub.

* `bin/` contains `pub.dart`, the entrypoint script that's run whenever a user
  types "pub" on the command line or runs it in the Dart editor. This is usually
  run through shell scripts in `sdk/bin` at the root of the Dart repository.

* `resource/` contains static resource files that pub uses. They're
  automatically distributed in the Dart SDK.

It's probably easiest to start diving into the codebase by looking at a
particular pub command. Each command is encapsulated in files in
`lib/src/command/`.

## Running pub

To run pub from the Dart repository, first [build Dart][building]. From the root
of the repo:

    ./tools/build.py -m release

You'll need to re-build whenever you sync the repository, but not when you
modify pub or any packages it depends on. To run pub, just run `sdk/bin/pub` (or
`sdk/bin/pub.bat` on Windows).

[building]: https://code.google.com/p/dart/wiki/Building

## Testing pub

Before any change is made to pub, all tests should pass. To run all the pub
tests, run this from the root of the Dart repository:

    ./tools/test.py -m release pub

Changes to pub should be accompanied by one or more tests that exercise the new
functionality. When adding a test, the best strategy is to find a similar test
in `test/` and follow the same patterns. Note that pub makes wide use of the
[scheduled_test] package in its tests, so it's usually important to be familiar
with that when adding tests.

[scheduled_test]: http://pub.dartlang.org/packages/scheduled_test

Pub tests come in two basic forms. The first, which is usually used to unit test
classes and libraries internal to pub, has many tests in a single file. This is
used when each test will take a short time to run. For example,
`test/version_test.dart` contains unit tests for pub's Version class.

The other form, used by most pub tests, is usually used for integration tests of
user-visible pub commands. Each test has a file to itself, which is named after
the test description. This is used when tests can take a long time to run to
avoid having the tests time out when running on the build bots. For example,
`tests/get/hosted/get_transitive_test.dart` tests the resolution of transitive
hosted dependencies when using `pub get`.

When testing new functionality, it's often useful to run a single test rather
than the entire test suite. You can do this by appending the path to the test
file to the test command. For example, to run `get/relative_symlink_test.dart`:

    ./tools/test.py -m release pub/get/relative_symlink_test

## Landing your patch

All patches to the Dart repo, including to pub, need to undergo code review
before they're submitted. The full process for putting up your patch for review
is [documented elsewhere][contributing].

[contributing]: https://code.google.com/p/dart/wiki/Contributing


@@ -1,184 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.serialize;
import 'dart:async';
import 'dart:isolate';
import 'package:barback/barback.dart';
//# if source_maps >=0.9.0 <0.10.0
//> import 'package:source_maps/span.dart';
//# end
//# if source_span
import 'package:source_span/source_span.dart';
//# end
import 'serialize/exception.dart';
import 'utils.dart';
export 'serialize/aggregate_transform.dart';
export 'serialize/exception.dart';
export 'serialize/transform.dart';
export 'serialize/transformer.dart';
/// Converts [id] into a serializable map.
Map serializeId(AssetId id) => {'package': id.package, 'path': id.path};
/// Converts a serializable map into an [AssetId].
AssetId deserializeId(Map id) => new AssetId(id['package'], id['path']);
/// Converts [span] into a serializable map.
///
/// [span] may be a [SourceSpan] or a [Span].
Map serializeSpan(span) {
// TODO(nweiz): convert FileSpans to FileSpans.
// Handily, this code works for both source_map and source_span spans.
return {
'sourceUrl': span.sourceUrl.toString(),
'start': serializeLocation(span.start),
'end': serializeLocation(span.end),
'text': span.text,
};
}
/// Converts a serializable map into a [SourceSpan].
SourceSpan deserializeSpan(Map span) {
return new SourceSpan(
deserializeLocation(span['start']),
deserializeLocation(span['end']),
span['text']);
}
/// Converts [location] into a serializable map.
///
/// [location] may be a [SourceLocation] or a [Location].
Map serializeLocation(location) {
//# if source_maps >=0.9.0 <0.10.0
//> if (location is Location) {
//> return {
//> 'sourceUrl': location.sourceUrl,
//> 'offset': location.offset,
//> 'line': location.line,
//> 'column': location.column
//> };
//> }
//# end
//# if source_span
// TODO(nweiz): convert FileLocations to FileLocations.
if (location is SourceLocation) {
return {
'sourceUrl': location.sourceUrl.toString(),
'offset': location.offset,
'line': location.line,
'column': location.column
};
}
//# end
throw new ArgumentError("Unknown type ${location.runtimeType} for location.");
}
/// Converts a serializable map into a [Location].
SourceLocation deserializeLocation(Map location) {
return new SourceLocation(location['offset'],
sourceUrl: location['sourceUrl'],
line: location['line'],
column: location['column']);
}
/// Converts [stream] into a serializable map.
///
/// [serializeEvent] is used to serialize each event from the stream.
Map serializeStream(Stream stream, serializeEvent(event)) {
var receivePort = new ReceivePort();
var map = {'replyTo': receivePort.sendPort};
receivePort.first.then((message) {
var sendPort = message['replyTo'];
stream.listen((event) {
sendPort.send({
'type': 'event',
'value': serializeEvent(event)
});
}, onError: (error, stackTrace) {
sendPort.send({
'type': 'error',
'error': serializeException(error, stackTrace)
});
}, onDone: () => sendPort.send({'type': 'done'}));
});
return map;
}
/// Converts a serializable map into a [Stream].
///
/// [deserializeEvent] is used to deserialize each event from the stream.
Stream deserializeStream(Map stream, deserializeEvent(event)) {
return callbackStream(() {
var receivePort = new ReceivePort();
stream['replyTo'].send({'replyTo': receivePort.sendPort});
var controller = new StreamController(sync: true);
receivePort.listen((event) {
switch (event['type']) {
case 'event':
controller.add(deserializeEvent(event['value']));
break;
case 'error':
var exception = deserializeException(event['error']);
controller.addError(exception, exception.stackTrace);
break;
case 'done':
controller.close();
receivePort.close();
break;
}
});
return controller.stream;
});
}
/// Wraps [message] and sends it across [port], then waits for a response which
/// should be sent using [respond].
///
/// The returned Future will complete to the value or error returned by
/// [respond].
Future call(SendPort port, message) {
var receivePort = new ReceivePort();
port.send({
'message': message,
'replyTo': receivePort.sendPort
});
return receivePort.first.then((response) {
if (response['type'] == 'success') return response['value'];
assert(response['type'] == 'error');
var exception = deserializeException(response['error']);
return new Future.error(exception, exception.stackTrace);
});
}
/// Responds to a message sent by [call].
///
/// [wrappedMessage] is the raw message sent by [call]. This unwraps it and
/// passes the contents of the message to [callback], then sends the return
/// value of [callback] back to [call]. If [callback] returns a Future or
/// throws an error, that will also be sent.
void respond(wrappedMessage, callback(message)) {
var replyTo = wrappedMessage['replyTo'];
new Future.sync(() => callback(wrappedMessage['message']))
.then((result) => replyTo.send({'type': 'success', 'value': result}))
.catchError((error, stackTrace) {
replyTo.send({
'type': 'error',
'error': serializeException(error, stackTrace)
});
});
}
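
For context, a minimal sketch of how the call/respond pair above is meant to be used across isolates (the worker entrypoint and message below are hypothetical, and assume the functions in this file are in scope):

    // Hypothetical worker isolate: replies to messages sent with `call`.
    void workerMain(SendPort replyTo) {
      var port = new ReceivePort();
      replyTo.send(port.sendPort);
      port.listen((wrappedMessage) {
        // `respond` unwraps the message, invokes the callback, and sends the
        // result (or a serialized exception) back to the caller.
        respond(wrappedMessage, (message) => "echo: $message");
      });
    }

    // Hypothetical host side, once the worker's SendPort has been received:
    //   call(workerPort, "hello").then(print);  // prints "echo: hello"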


@@ -1,173 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.serialize.aggregate_transform;
import 'dart:async';
import 'dart:isolate';
import 'package:barback/barback.dart';
// TODO(nweiz): don't import from "src" once issue 14966 is fixed.
import 'package:barback/src/internal_asset.dart';
import '../serialize.dart';
import 'get_input_transform.dart';
/// Serialize the methods shared between [AggregateTransform] and
/// [DeclaringAggregateTransform].
///
/// [additionalFields] contains additional serialized fields to add to the
/// serialized transform. [methodHandlers] is a set of additional methods. Each
/// value should take a JSON message and return the response (which may be a
/// Future).
Map _serializeBaseAggregateTransform(transform, Map additionalFields,
Map<String, Function> methodHandlers) {
var receivePort = new ReceivePort();
receivePort.listen((wrappedMessage) {
respond(wrappedMessage, (message) {
var handler = methodHandlers[message['type']];
if (handler != null) return handler(message);
if (message['type'] == 'consumePrimary') {
transform.consumePrimary(deserializeId(message['assetId']));
return null;
}
assert(message['type'] == 'log');
var method = {
'Info': transform.logger.info,
'Fine': transform.logger.fine,
'Warning': transform.logger.warning,
'Error': transform.logger.error
}[message['level']];
assert(method != null);
var assetId = message['assetId'] == null ? null :
deserializeId(message['assetId']);
var span = message['span'] == null ? null :
deserializeSpan(message['span']);
method(message['message'], asset: assetId, span: span);
});
});
return {
'port': receivePort.sendPort,
'key': transform.key,
'package': transform.package
}..addAll(additionalFields);
}
/// Converts [transform] into a serializable map.
Map serializeAggregateTransform(AggregateTransform transform) {
return _serializeBaseAggregateTransform(transform, {
'primaryInputs': serializeStream(transform.primaryInputs, serializeAsset)
}, {
'getInput': (message) => transform.getInput(deserializeId(message['id']))
.then((asset) => serializeAsset(asset)),
'addOutput': (message) =>
transform.addOutput(deserializeAsset(message['output']))
});
}
/// Converts [transform] into a serializable map.
Map serializeDeclaringAggregateTransform(
DeclaringAggregateTransform transform) {
return _serializeBaseAggregateTransform(transform, {
'primaryIds': serializeStream(transform.primaryIds, serializeId)
}, {
'declareOutput': (message) =>
transform.declareOutput(deserializeId(message['output']))
});
}
/// The base class for wrappers for [AggregateTransform]s that are in the host
/// isolate.
class _ForeignBaseAggregateTransform {
/// The port with which we communicate with the host isolate.
///
/// This port and all messages sent across it are specific to this transform.
final SendPort _port;
final String key;
final String package;
TransformLogger get logger => _logger;
TransformLogger _logger;
_ForeignBaseAggregateTransform(Map transform)
: _port = transform['port'],
key = transform['key'],
package = transform['package'] {
_logger = new TransformLogger((assetId, level, message, span) {
call(_port, {
'type': 'log',
'level': level.name,
'message': message,
'assetId': assetId == null ? null : serializeId(assetId),
'span': span == null ? null : serializeSpan(span)
});
});
}
void consumePrimary(AssetId id) {
call(_port, {'type': 'consumePrimary', 'assetId': serializeId(id)});
}
}
// We can get away with only removing the class declarations in incompatible
// barback versions because merely referencing undefined types in type
// annotations isn't a static error. Only implementing an undefined interface is
// a static error.
//# if barback >=0.14.1
/// A wrapper for an [AggregateTransform] that's in the host isolate.
///
/// This retrieves inputs from and sends outputs and logs to the host isolate.
class ForeignAggregateTransform extends _ForeignBaseAggregateTransform
with GetInputTransform implements AggregateTransform {
final Stream<Asset> primaryInputs;
/// Creates a transform from a serialized map sent from the host isolate.
ForeignAggregateTransform(Map transform)
: primaryInputs = deserializeStream(
transform['primaryInputs'], deserializeAsset),
super(transform);
Future<Asset> getInput(AssetId id) {
return call(_port, {
'type': 'getInput',
'id': serializeId(id)
}).then(deserializeAsset);
}
void addOutput(Asset output) {
call(_port, {
'type': 'addOutput',
'output': serializeAsset(output)
});
}
}
/// A wrapper for a [DeclaringAggregateTransform] that's in the host isolate.
class ForeignDeclaringAggregateTransform
extends _ForeignBaseAggregateTransform
implements DeclaringAggregateTransform {
final Stream<AssetId> primaryIds;
/// Creates a transform from a serializable map sent from the host isolate.
ForeignDeclaringAggregateTransform(Map transform)
: primaryIds = deserializeStream(
transform['primaryIds'], deserializeId),
super(transform);
void declareOutput(AssetId id) {
call(_port, {
'type': 'declareOutput',
'output': serializeId(id)
});
}
}
//# end


@@ -1,102 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.serialize.exception;
import 'package:barback/barback.dart';
import 'package:stack_trace/stack_trace.dart';
import '../utils.dart';
/// An exception that was originally raised in another isolate.
///
/// Exception objects can't cross isolate boundaries in general, so this class
/// wraps as much information as can be consistently serialized.
class CrossIsolateException implements Exception {
/// The name of the type of exception thrown.
///
/// This is the return value of [error.runtimeType.toString()]. Keep in mind
/// that objects in different libraries may have the same type name.
final String type;
/// The exception's message, or its [toString] if it didn't expose a `message`
/// property.
final String message;
/// The exception's stack chain, or `null` if no stack chain was available.
final Chain stackTrace;
/// Loads a [CrossIsolateException] from a serialized representation.
///
/// [error] should be the result of [CrossIsolateException.serialize].
CrossIsolateException.deserialize(Map error)
: type = error['type'],
message = error['message'],
stackTrace = error['stack'] == null ? null :
new Chain.parse(error['stack']);
/// Serializes [error] to an object that can safely be passed across isolate
/// boundaries.
static Map serialize(error, [StackTrace stack]) {
if (stack == null && error is Error) stack = error.stackTrace;
return {
'type': error.runtimeType.toString(),
'message': getErrorMessage(error),
'stack': stack == null ? null : new Chain.forTrace(stack).toString()
};
}
String toString() => "$message\n$stackTrace";
}
/// An [AssetNotFoundException] that was originally raised in another isolate.
class _CrossIsolateAssetNotFoundException extends CrossIsolateException
implements AssetNotFoundException {
final AssetId id;
String get message => "Could not find asset $id.";
/// Loads a [_CrossIsolateAssetNotFoundException] from a serialized
/// representation.
///
/// [error] should be the result of
/// [_CrossIsolateAssetNotFoundException.serialize].
_CrossIsolateAssetNotFoundException.deserialize(Map error)
: id = new AssetId(error['package'], error['path']),
super.deserialize(error);
/// Serializes [error] to an object that can safely be passed across isolate
/// boundaries.
static Map serialize(AssetNotFoundException error, [StackTrace stack]) {
var map = CrossIsolateException.serialize(error);
map['package'] = error.id.package;
map['path'] = error.id.path;
return map;
}
}
/// Serializes [error] to an object that can safely be passed across isolate
/// boundaries.
///
/// This handles [AssetNotFoundException]s specially, ensuring that their
/// metadata is preserved.
Map serializeException(error, [StackTrace stack]) {
if (error is AssetNotFoundException) {
return _CrossIsolateAssetNotFoundException.serialize(error, stack);
} else {
return CrossIsolateException.serialize(error, stack);
}
}
/// Loads an exception from a serialized representation.
///
/// This handles [AssetNotFoundException]s specially, ensuring that their
/// metadata is preserved.
CrossIsolateException deserializeException(Map error) {
if (error['type'] == 'AssetNotFoundException') {
return new _CrossIsolateAssetNotFoundException.deserialize(error);
} else {
return new CrossIsolateException.deserialize(error);
}
}


@@ -1,34 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.serialize.get_input_transform;
import 'dart:async';
import 'dart:convert';
import 'package:barback/barback.dart';
import '../utils.dart';
/// A mixin for transforms that support [getInput] and the associated suite of
/// methods.
abstract class GetInputTransform {
Future<Asset> getInput(AssetId id);
Future<String> readInputAsString(AssetId id, {Encoding encoding}) {
if (encoding == null) encoding = UTF8;
return getInput(id).then((input) =>
input.readAsString(encoding: encoding));
}
Stream<List<int>> readInput(AssetId id) =>
futureStream(getInput(id).then((input) => input.read()));
Future<bool> hasInput(AssetId id) {
return getInput(id).then((_) => true).catchError((error) {
if (error is AssetNotFoundException && error.id == id) return false;
throw error;
});
}
}


@@ -1,149 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.serialize.transform;
import 'dart:async';
import 'dart:isolate';
import 'package:barback/barback.dart';
// TODO(nweiz): don't import from "src" once issue 14966 is fixed.
import 'package:barback/src/internal_asset.dart';
import '../serialize.dart';
import 'get_input_transform.dart';
/// Serialize the methods shared between [Transform] and [DeclaringTransform].
///
/// [additionalFields] contains additional serialized fields to add to the
/// serialized transform. [methodHandlers] is a set of additional methods. Each
/// value should take a JSON message and return the response (which may be a
/// Future).
Map _serializeBaseTransform(transform, Map additionalFields,
Map<String, Function> methodHandlers) {
var receivePort = new ReceivePort();
receivePort.listen((wrappedMessage) {
respond(wrappedMessage, (message) {
var handler = methodHandlers[message['type']];
if (handler != null) return handler(message);
if (message['type'] == 'consumePrimary') {
transform.consumePrimary();
return null;
}
assert(message['type'] == 'log');
var method = {
'Info': transform.logger.info,
'Fine': transform.logger.fine,
'Warning': transform.logger.warning,
'Error': transform.logger.error
}[message['level']];
assert(method != null);
var assetId = message['assetId'] == null ? null :
deserializeId(message['assetId']);
var span = message['span'] == null ? null :
deserializeSpan(message['span']);
method(message['message'], asset: assetId, span: span);
});
});
return {'port': receivePort.sendPort}..addAll(additionalFields);
}
/// Converts [transform] into a serializable map.
Map serializeTransform(Transform transform) {
return _serializeBaseTransform(transform, {
'primaryInput': serializeAsset(transform.primaryInput)
}, {
'getInput': (message) => transform.getInput(deserializeId(message['id']))
.then((asset) => serializeAsset(asset)),
'addOutput': (message) =>
transform.addOutput(deserializeAsset(message['output']))
});
}
/// Converts [transform] into a serializable map.
Map serializeDeclaringTransform(DeclaringTransform transform) {
return _serializeBaseTransform(transform, {
'primaryId': serializeId(transform.primaryId)
}, {
'declareOutput': (message) =>
transform.declareOutput(deserializeId(message['output']))
});
}
/// The base class for wrappers for [Transform]s that are in the host isolate.
class _ForeignBaseTransform {
/// The port with which we communicate with the host isolate.
///
/// This port and all messages sent across it are specific to this transform.
final SendPort _port;
TransformLogger get logger => _logger;
TransformLogger _logger;
_ForeignBaseTransform(Map transform)
: _port = transform['port'] {
_logger = new TransformLogger((assetId, level, message, span) {
call(_port, {
'type': 'log',
'level': level.name,
'message': message,
'assetId': assetId == null ? null : serializeId(assetId),
'span': span == null ? null : serializeSpan(span)
});
});
}
void consumePrimary() {
call(_port, {'type': 'consumePrimary'});
}
}
/// A wrapper for a [Transform] that's in the host isolate.
///
/// This retrieves inputs from and sends outputs and logs to the host isolate.
class ForeignTransform extends _ForeignBaseTransform
with GetInputTransform implements Transform {
final Asset primaryInput;
/// Creates a transform from a serialized map sent from the host isolate.
ForeignTransform(Map transform)
: primaryInput = deserializeAsset(transform['primaryInput']),
super(transform);
Future<Asset> getInput(AssetId id) {
return call(_port, {
'type': 'getInput',
'id': serializeId(id)
}).then(deserializeAsset);
}
void addOutput(Asset output) {
call(_port, {
'type': 'addOutput',
'output': serializeAsset(output)
});
}
}
/// A wrapper for a [DeclaringTransform] that's in the host isolate.
class ForeignDeclaringTransform extends _ForeignBaseTransform
implements DeclaringTransform {
final AssetId primaryId;
/// Creates a transform from a serializable map sent from the host isolate.
ForeignDeclaringTransform(Map transform)
: primaryId = deserializeId(transform['primaryId']),
super(transform);
void declareOutput(AssetId id) {
call(_port, {
'type': 'declareOutput',
'output': serializeId(id)
});
}
}


@@ -1,126 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.serialize.transformer;
import 'dart:async';
import 'dart:isolate';
import 'package:barback/barback.dart';
import '../serialize.dart';
import 'transform.dart';
/// Converts [transformer] into a serializable map.
Map _serializeTransformer(Transformer transformer) {
var port = new ReceivePort();
port.listen((wrappedMessage) {
respond(wrappedMessage, (message) {
if (message['type'] == 'isPrimary') {
return transformer.isPrimary(deserializeId(message['id']));
} else if (message['type'] == 'declareOutputs') {
return new Future.sync(() {
return (transformer as DeclaringTransformer).declareOutputs(
new ForeignDeclaringTransform(message['transform']));
}).then((_) => null);
} else {
assert(message['type'] == 'apply');
// Make sure we return null so that if the transformer's [apply] returns
// a non-serializable value it doesn't cause problems.
return new Future.sync(() {
return transformer.apply(new ForeignTransform(message['transform']));
}).then((_) => null);
}
});
});
var type;
if (transformer is LazyTransformer) {
type = 'LazyTransformer';
} else if (transformer is DeclaringTransformer) {
type = 'DeclaringTransformer';
} else {
type = 'Transformer';
}
return {
'type': type,
'toString': transformer.toString(),
'port': port.sendPort
};
}
/// Converts [transformer] into a serializable map.
Map _serializeAggregateTransformer(AggregateTransformer transformer) {
var port = new ReceivePort();
port.listen((wrappedMessage) {
respond(wrappedMessage, (message) {
if (message['type'] == 'classifyPrimary') {
return transformer.classifyPrimary(deserializeId(message['id']));
} else if (message['type'] == 'declareOutputs') {
return new Future.sync(() {
return (transformer as DeclaringAggregateTransformer).declareOutputs(
new ForeignDeclaringAggregateTransform(message['transform']));
}).then((_) => null);
} else {
assert(message['type'] == 'apply');
// Make sure we return null so that if the transformer's [apply] returns
// a non-serializable value it doesn't cause problems.
return new Future.sync(() {
return transformer.apply(
new ForeignAggregateTransform(message['transform']));
}).then((_) => null);
}
});
});
var type;
if (transformer is LazyAggregateTransformer) {
type = 'LazyAggregateTransformer';
} else if (transformer is DeclaringAggregateTransformer) {
type = 'DeclaringAggregateTransformer';
} else {
type = 'AggregateTransformer';
}
return {
'type': type,
'toString': transformer.toString(),
'port': port.sendPort
};
}
/// Converts [group] into a serializable map.
Map _serializeTransformerGroup(TransformerGroup group) {
if (group.phases == null) {
throw "TransformerGroup $group phases cannot be null.";
}
return {
'type': 'TransformerGroup',
'toString': group.toString(),
'phases': group.phases.map((phase) {
return phase.map(serializeTransformerLike).toList();
}).toList()
};
}
/// Converts [transformerLike] into a serializable map.
///
/// [transformerLike] can be a [Transformer], an [AggregateTransformer], or a
/// [TransformerGroup].
Map serializeTransformerLike(transformerLike) {
if (transformerLike is Transformer) {
return _serializeTransformer(transformerLike);
} else if (transformerLike is TransformerGroup) {
return _serializeTransformerGroup(transformerLike);
} else {
// This has to be last, since "transformerLike is AggregateTransformer" will
// throw on older versions of barback.
assert(transformerLike is AggregateTransformer);
return _serializeAggregateTransformer(transformerLike);
}
}


@@ -1,112 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.asset.transformer_isolate;
import 'dart:convert';
import 'dart:isolate';
import 'dart:mirrors';
import 'package:barback/barback.dart';
import 'serialize.dart';
/// The mirror system.
///
/// Cached to avoid re-instantiating each time a transformer is initialized.
final _mirrors = currentMirrorSystem();
/// Sets up the initial communication with the host isolate.
void loadTransformers(SendPort replyTo) {
var port = new ReceivePort();
replyTo.send(port.sendPort);
port.listen((wrappedMessage) {
// TODO(nweiz): When issue 19228 is fixed, spin up a separate isolate for
// libraries loaded beyond the first so they can run in parallel.
respond(wrappedMessage, (message) {
var configuration = JSON.decode(message['configuration']);
var mode = new BarbackMode(message['mode']);
return _initialize(message['library'], configuration, mode).
map(serializeTransformerLike).toList();
});
});
}
/// Loads all the transformers and groups defined in [uri].
///
/// Loads the library, finds any [Transformer] or [TransformerGroup] subclasses
/// in it, instantiates them with [configuration] and [mode], and returns them.
List _initialize(String uri, Map configuration, BarbackMode mode) {
var transformerClass = reflectClass(Transformer);
var aggregateClass = _aggregateTransformerClass;
var groupClass = reflectClass(TransformerGroup);
var seen = new Set();
var transformers = [];
loadFromLibrary(library) {
if (seen.contains(library)) return;
seen.add(library);
// Load transformers from libraries exported by [library].
for (var dependency in library.libraryDependencies) {
if (!dependency.isExport) continue;
loadFromLibrary(dependency.targetLibrary);
}
// TODO(nweiz): if no valid transformers are found, throw an error message
// describing candidates and why they were rejected.
transformers.addAll(library.declarations.values.map((declaration) {
if (declaration is! ClassMirror) return null;
var classMirror = declaration;
if (classMirror.isPrivate) return null;
if (classMirror.isAbstract) return null;
if (!classMirror.isSubtypeOf(transformerClass) &&
!classMirror.isSubtypeOf(groupClass) &&
(aggregateClass == null ||
!classMirror.isSubtypeOf(aggregateClass))) {
return null;
}
var constructor = _getConstructor(classMirror, 'asPlugin');
if (constructor == null) return null;
if (constructor.parameters.isEmpty) {
if (configuration.isNotEmpty) return null;
return classMirror.newInstance(const Symbol('asPlugin'), []).reflectee;
}
if (constructor.parameters.length != 1) return null;
return classMirror.newInstance(const Symbol('asPlugin'),
[new BarbackSettings(configuration, mode)]).reflectee;
}).where((classMirror) => classMirror != null));
}
var library = _mirrors.libraries[Uri.parse(uri)];
// This should only happen if something's wrong with the logic in pub itself.
// If it were user error, the entire isolate would fail to load.
if (library == null) throw "Couldn't find library at $uri.";
loadFromLibrary(library);
return transformers;
}
// TODO(nweiz): clean this up when issue 13248 is fixed.
MethodMirror _getConstructor(ClassMirror classMirror, String constructor) {
var name = new Symbol("${MirrorSystem.getName(classMirror.simpleName)}"
".$constructor");
var candidate = classMirror.declarations[name];
if (candidate is MethodMirror && candidate.isConstructor) return candidate;
return null;
}
// Older barbacks don't support [AggregateTransformer], and calling
// [reflectClass] on an undefined class will throw an error, so we just define a
// null getter for them.
//# if barback >=0.14.1
ClassMirror get _aggregateTransformerClass =>
reflectClass(AggregateTransformer);
//# else
//> ClassMirror get _aggregateTransformerClass => null;
//# end
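
For context, a minimal sketch of the kind of class the loader above discovers (a hypothetical transformer, not part of pub or this commit): a public, concrete Transformer subclass exposing an `asPlugin` constructor that takes either no arguments or a single BarbackSettings:

    import 'dart:async';

    import 'package:barback/barback.dart';

    /// Hypothetical transformer that upper-cases the contents of ".txt" assets.
    class UppercaseTransformer extends Transformer {
      final BarbackSettings settings;

      /// The constructor signature that _initialize looks for via mirrors.
      UppercaseTransformer.asPlugin(this.settings);

      String get allowedExtensions => ".txt";

      Future apply(Transform transform) {
        return transform.primaryInput.readAsString().then((contents) {
          transform.addOutput(new Asset.fromString(
              transform.primaryInput.id, contents.toUpperCase()));
        });
      }
    }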


@@ -1,86 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Functions go in this file as opposed to lib/src/utils.dart if they need to
/// be accessible to the transformer-loading isolate.
library pub.asset.utils;
import 'dart:async';
/// A regular expression to match the exception prefix that some exceptions'
/// [Object.toString] values contain.
final _exceptionPrefix = new RegExp(r'^([A-Z][a-zA-Z]*)?(Exception|Error): ');
/// Get a string description of an exception.
///
/// Many exceptions include the exception class name at the beginning of their
/// [toString], so we remove that if it exists.
String getErrorMessage(error) =>
error.toString().replaceFirst(_exceptionPrefix, '');
/// Returns a buffered stream that will emit the same values as the stream
/// returned by [future] once [future] completes.
///
/// If [future] completes to an error, the return value will emit that error and
/// then close.
///
/// If [broadcast] is true, a broadcast stream is returned. This assumes that
/// the stream returned by [future] will be a broadcast stream as well.
/// [broadcast] defaults to false.
Stream futureStream(Future<Stream> future, {bool broadcast: false}) {
var subscription;
var controller;
future = future.catchError((e, stackTrace) {
// Since [controller] is synchronous, it's likely that emitting an error
// will cause it to be cancelled before we call close.
if (controller != null) controller.addError(e, stackTrace);
if (controller != null) controller.close();
controller = null;
});
onListen() {
future.then((stream) {
if (controller == null) return;
subscription = stream.listen(
controller.add,
onError: controller.addError,
onDone: controller.close);
});
}
onCancel() {
if (subscription != null) subscription.cancel();
subscription = null;
controller = null;
}
if (broadcast) {
controller = new StreamController.broadcast(
sync: true, onListen: onListen, onCancel: onCancel);
} else {
controller = new StreamController(
sync: true, onListen: onListen, onCancel: onCancel);
}
return controller.stream;
}
/// Returns a [Stream] that will emit the same values as the stream returned by
/// [callback].
///
/// [callback] will only be called when the returned [Stream] gets a subscriber.
Stream callbackStream(Stream callback()) {
var subscription;
var controller;
controller = new StreamController(onListen: () {
subscription = callback().listen(controller.add,
onError: controller.addError,
onDone: controller.close);
},
onCancel: () => subscription.cancel(),
onPause: () => subscription.pause(),
onResume: () => subscription.resume(),
sync: true);
return controller.stream;
}


@@ -1,9 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import '../lib/src/command_runner.dart';
void main(List<String> arguments) {
new PubCommandRunner().run(arguments);
}


@@ -1,167 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// A simple library for rendering tree-like structures in ASCII.
library pub.ascii_tree;
import 'package:path/path.dart' as path;
import 'log.dart' as log;
import 'utils.dart';
/// Draws a tree for the given list of files. Given files like:
///
/// TODO
/// example/console_example.dart
/// example/main.dart
/// example/web copy/web_example.dart
/// test/absolute_test.dart
/// test/basename_test.dart
/// test/dirname_test.dart
/// test/extension_test.dart
/// test/is_absolute_test.dart
/// test/is_relative_test.dart
/// test/join_test.dart
/// test/normalize_test.dart
/// test/relative_test.dart
/// test/split_test.dart
/// .gitignore
/// README.md
/// lib/path.dart
/// pubspec.yaml
/// test/all_test.dart
/// test/path_posix_test.dart
/// test/path_windows_test.dart
///
/// this renders:
///
/// |-- .gitignore
/// |-- README.md
/// |-- TODO
/// |-- example
/// | |-- console_example.dart
/// | |-- main.dart
/// | '-- web copy
/// | '-- web_example.dart
/// |-- lib
/// | '-- path.dart
/// |-- pubspec.yaml
/// '-- test
/// |-- absolute_test.dart
/// |-- all_test.dart
/// |-- basename_test.dart
/// | (7 more...)
/// |-- path_windows_test.dart
/// |-- relative_test.dart
/// '-- split_test.dart
///
/// If [baseDir] is passed, it will be used as the root of the tree.
///
/// If [showAllChildren] is `false`, then directories with more than ten items
/// will have their contents truncated. Defaults to `false`.
String fromFiles(List<String> files, {String baseDir, bool showAllChildren}) {
// Parse out the files into a tree of nested maps.
var root = {};
for (var file in files) {
if (baseDir != null) file = path.relative(file, from: baseDir);
var directory = root;
for (var part in path.split(file)) {
directory = directory.putIfAbsent(part, () => {});
}
}
// Walk the map recursively and render to a string.
return fromMap(root, showAllChildren: showAllChildren);
}
/// Draws a tree from a nested map. Given a map like:
///
/// {
/// "analyzer": {
/// "args": {
/// "collection": ""
/// },
/// "logging": {}
/// },
/// "barback": {}
/// }
///
/// this renders:
///
/// analyzer
/// |-- args
/// | '-- collection
/// '-- logging
/// barback
///
/// Items with no children should have an empty map as the value.
///
/// If [showAllChildren] is `false`, then directories with more than ten items
/// will have their contents truncated. Defaults to `false`.
String fromMap(Map map, {bool showAllChildren}) {
var buffer = new StringBuffer();
_draw(buffer, "", null, map, showAllChildren: showAllChildren);
return buffer.toString();
}
void _drawLine(StringBuffer buffer, String prefix, bool isLastChild,
String name) {
// Print lines.
buffer.write(prefix);
if (name != null) {
if (isLastChild) {
buffer.write(log.gray("'-- "));
} else {
buffer.write(log.gray("|-- "));
}
}
// Print name.
buffer.writeln(name);
}
String _getPrefix(bool isRoot, bool isLast) {
if (isRoot) return "";
if (isLast) return "    ";
return log.gray("|   ");
}
void _draw(StringBuffer buffer, String prefix, String name, Map children,
{bool showAllChildren, bool isLast: false}) {
if (showAllChildren == null) showAllChildren = false;
// Don't draw a line for the root node.
if (name != null) _drawLine(buffer, prefix, isLast, name);
// Recurse to the children.
var childNames = ordered(children.keys);
drawChild(bool isLastChild, String child) {
var childPrefix = _getPrefix(name == null, isLast);
_draw(buffer, '$prefix$childPrefix', child, children[child],
showAllChildren: showAllChildren, isLast: isLastChild);
}
if (name == null || showAllChildren || childNames.length <= 10) {
// Not too many, so show all the children.
for (var i = 0; i < childNames.length; i++) {
drawChild(i == childNames.length - 1, childNames[i]);
}
} else {
// Show the first few.
drawChild(false, childNames[0]);
drawChild(false, childNames[1]);
drawChild(false, childNames[2]);
// Elide the middle ones.
buffer.write(prefix);
buffer.write(_getPrefix(name == null, isLast));
buffer.writeln(log.gray('| (${childNames.length - 6} more...)'));
// Show the last few.
drawChild(false, childNames[childNames.length - 3]);
drawChild(false, childNames[childNames.length - 2]);
drawChild(true, childNames[childNames.length - 1]);
}
}


@@ -1,89 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback;
import 'package:barback/barback.dart';
import 'package:path/path.dart' as p;
import 'package:pub_semver/pub_semver.dart';
/// The currently supported versions of packages that this version of pub works
/// with.
///
/// Pub implicitly constrains these packages to these versions as long as
/// barback is a dependency.
///
/// Users' transformers are loaded in an isolate that uses the entrypoint
/// package's dependency versions. However, that isolate also loads code
/// provided by pub (`asset/dart/transformer_isolate.dart` and associated
/// files). This code uses these packages as well, so these constraints exist to
/// ensure that its usage of the packages remains valid.
///
/// Most constraints here are like normal version constraints in that their
/// upper bound is the next major version of the package (or minor version for
/// pre-1.0.0 packages). If a new major version of the package is released,
/// these *must* be incremented to synchronize with that.
///
/// The constraint on barback is different. Its upper bound is the next *patch*
/// version of barback, that is, the next version with new features. This is
/// because most barback features need additional serialization code to be fully
/// supported in pub, even if they're otherwise backwards-compatible.
///
/// Whenever a new minor or patch version of barback is published, this *must*
/// be incremented to synchronize with that. See the barback [compatibility
/// documentation][compat] for details on the relationship between this
/// constraint and barback's version.
///
/// [compat]: https://gist.github.com/nex3/10942218
final pubConstraints = {
"barback": new VersionConstraint.parse(">=0.13.0 <0.15.3"),
"source_span": new VersionConstraint.parse(">=1.0.0 <2.0.0"),
"stack_trace": new VersionConstraint.parse(">=0.9.1 <2.0.0")
};
/// Converts [id] to a "package:" URI.
///
/// This will throw an [ArgumentError] if [id] doesn't represent a library in
/// `lib/`.
Uri idToPackageUri(AssetId id) {
if (!id.path.startsWith('lib/')) {
throw new ArgumentError("Asset id $id doesn't identify a library.");
}
return new Uri(scheme: 'package',
path: p.url.join(id.package, id.path.replaceFirst('lib/', '')));
}
/// Converts [uri] into an [AssetId] if its path is within "packages".
///
/// If the URL contains a special directory, but lacks a following package name,
/// throws a [FormatException].
///
/// If the URI doesn't contain one of those special directories, returns null.
AssetId packagesUrlToId(Uri url) {
var parts = p.url.split(url.path);
// Strip the leading "/" from the URL.
if (parts.isNotEmpty && parts.first == "/") parts = parts.skip(1).toList();
if (parts.isEmpty) return null;
// Check for "packages" in the URL.
// TODO(rnystrom): If we rewrite "package:" imports to relative imports that
// point to a canonical "packages" directory, we can limit "packages" to the
// root of the URL as well. See: #16649.
var index = parts.indexOf("packages");
if (index == -1) return null;
// There should be a package name after "packages".
if (parts.length <= index + 1) {
throw new FormatException(
'Invalid URL path "${url.path}". Expected package name '
'after "packages".');
}
var package = parts[index + 1];
var assetPath = p.url.join("lib", p.url.joinAll(parts.skip(index + 2)));
return new AssetId(package, assetPath);
}
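
For context, a small hypothetical round trip through the two helpers above (the package name and path are made up):

    // packagesUrlToId maps a served "packages" URL back to an asset id:
    //   "/packages/foo/src/bar.dart"  ->  AssetId("foo", "lib/src/bar.dart")
    var id = packagesUrlToId(Uri.parse("/packages/foo/src/bar.dart"));

    // idToPackageUri goes the other way, from a lib/ asset to a "package:" URI:
    //   AssetId("foo", "lib/src/bar.dart")  ->  package:foo/src/bar.dart
    print(idToPackageUri(id));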


@@ -1,71 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.admin_server;
import 'dart:async';
import 'dart:io';
import 'package:http_parser/http_parser.dart';
import 'package:shelf/shelf.dart' as shelf;
import 'package:shelf_web_socket/shelf_web_socket.dart';
import '../io.dart';
import '../log.dart' as log;
import 'asset_environment.dart';
import 'base_server.dart';
import 'web_socket_api.dart';
/// The web admin interface to pub serve.
// TODO(rnystrom): Currently this just provides access to the Web Socket API.
// See #16954.
class AdminServer extends BaseServer {
/// All currently open [WebSocket] connections.
final _webSockets = new Set<CompatibleWebSocket>();
shelf.Handler _handler;
/// Creates a new server and binds it to [port] of [host].
static Future<AdminServer> bind(AssetEnvironment environment,
String host, int port) {
return bindServer(host, port).then((server) {
log.fine('Bound admin server to $host:$port.');
return new AdminServer._(environment, server);
});
}
AdminServer._(AssetEnvironment environment, HttpServer server)
: super(environment, server) {
_handler = new shelf.Cascade()
.add(webSocketHandler(_handleWebSocket))
.add(_handleHttp).handler;
}
/// Closes the server and all Web Socket connections.
Future close() {
var futures = [super.close()];
futures.addAll(_webSockets.map((socket) => socket.close()));
return Future.wait(futures);
}
handleRequest(shelf.Request request) => _handler(request);
/// Handles an HTTP request.
_handleHttp(shelf.Request request) {
// TODO(rnystrom): Actually respond to requests once there is an admin
// interface. See #16954.
logRequest(request, "501 Not Implemented");
return new shelf.Response(501,
body: "Currently this server only accepts Web Socket connections.");
}
/// Creates a web socket for [request] which should be an upgrade request.
void _handleWebSocket(CompatibleWebSocket socket) {
_webSockets.add(socket);
var api = new WebSocketApi(socket, environment);
api.listen()
.whenComplete(() => _webSockets.remove(api))
.catchError(addError);
}
}


@@ -1,799 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.asset_environment;
import 'dart:async';
import 'dart:io';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as path;
import 'package:watcher/watcher.dart';
import '../cached_package.dart';
import '../entrypoint.dart';
import '../exceptions.dart';
import '../io.dart';
import '../log.dart' as log;
import '../package.dart';
import '../package_graph.dart';
import '../source/cached.dart';
import '../utils.dart';
import 'admin_server.dart';
import 'barback_server.dart';
import 'dart_forwarding_transformer.dart';
import 'dart2js_transformer.dart';
import 'load_all_transformers.dart';
import 'pub_package_provider.dart';
import 'source_directory.dart';
/// The entire "visible" state of the assets of a package and all of its
/// dependencies, taking into account the user's configuration when running pub.
///
/// Where [PackageGraph] just describes the entrypoint's dependencies as
/// specified by pubspecs, this includes "transient" information like the mode
/// that the user is running pub in, or which directories they want to
/// transform.
class AssetEnvironment {
/// Creates a new build environment for working with the assets used by
/// [entrypoint] and its dependencies.
///
/// HTTP servers that serve directories from this environment will be bound
/// to [hostname] and have ports based on [basePort]. If omitted, they
/// default to "localhost" and "0" (use ephemeral ports), respectively.
///
/// Loads all used transformers using [mode] (including dart2js if
/// [useDart2JS] is true).
///
/// This will only add the root package's "lib" directory to the environment.
/// Other directories can be added to the environment using [serveDirectory].
///
/// If [watcherType] is not [WatcherType.NONE] (the default), watches source
/// assets for modification.
///
/// If [packages] is passed, only those packages' assets are loaded and
/// served.
///
/// If [entrypoints] is passed, only transformers necessary to run those
/// entrypoints are loaded. Each entrypoint is expected to refer to a Dart
/// library.
///
/// If [environmentConstants] is passed, the constants it defines are passed
/// on to the built-in dart2js transformer.
///
/// Returns a [Future] that completes to the environment once the inputs,
/// transformers, and server are loaded and ready.
static Future<AssetEnvironment> create(Entrypoint entrypoint,
BarbackMode mode, {WatcherType watcherType, String hostname, int basePort,
Iterable<String> packages, Iterable<AssetId> entrypoints,
Map<String, String> environmentConstants, bool useDart2JS: true}) {
if (watcherType == null) watcherType = WatcherType.NONE;
if (hostname == null) hostname = "localhost";
if (basePort == null) basePort = 0;
if (environmentConstants == null) environmentConstants = {};
return log.progress("Loading asset environment", () async {
var graph = await entrypoint.loadPackageGraph();
graph = _adjustPackageGraph(graph, mode, packages);
var barback = new Barback(new PubPackageProvider(graph));
barback.log.listen(_log);
var environment = new AssetEnvironment._(graph, barback, mode,
watcherType, hostname, basePort, environmentConstants);
await environment._load(entrypoints: entrypoints, useDart2JS: useDart2JS);
return environment;
}, fine: true);
}
/// Return a version of [graph] that's restricted to [packages] (if passed)
/// and loads cached packages (if [mode] is [BarbackMode.DEBUG]).
static PackageGraph _adjustPackageGraph(PackageGraph graph,
BarbackMode mode, Iterable<String> packages) {
if (mode != BarbackMode.DEBUG && packages == null) return graph;
packages = (packages == null ? graph.packages.keys : packages).toSet();
return new PackageGraph(graph.entrypoint, graph.lockFile,
new Map.fromIterable(packages, value: (packageName) {
var package = graph.packages[packageName];
if (mode != BarbackMode.DEBUG) return package;
var cache = path.join('.pub/deps/debug', packageName);
if (!dirExists(cache)) return package;
return new CachedPackage(package, cache);
}));
}
/// The server for the Web Socket API and admin interface.
AdminServer _adminServer;
/// The public directories in the root package that are included in the asset
/// environment, keyed by their root directory.
final _directories = new Map<String, SourceDirectory>();
/// The [Barback] instance used to process assets in this environment.
final Barback barback;
/// The root package being built.
Package get rootPackage => graph.entrypoint.root;
/// The graph of packages whose assets and transformers are loaded in this
/// environment.
///
/// This isn't necessarily identical to the graph that's passed in to the
/// environment. It may expose fewer packages if some packages' assets don't
/// need to be loaded, and it may expose some [CachedPackage]s.
final PackageGraph graph;
/// The mode to run the transformers in.
final BarbackMode mode;
/// Constants to pass to the built-in dart2js transformer.
final Map<String, String> environmentConstants;
/// The [Transformer]s that should be appended by default to the root
/// package's transformer cascade. Will be empty if there are none.
final _builtInTransformers = <Transformer>[];
/// How source files should be watched.
final WatcherType _watcherType;
/// The hostname that servers are bound to.
final String _hostname;
/// The starting number for ports that servers will be bound to.
///
/// Servers will be bound to ports starting at this number and then
/// incrementing from there. However, if this is zero, then ephemeral port
/// numbers will be selected for each server.
final int _basePort;
/// The modified source assets that have not been sent to barback yet.
///
/// The build environment can be paused (by calling [pauseUpdates]) and
/// resumed ([resumeUpdates]). While paused, all source asset updates that
/// come from watching or adding new directories are not sent to barback.
/// When resumed, all pending source updates are sent to barback.
///
/// This lets pub serve and pub build create an environment and bind several
/// servers before barback starts building and producing results
/// asynchronously.
///
/// If this is `null`, then the environment is "live" and all updates will
/// go to barback immediately.
Set<AssetId> _modifiedSources;
AssetEnvironment._(this.graph, this.barback, this.mode,
this._watcherType, this._hostname, this._basePort,
this.environmentConstants);
/// Gets the built-in [Transformer]s that should be added to [package].
///
/// Returns `null` if there are none.
Iterable<Transformer> getBuiltInTransformers(Package package) {
// Built-in transformers only apply to the root package.
if (package.name != rootPackage.name) return null;
// The built-in transformers are for dart2js and forwarding assets around
// dart2js.
if (_builtInTransformers.isEmpty) return null;
return _builtInTransformers;
}
/// Starts up the admin server on an appropriate port and returns it.
///
/// This may only be called once on the build environment.
Future<AdminServer> startAdminServer(int port) {
// Can only start once.
assert(_adminServer == null);
return AdminServer.bind(this, _hostname, port)
.then((server) => _adminServer = server);
}
/// Binds a new port to serve assets from within [rootDirectory] in the
/// entrypoint package.
///
/// Adds and watches the sources within that directory. Returns a [Future]
/// that completes to the bound server.
///
/// If [rootDirectory] is already being served, returns that existing server.
Future<BarbackServer> serveDirectory(String rootDirectory) {
// See if there is already a server bound to the directory.
var directory = _directories[rootDirectory];
if (directory != null) {
return directory.server.then((server) {
log.fine('Already serving $rootDirectory on ${server.url}.');
return server;
});
}
// See if the new directory overlaps any existing servers.
var overlapping = _directories.keys.where((directory) =>
path.isWithin(directory, rootDirectory) ||
path.isWithin(rootDirectory, directory)).toList();
if (overlapping.isNotEmpty) {
return new Future.error(
new OverlappingSourceDirectoryException(overlapping));
}
var port = _basePort;
// If not using an ephemeral port, find the lowest-numbered available one.
if (port != 0) {
var boundPorts = _directories.values.map((directory) => directory.port)
.toSet();
while (boundPorts.contains(port)) {
port++;
}
}
var sourceDirectory = new SourceDirectory(
this, rootDirectory, _hostname, port);
_directories[rootDirectory] = sourceDirectory;
return _provideDirectorySources(rootPackage, rootDirectory)
.then((subscription) {
sourceDirectory.watchSubscription = subscription;
return sourceDirectory.serve();
});
}
/// Binds a new port to serve assets from within the "bin" directory of
/// [package].
///
/// Adds the sources within that directory and then binds a server to it.
/// Unlike [serveDirectory], this works with packages that are not the
/// entrypoint.
///
/// Returns a [Future] that completes to the bound server.
Future<BarbackServer> servePackageBinDirectory(String package) {
return _provideDirectorySources(graph.packages[package], "bin").then(
(_) => BarbackServer.bind(this, _hostname, 0, package: package,
rootDirectory: "bin"));
}
/// Precompiles all of [packageName]'s executables to snapshots in
/// [directory].
///
/// If [executableIds] is passed, only those executables are precompiled.
///
/// Returns a map from executable name to path for the snapshots that were
/// successfully precompiled.
Future<Map<String, String>> precompileExecutables(String packageName,
String directory, {Iterable<AssetId> executableIds}) async {
if (executableIds == null) {
executableIds = graph.packages[packageName].executableIds;
}
log.fine("Executables for $packageName: $executableIds");
if (executableIds.isEmpty) return {};
var server = await servePackageBinDirectory(packageName);
try {
var precompiled = {};
await waitAndPrintErrors(executableIds.map((id) async {
var basename = path.url.basename(id.path);
var snapshotPath = path.join(directory, "$basename.snapshot");
var result = await runProcess(Platform.executable, [
'--snapshot=$snapshotPath',
server.url.resolve(basename).toString()
]);
if (result.success) {
log.message("Precompiled ${_formatExecutable(id)}.");
precompiled[path.withoutExtension(basename)] = snapshotPath;
} else {
throw new ApplicationException(
log.yellow("Failed to precompile ${_formatExecutable(id)}:\n") +
result.stderr.join('\n'));
}
}));
return precompiled;
} finally {
// Don't await this future, since we have no need to wait for the server
// to fully shut down.
server.close();
}
}
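// Hedged sketch of the effect (package and paths hypothetical): for a package
// "foo" with bin/foo.dart, this roughly amounts to running
//
//     dart --snapshot=<directory>/foo.dart.snapshot http://<host>:<port>/foo.dart
//
// and, on success, the returned map contains
// {"foo": "<directory>/foo.dart.snapshot"}.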
/// Returns the executable name for [id].
///
/// [id] is assumed to be an executable in a bin directory. The return value
/// is intended for log output and may contain formatting.
String _formatExecutable(AssetId id) =>
log.bold("${id.package}:${path.basenameWithoutExtension(id.path)}");
/// Stops the server bound to [rootDirectory].
///
/// Also removes any source files within that directory from barback. Returns
/// the URL of the unbound server, or `null` if [rootDirectory] was not
/// bound to a server.
Future<Uri> unserveDirectory(String rootDirectory) {
log.fine("Unserving $rootDirectory.");
var directory = _directories.remove(rootDirectory);
if (directory == null) return new Future.value();
return directory.server.then((server) {
var url = server.url;
return directory.close().then((_) {
_removeDirectorySources(rootDirectory);
return url;
});
});
}
/// Gets the source directory that contains [assetPath] within the entrypoint
/// package.
///
/// If [assetPath] is not contained within a source directory, this throws
/// an exception.
String getSourceDirectoryContaining(String assetPath) =>
_directories.values
.firstWhere((dir) => path.isWithin(dir.directory, assetPath))
.directory;
/// Return all URLs serving [assetPath] in this environment.
Future<List<Uri>> getUrlsForAssetPath(String assetPath) {
// Check the three (mutually-exclusive) places the path could be pointing.
return _lookUpPathInServerRoot(assetPath).then((urls) {
if (urls.isNotEmpty) return urls;
return _lookUpPathInPackagesDirectory(assetPath);
}).then((urls) {
if (urls.isNotEmpty) return urls;
return _lookUpPathInDependency(assetPath);
});
}
/// Look up [assetPath] in the root directories of servers running in the
/// entrypoint package.
Future<List<Uri>> _lookUpPathInServerRoot(String assetPath) {
// Find all of the servers whose root directories contain the asset and
// generate appropriate URLs for each.
return Future.wait(_directories.values
.where((dir) => path.isWithin(dir.directory, assetPath))
.map((dir) {
var relativePath = path.relative(assetPath, from: dir.directory);
return dir.server.then((server) =>
server.url.resolveUri(path.toUri(relativePath)));
}));
}
/// Look up [assetPath] in the "packages" directory in the entrypoint package.
Future<List<Uri>> _lookUpPathInPackagesDirectory(String assetPath) {
var components = path.split(path.relative(assetPath));
if (components.first != "packages") return new Future.value([]);
if (!graph.packages.containsKey(components[1])) return new Future.value([]);
return Future.wait(_directories.values.map((dir) {
return dir.server.then((server) =>
server.url.resolveUri(path.toUri(assetPath)));
}));
}
/// Look up [assetPath] in the "lib" or "asset" directory of a dependency
/// package.
Future<List<Uri>> _lookUpPathInDependency(String assetPath) {
for (var packageName in graph.packages.keys) {
var package = graph.packages[packageName];
var libDir = package.path('lib');
var assetDir = package.path('asset');
var uri;
if (path.isWithin(libDir, assetPath)) {
uri = path.toUri(path.join('packages', package.name,
path.relative(assetPath, from: libDir)));
} else if (path.isWithin(assetDir, assetPath)) {
uri = path.toUri(path.join('assets', package.name,
path.relative(assetPath, from: assetDir)));
} else {
continue;
}
return Future.wait(_directories.values.map((dir) {
return dir.server.then((server) => server.url.resolveUri(uri));
}));
}
return new Future.value([]);
}
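// Worked example (package name hypothetical): an asset at
// <foo root>/lib/src/bar.dart in dependency "foo" maps to the URL path
// "packages/foo/src/bar.dart" on each bound server, while
// <foo root>/asset/logo.png maps to "assets/foo/logo.png".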
/// Given a URL to an asset served by this environment, returns the ID of the
/// asset that would be accessed by that URL.
///
/// If no server can serve [url], completes to `null`.
Future<AssetId> getAssetIdForUrl(Uri url) {
return Future.wait(_directories.values.map((dir) => dir.server))
.then((servers) {
var server = servers.firstWhere((server) {
if (server.port != url.port) return false;
return isLoopback(server.address.host) == isLoopback(url.host) ||
server.address.host == url.host;
}, orElse: () => null);
if (server == null) return null;
return server.urlToId(url);
});
}
/// Determines if [sourcePath] is contained within any of the directories in
/// the root package that are visible to this build environment.
bool containsPath(String sourcePath) {
var directories = ["lib"];
directories.addAll(_directories.keys);
return directories.any((dir) => path.isWithin(dir, sourcePath));
}
/// Pauses sending source asset updates to barback.
void pauseUpdates() {
// Cannot pause while already paused.
assert(_modifiedSources == null);
_modifiedSources = new Set<AssetId>();
}
/// Sends any pending source updates to barback and begins the asynchronous
/// build process.
void resumeUpdates() {
// Cannot resume while not paused.
assert(_modifiedSources != null);
barback.updateSources(_modifiedSources);
_modifiedSources = null;
}
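// Hedged usage sketch (the call site is hypothetical): batching several source
// changes into a single barback update.
//
//     environment.pauseUpdates();
//     // ...add or modify sources; changes accumulate in _modifiedSources...
//     environment.resumeUpdates(); // flushes the batch to barback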
/// Loads the assets and transformers for this environment.
///
/// This transforms and serves all library and asset files in all packages in
/// the environment's package graph. It loads any transformer plugins defined
/// in packages in [graph] and re-runs them as necessary when any input files
/// change.
///
/// If [useDart2JS] is `true`, then the [Dart2JSTransformer] is implicitly
/// added to the end of the root package's transformer phases.
///
/// If [entrypoints] is passed, only transformers necessary to run those
/// entrypoints will be loaded.
///
/// Returns a [Future] that completes once all inputs and transformers are
/// loaded.
Future _load({Iterable<AssetId> entrypoints, bool useDart2JS}) {
return log.progress("Initializing barback", () async {
// If the entrypoint package manually configures the dart2js
// transformer, don't include it in the built-in transformer list.
//
// TODO(nweiz): if/when we support more built-in transformers, make
// this more general.
var containsDart2JS = graph.entrypoint.root.pubspec.transformers
.any((transformers) =>
transformers.any((config) => config.id.package == '\$dart2js'));
if (!containsDart2JS && useDart2JS) {
_builtInTransformers.addAll([
new Dart2JSTransformer(this, mode),
new DartForwardingTransformer()
]);
}
// Bind a server that we can use to load the transformers.
var transformerServer = await BarbackServer.bind(this, _hostname, 0);
var errorStream = barback.errors.map((error) {
// Even most normally non-fatal barback errors should take down pub if
// they happen during the initial load process.
if (error is! AssetLoadException) throw error;
log.error(log.red(error.message));
log.fine(error.stackTrace.terse);
});
await _withStreamErrors(() {
return log.progress("Loading source assets", _provideSources);
}, [errorStream, barback.results]);
log.fine("Provided sources.");
errorStream = barback.errors.map((error) {
// Now that we're loading transformers, errors they log shouldn't be
// fatal, since we're starting to run them on real user assets which
// may have e.g. syntax errors. If an error would cause a transformer
// to fail to load, the load failure will cause us to exit.
if (error is! TransformerException) throw error;
var message = error.error.toString();
if (error.stackTrace != null) {
message += "\n" + error.stackTrace.terse.toString();
}
_log(new LogEntry(error.transform, error.transform.primaryId,
LogLevel.ERROR, message, null));
});
await _withStreamErrors(() async {
return log.progress("Loading transformers", () async {
await loadAllTransformers(this, transformerServer,
entrypoints: entrypoints);
transformerServer.close();
}, fine: true);
}, [errorStream, barback.results, transformerServer.results]);
}, fine: true);
}
/// Provides the public source assets in the environment to barback.
///
/// If [watcherType] is not [WatcherType.NONE], enables watching on them.
Future _provideSources() async {
// Just include the "lib" directory from each package. We'll add the
// other build directories in the root package by calling
// [serveDirectory].
await Future.wait(graph.packages.values.map((package) async {
if (graph.isPackageStatic(package.name)) return;
await _provideDirectorySources(package, "lib");
}));
}
/// Provides all of the source assets within [dir] in [package] to barback.
///
/// If [watcherType] is not [WatcherType.NONE], enables watching on them.
/// Returns the subscription to the watcher, or `null` if none was created.
Future<StreamSubscription<WatchEvent>> _provideDirectorySources(
Package package, String dir) {
log.fine("Providing sources for ${package.name}|$dir.");
// TODO(rnystrom): Handle overlapping directories. If two served
// directories overlap like so:
//
// $ pub serve example example/subdir
//
// Then the sources of the subdirectory will be updated and watched twice.
// See: #17454
if (_watcherType == WatcherType.NONE) {
_updateDirectorySources(package, dir);
return new Future.value();
}
    // Watch the directory before listing it so we don't miss files that
// are added between the initial list and registering the watcher.
return _watchDirectorySources(package, dir).then((_) {
_updateDirectorySources(package, dir);
});
}
/// Updates barback with all of the files in [dir] inside [package].
void _updateDirectorySources(Package package, String dir) {
var ids = _listDirectorySources(package, dir);
if (_modifiedSources == null) {
barback.updateSources(ids);
} else {
_modifiedSources.addAll(ids);
}
}
/// Removes all of the files in [dir] in the root package from barback.
void _removeDirectorySources(String dir) {
var ids = _listDirectorySources(rootPackage, dir);
if (_modifiedSources == null) {
barback.removeSources(ids);
} else {
_modifiedSources.removeAll(ids);
}
}
/// Lists all of the source assets in [dir] inside [package].
///
/// For large packages, listing the contents is a performance bottleneck, so
/// this is optimized for our needs in here instead of using the more general
/// but slower [listDir].
Iterable<AssetId> _listDirectorySources(Package package, String dir) {
// This is used in some performance-sensitive paths and can list many, many
    // files. As such, it leans more heavily towards optimization as opposed to
// readability than most code in pub. In particular, it avoids using the
// path package, since re-parsing a path is very expensive relative to
// string operations.
return package.listFiles(beneath: dir).map((file) {
// From profiling, path.relative here is just as fast as a raw substring
// and is correct in the case where package.dir has a trailing slash.
var relative = package.relative(file);
if (Platform.operatingSystem == 'windows') {
relative = relative.replaceAll("\\", "/");
}
var uri = new Uri(pathSegments: relative.split("/"));
return new AssetId(package.name, uri.toString());
});
}
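// Worked example (package name hypothetical): a file at
// <myapp root>/lib/src/util.dart becomes AssetId("myapp", "lib/src/util.dart"),
// with backslashes normalized to forward slashes on Windows.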
/// Adds a file watcher for [dir] within [package], if the directory exists
/// and the package needs watching.
Future<StreamSubscription<WatchEvent>> _watchDirectorySources(
Package package, String dir) {
// If this package comes from a cached source, its contents won't change so
// we don't need to monitor it. `packageId` will be null for the
// application package, since that's not locked.
var packageId = graph.lockFile.packages[package.name];
if (packageId != null &&
graph.entrypoint.cache.sources[packageId.source] is CachedSource) {
return new Future.value();
}
var subdirectory = package.path(dir);
if (!dirExists(subdirectory)) return new Future.value();
// TODO(nweiz): close this watcher when [barback] is closed.
var watcher = _watcherType.create(subdirectory);
var subscription = watcher.events.listen((event) {
// Don't watch files symlinked into these directories.
// TODO(rnystrom): If pub gets rid of symlinks, remove this.
var parts = path.split(event.path);
if (parts.contains("packages")) return;
// Skip files that were (most likely) compiled from nearby ".dart"
// files. These are created by the Editor's "Run as JavaScript"
// command and are written directly into the package's directory.
// When pub's dart2js transformer then tries to create the same file
// name, we get a build error. To avoid that, just don't consider
// that file to be a source.
// TODO(rnystrom): Remove these when the Editor no longer generates
// .js files and users have had enough time that they no longer have
      // these files lying around. See #15859.
if (event.path.endsWith(".dart.js")) return;
if (event.path.endsWith(".dart.js.map")) return;
if (event.path.endsWith(".dart.precompiled.js")) return;
var idPath = package.relative(event.path);
var id = new AssetId(package.name, path.toUri(idPath).toString());
if (event.type == ChangeType.REMOVE) {
if (_modifiedSources != null) {
_modifiedSources.remove(id);
} else {
barback.removeSources([id]);
}
} else if (_modifiedSources != null) {
_modifiedSources.add(id);
} else {
barback.updateSources([id]);
}
});
return watcher.ready.then((_) => subscription);
}
/// Returns the result of [futureCallback] unless any stream in [streams]
/// emits an error before it's done.
///
/// If a stream does emit an error, that error is thrown instead.
/// [futureCallback] is a callback rather than a plain future to ensure that
/// [streams] are listened to before any code that might cause an error starts
/// running.
Future _withStreamErrors(Future futureCallback(), List<Stream> streams) {
var completer = new Completer.sync();
var subscriptions = streams.map((stream) =>
stream.listen((_) {}, onError: completer.completeError)).toList();
new Future.sync(futureCallback).then((_) {
if (!completer.isCompleted) completer.complete();
}).catchError((error, stackTrace) {
if (!completer.isCompleted) completer.completeError(error, stackTrace);
});
return completer.future.whenComplete(() {
for (var subscription in subscriptions) {
subscription.cancel();
}
});
}
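// Hedged usage sketch mirroring the call in _load above: run a loading step
// but fail fast if barback reports an error while it is still in progress.
//
//     await _withStreamErrors(
//         () => log.progress("Loading source assets", _provideSources),
//         [errorStream, barback.results]);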
}
/// Log [entry] using Pub's logging infrastructure.
///
/// Since both [LogEntry] objects and the message itself often redundantly
/// show the same context like the file where an error occurred, this tries
/// to avoid showing redundant data in the entry.
void _log(LogEntry entry) {
messageMentions(text) =>
entry.message.toLowerCase().contains(text.toLowerCase());
messageMentionsAsset(id) =>
messageMentions(id.toString()) ||
messageMentions(path.fromUri(entry.assetId.path));
var prefixParts = [];
// Show the level (unless the message mentions it).
if (!messageMentions(entry.level.name)) {
prefixParts.add("${entry.level} from");
}
// Show the transformer.
prefixParts.add(entry.transform.transformer);
// Mention the primary input of the transform unless the message seems to.
if (!messageMentionsAsset(entry.transform.primaryId)) {
prefixParts.add("on ${entry.transform.primaryId}");
}
// If the relevant asset isn't the primary input, mention it unless the
// message already does.
if (entry.assetId != entry.transform.primaryId &&
!messageMentionsAsset(entry.assetId)) {
prefixParts.add("with input ${entry.assetId}");
}
var prefix = "[${prefixParts.join(' ')}]:";
var message = entry.message;
if (entry.span != null) {
message = entry.span.message(entry.message);
}
switch (entry.level) {
case LogLevel.ERROR:
log.error("${log.red(prefix)}\n$message");
break;
case LogLevel.WARNING:
log.warning("${log.yellow(prefix)}\n$message");
break;
case LogLevel.INFO:
log.message("${log.cyan(prefix)}\n$message");
break;
case LogLevel.FINE:
log.fine("${log.gray(prefix)}\n$message");
break;
}
}
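// Example of the resulting output (transformer and asset names hypothetical):
// a warning from a transformer named "Dart2JS" on web/main.dart of package
// "myapp" is logged as
//
//     [Warning from Dart2JS on myapp|web/main.dart]:
//     <the entry's message, possibly with its source span highlighted>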
/// Exception thrown when trying to serve a new directory that overlaps one or
/// more directories already being served.
class OverlappingSourceDirectoryException implements Exception {
/// The relative paths of the directories that overlap the one that could not
/// be served.
final List<String> overlappingDirectories;
OverlappingSourceDirectoryException(this.overlappingDirectories);
}
/// An enum describing different modes of constructing a [DirectoryWatcher].
abstract class WatcherType {
/// A watcher that automatically chooses its type based on the operating
/// system.
static const AUTO = const _AutoWatcherType();
/// A watcher that always polls the filesystem for changes.
static const POLLING = const _PollingWatcherType();
/// No directory watcher at all.
static const NONE = const _NoneWatcherType();
/// Creates a new DirectoryWatcher.
DirectoryWatcher create(String directory);
String toString();
}
class _AutoWatcherType implements WatcherType {
const _AutoWatcherType();
DirectoryWatcher create(String directory) =>
new DirectoryWatcher(directory);
String toString() => "auto";
}
class _PollingWatcherType implements WatcherType {
const _PollingWatcherType();
DirectoryWatcher create(String directory) =>
new PollingDirectoryWatcher(directory);
String toString() => "polling";
}
class _NoneWatcherType implements WatcherType {
const _NoneWatcherType();
DirectoryWatcher create(String directory) => null;
String toString() => "none";
}

View file

@ -1,209 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.server;
import 'dart:async';
import 'dart:io';
import 'package:barback/barback.dart';
import 'package:mime/mime.dart';
import 'package:path/path.dart' as path;
import 'package:shelf/shelf.dart' as shelf;
import 'package:stack_trace/stack_trace.dart';
import '../barback.dart';
import '../io.dart';
import '../log.dart' as log;
import '../utils.dart';
import 'base_server.dart';
import 'asset_environment.dart';
/// Callback for determining if an asset with [id] should be served or not.
typedef bool AllowAsset(AssetId id);
/// A server that serves assets transformed by barback.
class BarbackServer extends BaseServer<BarbackServerResult> {
/// The package whose assets are being served.
final String package;
/// The directory in the root which will serve as the root of this server as
/// a native platform path.
///
/// This may be `null` in which case no files in the root package can be
/// served and only assets in "lib" directories are available.
final String rootDirectory;
/// Optional callback to determine if an asset should be served.
///
/// This can be set to allow outside code to filter out assets. Pub serve
/// uses this after plug-ins are loaded to avoid serving ".dart" files in
/// release mode.
///
/// If this is `null`, all assets may be served.
AllowAsset allowAsset;
/// Creates a new server and binds it to [port] of [host].
///
/// This server serves assets from [barback], and uses [rootDirectory]
/// (which is relative to the root directory of [package]) as the root
/// directory. If [rootDirectory] is omitted, the bound server can only be
/// used to serve assets from packages' lib directories (i.e. "packages/..."
/// URLs). If [package] is omitted, it defaults to the entrypoint package.
static Future<BarbackServer> bind(AssetEnvironment environment,
String host, int port, {String package, String rootDirectory}) {
if (package == null) package = environment.rootPackage.name;
return bindServer(host, port).then((server) {
if (rootDirectory == null) {
log.fine('Serving packages on $host:$port.');
} else {
log.fine('Bound "$rootDirectory" to $host:$port.');
}
return new BarbackServer._(environment, server, package, rootDirectory);
});
}
BarbackServer._(AssetEnvironment environment, HttpServer server,
this.package, this.rootDirectory)
: super(environment, server);
/// Converts a [url] served by this server into an [AssetId] that can be
/// requested from barback.
AssetId urlToId(Uri url) {
// See if it's a URL to a public directory in a dependency.
var id = packagesUrlToId(url);
if (id != null) return id;
if (rootDirectory == null) {
throw new FormatException(
"This server cannot serve out of the root directory. Got $url.");
}
    // Otherwise, it's a path in the current package's [rootDirectory].
var parts = path.url.split(url.path);
// Strip the leading "/" from the URL.
if (parts.isNotEmpty && parts.first == "/") parts = parts.skip(1);
var relativePath = path.url.join(rootDirectory, path.url.joinAll(parts));
return new AssetId(package, relativePath);
}
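// Worked example (package names hypothetical): for a server whose [package]
// is "myapp" and whose [rootDirectory] is "web",
//
//     /index.html            -> AssetId("myapp", "web/index.html")
//     /packages/foo/foo.dart -> AssetId("foo", "lib/foo.dart")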
/// Handles an HTTP request.
handleRequest(shelf.Request request) {
if (request.method != "GET" && request.method != "HEAD") {
return methodNotAllowed(request);
}
var id;
try {
id = urlToId(request.url);
} on FormatException catch (ex) {
// If we got here, we had a path like "/packages" which is a special
// directory, but not a valid path since it lacks a following package
// name.
return notFound(request, error: ex.message);
}
// See if the asset should be blocked.
if (allowAsset != null && !allowAsset(id)) {
return notFound(request,
error: "Asset $id is not available in this configuration.",
asset: id);
}
return environment.barback.getAssetById(id).then((result) {
return result;
}).then((asset) => _serveAsset(request, asset)).catchError((error, trace) {
if (error is! AssetNotFoundException) throw error;
return environment.barback.getAssetById(id.addExtension("/index.html"))
.then((asset) {
if (request.url.path.endsWith('/')) return _serveAsset(request, asset);
// We only want to serve index.html if the URL explicitly ends in a
// slash. For other URLs, we redirect to one with the slash added to
// implicitly support that too. This follows Apache's behavior.
logRequest(request, "302 Redirect to ${request.url}/");
return new shelf.Response.found('${request.url}/');
}).catchError((newError, newTrace) {
        // If we find neither the original file nor the index, we should report
// the error about the original to the user.
throw newError is AssetNotFoundException ? error : newError;
});
}).catchError((error, trace) {
if (error is! AssetNotFoundException) {
trace = new Chain.forTrace(trace);
logRequest(request, "$error\n$trace");
addError(error, trace);
close();
return new shelf.Response.internalServerError();
}
addResult(new BarbackServerResult._failure(request.url, id, error));
return notFound(request, asset: id);
}).then((response) {
// Allow requests of any origin to access "pub serve". This is useful for
// running "pub serve" in parallel with another development server. Since
// "pub serve" is only used as a development server and doesn't require
// any sort of credentials anyway, this is secure.
return response.change(
headers: const {"Access-Control-Allow-Origin": "*"});
});
}
/// Returns the body of [asset] as a response to [request].
Future<shelf.Response> _serveAsset(shelf.Request request, Asset asset) {
return validateStream(asset.read()).then((stream) {
addResult(new BarbackServerResult._success(request.url, asset.id));
var headers = {};
var mimeType = lookupMimeType(asset.id.path);
if (mimeType != null) headers['Content-Type'] = mimeType;
return new shelf.Response.ok(stream, headers: headers);
}).catchError((error, trace) {
addResult(new BarbackServerResult._failure(request.url, asset.id, error));
// If we couldn't read the asset, handle the error gracefully.
if (error is FileSystemException) {
// Assume this means the asset was a file-backed source asset
// and we couldn't read it, so treat it like a missing asset.
return notFound(request, error: error.toString(), asset: asset.id);
}
trace = new Chain.forTrace(trace);
logRequest(request, "$error\n$trace");
// Otherwise, it's some internal error.
return new shelf.Response.internalServerError(body: error.toString());
});
}
}
/// The result of the server handling a URL.
///
/// Only requests for which an asset was requested from barback will emit a
/// result. Malformed requests will be handled internally.
class BarbackServerResult {
/// The requested url.
final Uri url;
/// The id that [url] identifies.
final AssetId id;
/// The error thrown by barback.
///
/// If the request was served successfully, this will be null.
final error;
/// Whether the request was served successfully.
bool get isSuccess => error == null;
/// Whether the request was served unsuccessfully.
bool get isFailure => !isSuccess;
BarbackServerResult._success(this.url, this.id)
: error = null;
BarbackServerResult._failure(this.url, this.id, this.error);
}

View file

@ -1,123 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.base_server;
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'package:barback/barback.dart';
import 'package:shelf/shelf.dart' as shelf;
import 'package:shelf/shelf_io.dart' as shelf_io;
import '../log.dart' as log;
import '../utils.dart';
import 'asset_environment.dart';
/// Base class for a pub-controlled server.
abstract class BaseServer<T> {
/// The [AssetEnvironment] being served.
final AssetEnvironment environment;
/// The underlying HTTP server.
final HttpServer _server;
/// The server's port.
int get port => _server.port;
/// The server's address.
InternetAddress get address => _server.address;
/// The server's base URL.
Uri get url => baseUrlForAddress(_server.address, port);
/// The results of requests handled by the server.
///
/// These can be used to provide visual feedback for the server's processing.
/// This stream is also used to emit any programmatic errors that occur in the
/// server.
Stream<T> get results => _resultsController.stream;
final _resultsController = new StreamController<T>.broadcast();
BaseServer(this.environment, this._server) {
shelf_io.serveRequests(_server, const shelf.Pipeline()
.addMiddleware(shelf.createMiddleware(errorHandler: _handleError))
.addHandler(handleRequest));
}
/// Closes this server.
Future close() {
return Future.wait([_server.close(), _resultsController.close()]);
}
/// Handles an HTTP request.
handleRequest(shelf.Request request);
/// Returns a 405 response to [request].
shelf.Response methodNotAllowed(shelf.Request request) {
logRequest(request, "405 Method Not Allowed");
return new shelf.Response(405,
body: "The ${request.method} method is not allowed for ${request.url}.",
headers: {'Allow': 'GET, HEAD'});
}
/// Returns a 404 response to [request].
///
/// If [asset] is given, it is the ID of the asset that couldn't be found.
shelf.Response notFound(shelf.Request request, {String error,
AssetId asset}) {
logRequest(request, "Not Found");
// TODO(rnystrom): Apply some styling to make it visually clear that this
// error is coming from pub serve itself.
var body = new StringBuffer();
body.writeln("""
<!DOCTYPE html>
<head>
<title>404 Not Found</title>
</head>
<body>
<h1>404 Not Found</h1>""");
if (asset != null) {
body.writeln("<p>Could not find asset "
"<code>${HTML_ESCAPE.convert(asset.path)}</code> in package "
"<code>${HTML_ESCAPE.convert(asset.package)}</code>.</p>");
}
if (error != null) {
body.writeln("<p>Error: ${HTML_ESCAPE.convert(error)}</p>");
}
body.writeln("""
</body>""");
// Force a UTF-8 encoding so that error messages in non-English locales are
// sent correctly.
return new shelf.Response.notFound(body.toString(),
headers: {'Content-Type': 'text/html; charset=utf-8'});
}
/// Log [message] at [log.Level.FINE] with metadata about [request].
void logRequest(shelf.Request request, String message) =>
log.fine("$this ${request.method} ${request.url}\n$message");
/// Adds [result] to the server's [results] stream.
void addResult(T result) {
_resultsController.add(result);
}
/// Adds [error] as an error to the server's [results] stream.
void addError(error, [stackTrace]) {
_resultsController.addError(error, stackTrace);
}
/// Handles an error thrown by [handleRequest].
_handleError(error, StackTrace stackTrace) {
_resultsController.addError(error, stackTrace);
close();
return new shelf.Response.internalServerError();
}
}

View file

@ -1,60 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.cycle_exception;
import '../exceptions.dart';
/// An exception thrown when a transformer dependency cycle is detected.
///
/// A cycle exception is usually produced within a deeply-nested series of
/// calls. The API is designed to make it easy for each of these calls to add to
/// the message so that the full reasoning for the cycle is made visible to the
/// user.
///
/// Each call's individual message is called a "step". A [CycleException] is
/// represented internally as a linked list of steps.
class CycleException implements ApplicationException {
/// The step for this exception.
final String _step;
/// The next exception in the linked list.
///
/// [_next]'s steps come after [_step].
final CycleException _next;
/// A list of all steps in the cycle.
List<String> get steps {
if (_step == null) return [];
var exception = this;
var steps = [];
while (exception != null) {
steps.add(exception._step);
exception = exception._next;
}
return steps;
}
String get message {
var steps = this.steps;
if (steps.isEmpty) return "Transformer cycle detected.";
return "Transformer cycle detected:\n" +
steps.map((step) => " $step").join("\n");
}
/// Creates a new [CycleException] with zero or one steps.
CycleException([this._step])
: _next = null;
CycleException._(this._step, this._next);
/// Returns a copy of [this] with [step] added to the beginning of [steps].
CycleException prependStep(String step) {
if (_step == null) return new CycleException(step);
return new CycleException._(step, this);
}
String toString() => message;
}
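// Hedged sketch of how steps accumulate as the exception propagates outward
// (package names hypothetical):
//
//     var error = new CycleException("foo is transformed by bar")
//         .prependStep("myapp depends on foo");
//     // error.steps == ["myapp depends on foo", "foo is transformed by bar"]
//     // error.message lists both steps after "Transformer cycle detected:".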

View file

@ -1,419 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.dart2js_transformer;
import 'dart:async';
import 'dart:convert';
import 'package:analyzer/analyzer.dart';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as path;
import 'package:pool/pool.dart';
import 'package:compiler/compiler.dart' as compiler;
import 'package:compiler/src/dart2js.dart'
show AbortLeg;
import 'package:compiler/src/io/source_file.dart';
import '../barback.dart';
import '../dart.dart' as dart;
import '../utils.dart';
import 'asset_environment.dart';
/// The set of all valid configuration options for this transformer.
final _validOptions = new Set<String>.from([
'commandLineOptions', 'checked', 'csp', 'minify', 'verbose', 'environment',
'preserveUris', 'suppressWarnings', 'suppressHints',
'suppressPackageWarnings', 'terse', 'sourceMaps'
]);
/// A [Transformer] that uses dart2js's library API to transform Dart
/// entrypoints in "web" to JavaScript.
class Dart2JSTransformer extends Transformer implements LazyTransformer {
/// We use this to ensure that only one compilation is in progress at a time.
///
/// Dart2js uses lots of memory, so if we try to actually run compiles in
/// parallel, it takes down the VM. The tracking bug to do something better
/// is here: https://code.google.com/p/dart/issues/detail?id=14730.
static final _pool = new Pool(1);
final AssetEnvironment _environment;
final BarbackSettings _settings;
/// Whether source maps should be generated for the compiled JS.
bool get _generateSourceMaps => _configBool('sourceMaps',
defaultsTo: _settings.mode != BarbackMode.RELEASE);
Dart2JSTransformer.withSettings(this._environment, this._settings) {
var invalidOptions = _settings.configuration.keys.toSet()
.difference(_validOptions);
if (invalidOptions.isEmpty) return;
throw new FormatException("Unrecognized dart2js "
"${pluralize('option', invalidOptions.length)} "
"${toSentence(invalidOptions.map((option) => '"$option"'))}.");
}
Dart2JSTransformer(AssetEnvironment environment, BarbackMode mode)
: this.withSettings(environment, new BarbackSettings({}, mode));
/// Only ".dart" entrypoint files within a buildable directory are processed.
bool isPrimary(AssetId id) {
if (id.extension != ".dart") return false;
// "lib" should only contain libraries. For efficiency's sake, we don't
// look for entrypoints in there.
return !id.path.startsWith("lib/");
}
Future apply(Transform transform) {
// TODO(nweiz): If/when barback starts reporting what assets were modified,
// don't re-run the entrypoint detection logic unless the primary input was
// actually modified. See issue 16817.
return _isEntrypoint(transform.primaryInput).then((isEntrypoint) {
if (!isEntrypoint) return null;
// Wait for any ongoing apply to finish first.
return _pool.withResource(() {
transform.logger.info("Compiling ${transform.primaryInput.id}...");
var stopwatch = new Stopwatch()..start();
return _doCompilation(transform).then((_) {
stopwatch.stop();
transform.logger.info("Took ${stopwatch.elapsed} to compile "
"${transform.primaryInput.id}.");
});
});
});
}
void declareOutputs(DeclaringTransform transform) {
var primaryId = transform.primaryId;
transform.declareOutput(primaryId.addExtension(".js"));
if (_generateSourceMaps) {
transform.declareOutput(primaryId.addExtension(".js.map"));
}
}
/// Returns whether or not [asset] might be an entrypoint.
Future<bool> _isEntrypoint(Asset asset) {
return asset.readAsString().then((code) {
try {
var name = asset.id.path;
if (asset.id.package != _environment.rootPackage.name) {
name += " in ${asset.id.package}";
}
var parsed = parseCompilationUnit(code, name: name);
return dart.isEntrypoint(parsed);
} on AnalyzerErrorGroup {
// If we get a parse error, consider the asset primary so we report
// dart2js's more detailed error message instead.
return true;
}
});
}
/// Run the dart2js compiler.
Future _doCompilation(Transform transform) {
var provider = new _BarbackCompilerProvider(_environment, transform,
generateSourceMaps: _generateSourceMaps);
// Create a "path" to the entrypoint script. The entrypoint may not actually
// be on disk, but this gives dart2js a root to resolve relative paths
// against.
var id = transform.primaryInput.id;
var entrypoint = _environment.graph.packages[id.package].path(id.path);
// TODO(rnystrom): Should have more sophisticated error-handling here. Need
// to report compile errors to the user in an easily visible way. Need to
// make sure paths in errors are mapped to the original source path so they
// can understand them.
return dart.compile(
entrypoint, provider,
commandLineOptions: _configCommandLineOptions,
csp: _configBool('csp'),
checked: _configBool('checked'),
minify: _configBool(
'minify', defaultsTo: _settings.mode == BarbackMode.RELEASE),
verbose: _configBool('verbose'),
environment: _configEnvironment,
packageRoot: _environment.rootPackage.path("packages"),
analyzeAll: _configBool('analyzeAll'),
preserveUris: _configBool('preserveUris'),
suppressWarnings: _configBool('suppressWarnings'),
suppressHints: _configBool('suppressHints'),
suppressPackageWarnings: _configBool(
'suppressPackageWarnings', defaultsTo: true),
terse: _configBool('terse'),
includeSourceMapUrls: _generateSourceMaps);
}
/// Parses and returns the "commandLineOptions" configuration option.
List<String> get _configCommandLineOptions {
if (!_settings.configuration.containsKey('commandLineOptions')) return null;
var options = _settings.configuration['commandLineOptions'];
if (options is List && options.every((option) => option is String)) {
return options;
}
throw new FormatException('Invalid value for '
'\$dart2js.commandLineOptions: ${JSON.encode(options)} (expected list '
'of strings).');
}
/// Parses and returns the "environment" configuration option.
Map<String, String> get _configEnvironment {
if (!_settings.configuration.containsKey('environment')) {
return _environment.environmentConstants;
}
var environment = _settings.configuration['environment'];
if (environment is Map &&
environment.keys.every((key) => key is String) &&
environment.values.every((key) => key is String)) {
return mergeMaps(environment, _environment.environmentConstants);
}
throw new FormatException('Invalid value for \$dart2js.environment: '
'${JSON.encode(environment)} (expected map from strings to strings).');
}
/// Parses and returns a boolean configuration option.
///
/// [defaultsTo] is the default value of the option.
bool _configBool(String name, {bool defaultsTo: false}) {
if (!_settings.configuration.containsKey(name)) return defaultsTo;
var value = _settings.configuration[name];
if (value is bool) return value;
throw new FormatException('Invalid value for \$dart2js.$name: '
'${JSON.encode(value)} (expected true or false).');
}
}
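// Hedged construction sketch (the option values are hypothetical): the
// configuration map corresponds to the user's "$dart2js" options, e.g.
//
//     new Dart2JSTransformer.withSettings(environment,
//         new BarbackSettings({"minify": true, "csp": true},
//             BarbackMode.RELEASE));
//
// Unrecognized keys are rejected by the FormatException in
// Dart2JSTransformer.withSettings above.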
/// Defines an interface for dart2js to communicate with barback and pub.
///
/// Note that most of the implementation of diagnostic handling here was
/// copied from [FormattingDiagnosticHandler] in dart2js. The primary
/// difference is that it uses barback's logging code and, more importantly, it
/// handles missing source files more gracefully.
class _BarbackCompilerProvider implements dart.CompilerProvider {
Uri get libraryRoot => Uri.parse("${path.toUri(_libraryRootPath)}/");
final AssetEnvironment _environment;
final Transform _transform;
String _libraryRootPath;
/// The map of previously loaded files.
///
/// Used to show where an error occurred in a source file.
final _sourceFiles = new Map<String, SourceFile>();
// TODO(rnystrom): Make these configurable.
/// Whether or not warnings should be logged.
var _showWarnings = true;
/// Whether or not hints should be logged.
var _showHints = true;
/// Whether or not verbose info messages should be logged.
var _verbose = false;
/// Whether an exception should be thrown on an error to stop compilation.
var _throwOnError = false;
/// This gets set after a fatal error is reported to quash any subsequent
/// errors.
var _isAborting = false;
final bool generateSourceMaps;
compiler.Diagnostic _lastKind = null;
static final int _FATAL =
compiler.Diagnostic.CRASH.ordinal |
compiler.Diagnostic.ERROR.ordinal;
static final int _INFO =
compiler.Diagnostic.INFO.ordinal |
compiler.Diagnostic.VERBOSE_INFO.ordinal;
_BarbackCompilerProvider(this._environment, this._transform,
{this.generateSourceMaps: true}) {
// Dart2js outputs source maps that reference the Dart SDK sources. For
// that to work, those sources need to be inside the build environment. We
// do that by placing them in a special "$sdk" pseudo-package. In order for
// dart2js to generate the right URLs to point to that package, we give it
// a library root that corresponds to where that package can be found
// relative to the public source directory containing that entrypoint.
//
// For example, say the package being compiled is "/dev/myapp", the
// entrypoint is "web/sub/foo/bar.dart", and the source directory is
// "web/sub". This means the SDK sources will be (conceptually) at:
//
// /dev/myapp/web/sub/packages/$sdk/lib/
//
// This implies that the asset path for a file in the SDK is:
//
// $sdk|lib/lib/...
//
// TODO(rnystrom): Fix this if #17751 is fixed.
var buildDir = _environment.getSourceDirectoryContaining(
_transform.primaryInput.id.path);
_libraryRootPath = _environment.rootPackage.path(
buildDir, "packages", r"$sdk");
}
/// A [CompilerInputProvider] for dart2js.
Future<String> provideInput(Uri resourceUri) {
// We only expect to get absolute "file:" URLs from dart2js.
assert(resourceUri.isAbsolute);
assert(resourceUri.scheme == "file");
var sourcePath = path.fromUri(resourceUri);
return _readResource(resourceUri).then((source) {
_sourceFiles[resourceUri.toString()] =
new StringSourceFile(resourceUri, path.relative(sourcePath), source);
return source;
});
}
/// A [CompilerOutputProvider] for dart2js.
EventSink<String> provideOutput(String name, String extension) {
// TODO(rnystrom): Do this more cleanly. See: #17403.
if (!generateSourceMaps && extension.endsWith(".map")) {
return new NullSink<String>();
}
// TODO(nweiz): remove this special case when dart2js stops generating these
// files.
if (extension.endsWith(".precompiled.js")) return new NullSink<String>();
var primaryId = _transform.primaryInput.id;
// Dart2js uses an empty string for the name of the entrypoint library.
// Otherwise, it's the name of a deferred library.
var outPath;
if (name == "") {
outPath = _transform.primaryInput.id.path;
} else {
var dirname = path.url.dirname(_transform.primaryInput.id.path);
outPath = path.url.join(dirname, name);
}
var id = new AssetId(primaryId.package, "$outPath.$extension");
// Make a sink that dart2js can write to.
var sink = new StreamController<String>();
// dart2js gives us strings, but stream assets expect byte lists.
var stream = UTF8.encoder.bind(sink.stream);
// And give it to barback as a stream it can read from.
_transform.addOutput(new Asset.fromStream(id, stream));
return sink;
}
/// A [DiagnosticHandler] for dart2js, loosely based on
/// [FormattingDiagnosticHandler].
void handleDiagnostic(Uri uri, int begin, int end,
String message, compiler.Diagnostic kind) {
// TODO(ahe): Remove this when source map is handled differently.
if (kind.name == "source map") return;
if (_isAborting) return;
_isAborting = (kind == compiler.Diagnostic.CRASH);
var isInfo = (kind.ordinal & _INFO) != 0;
if (isInfo && uri == null && kind != compiler.Diagnostic.INFO) {
if (!_verbose && kind == compiler.Diagnostic.VERBOSE_INFO) return;
_transform.logger.info(message);
return;
}
// [_lastKind] records the previous non-INFO kind we saw.
// This is used to suppress info about a warning when warnings are
// suppressed, and similar for hints.
if (kind != compiler.Diagnostic.INFO) _lastKind = kind;
var logFn;
if (kind == compiler.Diagnostic.ERROR) {
logFn = _transform.logger.error;
} else if (kind == compiler.Diagnostic.WARNING) {
if (!_showWarnings) return;
logFn = _transform.logger.warning;
} else if (kind == compiler.Diagnostic.HINT) {
if (!_showHints) return;
logFn = _transform.logger.warning;
} else if (kind == compiler.Diagnostic.CRASH) {
logFn = _transform.logger.error;
} else if (kind == compiler.Diagnostic.INFO) {
if (_lastKind == compiler.Diagnostic.WARNING && !_showWarnings) return;
if (_lastKind == compiler.Diagnostic.HINT && !_showHints) return;
logFn = _transform.logger.info;
} else {
throw new Exception('Unknown kind: $kind (${kind.ordinal})');
}
var fatal = (kind.ordinal & _FATAL) != 0;
if (uri == null) {
logFn(message);
} else {
SourceFile file = _sourceFiles[uri.toString()];
if (file == null) {
// We got a message before loading the file, so just report the message
// itself.
logFn('$uri: $message');
} else {
logFn(file.getLocationMessage(message, begin, end));
}
}
if (fatal && _throwOnError) {
_isAborting = true;
throw new AbortLeg(message);
}
}
Future<String> _readResource(Uri url) {
return new Future.sync(() {
// Find the corresponding asset in barback.
var id = _sourceUrlToId(url);
if (id != null) return _transform.readInputAsString(id);
// Don't allow arbitrary file paths that point to things not in packages.
// Doing so won't work in Dartium.
throw new Exception(
"Cannot read $url because it is outside of the build environment.");
});
}
AssetId _sourceUrlToId(Uri url) {
// See if it's a package path.
var id = packagesUrlToId(url);
if (id != null) return id;
// See if it's a path to a "public" asset within the root package. All
// other files in the root package are not visible to transformers, so
// should be loaded directly from disk.
var sourcePath = path.fromUri(url);
if (_environment.containsPath(sourcePath)) {
var relative = path.toUri(_environment.rootPackage.relative(sourcePath))
.toString();
return new AssetId(_environment.rootPackage.name, relative);
}
return null;
}
}
/// An [EventSink] that discards all data. Provided to dart2js when we don't
/// want an actual output.
class NullSink<T> implements EventSink<T> {
void add(T event) {}
void addError(errorEvent, [StackTrace stackTrace]) {}
void close() {}
}

View file

@ -1,29 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.dart_forwarding_transformer;
import 'dart:async';
import 'package:barback/barback.dart';
import '../utils.dart';
/// A single transformer that just forwards any ".dart" file as an output when
/// not in release mode.
///
/// Since the [Dart2JSTransformer] consumes its inputs, this is used in
/// parallel to make sure the original Dart file is still available for use by
/// Dartium.
class DartForwardingTransformer extends Transformer {
DartForwardingTransformer();
String get allowedExtensions => ".dart";
Future apply(Transform transform) {
return newFuture(() {
transform.addOutput(transform.primaryInput);
});
}
}

View file

@ -1,446 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.dependency_computer;
import 'package:barback/barback.dart';
import 'package:path/path.dart' as p;
import '../dart.dart';
import '../io.dart';
import '../package.dart';
import '../package_graph.dart';
import '../pubspec.dart';
import '../utils.dart';
import 'cycle_exception.dart';
import 'transformer_config.dart';
import 'transformer_id.dart';
/// A class for determining dependencies between transformers and from Dart
/// libraries onto transformers.
class DependencyComputer {
/// The package graph being analyzed.
final PackageGraph _graph;
/// The names of packages for which [_PackageDependencyComputer]s are
/// currently loading.
///
/// This is used to detect transformer cycles. If a package's libraries or
/// transformers are referenced while the transformers that apply to it are
/// being processed, that indicates an unresolvable cycle.
final _loadingPackageComputers = new Set<String>();
/// [_PackageDependencyComputer]s that have been loaded.
final _packageComputers = new Map<String, _PackageDependencyComputer>();
/// A cache of the results of [transformersNeededByPackage].
final _transformersNeededByPackages = new Map<String, Set<TransformerId>>();
/// The set of all packages that neither use transformers themselves nor
/// import packages that use transformers.
///
/// This is precomputed before any package computers are loaded.
final _untransformedPackages = new Set<String>();
DependencyComputer(this._graph) {
for (var package in ordered(_graph.packages.keys)) {
if (_graph.transitiveDependencies(package).every((dependency) =>
dependency.pubspec.transformers.isEmpty)) {
_untransformedPackages.add(package);
}
}
ordered(_graph.packages.keys).forEach(_loadPackageComputer);
}
/// Returns a dependency graph for [transformers], or for all transformers if
/// [transformers] is `null`.
///
/// This graph is represented by a map whose keys are the vertices and whose
/// values are sets representing edges from the given vertex. Each vertex is a
/// [TransformerId]. If there's an edge from `T1` to `T2`, then `T2` must be
/// loaded before `T1` can be loaded.
///
/// The returned graph is transitively closed. That is, if there's an edge
/// from `T1` to `T2` and an edge from `T2` to `T3`, there's also an edge from
/// `T1` to `T3`.
Map<TransformerId, Set<TransformerId>> transformersNeededByTransformers(
[Iterable<TransformerId> transformers]) {
var result = {};
if (transformers == null) {
transformers = ordered(_graph.packages.keys).expand((packageName) {
var package = _graph.packages[packageName];
return package.pubspec.transformers.expand((phase) {
return phase.expand((config) {
var id = config.id;
if (id.isBuiltInTransformer) return [];
if (package.name != _graph.entrypoint.root.name &&
!config.canTransformPublicFiles) {
return [];
}
return [id];
});
});
});
}
for (var id in transformers) {
result[id] = _transformersNeededByTransformer(id);
}
return result;
}
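// Worked example (package names hypothetical): if package "a"'s transformer
// imports a library of package "b" that is transformed by "b"'s own
// transformer, the result maps a's transformer id to a set containing b's id
// (so "b" must be loaded before "a") and maps b's id to an empty set.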
/// Returns the set of all transformers needed to load the library identified
/// by [id].
Set<TransformerId> transformersNeededByLibrary(AssetId id) {
var library = _graph.packages[id.package].path(p.fromUri(id.path));
_loadPackageComputer(id.package);
return _packageComputers[id.package].transformersNeededByLibrary(library)
.where((id) => !id.isBuiltInTransformer).toSet();
}
/// Returns the set of all transformers that need to be loaded before [id] is
/// loaded.
Set<TransformerId> _transformersNeededByTransformer(TransformerId id) {
if (id.isBuiltInTransformer) return new Set();
if (!_graph.packages.containsKey(id.package)) {
// Throw this here rather than during pubspec parsing because by the time
// we're here, we're sure that the package is actually transforming public
// assets and that being unable to load it will be a problem.
throw new PubspecException(
'Error loading transformer "$id": package "${id.package}" is not '
'a dependency.',
id.span);
}
_loadPackageComputer(id.package);
return _packageComputers[id.package]._transformersNeededByTransformer(id);
}
/// Returns the set of all transformers that need to be loaded before
/// [packageUri] (a "package:" URI) can be safely imported from an external
/// package.
Set<TransformerId> _transformersNeededByPackageUri(Uri packageUri) {
var components = p.split(p.fromUri(packageUri.path));
var packageName = components.first;
if (_untransformedPackages.contains(packageName)) return new Set();
var package = _graph.packages[packageName];
if (package == null) {
// TODO(nweiz): include source range information here.
fail('A transformer imported unknown package "$packageName" (in '
'"$packageUri").');
}
var library = package.path('lib', p.joinAll(components.skip(1)));
_loadPackageComputer(packageName);
return _packageComputers[packageName].transformersNeededByLibrary(library);
}
/// Returns the set of all transformers that need to be loaded before
/// everything in [rootPackage] can be used.
///
/// This is conservative in that it returns all transformers that could
/// theoretically affect [rootPackage]. It only looks at which transformers
/// packages use and which packages they depend on; it ignores imports
/// entirely.
///
/// We fall back on this conservative analysis when a transformer
/// (transitively) imports a transformed library. The result of the
/// transformation may import any dependency or hit any transformer, so we
/// have to assume that it will.
Set<TransformerId> _transformersNeededByPackage(String rootPackage) {
if (_untransformedPackages.contains(rootPackage)) return new Set();
if (_transformersNeededByPackages.containsKey(rootPackage)) {
return _transformersNeededByPackages[rootPackage];
}
var results = new Set();
var seen = new Set();
traversePackage(packageName) {
if (seen.contains(packageName)) return;
seen.add(packageName);
var package = _graph.packages[packageName];
for (var phase in package.pubspec.transformers) {
for (var config in phase) {
var id = config.id;
if (id.isBuiltInTransformer) continue;
if (_loadingPackageComputers.contains(id.package)) {
throw new CycleException("$packageName is transformed by $id");
}
results.add(id);
}
}
var dependencies = packageName == _graph.entrypoint.root.name ?
package.immediateDependencies : package.dependencies;
for (var dep in dependencies) {
try {
traversePackage(dep.name);
} on CycleException catch (error) {
throw error.prependStep("$packageName depends on ${dep.name}");
}
}
}
traversePackage(rootPackage);
_transformersNeededByPackages[rootPackage] = results;
return results;
}
/// Ensure that a [_PackageDependencyComputer] for [packageName] is loaded.
///
/// If the computer has already been loaded, this does nothing. If the
/// computer is in the process of being loaded, this throws a
/// [CycleException].
void _loadPackageComputer(String packageName) {
if (_loadingPackageComputers.contains(packageName)) {
throw new CycleException();
}
if (_packageComputers.containsKey(packageName)) return;
_loadingPackageComputers.add(packageName);
_packageComputers[packageName] =
new _PackageDependencyComputer(this, packageName);
_loadingPackageComputers.remove(packageName);
}
}
/// A helper class for [computeTransformersNeededByTransformers] that keeps
/// package-specific state and caches over the course of the computation.
class _PackageDependencyComputer {
/// The parent [DependencyComputer].
final DependencyComputer _dependencyComputer;
/// The package whose dependencies [this] is computing.
final Package _package;
/// The set of transformers that currently apply to [this].
///
/// This is added to phase-by-phase while [this] is being initialized. This is
/// necessary to model the dependencies of a transformer that's applied to its
/// own package.
final _applicableTransformers = new Set<TransformerConfig>();
/// A cache of imports and exports parsed from libraries in this package.
final _directives = new Map<Uri, Set<Uri>>();
/// The set of libraries for which there are currently active
/// [transformersNeededByLibrary] calls.
///
/// This is used to guard against infinite loops caused by libraries in
/// different packages importing one another circularly.
/// [transformersNeededByLibrary] will return an empty set for any active
/// libraries.
final _activeLibraries = new Set<String>();
/// A cache of the results of [_transformersNeededByTransformer].
final _transformersNeededByTransformers =
new Map<TransformerId, Set<TransformerId>>();
/// A cache of the results of [_getTransitiveExternalDirectives].
///
/// This is invalidated whenever [_applicableTransformers] changes.
final _transitiveExternalDirectives = new Map<String, Set<Uri>>();
_PackageDependencyComputer(DependencyComputer dependencyComputer,
String packageName)
: _dependencyComputer = dependencyComputer,
_package = dependencyComputer._graph.packages[packageName] {
var isRootPackage =
packageName == _dependencyComputer._graph.entrypoint.root.name;
// If [_package] uses its own transformers, there will be fewer transformers
// running on [_package] while its own transformers are loading than there
// will be once all its transformers are finished loading. To handle this,
// we run [_transformersNeededByTransformer] to pre-populate
// [_transformersNeededByLibraries] while [_applicableTransformers] is
// smaller.
for (var phase in _package.pubspec.transformers) {
for (var config in phase) {
// Ignore non-root transformers on non-public files.
if (!isRootPackage && !config.canTransformPublicFiles) continue;
var id = config.id;
try {
if (id.package != _package.name) {
// Probe [id]'s transformer dependencies to ensure that it doesn't
// depend on this package. If it does, a CycleError will be thrown.
_dependencyComputer._transformersNeededByTransformer(id);
} else {
// Store the transformers needed specifically with the current set
// of [_applicableTransformers]. When reporting this transformer's
// dependencies, [computeTransformersNeededByTransformers] will use
// this stored set of dependencies rather than the potentially wider
// set that would be recomputed if [transformersNeededByLibrary]
// were called anew.
_transformersNeededByTransformers[id] =
transformersNeededByLibrary(_package.transformerPath(id));
}
} on CycleException catch (error) {
throw error.prependStep("$packageName is transformed by $id");
}
}
// Clear the cached imports and exports because the new transformers may
// start transforming a library whose directives were previously
// statically analyzable.
_transitiveExternalDirectives.clear();
_applicableTransformers.addAll(phase);
}
}
/// Returns the set of all transformers that need to be loaded before [id] is
/// loaded.
///
/// [id] must refer to a transformer in [_package].
Set<TransformerId> _transformersNeededByTransformer(TransformerId id) {
assert(id.package == _package.name);
if (_transformersNeededByTransformers.containsKey(id)) {
return _transformersNeededByTransformers[id];
}
_transformersNeededByTransformers[id] =
transformersNeededByLibrary(_package.transformerPath(id));
return _transformersNeededByTransformers[id];
}
/// Returns the set of all transformers that need to be loaded before
/// [library] is imported.
///
/// If [library] or anything it imports/exports within this package is
/// transformed by [_applicableTransformers], this will return a conservative
/// set of transformers (see also
/// [DependencyComputer._transformersNeededByPackage]).
Set<TransformerId> transformersNeededByLibrary(String library) {
library = p.normalize(library);
if (_activeLibraries.contains(library)) return new Set();
_activeLibraries.add(library);
try {
var externalDirectives = _getTransitiveExternalDirectives(library);
if (externalDirectives == null) {
var rootName = _dependencyComputer._graph.entrypoint.root.name;
var dependencies = _package.name == rootName ?
_package.immediateDependencies : _package.dependencies;
// If anything transitively imported/exported by [library] within this
// package is modified by a transformer, we don't know what it will
// load, so we take the conservative approach and say it depends on
// everything.
return _applicableTransformers.map((config) => config.id).toSet().union(
unionAll(dependencies.map((dep) {
try {
return _dependencyComputer._transformersNeededByPackage(dep.name);
} on CycleException catch (error) {
throw error.prependStep("${_package.name} depends on ${dep.name}");
}
})));
} else {
// If nothing's transformed, then we only depend on the transformers
// used by the external packages' libraries that we import or export.
return unionAll(externalDirectives.map((uri) {
try {
return _dependencyComputer._transformersNeededByPackageUri(uri);
} on CycleException catch (error) {
var packageName = p.url.split(uri.path).first;
throw error.prependStep("${_package.name} depends on $packageName");
}
}));
}
} finally {
_activeLibraries.remove(library);
}
}
/// Returns the set of all external package libraries transitively imported or
/// exported by [rootLibrary].
///
/// All of the returned URIs will have the "package:" scheme. None of them
/// will be URIs for this package.
///
/// If [rootLibrary] transitively imports or exports a library that's modified
/// by a transformer, this will return `null`.
Set<Uri> _getTransitiveExternalDirectives(String rootLibrary) {
rootLibrary = p.normalize(rootLibrary);
if (_transitiveExternalDirectives.containsKey(rootLibrary)) {
return _transitiveExternalDirectives[rootLibrary];
}
var results = new Set();
var seen = new Set();
traverseLibrary(library) {
library = p.normalize(library);
if (seen.contains(library)) return true;
seen.add(library);
var directives = _getDirectives(library);
if (directives == null) return false;
for (var uri in directives) {
var path;
if (uri.scheme == 'package') {
var components = p.split(p.fromUri(uri.path));
if (components.first != _package.name) {
results.add(uri);
continue;
}
path = _package.path('lib', p.joinAll(components.skip(1)));
} else if (uri.scheme == '' || uri.scheme == 'file') {
path = p.join(p.dirname(library), p.fromUri(uri));
} else {
// Ignore "dart:" URIs and theoretically-possible "http:" URIs.
continue;
}
if (!traverseLibrary(path)) return false;
}
return true;
}
_transitiveExternalDirectives[rootLibrary] =
traverseLibrary(rootLibrary) ? results : null;
return _transitiveExternalDirectives[rootLibrary];
}
/// Returns the set of all imports or exports in [library].
///
/// If [library] is modified by a transformer, this will return `null`.
Set<Uri> _getDirectives(String library) {
var libraryUri = p.toUri(p.normalize(library));
var relative = p.toUri(_package.relative(library)).path;
if (_applicableTransformers.any((config) =>
config.canTransform(relative))) {
_directives[libraryUri] = null;
return null;
}
// Check the cache *after* checking [_applicableTransformers], because
// [_applicableTransformers] changes over time and may invalidate
// previously cached directives.
if (_directives.containsKey(libraryUri)) return _directives[libraryUri];
// If a nonexistent library is imported, it will probably be generated by a
// transformer.
if (!fileExists(library)) {
_directives[libraryUri] = null;
return null;
}
_directives[libraryUri] =
parseImportsAndExports(readTextFile(library), name: library)
.map((directive) => Uri.parse(directive.uri.stringValue))
.toSet();
return _directives[libraryUri];
}
}
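A rough usage sketch may help orient readers here. It is not part of the original file: it assumes an already-loaded [PackageGraph] (as load_all_transformers.dart below has), assumes the sketch lives alongside these files in lib/src/barback/, and the package name and asset path are made-up illustration values.

import 'package:barback/barback.dart';

import '../package_graph.dart';
import 'dependency_computer.dart';

/// Hedged sketch: asks which transformers must be loaded before an entrypoint
/// library can be imported, using only the public [DependencyComputer] API
/// exercised by load_all_transformers.dart.
void printTransformersNeededByEntrypoint(PackageGraph graph) {
  var computer = new DependencyComputer(graph);
  // "myapp|web/main.dart" is an illustrative asset id, not a real one.
  var needed = computer.transformersNeededByLibrary(
      new AssetId("myapp", "web/main.dart"));
  print("Must load first: $needed");
}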

View file

@ -1,69 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.excluding_aggregate_transformer;
import 'dart:async';
import 'package:barback/barback.dart';
import 'transformer_config.dart';
/// Decorates an inner [AggregateTransformer] and handles including and
/// excluding primary inputs.
class ExcludingAggregateTransformer extends AggregateTransformer {
/// If [config] defines includes or excludes, wraps [inner] in an
/// [ExcludingAggregateTransformer] that handles those.
///
/// Otherwise, just returns [inner] unmodified.
static AggregateTransformer wrap(AggregateTransformer inner,
TransformerConfig config) {
if (!config.hasExclusions) return inner;
if (inner is LazyAggregateTransformer) {
return new _LazyExcludingAggregateTransformer(
inner as LazyAggregateTransformer, config);
} else if (inner is DeclaringAggregateTransformer) {
return new _DeclaringExcludingAggregateTransformer(
inner as DeclaringAggregateTransformer, config);
} else {
return new ExcludingAggregateTransformer._(inner, config);
}
}
final AggregateTransformer _inner;
/// The config containing rules for which assets to include or exclude.
final TransformerConfig _config;
ExcludingAggregateTransformer._(this._inner, this._config);
classifyPrimary(AssetId id) {
if (!_config.canTransform(id.path)) return null;
return _inner.classifyPrimary(id);
}
Future apply(AggregateTransform transform) => _inner.apply(transform);
String toString() => _inner.toString();
}
class _DeclaringExcludingAggregateTransformer
extends ExcludingAggregateTransformer
implements DeclaringAggregateTransformer {
_DeclaringExcludingAggregateTransformer(DeclaringAggregateTransformer inner,
TransformerConfig config)
: super._(inner as AggregateTransformer, config);
Future declareOutputs(DeclaringAggregateTransform transform) =>
(_inner as DeclaringAggregateTransformer).declareOutputs(transform);
}
class _LazyExcludingAggregateTransformer
extends _DeclaringExcludingAggregateTransformer
implements LazyAggregateTransformer {
_LazyExcludingAggregateTransformer(DeclaringAggregateTransformer inner,
TransformerConfig config)
: super(inner, config);
}

View file

@ -1,66 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.excluding_transformer;
import 'dart:async';
import 'package:barback/barback.dart';
import 'transformer_config.dart';
/// Decorates an inner [Transformer] and handles including and excluding
/// primary inputs.
class ExcludingTransformer extends Transformer {
/// If [config] defines includes or excludes, wraps [inner] in an
/// [ExcludingTransformer] that handles those.
///
/// Otherwise, just returns [inner] unmodified.
static Transformer wrap(Transformer inner, TransformerConfig config) {
if (!config.hasExclusions) return inner;
if (inner is LazyTransformer) {
// TODO(nweiz): Remove these unnecessary "as"es when issue 19046 is fixed.
return new _LazyExcludingTransformer(inner as LazyTransformer, config);
} else if (inner is DeclaringTransformer) {
return new _DeclaringExcludingTransformer(
inner as DeclaringTransformer, config);
} else {
return new ExcludingTransformer._(inner, config);
}
}
final Transformer _inner;
/// The config containing rules for which assets to include or exclude.
final TransformerConfig _config;
ExcludingTransformer._(this._inner, this._config);
isPrimary(AssetId id) {
if (!_config.canTransform(id.path)) return false;
return _inner.isPrimary(id);
}
Future apply(Transform transform) => _inner.apply(transform);
String toString() => _inner.toString();
}
class _DeclaringExcludingTransformer extends ExcludingTransformer
implements DeclaringTransformer {
_DeclaringExcludingTransformer(DeclaringTransformer inner,
TransformerConfig config)
: super._(inner as Transformer, config);
Future declareOutputs(DeclaringTransform transform) =>
(_inner as DeclaringTransformer).declareOutputs(transform);
}
class _LazyExcludingTransformer extends _DeclaringExcludingTransformer
implements LazyTransformer {
_LazyExcludingTransformer(DeclaringTransformer inner,
TransformerConfig config)
: super(inner, config);
}
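For orientation, a minimal sketch of how a transformer ends up wrapped. This is not from the original source; `rawTransformer` stands for any transformer instance and `config` for the pubspec-derived configuration that referenced it.

import 'package:barback/barback.dart';

import 'excluding_transformer.dart';
import 'transformer_config.dart';

/// Hedged sketch: applies a config's $include/$exclude rules to a transformer.
/// wrap() returns [rawTransformer] untouched when the config has no
/// exclusions; otherwise the wrapper filters isPrimary() before delegating.
Transformer applyExclusions(Transformer rawTransformer,
    TransformerConfig config) {
  return ExcludingTransformer.wrap(rawTransformer, config);
}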

View file

@ -1,170 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.foreign_transformer;
import 'dart:async';
import 'dart:isolate';
import 'package:barback/barback.dart';
import '../../../asset/dart/serialize.dart';
import 'excluding_transformer.dart';
import 'excluding_aggregate_transformer.dart';
import 'transformer_config.dart';
/// A wrapper for a transformer that's in a different isolate.
class _ForeignTransformer extends Transformer {
/// The port with which we communicate with the child isolate.
///
/// This port and all messages sent across it are specific to this
/// transformer.
final SendPort _port;
/// The result of calling [toString] on the transformer in the isolate.
final String _toString;
_ForeignTransformer(Map map)
: _port = map['port'],
_toString = map['toString'];
Future<bool> isPrimary(AssetId id) {
return call(_port, {
'type': 'isPrimary',
'id': serializeId(id)
});
}
Future apply(Transform transform) {
return call(_port, {
'type': 'apply',
'transform': serializeTransform(transform)
});
}
String toString() => _toString;
}
class _ForeignDeclaringTransformer extends _ForeignTransformer
implements DeclaringTransformer {
_ForeignDeclaringTransformer(Map map)
: super(map);
Future declareOutputs(DeclaringTransform transform) {
return call(_port, {
'type': 'declareOutputs',
'transform': serializeDeclaringTransform(transform)
});
}
}
class _ForeignLazyTransformer extends _ForeignDeclaringTransformer
implements LazyTransformer {
_ForeignLazyTransformer(Map map)
: super(map);
}
/// A wrapper for an aggregate transformer that's in a different isolate.
class _ForeignAggregateTransformer extends AggregateTransformer {
/// The port with which we communicate with the child isolate.
///
/// This port and all messages sent across it are specific to this
/// transformer.
final SendPort _port;
/// The result of calling [toString] on the transformer in the isolate.
final String _toString;
_ForeignAggregateTransformer(Map map)
: _port = map['port'],
_toString = map['toString'];
Future<String> classifyPrimary(AssetId id) {
return call(_port, {
'type': 'classifyPrimary',
'id': serializeId(id)
});
}
Future apply(AggregateTransform transform) {
return call(_port, {
'type': 'apply',
'transform': serializeAggregateTransform(transform)
});
}
String toString() => _toString;
}
class _ForeignDeclaringAggregateTransformer extends _ForeignAggregateTransformer
implements DeclaringAggregateTransformer {
_ForeignDeclaringAggregateTransformer(Map map)
: super(map);
Future declareOutputs(DeclaringAggregateTransform transform) {
return call(_port, {
'type': 'declareOutputs',
'transform': serializeDeclaringAggregateTransform(transform)
});
}
}
class _ForeignLazyAggregateTransformer
extends _ForeignDeclaringAggregateTransformer
implements LazyAggregateTransformer {
_ForeignLazyAggregateTransformer(Map map)
: super(map);
}
/// A wrapper for a transformer group that's in a different isolate.
class _ForeignGroup implements TransformerGroup {
final Iterable<Iterable> phases;
/// The result of calling [toString] on the transformer group in the isolate.
final String _toString;
_ForeignGroup(TransformerConfig config, Map map)
: phases = map['phases'].map((phase) {
return phase.map((transformer) => deserializeTransformerLike(
transformer, config)).toList();
}).toList(),
_toString = map['toString'];
String toString() => _toString;
}
/// Converts a serializable map into a [Transformer], an [AggregateTransformer],
/// or a [TransformerGroup].
deserializeTransformerLike(Map map, TransformerConfig config) {
var transformer;
switch(map['type']) {
case 'TransformerGroup': return new _ForeignGroup(config, map);
case 'Transformer':
transformer = new _ForeignTransformer(map);
break;
case 'DeclaringTransformer':
transformer = new _ForeignDeclaringTransformer(map);
break;
case 'LazyTransformer':
transformer = new _ForeignLazyTransformer(map);
break;
case 'AggregateTransformer':
transformer = new _ForeignAggregateTransformer(map);
break;
case 'DeclaringAggregateTransformer':
transformer = new _ForeignDeclaringAggregateTransformer(map);
break;
case 'LazyAggregateTransformer':
transformer = new _ForeignLazyAggregateTransformer(map);
break;
default: assert(false);
}
if (transformer is Transformer) {
return ExcludingTransformer.wrap(transformer, config);
} else {
assert(transformer is AggregateTransformer);
return ExcludingAggregateTransformer.wrap(transformer, config);
}
}

View file

@ -1,169 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.load_all_transformers;
import 'dart:async';
import 'package:barback/barback.dart';
import '../log.dart' as log;
import '../package_graph.dart';
import '../utils.dart';
import 'asset_environment.dart';
import 'barback_server.dart';
import 'dependency_computer.dart';
import 'transformer_id.dart';
import 'transformer_loader.dart';
/// Loads all transformers depended on by packages in [environment].
///
/// This uses [environment]'s primary server to serve the Dart files from which
/// transformers are loaded, then adds the transformers to
/// `environment.barback`.
///
/// Any built-in transformers that are provided by the environment will
/// automatically be added to the end of the root package's cascade.
///
/// If [entrypoints] is passed, only transformers necessary to run those
/// entrypoints will be loaded.
Future loadAllTransformers(AssetEnvironment environment,
BarbackServer transformerServer, {Iterable<AssetId> entrypoints}) async {
var dependencyComputer = new DependencyComputer(environment.graph);
// If we only need to load transformers for a specific set of entrypoints,
// remove any other transformers from [transformersNeededByTransformers].
var necessaryTransformers;
if (entrypoints != null) {
if (entrypoints.isEmpty) return;
necessaryTransformers = unionAll(entrypoints.map(
dependencyComputer.transformersNeededByLibrary));
if (necessaryTransformers.isEmpty) {
log.fine("No transformers are needed for ${toSentence(entrypoints)}.");
return;
}
}
var transformersNeededByTransformers = dependencyComputer
.transformersNeededByTransformers(necessaryTransformers);
var buffer = new StringBuffer();
buffer.writeln("Transformer dependencies:");
transformersNeededByTransformers.forEach((id, dependencies) {
if (dependencies.isEmpty) {
buffer.writeln("$id: -");
} else {
buffer.writeln("$id: ${toSentence(dependencies)}");
}
});
log.fine(buffer);
var stagedTransformers = _stageTransformers(transformersNeededByTransformers);
var packagesThatUseTransformers =
_packagesThatUseTransformers(environment.graph);
var loader = new TransformerLoader(environment, transformerServer);
// Only save compiled snapshots when a physical entrypoint package is being
// used. There's no physical entrypoint when e.g. globally activating a cached
// package.
var cache = environment.rootPackage.dir == null ? null :
environment.graph.loadTransformerCache();
var first = true;
for (var stage in stagedTransformers) {
// Only cache the first stage, since its contents aren't based on other
// transformers and are thus independent of the current mode.
var snapshotPath = cache == null || !first ? null :
cache.snapshotPath(stage);
first = false;
/// Load all the transformers in [stage], then add them to the appropriate
/// locations in the transformer graphs of the packages that use them.
await loader.load(stage, snapshot: snapshotPath);
// Only update packages that use transformers in [stage].
var packagesToUpdate = unionAll(stage.map((id) =>
packagesThatUseTransformers[id]));
await Future.wait(packagesToUpdate.map((packageName) async {
var package = environment.graph.packages[packageName];
var phases = await loader.transformersForPhases(
package.pubspec.transformers);
environment.barback.updateTransformers(packageName, phases);
}));
}
if (cache != null) cache.save();
/// Add built-in transformers for the packages that need them.
await Future.wait(environment.graph.packages.values.map((package) async {
var phases = await loader.transformersForPhases(
package.pubspec.transformers);
var transformers = environment.getBuiltInTransformers(package);
if (transformers != null) phases.add(transformers);
if (phases.isEmpty) return;
// TODO(nweiz): remove the [newFuture] here when issue 17305 is fixed.
// If no transformer in [phases] applies to a source input,
// [updateTransformers] may cause a [BuildResult] to be scheduled for
// immediate emission. Issue 17305 means that the caller will be unable
// to receive this result unless we delay the update to after this
// function returns.
newFuture(() =>
environment.barback.updateTransformers(package.name, phases));
}));
}
/// Given [transformerDependencies], a directed acyclic graph, returns a list of
/// "stages" (sets of transformers).
///
/// Each stage must be fully loaded and passed to barback before the next stage
/// can be safely loaded. However, transformers within a stage can be safely
/// loaded in parallel.
List<Set<TransformerId>> _stageTransformers(
Map<TransformerId, Set<TransformerId>> transformerDependencies) {
// A map from transformer ids to the indices of the stages that those
// transformer ids should end up in. Populated by [stageNumberFor].
var stageNumbers = {};
var stages = [];
stageNumberFor(id) {
// Built-in transformers don't have to be loaded in stages, since they're
// run from pub's source. Return -1 so that the "next stage" is 0.
if (id.isBuiltInTransformer) return -1;
if (stageNumbers.containsKey(id)) return stageNumbers[id];
var dependencies = transformerDependencies[id];
stageNumbers[id] = dependencies.isEmpty ?
0 : maxAll(dependencies.map(stageNumberFor)) + 1;
return stageNumbers[id];
}
for (var id in transformerDependencies.keys) {
var stageNumber = stageNumberFor(id);
if (stages.length <= stageNumber) stages.length = stageNumber + 1;
if (stages[stageNumber] == null) stages[stageNumber] = new Set();
stages[stageNumber].add(id);
}
return stages;
}
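// Illustrative example (not from the original source; the package names are
// made up): given the dependency map
//
//   observe_helper: {}                 // needs nothing loaded first
//   polymer:        {observe_helper}
//   observe:        {observe_helper}
//
// stageNumberFor assigns observe_helper stage 0 and the other two stage 1,
// so the result is [{observe_helper}, {polymer, observe}]. Stage 0 must be
// fully loaded and handed to barback before stage 1 is loaded, but polymer
// and observe can then be loaded in parallel.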
/// Returns a map from transformer ids to all packages in [graph] that use each
/// transformer.
Map<TransformerId, Set<String>> _packagesThatUseTransformers(
PackageGraph graph) {
var results = {};
for (var package in graph.packages.values) {
for (var phase in package.pubspec.transformers) {
for (var config in phase) {
results.putIfAbsent(config.id, () => new Set()).add(package.name);
}
}
}
return results;
}

View file

@ -1,119 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.pub_package_provider;
import 'dart:async';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as path;
import '../io.dart';
import '../package_graph.dart';
import '../preprocess.dart';
import '../sdk.dart' as sdk;
import '../utils.dart';
/// An implementation of barback's [PackageProvider] interface so that barback
/// can find assets within pub packages.
class PubPackageProvider implements StaticPackageProvider {
final PackageGraph _graph;
final List<String> staticPackages;
Iterable<String> get packages =>
_graph.packages.keys.toSet().difference(staticPackages.toSet());
PubPackageProvider(PackageGraph graph)
: _graph = graph,
staticPackages = [r"$pub", r"$sdk"]..addAll(
graph.packages.keys.where(graph.isPackageStatic));
Future<Asset> getAsset(AssetId id) async {
// "$pub" is a psuedo-package that allows pub's transformer-loading
// infrastructure to share code with pub proper.
if (id.package == r'$pub') {
var components = path.url.split(id.path);
assert(components.isNotEmpty);
assert(components.first == 'lib');
components[0] = 'dart';
var file = assetPath(path.joinAll(components));
_assertExists(file, id);
// Barback may not be in the package graph if there are no user-defined
// transformers being used at all. The "$pub" sources are still provided,
// but will never be loaded.
if (!_graph.packages.containsKey("barback")) {
return new Asset.fromPath(id, file);
}
var versions = mapMap(_graph.packages,
value: (_, package) => package.version);
var contents = readTextFile(file);
contents = preprocess(contents, versions, path.toUri(file));
return new Asset.fromString(id, contents);
}
// "$sdk" is a pseudo-package that provides access to the Dart library
// sources in the SDK. The dart2js transformer uses this to locate the Dart
// sources for "dart:" libraries.
if (id.package == r'$sdk') {
// The asset path contains two "lib" entries. The first represents pub's
// concept that all public assets are in "lib". The second comes from the
// organization of the SDK itself. Strip off the first. Leave the second
// since dart2js adds it and expects it to be there.
var parts = path.split(path.fromUri(id.path));
assert(parts.isNotEmpty && parts[0] == 'lib');
parts = parts.skip(1);
var file = path.join(sdk.rootDirectory, path.joinAll(parts));
_assertExists(file, id);
return new Asset.fromPath(id, file);
}
var nativePath = path.fromUri(id.path);
var file = _graph.packages[id.package].path(nativePath);
_assertExists(file, id);
return new Asset.fromPath(id, file);
}
/// Throw an [AssetNotFoundException] for [id] if [path] doesn't exist.
void _assertExists(String path, AssetId id) {
if (!fileExists(path)) throw new AssetNotFoundException(id);
}
Stream<AssetId> getAllAssetIds(String packageName) {
if (packageName == r'$pub') {
// "$pub" is a pseudo-package that allows pub's transformer-loading
// infrastructure to share code with pub proper. We provide it only during
// the initial transformer loading process.
var dartPath = assetPath('dart');
return new Stream.fromIterable(listDir(dartPath, recursive: true)
// Don't include directories.
.where((file) => path.extension(file) == ".dart")
.map((library) {
var idPath = path.join('lib', path.relative(library, from: dartPath));
return new AssetId('\$pub', path.toUri(idPath).toString());
}));
} else if (packageName == r'$sdk') {
// "$sdk" is a pseudo-package that allows the dart2js transformer to find
// the Dart core libraries without hitting the file system directly. This
// ensures they work with source maps.
var libPath = path.join(sdk.rootDirectory, "lib");
return new Stream.fromIterable(listDir(libPath, recursive: true)
.where((file) => path.extension(file) == ".dart")
.map((file) {
var idPath = path.join("lib",
path.relative(file, from: sdk.rootDirectory));
return new AssetId('\$sdk', path.toUri(idPath).toString());
}));
} else {
var package = _graph.packages[packageName];
return new Stream.fromIterable(
package.listFiles(beneath: 'lib').map((file) {
return new AssetId(packageName,
path.toUri(package.relative(file)).toString());
}));
}
}
}

View file

@ -1,68 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.source_directory;
import 'dart:async';
import 'package:watcher/watcher.dart';
import 'asset_environment.dart';
import 'barback_server.dart';
/// A directory in the entrypoint package whose contents have been made
/// available to barback and that are bound to a server.
class SourceDirectory {
final AssetEnvironment _environment;
/// The relative directory path within the package.
final String directory;
/// The hostname to serve this directory on.
final String hostname;
/// The port to serve this directory on.
final int port;
/// The server bound to this directory.
///
/// This is a future that will complete once [serve] has been called and the
/// server has been successfully spun up.
Future<BarbackServer> get server => _serverCompleter.future;
final _serverCompleter = new Completer<BarbackServer>();
/// The subscription to the [DirectoryWatcher] used to watch this directory
/// for changes.
///
/// If the directory is not being watched, this will be `null`.
StreamSubscription<WatchEvent> watchSubscription;
SourceDirectory(this._environment, this.directory, this.hostname, this.port);
/// Binds a server running on [hostname]:[port] to this directory.
Future<BarbackServer> serve() {
return BarbackServer.bind(_environment, hostname, port,
rootDirectory: directory).then((server) {
_serverCompleter.complete(server);
return server;
});
}
/// Removes the source directory from the build environment.
///
/// Closes the server, removes the assets from barback, and stops watching it.
Future close() {
return server.then((server) {
var futures = [server.close()];
// Stop watching the directory.
if (watchSubscription != null) {
var cancel = watchSubscription.cancel();
if (cancel != null) futures.add(cancel);
}
return Future.wait(futures);
});
}
}
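A small lifecycle sketch, not from the original source: the directory, hostname, and port are illustrative, and `env` is assumed to be an existing [AssetEnvironment] (which normally constructs these objects itself).

import 'dart:async';

import 'asset_environment.dart';
import 'source_directory.dart';

/// Hedged sketch: bind a server to "web", report its URL, then shut it down.
Future serveAndClose(AssetEnvironment env) async {
  var dir = new SourceDirectory(env, "web", "localhost", 8080);
  var server = await dir.serve(); // completes once the server is bound
  print("Serving web on ${server.url}");
  await dir.close();              // closes the server and stops watching
}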

View file

@ -1,144 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.transformer_cache;
import 'package:path/path.dart' as p;
import '../io.dart';
import '../log.dart' as log;
import '../package_graph.dart';
import '../sdk.dart' as sdk;
import '../utils.dart';
import 'transformer_id.dart';
/// A cache for managing a snapshot of the first "stage" of transformers to
/// load.
///
/// This uses the [_stageTransformers] notion of a stage. Transformers are
/// divided into stages for loading based on which transformers are needed to
/// load one another. For example, if a transformer T1 produces a file that's
/// imported by another transformer T2, T2 must be put in a stage after T1.
///
/// We only cache the first stage because it's the only stage whose contents are
/// independent of any configuration. Since most transformers don't import the
/// output of other transformers, many packages will only have one stage.
class TransformerCache {
final PackageGraph _graph;
/// The set of transformer ids that were previously cached.
///
/// If there was no previous cache, this will be empty.
Set<TransformerId> _oldTransformers;
/// The set of transformer ids that are newly cached or re-used from the
/// previous cache.
Set<TransformerId> _newTransformers;
/// The directory in which transformers are cached.
///
/// This may be `null` if there's no physical entrypoint directory.
String _dir;
/// The path of the manifest file listing which transformers were cached.
String get _manifestPath => p.join(_dir, "manifest.txt");
/// Loads the transformer cache for [environment].
///
/// This may modify the cache.
TransformerCache.load(PackageGraph graph)
: _graph = graph,
_dir = graph.entrypoint.root.path(".pub/transformers") {
_oldTransformers = _parseManifest();
}
/// Clear the cache if it depends on any package in [changedPackages].
void clearIfOutdated(Set<String> changedPackages) {
var snapshotDependencies = unionAll(_oldTransformers.map((id) {
// If the transformer cache contains transformers we don't know about,
// that's fine; we just won't load them.
if (!_graph.packages.containsKey(id.package)) return new Set();
return _graph.transitiveDependencies(id.package)
.map((package) => package.name).toSet();
}));
// If none of the snapshot's dependencies have changed, then we can reuse
// it.
if (!overlaps(changedPackages, snapshotDependencies)) return;
// Otherwise, delete it.
deleteEntry(_dir);
_oldTransformers = new Set();
}
/// Returns the path for the transformer snapshot for [transformers], or
/// `null` if the transformers shouldn't be cached.
///
/// There may or may not exist a file at the returned path. If one does exist,
/// it can safely be used to load the stage. Otherwise, a snapshot of the
/// stage should be written there.
String snapshotPath(Set<TransformerId> transformers) {
var path = p.join(_dir, "transformers.snapshot");
if (_newTransformers != null) return path;
if (transformers.any((id) => _graph.isPackageMutable(id.package))) {
log.fine("Not caching mutable transformers.");
deleteEntry(_dir);
return null;
}
if (!_oldTransformers.containsAll(transformers)) {
log.fine("Cached transformer snapshot is out-of-date, deleting.");
deleteEntry(path);
} else {
log.fine("Using cached transformer snapshot.");
}
_newTransformers = transformers;
return path;
}
/// Saves the manifest to the transformer cache.
void save() {
// If we didn't write any snapshots, there's no need to write a manifest.
if (_newTransformers == null) {
if (_dir != null) deleteEntry(_dir);
return;
}
// We only need to rewrite the manifest if we created a new snapshot.
if (_oldTransformers.containsAll(_newTransformers)) return;
ensureDir(_dir);
writeTextFile(_manifestPath,
"${sdk.version}\n" +
ordered(_newTransformers.map((id) => id.serialize())).join(","));
}
/// Parses the cache manifest and returns the set of previously-cached
/// transformers.
///
/// If the manifest indicates that the SDK version is out-of-date, this
/// deletes the existing cache. Otherwise, it returns the ids of the
/// transformers listed in the manifest.
Set<TransformerId> _parseManifest() {
if (!fileExists(_manifestPath)) return new Set();
var manifest = readTextFile(_manifestPath).split("\n");
// The first line of the manifest is the SDK version. We want to clear out
// the snapshots even if they're VM-compatible, since pub's transformer
// isolate scaffolding may have changed.
if (manifest.removeAt(0) != sdk.version.toString()) {
deleteEntry(_dir);
return new Set();
}
/// The second line of the manifest is a list of transformer ids used to
/// create the existing snapshot.
return manifest.single.split(",")
.map((id) => new TransformerId.parse(id, null))
.toSet();
}
}
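For concreteness, the cache layout implied by save() and _parseManifest() looks roughly like this (the SDK version and transformer ids are made-up illustration values):

    .pub/transformers/manifest.txt           # "<sdk version>\n<id>,<id>,..."
    .pub/transformers/transformers.snapshot  # first-stage isolate snapshot

with manifest.txt containing, for example:

    1.12.0
    myapp/src/markdown,polymer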

View file

@ -1,160 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.transformer_config;
import 'package:glob/glob.dart';
import 'package:path/path.dart' as p;
import 'package:source_span/source_span.dart';
import 'package:yaml/yaml.dart';
import 'transformer_id.dart';
/// The configuration for a transformer.
///
/// This corresponds to the transformers listed in a pubspec, which have both an
/// [id] indicating the location of the transformer and configuration specific
/// to that use of the transformer.
class TransformerConfig {
/// The [id] of the transformer [this] is configuring.
final TransformerId id;
/// The configuration to pass to the transformer.
///
/// Any pub-specific configuration (i.e. keys starting with "$") will have
/// been stripped out of this and handled separately. This will be an empty
/// map if no configuration was provided.
final Map configuration;
/// The source span from which this configuration was parsed.
final SourceSpan span;
/// The primary input inclusions.
///
/// Each inclusion is an asset path. If this set is non-empty, then *only*
/// matching assets are allowed as a primary input by this transformer. If
/// `null`, all assets are included.
///
/// This is processed before [excludes]. If a transformer has both includes
/// and excludes, then the set of included assets is determined and assets
/// are excluded from that resulting set.
final Set<Glob> includes;
/// The primary input exclusions.
///
/// Any asset whose path matches one of these globs is not allowed as a
/// primary input by this transformer.
///
/// This is processed after [includes]. If a transformer has both includes
/// and excludes, then the set of included assets is determined and assets
/// are excluded from that resulting set.
final Set<Glob> excludes;
/// Returns whether this config excludes certain asset ids from being
/// processed.
bool get hasExclusions => includes != null || excludes != null;
/// Returns whether this transformer might transform a file that's visible to
/// the package's dependers.
bool get canTransformPublicFiles {
if (includes == null) return true;
return includes.any((glob) {
// Check whether the first path component of the glob is "lib", "bin", or
// contains wildcards that may cause it to match "lib" or "bin".
var first = p.posix.split(glob.toString()).first;
if (first.contains('{') || first.contains('*') || first.contains('[') ||
first.contains('?')) {
return true;
}
return first == 'lib' || first == 'bin';
});
}
/// Parses [identifier] as a [TransformerId] with [configuration].
///
/// [identifierSpan] is the source span for [identifier].
factory TransformerConfig.parse(String identifier, SourceSpan identifierSpan,
YamlMap configuration) =>
new TransformerConfig(new TransformerId.parse(identifier, identifierSpan),
configuration);
factory TransformerConfig(TransformerId id, YamlMap configurationNode) {
parseField(key) {
if (!configurationNode.containsKey(key)) return null;
var fieldNode = configurationNode.nodes[key];
var field = fieldNode.value;
if (field is String) {
return new Set.from([new Glob(field, context: p.url, recursive: true)]);
}
if (field is! List) {
throw new SourceSpanFormatException(
'"$key" field must be a string or list.', fieldNode.span);
}
return new Set.from(field.nodes.map((node) {
if (node.value is String) {
return new Glob(node.value, context: p.url, recursive: true);
}
throw new SourceSpanFormatException(
'"$key" field may contain only strings.', node.span);
}));
}
var includes = null;
var excludes = null;
var configuration;
var span;
if (configurationNode == null) {
configuration = {};
span = id.span;
} else {
// Don't write to the immutable YAML map.
configuration = new Map.from(configurationNode);
span = configurationNode.span;
// Pull out the exclusions/inclusions.
includes = parseField("\$include");
configuration.remove("\$include");
excludes = parseField("\$exclude");
configuration.remove("\$exclude");
// All other keys starting with "$" are unexpected.
for (var key in configuration.keys) {
if (key is! String || !key.startsWith(r'$')) continue;
throw new SourceSpanFormatException(
'Unknown reserved field.', configurationNode.nodes[key].span);
}
}
return new TransformerConfig._(id, configuration, span, includes, excludes);
}
TransformerConfig._(
this.id, this.configuration, this.span, this.includes, this.excludes);
String toString() => id.toString();
/// Returns whether the include/exclude rules allow the transformer to run on
/// [pathWithinPackage].
///
/// [pathWithinPackage] must be a URL-style path relative to the containing
/// package's root directory.
bool canTransform(String pathWithinPackage) {
if (excludes != null) {
// If there are any excludes, it must not match any of them.
for (var exclude in excludes) {
if (exclude.matches(pathWithinPackage)) return false;
}
}
// If there are any includes, it must match one of them.
return includes == null ||
includes.any((include) => include.matches(pathWithinPackage));
}
}
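To make the include/exclude handling concrete, here is a hypothetical pubspec entry of the kind this class parses (the package name and globs are made up):

    transformers:
    - myapp/src/markdown:
        $include: ["web/*.md", "lib/*.md"]
        $exclude: "web/drafts/*.md"

The resulting config has id `myapp/src/markdown`, two include globs, one exclude glob, and an empty `configuration` map (the `$`-prefixed keys are stripped out). canTransform("web/index.md") is true, while canTransform("web/drafts/todo.md") is false, because excludes are applied after includes.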

View file

@ -1,95 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.transformer_id;
import 'dart:async';
import 'package:barback/barback.dart';
import 'package:source_span/source_span.dart';
import '../utils.dart';
/// A list of the names of all built-in transformers that pub exposes.
const _BUILT_IN_TRANSFORMERS = const ['\$dart2js'];
/// An identifier that indicates the library that contains a transformer.
///
/// It's possible that the library identified by [this] defines multiple
/// transformers. If so, they're all always loaded in the same phase.
class TransformerId {
/// The package containing the library where the transformer is defined.
final String package;
/// The `/`-separated path to the library that contains this transformer.
///
/// This is relative to the `lib/` directory in [package], and doesn't end in
/// `.dart`.
///
/// This can be null; if so, it indicates that the transformer(s) should be
/// loaded from `lib/transformer.dart` if that exists, and `lib/$package.dart`
/// otherwise.
final String path;
/// The source span from which this id was parsed.
final SourceSpan span;
/// Whether this ID points to a built-in transformer exposed by pub.
bool get isBuiltInTransformer => package.startsWith('\$');
/// Parses a transformer identifier.
///
/// A transformer identifier is a string of the form "package_name" or
/// "package_name/path/to/library". It does not have a trailing extension. If
/// it just has a package name, it expands to lib/transformer.dart if that
/// exists, or lib/${package}.dart otherwise. Otherwise, it expands to
/// lib/${path}.dart. In either case it's located in the given package.
factory TransformerId.parse(String identifier, SourceSpan span) {
if (identifier.isEmpty) {
throw new FormatException('Invalid library identifier: "".');
}
var parts = split1(identifier, "/");
if (parts.length == 1) {
return new TransformerId(parts.single, null, span);
}
return new TransformerId(parts.first, parts.last, span);
}
TransformerId(this.package, this.path, this.span) {
if (!package.startsWith('\$')) return;
if (_BUILT_IN_TRANSFORMERS.contains(package)) return;
throw new SourceSpanFormatException(
'Unsupported built-in transformer $package.', span);
}
bool operator==(other) =>
other is TransformerId && other.package == package && other.path == path;
int get hashCode => package.hashCode ^ path.hashCode;
/// Returns a serialized form of [this] that can be passed to
/// [new TransformerId.parse].
String serialize() => path == null ? package : '$package/$path';
String toString() => serialize();
/// Returns the asset id for the library identified by this transformer id.
///
/// If `path` is null, this will determine which library to load. Unlike
/// [getAssetId], this doesn't take generated assets into account; it's used
/// to determine transformers' dependencies, which requires looking at files
/// on disk.
Future<AssetId> getAssetId(Barback barback) {
if (path != null) {
return new Future.value(new AssetId(package, 'lib/$path.dart'));
}
var transformerAsset = new AssetId(package, 'lib/transformer.dart');
return barback.getAssetById(transformerAsset).then((_) => transformerAsset)
.catchError((e) => new AssetId(package, 'lib/$package.dart'),
test: (e) => e is AssetNotFoundException);
}
}
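A few hypothetical parses, for orientation (the package names are illustrative):

    new TransformerId.parse("polymer", span)
        // package: "polymer", path: null
        // -> loads lib/transformer.dart if it exists, else lib/polymer.dart

    new TransformerId.parse("myapp/src/markdown", span)
        // package: "myapp", path: "src/markdown"
        // -> loads lib/src/markdown.dart

    new TransformerId.parse(r"$dart2js", span)
        // the built-in dart2js transformer; isBuiltInTransformer is true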

View file

@ -1,144 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.transformer_isolate;
import 'dart:async';
import 'dart:convert';
import 'dart:isolate';
import 'package:barback/barback.dart';
import 'package:source_span/source_span.dart';
import 'package:stack_trace/stack_trace.dart';
import '../../../asset/dart/serialize.dart';
import '../barback.dart';
import '../exceptions.dart';
import '../dart.dart' as dart;
import '../log.dart' as log;
import '../utils.dart';
import 'asset_environment.dart';
import 'barback_server.dart';
import 'foreign_transformer.dart';
import 'transformer_config.dart';
import 'transformer_id.dart';
/// A wrapper for an isolate from which transformer plugins can be instantiated.
class TransformerIsolate {
/// The port used to communicate with the wrapped isolate.
final SendPort _port;
/// A map indicating the barback server URLs for each [TransformerId] that's
/// loaded in the wrapped isolate.
///
/// A barback server URL is the URL for the library that the given id
/// identifies. For example, the URL for "polymer/src/mirrors_remover" might
/// be "http://localhost:56234/packages/polymer/src/mirrors_remover.dart".
final Map<TransformerId, Uri> _idsToUrls;
/// The barback mode for this run of pub.
final BarbackMode _mode;
/// Spawns an isolate that loads all transformer libraries defined by [ids].
///
/// This doesn't actually instantiate any transformers, since a
/// [TransformerId] doesn't define the transformers' configuration. The
/// transformers can be constructed using [create].
///
/// If [snapshot] is passed, the isolate will be loaded from that path if it
/// exists. Otherwise, a snapshot of the isolate's code will be saved to that
/// path once the isolate is loaded.
static Future<TransformerIsolate> spawn(AssetEnvironment environment,
BarbackServer transformerServer, List<TransformerId> ids,
{String snapshot}) {
return mapFromIterableAsync(ids, value: (id) {
return id.getAssetId(environment.barback);
}).then((idsToAssetIds) {
var baseUrl = transformerServer.url;
var idsToUrls = mapMap(idsToAssetIds, value: (id, assetId) {
var path = assetId.path.replaceFirst('lib/', '');
return Uri.parse('package:${id.package}/$path');
});
var code = new StringBuffer();
code.writeln("import 'dart:isolate';");
for (var url in idsToUrls.values) {
code.writeln("import '$url';");
}
code.writeln("import r'package:\$pub/transformer_isolate.dart';");
code.writeln(
"void main(_, SendPort replyTo) => loadTransformers(replyTo);");
log.fine("Loading transformers from $ids");
var port = new ReceivePort();
return dart.runInIsolate(code.toString(), port.sendPort,
packageRoot: baseUrl.resolve('packages'),
snapshot: snapshot)
.then((_) => port.first)
.then((sendPort) {
return new TransformerIsolate._(sendPort, environment.mode, idsToUrls);
}).catchError((error, stackTrace) {
if (error is! CrossIsolateException) throw error;
if (error.type != 'IsolateSpawnException') throw error;
// TODO(nweiz): don't parse this as a string once issues 12617 and 12689
// are fixed.
var firstErrorLine = error.message.split('\n')[1];
// The isolate error message contains the fully expanded path, not the
// "package:" URI, so we have to be liberal in what we look for in the
// error message.
var missingTransformer = idsToUrls.keys.firstWhere((id) =>
firstErrorLine.startsWith(
"Load Error for") &&
firstErrorLine.contains(idsToUrls[id].path),
orElse: () => throw error);
var packageUri = idToPackageUri(idsToAssetIds[missingTransformer]);
// If there was an IsolateSpawnException and the import that actually
// failed was the one we were loading transformers from, throw an
// application exception with a more user-friendly message.
fail('Transformer library "$packageUri" not found.',
error, stackTrace);
});
});
}
TransformerIsolate._(this._port, this._mode, this._idsToUrls);
/// Instantiates the transformers defined in the library identified by
/// [config.id], passing them [config.configuration].
///
/// If there are no transformers defined in the given library, this will
/// return an empty set.
Future<Set<Transformer>> create(TransformerConfig config) {
return call(_port, {
'library': _idsToUrls[config.id].toString(),
'mode': _mode.name,
'configuration': JSON.encode(config.configuration)
}).then((transformers) {
transformers = transformers.map(
(transformer) => deserializeTransformerLike(transformer, config))
.toSet();
log.fine("Transformers from $config: $transformers");
return transformers;
}).catchError((error, stackTrace) {
throw new TransformerLoadError(error, config.span);
});
}
}
/// An error thrown when a transformer fails to load.
class TransformerLoadError extends SourceSpanException
implements WrappedException {
final CrossIsolateException innerError;
Chain get innerChain => innerError.stackTrace;
TransformerLoadError(CrossIsolateException error, SourceSpan span)
: innerError = error,
super("Error loading transformer: ${error.message}", span);
}

View file

@ -1,127 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.transformer_loader;
import 'dart:async';
import 'package:barback/barback.dart';
import '../log.dart' as log;
import '../utils.dart';
import 'asset_environment.dart';
import 'barback_server.dart';
import 'dart2js_transformer.dart';
import 'excluding_transformer.dart';
import 'transformer_config.dart';
import 'transformer_id.dart';
import 'transformer_isolate.dart';
/// A class that loads transformers defined in specific files.
class TransformerLoader {
final AssetEnvironment _environment;
final BarbackServer _transformerServer;
final _isolates = new Map<TransformerId, TransformerIsolate>();
final _transformers = new Map<TransformerConfig, Set<Transformer>>();
/// The packages that use each transformer id.
///
/// Used for error reporting.
final _transformerUsers = new Map<TransformerId, Set<String>>();
TransformerLoader(this._environment, this._transformerServer) {
for (var package in _environment.graph.packages.values) {
for (var config in unionAll(package.pubspec.transformers)) {
_transformerUsers.putIfAbsent(config.id, () => new Set<String>())
.add(package.name);
}
}
}
/// Loads a transformer plugin isolate that imports the transformer libraries
/// indicated by [ids].
///
/// Once the returned future completes, transformer instances from this
/// isolate can be created using [transformersFor] or [transformersForPhase].
///
/// This skips any ids that have already been loaded.
Future load(Iterable<TransformerId> ids, {String snapshot}) async {
ids = ids.where((id) => !_isolates.containsKey(id)).toList();
if (ids.isEmpty) return;
var isolate = await log.progress("Loading ${toSentence(ids)} transformers",
() => TransformerIsolate.spawn(_environment, _transformerServer, ids,
snapshot: snapshot));
for (var id in ids) {
_isolates[id] = isolate;
}
}
/// Instantiates and returns all transformers in the library indicated by
/// [config] with the given configuration.
///
/// If this is called before the library has been loaded into an isolate via
/// [load], it will return an empty set.
Future<Set<Transformer>> transformersFor(TransformerConfig config) async {
if (_transformers.containsKey(config)) return _transformers[config];
if (_isolates.containsKey(config.id)) {
var transformers = await _isolates[config.id].create(config);
if (transformers.isNotEmpty) {
_transformers[config] = transformers;
return transformers;
}
var message = "No transformers";
if (config.configuration.isNotEmpty) {
message += " that accept configuration";
}
var location;
if (config.id.path == null) {
location = 'package:${config.id.package}/transformer.dart or '
'package:${config.id.package}/${config.id.package}.dart';
} else {
location = 'package:$config.dart';
}
var users = toSentence(ordered(_transformerUsers[config.id]));
fail("$message were defined in $location,\n"
"required by $users.");
} else if (config.id.package != '\$dart2js') {
return new Future.value(new Set());
}
var transformer;
try {
transformer = new Dart2JSTransformer.withSettings(_environment,
new BarbackSettings(config.configuration, _environment.mode));
} on FormatException catch (error, stackTrace) {
fail(error.message, error, stackTrace);
}
// Handle any exclusions.
_transformers[config] = new Set.from(
[ExcludingTransformer.wrap(transformer, config)]);
return _transformers[config];
}
/// Loads all transformers defined in each phase of [phases].
///
/// If any library hasn't yet been loaded via [load], it will be ignored.
Future<List<Set<Transformer>>> transformersForPhases(
Iterable<Set<TransformerConfig>> phases) async {
var result = await Future.wait(phases.map((phase) async {
var transformers = await waitAndPrintErrors(phase.map(transformersFor));
return unionAll(transformers);
}));
// Return a growable list so that callers can add phases.
return result.toList();
}
}
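The intended call sequence, as exercised by load_all_transformers.dart earlier in this change, looks roughly like this. This is a hedged sketch, not part of the original file: the package name is illustrative and error handling is omitted.

import 'dart:async';

import 'asset_environment.dart';
import 'transformer_id.dart';
import 'transformer_loader.dart';

/// Hedged sketch: load each stage of transformer ids, then instantiate the
/// phases for one package and hand them to barback.
Future loadAndRegister(TransformerLoader loader, AssetEnvironment env,
    List<Set<TransformerId>> stages) async {
  for (var stage in stages) {
    await loader.load(stage); // spawns an isolate for any not-yet-loaded ids
  }
  var package = env.graph.packages["myapp"]; // "myapp" is illustrative
  var phases = await loader.transformersForPhases(
      package.pubspec.transformers);
  env.barback.updateTransformers(package.name, phases);
}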

View file

@ -1,309 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.barback.web_socket_api;
import 'dart:async';
import 'dart:io';
import 'package:http_parser/http_parser.dart';
import 'package:path/path.dart' as path;
import 'package:json_rpc_2/json_rpc_2.dart' as json_rpc;
import '../exit_codes.dart' as exit_codes;
import '../io.dart';
import '../log.dart' as log;
import '../utils.dart';
import 'asset_environment.dart';
/// Implements the [WebSocket] API for communicating with a running pub serve
/// process, mainly for use by the Editor.
///
/// This is a [JSON-RPC 2.0](http://www.jsonrpc.org/specification) server. Its
/// methods are described in the method-level documentation below.
class WebSocketApi {
final AssetEnvironment _environment;
final json_rpc.Server _server;
/// Whether the application should exit when this connection closes.
bool _exitOnClose = false;
WebSocketApi(CompatibleWebSocket socket, this._environment)
: _server = new json_rpc.Server(socket) {
_server.registerMethod("urlToAssetId", _urlToAssetId);
_server.registerMethod("pathToUrls", _pathToUrls);
_server.registerMethod("serveDirectory", _serveDirectory);
_server.registerMethod("unserveDirectory", _unserveDirectory);
/// Tells the server to exit as soon as this WebSocket connection is closed.
///
/// This takes no arguments and returns no results. It can safely be called
/// as a JSON-RPC notification.
_server.registerMethod("exitOnClose", () {
_exitOnClose = true;
});
}
/// Listens on the socket.
///
/// Returns a future that completes when the socket has closed. It will
/// complete with an error if the socket had an error, otherwise it will
/// complete to `null`.
Future listen() {
return _server.listen().then((_) {
if (!_exitOnClose) return;
log.message("WebSocket connection closed, terminating.");
flushThenExit(exit_codes.SUCCESS);
});
}
/// Given a URL to an asset that is served by pub, returns the ID of the
/// asset that would be accessed by that URL.
///
/// The method name is "urlToAssetId" and it takes a "url" parameter for the
/// URL being mapped:
///
/// "params": {
/// "url": "http://localhost:8080/index.html"
/// }
///
/// If successful, it returns a map containing the asset ID's package and
/// path:
///
/// "result": {
/// "package": "myapp",
/// "path": "web/index.html"
/// }
///
/// The "path" key in the result is a URL path that's relative to the root
/// directory of the package identified by "package". The location of this
/// package may vary depending on which source it was installed from.
///
/// An optional "line" key may be provided whose value must be an integer. If
/// given, the result will also include a "line" key that maps the line in
/// the served final file back to the corresponding source line in the asset
/// that was used to generate that file.
///
/// Examples (where "myapp" is the root package and pub serve is being run
/// normally with "web" bound to port 8080 and "test" to 8081):
///
/// http://localhost:8080/index.html -> myapp|web/index.html
/// http://localhost:8081/sub/main.dart -> myapp|test/sub/main.dart
///
/// If the URL is not a domain being served by pub, this returns an error:
///
/// http://localhost:1234/index.html -> NOT_SERVED error
///
/// This does *not* currently support the implicit index.html behavior that
/// pub serve provides for user-friendliness:
///
/// http://localhost:1234 -> NOT_SERVED error
///
/// This does *not* currently check to ensure the asset actually exists. It
/// only maps what the corresponding asset *should* be for that URL.
Future<Map> _urlToAssetId(json_rpc.Parameters params) {
var url = params["url"].asUri;
// If a line number was given, map it to the output line.
var line = params["line"].asIntOr(null);
return _environment.getAssetIdForUrl(url).then((id) {
if (id == null) {
throw new json_rpc.RpcException(_Error.NOT_SERVED,
'"${url.host}:${url.port}" is not being served by pub.');
}
// TODO(rnystrom): When this is hooked up to actually talk to barback to
// see if assets exist, consider supporting implicit index.html at that
// point.
var result = {"package": id.package, "path": id.path};
// Map the line.
// TODO(rnystrom): Right now, source maps are not supported and it just
// passes through the original line. This lets the editor start using
// this API before we've fully implemented it. See #12339 and #16061.
if (line != null) result["line"] = line;
return result;
});
}
/// Given a path on the filesystem, returns the URLs served by pub that can be
/// used to access the asset found at that path.
///
/// The method name is "pathToUrls" and it takes a "path" key (a native OS
/// path which may be absolute or relative to the root directory of the
/// entrypoint package) for the path being mapped:
///
/// "params": {
/// "path": "web/index.html"
/// }
///
/// If successful, it returns a map containing the list of URLs that can be
/// used to access that asset.
///
/// "result": {
/// "urls": ["http://localhost:8080/index.html"]
/// }
///
/// The "path" key may refer to a path in another package, either by referring
/// to its location within the top-level "packages" directory or by referring
/// to its location on disk. Only the "lib" directory is visible in other
/// packages:
///
/// "params": {
/// "path": "packages/http/http.dart"
/// }
///
/// Assets in the "lib" directory will usually have one URL for each server:
///
/// "result": {
/// "urls": [
/// "http://localhost:8080/packages/http/http.dart",
/// "http://localhost:8081/packages/http/http.dart"
/// ]
/// }
///
/// An optional "line" key may be provided whose value must be an integer. If
/// given, the result will also include a "line" key that maps the line in
/// the source file to the corresponding output line in the resulting asset
/// served at the URL.
///
/// Examples (where "myapp" is the root package and pub serve is being run
/// normally with "web" bound to port 8080 and "test" to 8081):
///
/// web/index.html -> http://localhost:8080/index.html
/// test/sub/main.dart -> http://localhost:8081/sub/main.dart
///
/// If the asset is not in a directory being served by pub, returns an error:
///
/// example/index.html -> NOT_SERVED error
Future<Map> _pathToUrls(json_rpc.Parameters params) {
var assetPath = params["path"].asString;
var line = params["line"].asIntOr(null);
return _environment.getUrlsForAssetPath(assetPath).then((urls) {
if (urls.isEmpty) {
throw new json_rpc.RpcException(_Error.NOT_SERVED,
'Asset path "$assetPath" is not currently being served.');
}
var result = {"urls": urls.map((url) => url.toString()).toList()};
// Map the line.
// TODO(rnystrom): Right now, source maps are not supported and it just
// passes through the original line. This lets the editor start using
// this API before we've fully implemented it. See #12339 and #16061.
if (line != null) result["line"] = line;
return result;
});
}
/// Given a relative directory path within the entrypoint package, binds a
/// new port to serve from that path and returns its URL.
///
/// The method name is "serveDirectory" and it takes a "path" key (a native
/// OS path relative to the root of the entrypoint package) for the directory
/// being served:
///
/// "params": {
/// "path": "example/awesome"
/// }
///
/// If successful, it returns a map containing the URL that can be used to
/// access the directory.
///
/// "result": {
/// "url": "http://localhost:8083"
/// }
///
/// If the directory is already being served, returns the previous URL.
Future<Map> _serveDirectory(json_rpc.Parameters params) {
var rootDirectory = _validateRelativePath(params, "path");
return _environment.serveDirectory(rootDirectory).then((server) {
return {
"url": server.url.toString()
};
}).catchError((error) {
if (error is! OverlappingSourceDirectoryException) throw error;
var dir = pluralize("directory", error.overlappingDirectories.length,
plural: "directories");
var overlapping = toSentence(error.overlappingDirectories.map(
(dir) => '"$dir"'));
print("data: ${error.overlappingDirectories}");
throw new json_rpc.RpcException(_Error.OVERLAPPING,
'Path "$rootDirectory" overlaps already served $dir $overlapping.',
data: {
"directories": error.overlappingDirectories
});
});
}
/// Given a relative directory path within the entrypoint package, unbinds
/// the server previously bound to that directory and returns its (now
/// unreachable) URL.
///
/// The method name is "unserveDirectory" and it takes a "path" key (a
/// native OS path relative to the root of the entrypoint package) for the
/// directory being unserved:
///
/// "params": {
/// "path": "example/awesome"
/// }
///
/// If successful, it returns a map containing the URL that used to be used
/// to access the directory.
///
/// "result": {
/// "url": "http://localhost:8083"
/// }
///
/// If no server is bound to that directory, it returns a `NOT_SERVED` error.
Future<Map> _unserveDirectory(json_rpc.Parameters params) {
var rootDirectory = _validateRelativePath(params, "path");
return _environment.unserveDirectory(rootDirectory).then((url) {
if (url == null) {
throw new json_rpc.RpcException(_Error.NOT_SERVED,
'Directory "$rootDirectory" is not bound to a server.');
}
return {"url": url.toString()};
});
}
/// Validates that [command] has a field named [key] whose value is a string
/// containing a relative path that doesn't reach out of the entrypoint
/// package's root directory.
///
/// Returns the path if found, or throws a [_WebSocketException] if
/// validation failed.
String _validateRelativePath(json_rpc.Parameters params, String key) {
var pathString = params[key].asString;
if (!path.isRelative(pathString)) {
throw new json_rpc.RpcException.invalidParams(
'"$key" must be a relative path. Got "$pathString".');
}
if (!path.isWithin(".", pathString)) {
throw new json_rpc.RpcException.invalidParams(
'"$key" cannot reach out of its containing directory. '
'Got "$pathString".');
}
return pathString;
}
}
/// The pub-specific JSON RPC error codes.
class _Error {
/// The specified directory is not being served.
static const NOT_SERVED = 1;
/// The specified directory overlaps one or more ones already being served.
static const OVERLAPPING = 2;
}

View file

@ -1,86 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.cached_package;
import 'package:path/path.dart' as p;
import 'package:pub_semver/pub_semver.dart';
import 'package:yaml/yaml.dart';
import 'barback/transformer_config.dart';
import 'io.dart';
import 'package.dart';
import 'pubspec.dart';
/// A [Package] whose `lib` directory has been precompiled and cached.
///
/// When users of this class request path information about files that are
/// cached, this returns the cached information. It also wraps the package's
/// pubspec to report no transformers, since the transformations have all been
/// applied already.
class CachedPackage extends Package {
/// The directory containing the cached assets from this package.
///
/// Although only `lib` is cached, this directory corresponds to the root of
/// the package. The actual cached assets exist in `$_cacheDir/lib`.
final String _cacheDir;
/// Creates a new cached package wrapping [inner] with the cache at
/// [_cacheDir].
CachedPackage(Package inner, this._cacheDir)
: super(new _CachedPubspec(inner.pubspec), inner.dir);
String path(String part1, [String part2, String part3, String part4,
String part5, String part6, String part7]) {
if (_pathInCache(part1)) {
return p.join(_cacheDir, part1, part2, part3, part4, part5, part6, part7);
} else {
return super.path(part1, part2, part3, part4, part5, part6, part7);
}
}
String relative(String path) {
if (p.isWithin(_cacheDir, path)) return p.relative(path, from: _cacheDir);
return super.relative(path);
}
/// This will include the cached, transformed versions of files if [beneath]
/// is within a cached directory, but not otherwise.
List<String> listFiles({String beneath, recursive: true,
bool useGitIgnore: false}) {
if (beneath == null) {
return super.listFiles(recursive: recursive, useGitIgnore: useGitIgnore);
}
if (_pathInCache(beneath)) return listDir(p.join(_cacheDir, beneath));
return super.listFiles(beneath: beneath, recursive: recursive,
useGitIgnore: useGitIgnore);
}
/// Returns whether [relativePath], a path relative to the package's root,
/// is in a cached directory.
bool _pathInCache(String relativePath) => p.isWithin('lib', relativePath);
}
/// A pubspec wrapper that reports no transformers.
class _CachedPubspec implements Pubspec {
final Pubspec _inner;
YamlMap get fields => _inner.fields;
String get name => _inner.name;
Version get version => _inner.version;
List<PackageDep> get dependencies => _inner.dependencies;
List<PackageDep> get devDependencies => _inner.devDependencies;
List<PackageDep> get dependencyOverrides => _inner.dependencyOverrides;
PubspecEnvironment get environment => _inner.environment;
String get publishTo => _inner.publishTo;
Map<String, String> get executables => _inner.executables;
bool get isPrivate => _inner.isPrivate;
bool get isEmpty => _inner.isEmpty;
List<PubspecException> get allErrors => _inner.allErrors;
List<Set<TransformerConfig>> get transformers => const [];
_CachedPubspec(this._inner);
}
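
A minimal sketch of how path lookups behave on this class; `foo` stands in for an existing Package and the cache directory is illustrative:

    // Only lib/ is cached; everything else falls through to the real package.
    var cached = new CachedPackage(foo, '/pub-cache/deps/debug/foo');
    cached.path('lib', 'foo.dart');    // -> /pub-cache/deps/debug/foo/lib/foo.dart
    cached.path('web', 'index.html');  // -> foo's own web/index.html
    cached.pubspec.transformers;       // -> [] (transformations already applied)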

View file

@ -1,94 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command;
import 'package:args/args.dart';
import 'package:args/command_runner.dart';
import 'package:path/path.dart' as path;
import 'entrypoint.dart';
import 'log.dart' as log;
import 'global_packages.dart';
import 'system_cache.dart';
/// The base class for commands for the pub executable.
///
/// A command may either be a "leaf" command or it may be a parent for a set
/// of subcommands. Only leaf commands are ever actually invoked. If a command
/// has subcommands, then one of those must always be chosen.
abstract class PubCommand extends Command {
SystemCache get cache {
if (_cache == null) {
_cache = new SystemCache.withSources(isOffline: isOffline);
}
return _cache;
}
SystemCache _cache;
GlobalPackages get globals {
if (_globals == null) {
_globals = new GlobalPackages(cache);
}
return _globals;
}
GlobalPackages _globals;
/// Gets the [Entrypoint] package for the current working directory.
///
/// This will load the pubspec and fail with an error if the current directory
/// is not a package.
Entrypoint get entrypoint {
// Lazy load it.
if (_entrypoint == null) {
_entrypoint = new Entrypoint(path.current, cache,
packageSymlinks: globalResults['package-symlinks']);
}
return _entrypoint;
}
Entrypoint _entrypoint;
/// The URL for web documentation for this command.
String get docUrl => null;
/// Override this and return `false` to disallow trailing options from being
/// parsed after a non-option argument is parsed.
bool get allowTrailingOptions => true;
ArgParser get argParser {
// Lazily initialize the parser because the superclass constructor requires
// it but we want to initialize it based on [allowTrailingOptions].
if (_argParser == null) {
_argParser = new ArgParser(allowTrailingOptions: allowTrailingOptions);
}
return _argParser;
}
ArgParser _argParser;
/// Override this to use offline-only sources instead of hitting the network.
///
/// This will only be called before the [SystemCache] is created. After that,
/// it has no effect. This only needs to be set in leaf commands.
bool get isOffline => false;
String get usageFooter {
if (docUrl == null) return null;
return "See $docUrl for detailed documentation.";
}
void printUsage() {
log.message(usage);
}
/// Parses a user-supplied integer [intString] named [name].
///
/// If the parsing fails, prints a usage message and exits.
int parseInt(String intString, String name) {
try {
return int.parse(intString);
} on FormatException catch (_) {
usageException('Could not parse $name "$intString".');
}
}
}
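
Leaf commands follow a common pattern: override the descriptive getters and run(). A hypothetical command (not part of pub) sketched against this base class:

    class HelloCommand extends PubCommand {
      String get name => "hello";
      String get description => "Print the entrypoint package's name.";
      String get invocation => "pub hello";
      bool get takesArguments => false;

      void run() {
        // `entrypoint` lazily loads the package in the current directory.
        log.message("Hello from ${entrypoint.root.name}.");
      }
    }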

View file

@ -1,198 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.barback;
import 'dart:async';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as path;
import '../command.dart';
import '../io.dart';
import '../log.dart' as log;
import '../utils.dart';
final _arrow = getSpecial('\u2192', '=>');
/// The set of top level directories in the entrypoint package that are built
/// when the user does "--all".
final _allSourceDirectories = new Set<String>.from([
"benchmark", "bin", "example", "test", "web"
]);
/// Shared base class for [BuildCommand] and [ServeCommand].
abstract class BarbackCommand extends PubCommand {
/// The build mode.
BarbackMode get mode => new BarbackMode(argResults["mode"]);
/// The directories in the entrypoint package that should be added to the
/// build environment.
final sourceDirectories = new Set<String>();
/// The default build mode.
BarbackMode get defaultMode => BarbackMode.RELEASE;
/// Override this to specify the default source directories if none are
/// provided on the command line.
List<String> get defaultSourceDirectories;
BarbackCommand() {
argParser.addOption("mode", defaultsTo: defaultMode.toString(),
help: "Mode to run transformers in.");
argParser.addFlag("all",
help: "Use all default source directories.",
defaultsTo: false, negatable: false);
}
Future run() {
// Switch to JSON output if specified. We need to do this before parsing
// the source directories so an error will be correctly reported in JSON
// format.
log.json.enabled = argResults.options.contains("format") &&
argResults["format"] == "json";
_parseSourceDirectories();
return onRunTransformerCommand();
}
/// Override this to run the actual command.
Future onRunTransformerCommand();
/// Parses the command-line arguments to determine the set of source
/// directories to add to the build environment.
///
/// If there are no arguments, this will just be [defaultSourceDirectories].
///
/// If the `--all` flag is set, then it will be all default directories
/// that exist.
///
/// Otherwise, all arguments should be the paths of directories to include.
///
/// Throws an exception if the arguments are invalid.
void _parseSourceDirectories() {
if (argResults["all"]) {
_addAllDefaultSources();
return;
}
// If no directories were specified, use the defaults.
if (argResults.rest.isEmpty) {
_addDefaultSources();
return;
}
sourceDirectories.addAll(argResults.rest);
// Prohibit "lib".
var disallowed = sourceDirectories.where((dir) {
var parts = path.split(path.normalize(dir));
return parts.isNotEmpty && parts.first == "lib";
});
if (disallowed.isNotEmpty) {
usageException(_directorySentence(disallowed, "is", "are", "not allowed"));
}
// Make sure the source directories don't reach out of the package.
var invalid = sourceDirectories.where((dir) => !path.isWithin('.', dir));
if (invalid.isNotEmpty) {
usageException(_directorySentence(invalid, "isn't", "aren't",
"in this package"));
}
// Make sure all of the source directories exist.
var missing = sourceDirectories.where(
(dir) => !dirExists(entrypoint.root.path(dir)));
if (missing.isNotEmpty) {
dataError(_directorySentence(missing, "does", "do", "not exist"));
}
// Make sure the directories don't overlap.
var sources = sourceDirectories.toList();
var overlapping = new Set();
for (var i = 0; i < sources.length; i++) {
for (var j = i + 1; j < sources.length; j++) {
if (path.isWithin(sources[i], sources[j]) ||
path.isWithin(sources[j], sources[i])) {
overlapping.add(sources[i]);
overlapping.add(sources[j]);
}
}
}
if (overlapping.isNotEmpty) {
usageException(_directorySentence(overlapping, "cannot", "cannot",
"overlap"));
}
}
/// Handles "--all" by adding all default source directories that are
/// present.
void _addAllDefaultSources() {
if (argResults.rest.isNotEmpty) {
usageException(
'Directory names are not allowed if "--all" is passed.');
}
// Include every build directory that exists in the package.
var dirs = _allSourceDirectories.where(
(dir) => dirExists(entrypoint.root.path(dir)));
if (dirs.isEmpty) {
var defaultDirs = toSentence(_allSourceDirectories.map(
(name) => '"$name"'));
dataError('There are no source directories present.\n'
'The default directories are $defaultDirs.');
}
sourceDirectories.addAll(dirs);
}
/// Adds the default sources that should be used if no directories are passed
/// on the command line.
void _addDefaultSources() {
sourceDirectories.addAll(defaultSourceDirectories.where(
(dir) => dirExists(entrypoint.root.path(dir))));
// TODO(rnystrom): Hackish. Assumes there will only be one or two
// default sources. That's true for pub build and serve, but isn't as
// general as it could be.
if (sourceDirectories.isEmpty) {
var defaults;
if (defaultSourceDirectories.length == 1) {
defaults = 'a "${defaultSourceDirectories.first}" directory';
} else {
defaults = '"${defaultSourceDirectories[0]}" and/or '
'"${defaultSourceDirectories[1]}" directories';
}
dataError("Your package must have $defaults,\n"
"or you must specify the source directories.");
}
}
/// Converts a list of [directoryNames] to a sentence.
///
/// After the list of directories, [singularVerb] will be used if there is
/// only one directory and [pluralVerb] will be used if there are more than
/// one. Then [suffix] is added to the end of the sentence, and, finally, a
/// period is added.
String _directorySentence(Iterable<String> directoryNames,
String singularVerb, String pluralVerb, String suffix) {
var directories = pluralize('Directory', directoryNames.length,
plural: 'Directories');
var names = toSentence(directoryNames.map((dir) => '"$dir"'));
var verb = pluralize(singularVerb, directoryNames.length,
plural: pluralVerb);
var result = "$directories $names $verb";
if (suffix != null) result += " $suffix";
result += ".";
return result;
}
}
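
To illustrate the validation in _parseSourceDirectories, these invocations (directory names are illustrative) succeed or fail as follows:

    pub build web example/foo    # OK: both directories are added
    pub build lib                # usage error: "lib" is not allowed
    pub build ../elsewhere       # usage error: reaches out of the package
    pub build web web/sub        # usage error: the directories overlap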

View file

@ -1,268 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.build;
import 'dart:async';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as path;
import '../barback/asset_environment.dart';
import '../exit_codes.dart' as exit_codes;
import '../io.dart';
import '../log.dart' as log;
import '../utils.dart';
import 'barback.dart';
final _arrow = getSpecial('\u2192', '=>');
/// Handles the `build` pub command.
class BuildCommand extends BarbackCommand {
String get name => "build";
String get description => "Apply transformers to build a package.";
String get invocation => "pub build [options] [directories...]";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-build.html";
List<String> get aliases => const ["deploy", "settle-up"];
/// The path to the application's build output directory.
String get outputDirectory => argResults["output"];
List<String> get defaultSourceDirectories => ["web"];
/// The number of files that have been built and written to disc so far.
int builtFiles = 0;
BuildCommand() {
argParser.addOption("define", abbr: "D",
help: "Defines an environment constant for dart2js.",
allowMultiple: true, splitCommas: false);
argParser.addOption("format",
help: "How output should be displayed.",
allowed: ["text", "json"], defaultsTo: "text");
argParser.addOption("output", abbr: "o",
help: "Directory to write build outputs to.",
defaultsTo: "build");
}
Future onRunTransformerCommand() async {
cleanDir(outputDirectory);
var errorsJson = [];
var logJson = [];
var environmentConstants = new Map.fromIterable(argResults["define"],
key: (pair) => pair.split("=").first,
value: (pair) => pair.split("=").last);
// Since this server will only be hit by the transformer loader and isn't
// user-facing, just use an IPv4 address to avoid a weird bug on the
// OS X buildbots.
return AssetEnvironment.create(entrypoint, mode,
environmentConstants: environmentConstants,
useDart2JS: true)
.then((environment) {
// Show in-progress errors, but not results. Those get handled
// implicitly by getAllAssets().
environment.barback.errors.listen((error) {
log.error(log.red("Build error:\n$error"));
if (log.json.enabled) {
// Wrap the error in a map in case we end up decorating it with
// more properties later.
errorsJson.add({
"error": error.toString()
});
}
});
// If we're using JSON output, the regular server logging is disabled.
// Instead, we collect it here to include in the final JSON result.
if (log.json.enabled) {
environment.barback.log.listen(
(entry) => logJson.add(_logEntryToJson(entry)));
}
return log.progress("Building ${entrypoint.root.name}", () {
// Register all of the build directories.
// TODO(rnystrom): We don't actually need to bind servers for these, we
// just need to add them to barback's sources. Add support to
// BuildEnvironment for doing the latter without the former.
return Future.wait(sourceDirectories.map(
(dir) => environment.serveDirectory(dir))).then((_) {
return environment.barback.getAllAssets();
});
}).then((assets) {
// Find all of the JS entrypoints we built.
var dart2JSEntrypoints = assets
.where((asset) => asset.id.path.endsWith(".dart.js"))
.map((asset) => asset.id);
return Future.wait(assets.map(_writeAsset)).then((_) {
return _copyBrowserJsFiles(dart2JSEntrypoints, assets);
}).then((_) {
log.message('Built $builtFiles ${pluralize('file', builtFiles)} '
'to "$outputDirectory".');
log.json.message({
"buildResult": "success",
"outputDirectory": outputDirectory,
"numFiles": builtFiles,
"log": logJson
});
});
});
}).catchError((error) {
// If [getAllAssets()] throws a BarbackException, the error has already
// been reported.
if (error is! BarbackException) throw error;
log.error(log.red("Build failed."));
log.json.message({
"buildResult": "failure",
"errors": errorsJson,
"log": logJson
});
return flushThenExit(exit_codes.DATA);
});
}
/// Writes [asset] to the appropriate build directory.
///
/// If [asset] is in the special "packages" directory, writes it to every
/// build directory.
Future _writeAsset(Asset asset) async {
// In release mode, strip out .dart files since all relevant ones have been
// compiled to JavaScript already.
if (mode == BarbackMode.RELEASE && asset.id.extension == ".dart") {
return null;
}
var destPath = _idToPath(asset.id);
// If the asset is from a public directory, copy it into all of the
// top-level build directories.
if (path.isWithin("packages", destPath)) {
return Future.wait(sourceDirectories.map((buildDir) =>
_writeOutputFile(asset, path.join(buildDir, destPath))));
}
return _writeOutputFile(asset, destPath);
}
/// Converts [id] to a relative path in the output directory for that asset.
///
/// This corresponds to the URL that could be used to request that asset from
/// pub serve.
///
/// Examples (where entrypoint is "myapp"):
///
/// myapp|web/index.html -> web/index.html
/// myapp|lib/lib.dart -> packages/myapp/lib.dart
/// foo|lib/foo.dart -> packages/foo/foo.dart
/// myapp|test/main.dart -> test/main.dart
/// foo|test/main.dart -> ERROR
///
/// Throws a [FormatException] if [id] is not a valid public asset.
String _idToPath(AssetId id) {
var parts = path.split(path.fromUri(id.path));
if (parts.length < 2) {
throw new FormatException(
"Can not build assets from top-level directory.");
}
// Map "lib" to the "packages" directory.
if (parts[0] == "lib") {
return path.join("packages", id.package, path.joinAll(parts.skip(1)));
}
// Shouldn't be trying to access non-public directories of other packages.
assert(id.package == entrypoint.root.name);
// Allow any path in the entrypoint package.
return path.joinAll(parts);
}
/// Writes the contents of [asset] to [relativePath] within the build
/// directory.
Future _writeOutputFile(Asset asset, String relativePath) {
builtFiles++;
var destPath = path.join(outputDirectory, relativePath);
ensureDir(path.dirname(destPath));
return createFileFromStream(asset.read(), destPath);
}
/// If this package depends directly on the `browser` package, this ensures
/// that the JavaScript bootstrap files are copied into `packages/browser/`
/// directories next to each entrypoint in [entrypoints].
Future _copyBrowserJsFiles(Iterable<AssetId> entrypoints, AssetSet assets) {
// Must depend on the browser package.
if (!entrypoint.root.immediateDependencies.any(
(dep) => dep.name == 'browser' && dep.source == 'hosted')) {
return new Future.value();
}
// Get all of the subdirectories that contain Dart entrypoints.
var entrypointDirs = entrypoints
// Convert the asset path to a native-separated one and get the
// directory containing the entrypoint.
.map((id) => path.dirname(path.fromUri(id.path)))
// Don't copy files to the top levels of the build directories since
// the normal lib asset copying will take care of that.
.where((dir) => path.split(dir).length > 1)
.toSet();
var jsAssets = assets.where((asset) =>
asset.id.package == 'browser' && asset.id.extension == '.js');
return Future.wait(entrypointDirs.expand((dir) {
// TODO(nweiz): we should put browser JS files next to any HTML file
// rather than any entrypoint. An HTML file could import an entrypoint
// that's not adjacent.
return jsAssets.map((asset) {
var jsPath = path.join(dir, _idToPath(asset.id));
return _writeOutputFile(asset, jsPath);
});
}));
}
/// Converts [entry] to a JSON object for use with JSON-formatted output.
Map _logEntryToJson(LogEntry entry) {
var data = {
"level": entry.level.name,
"transformer": {
"name": entry.transform.transformer.toString(),
"primaryInput": {
"package": entry.transform.primaryId.package,
"path": entry.transform.primaryId.path
},
},
"assetId": {
"package": entry.assetId.package,
"path": entry.assetId.path
},
"message": entry.message
};
if (entry.span != null) {
data["span"] = {
"url": entry.span.sourceUrl,
"start": {
"line": entry.span.start.line,
"column": entry.span.start.column
},
"end": {
"line": entry.span.end.line,
"column": entry.span.end.column
},
};
}
return data;
}
}
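
When run with --format json, the command writes a single JSON object to stdout matching the log.json.message calls above; illustrative success and failure outputs:

    {"buildResult": "success", "outputDirectory": "build", "numFiles": 42, "log": [...]}
    {"buildResult": "failure", "errors": [{"error": "..."}], "log": [...]}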

View file

@ -1,24 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.cache;
import '../command.dart';
import 'cache_add.dart';
import 'cache_list.dart';
import 'cache_repair.dart';
/// Handles the `cache` pub command.
class CacheCommand extends PubCommand {
String get name => "cache";
String get description => "Work with the system cache.";
String get invocation => "pub cache <subcommand>";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-cache.html";
CacheCommand() {
addSubcommand(new CacheAddCommand());
addSubcommand(new CacheListCommand());
addSubcommand(new CacheRepairCommand());
}
}

View file

@ -1,94 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.cache_add;
import 'dart:async';
import 'package:pub_semver/pub_semver.dart';
import '../command.dart';
import '../log.dart' as log;
import '../package.dart';
import '../utils.dart';
/// Handles the `cache add` pub command.
class CacheAddCommand extends PubCommand {
String get name => "add";
String get description => "Install a package.";
String get invocation =>
"pub cache add <package> [--version <constraint>] [--all]";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-cache.html";
CacheAddCommand() {
argParser.addFlag("all",
help: "Install all matching versions.",
negatable: false);
argParser.addOption("version", abbr: "v",
help: "Version constraint.");
}
Future run() async {
// Make sure there is a package.
if (argResults.rest.isEmpty) {
usageException("No package to add given.");
}
// Don't allow extra arguments.
if (argResults.rest.length > 1) {
var unexpected = argResults.rest.skip(1).map((arg) => '"$arg"');
var arguments = pluralize("argument", unexpected.length);
usageException("Unexpected $arguments ${toSentence(unexpected)}.");
}
var package = argResults.rest.single;
// Parse the version constraint, if there is one.
var constraint = VersionConstraint.any;
if (argResults["version"] != null) {
try {
constraint = new VersionConstraint.parse(argResults["version"]);
} on FormatException catch (error) {
usageException(error.message);
}
}
// TODO(rnystrom): Support installing from git too.
var source = cache.sources["hosted"];
// TODO(rnystrom): Allow specifying the server.
var pubspecs = await source.getVersions(package, package);
var versions = pubspecs.map((pubspec) => pubspec.version)
.where(constraint.allows).toList();
if (versions.isEmpty) {
// TODO(rnystrom): Show most recent unmatching version?
fail("Package $package has no versions that match $constraint.");
}
downloadVersion(version) async {
var id = new PackageId(package, source.name, version, package);
if (await cache.contains(id)) {
// TODO(rnystrom): Include source and description if not hosted.
// See solve_report.dart for code to harvest.
log.message("Already cached ${id.name} ${id.version}.");
return null;
}
// Download it.
await source.downloadToSystemCache(id);
}
if (argResults["all"]) {
// Install them in ascending order.
versions.sort();
await Future.forEach(versions, downloadVersion);
} else {
// Pick the best matching version.
versions.sort(Version.prioritize);
await downloadVersion(versions.last);
}
}
}
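
Typical invocations matching the flags defined above (the package name and constraints are illustrative):

    pub cache add barback                               # best matching version
    pub cache add barback --version ">=0.15.0 <0.16.0"  # best match in range
    pub cache add barback --version ">=0.15.0" --all    # every matching version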

View file

@ -1,35 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.cache_list;
import 'dart:convert';
import '../command.dart';
import '../log.dart' as log;
import '../source/cached.dart';
/// Handles the `cache list` pub command.
class CacheListCommand extends PubCommand {
String get name => "list";
String get description => "List packages in the system cache.";
String get invocation => "pub cache list";
bool get hidden => true;
bool get takesArguments => false;
void run() {
// TODO(keertip): Add flag to list packages from non default sources.
var packagesObj = <String, Map>{};
var source = cache.sources.defaultSource as CachedSource;
for (var package in source.getCachedPackages()) {
var packageInfo = packagesObj.putIfAbsent(package.name, () => {});
packageInfo[package.version.toString()] = {'location': package.dir};
}
// TODO(keertip): Add support for non-JSON format and check for --format
// flag.
log.message(JSON.encode({'packages': packagesObj}));
}
}
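
The emitted JSON maps package names to versions to cache locations; an illustrative example (paths depend on the local cache):

    {"packages": {"barback": {"0.15.2+2":
        {"location": "/home/user/.pub-cache/hosted/pub.dartlang.org/barback-0.15.2+2"}}}}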

View file

@ -1,64 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.cache_repair;
import 'dart:async';
import '../command.dart';
import '../exit_codes.dart' as exit_codes;
import '../io.dart';
import '../log.dart' as log;
import '../source/cached.dart';
import '../utils.dart';
/// Handles the `cache repair` pub command.
class CacheRepairCommand extends PubCommand {
String get name => "repair";
String get description => "Reinstall cached packages.";
String get invocation => "pub cache repair";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-cache.html";
bool get takesArguments => false;
Future run() async {
var successes = 0;
var failures = 0;
// Repair every cached source.
for (var source in cache.sources) {
if (source is! CachedSource) continue;
var results = await source.repairCachedPackages();
successes += results.first;
failures += results.last;
}
if (successes > 0) {
var packages = pluralize("package", successes);
log.message("Reinstalled ${log.green(successes)} $packages.");
}
if (failures > 0) {
var packages = pluralize("package", failures);
log.message("Failed to reinstall ${log.red(failures)} $packages.");
}
var results = await globals.repairActivatedPackages();
if (results.first > 0) {
var packages = pluralize("package", results.first);
log.message("Reactivated ${log.green(results.first)} $packages.");
}
if (results.last > 0) {
var packages = pluralize("package", results.last);
log.message("Failed to reactivate ${log.red(results.last)} $packages.");
}
if (successes == 0 && failures == 0) {
log.message("No packages in cache, so nothing to repair.");
}
if (failures > 0) await flushThenExit(exit_codes.UNAVAILABLE);
}
}

View file

@ -1,190 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.deps;
import 'dart:async';
import 'dart:collection';
import '../ascii_tree.dart' as tree;
import '../command.dart';
import '../log.dart' as log;
import '../package.dart';
import '../package_graph.dart';
import '../utils.dart';
/// Handles the `deps` pub command.
class DepsCommand extends PubCommand {
String get name => "deps";
String get description => "Print package dependencies.";
List<String> get aliases => const ["dependencies", "tab"];
String get invocation => "pub deps";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-deps.html";
bool get takesArguments => false;
/// The loaded package graph.
PackageGraph _graph;
/// The [StringBuffer] used to accumulate the output.
StringBuffer _buffer;
DepsCommand() {
argParser.addOption("style", abbr: "s",
help: "How output should be displayed.",
allowed: ["compact", "tree", "list"],
defaultsTo: "tree");
}
Future run() {
return entrypoint.loadPackageGraph().then((graph) {
_graph = graph;
_buffer = new StringBuffer();
_buffer.writeln(_labelPackage(entrypoint.root));
switch (argResults["style"]) {
case "compact": _outputCompact(); break;
case "list": _outputList(); break;
case "tree": _outputTree(); break;
}
log.message(_buffer);
});
}
/// Outputs a list of all of the package's immediate, dev, override, and
/// transitive dependencies.
///
/// For each dependency listed, *that* package's immediate dependencies are
/// shown. Unlike [_outputList], this prints all of these dependencies on one
/// line.
void _outputCompact() {
var root = entrypoint.root;
_outputCompactPackages("dependencies",
root.dependencies.map((dep) => dep.name));
_outputCompactPackages("dev dependencies",
root.devDependencies.map((dep) => dep.name));
_outputCompactPackages("dependency overrides",
root.dependencyOverrides.map((dep) => dep.name));
var transitive = _getTransitiveDependencies();
_outputCompactPackages("transitive dependencies", transitive);
}
/// Outputs one section of packages in the compact output.
_outputCompactPackages(String section, Iterable<String> names) {
if (names.isEmpty) return;
_buffer.writeln();
_buffer.writeln("$section:");
for (var name in ordered(names)) {
var package = _graph.packages[name];
_buffer.write("- ${_labelPackage(package)}");
if (package.dependencies.isEmpty) {
_buffer.writeln();
} else {
var depNames = package.dependencies.map((dep) => dep.name);
var depsList = "[${depNames.join(' ')}]";
_buffer.writeln(" ${log.gray(depsList)}");
}
}
}
/// Outputs a list of all of the package's immediate, dev, override, and
/// transitive dependencies.
///
/// For each dependency listed, *that* package's immediate dependencies are
/// shown.
void _outputList() {
var root = entrypoint.root;
_outputListSection("dependencies",
root.dependencies.map((dep) => dep.name));
_outputListSection("dev dependencies",
root.devDependencies.map((dep) => dep.name));
_outputListSection("dependency overrides",
root.dependencyOverrides.map((dep) => dep.name));
var transitive = _getTransitiveDependencies();
if (transitive.isEmpty) return;
_outputListSection("transitive dependencies", ordered(transitive));
}
/// Outputs one section of packages in the list output.
_outputListSection(String name, Iterable<String> deps) {
if (deps.isEmpty) return;
_buffer.writeln();
_buffer.writeln("$name:");
for (var name in deps) {
var package = _graph.packages[name];
_buffer.writeln("- ${_labelPackage(package)}");
for (var dep in package.dependencies) {
_buffer.writeln(
" - ${log.bold(dep.name)} ${log.gray(dep.constraint)}");
}
}
}
/// Generates a dependency tree for the root package.
///
/// If a package is encountered more than once (i.e. a shared or circular
/// dependency), later ones are not traversed. This is done in breadth-first
/// fashion so that a package will always be expanded at the shallowest
/// depth that it appears at.
void _outputTree() {
// The work list for the breadth-first traversal. It contains the package
// being added to the tree, and the parent map that will receive that
// package.
var toWalk = new Queue<Pair<Package, Map>>();
var visited = new Set<String>();
// Start with the root dependencies.
var packageTree = {};
for (var dep in entrypoint.root.immediateDependencies) {
toWalk.add(new Pair(_graph.packages[dep.name], packageTree));
}
// Do a breadth-first walk of the dependency graph.
while (toWalk.isNotEmpty) {
var pair = toWalk.removeFirst();
var package = pair.first;
var map = pair.last;
if (visited.contains(package.name)) {
map[log.gray('${package.name}...')] = {};
continue;
}
visited.add(package.name);
// Populate the map with this package's dependencies.
var childMap = {};
map[_labelPackage(package)] = childMap;
for (var dep in package.dependencies) {
toWalk.add(new Pair(_graph.packages[dep.name], childMap));
}
}
_buffer.write(tree.fromMap(packageTree, showAllChildren: true));
}
String _labelPackage(Package package) =>
"${log.bold(package.name)} ${package.version}";
/// Gets the names of the non-immediate dependencies of the root package.
Set<String> _getTransitiveDependencies() {
var transitive = _graph.packages.keys.toSet();
var root = entrypoint.root;
transitive.remove(root.name);
transitive.removeAll(root.dependencies.map((dep) => dep.name));
transitive.removeAll(root.devDependencies.map((dep) => dep.name));
transitive.removeAll(root.dependencyOverrides.map((dep) => dep.name));
return transitive;
}
}
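
As an illustration, `pub deps -s compact` prints the root label followed by one section per dependency group (names and versions here are made up):

    myapp 0.1.0

    dependencies:
    - barback 0.15.2+2 [path pool source_span]
    - path 1.3.5

    transitive dependencies:
    - pool 1.0.1 [stack_trace]
    - source_span 1.1.2 [path]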

View file

@ -1,40 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.downgrade;
import 'dart:async';
import '../command.dart';
import '../log.dart' as log;
import '../solver/version_solver.dart';
/// Handles the `downgrade` pub command.
class DowngradeCommand extends PubCommand {
String get name => "downgrade";
String get description =>
"Downgrade the current package's dependencies to oldest versions.\n\n"
"This doesn't modify the lockfile, so it can be reset with \"pub get\".";
String get invocation => "pub downgrade [dependencies...]";
bool get isOffline => argResults['offline'];
DowngradeCommand() {
argParser.addFlag('offline',
help: 'Use cached packages instead of accessing the network.');
argParser.addFlag('dry-run', abbr: 'n', negatable: false,
help: "Report what dependencies would change but don't change any.");
}
Future run() async {
var dryRun = argResults['dry-run'];
await entrypoint.acquireDependencies(SolveType.DOWNGRADE,
useLatest: argResults.rest, dryRun: dryRun);
if (isOffline) {
log.warning("Warning: Downgrading when offline may not update you to "
"the oldest versions of your dependencies.");
}
}
}

View file

@ -1,33 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.get;
import 'dart:async';
import '../command.dart';
import '../solver/version_solver.dart';
/// Handles the `get` pub command.
class GetCommand extends PubCommand {
String get name => "get";
String get description => "Get the current package's dependencies.";
String get invocation => "pub get";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-get.html";
List<String> get aliases => const ["install"];
bool get isOffline => argResults["offline"];
GetCommand() {
argParser.addFlag('offline',
help: 'Use cached packages instead of accessing the network.');
argParser.addFlag('dry-run', abbr: 'n', negatable: false,
help: "Report what dependencies would change but don't change any.");
}
Future run() {
return entrypoint.acquireDependencies(SolveType.GET,
dryRun: argResults['dry-run']);
}
}

View file

@ -1,25 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.global;
import '../command.dart';
import 'global_activate.dart';
import 'global_deactivate.dart';
import 'global_list.dart';
import 'global_run.dart';
/// Handles the `global` pub command.
class GlobalCommand extends PubCommand {
String get name => "global";
String get description => "Work with global packages.";
String get invocation => "pub global <subcommand>";
GlobalCommand() {
addSubcommand(new GlobalActivateCommand());
addSubcommand(new GlobalDeactivateCommand());
addSubcommand(new GlobalListCommand());
addSubcommand(new GlobalRunCommand());
}
}

View file

@ -1,103 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.global_activate;
import 'dart:async';
import 'package:pub_semver/pub_semver.dart';
import '../command.dart';
import '../utils.dart';
/// Handles the `global activate` pub command.
class GlobalActivateCommand extends PubCommand {
String get name => "activate";
String get description => "Make a package's executables globally available.";
String get invocation => "pub global activate <package...>";
GlobalActivateCommand() {
argParser.addOption("source",
abbr: "s",
help: "The source used to find the package.",
allowed: ["git", "hosted", "path"],
defaultsTo: "hosted");
argParser.addFlag("no-executables", negatable: false,
help: "Do not put executables on PATH.");
argParser.addOption("executable", abbr: "x",
help: "Executable(s) to place on PATH.",
allowMultiple: true);
argParser.addFlag("overwrite", negatable: false,
help: "Overwrite executables from other packages with the same name.");
}
Future run() {
// Default to `null`, which means all executables.
var executables;
if (argResults.wasParsed("executable")) {
if (argResults.wasParsed("no-executables")) {
usageException("Cannot pass both --no-executables and --executable.");
}
executables = argResults["executable"];
} else if (argResults["no-executables"]) {
// An empty list means no executables.
executables = [];
}
var overwrite = argResults["overwrite"];
var args = argResults.rest;
readArg([String error]) {
if (args.isEmpty) usageException(error);
var arg = args.first;
args = args.skip(1);
return arg;
}
validateNoExtraArgs() {
if (args.isEmpty) return;
var unexpected = args.map((arg) => '"$arg"');
var arguments = pluralize("argument", unexpected.length);
usageException("Unexpected $arguments ${toSentence(unexpected)}.");
}
switch (argResults["source"]) {
case "git":
var repo = readArg("No Git repository given.");
// TODO(rnystrom): Allow passing in a Git ref too.
validateNoExtraArgs();
return globals.activateGit(repo, executables,
overwriteBinStubs: overwrite);
case "hosted":
var package = readArg("No package to activate given.");
// Parse the version constraint, if there is one.
var constraint = VersionConstraint.any;
if (args.isNotEmpty) {
try {
constraint = new VersionConstraint.parse(readArg());
} on FormatException catch (error) {
usageException(error.message);
}
}
validateNoExtraArgs();
return globals.activateHosted(package, constraint, executables,
overwriteBinStubs: overwrite);
case "path":
var path = readArg("No package to activate given.");
validateNoExtraArgs();
return globals.activatePath(path, executables,
overwriteBinStubs: overwrite);
}
throw "unreachable";
}
}
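
Examples covering the three sources handled above (package names, URLs, and paths are illustrative):

    pub global activate markdown                      # hosted, any version
    pub global activate markdown ">=0.7.0 <0.8.0"     # hosted, constrained
    pub global activate -s git https://github.com/user/repo.git
    pub global activate -s path ./my_tool -x my_tool  # put "my_tool" on PATH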

View file

@ -1,34 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.global_deactivate;
import '../command.dart';
import '../log.dart' as log;
import '../utils.dart';
/// Handles the `global deactivate` pub command.
class GlobalDeactivateCommand extends PubCommand {
String get name => "deactivate";
String get description => "Remove a previously activated package.";
String get invocation => "pub global deactivate <package>";
void run() {
// Make sure there is a package.
if (argResults.rest.isEmpty) {
usageException("No package to deactivate given.");
}
// Don't allow extra arguments.
if (argResults.rest.length > 1) {
var unexpected = argResults.rest.skip(1).map((arg) => '"$arg"');
var arguments = pluralize("argument", unexpected.length);
usageException("Unexpected $arguments ${toSentence(unexpected)}.");
}
if (!globals.deactivate(argResults.rest.first)) {
dataError("No active package ${log.bold(argResults.rest.first)}.");
}
}
}

View file

@ -1,20 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.global_list;
import '../command.dart';
/// Handles the `global list` pub command.
class GlobalListCommand extends PubCommand {
String get name => "list";
String get description => 'List globally activated packages.';
String get invocation => 'pub global list';
bool get allowTrailingOptions => false;
bool get takesArguments => false;
void run() {
globals.listActivePackages();
}
}

View file

@ -1,61 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.global_run;
import 'dart:async';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as p;
import '../command.dart';
import '../io.dart';
import '../utils.dart';
/// Handles the `global run` pub command.
class GlobalRunCommand extends PubCommand {
String get name => "run";
String get description =>
"Run an executable from a globally activated package.\n"
"NOTE: We are currently optimizing this command's startup time.";
String get invocation => "pub global run <package>:<executable> [args...]";
bool get allowTrailingOptions => false;
/// The mode for barback transformers.
BarbackMode get mode => new BarbackMode(argResults["mode"]);
GlobalRunCommand() {
argParser.addOption("mode", defaultsTo: "release",
help: 'Mode to run transformers in.');
}
Future run() async {
if (argResults.rest.isEmpty) {
usageException("Must specify an executable to run.");
}
var package;
var executable = argResults.rest[0];
if (executable.contains(":")) {
var parts = split1(executable, ":");
package = parts[0];
executable = parts[1];
} else {
// If the package name is omitted, use the same name for both.
package = executable;
}
var args = argResults.rest.skip(1).toList();
if (p.split(executable).length > 1) {
// TODO(nweiz): Use adjacent strings when the new async/await compiler
// lands.
usageException('Cannot run an executable in a subdirectory of a global ' +
'package.');
}
var exitCode = await globals.runExecutable(package, executable, args,
mode: mode);
await flushThenExit(exitCode);
}
}

View file

@ -1,187 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.lish;
import 'dart:async';
import 'package:http/http.dart' as http;
import '../command.dart';
import '../ascii_tree.dart' as tree;
import '../http.dart';
import '../io.dart';
import '../log.dart' as log;
import '../oauth2.dart' as oauth2;
import '../source/hosted.dart';
import '../utils.dart';
import '../validator.dart';
/// Handles the `lish` and `publish` pub commands.
class LishCommand extends PubCommand {
String get name => "publish";
String get description => "Publish the current package to pub.dartlang.org.";
String get invocation => "pub publish [options]";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-lish.html";
List<String> get aliases => const ["lish", "lush"];
bool get takesArguments => false;
/// The URL of the server to which to upload the package.
Uri get server {
// An explicit argument takes precedence.
if (argResults.wasParsed('server')) {
return Uri.parse(argResults['server']);
}
// Otherwise, use the one specified in the pubspec.
if (entrypoint.root.pubspec.publishTo != null) {
return Uri.parse(entrypoint.root.pubspec.publishTo);
}
// Otherwise, use the default.
return Uri.parse(HostedSource.defaultUrl);
}
/// Whether the publish is just a preview.
bool get dryRun => argResults['dry-run'];
/// Whether the publish requires confirmation.
bool get force => argResults['force'];
LishCommand() {
argParser.addFlag('dry-run', abbr: 'n', negatable: false,
help: 'Validate but do not publish the package.');
argParser.addFlag('force', abbr: 'f', negatable: false,
help: 'Publish without confirmation if there are no errors.');
argParser.addOption('server', defaultsTo: HostedSource.defaultUrl,
help: 'The package server to which to upload this package.');
}
Future _publish(packageBytes) {
var cloudStorageUrl;
return oauth2.withClient(cache, (client) {
return log.progress('Uploading', () {
// TODO(nweiz): Cloud Storage can provide an XML-formatted error. We
// should report that error and exit.
var newUri = server.resolve("/api/packages/versions/new");
return client.get(newUri, headers: PUB_API_HEADERS).then((response) {
var parameters = parseJsonResponse(response);
var url = _expectField(parameters, 'url', response);
if (url is! String) invalidServerResponse(response);
cloudStorageUrl = Uri.parse(url);
var request = new http.MultipartRequest('POST', cloudStorageUrl);
request.headers['Pub-Request-Timeout'] = 'None';
var fields = _expectField(parameters, 'fields', response);
if (fields is! Map) invalidServerResponse(response);
fields.forEach((key, value) {
if (value is! String) invalidServerResponse(response);
request.fields[key] = value;
});
request.followRedirects = false;
request.files.add(new http.MultipartFile.fromBytes(
'file', packageBytes, filename: 'package.tar.gz'));
return client.send(request);
}).then(http.Response.fromStream).then((response) {
var location = response.headers['location'];
if (location == null) throw new PubHttpException(response);
return location;
}).then((location) => client.get(location, headers: PUB_API_HEADERS))
.then(handleJsonSuccess);
});
}).catchError((error) {
if (error is! PubHttpException) throw error;
var url = error.response.request.url;
if (urisEqual(url, cloudStorageUrl)) {
// TODO(nweiz): the response may have XML-formatted information about
// the error. Try to parse that out once we have an easily-accessible
// XML parser.
fail('Failed to upload the package.');
} else if (urisEqual(Uri.parse(url.origin), Uri.parse(server.origin))) {
handleJsonError(error.response);
} else {
throw error;
}
});
}
Future run() {
if (force && dryRun) {
usageException('Cannot use both --force and --dry-run.');
}
if (entrypoint.root.pubspec.isPrivate) {
dataError('A private package cannot be published.\n'
'You can enable this by changing the "publish_to" field in your '
'pubspec.');
}
var files = entrypoint.root.listFiles(useGitIgnore: true);
log.fine('Archiving and publishing ${entrypoint.root}.');
// Show the package contents so the user can verify they look OK.
var package = entrypoint.root;
log.message(
'Publishing ${package.name} ${package.version} to $server:\n'
'${tree.fromFiles(files, baseDir: entrypoint.root.dir)}');
var packageBytesFuture = createTarGz(files, baseDir: entrypoint.root.dir)
.toBytes();
// Validate the package.
return _validate(packageBytesFuture.then((bytes) => bytes.length))
.then((isValid) {
if (isValid) return packageBytesFuture.then(_publish);
});
}
/// Returns the value associated with [key] in [map]. Throws a user-friendly
/// error if [map] doesn't contain [key].
_expectField(Map map, String key, http.Response response) {
if (map.containsKey(key)) return map[key];
invalidServerResponse(response);
}
/// Validates the package. Completes to false if the upload should not
/// proceed.
Future<bool> _validate(Future<int> packageSize) {
return Validator.runAll(entrypoint, packageSize).then((pair) {
var errors = pair.first;
var warnings = pair.last;
if (!errors.isEmpty) {
log.error("Sorry, your package is missing "
"${(errors.length > 1) ? 'some requirements' : 'a requirement'} "
"and can't be published yet.\nFor more information, see: "
"http://pub.dartlang.org/doc/pub-lish.html.\n");
return false;
}
if (force) return true;
if (dryRun) {
var s = warnings.length == 1 ? '' : 's';
log.warning("\nPackage has ${warnings.length} warning$s.");
return false;
}
var message = '\nLooks great! Are you ready to upload your package';
if (!warnings.isEmpty) {
var s = warnings.length == 1 ? '' : 's';
message = "\nPackage has ${warnings.length} warning$s. Upload anyway";
}
return confirm(message).then((confirmed) {
if (!confirmed) {
log.error("Package upload canceled.");
return false;
}
return true;
});
});
}
}
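
The handshake in _publish assumes the server's /api/packages/versions/new endpoint returns JSON with a "url" to upload to and the "fields" to include in the multipart request; a rough, illustrative shape (field names beyond those two keys are guesses):

    {"url": "https://storage.example.com/upload",
     "fields": {"key": "...", "policy": "...", "signature": "..."}}

The tarball is then POSTed to that URL and the redirect "location" is followed to finish the publish.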

View file

@ -1,65 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.list_package_dirs;
import 'dart:async';
import 'package:path/path.dart' as path;
import '../command.dart';
import '../log.dart' as log;
import '../utils.dart';
/// Handles the `list-package-dirs` pub command.
class ListPackageDirsCommand extends PubCommand {
String get name => "list-package-dirs";
String get description => "Print local paths to dependencies.";
String get invocation => "pub list-package-dirs";
bool get takesArguments => false;
bool get hidden => true;
ListPackageDirsCommand() {
argParser.addOption("format",
help: "How output should be displayed.",
allowed: ["json"]);
}
Future run() {
log.json.enabled = true;
if (!entrypoint.lockFileExists) {
dataError('Package "myapp" has no lockfile. Please run "pub get" first.');
}
var output = {};
// Include the local paths to all locked packages.
var packages = {};
var futures = [];
entrypoint.lockFile.packages.forEach((name, package) {
var source = entrypoint.cache.sources[package.source];
futures.add(source.getDirectory(package).then((packageDir) {
packages[name] = path.join(packageDir, "lib");
}));
});
output["packages"] = packages;
// Include the self link.
packages[entrypoint.root.name] = entrypoint.root.path("lib");
// Include the file(s) which when modified will affect the results. For pub,
// that's just the pubspec and lockfile.
output["input_files"] = [
entrypoint.lockFilePath,
entrypoint.pubspecPath
];
return Future.wait(futures).then((_) {
log.json.message(output);
});
}
}
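
The resulting JSON maps each locked package (plus the root) to the local path of its lib directory and lists the files whose changes invalidate the result; illustrative output:

    {"packages": {"myapp": "/path/to/myapp/lib",
                  "barback": "/home/user/.pub-cache/hosted/pub.dartlang.org/barback-0.15.2+2/lib"},
     "input_files": ["/path/to/myapp/pubspec.lock", "/path/to/myapp/pubspec.yaml"]}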

View file

@ -1,85 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.run;
import 'dart:async';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as p;
import '../command.dart';
import '../executable.dart';
import '../io.dart';
import '../log.dart' as log;
import '../utils.dart';
/// Handles the `run` pub command.
class RunCommand extends PubCommand {
String get name => "run";
String get description => "Run an executable from a package.\n"
"NOTE: We are currently optimizing this command's startup time.";
String get invocation => "pub run <executable> [args...]";
bool get allowTrailingOptions => false;
RunCommand() {
argParser.addOption("mode",
help: 'Mode to run transformers in.\n'
'(defaults to "release" for dependencies, "debug" for '
'entrypoint)');
}
Future run() async {
if (argResults.rest.isEmpty) {
usageException("Must specify an executable to run.");
}
var package = entrypoint.root.name;
var executable = argResults.rest[0];
var args = argResults.rest.skip(1).toList();
// A command like "foo:bar" runs the "bar" script from the "foo" package.
// If there is no colon prefix, default to the root package.
if (executable.contains(":")) {
var components = split1(executable, ":");
package = components[0];
executable = components[1];
if (p.split(executable).length > 1) {
// TODO(nweiz): Use adjacent strings when the new async/await compiler
// lands.
usageException("Cannot run an executable in a subdirectory of a " +
"dependency.");
}
} else if (onlyIdentifierRegExp.hasMatch(executable)) {
// "pub run foo" means the same thing as "pub run foo:foo" as long as
// "foo" is a valid Dart identifier (and thus package name).
// TODO(nweiz): Remove this after Dart 1.10 ships.
var localPath = p.join("bin", "$executable.dart");
if (fileExists(localPath) && executable != entrypoint.root.name) {
log.warning(
'In future releases, "pub run $executable" will mean the same '
'thing as "pub run $executable:$executable".\n'
'Run "pub run ${p.join("bin", executable)}" explicitly to run the '
'local executable.');
} else {
package = executable;
}
}
var mode;
if (argResults['mode'] != null) {
mode = new BarbackMode(argResults['mode']);
} else if (package == entrypoint.root.name) {
mode = BarbackMode.DEBUG;
} else {
mode = BarbackMode.RELEASE;
}
var exitCode = await runExecutable(entrypoint, package, executable, args,
mode: mode);
await flushThenExit(exitCode);
}
}

View file

@ -1,185 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.serve;
import 'dart:async';
import 'dart:math' as math;
import 'package:barback/barback.dart';
import '../barback/asset_environment.dart';
import '../log.dart' as log;
import '../utils.dart';
import 'barback.dart';
final _arrow = getSpecial('\u2192', '=>');
/// Handles the `serve` pub command.
class ServeCommand extends BarbackCommand {
String get name => "serve";
String get description =>
'Run a local web development server.\n\n'
'By default, this serves "web/" and "test/", but an explicit list of \n'
'directories to serve can be provided as well.';
String get invocation => "pub serve [directories...]";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-serve.html";
String get hostname => argResults['hostname'];
/// The base port for the servers.
///
/// This will print a usage error and exit if the specified port is invalid.
int get port => parseInt(argResults['port'], 'port');
/// The port for the admin UI.
///
/// This will print a usage error and exit if the specified port is invalid.
int get adminPort {
var adminPort = argResults['admin-port'];
return adminPort == null ? null : parseInt(adminPort, 'admin port');
}
/// `true` if Dart entrypoints should be compiled to JavaScript.
bool get useDart2JS => argResults['dart2js'];
/// `true` if the admin server URL should be displayed on startup.
bool get logAdminUrl => argResults['log-admin-url'];
BarbackMode get defaultMode => BarbackMode.DEBUG;
List<String> get defaultSourceDirectories => ["web", "test"];
/// This completer is used to keep pub running (by not completing) and to
/// pipe fatal errors to pub's top-level error-handling machinery.
final _completer = new Completer();
ServeCommand() {
argParser.addOption("define", abbr: "D",
help: "Defines an environment constant for dart2js.",
allowMultiple: true, splitCommas: false);
argParser.addOption('hostname', defaultsTo: 'localhost',
help: 'The hostname to listen on.');
argParser.addOption('port', defaultsTo: '8080',
help: 'The base port to listen on.');
// TODO(rnystrom): A hidden option to print the URL that the admin server
// is bound to on startup. Since this is currently only used for the Web
// Socket interface, we don't want to show it to users, but the tests and
// Editor need this logged to know what port to bind to.
// Remove this (and always log) when #16954 is fixed.
argParser.addFlag('log-admin-url', defaultsTo: false, hide: true);
// TODO(nweiz): Make this public when issue 16954 is fixed.
argParser.addOption('admin-port', hide: true);
argParser.addFlag('dart2js', defaultsTo: true,
help: 'Compile Dart to JavaScript.');
argParser.addFlag('force-poll', defaultsTo: false,
help: 'Force the use of a polling filesystem watcher.');
}
Future onRunTransformerCommand() async {
var port = parseInt(argResults['port'], 'port');
var adminPort = argResults['admin-port'] == null ? null :
parseInt(argResults['admin-port'], 'admin port');
var watcherType = argResults['force-poll'] ?
WatcherType.POLLING : WatcherType.AUTO;
var environmentConstants = new Map.fromIterable(argResults["define"],
key: (pair) => pair.split("=").first,
value: (pair) => pair.split("=").last);
var environment = await AssetEnvironment.create(entrypoint, mode,
watcherType: watcherType, hostname: hostname, basePort: port,
useDart2JS: useDart2JS, environmentConstants: environmentConstants);
var directoryLength = sourceDirectories.map((dir) => dir.length)
.reduce(math.max);
if (adminPort != null) {
var server = await environment.startAdminServer(adminPort);
server.results.listen((_) {
// The admin server produces no result values.
assert(false);
}, onError: _fatalError);
if (logAdminUrl) {
log.message("Running admin server on "
"${log.bold('http://$hostname:${server.port}')}");
}
}
// Start up the servers. We pause updates while this is happening so
// that we don't log spurious build results in the middle of listing
// out the bound servers.
environment.pauseUpdates();
for (var directory in sourceDirectories) {
await _startServer(environment, directory, directoryLength);
}
// Now that the servers are up and logged, send them to barback.
environment.barback.errors.listen((error) {
log.error(log.red("Build error:\n$error"));
});
environment.barback.results.listen((result) {
if (result.succeeded) {
// TODO(rnystrom): Report using growl/inotify-send where available.
log.message("Build completed ${log.green('successfully')}");
} else {
log.message("Build completed with "
"${log.red(result.errors.length)} errors.");
}
}, onError: _fatalError);
environment.resumeUpdates();
await _completer.future;
}
Future _startServer(AssetEnvironment environment, String rootDirectory,
int directoryLength) async {
var server = await environment.serveDirectory(rootDirectory);
// In release mode, strip out .dart files since all relevant ones have
// been compiled to JavaScript already.
if (mode == BarbackMode.RELEASE) {
server.allowAsset = (url) => !url.path.endsWith(".dart");
}
// Add two characters to account for "[" and "]".
var prefix = log.gray(
padRight("[${server.rootDirectory}]", directoryLength + 2));
server.results.listen((result) {
var buffer = new StringBuffer();
buffer.write("$prefix ");
if (result.isSuccess) {
buffer.write(
"${log.green('GET')} ${result.url.path} $_arrow ${result.id}");
} else {
buffer.write("${log.red('GET')} ${result.url.path} $_arrow");
var error = result.error.toString();
if (error.contains("\n")) {
buffer.write("\n${prefixLines(error)}");
} else {
buffer.write(" $error");
}
}
log.message(buffer);
}, onError: _fatalError);
log.message("Serving ${entrypoint.root.name} "
"${padRight(server.rootDirectory, directoryLength)} "
"on ${log.bold('http://$hostname:${server.port}')}");
}
/// Reports [error] and exits the server.
void _fatalError(error, [stackTrace]) {
if (_completer.isCompleted) return;
_completer.completeError(error, stackTrace);
}
}
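A minimal standalone sketch of how the repeated --define options above become dart2js environment constants; the option values are invented examples and the snippet is not part of the original file:

// Each "--define NAME=VALUE" pair is split into one environment constant,
// mirroring the Map.fromIterable call in onRunTransformerCommand above.
void main() {
  var defines = ['LOG_LEVEL=fine', 'USE_MOCKS=true'];
  var environmentConstants = new Map.fromIterable(defines,
      key: (pair) => pair.split("=").first,
      value: (pair) => pair.split("=").last);
  print(environmentConstants); // {LOG_LEVEL: fine, USE_MOCKS: true}
}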

View file

@ -1,41 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.upgrade;
import 'dart:async';
import '../command.dart';
import '../log.dart' as log;
import '../solver/version_solver.dart';
/// Handles the `upgrade` pub command.
class UpgradeCommand extends PubCommand {
String get name => "upgrade";
String get description =>
"Upgrade the current package's dependencies to latest versions.";
String get invocation => "pub upgrade [dependencies...]";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-upgrade.html";
List<String> get aliases => const ["update"];
bool get isOffline => argResults['offline'];
UpgradeCommand() {
argParser.addFlag('offline',
help: 'Use cached packages instead of accessing the network.');
argParser.addFlag('dry-run', abbr: 'n', negatable: false,
help: "Report what dependencies would change but don't change any.");
}
Future run() async {
var dryRun = argResults['dry-run'];
await entrypoint.acquireDependencies(SolveType.UPGRADE,
useLatest: argResults.rest, dryRun: dryRun);
if (isOffline) {
log.warning("Warning: Upgrading when offline may not update you to the "
"latest versions of your dependencies.");
}
}
}
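A minimal sketch of the call run() above makes, assuming Entrypoint is importable from '../entrypoint.dart'; the package names are invented and the snippet is not part of the original file:

// Equivalent to `pub upgrade args http --dry-run`: only the named packages
// are unlocked; everything else keeps its locked version where possible.
Future exampleUpgrade(Entrypoint entrypoint) {
  return entrypoint.acquireDependencies(SolveType.UPGRADE,
      useLatest: ['args', 'http'], dryRun: true);
}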

View file

@ -1,84 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.uploader;
import 'dart:async';
import 'package:path/path.dart' as path;
import '../command.dart';
import '../entrypoint.dart';
import '../exit_codes.dart' as exit_codes;
import '../http.dart';
import '../io.dart';
import '../log.dart' as log;
import '../oauth2.dart' as oauth2;
import '../source/hosted.dart';
/// Handles the `uploader` pub command.
class UploaderCommand extends PubCommand {
String get name => "uploader";
String get description =>
"Manage uploaders for a package on pub.dartlang.org.";
String get invocation => "pub uploader [options] {add/remove} <email>";
String get docUrl => "http://dartlang.org/tools/pub/cmd/pub-uploader.html";
/// The URL of the package hosting server.
Uri get server => Uri.parse(argResults['server']);
UploaderCommand() {
argParser.addOption('server', defaultsTo: HostedSource.defaultUrl,
help: 'The package server on which the package is hosted.');
argParser.addOption('package',
help: 'The package whose uploaders will be modified.\n'
'(defaults to the current package)');
}
Future run() {
if (argResults.rest.isEmpty) {
log.error('No uploader command given.');
this.printUsage();
return flushThenExit(exit_codes.USAGE);
}
var rest = argResults.rest.toList();
// TODO(rnystrom): Use subcommands for these.
var command = rest.removeAt(0);
if (!['add', 'remove'].contains(command)) {
log.error('Unknown uploader command "$command".');
this.printUsage();
return flushThenExit(exit_codes.USAGE);
} else if (rest.isEmpty) {
log.error('No uploader given for "pub uploader $command".');
this.printUsage();
return flushThenExit(exit_codes.USAGE);
}
return new Future.sync(() {
var package = argResults['package'];
if (package != null) return package;
return new Entrypoint(path.current, cache).root.name;
}).then((package) {
var uploader = rest[0];
return oauth2.withClient(cache, (client) {
if (command == 'add') {
var url = server.resolve("/api/packages/"
"${Uri.encodeComponent(package)}/uploaders");
return client.post(url,
headers: PUB_API_HEADERS,
body: {"email": uploader});
} else { // command == 'remove'
var url = server.resolve("/api/packages/"
"${Uri.encodeComponent(package)}/uploaders/"
"${Uri.encodeComponent(uploader)}");
return client.delete(url, headers: PUB_API_HEADERS);
}
});
}).then(handleJsonSuccess)
.catchError((error) => handleJsonError(error.response),
test: (e) => e is PubHttpException);
}
}
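A small sketch, not part of the original file, showing how the add and remove uploader URLs above are resolved against the hosted server; the package name and email address are placeholder values:

// Resolves the same API endpoints that run() above posts to or deletes.
void main() {
  var server = Uri.parse('https://pub.dartlang.org');
  var package = 'my_package';
  var uploader = 'someone@example.com';

  var addUrl = server.resolve(
      "/api/packages/${Uri.encodeComponent(package)}/uploaders");
  var removeUrl = server.resolve(
      "/api/packages/${Uri.encodeComponent(package)}/uploaders/"
      "${Uri.encodeComponent(uploader)}");

  print(addUrl);    // https://pub.dartlang.org/api/packages/my_package/uploaders
  print(removeUrl); // ...uploaders/someone%40example.com
}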

View file

@ -1,20 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command.version;
import '../command.dart';
import '../log.dart' as log;
import '../sdk.dart' as sdk;
/// Handles the `version` pub command.
class VersionCommand extends PubCommand {
String get name => "version";
String get description => "Print pub version.";
String get invocation => "pub version";
void run() {
log.message("Pub ${sdk.version}");
}
}

View file

@ -1,183 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.command_runner;
import 'dart:async';
import 'dart:io';
import 'package:args/args.dart';
import 'package:args/command_runner.dart';
import 'package:http/http.dart' as http;
import 'command/build.dart';
import 'command/cache.dart';
import 'command/deps.dart';
import 'command/downgrade.dart';
import 'command/get.dart';
import 'command/global.dart';
import 'command/lish.dart';
import 'command/list_package_dirs.dart';
import 'command/run.dart';
import 'command/serve.dart';
import 'command/upgrade.dart';
import 'command/uploader.dart';
import 'command/version.dart';
import 'exceptions.dart';
import 'exit_codes.dart' as exit_codes;
import 'http.dart';
import 'io.dart';
import 'log.dart' as log;
import 'sdk.dart' as sdk;
import 'solver/version_solver.dart';
import 'utils.dart';
class PubCommandRunner extends CommandRunner {
String get usageFooter => "See http://dartlang.org/tools/pub for detailed "
"documentation.";
PubCommandRunner()
: super("pub", "Pub is a package manager for Dart.") {
argParser.addFlag('version', negatable: false,
help: 'Print pub version.');
argParser.addFlag('trace',
help: 'Print debugging information when an error occurs.');
argParser.addOption('verbosity',
help: 'Control output verbosity.',
allowed: ['normal', 'io', 'solver', 'all'],
allowedHelp: {
'normal': 'Show errors, warnings, and user messages.',
'io': 'Also show IO operations.',
'solver': 'Show steps during version resolution.',
'all': 'Show all output including internal tracing messages.'
});
argParser.addFlag('verbose', abbr: 'v', negatable: false,
help: 'Shortcut for "--verbosity=all".');
argParser.addFlag('with-prejudice', hide: !isAprilFools,
negatable: false, help: 'Execute commands with prejudice.');
argParser.addFlag('package-symlinks', hide: true, negatable: true,
defaultsTo: true);
addCommand(new BuildCommand());
addCommand(new CacheCommand());
addCommand(new DepsCommand());
addCommand(new DowngradeCommand());
addCommand(new GlobalCommand());
addCommand(new GetCommand());
addCommand(new ListPackageDirsCommand());
addCommand(new LishCommand());
addCommand(new RunCommand());
addCommand(new ServeCommand());
addCommand(new UpgradeCommand());
addCommand(new UploaderCommand());
addCommand(new VersionCommand());
}
Future run(List<String> arguments) async {
var options;
try {
options = super.parse(arguments);
} on UsageException catch (error) {
log.error(error.message);
await flushThenExit(exit_codes.USAGE);
}
await runCommand(options);
}
Future runCommand(ArgResults options) async {
log.withPrejudice = options['with-prejudice'];
if (options['version']) {
log.message('Pub ${sdk.version}');
return;
}
if (options['trace']) {
log.recordTranscript();
}
switch (options['verbosity']) {
case 'normal': log.verbosity = log.Verbosity.NORMAL; break;
case 'io': log.verbosity = log.Verbosity.IO; break;
case 'solver': log.verbosity = log.Verbosity.SOLVER; break;
case 'all': log.verbosity = log.Verbosity.ALL; break;
default:
// No specific verbosity given, so check for the shortcut.
if (options['verbose']) log.verbosity = log.Verbosity.ALL;
break;
}
log.fine('Pub ${sdk.version}');
await _validatePlatform();
var captureStackChains =
options['trace'] ||
options['verbose'] ||
options['verbosity'] == 'all';
try {
await captureErrors(() => super.runCommand(options),
captureStackChains: captureStackChains);
// Explicitly exit on success to ensure that any dangling dart:io handles
// don't cause the process to never terminate.
await flushThenExit(exit_codes.SUCCESS);
} catch (error, chain) {
log.exception(error, chain);
if (options['trace']) {
log.dumpTranscript();
} else if (!isUserFacingException(error)) {
// TODO(23505): Implement proper shell escaping, not a partial hack.
protectArgument(String x) => x.contains(' ') ? '"$x"' : x;
log.error("""
This is an unexpected error. Please run
pub --trace ${options.arguments.map(protectArgument).join(' ')}
and include the results in a bug report on http://dartbug.com/new.
""");
}
await flushThenExit(_chooseExitCode(error));
}
}
void printUsage() {
log.message(usage);
}
/// Returns the appropriate exit code for [exception], falling back on 1 if no
/// appropriate exit code could be found.
int _chooseExitCode(exception) {
while (exception is WrappedException) exception = exception.innerError;
if (exception is HttpException || exception is http.ClientException ||
exception is SocketException || exception is PubHttpException ||
exception is DependencyNotFoundException) {
return exit_codes.UNAVAILABLE;
} else if (exception is FormatException || exception is DataException) {
return exit_codes.DATA;
} else if (exception is UsageException) {
return exit_codes.USAGE;
} else {
return 1;
}
}
/// Checks that pub is running on a supported platform.
///
/// If it isn't, it prints an error message and exits. Completes when the
/// validation is done.
Future _validatePlatform() async {
if (Platform.operatingSystem != 'windows') return;
var result = await runProcess('ver', []);
if (result.stdout.join('\n').contains('XP')) {
log.error('Sorry, but pub is not supported on Windows XP.');
await flushThenExit(exit_codes.USAGE);
}
}
}
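A tiny sketch of the partial shell-quoting helper used in runCommand when echoing the failing command back for a bug report; the sample arguments are invented and the snippet is not part of the original file:

// Mirrors the local protectArgument closure above: arguments containing
// spaces are wrapped in double quotes, everything else is left alone.
String protectArgument(String x) => x.contains(' ') ? '"$x"' : x;

void main() {
  var arguments = ['run', 'bin/server.dart', 'my config.yaml'];
  print('pub --trace ' + arguments.map(protectArgument).join(' '));
  // pub --trace run bin/server.dart "my config.yaml"
}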

View file

@ -1,225 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// A library for compiling Dart code and manipulating analyzer parse trees.
library pub.dart;
import 'dart:async';
import 'dart:io';
import 'dart:isolate';
import 'package:analyzer/analyzer.dart';
import 'package:path/path.dart' as path;
import 'package:compiler/compiler.dart' as compiler;
import 'package:compiler/src/filenames.dart'
show appendSlash;
import '../../asset/dart/serialize.dart';
import 'io.dart';
import 'log.dart' as log;
/// Interface to communicate with dart2js.
///
/// This is basically an amalgamation of dart2js's
/// [compiler.CompilerInputProvider], [compiler.CompilerOutputProvider], and
/// [compiler.DiagnosticHandler] function types so that we can provide them
/// as a single unit.
abstract class CompilerProvider {
/// The URI to the root directory where "dart:" libraries can be found.
///
/// This is used as the base URL to generate library URLs that are then sent
/// back to [provideInput].
Uri get libraryRoot;
/// Given [uri], responds with a future that completes to the contents of
/// the input file at that URI.
///
/// The future can complete to a string or a list of bytes.
Future/*<String | List<int>>*/ provideInput(Uri uri);
/// Reports a diagnostic message from dart2js to the user.
void handleDiagnostic(Uri uri, int begin, int end, String message,
compiler.Diagnostic kind);
/// Given a [name] (which will be "" for the entrypoint) and a file extension,
/// returns an [EventSink] that dart2js can write to to emit an output file.
EventSink<String> provideOutput(String name, String extension);
}
/// Compiles [entrypoint] to JavaScript (or to Dart if [toDart] is true) as
/// well as any ancillary outputs dart2js creates.
///
/// Uses [provider] to communicate between dart2js and the caller. Returns a
/// future that completes when compilation is done.
///
/// By default, the package root is assumed to be adjacent to [entrypoint], but
/// if [packageRoot] is passed that will be used instead.
Future compile(String entrypoint, CompilerProvider provider, {
Iterable<String> commandLineOptions,
bool checked: false,
bool csp: false,
bool minify: true,
bool verbose: false,
Map<String, String> environment,
String packageRoot,
bool analyzeAll: false,
bool preserveUris: false,
bool suppressWarnings: false,
bool suppressHints: false,
bool suppressPackageWarnings: true,
bool terse: false,
bool includeSourceMapUrls: false,
bool toDart: false}) {
return new Future.sync(() {
var options = <String>['--categories=Client,Server'];
if (checked) options.add('--enable-checked-mode');
if (csp) options.add('--csp');
if (minify) options.add('--minify');
if (verbose) options.add('--verbose');
if (analyzeAll) options.add('--analyze-all');
if (preserveUris) options.add('--preserve-uris');
if (suppressWarnings) options.add('--suppress-warnings');
if (suppressHints) options.add('--suppress-hints');
if (!suppressPackageWarnings) options.add('--show-package-warnings');
if (terse) options.add('--terse');
if (toDart) options.add('--output-type=dart');
var sourceUrl = path.toUri(entrypoint);
options.add("--out=$sourceUrl.js");
// Add the source map URLs.
if (includeSourceMapUrls) {
options.add("--source-map=$sourceUrl.js.map");
}
if (environment == null) environment = {};
if (commandLineOptions != null) options.addAll(commandLineOptions);
if (packageRoot == null) {
packageRoot = path.join(path.dirname(entrypoint), 'packages');
}
return compiler.compile(
path.toUri(entrypoint),
provider.libraryRoot,
path.toUri(appendSlash(packageRoot)),
provider.provideInput,
provider.handleDiagnostic,
options,
provider.provideOutput,
environment);
});
}
/// Returns whether [dart] looks like an entrypoint file.
bool isEntrypoint(CompilationUnit dart) {
// Allow two or fewer arguments so that entrypoints intended for use with
// [spawnUri] get counted.
//
// TODO(nweiz): this misses the case where a Dart file doesn't contain main(),
// but pulls in another file that does via a `part` directive.
return dart.declarations.any((node) {
return node is FunctionDeclaration && node.name.name == "main" &&
node.functionExpression.parameters.parameters.length <= 2;
});
}
/// Efficiently parses the import and export directives in [contents].
///
/// If [name] is passed, it's used as the filename for error reporting.
List<UriBasedDirective> parseImportsAndExports(String contents, {String name}) {
var collector = new _DirectiveCollector();
parseDirectives(contents, name: name).accept(collector);
return collector.directives;
}
/// A simple visitor that collects import and export nodes.
class _DirectiveCollector extends GeneralizingAstVisitor {
final directives = <UriBasedDirective>[];
visitUriBasedDirective(UriBasedDirective node) => directives.add(node);
}
/// Runs [code] in an isolate.
///
/// [code] should be the contents of a Dart entrypoint. It may contain imports;
/// they will be resolved in the same context as the host isolate. [message] is
/// passed to the [main] method of the code being run; the caller is responsible
/// for using this to establish communication with the isolate.
///
/// [packageRoot] controls the package root of the isolate. It may be either a
/// [String] or a [Uri].
///
/// If [snapshot] is passed, the isolate will be loaded from that path if it
/// exists. Otherwise, a snapshot of the isolate's code will be saved to that
/// path once the isolate is loaded.
Future runInIsolate(String code, message, {packageRoot, String snapshot})
async {
if (snapshot != null && fileExists(snapshot)) {
log.fine("Spawning isolate from $snapshot.");
if (packageRoot != null) packageRoot = packageRoot.toString();
try {
await Isolate.spawnUri(path.toUri(snapshot), [], message,
packageRoot: packageRoot);
return;
} on IsolateSpawnException catch (error) {
log.fine("Couldn't load existing snapshot $snapshot:\n$error");
// Do nothing, we will regenerate the snapshot below.
}
}
await withTempDir((dir) async {
var dartPath = path.join(dir, 'runInIsolate.dart');
writeTextFile(dartPath, code, dontLogContents: true);
var port = new ReceivePort();
await Isolate.spawn(_isolateBuffer, {
'replyTo': port.sendPort,
'uri': path.toUri(dartPath).toString(),
'packageRoot': packageRoot == null ? null : packageRoot.toString(),
'message': message
});
var response = await port.first;
if (response['type'] == 'error') {
throw new CrossIsolateException.deserialize(response['error']);
}
if (snapshot == null) return;
ensureDir(path.dirname(snapshot));
var snapshotArgs = [];
if (packageRoot != null) snapshotArgs.add('--package-root=$packageRoot');
snapshotArgs.addAll(['--snapshot=$snapshot', dartPath]);
var result = await runProcess(Platform.executable, snapshotArgs);
if (result.success) return;
// Don't emit a fatal error here, since we don't want to crash the
// otherwise successful isolate load.
log.warning("Failed to compile a snapshot to "
"${path.relative(snapshot)}:\n" + result.stderr.join("\n"));
});
}
// TODO(nweiz): remove this when issue 12617 is fixed.
/// A function used as a buffer between the host isolate and [spawnUri].
///
/// [spawnUri] synchronously loads the file and its imports, which can deadlock
/// the host isolate if there's an HTTP import pointing at a server in the host.
/// Adding an additional isolate in the middle works around this.
void _isolateBuffer(message) {
var replyTo = message['replyTo'];
var packageRoot = message['packageRoot'];
if (packageRoot != null) packageRoot = Uri.parse(packageRoot);
Isolate.spawnUri(Uri.parse(message['uri']), [], message['message'],
packageRoot: packageRoot)
.then((_) => replyTo.send({'type': 'success'}))
.catchError((e, stack) {
replyTo.send({
'type': 'error',
'error': CrossIsolateException.serialize(e, stack)
});
});
}
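A short usage sketch for parseImportsAndExports above, assuming it runs in this same library; the source string is an invented example and the snippet is not part of the original file:

// Lists the URIs of the import/export directives in a source string without
// parsing function bodies.
void exampleDirectives() {
  var source = '''
import 'dart:async';
export 'src/helpers.dart';

void main() {}
''';
  for (var directive in parseImportsAndExports(source, name: 'example.dart')) {
    print(directive.uri.stringValue); // dart:async, then src/helpers.dart
  }
}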

View file

@ -1,564 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.entrypoint;
import 'dart:async';
import 'package:path/path.dart' as path;
import 'package:barback/barback.dart';
import 'barback/asset_environment.dart';
import 'io.dart';
import 'lock_file.dart';
import 'log.dart' as log;
import 'package.dart';
import 'package_graph.dart';
import 'sdk.dart' as sdk;
import 'solver/version_solver.dart';
import 'source/cached.dart';
import 'system_cache.dart';
import 'utils.dart';
/// The context surrounding the root package pub is operating on.
///
/// Pub operates over a directed graph of dependencies that starts at a root
/// "entrypoint" package. This is typically the package where the current
/// working directory is located. An entrypoint knows the [root] package it is
/// associated with and is responsible for managing the "packages" directory
/// for it.
///
/// That directory contains symlinks to all packages used by an app. These links
/// point either to the [SystemCache] or to some other location on the local
/// filesystem.
///
/// While entrypoints are typically applications, a pure library package may end
/// up being used as an entrypoint. Also, a single package may be used as an
/// entrypoint in one context but not in another. For example, a package that
/// contains a reusable library may not be the entrypoint when used by an app,
/// but may be the entrypoint when you're running its tests.
class Entrypoint {
/// The root package this entrypoint is associated with.
final Package root;
/// The system-wide cache which caches packages that need to be fetched over
/// the network.
final SystemCache cache;
/// Whether to create and symlink a "packages" directory containing links to
/// the installed packages.
final bool _packageSymlinks;
/// The lockfile for the entrypoint.
///
/// If not provided to the entrypoint, it will be loaded lazily from disk.
LockFile _lockFile;
/// The graph of all packages reachable from the entrypoint.
PackageGraph _packageGraph;
/// Loads the entrypoint from a package at [rootDir].
///
/// If [packageSymlinks] is `true`, this will create a "packages" directory
/// with symlinks to the installed packages. This directory will be symlinked
/// into any directory that might contain an entrypoint.
Entrypoint(String rootDir, SystemCache cache, {bool packageSymlinks: true})
: root = new Package.load(null, rootDir, cache.sources),
cache = cache,
_packageSymlinks = packageSymlinks;
/// Creates an entrypoint given package and lockfile objects.
Entrypoint.inMemory(this.root, this._lockFile, this.cache)
: _packageSymlinks = false;
/// The path to the entrypoint's "packages" directory.
String get packagesDir => root.path('packages');
/// `true` if the entrypoint package currently has a lock file.
bool get lockFileExists => _lockFile != null || entryExists(lockFilePath);
LockFile get lockFile {
if (_lockFile != null) return _lockFile;
if (!lockFileExists) {
_lockFile = new LockFile.empty();
} else {
_lockFile = new LockFile.load(lockFilePath, cache.sources);
}
return _lockFile;
}
/// The path to the entrypoint package's pubspec.
String get pubspecPath => root.path('pubspec.yaml');
/// The path to the entrypoint package's lockfile.
String get lockFilePath => root.path('pubspec.lock');
/// Gets all dependencies of the [root] package.
///
/// Performs version resolution according to [SolveType].
///
/// [useLatest], if provided, defines a list of packages that will be
/// unlocked and forced to their latest versions. If [type] is
/// [SolveType.UPGRADE], the previous lockfile is ignored and all packages are
/// re-resolved from scratch. Otherwise, it will attempt to preserve the versions of all
/// previously locked packages.
///
/// Shows a report of the changes made relative to the previous lockfile. If
/// this is an upgrade or downgrade, all transitive dependencies are shown in
/// the report. Otherwise, only dependencies that were changed are shown. If
/// [dryRun] is `true`, no physical changes are made.
Future acquireDependencies(SolveType type, {List<String> useLatest,
bool dryRun: false}) async {
var result = await resolveVersions(type, cache.sources, root,
lockFile: lockFile, useLatest: useLatest);
if (!result.succeeded) throw result.error;
result.showReport(type);
if (dryRun) {
result.summarizeChanges(type, dryRun: dryRun);
return;
}
// Install the packages and maybe link them into the entrypoint.
if (_packageSymlinks) {
cleanDir(packagesDir);
} else {
deleteEntry(packagesDir);
}
var ids = await Future.wait(result.packages.map(_get));
_saveLockFile(ids);
if (_packageSymlinks) _linkSelf();
_linkOrDeleteSecondaryPackageDirs();
result.summarizeChanges(type, dryRun: dryRun);
/// Build a package graph from the version solver results so we don't
/// have to reload and reparse all the pubspecs.
var packageGraph = await loadPackageGraph(result);
packageGraph.loadTransformerCache().clearIfOutdated(result.changedPackages);
try {
await precompileDependencies(changed: result.changedPackages);
await precompileExecutables(changed: result.changedPackages);
} catch (error, stackTrace) {
// Just log exceptions here. Since the method is just about acquiring
// dependencies, it shouldn't fail unless that fails.
log.exception(error, stackTrace);
}
}
/// Precompile any transformed dependencies of the entrypoint.
///
/// If [changed] is passed, only dependencies whose contents might be changed
/// if one of the given packages changes will be recompiled.
Future precompileDependencies({Iterable<String> changed}) async {
if (changed != null) changed = changed.toSet();
var graph = await loadPackageGraph();
// Just precompile the debug version of a package. We're mostly interested
// in improving speed for development iteration loops, which usually use
// debug mode.
var depsDir = path.join('.pub', 'deps', 'debug');
var dependenciesToPrecompile = graph.packages.values.where((package) {
if (package.pubspec.transformers.isEmpty) return false;
if (graph.isPackageMutable(package.name)) return false;
if (!dirExists(path.join(depsDir, package.name))) return true;
if (changed == null) return true;
/// Only recompile [package] if any of its transitive dependencies have
/// changed. We check all transitive dependencies because it's possible
/// that a transformer makes decisions based on their contents.
return overlaps(
graph.transitiveDependencies(package.name)
.map((package) => package.name).toSet(),
changed);
}).map((package) => package.name).toSet();
if (dirExists(depsDir)) {
// Delete any cached dependencies that are going to be recached.
for (var package in dependenciesToPrecompile) {
deleteEntry(path.join(depsDir, package));
}
// Also delete any cached dependencies that should no longer be cached.
for (var subdir in listDir(depsDir)) {
var package = graph.packages[path.basename(subdir)];
if (package == null || package.pubspec.transformers.isEmpty ||
graph.isPackageMutable(package.name)) {
deleteEntry(subdir);
}
}
}
if (dependenciesToPrecompile.isEmpty) return;
try {
await log.progress("Precompiling dependencies", () async {
var packagesToLoad =
unionAll(dependenciesToPrecompile.map(graph.transitiveDependencies))
.map((package) => package.name).toSet();
var environment = await AssetEnvironment.create(this, BarbackMode.DEBUG,
packages: packagesToLoad, useDart2JS: false);
/// Ignore barback errors since they'll be emitted via [getAllAssets]
/// below.
environment.barback.errors.listen((_) {});
// TODO(nweiz): only get assets from [dependenciesToPrecompile] so as
// not to trigger unnecessary lazy transformers.
var assets = await environment.barback.getAllAssets();
await waitAndPrintErrors(assets.map((asset) async {
if (!dependenciesToPrecompile.contains(asset.id.package)) return;
var destPath = path.join(
depsDir, asset.id.package, path.fromUri(asset.id.path));
ensureDir(path.dirname(destPath));
await createFileFromStream(asset.read(), destPath);
}));
log.message("Precompiled " +
toSentence(ordered(dependenciesToPrecompile).map(log.bold)) + ".");
});
} catch (_) {
// TODO(nweiz): When barback does a better job of associating errors with
// assets (issue 19491), catch and handle compilation errors on a
// per-package basis.
for (var package in dependenciesToPrecompile) {
deleteEntry(path.join(depsDir, package));
}
rethrow;
}
}
/// Precompiles all executables from dependencies that don't transitively
/// depend on [this] or on a path dependency.
Future precompileExecutables({Iterable<String> changed}) async {
if (changed != null) changed = changed.toSet();
var binDir = path.join('.pub', 'bin');
var sdkVersionPath = path.join(binDir, 'sdk-version');
// If the existing executable was compiled with a different SDK, we need to
// recompile regardless of what changed.
// TODO(nweiz): Use the VM to check this when issue 20802 is fixed.
var sdkMatches = fileExists(sdkVersionPath) &&
readTextFile(sdkVersionPath) == "${sdk.version}\n";
if (!sdkMatches) changed = null;
var graph = await loadPackageGraph();
// Clean out any outdated snapshots.
if (dirExists(binDir)) {
for (var entry in listDir(binDir)) {
if (!dirExists(entry)) continue;
var package = path.basename(entry);
if (!graph.packages.containsKey(package) ||
graph.isPackageMutable(package)) {
deleteEntry(entry);
}
}
}
var executables = new Map.fromIterable(root.immediateDependencies,
key: (dep) => dep.name,
value: (dep) => _executablesForPackage(graph, dep.name, changed));
for (var package in executables.keys.toList()) {
if (executables[package].isEmpty) executables.remove(package);
}
if (!sdkMatches) deleteEntry(binDir);
if (executables.isEmpty) return;
await log.progress("Precompiling executables", () async {
ensureDir(binDir);
// Make sure there's a trailing newline so our version file matches the
// SDK's.
writeTextFile(sdkVersionPath, "${sdk.version}\n");
var packagesToLoad =
unionAll(executables.keys.map(graph.transitiveDependencies))
.map((package) => package.name).toSet();
var executableIds = unionAll(
executables.values.map((ids) => ids.toSet()));
var environment = await AssetEnvironment.create(this, BarbackMode.RELEASE,
packages: packagesToLoad,
entrypoints: executableIds,
useDart2JS: false);
environment.barback.errors.listen((error) {
log.error(log.red("Build error:\n$error"));
});
await waitAndPrintErrors(executables.keys.map((package) async {
var dir = path.join(binDir, package);
cleanDir(dir);
await environment.precompileExecutables(package, dir,
executableIds: executables[package]);
}));
});
}
/// Returns the list of all executable assets for [packageName] that should be
/// precompiled.
///
/// If [changed] isn't `null`, executables for [packageName] will only be
/// compiled if they might depend on a package in [changed].
List<AssetId> _executablesForPackage(PackageGraph graph, String packageName,
Set<String> changed) {
var package = graph.packages[packageName];
var binDir = package.path('bin');
if (!dirExists(binDir)) return [];
if (graph.isPackageMutable(packageName)) return [];
var executables = package.executableIds;
// If we don't know which packages were changed, always precompile the
// executables.
if (changed == null) return executables;
// If any of the package's dependencies changed, recompile the executables.
if (graph.transitiveDependencies(packageName)
.any((package) => changed.contains(package.name))) {
return executables;
}
// If any executables don't exist, precompile them regardless of what
// changed. Since we delete the bin directory before recompiling, we need to
// recompile all executables.
var executablesExist = executables.every((executable) =>
fileExists(path.join('.pub', 'bin', packageName,
"${path.url.basename(executable.path)}.snapshot")));
if (!executablesExist) return executables;
// Otherwise, we don't need to recompile.
return [];
}
/// Makes sure the package at [id] is locally available.
///
/// This automatically downloads the package to the system-wide cache as well
/// if it requires network access to retrieve (specifically, if the package's
/// source is a [CachedSource]).
Future<PackageId> _get(PackageId id) {
if (id.isRoot) return new Future.value(id);
var source = cache.sources[id.source];
return new Future.sync(() {
if (!_packageSymlinks) {
if (source is! CachedSource) return null;
return source.downloadToSystemCache(id);
}
var packageDir = path.join(packagesDir, id.name);
if (entryExists(packageDir)) deleteEntry(packageDir);
return source.get(id, packageDir);
}).then((_) => source.resolveId(id));
}
/// Determines whether or not the lockfile is out of date with respect to the
/// pubspec.
///
/// This will be `false` if there is no lockfile at all, or if the pubspec
/// contains dependencies that are not in the lockfile or that don't match
/// what's in there.
bool _isLockFileUpToDate(LockFile lockFile) {
/// If this is an entrypoint for an in-memory package, trust the in-memory
/// lockfile provided for it.
if (root.dir == null) return true;
return root.immediateDependencies.every((package) {
var locked = lockFile.packages[package.name];
if (locked == null) return false;
if (package.source != locked.source) return false;
if (!package.constraint.allows(locked.version)) return false;
var source = cache.sources[package.source];
if (source == null) return false;
return source.descriptionsEqual(package.description, locked.description);
});
}
/// Determines whether all of the packages in the lockfile are already
/// installed and available.
///
/// Note: this assumes [_isLockFileUpToDate] has already been called and
/// returned `true`.
Future<bool> _arePackagesAvailable(LockFile lockFile) {
return Future.wait(lockFile.packages.values.map((package) {
var source = cache.sources[package.source];
// This should only be called after [_isLockFileUpToDate] has returned
// `true`, which ensures all of the sources in the lock file are valid.
assert(source != null);
// We only care about cached sources. Uncached sources aren't "installed".
// If one of those is missing, we want to show the user the file not
// found error later since installing won't accomplish anything.
if (source is! CachedSource) return new Future.value(true);
// Get the directory.
return source.getDirectory(package).then((dir) {
// See if the directory is there and looks like a package.
return dirExists(dir) || fileExists(path.join(dir, "pubspec.yaml"));
});
})).then((results) {
// Make sure they are all true.
return results.every((result) => result);
});
}
/// Gets dependencies if the lockfile is out of date with respect to the
/// pubspec.
Future ensureLockFileIsUpToDate() {
return new Future.sync(() {
// If we don't have a current lock file, we definitely need to install.
if (!_isLockFileUpToDate(lockFile)) {
if (lockFileExists) {
log.message(
"Your pubspec has changed, so we need to update your lockfile:");
} else {
log.message(
"You don't have a lockfile, so we need to generate that:");
}
return false;
}
// If we do have a lock file, we still need to make sure the packages
// are actually installed. The user may have just gotten a package that
// includes a lockfile.
return _arePackagesAvailable(lockFile).then((available) {
if (!available) {
log.message(
"You are missing some dependencies, so we need to install them "
"first:");
}
return available;
});
}).then((upToDate) {
if (upToDate) return null;
return acquireDependencies(SolveType.GET);
});
}
/// Loads the package graph for the application and all of its transitive
/// dependencies.
///
/// If [result] is passed, this loads the graph from it without re-parsing the
/// lockfile or any pubspecs. Otherwise, before loading, this makes sure the
/// lockfile and dependencies are installed and up to date.
Future<PackageGraph> loadPackageGraph([SolveResult result]) async {
if (_packageGraph != null) return _packageGraph;
var graph = await log.progress("Loading package graph", () async {
if (result != null) {
var packages = await Future.wait(result.packages.map((id) async {
var dir = await cache.sources[id.source].getDirectory(id);
return new Package(result.pubspecs[id.name], dir);
}));
return new PackageGraph(this, new LockFile(result.packages),
new Map.fromIterable(packages, key: (package) => package.name));
}
await ensureLockFileIsUpToDate();
var packages = await Future.wait(lockFile.packages.values.map((id) async {
var source = cache.sources[id.source];
var dir = await source.getDirectory(id);
return new Package.load(id.name, dir, cache.sources);
}));
var packageMap = new Map.fromIterable(packages, key: (p) => p.name);
packageMap[root.name] = root;
return new PackageGraph(this, lockFile, packageMap);
}, fine: true);
_packageGraph = graph;
return graph;
}
/// Saves a list of concrete package versions to the `pubspec.lock` file.
void _saveLockFile(List<PackageId> packageIds) {
_lockFile = new LockFile(packageIds);
var lockFilePath = root.path('pubspec.lock');
writeTextFile(lockFilePath, _lockFile.serialize(root.dir, cache.sources));
}
/// Creates a self-referential symlink in the `packages` directory that allows
/// a package to import its own files using `package:`.
void _linkSelf() {
var linkPath = path.join(packagesDir, root.name);
// Create the symlink if it doesn't exist.
if (entryExists(linkPath)) return;
ensureDir(packagesDir);
createPackageSymlink(root.name, root.dir, linkPath,
isSelfLink: true, relative: true);
}
/// If [packageSymlinks] is true, add "packages" directories to the whitelist
/// of directories that may contain Dart entrypoints.
///
/// Otherwise, delete any "packages" directories in the whitelist of
/// directories that may contain Dart entrypoints.
void _linkOrDeleteSecondaryPackageDirs() {
// Only the main "bin" directory gets a "packages" directory, not its
// subdirectories.
var binDir = root.path('bin');
if (dirExists(binDir)) _linkOrDeleteSecondaryPackageDir(binDir);
// The others get "packages" directories in subdirectories too.
for (var dir in ['benchmark', 'example', 'test', 'tool', 'web']) {
_linkOrDeleteSecondaryPackageDirsRecursively(root.path(dir));
}
}
/// If [packageSymlinks] is true, creates a symlink to the "packages"
/// directory in [dir] and all its subdirectories.
///
/// Otherwise, deletes any "packages" directories in [dir] and all its
/// subdirectories.
void _linkOrDeleteSecondaryPackageDirsRecursively(String dir) {
if (!dirExists(dir)) return;
_linkOrDeleteSecondaryPackageDir(dir);
_listDirWithoutPackages(dir)
.where(dirExists)
.forEach(_linkOrDeleteSecondaryPackageDir);
}
// TODO(nweiz): roll this into [listDir] in io.dart once issue 4775 is fixed.
/// Recursively lists the contents of [dir], excluding hidden `.DS_Store`
/// files and `packages` directories.
List<String> _listDirWithoutPackages(dir) {
return flatten(listDir(dir).map((file) {
if (path.basename(file) == 'packages') return [];
if (!dirExists(file)) return [];
var fileAndSubfiles = [file];
fileAndSubfiles.addAll(_listDirWithoutPackages(file));
return fileAndSubfiles;
}));
}
/// If [packageSymlinks] is true, creates a symlink to the "packages"
/// directory in [dir].
///
/// Otherwise, deletes the "packages" directory in [dir] if one exists.
void _linkOrDeleteSecondaryPackageDir(String dir) {
var symlink = path.join(dir, 'packages');
if (entryExists(symlink)) deleteEntry(symlink);
if (_packageSymlinks) createSymlink(packagesDir, symlink, relative: true);
}
}
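A hedged usage sketch, not part of the original file, of roughly how a command like `pub get` drives this class; the SystemCache is taken as a parameter because its construction happens elsewhere:

// Resolves and fetches dependencies for the package in the current
// directory, then loads the resulting package graph.
Future exampleGet(SystemCache cache) async {
  var entrypoint = new Entrypoint('.', cache);
  await entrypoint.acquireDependencies(SolveType.GET);
  var graph = await entrypoint.loadPackageGraph();
  print('Resolved ${graph.packages.length} packages.');
}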

View file

@ -1,297 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.error_group;
import 'dart:async';
/// An [ErrorGroup] entangles the errors of multiple [Future]s and [Stream]s
/// with one another.
///
/// This allows APIs to expose multiple [Future]s and [Stream]s that have
/// identical error conditions without forcing API consumers to attach error
/// handling to objects they don't care about.
///
/// To use an [ErrorGroup], register [Future]s and [Stream]s with it using
/// [registerFuture] and [registerStream]. These methods return wrapped versions
/// of the [Future]s and [Stream]s, which should then be used in place of the
/// originals. For example:
///
/// var errorGroup = new ErrorGroup();
/// future = errorGroup.registerFuture(future);
/// stream = errorGroup.registerStream(stream);
///
/// An [ErrorGroup] has two major effects on its wrapped members:
///
/// * An error in any member of the group will be propagated to every member
/// that hasn't already completed. If those members later complete, their
/// values will be ignored.
/// * If any member of this group has a listener, errors on members without
/// listeners won't get passed to the top-level error handler.
class ErrorGroup {
/// The [Future]s that are members of [this].
final _futures = <_ErrorGroupFuture>[];
/// The [Stream]s that are members of [this].
final _streams = <_ErrorGroupStream>[];
/// Whether [this] has completed, either successfully or with an error.
var _isDone = false;
/// The [Completer] for [done].
final _doneCompleter = new Completer();
/// The underlying [Future] for [done].
///
/// We need to be able to access it internally as an [_ErrorGroupFuture] so
/// we can check if it has listeners and signal errors on it.
_ErrorGroupFuture _done;
/// Returns a [Future] that completes successfully when all members of [this]
/// are complete, or with an error if any member receives an error.
///
/// This [Future] is effectively in the group in that an error on it won't be
/// passed to the top-level error handler unless no members of the group have
/// listeners attached.
Future get done => _done;
/// Creates a new group with no members.
ErrorGroup() {
this._done = new _ErrorGroupFuture(this, _doneCompleter.future);
}
/// Registers a [Future] as a member of [this].
///
/// Returns a wrapped version of [future] that should be used in its place.
///
/// If all members of [this] have already completed successfully or with an
/// error, it's a [StateError] to try to register a new [Future].
Future registerFuture(Future future) {
if (_isDone) {
throw new StateError("Can't register new members on a complete "
"ErrorGroup.");
}
var wrapped = new _ErrorGroupFuture(this, future);
_futures.add(wrapped);
return wrapped;
}
/// Registers a [Stream] as a member of [this].
///
/// Returns a wrapped version of [stream] that should be used in its place.
/// The returned [Stream] will be multi-subscription if and only if [stream]
/// is.
///
/// Since all errors in a group are passed to all members, the returned
/// [Stream] will automatically unsubscribe all its listeners when it
/// encounters an error.
///
/// If all members of [this] have already completed successfully or with an
/// error, it's a [StateError] to try to register a new [Stream].
Stream registerStream(Stream stream) {
if (_isDone) {
throw new StateError("Can't register new members on a complete "
"ErrorGroup.");
}
var wrapped = new _ErrorGroupStream(this, stream);
_streams.add(wrapped);
return wrapped;
}
/// Sends [error] to all members of [this].
///
/// Like errors that come from members, this will only be passed to the
/// top-level error handler if no members have listeners.
///
/// If all members of [this] have already completed successfully or with an
/// error, it's a [StateError] to try to signal an error.
void signalError(var error, [StackTrace stackTrace]) {
if (_isDone) {
throw new StateError("Can't signal errors on a complete ErrorGroup.");
}
_signalError(error, stackTrace);
}
/// Signal an error internally.
///
/// This is just like [signalError], but instead of throwing an error if
/// [this] is complete, it just does nothing.
void _signalError(var error, [StackTrace stackTrace]) {
if (_isDone) return;
var caught = false;
for (var future in _futures) {
if (future._isDone || future._hasListeners) caught = true;
future._signalError(error, stackTrace);
}
for (var stream in _streams) {
if (stream._isDone || stream._hasListeners) caught = true;
stream._signalError(error, stackTrace);
}
_isDone = true;
_done._signalError(error, stackTrace);
if (!caught && !_done._hasListeners) scheduleMicrotask((){ throw error; });
}
/// Notifies [this] that one of its member [Future]s is complete.
void _signalFutureComplete(_ErrorGroupFuture future) {
if (_isDone) return;
_isDone = _futures.every((future) => future._isDone) &&
_streams.every((stream) => stream._isDone);
if (_isDone) _doneCompleter.complete();
}
/// Notifies [this] that one of its member [Stream]s is complete.
void _signalStreamComplete(_ErrorGroupStream stream) {
if (_isDone) return;
_isDone = _futures.every((future) => future._isDone) &&
_streams.every((stream) => stream._isDone);
if (_isDone) _doneCompleter.complete();
}
}
/// A [Future] wrapper that keeps track of whether it's been completed and
/// whether it has any listeners.
///
/// It also notifies its parent [ErrorGroup] when it completes successfully or
/// receives an error.
class _ErrorGroupFuture implements Future {
/// The parent [ErrorGroup].
final ErrorGroup _group;
/// Whether [this] has completed, either successfully or with an error.
var _isDone = false;
/// The underlying [Completer] for [this].
final _completer = new Completer();
/// Whether [this] has any listeners.
bool _hasListeners = false;
/// Creates a new [_ErrorGroupFuture] that's a child of [_group] and wraps
/// [inner].
_ErrorGroupFuture(this._group, Future inner) {
inner.then((value) {
if (!_isDone) _completer.complete(value);
_isDone = true;
_group._signalFutureComplete(this);
}).catchError(_group._signalError);
// Make sure _completer.future doesn't automatically send errors to the
// top-level.
_completer.future.catchError((_) {});
}
Future then(onValue(value), {Function onError}) {
_hasListeners = true;
return _completer.future.then(onValue, onError: onError);
}
Future catchError(Function onError, {bool test(Object error)}) {
_hasListeners = true;
return _completer.future.catchError(onError, test: test);
}
Future whenComplete(void action()) {
_hasListeners = true;
return _completer.future.whenComplete(action);
}
Future timeout(Duration timeLimit, {void onTimeout()}) {
_hasListeners = true;
return _completer.future.timeout(timeLimit, onTimeout: onTimeout);
}
Stream asStream() {
_hasListeners = true;
return _completer.future.asStream();
}
/// Signal that an error from [_group] should be propagated through [this],
/// unless it's already complete.
void _signalError(var error, [StackTrace stackTrace]) {
if (!_isDone) _completer.completeError(error, stackTrace);
_isDone = true;
}
}
// TODO(nweiz): currently streams never report unhandled errors at the top
// level (issue 7843). When this is fixed, this class will need to prevent
// such errors from reaching the top level.
/// A [Stream] wrapper that keeps track of whether it's been completed and
/// whether it has any listeners.
///
/// It also notifies its parent [ErrorGroup] when it completes successfully or
/// receives an error.
class _ErrorGroupStream extends Stream {
/// The parent [ErrorGroup].
final ErrorGroup _group;
/// Whether [this] has completed, either successfully or with an error.
var _isDone = false;
/// The underlying [StreamController] for [this].
final StreamController _controller;
/// The controller's [Stream].
///
/// May be different from `_controller.stream` if the wrapped stream is a
/// broadcast stream.
Stream _stream;
/// The [StreamSubscription] that connects the wrapped [Stream] to
/// [_controller].
StreamSubscription _subscription;
/// Whether [this] has any listeners.
bool get _hasListeners => _controller.hasListener;
/// Creates a new [_ErrorGroupStream] that's a child of [_group] and wraps
/// [inner].
_ErrorGroupStream(this._group, Stream inner)
: _controller = new StreamController(sync: true) {
// Use old-style asBroadcastStream behavior - cancel source _subscription
// the first time the stream has no listeners.
_stream = inner.isBroadcast
? _controller.stream.asBroadcastStream(onCancel: (sub) => sub.cancel())
: _controller.stream;
_subscription = inner.listen((v) {
_controller.add(v);
}, onError: (e, [stackTrace]) {
_group._signalError(e, stackTrace);
}, onDone: () {
_isDone = true;
_group._signalStreamComplete(this);
_controller.close();
});
}
StreamSubscription listen(void onData(value),
{Function onError, void onDone(),
bool cancelOnError}) {
return _stream.listen(onData,
onError: onError,
onDone: onDone,
cancelOnError: true);
}
/// Signal that an error from [_group] should be propagated through [this],
/// unless it's already complete.
void _signalError(var e, [StackTrace stackTrace]) {
if (_isDone) return;
_subscription.cancel();
// Call these asynchronously to work around issue 7913.
new Future.value().then((_) {
_controller.addError(e, stackTrace);
_controller.close();
});
}
}
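A runnable sketch, not part of the original file, of the propagation described in the class comment: one error signaled on the group reaches every registered member that has a listener:

// Both the wrapped future and the group's `done` future receive the error,
// so it never reaches the top-level error handler.
void exampleErrorGroup() {
  var errorGroup = new ErrorGroup();
  var completer = new Completer();

  var wrapped = errorGroup.registerFuture(completer.future);
  wrapped.catchError((error) => print('member saw: $error'));
  errorGroup.done.catchError((error) => print('done saw: $error'));

  errorGroup.signalError(new StateError('something failed'));
}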

View file

@ -1,119 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.exceptions;
import 'dart:io';
import 'dart:isolate';
import "package:analyzer/analyzer.dart";
import 'package:args/command_runner.dart';
import "package:http/http.dart" as http;
import "package:stack_trace/stack_trace.dart";
import "package:yaml/yaml.dart";
import '../../asset/dart/serialize.dart';
/// An exception class for exceptions that are intended to be seen by the user.
///
/// These exceptions won't have any debugging information printed when they're
/// thrown.
class ApplicationException implements Exception {
final String message;
ApplicationException(this.message);
String toString() => message;
}
/// An exception class for exceptions that are intended to be seen by the user
/// and are associated with a problem in a file at some path.
class FileException implements ApplicationException {
final String message;
/// The path to the file that was missing or erroneous.
final String path;
FileException(this.message, this.path);
String toString() => message;
}
/// A class for exceptions that wrap other exceptions.
class WrappedException extends ApplicationException {
/// The underlying exception that [this] is wrapping, if any.
final innerError;
/// The stack chain for [innerError] if it exists.
final Chain innerChain;
WrappedException(String message, this.innerError, [StackTrace innerTrace])
: innerChain = innerTrace == null ? null : new Chain.forTrace(innerTrace),
super(message);
}
/// A class for exceptions that shouldn't be printed at the top level.
///
/// This is usually used when an exception has already been printed using
/// [log.exception].
class SilentException extends WrappedException {
SilentException(innerError, [StackTrace innerTrace])
: super(innerError.toString(), innerError, innerTrace);
}
/// A class for errors in a command's input data.
///
/// This corresponds to the [exit_codes.DATA] exit code.
class DataException extends ApplicationException {
DataException(String message)
: super(message);
}
/// A class for exceptions where a package could not be found in a [Source].
///
/// The source is responsible for wrapping its internal exceptions in this so
/// that other code in pub can use this to show a more detailed explanation of
/// why the package was being requested.
class PackageNotFoundException extends WrappedException {
PackageNotFoundException(String message, [innerError, StackTrace innerTrace])
: super(message, innerError, innerTrace);
}
/// All the names of user-facing exceptions.
final _userFacingExceptions = new Set<String>.from([
'ApplicationException', 'GitException',
// This refers to http.ClientException.
'ClientException',
// Errors coming from the Dart analyzer are probably caused by syntax errors
// in user code, so they're user-facing.
'AnalyzerError', 'AnalyzerErrorGroup',
// An error spawning an isolate probably indicates a transformer with an
// invalid import.
'IsolateSpawnException',
// IOException and subclasses.
'CertificateException', 'FileSystemException', 'HandshakeException',
'HttpException', 'IOException', 'ProcessException', 'RedirectException',
'SignalException', 'SocketException', 'StdoutException', 'TlsException',
'WebSocketException'
]);
/// Returns whether [error] is a user-facing error object.
///
/// This includes both [ApplicationException] and any dart:io errors.
bool isUserFacingException(error) {
if (error is CrossIsolateException) {
return _userFacingExceptions.contains(error.type);
}
// TODO(nweiz): unify this list with _userFacingExceptions when issue 5897 is
// fixed.
return error is ApplicationException ||
error is AnalyzerError ||
error is AnalyzerErrorGroup ||
error is IsolateSpawnException ||
error is IOException ||
error is http.ClientException ||
error is YamlException ||
error is UsageException;
}
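A brief sketch, not from the original file, of wrapping a lower-level error while keeping its stack chain and checking whether it is user-facing; the message strings are invented:

// WrappedException keeps the inner error and its chain; since it extends
// ApplicationException, isUserFacingException reports it as user-facing.
void exampleWrap() {
  try {
    throw new FormatException('missing "name" field');
  } catch (error, stackTrace) {
    var wrapped =
        new WrappedException('Could not parse pubspec.', error, stackTrace);
    print(isUserFacingException(wrapped)); // true
    print(wrapped.innerChain.terse);
  }
}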

View file

@ -1,240 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.executable;
import 'dart:async';
import 'dart:io';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as p;
import 'package:stack_trace/stack_trace.dart';
import 'barback/asset_environment.dart';
import 'entrypoint.dart';
import 'exit_codes.dart' as exit_codes;
import 'io.dart';
import 'log.dart' as log;
import 'utils.dart';
/// All signals that can be caught by a Dart process.
///
/// This intentionally omits SIGINT. SIGINT usually comes from a user pressing
/// Control+C on the terminal, and the terminal automatically passes the signal
/// to all processes in the process tree. If we forwarded it manually, the
/// subprocess would see two instances, which could cause problems. Instead, we
/// just ignore it and let the terminal pass it to the subprocess.
final _catchableSignals = Platform.isWindows
? [ProcessSignal.SIGHUP]
: [
ProcessSignal.SIGHUP,
ProcessSignal.SIGTERM,
ProcessSignal.SIGUSR1,
ProcessSignal.SIGUSR2,
ProcessSignal.SIGWINCH,
];
/// Runs [executable] from [package] reachable from [entrypoint].
///
/// The executable string is a relative Dart file path using native path
/// separators with or without a trailing ".dart" extension. It is contained
/// within [package], which should either be the entrypoint package or an
/// immediate dependency of it.
///
/// Arguments from [args] will be passed to the spawned Dart application.
///
/// If [mode] is passed, it's used as the barback mode; it defaults to
/// [BarbackMode.RELEASE].
///
/// Returns the exit code of the spawned app.
Future<int> runExecutable(Entrypoint entrypoint, String package,
String executable, Iterable<String> args, {bool isGlobal: false,
BarbackMode mode}) async {
if (mode == null) mode = BarbackMode.RELEASE;
// Make sure the package is an immediate dependency of the entrypoint or the
// entrypoint itself.
if (entrypoint.root.name != package &&
!entrypoint.root.immediateDependencies
.any((dep) => dep.name == package)) {
var graph = await entrypoint.loadPackageGraph();
if (graph.packages.containsKey(package)) {
dataError('Package "$package" is not an immediate dependency.\n'
'Cannot run executables in transitive dependencies.');
} else {
dataError('Could not find package "$package". Did you forget to add a '
'dependency?');
}
}
// Unless the user overrides the verbosity, we want to filter out the
// normal pub output shown while loading the environment.
if (log.verbosity == log.Verbosity.NORMAL) {
log.verbosity = log.Verbosity.WARNING;
}
// Ignore a trailing extension.
if (p.extension(executable) == ".dart") {
executable = p.withoutExtension(executable);
}
var localSnapshotPath = p.join(".pub", "bin", package,
"$executable.dart.snapshot");
if (!isGlobal && fileExists(localSnapshotPath) &&
// Dependencies are only snapshotted in release mode, since that's the
// default mode for them to run. We can't run them in a different mode
// using the snapshot.
mode == BarbackMode.RELEASE) {
return _runCachedExecutable(entrypoint, localSnapshotPath, args);
}
// If the command has a path separator, then it's a path relative to the
// root of the package. Otherwise, it's implicitly understood to be in
// "bin".
var rootDir = "bin";
var parts = p.split(executable);
if (parts.length > 1) {
assert(!isGlobal && package == entrypoint.root.name);
rootDir = parts.first;
} else {
executable = p.join("bin", executable);
}
var assetPath = "${p.url.joinAll(p.split(executable))}.dart";
var id = new AssetId(package, assetPath);
// TODO(nweiz): Use [packages] to only load assets from packages that the
// executable might load.
var environment = await AssetEnvironment.create(entrypoint, mode,
useDart2JS: false, entrypoints: [id]);
environment.barback.errors.listen((error) {
log.error(log.red("Build error:\n$error"));
});
var server;
if (package == entrypoint.root.name) {
// Serve the entire root-most directory containing the entrypoint. That
// ensures that, for example, things like `import '../../utils.dart';`
// will work from within some deeply nested script.
server = await environment.serveDirectory(rootDir);
} else {
// For other packages, always use the "bin" directory.
server = await environment.servePackageBinDirectory(package);
}
try {
await environment.barback.getAssetById(id);
} on AssetNotFoundException catch (error, stackTrace) {
var message = "Could not find ${log.bold(executable + ".dart")}";
if (package != entrypoint.root.name) {
message += " in package ${log.bold(server.package)}";
}
log.error("$message.");
log.fine(new Chain.forTrace(stackTrace));
return exit_codes.NO_INPUT;
}
var vmArgs = [];
// Run in checked mode.
// TODO(rnystrom): Make this configurable.
vmArgs.add("--checked");
// Get the URL of the executable, relative to the server's root directory.
var relativePath = p.url.relative(assetPath,
from: p.url.joinAll(p.split(server.rootDirectory)));
vmArgs.add(server.url.resolve(relativePath).toString());
vmArgs.addAll(args);
var process = await Process.start(Platform.executable, vmArgs);
_forwardSignals(process);
// Note: we're not using process.std___.pipe(std___) here because
// that prevents pub from also writing to the output streams.
process.stderr.listen(stderr.add);
process.stdout.listen(stdout.add);
stdin.listen(process.stdin.add);
return process.exitCode;
}
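/// Illustrative usage sketch (an assumption, not part of the original file):
/// roughly how a command such as `pub run` might call [runExecutable]. The
/// [Entrypoint] is assumed to already be constructed for the current package;
/// "hello" and its argument are made-up names.
Future<int> _exampleRunHello(Entrypoint entrypoint) {
  // Runs "bin/hello.dart" from the entrypoint package itself, passing one
  // argument and using the default (release) barback mode.
  return runExecutable(entrypoint, entrypoint.root.name, "hello", ["world"]);
}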
/// Runs the snapshot at [path] with [args] and hooks its stdout, stderr, and
/// stdin to this process's.
///
/// If [recompile] is passed, it's called if the snapshot is out-of-date. It's
/// expected to regenerate a snapshot at [path], after which the snapshot will
/// be re-run. It may return a Future.
///
/// If [checked] is set, runs the snapshot in checked mode.
///
/// Returns the snapshot's exit code.
///
/// This doesn't do any validation of the snapshot's SDK version.
Future<int> runSnapshot(String path, Iterable<String> args, {recompile(),
bool checked: false}) async {
var vmArgs = [path]..addAll(args);
// TODO(nweiz): pass a flag to silence the "Wrong full snapshot version"
// message when issue 20784 is fixed.
if (checked) vmArgs.insert(0, "--checked");
// We need to split stdin so that we can send the same input both to the
// first and second process, if we start more than one.
var stdin1;
var stdin2;
if (recompile == null) {
stdin1 = stdin;
} else {
var pair = tee(stdin);
stdin1 = pair.first;
stdin2 = pair.last;
}
runProcess(input) async {
var process = await Process.start(Platform.executable, vmArgs);
_forwardSignals(process);
// Note: we're not using process.std___.pipe(std___) here because
// that prevents pub from also writing to the output streams.
process.stderr.listen(stderr.add);
process.stdout.listen(stdout.add);
input.listen(process.stdin.add);
return process.exitCode;
}
var exitCode = await runProcess(stdin1);
if (recompile == null || exitCode != 253) return exitCode;
// Exit code 253 indicates that the snapshot version was out-of-date. If we
// can recompile, do so.
await recompile();
return runProcess(stdin2);
}
/// Forwards all catchable signals to [process].
void _forwardSignals(Process process) {
// See [_catchableSignals].
ProcessSignal.SIGINT.watch().listen(
(_) => log.fine("Ignoring SIGINT in pub."));
for (var signal in _catchableSignals) {
signal.watch().listen((_) {
log.fine("Forwarding $signal to running process.");
process.kill(signal);
});
}
}
/// Runs the executable snapshot at [snapshotPath].
Future<int> _runCachedExecutable(Entrypoint entrypoint, String snapshotPath,
List<String> args) {
return runSnapshot(snapshotPath, args, checked: true, recompile: () {
log.fine("Precompiled executable is out of date.");
return entrypoint.precompileExecutables();
});
}

View file

@ -1,60 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Exit code constants.
///
/// From [the BSD sysexits manpage][manpage]. Not every constant here is used,
/// even though some of the unused ones may be appropriate for errors
/// encountered by pub.
///
/// [manpage]: http://www.freebsd.org/cgi/man.cgi?query=sysexits
library pub.exit_codes;
/// The command completed successfully.
const SUCCESS = 0;
/// The command was used incorrectly.
const USAGE = 64;
/// The input data was incorrect.
const DATA = 65;
/// An input file did not exist or was unreadable.
const NO_INPUT = 66;
/// The user specified did not exist.
const NO_USER = 67;
/// The host specified did not exist.
const NO_HOST = 68;
/// A service is unavailable.
const UNAVAILABLE = 69;
/// An internal software error has been detected.
const SOFTWARE = 70;
/// An operating system error has been detected.
const OS = 71;
/// Some system file did not exist or was unreadable.
const OS_FILE = 72;
/// A user-specified output file cannot be created.
const CANT_CREATE = 73;
/// An error occurred while doing I/O on some file.
const IO = 74;
/// Temporary failure, indicating something that is not really an error.
const TEMP_FAIL = 75;
/// The remote system returned something invalid during a protocol exchange.
const PROTOCOL = 76;
/// The user did not have sufficient permissions.
const NO_PERM = 77;
/// Something was unconfigured or mis-configured.
const CONFIG = 78;
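// Illustrative sketch (an assumption, not part of the original library):
// callers import this library with a prefix and hand the constants to
// dart:io's exit() or return them from commands, e.g.:
//
//     import 'exit_codes.dart' as exit_codes;
//
//     if (!fileExists(inputPath)) return exit_codes.NO_INPUT;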

View file

@ -1,111 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Helper functionality for invoking Git.
library pub.git;
import 'dart:async';
import 'dart:io';
import 'package:stack_trace/stack_trace.dart';
import 'exceptions.dart';
import 'io.dart';
import 'log.dart' as log;
import 'utils.dart';
/// An exception thrown because a git command failed.
class GitException implements ApplicationException {
/// The arguments to the git command.
final List<String> args;
/// The standard error emitted by git.
final String stderr;
String get message => 'Git error. Command: git ${args.join(" ")}\n$stderr';
GitException(Iterable<String> args, this.stderr)
: args = args.toList();
String toString() => message;
}
/// Tests whether or not the git command-line app is available for use.
bool get isInstalled {
if (_isInstalledCache != null) return _isInstalledCache;
_isInstalledCache = _gitCommand != null;
return _isInstalledCache;
}
bool _isInstalledCache;
/// Runs a git process with [args] from [workingDir].
///
/// Returns the stdout as a list of strings if it succeeded. Completes to an
/// exception if it failed.
Future<List<String>> run(List<String> args,
{String workingDir, Map<String, String> environment}) {
if (!isInstalled) {
fail("Cannot find a Git executable.\n"
"Please ensure Git is correctly installed.");
}
log.muteProgress();
return runProcess(_gitCommand, args, workingDir: workingDir,
environment: environment).then((result) {
if (!result.success) throw new GitException(args, result.stderr.join("\n"));
return result.stdout;
}).whenComplete(() {
log.unmuteProgress();
});
}
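/// Illustrative usage sketch (an assumption, not part of the original file):
/// reads the commit hash at HEAD of a hypothetical checkout in [repoDir].
Future<String> _exampleCurrentRevision(String repoDir) {
  // "git rev-parse HEAD" prints exactly one line containing the hash.
  return run(["rev-parse", "HEAD"], workingDir: repoDir)
      .then((lines) => lines.first);
}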
/// Like [run], but synchronous.
List<String> runSync(List<String> args, {String workingDir,
Map<String, String> environment}) {
if (!isInstalled) {
fail("Cannot find a Git executable.\n"
"Please ensure Git is correctly installed.");
}
var result = runProcessSync(_gitCommand, args,
workingDir: workingDir,
environment: environment);
if (!result.success) throw new GitException(args, result.stderr.join("\n"));
return result.stdout;
}
/// Returns the name of the git command-line app, or null if Git could not be
/// found on the user's PATH.
String get _gitCommand {
  if (_commandCache != null) return _commandCache;
  if (_tryGitCommand("git")) {
    _commandCache = "git";
  } else if (_tryGitCommand("git.cmd")) {
    _commandCache = "git.cmd";
  } else {
    return null;
  }
  log.fine('Determined git command $_commandCache.');
  return _commandCache;
}
String _commandCache;
/// Checks whether [command] is the Git command for this computer.
bool _tryGitCommand(String command) {
// If "git --version" prints something familiar, git is working.
try {
var result = runProcessSync(command, ["--version"]);
var regexp = new RegExp("^git version");
return result.stdout.length == 1 && regexp.hasMatch(result.stdout.single);
} on ProcessException catch (error, stackTrace) {
var chain = new Chain.forTrace(stackTrace);
// If the process failed, they probably don't have it.
log.message('Git command is not "$command": $error\n$chain');
return false;
}
}

View file

@ -1,783 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.global_packages;
import 'dart:async';
import 'dart:io';
import 'package:path/path.dart' as p;
import 'package:barback/barback.dart';
import 'package:pub_semver/pub_semver.dart';
import 'barback/asset_environment.dart';
import 'entrypoint.dart';
import 'exceptions.dart';
import 'executable.dart' as exe;
import 'io.dart';
import 'lock_file.dart';
import 'log.dart' as log;
import 'package.dart';
import 'pubspec.dart';
import 'sdk.dart' as sdk;
import 'solver/version_solver.dart';
import 'source/cached.dart';
import 'source/git.dart';
import 'source/path.dart';
import 'system_cache.dart';
import 'utils.dart';
/// Maintains the set of packages that have been globally activated.
///
/// These have been hand-chosen by the user to make their executables in bin/
/// available to the entire system. This lets them access them even when the
/// current working directory is not inside another entrypoint package.
///
/// Only one version of a given package name can be globally activated at a
/// time. Activating a different version of a package will deactivate the
/// previous one.
///
/// This handles packages from uncached and cached sources a little differently.
/// For a cached source, the package is physically in the user's pub cache and
/// we don't want to mess with it by putting a lockfile in there. Instead, when
/// we activate the package, we create a full lockfile and put it in the
/// "global_packages" directory. It's named "<package>.lock". Unlike a normal
/// lockfile, it also contains an entry for the root package itself, so that we
/// know the version and description that was activated.
///
/// Uncached packages (i.e. "path" packages) are somewhere else on the user's
/// local file system and can have a lockfile directly in place. (And, in fact,
/// we want to ensure we honor the user's lockfile there.) To activate it, we
/// just need to know where that package directory is. For that, we create a
/// lockfile that *only* contains the root package's [PackageId] -- basically
/// just the path to the directory where the real lockfile lives.
class GlobalPackages {
/// The [SystemCache] containing the global packages.
final SystemCache cache;
/// The directory where the lockfiles for activated packages are stored.
String get _directory => p.join(cache.rootDir, "global_packages");
/// The directory where binstubs for global package executables are stored.
String get _binStubDir => p.join(cache.rootDir, "bin");
/// Creates a new global package registry backed by the given directory on
/// the user's file system.
///
/// The directory may not physically exist yet. If not, this will create it
/// when needed.
GlobalPackages(this.cache);
/// Caches the package located in the Git repository [repo] and makes it the
/// active global version.
///
/// [executables] is the names of the executables that should have binstubs.
/// If `null`, all executables in the package will get binstubs. If empty, no
/// binstubs will be created.
///
/// If [overwriteBinStubs] is `true`, any binstubs that collide with
/// existing binstubs in other packages will be overwritten by this one's.
/// Otherwise, the previous ones will be preserved.
Future activateGit(String repo, List<String> executables,
{bool overwriteBinStubs}) async {
var source = cache.sources["git"] as GitSource;
var name = await source.getPackageNameFromRepo(repo);
// Call this just to log what the current active package is, if any.
_describeActive(name);
// TODO(nweiz): Add some special handling for git repos that contain path
// dependencies. Their executables shouldn't be cached, and there should
// be a mechanism for redoing dependency resolution if a path pubspec has
// changed (see also issue 20499).
await _installInCache(
new PackageDep(name, "git", VersionConstraint.any, repo),
executables, overwriteBinStubs: overwriteBinStubs);
}
/// Finds the latest version of the hosted package with [name] that matches
/// [constraint] and makes it the active global version.
///
/// [executables] is the names of the executables that should have binstubs.
/// If `null`, all executables in the package will get binstubs. If empty, no
/// binstubs will be created.
///
/// If [overwriteBinStubs] is `true`, any binstubs that collide with
/// existing binstubs in other packages will be overwritten by this one's.
/// Otherwise, the previous ones will be preserved.
Future activateHosted(String name, VersionConstraint constraint,
List<String> executables, {bool overwriteBinStubs}) async {
_describeActive(name);
await _installInCache(new PackageDep(name, "hosted", constraint, name),
executables, overwriteBinStubs: overwriteBinStubs);
}
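  /// Illustrative sketch (an assumption, not part of the original source):
  /// activates the latest hosted version of a made-up package named
  /// "foo_cli", creating binstubs for every executable it declares.
  Future _exampleActivateLatestFooCli() {
    // Passing `null` for the executables means "all of them"; existing
    // binstubs owned by other packages are left untouched.
    return activateHosted("foo_cli", VersionConstraint.any, null,
        overwriteBinStubs: false);
  }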
/// Makes the local package at [path] globally active.
///
/// [executables] is the names of the executables that should have binstubs.
/// If `null`, all executables in the package will get binstubs. If empty, no
/// binstubs will be created.
///
/// If [overwriteBinStubs] is `true`, any binstubs that collide with
/// existing binstubs in other packages will be overwritten by this one's.
/// Otherwise, the previous ones will be preserved.
Future activatePath(String path, List<String> executables,
{bool overwriteBinStubs}) async {
var entrypoint = new Entrypoint(path, cache);
// Get the package's dependencies.
await entrypoint.ensureLockFileIsUpToDate();
var name = entrypoint.root.name;
// Call this just to log what the current active package is, if any.
_describeActive(name);
// Write a lockfile that points to the local package.
var fullPath = canonicalize(entrypoint.root.dir);
var id = new PackageId(name, "path", entrypoint.root.version,
PathSource.describePath(fullPath));
// TODO(rnystrom): Look in "bin" and display list of binaries that
// user can run.
_writeLockFile(name, new LockFile([id]));
var binDir = p.join(_directory, name, 'bin');
if (dirExists(binDir)) deleteEntry(binDir);
_updateBinStubs(entrypoint.root, executables,
overwriteBinStubs: overwriteBinStubs);
}
/// Installs the package [dep] and its dependencies into the system cache.
Future _installInCache(PackageDep dep, List<String> executables,
{bool overwriteBinStubs}) async {
// Create a dummy package with just [dep] so we can do resolution on it.
var root = new Package.inMemory(new Pubspec("pub global activate",
dependencies: [dep], sources: cache.sources));
// Resolve it and download its dependencies.
var result = await resolveVersions(SolveType.GET, cache.sources, root);
if (!result.succeeded) {
// If the package specified by the user doesn't exist, we want to
// surface that as a [DataError] with the associated exit code.
if (result.error.package != dep.name) throw result.error;
if (result.error is NoVersionException) dataError(result.error.message);
throw result.error;
}
result.showReport(SolveType.GET);
// Make sure all of the dependencies are locally installed.
var ids = await Future.wait(result.packages.map(_cacheDependency));
var lockFile = new LockFile(ids);
// Load the package graph from [result] so we don't need to re-parse all
// the pubspecs.
var graph = await new Entrypoint.inMemory(root, lockFile, cache)
.loadPackageGraph(result);
var snapshots = await _precompileExecutables(graph.entrypoint, dep.name);
_writeLockFile(dep.name, lockFile);
_updateBinStubs(graph.packages[dep.name], executables,
overwriteBinStubs: overwriteBinStubs, snapshots: snapshots);
}
/// Precompiles the executables for [package] and saves them in the global
/// cache.
///
/// Returns a map from executable name to path for the snapshots that were
/// successfully precompiled.
Future<Map<String, String>> _precompileExecutables(Entrypoint entrypoint,
String package) {
return log.progress("Precompiling executables", () async {
var binDir = p.join(_directory, package, 'bin');
cleanDir(binDir);
var graph = await entrypoint.loadPackageGraph();
var environment = await AssetEnvironment.create(
entrypoint, BarbackMode.RELEASE,
entrypoints: graph.packages[package].executableIds,
useDart2JS: false);
environment.barback.errors.listen((error) {
log.error(log.red("Build error:\n$error"));
});
return environment.precompileExecutables(package, binDir);
});
}
/// Downloads [id] into the system cache if it's a cached package.
///
/// Returns the resolved [PackageId] for [id].
Future<PackageId> _cacheDependency(PackageId id) async {
var source = cache.sources[id.source];
if (!id.isRoot && source is CachedSource) {
await source.downloadToSystemCache(id);
}
return source.resolveId(id);
}
/// Finishes activating package [package] by saving [lockFile] in the cache.
void _writeLockFile(String package, LockFile lockFile) {
ensureDir(p.join(_directory, package));
// TODO(nweiz): This cleans up Dart 1.6's old lockfile location. Remove it
// when Dart 1.6 is old enough that we don't think anyone will have these
// lockfiles anymore (issue 20703).
var oldPath = p.join(_directory, "$package.lock");
if (fileExists(oldPath)) deleteEntry(oldPath);
writeTextFile(_getLockFilePath(package),
lockFile.serialize(cache.rootDir, cache.sources));
var id = lockFile.packages[package];
log.message('Activated ${_formatPackage(id)}.');
}
/// Shows the user the currently active package with [name], if any.
void _describeActive(String name) {
try {
var lockFile = new LockFile.load(_getLockFilePath(name), cache.sources);
var id = lockFile.packages[name];
if (id.source == 'git') {
var url = GitSource.urlFromDescription(id.description);
log.message('Package ${log.bold(name)} is currently active from Git '
'repository "${url}".');
} else if (id.source == 'path') {
var path = PathSource.pathFromDescription(id.description);
log.message('Package ${log.bold(name)} is currently active at path '
'"$path".');
} else {
log.message('Package ${log.bold(name)} is currently active at version '
'${log.bold(id.version)}.');
}
} on IOException {
// If we couldn't read the lock file, it's not activated.
return null;
}
}
/// Deactivates a previously-activated package named [name].
///
/// Returns `false` if no package with [name] was currently active.
bool deactivate(String name) {
var dir = p.join(_directory, name);
if (!dirExists(dir)) return false;
_deleteBinStubs(name);
var lockFile = new LockFile.load(_getLockFilePath(name), cache.sources);
var id = lockFile.packages[name];
log.message('Deactivated package ${_formatPackage(id)}.');
deleteEntry(dir);
return true;
}
/// Finds the active package with [name].
///
/// Returns an [Entrypoint] loaded with the active package if found.
Future<Entrypoint> find(String name) async {
var lockFilePath = _getLockFilePath(name);
var lockFile;
try {
lockFile = new LockFile.load(lockFilePath, cache.sources);
} on IOException {
var oldLockFilePath = p.join(_directory, '$name.lock');
try {
// TODO(nweiz): This looks for Dart 1.6's old lockfile location.
// Remove it when Dart 1.6 is old enough that we don't think anyone
// will have these lockfiles anymore (issue 20703).
lockFile = new LockFile.load(oldLockFilePath, cache.sources);
} on IOException {
// If we couldn't read the lock file, it's not activated.
dataError("No active package ${log.bold(name)}.");
}
// Move the old lockfile to its new location.
ensureDir(p.dirname(lockFilePath));
new File(oldLockFilePath).renameSync(lockFilePath);
}
// Load the package from the cache.
var id = lockFile.packages[name];
lockFile.packages.remove(name);
var source = cache.sources[id.source];
if (source is CachedSource) {
// For cached sources, the package itself is in the cache and the
// lockfile is the one we just loaded.
var dir = await cache.sources[id.source].getDirectory(id);
var package = new Package.load(name, dir, cache.sources);
return new Entrypoint.inMemory(package, lockFile, cache);
}
// For uncached sources (i.e. path), the ID just points to the real
// directory for the package.
assert(id.source == "path");
return new Entrypoint(PathSource.pathFromDescription(id.description),
cache);
}
/// Runs [package]'s [executable] with [args].
///
/// If [executable] is available in its precompiled form, that will be
/// recompiled if the SDK has been upgraded since it was first compiled and
/// then run. Otherwise, it will be run from source.
///
/// If [mode] is passed, it's used as the barback mode; it defaults to
/// [BarbackMode.RELEASE].
///
/// Returns the exit code from the executable.
Future<int> runExecutable(String package, String executable,
Iterable<String> args, {BarbackMode mode}) {
if (mode == null) mode = BarbackMode.RELEASE;
var binDir = p.join(_directory, package, 'bin');
if (mode != BarbackMode.RELEASE ||
!fileExists(p.join(binDir, '$executable.dart.snapshot'))) {
return find(package).then((entrypoint) {
return exe.runExecutable(entrypoint, package, executable, args,
mode: mode, isGlobal: true);
});
}
// Unless the user overrides the verbosity, we want to filter out the
// normal pub output shown while loading the environment.
if (log.verbosity == log.Verbosity.NORMAL) {
log.verbosity = log.Verbosity.WARNING;
}
var snapshotPath = p.join(binDir, '$executable.dart.snapshot');
return exe.runSnapshot(snapshotPath, args, recompile: () {
log.fine("$package:$executable is out of date and needs to be "
"recompiled.");
return find(package)
.then((entrypoint) => entrypoint.loadPackageGraph())
.then((graph) => _precompileExecutables(graph.entrypoint, package));
});
}
/// Gets the path to the lock file for an activated cached package with
/// [name].
String _getLockFilePath(String name) =>
p.join(_directory, name, "pubspec.lock");
/// Shows the user a formatted list of globally activated packages.
void listActivePackages() {
if (!dirExists(_directory)) return;
listDir(_directory).map(_loadPackageId).toList()
..sort((id1, id2) => id1.name.compareTo(id2.name))
..forEach((id) => log.message(_formatPackage(id)));
}
/// Returns the [PackageId] for the globally-activated package at [path].
///
/// [path] should be a path within [_directory]. It can either be an old-style
/// path to a single lockfile or a new-style path to a directory containing a
/// lockfile.
PackageId _loadPackageId(String path) {
var name = p.basenameWithoutExtension(path);
if (!fileExists(path)) path = p.join(path, 'pubspec.lock');
var id = new LockFile.load(p.join(_directory, path), cache.sources)
.packages[name];
if (id == null) {
throw new FormatException("Pubspec for activated package $name didn't "
"contain an entry for itself.");
}
return id;
}
/// Returns a formatted string representing the package [id].
String _formatPackage(PackageId id) {
if (id.source == 'git') {
var url = GitSource.urlFromDescription(id.description);
return '${log.bold(id.name)} ${id.version} from Git repository "$url"';
} else if (id.source == 'path') {
var path = PathSource.pathFromDescription(id.description);
return '${log.bold(id.name)} ${id.version} at path "$path"';
} else {
return '${log.bold(id.name)} ${id.version}';
}
}
/// Repairs any corrupted globally-activated packages and their binstubs.
///
/// Returns a pair of two [int]s. The first indicates how many packages were
/// successfully re-activated; the second indicates how many failed.
Future<Pair<int, int>> repairActivatedPackages() async {
var executables = {};
if (dirExists(_binStubDir)) {
for (var entry in listDir(_binStubDir)) {
try {
var binstub = readTextFile(entry);
var package = _binStubProperty(binstub, "Package");
if (package == null) {
throw new ApplicationException("No 'Package' property.");
}
var executable = _binStubProperty(binstub, "Executable");
if (executable == null) {
throw new ApplicationException("No 'Executable' property.");
}
executables.putIfAbsent(package, () => []).add(executable);
} catch (error, stackTrace) {
log.error(
"Error reading binstub for "
"\"${p.basenameWithoutExtension(entry)}\"",
error, stackTrace);
tryDeleteEntry(entry);
}
}
}
var successes = 0;
var failures = 0;
if (dirExists(_directory)) {
for (var entry in listDir(_directory)) {
var id;
try {
id = _loadPackageId(entry);
log.message("Reactivating ${log.bold(id.name)} ${id.version}...");
var entrypoint = await find(id.name);
var graph = await entrypoint.loadPackageGraph();
var snapshots = await _precompileExecutables(entrypoint, id.name);
var packageExecutables = executables.remove(id.name);
if (packageExecutables == null) packageExecutables = [];
_updateBinStubs(graph.packages[id.name], packageExecutables,
overwriteBinStubs: true, snapshots: snapshots,
suggestIfNotOnPath: false);
successes++;
} catch (error, stackTrace) {
var message = "Failed to reactivate "
"${log.bold(p.basenameWithoutExtension(entry))}";
if (id != null) {
message += " ${id.version}";
if (id.source != "hosted") message += " from ${id.source}";
}
log.error(message, error, stackTrace);
failures++;
tryDeleteEntry(entry);
}
}
}
if (executables.isNotEmpty) {
var message = new StringBuffer("Binstubs exist for non-activated "
"packages:\n");
executables.forEach((package, executableNames) {
// TODO(nweiz): Use a normal for loop here when
// https://github.com/dart-lang/async_await/issues/68 is fixed.
executableNames.forEach((executable) =>
deleteEntry(p.join(_binStubDir, executable)));
message.writeln(" From ${log.bold(package)}: "
"${toSentence(executableNames)}");
});
log.error(message);
}
return new Pair(successes, failures);
}
/// Updates the binstubs for [package].
///
/// A binstub is a little shell script in `PUB_CACHE/bin` that runs an
/// executable from a globally activated package. This removes any old
/// binstubs from the previously activated version of the package and
/// (optionally) creates new ones for the executables listed in the package's
/// pubspec.
///
/// [executables] is the names of the executables that should have binstubs.
/// If `null`, all executables in the package will get binstubs. If empty, no
/// binstubs will be created.
///
/// If [overwriteBinStubs] is `true`, any binstubs that collide with
/// existing binstubs in other packages will be overwritten by this one's.
/// Otherwise, the previous ones will be preserved.
///
/// If [snapshots] is given, it is a map of the names of executables whose
/// snapshots were precompiled to the paths of those snapshots. Binstubs for
/// those will run the snapshot directly and skip pub entirely.
///
/// If [suggestIfNotOnPath] is `true` (the default), this will warn the user if
/// the bin directory isn't on their path.
void _updateBinStubs(Package package, List<String> executables,
{bool overwriteBinStubs, Map<String, String> snapshots,
bool suggestIfNotOnPath: true}) {
if (snapshots == null) snapshots = const {};
// Remove any previously activated binstubs for this package, in case the
// list of executables has changed.
_deleteBinStubs(package.name);
if ((executables != null && executables.isEmpty) ||
package.pubspec.executables.isEmpty) {
return;
}
ensureDir(_binStubDir);
var installed = [];
var collided = {};
var allExecutables = ordered(package.pubspec.executables.keys);
for (var executable in allExecutables) {
if (executables != null && !executables.contains(executable)) continue;
var script = package.pubspec.executables[executable];
var previousPackage = _createBinStub(package, executable, script,
overwrite: overwriteBinStubs, snapshot: snapshots[script]);
if (previousPackage != null) {
collided[executable] = previousPackage;
if (!overwriteBinStubs) continue;
}
installed.add(executable);
}
if (installed.isNotEmpty) {
var names = namedSequence("executable", installed.map(log.bold));
log.message("Installed $names.");
}
// Show errors for any collisions.
if (collided.isNotEmpty) {
for (var command in ordered(collided.keys)) {
if (overwriteBinStubs) {
log.warning("Replaced ${log.bold(command)} previously installed from "
"${log.bold(collided[command])}.");
} else {
log.warning("Executable ${log.bold(command)} was already installed "
"from ${log.bold(collided[command])}.");
}
}
if (!overwriteBinStubs) {
log.warning("Deactivate the other package(s) or activate "
"${log.bold(package.name)} using --overwrite.");
}
}
// Show errors for any unknown executables.
if (executables != null) {
var unknown = ordered(executables.where(
(exe) => !package.pubspec.executables.keys.contains(exe)));
if (unknown.isNotEmpty) {
dataError("Unknown ${namedSequence('executable', unknown)}.");
}
}
// Show errors for any missing scripts.
// TODO(rnystrom): This can print false positives since a script may be
// produced by a transformer. Do something better.
var binFiles = package.listFiles(beneath: "bin", recursive: false)
.map((path) => package.relative(path))
.toList();
for (var executable in installed) {
var script = package.pubspec.executables[executable];
var scriptPath = p.join("bin", "$script.dart");
if (!binFiles.contains(scriptPath)) {
log.warning('Warning: Executable "$executable" runs "$scriptPath", '
'which was not found in ${log.bold(package.name)}.');
}
}
if (suggestIfNotOnPath && installed.isNotEmpty) {
_suggestIfNotOnPath(installed.first);
}
}
/// Creates a binstub named [executable] that runs [script] from [package].
///
/// If [overwrite] is `true`, this will replace an existing binstub with that
/// name for another package.
///
/// If [snapshot] is non-null, it is a path to a snapshot file. The binstub
/// will invoke that directly. Otherwise, it will run `pub global run`.
///
/// If a collision occurs, returns the name of the package that owns the
/// existing binstub. Otherwise returns `null`.
String _createBinStub(Package package, String executable, String script,
{bool overwrite, String snapshot}) {
var binStubPath = p.join(_binStubDir, executable);
if (Platform.operatingSystem == "windows") binStubPath += ".bat";
// See if the binstub already exists. If so, it's for another package
// since we already deleted all of this package's binstubs.
var previousPackage;
if (fileExists(binStubPath)) {
var contents = readTextFile(binStubPath);
previousPackage = _binStubProperty(contents, "Package");
if (previousPackage == null) {
log.fine("Could not parse binstub $binStubPath:\n$contents");
} else if (!overwrite) {
return previousPackage;
}
}
// If the script was precompiled to a snapshot, just invoke that directly
// and skip pub global run entirely.
var invocation;
if (snapshot != null) {
// We expect absolute paths from the precompiler since relative ones
// won't be relative to the right directory when the user runs this.
assert(p.isAbsolute(snapshot));
invocation = 'dart "$snapshot"';
} else {
invocation = "pub global run ${package.name}:$script";
}
if (Platform.operatingSystem == "windows") {
var batch = """
@echo off
rem This file was created by pub v${sdk.version}.
rem Package: ${package.name}
rem Version: ${package.version}
rem Executable: ${executable}
rem Script: ${script}
$invocation %*
""";
if (snapshot != null) {
batch += """
rem The VM exits with code 253 if the snapshot version is out-of-date.
rem If it is, we need to delete it and run "pub global" manually.
if not errorlevel 253 (
exit /b %errorlevel%
)
pub global run ${package.name}:$script %*
""";
}
writeTextFile(binStubPath, batch);
} else {
var bash = """
#!/usr/bin/env sh
# This file was created by pub v${sdk.version}.
# Package: ${package.name}
# Version: ${package.version}
# Executable: ${executable}
# Script: ${script}
$invocation "\$@"
""";
if (snapshot != null) {
bash += """
# The VM exits with code 253 if the snapshot version is out-of-date.
# If it is, we need to delete it and run "pub global" manually.
exit_code=\$?
if [ \$exit_code != 253 ]; then
exit \$exit_code
fi
pub global run ${package.name}:$script "\$@"
""";
}
writeTextFile(binStubPath, bash);
// Make it executable.
var result = Process.runSync('chmod', ['+x', binStubPath]);
if (result.exitCode != 0) {
// Couldn't make it executable so don't leave it laying around.
try {
deleteEntry(binStubPath);
} on IOException catch (err) {
// Do nothing. We're going to fail below anyway.
log.fine("Could not delete binstub:\n$err");
}
fail('Could not make "$binStubPath" executable (exit code '
'${result.exitCode}):\n${result.stderr}');
}
}
return previousPackage;
}
/// Deletes all existing binstubs for [package].
void _deleteBinStubs(String package) {
if (!dirExists(_binStubDir)) return;
for (var file in listDir(_binStubDir, includeDirs: false)) {
var contents = readTextFile(file);
var binStubPackage = _binStubProperty(contents, "Package");
if (binStubPackage == null) {
log.fine("Could not parse binstub $file:\n$contents");
continue;
}
if (binStubPackage == package) {
log.fine("Deleting old binstub $file");
deleteEntry(file);
}
}
}
/// Checks to see if the binstubs are on the user's PATH and, if not, suggests
/// that the user add the directory to their PATH.
///
/// [installed] should be the name of an installed executable that can be used
/// to test whether accessing it on the path works.
void _suggestIfNotOnPath(String installed) {
if (Platform.operatingSystem == "windows") {
// See if the shell can find one of the binstubs.
// "\q" means return exit code 0 if found or 1 if not.
var result = runProcessSync("where", [r"\q", installed + ".bat"]);
if (result.exitCode == 0) return;
log.warning(
"${log.yellow('Warning:')} Pub installs executables into "
"${log.bold(_binStubDir)}, which is not on your path.\n"
"You can fix that by adding that directory to your system's "
'"Path" environment variable.\n'
'A web search for "configure windows path" will show you how.');
} else {
// See if the shell can find one of the binstubs.
var result = runProcessSync("which", [installed]);
if (result.exitCode == 0) return;
var binDir = _binStubDir;
if (binDir.startsWith(Platform.environment['HOME'])) {
binDir = p.join("~", p.relative(binDir,
from: Platform.environment['HOME']));
}
log.warning(
"${log.yellow('Warning:')} Pub installs executables into "
"${log.bold(binDir)}, which is not on your path.\n"
"You can fix that by adding this to your shell's config file "
"(.bashrc, .bash_profile, etc.):\n"
"\n"
" ${log.bold('export PATH="\$PATH":"$binDir"')}\n"
"\n");
}
}
/// Returns the value of the property named [name] in the bin stub script
/// [source].
String _binStubProperty(String source, String name) {
var pattern = new RegExp(quoteRegExp(name) + r": ([a-zA-Z0-9_-]+)");
var match = pattern.firstMatch(source);
return match == null ? null : match[1];
}
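  /// Illustrative sketch (an assumption, not part of the original source):
  /// given the header comments that [_createBinStub] writes, recovers which
  /// package owns the binstub file at a hypothetical [path].
  String _exampleBinStubOwner(String path) {
    var contents = readTextFile(path);
    // Matches the "Package: <name>" line in the generated script.
    return _binStubProperty(contents, "Package");
  }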
}

View file

@ -1,258 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Helpers for dealing with HTTP.
library pub.http;
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'package:http/http.dart' as http;
import 'package:http_throttle/http_throttle.dart';
import 'package:stack_trace/stack_trace.dart';
import 'io.dart';
import 'log.dart' as log;
import 'oauth2.dart' as oauth2;
import 'sdk.dart' as sdk;
import 'utils.dart';
// TODO(nweiz): make this configurable
/// The amount of time in milliseconds to allow HTTP requests before assuming
/// they've failed.
final HTTP_TIMEOUT = 30 * 1000;
/// Headers and field names that should be censored in the log output.
final _CENSORED_FIELDS = const ['refresh_token', 'authorization'];
/// Headers required for pub.dartlang.org API requests.
///
/// The Accept header tells pub.dartlang.org which version of the API we're
/// expecting, so it can either serve that version or give us a 406 error if
/// it's not supported.
final PUB_API_HEADERS = const {'Accept': 'application/vnd.pub.v2+json'};
/// An HTTP client that transforms 40* errors and socket exceptions into more
/// user-friendly error messages.
///
/// This also adds a 30-second timeout to every request. This can be configured
/// on a per-request basis by setting the 'Pub-Request-Timeout' header to the
/// desired number of milliseconds, or to "None" to disable the timeout.
class _PubHttpClient extends http.BaseClient {
final _requestStopwatches = new Map<http.BaseRequest, Stopwatch>();
http.Client _inner;
_PubHttpClient([http.Client inner])
: this._inner = inner == null ? new http.Client() : inner;
Future<http.StreamedResponse> send(http.BaseRequest request) {
_requestStopwatches[request] = new Stopwatch()..start();
request.headers[HttpHeaders.USER_AGENT] = "Dart pub ${sdk.version}";
_logRequest(request);
var timeoutLength = HTTP_TIMEOUT;
var timeoutString = request.headers.remove('Pub-Request-Timeout');
if (timeoutString == 'None') {
timeoutLength = null;
} else if (timeoutString != null) {
timeoutLength = int.parse(timeoutString);
}
var future = _inner.send(request).then((streamedResponse) {
_logResponse(streamedResponse);
var status = streamedResponse.statusCode;
// 401 responses should be handled by the OAuth2 client. It's very
// unlikely that they'll be returned by non-OAuth2 requests. We also want
// to pass along 400 responses from the token endpoint.
var tokenRequest = urisEqual(
streamedResponse.request.url, oauth2.tokenEndpoint);
if (status < 400 || status == 401 || (status == 400 && tokenRequest)) {
return streamedResponse;
}
if (status == 406 &&
request.headers['Accept'] == PUB_API_HEADERS['Accept']) {
fail("Pub ${sdk.version} is incompatible with the current version of "
"${request.url.host}.\n"
"Upgrade pub to the latest version and try again.");
}
if (status == 500 &&
    (request.url.host == "pub.dartlang.org" ||
     request.url.host == "storage.googleapis.com")) {
  fail("HTTP error 500: Internal Server Error at ${request.url}.\n"
      "This is likely a transient error. Please try again later.");
}
return http.Response.fromStream(streamedResponse).then((response) {
throw new PubHttpException(response);
});
}).catchError((error, stackTrace) {
// Work around issue 23008.
if (stackTrace == null) stackTrace = new Chain.current();
if (error is SocketException &&
error.osError != null) {
if (error.osError.errorCode == 8 ||
error.osError.errorCode == -2 ||
error.osError.errorCode == -5 ||
error.osError.errorCode == 11001 ||
error.osError.errorCode == 11004) {
fail('Could not resolve URL "${request.url.origin}".',
error, stackTrace);
} else if (error.osError.errorCode == -12276) {
fail('Unable to validate SSL certificate for '
'"${request.url.origin}".',
error, stackTrace);
}
}
throw error;
});
if (timeoutLength == null) return future;
return timeout(future, timeoutLength, request.url,
'fetching URL "${request.url}"');
}
/// Logs the fact that [request] was sent, and information about it.
void _logRequest(http.BaseRequest request) {
var requestLog = new StringBuffer();
requestLog.writeln("HTTP ${request.method} ${request.url}");
request.headers.forEach((name, value) =>
requestLog.writeln(_logField(name, value)));
if (request.method == 'POST') {
var contentTypeString = request.headers[HttpHeaders.CONTENT_TYPE];
if (contentTypeString == null) contentTypeString = '';
var contentType = ContentType.parse(contentTypeString);
if (request is http.MultipartRequest) {
requestLog.writeln();
requestLog.writeln("Body fields:");
request.fields.forEach((name, value) =>
requestLog.writeln(_logField(name, value)));
// TODO(nweiz): make MultipartRequest.files readable, and log them?
} else if (request is http.Request) {
if (contentType.value == 'application/x-www-form-urlencoded') {
requestLog.writeln();
requestLog.writeln("Body fields:");
request.bodyFields.forEach((name, value) =>
requestLog.writeln(_logField(name, value)));
} else if (contentType.value == 'text/plain' ||
contentType.value == 'application/json') {
requestLog.write(request.body);
}
}
}
log.fine(requestLog.toString().trim());
}
/// Logs the fact that [response] was received, and information about it.
void _logResponse(http.StreamedResponse response) {
// TODO(nweiz): Fork the response stream and log the response body. Be
// careful not to log OAuth2 private data, though.
var responseLog = new StringBuffer();
var request = response.request;
var stopwatch = _requestStopwatches.remove(request)..stop();
responseLog.writeln("HTTP response ${response.statusCode} "
"${response.reasonPhrase} for ${request.method} ${request.url}");
responseLog.writeln("took ${stopwatch.elapsed}");
response.headers.forEach((name, value) =>
responseLog.writeln(_logField(name, value)));
log.fine(responseLog.toString().trim());
}
/// Returns a log-formatted string for the HTTP field or header with the given
/// [name] and [value].
String _logField(String name, String value) {
if (_CENSORED_FIELDS.contains(name.toLowerCase())) {
return "$name: <censored>";
} else {
return "$name: $value";
}
}
}
/// The [_PubHttpClient] wrapped by [httpClient].
final _pubClient = new _PubHttpClient();
/// The HTTP client to use for all HTTP requests.
final httpClient = new ThrottleClient(16, _pubClient);
/// The underlying HTTP client wrapped by [httpClient].
http.Client get innerHttpClient => _pubClient._inner;
set innerHttpClient(http.Client client) => _pubClient._inner = client;
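/// Illustrative usage sketch (an assumption, not part of the original file):
/// a GET through [httpClient] that sends the pub API headers and disables the
/// default 30-second timeout; the package name in the URL is made up.
Future<http.Response> _exampleGetPackageInfo() {
  var headers = new Map<String, String>.from(PUB_API_HEADERS);
  headers['Pub-Request-Timeout'] = 'None';
  return httpClient.get(
      Uri.parse("https://pub.dartlang.org/api/packages/foo"),
      headers: headers);
}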
/// Handles a successful JSON-formatted response from pub.dartlang.org.
///
/// These responses are expected to be of the form `{"success": {"message":
/// "some message"}}`. If the format is correct, the message will be printed;
/// otherwise an error will be raised.
void handleJsonSuccess(http.Response response) {
var parsed = parseJsonResponse(response);
if (parsed['success'] is! Map ||
!parsed['success'].containsKey('message') ||
parsed['success']['message'] is! String) {
invalidServerResponse(response);
}
log.message(parsed['success']['message']);
}
/// Handles an unsuccessful JSON-formatted response from pub.dartlang.org.
///
/// These responses are expected to be of the form `{"error": {"message": "some
/// message"}}`. If the format is correct, the message will be raised as an
/// error; otherwise an [invalidServerResponse] error will be raised.
void handleJsonError(http.Response response) {
var errorMap = parseJsonResponse(response);
if (errorMap['error'] is! Map ||
!errorMap['error'].containsKey('message') ||
errorMap['error']['message'] is! String) {
invalidServerResponse(response);
}
fail(errorMap['error']['message']);
}
/// Parses a response body, assuming it's JSON-formatted.
///
/// Throws a user-friendly error if the response body is invalid JSON, or if
/// it's not a map.
Map parseJsonResponse(http.Response response) {
var value;
try {
value = JSON.decode(response.body);
} on FormatException {
invalidServerResponse(response);
}
if (value is! Map) invalidServerResponse(response);
return value;
}
/// Throws an error describing an invalid response from the server.
void invalidServerResponse(http.Response response) =>
fail('Invalid server response:\n${response.body}');
/// Exception thrown when an HTTP operation fails.
class PubHttpException implements Exception {
final http.Response response;
const PubHttpException(this.response);
String toString() => 'HTTP error ${response.statusCode}: '
'${response.reasonPhrase}';
}

File diff suppressed because it is too large.

View file

@ -1,136 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.lock_file;
import 'package:path/path.dart' as p;
import 'package:pub_semver/pub_semver.dart';
import 'package:source_span/source_span.dart';
import 'package:yaml/yaml.dart';
import 'io.dart';
import 'package.dart';
import 'source_registry.dart';
import 'utils.dart';
/// A parsed and validated `pubspec.lock` file.
class LockFile {
/// The packages this lockfile pins.
Map<String, PackageId> packages;
/// Creates a new lockfile containing [ids].
factory LockFile(List<PackageId> ids) {
var lockFile = new LockFile.empty();
for (var id in ids) {
if (!id.isRoot) lockFile.packages[id.name] = id;
}
return lockFile;
}
LockFile._(this.packages);
LockFile.empty()
: packages = <String, PackageId>{};
/// Loads a lockfile from [filePath].
factory LockFile.load(String filePath, SourceRegistry sources) {
return LockFile._parse(filePath, readTextFile(filePath), sources);
}
/// Parses a lockfile whose text is [contents].
factory LockFile.parse(String contents, SourceRegistry sources) {
return LockFile._parse(null, contents, sources);
}
/// Parses the lockfile whose text is [contents].
///
/// [filePath] is the system-native path to the lockfile on disc. It may be
/// `null`.
static LockFile _parse(String filePath, String contents,
SourceRegistry sources) {
var packages = <String, PackageId>{};
if (contents.trim() == '') return new LockFile.empty();
var sourceUrl;
if (filePath != null) sourceUrl = p.toUri(filePath);
var parsed = loadYamlNode(contents, sourceUrl: sourceUrl);
_validate(parsed is Map, 'The lockfile must be a YAML mapping.', parsed);
var packageEntries = parsed['packages'];
if (packageEntries != null) {
_validate(packageEntries is Map, 'The "packages" field must be a map.',
parsed.nodes['packages']);
packageEntries.forEach((name, spec) {
// Parse the version.
_validate(spec.containsKey('version'),
'Package $name is missing a version.', spec);
var version = new Version.parse(spec['version']);
// Parse the source.
_validate(spec.containsKey('source'),
'Package $name is missing a source.', spec);
var sourceName = spec['source'];
_validate(spec.containsKey('description'),
'Package $name is missing a description.', spec);
var description = spec['description'];
// Let the source parse the description.
var source = sources[sourceName];
try {
description = source.parseDescription(filePath, description,
fromLockFile: true);
} on FormatException catch (ex) {
throw new SourceSpanFormatException(ex.message,
spec.nodes['source'].span);
}
var id = new PackageId(name, sourceName, version, description);
// Validate the name.
_validate(name == id.name,
"Package name $name doesn't match ${id.name}.", spec);
packages[name] = id;
});
}
return new LockFile._(packages);
}
/// If [condition] is `false` throws a format error with [message] for [node].
static void _validate(bool condition, String message, YamlNode node) {
if (condition) return;
throw new SourceSpanFormatException(message, node.span);
}
/// Returns the serialized YAML text of the lock file.
///
/// [packageDir] is the containing directory of the root package, used to
/// properly serialize package descriptions.
String serialize(String packageDir, SourceRegistry sources) {
// Convert the dependencies to a simple object.
var data = {};
packages.forEach((name, package) {
var description = sources[package.source].serializeDescription(packageDir,
package.description);
data[name] = {
'version': package.version.toString(),
'source': package.source,
'description': description
};
});
return """
# Generated by pub
# See http://pub.dartlang.org/doc/glossary.html#lockfile
${yamlToString({'packages': data})}
""";
}
}
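/// Illustrative usage sketch (an assumption, not part of the original file):
/// loads the lockfile of the current package and prints the version that a
/// dependency named "args" (made up for the example) is pinned to.
void _examplePrintPinnedVersion(SourceRegistry sources) {
  var lockFile = new LockFile.load("pubspec.lock", sources);
  var id = lockFile.packages["args"];
  if (id != null) print('args is locked to version ${id.version}.');
}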

View file

@ -1,547 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Message logging.
library pub.log;
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'package:args/command_runner.dart';
import 'package:path/path.dart' as p;
import 'package:source_span/source_span.dart';
import 'package:stack_trace/stack_trace.dart';
import 'exceptions.dart';
import 'io.dart';
import 'progress.dart';
import 'transcript.dart';
import 'utils.dart';
/// The singleton instance so that we can have a nice api like:
///
/// log.json.error(...);
final json = new _JsonLogger();
/// The current logging verbosity.
Verbosity verbosity = Verbosity.NORMAL;
/// Whether or not to log entries with prejudice.
bool withPrejudice = false;
/// In cases where there's a ton of log spew, make sure we don't eat infinite
/// memory.
///
/// This can occur when the backtracking solver stumbles into a pathological
/// dependency graph. It generally will find a solution, but it may log
/// thousands and thousands of entries to get there.
const _MAX_TRANSCRIPT = 10000;
/// The list of recorded log messages. Will only be recorded if
/// [recordTranscript()] is called.
Transcript<Entry> _transcript;
/// The currently-animated progress indicator, if any.
///
/// This will also be in [_progresses].
Progress _animatedProgress;
final _cyan = getSpecial('\u001b[36m');
final _green = getSpecial('\u001b[32m');
final _magenta = getSpecial('\u001b[35m');
final _red = getSpecial('\u001b[31m');
final _yellow = getSpecial('\u001b[33m');
final _gray = getSpecial('\u001b[1;30m');
final _none = getSpecial('\u001b[0m');
final _noColor = getSpecial('\u001b[39m');
final _bold = getSpecial('\u001b[1m');
/// An enum type for defining the different logging levels a given message can
/// be associated with.
///
/// By default, [ERROR] and [WARNING] messages are printed to stderr. [MESSAGE]
/// messages are printed to stdout, and others are ignored.
class Level {
/// An error occurred and an operation could not be completed.
///
/// Usually shown to the user on stderr.
static const ERROR = const Level._("ERR ");
/// Something unexpected happened, but the program was able to continue,
/// though possibly in a degraded fashion.
static const WARNING = const Level._("WARN");
/// A message intended specifically to be shown to the user.
static const MESSAGE = const Level._("MSG ");
/// Some interaction with the external world occurred, such as a network
/// operation, process spawning, or file IO.
static const IO = const Level._("IO ");
/// Incremental output during pub's version constraint solver.
static const SOLVER = const Level._("SLVR");
/// Fine-grained and verbose additional information.
///
/// Used to provide program state context for other logs (such as what pub
/// was doing when an IO operation occurred) or just more detail for an
/// operation.
static const FINE = const Level._("FINE");
const Level._(this.name);
final String name;
String toString() => name;
}
typedef _LogFn(Entry entry);
/// An enum type to control which log levels are displayed and how they are
/// displayed.
class Verbosity {
/// Silence all logging.
static const NONE = const Verbosity._("none", const {
Level.ERROR: null,
Level.WARNING: null,
Level.MESSAGE: null,
Level.IO: null,
Level.SOLVER: null,
Level.FINE: null
});
/// Shows only errors and warnings.
static const WARNING = const Verbosity._("warning", const {
Level.ERROR: _logToStderr,
Level.WARNING: _logToStderr,
Level.MESSAGE: null,
Level.IO: null,
Level.SOLVER: null,
Level.FINE: null
});
/// The default verbosity which shows errors, warnings, and messages.
static const NORMAL = const Verbosity._("normal", const {
Level.ERROR: _logToStderr,
Level.WARNING: _logToStderr,
Level.MESSAGE: _logToStdout,
Level.IO: null,
Level.SOLVER: null,
Level.FINE: null
});
/// Shows errors, warnings, messages, and IO event logs.
static const IO = const Verbosity._("io", const {
Level.ERROR: _logToStderrWithLabel,
Level.WARNING: _logToStderrWithLabel,
Level.MESSAGE: _logToStdoutWithLabel,
Level.IO: _logToStderrWithLabel,
Level.SOLVER: null,
Level.FINE: null
});
/// Shows errors, warnings, messages, and version solver logs.
static const SOLVER = const Verbosity._("solver", const {
Level.ERROR: _logToStderr,
Level.WARNING: _logToStderr,
Level.MESSAGE: _logToStdout,
Level.IO: null,
Level.SOLVER: _logToStdout,
Level.FINE: null
});
/// Shows all logs.
static const ALL = const Verbosity._("all", const {
Level.ERROR: _logToStderrWithLabel,
Level.WARNING: _logToStderrWithLabel,
Level.MESSAGE: _logToStdoutWithLabel,
Level.IO: _logToStderrWithLabel,
Level.SOLVER: _logToStderrWithLabel,
Level.FINE: _logToStderrWithLabel
});
const Verbosity._(this.name, this._loggers);
final String name;
final Map<Level, _LogFn> _loggers;
/// Returns whether or not logs at [level] will be printed.
bool isLevelVisible(Level level) => _loggers[level] != null;
String toString() => name;
}
/// A single log entry.
class Entry {
final Level level;
final List<String> lines;
Entry(this.level, this.lines);
}
/// Logs [message] at [Level.ERROR].
///
/// If [error] is passed, it's appended to [message]. If [trace] is passed, it's
/// printed at log level fine.
void error(message, [error, StackTrace trace]) {
if (error != null) {
message = "$message: $error";
if (error is Error && trace == null) trace = error.stackTrace;
}
write(Level.ERROR, message);
if (trace != null) write(Level.FINE, new Chain.forTrace(trace));
}
/// Logs [message] at [Level.WARNING].
void warning(message) => write(Level.WARNING, message);
/// Logs [message] at [Level.MESSAGE].
void message(message) => write(Level.MESSAGE, message);
/// Logs [message] at [Level.IO].
void io(message) => write(Level.IO, message);
/// Logs [message] at [Level.SOLVER].
void solver(message) => write(Level.SOLVER, message);
/// Logs [message] at [Level.FINE].
void fine(message) => write(Level.FINE, message);
/// Logs [message] at [level].
void write(Level level, message) {
message = message.toString();
var lines = splitLines(message);
// Discard a trailing newline. This is useful since StringBuffers often end
// up with an extra newline at the end from using [writeln].
if (lines.isNotEmpty && lines.last == "") {
lines.removeLast();
}
var entry = new Entry(level, lines.map(format).toList());
var logFn = verbosity._loggers[level];
if (logFn != null) logFn(entry);
if (_transcript != null) _transcript.add(entry);
}
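/// Illustrative usage sketch (an assumption, not part of the original file):
/// typical call sites. Only levels visible under the current [verbosity] are
/// printed, but everything is still captured once [recordTranscript] has been
/// called.
void _exampleLogging() {
  verbosity = Verbosity.IO;
  message("Resolving dependencies...");
  io("GET https://pub.dartlang.org/api/packages/foo");
  fine("Only visible at Verbosity.ALL or in a recorded transcript.");
}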
final _capitalizedAnsiEscape = new RegExp(r'\u001b\[\d+(;\d+)?M');
/// Returns [string] formatted as it would be if it were logged.
String format(String string) {
if (!withPrejudice) return string;
// [toUpperCase] can corrupt terminal colorings, so fix them up using
// [replaceAllMapped].
string = string.toUpperCase().replaceAllMapped(_capitalizedAnsiEscape,
(match) => match[0].toLowerCase());
// Don't use [bold] because it's disabled under [withPrejudice].
return "$_bold$string$_none";
}
/// Logs an asynchronous IO operation.
///
/// Logs [startMessage] before the operation starts, then when [operation]
/// completes, invokes [endMessage] with the completion value and logs the
/// result of that. Returns a future that completes after the logging is done.
///
/// If [endMessage] is omitted, then logs "Begin [startMessage]" before the
/// operation and "End [startMessage]" after it.
Future ioAsync(String startMessage, Future operation,
[String endMessage(value)]) {
if (endMessage == null) {
io("Begin $startMessage.");
} else {
io(startMessage);
}
return operation.then((result) {
if (endMessage == null) {
io("End $startMessage.");
} else {
io(endMessage(result));
}
return result;
});
}
/// Logs the spawning of an [executable] process with [arguments] at [IO]
/// level.
void process(String executable, List<String> arguments,
String workingDirectory) {
io("Spawning \"$executable ${arguments.join(' ')}\" in "
"${p.absolute(workingDirectory)}");
}
/// Logs the results of running [executable].
void processResult(String executable, PubProcessResult result) {
// Log it all as one message so that it shows up as a single unit in the logs.
var buffer = new StringBuffer();
buffer.writeln("Finished $executable. Exit code ${result.exitCode}.");
dumpOutput(String name, List<String> output) {
if (output.length == 0) {
buffer.writeln("Nothing output on $name.");
} else {
buffer.writeln("$name:");
var numLines = 0;
for (var line in output) {
if (++numLines > 1000) {
buffer.writeln('[${output.length - 1000} more lines of output '
'truncated...]');
break;
}
buffer.writeln("| $line");
}
}
}
dumpOutput("stdout", result.stdout);
dumpOutput("stderr", result.stderr);
io(buffer.toString().trim());
}
/// Logs an exception.
void exception(exception, [StackTrace trace]) {
if (exception is SilentException) return;
var chain = trace == null ? new Chain.current() : new Chain.forTrace(trace);
// This is basically the top-level exception handler so that we don't
// spew a stack trace on our users.
if (exception is SourceSpanException) {
error(exception.toString(color: canUseSpecialChars));
} else {
error(getErrorMessage(exception));
}
fine("Exception type: ${exception.runtimeType}");
if (json.enabled) {
if (exception is UsageException) {
// Don't print usage info in JSON output.
json.error(exception.message);
} else {
json.error(exception);
}
}
if (!isUserFacingException(exception)) {
error(chain.terse);
} else {
fine(chain.terse);
}
if (exception is WrappedException && exception.innerError != null) {
var message = "Wrapped exception: ${exception.innerError}";
if (exception.innerChain != null) {
message = "$message\n${exception.innerChain}";
}
fine(message);
}
}
/// Enables recording of log entries.
void recordTranscript() {
_transcript = new Transcript<Entry>(_MAX_TRANSCRIPT);
}
/// If [recordTranscript()] was called, then prints the previously recorded log
/// transcript to stderr.
void dumpTranscript() {
if (_transcript == null) return;
stderr.writeln('---- Log transcript ----');
_transcript.forEach((entry) {
_printToStream(stderr, entry, showLabel: true);
}, (discarded) {
stderr.writeln('---- ($discarded discarded) ----');
});
stderr.writeln('---- End log transcript ----');
}
/// Prints [message] then displays an updated elapsed time until the future
/// returned by [callback] completes.
///
/// If anything else is logged while [callback] is running (including another
/// call to [progress]), the progress animation is cancelled, although the
/// total time will still be printed once it finishes. If [fine] is passed, the
/// progress information will only be visible at [Level.FINE].
Future progress(String message, Future callback(), {bool fine: false}) {
_stopProgress();
var progress = new Progress(message, fine: fine);
_animatedProgress = progress;
return callback().whenComplete(progress.stop);
}
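// A minimal usage sketch, assuming a hypothetical `fetchPackage` that returns
// a Future: the animation runs until the returned future completes, at which
// point the total time is logged.
//
//     progress("Downloading some_package 1.2.3", () {
//       return fetchPackage("some_package", "1.2.3");
//     });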
/// Stops animating the running progress indicator, if currently running.
void _stopProgress() {
if (_animatedProgress != null) _animatedProgress.stopAnimating();
_animatedProgress = null;
}
/// The number of outstanding calls to [muteProgress] that have not been unmuted
/// yet.
int _numMutes = 0;
/// Whether progress animation should be muted or not.
bool get isMuted => _numMutes > 0;
/// Stops animating any ongoing progress.
///
/// This is called before spawning Git since Git sometimes writes directly to
/// the terminal to ask for login credentials, which would then get overwritten
/// by the progress animation.
///
/// Each call to this must be paired with a call to [unmuteProgress].
void muteProgress() {
_numMutes++;
}
/// Resumes animating any ongoing progress once all calls to [muteProgress]
/// have been paired with a matching call to [unmuteProgress].
void unmuteProgress() {
assert(_numMutes > 0);
_numMutes--;
}
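// Sketch of the intended pairing described above (illustrative, not copied
// from pub's git support): mute before handing the terminal to Git, and
// unmute once the process is done.
//
//     muteProgress();
//     return git.run(["fetch"]).whenComplete(unmuteProgress);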
/// Wraps [text] in the ANSI escape codes to make it bold when on a platform
/// that supports that.
///
/// Use this to highlight the most important piece of a long chunk of text.
///
/// This is disabled under [withPrejudice] since all text is bold with
/// prejudice.
String bold(text) => withPrejudice ? text : "$_bold$text$_none";
/// Wraps [text] in the ANSI escape codes to make it gray when on a platform
/// that supports that.
///
/// Use this for text that's less important than the text around it.
///
/// The gray marker also enables bold, so it needs to be handled specially with
/// [withPrejudice] to avoid disabling bolding entirely.
String gray(text) =>
withPrejudice ? "$_gray$text$_noColor" : "$_gray$text$_none";
/// Wraps [text] in the ANSI escape codes to color it cyan when on a platform
/// that supports that.
///
/// Use this to highlight something interesting but neither good nor bad.
String cyan(text) => "$_cyan$text$_noColor";
/// Wraps [text] in the ANSI escape codes to color it green when on a platform
/// that supports that.
///
/// Use this to highlight something successful or otherwise positive.
String green(text) => "$_green$text$_noColor";
/// Wraps [text] in the ANSI escape codes to color it magenta when on a
/// platform that supports that.
///
/// Use this to highlight something risky that the user should be aware of but
/// may intend to do.
String magenta(text) => "$_magenta$text$_noColor";
/// Wraps [text] in the ANSI escape codes to color it red when on a platform
/// that supports that.
///
/// Use this to highlight unequivocal errors, problems, or failures.
String red(text) => "$_red$text$_noColor";
/// Wraps [text] in the ANSI escape codes to color it yellow when on a platform
/// that supports that.
///
/// Use this to highlight warnings, cautions or other things that are bad but
/// do not prevent the user's goal from being reached.
String yellow(text) => "$_yellow$text$_noColor";
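// Illustrative only: the helpers above compose by simple string interpolation,
// so a status line might be built like this (the wording is hypothetical).
//
//     message("${green('Downloaded')} ${bold(name)} ${gray('(cached)')}");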
/// Log function that prints the message to stdout.
void _logToStdout(Entry entry) {
_logToStream(stdout, entry, showLabel: false);
}
/// Log function that prints the message to stdout with the level name.
void _logToStdoutWithLabel(Entry entry) {
_logToStream(stdout, entry, showLabel: true);
}
/// Log function that prints the message to stderr.
void _logToStderr(Entry entry) {
_logToStream(stderr, entry, showLabel: false);
}
/// Log function that prints the message to stderr with the level name.
void _logToStderrWithLabel(Entry entry) {
_logToStream(stderr, entry, showLabel: true);
}
void _logToStream(IOSink sink, Entry entry, {bool showLabel}) {
if (json.enabled) return;
_printToStream(sink, entry, showLabel: showLabel);
}
void _printToStream(IOSink sink, Entry entry, {bool showLabel}) {
_stopProgress();
bool firstLine = true;
for (var line in entry.lines) {
if (showLabel) {
if (firstLine) {
sink.write('${entry.level.name}: ');
} else {
sink.write(' | ');
}
}
sink.writeln(line);
firstLine = false;
}
}
/// Namespace-like class for collecting the methods for JSON logging.
class _JsonLogger {
/// Whether logging should use machine-friendly JSON output or human-friendly
/// text.
///
/// If set to `true`, then no regular logging is printed. Logged messages
/// will still be recorded and displayed if the transcript is printed.
bool enabled = false;
/// Creates an error JSON object for [error] and prints it if JSON output
/// is enabled.
///
/// Always prints to stdout.
void error(error, [stackTrace]) {
var errorJson = {"error": error.toString()};
if (stackTrace == null && error is Error) stackTrace = error.stackTrace;
if (stackTrace != null) {
errorJson["stackTrace"] = new Chain.forTrace(stackTrace).toString();
}
// If the error came from a file, include the path.
if (error is SourceSpanException && error.span.sourceUrl != null) {
errorJson["path"] = p.fromUri(error.span.sourceUrl);
}
if (error is FileException) {
errorJson["path"] = error.path;
}
this.message(errorJson);
}
/// Encodes [message] to JSON and prints it if JSON output is enabled.
void message(message) {
if (!enabled) return;
print(JSON.encode(message));
}
}
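// For reference, a sketch of the JSON emitted by [_JsonLogger.error] for a
// [FileException] when JSON output is enabled (the message and path are made
// up, not captured from a real run):
//
//     {"error":"Could not find a file named \"pubspec.yaml\" in \"/app\".",
//      "path":"/app/pubspec.yaml"}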

View file

@ -1,217 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.oauth2;
import 'dart:async';
import 'dart:io';
import 'package:oauth2/oauth2.dart';
import 'package:path/path.dart' as path;
import 'package:shelf/shelf.dart' as shelf;
import 'package:shelf/shelf_io.dart' as shelf_io;
import 'http.dart';
import 'io.dart';
import 'log.dart' as log;
import 'system_cache.dart';
import 'utils.dart';
export 'package:oauth2/oauth2.dart';
/// The pub client's OAuth2 identifier.
final _identifier = '818368855108-8grd2eg9tj9f38os6f1urbcvsq399u8n.apps.'
'googleusercontent.com';
/// The pub client's OAuth2 secret.
///
/// This isn't actually meant to be kept a secret.
final _secret = 'SWeqj8seoJW0w7_CpEPFLX0K';
/// The URL to which the user will be directed to authorize the pub client to
/// get an OAuth2 access token.
///
/// `access_type=offline` and `approval_prompt=force` ensure that we always get
/// a refresh token from the server. See the [Google OAuth2 documentation][].
///
/// [Google OAuth2 documentation]: https://developers.google.com/accounts/docs/OAuth2WebServer#offline
final authorizationEndpoint = Uri.parse(
'https://accounts.google.com/o/oauth2/auth?access_type=offline'
'&approval_prompt=force');
/// The URL from which the pub client will request an access token once it's
/// been authorized by the user.
///
/// This can be controlled externally by setting the `_PUB_TEST_TOKEN_ENDPOINT`
/// environment variable.
Uri get tokenEndpoint {
var tokenEndpoint = Platform.environment['_PUB_TEST_TOKEN_ENDPOINT'];
if (tokenEndpoint != null) {
return Uri.parse(tokenEndpoint);
} else {
return _tokenEndpoint;
}
}
final _tokenEndpoint = Uri.parse('https://accounts.google.com/o/oauth2/token');
/// The OAuth2 scopes that the pub client needs.
///
/// Currently the client only needs the user's email so that the server can
/// verify their identity.
final _scopes = ['https://www.googleapis.com/auth/userinfo.email'];
/// An in-memory cache of the user's OAuth2 credentials.
///
/// This should always be the same as the credentials file stored in the system
/// cache.
Credentials _credentials;
/// Delete the cached credentials, if they exist.
void clearCredentials(SystemCache cache) {
_credentials = null;
var credentialsFile = _credentialsFile(cache);
if (entryExists(credentialsFile)) deleteEntry(credentialsFile);
}
/// Asynchronously passes an OAuth2 [Client] to [fn], and closes the client when
/// the [Future] returned by [fn] completes.
///
/// This takes care of loading and saving the client's credentials, as well as
/// prompting the user for their authorization. It will also re-authorize and
/// re-run [fn] if a recoverable authorization error is detected.
Future withClient(SystemCache cache, Future fn(Client client)) {
return _getClient(cache).then((client) {
return fn(client).whenComplete(() {
client.close();
// Be sure to save the credentials even when an error happens.
_saveCredentials(cache, client.credentials);
});
}).catchError((error) {
if (error is ExpirationException) {
log.error("Pub's authorization to upload packages has expired and "
"can't be automatically refreshed.");
return withClient(cache, fn);
} else if (error is AuthorizationException) {
var message = "OAuth2 authorization failed";
if (error.description != null) {
message = "$message (${error.description})";
}
log.error("$message.");
clearCredentials(cache);
return withClient(cache, fn);
} else {
throw error;
}
});
}
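// A hedged usage sketch: a command that talks to pub.dartlang.org might wrap
// its work in [withClient] so credentials are loaded, refreshed, and saved
// automatically. `uploadPackage` is a hypothetical helper, not part of pub.
//
//     withClient(cache, (client) {
//       return uploadPackage(client, packageArchive);
//     });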
/// Gets a new OAuth2 client.
///
/// If saved credentials are available, those are used; otherwise, the user is
/// prompted to authorize the pub client.
Future<Client> _getClient(SystemCache cache) {
return new Future.sync(() {
var credentials = _loadCredentials(cache);
if (credentials == null) return _authorize();
var client = new Client(_identifier, _secret, credentials,
httpClient: httpClient);
_saveCredentials(cache, client.credentials);
return client;
});
}
/// Loads the user's OAuth2 credentials from the in-memory cache or the
/// filesystem if possible.
///
/// If the credentials can't be loaded for any reason, this returns `null`.
Credentials _loadCredentials(SystemCache cache) {
log.fine('Loading OAuth2 credentials.');
try {
if (_credentials != null) return _credentials;
var path = _credentialsFile(cache);
if (!fileExists(path)) return null;
var credentials = new Credentials.fromJson(readTextFile(path));
if (credentials.isExpired && !credentials.canRefresh) {
log.error("Pub's authorization to upload packages has expired and "
"can't be automatically refreshed.");
return null; // null means re-authorize.
}
return credentials;
} catch (e) {
log.error('Warning: could not load the saved OAuth2 credentials: $e\n'
'Obtaining new credentials...');
return null; // null means re-authorize.
}
}
/// Save the user's OAuth2 credentials to the in-memory cache and the
/// filesystem.
void _saveCredentials(SystemCache cache, Credentials credentials) {
log.fine('Saving OAuth2 credentials.');
_credentials = credentials;
var credentialsPath = _credentialsFile(cache);
ensureDir(path.dirname(credentialsPath));
writeTextFile(credentialsPath, credentials.toJson(), dontLogContents: true);
}
/// The path to the file in which the user's OAuth2 credentials are stored.
String _credentialsFile(SystemCache cache) =>
path.join(cache.rootDir, 'credentials.json');
/// Gets the user to authorize pub as a client of pub.dartlang.org via oauth2.
///
/// Returns a Future that completes to a fully-authorized [Client].
Future<Client> _authorize() {
var grant = new AuthorizationCodeGrant(
_identifier,
_secret,
authorizationEndpoint,
tokenEndpoint,
httpClient: httpClient);
// Spin up a one-shot HTTP server to receive the authorization code from the
// Google OAuth2 server via redirect. This server will close itself as soon as
// the code is received.
var completer = new Completer();
bindServer('localhost', 0).then((server) {
shelf_io.serveRequests(server, (request) {
if (request.url.path != "/") {
return new shelf.Response.notFound('Invalid URI.');
}
log.message('Authorization received, processing...');
var queryString = request.url.query;
if (queryString == null) queryString = '';
// Closing the server here is safe, since it will wait until the response
// is sent to actually shut down.
server.close();
chainToCompleter(grant.handleAuthorizationResponse(queryToMap(queryString)),
completer);
return new shelf.Response.found('http://pub.dartlang.org/authorized');
});
var authUrl = grant.getAuthorizationUrl(
Uri.parse('http://localhost:${server.port}'), scopes: _scopes);
log.message(
'Pub needs your authorization to upload packages on your behalf.\n'
'In a web browser, go to $authUrl\n'
'Then click "Allow access".\n\n'
'Waiting for your authorization...');
});
return completer.future.then((client) {
log.message('Successfully authorized.\n');
return client;
});
}

View file

@ -1,422 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.package;
import 'dart:io';
import 'package:barback/barback.dart';
import 'package:path/path.dart' as p;
import 'package:pub_semver/pub_semver.dart';
import 'barback/transformer_id.dart';
import 'io.dart';
import 'git.dart' as git;
import 'pubspec.dart';
import 'source_registry.dart';
import 'utils.dart';
final _README_REGEXP = new RegExp(r"^README($|\.)", caseSensitive: false);
/// A named, versioned, unit of code and resource reuse.
class Package {
/// Compares [a] and [b], ordering them by name and then by version number.
///
/// This is normally used as a [Comparator] to pass to sort. This does not
/// take a package's description or root directory into account, so multiple
/// distinct packages may order the same.
static int orderByNameAndVersion(Package a, Package b) {
var name = a.name.compareTo(b.name);
if (name != 0) return name;
return a.version.compareTo(b.version);
}
/// The path to the directory containing the package.
final String dir;
/// The name of the package.
String get name {
if (pubspec.name != null) return pubspec.name;
if (dir != null) return p.basename(dir);
return null;
}
/// The package's version.
Version get version => pubspec.version;
/// The parsed pubspec associated with this package.
final Pubspec pubspec;
/// The immediate dependencies this package specifies in its pubspec.
List<PackageDep> get dependencies => pubspec.dependencies;
/// The immediate dev dependencies this package specifies in its pubspec.
List<PackageDep> get devDependencies => pubspec.devDependencies;
/// The dependency overrides this package specifies in its pubspec.
List<PackageDep> get dependencyOverrides => pubspec.dependencyOverrides;
/// All immediate dependencies this package specifies.
///
/// This includes regular, dev dependencies, and overrides.
Set<PackageDep> get immediateDependencies {
var deps = {};
addToMap(dep) {
deps[dep.name] = dep;
}
dependencies.forEach(addToMap);
devDependencies.forEach(addToMap);
// Make sure to add these last so they replace normal dependencies.
dependencyOverrides.forEach(addToMap);
return deps.values.toSet();
}
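// Worked example of the ordering above: if "foo" appears in both
// `dependencies` and `dependency_overrides`, the override is added to the map
// last, so it is the one that ends up in the returned set.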
/// Returns a list of asset ids for all Dart executables in this package's bin
/// directory.
List<AssetId> get executableIds {
return ordered(listFiles(beneath: "bin", recursive: false))
.where((executable) => p.extension(executable) == '.dart')
.map((executable) {
return new AssetId(
name, p.toUri(p.relative(executable, from: dir)).toString());
}).toList();
}
/// Returns the path to the README file at the root of the entrypoint, or null
/// if no README file is found.
///
/// If multiple READMEs are found, this uses the same conventions as
/// pub.dartlang.org for choosing the primary one: the README with the fewest
/// extensions that is lexically ordered first is chosen.
String get readmePath {
var readmes = listFiles(recursive: false).map(p.basename).
where((entry) => entry.contains(_README_REGEXP));
if (readmes.isEmpty) return null;
return p.join(dir, readmes.reduce((readme1, readme2) {
var extensions1 = ".".allMatches(readme1).length;
var extensions2 = ".".allMatches(readme2).length;
var comparison = extensions1.compareTo(extensions2);
if (comparison == 0) comparison = readme1.compareTo(readme2);
return (comparison <= 0) ? readme1 : readme2;
}));
}
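// Worked example of the tie-breaking above: among README, README.md, and
// README.txt, plain README wins (fewest extensions); with only README.md and
// README.txt, README.md wins because it sorts first lexically.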
/// Loads the package whose root directory is [packageDir].
///
/// [name] is the expected name of that package (e.g. the name given in the
/// dependency), or `null` if the package being loaded is the entrypoint
/// package.
Package.load(String name, String packageDir, SourceRegistry sources)
: dir = packageDir,
pubspec = new Pubspec.load(packageDir, sources, expectedName: name);
/// Constructs a package with the given pubspec.
///
/// The package will have no directory associated with it.
Package.inMemory(this.pubspec)
: dir = null;
/// Creates a package with [pubspec] located at [dir].
Package(this.pubspec, this.dir);
/// Given a relative path within this package, returns its absolute path.
///
/// This is similar to `p.join(dir, part1, ...)`, except that subclasses may
/// override it to report that certain paths exist elsewhere than within
/// [dir]. For example, a [CachedPackage]'s `lib` directory is in the
/// `.pub/deps` directory.
String path(String part1, [String part2, String part3, String part4,
String part5, String part6, String part7]) {
if (dir == null) {
throw new StateError("Package $name is in-memory and doesn't have paths "
"on disk.");
}
return p.join(dir, part1, part2, part3, part4, part5, part6, part7);
}
/// Given an absolute path within this package (such as that returned by
/// [path] or [listFiles]), returns it relative to the package root.
String relative(String path) {
if (dir == null) {
throw new StateError("Package $name is in-memory and doesn't have paths "
"on disk.");
}
return p.relative(path, from: dir);
}
/// Returns the path to the library identified by [id] within [this].
String transformerPath(TransformerId id) {
if (id.package != name) {
throw new ArgumentError("Transformer $id isn't in package $name.");
}
if (id.path != null) return path('lib', p.fromUri('${id.path}.dart'));
var transformerPath = path('lib/transformer.dart');
if (fileExists(transformerPath)) return transformerPath;
return path('lib/$name.dart');
}
/// The basenames of files that are included in [list] despite being hidden.
static final _WHITELISTED_FILES = const ['.htaccess'];
/// A set of patterns that match paths to blacklisted files.
static final _blacklistedFiles = createFileFilter(['pubspec.lock']);
/// A set of patterns that match paths to blacklisted directories.
static final _blacklistedDirs = createDirectoryFilter(['packages']);
/// Returns a list of files that are considered to be part of this package.
///
/// If this is a Git repository, this will respect .gitignore; otherwise, it
/// will return all non-hidden, non-blacklisted files.
///
/// If [beneath] is passed, this will only return files beneath that path,
/// which is expected to be relative to the package's root directory. If
/// [recursive] is true, this will return all files beneath that path;
/// otherwise, it will only return files one level beneath it.
///
/// If [useGitIgnore] is passed, this will take the .gitignore rules into
/// account if the package's root directory is a Git repository.
///
/// Note that the returned paths won't always be beneath [dir]. To safely
/// convert them to paths relative to the package root, use [relative].
List<String> listFiles({String beneath, bool recursive: true,
bool useGitIgnore: false}) {
if (beneath == null) {
beneath = dir;
} else {
beneath = p.join(dir, beneath);
}
if (!dirExists(beneath)) return [];
// This is used in some performance-sensitive paths and can list many, many
// files. As such, it leans more heavily towards optimization than towards
// readability, compared to most code in pub. In particular, it avoids using the
// path package, since re-parsing a path is very expensive relative to
// string operations.
var files;
if (useGitIgnore && git.isInstalled && dirExists(path('.git'))) {
// Later versions of git do not allow a path for ls-files that appears to
// be outside of the repo, so make sure we give it a relative path.
var relativeBeneath = p.relative(beneath, from: dir);
// List all files that aren't gitignored, including those not checked in
// to Git.
files = git.runSync(
["ls-files", "--cached", "--others", "--exclude-standard",
relativeBeneath],
workingDir: dir);
// If we're not listing recursively, strip out paths that contain
// separators. Since git always prints forward slashes, we always detect
// them.
if (!recursive) {
// If we're listing a subdirectory, we only want to look for slashes
// after the subdirectory prefix.
var relativeStart = relativeBeneath == '.' ? 0 :
relativeBeneath.length + 1;
files = files.where((file) => !file.contains('/', relativeStart));
}
// Git always prints files relative to the repository root, but we want
// them relative to the working directory. It also prints forward slashes
// on Windows which we normalize away for easier testing.
files = files.map((file) {
if (Platform.operatingSystem != 'windows') return "$dir/$file";
return "$dir\\${file.replaceAll("/", "\\")}";
}).where((file) {
// Filter out broken symlinks, since git doesn't do so automatically.
return fileExists(file);
});
} else {
files = listDir(beneath, recursive: recursive, includeDirs: false,
whitelist: _WHITELISTED_FILES);
}
return files.where((file) {
// Using substring here is generally problematic in cases where dir has
// one or more trailing slashes. If you do listDir("foo"), you'll get back
// paths like "foo/bar". If you do listDir("foo/"), you'll get "foo/bar"
// (note the trailing slash was dropped. If you do listDir("foo//"),
// you'll get "foo//bar".
//
// This means if you strip off the prefix, the resulting string may have a
// leading separator (if the prefix did not have a trailing one) or it may
// not. However, since we are only using the results of that to call
// contains() on, the leading separator is harmless.
assert(file.startsWith(beneath));
file = file.substring(beneath.length);
return !_blacklistedFiles.any(file.endsWith) &&
!_blacklistedDirs.any(file.contains);
}).toList();
}
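// Usage sketch (hypothetical call sites): list everything that would ship in
// a published package while honoring .gitignore, versus only the top-level
// scripts in bin/.
//
//     var allFiles = listFiles(useGitIgnore: true);
//     var binScripts = listFiles(beneath: "bin", recursive: false);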
/// Returns a debug string for the package.
String toString() => '$name $version ($dir)';
}
/// This is the private base class of [PackageRef], [PackageID], and
/// [PackageDep].
///
/// It contains functionality and state that those classes share but is private
/// so that from outside of this library, there is no type relationship between
/// those three types.
class _PackageName {
_PackageName(this.name, this.source, this.description)
: isMagic = false;
_PackageName.magic(this.name)
: source = null,
description = null,
isMagic = true;
/// The name of the package being identified.
final String name;
/// The name of the [Source] used to look up this package given its
/// [description].
///
/// If this is a root package, this will be `null`.
final String source;
/// The metadata used by the package's [source] to identify and locate it.
///
/// It contains whatever [Source]-specific data it needs to be able to get
/// the package. For example, the description of a git sourced package might
/// by the URL "git://github.com/dart/uilib.git".
final description;
/// Whether this is a name for a magic package.
///
/// Magic packages are unversioned pub constructs that have special semantics.
/// For example, a magic package named "pub itself" is inserted into the
/// dependency graph when any package depends on barback. This package has
/// dependencies that represent the versions of barback and related packages
/// that pub is compatible with.
final bool isMagic;
/// Whether this package is the root package.
bool get isRoot => source == null && !isMagic;
String toString() {
if (isRoot) return "$name (root)";
if (isMagic) return name;
return "$name from $source";
}
/// Returns a [PackageRef] with this one's [name], [source], and
/// [description].
PackageRef toRef() => isMagic
? new PackageRef.magic(name)
: new PackageRef(name, source, description);
/// Returns a [PackageId] for this package with the given concrete version.
PackageId atVersion(Version version) =>
new PackageId(name, source, version, description);
/// Returns a [PackageDep] for this package with the given version constraint.
PackageDep withConstraint(VersionConstraint constraint) =>
new PackageDep(name, source, constraint, description);
}
/// A reference to a [Package], but not any particular version(s) of it.
class PackageRef extends _PackageName {
PackageRef(String name, String source, description)
: super(name, source, description);
/// Creates a reference to a magic package (see [isMagic]).
PackageRef.magic(String name)
: super.magic(name);
int get hashCode => name.hashCode ^ source.hashCode;
bool operator ==(other) {
// TODO(rnystrom): We're assuming here that we don't need to delve into the
// description.
return other is PackageRef &&
other.name == name &&
other.source == source;
}
}
/// A reference to a specific version of a package.
///
/// A package ID contains enough information to correctly get the package.
///
/// Note that it's possible for multiple distinct package IDs to point to
/// different packages that have identical contents. For example, the same
/// package may be available from multiple sources. As far as Pub is concerned,
/// those packages are different.
class PackageId extends _PackageName {
/// The package's version.
final Version version;
PackageId(String name, String source, this.version, description)
: super(name, source, description);
/// Creates an ID for a magic package (see [isMagic]).
PackageId.magic(String name)
: super.magic(name),
version = Version.none;
/// Creates an ID for the given root package.
PackageId.root(Package package)
: version = package.version,
super(package.name, null, package.name);
int get hashCode => name.hashCode ^ source.hashCode ^ version.hashCode;
bool operator ==(other) {
// TODO(rnystrom): We're assuming here that we don't need to delve into the
// description.
return other is PackageId &&
other.name == name &&
other.source == source &&
other.version == version;
}
String toString() {
if (isRoot) return "$name $version (root)";
if (isMagic) return name;
return "$name $version from $source";
}
}
/// A reference to a constrained range of versions of one package.
class PackageDep extends _PackageName {
/// The allowed package versions.
final VersionConstraint constraint;
PackageDep(String name, String source, this.constraint, description)
: super(name, source, description);
PackageDep.magic(String name)
: super.magic(name),
constraint = Version.none;
String toString() {
if (isRoot) return "$name $constraint (root)";
if (isMagic) return name;
return "$name $constraint from $source ($description)";
}
int get hashCode => name.hashCode ^ source.hashCode;
bool operator ==(other) {
// TODO(rnystrom): We're assuming here that we don't need to delve into the
// description.
return other is PackageDep &&
other.name == name &&
other.source == source &&
other.constraint == constraint;
}
}

View file

@ -1,116 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.package_graph;
import 'barback/transformer_cache.dart';
import 'entrypoint.dart';
import 'lock_file.dart';
import 'package.dart';
import 'source/cached.dart';
import 'utils.dart';
/// A holistic view of the entire transitive dependency graph for an entrypoint.
///
/// A package graph can be loaded using [Entrypoint.loadPackageGraph].
class PackageGraph {
/// The entrypoint.
final Entrypoint entrypoint;
/// The entrypoint's lockfile.
///
/// This describes the sources and resolved descriptions of everything in
/// [packages].
final LockFile lockFile;
/// The transitive dependencies of the entrypoint (including itself).
///
/// This may not include all transitive dependencies of the entrypoint if the
/// creator of the package graph knows that only a subset of the packages is
/// relevant in the current context.
final Map<String, Package> packages;
/// A map of transitive dependencies for each package.
Map<String, Set<Package>> _transitiveDependencies;
/// The transformer cache, if it's been loaded.
TransformerCache _transformerCache;
PackageGraph(this.entrypoint, this.lockFile, this.packages);
/// Loads the transformer cache for this graph.
///
/// This may only be called if [entrypoint] represents a physical package.
/// This may modify the cache.
TransformerCache loadTransformerCache() {
if (_transformerCache == null) {
if (entrypoint.root.dir == null) {
throw new StateError("Can't load the transformer cache for virtual "
"entrypoint ${entrypoint.root.name}.");
}
_transformerCache = new TransformerCache.load(this);
}
return _transformerCache;
}
/// Returns all transitive dependencies of [package].
///
/// For the entrypoint this returns all packages in [packages], which includes
/// dev dependencies and overrides. For any other package, it ignores dev and
/// override dependencies.
Set<Package> transitiveDependencies(String package) {
if (package == entrypoint.root.name) return packages.values.toSet();
if (_transitiveDependencies == null) {
var closure = transitiveClosure(mapMap(packages,
value: (_, package) => package.dependencies.map((dep) => dep.name)));
_transitiveDependencies = mapMap(closure,
value: (_, names) => names.map((name) => packages[name]).toSet());
}
return _transitiveDependencies[package];
}
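// Illustrative call, assuming a PackageGraph named `graph` obtained from
// [Entrypoint.loadPackageGraph]: collect the names of everything that
// "barback" pulls in.
//
//     var names = graph.transitiveDependencies("barback")
//         .map((package) => package.name)
//         .toSet();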
/// Returns whether [package] is mutable.
///
/// A package is considered to be mutable if it or any of its dependencies
/// don't come from a cached source, since the user can change its contents
/// without modifying the pub cache. Information generated from mutable
/// packages is generally not safe to cache, since it may change frequently.
bool isPackageMutable(String package) {
var id = lockFile.packages[package];
if (id == null) return true;
var source = entrypoint.cache.sources[id.source];
if (source is! CachedSource) return true;
return transitiveDependencies(package).any((dep) {
var depId = lockFile.packages[dep.name];
// The entrypoint package doesn't have a lockfile entry. It's always
// mutable.
if (depId == null) return true;
return entrypoint.cache.sources[depId.source] is! CachedSource;
});
}
/// Returns whether [package] is static.
///
/// A package is considered to be static if it's not transformed and it came
/// from a cached source. Static packages don't need to be fully processed by
/// barback.
///
/// Note that a static package isn't the same as an immutable package (see
/// [isPackageMutable]).
bool isPackageStatic(String package) {
var id = lockFile.packages[package];
if (id == null) return false;
var source = entrypoint.cache.sources[id.source];
if (source is! CachedSource) return false;
return packages[package].pubspec.transformers.isEmpty;
}
}

View file

@ -1,145 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.preprocess;
import 'package:pub_semver/pub_semver.dart';
import 'package:string_scanner/string_scanner.dart';
import 'utils.dart';
/// Runs a simple preprocessor over [input] to remove sections that are
/// incompatible with the available barback version.
///
/// [versions] are the available versions of each installed package, and
/// [sourceUrl] is a [String] or [Uri] indicating where [input] came from. It's
/// used for error reporting.
///
/// For the most part, the preprocessor leaves text in the source document
/// alone. However, it handles two types of lines specially. Lines that begin
/// with `//>` are uncommented by the preprocessor, and lines that begin with
/// `//#` are operators.
///
/// The preprocessor currently supports one top-level operator, "if":
///
/// //# if barback >=0.14.1
/// ...
/// //# else
/// ...
/// //# end
///
/// "if" can check against any package installed in the current package. It can
/// check the version of the package, as above, or (if the version range is
/// omitted) whether the package exists at all. If the condition is true,
/// everything within the first block is included in the output and everything
/// within the second block is removed; otherwise, the first block is removed
/// and the second block is included. The `else` block is optional.
///
/// It's important that the preprocessor syntax also be valid Dart code, because
/// pub loads the source files before preprocessing and runs them against the
/// version of barback that was compiled into pub. This is why the `//>` syntax
/// exists: so that code can be hidden from the running pub process but still be
/// visible to the barback isolate. For example:
///
/// //# if barback >= 0.14.1
/// ClassMirror get aggregateClass => reflectClass(AggregateTransformer);
/// //# else
/// //> ClassMirror get aggregateClass => null;
/// //# end
String preprocess(String input, Map<String, Version> versions, sourceUrl) {
// Short-circuit if there are no preprocessor directives in the file.
if (!input.contains(new RegExp(r"^//[>#]", multiLine: true))) return input;
return new _Preprocessor(input, versions, sourceUrl).run();
}
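// A hedged usage sketch: a caller passes the installed package versions so the
// `//# if` conditions can be evaluated. `transformerPath` and `barbackVersion`
// are hypothetical values.
//
//     var processed = preprocess(readTextFile(transformerPath),
//         {"barback": barbackVersion}, transformerPath);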
/// The preprocessor class.
class _Preprocessor {
/// The scanner over the input string.
final StringScanner _scanner;
final Map<String, Version> _versions;
/// The buffer to which the output is written.
final _buffer = new StringBuffer();
_Preprocessor(String input, this._versions, sourceUrl)
: _scanner = new StringScanner(input, sourceUrl: sourceUrl);
/// Run the preprocessor and return the processed output.
String run() {
while (!_scanner.isDone) {
if (_scanner.scan(new RegExp(r"//#[ \t]*"))) {
_if();
} else {
_emitText();
}
}
_scanner.expectDone();
return _buffer.toString();
}
/// Emit lines of the input document directly until an operator is
/// encountered.
void _emitText() {
while (!_scanner.isDone && !_scanner.matches("//#")) {
if (_scanner.scan("//>")) {
if (!_scanner.matches("\n")) _scanner.expect(" ");
}
_scanner.scan(new RegExp(r"[^\n]*\n?"));
_buffer.write(_scanner.lastMatch[0]);
}
}
/// Move through lines of the input document without emitting them until an
/// operator is encountered.
void _ignoreText() {
while (!_scanner.isDone && !_scanner.matches("//#")) {
_scanner.scan(new RegExp(r"[^\n]*\n?"));
}
}
/// Handle an `if` operator.
void _if() {
_scanner.expect(new RegExp(r"if[ \t]+"), name: "if statement");
_scanner.expect(identifierRegExp, name: "package name");
var package = _scanner.lastMatch[0];
_scanner.scan(new RegExp(r"[ \t]*"));
var constraint = VersionConstraint.any;
if (_scanner.scan(new RegExp(r"[^\n]+"))) {
try {
constraint = new VersionConstraint.parse(_scanner.lastMatch[0]);
} on FormatException catch (error) {
_scanner.error("Invalid version constraint: ${error.message}");
}
}
_scanner.expect("\n");
var allowed = _versions.containsKey(package) &&
constraint.allows(_versions[package]);
if (allowed) {
_emitText();
} else {
_ignoreText();
}
_scanner.expect("//#");
_scanner.scan(new RegExp(r"[ \t]*"));
if (_scanner.scan("else")) {
_scanner.expect("\n");
if (allowed) {
_ignoreText();
} else {
_emitText();
}
_scanner.expect("//#");
_scanner.scan(new RegExp(r"[ \t]*"));
}
_scanner.expect("end");
if (!_scanner.isDone) _scanner.expect("\n");
}
}

View file

@ -1,99 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.progress;
import 'dart:async';
import 'dart:io';
import 'log.dart' as log;
import 'utils.dart';
/// A live-updating progress indicator for long-running log entries.
class Progress {
/// The timer used to write "..." during a progress log.
Timer _timer;
/// The [Stopwatch] used to track how long a progress log has been running.
final _stopwatch = new Stopwatch();
/// The progress message as it's being incrementally appended.
///
/// When the progress is done, a single entry will be added to the log for it.
final String _message;
/// Gets the current progress time as a parenthesized, formatted string.
String get _time => "(${niceDuration(_stopwatch.elapsed)})";
/// Creates a new progress indicator.
///
/// If [fine] is passed, this will log progress messages on [log.Level.FINE]
/// as opposed to [log.Level.MESSAGE].
Progress(this._message, {bool fine: false}) {
_stopwatch.start();
var level = fine ? log.Level.FINE : log.Level.MESSAGE;
// The animation is only shown when it would be meaningful to a human.
// That means we're writing a visible message to a TTY at normal log levels
// with non-JSON output.
if (stdioType(stdout) != StdioType.TERMINAL ||
!log.verbosity.isLevelVisible(level) ||
log.json.enabled || fine ||
log.verbosity.isLevelVisible(log.Level.FINE)) {
// Not animating, so just log the start and wait until the task is
// completed.
log.write(level, "$_message...");
return;
}
_timer = new Timer.periodic(new Duration(milliseconds: 100), (_) {
_update();
});
_update();
}
/// Stops the progress indicator.
void stop() {
_stopwatch.stop();
// Always log the final time as [log.fine] because for the most part normal
// users don't care about the precise time information beyond what's shown
// in the animation.
log.fine("$_message finished $_time.");
// If we were animating, print one final update to show the user the final
// time.
if (_timer == null) return;
_timer.cancel();
_timer = null;
_update();
stdout.writeln();
}
/// Stop animating the progress indicator.
///
/// This will continue running the stopwatch so that the full time can be
/// logged in [stop].
void stopAnimating() {
if (_timer == null) return;
// Print a final message without a time indicator so that we don't leave a
// misleading half-complete time indicator on the console.
stdout.writeln(log.format("\r$_message..."));
_timer.cancel();
_timer = null;
}
/// Refreshes the progress line.
void _update() {
if (log.isMuted) return;
stdout.write(log.format("\r$_message... "));
// Show the time only once it gets noticeably long.
if (_stopwatch.elapsed.inSeconds > 0) stdout.write("${log.gray(_time)} ");
}
}

View file

@ -1,583 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.pubspec;
import 'package:path/path.dart' as path;
import 'package:pub_semver/pub_semver.dart';
import 'package:source_span/source_span.dart';
import 'package:yaml/yaml.dart';
import 'barback/transformer_config.dart';
import 'exceptions.dart';
import 'io.dart';
import 'package.dart';
import 'source_registry.dart';
import 'utils.dart';
/// The parsed contents of a pubspec file.
///
/// The fields of a pubspec are, for the most part, validated when they're first
/// accessed. This allows a partially-invalid pubspec to be used if only the
/// valid portions are relevant. To get a list of all errors in the pubspec, use
/// [allErrors].
class Pubspec {
// If a new lazily-initialized field is added to this class and the
// initialization can throw a [PubspecException], that error should also be
// exposed through [allErrors].
/// The registry of sources to use when parsing [dependencies] and
/// [devDependencies].
///
/// This will be null if this was created using [new Pubspec] or [new
/// Pubspec.empty].
final SourceRegistry _sources;
/// The location from which the pubspec was loaded.
///
/// This can be null if the pubspec was created in-memory or if its location
/// is unknown.
Uri get _location => fields.span.sourceUrl;
/// All pubspec fields.
///
/// This includes the fields from which other properties are derived.
final YamlMap fields;
/// The package's name.
String get name {
if (_name != null) return _name;
var name = fields['name'];
if (name == null) {
throw new PubspecException(
'Missing the required "name" field.', fields.span);
} else if (name is! String) {
throw new PubspecException(
'"name" field must be a string.', fields.nodes['name'].span);
}
_name = name;
return _name;
}
String _name;
/// The package's version.
Version get version {
if (_version != null) return _version;
var version = fields['version'];
if (version == null) {
_version = Version.none;
return _version;
}
var span = fields.nodes['version'].span;
if (version is num) {
var fixed = '$version.0';
if (version is int) {
fixed = '$fixed.0';
}
_error('"version" field must have three numeric components: major, '
'minor, and patch. Instead of "$version", consider "$fixed".', span);
}
if (version is! String) {
_error('"version" field must be a string.', span);
}
_version = _wrapFormatException('version number', span,
() => new Version.parse(version));
return _version;
}
Version _version;
/// The additional packages this package depends on.
List<PackageDep> get dependencies {
if (_dependencies != null) return _dependencies;
_dependencies = _parseDependencies('dependencies');
_checkDependencyOverlap(_dependencies, _devDependencies);
return _dependencies;
}
List<PackageDep> _dependencies;
/// The packages this package depends on when it is the root package.
List<PackageDep> get devDependencies {
if (_devDependencies != null) return _devDependencies;
_devDependencies = _parseDependencies('dev_dependencies');
_checkDependencyOverlap(_dependencies, _devDependencies);
return _devDependencies;
}
List<PackageDep> _devDependencies;
/// The dependency constraints that this package overrides when it is the
/// root package.
///
/// Dependencies here will replace any dependency on a package with the same
/// name anywhere in the dependency graph.
List<PackageDep> get dependencyOverrides {
if (_dependencyOverrides != null) return _dependencyOverrides;
_dependencyOverrides = _parseDependencies('dependency_overrides');
return _dependencyOverrides;
}
List<PackageDep> _dependencyOverrides;
/// The configurations of the transformers to use for this package.
List<Set<TransformerConfig>> get transformers {
if (_transformers != null) return _transformers;
var transformers = fields['transformers'];
if (transformers == null) {
_transformers = [];
return _transformers;
}
if (transformers is! List) {
_error('"transformers" field must be a list.',
fields.nodes['transformers'].span);
}
_transformers = transformers.nodes.map((phase) {
var phaseNodes = phase is YamlList ? phase.nodes : [phase];
return phaseNodes.map((transformerNode) {
var transformer = transformerNode.value;
if (transformer is! String && transformer is! Map) {
_error('A transformer must be a string or map.',
transformerNode.span);
}
var libraryNode;
var configurationNode;
if (transformer is String) {
libraryNode = transformerNode;
} else {
if (transformer.length != 1) {
_error('A transformer map must have a single key: the transformer '
'identifier.', transformerNode.span);
} else if (transformer.keys.single is! String) {
_error('A transformer identifier must be a string.',
transformer.nodes.keys.single.span);
}
libraryNode = transformer.nodes.keys.single;
configurationNode = transformer.nodes.values.single;
if (configurationNode is! YamlMap) {
_error("A transformer's configuration must be a map.",
configurationNode.span);
}
}
var config = _wrapSpanFormatException('transformer config', () {
return new TransformerConfig.parse(
libraryNode.value, libraryNode.span,
configurationNode);
});
var package = config.id.package;
if (package != name &&
!config.id.isBuiltInTransformer &&
!dependencies.any((ref) => ref.name == package) &&
!devDependencies.any((ref) => ref.name == package) &&
!dependencyOverrides.any((ref) => ref.name == package)) {
_error('"$package" is not a dependency.',
libraryNode.span);
}
return config;
}).toSet();
}).toList();
return _transformers;
}
List<Set<TransformerConfig>> _transformers;
/// The environment-related metadata.
PubspecEnvironment get environment {
if (_environment != null) return _environment;
var yaml = fields['environment'];
if (yaml == null) {
_environment = new PubspecEnvironment(VersionConstraint.any);
return _environment;
}
if (yaml is! Map) {
_error('"environment" field must be a map.',
fields.nodes['environment'].span);
}
_environment = new PubspecEnvironment(
_parseVersionConstraint(yaml.nodes['sdk']));
return _environment;
}
PubspecEnvironment _environment;
/// The URL of the server that the package should default to being published
/// to, "none" if the package should not be published, or `null` if it should
/// be published to the default server.
///
/// If this does return a URL string, it will be a valid parseable URL.
String get publishTo {
if (_parsedPublishTo) return _publishTo;
var publishTo = fields['publish_to'];
if (publishTo != null) {
var span = fields.nodes['publish_to'].span;
if (publishTo is! String) {
_error('"publish_to" field must be a string.', span);
}
// It must be "none" or a valid URL.
if (publishTo != "none") {
_wrapFormatException('"publish_to" field', span,
() => Uri.parse(publishTo));
}
}
_parsedPublishTo = true;
_publishTo = publishTo;
return _publishTo;
}
bool _parsedPublishTo = false;
String _publishTo;
/// The executables that should be placed on the user's PATH when this
/// package is globally activated.
///
/// It is a map of strings to strings. Each key is the name of the command
/// that will be placed on the user's PATH. The value is the name of the
/// .dart script (without extension) in the package's `bin` directory that
/// should be run for that command. Both key and value must be "simple"
/// strings: alphanumerics, underscores and hyphens only. If a value is
/// omitted, it is inferred to use the same name as the key.
Map<String, String> get executables {
if (_executables != null) return _executables;
_executables = {};
var yaml = fields['executables'];
if (yaml == null) return _executables;
if (yaml is! Map) {
_error('"executables" field must be a map.',
fields.nodes['executables'].span);
}
yaml.nodes.forEach((key, value) {
if (key.value is! String) {
_error('"executables" keys must be strings.', key.span);
}
final keyPattern = new RegExp(r"^[a-zA-Z0-9_-]+$");
if (!keyPattern.hasMatch(key.value)) {
_error('"executables" keys may only contain letters, '
'numbers, hyphens and underscores.', key.span);
}
if (value.value == null) {
value = key;
} else if (value.value is! String) {
_error('"executables" values must be strings or null.', value.span);
}
final valuePattern = new RegExp(r"[/\\]");
if (valuePattern.hasMatch(value.value)) {
_error('"executables" values may not contain path separators.',
value.span);
}
_executables[key.value] = value.value;
});
return _executables;
}
Map<String, String> _executables;
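// Worked example (hypothetical pubspec): the entry
//
//     executables:
//       pub-helper: helper
//       polish:
//
// yields {"pub-helper": "helper", "polish": "polish"}, since an omitted value
// defaults to the key itself.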
/// Whether the package is private and cannot be published.
///
/// This is specified in the pubspec by setting "publish_to" to "none".
bool get isPrivate => publishTo == "none";
/// Whether or not the pubspec has no contents.
bool get isEmpty =>
name == null && version == Version.none && dependencies.isEmpty;
/// Loads the pubspec for a package located in [packageDir].
///
/// If [expectedName] is passed and the pubspec doesn't have a matching name
/// field, this will throw a [PubspecError].
factory Pubspec.load(String packageDir, SourceRegistry sources,
{String expectedName}) {
var pubspecPath = path.join(packageDir, 'pubspec.yaml');
var pubspecUri = path.toUri(pubspecPath);
if (!fileExists(pubspecPath)) {
throw new FileException(
'Could not find a file named "pubspec.yaml" in "$packageDir".',
pubspecPath);
}
return new Pubspec.parse(readTextFile(pubspecPath), sources,
expectedName: expectedName, location: pubspecUri);
}
Pubspec(this._name, {Version version, Iterable<PackageDep> dependencies,
Iterable<PackageDep> devDependencies,
Iterable<PackageDep> dependencyOverrides,
VersionConstraint sdkConstraint,
Iterable<Iterable<TransformerConfig>> transformers,
Map fields, SourceRegistry sources})
: _version = version,
_dependencies = dependencies == null ? null : dependencies.toList(),
_devDependencies = devDependencies == null ? null :
devDependencies.toList(),
_dependencyOverrides = dependencyOverrides == null ? null :
dependencyOverrides.toList(),
_environment = new PubspecEnvironment(sdkConstraint),
_transformers = transformers == null ? [] :
transformers.map((phase) => phase.toSet()).toList(),
fields = fields == null ? new YamlMap() : new YamlMap.wrap(fields),
_sources = sources;
Pubspec.empty()
: _sources = null,
_name = null,
_version = Version.none,
_dependencies = <PackageDep>[],
_devDependencies = <PackageDep>[],
_environment = new PubspecEnvironment(),
_transformers = <Set<TransformerConfig>>[],
fields = new YamlMap();
/// Returns a Pubspec object for an already-parsed map representing its
/// contents.
///
/// If [expectedName] is passed and the pubspec doesn't have a matching name
/// field, this will throw a [PubspecError].
///
/// [location] is the location from which this pubspec was loaded.
Pubspec.fromMap(Map fields, this._sources, {String expectedName,
Uri location})
: fields = fields is YamlMap ? fields :
new YamlMap.wrap(fields, sourceUrl: location) {
// If [expectedName] is passed, ensure that the actual 'name' field exists
// and matches the expectation.
if (expectedName == null) return;
if (name == expectedName) return;
throw new PubspecException('"name" field doesn\'t match expected name '
'"$expectedName".', this.fields.nodes["name"].span);
}
/// Parses the pubspec stored at [filePath] whose text is [contents].
///
/// If the pubspec doesn't define a version for itself, it defaults to
/// [Version.none].
factory Pubspec.parse(String contents, SourceRegistry sources,
{String expectedName, Uri location}) {
var pubspecNode = loadYamlNode(contents, sourceUrl: location);
if (pubspecNode is YamlScalar && pubspecNode.value == null) {
pubspecNode = new YamlMap(sourceUrl: location);
} else if (pubspecNode is! YamlMap) {
throw new PubspecException(
'The pubspec must be a YAML mapping.', pubspecNode.span);
}
return new Pubspec.fromMap(pubspecNode, sources,
expectedName: expectedName, location: location);
}
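// A minimal parse sketch, assuming a [SourceRegistry] named `sources` is in
// scope as elsewhere in pub:
//
//     var pubspec = new Pubspec.parse("""
//     name: my_app
//     version: 1.0.0
//     dependencies:
//       path: ">=1.3.0 <2.0.0"
//     """, sources);
//     print(pubspec.dependencies.single.constraint);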
/// Returns a list of most errors in this pubspec.
///
/// This will return at most one error for each field.
List<PubspecException> get allErrors {
var errors = <PubspecException>[];
_getError(fn()) {
try {
fn();
} on PubspecException catch (e) {
errors.add(e);
}
}
_getError(() => this.name);
_getError(() => this.version);
_getError(() => this.dependencies);
_getError(() => this.devDependencies);
_getError(() => this.transformers);
_getError(() => this.environment);
_getError(() => this.publishTo);
return errors;
}
/// Parses the dependency field named [field], and returns the corresponding
/// list of dependencies.
List<PackageDep> _parseDependencies(String field) {
var dependencies = <PackageDep>[];
var yaml = fields[field];
// Allow an empty dependencies key.
if (yaml == null) return dependencies;
if (yaml is! Map) {
_error('"$field" field must be a map.', fields.nodes[field].span);
}
var nonStringNode = yaml.nodes.keys.firstWhere((e) => e.value is! String,
orElse: () => null);
if (nonStringNode != null) {
_error('A dependency name must be a string.', nonStringNode.span);
}
yaml.nodes.forEach((nameNode, specNode) {
var name = nameNode.value;
var spec = specNode.value;
if (fields['name'] != null && name == this.name) {
_error('A package may not list itself as a dependency.',
nameNode.span);
}
var descriptionNode;
var sourceName;
var versionConstraint = new VersionRange();
if (spec == null) {
descriptionNode = nameNode;
sourceName = _sources.defaultSource.name;
} else if (spec is String) {
descriptionNode = nameNode;
sourceName = _sources.defaultSource.name;
versionConstraint = _parseVersionConstraint(specNode);
} else if (spec is Map) {
// Don't write to the immutable YAML map.
spec = new Map.from(spec);
if (spec.containsKey('version')) {
spec.remove('version');
versionConstraint = _parseVersionConstraint(
specNode.nodes['version']);
}
var sourceNames = spec.keys.toList();
if (sourceNames.length > 1) {
_error('A dependency may only have one source.', specNode.span);
}
sourceName = sourceNames.single;
if (sourceName is! String) {
_error('A source name must be a string.',
specNode.nodes.keys.single.span);
}
descriptionNode = specNode.nodes[sourceName];
} else {
_error('A dependency specification must be a string or a mapping.',
specNode.span);
}
// Let the source validate the description.
var description = _wrapFormatException('description',
descriptionNode.span, () {
var pubspecPath;
if (_location != null && _isFileUri(_location)) {
pubspecPath = path.fromUri(_location);
}
return _sources[sourceName].parseDescription(
pubspecPath, descriptionNode.value, fromLockFile: false);
});
dependencies.add(new PackageDep(
name, sourceName, versionConstraint, description));
});
return dependencies;
}
/// Parses [node] to a [VersionConstraint].
VersionConstraint _parseVersionConstraint(YamlNode node) {
if (node.value == null) return VersionConstraint.any;
if (node.value is! String) {
_error('A version constraint must be a string.', node.span);
}
return _wrapFormatException('version constraint', node.span,
() => new VersionConstraint.parse(node.value));
}
/// Makes sure the same package doesn't appear as both a regular and dev
/// dependency.
void _checkDependencyOverlap(List<PackageDep> dependencies,
List<PackageDep> devDependencies) {
if (dependencies == null) return;
if (devDependencies == null) return;
var dependencyNames = dependencies.map((dep) => dep.name).toSet();
var collisions = dependencyNames.intersection(
devDependencies.map((dep) => dep.name).toSet());
if (collisions.isEmpty) return;
var span = fields["dependencies"].nodes.keys
.firstWhere((key) => collisions.contains(key.value)).span;
// TODO(nweiz): associate source range info with PackageDeps and use it
// here.
_error('${pluralize('Package', collisions.length)} '
'${toSentence(collisions.map((package) => '"$package"'))} cannot '
'appear in both "dependencies" and "dev_dependencies".',
span);
}
/// Runs [fn] and wraps any [FormatException] it throws in a
/// [PubspecException].
///
/// [description] should be a noun phrase that describes whatever's being
/// parsed or processed by [fn]. [span] should be the location of whatever's
/// being processed within the pubspec.
_wrapFormatException(String description, SourceSpan span, fn()) {
try {
return fn();
} on FormatException catch (e) {
_error('Invalid $description: ${e.message}', span);
}
}
_wrapSpanFormatException(String description, fn()) {
try {
return fn();
} on SourceSpanFormatException catch (e) {
_error('Invalid $description: ${e.message}', e.span);
}
}
/// Throws a [PubspecException] with the given message.
void _error(String message, SourceSpan span) {
throw new PubspecException(message, span);
}
}
/// The environment-related metadata in the pubspec.
///
/// Corresponds to the data under the "environment:" key in the pubspec.
class PubspecEnvironment {
/// The version constraint specifying which SDK versions this package works
/// with.
final VersionConstraint sdkVersion;
PubspecEnvironment([VersionConstraint sdk])
: sdkVersion = sdk != null ? sdk : VersionConstraint.any;
}
/// An exception thrown when parsing a pubspec.
///
/// These exceptions are often thrown lazily while accessing pubspec properties.
class PubspecException extends SourceSpanFormatException
implements ApplicationException {
PubspecException(String message, SourceSpan span)
: super(message, span);
}
/// Returns whether [uri] is a file URI.
///
/// This is slightly more complicated than just checking if the scheme is
/// 'file', since relative URIs also refer to the filesystem on the VM.
bool _isFileUri(Uri uri) => uri.scheme == 'file' || uri.scheme == '';

View file

@ -1,74 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Operations relative to the user's installed Dart SDK.
library pub.sdk;
import 'dart:io';
import 'package:path/path.dart' as path;
import 'package:pub_semver/pub_semver.dart';
import 'io.dart';
/// Gets the path to the root directory of the SDK.
///
/// When running from the actual built SDK, this will be the SDK that contains
/// the running Dart executable. When running from the repo, it will be the
/// "sdk" directory in the Dart repository itself.
final String rootDirectory =
runningFromSdk ? _rootDirectory : path.join(repoRoot, "sdk");
/// Gets the path to the root directory of the SDK, assuming that the currently
/// running Dart executable is within it.
final String _rootDirectory =
path.dirname(path.dirname(Platform.executable));
/// The SDK's revision number formatted to be a semantic version.
///
/// This can be set so that the version solver tests can artificially select
/// different SDK versions.
Version version = _getVersion();
/// Determine the SDK's version number.
Version _getVersion() {
// Some of the pub integration tests require an SDK version number, but the
  // tests on the bots are not run from a built SDK, so this lets us avoid
// parsing the missing version file.
var sdkVersion = Platform.environment["_PUB_TEST_SDK_VERSION"];
if (sdkVersion != null) return new Version.parse(sdkVersion);
if (runningFromSdk) {
// Read the "version" file.
var version = readTextFile(path.join(_rootDirectory, "version")).trim();
return new Version.parse(version);
}
// When running from the repo, read the canonical VERSION file in tools/.
// This makes it possible to run pub without having built the SDK first.
var contents = readTextFile(path.join(repoRoot, "tools/VERSION"));
parseField(name) {
var pattern = new RegExp("^$name ([a-z0-9]+)", multiLine: true);
var match = pattern.firstMatch(contents);
return match[1];
}
var channel = parseField("CHANNEL");
var major = parseField("MAJOR");
var minor = parseField("MINOR");
var patch = parseField("PATCH");
var prerelease = parseField("PRERELEASE");
var prereleasePatch = parseField("PRERELEASE_PATCH");
var version = "$major.$minor.$patch";
if (channel == "be") {
// TODO(rnystrom): tools/utils.py includes the svn commit here. Should we?
version += "-edge";
} else if (channel == "dev") {
version += "-dev.$prerelease.$prereleasePatch";
}
return new Version.parse(version);
}
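
The `_getVersion` logic above assembles a semantic version string from whitespace-delimited `NAME value` fields. Here is a standalone sketch of that field-parsing step in current Dart; the file contents and field values below are invented:

// Extract "NAME value" fields from a VERSION-style file and assemble a
// "MAJOR.MINOR.PATCH[-dev.X.Y]" string.
void main() {
  const contents = '''
CHANNEL dev
MAJOR 1
MINOR 11
PATCH 0
PRERELEASE 3
PRERELEASE_PATCH 0
''';

  String field(String name) {
    final match =
        RegExp('^$name ([a-z0-9]+)', multiLine: true).firstMatch(contents);
    if (match == null) throw FormatException('Missing field "$name".');
    return match[1]!;
  }

  var version = '${field("MAJOR")}.${field("MINOR")}.${field("PATCH")}';
  if (field('CHANNEL') == 'dev') {
    version += '-dev.${field("PRERELEASE")}.${field("PRERELEASE_PATCH")}';
  }
  print(version); // 1.11.0-dev.3.0
}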


@@ -1,658 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// A back-tracking depth-first solver.
///
/// Attempts to find the best solution for a root package's transitive
/// dependency graph, where a "solution" is a set of concrete package versions.
/// A valid solution will select concrete versions for every package reached
/// from the root package's dependency graph, and each of those packages will
/// fit the version constraints placed on it.
///
/// The solver builds up a solution incrementally by traversing the dependency
/// graph starting at the root package. When it reaches a new package, it gets
/// the set of versions that meet the current constraint placed on it. It
/// *speculatively* selects one version from that set and adds it to the
/// current solution and then proceeds. If it fully traverses the dependency
/// graph, the solution is valid and it stops.
///
/// If it reaches an error because:
///
/// - A new dependency is placed on a package that's already been selected in
/// the solution and the selected version doesn't match the new constraint.
///
/// - There are no versions available that meet the constraint placed on a
/// package.
///
/// - etc.
///
/// then the current solution is invalid. It will then backtrack to the most
/// recent speculative version choice and try the next one. That becomes the
/// new in-progress solution and it tries to proceed from there. It will keep
/// doing this, traversing and then backtracking when it meets a failure until
/// a valid solution has been found or until all possible options for all
/// speculative choices have been exhausted.
library pub.solver.backtracking_solver;
import 'dart:async';
import 'package:pub_semver/pub_semver.dart';
import '../barback.dart' as barback;
import '../exceptions.dart';
import '../lock_file.dart';
import '../log.dart' as log;
import '../package.dart';
import '../pubspec.dart';
import '../sdk.dart' as sdk;
import '../source_registry.dart';
import '../source/unknown.dart';
import '../utils.dart';
import 'version_queue.dart';
import 'version_selection.dart';
import 'version_solver.dart';
/// The top-level solver.
///
/// Keeps track of the current potential solution, and the other possible
/// versions for speculative package selections. Backtracks and advances to the
/// next potential solution in the case of a failure.
class BacktrackingSolver {
final SolveType type;
final SourceRegistry sources;
final Package root;
/// The lockfile that was present before solving.
final LockFile lockFile;
final PubspecCache cache;
/// The set of packages that are being explicitly upgraded.
///
/// The solver will only allow the very latest version for each of these
/// packages.
final _forceLatest = new Set<String>();
  /// The set of packages whose dependency is being overridden by the root
/// package, keyed by the name of the package.
///
  /// Any dependency on a package that appears in this map will be overridden
/// to use the one here.
final _overrides = new Map<String, PackageDep>();
/// The package versions currently selected by the solver, along with the
/// versions which are remaining to be tried.
///
/// Every time a package is encountered when traversing the dependency graph,
/// the solver must select a version for it, sometimes when multiple versions
/// are valid. This keeps track of which versions have been selected so far
/// and which remain to be tried.
///
/// Each entry in the list is a [VersionQueue], which is an ordered queue of
/// versions to try for a single package. It maintains the currently selected
/// version for that package. When a new dependency is encountered, a queue
/// of versions of that dependency is pushed onto the end of the list. A
/// queue is removed from the list once it's empty, indicating that none of
/// the versions provided a solution.
///
/// The solver tries versions in depth-first order, so only the last queue in
/// the list will have items removed from it. When a new constraint is placed
/// on an already-selected package, and that constraint doesn't match the
/// selected version, that will cause the current solution to fail and
/// trigger backtracking.
final _versions = <VersionQueue>[];
/// The current set of package versions the solver has selected, along with
/// metadata about those packages' dependencies.
///
/// This has the same view of the selected versions as [_versions], except for
/// two differences. First, [_versions] doesn't have an entry for the root
/// package, since it has only one valid version, but [_selection] does, since
/// its dependencies are relevant. Second, when backtracking, [_versions]
/// contains the version that's being backtracked, while [_selection] does
/// not.
VersionSelection _selection;
/// The number of solutions the solver has tried so far.
var _attemptedSolutions = 1;
BacktrackingSolver(SolveType type, SourceRegistry sources, this.root,
this.lockFile, List<String> useLatest)
: type = type,
sources = sources,
cache = new PubspecCache(type, sources) {
_selection = new VersionSelection(this);
for (var package in useLatest) {
_forceLatest.add(package);
}
for (var override in root.dependencyOverrides) {
_overrides[override.name] = override;
}
}
/// Run the solver.
///
/// Completes with a list of specific package versions if successful or an
/// error if it failed to find a solution.
Future<SolveResult> solve() async {
var stopwatch = new Stopwatch();
_logParameters();
// Sort the overrides by package name to make sure they're deterministic.
var overrides = _overrides.values.toList();
overrides.sort((a, b) => a.name.compareTo(b.name));
try {
stopwatch.start();
// Pre-cache the root package's known pubspec.
var rootID = new PackageId.root(root);
cache.cache(rootID, root.pubspec);
cache.cache(new PackageId.magic('pub itself'), _implicitPubspec());
await _selection.select(rootID);
_validateSdkConstraint(root.pubspec);
logSolve();
var packages = await _solve();
var pubspecs = new Map.fromIterable(packages,
key: (id) => id.name,
value: (id) => cache.getCachedPubspec(id));
var resolved = await Future.wait(
packages.map((id) => sources[id.source].resolveId(id)));
return new SolveResult.success(sources, root, lockFile, resolved,
overrides, pubspecs, _getAvailableVersions(resolved),
_attemptedSolutions);
} on SolveFailure catch (error) {
// Wrap a failure in a result so we can attach some other data.
return new SolveResult.failure(sources, root, lockFile, overrides,
error, _attemptedSolutions);
} finally {
// Gather some solving metrics.
var buffer = new StringBuffer();
buffer.writeln('${runtimeType} took ${stopwatch.elapsed} seconds.');
buffer.writeln(cache.describeResults());
log.solver(buffer);
}
}
/// Creates a pubspec for pub's implicit dependencies on barback and related
/// packages.
Pubspec _implicitPubspec() {
var dependencies = [];
barback.pubConstraints.forEach((name, constraint) {
dependencies.add(new PackageDep(name, "hosted", constraint, name));
});
return new Pubspec("pub itself", dependencies: dependencies);
}
/// Generates a map containing all of the known available versions for each
/// package in [packages].
///
/// The version list may not always be complete. If the package is the root
  /// package, or if it's a package that we didn't unlock while solving
/// because we weren't trying to upgrade it, we will just know the current
/// version.
Map<String, List<Version>> _getAvailableVersions(List<PackageId> packages) {
var availableVersions = new Map<String, List<Version>>();
for (var package in packages) {
var cached = cache.getCachedVersions(package.toRef());
var versions;
if (cached != null) {
versions = cached.map((id) => id.version).toList();
} else {
// If the version list was never requested, just use the one known
// version.
versions = [package.version];
}
availableVersions[package.name] = versions;
}
return availableVersions;
}
/// Gets the version of [package] currently locked in the lock file.
///
/// Returns `null` if it isn't in the lockfile (or has been unlocked).
PackageId getLocked(String package) {
if (type == SolveType.GET) return lockFile.packages[package];
// When downgrading, we don't want to force the latest versions of
// non-hosted packages, since they don't support multiple versions and thus
// can't be downgraded.
if (type == SolveType.DOWNGRADE) {
var locked = lockFile.packages[package];
if (locked != null && !sources[locked.source].hasMultipleVersions) {
return locked;
}
}
if (_forceLatest.isEmpty || _forceLatest.contains(package)) return null;
return lockFile.packages[package];
}
/// Gets the package [name] that's currently contained in the lockfile if it
/// matches the current constraint and has the same source and description as
/// other references to that package.
///
/// Returns `null` otherwise.
PackageId _getValidLocked(String name) {
var package = getLocked(name);
if (package == null) return null;
var constraint = _selection.getConstraint(name);
if (!constraint.allows(package.version)) {
logSolve('$package is locked but does not match $constraint');
return null;
} else {
logSolve('$package is locked');
}
var required = _selection.getRequiredDependency(name);
if (required != null) {
if (package.source != required.dep.source) return null;
var source = sources[package.source];
if (!source.descriptionsEqual(
package.description, required.dep.description)) return null;
}
return package;
}
/// Tries to find the best set of versions that meet the constraints.
///
/// Selects matching versions of unselected packages, or backtracks if there
/// are no such versions.
Future<List<PackageId>> _solve() async {
// TODO(nweiz): Use real while loops when issue 23394 is fixed.
await Future.doWhile(() async {
// Avoid starving the event queue by waiting for a timer-level event.
await new Future(() {});
// If there are no more packages to traverse, we've traversed the whole
// graph.
var ref = _selection.nextUnselected;
if (ref == null) return false;
var queue;
try {
queue = await _versionQueueFor(ref);
} on SolveFailure catch (error) {
// TODO(nweiz): adjust the priority of [ref] in the unselected queue
// since we now know it's problematic. We should reselect it as soon as
// we've selected a different version of one of its dependers.
// There are no valid versions of [ref] to select, so we have to
// backtrack and unselect some previously-selected packages.
if (await _backtrack()) return true;
// Backtracking failed, which means we're out of possible solutions.
// Throw the error that caused us to try backtracking.
if (error is! NoVersionException) rethrow;
// If we got a NoVersionException, convert it to a
// non-version-specific one so that it's clear that there aren't *any*
// acceptable versions that satisfy the constraint.
throw new NoVersionException(
error.package,
null,
(error as NoVersionException).constraint,
error.dependencies);
}
await _selection.select(queue.current);
_versions.add(queue);
logSolve();
return true;
});
// If we got here, we successfully found a solution.
return _selection.ids.where((id) => !id.isMagic).toList();
}
/// Creates a queue of available versions for [ref].
///
/// The returned queue starts at a version that is valid according to the
/// current dependency constraints. If no such version is available, throws a
/// [SolveFailure].
Future<VersionQueue> _versionQueueFor(PackageRef ref) async {
if (ref.isRoot) {
return await VersionQueue.create(
new PackageId.root(root), () => new Future.value([]));
}
var locked = _getValidLocked(ref.name);
var queue = await VersionQueue.create(locked,
() => _getAllowedVersions(ref, locked));
await _findValidVersion(queue);
return queue;
}
/// Gets all versions of [ref] that could be selected, other than [locked].
Future<Iterable<PackageId>> _getAllowedVersions(PackageRef ref,
PackageId locked) async {
var allowed;
try {
allowed = await cache.getVersions(ref);
} on PackageNotFoundException catch (error) {
// Show the user why the package was being requested.
throw new DependencyNotFoundException(
ref.name, error, _selection.getDependenciesOn(ref.name).toList());
}
if (_forceLatest.contains(ref.name)) allowed = [allowed.first];
if (locked != null) {
allowed = allowed.where((version) => version != locked);
}
return allowed;
}
/// Backtracks from the current failed solution and determines the next
/// solution to try.
///
  /// This backjumps based on the cause of previous failures to minimize
/// backtracking.
///
/// Returns `true` if there is a new solution to try.
Future<bool> _backtrack() async {
// Bail if there is nothing to backtrack to.
if (_versions.isEmpty) return false;
// TODO(nweiz): Use real while loops when issue 23394 is fixed.
// Advance past the current version of the leaf-most package.
await Future.doWhile(() async {
// Move past any packages that couldn't have led to the failure.
await Future.doWhile(() async {
if (_versions.isEmpty || _versions.last.hasFailed) return false;
var queue = _versions.removeLast();
assert(_selection.ids.last == queue.current);
await _selection.unselectLast();
return true;
});
if (_versions.isEmpty) return false;
var queue = _versions.last;
var name = queue.current.name;
assert(_selection.ids.last == queue.current);
await _selection.unselectLast();
// Fast forward through versions to find one that's valid relative to the
// current constraints.
var foundVersion = false;
if (await queue.advance()) {
try {
await _findValidVersion(queue);
foundVersion = true;
} on SolveFailure {
// `foundVersion` is already false.
}
}
// If we found a valid version, add it to the selection and stop
// backtracking. Otherwise, backtrack through this package and on.
if (foundVersion) {
await _selection.select(queue.current);
logSolve();
return false;
} else {
logSolve('no more versions of $name, backtracking');
_versions.removeLast();
return true;
}
});
if (!_versions.isEmpty) _attemptedSolutions++;
return !_versions.isEmpty;
}
/// Rewinds [queue] until it reaches a version that's valid relative to the
/// current constraints.
///
/// If the first version is valid, no rewinding will be done. If no version is
/// valid, this throws a [SolveFailure] explaining why.
Future _findValidVersion(VersionQueue queue) {
// TODO(nweiz): Use real while loops when issue 23394 is fixed.
return Future.doWhile(() async {
try {
await _checkVersion(queue.current);
return false;
} on SolveFailure {
var name = queue.current.name;
if (await queue.advance()) return true;
// If we've run out of valid versions for this package, mark its oldest
// depender as failing. This ensures that we look at graphs in which the
// package isn't selected at all.
_fail(_selection.getDependenciesOn(name).first.depender.name);
// TODO(nweiz): Throw a more detailed error here that combines all the
// errors that were thrown for individual versions and fully explains
// why we couldn't select any versions.
// The queue is out of versions, so throw the final error we
// encountered while trying to find one.
rethrow;
}
});
}
/// Checks whether the package identified by [id] is valid relative to the
/// current constraints.
///
/// If it's not, throws a [SolveFailure] explaining why.
Future _checkVersion(PackageId id) async {
var constraint = _selection.getConstraint(id.name);
if (!constraint.allows(id.version)) {
var deps = _selection.getDependenciesOn(id.name);
for (var dep in deps) {
if (dep.dep.constraint.allows(id.version)) continue;
_fail(dep.depender.name);
}
logSolve(
"version ${id.version} of ${id.name} doesn't match $constraint:\n" +
_selection.describeDependencies(id.name));
throw new NoVersionException(
id.name, id.version, constraint, deps.toList());
}
var pubspec;
try {
pubspec = await cache.getPubspec(id);
} on PackageNotFoundException {
// We can only get here if the lockfile refers to a specific package
// version that doesn't exist (probably because it was yanked).
throw new NoVersionException(id.name, null, id.version, []);
}
_validateSdkConstraint(pubspec);
for (var dep in await depsFor(id)) {
if (dep.isMagic) continue;
var dependency = new Dependency(id, dep);
var allDeps = _selection.getDependenciesOn(dep.name).toList();
allDeps.add(dependency);
var depConstraint = _selection.getConstraint(dep.name);
if (!depConstraint.allowsAny(dep.constraint)) {
for (var otherDep in _selection.getDependenciesOn(dep.name)) {
if (otherDep.dep.constraint.allowsAny(dep.constraint)) continue;
_fail(otherDep.depender.name);
}
logSolve(
'inconsistent constraints on ${dep.name}:\n'
' $dependency\n' +
_selection.describeDependencies(dep.name));
throw new DisjointConstraintException(dep.name, allDeps);
}
var selected = _selection.selected(dep.name);
if (selected != null && !dep.constraint.allows(selected.version)) {
_fail(dep.name);
logSolve(
"constraint doesn't match selected version ${selected.version} of "
"${dep.name}:\n"
" $dependency");
throw new NoVersionException(dep.name, selected.version, dep.constraint,
allDeps);
}
var required = _selection.getRequiredDependency(dep.name);
if (required == null) continue;
if (dep.source != required.dep.source) {
// Mark the dependers as failing rather than the package itself, because
// no version from this source will be compatible.
for (var otherDep in _selection.getDependenciesOn(dep.name)) {
_fail(otherDep.depender.name);
}
logSolve(
'inconsistent source "${dep.source}" for ${dep.name}:\n'
' $dependency\n' +
_selection.describeDependencies(dep.name));
throw new SourceMismatchException(dep.name, allDeps);
}
var source = sources[dep.source];
if (!source.descriptionsEqual(
dep.description, required.dep.description)) {
// Mark the dependers as failing rather than the package itself, because
// no version with this description will be compatible.
for (var otherDep in _selection.getDependenciesOn(dep.name)) {
_fail(otherDep.depender.name);
}
logSolve(
'inconsistent description "${dep.description}" for ${dep.name}:\n'
' $dependency\n' +
_selection.describeDependencies(dep.name));
throw new DescriptionMismatchException(dep.name, allDeps);
}
}
return true;
}
/// Marks the package named [name] as having failed.
///
/// This will cause the backtracker not to jump over this package.
void _fail(String name) {
// Don't mark the root package as failing because it's not in [_versions]
// and there's only one version of it anyway.
if (name == root.name) return;
_versions.firstWhere((queue) => queue.current.name == name).fail();
}
/// Returns the dependencies of the package identified by [id].
///
  /// This takes overrides and dev dependencies into account when necessary.
Future<Set<PackageDep>> depsFor(PackageId id) async {
var pubspec = await cache.getPubspec(id);
var deps = pubspec.dependencies.toSet();
if (id.isRoot) {
// Include dev dependencies of the root package.
deps.addAll(pubspec.devDependencies);
// Add all overrides. This ensures a dependency only present as an
// override is still included.
deps.addAll(_overrides.values);
// Replace any overridden dependencies.
deps = deps.map((dep) {
var override = _overrides[dep.name];
if (override != null) return override;
// Not overridden.
return dep;
}).toSet();
} else {
// Ignore any overridden dependencies.
deps.removeWhere((dep) => _overrides.containsKey(dep.name));
}
// Make sure the package doesn't have any bad dependencies.
for (var dep in deps.toSet()) {
if (!dep.isRoot && sources[dep.source] is UnknownSource) {
throw new UnknownSourceException(id.name, [new Dependency(id, dep)]);
}
if (dep.name == 'barback') {
deps.add(new PackageDep.magic('pub itself'));
}
}
return deps;
}
/// Logs the initial parameters to the solver.
void _logParameters() {
var buffer = new StringBuffer();
buffer.writeln("Solving dependencies:");
for (var package in root.dependencies) {
buffer.write("- $package");
var locked = getLocked(package.name);
if (_forceLatest.contains(package.name)) {
buffer.write(" (use latest)");
} else if (locked != null) {
var version = locked.version;
buffer.write(" (locked to $version)");
}
buffer.writeln();
}
log.solver(buffer.toString().trim());
}
/// Logs [message] in the context of the current selected packages.
///
/// If [message] is omitted, just logs a description of leaf-most selection.
void logSolve([String message]) {
if (message == null) {
if (_versions.isEmpty) {
message = "* start at root";
} else {
message = "* select ${_versions.last.current}";
}
} else {
// Otherwise, indent it under the current selected package.
message = prefixLines(message);
}
// Indent for the previous selections.
log.solver(prefixLines(message, prefix: '| ' * _versions.length));
}
}
/// Ensures that if [pubspec] has an SDK constraint, then it is compatible
/// with the current SDK.
///
/// Throws a [SolveFailure] if not.
void _validateSdkConstraint(Pubspec pubspec) {
if (pubspec.environment.sdkVersion.allows(sdk.version)) return;
throw new BadSdkVersionException(pubspec.name,
'Package ${pubspec.name} requires SDK version '
'${pubspec.environment.sdkVersion} but the current SDK is '
'${sdk.version}.');
}
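
The library comment at the top of this file describes the solver's select-then-backtrack shape. As a toy illustration only, here is a tiny synchronous, recursive sketch of that shape; the real solver is iterative, asynchronous, and driven by [VersionQueue]s, and the package names and `consistent` predicate below are invented:

// Pick one candidate version per package; when a pick turns out to be
// inconsistent, rewind to the most recent package that still has untried
// candidates and continue from there.
bool solve(List<String> packages, Map<String, List<int>> candidates,
    bool Function(Map<String, int>) consistent, Map<String, int> selection) {
  if (selection.length == packages.length) return true;
  final package = packages[selection.length];
  for (final version in candidates[package]!) {
    selection[package] = version; // speculative choice
    if (consistent(selection) &&
        solve(packages, candidates, consistent, selection)) {
      return true;
    }
    selection.remove(package); // backtrack
  }
  return false; // no candidate worked at this level
}

void main() {
  final selection = <String, int>{};
  final ok = solve(
      ['a', 'b'],
      {'a': [2, 1], 'b': [2, 1]},
      // Invented constraint: both packages must end up on the same version.
      (sel) => !(sel.containsKey('a') && sel.containsKey('b')) ||
          sel['a'] == sel['b'],
      selection);
  print('$ok $selection'); // true {a: 2, b: 2}
}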


@@ -1,249 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.solver.solve_report;
import 'package:pub_semver/pub_semver.dart';
import '../lock_file.dart';
import '../log.dart' as log;
import '../package.dart';
import '../source_registry.dart';
import '../utils.dart';
import 'version_solver.dart';
/// Unlike [SolveResult], which is the static data describing a resolution,
/// this class contains the mutable state used while generating the report
/// itself.
///
/// It's a report builder.
class SolveReport {
final SolveType _type;
final SourceRegistry _sources;
final Package _root;
final LockFile _previousLockFile;
final SolveResult _result;
/// The dependencies in [_result], keyed by package name.
final _dependencies = new Map<String, PackageId>();
final _output = new StringBuffer();
SolveReport(this._type, this._sources, this._root, this._previousLockFile,
this._result) {
// Fill the map so we can use it later.
for (var id in _result.packages) {
_dependencies[id.name] = id;
}
}
/// Displays a report of the results of the version resolution relative to
/// the previous lock file.
void show() {
_reportChanges();
_reportOverrides();
}
/// Displays a one-line message summarizing what changes were made (or would
/// be made) to the lockfile.
///
/// If [dryRun] is true, describes it in terms of what would be done.
void summarize({bool dryRun: false}) {
// Count how many dependencies actually changed.
var dependencies = _dependencies.keys.toSet();
dependencies.addAll(_previousLockFile.packages.keys);
dependencies.remove(_root.name);
var numChanged = dependencies.where((name) {
var oldId = _previousLockFile.packages[name];
var newId = _dependencies[name];
// Added or removed dependencies count.
if (oldId == null) return true;
if (newId == null) return true;
// The dependency existed before, so see if it was modified.
return !_sources.idsEqual(oldId, newId);
}).length;
if (dryRun) {
if (numChanged == 0) {
log.message("No dependencies would change.");
} else if (numChanged == 1) {
log.message("Would change $numChanged dependency.");
} else {
log.message("Would change $numChanged dependencies.");
}
} else {
if (numChanged == 0) {
if (_type == SolveType.GET) {
log.message("Got dependencies!");
} else {
log.message("No dependencies changed.");
}
} else if (numChanged == 1) {
log.message("Changed $numChanged dependency!");
} else {
log.message("Changed $numChanged dependencies!");
}
}
}
/// Displays a report of all of the previous and current dependencies and
/// how they have changed.
void _reportChanges() {
_output.clear();
// Show the new set of dependencies ordered by name.
var names = _result.packages.map((id) => id.name).toList();
names.remove(_root.name);
names.sort();
names.forEach(_reportPackage);
// Show any removed ones.
var removed = _previousLockFile.packages.keys.toSet();
removed.removeAll(names);
if (removed.isNotEmpty) {
_output.writeln("These packages are no longer being depended on:");
removed = removed.toList();
removed.sort();
removed.forEach((name) => _reportPackage(name, alwaysShow: true));
}
log.message(_output);
}
/// Displays a warning about the overrides currently in effect.
void _reportOverrides() {
_output.clear();
if (_result.overrides.isNotEmpty) {
_output.writeln("Warning: You are using these overridden dependencies:");
var overrides = _result.overrides.map((dep) => dep.name).toList();
overrides.sort((a, b) => a.compareTo(b));
overrides.forEach(
(name) => _reportPackage(name, alwaysShow: true,
highlightOverride: false));
log.warning(_output);
}
}
/// Reports the results of the upgrade on the package named [name].
///
/// If [alwaysShow] is true, the package is reported even if it didn't change,
/// regardless of [_type]. If [highlightOverride] is true (or absent), writes
/// "(override)" next to overridden packages.
void _reportPackage(String name,
{bool alwaysShow: false, bool highlightOverride: true}) {
var newId = _dependencies[name];
var oldId = _previousLockFile.packages[name];
var id = newId != null ? newId : oldId;
var isOverridden = _result.overrides.map(
(dep) => dep.name).contains(id.name);
// If the package was previously a dependency but the dependency has
// changed in some way.
var changed = false;
// If the dependency was added or removed.
var addedOrRemoved = false;
// Show a one-character "icon" describing the change. They are:
//
// ! The package is being overridden.
// - The package was removed.
// + The package was added.
// > The package was upgraded from a lower version.
// < The package was downgraded from a higher version.
// * Any other change between the old and new package.
var icon;
if (isOverridden) {
icon = log.magenta("! ");
} else if (newId == null) {
icon = log.red("- ");
addedOrRemoved = true;
} else if (oldId == null) {
icon = log.green("+ ");
addedOrRemoved = true;
} else if (!_sources.idDescriptionsEqual(oldId, newId)) {
icon = log.cyan("* ");
changed = true;
} else if (oldId.version < newId.version) {
icon = log.green("> ");
changed = true;
} else if (oldId.version > newId.version) {
icon = log.cyan("< ");
changed = true;
} else {
// Unchanged.
icon = " ";
}
if (_type == SolveType.GET && !(alwaysShow || changed || addedOrRemoved)) {
return;
}
_output.write(icon);
_output.write(log.bold(id.name));
_output.write(" ");
_writeId(id);
// If the package was upgraded, show what it was upgraded from.
if (changed) {
_output.write(" (was ");
_writeId(oldId);
_output.write(")");
}
// Highlight overridden packages.
if (isOverridden && highlightOverride) {
_output.write(" ${log.magenta('(overridden)')}");
}
// See if there are any newer versions of the package that we were
// unable to upgrade to.
if (newId != null && _type != SolveType.DOWNGRADE) {
var versions = _result.availableVersions[newId.name];
var newerStable = false;
var newerUnstable = false;
for (var version in versions) {
if (version > newId.version) {
if (version.isPreRelease) {
newerUnstable = true;
} else {
newerStable = true;
}
}
}
// If there are newer stable versions, only show those.
var message;
if (newerStable) {
message = "(${maxAll(versions, Version.prioritize)} available)";
} else if (newerUnstable) {
message = "(${maxAll(versions)} available)";
}
if (message != null) _output.write(" ${log.cyan(message)}");
}
_output.writeln();
}
/// Writes a terse description of [id] (not including its name) to the output.
void _writeId(PackageId id) {
_output.write(id.version);
var source = _sources[id.source];
if (source != _sources.defaultSource) {
var description = source.formatDescription(_root.dir, id.description);
_output.write(" from ${id.source} $description");
}
}
}
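
The `_reportPackage` method above uses one-character icons to summarize how each dependency changed. A minimal sketch of just the version-comparison part of that convention (plain integers stand in for `Version` objects; the override and description-change cases are omitted):

String changeIcon(int? oldVersion, int? newVersion) {
  if (newVersion == null) return '-'; // removed
  if (oldVersion == null) return '+'; // added
  if (oldVersion < newVersion) return '>'; // upgraded
  if (oldVersion > newVersion) return '<'; // downgraded
  return ' '; // unchanged
}

void main() {
  print(changeIcon(null, 1)); // +
  print(changeIcon(1, 2));    // >
  print(changeIcon(2, 1));    // <
  print(changeIcon(2, null)); // -
}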


@@ -1,149 +0,0 @@
// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.solver.unselected_package_queue;
import 'dart:async';
import 'dart:collection';
import 'package:stack_trace/stack_trace.dart';
import '../log.dart' as log;
import '../package.dart';
import 'backtracking_solver.dart';
/// A priority queue of package references.
///
/// This is used to determine which packages should be selected by the solver,
/// and when. It's ordered such that the earliest packages should be selected
/// first.
class UnselectedPackageQueue {
/// The underlying priority set.
SplayTreeSet<PackageRef> _set;
/// The version solver.
final BacktrackingSolver _solver;
/// A cache of the number of versions for each package ref.
///
/// This is cached because sorting is synchronous and retrieving this
/// information is asynchronous.
final _numVersions = new Map<PackageRef, int>();
/// The first package in the queue (that is, the package that should be
/// selected soonest).
PackageRef get first => _set.first;
/// Whether there are no more packages in the queue.
bool get isEmpty => _set.isEmpty;
UnselectedPackageQueue(this._solver) {
_set = new SplayTreeSet(_comparePackages);
}
/// Adds [ref] to the queue, if it's not there already.
Future add(PackageRef ref) async {
if (_solver.getLocked(ref.name) == null && !_numVersions.containsKey(ref)) {
// Only get the number of versions for unlocked packages. We do this for
// two reasons: first, locked packages are always sorted first anyway;
// second, if every package is locked, we want to do version resolution
// without any HTTP requests if possible.
_numVersions[ref] = await _getNumVersions(ref);
}
_set.add(ref);
}
/// Removes [ref] from the queue.
void remove(PackageRef ref) {
_set.remove(ref);
}
/// The [Comparator] used to sort the queue.
int _comparePackages(PackageRef ref1, PackageRef ref2) {
var name1 = ref1.name;
var name2 = ref2.name;
if (name1 == name2) {
assert(ref1 == ref2);
return 0;
}
// Select the root package before anything else.
if (ref1.isRoot) return -1;
if (ref2.isRoot) return 1;
// Sort magic refs before anything other than the root. The only magic
// dependency that makes sense as a ref is "pub itself", and it only has a
// single version.
if (ref1.isMagic && ref2.isMagic) return name1.compareTo(name2);
if (ref1.isMagic) return -1;
if (ref2.isMagic) return 1;
var locked1 = _solver.getLocked(name1) != null;
var locked2 = _solver.getLocked(name2) != null;
// Select locked packages before unlocked packages to ensure that they
// remain locked as long as possible.
if (locked1 && !locked2) return -1;
if (!locked1 && locked2) return 1;
// TODO(nweiz): Should we sort packages by something like number of
// dependencies? We should be able to get that quickly for locked packages
// if we have their pubspecs locally.
// Sort locked packages by name among themselves to ensure that solving is
// deterministic.
if (locked1 && locked2) return name1.compareTo(name2);
// Sort unlocked packages by the number of versions that might be selected
// for them. In general, packages with fewer versions are less likely to
// benefit from changing versions, so they should be selected earlier.
var versions1 = _numVersions[ref1];
var versions2 = _numVersions[ref2];
if (versions1 == null && versions2 != null) return -1;
if (versions1 != null && versions2 == null) return 1;
if (versions1 != versions2) return versions1.compareTo(versions2);
// Fall back on sorting by name to ensure determinism.
return name1.compareTo(name2);
}
/// Returns the number of versions available for a given package.
///
/// This excludes versions that don't match the root package's dependencies,
/// since those versions can never be selected by the solver.
Future<int> _getNumVersions(PackageRef ref) async {
// There is only ever one version of the root package.
if (ref.isRoot) return 1;
var versions;
try {
versions = await _solver.cache.getVersions(ref);
} catch (error, stackTrace) {
// If it fails for any reason, just treat that as no versions. This
// will sort this reference higher so that we can traverse into it
// and report the error more properly.
log.solver("Could not get versions for $ref:\n$error\n\n" +
new Chain.forTrace(stackTrace).terse.toString());
return 0;
}
// If the root package depends on this one, ignore versions that don't match
// that constraint. Since the root package's dependency constraints won't
// change during solving, we can safely filter out packages that don't meet
// it.
for (var rootDep in _solver.root.immediateDependencies) {
if (rootDep.name != ref.name) continue;
return versions.where((id) => rootDep.constraint.allows(id.version))
.length;
}
// TODO(nweiz): Also ignore versions with non-matching SDK constraints or
// dependencies that are incompatible with the root package's.
return versions.length;
}
String toString() => _set.toString();
}
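
The queue above is backed by a `SplayTreeSet` whose comparator prefers locked packages, then packages with fewer candidate versions, then names. A simplified synchronous sketch of that ordering (the package names and version counts below are invented):

import 'dart:collection';

void main() {
  const locked = {'args'};
  const numVersions = {'args': 1, 'barback': 12, 'stack_trace': 3};

  int compare(String a, String b) {
    // Locked packages sort before unlocked ones.
    final lockedA = locked.contains(a), lockedB = locked.contains(b);
    if (lockedA != lockedB) return lockedA ? -1 : 1;
    // Then packages with fewer candidate versions sort first.
    final byCount = numVersions[a]!.compareTo(numVersions[b]!);
    if (byCount != 0) return byCount;
    // Fall back on name so the ordering is deterministic.
    return a.compareTo(b);
  }

  final queue = SplayTreeSet<String>(compare)
    ..addAll(['barback', 'stack_trace', 'args']);
  print(queue.toList()); // [args, stack_trace, barback]
}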


@@ -1,105 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.solver.version_queue;
import 'dart:async';
import 'dart:collection' show Queue;
import '../package.dart';
/// A function that asynchronously returns a sequence of package IDs.
typedef Future<Iterable<PackageId>> PackageIdGenerator();
/// A prioritized, asynchronous queue of the possible versions that can be
/// selected for one package.
///
/// If there is a locked version, that comes first, followed by other versions
/// in descending order. This avoids requesting the list of versions until
/// needed (i.e. after any locked version has been consumed) to avoid unneeded
/// network requests.
class VersionQueue {
  /// The set of allowed versions, as returned by [_allowedGenerator].
///
/// If [_locked] is not `null`, this will initially be `null` until we
/// advance past the locked version.
Queue<PackageId> _allowed;
/// The callback that will generate the sequence of packages. This will be
/// called as lazily as possible.
final PackageIdGenerator _allowedGenerator;
/// The currently locked version of the package, or `null` if there is none,
/// or we have advanced past it.
PackageId _locked;
/// Gets the currently selected version.
PackageId get current {
if (_locked != null) return _locked;
return _allowed.first;
}
/// Whether the currently selected version has been responsible for a solve
/// failure, or depends on a package that has.
///
/// The solver uses this to determine which packages to backtrack to after a
/// failure occurs. Any selected package that did *not* cause the failure can
/// be skipped by the backtracker.
bool get hasFailed => _hasFailed;
bool _hasFailed = false;
  /// Creates a new [VersionQueue] starting with the optional [locked] package,
  /// followed by the results of calling [allowedGenerator].
///
/// This is asynchronous so that [current] can always be accessed
/// synchronously. If there is no locked version, we need to get the list of
/// versions asynchronously before we can determine what the first one is.
static Future<VersionQueue> create(PackageId locked,
PackageIdGenerator allowedGenerator) async {
var versions = new VersionQueue._(locked, allowedGenerator);
// If there isn't a locked version, it needs to be calculated before we can
// return.
if (locked == null) await versions._calculateAllowed();
return versions;
}
VersionQueue._(this._locked, this._allowedGenerator);
/// Tries to advance to the next possible version.
///
/// Returns `true` if it moved to a new version (which can be accessed from
  /// [current]). Returns `false` if there are no more versions.
Future<bool> advance() async {
// Any failure was the fault of the previous version, not necessarily the
// new one.
_hasFailed = false;
// If we have a locked version, consume it first.
if (_locked != null) {
      // Advancing past the locked version, so we need to load the others now
// so that [current] is available.
await _calculateAllowed();
_locked = null;
} else {
// Move to the next allowed version.
_allowed.removeFirst();
}
return _allowed.isNotEmpty;
}
/// Marks the selected version as being directly or indirectly responsible
/// for a solve failure.
void fail() {
_hasFailed = true;
}
/// Determines the list of allowed versions matching its constraint and places
/// them in [_allowed].
Future _calculateAllowed() async {
var allowed = await _allowedGenerator();
_allowed = new Queue<PackageId>.from(allowed);
}
}
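
The key property of [VersionQueue] is laziness: the locked version is served first, and the full version list is only generated once that version has been consumed. A condensed synchronous sketch of that behavior (the `LazyQueue` class and its data are invented for illustration):

import 'dart:collection';

// Serves the locked value first and only invokes the (potentially expensive)
// generator once the locked value has been advanced past.
class LazyQueue {
  int? _locked;
  final List<int> Function() _generate;
  Queue<int>? _rest;

  LazyQueue(this._locked, this._generate);

  int get current => _locked ?? _rest!.first;

  bool advance() {
    if (_locked != null) {
      _rest = Queue.of(_generate()); // generator runs here, not earlier
      _locked = null;
    } else {
      _rest!.removeFirst();
    }
    return _rest!.isNotEmpty;
  }
}

void main() {
  final queue = LazyQueue(3, () {
    print('generator called');
    return [5, 4, 2];
  });
  print(queue.current); // 3 (locked; generator not called yet)
  queue.advance();      // prints "generator called"
  print(queue.current); // 5
}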


@@ -1,133 +0,0 @@
// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.solver.version_selection;
import 'dart:async';
import 'dart:collection';
import 'package:pub_semver/pub_semver.dart';
import '../package.dart';
import 'backtracking_solver.dart';
import 'unselected_package_queue.dart';
import 'version_solver.dart';
/// A representation of the version solver's current selected versions.
///
/// This is used to track the joint constraints from the selected packages on
/// other packages, as well as the set of packages that are depended on but have
/// yet to be selected.
///
/// A [VersionSelection] is always internally consistent. That is, all selected
/// packages are compatible with dependencies on those packages, no constraints
/// are empty, and dependencies agree on sources and descriptions. However, the
/// selection itself doesn't ensure this; that's up to the [BacktrackingSolver]
/// that controls it.
class VersionSelection {
/// The version solver.
final BacktrackingSolver _solver;
/// The packages that have been selected, in the order they were selected.
List<PackageId> get ids => new UnmodifiableListView<PackageId>(_ids);
final _ids = <PackageId>[];
/// Tracks all of the dependencies on a given package.
///
/// Each key is a package. Its value is the list of dependencies placed on
/// that package, in the order that their dependers appear in [ids].
final _dependencies = new Map<String, List<Dependency>>();
/// A priority queue of packages that are depended on but have yet to be
/// selected.
final UnselectedPackageQueue _unselected;
/// The next package for which some version should be selected by the solver.
PackageRef get nextUnselected =>
_unselected.isEmpty ? null : _unselected.first;
VersionSelection(BacktrackingSolver solver)
: _solver = solver,
_unselected = new UnselectedPackageQueue(solver);
/// Adds [id] to the selection.
Future select(PackageId id) async {
_unselected.remove(id.toRef());
_ids.add(id);
// TODO(nweiz): Use a real for loop when issue 23394 is fixed.
// Add all of [id]'s dependencies to [_dependencies], as well as to
// [_unselected] if necessary.
await Future.forEach(await _solver.depsFor(id), (dep) async {
var deps = getDependenciesOn(dep.name);
deps.add(new Dependency(id, dep));
// If this is the first dependency on this package, add it to the
// unselected queue.
if (deps.length == 1 && dep.name != _solver.root.name) {
await _unselected.add(dep.toRef());
}
});
}
/// Removes the most recently selected package from the selection.
Future unselectLast() async {
var id = _ids.removeLast();
await _unselected.add(id.toRef());
for (var dep in await _solver.depsFor(id)) {
var deps = getDependenciesOn(dep.name);
deps.removeLast();
if (deps.isEmpty) {
_unselected.remove(dep.toRef());
}
}
}
/// Returns the selected id for [packageName].
PackageId selected(String packageName) =>
ids.firstWhere((id) => id.name == packageName, orElse: () => null);
/// Gets a "required" reference to the package [name].
///
/// This is the first non-root dependency on that package. All dependencies
/// on a package must agree on source and description, except for references
/// to the root package. This will return a reference to that "canonical"
/// source and description, or `null` if there is no required reference yet.
///
/// This is required because you may have a circular dependency back onto the
/// root package. That second dependency won't be a root dependency and it's
/// *that* one that other dependencies need to agree on. In other words, you
/// can have a bunch of dependencies back onto the root package as long as
/// they all agree with each other.
Dependency getRequiredDependency(String name) {
return getDependenciesOn(name)
.firstWhere((dep) => !dep.dep.isRoot, orElse: () => null);
}
/// Gets the combined [VersionConstraint] currently placed on package [name].
VersionConstraint getConstraint(String name) {
var constraint = getDependenciesOn(name)
.map((dep) => dep.dep.constraint)
.fold(VersionConstraint.any, (a, b) => a.intersect(b));
// The caller should ensure that no version gets added with conflicting
// constraints.
assert(!constraint.isEmpty);
return constraint;
}
/// Returns a string description of the dependencies on [name].
String describeDependencies(String name) =>
getDependenciesOn(name).map((dep) => " $dep").join('\n');
/// Gets the list of known dependencies on package [name].
///
/// Creates an empty list if needed.
List<Dependency> getDependenciesOn(String name) =>
_dependencies.putIfAbsent(name, () => <Dependency>[]);
}
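
`getConstraint` above folds every dependency's constraint on a package into one joint constraint with `intersect`. A small standalone sketch of that fold, assuming the `pub_semver` package's `VersionConstraint`/`Version` API (the constraints below are invented):

import 'package:pub_semver/pub_semver.dart';

void main() {
  final constraints = [
    VersionConstraint.parse('>=1.0.0 <3.0.0'),
    VersionConstraint.parse('>=1.2.0'),
    VersionConstraint.parse('<2.5.0'),
  ];

  // Starting from "any", intersecting each dependency's constraint yields the
  // joint constraint every selected version must satisfy.
  final joint = constraints.fold<VersionConstraint>(
      VersionConstraint.any, (a, b) => a.intersect(b));

  print(joint); // the intersection, e.g. >=1.2.0 <2.5.0
  print(joint.allows(Version.parse('2.0.0'))); // true
  print(joint.allows(Version.parse('2.6.0'))); // false
}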


@@ -1,487 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.solver.version_solver;
import 'dart:async';
import "dart:convert";
import 'package:pub_semver/pub_semver.dart';
import 'package:stack_trace/stack_trace.dart';
import '../exceptions.dart';
import '../lock_file.dart';
import '../log.dart' as log;
import '../package.dart';
import '../pubspec.dart';
import '../source_registry.dart';
import '../utils.dart';
import 'backtracking_solver.dart';
import 'solve_report.dart';
/// Attempts to select the best concrete versions for all of the transitive
/// dependencies of [root] taking into account all of the [VersionConstraint]s
/// that those dependencies place on each other and the requirements imposed by
/// [lockFile].
///
/// If [useLatest] is given, then only the latest versions of the referenced
/// packages will be used. This is for forcing an upgrade to one or more
/// packages.
///
/// If [type] is [SolveType.UPGRADE] and [useLatest] is empty, the contents of
/// [lockFile] are ignored.
Future<SolveResult> resolveVersions(SolveType type, SourceRegistry sources,
Package root, {LockFile lockFile, List<String> useLatest}) {
if (lockFile == null) lockFile = new LockFile.empty();
if (useLatest == null) useLatest = [];
return log.progress('Resolving dependencies', () {
return new BacktrackingSolver(type, sources, root, lockFile, useLatest)
.solve();
});
}
/// The result of a version resolution.
class SolveResult {
/// Whether the solver found a complete solution or failed.
bool get succeeded => error == null;
/// The list of concrete package versions that were selected for each package
/// reachable from the root, or `null` if the solver failed.
final List<PackageId> packages;
/// The dependency overrides that were used in the solution.
final List<PackageDep> overrides;
/// A map from package names to the pubspecs for the versions of those
/// packages that were installed, or `null` if the solver failed.
final Map<String, Pubspec> pubspecs;
/// The available versions of all selected packages from their source.
///
/// Will be empty if the solve failed. An entry here may not include the full
/// list of versions available if the given package was locked and did not
/// need to be unlocked during the solve.
final Map<String, List<Version>> availableVersions;
/// The error that prevented the solver from finding a solution or `null` if
/// it was successful.
final SolveFailure error;
/// The number of solutions that were attempted before either finding a
/// successful solution or exhausting all options.
///
/// In other words, one more than the number of times it had to backtrack
/// because it found an invalid solution.
final int attemptedSolutions;
final SourceRegistry _sources;
final Package _root;
final LockFile _previousLockFile;
/// Returns the names of all packages that were changed.
///
/// This includes packages that were added or removed.
Set<String> get changedPackages {
if (packages == null) return null;
var changed = packages
.where((id) =>
!_sources.idsEqual(_previousLockFile.packages[id.name], id))
.map((id) => id.name).toSet();
return changed.union(_previousLockFile.packages.keys
.where((package) => !availableVersions.containsKey(package))
.toSet());
}
SolveResult.success(this._sources, this._root, this._previousLockFile,
this.packages, this.overrides, this.pubspecs, this.availableVersions,
this.attemptedSolutions)
: error = null;
SolveResult.failure(this._sources, this._root, this._previousLockFile,
this.overrides, this.error, this.attemptedSolutions)
: this.packages = null,
this.pubspecs = null,
this.availableVersions = {};
/// Displays a report of what changes were made to the lockfile.
///
/// [type] is the type of version resolution that was run.
void showReport(SolveType type) {
new SolveReport(type, _sources, _root, _previousLockFile, this).show();
}
/// Displays a one-line message summarizing what changes were made (or would
/// be made) to the lockfile.
///
/// [type] is the type of version resolution that was run.
void summarizeChanges(SolveType type, {bool dryRun: false}) {
new SolveReport(type, _sources, _root, _previousLockFile, this)
.summarize(dryRun: dryRun);
}
String toString() {
if (!succeeded) {
return 'Failed to solve after $attemptedSolutions attempts:\n'
'$error';
}
return 'Took $attemptedSolutions tries to resolve to\n'
'- ${packages.join("\n- ")}';
}
}
/// Maintains a cache of previously-requested data: pubspecs and version lists.
///
/// Used to avoid requesting the same pubspec from the server repeatedly.
class PubspecCache {
final SourceRegistry _sources;
/// The already-requested cached pubspec lists.
final _versions = new Map<PackageRef, List<PackageId>>();
/// The errors from failed version list requests.
final _versionErrors = new Map<PackageRef, Pair<Object, Chain>>();
/// The already-requested cached pubspecs.
final _pubspecs = new Map<PackageId, Pubspec>();
// TODO(nweiz): Currently, if [getCachedPubspec] returns pubspecs cached via
// [getVersions], the "complex backtrack" test case in version_solver_test
// fails. Fix that. See also [BacktrackingSolver._getTransitiveDependers].
/// The set of package ids for which [getPubspec] has been explicitly called.
final _explicitlyCached = new Set<PackageId>();
/// The type of version resolution that was run.
final SolveType _type;
/// The number of times a version list was requested and it wasn't cached and
/// had to be requested from the source.
int _versionCacheMisses = 0;
/// The number of times a version list was requested and the cached version
/// was returned.
int _versionCacheHits = 0;
/// The number of times a pubspec was requested and it wasn't cached and had
/// to be requested from the source.
int _pubspecCacheMisses = 0;
/// The number of times a pubspec was requested and the cached version was
/// returned.
int _pubspecCacheHits = 0;
PubspecCache(this._type, this._sources);
/// Caches [pubspec] as the [Pubspec] for the package identified by [id].
void cache(PackageId id, Pubspec pubspec) {
_pubspecs[id] = pubspec;
}
/// Loads the pubspec for the package identified by [id].
Future<Pubspec> getPubspec(PackageId id) async {
_explicitlyCached.add(id);
// Complete immediately if it's already cached.
if (_pubspecs.containsKey(id)) {
_pubspecCacheHits++;
return _pubspecs[id];
}
_pubspecCacheMisses++;
var source = _sources[id.source];
var pubspec = await source.describe(id);
_pubspecs[id] = pubspec;
return pubspec;
}
/// Returns the previously cached pubspec for the package identified by [id]
/// or returns `null` if not in the cache.
Pubspec getCachedPubspec(PackageId id) =>
_explicitlyCached.contains(id) ? _pubspecs[id] : null;
/// Gets the list of versions for [package].
///
/// Packages are sorted in descending version order with all "stable"
/// versions (i.e. ones without a prerelease suffix) before pre-release
/// versions. This ensures that the solver prefers stable packages over
/// unstable ones.
Future<List<PackageId>> getVersions(PackageRef package) async {
if (package.isRoot) {
throw new StateError("Cannot get versions for root package $package.");
}
if (package.isMagic) return [new PackageId.magic(package.name)];
// See if we have it cached.
var versions = _versions[package];
if (versions != null) {
_versionCacheHits++;
return versions;
}
// See if we cached a failure.
var error = _versionErrors[package];
if (error != null) {
_versionCacheHits++;
await new Future.error(error.first, error.last);
}
_versionCacheMisses++;
var source = _sources[package.source];
var pubspecs;
try {
pubspecs = await source.getVersions(package.name, package.description);
} catch (error, stackTrace) {
// If an error occurs, cache that too. We only want to do one request
// for any given package, successful or not.
var chain = new Chain.forTrace(stackTrace);
log.solver("Could not get versions for $package:\n$error\n\n" +
chain.terse.toString());
_versionErrors[package] = new Pair(error, chain);
throw error;
}
// Sort by priority so we try preferred versions first.
pubspecs.sort((pubspec1, pubspec2) {
return _type == SolveType.DOWNGRADE
? Version.antiprioritize(pubspec1.version, pubspec2.version)
: Version.prioritize(pubspec1.version, pubspec2.version);
});
var ids = pubspecs.reversed.map((pubspec) {
var id = package.atVersion(pubspec.version);
// Eagerly cache the pubspec now since we have it.
_pubspecs[id] = pubspec;
return id;
}).toList();
_versions[package] = ids;
return ids;
}
/// Returns the previously cached list of versions for the package identified
/// by [package] or returns `null` if not in the cache.
List<PackageId> getCachedVersions(PackageRef package) => _versions[package];
/// Returns a user-friendly output string describing metrics of the solve.
String describeResults() {
var results = '''- Requested $_versionCacheMisses version lists
- Looked up $_versionCacheHits cached version lists
- Requested $_pubspecCacheMisses pubspecs
- Looked up $_pubspecCacheHits cached pubspecs
''';
// Uncomment this to dump the visited package graph to JSON.
//results += _debugWritePackageGraph();
return results;
}
}
/// A reference from a depending package to a package that it depends on.
class Dependency {
/// The package that has this dependency.
final PackageId depender;
/// The package being depended on.
final PackageDep dep;
Dependency(this.depender, this.dep);
String toString() => '$depender -> $dep';
}
/// An enum for types of version resolution.
class SolveType {
/// As few changes to the lockfile as possible to be consistent with the
/// pubspec.
static const GET = const SolveType._("get");
/// Upgrade all packages or specific packages to the highest versions
/// possible, regardless of the lockfile.
static const UPGRADE = const SolveType._("upgrade");
/// Downgrade all packages or specific packages to the lowest versions
/// possible, regardless of the lockfile.
static const DOWNGRADE = const SolveType._("downgrade");
final String _name;
const SolveType._(this._name);
String toString() => _name;
}
/// Base class for all failures that can occur while trying to resolve versions.
abstract class SolveFailure implements ApplicationException {
/// The name of the package whose version could not be solved.
///
/// Will be `null` if the failure is not specific to one package.
final String package;
/// The known dependencies on [package] at the time of the failure.
///
/// Will be an empty collection if the failure is not specific to one package.
final Iterable<Dependency> dependencies;
String get message => toString();
/// A message describing the specific kind of solve failure.
String get _message {
throw new UnimplementedError("Must override _message or toString().");
}
SolveFailure(this.package, Iterable<Dependency> dependencies)
: dependencies = dependencies != null ? dependencies : <Dependency>[];
String toString() {
if (dependencies.isEmpty) return _message;
var buffer = new StringBuffer();
buffer.write("$_message:");
var sorted = dependencies.toList();
sorted.sort((a, b) => a.depender.name.compareTo(b.depender.name));
for (var dep in sorted) {
buffer.writeln();
buffer.write("- ${log.bold(dep.depender.name)}");
if (!dep.depender.isMagic && !dep.depender.isRoot) {
buffer.write(" ${dep.depender.version}");
}
buffer.write(" ${_describeDependency(dep.dep)}");
}
return buffer.toString();
}
/// Describes a dependency's reference in the output message.
///
/// Override this to highlight which aspect of [dep] led to the failure.
String _describeDependency(PackageDep dep) =>
"depends on version ${dep.constraint}";
}
/// Exception thrown when the current SDK's version does not match a package's
/// constraint on it.
class BadSdkVersionException extends SolveFailure {
final String _message;
BadSdkVersionException(String package, String message)
: super(package, null),
_message = message;
}
/// Exception thrown when the [VersionConstraint] used to match a package is
/// valid (i.e. non-empty), but there are no available versions of the package
/// that fit that constraint.
class NoVersionException extends SolveFailure {
final VersionConstraint constraint;
/// The last selected version of the package that failed to meet the new
/// constraint.
///
/// This will be `null` when the failure occurred because there are no
/// versions of the package *at all* that match the constraint. It will be
/// non-`null` when a version was selected, but then the solver tightened a
/// constraint such that that version was no longer allowed.
final Version version;
NoVersionException(String package, this.version, this.constraint,
Iterable<Dependency> dependencies)
: super(package, dependencies);
String get _message {
if (version == null) {
return "Package $package has no versions that match $constraint derived "
"from";
}
return "Package $package $version does not match $constraint derived from";
}
}
// TODO(rnystrom): Report the list of depending packages and their constraints.
/// Exception thrown when the most recent version of [package] must be selected,
/// but doesn't match the [VersionConstraint] imposed on the package.
class CouldNotUpgradeException extends SolveFailure {
final VersionConstraint constraint;
final Version best;
CouldNotUpgradeException(String package, this.constraint, this.best)
: super(package, null);
String get _message =>
"The latest version of $package, $best, does not match $constraint.";
}
/// Exception thrown when the [VersionConstraint] used to match a package is
/// the empty set: in other words, multiple packages depend on it and have
/// conflicting constraints that have no overlap.
class DisjointConstraintException extends SolveFailure {
DisjointConstraintException(String package, Iterable<Dependency> dependencies)
: super(package, dependencies);
String get _message => "Incompatible version constraints on $package";
}
/// Exception thrown when two packages with the same name but different sources
/// are depended upon.
class SourceMismatchException extends SolveFailure {
String get _message => "Incompatible dependencies on $package";
SourceMismatchException(String package, Iterable<Dependency> dependencies)
: super(package, dependencies);
String _describeDependency(PackageDep dep) =>
"depends on it from source ${dep.source}";
}
/// Exception thrown when a dependency on an unknown source name is found.
class UnknownSourceException extends SolveFailure {
UnknownSourceException(String package, Iterable<Dependency> dependencies)
: super(package, dependencies);
String toString() {
var dep = dependencies.single;
return 'Package ${dep.depender.name} depends on ${dep.dep.name} from '
'unknown source "${dep.dep.source}".';
}
}
/// Exception thrown when two packages with the same name and source but
/// different descriptions are depended upon.
class DescriptionMismatchException extends SolveFailure {
String get _message => "Incompatible dependencies on $package";
DescriptionMismatchException(String package,
Iterable<Dependency> dependencies)
: super(package, dependencies);
String _describeDependency(PackageDep dep) {
// TODO(nweiz): Dump descriptions to YAML when that's supported.
return "depends on it with description ${JSON.encode(dep.description)}";
}
}
/// Exception thrown when a dependency could not be found in its source.
///
/// Unlike [PackageNotFoundException], this includes information about the
/// dependent packages requesting the missing one.
class DependencyNotFoundException extends SolveFailure {
final PackageNotFoundException _innerException;
String get _message => "${_innerException.message}\nDepended on by";
DependencyNotFoundException(String package, this._innerException,
Iterable<Dependency> dependencies)
: super(package, dependencies);
/// The failure isn't because of the version or description of the package;
/// it's the package itself that can't be found, so just show the name and no
/// descriptive details.
String _describeDependency(PackageDep dep) => "";
}

View file

@ -1,187 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source;
import 'dart:async';
import 'package:pub_semver/pub_semver.dart';
import 'package.dart';
import 'pubspec.dart';
import 'system_cache.dart';
/// A source from which to get packages.
///
/// Each source has many packages that it looks up using [PackageId]s. Sources
/// that inherit this directly (currently just [PathSource]) are *uncached*
/// sources. They deliver a package directly to the package that depends on it.
///
/// Other sources are *cached* sources. These extend [CachedSource]. When a
/// package needs a dependency from a cached source, it is first installed in
/// the [SystemCache] and then acquired from there.
abstract class Source {
/// The name of the source.
///
/// Should be lower-case, suitable for use in a filename, and unique across
/// all sources.
String get name;
/// Whether this source can choose between multiple versions of the same
/// package during version solving.
///
/// Defaults to `false`.
final bool hasMultipleVersions = false;
/// Whether or not this source is the default source.
bool get isDefault => systemCache.sources.defaultSource == this;
/// The system cache with which this source is registered.
SystemCache get systemCache {
assert(_systemCache != null);
return _systemCache;
}
/// The system cache variable.
///
/// Set by [_bind].
SystemCache _systemCache;
/// Records the system cache to which this source belongs.
///
/// This should only be called once for each source, by
/// [SystemCache.register]. It should not be overridden by base classes.
void bind(SystemCache systemCache) {
assert(_systemCache == null);
this._systemCache = systemCache;
}
/// Get the pubspecs of all versions that exist for the package described by
/// [description].
///
/// [name] is the expected name of the package.
///
/// Note that this does *not* require the packages to be downloaded locally,
/// which is the point. This is used during version resolution to determine
/// which package versions are available to be downloaded (or already
/// downloaded).
///
/// By default, this assumes that each description has a single version and
/// uses [describe] to get that version.
Future<List<Pubspec>> getVersions(String name, description) async {
var id = new PackageId(name, this.name, Version.none, description);
return [await describe(id)];
}
/// Loads the (possibly remote) pubspec for the package version identified by
/// [id].
///
/// This may be called for packages that have not yet been downloaded during
/// the version resolution process.
///
/// Sources should not override this. Instead, they implement [doDescribe].
Future<Pubspec> describe(PackageId id) {
if (id.isRoot) throw new ArgumentError("Cannot describe the root package.");
if (id.source != name) {
throw new ArgumentError("Package $id does not use source $name.");
}
// Delegate to the overridden one.
return doDescribe(id);
}
/// Loads the (possibly remote) pubspec for the package version identified by
/// [id].
///
/// This may be called for packages that have not yet been downloaded during
/// the version resolution process.
///
/// This method is effectively protected: subclasses must implement it, but
/// external code should not call this. Instead, call [describe].
Future<Pubspec> doDescribe(PackageId id);
/// Ensures [id] is available locally and creates a symlink at [symlink]
/// pointing to it.
Future get(PackageId id, String symlink);
/// Returns the directory where this package can (or could) be found locally.
///
/// If the source is cached, this will be a path in the system cache. In that
/// case, this will return a directory even if the package has not been
/// installed into the cache yet.
Future<String> getDirectory(PackageId id);
/// Gives the source a chance to interpret and validate the description for
/// a package coming from this source.
///
/// When a [Pubspec] or [LockFile] is parsed, it reads in the description for
/// each dependency. It is up to the dependency's [Source] to determine how
/// that should be interpreted. This will be called during parsing to validate
/// that the given [description] is well-formed according to this source, and
/// to give the source a chance to canonicalize the description.
///
/// [containingPath] is the path to the local file (pubspec or lockfile)
/// where this description appears. It may be `null` if the description is
/// coming from some in-memory source (such as pulling down a pubspec from
/// pub.dartlang.org).
///
/// It should return a (possibly modified) valid description, or throw a
/// [FormatException] if not valid.
///
/// [fromLockFile] is true when the description comes from a [LockFile], to
/// allow the source to use lockfile-specific descriptions via [resolveId].
dynamic parseDescription(String containingPath, description,
{bool fromLockFile: false});
/// When a [LockFile] is serialized, it uses this method to get the
/// [description] in the right format.
///
/// [containingPath] is the containing directory of the root package.
dynamic serializeDescription(String containingPath, description) {
return description;
}
/// When a package [description] is shown to the user, this is called to
/// convert it into a human-friendly form.
///
/// By default, it just converts the description to a string, but sources
/// may customize this. [containingPath] is the containing directory of the
/// root package.
String formatDescription(String containingPath, description) {
return description.toString();
}
/// Returns whether or not [description1] describes the same package as
/// [description2] for this source.
///
/// This method should be light-weight. It doesn't need to validate that
/// either package exists.
bool descriptionsEqual(description1, description2);
/// Resolves [id] to a possibly more precise [PackageId] that will uniquely
/// identify a package regardless of when the package is requested.
///
/// For some sources, [PackageId]s can point to different chunks of code at
/// different times. This takes such an [id] and returns a future that
/// completes to a [PackageId] that will uniquely specify a single chunk of
/// code forever.
///
/// For example, [GitSource] might take an [id] with description
/// `http://github.com/dart-lang/some-lib.git` and return an id with a
/// description that includes the current commit of the Git repository.
///
/// Pub calls this after getting a package, so the source can use the local
/// package to determine information about the resolved id.
///
/// The returned [PackageId] may have a description field that's invalid
/// according to [parseDescription], although it must still be serializable
/// to JSON and YAML. It must also be equal to [id] according to
/// [descriptionsEqual].
///
/// By default, this just returns [id].
Future<PackageId> resolveId(PackageId id) => new Future.value(id);
/// Returns the source's name.
String toString() => name;
}
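// A minimal sketch of an uncached source, to show which members a subclass
// must provide. The "example" source and its path-like description are
// hypothetical; see [PathSource] for a real implementation.
//
//     class ExampleSource extends Source {
//       final name = "example";
//       Future<Pubspec> doDescribe(PackageId id) async =>
//           new Pubspec.load(id.description, systemCache.sources,
//               expectedName: id.name);
//       Future get(PackageId id, String symlink) async {
//         createPackageSymlink(id.name, id.description, symlink);
//       }
//       Future<String> getDirectory(PackageId id) =>
//           new Future.value(id.description);
//       bool descriptionsEqual(description1, description2) =>
//           description1 == description2;
//       dynamic parseDescription(String containingPath, description,
//           {bool fromLockFile: false}) => description;
//     }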

View file

@ -1,73 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source.cached;
import 'dart:async';
import 'package:path/path.dart' as path;
import '../io.dart';
import '../package.dart';
import '../pubspec.dart';
import '../source.dart';
import '../utils.dart';
/// Base class for a [Source] that installs packages into pub's [SystemCache].
///
/// A source should be cached if it requires network access to retrieve
/// packages or the package needs to be "frozen" at the point in time that it's
/// installed. (For example, Git packages are cached because installing from
/// the same repo over time may yield different commits.)
abstract class CachedSource extends Source {
/// The root directory of this source's cache within the system cache.
///
/// This shouldn't be overridden by subclasses.
String get systemCacheRoot => path.join(systemCache.rootDir, name);
/// If [id] is already in the system cache, just loads it from there.
///
/// Otherwise, defers to the subclass.
Future<Pubspec> doDescribe(PackageId id) {
return getDirectory(id).then((packageDir) {
if (fileExists(path.join(packageDir, "pubspec.yaml"))) {
return new Pubspec.load(packageDir, systemCache.sources,
expectedName: id.name);
}
return describeUncached(id);
});
}
/// Loads the (possibly remote) pubspec for the package version identified by
/// [id].
///
/// This will only be called for packages that have not yet been installed in
/// the system cache.
Future<Pubspec> describeUncached(PackageId id);
Future get(PackageId id, String symlink) {
return downloadToSystemCache(id).then((pkg) {
createPackageSymlink(id.name, pkg.dir, symlink);
});
}
/// Determines if the package with [id] is already downloaded to the system
/// cache.
Future<bool> isInSystemCache(PackageId id) =>
getDirectory(id).then(dirExists);
/// Downloads the package identified by [id] to the system cache.
Future<Package> downloadToSystemCache(PackageId id);
/// Returns the [Package]s that have been downloaded to the system cache.
List<Package> getCachedPackages();
/// Reinstalls all packages that have been previously installed into the
/// system cache by this source.
///
/// Returns a [Pair] whose first element is the number of packages
/// successfully repaired and the second is the number of failures.
Future<Pair<int, int>> repairCachedPackages();
}

View file

@ -1,340 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source.git;
import 'dart:async';
import 'package:path/path.dart' as path;
import '../git.dart' as git;
import '../io.dart';
import '../log.dart' as log;
import '../package.dart';
import '../pubspec.dart';
import '../utils.dart';
import 'cached.dart';
/// A package source that gets packages from Git repos.
class GitSource extends CachedSource {
/// Given a valid git package description, returns the URL of the repository
/// it pulls from.
static String urlFromDescription(description) => description["url"];
final name = "git";
/// The paths to the canonical clones of repositories for which "git fetch"
/// has already been run during this run of pub.
final _updatedRepos = new Set<String>();
/// Given a Git repo that contains a pub package, gets the name of the pub
/// package.
Future<String> getPackageNameFromRepo(String repo) {
// Clone the repo to a temp directory.
return withTempDir((tempDir) {
return _clone(repo, tempDir, shallow: true).then((_) {
var pubspec = new Pubspec.load(tempDir, systemCache.sources);
return pubspec.name;
});
});
}
/// Since we don't have an easy way to read from a remote Git repo, this
/// just installs [id] into the system cache, then describes it from there.
Future<Pubspec> describeUncached(PackageId id) {
return downloadToSystemCache(id).then((package) => package.pubspec);
}
/// Clones a Git repo to the local filesystem.
///
/// The Git cache directory is a little idiosyncratic. At the top level, it
/// contains a directory for each commit of each repository, named `<package
/// name>-<commit hash>`. These are the canonical package directories that are
/// linked to from the `packages/` directory.
///
/// In addition, the Git system cache contains a subdirectory named `cache/`
/// which contains a directory for each separate repository URL, named
/// `<package name>-<url hash>`. These are used to check out the repository
/// itself; each of the commit-specific directories are clones of a directory
/// in `cache/`.
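///
/// Roughly (hashes shortened, package name made up for illustration):
///
///     <system cache>/git/
///       foo-9116f02c.../   # checkout of "foo" pinned to one commit
///       foo-0e1f3c45.../   # checkout of "foo" pinned to another commit
///       cache/
///         foo-a3b1d7.../   # canonical clone of foo's repo, named by URL hash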
Future<Package> downloadToSystemCache(PackageId id) {
var revisionCachePath;
if (!git.isInstalled) {
fail("Cannot get ${id.name} from Git (${_getUrl(id)}).\n"
"Please ensure Git is correctly installed.");
}
ensureDir(path.join(systemCacheRoot, 'cache'));
return _ensureRevision(id).then((_) => getDirectory(id)).then((path) {
revisionCachePath = path;
if (entryExists(revisionCachePath)) return null;
return _clone(_repoCachePath(id), revisionCachePath, mirror: false);
}).then((_) {
var ref = _getEffectiveRef(id);
if (ref == 'HEAD') return null;
return _checkOut(revisionCachePath, ref);
}).then((_) {
return new Package.load(id.name, revisionCachePath, systemCache.sources);
});
}
/// Returns the path to the revision-specific cache of [id].
Future<String> getDirectory(PackageId id) {
return _ensureRevision(id).then((rev) {
var revisionCacheName = '${id.name}-$rev';
return path.join(systemCacheRoot, revisionCacheName);
});
}
/// Ensures [description] is a Git URL.
dynamic parseDescription(String containingPath, description,
{bool fromLockFile: false}) {
// TODO(rnystrom): Handle git URLs that are relative file paths (#8570).
// TODO(rnystrom): Now that this function can modify the description, it
// may as well canonicalize it to a map so that other code in the source
// can assume that.
// A single string is assumed to be a Git URL.
if (description is String) return description;
if (description is! Map || !description.containsKey('url')) {
throw new FormatException("The description must be a Git URL or a map "
"with a 'url' key.");
}
var parsed = new Map.from(description);
parsed.remove('url');
parsed.remove('ref');
if (fromLockFile) parsed.remove('resolved-ref');
if (!parsed.isEmpty) {
var plural = parsed.length > 1;
var keys = parsed.keys.join(', ');
throw new FormatException("Invalid key${plural ? 's' : ''}: $keys.");
}
return description;
}
/// If [description] has a resolved ref, print it out in short-form.
///
/// This helps distinguish different git commits with the same pubspec
/// version.
String formatDescription(String containingPath, description) {
if (description is Map && description.containsKey('resolved-ref')) {
return "${description['url']} at "
"${description['resolved-ref'].substring(0, 6)}";
} else {
return super.formatDescription(containingPath, description);
}
}
/// Two Git descriptions are equal if both their URLs and their refs are
/// equal.
bool descriptionsEqual(description1, description2) {
// TODO(nweiz): Do we really want to throw an error if you have two
// dependencies on some repo, one of which specifies a ref and one of which
// doesn't? If not, how do we handle that case in the version solver?
if (_getUrl(description1) != _getUrl(description2)) return false;
if (_getRef(description1) != _getRef(description2)) return false;
if (description1 is Map && description1.containsKey('resolved-ref') &&
description2 is Map && description2.containsKey('resolved-ref')) {
return description1['resolved-ref'] == description2['resolved-ref'];
}
return true;
}
/// Attaches a specific commit to [id] to disambiguate it.
Future<PackageId> resolveId(PackageId id) {
return _ensureRevision(id).then((revision) {
var description = {'url': _getUrl(id), 'ref': _getRef(id)};
description['resolved-ref'] = revision;
return new PackageId(id.name, name, id.version, description);
});
}
List<Package> getCachedPackages() {
// TODO(keertip): Implement getCachedPackages().
throw new UnimplementedError(
"The git source doesn't support listing its cached packages yet.");
}
/// Resets all cached packages back to the pristine state of the Git
/// repository at the revision they are pinned to.
Future<Pair<int, int>> repairCachedPackages() async {
if (!dirExists(systemCacheRoot)) return new Pair(0, 0);
var successes = 0;
var failures = 0;
var packages = listDir(systemCacheRoot)
.where((entry) => dirExists(path.join(entry, ".git")))
.map((packageDir) => new Package.load(null, packageDir,
systemCache.sources))
.toList();
// Note that there may be multiple packages with the same name and version
// (pinned to different commits). The sort order of those is unspecified.
packages.sort(Package.orderByNameAndVersion);
for (var package in packages) {
log.message("Resetting Git repository for "
"${log.bold(package.name)} ${package.version}...");
try {
// Remove all untracked files.
await git.run(["clean", "-d", "--force", "-x"],
workingDir: package.dir);
// Discard all changes to tracked files.
await git.run(["reset", "--hard", "HEAD"], workingDir: package.dir);
successes++;
} on git.GitException catch (error, stackTrace) {
log.error("Failed to reset ${log.bold(package.name)} "
"${package.version}. Error:\n$error");
log.fine(stackTrace);
failures++;
tryDeleteEntry(package.dir);
}
}
return new Pair(successes, failures);
}
/// Ensure that the canonical clone of the repository referred to by [id] (the
/// one in `<system cache>/git/cache`) exists and contains the revision
/// referred to by [id].
///
/// Returns a future that completes to the hash of the revision identified by
/// [id].
Future<String> _ensureRevision(PackageId id) {
return new Future.sync(() {
var path = _repoCachePath(id);
if (!entryExists(path)) {
return _clone(_getUrl(id), path, mirror: true)
.then((_) => _getRev(id));
}
// If [id] didn't come from a lockfile, it may be using a symbolic
// reference. We want to get the latest version of that reference.
var description = id.description;
if (description is! Map || !description.containsKey('resolved-ref')) {
return _updateRepoCache(id).then((_) => _getRev(id));
}
// If [id] did come from a lockfile, then we want to avoid running "git
// fetch" if possible to avoid networking time and errors. See if the
// revision exists in the repo cache before updating it.
return _getRev(id).catchError((error) {
if (error is! git.GitException) throw error;
return _updateRepoCache(id).then((_) => _getRev(id));
});
});
}
/// Runs "git fetch" in the canonical clone of the repository referred to by
/// [id].
///
/// This assumes that the canonical clone already exists.
Future _updateRepoCache(PackageId id) {
var path = _repoCachePath(id);
if (_updatedRepos.contains(path)) return new Future.value();
return git.run(["fetch"], workingDir: path).then((_) {
_updatedRepos.add(path);
});
}
/// Runs "git rev-list" in the canonical clone of the repository referred to
/// by [id] on the effective ref of [id].
///
/// This assumes that the canonical clone already exists.
Future<String> _getRev(PackageId id) {
return git.run(["rev-list", "--max-count=1", _getEffectiveRef(id)],
workingDir: _repoCachePath(id)).then((result) => result.first);
}
/// Clones the repo at the URI [from] to the path [to] on the local
/// filesystem.
///
/// If [mirror] is true, creates a bare, mirrored clone. This doesn't check
/// out the working tree, but instead makes the repository a local mirror of
/// the remote repository. See the manpage for `git clone` for more
/// information.
///
/// If [shallow] is true, creates a shallow clone that contains no history
/// for the repository.
Future _clone(String from, String to, {bool mirror: false,
bool shallow: false}) {
return new Future.sync(() {
// Git on Windows does not seem to automatically create the destination
// directory.
ensureDir(to);
var args = ["clone", from, to];
if (mirror) args.insert(1, "--mirror");
if (shallow) args.insertAll(1, ["--depth", "1"]);
return git.run(args);
}).then((result) => null);
}
/// Checks out the reference [ref] in [repoPath].
Future _checkOut(String repoPath, String ref) {
return git.run(["checkout", ref], workingDir: repoPath).then(
(result) => null);
}
/// Returns the path to the canonical clone of the repository referred to by
/// [id] (the one in `<system cache>/git/cache`).
String _repoCachePath(PackageId id) {
var repoCacheName = '${id.name}-${sha1(_getUrl(id))}';
return path.join(systemCacheRoot, 'cache', repoCacheName);
}
/// Returns the repository URL for [id].
///
/// [description] may be a description or a [PackageId].
String _getUrl(description) {
description = _getDescription(description);
if (description is String) return description;
return description['url'];
}
/// Returns the commit ref that should be checked out for [description].
///
/// This differs from [_getRef] in that it doesn't just return the ref in
/// [description]. It will return a sensible default if that ref doesn't
/// exist, and it will respect the "resolved-ref" parameter set by
/// [resolveId].
///
/// [description] may be a description or a [PackageId].
String _getEffectiveRef(description) {
description = _getDescription(description);
if (description is Map && description.containsKey('resolved-ref')) {
return description['resolved-ref'];
}
var ref = _getRef(description);
return ref == null ? 'HEAD' : ref;
}
/// Returns the commit ref for [description], or null if none is given.
///
/// [description] may be a description or a [PackageId].
String _getRef(description) {
description = _getDescription(description);
if (description is String) return null;
return description['ref'];
}
/// Returns [description] if it's a description, or [PackageId.description] if
/// it's a [PackageId].
_getDescription(description) {
if (description is PackageId) return description.description;
return description;
}
}

View file

@ -1,372 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source.hosted;
import 'dart:async';
import 'dart:io' as io;
import "dart:convert";
import 'package:http/http.dart' as http;
import 'package:path/path.dart' as path;
import 'package:pub_semver/pub_semver.dart';
import '../exceptions.dart';
import '../http.dart';
import '../io.dart';
import '../log.dart' as log;
import '../package.dart';
import '../pubspec.dart';
import '../utils.dart';
import 'cached.dart';
/// A package source that gets packages from a package hosting site that uses
/// the same API as pub.dartlang.org.
class HostedSource extends CachedSource {
final name = "hosted";
final hasMultipleVersions = true;
/// Gets the default URL for the package server for hosted dependencies.
static String get defaultUrl {
var url = io.Platform.environment["PUB_HOSTED_URL"];
if (url != null) return url;
return "https://pub.dartlang.org";
}
/// Downloads a list of all versions of a package that are available from the
/// site.
Future<List<Pubspec>> getVersions(String name, description) async {
var url = _makeUrl(description,
(server, package) => "$server/api/packages/$package");
log.io("Get versions from $url.");
var body;
try {
body = await httpClient.read(url, headers: PUB_API_HEADERS);
} catch (error, stackTrace) {
var parsed = _parseDescription(description);
_throwFriendlyError(error, stackTrace, parsed.first, parsed.last);
}
var doc = JSON.decode(body);
return doc['versions'].map((map) {
return new Pubspec.fromMap(
map['pubspec'], systemCache.sources,
expectedName: name, location: url);
}).toList();
}
/// Downloads and parses the pubspec for a specific version of a package that
/// is available from the site.
Future<Pubspec> describeUncached(PackageId id) async {
// Request it from the server.
var url = _makeVersionUrl(id, (server, package, version) =>
"$server/api/packages/$package/versions/$version");
log.io("Describe package at $url.");
var version;
try {
version = JSON.decode(
await httpClient.read(url, headers: PUB_API_HEADERS));
} catch (error, stackTrace) {
var parsed = _parseDescription(id.description);
_throwFriendlyError(error, stackTrace, id.name, parsed.last);
}
return new Pubspec.fromMap(
version['pubspec'], systemCache.sources,
expectedName: id.name, location: url);
}
/// Downloads the package identified by [id] to the system cache.
Future<Package> downloadToSystemCache(PackageId id) {
return isInSystemCache(id).then((inCache) {
// Already cached so don't download it.
if (inCache) return true;
var packageDir = _getDirectory(id);
ensureDir(path.dirname(packageDir));
var parsed = _parseDescription(id.description);
return _download(parsed.last, parsed.first, id.version, packageDir);
}).then((found) {
if (!found) fail('Package $id not found.');
return new Package.load(id.name, _getDirectory(id), systemCache.sources);
});
}
/// The system cache directory for the hosted source contains subdirectories
/// for each separate repository URL that's used on the system.
///
/// Each of these subdirectories then contains a subdirectory for each
/// package downloaded from that site.
Future<String> getDirectory(PackageId id) =>
new Future.value(_getDirectory(id));
String _getDirectory(PackageId id) {
var parsed = _parseDescription(id.description);
var dir = _urlToDirectory(parsed.last);
return path.join(systemCacheRoot, dir, "${parsed.first}-${id.version}");
}
String packageName(description) => _parseDescription(description).first;
bool descriptionsEqual(description1, description2) =>
_parseDescription(description1) == _parseDescription(description2);
/// Ensures that [description] is a valid hosted package description.
///
/// There are two valid formats. A plain string refers to a package with the
/// given name from the default host, while a map with keys "name" and "url"
/// refers to a package with the given name from the host at the given URL.
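///
/// For example (the package and server names are made up):
///
///     "transmogrify"
///     {"name": "transmogrify", "url": "https://example.com"}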
dynamic parseDescription(String containingPath, description,
{bool fromLockFile: false}) {
_parseDescription(description);
return description;
}
/// Re-downloads all packages that have been previously downloaded into the
/// system cache from any server.
Future<Pair<int, int>> repairCachedPackages() async {
if (!dirExists(systemCacheRoot)) return new Pair(0, 0);
var successes = 0;
var failures = 0;
for (var serverDir in listDir(systemCacheRoot)) {
var url = _directoryToUrl(path.basename(serverDir));
var packages = _getCachedPackagesInDirectory(path.basename(serverDir));
packages.sort(Package.orderByNameAndVersion);
for (var package in packages) {
try {
await _download(url, package.name, package.version, package.dir);
successes++;
} catch (error, stackTrace) {
failures++;
var message = "Failed to repair ${log.bold(package.name)} "
"${package.version}";
if (url != defaultUrl) message += " from $url";
log.error("$message. Error:\n$error");
log.fine(stackTrace);
tryDeleteEntry(package.dir);
}
}
}
return new Pair(successes, failures);
}
/// Gets all of the packages that have been downloaded into the system cache
/// from the default server.
List<Package> getCachedPackages() {
return _getCachedPackagesInDirectory(_urlToDirectory(defaultUrl));
}
/// Gets all of the packages that have been downloaded into the system cache
/// into [dir].
List<Package> _getCachedPackagesInDirectory(String dir) {
var cacheDir = path.join(systemCacheRoot, dir);
if (!dirExists(cacheDir)) return [];
return listDir(cacheDir)
.map((entry) => new Package.load(null, entry, systemCache.sources))
.toList();
}
/// Downloads package [package] at [version] from [server], and unpacks it
/// into [destPath].
Future<bool> _download(String server, String package, Version version,
String destPath) {
return new Future.sync(() {
var url = Uri.parse("$server/packages/$package/versions/$version.tar.gz");
log.io("Get package from $url.");
log.message('Downloading ${log.bold(package)} ${version}...');
// Download and extract the archive to a temp directory.
var tempDir = systemCache.createTempDir();
return httpClient.send(new http.Request("GET", url))
.then((response) => response.stream)
.then((stream) {
return timeout(extractTarGz(stream, tempDir), HTTP_TIMEOUT, url,
'downloading $url');
}).then((_) {
// Remove the existing directory if it exists. This will happen if
// we're forcing a download to repair the cache.
if (dirExists(destPath)) deleteEntry(destPath);
// Now that the get has succeeded, move it to the real location in the
// cache. This ensures that we don't leave half-busted ghost
// directories in the user's pub cache if a get fails.
renameDir(tempDir, destPath);
return true;
});
});
}
/// When an error occurs trying to read something about [package] from [url],
/// this tries to translate into a more user friendly error message.
///
/// Always throws an error, either the original one or a better one.
void _throwFriendlyError(error, StackTrace stackTrace, String package,
String url) {
if (error is PubHttpException &&
error.response.statusCode == 404) {
throw new PackageNotFoundException(
"Could not find package $package at $url.", error, stackTrace);
}
if (error is TimeoutException) {
fail("Timed out trying to find package $package at $url.",
error, stackTrace);
}
if (error is io.SocketException) {
fail("Got socket error trying to find package $package at $url.",
error, stackTrace);
}
// Otherwise re-throw the original exception.
throw error;
}
}
/// This is the modified hosted source used when pub get or upgrade are run
/// with "--offline".
///
/// This uses the system cache to get the list of available packages and does
/// no network access.
class OfflineHostedSource extends HostedSource {
/// Gets the list of all versions of [name] that are in the system cache.
Future<List<Pubspec>> getVersions(String name, description) async {
var parsed = _parseDescription(description);
var server = parsed.last;
log.io("Finding versions of $name in "
"$systemCacheRoot/${_urlToDirectory(server)}");
var versions = await _getCachedPackagesInDirectory(_urlToDirectory(server))
.where((package) => package.name == name)
.map((package) => package.pubspec)
.toList();
// If there are no versions in the cache, report a clearer error.
if (versions.isEmpty) fail("Could not find package $name in cache.");
return versions;
}
Future<bool> _download(String server, String package, Version version,
String destPath) {
// Since HostedSource is cached, this will only be called for uncached
// packages.
throw new UnsupportedError("Cannot download packages when offline.");
}
Future<Pubspec> doDescribeUncached(PackageId id) {
// [getVersions()] will only return packages that are already cached.
// [CachedSource] will only call [doDescribeUncached()] on a package after
// it has failed to find it in the cache, so this code should not be
// reached.
throw new UnsupportedError("Cannot describe packages when offline.");
}
}
/// Given a URL, returns a "normalized" string to be used as a directory name
/// for packages downloaded from the server at that URL.
///
/// This normalization strips off the scheme (which is presumed to be HTTP or
/// HTTPS) and *sort of* URL-encodes it. I say "sort of" because it does it
/// incorrectly: it uses the character's *decimal* ASCII value instead of hex.
///
/// This could cause an ambiguity since some characters get encoded as three
/// digits and others two. It's possible for one to be a prefix of the other.
/// In practice, the set of characters that are encoded don't happen to have
/// any collisions, so the encoding is reversible.
///
/// This behavior is a bug, but is being preserved for compatibility.
String _urlToDirectory(String url) {
// Normalize all loopback URLs to "localhost".
url = url.replaceAllMapped(new RegExp(r"^https?://(127\.0\.0\.1|\[::1\])?"),
(match) => match[1] == null ? '' : 'localhost');
return replace(url, new RegExp(r'[<>:"\\/|?*%]'),
(match) => '%${match[0].codeUnitAt(0)}');
}
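// A couple of worked examples of the mapping above (illustrative URLs):
//
//     _urlToDirectory("https://pub.dartlang.org") => "pub.dartlang.org"
//     _urlToDirectory("http://127.0.0.1:8080")    => "localhost%588080"
//
// In the second case the loopback host is normalized to "localhost" and ":"
// is replaced by "%" followed by its decimal code unit, 58.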
/// Given a directory name in the system cache, returns the URL of the server
/// whose packages it contains.
///
/// See [_urlToDirectory] for details on the mapping. Note that because the
/// directory name does not preserve the scheme, this has to guess at it. It
/// chooses "http" for loopback URLs (mainly to support the pub tests) and
/// "https" for all others.
String _directoryToUrl(String url) {
// Decode the pseudo-URL-encoded characters.
var chars = '<>:"\\/|?*%';
for (var i = 0; i < chars.length; i++) {
var c = chars.substring(i, i + 1);
url = url.replaceAll("%${c.codeUnitAt(0)}", c);
}
// Figure out the scheme.
var scheme = "https";
// See if it's a loopback IP address.
if (isLoopback(url.replaceAll(new RegExp(":.*"), ""))) scheme = "http";
return "$scheme://$url";
}
/// Parses [description] into its server and package name components, then
/// converts that to a Uri given [pattern].
///
/// Ensures the package name is properly URL encoded.
Uri _makeUrl(description, String pattern(String server, String package)) {
var parsed = _parseDescription(description);
var server = parsed.last;
var package = Uri.encodeComponent(parsed.first);
return Uri.parse(pattern(server, package));
}
/// Parses [id] into its server, package name, and version components, then
/// converts that to a Uri given [pattern].
///
/// Ensures the package name is properly URL encoded.
Uri _makeVersionUrl(PackageId id,
String pattern(String server, String package, String version)) {
var parsed = _parseDescription(id.description);
var server = parsed.last;
var package = Uri.encodeComponent(parsed.first);
var version = Uri.encodeComponent(id.version.toString());
return Uri.parse(pattern(server, package, version));
}
/// Parses the description for a package.
///
/// If the package parses correctly, this returns a (name, url) pair. If not,
/// this throws a descriptive FormatException.
Pair<String, String> _parseDescription(description) {
if (description is String) {
return new Pair<String, String>(description, HostedSource.defaultUrl);
}
if (description is! Map) {
throw new FormatException(
"The description must be a package name or map.");
}
if (!description.containsKey("name")) {
throw new FormatException(
"The description map must contain a 'name' key.");
}
var name = description["name"];
if (name is! String) {
throw new FormatException("The 'name' key must have a string value.");
}
var url = description["url"];
if (url == null) url = HostedSource.defaultUrl;
return new Pair<String, String>(name, url);
}

View file

@ -1,160 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source.path;
import 'dart:async';
import 'package:path/path.dart' as p;
import '../exceptions.dart';
import '../io.dart';
import '../package.dart';
import '../pubspec.dart';
import '../source.dart';
import '../utils.dart';
/// A package [Source] that gets packages from a given local file path.
class PathSource extends Source {
/// Returns a valid description for a reference to a package at [path].
static describePath(String path) {
return {
"path": path,
"relative": p.isRelative(path)
};
}
/// Given a valid path reference description, returns the file path it
/// describes.
///
/// The returned path may be relative or absolute; it is up to the caller
/// to know how to interpret a relative path.
static String pathFromDescription(description) => description["path"];
final name = 'path';
Future<Pubspec> doDescribe(PackageId id) {
return new Future.sync(() {
var dir = _validatePath(id.name, id.description);
return new Pubspec.load(dir, systemCache.sources,
expectedName: id.name);
});
}
bool descriptionsEqual(description1, description2) {
// Compare real paths after normalizing and resolving symlinks.
var path1 = canonicalize(description1["path"]);
var path2 = canonicalize(description2["path"]);
return path1 == path2;
}
Future get(PackageId id, String symlink) {
return new Future.sync(() {
var dir = _validatePath(id.name, id.description);
createPackageSymlink(id.name, dir, symlink,
relative: id.description["relative"]);
});
}
Future<String> getDirectory(PackageId id) =>
newFuture(() => _validatePath(id.name, id.description));
/// Parses a path dependency.
///
/// This takes in a path string and returns a map. The "path" key will be the
/// original path but resolved relative to the containing path. The
/// "relative" key will be `true` if the original path was relative.
///
/// A path coming from a pubspec is a simple string. From a lock file, it's
/// an expanded {"path": ..., "relative": ...} map.
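///
/// For example (illustrative, Unix-style paths): a pubspec dependency on
/// `../transmogrify`, parsed with a [containingPath] of
/// `/home/me/app/pubspec.yaml`, is returned as:
///
///     {"path": "/home/me/transmogrify", "relative": true}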
dynamic parseDescription(String containingPath, description,
{bool fromLockFile: false}) {
if (fromLockFile) {
if (description is! Map) {
throw new FormatException("The description must be a map.");
}
if (description["path"] is! String) {
throw new FormatException("The 'path' field of the description must "
"be a string.");
}
if (description["relative"] is! bool) {
throw new FormatException("The 'relative' field of the description "
"must be a boolean.");
}
return description;
}
if (description is! String) {
throw new FormatException("The description must be a path string.");
}
// Resolve the path relative to the containing file path, and remember
// whether the original path was relative or absolute.
var isRelative = p.isRelative(description);
if (isRelative) {
// Relative paths coming from pubspecs that are not on the local file
// system aren't allowed. This can happen if a hosted or git dependency
// has a path dependency.
if (containingPath == null) {
throw new FormatException('"$description" is a relative path, but this '
'isn\'t a local pubspec.');
}
description = p.normalize(
p.join(p.dirname(containingPath), description));
}
return {
"path": description,
"relative": isRelative
};
}
/// Serializes path dependency's [description].
///
/// For descriptions where the `relative` attribute is `true`, tries to make
/// `path` relative to the specified [containingPath].
dynamic serializeDescription(String containingPath, description) {
if (description["relative"]) {
return {
"path": p.relative(description['path'], from: containingPath),
"relative": true
};
}
return description;
}
/// Converts a parsed relative path to its original relative form.
String formatDescription(String containingPath, description) {
var sourcePath = description["path"];
if (description["relative"]) {
sourcePath = p.relative(description['path'], from: containingPath);
}
return sourcePath;
}
/// Ensures that [description] is a valid path description and returns a
/// normalized path to the package.
///
/// It must be a map, with a "path" key containing a path that points to an
/// existing directory. Throws an [ApplicationException] if the path is
/// invalid.
String _validatePath(String name, description) {
var dir = description["path"];
if (dirExists(dir)) return dir;
if (fileExists(dir)) {
fail('Path dependency for package $name must refer to a directory, '
'not a file. Was "$dir".');
}
throw new PackageNotFoundException(
'Could not find package $name at "$dir".');
}
}

View file

@ -1,47 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source.unknown;
import 'dart:async';
import '../package.dart';
import '../pubspec.dart';
import '../source.dart';
/// A [Null Object] that represents a source not recognized by pub.
///
/// It provides some default behavior so that pub can work with sources it
/// doesn't recognize.
///
/// [null object]: http://en.wikipedia.org/wiki/Null_Object_pattern
class UnknownSource extends Source {
final String name;
UnknownSource(this.name);
/// Two unknown sources are the same if their names are the same.
bool operator==(other) =>
other is UnknownSource &&
other.name == name;
int get hashCode => name.hashCode;
Future<Pubspec> doDescribe(PackageId id) => throw new UnsupportedError(
"Cannot describe a package from unknown source '$name'.");
Future get(PackageId id, String symlink) => throw new UnsupportedError(
"Cannot get an unknown source '$name'.");
/// Returns the directory where this package can be found locally.
Future<String> getDirectory(PackageId id) => throw new UnsupportedError(
"Cannot find a package from an unknown source '$name'.");
bool descriptionsEqual(description1, description2) =>
description1 == description2;
/// Unknown sources do no validation.
dynamic parseDescription(String containingPath, description,
{bool fromLockFile: false}) => description;
}

View file

@ -1,81 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.source_registry;
import 'dart:collection';
import 'package.dart';
import 'source.dart';
import 'source/unknown.dart';
/// A class that keeps track of [Source]s used for getting packages.
class SourceRegistry extends IterableBase<Source> {
final _sources = new Map<String, Source>();
Source _default;
/// Returns the default source, which is used when no source is specified.
Source get defaultSource => _default;
/// Iterates over the registered sources in name order.
Iterator<Source> get iterator {
var sources = _sources.values.toList();
sources.sort((a, b) => a.name.compareTo(b.name));
return sources.iterator;
}
/// Returns whether [id1] and [id2] refer to the same package, including
/// validating that their descriptions are equivalent.
bool idsEqual(PackageId id1, PackageId id2) {
if (id1 != id2) return false;
if (id1 == null && id2 == null) return true;
return idDescriptionsEqual(id1, id2);
}
/// Returns whether [id1] and [id2] have the same source and description.
///
/// This doesn't check whether the name or versions are equal.
bool idDescriptionsEqual(PackageId id1, PackageId id2) {
if (id1.source != id2.source) return false;
return this[id1.source].descriptionsEqual(id1.description, id2.description);
}
/// Sets the default source.
///
/// This takes a string, which must be the name of a registered source.
void setDefault(String name) {
if (!_sources.containsKey(name)) {
throw new StateError('Default source $name is not in the registry');
}
_default = _sources[name];
}
/// Registers a new source.
///
/// This source may not have the same name as a source that's already been
/// registered.
void register(Source source) {
if (_sources.containsKey(source.name)) {
throw new StateError('Source registry already has a source named '
'${source.name}');
}
_sources[source.name] = source;
}
/// Returns the source named [name].
///
/// Returns an [UnknownSource] if no source with that name has been
/// registered. If [name] is null, returns the default source.
Source operator[](String name) {
if (name == null) {
if (defaultSource != null) return defaultSource;
throw new StateError('No default source has been registered');
}
if (_sources.containsKey(name)) return _sources[name];
return new UnknownSource(name);
}
}

View file

@ -1,108 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.system_cache;
import 'dart:async';
import 'dart:io';
import 'package:path/path.dart' as path;
import 'io.dart';
import 'io.dart' as io show createTempDir;
import 'log.dart' as log;
import 'package.dart';
import 'source/cached.dart';
import 'source/git.dart';
import 'source/hosted.dart';
import 'source/path.dart';
import 'source.dart';
import 'source_registry.dart';
/// The system-wide cache of downloaded packages.
///
/// This cache contains all packages that are downloaded from the internet.
/// Packages that are available locally (e.g. path dependencies) don't use this
/// cache.
class SystemCache {
/// The root directory where this package cache is located.
final String rootDir;
String get tempDir => path.join(rootDir, '_temp');
/// The sources from which to get packages.
final sources = new SourceRegistry();
static String defaultDir = (() {
if (Platform.environment.containsKey('PUB_CACHE')) {
return Platform.environment['PUB_CACHE'];
} else if (Platform.operatingSystem == 'windows') {
var appData = Platform.environment['APPDATA'];
return path.join(appData, 'Pub', 'Cache');
} else {
return '${Platform.environment['HOME']}/.pub-cache';
}
})();
/// Creates a new package cache which is backed by the given directory on the
/// user's file system.
SystemCache([String rootDir])
: rootDir = rootDir == null ? SystemCache.defaultDir : rootDir;
/// Creates a system cache and registers the standard set of sources.
///
/// If [isOffline] is `true`, then the offline hosted source will be used.
/// Defaults to `false`.
factory SystemCache.withSources({String rootDir, bool isOffline: false}) {
var cache = new SystemCache(rootDir);
cache.register(new GitSource());
if (isOffline) {
cache.register(new OfflineHostedSource());
} else {
cache.register(new HostedSource());
}
cache.register(new PathSource());
cache.sources.setDefault('hosted');
return cache;
}
/// Registers a new source.
///
/// This source must not have the same name as a source that's already been
/// registered.
void register(Source source) {
source.bind(this);
sources.register(source);
}
/// Determines if the system cache contains the package identified by [id].
Future<bool> contains(PackageId id) {
var source = sources[id.source];
if (source is! CachedSource) {
throw new ArgumentError("Package $id is not cacheable.");
}
return source.isInSystemCache(id);
}
/// Creates a new temporary directory within the system cache.
///
/// The system cache maintains its own temporary directory that it uses to
/// stage packages into while downloading. It uses this instead of the OS's
/// system temp directory to ensure that it's on the same volume as the pub
/// system cache, so that staged directories can be moved into the cache.
String createTempDir() {
var temp = ensureDir(tempDir);
return io.createTempDir(temp, 'dir');
}
/// Deletes the system cache's internal temp directory.
void deleteTempDir() {
log.fine('Clean up system cache temp directory $tempDir.');
if (dirExists(tempDir)) deleteEntry(tempDir);
}
}

View file

@ -1,73 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.transcript;
import 'dart:collection';
/// A rolling transcript of entries of type [T].
///
/// It has a maximum number of entries. If entries are added that exceed that
/// it discards entries from the *middle* of the transcript. Generally, in logs,
/// the first and last entries are the most important, so it maintains those.
class Transcript<T> {
/// The maximum number of transcript entries.
final int max;
/// The number of entries that were discarded after reaching [max].
int get discarded => _discarded;
int _discarded = 0;
/// The earliest half of the entries.
///
/// This will be empty until the maximum number of entries is hit at which
/// point the oldest half of the entries will be moved from [_newest] to
/// here.
final _oldest = new List<T>();
/// The most recent half of the entries.
final _newest = new Queue<T>();
/// Creates a new [Transcript] that can hold up to [max] entries.
Transcript(this.max);
/// Adds [entry] to the transcript.
///
/// If the transcript already has the maximum number of entries, discards one
/// from the middle.
void add(T entry) {
if (discarded > 0) {
// We're already in "rolling" mode.
_newest.removeFirst();
_discarded++;
} else if (_newest.length == max) {
// We are crossing the threshold where we have to discard items. Copy
// the first half over to the oldest list.
while (_newest.length > max ~/ 2) {
_oldest.add(_newest.removeFirst());
}
// Discard the middle item.
_newest.removeFirst();
_discarded++;
}
_newest.add(entry);
}
/// Traverses the entries in the transcript from oldest to newest.
///
/// Invokes [onEntry] for each item. When it reaches the point in the middle
/// where excess entries were dropped, invokes [onGap] with the number of
/// dropped entries. If no more than [max] entries were added, does not
/// invoke [onGap].
void forEach(void onEntry(T entry), [void onGap(int)]) {
if (_oldest.isNotEmpty) {
_oldest.forEach(onEntry);
if (onGap != null) onGap(discarded);
}
_newest.forEach(onEntry);
}
}
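// A minimal sketch of the rolling behavior (illustrative only):
//
//     var transcript = new Transcript<String>(4);
//     for (var i = 1; i <= 6; i++) {
//       transcript.add("entry $i");
//     }
//     transcript.forEach(print, (gap) => print("... $gap discarded ..."));
//
// With a maximum of 4 entries and 6 added, this prints "entry 1" and
// "entry 2", then a gap of 2 (entries 3 and 4 were dropped from the middle),
// then "entry 5" and "entry 6".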

View file

@ -1,942 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Generic utility functions. Stuff that should possibly be in core.
library pub.utils;
import 'dart:async';
import "dart:convert";
import 'dart:io';
// This is used by [libraryPath]. It must be kept up-to-date with all libraries
// whose paths are looked up using that function.
@MirrorsUsed(targets: const ['pub.io', 'test_pub'])
import 'dart:mirrors';
import "package:crypto/crypto.dart";
import 'package:path/path.dart' as path;
import "package:stack_trace/stack_trace.dart";
import 'exceptions.dart';
import 'log.dart' as log;
export '../../asset/dart/utils.dart';
/// A regular expression matching a Dart identifier.
///
/// This also matches a package name, since they must be Dart identifiers.
final identifierRegExp = new RegExp(r"[a-zA-Z_][a-zA-Z0-9_]+");
/// Like [identifierRegExp], but anchored so that it only matches strings that
/// are *just* Dart identifiers.
final onlyIdentifierRegExp = new RegExp("^${identifierRegExp.pattern}\$");
/// A pair of values.
class Pair<E, F> {
E first;
F last;
Pair(this.first, this.last);
String toString() => '($first, $last)';
bool operator==(other) {
if (other is! Pair) return false;
return other.first == first && other.last == last;
}
int get hashCode => first.hashCode ^ last.hashCode;
}
/// A completer that waits until all added [Future]s complete.
// TODO(rnystrom): Copied from web_components. Remove from here when it gets
// added to dart:core. (See #6626.)
class FutureGroup<T> {
int _pending = 0;
Completer<List<T>> _completer = new Completer<List<T>>();
final List<Future<T>> futures = <Future<T>>[];
bool completed = false;
final List<T> _values = <T>[];
/// Wait for [task] to complete.
Future<T> add(Future<T> task) {
if (completed) {
throw new StateError("The FutureGroup has already completed.");
}
_pending++;
futures.add(task.then((value) {
if (completed) return;
_pending--;
_values.add(value);
if (_pending <= 0) {
completed = true;
_completer.complete(_values);
}
}).catchError((e, stackTrace) {
if (completed) return;
completed = true;
_completer.completeError(e, stackTrace);
}));
return task;
}
Future<List> get future => _completer.future;
}
/// Like [new Future], but works around issue 11911 by using [new Future.value]
/// under the covers.
Future newFuture(callback()) => new Future.value().then((_) => callback());
/// Runs [callback] in an error zone and pipes any unhandled error to the
/// returned [Future].
///
/// If the returned [Future] produces an error, its stack trace will always be a
/// [Chain]. By default, this chain will contain only the local stack trace, but
/// if [captureStackChains] is passed, it will contain the full stack chain for
/// the error.
Future captureErrors(Future callback(), {bool captureStackChains: false}) {
var completer = new Completer();
var wrappedCallback = () {
new Future.sync(callback).then(completer.complete)
.catchError((e, stackTrace) {
// [stackTrace] can be null if we're running without [captureStackChains],
// since dart:io will often throw errors without stack traces.
if (stackTrace != null) {
stackTrace = new Chain.forTrace(stackTrace);
} else {
stackTrace = new Chain([]);
}
if (!completer.isCompleted) completer.completeError(e, stackTrace);
});
};
if (captureStackChains) {
Chain.capture(wrappedCallback, onError: (error, stackTrace) {
if (!completer.isCompleted) completer.completeError(error, stackTrace);
});
} else {
runZoned(wrappedCallback, onError: (e, stackTrace) {
if (stackTrace == null) {
stackTrace = new Chain.current();
} else {
stackTrace = new Chain([new Trace.from(stackTrace)]);
}
if (!completer.isCompleted) completer.completeError(e, stackTrace);
});
}
return completer.future;
}
/// Like [Future.wait], but prints all errors from the futures as they occur and
/// only returns once all Futures have completed, successfully or not.
///
/// This will wrap the first error thrown in a [SilentException] and rethrow it.
Future waitAndPrintErrors(Iterable<Future> futures) {
return Future.wait(futures.map((future) {
return future.catchError((error, stackTrace) {
log.exception(error, stackTrace);
throw error;
});
})).catchError((error, stackTrace) {
throw new SilentException(error, stackTrace);
});
}
/// Returns a [StreamTransformer] that will call [onDone] when the stream
/// completes.
///
/// The stream will be passed through unchanged.
StreamTransformer onDoneTransformer(void onDone()) {
return new StreamTransformer.fromHandlers(handleDone: (sink) {
onDone();
sink.close();
});
}
// TODO(rnystrom): Move into String?
/// Pads [source] to [length] by adding spaces at the end.
String padRight(String source, int length) {
final result = new StringBuffer();
result.write(source);
while (result.length < length) {
result.write(' ');
}
return result.toString();
}
/// Pads [source] to [length] by adding [char]s at the beginning.
///
/// If [char] is `null`, it defaults to a space.
String padLeft(String source, int length, [String char]) {
if (char == null) char = ' ';
if (source.length >= length) return source;
return char * (length - source.length) + source;
}
/// Returns a labelled sentence fragment starting with [name] listing the
/// elements [iter].
///
/// If [iter] does not have exactly one item, [name] will be pluralized by
/// adding "s" or by using [plural], if given.
String namedSequence(String name, Iterable iter, [String plural]) {
if (iter.length == 1) return "$name ${iter.single}";
if (plural == null) plural = "${name}s";
return "$plural ${toSentence(iter)}";
}
/// Returns a sentence fragment listing the elements of [iter].
///
/// This converts each element of [iter] to a string and separates them with
/// commas and/or "and" where appropriate.
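///
/// For example, `toSentence(["apples", "oranges", "pears"])` returns
/// `"apples, oranges and pears"`.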
String toSentence(Iterable iter) {
if (iter.length == 1) return iter.first.toString();
return iter.take(iter.length - 1).join(", ") + " and ${iter.last}";
}
/// Returns [name] if [number] is 1, or the plural of [name] otherwise.
///
/// By default, this just adds "s" to the end of [name] to get the plural. If
/// [plural] is passed, that's used instead.
String pluralize(String name, int number, {String plural}) {
if (number == 1) return name;
if (plural != null) return plural;
return '${name}s';
}
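// Usage sketch (illustrative; _describePackages is a hypothetical helper):
// with ["foo", "bar", "baz"] this produces "3 packages: foo, bar and baz".
String _describePackages(List<String> names) =>
    "${names.length} ${pluralize('package', names.length)}: "
    "${toSentence(names)}";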
/// Escapes any regex metacharacters in [string] so that using it as a
/// [RegExp] pattern will match the string literally.
// TODO(rnystrom): Remove when #4706 is fixed.
String quoteRegExp(String string) {
// Note: make sure "\" is done first so that we don't escape the other
  // escaped characters. We could do all of the replaces at once with a
  // regexp, but a string literal for a regexp that matches all regexp
  // metacharacters would be a bit hard to read.
for (var metacharacter in r"\^$.*+?()[]{}|".split("")) {
string = string.replaceAll(metacharacter, "\\$metacharacter");
}
return string;
}
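// Usage sketch (illustrative; _containsLiteral is a hypothetical helper):
// matching a needle that contains regex metacharacters by escaping it first.
bool _containsLiteral(String haystack, String needle) =>
    haystack.contains(new RegExp(quoteRegExp(needle)));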
/// Creates a URL string for [address]:[port].
///
/// Handles properly formatting IPv6 addresses.
Uri baseUrlForAddress(InternetAddress address, int port) {
if (address.isLoopback) {
return new Uri(scheme: "http", host: "localhost", port: port);
}
// IPv6 addresses in URLs need to be enclosed in square brackets to avoid
// URL ambiguity with the ":" in the address.
if (address.type == InternetAddressType.IP_V6) {
return new Uri(scheme: "http", host: "[${address.address}]", port: port);
}
return new Uri(scheme: "http", host: address.address, port: port);
}
/// Returns whether [host] is a host for a localhost or loopback URL.
///
/// Unlike [InternetAddress.isLoopback], this handles hostnames from URLs as
/// well as [InternetAddress]es, including "localhost".
bool isLoopback(String host) {
if (host == 'localhost') return true;
// IPv6 hosts in URLs are surrounded by square brackets.
if (host.startsWith("[") && host.endsWith("]")) {
host = host.substring(1, host.length - 1);
}
try {
return new InternetAddress(host).isLoopback;
} on ArgumentError catch (_) {
    // The host isn't an IP address and isn't "localhost", so it's almost
// certainly not a loopback host.
return false;
}
}
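// Usage sketch (illustrative values): a loopback address renders as a
// friendly localhost URL, and bracketed IPv6 hosts from URLs are recognized
// as loopback too.
final _localUrl = baseUrlForAddress(new InternetAddress("127.0.0.1"), 8080);
final _isLocal = isLoopback("[::1]"); // true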
/// Flattens nested lists inside an iterable into a single list containing only
/// non-list elements.
List flatten(Iterable nested) {
var result = [];
helper(list) {
for (var element in list) {
if (element is List) {
helper(element);
} else {
result.add(element);
}
}
}
helper(nested);
return result;
}
/// Returns a set containing all elements in [minuend] that are not in
/// [subtrahend].
Set setMinus(Iterable minuend, Iterable subtrahend) {
var minuendSet = new Set.from(minuend);
minuendSet.removeAll(subtrahend);
return minuendSet;
}
/// Returns whether there's any overlap between [set1] and [set2].
bool overlaps(Set set1, Set set2) {
// Iterate through the smaller set.
  var smaller = set1.length < set2.length ? set1 : set2;
var larger = smaller == set1 ? set2 : set1;
return smaller.any(larger.contains);
}
/// Returns a list containing the sorted elements of [iter].
List ordered(Iterable<Comparable> iter) {
var list = iter.toList();
list.sort();
return list;
}
/// Returns the element of [iter] for which [f] returns the minimum value.
minBy(Iterable iter, Comparable f(element)) {
var min = null;
var minComparable = null;
for (var element in iter) {
var comparable = f(element);
if (minComparable == null ||
comparable.compareTo(minComparable) < 0) {
min = element;
minComparable = comparable;
}
}
return min;
}
/// Returns every pair of consecutive elements in [iter].
///
/// For example, if [iter] is `[1, 2, 3, 4]`, this will return `[(1, 2), (2, 3),
/// (3, 4)]`.
Iterable<Pair> pairs(Iterable iter) {
var previous = iter.first;
return iter.skip(1).map((element) {
var oldPrevious = previous;
previous = element;
return new Pair(oldPrevious, element);
});
}
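// Usage sketch (illustrative; _deltas is a hypothetical helper): computing
// the gap between each pair of consecutive values, e.g. [1, 3, 6] -> [2, 3].
List _deltas(List<int> values) =>
    pairs(values).map((pair) => pair.last - pair.first).toList();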
/// Creates a new map from [map] with new keys and values.
///
/// The return values of [key] are used as the keys and the return values of
/// [value] are used as the values for the new map.
///
/// [key] defaults to returning the original key and [value] defaults to
/// returning the original value.
Map mapMap(Map map, {key(key, value), value(key, value)}) {
if (key == null) key = (key, _) => key;
if (value == null) value = (_, value) => value;
var result = {};
map.forEach((mapKey, mapValue) {
result[key(mapKey, mapValue)] = value(mapKey, mapValue);
});
return result;
}
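// Usage sketch (illustrative; _lowercaseKeys is a hypothetical helper):
// normalizing a map's keys while leaving its values untouched.
Map _lowercaseKeys(Map<String, String> headers) =>
    mapMap(headers, key: (key, _) => key.toLowerCase());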
/// Like [Map.fromIterable], but [key] and [value] may return [Future]s.
Future<Map> mapFromIterableAsync(Iterable iter, {key(element),
value(element)}) {
if (key == null) key = (element) => element;
if (value == null) value = (element) => element;
var map = new Map();
return Future.wait(iter.map((element) {
return Future.wait([
new Future.sync(() => key(element)),
new Future.sync(() => value(element))
]).then((results) {
map[results[0]] = results[1];
});
})).then((_) => map);
}
/// Returns a new map with all entries in both [map1] and [map2].
///
/// If there are overlapping keys, [map2]'s value wins.
Map mergeMaps(Map map1, Map map2) {
var result = {};
result.addAll(map1);
result.addAll(map2);
return result;
}
/// Returns the transitive closure of [graph].
///
/// This assumes [graph] represents a graph with a vertex for each key and an
/// edge between each key and each of the values for that key.
Map<dynamic, Set> transitiveClosure(Map<dynamic, Iterable> graph) {
// This uses the Floyd-Warshall algorithm
// (https://en.wikipedia.org/wiki/Floyd%E2%80%93Warshall_algorithm).
var result = {};
graph.forEach((vertex, edges) {
result[vertex] = new Set.from(edges)..add(vertex);
});
for (var vertex1 in graph.keys) {
for (var vertex2 in graph.keys) {
for (var vertex3 in graph.keys) {
if (result[vertex2].contains(vertex1) &&
result[vertex1].contains(vertex3)) {
result[vertex2].add(vertex3);
}
}
}
}
return result;
}
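// Usage sketch (illustrative graph): the closure of "a" below is {a, b, c},
// since "a" reaches "c" through "b".
final _exampleClosure = transitiveClosure({
  "a": ["b"],
  "b": ["c"],
  "c": []
});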
/// Given a list of filenames, returns a set of patterns that can be used to
/// filter for those filenames.
///
/// For a given path, that path ends with some string in the returned set if
/// and only if that path's basename is in [files].
Set<String> createFileFilter(Iterable<String> files) {
return files.expand((file) {
var result = ["/$file"];
if (Platform.operatingSystem == 'windows') result.add("\\$file");
return result;
}).toSet();
}
/// Given a blacklist of directory names, returns a set of patterns that can
/// be used to filter for those directory names.
///
/// For a given path, that path contains some string in the returned set if
/// and only if one of that path's components is in [dirs].
Set<String> createDirectoryFilter(Iterable<String> dirs) {
return dirs.expand((dir) {
var result = ["/$dir/"];
if (Platform.operatingSystem == 'windows') {
result..add("/$dir\\")..add("\\$dir/")..add("\\$dir\\");
}
return result;
}).toSet();
}
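// Usage sketch (illustrative; the directory names are just examples):
// deciding whether a path has a blacklisted component.
bool _isIgnored(String filePath) {
  var filters = createDirectoryFilter([".git", "packages"]);
  return filters.any((pattern) => filePath.contains(pattern));
}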
/// Returns the maximum value in [iter] by [compare].
///
/// [compare] defaults to [Comparable.compare].
maxAll(Iterable iter, [int compare(element1, element2)]) {
if (compare == null) compare = Comparable.compare;
return iter.reduce((max, element) =>
compare(element, max) > 0 ? element : max);
}
/// Returns the minimum value in [iter] by [compare].
///
/// [compare] defaults to [Comparable.compare].
minAll(Iterable iter, [int compare(element1, element2)]) {
if (compare == null) compare = Comparable.compare;
  return iter.reduce((min, element) =>
      compare(element, min) < 0 ? element : min);
}
/// Replace each instance of [matcher] in [source] with the return value of
/// [fn].
String replace(String source, Pattern matcher, String fn(Match match)) {
var buffer = new StringBuffer();
var start = 0;
for (var match in matcher.allMatches(source)) {
buffer.write(source.substring(start, match.start));
start = match.end;
buffer.write(fn(match));
}
buffer.write(source.substring(start));
return buffer.toString();
}
/// Returns whether or not [str] ends with [matcher].
bool endsWithPattern(String str, Pattern matcher) {
for (var match in matcher.allMatches(str)) {
if (match.end == str.length) return true;
}
return false;
}
/// Returns the hex-encoded sha1 hash of [source].
String sha1(String source) {
var sha = new SHA1();
sha.add(source.codeUnits);
return CryptoUtils.bytesToHex(sha.close());
}
/// Configures [future] so that its result (success or exception) is passed on
/// to [completer].
void chainToCompleter(Future future, Completer completer) {
future.then(completer.complete, onError: completer.completeError);
}
/// Ensures that [stream] can emit at least one value successfully (or close
/// without any values).
///
/// For example, reading asynchronously from a non-existent file will return a
/// stream that fails on the first chunk. In order to handle that more
/// gracefully, you may want to check that the stream looks like it's working
/// before you pipe the stream to something else.
///
/// This lets you do that. It returns a [Future] that completes to a [Stream]
/// emitting the same values and errors as [stream], but only if at least one
/// value can be read successfully. If an error occurs before any values are
/// emitted, the returned Future completes to that error.
Future<Stream> validateStream(Stream stream) {
var completer = new Completer<Stream>();
var controller = new StreamController(sync: true);
StreamSubscription subscription;
subscription = stream.listen((value) {
// We got a value, so the stream is valid.
if (!completer.isCompleted) completer.complete(controller.stream);
controller.add(value);
}, onError: (error, [stackTrace]) {
// If the error came after values, it's OK.
if (completer.isCompleted) {
controller.addError(error, stackTrace);
return;
}
// Otherwise, the error came first and the stream is invalid.
completer.completeError(error, stackTrace);
    // We won't be returning the stream at all in this case, so unsubscribe
// and swallow the error.
subscription.cancel();
}, onDone: () {
// It closed with no errors, so the stream is valid.
if (!completer.isCompleted) completer.complete(controller.stream);
controller.close();
});
return completer.future;
}
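// Usage sketch (illustrative; _copyIfReadable is a hypothetical helper): a
// missing file surfaces as a single clean error from validateStream instead
// of an error in the middle of piping.
Future _copyIfReadable(String filePath, IOSink sink) {
  return validateStream(new File(filePath).openRead())
      .then((stream) => stream.pipe(sink));
}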
// TODO(nweiz): remove this when issue 7964 is fixed.
/// Returns a [Future] that will complete to the first element of [stream].
///
/// Unlike [Stream.first], this is safe to use with single-subscription streams.
Future streamFirst(Stream stream) {
var completer = new Completer();
var subscription;
subscription = stream.listen((value) {
subscription.cancel();
completer.complete(value);
}, onError: (e, [stackTrace]) {
completer.completeError(e, stackTrace);
}, onDone: () {
completer.completeError(new StateError("No elements"), new Chain.current());
}, cancelOnError: true);
return completer.future;
}
/// Returns a wrapped version of [stream] along with a [StreamSubscription] that
/// can be used to control the wrapped stream.
Pair<Stream, StreamSubscription> streamWithSubscription(Stream stream) {
var controller =
stream.isBroadcast ? new StreamController.broadcast(sync: true)
: new StreamController(sync: true);
var subscription = stream.listen(controller.add,
onError: controller.addError,
onDone: controller.close);
return new Pair<Stream, StreamSubscription>(controller.stream, subscription);
}
// TODO(nweiz): remove this when issue 7787 is fixed.
/// Creates two single-subscription [Stream]s that each emit all values and
/// errors from [stream].
///
/// This is useful if [stream] is single-subscription but multiple subscribers
/// are necessary.
Pair<Stream, Stream> tee(Stream stream) {
var controller1 = new StreamController(sync: true);
var controller2 = new StreamController(sync: true);
stream.listen((value) {
controller1.add(value);
controller2.add(value);
}, onError: (error, [stackTrace]) {
controller1.addError(error, stackTrace);
controller2.addError(error, stackTrace);
}, onDone: () {
controller1.close();
controller2.close();
});
return new Pair<Stream, Stream>(controller1.stream, controller2.stream);
}
/// Merges [stream1] and [stream2] into a single stream that emits events from
/// both sources.
Stream mergeStreams(Stream stream1, Stream stream2) {
var doneCount = 0;
var controller = new StreamController(sync: true);
for (var stream in [stream1, stream2]) {
stream.listen(
controller.add,
onError: controller.addError,
onDone: () {
doneCount++;
if (doneCount == 2) controller.close();
});
}
return controller.stream;
}
/// A regular expression matching a trailing CR character.
final _trailingCR = new RegExp(r"\r$");
// TODO(nweiz): Use `text.split(new RegExp("\r\n?|\n\r?"))` when issue 9360 is
// fixed.
/// Splits [text] on its line breaks in a Windows-line-break-friendly way.
List<String> splitLines(String text) =>
text.split("\n").map((line) => line.replaceFirst(_trailingCR, "")).toList();
/// Converts a stream of arbitrarily chunked strings into a line-by-line stream.
///
/// The lines don't include line termination characters. A single trailing
/// newline is ignored.
Stream<String> streamToLines(Stream<String> stream) {
var buffer = new StringBuffer();
return stream.transform(new StreamTransformer.fromHandlers(
handleData: (chunk, sink) {
var lines = splitLines(chunk);
var leftover = lines.removeLast();
for (var line in lines) {
if (!buffer.isEmpty) {
buffer.write(line);
line = buffer.toString();
buffer = new StringBuffer();
}
sink.add(line);
}
buffer.write(leftover);
},
handleDone: (sink) {
if (!buffer.isEmpty) sink.add(buffer.toString());
sink.close();
}));
}
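// Usage sketch (illustrative; _processLines is a hypothetical helper):
// turning a child process's chunked stdout into a line-by-line stream.
Stream<String> _processLines(Process process) =>
    streamToLines(process.stdout.transform(UTF8.decoder));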
/// Like [Iterable.where], but allows [test] to return [Future]s and uses the
/// results of those [Future]s as the test.
Future<Iterable> futureWhere(Iterable iter, test(value)) {
return Future.wait(iter.map((e) {
var result = test(e);
if (result is! Future) result = new Future.value(result);
return result.then((result) => new Pair(e, result));
}))
.then((pairs) => pairs.where((pair) => pair.last))
.then((pairs) => pairs.map((pair) => pair.first));
}
// TODO(nweiz): unify the following functions with the utility functions in
// pkg/http.
/// Like [String.split], but only splits on the first occurrence of the pattern.
///
/// This always returns a list of two elements or fewer.
List<String> split1(String toSplit, String pattern) {
if (toSplit.isEmpty) return <String>[];
var index = toSplit.indexOf(pattern);
if (index == -1) return [toSplit];
return [toSplit.substring(0, index),
toSplit.substring(index + pattern.length)];
}
/// Adds additional query parameters to [url], overwriting the original
/// parameters if a name conflict occurs.
Uri addQueryParameters(Uri url, Map<String, String> parameters) {
var queryMap = queryToMap(url.query);
queryMap.addAll(parameters);
return url.resolve("?${mapToQuery(queryMap)}");
}
/// Convert a URL query string (or `application/x-www-form-urlencoded` body)
/// into a [Map] from parameter names to values.
Map<String, String> queryToMap(String queryList) {
var map = {};
for (var pair in queryList.split("&")) {
var split = split1(pair, "=");
if (split.isEmpty) continue;
var key = urlDecode(split[0]);
var value = split.length > 1 ? urlDecode(split[1]) : "";
map[key] = value;
}
return map;
}
/// Convert a [Map] from parameter names to values to a URL query string.
String mapToQuery(Map<String, String> map) {
var pairs = <List<String>>[];
map.forEach((key, value) {
key = Uri.encodeQueryComponent(key);
value = (value == null || value.isEmpty)
? null : Uri.encodeQueryComponent(value);
pairs.add([key, value]);
});
return pairs.map((pair) {
if (pair[1] == null) return pair[0];
return "${pair[0]}=${pair[1]}";
}).join("&");
}
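// Usage sketch (illustrative; the parameter name is just an example): adding
// a query parameter to an existing URL, overwriting any previous value.
Uri _withApiKey(Uri url, String key) =>
    addQueryParameters(url, {"api_key": key});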
/// Returns the union of all elements in each set in [sets].
Set unionAll(Iterable<Set> sets) =>
sets.fold(new Set(), (union, set) => union.union(set));
// TODO(nweiz): remove this when issue 9068 has been fixed.
/// Whether [uri1] and [uri2] are equal.
///
/// This considers HTTP URIs to default to port 80, and HTTPS URIs to default to
/// port 443.
bool urisEqual(Uri uri1, Uri uri2) =>
canonicalizeUri(uri1) == canonicalizeUri(uri2);
/// Return [uri] with redundant port information removed.
Uri canonicalizeUri(Uri uri) {
return uri;
}
/// Returns a human-friendly representation of [inputPath].
///
/// If [inputPath] isn't too distant from the current working directory, this
/// will return the relative path to it. Otherwise, it will return the absolute
/// path.
String nicePath(String inputPath) {
var relative = path.relative(inputPath);
var split = path.split(relative);
if (split.length > 1 && split[0] == '..' && split[1] == '..') {
return path.absolute(inputPath);
}
return relative;
}
/// Returns a human-friendly representation of [duration].
String niceDuration(Duration duration) {
var result = duration.inMinutes > 0 ? "${duration.inMinutes}:" : "";
  var s = duration.inSeconds % 60;
var ms = duration.inMilliseconds % 1000;
// If we're using verbose logging, be more verbose but more accurate when
// reporting timing information.
if (log.verbosity.isLevelVisible(log.Level.FINE)) {
ms = padLeft(ms.toString(), 3, '0');
} else {
ms ~/= 100;
}
return "$result$s.${ms}s";
}
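// Usage sketch (illustrative value): at normal verbosity a 75.432-second
// duration renders as "1:15.4s" (minutes, then seconds with tenths).
final _exampleDuration =
    niceDuration(new Duration(seconds: 75, milliseconds: 432));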
/// Decodes a URL-encoded string.
///
/// Unlike [Uri.decodeComponent], this includes replacing `+` with ` `.
String urlDecode(String encoded) =>
Uri.decodeComponent(encoded.replaceAll("+", " "));
/// Takes a simple data structure (composed of [Map]s, [Iterable]s, scalar
/// objects, and [Future]s) and recursively resolves all the [Future]s contained
/// within.
///
/// Completes with the fully resolved structure.
Future awaitObject(object) {
// Unroll nested futures.
if (object is Future) return object.then(awaitObject);
if (object is Iterable) {
return Future.wait(object.map(awaitObject).toList());
}
if (object is! Map) return new Future.value(object);
var pairs = <Future<Pair>>[];
object.forEach((key, value) {
pairs.add(awaitObject(value)
.then((resolved) => new Pair(key, resolved)));
});
return Future.wait(pairs).then((resolvedPairs) {
var map = {};
for (var pair in resolvedPairs) {
map[pair.first] = pair.last;
}
return map;
});
}
/// Returns the path to the library named [libraryName].
///
/// The library name must be globally unique, or the wrong library path may be
/// returned. Any libraries accessed must be added to the [MirrorsUsed]
/// declaration in the import above.
String libraryPath(String libraryName) {
var lib = currentMirrorSystem().findLibrary(new Symbol(libraryName));
return path.fromUri(lib.uri);
}
/// Whether "special" strings such as Unicode characters or color escapes are
/// safe to use.
///
/// On Windows or when not printing to a terminal, only printable ASCII
/// characters should be used.
bool get canUseSpecialChars => !runningAsTest &&
Platform.operatingSystem != 'windows' &&
stdioType(stdout) == StdioType.TERMINAL;
/// Gets a "special" string (ANSI escape or Unicode).
///
/// On Windows or when not printing to a terminal, returns [onWindows] instead,
/// since those characters aren't supported there.
String getSpecial(String special, [String onWindows = '']) =>
canUseSpecialChars ? special : onWindows;
/// Prepends each line in [text] with [prefix].
///
/// If [firstPrefix] is passed, the first line is prefixed with that instead.
String prefixLines(String text, {String prefix: '| ', String firstPrefix}) {
var lines = text.split('\n');
if (firstPrefix == null) {
return lines.map((line) => '$prefix$line').join('\n');
}
var firstLine = "$firstPrefix${lines.first}";
lines = lines.skip(1).map((line) => '$prefix$line').toList();
lines.insert(0, firstLine);
return lines.join('\n');
}
/// Whether pub is running as a subprocess in an integration test or in a unit
/// test that has explicitly set this.
bool runningAsTest = Platform.environment.containsKey('_PUB_TESTING');
/// Whether today is April Fools' day.
bool get isAprilFools {
// Tests should never see April Fools' output.
if (runningAsTest) return false;
var date = new DateTime.now();
return date.month == 4 && date.day == 1;
}
/// Wraps [fn] to guard against several different kinds of stack overflow
/// exceptions:
///
/// * A sufficiently long [Future] chain can cause a stack overflow if there are
/// no asynchronous operations in it (issue 9583).
/// * A recursive function that recurses too deeply without an asynchronous
/// operation can cause a stack overflow.
/// * Even if the former is guarded against by adding asynchronous operations,
/// returning a value through the [Future] chain can still cause a stack
/// overflow.
Future resetStack(fn()) {
// Using a [Completer] breaks the [Future] chain for the return value and
// avoids the third case described above.
var completer = new Completer();
// Using [new Future] adds an asynchronous operation that works around the
// first and second cases described above.
newFuture(fn).then((val) {
scheduleMicrotask(() => completer.complete(val));
}).catchError((err, stackTrace) {
scheduleMicrotask(() => completer.completeError(err, stackTrace));
});
return completer.future;
}
/// The subset of strings that don't need quoting in YAML.
///
/// This pattern does not strictly follow the plain scalar grammar of YAML,
/// which means some strings may be unnecessarily quoted, but it's much simpler.
final _unquotableYamlString = new RegExp(r"^[a-zA-Z_-][a-zA-Z_0-9-]*$");
/// Converts [data], which is a parsed YAML object, to a pretty-printed string,
/// using indentation for maps.
String yamlToString(data) {
var buffer = new StringBuffer();
_stringify(bool isMapValue, String indent, data) {
// TODO(nweiz): Serialize using the YAML library once it supports
// serialization.
// Use indentation for (non-empty) maps.
if (data is Map && !data.isEmpty) {
if (isMapValue) {
buffer.writeln();
indent += ' ';
}
// Sort the keys. This minimizes deltas in diffs.
var keys = data.keys.toList();
keys.sort((a, b) => a.toString().compareTo(b.toString()));
var first = true;
for (var key in keys) {
if (!first) buffer.writeln();
first = false;
var keyString = key;
if (key is! String || !_unquotableYamlString.hasMatch(key)) {
keyString = JSON.encode(key);
}
buffer.write('$indent$keyString:');
_stringify(true, indent, data[key]);
}
return;
}
// Everything else we just stringify using JSON to handle escapes in
// strings and number formatting.
var string = data;
// Don't quote plain strings if not needed.
if (data is! String || !_unquotableYamlString.hasMatch(data)) {
string = JSON.encode(data);
}
if (isMapValue) {
buffer.write(' $string');
} else {
buffer.write('$indent$string');
}
}
_stringify(false, '', data);
return buffer.toString();
}
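// Usage sketch (illustrative data): nested maps render with sorted keys and
// two-space indentation, e.g.
//
//     dependencies:
//       bar: "^1.0.0"
//       foo: any
//     name: my_app
final _examplePubspecYaml = yamlToString({
  "name": "my_app",
  "dependencies": {"foo": "any", "bar": "^1.0.0"}
});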
/// Throw an [ApplicationException] with [message].
void fail(String message, [innerError, StackTrace innerTrace]) {
if (innerError != null) {
throw new WrappedException(message, innerError, innerTrace);
} else {
throw new ApplicationException(message);
}
}
/// Throw a [DataException] with [message] to indicate that the command has
/// failed because of invalid input data.
///
/// This will report the error and cause pub to exit with [exit_codes.DATA].
void dataError(String message) => throw new DataException(message);


@ -1,102 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.validator;
import 'dart:async';
import 'entrypoint.dart';
import 'log.dart' as log;
import 'utils.dart';
import 'validator/compiled_dartdoc.dart';
import 'validator/dependency.dart';
import 'validator/dependency_override.dart';
import 'validator/directory.dart';
import 'validator/executable.dart';
import 'validator/license.dart';
import 'validator/name.dart';
import 'validator/pubspec_field.dart';
import 'validator/sdk_constraint.dart';
import 'validator/size.dart';
import 'validator/utf8_readme.dart';
/// The base class for validators that check whether a package is fit for
/// uploading.
///
/// Each validator should override [errors], [warnings], or both to return
/// lists of errors or warnings to display to the user. Errors will cause the
/// package not to be uploaded; warnings will require the user to confirm the
/// upload.
abstract class Validator {
/// The entrypoint that's being validated.
final Entrypoint entrypoint;
/// The accumulated errors for this validator.
///
/// Filled by calling [validate].
final errors = <String>[];
/// The accumulated warnings for this validator.
///
/// Filled by calling [validate].
final warnings = <String>[];
Validator(this.entrypoint);
/// Validates the entrypoint, adding any errors and warnings to [errors] and
/// [warnings], respectively.
Future validate();
/// Run all validators on the [entrypoint] package and print their results.
///
/// The future completes with the error and warning messages, respectively.
///
/// [packageSize], if passed, should complete to the size of the tarred
/// package, in bytes. This is used to validate that it's not too big to
/// upload to the server.
static Future<Pair<List<String>, List<String>>> runAll(
Entrypoint entrypoint, [Future<int> packageSize]) {
var validators = [
new LicenseValidator(entrypoint),
new NameValidator(entrypoint),
new PubspecFieldValidator(entrypoint),
new DependencyValidator(entrypoint),
new DependencyOverrideValidator(entrypoint),
new DirectoryValidator(entrypoint),
new ExecutableValidator(entrypoint),
new CompiledDartdocValidator(entrypoint),
new Utf8ReadmeValidator(entrypoint),
new SdkConstraintValidator(entrypoint)
];
if (packageSize != null) {
validators.add(new SizeValidator(entrypoint, packageSize));
}
return Future.wait(validators.map((validator) => validator.validate()))
.then((_) {
var errors =
flatten(validators.map((validator) => validator.errors));
var warnings =
flatten(validators.map((validator) => validator.warnings));
if (!errors.isEmpty) {
log.error("Missing requirements:");
for (var error in errors) {
log.error("* ${error.split('\n').join('\n ')}");
}
log.error("");
}
if (!warnings.isEmpty) {
log.warning("Suggestions:");
for (var warning in warnings) {
log.warning("* ${warning.split('\n').join('\n ')}");
}
log.warning("");
}
return new Pair<List<String>, List<String>>(errors, warnings);
});
}
}
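// Sketch of a minimal custom validator (illustrative only; not one of pub's
// real validators). It assumes [Pubspec] exposes the parsed pubspec map as
// `fields`.
class _DescriptionValidator extends Validator {
  _DescriptionValidator(Entrypoint entrypoint)
      : super(entrypoint);
  Future validate() {
    return new Future.sync(() {
      if (entrypoint.root.pubspec.fields["description"] == null) {
        warnings.add("Consider adding a description to your pubspec.");
      }
    });
  }
}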


@ -1,45 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.validator.compiled_dartdoc;
import 'dart:async';
import 'package:path/path.dart' as path;
import '../entrypoint.dart';
import '../io.dart';
import '../validator.dart';
/// Validates that a package doesn't contain compiled Dartdoc
/// output.
class CompiledDartdocValidator extends Validator {
CompiledDartdocValidator(Entrypoint entrypoint)
: super(entrypoint);
Future validate() {
return new Future.sync(() {
for (var entry in entrypoint.root.listFiles(useGitIgnore: true)) {
if (path.basename(entry) != "nav.json") continue;
var dir = path.dirname(entry);
// Look for tell-tale Dartdoc output files all in the same directory.
var files = [
entry,
path.join(dir, "index.html"),
path.join(dir, "styles.css"),
path.join(dir, "dart-logo-small.png"),
path.join(dir, "client-live-nav.js")
];
if (files.every((val) => fileExists(val))) {
warnings.add("Avoid putting generated documentation in "
"${path.relative(dir)}.\n"
"Generated documentation bloats the package with redundant "
"data.");
}
}
});
}
}


@ -1,217 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.validator.dependency;
import 'dart:async';
import 'package:pub_semver/pub_semver.dart';
import '../entrypoint.dart';
import '../log.dart' as log;
import '../package.dart';
import '../validator.dart';
/// The range of all pub versions that don't support `^` version constraints.
///
/// This is the actual range of pub versions, whereas [_postCaretPubVersions] is
/// the nicer-looking range that doesn't include a prerelease tag.
final _preCaretPubVersions = new VersionConstraint.parse("<1.8.0-dev.3.0");
/// The range of all pub versions that do support `^` version constraints.
///
/// This is intersected with the user's SDK constraint to provide a suggested
/// constraint.
final _postCaretPubVersions = new VersionConstraint.parse("^1.8.0");
/// A validator that validates a package's dependencies.
class DependencyValidator extends Validator {
/// Whether the SDK constraint guarantees that `^` version constraints are
/// safe.
bool get _caretAllowed => entrypoint.root.pubspec.environment.sdkVersion
.intersect(_preCaretPubVersions).isEmpty;
DependencyValidator(Entrypoint entrypoint)
: super(entrypoint);
Future validate() async {
var caretDeps = [];
for (var dependency in entrypoint.root.pubspec.dependencies) {
if (dependency.source != "hosted") {
await _warnAboutSource(dependency);
} else if (dependency.constraint.isAny) {
_warnAboutNoConstraint(dependency);
} else if (dependency.constraint is Version) {
_warnAboutSingleVersionConstraint(dependency);
} else if (dependency.constraint is VersionRange) {
if (dependency.constraint.min == null) {
_warnAboutNoConstraintLowerBound(dependency);
} else if (dependency.constraint.max == null) {
_warnAboutNoConstraintUpperBound(dependency);
}
if (dependency.constraint.toString().startsWith("^")) {
caretDeps.add(dependency);
}
}
}
if (caretDeps.isNotEmpty && !_caretAllowed) {
_errorAboutCaretConstraints(caretDeps);
}
}
/// Warn that dependencies should use the hosted source.
Future _warnAboutSource(PackageDep dep) async {
var versions;
try {
var pubspecs = await entrypoint.cache.sources['hosted']
.getVersions(dep.name, dep.name);
versions = pubspecs.map((pubspec) => pubspec.version).toList();
} catch (error) {
versions = [];
}
var constraint;
var primary = Version.primary(versions);
if (primary != null) {
constraint = _constraintForVersion(primary);
} else {
constraint = dep.constraint.toString();
if (!dep.constraint.isAny && dep.constraint is! Version) {
constraint = '"$constraint"';
}
}
// Path sources are errors. Other sources are just warnings.
var messages = warnings;
if (dep.source == "path") {
messages = errors;
}
messages.add('Don\'t depend on "${dep.name}" from the ${dep.source} '
'source. Use the hosted source instead. For example:\n'
'\n'
'dependencies:\n'
' ${dep.name}: $constraint\n'
'\n'
'Using the hosted source ensures that everyone can download your '
'package\'s dependencies along with your package.');
}
/// Warn that dependencies should have version constraints.
void _warnAboutNoConstraint(PackageDep dep) {
var message = 'Your dependency on "${dep.name}" should have a version '
'constraint.';
var locked = entrypoint.lockFile.packages[dep.name];
if (locked != null) {
message = '$message For example:\n'
'\n'
'dependencies:\n'
' ${dep.name}: ${_constraintForVersion(locked.version)}\n';
}
warnings.add("$message\n"
'Without a constraint, you\'re promising to support ${log.bold("all")} '
'future versions of "${dep.name}".');
}
/// Warn that dependencies should allow more than a single version.
void _warnAboutSingleVersionConstraint(PackageDep dep) {
warnings.add(
'Your dependency on "${dep.name}" should allow more than one version. '
'For example:\n'
'\n'
'dependencies:\n'
' ${dep.name}: ${_constraintForVersion(dep.constraint)}\n'
'\n'
'Constraints that are too tight will make it difficult for people to '
'use your package\n'
'along with other packages that also depend on "${dep.name}".');
}
/// Warn that dependencies should have lower bounds on their constraints.
void _warnAboutNoConstraintLowerBound(PackageDep dep) {
var message = 'Your dependency on "${dep.name}" should have a lower bound.';
var locked = entrypoint.lockFile.packages[dep.name];
if (locked != null) {
var constraint;
if (locked.version == (dep.constraint as VersionRange).max) {
constraint = _constraintForVersion(locked.version);
} else {
constraint = '">=${locked.version} ${dep.constraint}"';
}
message = '$message For example:\n'
'\n'
'dependencies:\n'
' ${dep.name}: $constraint\n';
}
warnings.add("$message\n"
'Without a constraint, you\'re promising to support ${log.bold("all")} '
'previous versions of "${dep.name}".');
}
/// Warn that dependencies should have upper bounds on their constraints.
void _warnAboutNoConstraintUpperBound(PackageDep dep) {
var constraint;
if ((dep.constraint as VersionRange).includeMin) {
constraint = _constraintForVersion((dep.constraint as VersionRange).min);
} else {
constraint = '"${dep.constraint} '
'<${(dep.constraint as VersionRange).min.nextBreaking}"';
}
warnings.add(
'Your dependency on "${dep.name}" should have an upper bound. For '
'example:\n'
'\n'
'dependencies:\n'
' ${dep.name}: $constraint\n'
'\n'
'Without an upper bound, you\'re promising to support '
'${log.bold("all")} future versions of ${dep.name}.');
}
/// Emits an error for any version constraints that use `^` without an
/// appropriate SDK constraint.
void _errorAboutCaretConstraints(List<PackageDep> caretDeps) {
var newSdkConstraint = entrypoint.root.pubspec.environment.sdkVersion
.intersect(_postCaretPubVersions);
if (newSdkConstraint.isEmpty) newSdkConstraint = _postCaretPubVersions;
var buffer = new StringBuffer(
"Older versions of pub don't support ^ version constraints.\n"
"Make sure your SDK constraint excludes those old versions:\n"
"\n"
"environment:\n"
" sdk: \"$newSdkConstraint\"\n"
"\n");
if (caretDeps.length == 1) {
buffer.writeln("Or use a fully-expanded constraint:");
} else {
buffer.writeln("Or use fully-expanded constraints:");
}
buffer.writeln();
buffer.writeln("dependencies:");
caretDeps.forEach((dep) {
VersionRange constraint = dep.constraint;
buffer.writeln(
" ${dep.name}: \">=${constraint.min} <${constraint.max}\"");
});
errors.add(buffer.toString().trim());
}
/// Returns the suggested version constraint for a dependency that was tested
/// against [version].
String _constraintForVersion(Version version) {
if (_caretAllowed) return "^$version";
return '">=$version <${version.nextBreaking}"';
}
}


@ -1,28 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.validator.dependency_override;
import 'dart:async';
import '../entrypoint.dart';
import '../validator.dart';
/// A validator that validates a package's dependencies overrides (or the
/// absence thereof).
class DependencyOverrideValidator extends Validator {
DependencyOverrideValidator(Entrypoint entrypoint)
: super(entrypoint);
Future validate() {
if (entrypoint.root.dependencyOverrides.isNotEmpty) {
errors.add(
'Your pubspec.yaml must not have a "dependency_overrides" field.\n'
'This ensures you test your package against the same versions of '
'its dependencies\n'
'that users will have when they use it.');
}
return new Future.value();
}
}


@ -1,49 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library pub.validator.directory;
import 'dart:async';
import 'package:path/path.dart' as path;
import '../entrypoint.dart';
import '../io.dart';
import '../validator.dart';
/// A validator that validates a package's top-level directories.
class DirectoryValidator extends Validator {
DirectoryValidator(Entrypoint entrypoint)
: super(entrypoint);
static final _PLURAL_NAMES = [
"benchmarks", "docs", "examples", "tests", "tools"
];
Future validate() {
return new Future.sync(() {
for (var dir in listDir(entrypoint.root.dir)) {
if (!dirExists(dir)) continue;
dir = path.basename(dir);
if (_PLURAL_NAMES.contains(dir)) {
// Cut off the "s"
var singularName = dir.substring(0, dir.length - 1);
warnings.add('Rename the top-level "$dir" directory to '
'"$singularName".\n'
'The Pub layout convention is to use singular directory '
'names.\n'
'Plural names won\'t be correctly identified by Pub and other '
'tools.');
}
if (dir.contains(new RegExp(r"^samples?$"))) {
warnings.add('Rename the top-level "$dir" directory to "example".\n'
'This allows Pub to find your examples and create "packages" '
'directories for them.\n');
}
}
});
}
}

Some files were not shown because too many files have changed in this diff.