1
0
mirror of https://github.com/dart-lang/sdk synced 2024-07-05 09:20:04 +00:00

Add a snapshot of flutter/engine/src/build to our sdk

BUG=
R=zra@google.com

Review URL: https://codereview.chromium.org/2101243005 .
This commit is contained in:
John McCutchan 2016-07-01 11:09:28 -07:00
parent 1c783dea87
commit 36af9946c5
637 changed files with 87074 additions and 0 deletions

5
build/OWNERS Normal file
View File

@ -0,0 +1,5 @@
cjhopman@chromium.org
dpranke@chromium.org
jochen@chromium.org
scottmg@chromium.org
thakis@chromium.org

16
build/PRESUBMIT.py Normal file
View File

@ -0,0 +1,16 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
def _RunTests(input_api, output_api):
  """Runs the unit tests (*_test.py) found in this directory."""
  checks = input_api.canned_checks
  return checks.RunUnitTestsInDirectory(
      input_api, output_api, '.', whitelist=[r'.+_test.py$'])


def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook invoked on upload."""
  return _RunTests(input_api, output_api)


def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook invoked on commit."""
  return _RunTests(input_api, output_api)

15
build/README.chromium Normal file
View File

@ -0,0 +1,15 @@
List of property sheets to be included by projects:
common.vsprops
Not used anymore. No-op. Kept for compatibility with current projects.
debug.vsprops
Enables debug settings. Must be included directly in Debug configuration. Includes internal\essential.vsprops.
external_code.vsprops
Contains settings made to simplify usage of external (non-Google) code. It relaxes the warning levels. Should be included after debug.vsprops or release.vsprops to override their settings.
output_dll_copy.rules
Run to enable automatic copying of DLLs when they are used as an input file in a vcproj project.
release.vsprops
Enables release settings. Must be included directly in Release configuration. Includes internal\essential.vsprops. Also includes "internal\release_impl$(CHROME_BUILD_TYPE).vsprops". So the behavior is dependent on the CHROME_BUILD_TYPE environment variable.

7
build/README.dart Normal file
View File

@ -0,0 +1,7 @@
This directory was taken from a snapshot of flutter/engine/src/build/.
The snapshot was taken with a recursive copy `cp -R` of the directory from
the flutter repository.
The contents are used to support the GN build system.

1442
build/all.gyp Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,20 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
Copyright (c) 2012 The Chromium Authors. All rights reserved. Use of this
source code is governed by a BSD-style license that can be found in the
LICENSE file.
-->
<!--
This is a dummy manifest which is required by:
1. aapt when generating R.java in java.gypi:
Nothing in the manifest is used, but it is still required by aapt.
2. lint: [min|target]SdkVersion are required by lint and should
be kept up-to-date.
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="dummy.package">
<uses-sdk android:minSdkVersion="16" android:targetSdkVersion="22" />
</manifest>

56
build/android/BUILD.gn Normal file
View File

@ -0,0 +1,56 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/android/rules.gni")

# Location in the build output tree where the JDK's tools.jar is copied.
sun_tools_jar_path = "$root_gen_dir/sun_tools_jar/tools.jar"

# Locates tools.jar in the local JDK and copies it into the output tree so
# other targets can depend on it at a fixed path.
action("find_sun_tools_jar") {
  script = "//build/android/gyp/find_sun_tools_jar.py"
  depfile = "$target_gen_dir/$target_name.d"
  outputs = [
    depfile,
    sun_tools_jar_path,
  ]
  args = [
    "--depfile",
    rebase_path(depfile, root_build_dir),
    "--output",
    rebase_path(sun_tools_jar_path, root_build_dir),
  ]
}

# Exposes the copied tools.jar as a prebuilt Java dependency.
java_prebuilt("sun_tools_java") {
  jar_path = sun_tools_jar_path
  jar_dep = ":find_sun_tools_jar"
}

# Strips symbols from the NDK's libc++_shared.so and writes the result
# under lib.stripped/ in the output directory.
action("cpplib_stripped") {
  _strip_bin = "${android_tool_prefix}strip"
  _soname = "libc++_shared.so"
  _input_so = "${android_libcpp_root}/libs/${android_app_abi}/${_soname}"
  _output_so = "${root_out_dir}/lib.stripped/${_soname}"
  script = "//build/gn_run_binary.py"
  inputs = [
    _strip_bin,
  ]
  sources = [
    _input_so,
  ]
  outputs = [
    _output_so,
  ]
  # gn_run_binary.py is handed paths relative to the build directory.
  _rebased_strip_bin = rebase_path(_strip_bin, root_out_dir)
  _rebased_input_so = rebase_path(_input_so, root_out_dir)
  _rebased_output_so = rebase_path(_output_so, root_out_dir)
  args = [
    _rebased_strip_bin,
    "--strip-unneeded",
    "-o",
    _rebased_output_so,
    _rebased_input_so,
  ]
}

Binary file not shown.

3
build/android/OWNERS Normal file
View File

@ -0,0 +1,3 @@
jbudorick@chromium.org
klundberg@chromium.org
pasko@chromium.org

View File

@ -0,0 +1,64 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for android buildbot.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into depot_tools.
"""
def CommonChecks(input_api, output_api):
  """Checks shared by the upload and commit presubmit hooks.

  Runs pylint over the Android build scripts and the unit tests for the
  pylib device/test utilities.
  """
  output = []

  def J(*dirs):
    """Returns a path relative to presubmit directory."""
    return input_api.os_path.join(input_api.PresubmitLocalPath(), *dirs)

  # Lint everything except the gyp/gn helpers (linted separately below
  # with their own import paths) and the symbols scripts.
  output.extend(input_api.canned_checks.RunPylint(
      input_api,
      output_api,
      black_list=[r'pylib/symbols/.*\.py$', r'gyp/.*\.py$', r'gn/.*\.py'],
      extra_paths_list=[
          J(), J('..', '..', 'third_party', 'android_testrunner'),
          J('buildbot')]))
  output.extend(input_api.canned_checks.RunPylint(
      input_api,
      output_api,
      white_list=[r'gyp/.*\.py$', r'gn/.*\.py'],
      extra_paths_list=[J('gyp'), J('gn')]))

  # Disabled due to http://crbug.com/410936
  #output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
  #input_api, output_api, J('buildbot', 'tests')))

  # Run the pylib unit tests with this directory on PYTHONPATH so the
  # tests can import pylib directly.
  pylib_test_env = dict(input_api.environ)
  pylib_test_env.update({
      'PYTHONPATH': input_api.PresubmitLocalPath(),
      'PYTHONDONTWRITEBYTECODE': '1',
  })
  output.extend(input_api.canned_checks.RunUnitTests(
      input_api,
      output_api,
      unit_tests=[
          J('pylib', 'base', 'test_dispatcher_unittest.py'),
          J('pylib', 'device', 'battery_utils_test.py'),
          J('pylib', 'device', 'device_utils_test.py'),
          J('pylib', 'device', 'logcat_monitor_test.py'),
          J('pylib', 'gtest', 'gtest_test_instance_test.py'),
          J('pylib', 'instrumentation',
            'instrumentation_test_instance_test.py'),
          J('pylib', 'results', 'json_results_test.py'),
          J('pylib', 'utils', 'md5sum_test.py'),
      ],
      env=pylib_test_env))
  return output


def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook invoked on upload."""
  return CommonChecks(input_api, output_api)


def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook invoked on commit."""
  return CommonChecks(input_api, output_api)

View File

@ -0,0 +1,20 @@
#!/bin/bash
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# If no flags are given, prints the current content shell flags.
#
# Otherwise, the given flags are used to REPLACE (not modify) the content shell
# flags. For example:
#   adb_android_webview_command_line --enable-webgl
#
# To remove all content shell flags, pass an empty string for the flags:
#   adb_android_webview_command_line ""

. $(dirname $0)/adb_command_line_functions.sh
CMD_LINE_FILE=/data/local/tmp/android-webview-command-line
# This path does not require root to write (see adb_command_line_functions.sh).
REQUIRES_SU=0
set_command_line "$@"

View File

@ -0,0 +1,19 @@
#!/bin/bash
#
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# If no flags are given, prints the current Chrome flags.
#
# Otherwise, the given flags are used to REPLACE (not modify) the Chrome
# flags. For example:
#   adb_chrome_public_command_line --enable-webgl
#
# To remove all Chrome flags, pass an empty string for the flags:
#   adb_chrome_public_command_line ""

. $(dirname $0)/adb_command_line_functions.sh
CMD_LINE_FILE=/data/local/chrome-command-line
# Writing this path requires root; set_command_line will run through su.
REQUIRES_SU=1
set_command_line "$@"

View File

@ -0,0 +1,20 @@
#!/bin/bash
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# If no flags are given, prints the current chrome shell flags.
#
# Otherwise, the given flags are used to REPLACE (not modify) the chrome shell
# flags. For example:
#   adb_chrome_shell_command_line --enable-webgl
#
# To remove all chrome shell flags, pass an empty string for the flags:
#   adb_chrome_shell_command_line ""

. $(dirname $0)/adb_command_line_functions.sh
CMD_LINE_FILE=/data/local/tmp/chrome-shell-command-line
# This path does not require root to write (see adb_command_line_functions.sh).
REQUIRES_SU=0
set_command_line "$@"

View File

@ -0,0 +1,40 @@
#!/bin/bash
#
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Variables must be set before calling:
# CMD_LINE_FILE - Path on device to flags file.
# REQUIRES_SU - Set to 1 if path requires root.
# Reads, replaces, or deletes the on-device flags file named by
# $CMD_LINE_FILE, elevating through su when $REQUIRES_SU is 1.
function set_command_line() {
  SU_CMD=""
  if [[ "$REQUIRES_SU" = 1 ]]; then
    # Older androids accept "su -c", while newer use "su uid".
    SDK_LEVEL=$(adb shell getprop ro.build.version.sdk | tr -d '\r')
    # E.g. if no device connected.
    if [[ -z "$SDK_LEVEL" ]]; then
      exit 1
    fi
    SU_CMD="su -c"
    if (( $SDK_LEVEL >= 21 )); then
      SU_CMD="su 0"
    fi
  fi
  if [ $# -eq 0 ] ; then
    # If nothing specified, print the command line (stripping off "chrome ")
    adb shell "cat $CMD_LINE_FILE 2>/dev/null" | cut -d ' ' -s -f2-
  elif [ $# -eq 1 ] && [ "$1" = '' ] ; then
    # If given an empty string, delete the command line.
    set -x
    adb shell $SU_CMD rm $CMD_LINE_FILE >/dev/null
  else
    # Else set it. The leading "chrome" token is a placeholder program name,
    # stripped again on read above.
    set -x
    adb shell "echo 'chrome $*' | $SU_CMD dd of=$CMD_LINE_FILE"
    # Prevent other apps from modifying flags (this can create security issues).
    adb shell $SU_CMD chmod 0664 $CMD_LINE_FILE
  fi
}

View File

@ -0,0 +1,20 @@
#!/bin/bash
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# If no flags are given, prints the current content shell flags.
#
# Otherwise, the given flags are used to REPLACE (not modify) the content shell
# flags. For example:
#   adb_content_shell_command_line --enable-webgl
#
# To remove all content shell flags, pass an empty string for the flags:
#   adb_content_shell_command_line ""

. $(dirname $0)/adb_command_line_functions.sh
CMD_LINE_FILE=/data/local/tmp/content-shell-command-line
# This path does not require root to write (see adb_command_line_functions.sh).
REQUIRES_SU=0
set_command_line "$@"

View File

@ -0,0 +1,139 @@
#!/bin/bash
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# A collection of functions useful for maintaining android devices
# Run an adb command on all connected device in parallel.
# Usage: adb_all command line to eval. Quoting is optional.
#
# Examples:
# adb_all install Chrome.apk
# adb_all 'shell cat /path/to/file'
#
# Run an adb command on all connected devices in parallel.
adb_all() {
  if [[ $# == 0 ]]; then
    echo "Usage: adb_all <adb command>. Quoting is optional."
    echo "Example: adb_all install Chrome.apk"
    return 1
  fi
  # -b blocks until at least one device is attached.
  local DEVICES=$(adb_get_devices -b)
  local NUM_DEVICES=$(echo $DEVICES | wc -w)
  if (( $NUM_DEVICES > 1 )); then
    echo "Looping over $NUM_DEVICES devices"
  fi
  _adb_multi "$DEVICES" "$*"
}
# Run a command on each connected device. Quoting the command is suggested but
# not required. The script setups up variable DEVICE to correspond to the
# current serial number. Intended for complex one_liners that don't work in
# adb_all
# Usage: adb_device_loop 'command line to eval'
# Evaluate a command once per connected device, sequentially, with $DEVICE
# set to the current serial.
adb_device_loop() {
  if [[ $# == 0 ]]; then
    echo "Intended for more complex one-liners that cannot be done with" \
         "adb_all."
    echo 'Usage: adb_device_loop "echo $DEVICE: $(adb root &&' \
         'adb shell cat /data/local.prop)"'
    return 1
  fi
  local DEVICES=$(adb_get_devices)
  if [[ -z $DEVICES ]]; then
    return
  fi
  # Do not change DEVICE variable name - part of api
  for DEVICE in $DEVICES; do
    DEV_TYPE=$(adb -s $DEVICE shell getprop ro.product.device | sed 's/\r//')
    echo "Running on $DEVICE ($DEV_TYPE)"
    # ANDROID_SERIAL makes plain "adb" inside the command target $DEVICE.
    ANDROID_SERIAL=$DEVICE eval "$*"
  done
}
# Erases data from any devices visible on adb. To preserve a device,
# disconnect it or:
# 1) Reboot it into fastboot with 'adb reboot bootloader'
# 2) Run wipe_all_devices to wipe remaining devices
# 3) Restore device it with 'fastboot reboot'
#
# Usage: wipe_all_devices [-f]
#
# Erase all data from every device visible on adb. Pass -f to skip the
# confirmation prompt.
wipe_all_devices() {
  if [[ -z $(which adb) || -z $(which fastboot) ]]; then
    echo "aborting: adb and fastboot not in path"
    return 1
  elif ! $(groups | grep -q 'plugdev'); then
    echo "If fastboot fails, run: 'sudo adduser $(whoami) plugdev'"
  fi
  local DEVICES=$(adb_get_devices -b)
  if [[ $1 != '-f' ]]; then
    echo "This will ERASE ALL DATA from $(echo $DEVICES | wc -w) device."
    read -p "Hit enter to continue"
  fi
  _adb_multi "$DEVICES" "reboot bootloader"
  # Subshell to isolate job list
  (
  for DEVICE in $DEVICES; do
    fastboot_erase $DEVICE &
  done
  wait
  )
  # Reboot devices together
  for DEVICE in $DEVICES; do
    fastboot -s $DEVICE reboot
  done
}
# Wipe a device in fastboot.
# Usage fastboot_erase [serial]
# Wipe a device that is already in fastboot mode.
# Usage: fastboot_erase [serial]
fastboot_erase() {
  if [[ -n $1 ]]; then
    echo "Wiping $1"
    local SERIAL="-s $1"
  else
    # No serial given: require that some device be visible in fastboot.
    if [ -z $(fastboot devices) ]; then
      echo "No devices in fastboot, aborting."
      echo "Check out wipe_all_devices to see if sufficient"
      echo "You can put a device in fastboot using adb reboot bootloader"
      return 1
    fi
    local SERIAL=""
  fi
  # Erase both the cache and userdata partitions.
  fastboot $SERIAL erase cache
  fastboot $SERIAL erase userdata
}
# Get list of devices connected via adb
# Args: -b block until adb detects a device
# Print serials of devices connected via adb, space-separated on one line.
# Args: -b  block until adb detects a device
adb_get_devices() {
  local DEVICES="$(adb devices | grep 'device$')"
  if [[ -z $DEVICES && $1 == '-b' ]]; then
    echo '- waiting for device -' >&2
    local DEVICES="$(adb wait-for-device devices | grep 'device$')"
  fi
  # Keep only the serial column, joined by spaces.
  echo "$DEVICES" | awk -vORS=' ' '{print $1}' | sed 's/ $/\n/'
}
###################################################
## HELPER FUNCTIONS
###################################################
# Run an adb command in parallel over a device list
# Run an adb command in parallel over a device list; the subshell isolates
# the background-job list so "wait" only blocks on these jobs.
_adb_multi() {
  local DEVICES=$1
  local ADB_ARGS=$2
  (
  for DEVICE in $DEVICES; do
    adb -s $DEVICE $ADB_ARGS &
  done
  wait
  )
}

1047
build/android/adb_gdb Executable file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,16 @@
#!/bin/bash
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Attach to or start a ContentShell process and debug it.
# See --help for details.
#
PROGDIR=$(dirname "$0")
export ADB_GDB_PROGNAME=$(basename "$0")
# Activity launched by adb_gdb when starting (rather than attaching to)
# the app.
export ADB_GDB_ACTIVITY=.AwShellActivity
"$PROGDIR"/adb_gdb \
    --program-name=AwShellApplication \
    --package-name=org.chromium.android_webview.shell \
    "$@"

View File

@ -0,0 +1,16 @@
#!/bin/bash
#
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Attach to or start a ChromePublic process and debug it.
# See --help for details.
#
PROGDIR=$(dirname "$0")
export ADB_GDB_PROGNAME=$(basename "$0")
# Activity launched by adb_gdb when starting (rather than attaching to)
# the app.
export ADB_GDB_ACTIVITY=com.google.android.apps.chrome.Main
"$PROGDIR"/adb_gdb \
    --program-name=ChromePublic \
    --package-name=org.chromium.chrome \
    "$@"

View File

@ -0,0 +1,16 @@
#!/bin/bash
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Attach to or start a ChromeShell process and debug it.
# See --help for details.
#
PROGDIR=$(dirname "$0")
export ADB_GDB_PROGNAME=$(basename "$0")
# Activity launched by adb_gdb when starting (rather than attaching to)
# the app.
export ADB_GDB_ACTIVITY=.ChromeShellActivity
"$PROGDIR"/adb_gdb \
    --program-name=ChromeShell \
    --package-name=org.chromium.chrome.shell \
    "$@"

View File

@ -0,0 +1,16 @@
#!/bin/bash
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Attach to or start a ContentShell process and debug it.
# See --help for details.
#
PROGDIR=$(dirname "$0")
export ADB_GDB_PROGNAME=$(basename "$0")
# Activity launched by adb_gdb when starting (rather than attaching to)
# the app.
export ADB_GDB_ACTIVITY=.ContentShellActivity
"$PROGDIR"/adb_gdb \
    --program-name=ContentShell \
    --package-name=org.chromium.content_shell_apk \
    "$@"

View File

@ -0,0 +1,16 @@
#!/bin/bash
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Attach to or start a CronetSample process and debug it.
# See --help for details.
#
PROGDIR=$(dirname "$0")
export ADB_GDB_PROGNAME=$(basename "$0")
# Activity launched by adb_gdb when starting (rather than attaching to)
# the app.
export ADB_GDB_ACTIVITY=.CronetSampleActivity
"$PROGDIR"/adb_gdb \
    --program-name=CronetSample \
    --package-name=org.chromium.cronet_sample_apk \
    "$@"

View File

@ -0,0 +1,16 @@
#!/bin/bash
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Attach to or start a MojoShell process and debug it.
# See --help for details.
#
PROGDIR=$(dirname "$0")
export ADB_GDB_PROGNAME=$(basename "$0")
# Activity launched by adb_gdb when starting (rather than attaching to)
# the app.
export ADB_GDB_ACTIVITY=.MojoShellActivity
"$PROGDIR"/adb_gdb \
    --program-name=MojoShell \
    --package-name=org.chromium.mojo_shell_apk \
    "$@"

114
build/android/adb_install_apk.py Executable file
View File

@ -0,0 +1,114 @@
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility script to install APKs from the command line quickly."""
import argparse
import glob
import logging
import os
import sys
from pylib import constants
from pylib.device import device_blacklist
from pylib.device import device_errors
from pylib.device import device_utils
from pylib.utils import apk_helper
from pylib.utils import run_tests_helper
def main():
  """Parses arguments, locates the APK (and any matching splits), and
  installs it on all targeted healthy devices in parallel, blacklisting
  devices on install failure or timeout."""
  parser = argparse.ArgumentParser()

  apk_group = parser.add_mutually_exclusive_group(required=True)
  apk_group.add_argument('--apk', dest='apk_name',
                         help='DEPRECATED The name of the apk containing the'
                              ' application (with the .apk extension).')
  apk_group.add_argument('apk_path', nargs='?',
                         help='The path to the APK to install.')

  # TODO(jbudorick): Remove once no clients pass --apk_package
  parser.add_argument('--apk_package', help='DEPRECATED unused')
  parser.add_argument('--split',
                      action='append',
                      dest='splits',
                      help='A glob matching the apk splits. '
                           'Can be specified multiple times.')
  parser.add_argument('--keep_data',
                      action='store_true',
                      default=False,
                      help='Keep the package data when installing '
                           'the application.')
  parser.add_argument('--debug', action='store_const', const='Debug',
                      dest='build_type',
                      default=os.environ.get('BUILDTYPE', 'Debug'),
                      help='If set, run test suites under out/Debug. '
                           'Default is env var BUILDTYPE or Debug')
  parser.add_argument('--release', action='store_const', const='Release',
                      dest='build_type',
                      help='If set, run test suites under out/Release. '
                           'Default is env var BUILDTYPE or Debug.')
  parser.add_argument('-d', '--device', dest='device',
                      help='Target device for apk to install on.')
  parser.add_argument('-v', '--verbose', action='count',
                      help='Enable verbose logging.')
  args = parser.parse_args()

  run_tests_helper.SetLogLevel(args.verbose)
  constants.SetBuildType(args.build_type)

  # The mutually-exclusive required group guarantees exactly one of
  # apk_path / apk_name is set.
  apk = args.apk_path or args.apk_name
  if not apk.endswith('.apk'):
    apk += '.apk'
  if not os.path.exists(apk):
    # Fall back to the build output's apks/ directory.
    apk = os.path.join(constants.GetOutDirectory(), 'apks', apk)
    if not os.path.exists(apk):
      parser.error('%s not found.' % apk)

  if args.splits:
    # Keep only splits that belong to the same package as the base APK.
    splits = []
    base_apk_package = apk_helper.ApkHelper(apk).GetPackageName()
    for split_glob in args.splits:
      apks = [f for f in glob.glob(split_glob) if f.endswith('.apk')]
      if not apks:
        logging.warning('No apks matched for %s.' % split_glob)
      for f in apks:
        helper = apk_helper.ApkHelper(f)
        if (helper.GetPackageName() == base_apk_package
            and helper.GetSplitName()):
          splits.append(f)

  devices = device_utils.DeviceUtils.HealthyDevices()

  if args.device:
    devices = [d for d in devices if d == args.device]
    if not devices:
      raise device_errors.DeviceUnreachableError(args.device)
  elif not devices:
    raise device_errors.NoDevicesError()

  def blacklisting_install(device):
    """Installs on one device; blacklists it on failure or timeout."""
    try:
      if args.splits:
        device.InstallSplitApk(apk, splits, reinstall=args.keep_data)
      else:
        device.Install(apk, reinstall=args.keep_data)
    except device_errors.CommandFailedError:
      logging.exception('Failed to install %s', args.apk_name)
      device_blacklist.ExtendBlacklist([str(device)])
      logging.warning('Blacklisting %s', str(device))
    except device_errors.CommandTimeoutError:
      logging.exception('Timed out while installing %s', args.apk_name)
      device_blacklist.ExtendBlacklist([str(device)])
      logging.warning('Blacklisting %s', str(device))

  device_utils.DeviceUtils.parallel(devices).pMap(blacklisting_install)


if __name__ == '__main__':
  sys.exit(main())

View File

@ -0,0 +1,24 @@
#!/bin/bash
#
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Kill a running android webview shell.
#
# Assumes you have sourced the build/android/envsetup.sh script.

SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.android_webview.shell')
VAL=$(echo "$SHELL_PID_LINES" | wc -l)
# NOTE(review): echo always emits at least one line, so VAL is never < 1
# and this branch looks unreachable; the empty-PID check below covers the
# not-running case.
if [ $VAL -lt 1 ] ; then
  echo "Not running android webview shell."
else
  # The PID is the second column of the ps output.
  SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
  if [ "$SHELL_PID" != "" ] ; then
    set -x
    adb shell kill $SHELL_PID
    set -  # stop echoing commands
  else
    echo "Android webview shell does not appear to be running."
  fi
fi

View File

@ -0,0 +1,24 @@
#!/bin/bash
#
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Kill a running instance of ChromePublic.
#
# Assumes you have sourced the build/android/envsetup.sh script.

SHELL_PID_LINES=$(adb shell ps | grep -w 'org.chromium.chrome')
VAL=$(echo "$SHELL_PID_LINES" | wc -l)
# NOTE(review): echo always emits at least one line, so VAL is never < 1
# and this branch looks unreachable; the empty-PID check below covers the
# not-running case.
if [ $VAL -lt 1 ] ; then
  echo "Not running ChromePublic."
else
  # The PID is the second column of the ps output.
  SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
  if [ "$SHELL_PID" != "" ] ; then
    set -x
    adb shell kill $SHELL_PID
    set -  # stop echoing commands
  else
    echo "ChromePublic does not appear to be running."
  fi
fi

View File

@ -0,0 +1,24 @@
#!/bin/bash
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Kill a running chrome shell.
#
# Assumes you have sourced the build/android/envsetup.sh script.

SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.chrome.shell')
VAL=$(echo "$SHELL_PID_LINES" | wc -l)
# NOTE(review): echo always emits at least one line, so VAL is never < 1
# and this branch looks unreachable; the empty-PID check below covers the
# not-running case.
if [ $VAL -lt 1 ] ; then
  echo "Not running Chrome shell."
else
  # The PID is the second column of the ps output.
  SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
  if [ "$SHELL_PID" != "" ] ; then
    set -x
    adb shell kill $SHELL_PID
    set -  # stop echoing commands
  else
    echo "Chrome shell does not appear to be running."
  fi
fi

View File

@ -0,0 +1,24 @@
#!/bin/bash
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Kill a running content shell.
#
# Assumes you have sourced the build/android/envsetup.sh script.

SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.content_shell_apk')
VAL=$(echo "$SHELL_PID_LINES" | wc -l)
# NOTE(review): echo always emits at least one line, so VAL is never < 1
# and this branch looks unreachable; the empty-PID check below covers the
# not-running case.
if [ $VAL -lt 1 ] ; then
  echo "Not running Content shell."
else
  # The PID is the second column of the ps output.
  SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
  if [ "$SHELL_PID" != "" ] ; then
    set -x
    adb shell kill $SHELL_PID
    set -  # stop echoing commands
  else
    echo "Content shell does not appear to be running."
  fi
fi

View File

@ -0,0 +1,156 @@
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Saves logcats from all connected devices.
Usage: adb_logcat_monitor.py <base_dir> [<adb_binary_path>]
This script will repeatedly poll adb for new devices and save logcats
inside the <base_dir> directory, which it attempts to create. The
script will run until killed by an external signal. To test, run the
script in a shell and <Ctrl>-C it after a while. It should be
resilient across phone disconnects and reconnects and start the logcat
early enough to not miss anything.
"""
import logging
import os
import re
import shutil
import signal
import subprocess
import sys
import time
# Map from device_id -> (process, logcat_num)
devices = {}
class TimeoutException(Exception):
  """Raised (via the SIGALRM handler) when 'adb devices' hangs too long."""


class SigtermError(Exception):
  """Raised from the SIGTERM handler to trigger a clean shutdown."""
def StartLogcatIfNecessary(device_id, adb_cmd, base_dir):
  """Spawns a adb logcat process if one is not currently running.

  Args:
    device_id: serial of the device to log.
    adb_cmd: binary to run adb.
    base_dir: directory where logcat_<device>_<seq> files are written.
  """
  # Module-level `devices` maps device_id -> (process, next logcat number).
  process, logcat_num = devices[device_id]
  if process:
    if process.poll() is None:
      # Logcat process is still happily running
      return
    else:
      logging.info('Logcat for device %s has died', device_id)
      # Drain stderr, skipping the benign "waiting for device" noise.
      error_filter = re.compile('- waiting for device -')
      for line in process.stderr:
        if not error_filter.match(line):
          logging.error(device_id + ': ' + line)

  logging.info('Starting logcat %d for device %s', logcat_num,
               device_id)
  # Zero-padded sequence number keeps the files lexically sorted.
  logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num)
  logcat_file = open(os.path.join(base_dir, logcat_filename), 'w')
  process = subprocess.Popen([adb_cmd, '-s', device_id,
                              'logcat', '-v', 'threadtime'],
                             stdout=logcat_file,
                             stderr=subprocess.PIPE)
  devices[device_id] = (process, logcat_num + 1)
def GetAttachedDevices(adb_cmd):
  """Gets the device list from adb.

  We use an alarm in this function to avoid deadlocking from an external
  dependency.

  Args:
    adb_cmd: binary to run adb

  Returns:
    list of devices or an empty list on timeout
  """
  # SIGALRM fires TimeoutHandler (installed in main), which raises
  # TimeoutException if 'adb devices' takes longer than 2 seconds.
  signal.alarm(2)
  try:
    out, err = subprocess.Popen([adb_cmd, 'devices'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE).communicate()
    if err:
      logging.warning('adb device error %s', err.strip())
    # Serial numbers of lines ending in "\tdevice" (i.e. online devices).
    return re.findall('^(\\S+)\tdevice$', out, re.MULTILINE)
  except TimeoutException:
    logging.warning('"adb devices" command timed out')
    return []
  except (IOError, OSError):
    logging.exception('Exception from "adb devices"')
    return []
  finally:
    # Always cancel any pending alarm.
    signal.alarm(0)
# NOTE(review): this script uses print statements and dict.itervalues(),
# i.e. it is Python 2 only.
def main(base_dir, adb_cmd='adb'):
  """Monitor adb forever. Expects a SIGINT (Ctrl-C) to kill."""
  # We create the directory to ensure 'run once' semantics
  if os.path.exists(base_dir):
    print 'adb_logcat_monitor: %s already exists? Cleaning' % base_dir
    shutil.rmtree(base_dir, ignore_errors=True)

  os.makedirs(base_dir)
  logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'),
                      level=logging.INFO,
                      format='%(asctime)-2s %(levelname)-8s %(message)s')

  # Set up the alarm for calling 'adb devices'. This is to ensure
  # our script doesn't get stuck waiting for a process response
  def TimeoutHandler(_signum, _unused_frame):
    raise TimeoutException()
  signal.signal(signal.SIGALRM, TimeoutHandler)

  # Handle SIGTERMs to ensure clean shutdown
  def SigtermHandler(_signum, _unused_frame):
    raise SigtermError()
  signal.signal(signal.SIGTERM, SigtermHandler)

  logging.info('Started with pid %d', os.getpid())
  # Written so adb_logcat_printer can signal us to shut down.
  pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')

  try:
    with open(pid_file_path, 'w') as f:
      f.write(str(os.getpid()))
    while True:
      for device_id in GetAttachedDevices(adb_cmd):
        if not device_id in devices:
          # New device: clear its log buffer once, then start tracking it.
          subprocess.call([adb_cmd, '-s', device_id, 'logcat', '-c'])
          devices[device_id] = (None, 0)

      for device in devices:
        # This will spawn logcat watchers for any device ever detected
        StartLogcatIfNecessary(device, adb_cmd, base_dir)

      time.sleep(5)
  except SigtermError:
    logging.info('Received SIGTERM, shutting down')
  except: # pylint: disable=bare-except
    logging.exception('Unexpected exception in main.')
  finally:
    # Terminate all spawned logcat processes and remove the PID file so
    # the printer knows shutdown completed.
    for process, _ in devices.itervalues():
      if process:
        try:
          process.terminate()
        except OSError:
          pass
    os.remove(pid_file_path)


if __name__ == '__main__':
  if 2 <= len(sys.argv) <= 3:
    print 'adb_logcat_monitor: Initializing'
    sys.exit(main(*sys.argv[1:3]))

  print 'Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0]

View File

@ -0,0 +1,213 @@
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Shutdown adb_logcat_monitor and print accumulated logs.
To test, call './adb_logcat_printer.py <base_dir>' where
<base_dir> contains 'adb logcat -v threadtime' files named as
logcat_<deviceID>_<sequenceNum>
The script will print the files to out, and will combine multiple
logcats from a single device if there is overlap.
Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
will attempt to terminate the contained PID by sending a SIGINT and
monitoring for the deletion of the aforementioned file.
"""
# pylint: disable=W0702
import cStringIO
import logging
import optparse
import os
import re
import signal
import sys
import time
# Set this to debug for more verbose output
LOG_LEVEL = logging.INFO
def CombineLogFiles(list_of_lists, logger):
  """Splices together multiple logcats from the same device.

  Args:
    list_of_lists: list of pairs (filename, list of timestamped lines)
    logger: handler to log events

  Returns:
    list of lines with duplicates removed
  """
  combined = ['']
  for filename, lines in list_of_lists:
    # A file holding only the logcat header has nothing worth splicing.
    if len(lines) < 2:
      continue
    start = 0
    # The initial [''] sentinel means nothing has been combined yet, so
    # there is no previous line to splice on.
    if len(combined) > 1:
      last_line = combined[-1]
      # Only splice on a line carrying a logcat timestamp.
      if not re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} ', last_line):
        logger.warning('splice error - no timestamp in "%s"?',
                       last_line.strip())
      else:
        try:
          start = lines.index(last_line)
        except ValueError:
          # The last line was valid but wasn't found in the next file.
          combined.append('***** POSSIBLE INCOMPLETE LOGCAT *****')
          logger.info('Unable to splice %s. Incomplete logcat?', filename)
    combined.append('*' * 30 + ' %s' % filename)
    combined.extend(lines[start:])
  return combined
def FindLogFiles(base_dir):
  """Search a directory for logcat files.

  Args:
    base_dir: directory to search

  Returns:
    Mapping of device_id to a sorted list of file paths for a given device
  """
  pattern = re.compile(r'^logcat_(\S+)_(\d+)$')
  # Triples of (<device_id>, <seq num>, <full file path>); sorting orders
  # each device's files by sequence number.
  entries = []
  for name in os.listdir(base_dir):
    m = pattern.match(name)
    if m is None:
      continue
    entries.append((m.group(1), int(m.group(2)),
                    os.path.join(base_dir, name)))
  file_map = {}
  for device_id, _, path in sorted(entries):
    file_map.setdefault(device_id, []).append(path)
  return file_map
def GetDeviceLogs(log_filenames, logger):
  """Read log files, combine and format.

  Args:
    log_filenames: mapping of device_id to sorted list of file paths
    logger: logger handle for logging events

  Returns:
    list of formatted device logs, one for each device.
  """
  device_logs = []

  # .items() instead of the Python-2-only .iteritems(): identical behavior
  # here, and it lets this helper also run under Python 3.
  for device, device_files in log_filenames.items():
    logger.debug('%s: %s', device, str(device_files))
    device_file_lines = []
    for cur_file in device_files:
      with open(cur_file) as f:
        device_file_lines += [(cur_file, f.read().splitlines())]
    combined_lines = CombineLogFiles(device_file_lines, logger)
    # Prepend each line with a short unique ID so it's easy to see
    # when the device changes. We don't use the start of the device
    # ID because it can be the same among devices. Example lines:
    # AB324: foo
    # AB324: blah
    device_logs += [('\n' + device[-5:] + ': ').join(combined_lines)]
  return device_logs
def ShutdownLogcatMonitor(base_dir, logger):
  """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
  try:
    monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
    with open(monitor_pid_path) as f:
      monitor_pid = int(f.readline())

    logger.info('Sending SIGTERM to %d', monitor_pid)
    os.kill(monitor_pid, signal.SIGTERM)
    i = 0
    while True:
      time.sleep(.2)
      if not os.path.exists(monitor_pid_path):
        # The monitor deletes its PID file on clean shutdown.
        return
      if not os.path.exists('/proc/%d' % monitor_pid):
        # Process gone but PID file left behind (Linux-only /proc check).
        logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
        return
      logger.info('Waiting for logcat process to terminate.')
      i += 1
      if i >= 10:
        # Give up after ~2 seconds of polling.
        logger.warning('Monitor pid did not terminate. Continuing anyway.')
        return

  except (ValueError, IOError, OSError):
    logger.exception('Error signaling logcat monitor - continuing')
def main(argv):
  """Combine and emit the logs collected by adb_logcat_monitor.

  Args:
    argv: command-line arguments; exactly one positional argument naming
        the monitor's base directory is required.
  """
  parser = optparse.OptionParser(usage='Usage: %prog [options] <log dir>')
  parser.add_option('--output-path',
                    help='Output file path (if unspecified, prints to stdout)')
  options, args = parser.parse_args(argv)
  if len(args) != 1:
    parser.error('Wrong number of unparsed args')
  base_dir = args[0]
  if options.output_path:
    output_file = open(options.output_path, 'w')
  else:
    output_file = sys.stdout

  # Buffer this script's own log records so they can be appended to the
  # output after the device logs.
  log_stringio = cStringIO.StringIO()
  logger = logging.getLogger('LogcatPrinter')
  logger.setLevel(LOG_LEVEL)
  sh = logging.StreamHandler(log_stringio)
  sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
                                    ' %(message)s'))
  logger.addHandler(sh)

  try:
    # Wait at least 5 seconds after base_dir is created before printing.
    #
    # The idea is that 'adb logcat > file' output consists of 2 phases:
    #  1 Dump all the saved logs to the file
    #  2 Stream log messages as they are generated
    #
    # We want to give enough time for phase 1 to complete. There's no
    # good method to tell how long to wait, but it usually only takes a
    # second. On most bots, this code path won't occur at all, since
    # adb_logcat_monitor.py command will have spawned more than 5 seconds
    # prior to calling this script.
    try:
      sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
    except OSError:
      sleep_time = 5
    if sleep_time > 0:
      logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
      time.sleep(sleep_time)

    assert os.path.exists(base_dir), '%s does not exist' % base_dir
    ShutdownLogcatMonitor(base_dir, logger)
    separator = '\n' + '*' * 80 + '\n\n'
    for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
      output_file.write(log)
      output_file.write(separator)
    with open(os.path.join(base_dir, 'eventlog')) as f:
      output_file.write('\nLogcat Monitor Event Log\n')
      output_file.write(f.read())
  except Exception:  # pylint: disable=broad-except
    # Was a bare 'except:', which also swallowed KeyboardInterrupt and
    # SystemExit; Exception still logs any real failure without hiding
    # deliberate interruptions.
    logger.exception('Unexpected exception')

  logger.info('Done.')
  sh.flush()
  output_file.write('\nLogcat Printer Event Log\n')
  output_file.write(log_stringio.getvalue())
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -0,0 +1,8 @@
#!/bin/bash
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Start / stop profiling in chrome.

# Quote the script path and use "$@" so arguments (and a checkout path)
# containing spaces are forwarded verbatim; the unquoted $@ form word-splits.
exec "$(dirname "$0")"/../../tools/profile_chrome.py "$@"

View File

@ -0,0 +1,80 @@
#!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Command line tool for forwarding ports from a device to the host.
Allows an Android device to connect to services running on the host machine,
i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
to be built.
"""
import logging
import optparse
import sys
import time
from pylib import constants
from pylib import forwarder
from pylib.device import adb_wrapper
from pylib.device import device_errors
from pylib.device import device_utils
from pylib.utils import run_tests_helper
def main(argv):
  """Map each device_port/host_port pair, then block until interrupted.

  Args:
    argv: full argv (argv[0] is the program name, followed by an even
        number of port arguments).
  """
  parser = optparse.OptionParser(usage='Usage: %prog [options] device_port '
                                 'host_port [device_port_2 host_port_2] ...',
                                 description=__doc__)
  parser.add_option('-v',
                    '--verbose',
                    dest='verbose_count',
                    default=0,
                    action='count',
                    help='Verbose level (multiple times for more)')
  parser.add_option('--device',
                    help='Serial number of device we should use.')
  parser.add_option('--debug', action='store_const', const='Debug',
                    dest='build_type', default='Release',
                    help='Use Debug build of host tools instead of Release.')

  options, args = parser.parse_args(argv)
  run_tests_helper.SetLogLevel(options.verbose_count)

  # args[0] is the program name, so a valid command line has odd length.
  # parser.error() raises SystemExit, so no explicit sys.exit() is needed.
  if len(args) < 2 or not len(args) % 2:
    parser.error('Need even number of port pairs')

  try:
    # A list comprehension (rather than map()) keeps the result
    # subscriptable under Python 3 as well.
    ports = [int(arg) for arg in args[1:]]
  except ValueError:
    parser.error('Bad port number')
  port_pairs = list(zip(ports[::2], ports[1::2]))

  devices = device_utils.DeviceUtils.HealthyDevices()

  if options.device:
    device = next((d for d in devices if d == options.device), None)
    if not device:
      raise device_errors.DeviceUnreachableError(options.device)
  elif devices:
    device = devices[0]
    logging.info('No device specified. Defaulting to %s', devices[0])
  else:
    raise device_errors.NoDevicesError()

  constants.SetBuildType(options.build_type)
  try:
    forwarder.Forwarder.Map(port_pairs, device)
    while True:
      time.sleep(60)
  except KeyboardInterrupt:
    sys.exit(0)
  finally:
    forwarder.Forwarder.UnmapAllDevicePorts(device)
if __name__ == '__main__':
main(sys.argv)

View File

@ -0,0 +1,12 @@
#!/bin/bash
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Launches the Android WebView shell on the attached device.
# Usage: <script> [url] - when a URL is given it is passed via the
# intent's -d (data) flag; otherwise the shell opens with no URL.
optional_url=$1

adb shell am start \
  -a android.intent.action.VIEW \
  -n org.chromium.android_webview.shell/.AwShellActivity \
  ${optional_url:+-d "$optional_url"}

View File

@ -0,0 +1,12 @@
#!/bin/bash
#
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Launches Chrome for Android on the attached device.
# Usage: <script> [url] - an optional URL is passed via the intent's
# -d (data) flag.
optional_url=$1

adb shell am start \
  -a android.intent.action.VIEW \
  -n org.chromium.chrome/com.google.android.apps.chrome.Main \
  ${optional_url:+-d "$optional_url"}

View File

@ -0,0 +1,12 @@
#!/bin/bash
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Launches ChromeShell on the attached device.
# Usage: <script> [url] - an optional URL is passed via the intent's
# -d (data) flag.
optional_url=$1

adb shell am start \
  -a android.intent.action.VIEW \
  -n org.chromium.chrome.shell/.ChromeShellActivity \
  ${optional_url:+-d "$optional_url"}

View File

@ -0,0 +1,12 @@
#!/bin/bash
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Launches ContentShell on the attached device.
# Usage: <script> [url] - an optional URL is passed via the intent's
# -d (data) flag.
optional_url=$1

adb shell am start \
  -a android.intent.action.VIEW \
  -n org.chromium.content_shell_apk/.ContentShellActivity \
  ${optional_url:+-d "$optional_url"}

View File

@ -0,0 +1,16 @@
#!/bin/bash
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
optional_url=$1
parameters=$2
adb logcat -c
adb shell am start -S \
-a android.intent.action.VIEW \
-n org.chromium.mojo_shell_apk/.MojoShellActivity \
${parameters:+--esa parameters "$parameters"} \
${optional_url:+-d "$optional_url"}
adb logcat -s MojoShellApplication MojoShellActivity chromium

View File

@ -0,0 +1,17 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script makes all JNI exported symbols local, to prevent the JVM from
# being able to find them, enforcing use of manual JNI function registration.
# This is used for all Android binaries by default, unless they explicitly state
# that they want JNI exported symbols to remain visible, as we need to ensure
# the manual registration path is correct to maintain compatibility with the
# crazy linker.
# Check ld version script manual:
# https://sourceware.org/binutils/docs-2.24/ld/VERSION.html#VERSION
{
  /* Hide the Java_* JNI entry points so the JVM cannot resolve them by
     symbol name, forcing manual registration (e.g. via JNI_OnLoad). */
  local:
    Java_*;
};

View File

@ -0,0 +1,13 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Copies the shared debug keystore (used to sign locally-built APKs) into
# the root build output directory.
copy("keystore") {
  sources = [
    "chromium-debug.keystore",
  ]
  outputs = [
    "$root_out_dir/chromium-debug.keystore",
  ]
}

View File

@ -0,0 +1,96 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright (C) 2005-2008 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project default="-package">
  <property name="verbose" value="false" />
  <property name="out.dir" location="${OUT_DIR}" />
  <property name="out.absolute.dir" location="${out.dir}" />
  <property name="sdk.dir" location="${ANDROID_SDK_ROOT}"/>
  <property name="emma.device.jar" location="${EMMA_DEVICE_JAR}" />

  <!-- EMMA coverage support is enabled when the build passes
       EMMA_INSTRUMENT=1 on the ant command line. -->
  <condition property="emma.enabled" value="true" else="false">
    <equals arg1="${EMMA_INSTRUMENT}" arg2="1"/>
  </condition>

  <!-- jar file from where the tasks are loaded -->
  <path id="android.antlibs">
    <pathelement path="${sdk.dir}/tools/lib/ant-tasks.jar" />
  </path>

  <!-- Custom tasks -->
  <taskdef resource="anttasks.properties" classpathref="android.antlibs" />

  <!-- Map the gyp CONFIGURATION_NAME to ant's debug/release build target. -->
  <condition property="build.target" value="release" else="debug">
    <equals arg1="${CONFIGURATION_NAME}" arg2="Release" />
  </condition>
  <condition property="build.is.packaging.debug" value="true" else="false">
    <equals arg1="${build.target}" arg2="debug" />
  </condition>

  <!-- Disables automatic signing. -->
  <property name="build.is.signing.debug" value="false"/>

  <!-- SDK tools assume that out.packaged.file is signed and name it "...-unaligned" -->
  <property name="out.packaged.file" value="${UNSIGNED_APK_PATH}" />

  <property name="native.libs.absolute.dir" location="${NATIVE_LIBS_DIR}" />

  <!-- Intermediate files -->
  <property name="resource.package.file.name" value="${RESOURCE_PACKAGED_APK_NAME}" />
  <property name="intermediate.dex.file" location="${DEX_FILE_PATH}" />

  <!-- Macro that enables passing a variable list of external jar files
       to ApkBuilder. -->
  <macrodef name="package-helper">
    <element name="extra-jars" optional="yes" />
    <sequential>
      <apkbuilder
          outfolder="${out.absolute.dir}"
          resourcefile="${resource.package.file.name}"
          apkfilepath="${out.packaged.file}"
          debugpackaging="${build.is.packaging.debug}"
          debugsigning="${build.is.signing.debug}"
          verbose="${verbose}"
          hascode="${HAS_CODE}"
          previousBuildType="/"
          buildType="${build.is.packaging.debug}/${build.is.signing.debug}">
        <dex path="${intermediate.dex.file}"/>
        <nativefolder path="${native.libs.absolute.dir}" />
        <extra-jars/>
      </apkbuilder>
    </sequential>
  </macrodef>

  <!-- Packages the application. -->
  <target name="-package">
    <if condition="${emma.enabled}">
      <then>
        <!-- Include the EMMA device runtime so instrumented code can run. -->
        <package-helper>
          <extra-jars>
            <jarfile path="${emma.device.jar}" />
          </extra-jars>
        </package-helper>
      </then>
      <else>
        <package-helper />
      </else>
    </if>
  </target>
</project>

Binary file not shown.

View File

@ -0,0 +1,2 @@
# This empty res folder can be passed to aapt while building Java libraries or
# APKs that don't have any resources.

View File

@ -0,0 +1,79 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file is a helper to java_apk.gypi. It should be used to create an
# action that runs ApkBuilder via ANT.
#
# Required variables:
# apk_name - File name (minus path & extension) of the output apk.
# apk_path - Path to output apk.
# package_input_paths - Late-evaluated list of resource zips.
# native_libs_dir - Path to lib/ directory to use. Set to an empty directory
# if no native libs are needed.
# Optional variables:
# has_code - Whether to include classes.dex in the apk.
# dex_path - Path to classes.dex. Used only when has_code=1.
# extra_inputs - List of extra action inputs.
{
  'variables': {
    'variables': {
      'has_code%': 1,
    },
    # Translate the numeric has_code flag into the true/false string that
    # the ant build file expects.
    'conditions': [
      ['has_code == 0', {
        'has_code_str': 'false',
      }, {
        'has_code_str': 'true',
      }],
    ],
    'has_code%': '<(has_code)',
    'extra_inputs%': [],
    # Write the inputs list to a file, so that its mtime is updated when
    # the list of inputs changes.
    'inputs_list_file': '>|(apk_package.<(_target_name).<(apk_name).gypcmd >@(package_input_paths))',
    'resource_packaged_apk_name': '<(apk_name)-resources.ap_',
    'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)',
  },
  'action_name': 'apkbuilder_<(apk_name)',
  'message': 'Packaging <(apk_name)',
  'inputs': [
    '<(DEPTH)/build/android/ant/apk-package.xml',
    '<(DEPTH)/build/android/gyp/util/build_utils.py',
    '<(DEPTH)/build/android/gyp/ant.py',
    '<(resource_packaged_apk_path)',
    '<@(extra_inputs)',
    '>@(package_input_paths)',
    '>(inputs_list_file)',
  ],
  'outputs': [
    '<(apk_path)',
  ],
  'conditions': [
    # classes.dex is only an input (and only passed to ant) when the APK
    # actually contains code.
    ['has_code == 1', {
      'inputs': ['<(dex_path)'],
      'action': [
        '-DDEX_FILE_PATH=<(dex_path)',
      ]
    }],
  ],
  # Each -D flag becomes an ant property consumed by apk-package.xml.
  'action': [
    'python', '<(DEPTH)/build/android/gyp/ant.py',
    '--',
    '-quiet',
    '-DHAS_CODE=<(has_code_str)',
    '-DANDROID_SDK_ROOT=<(android_sdk_root)',
    '-DANDROID_SDK_TOOLS=<(android_sdk_tools)',
    '-DRESOURCE_PACKAGED_APK_NAME=<(resource_packaged_apk_name)',
    '-DNATIVE_LIBS_DIR=<(native_libs_dir)',
    '-DAPK_NAME=<(apk_name)',
    '-DCONFIGURATION_NAME=<(CONFIGURATION_NAME)',
    '-DOUT_DIR=<(intermediate_dir)',
    '-DUNSIGNED_APK_PATH=<(apk_path)',
    '-DEMMA_INSTRUMENT=<(emma_instrument)',
    '-DEMMA_DEVICE_JAR=<(emma_device_jar)',
    '-Dbasedir=.',
    '-buildfile',
    '<(DEPTH)/build/android/ant/apk-package.xml',
  ]
}

103
build/android/asan_symbolize.py Executable file
View File

@ -0,0 +1,103 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import optparse
import os
import re
import sys
from pylib import constants
# Uses symbol.py from third_party/android_platform, not python's.
sys.path.insert(0,
os.path.join(constants.DIR_SOURCE_ROOT,
'third_party/android_platform/development/scripts'))
import symbol
_RE_ASAN = re.compile(r'(.*?)(#\S*?) (\S*?) \((.*?)\+(.*?)\)')
def _ParseAsanLogLine(line):
m = re.match(_RE_ASAN, line)
if not m:
return None
return {
'prefix': m.group(1),
'library': m.group(4),
'pos': m.group(2),
'rel_address': '%08x' % int(m.group(5), 16),
}
def _FindASanLibraries():
  """Returns relative paths of every .so under the ASan clang lib dir."""
  lib_root = os.path.join(constants.DIR_SOURCE_ROOT,
                          'third_party', 'llvm-build',
                          'Release+Asserts', 'lib')
  found = []
  for parent, _, filenames in os.walk(lib_root):
    for name in filenames:
      if name.endswith('.so'):
        found.append(os.path.relpath(os.path.join(parent, name)))
  return found
def _TranslateLibPath(library, asan_libs):
for asan_lib in asan_libs:
if os.path.basename(library) == os.path.basename(asan_lib):
return '/' + asan_lib
return symbol.TranslateLibPath(library)
def _Symbolize(asan_input):
  """Symbolizes the ASan stack frames in |asan_input| and prints the result.

  Lines that parse as ASan stack frames are rewritten with the resolved
  symbol name and location; all other lines are echoed unchanged.

  Args:
    asan_input: iterable of raw log lines (e.g. from a file or stdin).
  """
  asan_libs = _FindASanLibraries()
  # Group parsed frames by library so each library's addresses can be
  # resolved with a single SymbolInformationForSet() call.
  libraries = collections.defaultdict(list)
  asan_lines = []
  for asan_log_line in [a.rstrip() for a in asan_input]:
    m = _ParseAsanLogLine(asan_log_line)
    if m:
      libraries[m['library']].append(m)
    asan_lines.append({'raw_log': asan_log_line, 'parsed': m})

  all_symbols = collections.defaultdict(dict)
  for library, items in libraries.iteritems():
    libname = _TranslateLibPath(library, asan_libs)
    lib_relative_addrs = set([i['rel_address'] for i in items])
    info_dict = symbol.SymbolInformationForSet(libname,
                                               lib_relative_addrs,
                                               True)
    if info_dict:
      all_symbols[library]['symbols'] = info_dict

  for asan_log_line in asan_lines:
    m = asan_log_line['parsed']
    if not m:
      print asan_log_line['raw_log']
      continue
    if (m['library'] in all_symbols and
        m['rel_address'] in all_symbols[m['library']]['symbols']):
      # Keep the original prefix and frame position so the symbolized
      # output lines up with the raw ASan report.
      s = all_symbols[m['library']]['symbols'][m['rel_address']][0]
      print '%s%s %s %s' % (m['prefix'], m['pos'], s[0], s[1])
    else:
      print asan_log_line['raw_log']
def main():
  """Reads ASan log lines from --logcat (or stdin) and symbolizes them."""
  parser = optparse.OptionParser()
  parser.add_option('-l', '--logcat',
                    help='File containing adb logcat output with ASan stacks. '
                         'Use stdin if not specified.')
  options, _ = parser.parse_args()
  if options.logcat:
    # open() replaces the Python-2-only file() builtin, and the context
    # manager closes the log instead of leaking the handle.
    with open(options.logcat, 'r') as logcat_file:
      asan_lines = logcat_file.readlines()
  else:
    asan_lines = sys.stdin.readlines()
  _Symbolize(asan_lines)
if __name__ == "__main__":
sys.exit(main())

96
build/android/avd.py Executable file
View File

@ -0,0 +1,96 @@
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Launches Android Virtual Devices with a set configuration for testing Chrome.
The script will launch a specified number of Android Virtual Devices (AVD's).
"""
import install_emulator_deps
import logging
import optparse
import os
import re
import sys
from pylib import cmd_helper
from pylib import constants
from pylib.utils import emulator
def main(argv):
  """Parses options and launches the requested Android Virtual Devices.

  Returns:
    0 on success; 1 when a required emulator dependency is missing or the
    named AVD does not exist.
  """
  # ANDROID_SDK_ROOT needs to be set to the location of the SDK used to launch
  # the emulator to find the system images upon launch.
  emulator_sdk = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk')
  os.environ['ANDROID_SDK_ROOT'] = emulator_sdk

  opt_parser = optparse.OptionParser(description='AVD script.')
  opt_parser.add_option('--name', help='Optionally, name of existing AVD to '
                        'launch. If not specified, new AVD\'s will be created')
  opt_parser.add_option('-n', '--num', dest='emulator_count',
                        help='Number of emulators to launch (default is 1).',
                        type='int', default='1')
  opt_parser.add_option('--abi', default='x86',
                        help='Platform of emulators to launch (x86 default).')
  opt_parser.add_option('--api-level', dest='api_level',
                        help='API level for the image, e.g. 19 for Android 4.4',
                        type='int', default=constants.ANDROID_SDK_VERSION)

  options, _ = opt_parser.parse_args(argv[1:])

  logging.basicConfig(level=logging.INFO,
                      format='# %(asctime)-15s: %(message)s')
  logging.root.setLevel(logging.INFO)

  # Check if KVM is enabled for x86 AVD's and check for x86 system images.
  # TODO(andrewhayden) Since we can fix all of these with install_emulator_deps
  # why don't we just run it?
  if options.abi == 'x86':
    if not install_emulator_deps.CheckKVM():
      logging.critical('ERROR: KVM must be enabled in BIOS, and installed. '
                       'Enable KVM in BIOS and run install_emulator_deps.py')
      return 1
    elif not install_emulator_deps.CheckX86Image(options.api_level):
      logging.critical('ERROR: System image for x86 AVD not installed. Run '
                       'install_emulator_deps.py')
      return 1

  if not install_emulator_deps.CheckSDK():
    logging.critical('ERROR: Emulator SDK not installed. Run '
                     'install_emulator_deps.py.')
    return 1

  # If AVD is specified, check that the SDK has the required target. If not,
  # check that the SDK has the desired target for the temporary AVD's.
  api_level = options.api_level
  if options.name:
    android = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk', 'tools',
                           'android')
    avds_output = cmd_helper.GetCmdOutput([android, 'list', 'avd'])
    names = re.findall(r'Name: (\w+)', avds_output)
    api_levels = re.findall(r'API level (\d+)', avds_output)
    try:
      avd_index = names.index(options.name)
    except ValueError:
      # Lazy %-args instead of eager string interpolation in the log call.
      logging.critical('ERROR: Specified AVD %s does not exist.', options.name)
      return 1
    api_level = int(api_levels[avd_index])

  if not install_emulator_deps.CheckSDKPlatform(api_level):
    # The %d placeholder previously had no argument, so the message printed
    # the literal '%d'; pass api_level explicitly.
    logging.critical('ERROR: Emulator SDK missing required target for API %d. '
                     'Run install_emulator_deps.py.', api_level)
    return 1

  if options.name:
    emulator.LaunchEmulator(options.name, options.abi)
  else:
    emulator.LaunchTempEmulators(options.emulator_count, options.abi,
                                 options.api_level, True)
if __name__ == '__main__':
sys.exit(main(sys.argv))

View File

@ -0,0 +1,41 @@
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""DEPRECATED!
TODO(bulach): remove me once all other repositories reference
'test_runner.py perf' directly.
"""
import optparse
import sys
from pylib import cmd_helper
def main(argv):
  """Deprecated thin wrapper that forwards to 'test_runner.py perf'."""
  parser = optparse.OptionParser()
  parser.add_option('-s', '--steps',
                    help='A JSON file containing all the steps to be '
                         'sharded.')
  parser.add_option('--flaky_steps',
                    help='A JSON file containing steps that are flaky and '
                         'will have its exit code ignored.')
  parser.add_option('-p', '--print_results',
                    help='Only prints the results for the previously '
                         'executed step, do not run it again.')
  options, _ = parser.parse_args(argv)

  # Build the forwarded command line in one place, then run it.
  runner = 'build/android/test_runner.py'
  if options.print_results:
    cmd = [runner, 'perf', '--print-step', options.print_results]
  else:
    cmd = [runner, 'perf', '-v', '--steps', options.steps]
    if options.flaky_steps:
      cmd += ['--flaky-steps', options.flaky_steps]
  return cmd_helper.RunCmd(cmd)
if __name__ == '__main__':
sys.exit(main(sys.argv))

View File

@ -0,0 +1,6 @@
set noparent
cmp@chromium.org
jbudorick@chromium.org
navabi@chromium.org

View File

@ -0,0 +1,46 @@
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Helper functions to print buildbot messages."""
def PrintLink(label, url):
  """Adds a link with name |label| linking to |url| to current buildbot step.

  Args:
    label: A string with the name of the label.
    url: A string of the URL.
  """
  print '@@@STEP_LINK@%s@%s@@@' % (label, url)


def PrintMsg(msg):
  """Appends |msg| to the current buildbot step text.

  Args:
    msg: String to be appended.
  """
  print '@@@STEP_TEXT@%s@@@' % msg


def PrintSummaryText(msg):
  """Appends |msg| to main build summary. Visible from waterfall.

  Args:
    msg: String to be appended.
  """
  print '@@@STEP_SUMMARY_TEXT@%s@@@' % msg


def PrintError():
  """Marks the current step as failed."""
  print '@@@STEP_FAILURE@@@'


def PrintWarning():
  """Marks the current step with a warning."""
  print '@@@STEP_WARNINGS@@@'


def PrintNamedStep(step):
  """Starts a new buildbot step named |step|."""
  print '@@@BUILD_STEP %s@@@' % step
View File

@ -0,0 +1,404 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A class to keep track of devices across builds and report state."""
import json
import logging
import optparse
import os
import psutil
import re
import signal
import smtplib
import subprocess
import sys
import time
import urllib
import bb_annotations
import bb_utils
sys.path.append(os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir, 'util', 'lib',
'common'))
import perf_tests_results_helper # pylint: disable=F0401
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from pylib import constants
from pylib.cmd_helper import GetCmdOutput
from pylib.device import adb_wrapper
from pylib.device import battery_utils
from pylib.device import device_blacklist
from pylib.device import device_errors
from pylib.device import device_list
from pylib.device import device_utils
from pylib.utils import run_tests_helper
# Raw string so the \d escape is not subject to deprecation under newer
# Python versions.
_RE_DEVICE_ID = re.compile(r'Device ID = (\d+)')


def DeviceInfo(device, options):
  """Gathers info on a device via various adb calls.

  Args:
    device: A DeviceUtils instance for the device to construct info about.
    options: Parsed command-line options; no_provisioning_check suppresses
        the setup-wizard / AC-power provisioning checks.

  Returns:
    Tuple of (build product, build id, battery level, list of error
    messages, boolean indicating whether the device is usable for testing,
    dict of device details for JSON reporting).
  """
  battery = battery_utils.BatteryUtils(device)

  build_product = ''
  build_id = ''
  battery_level = 100
  errors = []
  dev_good = True
  json_data = {}

  try:
    build_product = device.build_product
    build_id = device.build_id

    json_data = {
      'serial': device.adb.GetDeviceSerial(),
      'type': build_product,
      'build': build_id,
      'build_detail': device.GetProp('ro.build.fingerprint'),
      'battery': {},
      'imei_slice': 'Unknown',
      'wifi_ip': device.GetProp('dhcp.wlan0.ipaddress'),
    }

    battery_info = {}
    try:
      battery_info = battery.GetBatteryInfo(timeout=5)
      battery_level = int(battery_info.get('level', battery_level))
      json_data['battery'] = battery_info
    except device_errors.CommandFailedError:
      logging.exception('Failed to get battery information for %s',
                        str(device))

    try:
      for l in device.RunShellCommand(['dumpsys', 'iphonesubinfo'],
                                      check_return=True, timeout=5):
        m = _RE_DEVICE_ID.match(l)
        if m:
          # Only keep the last six digits to avoid logging the full IMEI.
          json_data['imei_slice'] = m.group(1)[-6:]
    except device_errors.CommandFailedError:
      logging.exception('Failed to get IMEI slice for %s', str(device))

    if battery_level < 15:
      errors += ['Device critically low in battery.']
      dev_good = False
      if not battery.GetCharging():
        battery.SetCharging(True)
    if not options.no_provisioning_check:
      setup_wizard_disabled = (
          device.GetProp('ro.setupwizard.mode') == 'DISABLED')
      if not setup_wizard_disabled and device.build_type != 'user':
        errors += ['Setup wizard not disabled. Was it provisioned correctly?']
      # NOTE(review): assumed nested under the provisioning check, matching
      # the stripped indentation of the surrounding lines — confirm upstream.
      if (device.product_name == 'mantaray' and
          battery_info.get('AC powered', None) != 'true'):
        errors += ['Mantaray device not connected to AC power.']
  except device_errors.CommandFailedError:
    logging.exception('Failure while getting device status.')
    dev_good = False
  except device_errors.CommandTimeoutError:
    logging.exception('Timeout while getting device status.')
    dev_good = False

  return (build_product, build_id, battery_level, errors, dev_good, json_data)
def CheckForMissingDevices(options, devices):
  """Uses file of previous online devices to detect broken phones.

  Args:
    options: out_dir parameter of options argument is used as the base
             directory to load and update the cache file.
    devices: A list of DeviceUtils instance for the currently visible and
             online attached devices.

  Returns:
    A list of error strings when devices are missing (or when no devices
    have ever been seen), otherwise None.
  """
  out_dir = os.path.abspath(options.out_dir)
  device_serials = set(d.adb.GetDeviceSerial() for d in devices)

  # last_devices denotes all known devices prior to this run
  last_devices_path = os.path.join(out_dir, device_list.LAST_DEVICES_FILENAME)
  last_missing_devices_path = os.path.join(
      out_dir, device_list.LAST_MISSING_DEVICES_FILENAME)
  try:
    last_devices = device_list.GetPersistentDeviceList(last_devices_path)
  except IOError:
    # Ignore error, file might not exist
    last_devices = []

  try:
    last_missing_devices = device_list.GetPersistentDeviceList(
        last_missing_devices_path)
  except IOError:
    last_missing_devices = []

  missing_devs = list(set(last_devices) - device_serials)
  # Only alert on devices that were not already known to be missing, so a
  # persistent failure does not re-email on every run.
  new_missing_devs = list(set(missing_devs) - set(last_missing_devices))

  if new_missing_devs and os.environ.get('BUILDBOT_SLAVENAME'):
    # Lazy %-args instead of eager string interpolation in the log call.
    logging.info('new_missing_devs %s', new_missing_devs)
    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
    bb_annotations.PrintSummaryText(devices_missing_msg)

    from_address = 'chrome-bot@chromium.org'
    to_addresses = ['chrome-labs-tech-ticket@google.com',
                    'chrome-android-device-alert@google.com']
    cc_addresses = ['chrome-android-device-alert@google.com']
    subject = 'Devices offline on %s, %s, %s' % (
        os.environ.get('BUILDBOT_SLAVENAME'),
        os.environ.get('BUILDBOT_BUILDERNAME'),
        os.environ.get('BUILDBOT_BUILDNUMBER'))
    msg = ('Please reboot the following devices:\n%s' %
           '\n'.join(map(str, new_missing_devs)))
    SendEmail(from_address, to_addresses, cc_addresses, subject, msg)

  all_known_devices = list(device_serials | set(last_devices))
  device_list.WritePersistentDeviceList(last_devices_path, all_known_devices)
  device_list.WritePersistentDeviceList(last_missing_devices_path,
                                        missing_devs)

  if not all_known_devices:
    # This can happen if for some reason the .last_devices file is not
    # present or if it was empty.
    return ['No online devices. Have any devices been plugged in?']
  if missing_devs:
    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
    bb_annotations.PrintSummaryText(devices_missing_msg)
    return ['Current online devices: %s' % ', '.join(d for d in device_serials),
            '%s are no longer visible. Were they removed?' % missing_devs]
  else:
    new_devs = device_serials - set(last_devices)
    if new_devs and os.path.exists(last_devices_path):
      bb_annotations.PrintWarning()
      bb_annotations.PrintSummaryText(
          '%d new devices detected' % len(new_devs))
      logging.info('New devices detected:')
      for d in new_devs:
        logging.info('  %s', d)
def SendEmail(from_address, to_addresses, cc_addresses, subject, msg):
  """Sends an alert email via local SMTP; failures are logged, not raised.

  Args:
    from_address: sender address string.
    to_addresses: list of primary recipient addresses.
    cc_addresses: list of CC recipient addresses.
    subject: subject line string.
    msg: plain-text message body.
  """
  msg_body = '\r\n'.join(['From: %s' % from_address,
                          'To: %s' % ', '.join(to_addresses),
                          'CC: %s' % ', '.join(cc_addresses),
                          'Subject: %s' % subject, '', msg])
  try:
    server = smtplib.SMTP('localhost')
    # CC recipients must be included in the envelope recipient list —
    # the CC *header* alone does not deliver mail to them.
    server.sendmail(from_address, to_addresses + cc_addresses, msg_body)
    server.quit()
  except Exception:
    logging.exception('Failed to send alert email.')
def RestartUsb():
  """Restarts the connection of every non-root USB device on the host.

  Returns:
    True when every eligible device was restarted successfully, False on
    any failure (including being unable to list or restart devices).
  """
  if not os.path.isfile('/usr/bin/restart_usb'):
    logging.error('Could not restart usb. ''/usr/bin/restart_usb not '
                  'installed on host (see BUG=305769).')
    return False

  lsusb_proc = bb_utils.SpawnCmd(['lsusb'], stdout=subprocess.PIPE)
  lsusb_output, _ = lsusb_proc.communicate()
  if lsusb_proc.returncode:
    logging.error('Could not get list of USB ports (i.e. lsusb).')
    # Previously this returned the non-zero return code, which is truthy
    # and was read by callers ('if not RestartUsb()') as success; report
    # the failure explicitly instead.
    return False

  usb_devices = [re.findall(r'Bus (\d\d\d) Device (\d\d\d)', lsusb_line)[0]
                 for lsusb_line in lsusb_output.strip().split('\n')]

  all_restarted = True
  # Walk USB devices from leaves up (i.e reverse sorted) restarting the
  # connection. If a parent node (e.g. usb hub) is restarted before the
  # devices connected to it, the (bus, dev) for the hub can change, making the
  # output we have wrong. This way we restart the devices before the hub.
  for (bus, dev) in reversed(sorted(usb_devices)):
    # Can not restart root usb connections
    if dev != '001':
      return_code = bb_utils.RunCmd(['/usr/bin/restart_usb', bus, dev])
      if return_code:
        logging.error('Error restarting USB device /dev/bus/usb/%s/%s',
                      bus, dev)
        all_restarted = False
      else:
        logging.info('Restarted USB device /dev/bus/usb/%s/%s', bus, dev)

  return all_restarted
def KillAllAdb():
  """Kills every running adb process, escalating SIGTERM->SIGQUIT->SIGKILL."""
  def _IterAdbProcs():
    # Re-scan on every call so already-dead processes drop out.
    for proc in psutil.process_iter():
      try:
        if 'adb' in proc.name:
          yield proc
      except (psutil.NoSuchProcess, psutil.AccessDenied):
        pass

  for sig in [signal.SIGTERM, signal.SIGQUIT, signal.SIGKILL]:
    for proc in _IterAdbProcs():
      try:
        logging.info('kill %d %d (%s [%s])', sig, proc.pid, proc.name,
                     ' '.join(proc.cmdline))
        proc.send_signal(sig)
      except (psutil.NoSuchProcess, psutil.AccessDenied):
        pass
  # Anything still alive after SIGKILL could not be terminated; log it.
  for proc in _IterAdbProcs():
    try:
      logging.error('Unable to kill %d (%s [%s])', proc.pid, proc.name,
                    ' '.join(proc.cmdline))
    except (psutil.NoSuchProcess, psutil.AccessDenied):
      pass
def main():
  """Check the status of attached Android devices and blacklist bad ones.

  Returns:
    2 if every attached device failed its checks, 1 if no devices were
    found, otherwise falls through returning None (exit status 0).
  """
  parser = optparse.OptionParser()
  parser.add_option('', '--out-dir',
                    help='Directory where the device path is stored',
                    default=os.path.join(constants.DIR_SOURCE_ROOT, 'out'))
  parser.add_option('--no-provisioning-check', action='store_true',
                    help='Will not check if devices are provisioned properly.')
  parser.add_option('--device-status-dashboard', action='store_true',
                    help='Output device status data for dashboard.')
  parser.add_option('--restart-usb', action='store_true',
                    help='Restart USB ports before running device check.')
  parser.add_option('--json-output',
                    help='Output JSON information into a specified file.')
  parser.add_option('-v', '--verbose', action='count', default=1,
                    help='Log more information.')
  options, args = parser.parse_args()
  if args:
    parser.error('Unknown options %s' % args)
  run_tests_helper.SetLogLevel(options.verbose)
  # Remove the last build's "bad devices" before checking device statuses.
  device_blacklist.ResetBlacklist()
  try:
    expected_devices = device_list.GetPersistentDeviceList(
        os.path.join(options.out_dir, device_list.LAST_DEVICES_FILENAME))
  except IOError:
    # No record of last build's devices; treat as nothing expected.
    expected_devices = []
  devices = device_utils.DeviceUtils.HealthyDevices()
  device_serials = [d.adb.GetDeviceSerial() for d in devices]
  # Only restart usb if devices are missing.
  if set(expected_devices) != set(device_serials):
    logging.warning('expected_devices: %s', expected_devices)
    logging.warning('devices: %s', device_serials)
    KillAllAdb()
    retries = 5
    usb_restarted = True
    if options.restart_usb:
      if not RestartUsb():
        usb_restarted = False
        bb_annotations.PrintWarning()
        logging.error('USB reset stage failed, '
                      'wait for any device to come back.')
    # Poll up to `retries` times for the expected device set to reappear.
    while retries:
      logging.info('retry adb devices...')
      time.sleep(1)
      devices = device_utils.DeviceUtils.HealthyDevices()
      device_serials = [d.adb.GetDeviceSerial() for d in devices]
      if set(expected_devices) == set(device_serials):
        # All devices are online, keep going.
        break
      if not usb_restarted and devices:
        # The USB wasn't restarted, but there's at least one device online.
        # No point in trying to wait for all devices.
        break
      retries -= 1
  # Parallel per-device result lists; all empty when no devices are attached.
  types, builds, batteries, errors, devices_ok, json_data = (
      [], [], [], [], [], [])
  if devices:
    types, builds, batteries, errors, devices_ok, json_data = (
        zip(*[DeviceInfo(dev, options) for dev in devices]))
  # Write device info to file for buildbot info display.
  if os.path.exists('/home/chrome-bot'):
    with open('/home/chrome-bot/.adb_device_info', 'w') as f:
      for device in json_data:
        try:
          # Best effort: any device with missing battery/build info is
          # silently skipped from the display file.
          f.write('%s %s %s %.1fC %s%%\n' % (device['serial'], device['type'],
              device['build'], float(device['battery']['temperature']) / 10,
              device['battery']['level']))
        except Exception:
          pass
  err_msg = CheckForMissingDevices(options, devices) or []
  unique_types = list(set(types))
  unique_builds = list(set(builds))
  bb_annotations.PrintMsg('Online devices: %d. Device types %s, builds %s'
                          % (len(devices), unique_types, unique_builds))
  for j in json_data:
    logging.info('Device %s (%s)', j.get('serial'), j.get('type'))
    logging.info('  Build: %s (%s)', j.get('build'), j.get('build_detail'))
    logging.info('  Current Battery Service state:')
    for k, v in j.get('battery', {}).iteritems():
      logging.info('    %s: %s', k, v)
    logging.info('  IMEI slice: %s', j.get('imei_slice'))
    logging.info('  WiFi IP: %s', j.get('wifi_ip'))
  for dev, dev_errors in zip(devices, errors):
    if dev_errors:
      err_msg += ['%s errors:' % str(dev)]
      err_msg += ['  %s' % error for error in dev_errors]
  if err_msg:
    bb_annotations.PrintWarning()
    for e in err_msg:
      logging.error(e)
    # Alert the device sheriffs by email about any per-device errors.
    from_address = 'buildbot@chromium.org'
    to_addresses = ['chromium-android-device-alerts@google.com']
    bot_name = os.environ.get('BUILDBOT_BUILDERNAME')
    slave_name = os.environ.get('BUILDBOT_SLAVENAME')
    subject = 'Device status check errors on %s, %s.' % (slave_name, bot_name)
    SendEmail(from_address, to_addresses, [], subject, '\n'.join(err_msg))
  if options.device_status_dashboard:
    offline_devices = [
        device_utils.DeviceUtils(a)
        for a in adb_wrapper.AdbWrapper.Devices(is_ready=False)
        if a.GetState() == 'offline']
    perf_tests_results_helper.PrintPerfResult('BotDevices', 'OnlineDevices',
                                              [len(devices)], 'devices')
    perf_tests_results_helper.PrintPerfResult('BotDevices', 'OfflineDevices',
                                              [len(offline_devices)], 'devices',
                                              'unimportant')
    for dev, battery in zip(devices, batteries):
      perf_tests_results_helper.PrintPerfResult('DeviceBattery', str(dev),
                                                [battery], '%',
                                                'unimportant')
  if options.json_output:
    with open(options.json_output, 'wb') as f:
      f.write(json.dumps(json_data, indent=4))
  # Blacklist devices that failed their checks so later steps skip them.
  num_failed_devs = 0
  for device_ok, device in zip(devices_ok, devices):
    if not device_ok:
      logging.warning('Blacklisting %s', str(device))
      device_blacklist.ExtendBlacklist([str(device)])
      num_failed_devs += 1
  if num_failed_devs == len(devices):
    return 2
  if not devices:
    return 1


if __name__ == '__main__':
  sys.exit(main())

View File

@ -0,0 +1,796 @@
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import glob
import hashlib
import json
import os
import random
import re
import shutil
import sys
import bb_utils
import bb_annotations
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import provision_devices
from pylib import constants
from pylib.device import device_utils
from pylib.gtest import gtest_config
# Checkout layout anchors, all derived from bb_utils.
CHROME_SRC_DIR = bb_utils.CHROME_SRC
DIR_BUILD_ROOT = os.path.dirname(CHROME_SRC_DIR)
CHROME_OUT_DIR = bb_utils.CHROME_OUT_DIR
BLINK_SCRIPTS_DIR = 'third_party/WebKit/Tools/Scripts'
SLAVE_SCRIPTS_DIR = os.path.join(bb_utils.BB_BUILD_DIR, 'scripts', 'slave')
# Where the logcat monitor writes per-device logs (reset per run).
LOGCAT_DIR = os.path.join(bb_utils.CHROME_OUT_DIR, 'logcat')
# Google Storage endpoints: GS_URL for plain links, GS_AUTH_URL for links
# that go through the authenticated storage frontend.
GS_URL = 'https://storage.googleapis.com'
GS_AUTH_URL = 'https://storage.cloud.google.com'

# Describes an instrumentation test suite:
#   test: Name of test we're running.
#   apk: apk to be installed.
#   apk_package: package for the apk to be installed.
#   test_apk: apk to run tests on.
#   test_data: data folder in format destination:source.
#   host_driven_root: The host-driven test root directory.
#   annotation: Annotation of the tests to include.
#   exclude_annotation: The annotation of the tests to exclude.
I_TEST = collections.namedtuple('InstrumentationTest', [
    'name', 'apk', 'apk_package', 'test_apk', 'test_data', 'isolate_file_path',
    'host_driven_root', 'annotation', 'exclude_annotation', 'extra_flags'])
def SrcPath(*path):
  """Return an absolute path for *path components rooted at the src dir."""
  return os.path.join(CHROME_SRC_DIR, *path)
def I(name, apk, apk_package, test_apk, test_data, isolate_file_path=None,
      host_driven_root=None, annotation=None, exclude_annotation=None,
      extra_flags=None):
  """Shorthand constructor for an I_TEST with optional fields defaulted."""
  return I_TEST(name, apk, apk_package, test_apk, test_data, isolate_file_path,
                host_driven_root, annotation, exclude_annotation, extra_flags)
# Registry of instrumentation suites, keyed by suite name.
INSTRUMENTATION_TESTS = dict((suite.name, suite) for suite in [
    I('ContentShell',
      'ContentShell.apk',
      'org.chromium.content_shell_apk',
      'ContentShellTest',
      'content:content/test/data/android/device_files',
      isolate_file_path='content/content_shell_test_apk.isolate'),
    I('ChromeShell',
      'ChromeShell.apk',
      'org.chromium.chrome.shell',
      'ChromeShellTest',
      'chrome:chrome/test/data/android/device_files',
      isolate_file_path='chrome/chrome_shell_test_apk.isolate',
      host_driven_root=constants.CHROME_SHELL_HOST_DRIVEN_DIR),
    I('AndroidWebView',
      'AndroidWebView.apk',
      'org.chromium.android_webview.shell',
      'AndroidWebViewTest',
      'webview:android_webview/test/data/device_files',
      isolate_file_path='android_webview/android_webview_test_apk.isolate'),
    I('ChromeSyncShell',
      'ChromeSyncShell.apk',
      'org.chromium.chrome.browser.sync',
      'ChromeSyncShellTest',
      None),
    ])

# A package that can be installed via --install without being a test suite.
InstallablePackage = collections.namedtuple('InstallablePackage', [
    'name', 'apk', 'apk_package'])

# Everything installable: all instrumentation-test APKs plus extras.
INSTALLABLE_PACKAGES = dict((package.name, package) for package in (
    [InstallablePackage(i.name, i.apk, i.apk_package)
     for i in INSTRUMENTATION_TESTS.itervalues()] +
    [InstallablePackage('ChromeDriverWebViewShell',
                        'ChromeDriverWebViewShell.apk',
                        'org.chromium.chromedriver_webview_shell')]))
# Step names accepted by --test-filter; each maps to an entry returned by
# GetTestStepCmds().
VALID_TESTS = set([
    'base_junit_tests',
    'chromedriver',
    'chrome_proxy',
    'components_browsertests',
    'gfx_unittests',
    'gl_unittests',
    'gpu',
    'python_unittests',
    'telemetry_unittests',
    'telemetry_perf_unittests',
    'ui',
    'unit',
    'webkit',
    'webkit_layout'
])

# Short hand for RunCmd which is used extensively in this file.
RunCmd = bb_utils.RunCmd
def _GetRevision(options):
"""Get the SVN revision number.
Args:
options: options object.
Returns:
The revision number.
"""
revision = options.build_properties.get('got_revision')
if not revision:
revision = options.build_properties.get('revision', 'testing')
return revision
def _RunTest(options, cmd, suite):
  """Run test command with runtest.py.

  Args:
    options: options object.
    cmd: the command to run.
    suite: test name.
  """
  runtest = os.path.join(SLAVE_SCRIPTS_DIR, 'runtest.py')
  args = [runtest] + bb_utils.EncodeProperties(options)
  args += ['--test-platform', 'android']
  if options.factory_properties.get('generate_gtest_json'):
    args.append('--generate-json-file')
  build_props = options.build_properties
  args += ['-o', 'gtest-results/%s' % suite,
           '--annotate', 'gtest',
           '--build-number', str(build_props.get('buildnumber', '')),
           '--builder-name', build_props.get('buildername', '')]
  args += ['--target', 'Release' if options.target == 'Release' else 'Debug']
  if options.flakiness_server:
    args.append('--flakiness-dashboard-server=%s' % options.flakiness_server)
  RunCmd(args + cmd, cwd=DIR_BUILD_ROOT)
def RunTestSuites(options, suites, suites_options=None):
  """Manages an invocation of test_runner.py for gtests.

  Args:
    options: options object.
    suites: List of suite names to run.
    suites_options: Command line options dictionary for particular suites.
        For example,
        {'content_browsertests', ['--num_retries=1', '--release']}
        will add the options only to content_browsertests.
  """
  suites_options = suites_options or {}
  common_args = ['--verbose']
  if options.target == 'Release':
    common_args.append('--release')
  if options.asan:
    common_args.append('--tool=asan')
  if options.gtest_filter:
    common_args.append('--gtest-filter=%s' % options.gtest_filter)

  browsertest_suites = ('content_browsertests', 'components_browsertests')
  for suite in suites:
    bb_annotations.PrintNamedStep(suite)
    cmd = [suite] + common_args + suites_options.get(suite, [])
    # Browser tests are retried once to paper over device flake.
    if suite in browsertest_suites:
      cmd.append('--num_retries=1')
    _RunTest(options, cmd, suite)
def RunJunitSuite(suite):
  """Run a single JUnit suite on the host via test_runner.py."""
  bb_annotations.PrintNamedStep(suite)
  cmd = ['build/android/test_runner.py', 'junit', '-s', suite]
  RunCmd(cmd)
def RunChromeDriverTests(options):
  """Run all the steps for running chromedriver tests."""
  bb_annotations.PrintNamedStep('chromedriver_annotation')
  packages = ('chrome_shell', 'chrome_stable', 'chrome_beta',
              'chromedriver_webview_shell')
  RunCmd(['chrome/test/chromedriver/run_buildbot_steps.py',
          '--android-packages=%s' % ','.join(packages),
          '--revision=%s' % _GetRevision(options),
          '--update-log'])
def RunChromeProxyTests(options):
  """Run the chrome_proxy tests.

  Args:
    options: options object.
  """
  InstallApk(options, INSTRUMENTATION_TESTS['ChromeShell'], False)
  serial_args = []
  healthy = device_utils.DeviceUtils.HealthyDevices()
  if healthy:
    # Pin the run to the first healthy device's serial.
    serial_args = ['--device', healthy[0].adb.GetDeviceSerial()]
  bb_annotations.PrintNamedStep('chrome_proxy')
  RunCmd(['tools/chrome_proxy/run_tests',
          '--browser', 'android-chrome-shell'] + serial_args)
def RunTelemetryTests(options, step_name, run_tests_path):
  """Runs either telemetry_perf_unittests or telemetry_unittests.

  Args:
    options: options object.
    step_name: either 'telemetry_unittests' or 'telemetry_perf_unittests'
    run_tests_path: path to run_tests script (tools/perf/run_tests for
                    perf_unittests and tools/telemetry/run_tests for
                    telemetry_unittests)
  """
  InstallApk(options, INSTRUMENTATION_TESTS['ChromeShell'], False)
  args = ['--browser', 'android-chrome-shell']
  devices = device_utils.DeviceUtils.HealthyDevices()
  if devices:
    # NOTE(review): passes the literal 'android' rather than a device serial
    # (contrast RunChromeProxyTests) -- presumably telemetry then picks any
    # attached device; confirm before changing.
    args = args + ['--device', 'android']
  bb_annotations.PrintNamedStep(step_name)
  RunCmd([run_tests_path] + args)
def InstallApk(options, test, print_step=False):
  """Install an apk to all phones.

  Args:
    options: options object
    test: An I_TEST namedtuple
    print_step: Print a buildbot step
  """
  if print_step:
    bb_annotations.PrintNamedStep('install_%s' % test.name.lower())
  cmd = ['build/android/adb_install_apk.py',
         '--apk_package', test.apk_package]
  if options.target == 'Release':
    cmd.append('--release')
  cmd.append(test.apk)
  RunCmd(cmd, halt_on_failure=True)
def RunInstrumentationSuite(options, test, flunk_on_failure=True,
                            python_only=False, official_build=False):
  """Manages an invocation of test_runner.py for instrumentation tests.

  Args:
    options: options object
    test: An I_TEST namedtuple
    flunk_on_failure: Flunk the step if tests fail.
    python_only: Run only host driven Python tests.
    official_build: Run official-build tests.
  """
  bb_annotations.PrintNamedStep('%s_instrumentation_tests' % test.name.lower())
  if test.apk:
    InstallApk(options, test)
  # Translate the I_TEST fields and global options into test_runner flags;
  # unset/None fields are simply omitted.
  args = ['--test-apk', test.test_apk, '--verbose']
  if test.test_data:
    args.extend(['--test_data', test.test_data])
  if options.target == 'Release':
    args.append('--release')
  if options.asan:
    args.append('--tool=asan')
  if options.flakiness_server:
    args.append('--flakiness-dashboard-server=%s' %
                options.flakiness_server)
  if options.coverage_bucket:
    args.append('--coverage-dir=%s' % options.coverage_dir)
  if test.isolate_file_path:
    args.append('--isolate-file-path=%s' % test.isolate_file_path)
  if test.host_driven_root:
    args.append('--host-driven-root=%s' % test.host_driven_root)
  if test.annotation:
    args.extend(['-A', test.annotation])
  if test.exclude_annotation:
    args.extend(['-E', test.exclude_annotation])
  if test.extra_flags:
    args.extend(test.extra_flags)
  if python_only:
    args.append('-p')
  if official_build:
    # The option needs to be assigned 'True' as it does not have an action
    # associated with it.
    args.append('--official-build')
  RunCmd(['build/android/test_runner.py', 'instrumentation'] + args,
         flunk_on_failure=flunk_on_failure)
def RunWebkitLint():
  """Lint WebKit's TestExpectation files."""
  bb_annotations.PrintNamedStep('webkit_lint')
  lint_tool = SrcPath(os.path.join(BLINK_SCRIPTS_DIR, 'lint-test-expectations'))
  RunCmd([lint_tool])
def RunWebkitLayoutTests(options):
  """Run layout tests on an actual device.

  Invokes run-webkit-tests, interprets its special exit codes, summarizes
  unexpected results with flakiness-dashboard links, and optionally
  archives the results to Google Storage.
  """
  bb_annotations.PrintNamedStep('webkit_tests')
  cmd_args = [
      '--no-show-results',
      '--no-new-test-results',
      '--full-results-html',
      '--clobber-old-results',
      '--exit-after-n-failures', '5000',
      '--exit-after-n-crashes-or-timeouts', '100',
      '--debug-rwt-logging',
      '--results-directory', '../layout-test-results',
      '--target', options.target,
      '--builder-name', options.build_properties.get('buildername', ''),
      '--build-number', str(options.build_properties.get('buildnumber', '')),
      '--master-name', 'ChromiumWebkit',  # TODO: Get this from the cfg.
      '--build-name', options.build_properties.get('buildername', ''),
      '--platform=android']

  # Pass through selected factory properties as command-line flags.
  for flag in 'test_results_server', 'driver_name', 'additional_driver_flag':
    if flag in options.factory_properties:
      cmd_args.extend(['--%s' % flag.replace('_', '-'),
                       options.factory_properties.get(flag)])

  for f in options.factory_properties.get('additional_expectations', []):
    cmd_args.extend(
        ['--additional-expectations=%s' % os.path.join(CHROME_SRC_DIR, *f)])

  # TODO(dpranke): Remove this block after
  # https://codereview.chromium.org/12927002/ lands.
  for f in options.factory_properties.get('additional_expectations_files', []):
    cmd_args.extend(
        ['--additional-expectations=%s' % os.path.join(CHROME_SRC_DIR, *f)])

  exit_code = RunCmd(
      [SrcPath(os.path.join(BLINK_SCRIPTS_DIR, 'run-webkit-tests'))] + cmd_args)
  if exit_code == 255:  # test_run_results.UNEXPECTED_ERROR_EXIT_STATUS
    bb_annotations.PrintMsg('?? (crashed or hung)')
  elif exit_code == 254:  # test_run_results.NO_DEVICES_EXIT_STATUS
    bb_annotations.PrintMsg('?? (no devices found)')
  elif exit_code == 253:  # test_run_results.NO_TESTS_EXIT_STATUS
    bb_annotations.PrintMsg('?? (no tests found)')
  else:
    # Normal completion: summarize unexpected results from the JSON output.
    full_results_path = os.path.join('..', 'layout-test-results',
                                     'full_results.json')
    if os.path.exists(full_results_path):
      full_results = json.load(open(full_results_path))
      unexpected_passes, unexpected_failures, unexpected_flakes = (
          _ParseLayoutTestResults(full_results))
      if unexpected_failures:
        _PrintDashboardLink('failed', unexpected_failures.keys(),
                            max_tests=25)
      elif unexpected_passes:
        _PrintDashboardLink('unexpected passes', unexpected_passes.keys(),
                            max_tests=10)
      if unexpected_flakes:
        _PrintDashboardLink('unexpected flakes', unexpected_flakes.keys(),
                            max_tests=10)

      if exit_code == 0 and (unexpected_passes or unexpected_flakes):
        # If exit_code != 0, RunCmd() will have already printed an error.
        bb_annotations.PrintWarning()
    else:
      bb_annotations.PrintError()
      bb_annotations.PrintMsg('?? (results missing)')

  if options.factory_properties.get('archive_webkit_results', False):
    bb_annotations.PrintNamedStep('archive_webkit_results')
    base = 'https://storage.googleapis.com/chromium-layout-test-archives'
    builder_name = options.build_properties.get('buildername', '')
    build_number = str(options.build_properties.get('buildnumber', ''))
    results_link = '%s/%s/%s/layout-test-results/results.html' % (
        base, EscapeBuilderName(builder_name), build_number)
    bb_annotations.PrintLink('results', results_link)
    bb_annotations.PrintLink('(zip)', '%s/%s/%s/layout-test-results.zip' % (
        base, EscapeBuilderName(builder_name), build_number))
    gs_bucket = 'gs://chromium-layout-test-archives'
    RunCmd([os.path.join(SLAVE_SCRIPTS_DIR, 'chromium',
                         'archive_layout_test_results.py'),
            '--results-dir', '../../layout-test-results',
            '--build-number', build_number,
            '--builder-name', builder_name,
            '--gs-bucket', gs_bucket],
           cwd=DIR_BUILD_ROOT)
def _ParseLayoutTestResults(results):
  """Extract the failures from the test run.

  Args:
    results: Parsed full_results.json dict from run-webkit-tests.

  Returns:
    A (passes, failures, flakes) tuple of dicts keyed by flattened test
    path: flakes and failures map to the first actual result string,
    passes map to the full result entry.
  """
  # Cloned from third_party/WebKit/Tools/Scripts/print-json-test-results
  tests = _ConvertTrieToFlatPaths(results['tests'])
  failures = {}
  flakes = {}
  passes = {}
  # items() (not iteritems()) so this also runs under Python 3; semantics
  # are identical here on Python 2.
  for (test, result) in tests.items():
    if result.get('is_unexpected'):
      actual_results = result['actual'].split()
      expected_results = result['expected'].split()
      if len(actual_results) > 1:
        # We report the first failure type back, even if the second
        # was more severe.
        if actual_results[1] in expected_results:
          flakes[test] = actual_results[0]
        else:
          failures[test] = actual_results[0]
      elif actual_results[0] == 'PASS':
        passes[test] = result
      else:
        failures[test] = actual_results[0]
  return (passes, failures, flakes)
def _ConvertTrieToFlatPaths(trie, prefix=None):
"""Flatten the trie of failures into a list."""
# Cloned from third_party/WebKit/Tools/Scripts/print-json-test-results
result = {}
for name, data in trie.iteritems():
if prefix:
name = prefix + '/' + name
if len(data) and 'actual' not in data and 'expected' not in data:
result.update(_ConvertTrieToFlatPaths(data, name))
else:
result[name] = data
return result
def _PrintDashboardLink(link_text, tests, max_tests):
  """Add a link to the flakiness dashboard in the step annotations."""
  names = list(tests)
  if len(names) > max_tests:
    summary = ' '.join(names[:max_tests]) + ' and more'
  else:
    summary = ' '.join(names)
  dashboard_base = ('http://test-results.appspot.com'
                    '/dashboards/flakiness_dashboard.html#'
                    'master=ChromiumWebkit&tests=')
  bb_annotations.PrintLink('%d %s: %s' % (len(names), link_text, summary),
                           dashboard_base + ','.join(names))
def EscapeBuilderName(builder_name):
  """Replace URL/path-unsafe characters (spaces, parens) with underscores."""
  unsafe_chars = re.compile('[ ()]')
  return unsafe_chars.sub('_', builder_name)
def SpawnLogcatMonitor():
  """Start the background logcat monitor with a clean output directory."""
  shutil.rmtree(LOGCAT_DIR, ignore_errors=True)
  monitor = os.path.join(CHROME_SRC_DIR, 'build', 'android',
                         'adb_logcat_monitor.py')
  bb_utils.SpawnCmd([monitor, LOGCAT_DIR])
  # Wait for logcat_monitor to pull existing logcat
  RunCmd(['sleep', '5'])
def ProvisionDevices(options):
  """Provision attached devices, optionally restarting adb first."""
  bb_annotations.PrintNamedStep('provision_devices')
  if not bb_utils.TESTING:
    # Restart adb to work around bugs, sleep to wait for usb discovery.
    device_utils.RestartServer()
    RunCmd(['sleep', '1'])
  cmd = ['build/android/provision_devices.py', '-t', options.target]
  optional_flags = (('--auto-reconnect', options.auto_reconnect),
                    ('--skip-wipe', options.skip_wipe),
                    ('--disable-location', options.disable_location))
  for flag, enabled in optional_flags:
    if enabled:
      cmd.append(flag)
  RunCmd(cmd, halt_on_failure=True)
def DeviceStatusCheck(options):
  """Run the device status check, halting the build on failure."""
  bb_annotations.PrintNamedStep('device_status_check')
  extra = ['--restart-usb'] if options.restart_usb else []
  RunCmd(['build/android/buildbot/bb_device_status_check.py'] + extra,
         halt_on_failure=True)
def GetDeviceSetupStepCmds():
  """Return (step name, callable) pairs run before any tests."""
  return [
      ('device_status_check', DeviceStatusCheck),
      ('provision_devices', ProvisionDevices),
  ]
def RunUnitTests(options):
  """Run the stable gtest suites, dropping ASan-excluded ones under ASan."""
  suites = gtest_config.STABLE_TEST_SUITES
  if options.asan:
    excluded = set(gtest_config.ASAN_EXCLUDED_TEST_SUITES)
    suites = [s for s in suites if s not in excluded]
  RunTestSuites(options, suites)
def RunTelemetryUnitTests(options):
  """Run telemetry_unittests via the shared telemetry driver."""
  RunTelemetryTests(options, 'telemetry_unittests', 'tools/telemetry/run_tests')


def RunTelemetryPerfUnitTests(options):
  """Run telemetry_perf_unittests via the shared telemetry driver."""
  RunTelemetryTests(options, 'telemetry_perf_unittests', 'tools/perf/run_tests')


def RunInstrumentationTests(options):
  """Run every registered instrumentation suite."""
  for test in INSTRUMENTATION_TESTS.itervalues():
    RunInstrumentationSuite(options, test)


def RunWebkitTests(options):
  """Run the webkit unit test suites, then lint the test expectations."""
  RunTestSuites(options, ['webkit_unit_tests', 'blink_heap_unittests'])
  RunWebkitLint()
def RunGPUTests(options):
  """Run the GPU batteries: pixel, WebGL conformance (content shell and
  webview) and GPU rasterization."""
  # Revision and escaped builder name are invariant across the four steps.
  revision = str(_GetRevision(options))
  machine = EscapeBuilderName(
      options.build_properties.get('buildername', 'noname'))

  bb_annotations.PrintNamedStep('pixel_tests')
  RunCmd(['content/test/gpu/run_gpu_test.py',
          'pixel', '-v',
          '--browser', 'android-content-shell',
          '--build-revision', revision,
          '--upload-refimg-to-cloud-storage',
          '--refimg-cloud-storage-bucket',
          'chromium-gpu-archive/reference-images',
          '--os-type', 'android',
          '--test-machine-name', machine])

  bb_annotations.PrintNamedStep('webgl_conformance_tests')
  RunCmd(['content/test/gpu/run_gpu_test.py', '-v',
          '--browser=android-content-shell', 'webgl_conformance',
          '--webgl-conformance-version=1.0.1'])

  bb_annotations.PrintNamedStep('android_webview_webgl_conformance_tests')
  RunCmd(['content/test/gpu/run_gpu_test.py', '-v',
          '--browser=android-webview-shell', 'webgl_conformance',
          '--webgl-conformance-version=1.0.1'])

  bb_annotations.PrintNamedStep('gpu_rasterization_tests')
  RunCmd(['content/test/gpu/run_gpu_test.py',
          'gpu_rasterization', '-v',
          '--browser', 'android-content-shell',
          '--build-revision', revision,
          '--test-machine-name', machine])
def RunPythonUnitTests(_options):
  """Run each registered Python unit test suite as its own step."""
  for suite in constants.PYTHON_UNIT_TEST_SUITES:
    bb_annotations.PrintNamedStep(suite)
    RunCmd(['build/android/test_runner.py', 'python', '-s', suite])


def GetTestStepCmds():
  """Map each VALID_TESTS name to the callable that runs that step."""
  return [
      ('base_junit_tests',
       lambda _options: RunJunitSuite('base_junit_tests')),
      ('chromedriver', RunChromeDriverTests),
      ('chrome_proxy', RunChromeProxyTests),
      ('components_browsertests',
       lambda options: RunTestSuites(options, ['components_browsertests'])),
      ('gfx_unittests',
       lambda options: RunTestSuites(options, ['gfx_unittests'])),
      ('gl_unittests',
       lambda options: RunTestSuites(options, ['gl_unittests'])),
      ('gpu', RunGPUTests),
      ('python_unittests', RunPythonUnitTests),
      ('telemetry_unittests', RunTelemetryUnitTests),
      ('telemetry_perf_unittests', RunTelemetryPerfUnitTests),
      ('ui', RunInstrumentationTests),
      ('unit', RunUnitTests),
      ('webkit', RunWebkitTests),
      ('webkit_layout', RunWebkitLayoutTests),
  ]
def MakeGSPath(options, gs_base_dir):
  """Build a unique GS object path: <base>/<builder>/<revision>/<sha1>."""
  bot_id = options.build_properties.get('buildername', 'testing')
  random_token = hashlib.sha1(str(random.random())).hexdigest()
  gs_path = '%s/%s/%s/%s' % (gs_base_dir, bot_id, _GetRevision(options),
                             random_token)
  # remove double slashes, happens with blank revisions and confuses gsutil
  return re.sub('/+', '/', gs_path)
def UploadHTML(options, gs_base_dir, dir_to_upload, link_text,
               link_rel_path='index.html', gs_url=GS_URL):
  """Uploads directory at |dir_to_upload| to Google Storage and output a link.

  Args:
    options: Command line options.
    gs_base_dir: The Google Storage base directory (e.g.
        'chromium-code-coverage/java')
    dir_to_upload: Absolute path to the directory to be uploaded.
    link_text: Link text to be displayed on the step.
    link_rel_path: Link path relative to |dir_to_upload|.
    gs_url: Google storage URL.
  """
  gs_path = MakeGSPath(options, gs_base_dir)
  upload_cmd = [bb_utils.GSUTIL_PATH, 'cp', '-R', dir_to_upload,
                'gs://%s' % gs_path]
  RunCmd(upload_cmd)
  link_target = '%s/%s/%s' % (gs_url, gs_path, link_rel_path)
  bb_annotations.PrintLink(link_text, link_target)
def GenerateJavaCoverageReport(options):
  """Generates an HTML coverage report using EMMA and uploads it."""
  bb_annotations.PrintNamedStep('java_coverage_report')
  coverage_html = os.path.join(options.coverage_dir, 'coverage_html')
  cmd = ['build/android/generate_emma_html.py',
         '--coverage-dir', options.coverage_dir,
         '--metadata-dir', os.path.join(CHROME_OUT_DIR, options.target),
         '--cleanup',
         '--output', os.path.join(coverage_html, 'index.html')]
  RunCmd(cmd)
  return coverage_html
def LogcatDump(options):
  """Assemble the collected logcat, upload it to GS, and link it."""
  # Print logcat, kill logcat monitor
  bb_annotations.PrintNamedStep('logcat_dump')
  full_log = os.path.join(CHROME_OUT_DIR, options.target, 'full_log.txt')
  printer = SrcPath('build', 'android', 'adb_logcat_printer.py')
  RunCmd([printer, '--output-path', full_log, LOGCAT_DIR])
  dump_path = MakeGSPath(options, 'chromium-android/logcat_dumps')
  RunCmd([bb_utils.GSUTIL_PATH, 'cp', '-z', 'txt', full_log,
          'gs://%s' % dump_path])
  bb_annotations.PrintLink('logcat dump', '%s/%s' % (GS_AUTH_URL, dump_path))
def RunStackToolSteps(options):
  """Run stack tool steps.

  Stack tool is run for logcat dump, optionally for ASAN.
  """
  bb_annotations.PrintNamedStep('Run stack tool with logcat dump')
  full_log = os.path.join(CHROME_OUT_DIR, options.target, 'full_log.txt')
  stack_tool = os.path.join(CHROME_SRC_DIR, 'third_party', 'android_platform',
                            'development', 'scripts', 'stack')
  RunCmd([stack_tool, '--more-info', full_log])
  if options.asan_symbolize:
    bb_annotations.PrintNamedStep('Run stack tool for ASAN')
    asan_tool = os.path.join(CHROME_SRC_DIR, 'build', 'android',
                             'asan_symbolize.py')
    RunCmd([asan_tool, '-l', full_log])
def GenerateTestReport(options):
  """Cat then delete each per-suite log under out/<target>/test_logs."""
  bb_annotations.PrintNamedStep('test_report')
  log_pattern = os.path.join(CHROME_OUT_DIR, options.target,
                             'test_logs', '*.log')
  for report in glob.glob(log_pattern):
    RunCmd(['cat', report])
    os.remove(report)
def MainTestWrapper(options):
  """Run the full device test sequence with guaranteed post-test cleanup.

  Device setup, optional APK installs, the selected test steps and optional
  coverage upload run inside a try block; logcat dump, stack tool, the test
  report and heartbeat cleanup always run in the finally clause.
  """
  try:
    # Spawn logcat monitor
    SpawnLogcatMonitor()
    # Run all device setup steps
    for _, cmd in GetDeviceSetupStepCmds():
      cmd(options)
    if options.install:
      for i in options.install:
        install_obj = INSTALLABLE_PACKAGES[i]
        InstallApk(options, install_obj, print_step=True)
    if options.test_filter:
      bb_utils.RunSteps(options.test_filter, GetTestStepCmds(), options)
    if options.coverage_bucket:
      coverage_html = GenerateJavaCoverageReport(options)
      UploadHTML(options, '%s/java' % options.coverage_bucket, coverage_html,
                 'Coverage Report')
      shutil.rmtree(coverage_html, ignore_errors=True)
    if options.experimental:
      RunTestSuites(options, gtest_config.EXPERIMENTAL_TEST_SUITES)
  finally:
    # Run all post test steps
    LogcatDump(options)
    if not options.disable_stack_tool:
      RunStackToolSteps(options)
    GenerateTestReport(options)
    # KillHostHeartbeat() has logic to check if heartbeat process is running,
    # and kills only if it finds the process is running on the host.
    provision_devices.KillHostHeartbeat()
    if options.cleanup:
      shutil.rmtree(os.path.join(CHROME_OUT_DIR, options.target),
                    ignore_errors=True)
def GetDeviceStepsOptParser():
  """Build the option parser for the device-side test steps."""
  parser = bb_utils.GetParser()
  parser.add_option('--experimental', action='store_true',
                    help='Run experiemental tests')
  parser.add_option('-f', '--test-filter', metavar='<filter>', default=[],
                    action='append',
                    help=('Run a test suite. Test suites: "%s"' %
                          '", "'.join(VALID_TESTS)))
  parser.add_option('--gtest-filter',
                    help='Filter for running a subset of tests of a gtest test')
  parser.add_option('--asan', action='store_true', help='Run tests with asan.')
  parser.add_option('--install', metavar='<apk name>', action="append",
                    help='Install an apk by name')
  parser.add_option('--no-reboot', action='store_true',
                    help='Do not reboot devices during provisioning.')
  parser.add_option('--coverage-bucket',
                    help=('Bucket name to store coverage results. Coverage is '
                          'only run if this is set.'))
  parser.add_option('--restart-usb', action='store_true',
                    help='Restart usb ports before device status check.')
  parser.add_option(
      '--flakiness-server',
      help=('The flakiness dashboard server to which the results should be '
            'uploaded.'))
  parser.add_option(
      '--auto-reconnect', action='store_true',
      help='Push script to device which restarts adbd on disconnections.')
  parser.add_option('--skip-wipe', action='store_true',
                    help='Do not wipe devices during provisioning.')
  parser.add_option('--disable-location', action='store_true',
                    help='Disable location settings.')
  parser.add_option(
      '--logcat-dump-output',
      help='The logcat dump output will be "tee"-ed into this file')
  # During perf bisect processing, a separate working directory is created
  # under which builds are produced. Therefore we should look for relevant
  # output files under this directory.
  # (/b/build/slave/<slave_name>/build/bisect/src/out)
  parser.add_option(
      '--chrome-output-dir',
      help='Chrome output directory to be used while bisecting.')
  parser.add_option('--disable-stack-tool', action='store_true',
                    help='Do not run stack tool.')
  parser.add_option('--asan-symbolize', action='store_true',
                    help='Run stack tool for ASAN')
  parser.add_option('--cleanup', action='store_true',
                    help='Delete out/<target> directory at the end of the run.')
  return parser
def main(argv):
  """Entry point: validate device-step options, then run the test wrapper."""
  parser = GetDeviceStepsOptParser()
  options, args = parser.parse_args(argv[1:])
  if args:
    return sys.exit('Unused args %s' % args)
  # Reject any --test-filter value that GetTestStepCmds() cannot serve.
  unknown_tests = set(options.test_filter) - VALID_TESTS
  if unknown_tests:
    return sys.exit('Unknown tests %s' % list(unknown_tests))
  setattr(options, 'target', options.factory_properties.get('target', 'Debug'))
  if options.chrome_output_dir:
    # Perf bisects build into a different output dir; retarget the module
    # globals so all steps read/write from there.
    global CHROME_OUT_DIR
    global LOGCAT_DIR
    CHROME_OUT_DIR = options.chrome_output_dir
    LOGCAT_DIR = os.path.join(CHROME_OUT_DIR, 'logcat')
  if options.coverage_bucket:
    setattr(options, 'coverage_dir',
            os.path.join(CHROME_OUT_DIR, options.target, 'coverage'))
  MainTestWrapper(options)


if __name__ == '__main__':
  sys.exit(main(sys.argv))

View File

@ -0,0 +1,133 @@
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import json
import sys
import bb_utils
import bb_annotations
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from pylib import constants
# Location of the buildbot slave helper scripts.
SLAVE_SCRIPTS_DIR = os.path.join(bb_utils.BB_BUILD_DIR, 'scripts', 'slave')
VALID_HOST_TESTS = set(['check_webview_licenses'])
# Parent of the src checkout; slave scripts are run from here.
DIR_BUILD_ROOT = os.path.dirname(constants.DIR_SOURCE_ROOT)

# Short hand for RunCmd which is used extensively in this file.
RunCmd = bb_utils.RunCmd


def SrcPath(*path):
  """Return an absolute path for *path components rooted at the src dir."""
  return os.path.join(constants.DIR_SOURCE_ROOT, *path)
def CheckWebViewLicenses(_):
  """Scan android_webview sources for license issues (warning only)."""
  bb_annotations.PrintNamedStep('check_licenses')
  scanner = SrcPath('android_webview', 'tools', 'webview_licenses.py')
  RunCmd([scanner, 'scan'], warning_code=1)
def RunHooks(build_type):
  """Check landmines and run gclient hooks, clobbering the build directory
  first if landmines were triggered or BUILDBOT_CLOBBER is set."""
  RunCmd([SrcPath('build', 'landmines.py')])
  build_path = SrcPath('out', build_type)
  landmine_path = os.path.join(build_path, '.landmines_triggered')
  clobber_env = os.environ.get('BUILDBOT_CLOBBER')
  if clobber_env or os.path.isfile(landmine_path):
    bb_annotations.PrintNamedStep('Clobber')
    if not clobber_env:
      # Only landmines triggered the clobber; echo the reasons to the log.
      print 'Clobbering due to triggered landmines:'
      with open(landmine_path) as f:
        print f.read()
    RunCmd(['rm', '-rf', build_path])
  bb_annotations.PrintNamedStep('runhooks')
  RunCmd(['gclient', 'runhooks'], halt_on_failure=True)
def Compile(options):
  """Run hooks, then compile the target with ninja + goma."""
  RunHooks(options.target)
  cmd = [os.path.join(SLAVE_SCRIPTS_DIR, 'compile.py'),
         '--build-tool=ninja',
         '--compiler=goma',
         '--target=%s' % options.target,
         '--goma-dir=%s' % bb_utils.GOMA_DIR]
  bb_annotations.PrintNamedStep('compile')
  if options.build_targets:
    cmd += ['--build-args', ' '.join(options.build_targets.split(','))]
  RunCmd(cmd, halt_on_failure=True, cwd=DIR_BUILD_ROOT)
def ZipBuild(options):
  """Archive the build output via the slave zip_build.py script."""
  bb_annotations.PrintNamedStep('zip_build')
  cmd = [os.path.join(SLAVE_SCRIPTS_DIR, 'zip_build.py'),
         '--src-dir', constants.DIR_SOURCE_ROOT,
         '--exclude-files', 'lib.target,gen,android_webview,jingle_unittests']
  RunCmd(cmd + bb_utils.EncodeProperties(options), cwd=DIR_BUILD_ROOT)
def ExtractBuild(options):
  """Downloads and unpacks a build previously archived by ZipBuild."""
  bb_annotations.PrintNamedStep('extract_build')
  extract_cmd = [os.path.join(SLAVE_SCRIPTS_DIR, 'extract_build.py')]
  extract_cmd.extend(bb_utils.EncodeProperties(options))
  RunCmd(extract_cmd, cwd=DIR_BUILD_ROOT)
def BisectPerfRegression(options):
  """Prepares a bisect working dir, then runs the perf-regression bisect."""
  extra = ['--extra_src', options.extra_src] if options.extra_src else []
  work_dir = os.path.join(constants.DIR_SOURCE_ROOT, os.pardir)
  RunCmd([SrcPath('tools', 'prepare-bisect-perf-regression.py'),
          '-w', work_dir])
  bisect_cmd = [
      SrcPath('tools', 'run-bisect-perf-regression.py'),
      '-w', work_dir,
      '--build-properties=%s' % json.dumps(options.build_properties),
  ]
  RunCmd(bisect_cmd + extra)
def GetHostStepCmds():
  """Returns ordered (step name, handler function) pairs for all host steps.

  The order here determines execution order in bb_utils.RunSteps.
  """
  return [
      ('compile', Compile),
      ('extract_build', ExtractBuild),
      ('check_webview_licenses', CheckWebViewLicenses),
      ('bisect_perf_regression', BisectPerfRegression),
      ('zip_build', ZipBuild)
  ]
def GetHostStepsOptParser():
  """Returns the option parser for host-side build steps."""
  parser = bb_utils.GetParser()
  parser.add_option('--steps', help='Comma separated list of host tests.')
  parser.add_option('--build-targets', default='',
                    help='Comma separated list of build targets.')
  parser.add_option('--experimental', action='store_true',
                    help='Indicate whether to compile experimental targets.')
  parser.add_option('--extra_src', default='',
                    help='Path to extra source file. If this is supplied, '
                    'bisect script will use it to override default behavior.')
  return parser
def main(argv):
  """Parses arguments and runs the requested host steps.

  Args:
    argv: Full argument vector; argv[1:] is parsed.
  """
  parser = GetHostStepsOptParser()
  options, args = parser.parse_args(argv[1:])
  if args:
    # No positional arguments are expected; exit with an error message.
    # (The original wrote `return sys.exit(...)`; sys.exit never returns.)
    sys.exit('Unused args %s' % args)

  # Derive convenience attributes from the factory properties. Plain
  # attribute assignment is equivalent to the previous setattr calls.
  options.target = options.factory_properties.get('target', 'Debug')
  options.extra_src = options.factory_properties.get('extra_src', '')

  if options.steps:
    bb_utils.RunSteps(options.steps.split(','), GetHostStepCmds(), options)


if __name__ == '__main__':
  sys.exit(main(sys.argv))

View File

@ -0,0 +1,320 @@
#!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import copy
import json
import os
import pipes
import re
import subprocess
import sys
import bb_utils
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from pylib import constants
# Google Storage bucket used for code-coverage results (see fyi-tests below).
CHROMIUM_COVERAGE_BUCKET = 'chromium-code-coverage'

# Full bot description: id plus host-side and (optional) device-side configs.
# Constructed via the BotConfig() helper so test_obj can default to None.
_BotConfig = collections.namedtuple(
    'BotConfig', ['bot_id', 'host_obj', 'test_obj'])
# Host-side configuration: which script to run and with which steps/args.
HostConfig = collections.namedtuple(
    'HostConfig',
    ['script', 'host_steps', 'extra_args', 'extra_gyp_defines', 'target_arch'])
# Device-side configuration: which test script to run and which tests.
TestConfig = collections.namedtuple('Tests', ['script', 'tests', 'extra_args'])
def BotConfig(bot_id, host_object, test_object=None):
  """Builds a _BotConfig, letting the device-test config default to None."""
  return _BotConfig(bot_id=bot_id, host_obj=host_object,
                    test_obj=test_object)
def DictDiff(d1, d2):
  """Returns a diff-style description of the changes between two dicts.

  Args:
    d1: The 'before' dict; values must be strings.
    d2: The 'after' dict; values must be strings.

  Returns:
    A newline-joined string with '- key=value' lines for removed/changed
    entries and '+ key=value' lines for added/changed entries, in sorted key
    order. Values are shell-quoted for readability.
  """
  diff = []
  # Set union avoids building two throwaway key lists and, unlike
  # `d1.keys() + d2.keys()`, also works on Python 3.
  for key in sorted(set(d1) | set(d2)):
    if key in d1 and d1[key] != d2.get(key):
      diff.append('- %s=%s' % (key, pipes.quote(d1[key])))
    if key in d2 and d2[key] != d1.get(key):
      diff.append('+ %s=%s' % (key, pipes.quote(d2[key])))
  return '\n'.join(diff)
def GetEnvironment(host_obj, testing, extra_env_vars=None):
  """Builds the environment dict used to run the bot's commands.

  Sources build/android/envsetup.sh in a bash subshell (unless testing),
  captures the resulting environment as JSON, and layers GYP defines derived
  from the host config on top.

  Args:
    host_obj: A HostConfig named tuple.
    testing: If True, skip envsetup.sh (avoids presubmit dependence on
        the android deps) and keep the current environment.
    extra_env_vars: Optional dict of extra variables to seed the subshell.

  Returns:
    A dict of environment variables.
  """
  init_env = dict(os.environ)
  init_env['GYP_GENERATORS'] = 'ninja'
  if extra_env_vars:
    init_env.update(extra_env_vars)
  envsetup_cmd = '. build/android/envsetup.sh'
  if testing:
    # Skip envsetup to avoid presubmit dependence on android deps.
    print 'Testing mode - skipping "%s"' % envsetup_cmd
    envsetup_cmd = ':'
  else:
    print 'Running %s' % envsetup_cmd
  # envsetup's own output goes to stderr; env_to_json.py prints the
  # post-envsetup environment as JSON on stdout.
  proc = subprocess.Popen(['bash', '-exc',
      envsetup_cmd + ' >&2; python build/android/buildbot/env_to_json.py'],
      stdout=subprocess.PIPE, stderr=subprocess.PIPE,
      cwd=bb_utils.CHROME_SRC, env=init_env)
  json_env, envsetup_output = proc.communicate()
  if proc.returncode != 0:
    print >> sys.stderr, 'FATAL Failure in envsetup.'
    print >> sys.stderr, envsetup_output
    sys.exit(1)
  env = json.loads(json_env)
  env['GYP_DEFINES'] = env.get('GYP_DEFINES', '') + \
      ' OS=android fastbuild=1 use_goma=1 gomadir=%s' % bb_utils.GOMA_DIR
  if host_obj.target_arch:
    env['GYP_DEFINES'] += ' target_arch=%s' % host_obj.target_arch

  extra_gyp = host_obj.extra_gyp_defines
  if extra_gyp:
    env['GYP_DEFINES'] += ' %s' % extra_gyp
    if re.search('(asan|clang)=1', extra_gyp):
      # asan/clang builds must not use the default gcc CXX_target.
      env.pop('CXX_target', None)

  # Bots checkout chrome in /b/build/slave/<name>/build/src
  build_internal_android = os.path.abspath(os.path.join(
      bb_utils.CHROME_SRC, '..', '..', '..', '..', '..', 'build_internal',
      'scripts', 'slave', 'android'))
  if os.path.exists(build_internal_android):
    env['PATH'] = os.pathsep.join([build_internal_android, env['PATH']])
  return env
def GetCommands(options, bot_config):
  """Returns the command lines to run for the given bot.

  Args:
    options: Options object carrying build/factory properties.
    bot_config: A BotConfig named tuple.

  Returns:
    A list of command argument lists: one host-steps command, plus a
    device-steps command when the bot has a test configuration.
  """
  property_args = bb_utils.EncodeProperties(options)

  host = bot_config.host_obj
  host_cmd = [host.script, '--steps=%s' % ','.join(host.host_steps)]
  host_cmd += property_args
  host_cmd += host.extra_args or []
  commands = [host_cmd]

  test = bot_config.test_obj
  if test:
    device_cmd = [test.script] + property_args
    for test_name in test.tests:
      device_cmd += ['-f', test_name]
    if test.extra_args:
      device_cmd += test.extra_args
    commands.append(device_cmd)
  return commands
def GetBotStepMap():
  """Returns a dict mapping bot ids to their BotConfig.

  B/H/T are shorthand constructors for BotConfig/HostConfig/TestConfig.
  After the explicit configs are registered, copy_map clones existing
  entries under additional ids (with the flakiness server stripped from
  trybots).
  """
  compile_step = ['compile']
  chrome_proxy_tests = ['chrome_proxy']
  python_unittests = ['python_unittests']
  std_host_tests = ['check_webview_licenses']
  std_build_steps = ['compile', 'zip_build']
  std_test_steps = ['extract_build']
  std_tests = ['ui', 'unit']
  telemetry_tests = ['telemetry_perf_unittests']
  telemetry_tests_user_build = ['telemetry_unittests',
                                'telemetry_perf_unittests']
  trial_tests = [
      'base_junit_tests',
      'components_browsertests',
      'gfx_unittests',
      'gl_unittests',
  ]
  flakiness_server = (
      '--flakiness-server=%s' % constants.UPSTREAM_FLAKINESS_SERVER)
  experimental = ['--experimental']
  bisect_chrome_output_dir = os.path.abspath(
      os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
                   os.pardir, 'bisect', 'src', 'out'))
  B = BotConfig
  H = (lambda steps, extra_args=None, extra_gyp=None, target_arch=None:
       HostConfig('build/android/buildbot/bb_host_steps.py', steps, extra_args,
                  extra_gyp, target_arch))
  T = (lambda tests, extra_args=None:
       TestConfig('build/android/buildbot/bb_device_steps.py', tests,
                  extra_args))

  bot_configs = [
      # Main builders
      B('main-builder-dbg', H(std_build_steps + std_host_tests)),
      B('main-builder-rel', H(std_build_steps)),
      B('main-clang-builder',
        H(compile_step, extra_gyp='clang=1 component=shared_library')),
      B('main-clobber', H(compile_step)),
      B('main-tests-rel', H(std_test_steps),
        T(std_tests + telemetry_tests + chrome_proxy_tests,
          ['--cleanup', flakiness_server])),
      B('main-tests', H(std_test_steps),
        T(std_tests, ['--cleanup', flakiness_server])),

      # Other waterfalls
      B('asan-builder-tests', H(compile_step,
                                extra_gyp='asan=1 component=shared_library'),
        T(std_tests, ['--asan', '--asan-symbolize'])),
      B('blink-try-builder', H(compile_step)),
      B('chromedriver-fyi-tests-dbg', H(std_test_steps),
        T(['chromedriver'],
          ['--install=ChromeShell', '--install=ChromeDriverWebViewShell',
           '--skip-wipe', '--disable-location', '--cleanup'])),
      B('fyi-x86-builder-dbg',
        H(compile_step + std_host_tests, experimental, target_arch='ia32')),
      B('fyi-builder-dbg',
        H(std_build_steps + std_host_tests, experimental,
          extra_gyp='emma_coverage=1')),
      B('x86-builder-dbg',
        H(compile_step + std_host_tests, target_arch='ia32')),
      B('fyi-builder-rel', H(std_build_steps, experimental)),
      B('fyi-tests', H(std_test_steps),
        T(std_tests + python_unittests,
          ['--experimental', flakiness_server,
           '--coverage-bucket', CHROMIUM_COVERAGE_BUCKET,
           '--cleanup'])),
      B('user-build-fyi-tests-dbg', H(std_test_steps),
        T(sorted(telemetry_tests_user_build + trial_tests))),
      B('fyi-component-builder-tests-dbg',
        H(compile_step, extra_gyp='component=shared_library'),
        T(std_tests, ['--experimental', flakiness_server])),
      B('gpu-builder-tests-dbg',
        H(compile_step),
        T(['gpu'], ['--install=ContentShell'])),
      # Pass empty T([]) so that logcat monitor and device status check are run.
      B('perf-bisect-builder-tests-dbg',
        H(['bisect_perf_regression']),
        T([], ['--chrome-output-dir', bisect_chrome_output_dir])),
      B('perf-tests-rel', H(std_test_steps),
        T([], ['--install=ChromeShell', '--cleanup'])),
      B('webkit-latest-webkit-tests', H(std_test_steps),
        T(['webkit_layout', 'webkit'], ['--cleanup', '--auto-reconnect'])),
      B('webkit-latest-contentshell', H(compile_step),
        T(['webkit_layout'], ['--auto-reconnect'])),
      B('builder-unit-tests', H(compile_step), T(['unit'])),

      # Generic builder config (for substring match).
      B('builder', H(std_build_steps)),
  ]

  bot_map = dict((config.bot_id, config) for config in bot_configs)

  # These bots have identical configuration to ones defined earlier.
  copy_map = [
      ('lkgr-clobber', 'main-clobber'),
      ('try-builder-dbg', 'main-builder-dbg'),
      ('try-builder-rel', 'main-builder-rel'),
      ('try-clang-builder', 'main-clang-builder'),
      ('try-fyi-builder-dbg', 'fyi-builder-dbg'),
      ('try-x86-builder-dbg', 'x86-builder-dbg'),
      ('try-tests-rel', 'main-tests-rel'),
      ('try-tests', 'main-tests'),
      ('try-fyi-tests', 'fyi-tests'),
      ('webkit-latest-tests', 'main-tests'),
  ]
  for to_id, from_id in copy_map:
    assert to_id not in bot_map
    # pylint: disable=W0212
    bot_map[to_id] = copy.deepcopy(bot_map[from_id])._replace(bot_id=to_id)

    # Trybots do not upload to flakiness dashboard. They should be otherwise
    # identical in configuration to their trunk building counterparts.
    test_obj = bot_map[to_id].test_obj
    if to_id.startswith('try') and test_obj:
      extra_args = test_obj.extra_args
      if extra_args and flakiness_server in extra_args:
        extra_args.remove(flakiness_server)
  return bot_map
# pylint: disable=W0622
def GetBestMatch(id_map, id):
  """Returns the config whose id best matches |id|.

  Looks first for an exact id match. Failing that, picks the longest config
  id that is a substring of |id|. Returns None when nothing matches.
  """
  config = id_map.get(id)
  if not config:
    # Iterating the dict directly is equivalent to the Python-2-only
    # id_map.iterkeys() and also works on Python 3.
    substring_matches = [x for x in id_map if x in id]
    if substring_matches:
      max_id = max(substring_matches, key=len)
      # Single-argument parenthesized print behaves identically on
      # Python 2 and 3.
      print('Using config from id="%s" (substring match).' % max_id)
      config = id_map[max_id]
  return config
def GetRunBotOptParser():
  """Returns the option parser for bb_run_bot."""
  parser = bb_utils.GetParser()
  parser.add_option('--bot-id', help='Specify bot id directly.')
  parser.add_option('--testing', action='store_true',
                    help='For testing: print, but do not run commands')
  return parser
def GetBotConfig(options, bot_step_map):
  """Resolves the bot configuration for the current bot id.

  Args:
    options: Parsed options, possibly carrying --bot-id.
    bot_step_map: Dict mapping bot ids to BotConfig tuples.

  Returns:
    The matching BotConfig, or None if no id was given or nothing matched.
  """
  bot_id = options.bot_id or options.factory_properties.get('android_bot_id')
  if not bot_id:
    # BUG FIX: the original used `print (sys.stderr, msg)`, which printed a
    # (file, message) tuple to stdout instead of writing the message to
    # stderr. Write to stderr explicitly (works on Python 2 and 3).
    sys.stderr.write(
        'A bot id must be specified through option or factory_props.\n')
    return

  bot_config = GetBestMatch(bot_step_map, bot_id)
  if not bot_config:
    print('Error: config for id="%s" cannot be inferred.' % bot_id)
  return bot_config
def RunBotCommands(options, commands, env):
  """Runs each command in order, stopping at the first failure.

  Args:
    options: Parsed options; options.testing flags dry-run mode downstream.
    commands: List of command argument lists (from GetCommands).
    env: Environment dict for the subprocesses (from GetEnvironment).

  Returns:
    The failing command's return code, or None if every command succeeded.
  """
  print 'Environment changes:'
  print DictDiff(dict(os.environ), env)

  for command in commands:
    print bb_utils.CommandToString(command)
    sys.stdout.flush()
    if options.testing:
      # Child scripts check BUILDBOT_TESTING to stub out real work.
      env['BUILDBOT_TESTING'] = '1'

    return_code = subprocess.call(command, cwd=bb_utils.CHROME_SRC, env=env)
    if return_code != 0:
      return return_code
def main(argv):
  """Entry point: resolves the bot config and runs its commands."""
  # Log the hostname to simplify debugging of per-machine failures.
  proc = subprocess.Popen(
      ['/bin/hostname', '-f'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  hostname_stdout, hostname_stderr = proc.communicate()
  if proc.returncode == 0:
    print 'Running on: ' + hostname_stdout
  else:
    print >> sys.stderr, 'WARNING: failed to run hostname'
    print >> sys.stderr, hostname_stdout
    print >> sys.stderr, hostname_stderr
    sys.exit(1)

  parser = GetRunBotOptParser()
  options, args = parser.parse_args(argv[1:])
  if args:
    parser.error('Unused args: %s' % args)

  bot_config = GetBotConfig(options, GetBotStepMap())
  if not bot_config:
    sys.exit(1)

  print 'Using config:', bot_config

  commands = GetCommands(options, bot_config)
  for command in commands:
    print 'Will run: ', bb_utils.CommandToString(command)
  print

  env = GetEnvironment(bot_config.host_obj, options.testing)
  return RunBotCommands(options, commands, env)


if __name__ == '__main__':
  sys.exit(main(sys.argv))

View File

@ -0,0 +1,100 @@
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import optparse
import os
import pipes
import subprocess
import sys
import bb_annotations
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from pylib import constants
# True when running under the buildbot test harness; SpawnCmd then returns
# a mock process instead of spawning anything.
TESTING = 'BUILDBOT_TESTING' in os.environ

# Root of the buildbot checkout (seven directory levels above this file).
BB_BUILD_DIR = os.path.abspath(
    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
                 os.pardir, os.pardir, os.pardir, os.pardir))

# Chromium source root (three levels above this file).
CHROME_SRC = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '..', '..', '..'))

# TODO: Figure out how to merge this with pylib.cmd_helper.OutDirectory().
CHROME_OUT_DIR = os.path.join(CHROME_SRC, 'out')

# GOMA_DIR from the environment wins; otherwise use the bb checkout's goma.
GOMA_DIR = os.environ.get('GOMA_DIR', os.path.join(BB_BUILD_DIR, 'goma'))

GSUTIL_PATH = os.path.join(BB_BUILD_DIR, 'third_party', 'gsutil', 'gsutil')
def CommandToString(command):
  """Returns quoted command that can be run in bash shell."""
  return ' '.join(pipes.quote(part) for part in command)
def SpawnCmd(command, stdout=None, cwd=CHROME_SRC):
  """Spawn a process without waiting for termination.

  Args:
    command: Command list to run.
    stdout: Passed through to subprocess.Popen.
    cwd: Working directory for the command.

  Returns:
    A Popen-like object. Under TESTING, a stub whose wait() reports success
    and whose communicate() returns empty output.
  """
  print '>', CommandToString(command)
  sys.stdout.flush()
  if TESTING:
    class MockPopen(object):
      @staticmethod
      def wait():
        return 0
      @staticmethod
      def communicate():
        return '', ''
    return MockPopen()
  return subprocess.Popen(command, cwd=cwd, stdout=stdout)
def RunCmd(command, flunk_on_failure=True, halt_on_failure=False,
           warning_code=constants.WARNING_EXIT_CODE, stdout=None,
           cwd=CHROME_SRC):
  """Run a command relative to the chrome source root.

  Args:
    command: Command list to run.
    flunk_on_failure: When True, a nonzero exit other than |warning_code|
        is annotated as an error; otherwise as a warning.
    halt_on_failure: When True, such a failure also exits this script.
    warning_code: Exit code to treat as a warning rather than an error.
    stdout: Passed through to the subprocess.
    cwd: Working directory for the command.

  Returns:
    The command's exit code.
  """
  code = SpawnCmd(command, stdout, cwd).wait()
  print '<', CommandToString(command)
  if code != 0:
    print 'ERROR: process exited with code %d' % code
    if code != warning_code and flunk_on_failure:
      bb_annotations.PrintError()
    else:
      bb_annotations.PrintWarning()
    # Allow steps to have both halting (i.e. 1) and non-halting exit codes.
    if code != warning_code and halt_on_failure:
      print 'FATAL %d != %d' % (code, warning_code)
      sys.exit(1)
  return code
def GetParser():
  """Returns an OptionParser whose property flags decode JSON into dicts."""
  def _JsonCallback(option, _, value, parser):
    # Decode the raw string into a dict the moment the option is parsed.
    setattr(parser.values, option.dest, json.loads(value))

  parser = optparse.OptionParser()
  for flag, help_text in (
      ('--build-properties', 'build properties in JSON format'),
      ('--factory-properties', 'factory properties in JSON format')):
    # Note: the {} default literal is evaluated per iteration, so each
    # option gets its own (unshared) default dict.
    parser.add_option(flag, action='callback', callback=_JsonCallback,
                      type='string', default={}, help=help_text)
  return parser
def EncodeProperties(options):
  """Re-encodes factory/build properties as JSON command-line flags."""
  factory_flag = '--factory-properties=%s' % json.dumps(
      options.factory_properties)
  build_flag = '--build-properties=%s' % json.dumps(options.build_properties)
  return [factory_flag, build_flag]
def RunSteps(steps, step_cmds, options):
  """Runs the requested steps in the order given by |step_cmds|.

  Args:
    steps: Names of the steps to run; unknown names abort the script.
    step_cmds: Ordered (name, callable) pairs of all known steps.
    options: Options object passed to each step callable.
  """
  unknown_steps = set(steps) - set(step for step, _ in step_cmds)
  if unknown_steps:
    print >> sys.stderr, 'FATAL: Unknown steps %s' % list(unknown_steps)
    sys.exit(1)

  for step, cmd in step_cmds:
    if step in steps:
      cmd(options)

View File

@ -0,0 +1,11 @@
#!/usr/bin/python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Encode current environment into json.
import json
import os

# Dump the whole process environment as one JSON object on stdout; consumed
# by bb_run_bot.GetEnvironment() to capture the result of envsetup.sh.
print json.dumps(dict(os.environ))

View File

@ -0,0 +1,35 @@
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotProcesses(bot_process_map):
  """Waits on each bot process; returns a nonzero code if any failed.

  Args:
    bot_process_map: Iterable of (bot id, Popen) pairs.

  Returns:
    The OR of all bot return codes (0 iff every bot succeeded).
  """
  code = 0
  for bot, proc in bot_process_map:
    _, err = proc.communicate()
    # Accumulate failures so every bot gets reported before returning.
    code |= proc.returncode
    if proc.returncode != 0:
      print 'Error running the bot script with id="%s"' % bot, err

  return code
def main():
  """Smoke-tests bb_run_bot.py in --testing mode for every known bot id."""
  # Iterating the step map yields the bot ids (dict keys).
  procs = [
      (bot, subprocess.Popen(
          [os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
           '--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
      for bot in bb_run_bot.GetBotStepMap()]
  return RunBotProcesses(procs)


if __name__ == '__main__':
  sys.exit(main())

View File

@ -0,0 +1,82 @@
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file is meant to add more loadable libs into Chrome_apk.
#
# This is useful when building Chrome_apk with some loadable modules which are
# not included in Chrome_apk.
# As an example, when building Chrome_apk with
# libpeer_target_type=loadable_module,
# the libpeerconnection.so is not included in Chrome_apk. To add the missing
# lib, follow the steps below:
# - Run gyp:
# GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" CHROMIUM_GYP_FILE="build/android/chrome_with_libs.gyp" build/gyp_chromium
# - Build chrome_with_libs:
# ninja (or make) chrome_with_libs
#
# This tool also allows replacing the loadable module with a new one via the
# following steps:
# - Build Chrome_apk with the gyp define:
# GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" build/gyp_chromium
# ninja (or make) Chrome_apk
# - Replace libpeerconnection.so with a new one:
# cp the_new_one path/to/libpeerconnection.so
# - Run gyp:
# GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" CHROMIUM_GYP_FILE="build/android/chrome_with_libs.gyp" build/gyp_chromium
# - Build chrome_with_libs:
# ninja (or make) chrome_with_libs
{
  'targets': [
    {
      # An "All" target is required for a top-level gyp-file.
      'target_name': 'All',
      'type': 'none',
      'dependencies': [
        'chrome_with_libs',
      ],
    },
    {
      'target_name': 'chrome_with_libs',
      'type': 'none',
      'variables': {
        'intermediate_dir': '<(PRODUCT_DIR)/prebuilt_libs/',
        'chrome_unsigned_path': '<(PRODUCT_DIR)/chrome_apk/Chrome-unsigned.apk',
        'chrome_with_libs_unsigned': '<(intermediate_dir)/Chrome-with-libs-unsigned.apk',
        'chrome_with_libs_final': '<(PRODUCT_DIR)/apks/Chrome-with-libs.apk',
      },
      'dependencies': [
        '<(DEPTH)/clank/native/framework/clank.gyp:chrome_apk'
      ],
      # Stage the prebuilt loadable module where the repack step expects it.
      'copies': [
        {
          'destination': '<(intermediate_dir)/lib/<(android_app_abi)',
          'files': [
            '<(PRODUCT_DIR)/libpeerconnection.so',
          ],
        },
      ],
      'actions': [
        {
          # Repack Chrome-unsigned.apk with the extra libraries included.
          'action_name': 'put_libs_in_chrome',
          'variables': {
            'inputs': [
              '<(intermediate_dir)/lib/<(android_app_abi)/libpeerconnection.so',
            ],
            'input_apk_path': '<(chrome_unsigned_path)',
            'output_apk_path': '<(chrome_with_libs_unsigned)',
            'libraries_top_dir%': '<(intermediate_dir)',
          },
          'includes': [ 'create_standalone_apk_action.gypi' ],
        },
        {
          # Sign and zipalign the repacked APK.
          'action_name': 'finalize_chrome_with_libs',
          'variables': {
            'input_apk_path': '<(chrome_with_libs_unsigned)',
            'output_apk_path': '<(chrome_with_libs_final)',
          },
          'includes': [ 'finalize_apk_action.gypi'],
        },
      ],
    }],
}

View File

@ -0,0 +1,41 @@
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file is meant to be included into an action to provide an action that
# combines a directory of shared libraries and an incomplete APK into a
# standalone APK.
#
# To use this, create a gyp action with the following form:
# {
# 'action_name': 'some descriptive action name',
# 'variables': {
# 'inputs': [ 'input_path1', 'input_path2' ],
# 'input_apk_path': '<(unsigned_apk_path)',
# 'output_apk_path': '<(unsigned_standalone_apk_path)',
# 'libraries_top_dir': '<(libraries_top_dir)',
# },
# 'includes': [ 'relative/path/to/create_standalone_apk_action.gypi' ],
# },
{
  'message': 'Creating standalone APK: <(output_apk_path)',
  'variables': {
    # Default for the optional per-caller extra input list ('>@(inputs)').
    'inputs': [],
  },
  'inputs': [
    '<(DEPTH)/build/android/gyp/util/build_utils.py',
    '<(DEPTH)/build/android/gyp/create_standalone_apk.py',
    '<(input_apk_path)',
    '>@(inputs)',
  ],
  'outputs': [
    '<(output_apk_path)',
  ],
  'action': [
    'python', '<(DEPTH)/build/android/gyp/create_standalone_apk.py',
    '--libraries-top-dir=<(libraries_top_dir)',
    '--input-apk-path=<(input_apk_path)',
    '--output-apk-path=<(output_apk_path)',
  ],
}

View File

@ -0,0 +1,61 @@
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is the set of recommended gyp variable settings for Chrome for Android development.
#
# These can be used by copying this file to $CHROME_SRC/chrome/supplement.gypi.
#
# Even better, create chrome/supplement.gypi containing the following:
# {
# 'includes': [ '../build/android/developer_recommended_flags.gypi' ]
# }
# and you'll get new settings automatically.
# When using this method, you can override individual settings by setting them unconditionally (with
# no %) in chrome/supplement.gypi.
# I.e. to disable gyp_managed_install but use everything else:
# {
# 'variables': {
# 'gyp_managed_install': 0,
# },
# 'includes': [ '../build/android/developer_recommended_flags.gypi' ]
# }
{
  'variables': {
    'variables': {
      # Set component to 'shared_library' to enable the component build. This builds native code as
      # many small shared libraries instead of one monolithic library. This slightly reduces the time
      # required for incremental builds.
      'component%': 'shared_library',
    },
    # Re-export the nested default so later gyp files can still override it.
    'component%': '<(component)',

    # When gyp_managed_install is set to 1, building an APK will install that APK on the connected
    # device(/emulator). To install on multiple devices (or onto a new device), build the APK once
    # with each device attached. This greatly reduces the time required for incremental builds.
    #
    # This comes with some caveats:
    #   Only works with a single device connected (it will print a warning if
    #     zero or multiple devices are attached).
    #   Device must be flashed with a user-debug unsigned Android build.
    #   Some actions are always run (i.e. ninja will never say "no work to do").
    'gyp_managed_install%': 1,

    # With gyp_managed_install, we do not necessarily need a standalone APK.
    # When create_standalone_apk is set to 1, we will build a standalone APK
    # anyway. For even faster builds, you can set create_standalone_apk to 0.
    'create_standalone_apk%': 1,

    # Set clang to 1 to use the clang compiler. Clang has much (much, much) better warning/error
    # messages than gcc.
    # TODO(cjhopman): Enable this when http://crbug.com/156420 is addressed. Until then, users can
    # set clang to 1, but Android stack traces will sometimes be incomplete.
    #'clang%': 1,

    # Set fastbuild to 1 to build with less debugging information. This can greatly decrease linking
    # time. The downside is that stack traces will be missing useful information (like line
    # numbers).
    #'fastbuild%': 1,
  },
}

View File

@ -0,0 +1,60 @@
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file is meant to be included into an action to provide a rule that dexes
# compiled java files. If proguard_enabled == "true" and CONFIGURATION_NAME ==
# "Release", then it will dex the proguard_enabled_input_path instead of the
# normal dex_input_paths/dex_generated_input_paths.
#
# To use this, create a gyp target with the following form:
# {
# 'action_name': 'some name for the action'
# 'actions': [
# 'variables': {
# 'dex_input_paths': [ 'files to dex (when proguard is not used) and add to input paths' ],
# 'dex_generated_input_dirs': [ 'dirs that contain generated files to dex' ],
#
# # For targets that use proguard:
# 'proguard_enabled': 'true',
# 'proguard_enabled_input_path': 'path to dex when using proguard',
# },
# 'includes': [ 'relative/path/to/dex_action.gypi' ],
# ],
# },
#
{
  'message': 'Creating dex file: <(output_path)',
  'variables': {
    # Defaults for the optional caller-supplied variables described above.
    'dex_input_paths': [],
    'dex_generated_input_dirs': [],
    'proguard_enabled%': 'false',
    'proguard_enabled_input_path%': '',
    'dex_no_locals%': 0,
    'dex_additional_options': [],
  },
  'inputs': [
    '<(DEPTH)/build/android/gyp/util/build_utils.py',
    '<(DEPTH)/build/android/gyp/util/md5_check.py',
    '<(DEPTH)/build/android/gyp/dex.py',
    '>@(dex_input_paths)',
  ],
  'outputs': [
    # dex.py also records its inputs in <(output_path).inputs for md5_check.
    '<(output_path)',
    '<(output_path).inputs',
  ],
  'action': [
    'python', '<(DEPTH)/build/android/gyp/dex.py',
    '--dex-path=<(output_path)',
    '--android-sdk-tools=<(android_sdk_tools)',
    '--output-directory=<(PRODUCT_DIR)',
    '--configuration-name=<(CONFIGURATION_NAME)',
    '--proguard-enabled=>(proguard_enabled)',
    '--proguard-enabled-input-path=<(proguard_enabled_input_path)',
    '--no-locals=>(dex_no_locals)',
    '>@(dex_additional_options)',
    '>@(dex_input_paths)',
    '>@(dex_generated_input_dirs)',
  ]
}

View File

@ -0,0 +1,20 @@
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file is meant to be included to disable LTO on a target.
{
  'target_conditions': [
    ['_toolset=="target"', {
      'conditions': [
        ['OS=="android" and (use_lto==1 or use_lto_o2==1)', {
          # Strip the LTO compile flags that the common build adds when
          # LTO is globally enabled.
          'cflags!': [
            '-flto',
            '-ffat-lto-objects',
          ],
        }],
      ],
    }],
  ],
}

View File

@ -0,0 +1,6 @@
This file needs to live here until http://crbug.com/158155 has been fixed.

The ant build system requires that a src folder always be present, but for
some of our targets that is not the case. Giving ant an empty src folder
works nicely, though.

View File

@ -0,0 +1 @@
# Used for apk targets that do not need proguard. See build/java_apk.gypi.

42
build/android/enable_asserts.py Executable file
View File

@ -0,0 +1,42 @@
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Enables dalvik vm asserts in the android device."""
import argparse
import sys
from pylib.device import device_utils
def main():
  """Toggles dalvik.vm.enableassertions on every attached device."""
  parser = argparse.ArgumentParser()
  set_asserts_group = parser.add_mutually_exclusive_group(required=True)
  set_asserts_group.add_argument(
      '--enable_asserts', dest='set_asserts', action='store_true',
      help='Sets the dalvik.vm.enableassertions property to "all"')
  set_asserts_group.add_argument(
      '--disable_asserts', dest='set_asserts', action='store_false',
      help='Removes the dalvik.vm.enableassertions property')
  args = parser.parse_args()

  # TODO(jbudorick): Accept optional serial number and run only for the
  # specified device when present.
  devices = device_utils.DeviceUtils.parallel()

  def _ApplyAssertsAndRestart(device):
    # Restart the Android runtime only when the property actually changed.
    if device.SetJavaAsserts(args.set_asserts):
      device.RunShellCommand('stop')
      device.RunShellCommand('start')

  devices.pMap(_ApplyAssertsAndRestart)
  return 0


if __name__ == '__main__':
  sys.exit(main())

62
build/android/envsetup.sh Executable file
View File

@ -0,0 +1,62 @@
#!/bin/bash
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Sets up environment for building Chromium on Android.
# Make sure we're being sourced (possibly by another script). Check for bash
# since zsh sets $0 when sourcing.
if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then
echo "ERROR: envsetup must be sourced."
exit 1
fi
# This only exists to set local variables. Don't call this manually.
# Determines CHROME_SRC, warns when invoked from outside it, and extends
# PATH with the Android SDK and Chromium Android tooling.
android_envsetup_main() {
  local SCRIPT_PATH="$1"
  local SCRIPT_DIR="$(dirname "$SCRIPT_PATH")"

  local CURRENT_DIR="$(readlink -f "${SCRIPT_DIR}/../../")"
  if [[ -z "${CHROME_SRC}" ]]; then
    # If $CHROME_SRC was not set, assume current directory is CHROME_SRC.
    local CHROME_SRC="${CURRENT_DIR}"
  fi

  if [[ "${CURRENT_DIR/"${CHROME_SRC}"/}" == "${CURRENT_DIR}" ]]; then
    # If current directory is not in $CHROME_SRC, it might be set for other
    # source tree. If $CHROME_SRC was set correctly and we are in the correct
    # directory, "${CURRENT_DIR/"${CHROME_SRC}"/}" will be "".
    # Otherwise, it will equal to "${CURRENT_DIR}"
    echo "Warning: Current directory is out of CHROME_SRC, it may not be \
the one you want."
    echo "${CHROME_SRC}"
  fi

  # Allow the caller to override a few environment variables. If any of them is
  # unset, we default to a sane value that's known to work. This allows for
  # experimentation with a custom SDK.
  if [[ -z "${ANDROID_SDK_ROOT}" || ! -d "${ANDROID_SDK_ROOT}" ]]; then
    local ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_tools/sdk/"
  fi

  # Add Android SDK tools to system path.
  export PATH=$PATH:${ANDROID_SDK_ROOT}/platform-tools

  # Add Android utility tools to the system path.
  export PATH=$PATH:${ANDROID_SDK_ROOT}/tools/

  # Add Chromium Android development scripts to system path.
  # Must be after CHROME_SRC is set.
  export PATH=$PATH:${CHROME_SRC}/build/android

  export ENVSETUP_GYP_CHROME_SRC=${CHROME_SRC}  # TODO(thakis): Remove.
}
# In zsh, $0 is the name of the file being sourced.
android_envsetup_main "${BASH_SOURCE:-$0}"
unset -f android_envsetup_main
# Deprecated shim: forwards to build/gyp_chromium, using the CHROME_SRC
# captured (as ENVSETUP_GYP_CHROME_SRC) when this file was sourced.
android_gyp() {
  echo "Please call build/gyp_chromium instead. android_gyp is going away."
  "${ENVSETUP_GYP_CHROME_SRC}/build/gyp_chromium" --depth="${ENVSETUP_GYP_CHROME_SRC}" --check "$@"
}

View File

@ -0,0 +1,49 @@
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file is meant to be included into an action to provide an action that
# signs and zipaligns an APK.
#
# To use this, create a gyp action with the following form:
# {
# 'action_name': 'some descriptive action name',
# 'variables': {
# 'input_apk_path': 'relative/path/to/input.apk',
# 'output_apk_path': 'relative/path/to/output.apk',
# },
# 'includes': [ '../../build/android/finalize_apk_action.gypi' ],
# },
#
{
  'message': 'Signing/aligning <(_target_name) APK: <(input_apk_path)',
  'variables': {
    # Default signing config: the checked-in Chromium debug keystore.
    'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore',
    'keystore_name%': 'chromiumdebugkey',
    'keystore_password%': 'chromium',
    'zipalign_path%': '<(android_sdk_tools)/zipalign',
    'rezip_apk_jar_path%': '<(PRODUCT_DIR)/lib.java/rezip_apk.jar',
    'load_library_from_zip%': 0,
  },
  'inputs': [
    '<(DEPTH)/build/android/gyp/finalize_apk.py',
    '<(DEPTH)/build/android/gyp/util/build_utils.py',
    '<(keystore_path)',
    '<(input_apk_path)',
  ],
  'outputs': [
    '<(output_apk_path)',
  ],
  'action': [
    'python', '<(DEPTH)/build/android/gyp/finalize_apk.py',
    '--zipalign-path=<(zipalign_path)',
    '--unsigned-apk-path=<(input_apk_path)',
    '--final-apk-path=<(output_apk_path)',
    '--key-path=<(keystore_path)',
    '--key-name=<(keystore_name)',
    '--key-passwd=<(keystore_password)',
    '--load-library-from-zip=<(load_library_from_zip)',
    '--rezip-apk-jar-path=<(rezip_apk_jar_path)',
  ],
}

View File

@ -0,0 +1,76 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file is meant to be included into an action to provide an action that
# signs and zipaligns split APKs.
#
# Required variables:
# apk_name - Base name of the apk.
# Optional variables:
# density_splits - Whether to process density splits
#   language_splits - Whether to process language splits
{
  'variables': {
    # Default signing config: the checked-in Chromium debug keystore.
    'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore',
    'keystore_name%': 'chromiumdebugkey',
    'keystore_password%': 'chromium',
    'zipalign_path%': '<(android_sdk_tools)/zipalign',
    'density_splits%': 0,
    'language_splits%': [],
    'resource_packaged_apk_name': '<(apk_name)-resources.ap_',
    'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)',
    'base_output_path': '<(PRODUCT_DIR)/apks/<(apk_name)',
  },
  'inputs': [
    '<(DEPTH)/build/android/gyp/finalize_splits.py',
    '<(DEPTH)/build/android/gyp/finalize_apk.py',
    '<(DEPTH)/build/android/gyp/util/build_utils.py',
    '<(keystore_path)',
  ],
  'action': [
    'python', '<(DEPTH)/build/android/gyp/finalize_splits.py',
    '--resource-packaged-apk-path=<(resource_packaged_apk_path)',
    '--base-output-path=<(base_output_path)',
    '--zipalign-path=<(zipalign_path)',
    '--key-path=<(keystore_path)',
    '--key-name=<(keystore_name)',
    '--key-passwd=<(keystore_password)',
  ],
  'conditions': [
    # Density splits: one APK per screen-density bucket.
    ['density_splits == 1', {
      'message': 'Signing/aligning <(_target_name) density splits',
      'inputs': [
        '<(resource_packaged_apk_path)_hdpi',
        '<(resource_packaged_apk_path)_xhdpi',
        '<(resource_packaged_apk_path)_xxhdpi',
        '<(resource_packaged_apk_path)_xxxhdpi',
        '<(resource_packaged_apk_path)_tvdpi',
      ],
      'outputs': [
        '<(base_output_path)-density-hdpi.apk',
        '<(base_output_path)-density-xhdpi.apk',
        '<(base_output_path)-density-xxhdpi.apk',
        '<(base_output_path)-density-xxxhdpi.apk',
        '<(base_output_path)-density-tvdpi.apk',
      ],
      'action': [
        '--densities=hdpi,xhdpi,xxhdpi,xxxhdpi,tvdpi',
      ],
    }],
    # Language splits: one APK per locale listed in language_splits.
    ['language_splits != []', {
      'message': 'Signing/aligning <(_target_name) language splits',
      'inputs': [
        "<!@(python <(DEPTH)/build/apply_locales.py '<(resource_packaged_apk_path)_ZZLOCALE' <(language_splits))",
      ],
      'outputs': [
        "<!@(python <(DEPTH)/build/apply_locales.py '<(base_output_path)-lang-ZZLOCALE.apk' <(language_splits))",
      ],
      'action': [
        '--languages=<(language_splits)',
      ],
    }],
  ],
}

View File

@ -0,0 +1,22 @@
{
'action_name': 'findbugs_<(_target_name)',
'message': 'Running findbugs on <(_target_name)',
'variables': {
},
'inputs': [
'<(DEPTH)/build/android/findbugs_diff.py',
'<(DEPTH)/build/android/findbugs_filter/findbugs_exclude.xml',
'<(DEPTH)/build/android/pylib/utils/findbugs.py',
'<(findbugs_target_jar_path)',
],
'outputs': [
'<(stamp_path)',
],
'action': [
'python', '<(DEPTH)/build/android/findbugs_diff.py',
'--auxclasspath-gyp', '>(auxclasspath)',
'--stamp', '<(stamp_path)',
'<(findbugs_target_jar_path)',
],
}

110
build/android/findbugs_diff.py Executable file
View File

@ -0,0 +1,110 @@
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs findbugs, and returns an error code if there are new warnings.
Other options
--only-analyze used to only analyze the class you are interested.
--relase-build analyze the classes in out/Release directory.
--findbugs-args used to passin other findbugs's options.
Run
$CHROMIUM_SRC/third_party/findbugs/bin/findbugs -textui for details.
"""
import argparse
import os
import sys
from pylib import constants
from pylib.utils import findbugs
_DEFAULT_BASE_DIR = os.path.join(
constants.DIR_SOURCE_ROOT, 'build', 'android', 'findbugs_filter')
sys.path.append(
os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'android', 'gyp'))
from util import build_utils
def main():
  """Runs FindBugs over the given JARs and reports any warnings.

  Returns:
    The number of FindBugs warnings found, so the process exit code is
    non-zero exactly when new issues were reported.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '-a', '--auxclasspath', default=None, dest='auxclasspath',
      help='Set aux classpath for analysis.')
  parser.add_argument(
      '--auxclasspath-gyp', dest='auxclasspath_gyp',
      help='A gyp list containing the aux classpath for analysis')
  parser.add_argument(
      '-o', '--only-analyze', default=None,
      dest='only_analyze', help='Only analyze the given classes and packages.')
  parser.add_argument(
      '-e', '--exclude', default=None, dest='exclude',
      help='Exclude bugs matching given filter.')
  parser.add_argument(
      '-l', '--release-build', action='store_true', dest='release_build',
      help='Analyze release build instead of debug.')
  parser.add_argument(
      '-f', '--findbug-args', default=None, dest='findbug_args',
      help='Additional findbug arguments.')
  parser.add_argument(
      '-b', '--base-dir', default=_DEFAULT_BASE_DIR,
      dest='base_dir', help='Base directory for configuration file.')
  parser.add_argument(
      '--output-file', dest='output_file',
      help='Path to save the output to.')
  parser.add_argument(
      '--stamp', help='Path to touch on success.')
  parser.add_argument(
      '--depfile', help='Path to the depfile. This must be specified as the '
      "action's first output.")
  parser.add_argument(
      'jar_paths', metavar='JAR_PATH', nargs='+',
      help='JAR file to analyze')
  args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
  # --auxclasspath is a colon-separated list; --auxclasspath-gyp is a gyp
  # list literal. Either way, normalize to a Python list of paths.
  if args.auxclasspath:
    args.auxclasspath = args.auxclasspath.split(':')
  elif args.auxclasspath_gyp:
    args.auxclasspath = build_utils.ParseGypList(args.auxclasspath_gyp)
  # Fall back to the checked-in exclusion filter when no explicit one given.
  if args.base_dir:
    if not args.exclude:
      args.exclude = os.path.join(args.base_dir, 'findbugs_exclude.xml')
  findbugs_command, findbugs_warnings = findbugs.Run(
      args.exclude, args.only_analyze, args.auxclasspath,
      args.output_file, args.findbug_args, args.jar_paths)
  if findbugs_warnings:
    print
    print '*' * 80
    print 'FindBugs run via:'
    print findbugs_command
    print
    print 'FindBugs reported the following issues:'
    for warning in sorted(findbugs_warnings):
      print str(warning)
    print '*' * 80
    print
  else:
    # Only record success artifacts (depfile/stamp) on a clean run, so a
    # warning-producing run stays dirty and is re-run by the build.
    if args.depfile:
      build_utils.WriteDepfile(
          args.depfile,
          build_utils.GetPythonDependencies() + args.auxclasspath
          + args.jar_paths)
    if args.stamp:
      build_utils.Touch(args.stamp)
  return len(findbugs_warnings)
if __name__ == '__main__':
sys.exit(main())

View File

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright (c) 2012 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<!--
Documentation: http://findbugs.sourceforge.net/manual/filter.html
In particular, ~ at the start of a string means it's a regex.
-->
<FindBugsFilter>
  <!-- Skip the generated resource classes (including nested classes). -->
  <Match>
    <Class name="~.*\.R(\$\w+)?" />
  </Match>
  <!-- Skip the generated Manifest classes (including nested classes). -->
  <Match>
    <Class name="~org\.chromium\..*\.Manifest(\$\w+)?" />
  </Match>
  <!-- NOTE(review): this <Bug> element appears directly under
       <FindBugsFilter> rather than inside a <Match>; presumably an
       enclosing <Match> wrapper was intended here - confirm against the
       FindBugs filter-file format. -->
  <Bug pattern="DM_STRING_CTOR" />
  <!-- Ignore "reliance on default String encoding" warnings, as we're not multi-platform -->
  <Bug pattern="DM_DEFAULT_ENCODING" />
</FindBugsFilter>

View File

@ -0,0 +1,90 @@
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Aggregates EMMA coverage files to produce html output."""
import fnmatch
import json
import optparse
import os
import sys
from pylib import cmd_helper
from pylib import constants
def _GetFilesWithExt(root_dir, ext):
"""Gets all files with a given extension.
Args:
root_dir: Directory in which to search for files.
ext: Extension to look for (including dot)
Returns:
A list of absolute paths to files that match.
"""
files = []
for root, _, filenames in os.walk(root_dir):
basenames = fnmatch.filter(filenames, '*.' + ext)
files.extend([os.path.join(root, basename)
for basename in basenames])
return files
def main():
  """Aggregates EMMA .ec/.em files into a single HTML coverage report.

  Returns:
    The exit code of the `emma report` java invocation.
  """
  option_parser = optparse.OptionParser()
  option_parser.add_option('--output', help='HTML output filename.')
  option_parser.add_option('--coverage-dir', default=None,
                           help=('Root of the directory in which to search for '
                                 'coverage data (.ec) files.'))
  option_parser.add_option('--metadata-dir', default=None,
                           help=('Root of the directory in which to search for '
                                 'coverage metadata (.em) files.'))
  option_parser.add_option('--cleanup', action='store_true',
                           help=('If set, removes coverage files generated at '
                                 'runtime.'))
  options, _ = option_parser.parse_args()
  if not (options.coverage_dir and options.metadata_dir and options.output):
    option_parser.error('One or more mandatory options are missing.')
  coverage_files = _GetFilesWithExt(options.coverage_dir, 'ec')
  metadata_files = _GetFilesWithExt(options.metadata_dir, 'em')
  print 'Found coverage files: %s' % str(coverage_files)
  print 'Found metadata files: %s' % str(metadata_files)
  # Each .em file has a sibling <name>_sources.txt containing a JSON list of
  # source paths relative to the source root.
  sources = []
  for f in metadata_files:
    sources_file = os.path.splitext(f)[0] + '_sources.txt'
    with open(sources_file, 'r') as sf:
      sources.extend(json.load(sf))
  sources = [os.path.join(constants.DIR_SOURCE_ROOT, s) for s in sources]
  print 'Sources: %s' % sources
  # emma takes each input file as a separate '-in <path>' pair.
  input_args = []
  for f in coverage_files + metadata_files:
    input_args.append('-in')
    input_args.append(f)
  output_args = ['-Dreport.html.out.file', options.output]
  source_args = ['-sp', ','.join(sources)]
  exit_code = cmd_helper.RunCmd(
      ['java', '-cp',
       os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'lib', 'emma.jar'),
       'emma', 'report', '-r', 'html']
      + input_args + output_args + source_args)
  # Only runtime-generated .ec files are removed; metadata is kept.
  if options.cleanup:
    for f in coverage_files:
      os.remove(f)
  return exit_code
if __name__ == '__main__':
sys.exit(main())

49
build/android/gn/zip.py Executable file
View File

@ -0,0 +1,49 @@
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Archives a set of files.
"""
import ast
import optparse
import os
import sys
import zipfile
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
from util import build_utils
def DoZip(inputs, output, base_dir):
  """Creates a zip archive at |output| containing |inputs|.

  Each entry in the archive is named by the input file's path relative to
  |base_dir|.
  """
  with zipfile.ZipFile(output, 'w') as archive:
    for input_path in inputs:
      arcname = os.path.relpath(input_path, base_dir)
      archive.write(input_path, arcname)
def main():
  """Archives the files listed in --inputs into --output.

  --inputs is a Python-literal list (parsed with ast.literal_eval); entry
  names are made relative to --base-dir.
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--inputs', help='List of files to archive.')
  parser.add_option('--output', help='Path to output archive.')
  parser.add_option('--base-dir',
                    help='If provided, the paths in the archive will be '
                    'relative to this directory', default='.')
  options, _ = parser.parse_args()
  # literal_eval (not eval) safely parses the GN-generated list literal.
  inputs = ast.literal_eval(options.inputs)
  output = options.output
  base_dir = options.base_dir
  DoZip(inputs, output, base_dir)
  if options.depfile:
    build_utils.WriteDepfile(
      options.depfile,
      build_utils.GetPythonDependencies())
if __name__ == '__main__':
sys.exit(main())

54
build/android/gyp/aidl.py Executable file
View File

@ -0,0 +1,54 @@
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Invokes Android's aidl
"""
import optparse
import os
import sys
from util import build_utils
def main(argv):
  """Runs Android's aidl over the given .aidl files and zips the output.

  Positional arguments are the .aidl files; generated .java sources are
  collected into the --srcjar archive.
  """
  option_parser = optparse.OptionParser()
  build_utils.AddDepfileOption(option_parser)
  option_parser.add_option('--aidl-path', help='Path to the aidl binary.')
  option_parser.add_option('--imports', help='Files to import.')
  option_parser.add_option('--includes',
                           help='Directories to add as import search paths.')
  option_parser.add_option('--srcjar', help='Path for srcjar output.')
  options, args = option_parser.parse_args(argv[1:])
  with build_utils.TempDir() as temp_dir:
    for f in args:
      # aidl writes one .java per input, named after the interface class.
      classname = os.path.splitext(os.path.basename(f))[0]
      output = os.path.join(temp_dir, classname + '.java')
      aidl_cmd = [options.aidl_path]
      # -p: preprocessed files to import; -I: import search directories.
      aidl_cmd += [
        '-p' + s for s in build_utils.ParseGypList(options.imports)
      ]
      if options.includes is not None:
        aidl_cmd += [
          '-I' + s for s in build_utils.ParseGypList(options.includes)
        ]
      aidl_cmd += [
        f,
        output
      ]
      build_utils.CheckOutput(aidl_cmd)
    build_utils.ZipDir(options.srcjar, temp_dir)
  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        build_utils.GetPythonDependencies())
if __name__ == '__main__':
sys.exit(main(sys.argv))

65
build/android/gyp/ant.py Executable file
View File

@ -0,0 +1,65 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""An Ant wrapper that suppresses useless Ant output.
Ant build scripts output "BUILD SUCCESSFUL" and build timing at the end of
every build. In the Android build, this just adds a lot of useless noise to the
build output. This script forwards its arguments to ant, and prints Ant's
output up until the BUILD SUCCESSFUL line.
Also, when a command fails, this script will re-run that ant command with the
'-verbose' argument so that the failure is easier to debug.
"""
import optparse
import sys
import traceback
from util import build_utils
def main(argv):
  """Forwards its arguments to ant, truncating output at BUILD SUCCESSFUL.

  On failure, re-runs the same ant command with '-verbose' so the cause is
  visible, and exits non-zero only if the verbose run also fails.
  """
  option_parser = optparse.OptionParser()
  build_utils.AddDepfileOption(option_parser)
  options, args = option_parser.parse_args(argv[1:])
  try:
    stdout = build_utils.CheckOutput(['ant'] + args)
  except build_utils.CalledProcessError:
    # It is very difficult to diagnose ant failures without the '-verbose'
    # argument. So, when an ant command fails, re-run it with '-verbose' so that
    # the cause of the failure is easier to identify.
    verbose_args = ['-verbose'] + [a for a in args if a != '-quiet']
    try:
      stdout = build_utils.CheckOutput(['ant'] + verbose_args)
    except build_utils.CalledProcessError:
      traceback.print_exc()
      sys.exit(1)
    # If this did sys.exit(1), building again would succeed (which would be
    # awkward). Instead, just print a big warning.
    build_utils.PrintBigWarning(
        'This is unexpected. `ant ' + ' '.join(args) + '` failed.' +
        'But, running `ant ' + ' '.join(verbose_args) + '` passed.')
  # Echo ant's output up to (but excluding) the BUILD SUCCESSFUL banner;
  # everything after it is just timing noise.
  stdout = stdout.strip().split('\n')
  for line in stdout:
    if line.strip() == 'BUILD SUCCESSFUL':
      break
    print line
  if options.depfile:
    assert '-buildfile' in args
    ant_buildfile = args[args.index('-buildfile') + 1]
    build_utils.WriteDepfile(
        options.depfile,
        [ant_buildfile] + build_utils.GetPythonDependencies())
if __name__ == '__main__':
sys.exit(main(sys.argv))

118
build/android/gyp/apk_install.py Executable file
View File

@ -0,0 +1,118 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Installs an APK.
"""
import optparse
import os
import re
import sys
from util import build_device
from util import build_utils
from util import md5_check
BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILD_ANDROID_DIR)
from pylib import constants
from pylib.utils import apk_helper
def GetNewMetadata(device, apk_package):
  """Gets the metadata on the device for the apk_package apk.

  Args:
    device: Device object whose RunShellCommand returns the lines of
        'ls -l /data/app/'.
    apk_package: The package name to look for.

  Returns:
    The first matching 'ls -l' line, or None if the package is not listed.
  """
  output = device.RunShellCommand('ls -l /data/app/')
  # Matches lines like:
  # -rw-r--r-- system system 7376582 2013-04-19 16:34 \
  # org.chromium.chrome.shell.apk
  # -rw-r--r-- system system 7376582 2013-04-19 16:34 \
  # org.chromium.chrome.shell-1.apk
  # re.escape keeps the dots in the package name (and in '.apk') from
  # matching arbitrary characters, e.g. 'comXother' must not match
  # 'com.other'.
  pattern = re.compile(r'.*%s(-[0-9]*)?(\.apk)?$' % re.escape(apk_package))
  matches = [line for line in output if pattern.match(line)]
  return matches[0] if matches else None
def HasInstallMetadataChanged(device, apk_package, metadata_path):
  """Checks if the metadata on the device for apk_package has changed.

  Returns True when there is no saved record at |metadata_path|, or when the
  saved record differs from what the device currently reports.
  """
  # Without a saved record we cannot prove the device is current.
  if not os.path.exists(metadata_path):
    return True
  with open(metadata_path, 'r') as recorded:
    saved_metadata = recorded.read()
  return saved_metadata != device.GetInstallMetadata(apk_package)
def RecordInstallMetadata(device, apk_package, metadata_path):
  """Records the metadata from the device for apk_package.

  Raises:
    Exception: if the package is not present on the device, i.e. the
        install did not actually take effect.
  """
  metadata = GetNewMetadata(device, apk_package)
  if not metadata:
    raise Exception('APK install failed unexpectedly.')
  with open(metadata_path, 'w') as record:
    record.write(metadata)
def main():
  """Installs an APK on a device, skipping the install when nothing changed.

  Uses both an md5 record of the APK and the device's own install metadata
  to decide whether the install can be skipped.
  """
  parser = optparse.OptionParser()
  parser.add_option('--apk-path',
      help='Path to .apk to install.')
  parser.add_option('--split-apk-path',
      help='Path to .apk splits (can specify multiple times, causes '
      '--install-multiple to be used.',
      action='append')
  parser.add_option('--android-sdk-tools',
      help='Path to the Android SDK build tools folder. ' +
           'Required when using --split-apk-path.')
  parser.add_option('--install-record',
      help='Path to install record (touched only when APK is installed).')
  parser.add_option('--build-device-configuration',
      help='Path to build device configuration.')
  parser.add_option('--stamp',
      help='Path to touch on success.')
  parser.add_option('--configuration-name',
      help='The build CONFIGURATION_NAME')
  options, _ = parser.parse_args()
  device = build_device.GetBuildDeviceFromPath(
      options.build_device_configuration)
  # No configured device: silently succeed so the build can proceed.
  if not device:
    return
  constants.SetBuildType(options.configuration_name)
  serial_number = device.GetSerialNumber()
  apk_package = apk_helper.GetPackageName(options.apk_path)
  metadata_path = '%s.%s.device.time.stamp' % (options.apk_path, serial_number)
  # If the APK on the device does not match the one that was last installed by
  # the build, then the APK has to be installed (regardless of the md5 record).
  force_install = HasInstallMetadataChanged(device, apk_package, metadata_path)
  def Install():
    # Split APKs require install-multiple; otherwise a plain reinstall.
    if options.split_apk_path:
      device.InstallSplitApk(options.apk_path, options.split_apk_path)
    else:
      device.Install(options.apk_path, reinstall=True)
    RecordInstallMetadata(device, apk_package, metadata_path)
    build_utils.Touch(options.install_record)
  record_path = '%s.%s.md5.stamp' % (options.apk_path, serial_number)
  md5_check.CallAndRecordIfStale(
      Install,
      record_path=record_path,
      input_paths=[options.apk_path],
      force=force_install)
  if options.stamp:
    build_utils.Touch(options.stamp)
sys.exit(main())

View File

@ -0,0 +1,147 @@
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates the obfuscated jar and test jar for an apk.
If proguard is not enabled or 'Release' is not in the configuration name,
obfuscation will be a no-op.
"""
import optparse
import os
import sys
from util import build_utils
from util import proguard_util
def ParseArgs(argv):
  """Parses and validates command line options for apk obfuscation.

  Returns:
    (options, args) from optparse; args is always empty on success.

  Raises:
    SystemExit: via parser.error when positional args are given or a
        required option is missing.
  """
  parser = optparse.OptionParser()
  parser.add_option('--android-sdk', help='path to the Android SDK folder')
  parser.add_option('--android-sdk-tools',
                    help='path to the Android SDK build tools folder')
  parser.add_option('--android-sdk-jar',
                    help='path to Android SDK\'s android.jar')
  parser.add_option('--proguard-jar-path',
                    help='Path to proguard.jar in the sdk')
  parser.add_option('--input-jars-paths',
                    help='Path to jars to include in obfuscated jar')
  parser.add_option('--proguard-configs',
                    help='Paths to proguard config files')
  parser.add_option('--configuration-name',
                    help='Gyp configuration name (i.e. Debug, Release)')
  parser.add_option('--proguard-enabled', action='store_true',
                    help='Set if proguard is enabled for this target.')
  parser.add_option('--obfuscated-jar-path',
                    help='Output path for obfuscated jar.')
  parser.add_option('--testapp', action='store_true',
                    help='Set this if building an instrumentation test apk')
  parser.add_option('--tested-apk-obfuscated-jar-path',
                    help='Path to obfusctated jar of the tested apk')
  parser.add_option('--test-jar-path',
                    help='Output path for jar containing all the test apk\'s '
                    'code.')
  parser.add_option('--stamp', help='File to touch on success')
  (options, args) = parser.parse_args(argv)
  if args:
    parser.error('No positional arguments should be given. ' + str(args))
  # Check that required options have been provided.
  required_options = (
      'android_sdk',
      'android_sdk_tools',
      'android_sdk_jar',
      'proguard_jar_path',
      'input_jars_paths',
      'configuration_name',
      'obfuscated_jar_path',
      )
  # The test jar output is only mandatory when building a test apk.
  if options.testapp:
    required_options += (
        'test_jar_path',
        )
  build_utils.CheckOptions(options, parser, required=required_options)
  return options, args
def DoProguard(options):
  """Runs proguard to produce the obfuscated jar and its .info sidecar.

  For instrumentation test apks, jars and configs already covered by the
  tested apk's obfuscation are excluded/reused so the two stay consistent.
  """
  proguard = proguard_util.ProguardCmdBuilder(options.proguard_jar_path)
  proguard.outjar(options.obfuscated_jar_path)
  library_classpath = [options.android_sdk_jar]
  input_jars = build_utils.ParseGypList(options.input_jars_paths)
  exclude_paths = []
  configs = build_utils.ParseGypList(options.proguard_configs)
  if options.tested_apk_obfuscated_jar_path:
    # configs should only contain the process_resources.py generated config.
    assert len(configs) == 1, (
        'test apks should not have custom proguard configs: ' + str(configs))
    # Reuse the tested apk's recorded inputs/configs so the test apk is
    # obfuscated consistently with it.
    tested_jar_info = build_utils.ReadJson(
        options.tested_apk_obfuscated_jar_path + '.info')
    exclude_paths = tested_jar_info['inputs']
    configs = tested_jar_info['configs']
    proguard.is_test(True)
    proguard.mapping(options.tested_apk_obfuscated_jar_path + '.mapping')
    library_classpath.append(options.tested_apk_obfuscated_jar_path)
  proguard.libraryjars(library_classpath)
  # Jars already obfuscated into the tested apk must not be processed again.
  proguard_injars = [p for p in input_jars if p not in exclude_paths]
  proguard.injars(proguard_injars)
  proguard.configs(configs)
  proguard.CheckOutput()
  # Record what went in, so a dependent test apk can exclude/reuse it.
  this_info = {
    'inputs': proguard_injars,
    'configs': configs
  }
  build_utils.WriteJson(
      this_info, options.obfuscated_jar_path + '.info')
def main(argv):
  """Generates the obfuscated jar (and test jar) for an apk target.

  Proguard only actually runs for proguard-enabled Release builds;
  otherwise empty placeholder outputs are touched so the build graph is
  satisfied.
  """
  options, _ = ParseArgs(argv)
  input_jars = build_utils.ParseGypList(options.input_jars_paths)
  if options.testapp:
    # Strip generated classes that would collide with the tested apk's.
    dependency_class_filters = [
        '*R.class', '*R$*.class', '*Manifest.class', '*BuildConfig.class']
    build_utils.MergeZips(
        options.test_jar_path, input_jars, dependency_class_filters)
  if options.configuration_name == 'Release' and options.proguard_enabled:
    DoProguard(options)
  else:
    # No-op path: replace any stale proguard outputs with fresh empty files.
    output_files = [
        options.obfuscated_jar_path,
        options.obfuscated_jar_path + '.info',
        options.obfuscated_jar_path + '.dump',
        options.obfuscated_jar_path + '.seeds',
        options.obfuscated_jar_path + '.usage',
        options.obfuscated_jar_path + '.mapping']
    for f in output_files:
      if os.path.exists(f):
        os.remove(f)
      build_utils.Touch(f)
  if options.stamp:
    build_utils.Touch(options.stamp)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

77
build/android/gyp/copy_ex.py Executable file
View File

@ -0,0 +1,77 @@
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Copies files to a directory."""
import optparse
import os
import shutil
import sys
from util import build_utils
def _get_all_files(base):
"""Returns a list of all the files in |base|. Each entry is relative to the
last path entry of |base|."""
result = []
dirname = os.path.dirname(base)
for root, _, files in os.walk(base):
result.extend([os.path.join(root[len(dirname):], f) for f in files])
return result
def main(args):
  """Copies the given files (and directories, with --clear) into --dest."""
  args = build_utils.ExpandFileArgs(args)
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--dest', help='Directory to copy files to.')
  parser.add_option('--files', action='append',
                    help='List of files to copy.')
  parser.add_option('--clear', action='store_true',
                    help='If set, the destination directory will be deleted '
                    'before copying files to it. This is highly recommended to '
                    'ensure that no stale files are left in the directory.')
  parser.add_option('--stamp', help='Path to touch on success.')
  options, _ = parser.parse_args(args)
  if options.clear:
    build_utils.DeleteDirectory(options.dest)
    build_utils.MakeDirectory(options.dest)
  # --files may be repeated; each value is itself a gyp list.
  files = []
  for file_arg in options.files:
    files += build_utils.ParseGypList(file_arg)
  deps = []
  for f in files:
    if os.path.isdir(f):
      # Directory copies without --clear could leave stale files behind in
      # the destination, so refuse them outright.
      if not options.clear:
        print ('To avoid stale files you must use --clear when copying '
               'directories')
        sys.exit(-1)
      shutil.copytree(f, os.path.join(options.dest, os.path.basename(f)))
      deps.extend(_get_all_files(f))
    else:
      shutil.copy(f, options.dest)
      deps.append(f)
  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        deps + build_utils.GetPythonDependencies())
  if options.stamp:
    build_utils.Touch(options.stamp)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -0,0 +1,114 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Creates symlinks to native libraries for an APK.
The native libraries should have previously been pushed to the device (in
options.target_dir). This script then creates links in an apk's lib/ folder to
those native libraries.
"""
import optparse
import os
import sys
from util import build_device
from util import build_utils
BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILD_ANDROID_DIR)
from pylib import constants
from pylib.utils import apk_helper
def RunShellCommand(device, cmd):
  """Runs |cmd| on |device|, treating any output as a failure.

  Raises:
    Exception: if the command produced output (the commands used here are
        expected to be silent on success).
  """
  output = device.RunShellCommand(cmd)
  if not output:
    return
  raise Exception(
      'Unexpected output running command: ' + cmd + '\n' +
      '\n'.join(output))
def CreateSymlinkScript(options):
  """Writes a shell script that symlinks each library into the apk's lib dir.

  The script expects $APK_LIBRARIES_DIR and $STRIPPED_LIBRARIES_DIR to be
  set by the caller (see TriggerSymlinkScript); it is written to
  options.script_host_path for later pushing to the device.
  """
  libraries = build_utils.ParseGypList(options.libraries)
  # Remove any stale link first (ignoring errors), then create the new one.
  link_cmd = (
      'rm $APK_LIBRARIES_DIR/%(lib_basename)s > /dev/null 2>&1 \n'
      'ln -s $STRIPPED_LIBRARIES_DIR/%(lib_basename)s '
      '$APK_LIBRARIES_DIR/%(lib_basename)s \n'
      )
  script = '#!/bin/sh \n'
  for lib in libraries:
    script += link_cmd % { 'lib_basename': lib }
  with open(options.script_host_path, 'w') as scriptfile:
    scriptfile.write(script)
def TriggerSymlinkScript(options):
  """Pushes the generated symlink script to the device and executes it.

  Exports the directory variables the script body references, then sources
  the script on the device.
  """
  device = build_device.GetBuildDeviceFromPath(
      options.build_device_configuration)
  # No configured device: nothing to do.
  if not device:
    return
  apk_package = apk_helper.GetPackageName(options.apk)
  apk_libraries_dir = '/data/data/%s/lib' % apk_package
  device_dir = os.path.dirname(options.script_device_path)
  mkdir_cmd = ('if [ ! -e %(dir)s ]; then mkdir -p %(dir)s; fi ' %
      { 'dir': device_dir })
  RunShellCommand(device, mkdir_cmd)
  device.PushChangedFiles([(options.script_host_path,
                            options.script_device_path)])
  # Define the variables the script expects, then source ('.') it so the
  # definitions are visible to the script body.
  trigger_cmd = (
      'APK_LIBRARIES_DIR=%(apk_libraries_dir)s; '
      'STRIPPED_LIBRARIES_DIR=%(target_dir)s; '
      '. %(script_device_path)s'
  ) % {
      'apk_libraries_dir': apk_libraries_dir,
      'target_dir': options.target_dir,
      'script_device_path': options.script_device_path
  }
  RunShellCommand(device, trigger_cmd)
def main(args):
  """Creates and runs a device script symlinking native libraries for an apk."""
  args = build_utils.ExpandFileArgs(args)
  parser = optparse.OptionParser()
  parser.add_option('--apk', help='Path to the apk.')
  parser.add_option('--script-host-path',
      help='Path on the host for the symlink script.')
  parser.add_option('--script-device-path',
      help='Path on the device to push the created symlink script.')
  parser.add_option('--libraries',
      help='List of native libraries.')
  parser.add_option('--target-dir',
      help='Device directory that contains the target libraries for symlinks.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--build-device-configuration',
      help='Path to build device configuration.')
  parser.add_option('--configuration-name',
      help='The build CONFIGURATION_NAME')
  options, _ = parser.parse_args(args)
  required_options = ['apk', 'libraries', 'script_host_path',
      'script_device_path', 'target_dir', 'configuration_name']
  build_utils.CheckOptions(options, parser, required=required_options)
  constants.SetBuildType(options.configuration_name)
  CreateSymlinkScript(options)
  TriggerSymlinkScript(options)
  if options.stamp:
    build_utils.Touch(options.stamp)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -0,0 +1,36 @@
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Merges a list of jars into a single jar."""
import optparse
import sys
from util import build_utils
def main(args):
  """Merges the jars from all --inputs gyp lists into a single --output jar."""
  args = build_utils.ExpandFileArgs(args)
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--output', help='Path to output jar.')
  parser.add_option('--inputs', action='append', help='List of jar inputs.')
  options, _ = parser.parse_args(args)
  build_utils.CheckOptions(options, parser, ['output', 'inputs'])
  # --inputs may be repeated; each value is itself a gyp list of jars.
  input_jars = []
  for inputs_arg in options.inputs:
    input_jars.extend(build_utils.ParseGypList(inputs_arg))
  build_utils.MergeZips(options.output, input_jars)
  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        input_jars + build_utils.GetPythonDependencies())
sys.exit(main(sys.argv[1:]))

View File

@ -0,0 +1,56 @@
#!/usr/bin/env python
#
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Create a JAR incorporating all the components required to build a Flutter application"""
import optparse
import os
import sys
import zipfile
from util import build_utils
def main(args):
  """Creates a JAR bundling the Flutter shell classes, native libs and assets.

  The output zip contains the .class entries from --dist_jar, each
  --native_lib under lib/<android_abi>/, and every file directly inside
  --asset_dir under assets/.
  """
  args = build_utils.ExpandFileArgs(args)
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--output', help='Path to output jar.')
  parser.add_option('--dist_jar', help='Flutter shell Java code jar.')
  parser.add_option('--native_lib', action='append', help='Native code library.')
  parser.add_option('--android_abi', help='Native code ABI.')
  parser.add_option('--asset_dir', help='Path to assets.')
  options, _ = parser.parse_args(args)
  build_utils.CheckOptions(options, parser, [
    'output', 'dist_jar', 'native_lib', 'android_abi', 'asset_dir'
  ])
  input_deps = []
  with zipfile.ZipFile(options.output, 'w', zipfile.ZIP_DEFLATED) as out_zip:
    input_deps.append(options.dist_jar)
    # Only the .class entries are copied; other jar contents are dropped.
    with zipfile.ZipFile(options.dist_jar, 'r') as dist_zip:
      for dist_file in dist_zip.infolist():
        if dist_file.filename.endswith('.class'):
          out_zip.writestr(dist_file.filename, dist_zip.read(dist_file.filename))
    for native_lib in options.native_lib:
      input_deps.append(native_lib)
      out_zip.write(native_lib,
                    'lib/%s/%s' % (options.android_abi, os.path.basename(native_lib)))
    for asset_file in os.listdir(options.asset_dir):
      asset_path = os.path.join(options.asset_dir, asset_file)
      # Record the full path (os.listdir only yields basenames) so the
      # depfile entry actually points at the asset on disk.
      input_deps.append(asset_path)
      out_zip.write(asset_path, 'assets/%s' % asset_file)
  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        input_deps + build_utils.GetPythonDependencies())
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -0,0 +1,77 @@
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Creates a simple script to run a java "binary".
This creates a script that sets up the java command line for running a java
jar. This includes correctly setting the classpath and the main class.
"""
import optparse
import os
import sys
from util import build_utils
# The java command must be executed in the current directory because there may
# be user-supplied paths in the args. The script receives the classpath relative
# to the directory that the script is written in and then, when run, must
# recalculate the paths relative to the current directory.
script_template = """\
#!/usr/bin/env python
#
# This file was generated by build/android/gyp/create_java_binary_script.py
import os
import sys
self_dir = os.path.dirname(__file__)
classpath = [{classpath}]
if os.getcwd() != self_dir:
offset = os.path.relpath(self_dir, os.getcwd())
classpath = [os.path.join(offset, p) for p in classpath]
java_args = [
"java",
"-classpath", ":".join(classpath),
"-enableassertions",
\"{main_class}\"] + sys.argv[1:]
os.execvp("java", java_args)
"""
def main(argv):
  """Writes an executable wrapper script that runs the given jar's main class.

  The classpath is stored relative to the script's own directory so the
  script works from any current working directory.
  """
  argv = build_utils.ExpandFileArgs(argv)
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--output', help='Output path for executable script.')
  parser.add_option('--jar-path', help='Path to the main jar.')
  parser.add_option('--main-class',
      help='Name of the java class with the "main" entry point.')
  parser.add_option('--classpath', action='append',
      help='Classpath for running the jar.')
  options, _ = parser.parse_args(argv)
  classpath = [options.jar_path]
  for cp_arg in options.classpath:
    classpath += build_utils.ParseGypList(cp_arg)
  # Rebase the classpath onto the script's directory (see comment on
  # script_template above).
  run_dir = os.path.dirname(options.output)
  classpath = [os.path.relpath(p, run_dir) for p in classpath]
  with open(options.output, 'w') as script:
    script.write(script_template.format(
        classpath=('"%s"' % '", "'.join(classpath)),
        main_class=options.main_class))
  # 0o750 = owner rwx, group rx. The 0o prefix (instead of the legacy bare
  # 0750) keeps this file parseable by both Python 2.6+ and Python 3.
  os.chmod(options.output, 0o750)
  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        build_utils.GetPythonDependencies())
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -0,0 +1,35 @@
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Create placeholder files.
"""
import optparse
import os
import sys
from util import build_utils
def main():
  """Creates empty placeholder files (named by the positional args) in
  --dest-lib-dir."""
  parser = optparse.OptionParser()
  parser.add_option(
      '--dest-lib-dir',
      help='Destination directory to have placeholder files.')
  parser.add_option(
      '--stamp',
      help='Path to touch on success')
  options, args = parser.parse_args()
  # Each positional argument is the name of one placeholder to create.
  for name in args:
    target_path = os.path.join(options.dest_lib_dir, name)
    build_utils.Touch(target_path)
  if options.stamp:
    build_utils.Touch(options.stamp)
if __name__ == '__main__':
sys.exit(main())

View File

@ -0,0 +1,60 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Combines stripped libraries and incomplete APK into single standalone APK.
"""
import optparse
import os
import shutil
import sys
import tempfile
from util import build_utils
from util import md5_check
def CreateStandaloneApk(options):
  """Produces the standalone APK by zipping the lib/ tree into the input APK.

  The work is skipped entirely when neither the input APK nor the libraries
  directory changed since the last run (tracked via an md5 record file).
  """
  def DoZip():
    # Work on a temporary copy so the input APK is never modified, and the
    # output only appears once zipping fully succeeded.
    with tempfile.NamedTemporaryFile(suffix='.zip') as intermediate_file:
      intermediate_path = intermediate_file.name
      shutil.copy(options.input_apk_path, intermediate_path)
      apk_path_abs = os.path.abspath(intermediate_path)
      # Run zip from libraries_top_dir so the archive entries are the
      # relative 'lib/...' paths the APK expects.
      build_utils.CheckOutput(
          ['zip', '-r', '-1', apk_path_abs, 'lib'],
          cwd=options.libraries_top_dir)
      shutil.copy(intermediate_path, options.output_apk_path)
  input_paths = [options.input_apk_path, options.libraries_top_dir]
  record_path = '%s.standalone.stamp' % options.input_apk_path
  md5_check.CallAndRecordIfStale(
      DoZip,
      record_path=record_path,
      input_paths=input_paths)
def main():
  """Validates the required options and builds the standalone APK."""
  parser = optparse.OptionParser()
  parser.add_option(
      '--libraries-top-dir',
      help='Top directory that contains libraries (i.e. library paths are '
           'like libraries_top_dir/lib/android_app_abi/foo.so).')
  parser.add_option('--input-apk-path', help='Path to incomplete APK.')
  parser.add_option('--output-apk-path', help='Path for standalone APK.')
  parser.add_option('--stamp', help='Path to touch on success.')
  options, _ = parser.parse_args()

  build_utils.CheckOptions(
      options, parser,
      required=['libraries_top_dir', 'input_apk_path', 'output_apk_path'])

  CreateStandaloneApk(options)

  if options.stamp:
    build_utils.Touch(options.stamp)


if __name__ == '__main__':
  sys.exit(main())

View File

@ -0,0 +1,96 @@
#!/usr/bin/env python
#
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Creates a script to run an android test using build/android/test_runner.py.
"""
import argparse
import os
import sys
from util import build_utils
SCRIPT_TEMPLATE = """\
#!/usr/bin/env python
#
# This file was generated by build/android/gyp/create_test_runner_script.py
import logging
import os
import sys
def main():
script_directory = os.path.dirname(__file__)
def ResolvePath(path):
\"\"\"Returns an absolute filepath given a path relative to this script.
\"\"\"
return os.path.abspath(os.path.join(script_directory, path))
test_runner_path = ResolvePath('{test_runner_path}')
test_runner_args = {test_runner_args}
test_runner_path_args = {test_runner_path_args}
for arg, path in test_runner_path_args.iteritems():
test_runner_args.extend([arg, ResolvePath(path)])
test_runner_cmd = ' '.join(
[test_runner_path] + test_runner_args + sys.argv[1:])
logging.critical(test_runner_cmd)
os.system(test_runner_cmd)
if __name__ == '__main__':
sys.exit(main())
"""
def main():
  """Writes an executable wrapper script that invokes test_runner.py.

  Path-valued arguments are rewritten to be relative to the output script's
  directory so the generated wrapper keeps working when the checkout moves.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--script-output-path',
                      help='Output path for executable script.')
  parser.add_argument('--depfile',
                      help='Path to the depfile. This must be specified as '
                           "the action's first output.")
  # We need to intercept any test runner path arguments and make all
  # of the paths relative to the output script directory.
  group = parser.add_argument_group('Test runner path arguments.')
  group.add_argument('--output-directory')
  group.add_argument('--isolate-file-path')
  group.add_argument('--support-apk')
  args, test_runner_args = parser.parse_known_args()

  def RelativizePathToScript(path):
    """Returns the path relative to the output script directory."""
    return os.path.relpath(path, os.path.dirname(args.script_output_path))

  test_runner_path = os.path.join(
      os.path.dirname(__file__), os.path.pardir, 'test_runner.py')
  test_runner_path = RelativizePathToScript(test_runner_path)

  test_runner_path_args = {}
  if args.output_directory:
    test_runner_path_args['--output-directory'] = RelativizePathToScript(
        args.output_directory)
  if args.isolate_file_path:
    test_runner_path_args['--isolate-file-path'] = RelativizePathToScript(
        args.isolate_file_path)
  if args.support_apk:
    test_runner_path_args['--support-apk'] = RelativizePathToScript(
        args.support_apk)

  with open(args.script_output_path, 'w') as script:
    script.write(SCRIPT_TEMPLATE.format(
        test_runner_path=str(test_runner_path),
        test_runner_args=str(test_runner_args),
        test_runner_path_args=str(test_runner_path_args)))

  # rwxr-x---: the wrapper must be executable. 0o750 (same value as the
  # old 0750 literal) is accepted by Python 2.6+ and required by Python 3.
  os.chmod(args.script_output_path, 0o750)

  if args.depfile:
    build_utils.WriteDepfile(
        args.depfile,
        build_utils.GetPythonDependencies())


if __name__ == '__main__':
  sys.exit(main())

89
build/android/gyp/dex.py Executable file
View File

@ -0,0 +1,89 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import os
import sys
from util import build_utils
from util import md5_check
def DoDex(options, paths):
  """Runs dx over |paths| to produce options.dex_path (skipped when fresh).

  Also writes <dex_path>.inputs listing the inputs relative to the output
  directory, for consumption by later build steps.
  """
  dx_binary = os.path.join(options.android_sdk_tools, 'dx')
  # See http://crbug.com/272064 for context on --force-jumbo.
  dex_cmd = [dx_binary, '--dex', '--force-jumbo', '--output', options.dex_path]
  # Only strip the locals list when explicitly requested. Previously an
  # omitted --no-locals flag left options.no_locals as None, which also
  # compared unequal to '0' and silently enabled stripping.
  if options.no_locals and options.no_locals != '0':
    dex_cmd.append('--no-locals')

  dex_cmd += paths

  record_path = '%s.md5.stamp' % options.dex_path
  md5_check.CallAndRecordIfStale(
      lambda: build_utils.CheckOutput(dex_cmd, print_stderr=False),
      record_path=record_path,
      input_paths=paths,
      input_strings=dex_cmd,
      force=not os.path.exists(options.dex_path))
  build_utils.WriteJson(
      [os.path.relpath(p, options.output_directory) for p in paths],
      options.dex_path + '.inputs')
def main():
  """Parses dexing options, selects the input set and invokes DoDex."""
  # Arguments may reference response files; expand them before parsing.
  args = build_utils.ExpandFileArgs(sys.argv[1:])

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--android-sdk-tools',
                    help='Android sdk build tools directory.')
  parser.add_option('--output-directory',
                    default=os.getcwd(),
                    help='Path to the output build directory.')
  parser.add_option('--dex-path', help='Dex output path.')
  parser.add_option('--configuration-name',
                    help='The build CONFIGURATION_NAME.')
  parser.add_option('--proguard-enabled',
                    help='"true" if proguard is enabled.')
  parser.add_option('--proguard-enabled-input-path',
                    help=('Path to dex in Release mode when proguard '
                          'is enabled.'))
  parser.add_option('--no-locals',
                    help='Exclude locals list from the dex file.')
  parser.add_option('--inputs', help='A list of additional input paths.')
  parser.add_option('--excluded-paths',
                    help='A list of paths to exclude from the dex file.')

  options, paths = parser.parse_args(args)

  required_options = ('android_sdk_tools',)
  build_utils.CheckOptions(options, parser, required=required_options)

  # When proguard ran (Release builds only), dex its single output jar
  # instead of the individual input jars.
  if (options.proguard_enabled == 'true'
      and options.configuration_name == 'Release'):
    paths = [options.proguard_enabled_input_path]

  if options.inputs:
    paths += build_utils.ParseGypList(options.inputs)

  if options.excluded_paths:
    # Excluded paths are relative to the output directory.
    exclude_paths = build_utils.ParseGypList(options.excluded_paths)
    paths = [p for p in paths if not
             os.path.relpath(p, options.output_directory) in exclude_paths]

  DoDex(options, paths)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        paths + build_utils.GetPythonDependencies())


if __name__ == '__main__':
  sys.exit(main())

207
build/android/gyp/emma_instr.py Executable file
View File

@ -0,0 +1,207 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Instruments classes and jar files.
This script corresponds to the 'emma_instr' action in the java build process.
Depending on whether emma_instrument is set, the 'emma_instr' action will either
call one of the instrument commands, or the copy command.
Possible commands are:
- instrument_jar: Accepts a jar and instruments it using emma.jar.
- instrument_classes: Accepts a directory containing java classes and
instruments it using emma.jar.
- copy: Called when EMMA coverage is not enabled. This allows us to make
this a required step without necessarily instrumenting on every build.
Also removes any stale coverage files.
"""
import collections
import json
import os
import shutil
import sys
import tempfile
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
from pylib.utils import command_option_parser
from util import build_utils
def _AddCommonOptions(option_parser):
  """Adds the options shared by every emma_instr command."""
  for flag, help_text in (
      ('--input-path', 'Path to input file(s). Either the classes '
                       'directory, or the path to a jar.'),
      ('--output-path', 'Path to output final file(s) to. Either the '
                        'final classes directory, or the directory in '
                        'which to place the instrumented/copied jar.'),
      ('--stamp', 'Path to touch when done.'),
      ('--coverage-file', 'File to create with coverage metadata.'),
      ('--sources-file', 'File to create with the list of sources.')):
    option_parser.add_option(flag, help=help_text)
def _AddInstrumentOptions(option_parser):
  """Adds options related to instrumentation to |option_parser|."""
  _AddCommonOptions(option_parser)
  for flag, help_text in (
      ('--sources', 'Space separated list of sources.'),
      ('--src-root', 'Root of the src repository.'),
      ('--emma-jar', 'Path to emma.jar.')):
    option_parser.add_option(flag, help=help_text)
  option_parser.add_option(
      '--filter-string', default='',
      help=('Filter string consisting of a list of inclusion/exclusion '
            'patterns separated with whitespace and/or comma.'))
def _RunCopyCommand(_command, options, _, option_parser):
  """Copies the input to the output location without instrumenting.

  Also removes any stale coverage/sources metadata left over from a
  previously instrumented build.

  Args:
    _command: String naming the command that triggered this function (unused).
    options: optparse options dictionary.
    _: List of extra args from optparse (unused).
    option_parser: optparse.OptionParser object.

  Returns:
    An exit code.
  """
  if not all((options.input_path, options.output_path,
              options.coverage_file, options.sources_file)):
    option_parser.error('All arguments are required.')

  # Clear out metadata written by a previous instrumented build.
  output_dir = os.path.dirname(options.output_path)
  for stale_path in (os.path.join(output_dir, options.coverage_file),
                     os.path.join(output_dir, options.sources_file)):
    if os.path.exists(stale_path):
      os.remove(stale_path)

  if os.path.isdir(options.input_path):
    shutil.rmtree(options.output_path, ignore_errors=True)
    shutil.copytree(options.input_path, options.output_path)
  else:
    shutil.copy(options.input_path, options.output_path)

  if options.stamp:
    build_utils.Touch(options.stamp)
def _CreateSourcesFile(sources_string, sources_file, src_root):
  """Adds all normalized source directories to |sources_file|.

  Args:
    sources_string: String generated from gyp containing the list of sources.
    sources_file: File into which to write the JSON list of sources.
    src_root: Root which sources added to the file should be relative to.

  Returns:
    An exit code.
  """
  src_root = os.path.abspath(src_root)
  sources = build_utils.ParseGypList(sources_string)
  relative_sources = []
  for s in sources:
    abs_source = os.path.abspath(s)
    # A plain string-prefix test would wrongly accept sibling directories
    # (e.g. /foo/src2 for root /foo/src); require a path-component match.
    if abs_source != src_root and not abs_source.startswith(
        src_root + os.sep):
      print ('Error: found source directory not under repository root: %s %s'
             % (abs_source, src_root))
      return 1
    relative_sources.append(os.path.relpath(abs_source, src_root))

  with open(sources_file, 'w') as f:
    json.dump(relative_sources, f)
  # Explicit success code (the docstring promises an exit code).
  return 0
def _RunInstrumentCommand(command, options, _, option_parser):
  """Instruments the classes/jar files using EMMA.

  Args:
    command: 'instrument_jar' or 'instrument_classes'. This distinguishes
        whether we copy the output from the created lib/ directory, or classes/
        directory.
    options: optparse options dictionary.
    _: List of extra args from optparse (unused).
    option_parser: optparse.OptionParser object.

  Returns:
    An exit code.
  """
  if not (options.input_path and options.output_path and
          options.coverage_file and options.sources_file and options.sources and
          options.src_root and options.emma_jar):
    option_parser.error('All arguments are required.')

  coverage_file = os.path.join(os.path.dirname(options.output_path),
                               options.coverage_file)
  sources_file = os.path.join(os.path.dirname(options.output_path),
                              options.sources_file)
  # Remove a stale coverage file so emma writes a fresh one below.
  if os.path.exists(coverage_file):
    os.remove(coverage_file)
  temp_dir = tempfile.mkdtemp()
  try:
    # 'fullcopy' mode: emma writes instrumented copies into |temp_dir|
    # (under lib/ for jars, classes/ for class dirs) plus |coverage_file|.
    cmd = ['java', '-cp', options.emma_jar,
           'emma', 'instr',
           '-ip', options.input_path,
           '-ix', options.filter_string,
           '-d', temp_dir,
           '-out', coverage_file,
           '-m', 'fullcopy']
    build_utils.CheckOutput(cmd)

    if command == 'instrument_jar':
      for jar in os.listdir(os.path.join(temp_dir, 'lib')):
        shutil.copy(os.path.join(temp_dir, 'lib', jar),
                    options.output_path)
    else:  # 'instrument_classes'
      if os.path.isdir(options.output_path):
        shutil.rmtree(options.output_path, ignore_errors=True)
      shutil.copytree(os.path.join(temp_dir, 'classes'),
                      options.output_path)
  finally:
    # Always clean up the scratch directory, even if emma failed.
    shutil.rmtree(temp_dir)

  _CreateSourcesFile(options.sources, sources_file, options.src_root)

  if options.stamp:
    build_utils.Touch(options.stamp)

  return 0
# Pairs the option-registration function with the execution function for
# each command.
CommandFunctionTuple = collections.namedtuple(
    'CommandFunctionTuple', ['add_options_func', 'run_command_func'])

# Commands accepted on the command line; see the module docstring.
VALID_COMMANDS = {
    'copy': CommandFunctionTuple(_AddCommonOptions,
                                 _RunCopyCommand),
    'instrument_jar': CommandFunctionTuple(_AddInstrumentOptions,
                                           _RunInstrumentCommand),
    'instrument_classes': CommandFunctionTuple(_AddInstrumentOptions,
                                               _RunInstrumentCommand),
}
def main():
  """Dispatches to the requested emma_instr command."""
  argument_parser = command_option_parser.CommandOptionParser(
      commands_dict=VALID_COMMANDS)
  command_option_parser.ParseAndExecute(argument_parser)


if __name__ == '__main__':
  sys.exit(main())

132
build/android/gyp/finalize_apk.py Executable file
View File

@ -0,0 +1,132 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Signs and zipaligns APK.
"""
import optparse
import shutil
import sys
import tempfile
from util import build_utils
def RenameInflateAndAddPageAlignment(
    rezip_apk_jar_path, in_zip_file, out_zip_file):
  """Runs RezipApk 'renamealign' on |in_zip_file|, writing |out_zip_file|."""
  build_utils.CheckOutput([
      'java',
      '-classpath',
      rezip_apk_jar_path,
      'RezipApk',
      'renamealign',
      in_zip_file,
      out_zip_file,
  ])
def ReorderAndAlignApk(rezip_apk_jar_path, in_zip_file, out_zip_file):
  """Runs RezipApk 'reorder' on |in_zip_file|, writing |out_zip_file|."""
  build_utils.CheckOutput([
      'java',
      '-classpath',
      rezip_apk_jar_path,
      'RezipApk',
      'reorder',
      in_zip_file,
      out_zip_file,
  ])
def JarSigner(key_path, key_name, key_passwd, unsigned_path, signed_path):
  """Copies the APK to |signed_path| and signs it there with jarsigner."""
  shutil.copy(unsigned_path, signed_path)
  build_utils.CheckOutput([
      'jarsigner',
      '-sigalg', 'MD5withRSA',
      '-digestalg', 'SHA1',
      '-keystore', key_path,
      '-storepass', key_passwd,
      signed_path,
      key_name,
  ])
def AlignApk(zipalign_path, unaligned_path, final_path):
  """Runs zipalign to 4-byte-align |unaligned_path| into |final_path|."""
  build_utils.CheckOutput(
      [zipalign_path, '-f', '4', unaligned_path, final_path])
def main():
  """Parses signing options, then signs and zipaligns the APK."""
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--rezip-apk-jar-path',
                    help='Path to the RezipApk jar file.')
  parser.add_option('--zipalign-path', help='Path to the zipalign tool.')
  parser.add_option('--unsigned-apk-path', help='Path to input unsigned APK.')
  parser.add_option('--final-apk-path',
                    help='Path to output signed and aligned APK.')
  parser.add_option('--key-path', help='Path to keystore for signing.')
  parser.add_option('--key-passwd', help='Keystore password')
  parser.add_option('--key-name', help='Keystore name')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--load-library-from-zip', type='int',
      help='If non-zero, build the APK such that the library can be loaded ' +
           'directly from the zip file using the crazy linker. The library ' +
           'will be renamed, uncompressed and page aligned.')

  options, _ = parser.parse_args()

  FinalizeApk(options)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile, build_utils.GetPythonDependencies())

  if options.stamp:
    build_utils.Touch(options.stamp)
def FinalizeApk(options):
  """Signs the APK and writes the aligned result to options.final_apk_path.

  When options.load_library_from_zip is set, the native library is first
  renamed/uncompressed/page-aligned so it can be loaded directly from the
  APK, and the canonical entry order is restored after signing.
  """
  # The NamedTemporaryFile objects are kept open for the whole pipeline so
  # their names stay reserved; the tools overwrite the files in place.
  with tempfile.NamedTemporaryFile() as signed_apk_path_tmp, \
      tempfile.NamedTemporaryFile() as apk_to_sign_tmp:

    if options.load_library_from_zip:
      # We alter the name of the library so that the Android Package Manager
      # does not extract it into a separate file. This must be done before
      # signing, as the filename is part of the signed manifest. At the same
      # time we uncompress the library, which is necessary so that it can be
      # loaded directly from the APK.
      # Move the library to a page boundary by adding a page alignment file.
      apk_to_sign = apk_to_sign_tmp.name
      RenameInflateAndAddPageAlignment(
          options.rezip_apk_jar_path, options.unsigned_apk_path, apk_to_sign)
    else:
      apk_to_sign = options.unsigned_apk_path

    signed_apk_path = signed_apk_path_tmp.name
    JarSigner(options.key_path, options.key_name, options.key_passwd,
              apk_to_sign, signed_apk_path)

    if options.load_library_from_zip:
      # Reorder the contents of the APK. This re-establishes the canonical
      # order which means the library will be back at its page aligned location.
      # This step also aligns uncompressed items to 4 bytes.
      ReorderAndAlignApk(
          options.rezip_apk_jar_path, signed_apk_path, options.final_apk_path)
    else:
      # Align uncompressed items to 4 bytes
      AlignApk(options.zipalign_path, signed_apk_path, options.final_apk_path)


if __name__ == '__main__':
  sys.exit(main())

View File

@ -0,0 +1,52 @@
#!/usr/bin/env python
#
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Signs and zipaligns split APKs.
This script is required only by GYP (not GN).
"""
import optparse
import sys
import finalize_apk
from util import build_utils
def main():
  """Signs and zipaligns each density and language split APK."""
  parser = optparse.OptionParser()
  parser.add_option('--zipalign-path', help='Path to the zipalign tool.')
  parser.add_option('--resource-packaged-apk-path',
                    help='Base path to input .ap_s.')
  parser.add_option('--base-output-path',
                    help='Path to output .apk, minus extension.')
  parser.add_option('--key-path', help='Path to keystore for signing.')
  parser.add_option('--key-passwd', help='Keystore password')
  parser.add_option('--key-name', help='Keystore name')
  parser.add_option('--densities',
                    help='Comma separated list of densities finalize.')
  parser.add_option('--languages',
                    help='GYP list of language splits to finalize.')

  options, _ = parser.parse_args()
  options.load_library_from_zip = 0

  def FinalizeSplit(split_kind, split_name):
    # finalize_apk.FinalizeApk reads its input/output paths off |options|.
    options.unsigned_apk_path = '%s_%s' % (
        options.resource_packaged_apk_path, split_name)
    options.final_apk_path = '%s-%s-%s.apk' % (
        options.base_output_path, split_kind, split_name)
    finalize_apk.FinalizeApk(options)

  if options.densities:
    for density in options.densities.split(','):
      FinalizeSplit('density', density)
  if options.languages:
    for lang in build_utils.ParseGypList(options.languages):
      FinalizeSplit('lang', lang)


if __name__ == '__main__':
  sys.exit(main())

30
build/android/gyp/find.py Executable file
View File

@ -0,0 +1,30 @@
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Finds files in directories.
"""
import fnmatch
import optparse
import os
import sys
def main(argv):
  """Prints every file under the given directories matching --pattern.

  Returns:
    0 on success, 1 if any given directory does not exist.
  """
  parser = optparse.OptionParser()
  parser.add_option('--pattern', default='*', help='File pattern to match.')
  options, directories = parser.parse_args(argv)

  for d in directories:
    if not os.path.exists(d):
      # Stream writes instead of the Python-2-only print statement forms so
      # the script behaves identically under Python 2 and 3.
      sys.stderr.write('%s does not exist\n' % d)
      return 1
    for root, _, filenames in os.walk(d):
      for f in fnmatch.filter(filenames, options.pattern):
        sys.stdout.write(os.path.join(root, f) + '\n')
  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))

View File

@ -0,0 +1,56 @@
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This finds the java distribution's tools.jar and copies it somewhere.
"""
import argparse
import os
import re
import shutil
import sys
from util import build_utils
RT_JAR_FINDER = re.compile(r'\[Opened (.*)/jre/lib/rt.jar\]')
def main():
  """Locates the JDK's tools.jar and copies it to --output."""
  parser = argparse.ArgumentParser(description='Find Sun Tools Jar')
  parser.add_argument('--depfile',
                      help='Path to depfile. This must be specified as the '
                           "action's first output.")
  parser.add_argument('--output', required=True)
  args = parser.parse_args()

  jar_path = FindSunToolsJarPath()
  if jar_path is None:
    raise Exception("Couldn't find tools.jar")

  # Using copyfile instead of copy() because copy() calls copymode();
  # we don't want the locked mode because we may copy over this file again.
  shutil.copyfile(jar_path, args.output)

  if args.depfile:
    build_utils.WriteDepfile(
        args.depfile,
        [jar_path] + build_utils.GetPythonDependencies())
def FindSunToolsJarPath():
  """Returns the path to tools.jar in the active JDK, or None if not found.

  Works with at least openjdk 1.6/1.7 and Sun Java 1.6/1.7: running java
  with -verbose prints the rt.jar path, from which the JDK root is derived.
  """
  stdout = build_utils.CheckOutput(
      ['java', '-verbose', '-version'], print_stderr=False)
  for line in stdout.splitlines():
    match = RT_JAR_FINDER.match(line)
    if match:
      return os.path.join(match.group(1), 'lib', 'tools.jar')
  return None


if __name__ == '__main__':
  sys.exit(main())

View File

@ -0,0 +1,58 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import os
import sys
from util import build_utils
def DoGcc(options):
  """Preprocesses options.template with the host gcc into options.output."""
  build_utils.MakeDirectory(os.path.dirname(options.output))

  cmd = ['gcc']  # invoke host gcc.
  for macro in options.defines or []:
    cmd.extend(['-D', macro])
  cmd.extend([
      '-E',               # stop after preprocessing.
      '-D', 'ANDROID',    # Specify ANDROID define for pre-processor.
      '-x', 'c-header',   # treat sources as C header files
      '-P',               # disable line markers, i.e. '#line 309'
      '-I', options.include_path,
      '-o', options.output,
      options.template
  ])
  build_utils.CheckOutput(cmd)
def main(args):
  """Parses options (expanding response files) and runs the preprocessor."""
  args = build_utils.ExpandFileArgs(args)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--include-path', help='Include path for gcc.')
  parser.add_option('--template', help='Path to template.')
  parser.add_option('--output', help='Path for generated file.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--defines', help='Pre-defines macros', action='append')
  options, _ = parser.parse_args(args)

  DoGcc(options)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             build_utils.GetPythonDependencies())

  if options.stamp:
    build_utils.Touch(options.stamp)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))

View File

@ -0,0 +1,97 @@
#!/usr/bin/env python
#
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Creates an AndroidManifest.xml for an APK split.
Given the manifest file for the main APK, generates an AndroidManifest.xml with
the value required for a Split APK (package, versionCode, etc).
"""
import optparse
import xml.etree.ElementTree
from util import build_utils
MANIFEST_TEMPLATE = """<?xml version="1.0" encoding="utf-8"?>
<manifest
xmlns:android="http://schemas.android.com/apk/res/android"
package="%(package)s"
split="%(split)s">
<uses-sdk android:minSdkVersion="21" />
<application android:hasCode="%(has_code)s">
</application>
</manifest>
"""
def ParseArgs():
  """Parses command line options.

  Returns:
    An options object as from optparse.OptionsParser.parse_args()
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--main-manifest', help='The main manifest of the app')
  parser.add_option('--out-manifest', help='The output manifest')
  parser.add_option('--split', help='The name of the split')
  parser.add_option(
      '--has-code',
      action='store_true',
      default=False,
      help='Whether the split will contain a .dex file')

  options, args = parser.parse_args()
  if args:
    parser.error('No positional arguments should be given.')

  # Check that required options have been provided.
  build_utils.CheckOptions(options, parser,
                           required=('main_manifest', 'out_manifest', 'split'))
  return options
def Build(main_manifest, split, has_code):
  """Builds a split manifest based on the manifest of the main APK.

  Args:
    main_manifest: the XML manifest of the main APK as a string
    split: the name of the split as a string
    has_code: whether this split APK will contain .dex files

  Returns:
    The XML split manifest as a string
  """
  root = xml.etree.ElementTree.fromstring(main_manifest)
  substitutions = {
      # The split inherits the main APK's package name.
      'package': root.get('package'),
      # Split names must be valid Java identifiers; '-' is not allowed.
      'split': split.replace('-', '_'),
      'has_code': str(has_code).lower(),
  }
  return MANIFEST_TEMPLATE % substitutions
def main():
  """Reads the main manifest and writes the derived split manifest."""
  options = ParseArgs()
  # open() with context managers instead of the Python-2-only file()
  # builtin, so the handles are closed promptly and the code also runs
  # under Python 3.
  with open(options.main_manifest) as f:
    main_manifest = f.read()
  split_manifest = Build(
      main_manifest,
      options.split,
      options.has_code)

  with open(options.out_manifest, 'w') as f:
    f.write(split_manifest)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        [options.main_manifest] + build_utils.GetPythonDependencies())


if __name__ == '__main__':
  main()

View File

@ -0,0 +1,319 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Convert Android xml resources to API 14 compatible.
There are two reasons that we cannot just use API 17 attributes,
so we are generating another set of resources by this script.
1. paddingStart attribute can cause a crash on Galaxy Tab 2.
2. There is a bug that paddingStart does not override paddingLeft on
JB-MR1. This is fixed on JB-MR2. b/8654490
Therefore, this resource generation script can be removed when
we drop the support for JB-MR1.
Please refer to http://crbug.com/235118 for the details.
"""
import optparse
import os
import re
import shutil
import sys
import xml.dom.minidom as minidom
from util import build_utils
# Note that we are assuming 'android:' is an alias of
# the namespace 'http://schemas.android.com/apk/res/android'.

GRAVITY_ATTRIBUTES = ('android:gravity', 'android:layout_gravity')

# Almost all attributes that have "Start" or "End" in
# their names should be mapped to the "Left"/"Right" equivalent.
ATTRIBUTES_TO_MAP = {'paddingStart' : 'paddingLeft',
                     'drawableStart' : 'drawableLeft',
                     'layout_alignStart' : 'layout_alignLeft',
                     'layout_marginStart' : 'layout_marginLeft',
                     'layout_alignParentStart' : 'layout_alignParentLeft',
                     'layout_toStartOf' : 'layout_toLeftOf',
                     'paddingEnd' : 'paddingRight',
                     'drawableEnd' : 'drawableRight',
                     'layout_alignEnd' : 'layout_alignRight',
                     'layout_marginEnd' : 'layout_marginRight',
                     'layout_alignParentEnd' : 'layout_alignParentRight',
                     'layout_toEndOf' : 'layout_toRightOf'}

# Qualify both sides with the 'android:' prefix used in resource files.
ATTRIBUTES_TO_MAP = dict(['android:' + k, 'android:' + v] for k, v
                         in ATTRIBUTES_TO_MAP.iteritems())

# Reverse map, used to detect deprecated (Left/Right) attributes.
ATTRIBUTES_TO_MAP_REVERSED = dict([v, k] for k, v
                                  in ATTRIBUTES_TO_MAP.iteritems())
def IterateXmlElements(node):
  """minidom helper function that iterates all the element nodes.
  Iteration order is pre-order depth-first."""
  # Explicit stack instead of recursion; children are pushed in reverse so
  # the leftmost child is visited first (pre-order).
  pending = [node]
  while pending:
    current = pending.pop()
    if current.nodeType == current.ELEMENT_NODE:
      yield current
    pending.extend(reversed(current.childNodes))
def ParseAndReportErrors(filename):
  """Returns the parsed minidom DOM of |filename|; exits the script with a
  traceback and an error message on any parse failure."""
  try:
    return minidom.parse(filename)
  except Exception:
    import traceback
    traceback.print_exc()
    sys.stderr.write('Failed to parse XML file: %s\n' % filename)
    sys.exit(1)
def AssertNotDeprecatedAttribute(name, value, filename):
  """Raises an exception if the given attribute is deprecated."""
  msg = None
  if name in ATTRIBUTES_TO_MAP_REVERSED:
    msg = '{0} should use {1} instead of {2}'.format(
        filename, ATTRIBUTES_TO_MAP_REVERSED[name], name)
  elif name in GRAVITY_ATTRIBUTES and ('left' in value or 'right' in value):
    msg = '{0} should use start/end instead of left/right for {1}'.format(
        filename, name)

  if msg is None:
    return
  msg += ('\nFor background, see: http://android-developers.blogspot.com/'
          '2013/03/native-rtl-support-in-android-42.html\n'
          'If you have a legitimate need for this attribute, discuss with '
          'kkimlabs@chromium.org or newt@chromium.org')
  raise Exception(msg)
def WriteDomToFile(dom, filename):
  """Write the given dom to filename, creating parent directories as needed."""
  build_utils.MakeDirectory(os.path.dirname(filename))
  with open(filename, 'w') as f:
    dom.writexml(f, '', ' ', '\n', encoding='utf-8')
def HasStyleResource(dom):
  """Return True if the dom is a style resource, False otherwise."""
  # next() builtin (Python 2.6+ and 3) instead of the Python-2-only
  # .next() generator method.
  root_node = next(IterateXmlElements(dom))
  return bool(root_node.nodeName == 'resources' and
              list(root_node.getElementsByTagName('style')))
def ErrorIfStyleResourceExistsInDir(input_dir):
  """If a style resource is in input_dir, raises an exception."""
  for xml_path in build_utils.FindInDirectory(input_dir, '*.xml'):
    if HasStyleResource(ParseAndReportErrors(xml_path)):
      raise Exception('error: style file ' + xml_path +
                      ' should be under ' + input_dir +
                      '-v17 directory. Please refer to '
                      'http://crbug.com/243952 for the details.')
def GenerateV14LayoutResourceDom(dom, filename, assert_not_deprecated=True):
  """Convert layout resource to API 14 compatible layout resource.

  Args:
    dom: Parsed minidom object to be modified.
    filename: Filename that the DOM was parsed from.
    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft)
        will cause an exception to be thrown.

  Returns:
    True if dom is modified, False otherwise.
  """
  modified = False

  # Convert any API 17 Start/End attributes to Left/Right attributes,
  # e.g. paddingStart="10dp" becomes paddingLeft="10dp".
  # Note: gravity attributes need no conversion because start/end values
  # are backward-compatible. Explained at
  # https://plus.sandbox.google.com/+RomanNurik/posts/huuJd8iVVXY?e=Showroom
  for element in IterateXmlElements(dom):
    for attr_name, attr_value in list(element.attributes.items()):
      if attr_name in ATTRIBUTES_TO_MAP:
        element.setAttribute(ATTRIBUTES_TO_MAP[attr_name], attr_value)
        del element.attributes[attr_name]
        modified = True
      elif assert_not_deprecated:
        AssertNotDeprecatedAttribute(attr_name, attr_value, filename)

  return modified
def GenerateV14StyleResourceDom(dom, filename, assert_not_deprecated=True):
  """Convert style resource to API 14 compatible style resource.

  Args:
    dom: Parsed minidom object to be modified.
    filename: Filename that the DOM was parsed from.
    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft)
        will cause an exception to be thrown.

  Returns:
    True if dom is modified, False otherwise.
  """
  modified = False
  for style_element in dom.getElementsByTagName('style'):
    for item_element in style_element.getElementsByTagName('item'):
      attr_name = item_element.attributes['name'].value
      attr_value = item_element.childNodes[0].nodeValue
      if attr_name in ATTRIBUTES_TO_MAP:
        item_element.attributes['name'].value = ATTRIBUTES_TO_MAP[attr_name]
        modified = True
      elif assert_not_deprecated:
        AssertNotDeprecatedAttribute(attr_name, attr_value, filename)
  return modified
def GenerateV14LayoutResource(input_filename, output_v14_filename,
                              output_v17_filename):
  """Convert API 17 layout resource to API 14 compatible layout resource.

  It's mostly a simple replacement, s/Start/Left s/End/Right,
  on the attribute names.
  If the generated resource is identical to the original resource,
  don't do anything. If not, write the generated resource to
  output_v14_filename, and copy the original resource to output_v17_filename.
  """
  dom = ParseAndReportErrors(input_filename)
  if not GenerateV14LayoutResourceDom(dom, input_filename):
    return

  # Write the generated resource.
  WriteDomToFile(dom, output_v14_filename)

  # Copy the original resource.
  build_utils.MakeDirectory(os.path.dirname(output_v17_filename))
  shutil.copy2(input_filename, output_v17_filename)
def GenerateV14StyleResource(input_filename, output_v14_filename):
  """Convert API 17 style resources to API 14 compatible style resource.

  Write the generated style resource to output_v14_filename. It's mostly
  a simple replacement, s/Start/Left s/End/Right, on the attribute names.
  """
  dom = ParseAndReportErrors(input_filename)
  GenerateV14StyleResourceDom(dom, input_filename)
  WriteDomToFile(dom, output_v14_filename)
def GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, output_v17_dir):
  """Convert layout resources to API 14 compatible resources in input_dir."""
  for xml_path in build_utils.FindInDirectory(input_dir, '*.xml'):
    relative = os.path.relpath(xml_path, input_dir)
    GenerateV14LayoutResource(xml_path,
                              os.path.join(output_v14_dir, relative),
                              os.path.join(output_v17_dir, relative))
def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir):
"""Convert style resources to API 14 compatible resources in input_dir."""
for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
rel_filename = os.path.relpath(input_filename, input_dir)
output_v14_filename = os.path.join(output_v14_dir, rel_filename)
GenerateV14StyleResource(input_filename, output_v14_filename)
def ParseArgs():
  """Parse command line options.

  Returns:
    An options object as from optparse.OptionsParser.parse_args()
  """
  option_parser = optparse.OptionParser()
  option_parser.add_option('--res-dir',
                           help='directory containing resources '
                           'used to generate v14 compatible resources')
  option_parser.add_option('--res-v14-compatibility-dir',
                           help='output directory into which '
                           'v14 compatible resources will be generated')
  option_parser.add_option('--stamp', help='File to touch on success')

  options, args = option_parser.parse_args()
  if args:
    option_parser.error('No positional arguments should be given.')

  # Both the input and output directories are mandatory.
  build_utils.CheckOptions(options, option_parser,
                           required=('res_dir', 'res_v14_compatibility_dir'))
  return options
def GenerateV14Resources(res_dir, res_v14_dir):
  """Walk res_dir and emit API 14 compatible resources into res_v14_dir."""
  for name in os.listdir(res_dir):
    if not os.path.isdir(os.path.join(res_dir, name)):
      continue

    pieces = name.split('-')
    resource_type, qualifiers = pieces[0], pieces[1:]

    # Locate the first API-level qualifier (e.g. 'v17'), if any.
    api_level_qualifier_index = -1
    api_level_qualifier = ''
    for index, qualifier in enumerate(qualifiers):
      if re.match('v[0-9]+$', qualifier):
        api_level_qualifier_index = index
        api_level_qualifier = qualifier
        break

    # Android pre-v17 API doesn't support RTL. Skip.
    if 'ldrtl' in qualifiers:
      continue

    input_dir = os.path.abspath(os.path.join(res_dir, name))

    # We also need to copy the original v17 resource to the *-v17 directory
    # because the generated v14 resource will hide the original resource.
    output_v14_dir = os.path.join(res_v14_dir, name)
    output_v17_dir = os.path.join(res_v14_dir, name + '-v17')

    # We only convert layout resources under layout*/, xml*/,
    # and style resources under values*/.
    if resource_type in ('layout', 'xml'):
      if not api_level_qualifier:
        GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir,
                                        output_v17_dir)
    elif resource_type == 'values':
      if api_level_qualifier == 'v17':
        # Drop the 'v17' qualifier from the output directory name.
        output_qualifiers = (qualifiers[:api_level_qualifier_index] +
                             qualifiers[api_level_qualifier_index + 1:])
        output_v14_dir = os.path.join(res_v14_dir,
                                      '-'.join([resource_type] +
                                               output_qualifiers))
        GenerateV14StyleResourcesInDir(input_dir, output_v14_dir)
      elif not api_level_qualifier:
        ErrorIfStyleResourceExistsInDir(input_dir)
def main():
  """Regenerate the v14 compatibility resources from scratch."""
  options = ParseArgs()
  res_v14_dir = options.res_v14_compatibility_dir

  # Start from an empty output directory so stale files never survive.
  build_utils.DeleteDirectory(res_v14_dir)
  build_utils.MakeDirectory(res_v14_dir)

  GenerateV14Resources(options.res_dir, res_v14_dir)

  if options.stamp:
    build_utils.Touch(options.stamp)

if __name__ == '__main__':
  sys.exit(main())

View File

@ -0,0 +1,67 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Gets and writes the configurations of the attached devices.
This configuration is used by later build steps to determine which devices to
install to and what needs to be installed to those devices.
"""
import optparse
import sys
from util import build_utils
from util import build_device
def main(argv):
  """Collect configurations of attached devices and write them to a file."""
  parser = optparse.OptionParser()
  parser.add_option('--stamp', action='store')
  parser.add_option('--output', action='store')
  options, _ = parser.parse_args(argv)

  devices = build_device.GetAttachedDevices()

  device_configurations = []
  for device in devices:
    configuration, is_online, has_root = (
        build_device.GetConfigurationForDevice(device))

    # A device that is offline or that refuses root can't be managed; warn
    # loudly and move on rather than failing the build.
    if not is_online:
      build_utils.PrintBigWarning(
          '%s is not online. Skipping managed install for this device. '
          'Try rebooting the device to fix this warning.' % device)
      continue

    if not has_root:
      build_utils.PrintBigWarning(
          '"adb root" failed on device: %s\n'
          'Skipping managed install for this device.'
          % configuration['description'])
      continue

    device_configurations.append(configuration)

  if not device_configurations:
    build_utils.PrintBigWarning(
        'No valid devices attached. Skipping managed install steps.')
  elif len(devices) > 1:
    # Note that this checks len(devices) and not len(device_configurations).
    # This way, any time there are multiple devices attached it is
    # explicitly stated which device we will install things to even if all but
    # one device were rejected for other reasons (e.g. two devices attached,
    # one w/o root).
    build_utils.PrintBigWarning(
        'Multiple devices attached. '
        'Installing to the preferred device: '
        '%(id)s (%(description)s)' % (device_configurations[0]))

  build_device.WriteConfigurations(device_configurations, options.output)

if __name__ == '__main__':
  sys.exit(main(sys.argv))

View File

@ -0,0 +1,66 @@
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Insert a version string into a library as a section '.chromium.version'.
"""
import optparse
import os
import sys
import tempfile
from util import build_utils
def InsertChromiumVersion(android_objcopy,
                          library_path,
                          version_string):
  """Stamp version_string into library_path as a '.chromium.version' section.

  Any pre-existing .chromium.version section is removed first so repeated
  stamping never accumulates duplicate sections.
  """
  build_utils.CheckOutput([android_objcopy,
                           '--remove-section=.chromium.version',
                           library_path])

  # objcopy reads the section payload from a file, so stage the version
  # string in a temporary file for the duration of the call.
  with tempfile.NamedTemporaryFile() as stream:
    stream.write(version_string)
    stream.flush()
    build_utils.CheckOutput([android_objcopy,
                             '--add-section',
                             '.chromium.version=%s' % stream.name,
                             library_path])
def main(args):
  """Insert the version string into each of the given stripped libraries."""
  args = build_utils.ExpandFileArgs(args)

  parser = optparse.OptionParser()
  parser.add_option('--android-objcopy',
                    help='Path to the toolchain\'s objcopy binary')
  parser.add_option('--stripped-libraries-dir',
                    help='Directory of native libraries')
  parser.add_option('--libraries',
                    help='List of libraries')
  parser.add_option('--version-string',
                    help='Version string to be inserted')
  parser.add_option('--stamp', help='Path to touch on success')
  options, _ = parser.parse_args(args)

  for library in build_utils.ParseGypList(options.libraries):
    InsertChromiumVersion(options.android_objcopy,
                          os.path.join(options.stripped_libraries_dir, library),
                          options.version_string)

  if options.stamp:
    build_utils.Touch(options.stamp)

  return 0

if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))

79
build/android/gyp/jar.py Executable file
View File

@ -0,0 +1,79 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import optparse
import os
import sys
from util import build_utils
from util import md5_check
def Jar(class_files, classes_dir, jar_path, manifest_file=None):
  """Create a jar at jar_path containing the given .class files.

  The jar is only rebuilt when its inputs change (tracked via an .md5.stamp
  file next to the output).
  """
  jar_path = os.path.abspath(jar_path)

  # The paths of the files in the jar will be the same as they are passed in
  # to the command. Because of this, the command should be run in
  # options.classes_dir so the .class file paths in the jar are correct.
  jar_cwd = classes_dir
  relative_class_files = [os.path.relpath(f, jar_cwd) for f in class_files]
  jar_cmd = ['jar', 'cf0', jar_path]
  if manifest_file:
    jar_cmd[1] += 'm'  # 'cf0' -> 'cf0m': next argument is the manifest.
    jar_cmd.append(os.path.abspath(manifest_file))
  jar_cmd.extend(relative_class_files)

  with build_utils.TempDir() as temp_dir:
    # 'jar' refuses to create an archive with no entries, so always include
    # a placeholder empty file.
    empty_file = os.path.join(temp_dir, '.empty')
    build_utils.Touch(empty_file)
    jar_cmd.append(os.path.relpath(empty_file, jar_cwd))
    record_path = '%s.md5.stamp' % jar_path
    md5_check.CallAndRecordIfStale(
        lambda: build_utils.CheckOutput(jar_cmd, cwd=jar_cwd),
        record_path=record_path,
        input_paths=class_files,
        input_strings=jar_cmd,
        force=not os.path.exists(jar_path),
        )

    build_utils.Touch(jar_path, fail_if_missing=True)
def JarDirectory(classes_dir, excluded_classes, jar_path, manifest_file=None):
  """Jar all .class files under classes_dir except those matching one of the
  excluded_classes fnmatch patterns."""
  class_files = [f for f in build_utils.FindInDirectory(classes_dir, '*.class')
                 if not any(fnmatch.fnmatch(f, pattern)
                            for pattern in excluded_classes)]
  Jar(class_files, classes_dir, jar_path, manifest_file=manifest_file)
def main():
  """Command-line entry point: jar a directory of .class files."""
  parser = optparse.OptionParser()
  parser.add_option('--classes-dir', help='Directory containing .class files.')
  parser.add_option('--jar-path', help='Jar output path.')
  parser.add_option('--excluded-classes',
                    help='List of .class file patterns to exclude from the jar.')
  parser.add_option('--stamp', help='Path to touch on success.')
  options, _ = parser.parse_args()

  excluded_classes = (build_utils.ParseGypList(options.excluded_classes)
                      if options.excluded_classes else [])

  JarDirectory(options.classes_dir,
               excluded_classes,
               options.jar_path)

  if options.stamp:
    build_utils.Touch(options.stamp)

if __name__ == '__main__':
  sys.exit(main())

127
build/android/gyp/jar_toc.py Executable file
View File

@ -0,0 +1,127 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Creates a TOC file from a Java jar.
The TOC file contains the non-package API of the jar. This includes all
public/protected/package classes/functions/members and the values of static
final variables (members with package access are kept because in some cases we
have multiple libraries with the same package, particularly test+non-test). Some
other information (major/minor javac version) is also included.
This TOC file then can be used to determine if a dependent library should be
rebuilt when this jar changes. I.e. any change to the jar that would require a
rebuild, will have a corresponding change in the TOC file.
"""
import optparse
import os
import re
import sys
import zipfile
from util import build_utils
from util import md5_check
def GetClassesInZipFile(zip_file):
  """Return the fully-qualified names of all .class entries in zip_file.

  Entries are of the form org/chromium/base/Class$Inner.class; each is
  converted to dotted form with the '.class' suffix dropped.
  """
  return [entry.replace('/', '.')[:-len('.class')]
          for entry in zip_file.namelist()
          if entry.endswith('.class')]
def CallJavap(classpath, classes):
  """Run javap over the given classes and return its stdout."""
  command = [
      'javap',
      '-package',        # Show public/protected/package.
      # -verbose is required to get constant values (which can be inlined in
      # dependents).
      '-verbose',
      '-J-XX:NewSize=4m',
      '-classpath', classpath,
  ]
  return build_utils.CheckOutput(command + classes)
def ExtractToc(disassembled_classes):
  """Filter javap output down to the lines that form the jar's public TOC.

  javap output is structured by indent (2-space) levels.
  """
  # Lines worth keeping: class/function/member signatures plus a handful of
  # metadata lines.
  keep_re = re.compile('|'.join([
      '^[^ ]',  # This includes all class/function/member signatures.
      '^  SourceFile:',
      '^  minor version:',
      '^  major version:',
      '^  Constant value:',
  ]))
  # Lines to drop even when they matched above: the constant pool
  # (i.e. literals used in the class).
  drop_re = re.compile('^const #')
  toc_lines = [line for line in disassembled_classes.split('\n')
               if keep_re.match(line) and not drop_re.match(line)]
  return '\n'.join(toc_lines)
def UpdateToc(jar_path, toc_path):
  """Regenerate the TOC file at toc_path from the jar at jar_path."""
  classes = GetClassesInZipFile(zipfile.ZipFile(jar_path))
  toc = []

  # Split into multiple javap invocations to stay under the command size
  # limit.
  limit = 1000
  for start in xrange(0, len(classes), limit):
    batch = classes[start:start + limit]
    toc.append(ExtractToc(CallJavap(classpath=jar_path, classes=batch)))

  with open(toc_path, 'w') as tocfile:
    tocfile.write(''.join(toc))
def DoJarToc(options):
  """Update the TOC for options.jar_path, but only when the jar changed."""
  jar_path = options.jar_path
  toc_path = options.toc_path
  md5_check.CallAndRecordIfStale(
      lambda: UpdateToc(jar_path, toc_path),
      record_path='%s.md5.stamp' % toc_path,
      input_paths=[jar_path],
      force=not os.path.exists(toc_path),
      )
  build_utils.Touch(toc_path, fail_if_missing=True)
def main():
  """Command-line entry point: build a .jar.TOC file for a jar."""
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--jar-path', help='Input .jar path.')
  parser.add_option('--toc-path', help='Output .jar.TOC path.')
  parser.add_option('--stamp', help='Path to touch on success.')
  options, _ = parser.parse_args()

  DoJarToc(options)

  # Fix: the original wrote an identical depfile both before and after
  # DoJarToc; the first write was redundant, so the depfile is now written
  # exactly once, after the TOC has been generated.
  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        build_utils.GetPythonDependencies())

  if options.stamp:
    build_utils.Touch(options.stamp)

if __name__ == '__main__':
  sys.exit(main())

View File

@ -0,0 +1,340 @@
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import re
import optparse
import os
from string import Template
import sys
from util import build_utils
# List of C++ types that are compatible with the Java code generated by this
# script.
#
# This script can parse .idl files however, at present it ignores special
# rules such as [cpp_enum_prefix_override="ax_attr"].
# C++ fixed underlying types that map safely onto a Java int.  Note that
# 64-bit types (e.g. int64_t, 'long') are deliberately absent.
ENUM_FIXED_TYPE_WHITELIST = ['char', 'unsigned char',
  'short', 'unsigned short',
  'int', 'int8_t', 'int16_t', 'int32_t', 'uint8_t', 'uint16_t']
class EnumDefinition(object):
  """An enum parsed from a C++ header, plus the metadata needed to emit it
  as a Java class of static final ints."""

  def __init__(self, original_enum_name=None, class_name_override=None,
               enum_package=None, entries=None, fixed_type=None):
    self.original_enum_name = original_enum_name
    self.class_name_override = class_name_override
    self.enum_package = enum_package
    # Insertion order matters: an enumerator with no explicit value gets the
    # previous enumerator's value + 1 (see _AssignEntryIndices).
    self.entries = collections.OrderedDict(entries or [])
    self.prefix_to_strip = None
    self.fixed_type = fixed_type

  def AppendEntry(self, key, value):
    """Add one enumerator; value may be None (implicit) or a string."""
    if key in self.entries:
      raise Exception('Multiple definitions of key %s found.' % key)
    self.entries[key] = value

  @property
  def class_name(self):
    # A directive-supplied override wins over the C++ enum's own name.
    return self.class_name_override or self.original_enum_name

  def Finalize(self):
    """Validate and normalize the entries.

    Must be called once, after all entries are appended.  Order matters:
    values are resolved before the common prefix is stripped.
    """
    self._Validate()
    self._AssignEntryIndices()
    self._StripPrefix()

  def _Validate(self):
    assert self.class_name
    assert self.enum_package
    assert self.entries
    if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_WHITELIST:
      raise Exception('Fixed type %s for enum %s not whitelisted.' %
          (self.fixed_type, self.class_name))

  def _AssignEntryIndices(self):
    # Enums, if given no value, are given the value of the previous enum + 1.
    if not all(self.entries.values()):
      prev_enum_value = -1
      for key, value in self.entries.iteritems():
        if not value:
          self.entries[key] = prev_enum_value + 1
        elif value in self.entries:
          # The value is a reference to an earlier enumerator: copy its
          # (already resolved) number.
          self.entries[key] = self.entries[value]
        else:
          try:
            self.entries[key] = int(value)
          except ValueError:
            raise Exception('Could not interpret integer from enum value "%s" '
                            'for key %s.' % (value, key))
        prev_enum_value = self.entries[key]

  def _StripPrefix(self):
    prefix_to_strip = self.prefix_to_strip
    if not prefix_to_strip:
      # Default prefix: the SHOUTY_SNAKE_CASE form of the enum name plus '_'
      # (e.g. EnumName -> ENUM_NAME_).
      prefix_to_strip = self.original_enum_name
      prefix_to_strip = re.sub('(?!^)([A-Z]+)', r'_\1', prefix_to_strip).upper()
      prefix_to_strip += '_'
      # Only strip when every key carries the prefix.
      if not all([w.startswith(prefix_to_strip) for w in self.entries.keys()]):
        prefix_to_strip = ''

    entries = collections.OrderedDict()
    for (k, v) in self.entries.iteritems():
      stripped_key = k.replace(prefix_to_strip, '', 1)
      # String values may reference other (prefixed) keys; strip those too.
      # Numeric values are left untouched.
      if isinstance(v, basestring):
        stripped_value = v.replace(prefix_to_strip, '', 1)
      else:
        stripped_value = v
      entries[stripped_key] = stripped_value

    self.entries = entries
class DirectiveSet(object):
  """Accumulates GENERATED_JAVA_* directive values and applies them to an
  EnumDefinition."""

  class_name_override_key = 'CLASS_NAME_OVERRIDE'
  enum_package_key = 'ENUM_PACKAGE'
  prefix_to_strip_key = 'PREFIX_TO_STRIP'

  known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key]

  def __init__(self):
    self._directives = {}

  def Update(self, key, value):
    """Record one directive value; rejects keys outside known_keys."""
    if key not in DirectiveSet.known_keys:
      raise Exception("Unknown directive: " + key)
    self._directives[key] = value

  @property
  def empty(self):
    # True when no directive has been recorded yet.
    return not self._directives

  def UpdateDefinition(self, definition):
    """Copy the collected directives onto the given definition.

    A missing class-name override becomes ''; the other two default to None.
    """
    definition.class_name_override = self._directives.get(
        DirectiveSet.class_name_override_key, '')
    definition.enum_package = self._directives.get(
        DirectiveSet.enum_package_key)
    definition.prefix_to_strip = self._directives.get(
        DirectiveSet.prefix_to_strip_key)
class HeaderParser(object):
  """Line-oriented state machine that extracts enum definitions (and their
  GENERATED_JAVA_* directives) from C++ header source lines."""

  # C++ comment forms.
  single_line_comment_re = re.compile(r'\s*//')
  multi_line_comment_start_re = re.compile(r'\s*/\*')
  # One enumerator line: NAME, optionally '= value', optional trailing comma.
  enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?')
  enum_end_re = re.compile(r'^\s*}\s*;\.*$')
  # '// GENERATED_JAVA_<KEY>: <value>' on a single line.
  generator_directive_re = re.compile(
      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$')
  # Multi-line directive: opening '(' on the first line, continuation lines,
  # then a line ending in ')'.
  multi_line_generator_directive_start_re = re.compile(
      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$')
  multi_line_directive_continuation_re = re.compile(
      r'^\s*//\s+([\.\w]+)$')
  multi_line_directive_end_re = re.compile(
      r'^\s*//\s+([\.\w]*)\)$')

  # Pieces of the enum-opening line, e.g.
  # 'enum class Foo : unsigned short {'.
  optional_class_or_struct_re = r'(class|struct)?'
  enum_name_re = r'(\w+)'
  optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?'
  enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' +
      optional_class_or_struct_re + '\s*' + enum_name_re + '\s*' +
      optional_fixed_type_re + '\s*{\s*$')

  def __init__(self, lines, path=None):
    # path is only used in error messages.
    self._lines = lines
    self._path = path
    self._enum_definitions = []
    # Parser state: inside an enum body, the definition being built, pending
    # directives, and (name, value-parts) of an in-progress multi-line
    # directive (None when not in one).
    self._in_enum = False
    self._current_definition = None
    self._generator_directives = DirectiveSet()
    self._multi_line_generator_directive = None

  def _ApplyGeneratorDirectives(self):
    # Consume the pending directives into the finished definition and reset
    # them for the next enum.
    self._generator_directives.UpdateDefinition(self._current_definition)
    self._generator_directives = DirectiveSet()

  def ParseDefinitions(self):
    """Parse all lines and return the list of EnumDefinitions found."""
    for line in self._lines:
      self._ParseLine(line)
    return self._enum_definitions

  def _ParseLine(self, line):
    # Dispatch on parser state: multi-line directive takes priority, then
    # outside-enum vs. inside-enum handling.
    if self._multi_line_generator_directive:
      self._ParseMultiLineDirectiveLine(line)
    elif not self._in_enum:
      self._ParseRegularLine(line)
    else:
      self._ParseEnumLine(line)

  def _ParseEnumLine(self, line):
    """Handle one line inside an enum body."""
    if HeaderParser.single_line_comment_re.match(line):
      return
    if HeaderParser.multi_line_comment_start_re.match(line):
      raise Exception('Multi-line comments in enums are not supported in ' +
                      self._path)
    enum_end = HeaderParser.enum_end_re.match(line)
    enum_entry = HeaderParser.enum_line_re.match(line)
    if enum_end:
      # '};' closes the enum: apply directives and store the definition.
      self._ApplyGeneratorDirectives()
      self._current_definition.Finalize()
      self._enum_definitions.append(self._current_definition)
      self._in_enum = False
    elif enum_entry:
      enum_key = enum_entry.groups()[0]
      enum_value = enum_entry.groups()[2]  # None when no '= value' given.
      self._current_definition.AppendEntry(enum_key, enum_value)

  def _ParseMultiLineDirectiveLine(self, line):
    """Handle one line while inside a multi-line '(' ... ')' directive."""
    multi_line_directive_continuation = (
        HeaderParser.multi_line_directive_continuation_re.match(line))
    multi_line_directive_end = (
        HeaderParser.multi_line_directive_end_re.match(line))

    if multi_line_directive_continuation:
      value_cont = multi_line_directive_continuation.groups()[0]
      self._multi_line_generator_directive[1].append(value_cont)
    elif multi_line_directive_end:
      # Closing ')': join the accumulated parts into the directive value.
      directive_name = self._multi_line_generator_directive[0]
      directive_value = "".join(self._multi_line_generator_directive[1])
      directive_value += multi_line_directive_end.groups()[0]
      self._multi_line_generator_directive = None
      self._generator_directives.Update(directive_name, directive_value)
    else:
      # Any other line inside a multi-line directive is an error.
      raise Exception('Malformed multi-line directive declaration in ' +
                      self._path)

  def _ParseRegularLine(self, line):
    """Handle one line outside any enum body."""
    enum_start = HeaderParser.enum_start_re.match(line)
    generator_directive = HeaderParser.generator_directive_re.match(line)
    multi_line_generator_directive_start = (
        HeaderParser.multi_line_generator_directive_start_re.match(line))

    if generator_directive:
      directive_name = generator_directive.groups()[0]
      directive_value = generator_directive.groups()[1]
      self._generator_directives.Update(directive_name, directive_value)
    elif multi_line_generator_directive_start:
      directive_name = multi_line_generator_directive_start.groups()[0]
      directive_value = multi_line_generator_directive_start.groups()[1]
      self._multi_line_generator_directive = (directive_name, [directive_value])
    elif enum_start:
      # Enums with no preceding directives are intentionally ignored.
      if self._generator_directives.empty:
        return
      self._current_definition = EnumDefinition(
          original_enum_name=enum_start.groups()[1],
          fixed_type=enum_start.groups()[3])
      self._in_enum = True
def GetScriptName():
  """Return this script's path relative to the parent of its build/ dir."""
  components = os.path.abspath(sys.argv[0]).split(os.path.sep)
  # Keep everything from the first 'build' path component onwards (raises
  # ValueError if the script does not live under a build/ directory).
  return os.sep.join(components[components.index('build'):])
def DoGenerate(output_dir, source_paths, print_output_only=False):
  """Generate one Java file per enum found in each of source_paths.

  Returns the list of output paths.  With print_output_only the paths are
  computed but nothing is written.
  """
  output_paths = []
  for source_path in source_paths:
    enum_definitions = DoParseHeaderFile(source_path)
    if not enum_definitions:
      raise Exception('No enums found in %s\n'
                      'Did you forget prefixing enums with '
                      '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' %
                      source_path)
    for enum_definition in enum_definitions:
      # The Java package determines the output subdirectory.
      package_path = enum_definition.enum_package.replace('.', os.path.sep)
      output_path = os.path.join(output_dir, package_path,
                                 enum_definition.class_name + '.java')
      output_paths.append(output_path)
      if not print_output_only:
        build_utils.MakeDirectory(os.path.dirname(output_path))
        DoWriteOutput(source_path, output_path, enum_definition)
  return output_paths
def DoParseHeaderFile(path):
  """Parse enum definitions out of the C++ header at the given path."""
  with open(path) as header_file:
    return HeaderParser(header_file.readlines(), path).ParseDefinitions()
def GenerateOutput(source_path, enum_definition):
  """Render enum_definition as the text of a generated Java class."""
  template = Template("""
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is autogenerated by
//     ${SCRIPT_NAME}
// From
//     ${SOURCE_PATH}

package ${PACKAGE};

public class ${CLASS_NAME} {
${ENUM_ENTRIES}
}
""")

  enum_template = Template('  public static final int ${NAME} = ${VALUE};')
  enum_entries_string = '\n'.join(
      enum_template.substitute(NAME=enum_name, VALUE=enum_value)
      for enum_name, enum_value in enum_definition.entries.iteritems())

  return template.substitute(
      CLASS_NAME=enum_definition.class_name,
      ENUM_ENTRIES=enum_entries_string,
      PACKAGE=enum_definition.enum_package,
      SCRIPT_NAME=GetScriptName(),
      SOURCE_PATH=source_path)
def DoWriteOutput(source_path, output_path, enum_definition):
  """Write the generated Java class for enum_definition to output_path."""
  with open(output_path, 'w') as out_file:
    out_file.write(GenerateOutput(source_path, enum_definition))
def AssertFilesList(output_paths, assert_files_list):
  """Raise unless output_paths and assert_files_list hold the same paths."""
  actual_set = set(output_paths)
  expected_set = set(assert_files_list)
  if actual_set == expected_set:
    return
  # Outputs that were generated but not asserted must be added to the
  # assert list; asserted files that were not generated must be removed.
  need_to_add = list(actual_set - expected_set)
  need_to_remove = list(expected_set - actual_set)
  raise Exception('Output files list does not match expectations. Please '
                  'add %s and remove %s.' % (need_to_add, need_to_remove))
def DoMain(argv):
  """Command-line driver: parse options, generate the Java enum classes, and
  return the space-separated list of generated paths."""
  usage = 'usage: %prog [options] output_dir input_file(s)...'
  parser = optparse.OptionParser(usage=usage)

  parser.add_option('--assert_file', action="append", default=[],
                    dest="assert_files_list", help='Assert that the given '
                    'file is an output. There can be multiple occurrences of '
                    'this flag.')
  parser.add_option('--print_output_only', help='Only print output paths.',
                    action='store_true')
  parser.add_option('--verbose', help='Print more information.',
                    action='store_true')

  options, args = parser.parse_args(argv)
  # args[0] is the output directory; the rest are input headers.
  if len(args) < 2:
    parser.error('Need to specify output directory and at least one input file')
  output_paths = DoGenerate(args[0], args[1:],
                            print_output_only=options.print_output_only)

  # Optionally verify the generated file list against the build system's
  # expectations.
  if options.assert_files_list:
    AssertFilesList(output_paths, options.assert_files_list)

  if options.verbose:
    print 'Output paths:'
    print '\n'.join(output_paths)

  # The return value is consumed by gyp as a space-separated path list.
  return ' '.join(output_paths)

if __name__ == '__main__':
  DoMain(sys.argv[1:])

View File

@ -0,0 +1,436 @@
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for enum_preprocess.py.
This test suite containss various tests for the C++ -> Java enum generator.
"""
import collections
import optparse
import os
import sys
import unittest
import java_cpp_enum
from java_cpp_enum import EnumDefinition, GenerateOutput, GetScriptName
from java_cpp_enum import HeaderParser
sys.path.append(os.path.join(os.path.dirname(__file__), "gyp"))
from util import build_utils
class TestPreprocess(unittest.TestCase):
def testOutput(self):
definition = EnumDefinition(original_enum_name='ClassName',
enum_package='some.package',
entries=[('E1', 1), ('E2', '2 << 2')])
output = GenerateOutput('path/to/file', definition)
expected = """
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file is autogenerated by
// %s
// From
// path/to/file
package some.package;
public class ClassName {
public static final int E1 = 1;
public static final int E2 = 2 << 2;
}
"""
self.assertEqual(expected % GetScriptName(), output)
def testParseSimpleEnum(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: test.namespace
enum EnumName {
VALUE_ZERO,
VALUE_ONE,
};
""".split('\n')
definitions = HeaderParser(test_data).ParseDefinitions()
self.assertEqual(1, len(definitions))
definition = definitions[0]
self.assertEqual('EnumName', definition.class_name)
self.assertEqual('test.namespace', definition.enum_package)
self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0),
('VALUE_ONE', 1)]),
definition.entries)
def testParseBitShifts(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: test.namespace
enum EnumName {
VALUE_ZERO = 1 << 0,
VALUE_ONE = 1 << 1,
};
""".split('\n')
definitions = HeaderParser(test_data).ParseDefinitions()
self.assertEqual(1, len(definitions))
definition = definitions[0]
self.assertEqual('EnumName', definition.class_name)
self.assertEqual('test.namespace', definition.enum_package)
self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'),
('VALUE_ONE', '1 << 1')]),
definition.entries)
def testParseClassNameOverride(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: test.namespace
// GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
enum EnumName {
FOO
};
// GENERATED_JAVA_ENUM_PACKAGE: test.namespace
// GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride
enum PrefixTest {
PREFIX_TEST_A,
PREFIX_TEST_B,
};
""".split('\n')
definitions = HeaderParser(test_data).ParseDefinitions()
self.assertEqual(2, len(definitions))
definition = definitions[0]
self.assertEqual('OverrideName', definition.class_name)
definition = definitions[1]
self.assertEqual('OtherOverride', definition.class_name)
self.assertEqual(collections.OrderedDict([('A', 0),
('B', 1)]),
definition.entries)
def testParseTwoEnums(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: test.namespace
enum EnumOne {
ENUM_ONE_A = 1,
// Comment there
ENUM_ONE_B = A,
};
enum EnumIgnore {
C, D, E
};
// GENERATED_JAVA_ENUM_PACKAGE: other.package
// GENERATED_JAVA_PREFIX_TO_STRIP: P_
enum EnumTwo {
P_A,
P_B
};
""".split('\n')
definitions = HeaderParser(test_data).ParseDefinitions()
self.assertEqual(2, len(definitions))
definition = definitions[0]
self.assertEqual('EnumOne', definition.class_name)
self.assertEqual('test.namespace', definition.enum_package)
self.assertEqual(collections.OrderedDict([('A', '1'),
('B', 'A')]),
definition.entries)
definition = definitions[1]
self.assertEqual('EnumTwo', definition.class_name)
self.assertEqual('other.package', definition.enum_package)
self.assertEqual(collections.OrderedDict([('A', 0),
('B', 1)]),
definition.entries)
def testParseThrowsOnUnknownDirective(self):
test_data = """
// GENERATED_JAVA_UNKNOWN: Value
enum EnumName {
VALUE_ONE,
};
""".split('\n')
with self.assertRaises(Exception):
HeaderParser(test_data).ParseDefinitions()
def testParseReturnsEmptyListWithoutDirectives(self):
test_data = """
enum EnumName {
VALUE_ONE,
};
""".split('\n')
self.assertEqual([], HeaderParser(test_data).ParseDefinitions())
def testParseEnumClass(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: test.namespace
enum class Foo {
FOO_A,
};
""".split('\n')
definitions = HeaderParser(test_data).ParseDefinitions()
self.assertEqual(1, len(definitions))
definition = definitions[0]
self.assertEqual('Foo', definition.class_name)
self.assertEqual('test.namespace', definition.enum_package)
self.assertEqual(collections.OrderedDict([('A', 0)]),
definition.entries)
def testParseEnumStruct(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: test.namespace
enum struct Foo {
FOO_A,
};
""".split('\n')
definitions = HeaderParser(test_data).ParseDefinitions()
self.assertEqual(1, len(definitions))
definition = definitions[0]
self.assertEqual('Foo', definition.class_name)
self.assertEqual('test.namespace', definition.enum_package)
self.assertEqual(collections.OrderedDict([('A', 0)]),
definition.entries)
def testParseFixedTypeEnum(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: test.namespace
enum Foo : int {
FOO_A,
};
""".split('\n')
definitions = HeaderParser(test_data).ParseDefinitions()
self.assertEqual(1, len(definitions))
definition = definitions[0]
self.assertEqual('Foo', definition.class_name)
self.assertEqual('test.namespace', definition.enum_package)
self.assertEqual('int', definition.fixed_type)
self.assertEqual(collections.OrderedDict([('A', 0)]),
definition.entries)
def testParseFixedTypeEnumClass(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: test.namespace
enum class Foo: unsigned short {
FOO_A,
};
""".split('\n')
definitions = HeaderParser(test_data).ParseDefinitions()
self.assertEqual(1, len(definitions))
definition = definitions[0]
self.assertEqual('Foo', definition.class_name)
self.assertEqual('test.namespace', definition.enum_package)
self.assertEqual('unsigned short', definition.fixed_type)
self.assertEqual(collections.OrderedDict([('A', 0)]),
definition.entries)
def testParseUnknownFixedTypeRaises(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: test.namespace
enum class Foo: foo_type {
FOO_A,
};
""".split('\n')
with self.assertRaises(Exception):
HeaderParser(test_data).ParseDefinitions()
def testParseSimpleMultiLineDirective(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: (
// test.namespace)
// GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
enum Foo {
FOO_A,
};
""".split('\n')
definitions = HeaderParser(test_data).ParseDefinitions()
self.assertEqual('test.namespace', definitions[0].enum_package)
self.assertEqual('Bar', definitions[0].class_name)
def testParseMultiLineDirective(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: (te
// st.name
// space)
enum Foo {
FOO_A,
};
""".split('\n')
definitions = HeaderParser(test_data).ParseDefinitions()
self.assertEqual('test.namespace', definitions[0].enum_package)
def testParseMultiLineDirectiveWithOtherDirective(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: (
// test.namespace)
// GENERATED_JAVA_CLASS_NAME_OVERRIDE: (
// Ba
// r
// )
enum Foo {
FOO_A,
};
""".split('\n')
definitions = HeaderParser(test_data).ParseDefinitions()
self.assertEqual('test.namespace', definitions[0].enum_package)
self.assertEqual('Bar', definitions[0].class_name)
def testParseMalformedMultiLineDirectiveWithOtherDirective(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: (
// test.name
// space
// GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
enum Foo {
FOO_A,
};
""".split('\n')
with self.assertRaises(Exception):
HeaderParser(test_data).ParseDefinitions()
def testParseMalformedMultiLineDirective(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: (
// test.name
// space
enum Foo {
FOO_A,
};
""".split('\n')
with self.assertRaises(Exception):
HeaderParser(test_data).ParseDefinitions()
def testParseMalformedMultiLineDirectiveShort(self):
test_data = """
// GENERATED_JAVA_ENUM_PACKAGE: (
enum Foo {
FOO_A,
};
""".split('\n')
with self.assertRaises(Exception):
HeaderParser(test_data).ParseDefinitions()
def testEnumValueAssignmentNoneDefined(self):
definition = EnumDefinition(original_enum_name='c', enum_package='p')
definition.AppendEntry('A', None)
definition.AppendEntry('B', None)
definition.AppendEntry('C', None)
definition.Finalize()
self.assertEqual(collections.OrderedDict([('A', 0),
('B', 1),
('C', 2)]),
definition.entries)
def testEnumValueAssignmentAllDefined(self):
  """Explicitly assigned string values are preserved verbatim."""
  definition = EnumDefinition(original_enum_name='c', enum_package='p')
  for entry_name, value in (('A', '1'), ('B', '2'), ('C', '3')):
    definition.AppendEntry(entry_name, value)
  definition.Finalize()
  expected = collections.OrderedDict([('A', '1'), ('B', '2'), ('C', '3')])
  self.assertEqual(expected, definition.entries)
def testEnumValueAssignmentReferences(self):
  """An entry whose value names an earlier entry gets that entry's value."""
  definition = EnumDefinition(original_enum_name='c', enum_package='p')
  for entry_name, value in (('A', None), ('B', 'A'), ('C', None), ('D', 'C')):
    definition.AppendEntry(entry_name, value)
  definition.Finalize()
  expected = collections.OrderedDict([('A', 0), ('B', 0), ('C', 1), ('D', 1)])
  self.assertEqual(expected, definition.entries)
def testEnumValueAssignmentSet(self):
  """Auto-numbering resumes from an explicitly assigned value."""
  definition = EnumDefinition(original_enum_name='c', enum_package='p')
  for entry_name, value in (('A', None), ('B', '2'), ('C', None)):
    definition.AppendEntry(entry_name, value)
  definition.Finalize()
  expected = collections.OrderedDict([('A', 0), ('B', 2), ('C', 3)])
  self.assertEqual(expected, definition.entries)
def testEnumValueAssignmentSetReferences(self):
  """Chained references resolve, and auto-numbering continues afterwards."""
  definition = EnumDefinition(original_enum_name='c', enum_package='p')
  for entry_name, value in (('A', None), ('B', 'A'), ('C', 'B'), ('D', None)):
    definition.AppendEntry(entry_name, value)
  definition.Finalize()
  expected = collections.OrderedDict(
      [('A', 0), ('B', 0), ('C', 0), ('D', 1)])
  self.assertEqual(expected, definition.entries)
def testEnumValueAssignmentRaises(self):
  """A reference to an unknown entry name makes Finalize() fail."""
  definition = EnumDefinition(original_enum_name='c', enum_package='p')
  for entry_name, value in (('A', None), ('B', 'foo'), ('C', None)):
    definition.AppendEntry(entry_name, value)
  with self.assertRaises(Exception):
    definition.Finalize()
def testExplicitPrefixStripping(self):
  """prefix_to_strip removes the prefix only from entries that carry it."""
  definition = EnumDefinition(original_enum_name='c', enum_package='p')
  for entry_name, value in (('P_A', None), ('B', None), ('P_C', None),
                            ('P_LAST', 'P_C')):
    definition.AppendEntry(entry_name, value)
  definition.prefix_to_strip = 'P_'
  definition.Finalize()
  expected = collections.OrderedDict(
      [('A', 0), ('B', 1), ('C', 2), ('LAST', 2)])
  self.assertEqual(expected, definition.entries)
def testImplicitPrefixStripping(self):
  """A prefix derived from the enum's class name is stripped automatically."""
  definition = EnumDefinition(original_enum_name='ClassName',
                              enum_package='p')
  for entry_name, value in (('CLASS_NAME_A', None),
                            ('CLASS_NAME_B', None),
                            ('CLASS_NAME_C', None),
                            ('CLASS_NAME_LAST', 'CLASS_NAME_C')):
    definition.AppendEntry(entry_name, value)
  definition.Finalize()
  expected = collections.OrderedDict(
      [('A', 0), ('B', 1), ('C', 2), ('LAST', 2)])
  self.assertEqual(expected, definition.entries)
def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self):
  """If any entry lacks the class-name prefix, no stripping happens at all."""
  definition = EnumDefinition(original_enum_name='Name',
                              enum_package='p')
  definition.AppendEntry('A', None)
  definition.AppendEntry('B', None)
  definition.AppendEntry('NAME_LAST', None)
  definition.Finalize()
  # list(...) so the comparison also holds on Python 3, where dict.keys()
  # returns a view object that never compares equal to a plain list.
  self.assertEqual(['A', 'B', 'NAME_LAST'], list(definition.entries.keys()))
def testGenerateThrowsOnEmptyInput(self):
  """DoGenerate must raise when a header yields no enum definitions."""
  with self.assertRaises(Exception):
    original_do_parse = java_cpp_enum.DoParseHeaderFile
    try:
      # Stub out the parser so the header appears to contain no enums.
      java_cpp_enum.DoParseHeaderFile = lambda _: []
      java_cpp_enum.DoGenerate('dir', ['file'])
    finally:
      # Always restore the real parser so later tests are unaffected.
      java_cpp_enum.DoParseHeaderFile = original_do_parse
def main(argv):
  """Runs the TestPreprocess suite and optionally touches a stamp file."""
  parser = optparse.OptionParser()
  parser.add_option("--stamp", help="File to touch on success.")
  options, _ = parser.parse_args(argv)

  # Build and run the suite quietly; failures are reported by the runner.
  tests = unittest.TestLoader().loadTestsFromTestCase(TestPreprocess)
  unittest.TextTestRunner(verbosity=0).run(tests)

  if options.stamp:
    build_utils.Touch(options.stamp)


if __name__ == '__main__':
  main(sys.argv[1:])

321
build/android/gyp/javac.py Executable file
View File

@ -0,0 +1,321 @@
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import optparse
import os
import shutil
import re
import sys
import textwrap
from util import build_utils
from util import md5_check
import jar
sys.path.append(build_utils.COLORAMA_ROOT)
import colorama
def ColorJavacOutput(output):
  """Returns |output| with javac warnings, errors and '^' markers colorized.

  Lines matching none of the known javac message shapes pass through
  unchanged. ANSI escape sequences come from colorama.
  """
  fileline_prefix = (
      r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)')
  warning_re = re.compile(
      fileline_prefix + r'(?P<full_message> warning: (?P<message>.*))$')
  error_re = re.compile(
      fileline_prefix + r'(?P<full_message> (?P<message>.*))$')
  marker_re = re.compile(r'\s*(?P<marker>\^)\s*$')

  warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM]
  error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT]
  marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT]

  def Colorize(line, regex, color):
    group, ansi_prefix = color
    match = regex.match(line)
    lo, hi = match.start(group), match.end(group)
    ansi_reset = colorama.Fore.RESET + colorama.Style.RESET_ALL
    return line[:lo] + ansi_prefix + line[lo:hi] + ansi_reset + line[hi:]

  def ApplyColor(line):
    # First matching rule wins; order mirrors javac message specificity
    # (warning is a special case of the generic error shape).
    for regex, color in ((warning_re, warning_color),
                         (error_re, error_color),
                         (marker_re, marker_color)):
      if regex.match(line):
        return Colorize(line, regex, color)
    return line

  return '\n'.join(ApplyColor(line) for line in output.split('\n'))
# Extra flags passed to the Errorprone compiler driver when
# --use-errorprone-path is given (see DoJavac). A single -Xepdisable: flag
# whose value is a comma-separated list of checks to disable.
ERRORPRONE_OPTIONS = [
  '-Xepdisable:'
  # Something in chrome_private_java makes this check crash.
  'com.google.errorprone.bugpatterns.ClassCanBeStatic,'
  # These crash on lots of targets.
  'com.google.errorprone.bugpatterns.WrongParameterPackage,'
  'com.google.errorprone.bugpatterns.GuiceOverridesGuiceInjectableMethod,'
  'com.google.errorprone.bugpatterns.GuiceOverridesJavaxInjectableMethod,'
  'com.google.errorprone.bugpatterns.ElementsCountedInLoop'
]
def DoJavac(
    bootclasspath, classpath, classes_dir, chromium_code,
    use_errorprone_path, java_files):
  """Runs javac.

  Builds |java_files| with the provided |classpath| and puts the generated
  .class files into |classes_dir|. If |chromium_code| is true, extra lint
  checking will be enabled.

  Args:
    bootclasspath: Jars for -bootclasspath; when non-empty also forces
      -source/-target 1.7.
    classpath: Jars/directories to compile against.
    classes_dir: Output directory for generated .class files.
    chromium_code: Truthy to enable stricter warnings (-Xlint:unchecked).
    use_errorprone_path: Optional path to an Errorprone driver to use in
      place of plain 'javac'.
    java_files: The .java source files to compile.
  """
  jar_inputs = []
  for path in classpath:
    # Prefer a jar's .TOC file as the staleness input so the md5 check below
    # only re-triggers a compile when the jar's interface actually changed.
    if os.path.exists(path + '.TOC'):
      jar_inputs.append(path + '.TOC')
    else:
      jar_inputs.append(path)

  javac_args = [
      '-g',
      # Chromium only allows UTF8 source files. Being explicit avoids
      # javac pulling a default encoding from the user's environment.
      '-encoding', 'UTF-8',
      '-classpath', ':'.join(classpath),
      '-d', classes_dir]

  if bootclasspath:
    javac_args.extend([
        '-bootclasspath', ':'.join(bootclasspath),
        '-source', '1.7',
        '-target', '1.7',
        ])

  if chromium_code:
    # TODO(aurimas): re-enable '-Xlint:deprecation' checks once they are fixed.
    javac_args.extend(['-Xlint:unchecked'])
  else:
    # XDignore.symbol.file makes javac compile against rt.jar instead of
    # ct.sym. This means that using a java internal package/class will not
    # trigger a compile warning or error.
    javac_args.extend(['-XDignore.symbol.file'])

  if use_errorprone_path:
    javac_cmd = [use_errorprone_path] + ERRORPRONE_OPTIONS
  else:
    javac_cmd = ['javac']
  javac_cmd = javac_cmd + javac_args + java_files

  def Compile():
    # stderr is colorized for human consumption; stdout is echoed only for
    # chromium code (where the extra lint output matters).
    build_utils.CheckOutput(
        javac_cmd,
        print_stdout=chromium_code,
        stderr_filter=ColorJavacOutput)

  # Skip the compile entirely when neither the inputs nor the full command
  # line changed since the last successful run (recorded in the stamp file).
  record_path = os.path.join(classes_dir, 'javac.md5.stamp')
  md5_check.CallAndRecordIfStale(
      Compile,
      record_path=record_path,
      input_paths=java_files + jar_inputs,
      input_strings=javac_cmd)
# Maximum physical line length allowed by the JAR manifest specification;
# longer logical lines must be broken and continued with a leading space.
_MAX_MANIFEST_LINE_LEN = 72


def CreateManifest(manifest_path, classpath, main_class=None,
                   manifest_entries=None):
  """Creates a manifest file with the given parameters.

  This generates a manifest file that complies with the spec found at
  http://docs.oracle.com/javase/7/docs/technotes/guides/jar/jar.html#JAR_Manifest

  Args:
    manifest_path: The path to the manifest file that should be created.
    classpath: The JAR files that should be listed on the manifest file's
      classpath.
    main_class: If present, the class containing the main() function.
    manifest_entries: If present, a list of (key, value) pairs to add to
      the manifest.
  """
  output = ['Manifest-Version: 1.0']
  if main_class:
    output.append('Main-Class: %s' % main_class)
  if manifest_entries:
    for k, v in manifest_entries:
      output.append('%s: %s' % (k, v))
  if classpath:
    sanitized_paths = []
    for path in classpath:
      # Strip quoting and directories; jars are referenced by basename.
      sanitized_paths.append(os.path.basename(path.strip('"')))
    output.append('Class-Path: %s' % ' '.join(sanitized_paths))
  output.append('Created-By: ')
  output.append('')

  # Wrap long lines; continuation lines start with a single space, so wrap
  # at width - 2 to leave room for the indent plus the CR of the CRLF pair.
  wrapper = textwrap.TextWrapper(break_long_words=True,
                                 drop_whitespace=False,
                                 subsequent_indent=' ',
                                 width=_MAX_MANIFEST_LINE_LEN - 2)
  # textwrap.wrap('') returns [], which previously dropped the blank
  # terminator line appended above and produced a manifest with no trailing
  # newline (the JAR spec requires the main section to end with one). Keep
  # empty lines so the output is properly CRLF-terminated.
  lines = []
  for line in output:
    lines.extend(wrapper.wrap(line) or [''])
  with open(manifest_path, 'w') as f:
    f.write('\r\n'.join(lines))
def main(argv):
  """Command-line entry point for the javac wrapper.

  Compiles the given .java files (plus any found via --src-gendirs or
  extracted from --java-srcjars) into a temporary classes directory, then
  optionally jars the result, copies the classes out, writes a depfile and
  touches a stamp file.
  """
  colorama.init()

  argv = build_utils.ExpandFileArgs(argv)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option(
      '--src-gendirs',
      help='Directories containing generated java files.')
  parser.add_option(
      '--java-srcjars',
      action='append',
      default=[],
      help='List of srcjars to include in compilation.')
  parser.add_option(
      '--bootclasspath',
      action='append',
      default=[],
      help='Boot classpath for javac. If this is specified multiple times, '
           'they will all be appended to construct the classpath.')
  parser.add_option(
      '--classpath',
      action='append',
      help='Classpath for javac. If this is specified multiple times, they '
           'will all be appended to construct the classpath.')
  parser.add_option(
      '--javac-includes',
      help='A list of file patterns. If provided, only java files that match'
           'one of the patterns will be compiled.')
  parser.add_option(
      '--jar-excluded-classes',
      default='',
      help='List of .class file patterns to exclude from the jar.')
  parser.add_option(
      '--chromium-code',
      type='int',
      help='Whether code being compiled should be built with stricter '
           'warnings for chromium code.')
  parser.add_option(
      '--use-errorprone-path',
      help='Use the Errorprone compiler at this path.')
  parser.add_option(
      '--classes-dir',
      help='Directory for compiled .class files.')
  parser.add_option('--jar-path', help='Jar output path.')
  parser.add_option(
      '--main-class',
      help='The class containing the main method.')
  parser.add_option(
      '--manifest-entry',
      action='append',
      help='Key:value pairs to add to the .jar manifest.')
  parser.add_option('--stamp', help='Path to touch on success.')

  options, args = parser.parse_args(argv)

  if options.main_class and not options.jar_path:
    parser.error('--main-class requires --jar-path')

  # Repeated flags hold GYP lists; flatten each into one python list.
  bootclasspath = []
  for arg in options.bootclasspath:
    bootclasspath += build_utils.ParseGypList(arg)

  classpath = []
  for arg in options.classpath:
    classpath += build_utils.ParseGypList(arg)

  java_srcjars = []
  for arg in options.java_srcjars:
    java_srcjars += build_utils.ParseGypList(arg)

  java_files = args
  if options.src_gendirs:
    src_gendirs = build_utils.ParseGypList(options.src_gendirs)
    java_files += build_utils.FindInDirectories(src_gendirs, '*.java')

  # Everything the depfile should list as an input of this action.
  input_files = bootclasspath + classpath + java_srcjars + java_files
  with build_utils.TempDir() as temp_dir:
    classes_dir = os.path.join(temp_dir, 'classes')
    os.makedirs(classes_dir)
    if java_srcjars:
      # Extract srcjar contents so their .java files can be compiled too.
      java_dir = os.path.join(temp_dir, 'java')
      os.makedirs(java_dir)
      for srcjar in java_srcjars:
        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
      java_files += build_utils.FindInDirectory(java_dir, '*.java')

    if options.javac_includes:
      # Keep only sources matching at least one --javac-includes pattern.
      javac_includes = build_utils.ParseGypList(options.javac_includes)
      filtered_java_files = []
      for f in java_files:
        for include in javac_includes:
          if fnmatch.fnmatch(f, include):
            filtered_java_files.append(f)
            break
      java_files = filtered_java_files

    if len(java_files) != 0:
      DoJavac(
          bootclasspath,
          classpath,
          classes_dir,
          options.chromium_code,
          options.use_errorprone_path,
          java_files)

    if options.jar_path:
      # Only write a manifest when it would contain something beyond the
      # defaults (a Main-Class and/or custom entries).
      if options.main_class or options.manifest_entry:
        if options.manifest_entry:
          entries = map(lambda e: e.split(":"), options.manifest_entry)
        else:
          entries = []
        manifest_file = os.path.join(temp_dir, 'manifest')
        CreateManifest(manifest_file, classpath, options.main_class, entries)
      else:
        manifest_file = None
      jar.JarDirectory(classes_dir,
                       build_utils.ParseGypList(options.jar_excluded_classes),
                       options.jar_path,
                       manifest_file=manifest_file)

    if options.classes_dir:
      # Delete the old classes directory. This ensures that all .class files in
      # the output are actually from the input .java files. For example, if a
      # .java file is deleted or an inner class is removed, the classes
      # directory should not contain the corresponding old .class file after
      # running this action.
      build_utils.DeleteDirectory(options.classes_dir)
      shutil.copytree(classes_dir, options.classes_dir)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        input_files + build_utils.GetPythonDependencies())

  if options.stamp:
    build_utils.Touch(options.stamp)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))

View File

@ -0,0 +1,121 @@
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Renders one or more template files using the Jinja template engine."""
import codecs
import optparse
import os
import sys
from util import build_utils
# Import jinja2 from third_party/jinja2
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../third_party'))
import jinja2 # pylint: disable=F0401
class RecordingFileSystemLoader(jinja2.FileSystemLoader):
  '''A FileSystemLoader that remembers every template it has served.'''

  def __init__(self, searchpath):
    jinja2.FileSystemLoader.__init__(self, searchpath)
    self.loaded_templates = set()

  def get_source(self, environment, template):
    # Delegate to the base loader, then record the template path (relative
    # to the current directory) before handing the result back.
    source = jinja2.FileSystemLoader.get_source(self, environment, template)
    contents, filename, uptodate = source
    self.loaded_templates.add(os.path.relpath(filename))
    return contents, filename, uptodate

  def get_loaded_templates(self):
    '''Returns the recorded template paths as a list (order unspecified).'''
    return list(self.loaded_templates)
def ProcessFile(env, input_filename, loader_base_dir, output_filename,
                variables):
  """Renders one template to |output_filename| as UTF-8 using |variables|.

  |input_filename| is resolved relative to |loader_base_dir| so the template
  can be located by |env|'s loader.
  """
  rel_template_path = os.path.relpath(input_filename, loader_base_dir)
  rendered = env.get_template(rel_template_path).render(variables)
  with codecs.open(output_filename, 'w', 'utf-8') as out:
    out.write(rendered)
def ProcessFiles(env, input_filenames, loader_base_dir, inputs_base_dir,
                 outputs_zip, variables):
  """Renders every input template and zips the results into |outputs_zip|.

  Each output's path inside the zip mirrors the input's path relative to
  |inputs_base_dir|; inputs outside that directory are rejected.
  """
  abs_base = os.path.abspath(inputs_base_dir)
  with build_utils.TempDir() as staging_dir:
    for template_path in input_filenames:
      relpath = os.path.relpath(os.path.abspath(template_path), abs_base)
      # A relpath escaping the base dir would also escape the staging dir.
      if relpath.startswith(os.pardir):
        raise Exception('input file %s is not contained in inputs base dir %s'
                        % (template_path, inputs_base_dir))

      rendered_path = os.path.join(staging_dir, relpath)
      build_utils.MakeDirectory(os.path.dirname(rendered_path))
      ProcessFile(env, template_path, loader_base_dir, rendered_path,
                  variables)
    build_utils.ZipDir(outputs_zip, staging_dir)
def main():
  """Command-line entry point: renders Jinja templates to a file or a zip.

  Exactly one of --output (single input) or --outputs-zip (any number of
  inputs, requires --inputs-base-dir) must be given. Optionally writes a
  depfile listing every template actually loaded.
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--inputs', help='The template files to process.')
  parser.add_option('--output', help='The output file to generate. Valid '
                    'only if there is a single input.')
  parser.add_option('--outputs-zip', help='A zip file containing the processed '
                    'templates. Required if there are multiple inputs.')
  parser.add_option('--inputs-base-dir', help='A common ancestor directory of '
                    'the inputs. Each output\'s path in the output zip will '
                    'match the relative path from INPUTS_BASE_DIR to the '
                    'input. Required if --output-zip is given.')
  parser.add_option('--loader-base-dir', help='Base path used by the template '
                    'loader. Must be a common ancestor directory of '
                    'the inputs. Defaults to CHROMIUM_SRC.',
                    default=build_utils.CHROMIUM_SRC)
  parser.add_option('--variables', help='Variables to be made available in the '
                    'template processing environment, as a GYP list (e.g. '
                    '--variables "channel=beta mstone=39")', default='')
  options, args = parser.parse_args()

  build_utils.CheckOptions(options, parser, required=['inputs'])
  inputs = build_utils.ParseGypList(options.inputs)

  # Validate the mutually exclusive output modes before doing any work.
  if (options.output is None) == (options.outputs_zip is None):
    parser.error('Exactly one of --output and --output-zip must be given')
  if options.output and len(inputs) != 1:
    parser.error('--output cannot be used with multiple inputs')
  if options.outputs_zip and not options.inputs_base_dir:
    parser.error('--inputs-base-dir must be given when --output-zip is used')
  if args:
    parser.error('No positional arguments should be given.')

  # Parse --variables as a GYP list of name=value pairs.
  variables = {}
  for v in build_utils.ParseGypList(options.variables):
    if '=' not in v:
      parser.error('--variables argument must contain "=": ' + v)
    name, _, value = v.partition('=')
    variables[name] = value

  # The recording loader tracks every template (including includes) that is
  # actually read, so the depfile below is complete.
  loader = RecordingFileSystemLoader(options.loader_base_dir)
  env = jinja2.Environment(loader=loader, undefined=jinja2.StrictUndefined,
                           line_comment_prefix='##')
  if options.output:
    ProcessFile(env, inputs[0], options.loader_base_dir, options.output,
                variables)
  else:
    ProcessFiles(env, inputs, options.loader_base_dir, options.inputs_base_dir,
                 options.outputs_zip, variables)

  if options.depfile:
    deps = loader.get_loaded_templates() + build_utils.GetPythonDependencies()
    build_utils.WriteDepfile(options.depfile, deps)


if __name__ == '__main__':
  main()

Some files were not shown because too many files have changed in this diff Show More