diff --git a/build/OWNERS b/build/OWNERS new file mode 100644 index 00000000000..17d067cff36 --- /dev/null +++ b/build/OWNERS @@ -0,0 +1,5 @@ +cjhopman@chromium.org +dpranke@chromium.org +jochen@chromium.org +scottmg@chromium.org +thakis@chromium.org diff --git a/build/PRESUBMIT.py b/build/PRESUBMIT.py new file mode 100644 index 00000000000..fca962f1caa --- /dev/null +++ b/build/PRESUBMIT.py @@ -0,0 +1,16 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +def _RunTests(input_api, output_api): + return (input_api.canned_checks.RunUnitTestsInDirectory( + input_api, output_api, '.', whitelist=[r'.+_test.py$'])) + + +def CheckChangeOnUpload(input_api, output_api): + return _RunTests(input_api, output_api) + + +def CheckChangeOnCommit(input_api, output_api): + return _RunTests(input_api, output_api) diff --git a/build/README.chromium b/build/README.chromium new file mode 100644 index 00000000000..012df35c7aa --- /dev/null +++ b/build/README.chromium @@ -0,0 +1,15 @@ +List of property sheets to be included by projects: + common.vsprops + Not used anymore. No-op. Kept for compatibility with current projects. + + debug.vsprops + Enables debug settings. Must be included directly in Debug configuration. Includes internal\essential.vsprops. + + external_code.vsprops + Contains settings made to simplify usage of external (non-Google) code. It relaxes the warning levels. Should be included after debug.vsprops or release.vsprops to override their settings. + + output_dll_copy.rules + Run to enable automatic copy of DLL when they are as an input file in a vcproj project. + + release.vsprops + Enables release settings. Must be included directly in Release configuration. Includes internal\essential.vsprops. Also includes "internal\release_impl$(CHROME_BUILD_TYPE).vsprops". So the behavior is dependant on the CHROME_BUILD_TYPE environment variable. 
diff --git a/build/README.dart b/build/README.dart new file mode 100644 index 00000000000..ac57fbe6222 --- /dev/null +++ b/build/README.dart @@ -0,0 +1,7 @@ +This directory was taken from a snapshot of flutter/engine/src/build/. + +The snapshot was taken with a recursive copy `cp -R` of the directory from +the flutter repository. + +The contents is used to support the GN build system. + diff --git a/build/all.gyp b/build/all.gyp new file mode 100644 index 00000000000..b36fae6a575 --- /dev/null +++ b/build/all.gyp @@ -0,0 +1,1442 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +{ + 'variables': { + # A hook that can be overridden in other repositories to add additional + # compilation targets to 'All'. + 'app_targets%': [], + # For Android-specific targets. + 'android_app_targets%': [], + }, + 'targets': [ + { + 'target_name': 'All', + 'type': 'none', + 'xcode_create_dependents_test_runner': 1, + 'dependencies': [ + '<@(app_targets)', + 'some.gyp:*', + '../base/base.gyp:*', + '../components/components.gyp:*', + '../components/components_tests.gyp:*', + '../content/content.gyp:*', + '../crypto/crypto.gyp:*', + '../net/net.gyp:*', + '../sdch/sdch.gyp:*', + '../sql/sql.gyp:*', + '../testing/gmock.gyp:*', + '../testing/gtest.gyp:*', + '../third_party/icu/icu.gyp:*', + '../third_party/libxml/libxml.gyp:*', + '../third_party/sqlite/sqlite.gyp:*', + '../third_party/zlib/zlib.gyp:*', + '../ui/accessibility/accessibility.gyp:*', + '../ui/base/ui_base.gyp:*', + '../ui/display/display.gyp:display_unittests', + '../ui/snapshot/snapshot.gyp:*', + '../url/url.gyp:*', + ], + 'conditions': [ + ['OS!="ios" and OS!="mac"', { + 'dependencies': [ + '../ui/touch_selection/ui_touch_selection.gyp:*', + ], + }], + ['OS=="ios"', { + 'dependencies': [ + '../chrome/chrome.gyp:browser', + '../chrome/chrome.gyp:browser_ui', + '../ios/ios.gyp:*', + # NOTE: This list of 
targets is present because + # mojo_base.gyp:mojo_base cannot be built on iOS, as + # javascript-related targets cause v8 to be built. + '../mojo/mojo_base.gyp:mojo_common_lib', + '../mojo/mojo_base.gyp:mojo_common_unittests', + '../google_apis/google_apis.gyp:google_apis_unittests', + '../skia/skia_tests.gyp:skia_unittests', + '../third_party/mojo/mojo_edk.gyp:mojo_system_impl', + '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_bindings_unittests', + '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_environment_unittests', + '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_system_unittests', + '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_utility_unittests', + '../third_party/mojo/mojo_edk_tests.gyp:mojo_system_unittests', + '../third_party/mojo/mojo_public.gyp:mojo_cpp_bindings', + '../third_party/mojo/mojo_public.gyp:mojo_public_test_utils', + '../third_party/mojo/mojo_public.gyp:mojo_system', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + ], + }], + ['OS=="android"', { + 'dependencies': [ + '../content/content_shell_and_tests.gyp:content_shell_apk', + '<@(android_app_targets)', + 'android_builder_tests', + '../tools/telemetry/telemetry.gyp:*#host', + # TODO(nyquist) This should instead by a target for sync when all of + # the sync-related code for Android has been upstreamed. + # See http://crbug.com/159203 + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_javalib', + ], + 'conditions': [ + ['chromecast==0', { + 'dependencies': [ + '../android_webview/android_webview.gyp:android_webview_apk', + '../android_webview/android_webview.gyp:system_webview_apk', + '../android_webview/android_webview_shell.gyp:android_webview_shell_apk', + '../chrome/android/chrome_apk.gyp:chrome_public_apk', + '../chrome/chrome.gyp:chrome_shell_apk', + '../chrome/chrome.gyp:chrome_sync_shell_apk', + '../remoting/remoting.gyp:remoting_apk', + ], + }], + # TODO: Enable packed relocations for x64. 
See: b/20532404 + ['target_arch != "x64"', { + 'dependencies': [ + '../third_party/android_platform/relocation_packer.gyp:android_relocation_packer_unittests#host', + ], + }], + ], + }, { + 'dependencies': [ + '../content/content_shell_and_tests.gyp:*', + # TODO: This should build on Android and the target should move to the list above. + '../sync/sync.gyp:*', + ], + }], + ['OS!="ios" and OS!="android" and chromecast==0', { + 'dependencies': [ + '../third_party/re2/re2.gyp:re2', + '../chrome/chrome.gyp:*', + '../chrome/tools/profile_reset/jtl_compiler.gyp:*', + '../cc/blink/cc_blink_tests.gyp:*', + '../cc/cc_tests.gyp:*', + '../device/usb/usb.gyp:*', + '../extensions/extensions.gyp:*', + '../extensions/extensions_tests.gyp:*', + '../gin/gin.gyp:*', + '../gpu/gpu.gyp:*', + '../gpu/tools/tools.gyp:*', + '../ipc/ipc.gyp:*', + '../ipc/mojo/ipc_mojo.gyp:*', + '../jingle/jingle.gyp:*', + '../media/cast/cast.gyp:*', + '../media/media.gyp:*', + '../media/midi/midi.gyp:*', + '../mojo/mojo.gyp:*', + '../mojo/mojo_base.gyp:*', + '../ppapi/ppapi.gyp:*', + '../ppapi/ppapi_internal.gyp:*', + '../ppapi/tools/ppapi_tools.gyp:*', + '../printing/printing.gyp:*', + '../skia/skia.gyp:*', + '../sync/tools/sync_tools.gyp:*', + '../third_party/WebKit/public/all.gyp:*', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:*', + '../third_party/codesighs/codesighs.gyp:*', + '../third_party/ffmpeg/ffmpeg.gyp:*', + '../third_party/iccjpeg/iccjpeg.gyp:*', + '../third_party/libpng/libpng.gyp:*', + '../third_party/libusb/libusb.gyp:*', + '../third_party/libwebp/libwebp.gyp:*', + '../third_party/libxslt/libxslt.gyp:*', + '../third_party/lzma_sdk/lzma_sdk.gyp:*', + '../third_party/mesa/mesa.gyp:*', + '../third_party/modp_b64/modp_b64.gyp:*', + '../third_party/npapi/npapi.gyp:*', + '../third_party/ots/ots.gyp:*', + '../third_party/pdfium/samples/samples.gyp:*', + '../third_party/qcms/qcms.gyp:*', + '../tools/gn/gn.gyp:*', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:*', + 
'../tools/telemetry/telemetry.gyp:*', + '../v8/tools/gyp/v8.gyp:*', + '<(libjpeg_gyp_path):*', + ], + }], + ['OS!="ios"', { + 'dependencies': [ + '../device/bluetooth/bluetooth.gyp:*', + '../device/device_tests.gyp:*', + '../gpu/skia_runner/skia_runner.gyp:*', + ], + }], + ['use_openssl==0 and (OS=="mac" or OS=="ios" or OS=="win")', { + 'dependencies': [ + '../third_party/nss/nss.gyp:*', + ], + }], + ['OS=="win" or OS=="ios" or OS=="linux"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:*', + ], + }], + ['OS=="mac"', { + 'dependencies': [ + '../sandbox/sandbox.gyp:*', + '../third_party/crashpad/crashpad/crashpad.gyp:*', + '../third_party/ocmock/ocmock.gyp:*', + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../courgette/courgette.gyp:*', + '../sandbox/sandbox.gyp:*', + ], + 'conditions': [ + ['branding=="Chrome"', { + 'dependencies': [ + '../chrome/chrome.gyp:linux_packages_<(channel)', + ], + }], + ['enable_ipc_fuzzer==1', { + 'dependencies': [ + '../tools/ipc_fuzzer/ipc_fuzzer.gyp:*', + ], + }], + ['use_dbus==1', { + 'dependencies': [ + '../dbus/dbus.gyp:*', + ], + }], + ], + }], + ['chromecast==1', { + 'dependencies': [ + '../chromecast/chromecast.gyp:*', + ], + }], + ['use_x11==1', { + 'dependencies': [ + '../tools/xdisplaycheck/xdisplaycheck.gyp:*', + ], + }], + ['OS=="win"', { + 'conditions': [ + ['win_use_allocator_shim==1', { + 'dependencies': [ + '../base/allocator/allocator.gyp:*', + ], + }], + ], + 'dependencies': [ + '../chrome/tools/crash_service/caps/caps.gyp:*', + '../chrome_elf/chrome_elf.gyp:*', + '../cloud_print/cloud_print.gyp:*', + '../courgette/courgette.gyp:*', + '../rlz/rlz.gyp:*', + '../sandbox/sandbox.gyp:*', + '<(angle_path)/src/angle.gyp:*', + '../third_party/bspatch/bspatch.gyp:*', + '../tools/win/static_initializers/static_initializers.gyp:*', + ], + }, { + 'dependencies': [ + '../third_party/libevent/libevent.gyp:*', + ], + }], + ['toolkit_views==1', { + 'dependencies': [ + '../ui/views/controls/webview/webview.gyp:*', + 
'../ui/views/views.gyp:*', + ], + }], + ['use_aura==1', { + 'dependencies': [ + '../ui/aura/aura.gyp:*', + '../ui/aura_extra/aura_extra.gyp:*', + ], + }], + ['use_ash==1', { + 'dependencies': [ + '../ash/ash.gyp:*', + ], + }], + ['remoting==1', { + 'dependencies': [ + '../remoting/remoting_all.gyp:remoting_all', + ], + }], + ['use_openssl==0', { + 'dependencies': [ + '../net/third_party/nss/ssl.gyp:*', + ], + }], + ['use_openssl==1', { + 'dependencies': [ + '../third_party/boringssl/boringssl.gyp:*', + '../third_party/boringssl/boringssl_tests.gyp:*', + ], + }], + ['enable_app_list==1', { + 'dependencies': [ + '../ui/app_list/app_list.gyp:*', + ], + }], + ['OS!="android" and OS!="ios"', { + 'dependencies': [ + '../google_apis/gcm/gcm.gyp:*', + ], + }], + ['(chromeos==1 or OS=="linux" or OS=="win" or OS=="mac") and chromecast==0', { + 'dependencies': [ + '../extensions/shell/app_shell.gyp:*', + ], + }], + ['envoy==1', { + 'dependencies': [ + '../envoy/envoy.gyp:*', + ], + }], + ], + }, # target_name: All + { + 'target_name': 'All_syzygy', + 'type': 'none', + 'conditions': [ + ['OS=="win" and fastbuild==0 and target_arch=="ia32" and ' + '(syzyasan==1 or syzygy_optimize==1)', { + 'dependencies': [ + '../chrome/installer/mini_installer_syzygy.gyp:*', + ], + }], + ], + }, # target_name: All_syzygy + { + # Note: Android uses android_builder_tests below. + # TODO: Consider merging that with this target. 
+ 'target_name': 'chromium_builder_tests', + 'type': 'none', + 'dependencies': [ + '../base/base.gyp:base_unittests', + '../components/components_tests.gyp:components_unittests', + '../crypto/crypto.gyp:crypto_unittests', + '../net/net.gyp:net_unittests', + '../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../sync/sync.gyp:sync_unit_tests', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/display/display.gyp:display_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../url/url.gyp:url_unittests', + ], + 'conditions': [ + ['OS!="ios"', { + 'dependencies': [ + '../ui/gl/gl_tests.gyp:gl_unittests', + ], + }], + ['OS!="ios" and OS!="mac"', { + 'dependencies': [ + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + ], + }], + ['OS!="ios" and OS!="android"', { + 'dependencies': [ + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_unittests', + '../cloud_print/cloud_print.gyp:cloud_print_unittests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_shell', + '../content/content_shell_and_tests.gyp:content_unittests', + '../device/device_tests.gyp:device_unittests', + '../gin/gin.gyp:gin_unittests', + '../google_apis/google_apis.gyp:google_apis_unittests', + '../gpu/gles2_conform_support/gles2_conform_support.gyp:gles2_conform_support', + '../gpu/gpu.gyp:gpu_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/cast/cast.gyp:cast_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../mojo/mojo.gyp:mojo', + '../ppapi/ppapi_internal.gyp:ppapi_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../third_party/WebKit/public/all.gyp:all_blink', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + 
'../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../tools/telemetry/telemetry.gyp:*', + ], + }], + ['OS!="ios" and OS!="android" and chromecast==0', { + 'dependencies': [ + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:chromedriver_tests', + '../chrome/chrome.gyp:chromedriver_unittests', + '../chrome/chrome.gyp:interactive_ui_tests', + '../chrome/chrome.gyp:sync_integration_tests', + '../chrome/chrome.gyp:unit_tests', + '../extensions/extensions_tests.gyp:extensions_browsertests', + '../extensions/extensions_tests.gyp:extensions_unittests', + ], + }], + ['OS=="win"', { + 'dependencies': [ + '../chrome/chrome.gyp:crash_service', + '../chrome/chrome.gyp:installer_util_unittests', + '../chrome/chrome.gyp:setup_unittests', + # ../chrome/test/mini_installer requires mini_installer. + '../chrome/installer/mini_installer.gyp:mini_installer', + '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests', + '../content/content_shell_and_tests.gyp:copy_test_netscape_plugin', + '../courgette/courgette.gyp:courgette_unittests', + '../sandbox/sandbox.gyp:sbox_integration_tests', + '../sandbox/sandbox.gyp:sbox_unittests', + '../sandbox/sandbox.gyp:sbox_validation_tests', + '../ui/app_list/app_list.gyp:app_list_unittests', + ], + 'conditions': [ + # remoting_host_installation uses lots of non-trivial GYP that tend + # to break because of differences between ninja and msbuild. Make + # sure this target is built by the builders on the main waterfall. + # See http://crbug.com/180600. + ['wix_exists == "True" and sas_dll_exists == "True"', { + 'dependencies': [ + '../remoting/remoting.gyp:remoting_host_installation', + ], + }], + ['syzyasan==1', { + 'variables': { + # Disable incremental linking for all modules. + # 0: inherit, 1: disabled, 2: enabled. 
+ 'msvs_debug_link_incremental': '1', + 'msvs_large_module_debug_link_mode': '1', + # Disable RTC. Syzygy explicitly doesn't support RTC + # instrumented binaries for now. + 'win_debug_RuntimeChecks': '0', + }, + 'defines': [ + # Disable iterator debugging (huge speed boost). + '_HAS_ITERATOR_DEBUGGING=0', + ], + 'msvs_settings': { + 'VCLinkerTool': { + # Enable profile information (necessary for SyzyAsan + # instrumentation). This is incompatible with incremental + # linking. + 'Profile': 'true', + }, + } + }], + ], + }], + ['chromeos==1', { + 'dependencies': [ + '../ui/chromeos/ui_chromeos.gyp:ui_chromeos_unittests', + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../sandbox/sandbox.gyp:sandbox_linux_unittests', + ], + }], + ['OS=="linux" and use_dbus==1', { + 'dependencies': [ + '../dbus/dbus.gyp:dbus_unittests', + ], + }], + ['OS=="mac"', { + 'dependencies': [ + '../ui/app_list/app_list.gyp:app_list_unittests', + '../ui/message_center/message_center.gyp:*', + ], + }], + ['test_isolation_mode != "noop"', { + 'dependencies': [ + 'chromium_swarm_tests', + ], + }], + ['OS!="android"', { + 'dependencies': [ + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + ], + }], + ['enable_basic_printing==1 or enable_print_preview==1', { + 'dependencies': [ + '../printing/printing.gyp:printing_unittests', + ], + }], + ['use_aura==1', { + 'dependencies': [ + '../ui/app_list/app_list.gyp:app_list_unittests', + '../ui/aura/aura.gyp:aura_unittests', + '../ui/compositor/compositor.gyp:compositor_unittests', + ], + }], + ['use_aura==1 and chromecast==0', { + 'dependencies': [ + '../ui/keyboard/keyboard.gyp:keyboard_unittests', + '../ui/views/views.gyp:views_unittests', + ], + }], + ['use_aura==1 or toolkit_views==1', { + 'dependencies': [ + '../ui/events/events.gyp:events_unittests', + ], + }], + ['use_ash==1', { + 'dependencies': [ + '../ash/ash.gyp:ash_unittests', + ], + }], + ['disable_nacl==0', { + 'dependencies': [ + '../components/nacl.gyp:nacl_loader_unittests', + ], + }], 
+ ['disable_nacl==0 and disable_nacl_untrusted==0 and enable_nacl_nonsfi_test==1', { + 'dependencies': [ + '../components/nacl.gyp:nacl_helper_nonsfi_unittests', + ], + }], + ['disable_nacl==0 and disable_nacl_untrusted==0', { + 'dependencies': [ + '../mojo/mojo_nacl_untrusted.gyp:libmojo', + '../mojo/mojo_nacl.gyp:monacl_codegen', + '../mojo/mojo_nacl.gyp:monacl_sel', + '../mojo/mojo_nacl.gyp:monacl_shell', + ], + }], + ], + }, # target_name: chromium_builder_tests + ], + 'conditions': [ + # TODO(GYP): make gn_migration.gypi work unconditionally. + ['OS=="mac" or OS=="win" or (OS=="linux" and target_arch=="x64" and chromecast==0)', { + 'includes': [ + 'gn_migration.gypi', + ], + }], + ['OS!="ios"', { + 'targets': [ + { + 'target_name': 'blink_tests', + 'type': 'none', + 'dependencies': [ + '../third_party/WebKit/public/all.gyp:all_blink', + ], + 'conditions': [ + ['OS=="android"', { + 'dependencies': [ + '../content/content_shell_and_tests.gyp:content_shell_apk', + '../breakpad/breakpad.gyp:dump_syms#host', + '../breakpad/breakpad.gyp:minidump_stackwalk#host', + ], + }, { # OS!="android" + 'dependencies': [ + '../content/content_shell_and_tests.gyp:content_shell', + ], + }], + ['OS=="win"', { + 'dependencies': [ + '../components/test_runner/test_runner.gyp:layout_test_helper', + '../content/content_shell_and_tests.gyp:content_shell_crash_service', + ], + }], + ['OS!="win" and OS!="android"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:minidump_stackwalk', + ], + }], + ['OS=="mac"', { + 'dependencies': [ + '../components/test_runner/test_runner.gyp:layout_test_helper', + '../breakpad/breakpad.gyp:dump_syms#host', + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:dump_syms#host', + ], + }], + ], + }, # target_name: blink_tests + ], + }], # OS!=ios + ['OS!="ios" and OS!="android" and chromecast==0', { + 'targets': [ + { + 'target_name': 'chromium_builder_nacl_win_integration', + 'type': 'none', + 'dependencies': [ + 
'chromium_builder_tests', + ], + }, # target_name: chromium_builder_nacl_win_integration + { + 'target_name': 'chromium_builder_perf', + 'type': 'none', + 'dependencies': [ + '../cc/cc_tests.gyp:cc_perftests', + '../chrome/chrome.gyp:chrome', + '../chrome/chrome.gyp:load_library_perf_tests', + '../chrome/chrome.gyp:performance_browser_tests', + '../chrome/chrome.gyp:sync_performance_tests', + '../content/content_shell_and_tests.gyp:content_shell', + '../gpu/gpu.gyp:gpu_perftests', + '../media/media.gyp:media_perftests', + '../media/midi/midi.gyp:midi_unittests', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:*', + '../tools/telemetry/telemetry.gyp:*', + ], + 'conditions': [ + ['OS!="ios" and OS!="win"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:minidump_stackwalk', + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../chrome/chrome.gyp:linux_symbols' + ], + }], + ['OS=="win"', { + 'dependencies': [ + '../chrome/chrome.gyp:crash_service', + '../gpu/gpu.gyp:angle_perftests', + ], + }], + ['OS=="win" and target_arch=="ia32"', { + 'dependencies': [ + '../chrome/chrome.gyp:crash_service_win64', + ], + }], + ], + }, # target_name: chromium_builder_perf + { + 'target_name': 'chromium_gpu_builder', + 'type': 'none', + 'dependencies': [ + '../chrome/chrome.gyp:chrome', + '../chrome/chrome.gyp:performance_browser_tests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_gl_tests', + '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test', + '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test', + '../gpu/gpu.gyp:gl_tests', + '../gpu/gpu.gyp:angle_unittests', + '../gpu/gpu.gyp:gpu_unittests', + '../tools/telemetry/telemetry.gyp:*', + ], + 'conditions': [ + ['OS!="ios" and OS!="win"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:minidump_stackwalk', + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../chrome/chrome.gyp:linux_symbols' + ], + }], + 
['OS=="win"', { + 'dependencies': [ + '../chrome/chrome.gyp:crash_service', + ], + }], + ['OS=="win" and target_arch=="ia32"', { + 'dependencies': [ + '../chrome/chrome.gyp:crash_service_win64', + ], + }], + ], + }, # target_name: chromium_gpu_builder + { + 'target_name': 'chromium_gpu_debug_builder', + 'type': 'none', + 'dependencies': [ + '../chrome/chrome.gyp:chrome', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_gl_tests', + '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test', + '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test', + '../gpu/gpu.gyp:gl_tests', + '../gpu/gpu.gyp:angle_unittests', + '../gpu/gpu.gyp:gpu_unittests', + '../tools/telemetry/telemetry.gyp:*', + ], + 'conditions': [ + ['OS!="ios" and OS!="win"', { + 'dependencies': [ + '../breakpad/breakpad.gyp:minidump_stackwalk', + ], + }], + ['OS=="linux"', { + 'dependencies': [ + '../chrome/chrome.gyp:linux_symbols' + ], + }], + ['OS=="win"', { + 'dependencies': [ + '../chrome/chrome.gyp:crash_service', + ], + }], + ['OS=="win" and target_arch=="ia32"', { + 'dependencies': [ + '../chrome/chrome.gyp:crash_service_win64', + ], + }], + ], + }, # target_name: chromium_gpu_debug_builder + { + # This target contains everything we need to run tests on the special + # device-equipped WebRTC bots. We have device-requiring tests in + # browser_tests and content_browsertests. 
+ 'target_name': 'chromium_builder_webrtc', + 'type': 'none', + 'dependencies': [ + 'chromium_builder_perf', + '../chrome/chrome.gyp:browser_tests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../third_party/webrtc/tools/tools.gyp:frame_analyzer', + '../third_party/webrtc/tools/tools.gyp:rgba_to_i420_converter', + ], + 'conditions': [ + ['remoting==1', { + 'dependencies': [ + '../remoting/remoting.gyp:*', + ], + }], + ], + }, # target_name: chromium_builder_webrtc + { + 'target_name': 'chromium_builder_chromedriver', + 'type': 'none', + 'dependencies': [ + '../chrome/chrome.gyp:chromedriver', + '../chrome/chrome.gyp:chromedriver_tests', + '../chrome/chrome.gyp:chromedriver_unittests', + ], + }, # target_name: chromium_builder_chromedriver + { + 'target_name': 'chromium_builder_asan', + 'type': 'none', + 'dependencies': [ + '../chrome/chrome.gyp:chrome', + + # We refer to content_shell directly rather than blink_tests + # because we don't want the _unittests binaries. 
+ '../content/content_shell_and_tests.gyp:content_shell', + ], + 'conditions': [ + ['OS!="win"', { + 'dependencies': [ + '../net/net.gyp:hpack_fuzz_wrapper', + '../net/net.gyp:dns_fuzz_stub', + '../skia/skia.gyp:filter_fuzz_stub', + ], + }], + ['enable_ipc_fuzzer==1 and component!="shared_library" and ' + '(OS=="linux" or OS=="win")', { + 'dependencies': [ + '../tools/ipc_fuzzer/ipc_fuzzer.gyp:*', + ], + }], + ['chromeos==0', { + 'dependencies': [ + '../v8/src/d8.gyp:d8#host', + '../third_party/pdfium/samples/samples.gyp:pdfium_test', + ], + }], + ['internal_filter_fuzzer==1', { + 'dependencies': [ + '../skia/tools/clusterfuzz-data/fuzzers/filter_fuzzer/filter_fuzzer.gyp:filter_fuzzer', + ], + }], # internal_filter_fuzzer + ['clang==1', { + 'dependencies': [ + 'sanitizers/sanitizers.gyp:llvm-symbolizer', + ], + }], + ['OS=="win" and fastbuild==0 and target_arch=="ia32" and syzyasan==1', { + 'dependencies': [ + '../chrome/chrome_syzygy.gyp:chrome_dll_syzygy', + '../content/content_shell_and_tests.gyp:content_shell_syzyasan', + ], + 'conditions': [ + ['chrome_multiple_dll==1', { + 'dependencies': [ + '../chrome/chrome_syzygy.gyp:chrome_child_dll_syzygy', + ], + }], + ], + }], + ], + }, + { + 'target_name': 'chromium_builder_nacl_sdk', + 'type': 'none', + 'dependencies': [ + '../chrome/chrome.gyp:chrome', + ], + 'conditions': [ + ['OS=="win"', { + 'dependencies': [ + '../chrome/chrome.gyp:chrome_nacl_win64', + ] + }], + ], + }, #target_name: chromium_builder_nacl_sdk + ], # targets + }], #OS!=ios and OS!=android + ['OS=="android"', { + 'targets': [ + { + # The current list of tests for android. This is temporary + # until the full set supported. If adding a new test here, + # please also add it to build/android/pylib/gtest/gtest_config.py, + # else the test is not run. + # + # WARNING: + # Do not add targets here without communicating the implications + # on tryserver triggers and load. Discuss with + # chrome-infrastructure-team please. 
+ 'target_name': 'android_builder_tests', + 'type': 'none', + 'dependencies': [ + '../base/android/jni_generator/jni_generator.gyp:jni_generator_tests', + '../base/base.gyp:base_unittests', + '../breakpad/breakpad.gyp:breakpad_unittests_deps', + # Also compile the tools needed to deal with minidumps, they are + # needed to run minidump tests upstream. + '../breakpad/breakpad.gyp:dump_syms#host', + '../breakpad/breakpad.gyp:symupload#host', + '../breakpad/breakpad.gyp:minidump_dump#host', + '../breakpad/breakpad.gyp:minidump_stackwalk#host', + '../build/android/pylib/device/commands/commands.gyp:chromium_commands', + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_perftests_apk', + '../cc/cc_tests.gyp:cc_unittests', + '../components/components_tests.gyp:components_unittests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_gl_tests', + '../content/content_shell_and_tests.gyp:content_junit_tests', + '../content/content_shell_and_tests.gyp:chromium_linker_test_apk', + '../content/content_shell_and_tests.gyp:content_shell_test_apk', + '../content/content_shell_and_tests.gyp:content_unittests', + '../gpu/gpu.gyp:gl_tests', + '../gpu/gpu.gyp:gpu_perftests_apk', + '../gpu/gpu.gyp:gpu_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../media/media.gyp:media_perftests_apk', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests_apk', + '../media/midi/midi.gyp:midi_unittests', + '../net/net.gyp:net_unittests', + '../sandbox/sandbox.gyp:sandbox_linux_unittests_deps', + '../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../sync/sync.gyp:sync_unit_tests', + '../testing/android/junit/junit_test.gyp:junit_unit_tests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/WebKit/public/all.gyp:*', + '../tools/android/android_tools.gyp:android_tools', + 
'../tools/android/android_tools.gyp:memconsumer', + '../tools/android/findbugs_plugin/findbugs_plugin.gyp:findbugs_plugin_test', + '../ui/android/ui_android.gyp:ui_android_unittests', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/events/events.gyp:events_unittests', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + # Unit test bundles packaged as an apk. + '../base/base.gyp:base_unittests_apk', + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests_apk', + '../cc/cc_tests.gyp:cc_unittests_apk', + '../components/components_tests.gyp:components_browsertests_apk', + '../components/components_tests.gyp:components_unittests_apk', + '../content/content_shell_and_tests.gyp:content_browsertests_apk', + '../content/content_shell_and_tests.gyp:content_gl_tests_apk', + '../content/content_shell_and_tests.gyp:content_unittests_apk', + '../content/content_shell_and_tests.gyp:video_decode_accelerator_unittest_apk', + '../gpu/gpu.gyp:gl_tests_apk', + '../gpu/gpu.gyp:gpu_unittests_apk', + '../ipc/ipc.gyp:ipc_tests_apk', + '../media/media.gyp:media_unittests_apk', + '../media/midi/midi.gyp:midi_unittests_apk', + '../net/net.gyp:net_unittests_apk', + '../sandbox/sandbox.gyp:sandbox_linux_jni_unittests_apk', + '../skia/skia_tests.gyp:skia_unittests_apk', + '../sql/sql.gyp:sql_unittests_apk', + '../sync/sync.gyp:sync_unit_tests_apk', + '../tools/android/heap_profiler/heap_profiler.gyp:heap_profiler_unittests_apk', + '../ui/android/ui_android.gyp:ui_android_unittests_apk', + '../ui/base/ui_base_tests.gyp:ui_base_unittests_apk', + '../ui/events/events.gyp:events_unittests_apk', + '../ui/gfx/gfx_tests.gyp:gfx_unittests_apk', + '../ui/gl/gl_tests.gyp:gl_unittests_apk', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests_apk', + ], + 'conditions': [ + ['chromecast==0', { + 'dependencies': [ + '../android_webview/android_webview.gyp:android_webview_unittests', + '../chrome/chrome.gyp:unit_tests', + # Unit test bundles 
packaged as an apk. + '../android_webview/android_webview.gyp:android_webview_test_apk', + '../android_webview/android_webview.gyp:android_webview_unittests_apk', + '../chrome/android/chrome_apk.gyp:chrome_public_test_apk', + '../chrome/chrome.gyp:chrome_junit_tests', + '../chrome/chrome.gyp:chrome_shell_test_apk', + '../chrome/chrome.gyp:chrome_sync_shell_test_apk', + '../chrome/chrome.gyp:chrome_shell_uiautomator_tests', + '../chrome/chrome.gyp:chromedriver_webview_shell_apk', + '../chrome/chrome.gyp:unit_tests_apk', + '../third_party/custom_tabs_client/src/custom_tabs_client.gyp:custom_tabs_client_example_apk', + ], + }], + ], + }, + { + # WebRTC Chromium tests to run on Android. + 'target_name': 'android_builder_chromium_webrtc', + 'type': 'none', + 'dependencies': [ + '../build/android/pylib/device/commands/commands.gyp:chromium_commands', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../tools/android/android_tools.gyp:android_tools', + '../tools/android/android_tools.gyp:memconsumer', + '../content/content_shell_and_tests.gyp:content_browsertests_apk', + ], + }, # target_name: android_builder_chromium_webrtc + ], # targets + }], # OS="android" + ['OS=="mac"', { + 'targets': [ + { + # Target to build everything plus the dmg. We don't put the dmg + # in the All target because developers really don't need it. + 'target_name': 'all_and_dmg', + 'type': 'none', + 'dependencies': [ + 'All', + '../chrome/chrome.gyp:build_app_dmg', + ], + }, + # These targets are here so the build bots can use them to build + # subsets of a full tree for faster cycle times. 
+ { + 'target_name': 'chromium_builder_dbg', + 'type': 'none', + 'dependencies': [ + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_unittests', + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:interactive_ui_tests', + '../chrome/chrome.gyp:sync_integration_tests', + '../chrome/chrome.gyp:unit_tests', + '../cloud_print/cloud_print.gyp:cloud_print_unittests', + '../components/components_tests.gyp:components_unittests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_unittests', + '../device/device_tests.gyp:device_unittests', + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + '../gpu/gpu.gyp:gpu_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../ppapi/ppapi_internal.gyp:ppapi_unittests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../rlz/rlz.gyp:*', + '../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../sync/sync.gyp:sync_unit_tests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:*', + '../tools/telemetry/telemetry.gyp:*', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../url/url.gyp:url_unittests', + ], + }, + { + 'target_name': 'chromium_builder_rel', + 'type': 'none', + 'dependencies': [ + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_unittests', + 
'../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:performance_browser_tests', + '../chrome/chrome.gyp:sync_integration_tests', + '../chrome/chrome.gyp:unit_tests', + '../cloud_print/cloud_print.gyp:cloud_print_unittests', + '../components/components_tests.gyp:components_unittests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_unittests', + '../device/device_tests.gyp:device_unittests', + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + '../gpu/gpu.gyp:gpu_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../ppapi/ppapi_internal.gyp:ppapi_unittests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../sync/sync.gyp:sync_unit_tests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:*', + '../tools/telemetry/telemetry.gyp:*', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../url/url.gyp:url_unittests', + ], + }, + { + 'target_name': 'chromium_builder_dbg_tsan_mac', + 'type': 'none', + 'dependencies': [ + '../base/base.gyp:base_unittests', + '../cloud_print/cloud_print.gyp:cloud_print_unittests', + '../crypto/crypto.gyp:crypto_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/media.gyp:media_unittests', + 
'../media/midi/midi.gyp:midi_unittests', + '../net/net.gyp:net_unittests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../url/url.gyp:url_unittests', + ], + }, + { + 'target_name': 'chromium_builder_dbg_valgrind_mac', + 'type': 'none', + 'dependencies': [ + '../base/base.gyp:base_unittests', + '../chrome/chrome.gyp:unit_tests', + '../cloud_print/cloud_print.gyp:cloud_print_unittests', + '../components/components_tests.gyp:components_unittests', + '../content/content_shell_and_tests.gyp:content_unittests', + '../crypto/crypto.gyp:crypto_unittests', + '../device/device_tests.gyp:device_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../net/net.gyp:net_unittests', + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../sync/sync.gyp:sync_unit_tests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../url/url.gyp:url_unittests', + ], + }, + ], # targets + }], # OS="mac" + ['OS=="win"', { + 'targets': [ + # These targets are here so the build bots can use them to build + # 
subsets of a full tree for faster cycle times. + { + 'target_name': 'chromium_builder', + 'type': 'none', + 'dependencies': [ + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_unittests', + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:crash_service', + '../chrome/chrome.gyp:gcapi_test', + '../chrome/chrome.gyp:installer_util_unittests', + '../chrome/chrome.gyp:interactive_ui_tests', + '../chrome/chrome.gyp:performance_browser_tests', + '../chrome/chrome.gyp:setup_unittests', + '../chrome/chrome.gyp:sync_integration_tests', + '../chrome/chrome.gyp:unit_tests', + '../cloud_print/cloud_print.gyp:cloud_print_unittests', + '../components/components_tests.gyp:components_unittests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_unittests', + '../content/content_shell_and_tests.gyp:copy_test_netscape_plugin', + # ../chrome/test/mini_installer requires mini_installer. + '../chrome/installer/mini_installer.gyp:mini_installer', + '../courgette/courgette.gyp:courgette_unittests', + '../device/device_tests.gyp:device_unittests', + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + '../gpu/gpu.gyp:gpu_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../ppapi/ppapi_internal.gyp:ppapi_unittests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../sync/sync.gyp:sync_unit_tests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + 
'../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:*', + '../tools/telemetry/telemetry.gyp:*', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/events/events.gyp:events_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + '../ui/views/views.gyp:views_unittests', + '../url/url.gyp:url_unittests', + ], + 'conditions': [ + ['target_arch=="ia32"', { + 'dependencies': [ + '../chrome/chrome.gyp:crash_service_win64', + ], + }], + ], + }, + { + 'target_name': 'chromium_builder_dbg_tsan_win', + 'type': 'none', + 'dependencies': [ + '../base/base.gyp:base_unittests', + '../cloud_print/cloud_print.gyp:cloud_print_unittests', + '../components/components_tests.gyp:components_unittests', + '../content/content_shell_and_tests.gyp:content_unittests', + '../crypto/crypto.gyp:crypto_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../net/net.gyp:net_unittests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../sql/sql.gyp:sql_unittests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../url/url.gyp:url_unittests', + ], + }, + { + 'target_name': 'chromium_builder_lkgr_drmemory_win', + 'type': 'none', + 'dependencies': [ + '../components/test_runner/test_runner.gyp:layout_test_helper', + '../content/content_shell_and_tests.gyp:content_shell', + '../content/content_shell_and_tests.gyp:content_shell_crash_service', + ], + 
}, + { + 'target_name': 'chromium_builder_dbg_drmemory_win', + 'type': 'none', + 'dependencies': [ + '../ash/ash.gyp:ash_shell_unittests', + '../ash/ash.gyp:ash_unittests', + '../base/base.gyp:base_unittests', + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_unittests', + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:chrome_app_unittests', + '../chrome/chrome.gyp:chromedriver_unittests', + '../chrome/chrome.gyp:installer_util_unittests', + '../chrome/chrome.gyp:setup_unittests', + '../chrome/chrome.gyp:unit_tests', + '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests', + '../cloud_print/cloud_print.gyp:cloud_print_unittests', + '../components/components_tests.gyp:components_unittests', + '../components/test_runner/test_runner.gyp:layout_test_helper', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_shell', + '../content/content_shell_and_tests.gyp:content_shell_crash_service', + '../content/content_shell_and_tests.gyp:content_unittests', + '../courgette/courgette.gyp:courgette_unittests', + '../crypto/crypto.gyp:crypto_unittests', + '../device/device_tests.gyp:device_unittests', + '../extensions/extensions_tests.gyp:extensions_browsertests', + '../extensions/extensions_tests.gyp:extensions_unittests', + '../gin/gin.gyp:gin_shell', + '../gin/gin.gyp:gin_unittests', + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + '../google_apis/google_apis.gyp:google_apis_unittests', + '../gpu/gpu.gyp:angle_unittests', + '../gpu/gpu.gyp:gpu_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests', + '../jingle/jingle.gyp:jingle_unittests', + '../media/cast/cast.gyp:cast_unittests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../mojo/mojo.gyp:mojo', + '../net/net.gyp:net_unittests', + '../printing/printing.gyp:printing_unittests', + '../remoting/remoting.gyp:remoting_unittests', + 
'../skia/skia_tests.gyp:skia_unittests', + '../sql/sql.gyp:sql_unittests', + '../sync/sync.gyp:sync_unit_tests', + '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests', + '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests', + '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests', + '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests', + '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_heap_unittests', + '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_platform_unittests', + '../ui/accessibility/accessibility.gyp:accessibility_unittests', + '../ui/app_list/app_list.gyp:app_list_unittests', + '../ui/aura/aura.gyp:aura_unittests', + '../ui/compositor/compositor.gyp:compositor_unittests', + '../ui/display/display.gyp:display_unittests', + '../ui/events/events.gyp:events_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../ui/keyboard/keyboard.gyp:keyboard_unittests', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + '../url/url.gyp:url_unittests', + ], + }, + ], # targets + 'conditions': [ + ['branding=="Chrome"', { + 'targets': [ + { + 'target_name': 'chrome_official_builder_no_unittests', + 'type': 'none', + 'dependencies': [ + '../chrome/chrome.gyp:crash_service', + '../chrome/chrome.gyp:gcapi_dll', + '../chrome/chrome.gyp:pack_policy_templates', + '../chrome/installer/mini_installer.gyp:mini_installer', + '../cloud_print/cloud_print.gyp:cloud_print', + '../courgette/courgette.gyp:courgette', + '../courgette/courgette.gyp:courgette64', + '../remoting/remoting.gyp:remoting_webapp', + '../third_party/widevine/cdm/widevine_cdm.gyp:widevinecdmadapter', + ], + 'conditions': [ + ['target_arch=="ia32"', { + 'dependencies': [ + '../chrome/chrome.gyp:crash_service_win64', + ], + }], + ['component != "shared_library" and wix_exists == "True" and \ + 
sas_dll_exists == "True"', { + 'dependencies': [ + '../remoting/remoting.gyp:remoting_host_installation', + ], + }], # component != "shared_library" + ] + }, { + 'target_name': 'chrome_official_builder', + 'type': 'none', + 'dependencies': [ + 'chrome_official_builder_no_unittests', + '../base/base.gyp:base_unittests', + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:sync_integration_tests', + '../ipc/ipc.gyp:ipc_tests', + '../media/media.gyp:media_unittests', + '../media/midi/midi.gyp:midi_unittests', + '../net/net.gyp:net_unittests', + '../printing/printing.gyp:printing_unittests', + '../sql/sql.gyp:sql_unittests', + '../sync/sync.gyp:sync_unit_tests', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + '../ui/views/views.gyp:views_unittests', + '../url/url.gyp:url_unittests', + ], + }, + ], # targets + }], # branding=="Chrome" + ], # conditions + }], # OS="win" + ['chromeos==1', { + 'targets': [ + { + 'target_name': 'chromiumos_preflight', + 'type': 'none', + 'dependencies': [ + '../breakpad/breakpad.gyp:minidump_stackwalk', + '../chrome/chrome.gyp:chrome', + '../chrome/chrome.gyp:chromedriver', + '../content/content_shell_and_tests.gyp:video_decode_accelerator_unittest', + '../content/content_shell_and_tests.gyp:video_encode_accelerator_unittest', + '../media/media.gyp:media_unittests', + '../ppapi/ppapi_internal.gyp:ppapi_example_video_decode', + '../sandbox/sandbox.gyp:chrome_sandbox', + '../sandbox/sandbox.gyp:sandbox_linux_unittests', + '../third_party/mesa/mesa.gyp:osmesa', + '../tools/telemetry/telemetry.gyp:bitmaptools#host', + '../tools/perf/clear_system_cache/clear_system_cache.gyp:clear_system_cache', + ], + 'conditions': [ + ['disable_nacl==0', { + 'dependencies': [ + '../components/nacl.gyp:nacl_helper', + 
'../native_client/src/trusted/service_runtime/linux/nacl_bootstrap.gyp:nacl_helper_bootstrap', + ], + }], + ], + }, + ], # targets + }], # "chromeos==1" + ['use_aura==1', { + 'targets': [ + { + 'target_name': 'aura_builder', + 'type': 'none', + 'dependencies': [ + '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests', + '../cc/cc_tests.gyp:cc_unittests', + '../components/components_tests.gyp:components_unittests', + '../content/content_shell_and_tests.gyp:content_browsertests', + '../content/content_shell_and_tests.gyp:content_unittests', + '../device/device_tests.gyp:device_unittests', + '../google_apis/gcm/gcm.gyp:gcm_unit_tests', + '../ppapi/ppapi_internal.gyp:ppapi_unittests', + '../remoting/remoting.gyp:remoting_unittests', + '../skia/skia_tests.gyp:skia_unittests', + '../ui/app_list/app_list.gyp:*', + '../ui/aura/aura.gyp:*', + '../ui/aura_extra/aura_extra.gyp:*', + '../ui/base/ui_base_tests.gyp:ui_base_unittests', + '../ui/compositor/compositor.gyp:*', + '../ui/display/display.gyp:display_unittests', + '../ui/events/events.gyp:*', + '../ui/gfx/gfx_tests.gyp:gfx_unittests', + '../ui/gl/gl_tests.gyp:gl_unittests', + '../ui/keyboard/keyboard.gyp:*', + '../ui/snapshot/snapshot.gyp:snapshot_unittests', + '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests', + '../ui/wm/wm.gyp:*', + 'blink_tests', + ], + 'conditions': [ + ['OS=="win"', { + 'dependencies': [ + '../chrome/chrome.gyp:crash_service', + ], + }], + ['OS=="win" and target_arch=="ia32"', { + 'dependencies': [ + '../chrome/chrome.gyp:crash_service_win64', + ], + }], + ['use_ash==1', { + 'dependencies': [ + '../ash/ash.gyp:ash_shell', + '../ash/ash.gyp:ash_unittests', + ], + }], + ['OS=="linux"', { + # Tests that currently only work on Linux. 
+ 'dependencies': [ + '../base/base.gyp:base_unittests', + '../ipc/ipc.gyp:ipc_tests', + '../sql/sql.gyp:sql_unittests', + '../sync/sync.gyp:sync_unit_tests', + ], + }], + ['chromeos==1', { + 'dependencies': [ + '../chromeos/chromeos.gyp:chromeos_unittests', + '../ui/chromeos/ui_chromeos.gyp:ui_chromeos_unittests', + ], + }], + ['use_ozone==1', { + 'dependencies': [ + '../ui/ozone/ozone.gyp:*', + '../ui/ozone/demo/ozone_demos.gyp:*', + ], + }], + ['chromecast==0', { + 'dependencies': [ + '../chrome/chrome.gyp:browser_tests', + '../chrome/chrome.gyp:chrome', + '../chrome/chrome.gyp:interactive_ui_tests', + '../chrome/chrome.gyp:unit_tests', + '../ui/message_center/message_center.gyp:*', + '../ui/views/examples/examples.gyp:views_examples_with_content_exe', + '../ui/views/views.gyp:views', + '../ui/views/views.gyp:views_unittests', + ], + }], + ], + }, + ], # targets + }], # "use_aura==1" + ['test_isolation_mode != "noop"', { + 'targets': [ + { + 'target_name': 'chromium_swarm_tests', + 'type': 'none', + 'dependencies': [ + '../base/base.gyp:base_unittests_run', + '../content/content_shell_and_tests.gyp:content_browsertests_run', + '../content/content_shell_and_tests.gyp:content_unittests_run', + '../net/net.gyp:net_unittests_run', + ], + 'conditions': [ + ['chromecast==0', { + 'dependencies': [ + '../chrome/chrome.gyp:browser_tests_run', + '../chrome/chrome.gyp:interactive_ui_tests_run', + '../chrome/chrome.gyp:sync_integration_tests_run', + '../chrome/chrome.gyp:unit_tests_run', + ], + }], + ], + }, # target_name: chromium_swarm_tests + ], + }], + ['archive_chromoting_tests==1', { + 'targets': [ + { + 'target_name': 'chromoting_swarm_tests', + 'type': 'none', + 'dependencies': [ + '../testing/chromoting/integration_tests.gyp:*', + ], + }, # target_name: chromoting_swarm_tests + ] + }], + ['OS=="mac" and toolkit_views==1', { + 'targets': [ + { + 'target_name': 'macviews_builder', + 'type': 'none', + 'dependencies': [ + 
'../ui/views/examples/examples.gyp:views_examples_with_content_exe', + '../ui/views/views.gyp:views', + '../ui/views/views.gyp:views_unittests', + ], + }, # target_name: macviews_builder + ], # targets + }], # os=='mac' and toolkit_views==1 + ], # conditions +} diff --git a/build/android/AndroidManifest.xml b/build/android/AndroidManifest.xml new file mode 100644 index 00000000000..f27872eee47 --- /dev/null +++ b/build/android/AndroidManifest.xml @@ -0,0 +1,20 @@ + + + + + + + + + diff --git a/build/android/BUILD.gn b/build/android/BUILD.gn new file mode 100644 index 00000000000..d90ad70abe6 --- /dev/null +++ b/build/android/BUILD.gn @@ -0,0 +1,56 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +sun_tools_jar_path = "$root_gen_dir/sun_tools_jar/tools.jar" + +action("find_sun_tools_jar") { + script = "//build/android/gyp/find_sun_tools_jar.py" + depfile = "$target_gen_dir/$target_name.d" + outputs = [ + depfile, + sun_tools_jar_path, + ] + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--output", + rebase_path(sun_tools_jar_path, root_build_dir), + ] +} + +java_prebuilt("sun_tools_java") { + jar_path = sun_tools_jar_path + jar_dep = ":find_sun_tools_jar" +} + +action("cpplib_stripped") { + _strip_bin = "${android_tool_prefix}strip" + _soname = "libc++_shared.so" + _input_so = "${android_libcpp_root}/libs/${android_app_abi}/${_soname}" + _output_so = "${root_out_dir}/lib.stripped/${_soname}" + + script = "//build/gn_run_binary.py" + inputs = [ + _strip_bin, + ] + sources = [ + _input_so, + ] + outputs = [ + _output_so, + ] + + _rebased_strip_bin = rebase_path(_strip_bin, root_out_dir) + _rebased_input_so = rebase_path(_input_so, root_out_dir) + _rebased_output_so = rebase_path(_output_so, root_out_dir) + args = [ + _rebased_strip_bin, + "--strip-unneeded", + "-o", + 
_rebased_output_so, + _rebased_input_so, + ] +} diff --git a/build/android/CheckInstallApk-debug.apk b/build/android/CheckInstallApk-debug.apk new file mode 100644 index 00000000000..3dc31910a53 Binary files /dev/null and b/build/android/CheckInstallApk-debug.apk differ diff --git a/build/android/OWNERS b/build/android/OWNERS new file mode 100644 index 00000000000..9a5d2701f1d --- /dev/null +++ b/build/android/OWNERS @@ -0,0 +1,3 @@ +jbudorick@chromium.org +klundberg@chromium.org +pasko@chromium.org diff --git a/build/android/PRESUBMIT.py b/build/android/PRESUBMIT.py new file mode 100644 index 00000000000..6e0a3de8846 --- /dev/null +++ b/build/android/PRESUBMIT.py @@ -0,0 +1,64 @@ +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Presubmit script for android buildbot. + +See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for +details on the presubmit API built into depot_tools. 
+""" + + +def CommonChecks(input_api, output_api): + output = [] + + def J(*dirs): + """Returns a path relative to presubmit directory.""" + return input_api.os_path.join(input_api.PresubmitLocalPath(), *dirs) + + output.extend(input_api.canned_checks.RunPylint( + input_api, + output_api, + black_list=[r'pylib/symbols/.*\.py$', r'gyp/.*\.py$', r'gn/.*\.py'], + extra_paths_list=[ + J(), J('..', '..', 'third_party', 'android_testrunner'), + J('buildbot')])) + output.extend(input_api.canned_checks.RunPylint( + input_api, + output_api, + white_list=[r'gyp/.*\.py$', r'gn/.*\.py'], + extra_paths_list=[J('gyp'), J('gn')])) + + # Disabled due to http://crbug.com/410936 + #output.extend(input_api.canned_checks.RunUnitTestsInDirectory( + #input_api, output_api, J('buildbot', 'tests'))) + + pylib_test_env = dict(input_api.environ) + pylib_test_env.update({ + 'PYTHONPATH': input_api.PresubmitLocalPath(), + 'PYTHONDONTWRITEBYTECODE': '1', + }) + output.extend(input_api.canned_checks.RunUnitTests( + input_api, + output_api, + unit_tests=[ + J('pylib', 'base', 'test_dispatcher_unittest.py'), + J('pylib', 'device', 'battery_utils_test.py'), + J('pylib', 'device', 'device_utils_test.py'), + J('pylib', 'device', 'logcat_monitor_test.py'), + J('pylib', 'gtest', 'gtest_test_instance_test.py'), + J('pylib', 'instrumentation', + 'instrumentation_test_instance_test.py'), + J('pylib', 'results', 'json_results_test.py'), + J('pylib', 'utils', 'md5sum_test.py'), + ], + env=pylib_test_env)) + return output + + +def CheckChangeOnUpload(input_api, output_api): + return CommonChecks(input_api, output_api) + + +def CheckChangeOnCommit(input_api, output_api): + return CommonChecks(input_api, output_api) diff --git a/build/android/adb_android_webview_command_line b/build/android/adb_android_webview_command_line new file mode 100755 index 00000000000..791e27069e2 --- /dev/null +++ b/build/android/adb_android_webview_command_line @@ -0,0 +1,20 @@ +#!/bin/bash +# +# Copyright (c) 2013 The Chromium 
Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current content shell flags. +# +# Otherwise, the given flags are used to REPLACE (not modify) the content shell +# flags. For example: +# adb_android_webview_command_line --enable-webgl +# +# To remove all content shell flags, pass an empty string for the flags: +# adb_android_webview_command_line "" + +. $(dirname $0)/adb_command_line_functions.sh +CMD_LINE_FILE=/data/local/tmp/android-webview-command-line +REQUIRES_SU=0 +set_command_line "$@" + diff --git a/build/android/adb_chrome_public_command_line b/build/android/adb_chrome_public_command_line new file mode 100755 index 00000000000..9bf91c6ace7 --- /dev/null +++ b/build/android/adb_chrome_public_command_line @@ -0,0 +1,19 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current Chrome flags. +# +# Otherwise, the given flags are used to REPLACE (not modify) the Chrome +# flags. For example: +# adb_chrome_public_command_line --enable-webgl +# +# To remove all Chrome flags, pass an empty string for the flags: +# adb_chrome_public_command_line "" + +. $(dirname $0)/adb_command_line_functions.sh +CMD_LINE_FILE=/data/local/chrome-command-line +REQUIRES_SU=1 +set_command_line "$@" diff --git a/build/android/adb_chrome_shell_command_line b/build/android/adb_chrome_shell_command_line new file mode 100755 index 00000000000..750f9060365 --- /dev/null +++ b/build/android/adb_chrome_shell_command_line @@ -0,0 +1,20 @@ +#!/bin/bash +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current chrome shell flags. 
+# +# Otherwise, the given flags are used to REPLACE (not modify) the chrome shell +# flags. For example: +# adb_chrome_shell_command_line --enable-webgl +# +# To remove all chrome shell flags, pass an empty string for the flags: +# adb_chrome_shell_command_line "" + +. $(dirname $0)/adb_command_line_functions.sh +CMD_LINE_FILE=/data/local/tmp/chrome-shell-command-line +REQUIRES_SU=0 +set_command_line "$@" + diff --git a/build/android/adb_command_line_functions.sh b/build/android/adb_command_line_functions.sh new file mode 100755 index 00000000000..7ea98b09be6 --- /dev/null +++ b/build/android/adb_command_line_functions.sh @@ -0,0 +1,40 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Variables must be set before calling: +# CMD_LINE_FILE - Path on device to flags file. +# REQUIRES_SU - Set to 1 if path requires root. +function set_command_line() { + SU_CMD="" + if [[ "$REQUIRES_SU" = 1 ]]; then + # Older androids accept "su -c", while newer use "su uid". + SDK_LEVEL=$(adb shell getprop ro.build.version.sdk | tr -d '\r') + # E.g. if no device connected. + if [[ -z "$SDK_LEVEL" ]]; then + exit 1 + fi + SU_CMD="su -c" + if (( $SDK_LEVEL >= 21 )); then + SU_CMD="su 0" + fi + fi + + if [ $# -eq 0 ] ; then + # If nothing specified, print the command line (stripping off "chrome ") + adb shell "cat $CMD_LINE_FILE 2>/dev/null" | cut -d ' ' -s -f2- + elif [ $# -eq 1 ] && [ "$1" = '' ] ; then + # If given an empty string, delete the command line. + set -x + adb shell $SU_CMD rm $CMD_LINE_FILE >/dev/null + else + # Else set it. + set -x + adb shell "echo 'chrome $*' | $SU_CMD dd of=$CMD_LINE_FILE" + # Prevent other apps from modifying flags (this can create security issues). 
+ adb shell $SU_CMD chmod 0664 $CMD_LINE_FILE + fi +} + diff --git a/build/android/adb_content_shell_command_line b/build/android/adb_content_shell_command_line new file mode 100755 index 00000000000..2ac7ece75c7 --- /dev/null +++ b/build/android/adb_content_shell_command_line @@ -0,0 +1,20 @@ +#!/bin/bash +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current content shell flags. +# +# Otherwise, the given flags are used to REPLACE (not modify) the content shell +# flags. For example: +# adb_content_shell_command_line --enable-webgl +# +# To remove all content shell flags, pass an empty string for the flags: +# adb_content_shell_command_line "" + +. $(dirname $0)/adb_command_line_functions.sh +CMD_LINE_FILE=/data/local/tmp/content-shell-command-line +REQUIRES_SU=0 +set_command_line "$@" + diff --git a/build/android/adb_device_functions.sh b/build/android/adb_device_functions.sh new file mode 100755 index 00000000000..66cc32fc4e3 --- /dev/null +++ b/build/android/adb_device_functions.sh @@ -0,0 +1,139 @@ +#!/bin/bash +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# A collection of functions useful for maintaining android devices + + +# Run an adb command on all connected device in parallel. +# Usage: adb_all command line to eval. Quoting is optional. +# +# Examples: +# adb_all install Chrome.apk +# adb_all 'shell cat /path/to/file' +# +adb_all() { + if [[ $# == 0 ]]; then + echo "Usage: adb_all . Quoting is optional." 
+ echo "Example: adb_all install Chrome.apk" + return 1 + fi + local DEVICES=$(adb_get_devices -b) + local NUM_DEVICES=$(echo $DEVICES | wc -w) + if (( $NUM_DEVICES > 1 )); then + echo "Looping over $NUM_DEVICES devices" + fi + _adb_multi "$DEVICES" "$*" +} + + +# Run a command on each connected device. Quoting the command is suggested but +# not required. The script setups up variable DEVICE to correspond to the +# current serial number. Intended for complex one_liners that don't work in +# adb_all +# Usage: adb_device_loop 'command line to eval' +adb_device_loop() { + if [[ $# == 0 ]]; then + echo "Intended for more complex one-liners that cannot be done with" \ + "adb_all." + echo 'Usage: adb_device_loop "echo $DEVICE: $(adb root &&' \ + 'adb shell cat /data/local.prop)"' + return 1 + fi + local DEVICES=$(adb_get_devices) + if [[ -z $DEVICES ]]; then + return + fi + # Do not change DEVICE variable name - part of api + for DEVICE in $DEVICES; do + DEV_TYPE=$(adb -s $DEVICE shell getprop ro.product.device | sed 's/\r//') + echo "Running on $DEVICE ($DEV_TYPE)" + ANDROID_SERIAL=$DEVICE eval "$*" + done +} + +# Erases data from any devices visible on adb. To preserve a device, +# disconnect it or: +# 1) Reboot it into fastboot with 'adb reboot bootloader' +# 2) Run wipe_all_devices to wipe remaining devices +# 3) Restore device it with 'fastboot reboot' +# +# Usage: wipe_all_devices [-f] +# +wipe_all_devices() { + if [[ -z $(which adb) || -z $(which fastboot) ]]; then + echo "aborting: adb and fastboot not in path" + return 1 + elif ! $(groups | grep -q 'plugdev'); then + echo "If fastboot fails, run: 'sudo adduser $(whoami) plugdev'" + fi + + local DEVICES=$(adb_get_devices -b) + + if [[ $1 != '-f' ]]; then + echo "This will ERASE ALL DATA from $(echo $DEVICES | wc -w) device." 
+ read -p "Hit enter to continue" + fi + + _adb_multi "$DEVICES" "reboot bootloader" + # Subshell to isolate job list + ( + for DEVICE in $DEVICES; do + fastboot_erase $DEVICE & + done + wait + ) + + # Reboot devices together + for DEVICE in $DEVICES; do + fastboot -s $DEVICE reboot + done +} + +# Wipe a device in fastboot. +# Usage fastboot_erase [serial] +fastboot_erase() { + if [[ -n $1 ]]; then + echo "Wiping $1" + local SERIAL="-s $1" + else + if [ -z $(fastboot devices) ]; then + echo "No devices in fastboot, aborting." + echo "Check out wipe_all_devices to see if sufficient" + echo "You can put a device in fastboot using adb reboot bootloader" + return 1 + fi + local SERIAL="" + fi + fastboot $SERIAL erase cache + fastboot $SERIAL erase userdata +} + +# Get list of devices connected via adb +# Args: -b block until adb detects a device +adb_get_devices() { + local DEVICES="$(adb devices | grep 'device$')" + if [[ -z $DEVICES && $1 == '-b' ]]; then + echo '- waiting for device -' >&2 + local DEVICES="$(adb wait-for-device devices | grep 'device$')" + fi + echo "$DEVICES" | awk -vORS=' ' '{print $1}' | sed 's/ $/\n/' +} + +################################################### +## HELPER FUNCTIONS +################################################### + +# Run an adb command in parallel over a device list +_adb_multi() { + local DEVICES=$1 + local ADB_ARGS=$2 + ( + for DEVICE in $DEVICES; do + adb -s $DEVICE $ADB_ARGS & + done + wait + ) +} diff --git a/build/android/adb_gdb b/build/android/adb_gdb new file mode 100755 index 00000000000..65ec7b20b87 --- /dev/null +++ b/build/android/adb_gdb @@ -0,0 +1,1047 @@ +#!/bin/bash +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# + +# A generic script used to attach to a running Chromium process and +# debug it. 
Most users should not use this directly, but one of the +# wrapper scripts like adb_gdb_content_shell +# +# Use --help to print full usage instructions. +# + +PROGNAME=$(basename "$0") +PROGDIR=$(dirname "$0") + +# Location of Chromium-top-level sources. +CHROMIUM_SRC=$(cd "$PROGDIR"/../.. >/dev/null && pwd 2>/dev/null) + +# Location of Chromium out/ directory. +if [ -z "$CHROMIUM_OUT_DIR" ]; then + CHROMIUM_OUT_DIR=out +fi + +TMPDIR= +GDBSERVER_PIDFILE= +TARGET_GDBSERVER= +COMMAND_PREFIX= + +clean_exit () { + if [ "$TMPDIR" ]; then + GDBSERVER_PID=$(cat $GDBSERVER_PIDFILE 2>/dev/null) + if [ "$GDBSERVER_PID" ]; then + log "Killing background gdbserver process: $GDBSERVER_PID" + kill -9 $GDBSERVER_PID >/dev/null 2>&1 + fi + if [ "$TARGET_GDBSERVER" ]; then + log "Removing target gdbserver binary: $TARGET_GDBSERVER." + "$ADB" shell "$COMMAND_PREFIX" rm "$TARGET_GDBSERVER" >/dev/null 2>&1 + fi + log "Cleaning up: $TMPDIR" + rm -rf "$TMPDIR" + fi + trap "" EXIT + exit $1 +} + +# Ensure clean exit on Ctrl-C or normal exit. +trap "clean_exit 1" INT HUP QUIT TERM +trap "clean_exit \$?" EXIT + +panic () { + echo "ERROR: $@" >&2 + exit 1 +} + +fail_panic () { + if [ $? != 0 ]; then panic "$@"; fi +} + +log () { + if [ "$VERBOSE" -gt 0 ]; then + echo "$@" + fi +} + +DEFAULT_PULL_LIBS_DIR=/tmp/$USER-adb-gdb-libs + +# NOTE: Allow wrapper scripts to set various default through ADB_GDB_XXX +# environment variables. This is only for cosmetic reasons, i.e. to +# display proper + +# Allow wrapper scripts to set the default activity through +# the ADB_GDB_ACTIVITY variable. Users are still able to change the +# final activity name through --activity= option. +# +# This is only for cosmetic reasons, i.e. to display the proper default +# in the --help output. 
# Parse command-line options.
# Each "--name=value" option has its value extracted into $optarg by the
# expr pattern below; flag options just set a boolean variable.
for opt; do
  optarg=$(expr "x$opt" : 'x[^=]*=\(.*\)')
  case $opt in
    --adb=*)
      ADB=$optarg
      ;;
    --activity=*)
      ACTIVITY=$optarg
      ;;
    # BUG FIX: was '--annotate=3)', which only matched the literal value 3.
    # Accept any annotation level; Emacs' M-x gdb still passes --annotate=3,
    # so this is backward compatible.
    --annotate=*)
      ANNOTATE=$optarg
      ;;
    --force)
      FORCE=true
      ;;
    --gdbserver=*)
      GDBSERVER=$optarg
      ;;
    --gdb=*)
      GDB=$optarg
      ;;
    --help|-h|-?)
      HELP=true
      ;;
    --ndk-dir=*)
      NDK_DIR=$optarg
      ;;
    --no-pull-libs)
      NO_PULL_LIBS=true
      ;;
    --package-name=*)
      PACKAGE_NAME=$optarg
      ;;
    --pid=*)
      PID=$optarg
      ;;
    --port=*)
      PORT=$optarg
      ;;
    --privileged)
      PRIVILEGED=true
      ;;
    --privileged=*)
      PRIVILEGED=true
      PRIVILEGED_INDEX=$optarg
      ;;
    --program-name=*)
      PROGRAM_NAME=$optarg
      ;;
    --pull-libs)
      PULL_LIBS=true
      ;;
    --pull-libs-dir=*)
      PULL_LIBS_DIR=$optarg
      ;;
    --sandboxed)
      SANDBOXED=true
      ;;
    --sandboxed=*)
      SANDBOXED=true
      SANDBOXED_INDEX=$optarg
      ;;
    --script=*)
      GDBINIT=$optarg
      ;;
    --start)
      START=true
      ;;
    --su-prefix=*)
      SU_PREFIX=$optarg
      ;;
    --symbol-dir=*)
      SYMBOL_DIR=$optarg
      ;;
    --out-dir=*)
      CHROMIUM_OUT_DIR=$optarg
      ;;
    --target-arch=*)
      TARGET_ARCH=$optarg
      ;;
    --toolchain=*)
      TOOLCHAIN=$optarg
      ;;
    --ui)
      GDBEXEPOSTFIX=gdbtui
      ;;
    --verbose)
      VERBOSE=$(( $VERBOSE + 1 ))
      ;;
    --debug)
      BUILDTYPE=Debug
      ;;
    --release)
      BUILDTYPE=Release
      ;;
    -*)
      # BUG FIX: the message referenced $OPT (undefined); the loop variable
      # is $opt, so the offending option name was never printed.
      panic "Unknown option $opt, see --help." >&2
      ;;
    *)
      # Bare argument: the package name. Only one is allowed.
      if [ "$PACKAGE_NAME" ]; then
        panic "You can only provide a single package name as argument!\
  See --help."
      fi
      PACKAGE_NAME=$opt
      ;;
  esac
done
The script will complain if there is more than one device connected +and ANDROID_SERIAL is not defined. + +The first time you use it on a device, the script will pull many system +libraries required by the process into a temporary directory. This +is done to strongly improve the debugging experience, like allowing +readable thread stacks and more. The libraries are copied to the following +directory by default: + + $DEFAULT_PULL_LIBS_DIR/ + +But you can use the --pull-libs-dir= option to specify an +alternative. The script can detect when you change the connected device, +and will re-pull the libraries only in this case. You can however force it +with the --pull-libs option. + +Any local .gdbinit script will be ignored, but it is possible to pass a +gdb command script with the --script= option. Note that its commands +will be passed to gdb after the remote connection and library symbol +loading have completed. + +Valid options: + --help|-h|-? Print this message. + --verbose Increase verbosity. + + --sandboxed Debug first sandboxed process we find. + --sandboxed= Debug specific sandboxed process. + --symbol-dir= Specify directory with symbol shared libraries. + --out-dir= Specify the out directory. + --package-name= Specify package name (alternative to 1st argument). + --privileged Debug first privileged process we find. + --privileged= Debug specific privileged process. + --program-name= Specify program name (cosmetic only). + --pid= Specify application process pid. + --force Kill any previous debugging session, if any. + --start Start package's activity on device. + --ui Use gdbtui instead of gdb + --activity= Activity name for --start [$DEFAULT_ACTIVITY]. + --annotate= Enable gdb annotation. + --script= Specify extra GDB init script. + + --gdbserver= Specify target gdbserver binary. + --gdb= Specify host gdb client binary. + --target-arch= Specify NDK target arch. + --adb= Specify host ADB binary. + --port= Specify the tcp port to use. 
+ + --su-prefix= Prepend to 'adb shell' commands that are + run by this script. This can be useful to use + the 'su' program on rooted production devices. + e.g. --su-prefix="su -c" + + --pull-libs Force system libraries extraction. + --no-pull-libs Do not extract any system library. + --libs-dir= Specify system libraries extraction directory. + + --debug Use libraries under out/Debug. + --release Use libraries under out/Release. + +EOF + exit 0 +fi + +if [ -z "$PACKAGE_NAME" ]; then + panic "Please specify a package name on the command line. See --help." +fi + +if [ -z "$NDK_DIR" ]; then + ANDROID_NDK_ROOT=$(PYTHONPATH=$CHROMIUM_SRC/build/android python -c \ +'from pylib.constants import ANDROID_NDK_ROOT; print ANDROID_NDK_ROOT,') +else + if [ ! -d "$NDK_DIR" ]; then + panic "Invalid directory: $NDK_DIR" + fi + if [ ! -f "$NDK_DIR/ndk-build" ]; then + panic "Not a valid NDK directory: $NDK_DIR" + fi + ANDROID_NDK_ROOT=$NDK_DIR +fi + +if [ "$GDBINIT" -a ! -f "$GDBINIT" ]; then + panic "Unknown --script file: $GDBINIT" +fi + +# Check that ADB is in our path +if [ -z "$ADB" ]; then + ADB=$(which adb 2>/dev/null) + if [ -z "$ADB" ]; then + panic "Can't find 'adb' tool in your path. Install it or use \ +--adb=" + fi + log "Auto-config: --adb=$ADB" +fi + +# Check that it works minimally +ADB_VERSION=$($ADB version 2>/dev/null) +echo "$ADB_VERSION" | fgrep -q -e "Android Debug Bridge" +if [ $? != 0 ]; then + panic "Your 'adb' tool seems invalid, use --adb= to specify a \ +different one: $ADB" +fi + +# If there are more than one device connected, and ANDROID_SERIAL is not +# defined, print an error message. +NUM_DEVICES_PLUS2=$($ADB devices 2>/dev/null | wc -l) +if [ "$NUM_DEVICES_PLUS2" -lt 3 -a -z "$ANDROID_SERIAL" ]; then + echo "ERROR: There is more than one Android device connected to ADB." + echo "Please define ANDROID_SERIAL to specify which one to use." 
# Run a command through adb shell, strip the extra \r from the output
# and return the correct status code to detect failures. This assumes
# that the adb shell command prints a final \n to stdout.
# $1+: command to run
# Out: command's stdout
# Return: command's status
# Note: the command's stderr is lost
#
# Rationale: classic 'adb shell' always exits 0 regardless of the remote
# command's status, so the remote status is smuggled back on stdout as a
# "%%<status>" suffix appended to the last output line, then parsed out.
adb_shell () {
  local TMPOUT="$(mktemp)"
  local LASTLINE RET
  # Fall back to 'adb' from PATH if $ADB was not configured yet.
  local ADB=${ADB:-adb}

  # The weird sed rule is to strip the final \r on each output line
  # Since 'adb shell' never returns the command's proper exit/status code,
  # we force it to print it as '%%' in the temporary output file,
  # which we will later strip from it.
  $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | \
      sed -e 's![[:cntrl:]]!!g' > $TMPOUT
  # Get last line in log, which contains the exit code from the command
  LASTLINE=$(sed -e '$!d' $TMPOUT)
  # Extract the status code from the end of the line, which must
  # be '%%'.
  RET=$(echo "$LASTLINE" | \
    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }')
  # Remove the status code from the last line. Note that this may result
  # in an empty line.
  LASTLINE=$(echo "$LASTLINE" | \
    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }')
  # The output itself: all lines except the status code.
  # 'sed $d' drops the last tmp-file line; the stripped remainder of that
  # line is re-emitted (without a trailing newline) by printf.
  sed -e '$d' $TMPOUT && printf "%s" "$LASTLINE"
  # Remove temp file.
  rm -f $TMPOUT
  # Exit with the appropriate status.
  return $RET
}
# Detect the NDK host architecture name.
# out: NDK arch name (e.g. 'x86' or 'x86_64')
#
# The result is cached in the global NDK_HOST_ARCH, so a wrapper script may
# also pre-set that variable to skip detection entirely.
get_ndk_host_arch () {
  local HOST_ARCH HOST_OS
  if [ -z "$NDK_HOST_ARCH" ]; then
    HOST_OS=$(get_ndk_host_system)
    HOST_ARCH=$(uname -p)
    case $HOST_ARCH in
      i?86) NDK_HOST_ARCH=x86;;
      x86_64|amd64) NDK_HOST_ARCH=x86_64;;
      *) panic "You can't run this script on this host architecture: $HOST_ARCH";;
    esac
    # Darwin trick: "uname -p" always returns i386 on 64-bit installations.
    if [ "$HOST_OS" = darwin -a "$NDK_HOST_ARCH" = "x86" ]; then
      # Use '/usr/bin/file', not just 'file' to avoid buggy MacPorts
      # implementations of the tool. See http://b.android.com/53769
      # Probing $SHELL's binary format tells us the real host bitness.
      # NOTE(review): HOST_64BITS is not declared 'local' here, so it leaks
      # into the global scope — harmless today, but worth confirming.
      HOST_64BITS=$(/usr/bin/file -L "$SHELL" | grep -e "x86[_-]64")
      if [ "$HOST_64BITS" ]; then
        NDK_HOST_ARCH=x86_64
      fi
    fi
  fi
  echo "$NDK_HOST_ARCH"
}
# Convert an NDK architecture name into a GNU configure triplet.
# $1: NDK architecture name (e.g. 'arm')
# Out: Android GNU configure triplet (e.g. 'arm-linux-androideabi')
get_arch_gnu_config () {
  case $1 in
    arm)
      echo "arm-linux-androideabi"
      ;;
    arm64)
      echo "aarch64-linux-android"
      ;;
    x86)
      echo "i686-linux-android"
      ;;
    x86_64)
      echo "x86_64-linux-android"
      ;;
    mips)
      echo "mipsel-linux-android"
      ;;
    *)
      # BUG FIX: the fallback referenced $ARCH, which is not a parameter of
      # this function — it only worked because bash locals named ARCH in
      # the callers were dynamically visible here. Use $1 directly.
      echo "$1-linux-android"
      ;;
  esac
}
# Locate the NDK's target gdbserver binary.
# $1: NDK install path
# $2: target architecture.
# Out: path to gdbserver (may not exist if the NDK layout is unknown).
get_ndk_gdbserver () {
  local ndk_root="$1"
  local target_arch="$2"
  local candidate

  # NDK r8 and later place gdbserver under prebuilt/android-<arch>/.
  candidate=$ndk_root/prebuilt/android-$target_arch/gdbserver/gdbserver
  if [ -f "$candidate" ]; then
    echo "$candidate"
    return
  fi
  # Older NDKs shipped gdbserver inside the toolchain prebuilt tree.
  echo "$(get_ndk_toolchain_prebuilt "$ndk_root" "$target_arch" gdbserver)"
}
+ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN%/} + +# Find host GDB client binary +if [ -z "$GDB" ]; then + GDB=$(which $ANDROID_TOOLCHAIN/*-$GDBEXEPOSTFIX 2>/dev/null | head -1) + if [ -z "$GDB" ]; then + panic "Can't find Android gdb client in your path, check your \ +--toolchain or --gdb path." + fi + log "Host gdb client: $GDB" +fi + +# Find gdbserver binary, we will later push it to /data/local/tmp +# This ensures that both gdbserver and $GDB talk the same binary protocol, +# otherwise weird problems will appear. +# +if [ -z "$GDBSERVER" ]; then + GDBSERVER=$(get_ndk_gdbserver "$ANDROID_NDK_ROOT" "$TARGET_ARCH") + if [ -z "$GDBSERVER" ]; then + panic "Can't find NDK gdbserver binary. use --gdbserver to specify \ +valid one!" + fi + log "Auto-config: --gdbserver=$GDBSERVER" +fi + +# A unique ID for this script's session. This needs to be the same in all +# sub-shell commands we're going to launch, so take the PID of the launcher +# process. +TMP_ID=$$ + +# Temporary directory, will get cleaned up on exit. +TMPDIR=/tmp/$USER-adb-gdb-tmp-$TMP_ID +mkdir -p "$TMPDIR" && rm -rf "$TMPDIR"/* + +GDBSERVER_PIDFILE="$TMPDIR"/gdbserver-$TMP_ID.pid + +# If --force is specified, try to kill any gdbserver process started by the +# same user on the device. Normally, these are killed automatically by the +# script on exit, but there are a few corner cases where this would still +# be needed. +if [ "$FORCE" ]; then + GDBSERVER_PIDS=$(adb_shell ps | awk '$9 ~ /gdbserver/ { print $2; }') + for GDB_PID in $GDBSERVER_PIDS; do + log "Killing previous gdbserver (PID=$GDB_PID)" + adb_shell kill -9 $GDB_PID + done +fi + +if [ "$START" ]; then + log "Starting $PROGRAM_NAME on device." + adb_shell am start -n $PACKAGE_NAME/$ACTIVITY 2>/dev/null + adb_shell ps | grep -q $PACKAGE_NAME + fail_panic "Could not start $PROGRAM_NAME on device. Are you sure the \ +package is installed?" +fi + +# Return the timestamp of a given time, as number of seconds since epoch. 
# Detect the build type and symbol directory. This is done by finding
# the most recent sub-directory containing debug shared libraries under
# $CHROMIUM_SRC/$CHROMIUM_OUT_DIR/
#
# $1: $BUILDTYPE value, can be empty
# Out: nothing, but this sets SYMBOL_DIR
#
detect_symbol_dir () {
  local SUBDIRS SUBDIR LIST DIR DIR_LIBS TSTAMP
  # Note: Ninja places debug libraries under out/$BUILDTYPE/lib/, while
  # Make places them under out/$BUILDTYPE/lib.target.
  if [ "$1" ]; then
    SUBDIRS="$1/lib $1/lib.target"
  else
    # No build type forced: consider both Release and Debug, both layouts.
    SUBDIRS="Release/lib Debug/lib Release/lib.target Debug/lib.target"
  fi
  # Scratch file collecting "<timestamp> <subdir>" lines for each candidate.
  LIST=$TMPDIR/scan-subdirs-$$.txt
  printf "" > "$LIST"
  for SUBDIR in $SUBDIRS; do
    DIR=$CHROMIUM_SRC/$CHROMIUM_OUT_DIR/$SUBDIR
    if [ -d "$DIR" ]; then
      # Ignore build directories that don't contain symbol versions
      # of the shared libraries.
      DIR_LIBS=$(ls "$DIR"/lib*.so 2>/dev/null)
      if [ -z "$DIR_LIBS" ]; then
        # NOTE(review): this diagnostic goes to stdout, not stderr — the
        # function returns its result via the SYMBOL_DIR global, so this is
        # harmless, but worth confirming if the function is ever captured.
        echo "No shared libs: $DIR"
        continue
      fi
      TSTAMP=$(get_file_timestamp "$DIR")
      printf "%s %s\n" "$TSTAMP" "$SUBDIR" >> "$LIST"
    fi
  done
  # Pick the sub-directory with the newest timestamp (sort -r puts it first).
  SUBDIR=$(cat $LIST | sort -r | head -1 | cut -d" " -f2)
  rm -f "$LIST"

  if [ -z "$SUBDIR" ]; then
    if [ -z "$1" ]; then
      panic "Could not find any build directory under \
$CHROMIUM_SRC/$CHROMIUM_OUT_DIR. Please build the program first!"
    else
      panic "Could not find any $1 directory under \
$CHROMIUM_SRC/$CHROMIUM_OUT_DIR. Check your build type!"
    fi
  fi

  SYMBOL_DIR=$CHROMIUM_SRC/$CHROMIUM_OUT_DIR/$SUBDIR
  log "Auto-config: --symbol-dir=$SYMBOL_DIR"
}
# Return the build fingerprint contained in a build.prop file.
# $1: path to build.prop file
# Out: the value of the ro.build.fingerprint property (empty if absent).
get_build_fingerprint_from () {
  # grep reads the file directly; the previous 'cat | grep' pipeline was a
  # useless use of cat.
  grep -e '^ro.build.fingerprint=' "$1" | cut -d= -f2
}
+if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then + echo "Extracting system libraries into: $PULL_LIBS_DIR" +fi + +mkdir -p "$PULL_LIBS_DIR" +fail_panic "Can't create --libs-dir directory: $PULL_LIBS_DIR" + +# If requested, work for M-x gdb. The gdb indirections make it +# difficult to pass --annotate=3 to the gdb binary itself. +GDB_ARGS= +if [ "$ANNOTATE" ]; then + GDB_ARGS=$GDB_ARGS" --annotate=$ANNOTATE" +fi + +# Get the PID from the first argument or else find the PID of the +# browser process. +if [ -z "$PID" ]; then + PROCESSNAME=$PACKAGE_NAME + if [ "$SANDBOXED_INDEX" ]; then + PROCESSNAME=$PROCESSNAME:sandboxed_process$SANDBOXED_INDEX + elif [ "$SANDBOXED" ]; then + PROCESSNAME=$PROCESSNAME:sandboxed_process + PID=$(adb_shell ps | \ + awk '$9 ~ /^'$PROCESSNAME'/ { print $2; }' | head -1) + elif [ "$PRIVILEGED_INDEX" ]; then + PROCESSNAME=$PROCESSNAME:privileged_process$PRIVILEGED_INDEX + elif [ "$PRIVILEGED" ]; then + PROCESSNAME=$PROCESSNAME:privileged_process + PID=$(adb_shell ps | \ + awk '$9 ~ /^'$PROCESSNAME'/ { print $2; }' | head -1) + fi + if [ -z "$PID" ]; then + PID=$(adb_shell ps | \ + awk '$9 == "'$PROCESSNAME'" { print $2; }' | head -1) + fi + if [ -z "$PID" ]; then + if [ "$START" ]; then + panic "Can't find application process PID, did it crash?" + else + panic "Can't find application process PID, are you sure it is \ +running? Try using --start." + fi + fi + log "Found process PID: $PID" +elif [ "$SANDBOXED" ]; then + echo "WARNING: --sandboxed option ignored due to use of --pid." +elif [ "$PRIVILEGED" ]; then + echo "WARNING: --privileged option ignored due to use of --pid." +fi + +# Determine if 'adb shell' runs as root or not. +# If so, we can launch gdbserver directly, otherwise, we have to +# use run-as $PACKAGE_NAME ..., which requires the package to be debuggable. +# +if [ "$SU_PREFIX" ]; then + # Need to check that this works properly. 
+ SU_PREFIX_TEST_LOG=$TMPDIR/su-prefix.log + adb_shell $SU_PREFIX \"echo "foo"\" > $SU_PREFIX_TEST_LOG 2>&1 + if [ $? != 0 -o "$(cat $SU_PREFIX_TEST_LOG)" != "foo" ]; then + echo "ERROR: Cannot use '$SU_PREFIX' as a valid su prefix:" + echo "$ adb shell $SU_PREFIX \"echo foo\"" + cat $SU_PREFIX_TEST_LOG + exit 1 + fi + COMMAND_PREFIX="$SU_PREFIX \"" + COMMAND_SUFFIX="\"" +else + SHELL_UID=$(adb shell cat /proc/self/status | \ + awk '$1 == "Uid:" { print $2; }') + log "Shell UID: $SHELL_UID" + if [ "$SHELL_UID" != 0 -o -n "$NO_ROOT" ]; then + COMMAND_PREFIX="run-as $PACKAGE_NAME" + COMMAND_SUFFIX= + else + COMMAND_PREFIX= + COMMAND_SUFFIX= + fi +fi +log "Command prefix: '$COMMAND_PREFIX'" +log "Command suffix: '$COMMAND_SUFFIX'" + +# Pull device's system libraries that are mapped by our process. +# Pulling all system libraries is too long, so determine which ones +# we need by looking at /proc/$PID/maps instead +if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then + echo "Extracting system libraries into: $PULL_LIBS_DIR" + rm -f $PULL_LIBS_DIR/build.prop + MAPPINGS=$(adb_shell $COMMAND_PREFIX cat /proc/$PID/maps $COMMAND_SUFFIX) + if [ $? != 0 ]; then + echo "ERROR: Could not list process's memory mappings." + if [ "$SU_PREFIX" ]; then + panic "Are you sure your --su-prefix is correct?" + else + panic "Use --su-prefix if the application is not debuggable." + fi + fi + SYSTEM_LIBS=$(echo "$MAPPINGS" | \ + awk '$6 ~ /\/system\/.*\.so$/ { print $6; }' | sort -u) + for SYSLIB in /system/bin/linker $SYSTEM_LIBS; do + echo "Pulling from device: $SYSLIB" + DST_FILE=$PULL_LIBS_DIR$SYSLIB + DST_DIR=$(dirname "$DST_FILE") + mkdir -p "$DST_DIR" && adb pull $SYSLIB "$DST_FILE" 2>/dev/null + fail_panic "Could not pull $SYSLIB from device !?" + done + echo "Pulling device build.prop" + adb pull /system/build.prop $PULL_LIBS_DIR/build.prop + fail_panic "Could not pull device build.prop !?" 
+fi + +# Find all the sub-directories of $PULL_LIBS_DIR, up to depth 4 +# so we can add them to solib-search-path later. +SOLIB_DIRS=$(find $PULL_LIBS_DIR -mindepth 1 -maxdepth 4 -type d | \ + grep -v "^$" | tr '\n' ':') + +# This is a re-implementation of gdbclient, where we use compatible +# versions of gdbserver and $GDBNAME to ensure that everything works +# properly. +# + +# Push gdbserver to the device +log "Pushing gdbserver $GDBSERVER to $TARGET_GDBSERVER" +adb push $GDBSERVER $TMP_TARGET_GDBSERVER &>/dev/null +adb shell $COMMAND_PREFIX cp $TMP_TARGET_GDBSERVER $TARGET_GDBSERVER +adb shell rm $TMP_TARGET_GDBSERVER +fail_panic "Could not copy gdbserver to the device!" + +if [ -z "$PORT" ]; then + PORT=5039 +fi +HOST_PORT=$PORT +TARGET_PORT=$PORT + +# Select correct app_process for architecture. +case $TARGET_ARCH in + arm|x86|mips) GDBEXEC=app_process;; + arm64|x86_64) GDBEXEC=app_process64;; + *) fail_panic "Unknown app_process for architecture!";; +esac + +# Detect AddressSanitizer setup on the device. In that case app_process is a +# script, and the real executable is app_process.real. +GDBEXEC_ASAN=app_process.real +adb_shell ls /system/bin/$GDBEXEC_ASAN +if [ $? == 0 ]; then + GDBEXEC=$GDBEXEC_ASAN +fi + +# Pull the app_process binary from the device. +log "Pulling $GDBEXEC from device" +adb pull /system/bin/$GDBEXEC "$TMPDIR"/$GDBEXEC &>/dev/null +fail_panic "Could not retrieve $GDBEXEC from the device!" + +# Setup network redirection +log "Setting network redirection (host:$HOST_PORT -> device:$TARGET_PORT)" +adb forward tcp:$HOST_PORT tcp:$TARGET_PORT +fail_panic "Could not setup network redirection from \ +host:localhost:$HOST_PORT to device:localhost:$TARGET_PORT!" + +# Start gdbserver in the background +# Note that using run-as requires the package to be debuggable. +# +# If not, this will fail horribly. The alternative is to run the +# program as root, which requires of course root privileges. 
+# Maybe we should add a --root option to enable this? +# +log "Starting gdbserver in the background:" +GDBSERVER_LOG=$TMPDIR/gdbserver-$TMP_ID.log +log "adb shell $COMMAND_PREFIX $TARGET_GDBSERVER :$TARGET_PORT \ +--attach $PID $COMMAND_SUFFIX" +("$ADB" shell $COMMAND_PREFIX $TARGET_GDBSERVER :$TARGET_PORT \ + --attach $PID $COMMAND_SUFFIX > $GDBSERVER_LOG 2>&1) & +GDBSERVER_PID=$! +echo "$GDBSERVER_PID" > $GDBSERVER_PIDFILE +log "background job pid: $GDBSERVER_PID" + +# Check that it is still running after a few seconds. If not, this means we +# could not properly attach to it +sleep 2 +log "Job control: $(jobs -l)" +STATE=$(jobs -l | awk '$2 == "'$GDBSERVER_PID'" { print $3; }') +if [ "$STATE" != "Running" ]; then + echo "ERROR: GDBServer could not attach to PID $PID!" + if [ $(adb_shell su -c getenforce) != "Permissive" ]; then + echo "Device mode is Enforcing. Changing Device mode to Permissive " + $(adb_shell su -c setenforce 0) + if [ $(adb_shell su -c getenforce) != "Permissive" ]; then + echo "ERROR: Failed to Change Device mode to Permissive" + echo "Failure log (use --verbose for more information):" + cat $GDBSERVER_LOG + exit 1 + fi + else + echo "Failure log (use --verbose for more information):" + cat $GDBSERVER_LOG + exit 1 + fi +fi + +# Generate a file containing useful GDB initialization commands +readonly COMMANDS=$TMPDIR/gdb.init +log "Generating GDB initialization commands file: $COMMANDS" +echo -n "" > $COMMANDS +echo "set print pretty 1" >> $COMMANDS +echo "python" >> $COMMANDS +echo "import sys" >> $COMMANDS +echo "sys.path.insert(0, '$CHROMIUM_SRC/tools/gdb/')" >> $COMMANDS +echo "try:" >> $COMMANDS +echo " import gdb_chrome" >> $COMMANDS +echo "finally:" >> $COMMANDS +echo " sys.path.pop(0)" >> $COMMANDS +echo "end" >> $COMMANDS +echo "file $TMPDIR/$GDBEXEC" >> $COMMANDS +echo "directory $CHROMIUM_SRC" >> $COMMANDS +echo "set solib-absolute-prefix $PULL_LIBS_DIR" >> $COMMANDS +echo "set solib-search-path 
$SOLIB_DIRS:$PULL_LIBS_DIR:$SYMBOL_DIR" \ + >> $COMMANDS +echo "echo Attaching and reading symbols, this may take a while.." \ + >> $COMMANDS +echo "target remote :$HOST_PORT" >> $COMMANDS + +if [ "$GDBINIT" ]; then + cat "$GDBINIT" >> $COMMANDS +fi + +if [ "$VERBOSE" -gt 0 ]; then + echo "### START $COMMANDS" + cat $COMMANDS + echo "### END $COMMANDS" +fi + +log "Launching gdb client: $GDB $GDB_ARGS -x $COMMANDS" +$GDB $GDB_ARGS -x $COMMANDS && +rm -f "$GDBSERVER_PIDFILE" diff --git a/build/android/adb_gdb_android_webview_shell b/build/android/adb_gdb_android_webview_shell new file mode 100755 index 00000000000..f685fda77c5 --- /dev/null +++ b/build/android/adb_gdb_android_webview_shell @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Attach to or start a ContentShell process and debug it. +# See --help for details. +# +PROGDIR=$(dirname "$0") +export ADB_GDB_PROGNAME=$(basename "$0") +export ADB_GDB_ACTIVITY=.AwShellActivity +"$PROGDIR"/adb_gdb \ + --program-name=AwShellApplication \ + --package-name=org.chromium.android_webview.shell \ + "$@" diff --git a/build/android/adb_gdb_chrome_public b/build/android/adb_gdb_chrome_public new file mode 100755 index 00000000000..4366c838e78 --- /dev/null +++ b/build/android/adb_gdb_chrome_public @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Attach to or start a ChromePublic process and debug it. +# See --help for details. 
+# +PROGDIR=$(dirname "$0") +export ADB_GDB_PROGNAME=$(basename "$0") +export ADB_GDB_ACTIVITY=com.google.android.apps.chrome.Main +"$PROGDIR"/adb_gdb \ + --program-name=ChromePublic \ + --package-name=org.chromium.chrome \ + "$@" diff --git a/build/android/adb_gdb_chrome_shell b/build/android/adb_gdb_chrome_shell new file mode 100755 index 00000000000..e5c8a306be2 --- /dev/null +++ b/build/android/adb_gdb_chrome_shell @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Attach to or start a ChromeShell process and debug it. +# See --help for details. +# +PROGDIR=$(dirname "$0") +export ADB_GDB_PROGNAME=$(basename "$0") +export ADB_GDB_ACTIVITY=.ChromeShellActivity +"$PROGDIR"/adb_gdb \ + --program-name=ChromeShell \ + --package-name=org.chromium.chrome.shell \ + "$@" diff --git a/build/android/adb_gdb_content_shell b/build/android/adb_gdb_content_shell new file mode 100755 index 00000000000..18e1a61d893 --- /dev/null +++ b/build/android/adb_gdb_content_shell @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Attach to or start a ContentShell process and debug it. +# See --help for details. +# +PROGDIR=$(dirname "$0") +export ADB_GDB_PROGNAME=$(basename "$0") +export ADB_GDB_ACTIVITY=.ContentShellActivity +"$PROGDIR"/adb_gdb \ + --program-name=ContentShell \ + --package-name=org.chromium.content_shell_apk \ + "$@" diff --git a/build/android/adb_gdb_cronet_sample b/build/android/adb_gdb_cronet_sample new file mode 100755 index 00000000000..8d0c864d133 --- /dev/null +++ b/build/android/adb_gdb_cronet_sample @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2014 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Attach to or start a ContentShell process and debug it. +# See --help for details. +# +PROGDIR=$(dirname "$0") +export ADB_GDB_PROGNAME=$(basename "$0") +export ADB_GDB_ACTIVITY=.CronetSampleActivity +"$PROGDIR"/adb_gdb \ + --program-name=CronetSample \ + --package-name=org.chromium.cronet_sample_apk \ + "$@" diff --git a/build/android/adb_gdb_mojo_shell b/build/android/adb_gdb_mojo_shell new file mode 100755 index 00000000000..ba91149cce9 --- /dev/null +++ b/build/android/adb_gdb_mojo_shell @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Attach to or start a ContentShell process and debug it. +# See --help for details. +# +PROGDIR=$(dirname "$0") +export ADB_GDB_PROGNAME=$(basename "$0") +export ADB_GDB_ACTIVITY=.MojoShellActivity +"$PROGDIR"/adb_gdb \ + --program-name=MojoShell \ + --package-name=org.chromium.mojo_shell_apk \ + "$@" diff --git a/build/android/adb_install_apk.py b/build/android/adb_install_apk.py new file mode 100755 index 00000000000..50faea7d291 --- /dev/null +++ b/build/android/adb_install_apk.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
def main():
  """Parses arguments, resolves the APK (and optional splits) and installs
  it on all attached healthy devices in parallel.

  Devices on which installation fails or times out are blacklisted rather
  than aborting the whole run.
  """
  parser = argparse.ArgumentParser()

  apk_group = parser.add_mutually_exclusive_group(required=True)
  apk_group.add_argument('--apk', dest='apk_name',
                         help='DEPRECATED The name of the apk containing the'
                              ' application (with the .apk extension).')
  apk_group.add_argument('apk_path', nargs='?',
                         help='The path to the APK to install.')

  # TODO(jbudorick): Remove once no clients pass --apk_package
  parser.add_argument('--apk_package', help='DEPRECATED unused')
  parser.add_argument('--split',
                      action='append',
                      dest='splits',
                      help='A glob matching the apk splits. '
                           'Can be specified multiple times.')
  parser.add_argument('--keep_data',
                      action='store_true',
                      default=False,
                      help='Keep the package data when installing '
                           'the application.')
  parser.add_argument('--debug', action='store_const', const='Debug',
                      dest='build_type',
                      default=os.environ.get('BUILDTYPE', 'Debug'),
                      help='If set, run test suites under out/Debug. '
                           'Default is env var BUILDTYPE or Debug')
  parser.add_argument('--release', action='store_const', const='Release',
                      dest='build_type',
                      help='If set, run test suites under out/Release. '
                           'Default is env var BUILDTYPE or Debug.')
  parser.add_argument('-d', '--device', dest='device',
                      help='Target device for apk to install on.')
  parser.add_argument('-v', '--verbose', action='count',
                      help='Enable verbose logging.')

  args = parser.parse_args()

  run_tests_helper.SetLogLevel(args.verbose)
  constants.SetBuildType(args.build_type)

  # Resolve the APK: accept either a full path or a bare name, falling
  # back to the build output's apks/ directory when not found as given.
  apk = args.apk_path or args.apk_name
  if not apk.endswith('.apk'):
    apk += '.apk'
  if not os.path.exists(apk):
    apk = os.path.join(constants.GetOutDirectory(), 'apks', apk)
    if not os.path.exists(apk):
      parser.error('%s not found.' % apk)

  if args.splits:
    splits = []
    base_apk_package = apk_helper.ApkHelper(apk).GetPackageName()
    for split_glob in args.splits:
      apks = [f for f in glob.glob(split_glob) if f.endswith('.apk')]
      if not apks:
        # Lazy %-style args: the message is only formatted if emitted.
        logging.warning('No apks matched for %s.', split_glob)
      # Only keep splits that belong to the base APK's package.
      for f in apks:
        helper = apk_helper.ApkHelper(f)
        if (helper.GetPackageName() == base_apk_package
            and helper.GetSplitName()):
          splits.append(f)

  devices = device_utils.DeviceUtils.HealthyDevices()

  if args.device:
    devices = [d for d in devices if d == args.device]
    if not devices:
      raise device_errors.DeviceUnreachableError(args.device)
  elif not devices:
    raise device_errors.NoDevicesError()

  def blacklisting_install(device):
    # Install on one device; on failure, blacklist that device so later
    # steps skip it rather than failing the whole run.
    try:
      if args.splits:
        device.InstallSplitApk(apk, splits, reinstall=args.keep_data)
      else:
        device.Install(apk, reinstall=args.keep_data)
    except device_errors.CommandFailedError:
      # Log the resolved path |apk|, not args.apk_name, which is None
      # whenever the positional apk_path form was used.
      logging.exception('Failed to install %s', apk)
      device_blacklist.ExtendBlacklist([str(device)])
      logging.warning('Blacklisting %s', str(device))
    except device_errors.CommandTimeoutError:
      logging.exception('Timed out while installing %s', apk)
      device_blacklist.ExtendBlacklist([str(device)])
      logging.warning('Blacklisting %s', str(device))

  device_utils.DeviceUtils.parallel(devices).pMap(blacklisting_install)
sys.exit(main()) + diff --git a/build/android/adb_kill_android_webview_shell b/build/android/adb_kill_android_webview_shell new file mode 100755 index 00000000000..5f287f08266 --- /dev/null +++ b/build/android/adb_kill_android_webview_shell @@ -0,0 +1,24 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Kill a running android webview shell. +# +# Assumes you have sourced the build/android/envsetup.sh script. + +SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.android_webview.shell') +VAL=$(echo "$SHELL_PID_LINES" | wc -l) +if [ $VAL -lt 1 ] ; then + echo "Not running android webview shell." +else + SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}') + if [ "$SHELL_PID" != "" ] ; then + set -x + adb shell kill $SHELL_PID + set - + else + echo "Android webview shell does not appear to be running." + fi +fi diff --git a/build/android/adb_kill_chrome_public b/build/android/adb_kill_chrome_public new file mode 100755 index 00000000000..5b539a043d4 --- /dev/null +++ b/build/android/adb_kill_chrome_public @@ -0,0 +1,24 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Kill a running instance of ChromePublic. +# +# Assumes you have sourced the build/android/envsetup.sh script. + +SHELL_PID_LINES=$(adb shell ps | grep -w 'org.chromium.chrome') +VAL=$(echo "$SHELL_PID_LINES" | wc -l) +if [ $VAL -lt 1 ] ; then + echo "Not running ChromePublic." +else + SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}') + if [ "$SHELL_PID" != "" ] ; then + set -x + adb shell kill $SHELL_PID + set - + else + echo "ChromePublic does not appear to be running." 
+ fi +fi diff --git a/build/android/adb_kill_chrome_shell b/build/android/adb_kill_chrome_shell new file mode 100755 index 00000000000..2b63c9af3d2 --- /dev/null +++ b/build/android/adb_kill_chrome_shell @@ -0,0 +1,24 @@ +#!/bin/bash +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Kill a running chrome shell. +# +# Assumes you have sourced the build/android/envsetup.sh script. + +SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.chrome.shell') +VAL=$(echo "$SHELL_PID_LINES" | wc -l) +if [ $VAL -lt 1 ] ; then + echo "Not running Chrome shell." +else + SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}') + if [ "$SHELL_PID" != "" ] ; then + set -x + adb shell kill $SHELL_PID + set - + else + echo "Chrome shell does not appear to be running." + fi +fi diff --git a/build/android/adb_kill_content_shell b/build/android/adb_kill_content_shell new file mode 100755 index 00000000000..e379dd47149 --- /dev/null +++ b/build/android/adb_kill_content_shell @@ -0,0 +1,24 @@ +#!/bin/bash +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Kill a running content shell. +# +# Assumes you have sourced the build/android/envsetup.sh script. + +SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.content_shell_apk') +VAL=$(echo "$SHELL_PID_LINES" | wc -l) +if [ $VAL -lt 1 ] ; then + echo "Not running Content shell." +else + SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}') + if [ "$SHELL_PID" != "" ] ; then + set -x + adb shell kill $SHELL_PID + set - + else + echo "Content shell does not appear to be running." 
+ fi +fi diff --git a/build/android/adb_logcat_monitor.py b/build/android/adb_logcat_monitor.py new file mode 100755 index 00000000000..d3cc67dbcc8 --- /dev/null +++ b/build/android/adb_logcat_monitor.py @@ -0,0 +1,156 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Saves logcats from all connected devices. + +Usage: adb_logcat_monitor.py [] + +This script will repeatedly poll adb for new devices and save logcats +inside the directory, which it attempts to create. The +script will run until killed by an external signal. To test, run the +script in a shell and -C it after a while. It should be +resilient across phone disconnects and reconnects and start the logcat +early enough to not miss anything. +""" + +import logging +import os +import re +import shutil +import signal +import subprocess +import sys +import time + +# Map from device_id -> (process, logcat_num) +devices = {} + + +class TimeoutException(Exception): + """Exception used to signal a timeout.""" + pass + + +class SigtermError(Exception): + """Exception used to catch a sigterm.""" + pass + + +def StartLogcatIfNecessary(device_id, adb_cmd, base_dir): + """Spawns a adb logcat process if one is not currently running.""" + process, logcat_num = devices[device_id] + if process: + if process.poll() is None: + # Logcat process is still happily running + return + else: + logging.info('Logcat for device %s has died', device_id) + error_filter = re.compile('- waiting for device -') + for line in process.stderr: + if not error_filter.match(line): + logging.error(device_id + ': ' + line) + + logging.info('Starting logcat %d for device %s', logcat_num, + device_id) + logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num) + logcat_file = open(os.path.join(base_dir, logcat_filename), 'w') + process = subprocess.Popen([adb_cmd, '-s', device_id, + 'logcat', 
'-v', 'threadtime'], + stdout=logcat_file, + stderr=subprocess.PIPE) + devices[device_id] = (process, logcat_num + 1) + + +def GetAttachedDevices(adb_cmd): + """Gets the device list from adb. + + We use an alarm in this function to avoid deadlocking from an external + dependency. + + Args: + adb_cmd: binary to run adb + + Returns: + list of devices or an empty list on timeout + """ + signal.alarm(2) + try: + out, err = subprocess.Popen([adb_cmd, 'devices'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE).communicate() + if err: + logging.warning('adb device error %s', err.strip()) + return re.findall('^(\\S+)\tdevice$', out, re.MULTILINE) + except TimeoutException: + logging.warning('"adb devices" command timed out') + return [] + except (IOError, OSError): + logging.exception('Exception from "adb devices"') + return [] + finally: + signal.alarm(0) + + +def main(base_dir, adb_cmd='adb'): + """Monitor adb forever. Expects a SIGINT (Ctrl-C) to kill.""" + # We create the directory to ensure 'run once' semantics + if os.path.exists(base_dir): + print 'adb_logcat_monitor: %s already exists? Cleaning' % base_dir + shutil.rmtree(base_dir, ignore_errors=True) + + os.makedirs(base_dir) + logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'), + level=logging.INFO, + format='%(asctime)-2s %(levelname)-8s %(message)s') + + # Set up the alarm for calling 'adb devices'. 
This is to ensure + # our script doesn't get stuck waiting for a process response + def TimeoutHandler(_signum, _unused_frame): + raise TimeoutException() + signal.signal(signal.SIGALRM, TimeoutHandler) + + # Handle SIGTERMs to ensure clean shutdown + def SigtermHandler(_signum, _unused_frame): + raise SigtermError() + signal.signal(signal.SIGTERM, SigtermHandler) + + logging.info('Started with pid %d', os.getpid()) + pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID') + + try: + with open(pid_file_path, 'w') as f: + f.write(str(os.getpid())) + while True: + for device_id in GetAttachedDevices(adb_cmd): + if not device_id in devices: + subprocess.call([adb_cmd, '-s', device_id, 'logcat', '-c']) + devices[device_id] = (None, 0) + + for device in devices: + # This will spawn logcat watchers for any device ever detected + StartLogcatIfNecessary(device, adb_cmd, base_dir) + + time.sleep(5) + except SigtermError: + logging.info('Received SIGTERM, shutting down') + except: # pylint: disable=bare-except + logging.exception('Unexpected exception in main.') + finally: + for process, _ in devices.itervalues(): + if process: + try: + process.terminate() + except OSError: + pass + os.remove(pid_file_path) + + +if __name__ == '__main__': + if 2 <= len(sys.argv) <= 3: + print 'adb_logcat_monitor: Initializing' + sys.exit(main(*sys.argv[1:3])) + + print 'Usage: %s []' % sys.argv[0] diff --git a/build/android/adb_logcat_printer.py b/build/android/adb_logcat_printer.py new file mode 100755 index 00000000000..55176ab9201 --- /dev/null +++ b/build/android/adb_logcat_printer.py @@ -0,0 +1,213 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Shutdown adb_logcat_monitor and print accumulated logs. 
+ +To test, call './adb_logcat_printer.py ' where + contains 'adb logcat -v threadtime' files named as +logcat__ + +The script will print the files to out, and will combine multiple +logcats from a single device if there is overlap. + +Additionally, if a /LOGCAT_MONITOR_PID exists, the script +will attempt to terminate the contained PID by sending a SIGINT and +monitoring for the deletion of the aforementioned file. +""" +# pylint: disable=W0702 + +import cStringIO +import logging +import optparse +import os +import re +import signal +import sys +import time + + +# Set this to debug for more verbose output +LOG_LEVEL = logging.INFO + + +def CombineLogFiles(list_of_lists, logger): + """Splices together multiple logcats from the same device. + + Args: + list_of_lists: list of pairs (filename, list of timestamped lines) + logger: handler to log events + + Returns: + list of lines with duplicates removed + """ + cur_device_log = [''] + for cur_file, cur_file_lines in list_of_lists: + # Ignore files with just the logcat header + if len(cur_file_lines) < 2: + continue + common_index = 0 + # Skip this step if list just has empty string + if len(cur_device_log) > 1: + try: + line = cur_device_log[-1] + # Used to make sure we only splice on a timestamped line + if re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} ', line): + common_index = cur_file_lines.index(line) + else: + logger.warning('splice error - no timestamp in "%s"?', line.strip()) + except ValueError: + # The last line was valid but wasn't found in the next file + cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****'] + logger.info('Unable to splice %s. Incomplete logcat?', cur_file) + + cur_device_log += ['*'*30 + ' %s' % cur_file] + cur_device_log.extend(cur_file_lines[common_index:]) + + return cur_device_log + + +def FindLogFiles(base_dir): + """Search a directory for logcat files. 
def GetDeviceLogs(log_filenames, logger):
  """Read log files, combine and format.

  Args:
    log_filenames: mapping of device_id to sorted list of file paths
    logger: logger handle for logging events

  Returns:
    list of formatted device logs, one for each device.
  """
  formatted_logs = []
  for device_id, paths in log_filenames.items():
    logger.debug('%s: %s', device_id, str(paths))
    per_file_lines = []
    for path in paths:
      with open(path) as log_file:
        per_file_lines.append((path, log_file.read().splitlines()))
    spliced = CombineLogFiles(per_file_lines, logger)
    # Prefix every line with the tail of the device ID so interleaved
    # output from several devices stays attributable. The start of a
    # serial is often shared between devices; the end rarely is.
    tag = '\n' + device_id[-5:] + ': '
    formatted_logs.append(tag.join(spliced))
  return formatted_logs
There's no + # good method to tell how long to wait, but it usually only takes a + # second. On most bots, this code path won't occur at all, since + # adb_logcat_monitor.py command will have spawned more than 5 seconds + # prior to called this shell script. + try: + sleep_time = 5 - (time.time() - os.path.getctime(base_dir)) + except OSError: + sleep_time = 5 + if sleep_time > 0: + logger.warning('Monitor just started? Sleeping %.1fs', sleep_time) + time.sleep(sleep_time) + + assert os.path.exists(base_dir), '%s does not exist' % base_dir + ShutdownLogcatMonitor(base_dir, logger) + separator = '\n' + '*' * 80 + '\n\n' + for log in GetDeviceLogs(FindLogFiles(base_dir), logger): + output_file.write(log) + output_file.write(separator) + with open(os.path.join(base_dir, 'eventlog')) as f: + output_file.write('\nLogcat Monitor Event Log\n') + output_file.write(f.read()) + except: + logger.exception('Unexpected exception') + + logger.info('Done.') + sh.flush() + output_file.write('\nLogcat Printer Event Log\n') + output_file.write(log_stringio.getvalue()) + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/adb_profile_chrome b/build/android/adb_profile_chrome new file mode 100755 index 00000000000..21f6faf7b98 --- /dev/null +++ b/build/android/adb_profile_chrome @@ -0,0 +1,8 @@ +#!/bin/bash +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Start / stop profiling in chrome. +exec $(dirname $0)/../../tools/profile_chrome.py $@ diff --git a/build/android/adb_reverse_forwarder.py b/build/android/adb_reverse_forwarder.py new file mode 100755 index 00000000000..3ce53595dae --- /dev/null +++ b/build/android/adb_reverse_forwarder.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python +# +# Copyright (c) 2013 The Chromium Authors. All rights reserved. 
def main(argv):
  """Maps device ports to host ports until interrupted.

  Args:
    argv: full argument vector (argv[0] is the program name; the remaining
        positional arguments are device_port/host_port pairs).
  """
  parser = optparse.OptionParser(usage='Usage: %prog [options] device_port '
                                 'host_port [device_port_2 host_port_2] ...',
                                 description=__doc__)
  parser.add_option('-v',
                    '--verbose',
                    dest='verbose_count',
                    default=0,
                    action='count',
                    help='Verbose level (multiple times for more)')
  parser.add_option('--device',
                    help='Serial number of device we should use.')
  parser.add_option('--debug', action='store_const', const='Debug',
                    dest='build_type', default='Release',
                    help='Use Debug build of host tools instead of Release.')

  options, args = parser.parse_args(argv)
  run_tests_helper.SetLogLevel(options.verbose_count)

  # args[0] is the program name, so an odd total length means an even
  # number of port arguments. parser.error() raises SystemExit itself,
  # so no explicit sys.exit() is needed after it.
  if len(args) < 2 or len(args) % 2 != 1:
    parser.error('Need even number of port pairs')

  try:
    port_numbers = [int(arg) for arg in args[1:]]
  except ValueError:
    parser.error('Bad port number')
  port_pairs = list(zip(port_numbers[::2], port_numbers[1::2]))

  devices = device_utils.DeviceUtils.HealthyDevices()

  if options.device:
    device = next((d for d in devices if d == options.device), None)
    if not device:
      raise device_errors.DeviceUnreachableError(options.device)
  elif devices:
    device = devices[0]
    logging.info('No device specified. Defaulting to %s', devices[0])
  else:
    raise device_errors.NoDevicesError()

  constants.SetBuildType(options.build_type)
  try:
    forwarder.Forwarder.Map(port_pairs, device)
    # Keep the forwarder alive until the user interrupts us.
    while True:
      time.sleep(60)
  except KeyboardInterrupt:
    sys.exit(0)
  finally:
    forwarder.Forwarder.UnmapAllDevicePorts(device)
+ +optional_url=$1 + +adb shell am start \ + -a android.intent.action.VIEW \ + -n org.chromium.chrome.shell/.ChromeShellActivity \ + ${optional_url:+-d "$optional_url"} diff --git a/build/android/adb_run_content_shell b/build/android/adb_run_content_shell new file mode 100755 index 00000000000..3f01f3bf02f --- /dev/null +++ b/build/android/adb_run_content_shell @@ -0,0 +1,12 @@ +#!/bin/bash +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +optional_url=$1 + +adb shell am start \ + -a android.intent.action.VIEW \ + -n org.chromium.content_shell_apk/.ContentShellActivity \ + ${optional_url:+-d "$optional_url"} diff --git a/build/android/adb_run_mojo_shell b/build/android/adb_run_mojo_shell new file mode 100755 index 00000000000..b585e4a71f6 --- /dev/null +++ b/build/android/adb_run_mojo_shell @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +optional_url=$1 +parameters=$2 + +adb logcat -c +adb shell am start -S \ + -a android.intent.action.VIEW \ + -n org.chromium.mojo_shell_apk/.MojoShellActivity \ + ${parameters:+--esa parameters "$parameters"} \ + ${optional_url:+-d "$optional_url"} +adb logcat -s MojoShellApplication MojoShellActivity chromium diff --git a/build/android/android_no_jni_exports.lst b/build/android/android_no_jni_exports.lst new file mode 100644 index 00000000000..ffc6cf7028c --- /dev/null +++ b/build/android/android_no_jni_exports.lst @@ -0,0 +1,17 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This script makes all JNI exported symbols local, to prevent the JVM from +# being able to find them, enforcing use of manual JNI function registration. 
+# This is used for all Android binaries by default, unless they explicitly state +# that they want JNI exported symbols to remain visible, as we need to ensure +# the manual registration path is correct to maintain compatibility with the +# crazy linker. +# Check ld version script manual: +# https://sourceware.org/binutils/docs-2.24/ld/VERSION.html#VERSION + +{ + local: + Java_*; +}; diff --git a/build/android/ant/BUILD.gn b/build/android/ant/BUILD.gn new file mode 100644 index 00000000000..a30fb54bf4a --- /dev/null +++ b/build/android/ant/BUILD.gn @@ -0,0 +1,13 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +copy("keystore") { + sources = [ + "chromium-debug.keystore", + ] + + outputs = [ + "$root_out_dir/chromium-debug.keystore", + ] +} diff --git a/build/android/ant/apk-package.xml b/build/android/ant/apk-package.xml new file mode 100644 index 00000000000..e8b76f7e453 --- /dev/null +++ b/build/android/ant/apk-package.xml @@ -0,0 +1,96 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/build/android/ant/chromium-debug.keystore b/build/android/ant/chromium-debug.keystore new file mode 100644 index 00000000000..67eb0aa34c5 Binary files /dev/null and b/build/android/ant/chromium-debug.keystore differ diff --git a/build/android/ant/empty/res/.keep b/build/android/ant/empty/res/.keep new file mode 100644 index 00000000000..1fd038b8cf3 --- /dev/null +++ b/build/android/ant/empty/res/.keep @@ -0,0 +1,2 @@ +# This empty res folder can be passed to aapt while building Java libraries or +# APKs that don't have any resources. 
diff --git a/build/android/apkbuilder_action.gypi b/build/android/apkbuilder_action.gypi new file mode 100644 index 00000000000..27807d861fb --- /dev/null +++ b/build/android/apkbuilder_action.gypi @@ -0,0 +1,79 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is a helper to java_apk.gypi. It should be used to create an +# action that runs ApkBuilder via ANT. +# +# Required variables: +# apk_name - File name (minus path & extension) of the output apk. +# apk_path - Path to output apk. +# package_input_paths - Late-evaluated list of resource zips. +# native_libs_dir - Path to lib/ directory to use. Set to an empty directory +# if no native libs are needed. +# Optional variables: +# has_code - Whether to include classes.dex in the apk. +# dex_path - Path to classes.dex. Used only when has_code=1. +# extra_inputs - List of extra action inputs. +{ + 'variables': { + 'variables': { + 'has_code%': 1, + }, + 'conditions': [ + ['has_code == 0', { + 'has_code_str': 'false', + }, { + 'has_code_str': 'true', + }], + ], + 'has_code%': '<(has_code)', + 'extra_inputs%': [], + # Write the inputs list to a file, so that its mtime is updated when + # the list of inputs changes. 
+ 'inputs_list_file': '>|(apk_package.<(_target_name).<(apk_name).gypcmd >@(package_input_paths))', + 'resource_packaged_apk_name': '<(apk_name)-resources.ap_', + 'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)', + }, + 'action_name': 'apkbuilder_<(apk_name)', + 'message': 'Packaging <(apk_name)', + 'inputs': [ + '<(DEPTH)/build/android/ant/apk-package.xml', + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/ant.py', + '<(resource_packaged_apk_path)', + '<@(extra_inputs)', + '>@(package_input_paths)', + '>(inputs_list_file)', + ], + 'outputs': [ + '<(apk_path)', + ], + 'conditions': [ + ['has_code == 1', { + 'inputs': ['<(dex_path)'], + 'action': [ + '-DDEX_FILE_PATH=<(dex_path)', + ] + }], + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/ant.py', + '--', + '-quiet', + '-DHAS_CODE=<(has_code_str)', + '-DANDROID_SDK_ROOT=<(android_sdk_root)', + '-DANDROID_SDK_TOOLS=<(android_sdk_tools)', + '-DRESOURCE_PACKAGED_APK_NAME=<(resource_packaged_apk_name)', + '-DNATIVE_LIBS_DIR=<(native_libs_dir)', + '-DAPK_NAME=<(apk_name)', + '-DCONFIGURATION_NAME=<(CONFIGURATION_NAME)', + '-DOUT_DIR=<(intermediate_dir)', + '-DUNSIGNED_APK_PATH=<(apk_path)', + '-DEMMA_INSTRUMENT=<(emma_instrument)', + '-DEMMA_DEVICE_JAR=<(emma_device_jar)', + '-Dbasedir=.', + '-buildfile', + '<(DEPTH)/build/android/ant/apk-package.xml', + ] +} diff --git a/build/android/asan_symbolize.py b/build/android/asan_symbolize.py new file mode 100755 index 00000000000..10087a637ae --- /dev/null +++ b/build/android/asan_symbolize.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import collections +import optparse +import os +import re +import sys + +from pylib import constants + +# Uses symbol.py from third_party/android_platform, not python's. 
# ---- build/android/asan_symbolize.py (continued) ----

# symbol.py here must be the Android platform copy under third_party, not any
# similarly named module on the default path, so prepend its directory.
sys.path.insert(0,
                os.path.join(constants.DIR_SOURCE_ROOT,
                             'third_party/android_platform/development/scripts'))
import symbol


# One ASan stack frame, e.g. "... #2 0xabcd (/system/lib/libc.so+0x1234)".
# Groups: 1=prefix text, 2=frame marker (#N), 3=address, 4=library, 5=offset.
_RE_ASAN = re.compile(r'(.*?)(#\S*?) (\S*?) \((.*?)\+(.*?)\)')

def _ParseAsanLogLine(line):
  """Parses one logcat line as an ASan stack frame.

  Args:
    line: A line of adb logcat output.

  Returns:
    A dict with 'prefix', 'library', 'pos' and 'rel_address' keys, or None
    if |line| is not an ASan stack frame.
  """
  m = re.match(_RE_ASAN, line)
  if not m:
    return None
  return {
      'prefix': m.group(1),
      'library': m.group(4),
      'pos': m.group(2),
      # Offsets are hex in the log; normalize to fixed-width hex so they can
      # be matched against symbol-table addresses.
      'rel_address': '%08x' % int(m.group(5), 16),
  }


def _FindASanLibraries():
  """Returns relative paths of every ASan runtime .so under llvm-build."""
  asan_lib_dir = os.path.join(constants.DIR_SOURCE_ROOT,
                              'third_party', 'llvm-build',
                              'Release+Asserts', 'lib')
  asan_libs = []
  for src_dir, _, files in os.walk(asan_lib_dir):
    asan_libs += [os.path.relpath(os.path.join(src_dir, f))
                  for f in files
                  if f.endswith('.so')]
  return asan_libs


def _TranslateLibPath(library, asan_libs):
  """Maps a device library path to a local path, preferring ASan copies."""
  for asan_lib in asan_libs:
    if os.path.basename(library) == os.path.basename(asan_lib):
      return '/' + asan_lib
  return symbol.TranslateLibPath(library)


def _Symbolize(asan_input):
  """Prints |asan_input| with ASan frames rewritten to symbolized form.

  Lines that are not ASan frames, and frames that cannot be resolved, are
  echoed unchanged.

  Args:
    asan_input: Iterable of raw log lines.
  """
  asan_libs = _FindASanLibraries()
  # Group parsed frames by library so each library is symbolized in one pass.
  libraries = collections.defaultdict(list)
  asan_lines = []
  for asan_log_line in [a.rstrip() for a in asan_input]:
    m = _ParseAsanLogLine(asan_log_line)
    if m:
      libraries[m['library']].append(m)
    asan_lines.append({'raw_log': asan_log_line, 'parsed': m})

  all_symbols = collections.defaultdict(dict)
  for library, items in libraries.iteritems():
    libname = _TranslateLibPath(library, asan_libs)
    lib_relative_addrs = set([i['rel_address'] for i in items])
    info_dict = symbol.SymbolInformationForSet(libname,
                                               lib_relative_addrs,
                                               True)
    if info_dict:
      all_symbols[library]['symbols'] = info_dict

  for asan_log_line in asan_lines:
    m = asan_log_line['parsed']
    if not m:
      print(asan_log_line['raw_log'])
      continue
    if (m['library'] in all_symbols and
        m['rel_address'] in all_symbols[m['library']]['symbols']):
      s = all_symbols[m['library']]['symbols'][m['rel_address']][0]
      print('%s%s %s %s' % (m['prefix'], m['pos'], s[0], s[1]))
    else:
      print(asan_log_line['raw_log'])


def main():
  """Reads ASan logcat output from a file or stdin and symbolizes it."""
  parser = optparse.OptionParser()
  parser.add_option('-l', '--logcat',
                    help='File containing adb logcat output with ASan stacks. '
                         'Use stdin if not specified.')
  options, _ = parser.parse_args()
  if options.logcat:
    # open() instead of the py3-removed file() builtin, and close the handle
    # deterministically instead of leaking it.
    with open(options.logcat, 'r') as logcat_file:
      lines = logcat_file.readlines()
  else:
    lines = sys.stdin.readlines()
  _Symbolize(lines)


if __name__ == "__main__":
  sys.exit(main())

# ---- build/android/avd.py ----
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Launches Android Virtual Devices with a set configuration for testing Chrome.

The script will launch a specified number of Android Virtual Devices (AVD's).
"""


import install_emulator_deps
import logging
import optparse
import os
import re
import sys

from pylib import cmd_helper
from pylib import constants
from pylib.utils import emulator


def main(argv):
  """Launches existing or temporary AVDs after validating the emulator SDK.

  Returns:
    1 if a prerequisite (KVM, x86 image, SDK, SDK target, or the named AVD)
    is missing; None otherwise.
  """
  # ANDROID_SDK_ROOT needs to be set to the location of the SDK used to launch
  # the emulator to find the system images upon launch.
  emulator_sdk = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk')
  os.environ['ANDROID_SDK_ROOT'] = emulator_sdk

  opt_parser = optparse.OptionParser(description='AVD script.')
  opt_parser.add_option('--name', help='Optionally, name of existing AVD to '
                        'launch. If not specified, new AVD\'s will be created')
  opt_parser.add_option('-n', '--num', dest='emulator_count',
                        help='Number of emulators to launch (default is 1).',
                        type='int', default='1')
  opt_parser.add_option('--abi', default='x86',
                        help='Platform of emulators to launch (x86 default).')
  opt_parser.add_option('--api-level', dest='api_level',
                        help='API level for the image, e.g. 19 for Android 4.4',
                        type='int', default=constants.ANDROID_SDK_VERSION)

  options, _ = opt_parser.parse_args(argv[1:])

  logging.basicConfig(level=logging.INFO,
                      format='# %(asctime)-15s: %(message)s')
  logging.root.setLevel(logging.INFO)

  # Check if KVM is enabled for x86 AVD's and check for x86 system images.
  # TODO(andrewhayden) Since we can fix all of these with install_emulator_deps
  # why don't we just run it?
  if options.abi == 'x86':
    if not install_emulator_deps.CheckKVM():
      logging.critical('ERROR: KVM must be enabled in BIOS, and installed. '
                       'Enable KVM in BIOS and run install_emulator_deps.py')
      return 1
    elif not install_emulator_deps.CheckX86Image(options.api_level):
      logging.critical('ERROR: System image for x86 AVD not installed. Run '
                       'install_emulator_deps.py')
      return 1

  if not install_emulator_deps.CheckSDK():
    logging.critical('ERROR: Emulator SDK not installed. Run '
                     'install_emulator_deps.py.')
    return 1

  # If AVD is specified, check that the SDK has the required target. If not,
  # check that the SDK has the desired target for the temporary AVD's.
  api_level = options.api_level
  if options.name:
    android = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk', 'tools',
                           'android')
    avds_output = cmd_helper.GetCmdOutput([android, 'list', 'avd'])
    names = re.findall(r'Name: (\w+)', avds_output)
    api_levels = re.findall(r'API level (\d+)', avds_output)
    try:
      avd_index = names.index(options.name)
    except ValueError:
      logging.critical('ERROR: Specified AVD %s does not exist.', options.name)
      return 1
    api_level = int(api_levels[avd_index])

  if not install_emulator_deps.CheckSDKPlatform(api_level):
    # api_level is passed as a lazy logging argument; the original call left
    # the %d placeholder unfilled.
    logging.critical('ERROR: Emulator SDK missing required target for API %d. '
                     'Run install_emulator_deps.py.', api_level)
    return 1

  if options.name:
    emulator.LaunchEmulator(options.name, options.abi)
  else:
    emulator.LaunchTempEmulators(options.emulator_count, options.abi,
                                 options.api_level, True)


if __name__ == '__main__':
  sys.exit(main(sys.argv))

# ---- build/android/bb_run_sharded_steps.py ----
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""DEPRECATED!
TODO(bulach): remove me once all other repositories reference
'test_runner.py perf' directly.
"""

import optparse
import sys

from pylib import cmd_helper


def main(argv):
  """Forwards the legacy sharded-steps flags to 'test_runner.py perf'."""
  parser = optparse.OptionParser()
  parser.add_option('-s', '--steps',
                    help='A JSON file containing all the steps to be '
                         'sharded.')
  parser.add_option('--flaky_steps',
                    help='A JSON file containing steps that are flaky and '
                         'will have their exit codes ignored.')
  parser.add_option('-p', '--print_results',
                    help='Only prints the results for the previously '
                         'executed step, do not run it again.')
  options, _ = parser.parse_args(argv)
  if options.print_results:
    return cmd_helper.RunCmd(['build/android/test_runner.py', 'perf',
                              '--print-step', options.print_results])
  flaky_options = []
  if options.flaky_steps:
    flaky_options = ['--flaky-steps', options.flaky_steps]
  return cmd_helper.RunCmd(['build/android/test_runner.py', 'perf', '-v',
                            '--steps', options.steps] + flaky_options)


if __name__ == '__main__':
  sys.exit(main(sys.argv))

# ---- build/android/buildbot/OWNERS (first lines) ----
# set noparent
#
# cmp@chromium.org
# ---- build/android/buildbot/OWNERS (continued) ----
# jbudorick@chromium.org
# navabi@chromium.org

# ---- build/android/buildbot/bb_annotations.py ----
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Helper functions to print buildbot messages."""

def _Emit(annotation):
  # Buildbot scrapes annotations from plain lines on stdout; every helper
  # below funnels through here.
  print(annotation)


def PrintLink(label, url):
  """Adds a link named |label| pointing at |url| to the current step.

  Args:
    label: Link text shown on the waterfall.
    url: Target URL of the link.
  """
  _Emit('@@@STEP_LINK@%s@%s@@@' % (label, url))


def PrintMsg(msg):
  """Appends |msg| to the current buildbot step text.

  Args:
    msg: String to append.
  """
  _Emit('@@@STEP_TEXT@%s@@@' % msg)


def PrintSummaryText(msg):
  """Appends |msg| to the main build summary, visible from the waterfall.

  Args:
    msg: String to append.
  """
  _Emit('@@@STEP_SUMMARY_TEXT@%s@@@' % msg)


def PrintError():
  """Marks the current step as failed."""
  _Emit('@@@STEP_FAILURE@@@')


def PrintWarning():
  """Marks the current step with a warning."""
  _Emit('@@@STEP_WARNINGS@@@')


def PrintNamedStep(step):
  """Starts a new buildbot step named |step|."""
  _Emit('@@@BUILD_STEP %s@@@' % step)

# ---- build/android/buildbot/bb_device_status_check.py (header) ----
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""A class to keep track of devices across builds and report state."""
import json
import logging
import optparse
import os
import psutil
import re
import signal
import smtplib
import subprocess
import sys
import time
import urllib

import bb_annotations
import bb_utils

# Make util/lib/common importable for perf result reporting.
sys.path.append(os.path.join(os.path.dirname(__file__),
                             os.pardir, os.pardir, 'util', 'lib',
                             'common'))
import perf_tests_results_helper  # pylint: disable=F0401

# Make build/android/pylib importable.
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from pylib import constants
from pylib.cmd_helper import GetCmdOutput
from pylib.device import adb_wrapper
from pylib.device import battery_utils
from pylib.device import device_blacklist
from pylib.device import device_errors
from pylib.device import device_list
from pylib.device import device_utils
from pylib.utils import run_tests_helper

# Matches the device-ID line of 'dumpsys iphonesubinfo' output.
_RE_DEVICE_ID = re.compile('Device ID = (\d+)')

def DeviceInfo(device, options):
  """Gathers info on a device via various adb calls.

  Args:
    device: A DeviceUtils instance for the device to construct info about.
    options: Parsed options; only no_provisioning_check is read here.

  Returns:
    Tuple (build_product, build_id, battery_level, errors, dev_good,
    json_data): device type, build id, battery percentage, list of error
    strings, a bool indicating whether the device is usable for testing,
    and a JSON-serializable dict of device details.
  """
  battery = battery_utils.BatteryUtils(device)

  build_product = ''
  build_id = ''
  # Default to a full battery so a failed battery query alone does not mark
  # the device as low.
  battery_level = 100
  errors = []
  dev_good = True
  json_data = {}

  try:
    build_product = device.build_product
    build_id = device.build_id

    json_data = {
      'serial': device.adb.GetDeviceSerial(),
      'type': build_product,
      'build': build_id,
      'build_detail': device.GetProp('ro.build.fingerprint'),
      'battery': {},
      'imei_slice': 'Unknown',
      'wifi_ip': device.GetProp('dhcp.wlan0.ipaddress'),
    }

    battery_info = {}
    try:
      battery_info = battery.GetBatteryInfo(timeout=5)
      battery_level = int(battery_info.get('level', battery_level))
      json_data['battery'] = battery_info
    except device_errors.CommandFailedError:
      logging.exception('Failed to get battery information for %s', str(device))

    try:
      # Only the last six digits of the IMEI are recorded.
      for l in device.RunShellCommand(['dumpsys', 'iphonesubinfo'],
                                      check_return=True, timeout=5):
        m = _RE_DEVICE_ID.match(l)
        if m:
          json_data['imei_slice'] = m.group(1)[-6:]
    except device_errors.CommandFailedError:
      logging.exception('Failed to get IMEI slice for %s', str(device))

    if battery_level < 15:
      errors += ['Device critically low in battery.']
      dev_good = False
      if not battery.GetCharging():
        battery.SetCharging(True)
    if not options.no_provisioning_check:
      setup_wizard_disabled = (
          device.GetProp('ro.setupwizard.mode') == 'DISABLED')
      # 'user' builds cannot disable the setup wizard, so skip them.
      if not setup_wizard_disabled and device.build_type != 'user':
        errors += ['Setup wizard not disabled. Was it provisioned correctly?']
    if (device.product_name == 'mantaray' and
        battery_info.get('AC powered', None) != 'true'):
      errors += ['Mantaray device not connected to AC power.']
  except device_errors.CommandFailedError:
    logging.exception('Failure while getting device status.')
    dev_good = False
  except device_errors.CommandTimeoutError:
    logging.exception('Timeout while getting device status.')
    dev_good = False

  return (build_product, build_id, battery_level, errors, dev_good, json_data)


def CheckForMissingDevices(options, devices):
  """Uses file of previous online devices to detect broken phones.

  Args:
    options: out_dir parameter of options argument is used as the base
             directory to load and update the cache file.
    devices: A list of DeviceUtils instance for the currently visible and
             online attached devices.

  Returns:
    A list of error strings if devices are missing or none are known;
    None otherwise.
  """
  out_dir = os.path.abspath(options.out_dir)
  device_serials = set(d.adb.GetDeviceSerial() for d in devices)

  # last_devices denotes all known devices prior to this run
  last_devices_path = os.path.join(out_dir, device_list.LAST_DEVICES_FILENAME)
  last_missing_devices_path = os.path.join(
      out_dir, device_list.LAST_MISSING_DEVICES_FILENAME)
  try:
    last_devices = device_list.GetPersistentDeviceList(last_devices_path)
  except IOError:
    # Ignore error, file might not exist
    last_devices = []

  try:
    last_missing_devices = device_list.GetPersistentDeviceList(
        last_missing_devices_path)
  except IOError:
    last_missing_devices = []

  missing_devs = list(set(last_devices) - device_serials)
  # Devices that went missing since the previous run (not already reported).
  new_missing_devs = list(set(missing_devs) - set(last_missing_devices))

  # Only email alerts when running on a buildbot slave.
  if new_missing_devs and os.environ.get('BUILDBOT_SLAVENAME'):
    logging.info('new_missing_devs %s' % new_missing_devs)
    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
    bb_annotations.PrintSummaryText(devices_missing_msg)

    from_address = 'chrome-bot@chromium.org'
    to_addresses = ['chrome-labs-tech-ticket@google.com',
                    'chrome-android-device-alert@google.com']
    cc_addresses = ['chrome-android-device-alert@google.com']
    subject = 'Devices offline on %s, %s, %s' % (
      os.environ.get('BUILDBOT_SLAVENAME'),
      os.environ.get('BUILDBOT_BUILDERNAME'),
      os.environ.get('BUILDBOT_BUILDNUMBER'))
    msg = ('Please reboot the following devices:\n%s' %
           '\n'.join(map(str, new_missing_devs)))
    SendEmail(from_address, to_addresses, cc_addresses, subject, msg)

  all_known_devices = list(device_serials | set(last_devices))
  device_list.WritePersistentDeviceList(last_devices_path, all_known_devices)
  device_list.WritePersistentDeviceList(last_missing_devices_path,
                                        missing_devs)

  if not all_known_devices:
    # This can happen if for some reason the .last_devices file is not
    # present or if it was empty.
    return ['No online devices. Have any devices been plugged in?']
  if missing_devs:
    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
    bb_annotations.PrintSummaryText(devices_missing_msg)
    return ['Current online devices: %s' % ', '.join(d for d in device_serials),
            '%s are no longer visible. Were they removed?' % missing_devs]
  else:
    new_devs = device_serials - set(last_devices)
    if new_devs and os.path.exists(last_devices_path):
      bb_annotations.PrintWarning()
      bb_annotations.PrintSummaryText(
          '%d new devices detected' % len(new_devs))
      logging.info('New devices detected:')
      for d in new_devs:
        logging.info('  %s', d)


def SendEmail(from_address, to_addresses, cc_addresses, subject, msg):
  """Sends a plain-text alert email via the local SMTP server; best effort.

  NOTE(review): cc_addresses appear only in the CC header, not in the
  sendmail envelope recipients, so CC'd parties do not actually receive the
  mail — confirm whether that is intended.
  """
  msg_body = '\r\n'.join(['From: %s' % from_address,
                          'To: %s' % ', '.join(to_addresses),
                          'CC: %s' % ', '.join(cc_addresses),
                          'Subject: %s' % subject, '', msg])
  try:
    server = smtplib.SMTP('localhost')
    server.sendmail(from_address, to_addresses, msg_body)
    server.quit()
  except Exception:
    # Email is advisory only; never fail the status check over it.
    logging.exception('Failed to send alert email.')


def RestartUsb():
  """Power-cycles all non-root USB devices via /usr/bin/restart_usb.

  Returns:
    True if every device restarted, False on failure.
    NOTE(review): on lsusb failure this returns the non-zero returncode,
    which is truthy — callers doing 'if not RestartUsb()' will treat that
    failure as success; confirm intended behavior.
  """
  if not os.path.isfile('/usr/bin/restart_usb'):
    logging.error('Could not restart usb. ''/usr/bin/restart_usb not '
                  'installed on host (see BUG=305769).')
    return False

  lsusb_proc = bb_utils.SpawnCmd(['lsusb'], stdout=subprocess.PIPE)
  lsusb_output, _ = lsusb_proc.communicate()
  if lsusb_proc.returncode:
    logging.error('Could not get list of USB ports (i.e. lsusb).')
    return lsusb_proc.returncode

  usb_devices = [re.findall(r'Bus (\d\d\d) Device (\d\d\d)', lsusb_line)[0]
                 for lsusb_line in lsusb_output.strip().split('\n')]

  all_restarted = True
  # Walk USB devices from leaves up (i.e reverse sorted) restarting the
  # connection. If a parent node (e.g. usb hub) is restarted before the
  # devices connected to it, the (bus, dev) for the hub can change, making the
  # output we have wrong. This way we restart the devices before the hub.
  for (bus, dev) in reversed(sorted(usb_devices)):
    # Can not restart root usb connections
    if dev != '001':
      return_code = bb_utils.RunCmd(['/usr/bin/restart_usb', bus, dev])
      if return_code:
        logging.error('Error restarting USB device /dev/bus/usb/%s/%s',
                      bus, dev)
        all_restarted = False
      else:
        logging.info('Restarted USB device /dev/bus/usb/%s/%s', bus, dev)

  return all_restarted


def KillAllAdb():
  """Kills every adb process on the host, escalating TERM -> QUIT -> KILL."""
  def GetAllAdb():
    for p in psutil.process_iter():
      try:
        if 'adb' in p.name:
          yield p
      except (psutil.NoSuchProcess, psutil.AccessDenied):
        pass

  for sig in [signal.SIGTERM, signal.SIGQUIT, signal.SIGKILL]:
    for p in GetAllAdb():
      try:
        logging.info('kill %d %d (%s [%s])', sig, p.pid, p.name,
                     ' '.join(p.cmdline))
        p.send_signal(sig)
      except (psutil.NoSuchProcess, psutil.AccessDenied):
        pass
  # Anything still alive after SIGKILL is only logged, not retried.
  for p in GetAllAdb():
    try:
      logging.error('Unable to kill %d (%s [%s])', p.pid, p.name,
                    ' '.join(p.cmdline))
    except (psutil.NoSuchProcess, psutil.AccessDenied):
      pass


def main():
  """Checks all attached devices, blacklists bad ones, reports status.

  Returns:
    2 if every device failed, 1 if no devices were found, None otherwise.
  """
  parser = optparse.OptionParser()
  parser.add_option('', '--out-dir',
                    help='Directory where the device path is stored',
                    default=os.path.join(constants.DIR_SOURCE_ROOT, 'out'))
  parser.add_option('--no-provisioning-check', action='store_true',
                    help='Will not check if devices are provisioned properly.')
  parser.add_option('--device-status-dashboard', action='store_true',
                    help='Output device status data for dashboard.')
  parser.add_option('--restart-usb', action='store_true',
                    help='Restart USB ports before running device check.')
  parser.add_option('--json-output',
                    help='Output JSON information into a specified file.')
  parser.add_option('-v', '--verbose', action='count', default=1,
                    help='Log more information.')

  options, args = parser.parse_args()
  if args:
    parser.error('Unknown options %s' % args)

  run_tests_helper.SetLogLevel(options.verbose)

  # Remove the last build's "bad devices" before checking device statuses.
  device_blacklist.ResetBlacklist()

  try:
    expected_devices = device_list.GetPersistentDeviceList(
        os.path.join(options.out_dir, device_list.LAST_DEVICES_FILENAME))
  except IOError:
    expected_devices = []
  devices = device_utils.DeviceUtils.HealthyDevices()
  device_serials = [d.adb.GetDeviceSerial() for d in devices]
  # Only restart usb if devices are missing.
  if set(expected_devices) != set(device_serials):
    logging.warning('expected_devices: %s', expected_devices)
    logging.warning('devices: %s', device_serials)
    KillAllAdb()
    retries = 5
    usb_restarted = True
    if options.restart_usb:
      if not RestartUsb():
        usb_restarted = False
        bb_annotations.PrintWarning()
        logging.error('USB reset stage failed, '
                      'wait for any device to come back.')
    while retries:
      logging.info('retry adb devices...')
      time.sleep(1)
      devices = device_utils.DeviceUtils.HealthyDevices()
      device_serials = [d.adb.GetDeviceSerial() for d in devices]
      if set(expected_devices) == set(device_serials):
        # All devices are online, keep going.
        break
      if not usb_restarted and devices:
        # The USB wasn't restarted, but there's at least one device online.
        # No point in trying to wait for all devices.
        break
      retries -= 1

  # Per-device parallel lists produced by DeviceInfo().
  types, builds, batteries, errors, devices_ok, json_data = (
      [], [], [], [], [], [])
  if devices:
    types, builds, batteries, errors, devices_ok, json_data = (
        zip(*[DeviceInfo(dev, options) for dev in devices]))

  # Write device info to file for buildbot info display.
  if os.path.exists('/home/chrome-bot'):
    with open('/home/chrome-bot/.adb_device_info', 'w') as f:
      for device in json_data:
        try:
          f.write('%s %s %s %.1fC %s%%\n' % (device['serial'], device['type'],
              device['build'], float(device['battery']['temperature']) / 10,
              device['battery']['level']))
        except Exception:
          # Best-effort display file; skip devices with incomplete data.
          pass

  err_msg = CheckForMissingDevices(options, devices) or []

  unique_types = list(set(types))
  unique_builds = list(set(builds))

  bb_annotations.PrintMsg('Online devices: %d. Device types %s, builds %s'
                          % (len(devices), unique_types, unique_builds))

  for j in json_data:
    logging.info('Device %s (%s)', j.get('serial'), j.get('type'))
    logging.info('  Build: %s (%s)', j.get('build'), j.get('build_detail'))
    logging.info('  Current Battery Service state:')
    for k, v in j.get('battery', {}).iteritems():
      logging.info('    %s: %s', k, v)
    logging.info('  IMEI slice: %s', j.get('imei_slice'))
    logging.info('  WiFi IP: %s', j.get('wifi_ip'))


  for dev, dev_errors in zip(devices, errors):
    if dev_errors:
      err_msg += ['%s errors:' % str(dev)]
      err_msg += ['  %s' % error for error in dev_errors]

  if err_msg:
    bb_annotations.PrintWarning()
    for e in err_msg:
      logging.error(e)
    from_address = 'buildbot@chromium.org'
    to_addresses = ['chromium-android-device-alerts@google.com']
    bot_name = os.environ.get('BUILDBOT_BUILDERNAME')
    slave_name = os.environ.get('BUILDBOT_SLAVENAME')
    subject = 'Device status check errors on %s, %s.' % (slave_name, bot_name)
    SendEmail(from_address, to_addresses, [], subject, '\n'.join(err_msg))

  if options.device_status_dashboard:
    offline_devices = [
        device_utils.DeviceUtils(a)
        for a in adb_wrapper.AdbWrapper.Devices(is_ready=False)
        if a.GetState() == 'offline']

    perf_tests_results_helper.PrintPerfResult('BotDevices', 'OnlineDevices',
                                              [len(devices)], 'devices')
    perf_tests_results_helper.PrintPerfResult('BotDevices', 'OfflineDevices',
                                              [len(offline_devices)], 'devices',
                                              'unimportant')
    for dev, battery in zip(devices, batteries):
      perf_tests_results_helper.PrintPerfResult('DeviceBattery', str(dev),
                                                [battery], '%',
                                                'unimportant')

  if options.json_output:
    with open(options.json_output, 'wb') as f:
      f.write(json.dumps(json_data, indent=4))

  num_failed_devs = 0
  for device_ok, device in zip(devices_ok, devices):
    if not device_ok:
      logging.warning('Blacklisting %s', str(device))
      device_blacklist.ExtendBlacklist([str(device)])
      num_failed_devs += 1

  # NOTE(review): with zero devices, 0 == len(devices) makes this return 2
  # first, so the 'return 1' below appears unreachable — confirm intent.
  if num_failed_devs == len(devices):
    return 2

  if not devices:
    return 1


if __name__ == '__main__':
  sys.exit(main())
diff --git a/build/android/buildbot/bb_device_steps.py b/build/android/buildbot/bb_device_steps.py
new file mode 100755
index 00000000000..8ad42b90d15
--- /dev/null
+++ b/build/android/buildbot/bb_device_steps.py
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import collections
import glob
import hashlib
import json
import os
import random
import re
import shutil
import sys

import bb_utils
import bb_annotations

# Make build/android importable for provision_devices and pylib.
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import provision_devices
from pylib import constants
from pylib.device import device_utils
from pylib.gtest import gtest_config

CHROME_SRC_DIR = bb_utils.CHROME_SRC
DIR_BUILD_ROOT = os.path.dirname(CHROME_SRC_DIR)
CHROME_OUT_DIR = bb_utils.CHROME_OUT_DIR
BLINK_SCRIPTS_DIR = 'third_party/WebKit/Tools/Scripts'

SLAVE_SCRIPTS_DIR = os.path.join(bb_utils.BB_BUILD_DIR, 'scripts', 'slave')
LOGCAT_DIR = os.path.join(bb_utils.CHROME_OUT_DIR, 'logcat')
GS_URL = 'https://storage.googleapis.com'
GS_AUTH_URL = 'https://storage.cloud.google.com'

# Describes an instrumentation test suite:
#   test: Name of test we're running.
#   apk: apk to be installed.
#   apk_package: package for the apk to be installed.
#   test_apk: apk to run tests on.
#   test_data: data folder in format destination:source.
#   host_driven_root: The host-driven test root directory.
#   annotation: Annotation of the tests to include.
#   exclude_annotation: The annotation of the tests to exclude.
I_TEST = collections.namedtuple('InstrumentationTest', [
    'name', 'apk', 'apk_package', 'test_apk', 'test_data', 'isolate_file_path',
    'host_driven_root', 'annotation', 'exclude_annotation', 'extra_flags'])


def SrcPath(*path):
  """Returns an absolute path under the Chrome source tree."""
  return os.path.join(CHROME_SRC_DIR, *path)


def I(name, apk, apk_package, test_apk, test_data, isolate_file_path=None,
      host_driven_root=None, annotation=None, exclude_annotation=None,
      extra_flags=None):
  # Shorthand constructor so the optional I_TEST fields can default to None.
  return I_TEST(name, apk, apk_package, test_apk, test_data, isolate_file_path,
                host_driven_root, annotation, exclude_annotation, extra_flags)

INSTRUMENTATION_TESTS = dict((suite.name, suite) for suite in [
    I('ContentShell',
      'ContentShell.apk',
      'org.chromium.content_shell_apk',
      'ContentShellTest',
      'content:content/test/data/android/device_files',
      isolate_file_path='content/content_shell_test_apk.isolate'),
    I('ChromeShell',
      'ChromeShell.apk',
      'org.chromium.chrome.shell',
      'ChromeShellTest',
      'chrome:chrome/test/data/android/device_files',
      isolate_file_path='chrome/chrome_shell_test_apk.isolate',
      host_driven_root=constants.CHROME_SHELL_HOST_DRIVEN_DIR),
    I('AndroidWebView',
      'AndroidWebView.apk',
      'org.chromium.android_webview.shell',
      'AndroidWebViewTest',
      'webview:android_webview/test/data/device_files',
      isolate_file_path='android_webview/android_webview_test_apk.isolate'),
    I('ChromeSyncShell',
      'ChromeSyncShell.apk',
      'org.chromium.chrome.browser.sync',
      'ChromeSyncShellTest',
      None),
    ])

InstallablePackage = collections.namedtuple('InstallablePackage', [
    'name', 'apk', 'apk_package'])

# Every instrumentation-test apk plus standalone installable apks.
INSTALLABLE_PACKAGES = dict((package.name, package) for package in (
    [InstallablePackage(i.name, i.apk, i.apk_package)
     for i in INSTRUMENTATION_TESTS.itervalues()] +
    [InstallablePackage('ChromeDriverWebViewShell',
                        'ChromeDriverWebViewShell.apk',
                        'org.chromium.chromedriver_webview_shell')]))

VALID_TESTS = set([
    'base_junit_tests',
    'chromedriver',
    'chrome_proxy',
    'components_browsertests',
    'gfx_unittests',
    'gl_unittests',
    'gpu',
    'python_unittests',
    'telemetry_unittests',
    'telemetry_perf_unittests',
    'ui',
    'unit',
    'webkit',
    'webkit_layout'
])

RunCmd = bb_utils.RunCmd


def _GetRevision(options):
  """Get the SVN revision number.

  Args:
    options: options object.

  Returns:
    The revision number.
  """
  revision = options.build_properties.get('got_revision')
  if not revision:
    revision = options.build_properties.get('revision', 'testing')
  return revision


def _RunTest(options, cmd, suite):
  """Run test command with runtest.py.

  Args:
    options: options object.
    cmd: the command to run.
    suite: test name.
  """
  property_args = bb_utils.EncodeProperties(options)
  args = [os.path.join(SLAVE_SCRIPTS_DIR, 'runtest.py')] + property_args
  args += ['--test-platform', 'android']
  if options.factory_properties.get('generate_gtest_json'):
    args.append('--generate-json-file')
    args += ['-o', 'gtest-results/%s' % suite,
             '--annotate', 'gtest',
             '--build-number', str(options.build_properties.get('buildnumber',
                                                                '')),
             '--builder-name', options.build_properties.get('buildername', '')]
  if options.target == 'Release':
    args += ['--target', 'Release']
  else:
    args += ['--target', 'Debug']
  if options.flakiness_server:
    args += ['--flakiness-dashboard-server=%s' %
             options.flakiness_server]
  args += cmd
  RunCmd(args, cwd=DIR_BUILD_ROOT)


def RunTestSuites(options, suites, suites_options=None):
  """Manages an invocation of test_runner.py for gtests.

  Args:
    options: options object.
    suites: List of suite names to run.
    suites_options: Command line options dictionary for particular suites.
                    For example,
                    {'content_browsertests', ['--num_retries=1', '--release']}
                    will add the options only to content_browsertests.
  """

  if not suites_options:
    suites_options = {}

  args = ['--verbose']
  if options.target == 'Release':
    args.append('--release')
  if options.asan:
    args.append('--tool=asan')
  if options.gtest_filter:
    args.append('--gtest-filter=%s' % options.gtest_filter)

  for suite in suites:
    bb_annotations.PrintNamedStep(suite)
    cmd = [suite] + args
    cmd += suites_options.get(suite, [])
    # Browser-test suites are flakier; allow one retry.
    if suite == 'content_browsertests' or suite == 'components_browsertests':
      cmd.append('--num_retries=1')
    _RunTest(options, cmd, suite)


def RunJunitSuite(suite):
  """Runs a single JUnit suite through test_runner.py."""
  bb_annotations.PrintNamedStep(suite)
  RunCmd(['build/android/test_runner.py', 'junit', '-s', suite])


def RunChromeDriverTests(options):
  """Run all the steps for running chromedriver tests."""
  bb_annotations.PrintNamedStep('chromedriver_annotation')
  RunCmd(['chrome/test/chromedriver/run_buildbot_steps.py',
          '--android-packages=%s,%s,%s,%s' %
          ('chrome_shell',
           'chrome_stable',
           'chrome_beta',
           'chromedriver_webview_shell'),
          '--revision=%s' % _GetRevision(options),
          '--update-log'])

def RunChromeProxyTests(options):
  """Run the chrome_proxy tests.

  Args:
    options: options object.
  """
  InstallApk(options, INSTRUMENTATION_TESTS['ChromeShell'], False)
  args = ['--browser', 'android-chrome-shell']
  devices = device_utils.DeviceUtils.HealthyDevices()
  if devices:
    args = args + ['--device', devices[0].adb.GetDeviceSerial()]
  bb_annotations.PrintNamedStep('chrome_proxy')
  RunCmd(['tools/chrome_proxy/run_tests'] + args)


def RunTelemetryTests(options, step_name, run_tests_path):
  """Runs either telemetry_perf_unittests or telemetry_unittests.

  Args:
    options: options object.
    step_name: either 'telemetry_unittests' or 'telemetry_perf_unittests'
    run_tests_path: path to run_tests script (tools/perf/run_tests for
                    perf_unittests and tools/telemetry/run_tests for
                    telemetry_unittests)
  """
  InstallApk(options, INSTRUMENTATION_TESTS['ChromeShell'], False)
  args = ['--browser', 'android-chrome-shell']
  devices = device_utils.DeviceUtils.HealthyDevices()
  if devices:
    # NOTE(review): passes the literal string 'android' rather than a device
    # serial (contrast RunChromeProxyTests above) — confirm intended.
    args = args + ['--device', 'android']
  bb_annotations.PrintNamedStep(step_name)
  RunCmd([run_tests_path] + args)


def InstallApk(options, test, print_step=False):
  """Install an apk to all phones.

  Args:
    options: options object
    test: An I_TEST namedtuple
    print_step: Print a buildbot step
  """
  if print_step:
    bb_annotations.PrintNamedStep('install_%s' % test.name.lower())

  args = ['--apk_package', test.apk_package]
  if options.target == 'Release':
    args.append('--release')
  args.append(test.apk)

  RunCmd(['build/android/adb_install_apk.py'] + args, halt_on_failure=True)


def RunInstrumentationSuite(options, test, flunk_on_failure=True,
                            python_only=False, official_build=False):
  """Manages an invocation of test_runner.py for instrumentation tests.

  Args:
    options: options object
    test: An I_TEST namedtuple
    flunk_on_failure: Flunk the step if tests fail.
    Python: Run only host driven Python tests.
    official_build: Run official-build tests.
  """
  bb_annotations.PrintNamedStep('%s_instrumentation_tests' % test.name.lower())

  if test.apk:
    InstallApk(options, test)
  args = ['--test-apk', test.test_apk, '--verbose']
  if test.test_data:
    args.extend(['--test_data', test.test_data])
  if options.target == 'Release':
    args.append('--release')
  if options.asan:
    args.append('--tool=asan')
  if options.flakiness_server:
    args.append('--flakiness-dashboard-server=%s' %
                options.flakiness_server)
  if options.coverage_bucket:
    args.append('--coverage-dir=%s' % options.coverage_dir)
  if test.isolate_file_path:
    args.append('--isolate-file-path=%s' % test.isolate_file_path)
  if test.host_driven_root:
    args.append('--host-driven-root=%s' % test.host_driven_root)
  if test.annotation:
    args.extend(['-A', test.annotation])
  if test.exclude_annotation:
    args.extend(['-E', test.exclude_annotation])
  if test.extra_flags:
    args.extend(test.extra_flags)
  if python_only:
    args.append('-p')
  if official_build:
    # The option needs to be assigned 'True' as it does not have an action
    # associated with it.
    args.append('--official-build')

  RunCmd(['build/android/test_runner.py', 'instrumentation'] + args,
         flunk_on_failure=flunk_on_failure)


def RunWebkitLint():
  """Lint WebKit's TestExpectation files."""
  bb_annotations.PrintNamedStep('webkit_lint')
  RunCmd([SrcPath(os.path.join(BLINK_SCRIPTS_DIR, 'lint-test-expectations'))])


def RunWebkitLayoutTests(options):
  """Run layout tests on an actual device."""
  bb_annotations.PrintNamedStep('webkit_tests')
  cmd_args = [
      '--no-show-results',
      '--no-new-test-results',
      '--full-results-html',
      '--clobber-old-results',
      '--exit-after-n-failures', '5000',
      '--exit-after-n-crashes-or-timeouts', '100',
      '--debug-rwt-logging',
      '--results-directory', '../layout-test-results',
      '--target', options.target,
      '--builder-name', options.build_properties.get('buildername', ''),
      '--build-number', str(options.build_properties.get('buildnumber', '')),
      '--master-name', 'ChromiumWebkit',  # TODO: Get this from the cfg.
      '--build-name', options.build_properties.get('buildername', ''),
      '--platform=android']

  for flag in 'test_results_server', 'driver_name', 'additional_driver_flag':
    if flag in options.factory_properties:
      cmd_args.extend(['--%s' % flag.replace('_', '-'),
                       options.factory_properties.get(flag)])

  for f in options.factory_properties.get('additional_expectations', []):
    cmd_args.extend(
        ['--additional-expectations=%s' % os.path.join(CHROME_SRC_DIR, *f)])

  # TODO(dpranke): Remove this block after
  # https://codereview.chromium.org/12927002/ lands.
  for f in options.factory_properties.get('additional_expectations_files', []):
    cmd_args.extend(
        ['--additional-expectations=%s' % os.path.join(CHROME_SRC_DIR, *f)])

  exit_code = RunCmd(
      [SrcPath(os.path.join(BLINK_SCRIPTS_DIR, 'run-webkit-tests'))] + cmd_args)
  # Special run-webkit-tests exit codes are translated to step text below.
  if exit_code == 255:  # test_run_results.UNEXPECTED_ERROR_EXIT_STATUS
    bb_annotations.PrintMsg('?? (crashed or hung)')
  elif exit_code == 254:  # test_run_results.NO_DEVICES_EXIT_STATUS
    bb_annotations.PrintMsg('?? (no devices found)')
  elif exit_code == 253:  # test_run_results.NO_TESTS_EXIT_STATUS
    bb_annotations.PrintMsg('?? (no tests found)')
  else:
    full_results_path = os.path.join('..', 'layout-test-results',
                                     'full_results.json')
    if os.path.exists(full_results_path):
      full_results = json.load(open(full_results_path))
      unexpected_passes, unexpected_failures, unexpected_flakes = (
          _ParseLayoutTestResults(full_results))
      if unexpected_failures:
        _PrintDashboardLink('failed', unexpected_failures.keys(),
                            max_tests=25)
      elif unexpected_passes:
        _PrintDashboardLink('unexpected passes', unexpected_passes.keys(),
                            max_tests=10)
      if unexpected_flakes:
        _PrintDashboardLink('unexpected flakes', unexpected_flakes.keys(),
                            max_tests=10)

      if exit_code == 0 and (unexpected_passes or unexpected_flakes):
        # If exit_code != 0, RunCmd() will have already printed an error.
        bb_annotations.PrintWarning()
    else:
      bb_annotations.PrintError()
      bb_annotations.PrintMsg('?? (results missing)')

  if options.factory_properties.get('archive_webkit_results', False):
    bb_annotations.PrintNamedStep('archive_webkit_results')
    base = 'https://storage.googleapis.com/chromium-layout-test-archives'
    builder_name = options.build_properties.get('buildername', '')
    build_number = str(options.build_properties.get('buildnumber', ''))
    results_link = '%s/%s/%s/layout-test-results/results.html' % (
        base, EscapeBuilderName(builder_name), build_number)
    bb_annotations.PrintLink('results', results_link)
    bb_annotations.PrintLink('(zip)', '%s/%s/%s/layout-test-results.zip' % (
        base, EscapeBuilderName(builder_name), build_number))
    gs_bucket = 'gs://chromium-layout-test-archives'
    RunCmd([os.path.join(SLAVE_SCRIPTS_DIR, 'chromium',
                         'archive_layout_test_results.py'),
            '--results-dir', '../../layout-test-results',
            '--build-number', build_number,
            '--builder-name', builder_name,
            '--gs-bucket', gs_bucket],
           cwd=DIR_BUILD_ROOT)


def _ParseLayoutTestResults(results):
  """Extract the failures from the test run."""
  # Cloned from third_party/WebKit/Tools/Scripts/print-json-test-results
  tests = _ConvertTrieToFlatPaths(results['tests'])
  failures = {}
  flakes = {}
  passes = {}
  for (test, result) in tests.iteritems():
    if result.get('is_unexpected'):
      actual_results = result['actual'].split()
      expected_results = result['expected'].split()
      if len(actual_results) > 1:
        # We report the first failure type back, even if the second
        # was more severe.
+ if actual_results[1] in expected_results: + flakes[test] = actual_results[0] + else: + failures[test] = actual_results[0] + elif actual_results[0] == 'PASS': + passes[test] = result + else: + failures[test] = actual_results[0] + + return (passes, failures, flakes) + + +def _ConvertTrieToFlatPaths(trie, prefix=None): + """Flatten the trie of failures into a list.""" + # Cloned from third_party/WebKit/Tools/Scripts/print-json-test-results + result = {} + for name, data in trie.iteritems(): + if prefix: + name = prefix + '/' + name + + if len(data) and 'actual' not in data and 'expected' not in data: + result.update(_ConvertTrieToFlatPaths(data, name)) + else: + result[name] = data + + return result + + +def _PrintDashboardLink(link_text, tests, max_tests): + """Add a link to the flakiness dashboard in the step annotations.""" + if len(tests) > max_tests: + test_list_text = ' '.join(tests[:max_tests]) + ' and more' + else: + test_list_text = ' '.join(tests) + + dashboard_base = ('http://test-results.appspot.com' + '/dashboards/flakiness_dashboard.html#' + 'master=ChromiumWebkit&tests=') + + bb_annotations.PrintLink('%d %s: %s' % + (len(tests), link_text, test_list_text), + dashboard_base + ','.join(tests)) + + +def EscapeBuilderName(builder_name): + return re.sub('[ ()]', '_', builder_name) + + +def SpawnLogcatMonitor(): + shutil.rmtree(LOGCAT_DIR, ignore_errors=True) + bb_utils.SpawnCmd([ + os.path.join(CHROME_SRC_DIR, 'build', 'android', 'adb_logcat_monitor.py'), + LOGCAT_DIR]) + + # Wait for logcat_monitor to pull existing logcat + RunCmd(['sleep', '5']) + + +def ProvisionDevices(options): + bb_annotations.PrintNamedStep('provision_devices') + + if not bb_utils.TESTING: + # Restart adb to work around bugs, sleep to wait for usb discovery. 
+ device_utils.RestartServer() + RunCmd(['sleep', '1']) + provision_cmd = ['build/android/provision_devices.py', '-t', options.target] + if options.auto_reconnect: + provision_cmd.append('--auto-reconnect') + if options.skip_wipe: + provision_cmd.append('--skip-wipe') + if options.disable_location: + provision_cmd.append('--disable-location') + RunCmd(provision_cmd, halt_on_failure=True) + + +def DeviceStatusCheck(options): + bb_annotations.PrintNamedStep('device_status_check') + cmd = ['build/android/buildbot/bb_device_status_check.py'] + if options.restart_usb: + cmd.append('--restart-usb') + RunCmd(cmd, halt_on_failure=True) + + +def GetDeviceSetupStepCmds(): + return [ + ('device_status_check', DeviceStatusCheck), + ('provision_devices', ProvisionDevices), + ] + + +def RunUnitTests(options): + suites = gtest_config.STABLE_TEST_SUITES + if options.asan: + suites = [s for s in suites + if s not in gtest_config.ASAN_EXCLUDED_TEST_SUITES] + RunTestSuites(options, suites) + + +def RunTelemetryUnitTests(options): + RunTelemetryTests(options, 'telemetry_unittests', 'tools/telemetry/run_tests') + + +def RunTelemetryPerfUnitTests(options): + RunTelemetryTests(options, 'telemetry_perf_unittests', 'tools/perf/run_tests') + + +def RunInstrumentationTests(options): + for test in INSTRUMENTATION_TESTS.itervalues(): + RunInstrumentationSuite(options, test) + + +def RunWebkitTests(options): + RunTestSuites(options, ['webkit_unit_tests', 'blink_heap_unittests']) + RunWebkitLint() + + +def RunGPUTests(options): + revision = _GetRevision(options) + builder_name = options.build_properties.get('buildername', 'noname') + + bb_annotations.PrintNamedStep('pixel_tests') + RunCmd(['content/test/gpu/run_gpu_test.py', + 'pixel', '-v', + '--browser', + 'android-content-shell', + '--build-revision', + str(revision), + '--upload-refimg-to-cloud-storage', + '--refimg-cloud-storage-bucket', + 'chromium-gpu-archive/reference-images', + '--os-type', + 'android', + '--test-machine-name', + 
EscapeBuilderName(builder_name)]) + + bb_annotations.PrintNamedStep('webgl_conformance_tests') + RunCmd(['content/test/gpu/run_gpu_test.py', '-v', + '--browser=android-content-shell', 'webgl_conformance', + '--webgl-conformance-version=1.0.1']) + + bb_annotations.PrintNamedStep('android_webview_webgl_conformance_tests') + RunCmd(['content/test/gpu/run_gpu_test.py', '-v', + '--browser=android-webview-shell', 'webgl_conformance', + '--webgl-conformance-version=1.0.1']) + + bb_annotations.PrintNamedStep('gpu_rasterization_tests') + RunCmd(['content/test/gpu/run_gpu_test.py', + 'gpu_rasterization', '-v', + '--browser', + 'android-content-shell', + '--build-revision', + str(revision), + '--test-machine-name', + EscapeBuilderName(builder_name)]) + + +def RunPythonUnitTests(_options): + for suite in constants.PYTHON_UNIT_TEST_SUITES: + bb_annotations.PrintNamedStep(suite) + RunCmd(['build/android/test_runner.py', 'python', '-s', suite]) + + +def GetTestStepCmds(): + return [ + ('base_junit_tests', + lambda _options: RunJunitSuite('base_junit_tests')), + ('chromedriver', RunChromeDriverTests), + ('chrome_proxy', RunChromeProxyTests), + ('components_browsertests', + lambda options: RunTestSuites(options, ['components_browsertests'])), + ('gfx_unittests', + lambda options: RunTestSuites(options, ['gfx_unittests'])), + ('gl_unittests', + lambda options: RunTestSuites(options, ['gl_unittests'])), + ('gpu', RunGPUTests), + ('python_unittests', RunPythonUnitTests), + ('telemetry_unittests', RunTelemetryUnitTests), + ('telemetry_perf_unittests', RunTelemetryPerfUnitTests), + ('ui', RunInstrumentationTests), + ('unit', RunUnitTests), + ('webkit', RunWebkitTests), + ('webkit_layout', RunWebkitLayoutTests), + ] + + +def MakeGSPath(options, gs_base_dir): + revision = _GetRevision(options) + bot_id = options.build_properties.get('buildername', 'testing') + randhash = hashlib.sha1(str(random.random())).hexdigest() + gs_path = '%s/%s/%s/%s' % (gs_base_dir, bot_id, revision, randhash) + 
# remove double slashes, happens with blank revisions and confuses gsutil + gs_path = re.sub('/+', '/', gs_path) + return gs_path + +def UploadHTML(options, gs_base_dir, dir_to_upload, link_text, + link_rel_path='index.html', gs_url=GS_URL): + """Uploads directory at |dir_to_upload| to Google Storage and output a link. + + Args: + options: Command line options. + gs_base_dir: The Google Storage base directory (e.g. + 'chromium-code-coverage/java') + dir_to_upload: Absolute path to the directory to be uploaded. + link_text: Link text to be displayed on the step. + link_rel_path: Link path relative to |dir_to_upload|. + gs_url: Google storage URL. + """ + gs_path = MakeGSPath(options, gs_base_dir) + RunCmd([bb_utils.GSUTIL_PATH, 'cp', '-R', dir_to_upload, 'gs://%s' % gs_path]) + bb_annotations.PrintLink(link_text, + '%s/%s/%s' % (gs_url, gs_path, link_rel_path)) + + +def GenerateJavaCoverageReport(options): + """Generates an HTML coverage report using EMMA and uploads it.""" + bb_annotations.PrintNamedStep('java_coverage_report') + + coverage_html = os.path.join(options.coverage_dir, 'coverage_html') + RunCmd(['build/android/generate_emma_html.py', + '--coverage-dir', options.coverage_dir, + '--metadata-dir', os.path.join(CHROME_OUT_DIR, options.target), + '--cleanup', + '--output', os.path.join(coverage_html, 'index.html')]) + return coverage_html + + +def LogcatDump(options): + # Print logcat, kill logcat monitor + bb_annotations.PrintNamedStep('logcat_dump') + logcat_file = os.path.join(CHROME_OUT_DIR, options.target, 'full_log.txt') + RunCmd([SrcPath('build', 'android', 'adb_logcat_printer.py'), + '--output-path', logcat_file, LOGCAT_DIR]) + gs_path = MakeGSPath(options, 'chromium-android/logcat_dumps') + RunCmd([bb_utils.GSUTIL_PATH, 'cp', '-z', 'txt', logcat_file, + 'gs://%s' % gs_path]) + bb_annotations.PrintLink('logcat dump', '%s/%s' % (GS_AUTH_URL, gs_path)) + + +def RunStackToolSteps(options): + """Run stack tool steps. 
+ + Stack tool is run for logcat dump, optionally for ASAN. + """ + bb_annotations.PrintNamedStep('Run stack tool with logcat dump') + logcat_file = os.path.join(CHROME_OUT_DIR, options.target, 'full_log.txt') + RunCmd([os.path.join(CHROME_SRC_DIR, 'third_party', 'android_platform', + 'development', 'scripts', 'stack'), + '--more-info', logcat_file]) + if options.asan_symbolize: + bb_annotations.PrintNamedStep('Run stack tool for ASAN') + RunCmd([ + os.path.join(CHROME_SRC_DIR, 'build', 'android', 'asan_symbolize.py'), + '-l', logcat_file]) + + +def GenerateTestReport(options): + bb_annotations.PrintNamedStep('test_report') + for report in glob.glob( + os.path.join(CHROME_OUT_DIR, options.target, 'test_logs', '*.log')): + RunCmd(['cat', report]) + os.remove(report) + + +def MainTestWrapper(options): + try: + # Spawn logcat monitor + SpawnLogcatMonitor() + + # Run all device setup steps + for _, cmd in GetDeviceSetupStepCmds(): + cmd(options) + + if options.install: + for i in options.install: + install_obj = INSTALLABLE_PACKAGES[i] + InstallApk(options, install_obj, print_step=True) + + if options.test_filter: + bb_utils.RunSteps(options.test_filter, GetTestStepCmds(), options) + + if options.coverage_bucket: + coverage_html = GenerateJavaCoverageReport(options) + UploadHTML(options, '%s/java' % options.coverage_bucket, coverage_html, + 'Coverage Report') + shutil.rmtree(coverage_html, ignore_errors=True) + + if options.experimental: + RunTestSuites(options, gtest_config.EXPERIMENTAL_TEST_SUITES) + + finally: + # Run all post test steps + LogcatDump(options) + if not options.disable_stack_tool: + RunStackToolSteps(options) + GenerateTestReport(options) + # KillHostHeartbeat() has logic to check if heartbeat process is running, + # and kills only if it finds the process is running on the host. 
+ provision_devices.KillHostHeartbeat() + if options.cleanup: + shutil.rmtree(os.path.join(CHROME_OUT_DIR, options.target), + ignore_errors=True) + + +def GetDeviceStepsOptParser(): + parser = bb_utils.GetParser() + parser.add_option('--experimental', action='store_true', + help='Run experiemental tests') + parser.add_option('-f', '--test-filter', metavar='', default=[], + action='append', + help=('Run a test suite. Test suites: "%s"' % + '", "'.join(VALID_TESTS))) + parser.add_option('--gtest-filter', + help='Filter for running a subset of tests of a gtest test') + parser.add_option('--asan', action='store_true', help='Run tests with asan.') + parser.add_option('--install', metavar='', action="append", + help='Install an apk by name') + parser.add_option('--no-reboot', action='store_true', + help='Do not reboot devices during provisioning.') + parser.add_option('--coverage-bucket', + help=('Bucket name to store coverage results. Coverage is ' + 'only run if this is set.')) + parser.add_option('--restart-usb', action='store_true', + help='Restart usb ports before device status check.') + parser.add_option( + '--flakiness-server', + help=('The flakiness dashboard server to which the results should be ' + 'uploaded.')) + parser.add_option( + '--auto-reconnect', action='store_true', + help='Push script to device which restarts adbd on disconnections.') + parser.add_option('--skip-wipe', action='store_true', + help='Do not wipe devices during provisioning.') + parser.add_option('--disable-location', action='store_true', + help='Disable location settings.') + parser.add_option( + '--logcat-dump-output', + help='The logcat dump output will be "tee"-ed into this file') + # During processing perf bisects, a seperate working directory created under + # which builds are produced. 
Therefore we should look for relevent output + # file under this directory.(/b/build/slave//build/bisect/src/out) + parser.add_option( + '--chrome-output-dir', + help='Chrome output directory to be used while bisecting.') + + parser.add_option('--disable-stack-tool', action='store_true', + help='Do not run stack tool.') + parser.add_option('--asan-symbolize', action='store_true', + help='Run stack tool for ASAN') + parser.add_option('--cleanup', action='store_true', + help='Delete out/ directory at the end of the run.') + return parser + + +def main(argv): + parser = GetDeviceStepsOptParser() + options, args = parser.parse_args(argv[1:]) + + if args: + return sys.exit('Unused args %s' % args) + + unknown_tests = set(options.test_filter) - VALID_TESTS + if unknown_tests: + return sys.exit('Unknown tests %s' % list(unknown_tests)) + + setattr(options, 'target', options.factory_properties.get('target', 'Debug')) + + if options.chrome_output_dir: + global CHROME_OUT_DIR + global LOGCAT_DIR + CHROME_OUT_DIR = options.chrome_output_dir + LOGCAT_DIR = os.path.join(CHROME_OUT_DIR, 'logcat') + + if options.coverage_bucket: + setattr(options, 'coverage_dir', + os.path.join(CHROME_OUT_DIR, options.target, 'coverage')) + + MainTestWrapper(options) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/build/android/buildbot/bb_host_steps.py b/build/android/buildbot/bb_host_steps.py new file mode 100755 index 00000000000..1e927fb399d --- /dev/null +++ b/build/android/buildbot/bb_host_steps.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import os +import json +import sys + +import bb_utils +import bb_annotations + +sys.path.append(os.path.join(os.path.dirname(__file__), '..')) +from pylib import constants + + +SLAVE_SCRIPTS_DIR = os.path.join(bb_utils.BB_BUILD_DIR, 'scripts', 'slave') +VALID_HOST_TESTS = set(['check_webview_licenses']) + +DIR_BUILD_ROOT = os.path.dirname(constants.DIR_SOURCE_ROOT) + +# Short hand for RunCmd which is used extensively in this file. +RunCmd = bb_utils.RunCmd + + +def SrcPath(*path): + return os.path.join(constants.DIR_SOURCE_ROOT, *path) + + +def CheckWebViewLicenses(_): + bb_annotations.PrintNamedStep('check_licenses') + RunCmd([SrcPath('android_webview', 'tools', 'webview_licenses.py'), 'scan'], + warning_code=1) + + +def RunHooks(build_type): + RunCmd([SrcPath('build', 'landmines.py')]) + build_path = SrcPath('out', build_type) + landmine_path = os.path.join(build_path, '.landmines_triggered') + clobber_env = os.environ.get('BUILDBOT_CLOBBER') + if clobber_env or os.path.isfile(landmine_path): + bb_annotations.PrintNamedStep('Clobber') + if not clobber_env: + print 'Clobbering due to triggered landmines:' + with open(landmine_path) as f: + print f.read() + RunCmd(['rm', '-rf', build_path]) + + bb_annotations.PrintNamedStep('runhooks') + RunCmd(['gclient', 'runhooks'], halt_on_failure=True) + + +def Compile(options): + RunHooks(options.target) + cmd = [os.path.join(SLAVE_SCRIPTS_DIR, 'compile.py'), + '--build-tool=ninja', + '--compiler=goma', + '--target=%s' % options.target, + '--goma-dir=%s' % bb_utils.GOMA_DIR] + bb_annotations.PrintNamedStep('compile') + if options.build_targets: + build_targets = options.build_targets.split(',') + cmd += ['--build-args', ' '.join(build_targets)] + RunCmd(cmd, halt_on_failure=True, cwd=DIR_BUILD_ROOT) + + +def ZipBuild(options): + bb_annotations.PrintNamedStep('zip_build') + RunCmd([ + os.path.join(SLAVE_SCRIPTS_DIR, 'zip_build.py'), + '--src-dir', constants.DIR_SOURCE_ROOT, + '--exclude-files', 
'lib.target,gen,android_webview,jingle_unittests'] + + bb_utils.EncodeProperties(options), cwd=DIR_BUILD_ROOT) + + +def ExtractBuild(options): + bb_annotations.PrintNamedStep('extract_build') + RunCmd([os.path.join(SLAVE_SCRIPTS_DIR, 'extract_build.py')] + + bb_utils.EncodeProperties(options), cwd=DIR_BUILD_ROOT) + + +def BisectPerfRegression(options): + args = [] + if options.extra_src: + args = ['--extra_src', options.extra_src] + RunCmd([SrcPath('tools', 'prepare-bisect-perf-regression.py'), + '-w', os.path.join(constants.DIR_SOURCE_ROOT, os.pardir)]) + RunCmd([SrcPath('tools', 'run-bisect-perf-regression.py'), + '-w', os.path.join(constants.DIR_SOURCE_ROOT, os.pardir), + '--build-properties=%s' % json.dumps(options.build_properties)] + + args) + + +def GetHostStepCmds(): + return [ + ('compile', Compile), + ('extract_build', ExtractBuild), + ('check_webview_licenses', CheckWebViewLicenses), + ('bisect_perf_regression', BisectPerfRegression), + ('zip_build', ZipBuild) + ] + + +def GetHostStepsOptParser(): + parser = bb_utils.GetParser() + parser.add_option('--steps', help='Comma separated list of host tests.') + parser.add_option('--build-targets', default='', + help='Comma separated list of build targets.') + parser.add_option('--experimental', action='store_true', + help='Indicate whether to compile experimental targets.') + parser.add_option('--extra_src', default='', + help='Path to extra source file. 
If this is supplied, ' + 'bisect script will use it to override default behavior.') + + return parser + + +def main(argv): + parser = GetHostStepsOptParser() + options, args = parser.parse_args(argv[1:]) + if args: + return sys.exit('Unused args %s' % args) + + setattr(options, 'target', options.factory_properties.get('target', 'Debug')) + setattr(options, 'extra_src', + options.factory_properties.get('extra_src', '')) + + if options.steps: + bb_utils.RunSteps(options.steps.split(','), GetHostStepCmds(), options) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/build/android/buildbot/bb_run_bot.py b/build/android/buildbot/bb_run_bot.py new file mode 100755 index 00000000000..0c8a977c9d8 --- /dev/null +++ b/build/android/buildbot/bb_run_bot.py @@ -0,0 +1,320 @@ +#!/usr/bin/env python +# +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import collections +import copy +import json +import os +import pipes +import re +import subprocess +import sys + +import bb_utils + +sys.path.append(os.path.join(os.path.dirname(__file__), '..')) +from pylib import constants + + +CHROMIUM_COVERAGE_BUCKET = 'chromium-code-coverage' + +_BotConfig = collections.namedtuple( + 'BotConfig', ['bot_id', 'host_obj', 'test_obj']) + +HostConfig = collections.namedtuple( + 'HostConfig', + ['script', 'host_steps', 'extra_args', 'extra_gyp_defines', 'target_arch']) + +TestConfig = collections.namedtuple('Tests', ['script', 'tests', 'extra_args']) + + +def BotConfig(bot_id, host_object, test_object=None): + return _BotConfig(bot_id, host_object, test_object) + + +def DictDiff(d1, d2): + diff = [] + for key in sorted(set(d1.keys() + d2.keys())): + if key in d1 and d1[key] != d2.get(key): + diff.append('- %s=%s' % (key, pipes.quote(d1[key]))) + if key in d2 and d2[key] != d1.get(key): + diff.append('+ %s=%s' % (key, pipes.quote(d2[key]))) + return 
'\n'.join(diff) + + +def GetEnvironment(host_obj, testing, extra_env_vars=None): + init_env = dict(os.environ) + init_env['GYP_GENERATORS'] = 'ninja' + if extra_env_vars: + init_env.update(extra_env_vars) + envsetup_cmd = '. build/android/envsetup.sh' + if testing: + # Skip envsetup to avoid presubmit dependence on android deps. + print 'Testing mode - skipping "%s"' % envsetup_cmd + envsetup_cmd = ':' + else: + print 'Running %s' % envsetup_cmd + proc = subprocess.Popen(['bash', '-exc', + envsetup_cmd + ' >&2; python build/android/buildbot/env_to_json.py'], + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + cwd=bb_utils.CHROME_SRC, env=init_env) + json_env, envsetup_output = proc.communicate() + if proc.returncode != 0: + print >> sys.stderr, 'FATAL Failure in envsetup.' + print >> sys.stderr, envsetup_output + sys.exit(1) + env = json.loads(json_env) + env['GYP_DEFINES'] = env.get('GYP_DEFINES', '') + \ + ' OS=android fastbuild=1 use_goma=1 gomadir=%s' % bb_utils.GOMA_DIR + if host_obj.target_arch: + env['GYP_DEFINES'] += ' target_arch=%s' % host_obj.target_arch + extra_gyp = host_obj.extra_gyp_defines + if extra_gyp: + env['GYP_DEFINES'] += ' %s' % extra_gyp + if re.search('(asan|clang)=1', extra_gyp): + env.pop('CXX_target', None) + + # Bots checkout chrome in /b/build/slave//build/src + build_internal_android = os.path.abspath(os.path.join( + bb_utils.CHROME_SRC, '..', '..', '..', '..', '..', 'build_internal', + 'scripts', 'slave', 'android')) + if os.path.exists(build_internal_android): + env['PATH'] = os.pathsep.join([build_internal_android, env['PATH']]) + return env + + +def GetCommands(options, bot_config): + """Get a formatted list of commands. + + Args: + options: Options object. + bot_config: A BotConfig named tuple. + host_step_script: Host step script. + device_step_script: Device step script. + Returns: + list of Command objects. 
+ """ + property_args = bb_utils.EncodeProperties(options) + commands = [[bot_config.host_obj.script, + '--steps=%s' % ','.join(bot_config.host_obj.host_steps)] + + property_args + (bot_config.host_obj.extra_args or [])] + + test_obj = bot_config.test_obj + if test_obj: + run_test_cmd = [test_obj.script] + property_args + for test in test_obj.tests: + run_test_cmd.extend(['-f', test]) + if test_obj.extra_args: + run_test_cmd.extend(test_obj.extra_args) + commands.append(run_test_cmd) + return commands + + +def GetBotStepMap(): + compile_step = ['compile'] + chrome_proxy_tests = ['chrome_proxy'] + python_unittests = ['python_unittests'] + std_host_tests = ['check_webview_licenses'] + std_build_steps = ['compile', 'zip_build'] + std_test_steps = ['extract_build'] + std_tests = ['ui', 'unit'] + telemetry_tests = ['telemetry_perf_unittests'] + telemetry_tests_user_build = ['telemetry_unittests', + 'telemetry_perf_unittests'] + trial_tests = [ + 'base_junit_tests', + 'components_browsertests', + 'gfx_unittests', + 'gl_unittests', + ] + flakiness_server = ( + '--flakiness-server=%s' % constants.UPSTREAM_FLAKINESS_SERVER) + experimental = ['--experimental'] + bisect_chrome_output_dir = os.path.abspath( + os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, + os.pardir, 'bisect', 'src', 'out')) + B = BotConfig + H = (lambda steps, extra_args=None, extra_gyp=None, target_arch=None: + HostConfig('build/android/buildbot/bb_host_steps.py', steps, extra_args, + extra_gyp, target_arch)) + T = (lambda tests, extra_args=None: + TestConfig('build/android/buildbot/bb_device_steps.py', tests, + extra_args)) + + bot_configs = [ + # Main builders + B('main-builder-dbg', H(std_build_steps + std_host_tests)), + B('main-builder-rel', H(std_build_steps)), + B('main-clang-builder', + H(compile_step, extra_gyp='clang=1 component=shared_library')), + B('main-clobber', H(compile_step)), + B('main-tests-rel', H(std_test_steps), + T(std_tests + telemetry_tests + 
chrome_proxy_tests, + ['--cleanup', flakiness_server])), + B('main-tests', H(std_test_steps), + T(std_tests, ['--cleanup', flakiness_server])), + + # Other waterfalls + B('asan-builder-tests', H(compile_step, + extra_gyp='asan=1 component=shared_library'), + T(std_tests, ['--asan', '--asan-symbolize'])), + B('blink-try-builder', H(compile_step)), + B('chromedriver-fyi-tests-dbg', H(std_test_steps), + T(['chromedriver'], + ['--install=ChromeShell', '--install=ChromeDriverWebViewShell', + '--skip-wipe', '--disable-location', '--cleanup'])), + B('fyi-x86-builder-dbg', + H(compile_step + std_host_tests, experimental, target_arch='ia32')), + B('fyi-builder-dbg', + H(std_build_steps + std_host_tests, experimental, + extra_gyp='emma_coverage=1')), + B('x86-builder-dbg', + H(compile_step + std_host_tests, target_arch='ia32')), + B('fyi-builder-rel', H(std_build_steps, experimental)), + B('fyi-tests', H(std_test_steps), + T(std_tests + python_unittests, + ['--experimental', flakiness_server, + '--coverage-bucket', CHROMIUM_COVERAGE_BUCKET, + '--cleanup'])), + B('user-build-fyi-tests-dbg', H(std_test_steps), + T(sorted(telemetry_tests_user_build + trial_tests))), + B('fyi-component-builder-tests-dbg', + H(compile_step, extra_gyp='component=shared_library'), + T(std_tests, ['--experimental', flakiness_server])), + B('gpu-builder-tests-dbg', + H(compile_step), + T(['gpu'], ['--install=ContentShell'])), + # Pass empty T([]) so that logcat monitor and device status check are run. 
+ B('perf-bisect-builder-tests-dbg', + H(['bisect_perf_regression']), + T([], ['--chrome-output-dir', bisect_chrome_output_dir])), + B('perf-tests-rel', H(std_test_steps), + T([], ['--install=ChromeShell', '--cleanup'])), + B('webkit-latest-webkit-tests', H(std_test_steps), + T(['webkit_layout', 'webkit'], ['--cleanup', '--auto-reconnect'])), + B('webkit-latest-contentshell', H(compile_step), + T(['webkit_layout'], ['--auto-reconnect'])), + B('builder-unit-tests', H(compile_step), T(['unit'])), + + # Generic builder config (for substring match). + B('builder', H(std_build_steps)), + ] + + bot_map = dict((config.bot_id, config) for config in bot_configs) + + # These bots have identical configuration to ones defined earlier. + copy_map = [ + ('lkgr-clobber', 'main-clobber'), + ('try-builder-dbg', 'main-builder-dbg'), + ('try-builder-rel', 'main-builder-rel'), + ('try-clang-builder', 'main-clang-builder'), + ('try-fyi-builder-dbg', 'fyi-builder-dbg'), + ('try-x86-builder-dbg', 'x86-builder-dbg'), + ('try-tests-rel', 'main-tests-rel'), + ('try-tests', 'main-tests'), + ('try-fyi-tests', 'fyi-tests'), + ('webkit-latest-tests', 'main-tests'), + ] + for to_id, from_id in copy_map: + assert to_id not in bot_map + # pylint: disable=W0212 + bot_map[to_id] = copy.deepcopy(bot_map[from_id])._replace(bot_id=to_id) + + # Trybots do not upload to flakiness dashboard. They should be otherwise + # identical in configuration to their trunk building counterparts. + test_obj = bot_map[to_id].test_obj + if to_id.startswith('try') and test_obj: + extra_args = test_obj.extra_args + if extra_args and flakiness_server in extra_args: + extra_args.remove(flakiness_server) + return bot_map + + +# Return an object from the map, looking first for an exact id match. +# If this fails, look for an id which is a substring of the specified id. +# Choose the longest of all substring matches. 
+# pylint: disable=W0622 +def GetBestMatch(id_map, id): + config = id_map.get(id) + if not config: + substring_matches = [x for x in id_map.iterkeys() if x in id] + if substring_matches: + max_id = max(substring_matches, key=len) + print 'Using config from id="%s" (substring match).' % max_id + config = id_map[max_id] + return config + + +def GetRunBotOptParser(): + parser = bb_utils.GetParser() + parser.add_option('--bot-id', help='Specify bot id directly.') + parser.add_option('--testing', action='store_true', + help='For testing: print, but do not run commands') + + return parser + + +def GetBotConfig(options, bot_step_map): + bot_id = options.bot_id or options.factory_properties.get('android_bot_id') + if not bot_id: + print (sys.stderr, + 'A bot id must be specified through option or factory_props.') + return + + bot_config = GetBestMatch(bot_step_map, bot_id) + if not bot_config: + print 'Error: config for id="%s" cannot be inferred.' % bot_id + return bot_config + + +def RunBotCommands(options, commands, env): + print 'Environment changes:' + print DictDiff(dict(os.environ), env) + + for command in commands: + print bb_utils.CommandToString(command) + sys.stdout.flush() + if options.testing: + env['BUILDBOT_TESTING'] = '1' + return_code = subprocess.call(command, cwd=bb_utils.CHROME_SRC, env=env) + if return_code != 0: + return return_code + + +def main(argv): + proc = subprocess.Popen( + ['/bin/hostname', '-f'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + hostname_stdout, hostname_stderr = proc.communicate() + if proc.returncode == 0: + print 'Running on: ' + hostname_stdout + else: + print >> sys.stderr, 'WARNING: failed to run hostname' + print >> sys.stderr, hostname_stdout + print >> sys.stderr, hostname_stderr + sys.exit(1) + + parser = GetRunBotOptParser() + options, args = parser.parse_args(argv[1:]) + if args: + parser.error('Unused args: %s' % args) + + bot_config = GetBotConfig(options, GetBotStepMap()) + if not bot_config: + sys.exit(1) + + 
print 'Using config:', bot_config + + commands = GetCommands(options, bot_config) + for command in commands: + print 'Will run: ', bb_utils.CommandToString(command) + print + + env = GetEnvironment(bot_config.host_obj, options.testing) + return RunBotCommands(options, commands, env) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/build/android/buildbot/bb_utils.py b/build/android/buildbot/bb_utils.py new file mode 100644 index 00000000000..3c16cc2bc5b --- /dev/null +++ b/build/android/buildbot/bb_utils.py @@ -0,0 +1,100 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import json +import optparse +import os +import pipes +import subprocess +import sys + +import bb_annotations + +sys.path.append(os.path.join(os.path.dirname(__file__), '..')) +from pylib import constants + + +TESTING = 'BUILDBOT_TESTING' in os.environ + +BB_BUILD_DIR = os.path.abspath( + os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, + os.pardir, os.pardir, os.pardir, os.pardir)) + +CHROME_SRC = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..', '..')) + +# TODO: Figure out how to merge this with pylib.cmd_helper.OutDirectory(). 
CHROME_OUT_DIR = os.path.join(CHROME_SRC, 'out')

GOMA_DIR = os.environ.get('GOMA_DIR', os.path.join(BB_BUILD_DIR, 'goma'))

GSUTIL_PATH = os.path.join(BB_BUILD_DIR, 'third_party', 'gsutil', 'gsutil')


def CommandToString(command):
  """Returns the command quoted so it can be pasted into a bash shell."""
  return ' '.join(pipes.quote(part) for part in command)


def SpawnCmd(command, stdout=None, cwd=CHROME_SRC):
  """Starts a process without waiting for it to terminate.

  Returns a Popen (or, under BUILDBOT_TESTING, a Popen-shaped stub that
  always reports success).
  """
  sys.stdout.write('> %s\n' % CommandToString(command))
  sys.stdout.flush()
  if TESTING:
    class _FakePopen(object):
      # Minimal Popen stand-in: pretends every command succeeded.
      @staticmethod
      def wait():
        return 0
      @staticmethod
      def communicate():
        return '', ''
    return _FakePopen()
  return subprocess.Popen(command, cwd=cwd, stdout=stdout)


def RunCmd(command, flunk_on_failure=True, halt_on_failure=False,
           warning_code=constants.WARNING_EXIT_CODE, stdout=None,
           cwd=CHROME_SRC):
  """Runs a command relative to the chrome source root.

  Emits buildbot error/warning annotations on failure and returns the
  process exit code.
  """
  code = SpawnCmd(command, stdout, cwd).wait()
  sys.stdout.write('< %s\n' % CommandToString(command))
  if code != 0:
    sys.stdout.write('ERROR: process exited with code %d\n' % code)
    if code != warning_code and flunk_on_failure:
      bb_annotations.PrintError()
    else:
      bb_annotations.PrintWarning()
    # Allow steps to have both halting (i.e. 1) and non-halting exit codes.
    if code != warning_code and halt_on_failure:
      sys.stdout.write('FATAL %d != %d\n' % (code, warning_code))
      sys.exit(1)
  return code


def GetParser():
  """Returns an OptionParser that decodes JSON-encoded bot properties."""
  def _JsonCallback(option, _, value, parser):
    # Stores the decoded JSON object instead of the raw string.
    setattr(parser.values, option.dest, json.loads(value))
  parser = optparse.OptionParser()
  parser.add_option('--build-properties', action='callback',
                    callback=_JsonCallback, type='string', default={},
                    help='build properties in JSON format')
  parser.add_option('--factory-properties', action='callback',
                    callback=_JsonCallback, type='string', default={},
                    help='factory properties in JSON format')
  return parser


def EncodeProperties(options):
  """Re-encodes parsed properties as flags for a child invocation."""
  return ['--factory-properties=%s' % json.dumps(options.factory_properties),
          '--build-properties=%s' % json.dumps(options.build_properties)]


def RunSteps(steps, step_cmds, options):
  """Runs each requested step from step_cmds; dies on unknown step names."""
  known_names = set(name for name, _ in step_cmds)
  unknown_steps = set(steps) - known_names
  if unknown_steps:
    sys.stderr.write('FATAL: Unknown steps %s\n' % list(unknown_steps))
    sys.exit(1)

  for name, cmd in step_cmds:
    if name in steps:
      cmd(options)
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os
import subprocess
import sys

BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')


def RunBotProcesses(bot_process_map):
  """Waits on each (bot_id, process) pair and ORs their exit codes.

  Args:
    bot_process_map: Iterable of (bot_id, Popen-like) pairs.

  Returns:
    0 if every process exited cleanly, otherwise the bitwise OR of the
    non-zero return codes.
  """
  combined_code = 0
  for bot_id, proc in bot_process_map:
    _, stderr = proc.communicate()
    combined_code |= proc.returncode
    if proc.returncode != 0:
      sys.stdout.write(
          'Error running the bot script with id="%s" %s\n' % (bot_id, stderr))
  return combined_code


def main():
  """Launches bb_run_bot.py in --testing mode for every known bot id."""
  # Deferred: bb_run_bot lives in the parent directory, which is only on
  # sys.path once BUILDBOT_DIR has been appended.
  sys.path.append(BUILDBOT_DIR)
  import bb_run_bot

  bot_procs = []
  for bot in bb_run_bot.GetBotStepMap():
    bot_procs.append(
        (bot, subprocess.Popen(
            [os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
             '--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)))
  return RunBotProcesses(bot_procs)


if __name__ == '__main__':
  sys.exit(main())
To add the missing +# lib, follow the steps below: +# - Run gyp: +# GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" CHROMIUM_GYP_FILE="build/android/chrome_with_libs.gyp" build/gyp_chromium +# - Build chrome_with_libs: +# ninja (or make) chrome_with_libs +# +# This tool also allows replacing the loadable module with a new one via the +# following steps: +# - Build Chrome_apk with the gyp define: +# GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" build/gyp_chromium +# ninja (or make) Chrome_apk +# - Replace libpeerconnection.so with a new one: +# cp the_new_one path/to/libpeerconnection.so +# - Run gyp: +# GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" CHROMIUM_GYP_FILE="build/android/chrome_with_libs.gyp" build/gyp_chromium +# - Build chrome_with_libs: +# ninja (or make) chrome_with_libs +{ + 'targets': [ + { + # An "All" target is required for a top-level gyp-file. + 'target_name': 'All', + 'type': 'none', + 'dependencies': [ + 'chrome_with_libs', + ], + }, + { + 'target_name': 'chrome_with_libs', + 'type': 'none', + 'variables': { + 'intermediate_dir': '<(PRODUCT_DIR)/prebuilt_libs/', + 'chrome_unsigned_path': '<(PRODUCT_DIR)/chrome_apk/Chrome-unsigned.apk', + 'chrome_with_libs_unsigned': '<(intermediate_dir)/Chrome-with-libs-unsigned.apk', + 'chrome_with_libs_final': '<(PRODUCT_DIR)/apks/Chrome-with-libs.apk', + }, + 'dependencies': [ + '<(DEPTH)/clank/native/framework/clank.gyp:chrome_apk' + ], + 'copies': [ + { + 'destination': '<(intermediate_dir)/lib/<(android_app_abi)', + 'files': [ + '<(PRODUCT_DIR)/libpeerconnection.so', + ], + }, + ], + 'actions': [ + { + 'action_name': 'put_libs_in_chrome', + 'variables': { + 'inputs': [ + '<(intermediate_dir)/lib/<(android_app_abi)/libpeerconnection.so', + ], + 'input_apk_path': '<(chrome_unsigned_path)', + 'output_apk_path': '<(chrome_with_libs_unsigned)', + 'libraries_top_dir%': '<(intermediate_dir)', + }, + 'includes': [ 'create_standalone_apk_action.gypi' ], + }, + { + 
'action_name': 'finalize_chrome_with_libs', + 'variables': { + 'input_apk_path': '<(chrome_with_libs_unsigned)', + 'output_apk_path': '<(chrome_with_libs_final)', + }, + 'includes': [ 'finalize_apk_action.gypi'], + }, + ], + }], +} diff --git a/build/android/create_standalone_apk_action.gypi b/build/android/create_standalone_apk_action.gypi new file mode 100644 index 00000000000..d17af7c8e5d --- /dev/null +++ b/build/android/create_standalone_apk_action.gypi @@ -0,0 +1,41 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to provide an action that +# combines a directory of shared libraries and an incomplete APK into a +# standalone APK. +# +# To use this, create a gyp action with the following form: +# { +# 'action_name': 'some descriptive action name', +# 'variables': { +# 'inputs': [ 'input_path1', 'input_path2' ], +# 'input_apk_path': '<(unsigned_apk_path)', +# 'output_apk_path': '<(unsigned_standalone_apk_path)', +# 'libraries_top_dir': '<(libraries_top_dir)', +# }, +# 'includes': [ 'relative/path/to/create_standalone_apk_action.gypi' ], +# }, + +{ + 'message': 'Creating standalone APK: <(output_apk_path)', + 'variables': { + 'inputs': [], + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/create_standalone_apk.py', + '<(input_apk_path)', + '>@(inputs)', + ], + 'outputs': [ + '<(output_apk_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/create_standalone_apk.py', + '--libraries-top-dir=<(libraries_top_dir)', + '--input-apk-path=<(input_apk_path)', + '--output-apk-path=<(output_apk_path)', + ], +} diff --git a/build/android/developer_recommended_flags.gypi b/build/android/developer_recommended_flags.gypi new file mode 100644 index 00000000000..79c201deccb --- /dev/null +++ b/build/android/developer_recommended_flags.gypi @@ 
-0,0 +1,61 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This is the set of recommended gyp variable settings for Chrome for Android development. +# +# These can be used by copying this file to $CHROME_SRC/chrome/supplement.gypi. +# +# Even better, create chrome/supplement.gypi containing the following: +# { +# 'includes': [ '../build/android/developer_recommended_flags.gypi' ] +# } +# and you'll get new settings automatically. +# When using this method, you can override individual settings by setting them unconditionally (with +# no %) in chrome/supplement.gypi. +# I.e. to disable gyp_managed_install but use everything else: +# { +# 'variables': { +# 'gyp_managed_install': 0, +# }, +# 'includes': [ '../build/android/developer_recommended_flags.gypi' ] +# } + +{ + 'variables': { + 'variables': { + # Set component to 'shared_library' to enable the component build. This builds native code as + # many small shared libraries instead of one monolithic library. This slightly reduces the time + # required for incremental builds. + 'component%': 'shared_library', + }, + 'component%': '<(component)', + + # When gyp_managed_install is set to 1, building an APK will install that APK on the connected + # device(/emulator). To install on multiple devices (or onto a new device), build the APK once + # with each device attached. This greatly reduces the time required for incremental builds. + # + # This comes with some caveats: + # Only works with a single device connected (it will print a warning if + # zero or multiple devices are attached). + # Device must be flashed with a user-debug unsigned Android build. + # Some actions are always run (i.e. ninja will never say "no work to do"). + 'gyp_managed_install%': 1, + + # With gyp_managed_install, we do not necessarily need a standalone APK. 
+ # When create_standalone_apk is set to 1, we will build a standalone APK + # anyway. For even faster builds, you can set create_standalone_apk to 0. + 'create_standalone_apk%': 1, + + # Set clang to 1 to use the clang compiler. Clang has much (much, much) better warning/error + # messages than gcc. + # TODO(cjhopman): Enable this when http://crbug.com/156420 is addressed. Until then, users can + # set clang to 1, but Android stack traces will sometimes be incomplete. + #'clang%': 1, + + # Set fastbuild to 1 to build with less debugging information. This can greatly decrease linking + # time. The downside is that stack traces will be missing useful information (like line + # numbers). + #'fastbuild%': 1, + }, +} diff --git a/build/android/dex_action.gypi b/build/android/dex_action.gypi new file mode 100644 index 00000000000..56d386ffad1 --- /dev/null +++ b/build/android/dex_action.gypi @@ -0,0 +1,60 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to provide a rule that dexes +# compiled java files. If proguard_enabled == "true" and CONFIGURATION_NAME == +# "Release", then it will dex the proguard_enabled_input_path instead of the +# normal dex_input_paths/dex_generated_input_paths. 
+# +# To use this, create a gyp target with the following form: +# { +# 'action_name': 'some name for the action' +# 'actions': [ +# 'variables': { +# 'dex_input_paths': [ 'files to dex (when proguard is not used) and add to input paths' ], +# 'dex_generated_input_dirs': [ 'dirs that contain generated files to dex' ], +# +# # For targets that use proguard: +# 'proguard_enabled': 'true', +# 'proguard_enabled_input_path': 'path to dex when using proguard', +# }, +# 'includes': [ 'relative/path/to/dex_action.gypi' ], +# ], +# }, +# + +{ + 'message': 'Creating dex file: <(output_path)', + 'variables': { + 'dex_input_paths': [], + 'dex_generated_input_dirs': [], + 'proguard_enabled%': 'false', + 'proguard_enabled_input_path%': '', + 'dex_no_locals%': 0, + 'dex_additional_options': [], + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/util/md5_check.py', + '<(DEPTH)/build/android/gyp/dex.py', + '>@(dex_input_paths)', + ], + 'outputs': [ + '<(output_path)', + '<(output_path).inputs', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/dex.py', + '--dex-path=<(output_path)', + '--android-sdk-tools=<(android_sdk_tools)', + '--output-directory=<(PRODUCT_DIR)', + '--configuration-name=<(CONFIGURATION_NAME)', + '--proguard-enabled=>(proguard_enabled)', + '--proguard-enabled-input-path=<(proguard_enabled_input_path)', + '--no-locals=>(dex_no_locals)', + '>@(dex_additional_options)', + '>@(dex_input_paths)', + '>@(dex_generated_input_dirs)', + ] +} diff --git a/build/android/disable_lto.gypi b/build/android/disable_lto.gypi new file mode 100644 index 00000000000..e379cfd3199 --- /dev/null +++ b/build/android/disable_lto.gypi @@ -0,0 +1,20 @@ +# Copyright (c) 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included to disable LTO on a target. 
+ +{ + 'target_conditions': [ + ['_toolset=="target"', { + 'conditions': [ + ['OS=="android" and (use_lto==1 or use_lto_o2==1)', { + 'cflags!': [ + '-flto', + '-ffat-lto-objects', + ], + }], + ], + }], + ], +} diff --git a/build/android/empty/src/.keep b/build/android/empty/src/.keep new file mode 100644 index 00000000000..0f710b673dd --- /dev/null +++ b/build/android/empty/src/.keep @@ -0,0 +1,6 @@ +This is a file that needs to live here until http://crbug.com/158155 has +been fixed. + +The ant build system requires that a src folder is always present, and for +some of our targets that is not the case. Giving it an empty src-folder works +nicely though. diff --git a/build/android/empty_proguard.flags b/build/android/empty_proguard.flags new file mode 100644 index 00000000000..53484fe8154 --- /dev/null +++ b/build/android/empty_proguard.flags @@ -0,0 +1 @@ +# Used for apk targets that do not need proguard. See build/java_apk.gypi. diff --git a/build/android/enable_asserts.py b/build/android/enable_asserts.py new file mode 100755 index 00000000000..8fb7dca4725 --- /dev/null +++ b/build/android/enable_asserts.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Enables dalvik vm asserts in the android device.""" + +import argparse +import sys + +from pylib.device import device_utils + + +def main(): + parser = argparse.ArgumentParser() + + set_asserts_group = parser.add_mutually_exclusive_group(required=True) + set_asserts_group.add_argument( + '--enable_asserts', dest='set_asserts', action='store_true', + help='Sets the dalvik.vm.enableassertions property to "all"') + set_asserts_group.add_argument( + '--disable_asserts', dest='set_asserts', action='store_false', + help='Removes the dalvik.vm.enableassertions property') + + args = parser.parse_args() + + # TODO(jbudorick): Accept optional serial number and run only for the + # specified device when present. + devices = device_utils.DeviceUtils.parallel() + + def set_java_asserts_and_restart(device): + if device.SetJavaAsserts(args.set_asserts): + device.RunShellCommand('stop') + device.RunShellCommand('start') + + devices.pMap(set_java_asserts_and_restart) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/envsetup.sh b/build/android/envsetup.sh new file mode 100755 index 00000000000..0545330bb2b --- /dev/null +++ b/build/android/envsetup.sh @@ -0,0 +1,62 @@ +#!/bin/bash +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Sets up environment for building Chromium on Android. + +# Make sure we're being sourced (possibly by another script). Check for bash +# since zsh sets $0 when sourcing. +if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then + echo "ERROR: envsetup must be sourced." + exit 1 +fi + +# This only exists to set local variables. Don't call this manually. 
+android_envsetup_main() { + local SCRIPT_PATH="$1" + local SCRIPT_DIR="$(dirname "$SCRIPT_PATH")" + + local CURRENT_DIR="$(readlink -f "${SCRIPT_DIR}/../../")" + if [[ -z "${CHROME_SRC}" ]]; then + # If $CHROME_SRC was not set, assume current directory is CHROME_SRC. + local CHROME_SRC="${CURRENT_DIR}" + fi + + if [[ "${CURRENT_DIR/"${CHROME_SRC}"/}" == "${CURRENT_DIR}" ]]; then + # If current directory is not in $CHROME_SRC, it might be set for other + # source tree. If $CHROME_SRC was set correctly and we are in the correct + # directory, "${CURRENT_DIR/"${CHROME_SRC}"/}" will be "". + # Otherwise, it will equal to "${CURRENT_DIR}" + echo "Warning: Current directory is out of CHROME_SRC, it may not be \ + the one you want." + echo "${CHROME_SRC}" + fi + + # Allow the caller to override a few environment variables. If any of them is + # unset, we default to a sane value that's known to work. This allows for + # experimentation with a custom SDK. + if [[ -z "${ANDROID_SDK_ROOT}" || ! -d "${ANDROID_SDK_ROOT}" ]]; then + local ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_tools/sdk/" + fi + + # Add Android SDK tools to system path. + export PATH=$PATH:${ANDROID_SDK_ROOT}/platform-tools + + # Add Android utility tools to the system path. + export PATH=$PATH:${ANDROID_SDK_ROOT}/tools/ + + # Add Chromium Android development scripts to system path. + # Must be after CHROME_SRC is set. + export PATH=$PATH:${CHROME_SRC}/build/android + + export ENVSETUP_GYP_CHROME_SRC=${CHROME_SRC} # TODO(thakis): Remove. +} +# In zsh, $0 is the name of the file being sourced. +android_envsetup_main "${BASH_SOURCE:-$0}" +unset -f android_envsetup_main + +android_gyp() { + echo "Please call build/gyp_chromium instead. android_gyp is going away." 
+ "${ENVSETUP_GYP_CHROME_SRC}/build/gyp_chromium" --depth="${ENVSETUP_GYP_CHROME_SRC}" --check "$@" +} diff --git a/build/android/finalize_apk_action.gypi b/build/android/finalize_apk_action.gypi new file mode 100644 index 00000000000..644f9e8ecc5 --- /dev/null +++ b/build/android/finalize_apk_action.gypi @@ -0,0 +1,49 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to provide an action that +# signs and zipaligns an APK. +# +# To use this, create a gyp action with the following form: +# { +# 'action_name': 'some descriptive action name', +# 'variables': { +# 'input_apk_path': 'relative/path/to/input.apk', +# 'output_apk_path': 'relative/path/to/output.apk', +# }, +# 'includes': [ '../../build/android/finalize_apk_action.gypi' ], +# }, +# + +{ + 'message': 'Signing/aligning <(_target_name) APK: <(input_apk_path)', + 'variables': { + 'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore', + 'keystore_name%': 'chromiumdebugkey', + 'keystore_password%': 'chromium', + 'zipalign_path%': '<(android_sdk_tools)/zipalign', + 'rezip_apk_jar_path%': '<(PRODUCT_DIR)/lib.java/rezip_apk.jar', + 'load_library_from_zip%': 0, + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/finalize_apk.py', + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(keystore_path)', + '<(input_apk_path)', + ], + 'outputs': [ + '<(output_apk_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/finalize_apk.py', + '--zipalign-path=<(zipalign_path)', + '--unsigned-apk-path=<(input_apk_path)', + '--final-apk-path=<(output_apk_path)', + '--key-path=<(keystore_path)', + '--key-name=<(keystore_name)', + '--key-passwd=<(keystore_password)', + '--load-library-from-zip=<(load_library_from_zip)', + '--rezip-apk-jar-path=<(rezip_apk_jar_path)', + ], +} diff --git a/build/android/finalize_splits_action.gypi 
b/build/android/finalize_splits_action.gypi new file mode 100644 index 00000000000..daa7f834ebb --- /dev/null +++ b/build/android/finalize_splits_action.gypi @@ -0,0 +1,76 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to provide an action that +# signs and zipaligns split APKs. +# +# Required variables: +# apk_name - Base name of the apk. +# Optional variables: +# density_splits - Whether to process density splits +# language_splits - Whether to language splits + +{ + 'variables': { + 'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore', + 'keystore_name%': 'chromiumdebugkey', + 'keystore_password%': 'chromium', + 'zipalign_path%': '<(android_sdk_tools)/zipalign', + 'density_splits%': 0, + 'language_splits%': [], + 'resource_packaged_apk_name': '<(apk_name)-resources.ap_', + 'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)', + 'base_output_path': '<(PRODUCT_DIR)/apks/<(apk_name)', + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/finalize_splits.py', + '<(DEPTH)/build/android/gyp/finalize_apk.py', + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(keystore_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/finalize_splits.py', + '--resource-packaged-apk-path=<(resource_packaged_apk_path)', + '--base-output-path=<(base_output_path)', + '--zipalign-path=<(zipalign_path)', + '--key-path=<(keystore_path)', + '--key-name=<(keystore_name)', + '--key-passwd=<(keystore_password)', + ], + 'conditions': [ + ['density_splits == 1', { + 'message': 'Signing/aligning <(_target_name) density splits', + 'inputs': [ + '<(resource_packaged_apk_path)_hdpi', + '<(resource_packaged_apk_path)_xhdpi', + '<(resource_packaged_apk_path)_xxhdpi', + '<(resource_packaged_apk_path)_xxxhdpi', + '<(resource_packaged_apk_path)_tvdpi', + ], + 'outputs': [ 
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Runs findbugs, and returns an error code if there are new warnings.

Other options:
  --only-analyze: restrict analysis to the classes you are interested in.
  --release-build: analyze the classes in the out/Release directory.
  --findbug-args: pass additional options through to findbugs.

Run $CHROMIUM_SRC/third_party/findbugs/bin/findbugs -textui for details.
"""

import argparse
import os
import sys

from pylib import constants
from pylib.utils import findbugs

_DEFAULT_BASE_DIR = os.path.join(
    constants.DIR_SOURCE_ROOT, 'build', 'android', 'findbugs_filter')

sys.path.append(
    os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'android', 'gyp'))
from util import build_utils


def main():
  parser = argparse.ArgumentParser()

  parser.add_argument(
      '-a', '--auxclasspath', default=None, dest='auxclasspath',
      help='Set aux classpath for analysis.')
  parser.add_argument(
      '--auxclasspath-gyp', dest='auxclasspath_gyp',
      help='A gyp list containing the aux classpath for analysis')
  parser.add_argument(
      '-o', '--only-analyze', default=None, dest='only_analyze',
      help='Only analyze the given classes and packages.')
  parser.add_argument(
      '-e', '--exclude', default=None, dest='exclude',
      help='Exclude bugs matching given filter.')
  parser.add_argument(
      '-l', '--release-build', action='store_true', dest='release_build',
      help='Analyze release build instead of debug.')
  parser.add_argument(
      '-f', '--findbug-args', default=None, dest='findbug_args',
      help='Additional findbug arguments.')
  parser.add_argument(
      '-b', '--base-dir', default=_DEFAULT_BASE_DIR, dest='base_dir',
      help='Base directory for configuration file.')
  parser.add_argument(
      '--output-file', dest='output_file',
      help='Path to save the output to.')
  parser.add_argument(
      '--stamp', help='Path to touch on success.')
  parser.add_argument(
      '--depfile', help="Path to the depfile. This must be specified as the "
      "action's first output.")
  parser.add_argument(
      'jar_paths', metavar='JAR_PATH', nargs='+',
      help='JAR file to analyze')

  args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))

  # A colon-separated --auxclasspath takes precedence over the gyp-list form.
  if args.auxclasspath:
    args.auxclasspath = args.auxclasspath.split(':')
  elif args.auxclasspath_gyp:
    args.auxclasspath = build_utils.ParseGypList(args.auxclasspath_gyp)

  # Default the exclusion filter to the one shipped in base_dir.
  if args.base_dir and not args.exclude:
    args.exclude = os.path.join(args.base_dir, 'findbugs_exclude.xml')

  findbugs_command, findbugs_warnings = findbugs.Run(
      args.exclude, args.only_analyze, args.auxclasspath,
      args.output_file, args.findbug_args, args.jar_paths)

  if findbugs_warnings:
    report = ['', '*' * 80, 'FindBugs run via:', str(findbugs_command), '',
              'FindBugs reported the following issues:']
    report.extend(str(warning) for warning in sorted(findbugs_warnings))
    report.extend(['*' * 80, ''])
    sys.stdout.write('\n'.join(report) + '\n')
  else:
    if args.depfile:
      # NOTE(review): args.auxclasspath may still be None here if neither
      # auxclasspath flag was passed, which would fail the concatenation —
      # pre-existing behavior, confirm callers always pass one.
      build_utils.WriteDepfile(
          args.depfile,
          build_utils.GetPythonDependencies() + args.auxclasspath +
          args.jar_paths)
    if args.stamp:
      build_utils.Touch(args.stamp)

  # Non-zero (the warning count) when new issues were found.
  return len(findbugs_warnings)


if __name__ == '__main__':
  sys.exit(main())
+ +"""Aggregates EMMA coverage files to produce html output.""" + +import fnmatch +import json +import optparse +import os +import sys + +from pylib import cmd_helper +from pylib import constants + + +def _GetFilesWithExt(root_dir, ext): + """Gets all files with a given extension. + + Args: + root_dir: Directory in which to search for files. + ext: Extension to look for (including dot) + + Returns: + A list of absolute paths to files that match. + """ + files = [] + for root, _, filenames in os.walk(root_dir): + basenames = fnmatch.filter(filenames, '*.' + ext) + files.extend([os.path.join(root, basename) + for basename in basenames]) + + return files + + +def main(): + option_parser = optparse.OptionParser() + option_parser.add_option('--output', help='HTML output filename.') + option_parser.add_option('--coverage-dir', default=None, + help=('Root of the directory in which to search for ' + 'coverage data (.ec) files.')) + option_parser.add_option('--metadata-dir', default=None, + help=('Root of the directory in which to search for ' + 'coverage metadata (.em) files.')) + option_parser.add_option('--cleanup', action='store_true', + help=('If set, removes coverage files generated at ' + 'runtime.')) + options, _ = option_parser.parse_args() + + if not (options.coverage_dir and options.metadata_dir and options.output): + option_parser.error('One or more mandatory options are missing.') + + coverage_files = _GetFilesWithExt(options.coverage_dir, 'ec') + metadata_files = _GetFilesWithExt(options.metadata_dir, 'em') + print 'Found coverage files: %s' % str(coverage_files) + print 'Found metadata files: %s' % str(metadata_files) + + sources = [] + for f in metadata_files: + sources_file = os.path.splitext(f)[0] + '_sources.txt' + with open(sources_file, 'r') as sf: + sources.extend(json.load(sf)) + sources = [os.path.join(constants.DIR_SOURCE_ROOT, s) for s in sources] + print 'Sources: %s' % sources + + input_args = [] + for f in coverage_files + metadata_files: + 
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Archives a set of files."""

import ast
import optparse
import os
import sys
import zipfile


def DoZip(inputs, output, base_dir):
  """Writes a new zip at `output` containing each input file, stored under
  its path relative to `base_dir`."""
  with zipfile.ZipFile(output, 'w') as archive:
    for path in inputs:
      archive.write(path, os.path.relpath(path, base_dir))


def main():
  """Parses flags and builds the archive."""
  # Deferred project-local import so this module can load without the
  # sibling 'gyp' directory already on sys.path.
  sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
  from util import build_utils

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--inputs', help='List of files to archive.')
  parser.add_option('--output', help='Path to output archive.')
  parser.add_option('--base-dir',
                    help='If provided, the paths in the archive will be '
                    'relative to this directory', default='.')
  options, _ = parser.parse_args()

  # --inputs is a python-literal list of paths (hence literal_eval).
  DoZip(ast.literal_eval(options.inputs), options.output, options.base_dir)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        build_utils.GetPythonDependencies())


if __name__ == '__main__':
  sys.exit(main())
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Invokes Android's aidl
"""

import optparse
import os
import sys

from util import build_utils


def main(argv):
  """Runs aidl over each input file and zips the generated sources."""
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--aidl-path', help='Path to the aidl binary.')
  parser.add_option('--imports', help='Files to import.')
  parser.add_option('--includes',
                    help='Directories to add as import search paths.')
  parser.add_option('--srcjar', help='Path for srcjar output.')
  options, args = parser.parse_args(argv[1:])

  with build_utils.TempDir() as temp_dir:
    for aidl_file in args:
      # aidl emits <ClassName>.java for each .aidl input.
      class_name = os.path.splitext(os.path.basename(aidl_file))[0]
      java_out = os.path.join(temp_dir, class_name + '.java')

      cmd = [options.aidl_path]
      cmd.extend('-p' + p for p in build_utils.ParseGypList(options.imports))
      if options.includes is not None:
        cmd.extend(
            '-I' + d for d in build_utils.ParseGypList(options.includes))
      cmd.extend([aidl_file, java_out])
      build_utils.CheckOutput(cmd)

    # Bundle everything aidl produced into the output srcjar.
    build_utils.ZipDir(options.srcjar, temp_dir)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        build_utils.GetPythonDependencies())


if __name__ == '__main__':
  sys.exit(main(sys.argv))
+ +Ant build scripts output "BUILD SUCCESSFUL" and build timing at the end of +every build. In the Android build, this just adds a lot of useless noise to the +build output. This script forwards its arguments to ant, and prints Ant's +output up until the BUILD SUCCESSFUL line. + +Also, when a command fails, this script will re-run that ant command with the +'-verbose' argument so that the failure is easier to debug. +""" + +import optparse +import sys +import traceback + +from util import build_utils + + +def main(argv): + option_parser = optparse.OptionParser() + build_utils.AddDepfileOption(option_parser) + options, args = option_parser.parse_args(argv[1:]) + + try: + stdout = build_utils.CheckOutput(['ant'] + args) + except build_utils.CalledProcessError: + # It is very difficult to diagnose ant failures without the '-verbose' + # argument. So, when an ant command fails, re-run it with '-verbose' so that + # the cause of the failure is easier to identify. + verbose_args = ['-verbose'] + [a for a in args if a != '-quiet'] + try: + stdout = build_utils.CheckOutput(['ant'] + verbose_args) + except build_utils.CalledProcessError: + traceback.print_exc() + sys.exit(1) + + # If this did sys.exit(1), building again would succeed (which would be + # awkward). Instead, just print a big warning. + build_utils.PrintBigWarning( + 'This is unexpected. `ant ' + ' '.join(args) + '` failed.' 
+ + 'But, running `ant ' + ' '.join(verbose_args) + '` passed.') + + stdout = stdout.strip().split('\n') + for line in stdout: + if line.strip() == 'BUILD SUCCESSFUL': + break + print line + + if options.depfile: + assert '-buildfile' in args + ant_buildfile = args[args.index('-buildfile') + 1] + + build_utils.WriteDepfile( + options.depfile, + [ant_buildfile] + build_utils.GetPythonDependencies()) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/build/android/gyp/apk_install.py b/build/android/gyp/apk_install.py new file mode 100755 index 00000000000..a512e50e3d5 --- /dev/null +++ b/build/android/gyp/apk_install.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Installs an APK. + +""" + +import optparse +import os +import re +import sys + +from util import build_device +from util import build_utils +from util import md5_check + +BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..') +sys.path.append(BUILD_ANDROID_DIR) + +from pylib import constants +from pylib.utils import apk_helper + + +def GetNewMetadata(device, apk_package): + """Gets the metadata on the device for the apk_package apk.""" + output = device.RunShellCommand('ls -l /data/app/') + # Matches lines like: + # -rw-r--r-- system system 7376582 2013-04-19 16:34 \ + # org.chromium.chrome.shell.apk + # -rw-r--r-- system system 7376582 2013-04-19 16:34 \ + # org.chromium.chrome.shell-1.apk + apk_matcher = lambda s: re.match('.*%s(-[0-9]*)?(.apk)?$' % apk_package, s) + matches = filter(apk_matcher, output) + return matches[0] if matches else None + +def HasInstallMetadataChanged(device, apk_package, metadata_path): + """Checks if the metadata on the device for apk_package has changed.""" + if not os.path.exists(metadata_path): + return True + + with open(metadata_path, 'r') as expected_file: + return 
expected_file.read() != device.GetInstallMetadata(apk_package) + + +def RecordInstallMetadata(device, apk_package, metadata_path): + """Records the metadata from the device for apk_package.""" + metadata = GetNewMetadata(device, apk_package) + if not metadata: + raise Exception('APK install failed unexpectedly.') + + with open(metadata_path, 'w') as outfile: + outfile.write(metadata) + + +def main(): + parser = optparse.OptionParser() + parser.add_option('--apk-path', + help='Path to .apk to install.') + parser.add_option('--split-apk-path', + help='Path to .apk splits (can specify multiple times, causes ' + '--install-multiple to be used.', + action='append') + parser.add_option('--android-sdk-tools', + help='Path to the Android SDK build tools folder. ' + + 'Required when using --split-apk-path.') + parser.add_option('--install-record', + help='Path to install record (touched only when APK is installed).') + parser.add_option('--build-device-configuration', + help='Path to build device configuration.') + parser.add_option('--stamp', + help='Path to touch on success.') + parser.add_option('--configuration-name', + help='The build CONFIGURATION_NAME') + options, _ = parser.parse_args() + + device = build_device.GetBuildDeviceFromPath( + options.build_device_configuration) + if not device: + return + + constants.SetBuildType(options.configuration_name) + + serial_number = device.GetSerialNumber() + apk_package = apk_helper.GetPackageName(options.apk_path) + + metadata_path = '%s.%s.device.time.stamp' % (options.apk_path, serial_number) + + # If the APK on the device does not match the one that was last installed by + # the build, then the APK has to be installed (regardless of the md5 record). 
+ force_install = HasInstallMetadataChanged(device, apk_package, metadata_path) + + + def Install(): + if options.split_apk_path: + device.InstallSplitApk(options.apk_path, options.split_apk_path) + else: + device.Install(options.apk_path, reinstall=True) + + RecordInstallMetadata(device, apk_package, metadata_path) + build_utils.Touch(options.install_record) + + + record_path = '%s.%s.md5.stamp' % (options.apk_path, serial_number) + md5_check.CallAndRecordIfStale( + Install, + record_path=record_path, + input_paths=[options.apk_path], + force=force_install) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/apk_obfuscate.py b/build/android/gyp/apk_obfuscate.py new file mode 100755 index 00000000000..b0757588007 --- /dev/null +++ b/build/android/gyp/apk_obfuscate.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Generates the obfuscated jar and test jar for an apk. + +If proguard is not enabled or 'Release' is not in the configuration name, +obfuscation will be a no-op. 
+""" + +import optparse +import os +import sys + +from util import build_utils +from util import proguard_util + + +def ParseArgs(argv): + parser = optparse.OptionParser() + parser.add_option('--android-sdk', help='path to the Android SDK folder') + parser.add_option('--android-sdk-tools', + help='path to the Android SDK build tools folder') + parser.add_option('--android-sdk-jar', + help='path to Android SDK\'s android.jar') + parser.add_option('--proguard-jar-path', + help='Path to proguard.jar in the sdk') + parser.add_option('--input-jars-paths', + help='Path to jars to include in obfuscated jar') + + parser.add_option('--proguard-configs', + help='Paths to proguard config files') + + parser.add_option('--configuration-name', + help='Gyp configuration name (i.e. Debug, Release)') + parser.add_option('--proguard-enabled', action='store_true', + help='Set if proguard is enabled for this target.') + + parser.add_option('--obfuscated-jar-path', + help='Output path for obfuscated jar.') + + parser.add_option('--testapp', action='store_true', + help='Set this if building an instrumentation test apk') + parser.add_option('--tested-apk-obfuscated-jar-path', + help='Path to obfusctated jar of the tested apk') + parser.add_option('--test-jar-path', + help='Output path for jar containing all the test apk\'s ' + 'code.') + + parser.add_option('--stamp', help='File to touch on success') + + (options, args) = parser.parse_args(argv) + + if args: + parser.error('No positional arguments should be given. ' + str(args)) + + # Check that required options have been provided. 
+ required_options = ( + 'android_sdk', + 'android_sdk_tools', + 'android_sdk_jar', + 'proguard_jar_path', + 'input_jars_paths', + 'configuration_name', + 'obfuscated_jar_path', + ) + + if options.testapp: + required_options += ( + 'test_jar_path', + ) + + build_utils.CheckOptions(options, parser, required=required_options) + return options, args + + +def DoProguard(options): + proguard = proguard_util.ProguardCmdBuilder(options.proguard_jar_path) + proguard.outjar(options.obfuscated_jar_path) + + library_classpath = [options.android_sdk_jar] + input_jars = build_utils.ParseGypList(options.input_jars_paths) + + exclude_paths = [] + configs = build_utils.ParseGypList(options.proguard_configs) + if options.tested_apk_obfuscated_jar_path: + # configs should only contain the process_resources.py generated config. + assert len(configs) == 1, ( + 'test apks should not have custom proguard configs: ' + str(configs)) + tested_jar_info = build_utils.ReadJson( + options.tested_apk_obfuscated_jar_path + '.info') + exclude_paths = tested_jar_info['inputs'] + configs = tested_jar_info['configs'] + + proguard.is_test(True) + proguard.mapping(options.tested_apk_obfuscated_jar_path + '.mapping') + library_classpath.append(options.tested_apk_obfuscated_jar_path) + + proguard.libraryjars(library_classpath) + proguard_injars = [p for p in input_jars if p not in exclude_paths] + proguard.injars(proguard_injars) + proguard.configs(configs) + + proguard.CheckOutput() + + this_info = { + 'inputs': proguard_injars, + 'configs': configs + } + + build_utils.WriteJson( + this_info, options.obfuscated_jar_path + '.info') + + +def main(argv): + options, _ = ParseArgs(argv) + + input_jars = build_utils.ParseGypList(options.input_jars_paths) + + if options.testapp: + dependency_class_filters = [ + '*R.class', '*R$*.class', '*Manifest.class', '*BuildConfig.class'] + build_utils.MergeZips( + options.test_jar_path, input_jars, dependency_class_filters) + + if options.configuration_name == 'Release' 
and options.proguard_enabled: + DoProguard(options) + else: + output_files = [ + options.obfuscated_jar_path, + options.obfuscated_jar_path + '.info', + options.obfuscated_jar_path + '.dump', + options.obfuscated_jar_path + '.seeds', + options.obfuscated_jar_path + '.usage', + options.obfuscated_jar_path + '.mapping'] + for f in output_files: + if os.path.exists(f): + os.remove(f) + build_utils.Touch(f) + + if options.stamp: + build_utils.Touch(options.stamp) + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/copy_ex.py b/build/android/gyp/copy_ex.py new file mode 100755 index 00000000000..a474e770654 --- /dev/null +++ b/build/android/gyp/copy_ex.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Copies files to a directory.""" + +import optparse +import os +import shutil +import sys + +from util import build_utils + + +def _get_all_files(base): + """Returns a list of all the files in |base|. Each entry is relative to the + last path entry of |base|.""" + result = [] + dirname = os.path.dirname(base) + for root, _, files in os.walk(base): + result.extend([os.path.join(root[len(dirname):], f) for f in files]) + return result + + +def main(args): + args = build_utils.ExpandFileArgs(args) + + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + + parser.add_option('--dest', help='Directory to copy files to.') + parser.add_option('--files', action='append', + help='List of files to copy.') + parser.add_option('--clear', action='store_true', + help='If set, the destination directory will be deleted ' + 'before copying files to it. 
This is highly recommended to ' + 'ensure that no stale files are left in the directory.') + parser.add_option('--stamp', help='Path to touch on success.') + + options, _ = parser.parse_args(args) + + if options.clear: + build_utils.DeleteDirectory(options.dest) + build_utils.MakeDirectory(options.dest) + + files = [] + for file_arg in options.files: + files += build_utils.ParseGypList(file_arg) + + deps = [] + + for f in files: + if os.path.isdir(f): + if not options.clear: + print ('To avoid stale files you must use --clear when copying ' + 'directories') + sys.exit(-1) + shutil.copytree(f, os.path.join(options.dest, os.path.basename(f))) + deps.extend(_get_all_files(f)) + else: + shutil.copy(f, options.dest) + deps.append(f) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + deps + build_utils.GetPythonDependencies()) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) + diff --git a/build/android/gyp/create_device_library_links.py b/build/android/gyp/create_device_library_links.py new file mode 100755 index 00000000000..3e630b67c9f --- /dev/null +++ b/build/android/gyp/create_device_library_links.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Creates symlinks to native libraries for an APK. + +The native libraries should have previously been pushed to the device (in +options.target_dir). This script then creates links in an apk's lib/ folder to +those native libraries. 
+""" + +import optparse +import os +import sys + +from util import build_device +from util import build_utils + +BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..') +sys.path.append(BUILD_ANDROID_DIR) + +from pylib import constants +from pylib.utils import apk_helper + +def RunShellCommand(device, cmd): + output = device.RunShellCommand(cmd) + + if output: + raise Exception( + 'Unexpected output running command: ' + cmd + '\n' + + '\n'.join(output)) + + +def CreateSymlinkScript(options): + libraries = build_utils.ParseGypList(options.libraries) + + link_cmd = ( + 'rm $APK_LIBRARIES_DIR/%(lib_basename)s > /dev/null 2>&1 \n' + 'ln -s $STRIPPED_LIBRARIES_DIR/%(lib_basename)s ' + '$APK_LIBRARIES_DIR/%(lib_basename)s \n' + ) + + script = '#!/bin/sh \n' + + for lib in libraries: + script += link_cmd % { 'lib_basename': lib } + + with open(options.script_host_path, 'w') as scriptfile: + scriptfile.write(script) + + +def TriggerSymlinkScript(options): + device = build_device.GetBuildDeviceFromPath( + options.build_device_configuration) + if not device: + return + + apk_package = apk_helper.GetPackageName(options.apk) + apk_libraries_dir = '/data/data/%s/lib' % apk_package + + device_dir = os.path.dirname(options.script_device_path) + mkdir_cmd = ('if [ ! -e %(dir)s ]; then mkdir -p %(dir)s; fi ' % + { 'dir': device_dir }) + RunShellCommand(device, mkdir_cmd) + device.PushChangedFiles([(options.script_host_path, + options.script_device_path)]) + + trigger_cmd = ( + 'APK_LIBRARIES_DIR=%(apk_libraries_dir)s; ' + 'STRIPPED_LIBRARIES_DIR=%(target_dir)s; ' + '. 
%(script_device_path)s' + ) % { + 'apk_libraries_dir': apk_libraries_dir, + 'target_dir': options.target_dir, + 'script_device_path': options.script_device_path + } + RunShellCommand(device, trigger_cmd) + + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = optparse.OptionParser() + parser.add_option('--apk', help='Path to the apk.') + parser.add_option('--script-host-path', + help='Path on the host for the symlink script.') + parser.add_option('--script-device-path', + help='Path on the device to push the created symlink script.') + parser.add_option('--libraries', + help='List of native libraries.') + parser.add_option('--target-dir', + help='Device directory that contains the target libraries for symlinks.') + parser.add_option('--stamp', help='Path to touch on success.') + parser.add_option('--build-device-configuration', + help='Path to build device configuration.') + parser.add_option('--configuration-name', + help='The build CONFIGURATION_NAME') + options, _ = parser.parse_args(args) + + required_options = ['apk', 'libraries', 'script_host_path', + 'script_device_path', 'target_dir', 'configuration_name'] + build_utils.CheckOptions(options, parser, required=required_options) + constants.SetBuildType(options.configuration_name) + + CreateSymlinkScript(options) + TriggerSymlinkScript(options) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/create_dist_jar.py b/build/android/gyp/create_dist_jar.py new file mode 100755 index 00000000000..0d31c5db93f --- /dev/null +++ b/build/android/gyp/create_dist_jar.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Merges a list of jars into a single jar.""" + +import optparse +import sys + +from util import build_utils + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + parser.add_option('--output', help='Path to output jar.') + parser.add_option('--inputs', action='append', help='List of jar inputs.') + options, _ = parser.parse_args(args) + build_utils.CheckOptions(options, parser, ['output', 'inputs']) + + input_jars = [] + for inputs_arg in options.inputs: + input_jars.extend(build_utils.ParseGypList(inputs_arg)) + + build_utils.MergeZips(options.output, input_jars) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + input_jars + build_utils.GetPythonDependencies()) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/create_flutter_jar.py b/build/android/gyp/create_flutter_jar.py new file mode 100644 index 00000000000..c30bae2bd9f --- /dev/null +++ b/build/android/gyp/create_flutter_jar.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Create a JAR incorporating all the components required to build a Flutter application""" + +import optparse +import os +import sys +import zipfile + +from util import build_utils + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + parser.add_option('--output', help='Path to output jar.') + parser.add_option('--dist_jar', help='Flutter shell Java code jar.') + parser.add_option('--native_lib', action='append', help='Native code library.') + parser.add_option('--android_abi', help='Native code ABI.') + parser.add_option('--asset_dir', help='Path to assets.') + options, _ = parser.parse_args(args) + build_utils.CheckOptions(options, parser, [ + 'output', 'dist_jar', 'native_lib', 'android_abi', 'asset_dir' + ]) + + input_deps = [] + + with zipfile.ZipFile(options.output, 'w', zipfile.ZIP_DEFLATED) as out_zip: + input_deps.append(options.dist_jar) + with zipfile.ZipFile(options.dist_jar, 'r') as dist_zip: + for dist_file in dist_zip.infolist(): + if dist_file.filename.endswith('.class'): + out_zip.writestr(dist_file.filename, dist_zip.read(dist_file.filename)) + + for native_lib in options.native_lib: + input_deps.append(native_lib) + out_zip.write(native_lib, + 'lib/%s/%s' % (options.android_abi, os.path.basename(native_lib))) + + for asset_file in os.listdir(options.asset_dir): + input_deps.append(asset_file) + out_zip.write(os.path.join(options.asset_dir, asset_file), + 'assets/%s' % asset_file) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + input_deps + build_utils.GetPythonDependencies()) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/create_java_binary_script.py b/build/android/gyp/create_java_binary_script.py new file mode 100755 index 00000000000..5de43f2d4a4 --- /dev/null +++ b/build/android/gyp/create_java_binary_script.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium 
Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Creates a simple script to run a java "binary". + +This creates a script that sets up the java command line for running a java +jar. This includes correctly setting the classpath and the main class. +""" + +import optparse +import os +import sys + +from util import build_utils + +# The java command must be executed in the current directory because there may +# be user-supplied paths in the args. The script receives the classpath relative +# to the directory that the script is written in and then, when run, must +# recalculate the paths relative to the current directory. +script_template = """\ +#!/usr/bin/env python +# +# This file was generated by build/android/gyp/create_java_binary_script.py + +import os +import sys + +self_dir = os.path.dirname(__file__) +classpath = [{classpath}] +if os.getcwd() != self_dir: + offset = os.path.relpath(self_dir, os.getcwd()) + classpath = [os.path.join(offset, p) for p in classpath] +java_args = [ + "java", + "-classpath", ":".join(classpath), + "-enableassertions", + \"{main_class}\"] + sys.argv[1:] +os.execvp("java", java_args) +""" + +def main(argv): + argv = build_utils.ExpandFileArgs(argv) + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + parser.add_option('--output', help='Output path for executable script.') + parser.add_option('--jar-path', help='Path to the main jar.') + parser.add_option('--main-class', + help='Name of the java class with the "main" entry point.') + parser.add_option('--classpath', action='append', + help='Classpath for running the jar.') + options, _ = parser.parse_args(argv) + + classpath = [options.jar_path] + for cp_arg in options.classpath: + classpath += build_utils.ParseGypList(cp_arg) + + run_dir = os.path.dirname(options.output) + classpath = [os.path.relpath(p, run_dir) for p in classpath] + + with open(options.output, 'w') as 
script: + script.write(script_template.format( + classpath=('"%s"' % '", "'.join(classpath)), + main_class=options.main_class)) + + os.chmod(options.output, 0750) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + build_utils.GetPythonDependencies()) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/create_placeholder_files.py b/build/android/gyp/create_placeholder_files.py new file mode 100755 index 00000000000..103e1df7f2d --- /dev/null +++ b/build/android/gyp/create_placeholder_files.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Create placeholder files. +""" + +import optparse +import os +import sys + +from util import build_utils + +def main(): + parser = optparse.OptionParser() + parser.add_option( + '--dest-lib-dir', + help='Destination directory to have placeholder files.') + parser.add_option( + '--stamp', + help='Path to touch on success') + + options, args = parser.parse_args() + + for name in args: + target_path = os.path.join(options.dest_lib_dir, name) + build_utils.Touch(target_path) + + if options.stamp: + build_utils.Touch(options.stamp) + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/build/android/gyp/create_standalone_apk.py b/build/android/gyp/create_standalone_apk.py new file mode 100755 index 00000000000..c5605992860 --- /dev/null +++ b/build/android/gyp/create_standalone_apk.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Combines stripped libraries and incomplete APK into single standalone APK. 
+ +""" + +import optparse +import os +import shutil +import sys +import tempfile + +from util import build_utils +from util import md5_check + +def CreateStandaloneApk(options): + def DoZip(): + with tempfile.NamedTemporaryFile(suffix='.zip') as intermediate_file: + intermediate_path = intermediate_file.name + shutil.copy(options.input_apk_path, intermediate_path) + apk_path_abs = os.path.abspath(intermediate_path) + build_utils.CheckOutput( + ['zip', '-r', '-1', apk_path_abs, 'lib'], + cwd=options.libraries_top_dir) + shutil.copy(intermediate_path, options.output_apk_path) + + input_paths = [options.input_apk_path, options.libraries_top_dir] + record_path = '%s.standalone.stamp' % options.input_apk_path + md5_check.CallAndRecordIfStale( + DoZip, + record_path=record_path, + input_paths=input_paths) + + +def main(): + parser = optparse.OptionParser() + parser.add_option('--libraries-top-dir', + help='Top directory that contains libraries ' + '(i.e. library paths are like ' + 'libraries_top_dir/lib/android_app_abi/foo.so).') + parser.add_option('--input-apk-path', help='Path to incomplete APK.') + parser.add_option('--output-apk-path', help='Path for standalone APK.') + parser.add_option('--stamp', help='Path to touch on success.') + options, _ = parser.parse_args() + + required_options = ['libraries_top_dir', 'input_apk_path', 'output_apk_path'] + build_utils.CheckOptions(options, parser, required=required_options) + + CreateStandaloneApk(options) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/create_test_runner_script.py b/build/android/gyp/create_test_runner_script.py new file mode 100755 index 00000000000..247bf20ad52 --- /dev/null +++ b/build/android/gyp/create_test_runner_script.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python +# +# Copyright 2015 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Creates a script to run an android test using build/android/test_runner.py. +""" + +import argparse +import os +import sys + +from util import build_utils + +SCRIPT_TEMPLATE = """\ +#!/usr/bin/env python +# +# This file was generated by build/android/gyp/create_test_runner_script.py + +import logging +import os +import sys + +def main(): + script_directory = os.path.dirname(__file__) + + def ResolvePath(path): + \"\"\"Returns an absolute filepath given a path relative to this script. + \"\"\" + return os.path.abspath(os.path.join(script_directory, path)) + + test_runner_path = ResolvePath('{test_runner_path}') + test_runner_args = {test_runner_args} + test_runner_path_args = {test_runner_path_args} + for arg, path in test_runner_path_args.iteritems(): + test_runner_args.extend([arg, ResolvePath(path)]) + + test_runner_cmd = ' '.join( + [test_runner_path] + test_runner_args + sys.argv[1:]) + logging.critical(test_runner_cmd) + os.system(test_runner_cmd) + +if __name__ == '__main__': + sys.exit(main()) +""" + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--script-output-path', + help='Output path for executable script.') + parser.add_argument('--depfile', + help='Path to the depfile. This must be specified as ' + "the action's first output.") + # We need to intercept any test runner path arguments and make all + # of the paths relative to the output script directory. 
+ group = parser.add_argument_group('Test runner path arguments.') + group.add_argument('--output-directory') + group.add_argument('--isolate-file-path') + group.add_argument('--support-apk') + args, test_runner_args = parser.parse_known_args() + + def RelativizePathToScript(path): + """Returns the path relative to the output script directory.""" + return os.path.relpath(path, os.path.dirname(args.script_output_path)) + + test_runner_path = os.path.join( + os.path.dirname(__file__), os.path.pardir, 'test_runner.py') + test_runner_path = RelativizePathToScript(test_runner_path) + + test_runner_path_args = {} + if args.output_directory: + test_runner_path_args['--output-directory'] = RelativizePathToScript( + args.output_directory) + if args.isolate_file_path: + test_runner_path_args['--isolate-file-path'] = RelativizePathToScript( + args.isolate_file_path) + if args.support_apk: + test_runner_path_args['--support-apk'] = RelativizePathToScript( + args.support_apk) + + with open(args.script_output_path, 'w') as script: + script.write(SCRIPT_TEMPLATE.format( + test_runner_path=str(test_runner_path), + test_runner_args=str(test_runner_args), + test_runner_path_args=str(test_runner_path_args))) + + os.chmod(args.script_output_path, 0750) + + if args.depfile: + build_utils.WriteDepfile( + args.depfile, + build_utils.GetPythonDependencies()) + +if __name__ == '__main__': + sys.exit(main()) \ No newline at end of file diff --git a/build/android/gyp/dex.py b/build/android/gyp/dex.py new file mode 100755 index 00000000000..c26d23a6116 --- /dev/null +++ b/build/android/gyp/dex.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import optparse +import os +import sys + +from util import build_utils +from util import md5_check + + +def DoDex(options, paths): + dx_binary = os.path.join(options.android_sdk_tools, 'dx') + # See http://crbug.com/272064 for context on --force-jumbo. + dex_cmd = [dx_binary, '--dex', '--force-jumbo', '--output', options.dex_path] + if options.no_locals != '0': + dex_cmd.append('--no-locals') + + dex_cmd += paths + + record_path = '%s.md5.stamp' % options.dex_path + md5_check.CallAndRecordIfStale( + lambda: build_utils.CheckOutput(dex_cmd, print_stderr=False), + record_path=record_path, + input_paths=paths, + input_strings=dex_cmd, + force=not os.path.exists(options.dex_path)) + build_utils.WriteJson( + [os.path.relpath(p, options.output_directory) for p in paths], + options.dex_path + '.inputs') + + +def main(): + args = build_utils.ExpandFileArgs(sys.argv[1:]) + + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + + parser.add_option('--android-sdk-tools', + help='Android sdk build tools directory.') + parser.add_option('--output-directory', + default=os.getcwd(), + help='Path to the output build directory.') + parser.add_option('--dex-path', help='Dex output path.') + parser.add_option('--configuration-name', + help='The build CONFIGURATION_NAME.') + parser.add_option('--proguard-enabled', + help='"true" if proguard is enabled.') + parser.add_option('--proguard-enabled-input-path', + help=('Path to dex in Release mode when proguard ' + 'is enabled.')) + parser.add_option('--no-locals', + help='Exclude locals list from the dex file.') + parser.add_option('--inputs', help='A list of additional input paths.') + parser.add_option('--excluded-paths', + help='A list of paths to exclude from the dex file.') + + options, paths = parser.parse_args(args) + + required_options = ('android_sdk_tools',) + build_utils.CheckOptions(options, parser, required=required_options) + + if (options.proguard_enabled == 'true' + and options.configuration_name == 
'Release'): + paths = [options.proguard_enabled_input_path] + + if options.inputs: + paths += build_utils.ParseGypList(options.inputs) + + if options.excluded_paths: + # Excluded paths are relative to the output directory. + exclude_paths = build_utils.ParseGypList(options.excluded_paths) + paths = [p for p in paths if not + os.path.relpath(p, options.output_directory) in exclude_paths] + + DoDex(options, paths) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + paths + build_utils.GetPythonDependencies()) + + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/emma_instr.py b/build/android/gyp/emma_instr.py new file mode 100755 index 00000000000..6f3555a3276 --- /dev/null +++ b/build/android/gyp/emma_instr.py @@ -0,0 +1,207 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Instruments classes and jar files. + +This script corresponds to the 'emma_instr' action in the java build process. +Depending on whether emma_instrument is set, the 'emma_instr' action will either +call one of the instrument commands, or the copy command. + +Possible commands are: +- instrument_jar: Accepts a jar and instruments it using emma.jar. +- instrument_classes: Accepts a directory containing java classes and + instruments it using emma.jar. +- copy: Called when EMMA coverage is not enabled. This allows us to make + this a required step without necessarily instrumenting on every build. + Also removes any stale coverage files. 
def _AddCommonOptions(option_parser):
  """Adds the options shared by all emma_instr commands to |option_parser|."""
  common_options = (
      ('--input-path',
       'Path to input file(s). Either the classes '
       'directory, or the path to a jar.'),
      ('--output-path',
       'Path to output final file(s) to. Either the '
       'final classes directory, or the directory in '
       'which to place the instrumented/copied jar.'),
      ('--stamp', 'Path to touch when done.'),
      ('--coverage-file', 'File to create with coverage metadata.'),
      ('--sources-file', 'File to create with the list of sources.'),
  )
  for flag, help_text in common_options:
    option_parser.add_option(flag, help=help_text)
def _CreateSourcesFile(sources_string, sources_file, src_root):
  """Adds all normalized source directories to |sources_file|.

  Args:
    sources_string: String generated from gyp containing the list of sources.
    sources_file: File into which to write the JSON list of sources.
    src_root: Root which sources added to the file should be relative to.

  Returns:
    An exit code: 0 on success, 1 if a source lies outside |src_root|.
  """
  src_root = os.path.abspath(src_root)
  relative_sources = []
  for s in build_utils.ParseGypList(sources_string):
    abs_source = os.path.abspath(s)
    # A bare string-prefix test would wrongly accept e.g. /src-other when
    # src_root is /src, so compare against the root plus a path separator.
    if abs_source != src_root and not abs_source.startswith(
        src_root + os.sep):
      print ('Error: found source directory not under repository root: %s %s'
             % (abs_source, src_root))
      return 1
    relative_sources.append(os.path.relpath(abs_source, src_root))

  with open(sources_file, 'w') as f:
    json.dump(relative_sources, f)
  # Return 0 explicitly so the documented exit-code contract holds
  # (previously the function fell off the end and returned None on success).
  return 0
CommandFunctionTuple = collections.namedtuple(
    'CommandFunctionTuple', ['add_options_func', 'run_command_func'])

# Both instrumentation commands share one runner; the command name it receives
# tells it whether to copy from the lib/ or classes/ output.
_INSTRUMENT = CommandFunctionTuple(_AddInstrumentOptions,
                                   _RunInstrumentCommand)
VALID_COMMANDS = {
    'copy': CommandFunctionTuple(_AddCommonOptions, _RunCopyCommand),
    'instrument_jar': _INSTRUMENT,
    'instrument_classes': _INSTRUMENT,
}


def main():
  """Builds the command parser and dispatches to the requested command."""
  option_parser = command_option_parser.CommandOptionParser(
      commands_dict=VALID_COMMANDS)
  command_option_parser.ParseAndExecute(option_parser)
def _RunRezipApk(rezip_apk_jar_path, command, in_zip_file, out_zip_file):
  """Invokes the RezipApk tool with |command| on |in_zip_file|.

  Shared driver for the two RezipApk modes; previously the java command line
  was duplicated in both callers.
  """
  build_utils.CheckOutput([
      'java',
      '-classpath',
      rezip_apk_jar_path,
      'RezipApk',
      command,
      in_zip_file,
      out_zip_file,
  ])


def RenameInflateAndAddPageAlignment(
    rezip_apk_jar_path, in_zip_file, out_zip_file):
  """Renames/uncompresses the library and pads it to a page boundary."""
  _RunRezipApk(rezip_apk_jar_path, 'renamealign', in_zip_file, out_zip_file)


def ReorderAndAlignApk(rezip_apk_jar_path, in_zip_file, out_zip_file):
  """Restores canonical entry order and aligns uncompressed items."""
  _RunRezipApk(rezip_apk_jar_path, 'reorder', in_zip_file, out_zip_file)


def JarSigner(key_path, key_name, key_passwd, unsigned_path, signed_path):
  """Copies |unsigned_path| to |signed_path| and signs it in place.

  NOTE(review): MD5withRSA/SHA1 are cryptographically weak; kept unchanged
  for compatibility with the existing signing configuration.
  """
  shutil.copy(unsigned_path, signed_path)
  sign_cmd = [
      'jarsigner',
      '-sigalg', 'MD5withRSA',
      '-digestalg', 'SHA1',
      '-keystore', key_path,
      '-storepass', key_passwd,
      signed_path,
      key_name,
  ]
  build_utils.CheckOutput(sign_cmd)


def AlignApk(zipalign_path, unaligned_path, final_path):
  """Runs zipalign to 4-byte-align |unaligned_path| into |final_path|."""
  align_cmd = [
      zipalign_path,
      '-f', '4',  # 4 bytes
      unaligned_path,
      final_path,
  ]
  build_utils.CheckOutput(align_cmd)
def FinalizeApk(options):
  """Signs and zipaligns the APK described by |options|.

  The order of operations matters: when --load-library-from-zip is set, the
  library must be renamed/uncompressed/page-aligned BEFORE signing (the
  filename is part of the signed manifest) and the zip must be reordered
  AFTER signing (signing disturbs the canonical entry order).
  """
  # Both temporaries are cleaned up automatically when the 'with' exits.
  with tempfile.NamedTemporaryFile() as signed_apk_path_tmp, \
      tempfile.NamedTemporaryFile() as apk_to_sign_tmp:

    if options.load_library_from_zip:
      # We alter the name of the library so that the Android Package Manager
      # does not extract it into a separate file. This must be done before
      # signing, as the filename is part of the signed manifest. At the same
      # time we uncompress the library, which is necessary so that it can be
      # loaded directly from the APK.
      # Move the library to a page boundary by adding a page alignment file.
      apk_to_sign = apk_to_sign_tmp.name
      RenameInflateAndAddPageAlignment(
          options.rezip_apk_jar_path, options.unsigned_apk_path, apk_to_sign)
    else:
      apk_to_sign = options.unsigned_apk_path

    signed_apk_path = signed_apk_path_tmp.name
    JarSigner(options.key_path, options.key_name, options.key_passwd,
              apk_to_sign, signed_apk_path)

    if options.load_library_from_zip:
      # Reorder the contents of the APK. This re-establishes the canonical
      # order which means the library will be back at its page aligned location.
      # This step also aligns uncompressed items to 4 bytes.
      ReorderAndAlignApk(
          options.rezip_apk_jar_path, signed_apk_path, options.final_apk_path)
    else:
      # Align uncompressed items to 4 bytes
      AlignApk(options.zipalign_path, signed_apk_path, options.final_apk_path)
+ (options.resource_packaged_apk_path, lang)) + options.final_apk_path = ("%s-lang-%s.apk" % + (options.base_output_path, lang)) + finalize_apk.FinalizeApk(options) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/find.py b/build/android/gyp/find.py new file mode 100755 index 00000000000..a9f1d498556 --- /dev/null +++ b/build/android/gyp/find.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Finds files in directories. +""" + +import fnmatch +import optparse +import os +import sys + + +def main(argv): + parser = optparse.OptionParser() + parser.add_option('--pattern', default='*', help='File pattern to match.') + options, directories = parser.parse_args(argv) + + for d in directories: + if not os.path.exists(d): + print >> sys.stderr, '%s does not exist' % d + return 1 + for root, _, filenames in os.walk(d): + for f in fnmatch.filter(filenames, options.pattern): + print os.path.join(root, f) + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/find_sun_tools_jar.py b/build/android/gyp/find_sun_tools_jar.py new file mode 100755 index 00000000000..2f15a154abd --- /dev/null +++ b/build/android/gyp/find_sun_tools_jar.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""This finds the java distribution's tools.jar and copies it somewhere. +""" + +import argparse +import os +import re +import shutil +import sys + +from util import build_utils + +RT_JAR_FINDER = re.compile(r'\[Opened (.*)/jre/lib/rt.jar\]') + +def main(): + parser = argparse.ArgumentParser(description='Find Sun Tools Jar') + parser.add_argument('--depfile', + help='Path to depfile. 
def FindSunToolsJarPath():
  """Locates the JDK's tools.jar via the rt.jar path java prints verbosely.

  Works with at least openjdk 1.6/1.7 and Sun java 1.6/1.7.  Returns the
  tools.jar path, or None when no matching line is found.
  """
  verbose_output = build_utils.CheckOutput(
      ["java", "-verbose", "-version"], print_stderr=False)
  for line in verbose_output.splitlines():
    jar_match = RT_JAR_FINDER.match(line)
    if jar_match:
      # rt.jar lives at <jdk>/jre/lib/rt.jar; tools.jar at <jdk>/lib/tools.jar.
      return os.path.join(jar_match.group(1), 'lib', 'tools.jar')
  return None
def main(args):
  """Parses options, runs the host-gcc preprocessing, writes depfile/stamp."""
  args = build_utils.ExpandFileArgs(args)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--include-path', help='Include path for gcc.')
  parser.add_option('--template', help='Path to template.')
  parser.add_option('--output', help='Path for generated file.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--defines', help='Pre-defines macros', action='append')

  options = parser.parse_args(args)[0]

  DoGcc(options)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        build_utils.GetPythonDependencies())
  if options.stamp:
    build_utils.Touch(options.stamp)
def Build(main_manifest, split, has_code):
  """Builds a split manifest based on the manifest of the main APK.

  Args:
    main_manifest: the XML manifest of the main APK as a string
    split: the name of the split as a string
    has_code: whether this split APK will contain .dex files

  Returns:
    The XML split manifest as a string
  """
  doc = xml.etree.ElementTree.fromstring(main_manifest)
  package = doc.get('package')

  return MANIFEST_TEMPLATE % {
      'package': package,
      # Split names must be valid Java identifiers; dashes are not.
      'split': split.replace('-', '_'),
      'has_code': str(has_code).lower()
  }


def main():
  """Reads the main manifest, writes the split manifest and the depfile."""
  options = ParseArgs()
  # Use open() in a context manager rather than the deprecated file()
  # builtin so the read handle is closed promptly instead of leaked.
  with open(options.main_manifest) as f:
    main_manifest = f.read()
  split_manifest = Build(
      main_manifest,
      options.split,
      options.has_code)

  with open(options.out_manifest, 'w') as f:
    f.write(split_manifest)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        [options.main_manifest] + build_utils.GetPythonDependencies())
b/build/android/gyp/generate_v14_compatible_resources.py @@ -0,0 +1,319 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Convert Android xml resources to API 14 compatible. + +There are two reasons that we cannot just use API 17 attributes, +so we are generating another set of resources by this script. + +1. paddingStart attribute can cause a crash on Galaxy Tab 2. +2. There is a bug that paddingStart does not override paddingLeft on + JB-MR1. This is fixed on JB-MR2. b/8654490 + +Therefore, this resource generation script can be removed when +we drop the support for JB-MR1. + +Please refer to http://crbug.com/235118 for the details. +""" + +import optparse +import os +import re +import shutil +import sys +import xml.dom.minidom as minidom + +from util import build_utils + +# Note that we are assuming 'android:' is an alias of +# the namespace 'http://schemas.android.com/apk/res/android'. + +GRAVITY_ATTRIBUTES = ('android:gravity', 'android:layout_gravity') + +# Almost all the attributes that has "Start" or "End" in +# its name should be mapped. 
def IterateXmlElements(node):
  """Yields every element node reachable from |node|.

  Iteration order is pre-order depth-first, matching a recursive walk that
  visits a node before its children.  Non-element nodes (documents, text)
  are traversed but not yielded.
  """
  pending = [node]
  while pending:
    current = pending.pop()
    if current.nodeType == current.ELEMENT_NODE:
      yield current
    # Push children reversed so the leftmost child is popped first.
    pending.extend(reversed(current.childNodes))
def HasStyleResource(dom):
  """Return True if the dom is a style resource, False otherwise.

  A style resource is a <resources> document containing at least one
  <style> element.
  """
  # Use the next() builtin (valid on Python 2.6+ and 3) instead of the
  # Python-2-only generator .next() method; behavior is identical.
  root_node = next(IterateXmlElements(dom))
  return bool(root_node.nodeName == 'resources' and
              list(root_node.getElementsByTagName('style')))
def GenerateV14StyleResourceDom(dom, filename, assert_not_deprecated=True):
  """Convert style resource to API 14 compatible style resource.

  Args:
    dom: Parsed minidom object to be modified.
    filename: Filename that the DOM was parsed from.
    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft)
      will cause an exception to be thrown.

  Returns:
    True if dom is modified, False otherwise.
  """
  modified = False

  for style in dom.getElementsByTagName('style'):
    for item in style.getElementsByTagName('item'):
      attr_name = item.attributes['name'].value
      attr_value = item.childNodes[0].nodeValue
      mapped_name = ATTRIBUTES_TO_MAP.get(attr_name)
      if mapped_name is not None:
        # Rewrite the Start/End attribute name to its Left/Right equivalent.
        item.attributes['name'].value = mapped_name
        modified = True
      elif assert_not_deprecated:
        AssertNotDeprecatedAttribute(attr_name, attr_value, filename)

  return modified
def GenerateV14StyleResource(input_filename, output_v14_filename):
  """Convert an API 17 style resource to an API 14 compatible one.

  Write the generated style resource to output_v14_filename.
  It's mostly a simple replacement, s/Start/Left s/End/Right,
  on the attribute names.
  """
  dom = ParseAndReportErrors(input_filename)
  GenerateV14StyleResourceDom(dom, input_filename)
  WriteDomToFile(dom, output_v14_filename)


def GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, output_v17_dir):
  """Convert layout resources to API 14 compatible resources in input_dir."""
  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
    relative = os.path.relpath(input_filename, input_dir)
    GenerateV14LayoutResource(input_filename,
                              os.path.join(output_v14_dir, relative),
                              os.path.join(output_v17_dir, relative))


def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir):
  """Convert style resources to API 14 compatible resources in input_dir."""
  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
    relative = os.path.relpath(input_filename, input_dir)
    GenerateV14StyleResource(input_filename,
                             os.path.join(output_v14_dir, relative))
def ParseArgs():
  """Parses command line options.

  Returns:
    An options object as from optparse.OptionsParser.parse_args()
  """
  parser = optparse.OptionParser()
  parser.add_option('--res-dir',
                    help='directory containing resources '
                         'used to generate v14 compatible resources')
  parser.add_option('--res-v14-compatibility-dir',
                    help='output directory into which '
                         'v14 compatible resources will be generated')
  parser.add_option('--stamp', help='File to touch on success')

  options, args = parser.parse_args()
  if args:
    parser.error('No positional arguments should be given.')

  # Check that required options have been provided.
  build_utils.CheckOptions(options, parser,
                           required=('res_dir', 'res_v14_compatibility_dir'))
  return options
def main():
  """Regenerates the v14 compatibility resource tree from scratch."""
  options = ParseArgs()

  res_v14_dir = options.res_v14_compatibility_dir
  # Start from an empty output directory so stale generated files never
  # survive a rebuild.
  build_utils.DeleteDirectory(res_v14_dir)
  build_utils.MakeDirectory(res_v14_dir)

  GenerateV14Resources(options.res_dir, res_v14_dir)

  if options.stamp:
    build_utils.Touch(options.stamp)
def main(argv):
  """Writes the configurations of attached, usable devices to --output.

  Devices that are offline, or on which "adb root" fails, are skipped with a
  big warning.  The resulting (possibly empty) configuration list is written
  for later build steps to consume.
  """
  parser = optparse.OptionParser()
  # --stamp is accepted but not used in this body — presumably consumed by
  # the calling build rule; confirm before removing.
  parser.add_option('--stamp', action='store')
  parser.add_option('--output', action='store')
  options, _ = parser.parse_args(argv)

  devices = build_device.GetAttachedDevices()

  device_configurations = []
  for d in devices:
    configuration, is_online, has_root = (
        build_device.GetConfigurationForDevice(d))

    if not is_online:
      build_utils.PrintBigWarning(
          '%s is not online. Skipping managed install for this device. '
          'Try rebooting the device to fix this warning.' % d)
      continue

    if not has_root:
      build_utils.PrintBigWarning(
          '"adb root" failed on device: %s\n'
          'Skipping managed install for this device.'
          % configuration['description'])
      continue

    device_configurations.append(configuration)

  if len(device_configurations) == 0:
    build_utils.PrintBigWarning(
        'No valid devices attached. Skipping managed install steps.')
  elif len(devices) > 1:
    # Note that this checks len(devices) and not len(device_configurations).
    # This way, any time there are multiple devices attached it is
    # explicitly stated which device we will install things to even if all but
    # one device were rejected for other reasons (e.g. two devices attached,
    # one w/o root).
    build_utils.PrintBigWarning(
        'Multiple devices attached. '
        'Installing to the preferred device: '
        '%(id)s (%(description)s)' % (device_configurations[0]))


  build_device.WriteConfigurations(device_configurations, options.output)
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Insert a version string into a library as a section '.chromium.version'. +""" + +import optparse +import os +import sys +import tempfile + +from util import build_utils + +def InsertChromiumVersion(android_objcopy, + library_path, + version_string): + # Remove existing .chromium.version section from .so + objcopy_command = [android_objcopy, + '--remove-section=.chromium.version', + library_path] + build_utils.CheckOutput(objcopy_command) + + # Add a .chromium.version section. + with tempfile.NamedTemporaryFile() as stream: + stream.write(version_string) + stream.flush() + objcopy_command = [android_objcopy, + '--add-section', '.chromium.version=%s' % stream.name, + library_path] + build_utils.CheckOutput(objcopy_command) + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = optparse.OptionParser() + + parser.add_option('--android-objcopy', + help='Path to the toolchain\'s objcopy binary') + parser.add_option('--stripped-libraries-dir', + help='Directory of native libraries') + parser.add_option('--libraries', + help='List of libraries') + parser.add_option('--version-string', + help='Version string to be inserted') + parser.add_option('--stamp', help='Path to touch on success') + + options, _ = parser.parse_args(args) + libraries = build_utils.ParseGypList(options.libraries) + + for library in libraries: + library_path = os.path.join(options.stripped_libraries_dir, library) + + InsertChromiumVersion(options.android_objcopy, + library_path, + options.version_string) + + if options.stamp: + build_utils.Touch(options.stamp) + + return 0 + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/jar.py b/build/android/gyp/jar.py new file mode 100755 index 00000000000..48abf5edb7d --- /dev/null +++ b/build/android/gyp/jar.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. 
All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import fnmatch +import optparse +import os +import sys + +from util import build_utils +from util import md5_check + + +def Jar(class_files, classes_dir, jar_path, manifest_file=None): + jar_path = os.path.abspath(jar_path) + + # The paths of the files in the jar will be the same as they are passed in to + # the command. Because of this, the command should be run in + # options.classes_dir so the .class file paths in the jar are correct. + jar_cwd = classes_dir + class_files_rel = [os.path.relpath(f, jar_cwd) for f in class_files] + jar_cmd = ['jar', 'cf0', jar_path] + if manifest_file: + jar_cmd[1] += 'm' + jar_cmd.append(os.path.abspath(manifest_file)) + jar_cmd.extend(class_files_rel) + + with build_utils.TempDir() as temp_dir: + empty_file = os.path.join(temp_dir, '.empty') + build_utils.Touch(empty_file) + jar_cmd.append(os.path.relpath(empty_file, jar_cwd)) + record_path = '%s.md5.stamp' % jar_path + md5_check.CallAndRecordIfStale( + lambda: build_utils.CheckOutput(jar_cmd, cwd=jar_cwd), + record_path=record_path, + input_paths=class_files, + input_strings=jar_cmd, + force=not os.path.exists(jar_path), + ) + + build_utils.Touch(jar_path, fail_if_missing=True) + + +def JarDirectory(classes_dir, excluded_classes, jar_path, manifest_file=None): + class_files = build_utils.FindInDirectory(classes_dir, '*.class') + for exclude in excluded_classes: + class_files = filter( + lambda f: not fnmatch.fnmatch(f, exclude), class_files) + + Jar(class_files, classes_dir, jar_path, manifest_file=manifest_file) + + +def main(): + parser = optparse.OptionParser() + parser.add_option('--classes-dir', help='Directory containing .class files.') + parser.add_option('--jar-path', help='Jar output path.') + parser.add_option('--excluded-classes', + help='List of .class file patterns to exclude from the jar.') + parser.add_option('--stamp', help='Path to 
touch on success.') + + options, _ = parser.parse_args() + + if options.excluded_classes: + excluded_classes = build_utils.ParseGypList(options.excluded_classes) + else: + excluded_classes = [] + JarDirectory(options.classes_dir, + excluded_classes, + options.jar_path) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/build/android/gyp/jar_toc.py b/build/android/gyp/jar_toc.py new file mode 100755 index 00000000000..00d97d211f3 --- /dev/null +++ b/build/android/gyp/jar_toc.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Creates a TOC file from a Java jar. + +The TOC file contains the non-package API of the jar. This includes all +public/protected/package classes/functions/members and the values of static +final variables (members with package access are kept because in some cases we +have multiple libraries with the same package, particularly test+non-test). Some +other information (major/minor javac version) is also included. + +This TOC file then can be used to determine if a dependent library should be +rebuilt when this jar changes. I.e. any change to the jar that would require a +rebuild, will have a corresponding change in the TOC file. +""" + +import optparse +import os +import re +import sys +import zipfile + +from util import build_utils +from util import md5_check + + +def GetClassesInZipFile(zip_file): + classes = [] + files = zip_file.namelist() + for f in files: + if f.endswith('.class'): + # f is of the form org/chromium/base/Class$Inner.class + classes.append(f.replace('/', '.')[:-6]) + return classes + + +def CallJavap(classpath, classes): + javap_cmd = [ + 'javap', + '-package', # Show public/protected/package. + # -verbose is required to get constant values (which can be inlined in + # dependents). 
+ '-verbose', + '-J-XX:NewSize=4m', + '-classpath', classpath + ] + classes + return build_utils.CheckOutput(javap_cmd) + + +def ExtractToc(disassembled_classes): + # javap output is structured by indent (2-space) levels. + good_patterns = [ + '^[^ ]', # This includes all class/function/member signatures. + '^ SourceFile:', + '^ minor version:', + '^ major version:', + '^ Constant value:', + ] + bad_patterns = [ + '^const #', # Matches the constant pool (i.e. literals used in the class). + ] + + def JavapFilter(line): + return (re.match('|'.join(good_patterns), line) and + not re.match('|'.join(bad_patterns), line)) + toc = filter(JavapFilter, disassembled_classes.split('\n')) + + return '\n'.join(toc) + + +def UpdateToc(jar_path, toc_path): + classes = GetClassesInZipFile(zipfile.ZipFile(jar_path)) + toc = [] + + limit = 1000 # Split into multiple calls to stay under command size limit + for i in xrange(0, len(classes), limit): + javap_output = CallJavap(classpath=jar_path, classes=classes[i:i+limit]) + toc.append(ExtractToc(javap_output)) + + with open(toc_path, 'w') as tocfile: + tocfile.write(''.join(toc)) + + +def DoJarToc(options): + jar_path = options.jar_path + toc_path = options.toc_path + record_path = '%s.md5.stamp' % toc_path + md5_check.CallAndRecordIfStale( + lambda: UpdateToc(jar_path, toc_path), + record_path=record_path, + input_paths=[jar_path], + force=not os.path.exists(toc_path), + ) + build_utils.Touch(toc_path, fail_if_missing=True) + + +def main(): + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + + parser.add_option('--jar-path', help='Input .jar path.') + parser.add_option('--toc-path', help='Output .jar.TOC path.') + parser.add_option('--stamp', help='Path to touch on success.') + + options, _ = parser.parse_args() + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + build_utils.GetPythonDependencies()) + + DoJarToc(options) + + if options.depfile: + build_utils.WriteDepfile( + 
options.depfile, + build_utils.GetPythonDependencies()) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/gyp/java_cpp_enum.py b/build/android/gyp/java_cpp_enum.py new file mode 100755 index 00000000000..c2f1764b1be --- /dev/null +++ b/build/android/gyp/java_cpp_enum.py @@ -0,0 +1,340 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import collections +import re +import optparse +import os +from string import Template +import sys + +from util import build_utils + +# List of C++ types that are compatible with the Java code generated by this +# script. +# +# This script can parse .idl files however, at present it ignores special +# rules such as [cpp_enum_prefix_override="ax_attr"]. +ENUM_FIXED_TYPE_WHITELIST = ['char', 'unsigned char', + 'short', 'unsigned short', + 'int', 'int8_t', 'int16_t', 'int32_t', 'uint8_t', 'uint16_t'] + +class EnumDefinition(object): + def __init__(self, original_enum_name=None, class_name_override=None, + enum_package=None, entries=None, fixed_type=None): + self.original_enum_name = original_enum_name + self.class_name_override = class_name_override + self.enum_package = enum_package + self.entries = collections.OrderedDict(entries or []) + self.prefix_to_strip = None + self.fixed_type = fixed_type + + def AppendEntry(self, key, value): + if key in self.entries: + raise Exception('Multiple definitions of key %s found.' 
% key) + self.entries[key] = value + + @property + def class_name(self): + return self.class_name_override or self.original_enum_name + + def Finalize(self): + self._Validate() + self._AssignEntryIndices() + self._StripPrefix() + + def _Validate(self): + assert self.class_name + assert self.enum_package + assert self.entries + if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_WHITELIST: + raise Exception('Fixed type %s for enum %s not whitelisted.' % + (self.fixed_type, self.class_name)) + + def _AssignEntryIndices(self): + # Enums, if given no value, are given the value of the previous enum + 1. + if not all(self.entries.values()): + prev_enum_value = -1 + for key, value in self.entries.iteritems(): + if not value: + self.entries[key] = prev_enum_value + 1 + elif value in self.entries: + self.entries[key] = self.entries[value] + else: + try: + self.entries[key] = int(value) + except ValueError: + raise Exception('Could not interpret integer from enum value "%s" ' + 'for key %s.' 
% (value, key)) + prev_enum_value = self.entries[key] + + + def _StripPrefix(self): + prefix_to_strip = self.prefix_to_strip + if not prefix_to_strip: + prefix_to_strip = self.original_enum_name + prefix_to_strip = re.sub('(?!^)([A-Z]+)', r'_\1', prefix_to_strip).upper() + prefix_to_strip += '_' + if not all([w.startswith(prefix_to_strip) for w in self.entries.keys()]): + prefix_to_strip = '' + + entries = collections.OrderedDict() + for (k, v) in self.entries.iteritems(): + stripped_key = k.replace(prefix_to_strip, '', 1) + if isinstance(v, basestring): + stripped_value = v.replace(prefix_to_strip, '', 1) + else: + stripped_value = v + entries[stripped_key] = stripped_value + + self.entries = entries + +class DirectiveSet(object): + class_name_override_key = 'CLASS_NAME_OVERRIDE' + enum_package_key = 'ENUM_PACKAGE' + prefix_to_strip_key = 'PREFIX_TO_STRIP' + + known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key] + + def __init__(self): + self._directives = {} + + def Update(self, key, value): + if key not in DirectiveSet.known_keys: + raise Exception("Unknown directive: " + key) + self._directives[key] = value + + @property + def empty(self): + return len(self._directives) == 0 + + def UpdateDefinition(self, definition): + definition.class_name_override = self._directives.get( + DirectiveSet.class_name_override_key, '') + definition.enum_package = self._directives.get( + DirectiveSet.enum_package_key) + definition.prefix_to_strip = self._directives.get( + DirectiveSet.prefix_to_strip_key) + + +class HeaderParser(object): + single_line_comment_re = re.compile(r'\s*//') + multi_line_comment_start_re = re.compile(r'\s*/\*') + enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?') + enum_end_re = re.compile(r'^\s*}\s*;\.*$') + generator_directive_re = re.compile( + r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$') + multi_line_generator_directive_start_re = re.compile( + r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$') + 
multi_line_directive_continuation_re = re.compile( + r'^\s*//\s+([\.\w]+)$') + multi_line_directive_end_re = re.compile( + r'^\s*//\s+([\.\w]*)\)$') + + optional_class_or_struct_re = r'(class|struct)?' + enum_name_re = r'(\w+)' + optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?' + enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' + + optional_class_or_struct_re + '\s*' + enum_name_re + '\s*' + + optional_fixed_type_re + '\s*{\s*$') + + def __init__(self, lines, path=None): + self._lines = lines + self._path = path + self._enum_definitions = [] + self._in_enum = False + self._current_definition = None + self._generator_directives = DirectiveSet() + self._multi_line_generator_directive = None + + def _ApplyGeneratorDirectives(self): + self._generator_directives.UpdateDefinition(self._current_definition) + self._generator_directives = DirectiveSet() + + def ParseDefinitions(self): + for line in self._lines: + self._ParseLine(line) + return self._enum_definitions + + def _ParseLine(self, line): + if self._multi_line_generator_directive: + self._ParseMultiLineDirectiveLine(line) + elif not self._in_enum: + self._ParseRegularLine(line) + else: + self._ParseEnumLine(line) + + def _ParseEnumLine(self, line): + if HeaderParser.single_line_comment_re.match(line): + return + if HeaderParser.multi_line_comment_start_re.match(line): + raise Exception('Multi-line comments in enums are not supported in ' + + self._path) + enum_end = HeaderParser.enum_end_re.match(line) + enum_entry = HeaderParser.enum_line_re.match(line) + if enum_end: + self._ApplyGeneratorDirectives() + self._current_definition.Finalize() + self._enum_definitions.append(self._current_definition) + self._in_enum = False + elif enum_entry: + enum_key = enum_entry.groups()[0] + enum_value = enum_entry.groups()[2] + self._current_definition.AppendEntry(enum_key, enum_value) + + def _ParseMultiLineDirectiveLine(self, line): + multi_line_directive_continuation = ( + 
HeaderParser.multi_line_directive_continuation_re.match(line)) + multi_line_directive_end = ( + HeaderParser.multi_line_directive_end_re.match(line)) + + if multi_line_directive_continuation: + value_cont = multi_line_directive_continuation.groups()[0] + self._multi_line_generator_directive[1].append(value_cont) + elif multi_line_directive_end: + directive_name = self._multi_line_generator_directive[0] + directive_value = "".join(self._multi_line_generator_directive[1]) + directive_value += multi_line_directive_end.groups()[0] + self._multi_line_generator_directive = None + self._generator_directives.Update(directive_name, directive_value) + else: + raise Exception('Malformed multi-line directive declaration in ' + + self._path) + + def _ParseRegularLine(self, line): + enum_start = HeaderParser.enum_start_re.match(line) + generator_directive = HeaderParser.generator_directive_re.match(line) + multi_line_generator_directive_start = ( + HeaderParser.multi_line_generator_directive_start_re.match(line)) + + if generator_directive: + directive_name = generator_directive.groups()[0] + directive_value = generator_directive.groups()[1] + self._generator_directives.Update(directive_name, directive_value) + elif multi_line_generator_directive_start: + directive_name = multi_line_generator_directive_start.groups()[0] + directive_value = multi_line_generator_directive_start.groups()[1] + self._multi_line_generator_directive = (directive_name, [directive_value]) + elif enum_start: + if self._generator_directives.empty: + return + self._current_definition = EnumDefinition( + original_enum_name=enum_start.groups()[1], + fixed_type=enum_start.groups()[3]) + self._in_enum = True + +def GetScriptName(): + script_components = os.path.abspath(sys.argv[0]).split(os.path.sep) + build_index = script_components.index('build') + return os.sep.join(script_components[build_index:]) + + +def DoGenerate(output_dir, source_paths, print_output_only=False): + output_paths = [] + for source_path 
in source_paths: + enum_definitions = DoParseHeaderFile(source_path) + if not enum_definitions: + raise Exception('No enums found in %s\n' + 'Did you forget prefixing enums with ' + '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' % + source_path) + for enum_definition in enum_definitions: + package_path = enum_definition.enum_package.replace('.', os.path.sep) + file_name = enum_definition.class_name + '.java' + output_path = os.path.join(output_dir, package_path, file_name) + output_paths.append(output_path) + if not print_output_only: + build_utils.MakeDirectory(os.path.dirname(output_path)) + DoWriteOutput(source_path, output_path, enum_definition) + return output_paths + + +def DoParseHeaderFile(path): + with open(path) as f: + return HeaderParser(f.readlines(), path).ParseDefinitions() + + +def GenerateOutput(source_path, enum_definition): + template = Template(""" +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +// This file is autogenerated by +// ${SCRIPT_NAME} +// From +// ${SOURCE_PATH} + +package ${PACKAGE}; + +public class ${CLASS_NAME} { +${ENUM_ENTRIES} +} +""") + + enum_template = Template(' public static final int ${NAME} = ${VALUE};') + enum_entries_string = [] + for enum_name, enum_value in enum_definition.entries.iteritems(): + values = { + 'NAME': enum_name, + 'VALUE': enum_value, + } + enum_entries_string.append(enum_template.substitute(values)) + enum_entries_string = '\n'.join(enum_entries_string) + + values = { + 'CLASS_NAME': enum_definition.class_name, + 'ENUM_ENTRIES': enum_entries_string, + 'PACKAGE': enum_definition.enum_package, + 'SCRIPT_NAME': GetScriptName(), + 'SOURCE_PATH': source_path, + } + return template.substitute(values) + + +def DoWriteOutput(source_path, output_path, enum_definition): + with open(output_path, 'w') as out_file: + out_file.write(GenerateOutput(source_path, enum_definition)) + +def AssertFilesList(output_paths, assert_files_list): + actual = set(output_paths) + expected = set(assert_files_list) + if not actual == expected: + need_to_add = list(actual - expected) + need_to_remove = list(expected - actual) + raise Exception('Output files list does not match expectations. Please ' + 'add %s and remove %s.' % (need_to_add, need_to_remove)) + +def DoMain(argv): + usage = 'usage: %prog [options] output_dir input_file(s)...' + parser = optparse.OptionParser(usage=usage) + + parser.add_option('--assert_file', action="append", default=[], + dest="assert_files_list", help='Assert that the given ' + 'file is an output. 
There can be multiple occurrences of ' + 'this flag.') + parser.add_option('--print_output_only', help='Only print output paths.', + action='store_true') + parser.add_option('--verbose', help='Print more information.', + action='store_true') + + options, args = parser.parse_args(argv) + if len(args) < 2: + parser.error('Need to specify output directory and at least one input file') + output_paths = DoGenerate(args[0], args[1:], + print_output_only=options.print_output_only) + + if options.assert_files_list: + AssertFilesList(output_paths, options.assert_files_list) + + if options.verbose: + print 'Output paths:' + print '\n'.join(output_paths) + + return ' '.join(output_paths) + +if __name__ == '__main__': + DoMain(sys.argv[1:]) diff --git a/build/android/gyp/java_cpp_enum_tests.py b/build/android/gyp/java_cpp_enum_tests.py new file mode 100755 index 00000000000..44f9766c82c --- /dev/null +++ b/build/android/gyp/java_cpp_enum_tests.py @@ -0,0 +1,436 @@ +#!/usr/bin/env python +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Tests for java_cpp_enum.py. + +This test suite contains various tests for the C++ -> Java enum generator. +""" + +import collections +import optparse +import os +import sys +import unittest + +import java_cpp_enum +from java_cpp_enum import EnumDefinition, GenerateOutput, GetScriptName +from java_cpp_enum import HeaderParser + +sys.path.append(os.path.join(os.path.dirname(__file__), "gyp")) +from util import build_utils + +class TestPreprocess(unittest.TestCase): + def testOutput(self): + definition = EnumDefinition(original_enum_name='ClassName', + enum_package='some.package', + entries=[('E1', 1), ('E2', '2 << 2')]) + output = GenerateOutput('path/to/file', definition) + expected = """ +// Copyright 2014 The Chromium Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is autogenerated by +// %s +// From +// path/to/file + +package some.package; + +public class ClassName { + public static final int E1 = 1; + public static final int E2 = 2 << 2; +} +""" + self.assertEqual(expected % GetScriptName(), output) + + def testParseSimpleEnum(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumName { + VALUE_ZERO, + VALUE_ONE, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('EnumName', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0), + ('VALUE_ONE', 1)]), + definition.entries) + + def testParseBitShifts(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumName { + VALUE_ZERO = 1 << 0, + VALUE_ONE = 1 << 1, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('EnumName', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'), + ('VALUE_ONE', '1 << 1')]), + definition.entries) + + def testParseClassNameOverride(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName + enum EnumName { + FOO + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride + enum PrefixTest { + PREFIX_TEST_A, + PREFIX_TEST_B, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(2, len(definitions)) + definition = definitions[0] + self.assertEqual('OverrideName', 
definition.class_name) + + definition = definitions[1] + self.assertEqual('OtherOverride', definition.class_name) + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1)]), + definition.entries) + + def testParseTwoEnums(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumOne { + ENUM_ONE_A = 1, + // Comment there + ENUM_ONE_B = A, + }; + + enum EnumIgnore { + C, D, E + }; + + // GENERATED_JAVA_ENUM_PACKAGE: other.package + // GENERATED_JAVA_PREFIX_TO_STRIP: P_ + enum EnumTwo { + P_A, + P_B + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(2, len(definitions)) + definition = definitions[0] + self.assertEqual('EnumOne', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', '1'), + ('B', 'A')]), + definition.entries) + + definition = definitions[1] + self.assertEqual('EnumTwo', definition.class_name) + self.assertEqual('other.package', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1)]), + definition.entries) + + def testParseThrowsOnUnknownDirective(self): + test_data = """ + // GENERATED_JAVA_UNKNOWN: Value + enum EnumName { + VALUE_ONE, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseReturnsEmptyListWithoutDirectives(self): + test_data = """ + enum EnumName { + VALUE_ONE, + }; + """.split('\n') + self.assertEqual([], HeaderParser(test_data).ParseDefinitions()) + + def testParseEnumClass(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + 
self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseEnumStruct(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum struct Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseFixedTypeEnum(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum Foo : int { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual('int', definition.fixed_type) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseFixedTypeEnumClass(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class Foo: unsigned short { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual('unsigned short', definition.fixed_type) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseUnknownFixedTypeRaises(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class Foo: foo_type { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseSimpleMultiLineDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // 
test.namespace) + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar + enum Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual('test.namespace', definitions[0].enum_package) + self.assertEqual('Bar', definitions[0].class_name) + + def testParseMultiLineDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: (te + // st.name + // space) + enum Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual('test.namespace', definitions[0].enum_package) + + def testParseMultiLineDirectiveWithOtherDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.namespace) + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: ( + // Ba + // r + // ) + enum Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual('test.namespace', definitions[0].enum_package) + self.assertEqual('Bar', definitions[0].class_name) + + def testParseMalformedMultiLineDirectiveWithOtherDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.name + // space + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseMalformedMultiLineDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.name + // space + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseMalformedMultiLineDirectiveShort(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testEnumValueAssignmentNoneDefined(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + 
definition.AppendEntry('B', None) + definition.AppendEntry('C', None) + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1), + ('C', 2)]), + definition.entries) + + def testEnumValueAssignmentAllDefined(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', '1') + definition.AppendEntry('B', '2') + definition.AppendEntry('C', '3') + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', '1'), + ('B', '2'), + ('C', '3')]), + definition.entries) + + def testEnumValueAssignmentReferences(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', 'A') + definition.AppendEntry('C', None) + definition.AppendEntry('D', 'C') + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 0), + ('C', 1), + ('D', 1)]), + definition.entries) + + def testEnumValueAssignmentSet(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', '2') + definition.AppendEntry('C', None) + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 2), + ('C', 3)]), + definition.entries) + + def testEnumValueAssignmentSetReferences(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', 'A') + definition.AppendEntry('C', 'B') + definition.AppendEntry('D', None) + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 0), + ('C', 0), + ('D', 1)]), + definition.entries) + + def testEnumValueAssignmentRaises(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', 'foo') + definition.AppendEntry('C', None) + with self.assertRaises(Exception): + definition.Finalize() + + 
  def testExplicitPrefixStripping(self):
    # prefix_to_strip removes the given prefix from every entry name.
    definition = EnumDefinition(original_enum_name='c', enum_package='p')
    definition.AppendEntry('P_A', None)
    definition.AppendEntry('B', None)
    definition.AppendEntry('P_C', None)
    definition.AppendEntry('P_LAST', 'P_C')
    definition.prefix_to_strip = 'P_'
    definition.Finalize()
    self.assertEqual(collections.OrderedDict([('A', 0),
                                              ('B', 1),
                                              ('C', 2),
                                              ('LAST', 2)]),
                     definition.entries)

  def testImplicitPrefixStripping(self):
    # Without an explicit prefix, the SHOUTY_CASE form of the enum's class
    # name is stripped from entries when every entry carries it.
    definition = EnumDefinition(original_enum_name='ClassName',
                                enum_package='p')
    definition.AppendEntry('CLASS_NAME_A', None)
    definition.AppendEntry('CLASS_NAME_B', None)
    definition.AppendEntry('CLASS_NAME_C', None)
    definition.AppendEntry('CLASS_NAME_LAST', 'CLASS_NAME_C')
    definition.Finalize()
    self.assertEqual(collections.OrderedDict([('A', 0),
                                              ('B', 1),
                                              ('C', 2),
                                              ('LAST', 2)]),
                     definition.entries)

  def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self):
    # If any entry lacks the implicit prefix, no stripping happens at all.
    # NOTE(review): compares .keys() to a list — relies on Python 2 dict
    # .keys() returning a list.
    definition = EnumDefinition(original_enum_name='Name',
                                enum_package='p')
    definition.AppendEntry('A', None)
    definition.AppendEntry('B', None)
    definition.AppendEntry('NAME_LAST', None)
    definition.Finalize()
    self.assertEqual(['A', 'B', 'NAME_LAST'], definition.entries.keys())

  def testGenerateThrowsOnEmptyInput(self):
    # Monkey-patches DoParseHeaderFile to return no definitions; DoGenerate
    # must then raise. The original is restored in the finally block.
    with self.assertRaises(Exception):
      original_do_parse = java_cpp_enum.DoParseHeaderFile
      try:
        java_cpp_enum.DoParseHeaderFile = lambda _: []
        java_cpp_enum.DoGenerate('dir', ['file'])
      finally:
        java_cpp_enum.DoParseHeaderFile = original_do_parse


def main(argv):
  # Runs the TestPreprocess suite; optionally touches a stamp file so the
  # build system can track success.
  parser = optparse.OptionParser()
  parser.add_option("--stamp", help="File to touch on success.")
  options, _ = parser.parse_args(argv)

  suite = unittest.TestLoader().loadTestsFromTestCase(TestPreprocess)
  unittest.TextTestRunner(verbosity=0).run(suite)

  if options.stamp:
    build_utils.Touch(options.stamp)


if __name__ == '__main__':
  main(sys.argv[1:])
def ColorJavacOutput(output):
  """Returns |output| with javac warning/error/marker lines ANSI-colorized.

  NOTE(review): the regex named-group syntax below arrived garbled in the
  source ('(?P(' with the '<name>' part stripped). The group names
  'full_message' and 'marker' are required by the color specs below; the
  inner names ('file', 'line', 'text', 'message') are not referenced
  elsewhere in this function, so any legal identifiers work — confirm
  against upstream if exact names matter.
  """
  fileline_prefix = (
      r'(?P<full_message>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)')
  warning_re = re.compile(
      fileline_prefix + r'(?P<text> warning: (?P<message>.*))$')
  error_re = re.compile(
      fileline_prefix + r'(?P<text> (?P<message>.*))$')
  marker_re = re.compile(r'\s*(?P<marker>\^)\s*$')

  # Each spec is [group-name-to-colorize, ANSI color prefix].
  warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM]
  error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT]
  marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT]

  def Colorize(line, regex, color):
    # Wraps the span of named group |color[0]| in |color[1]| + reset codes.
    match = regex.match(line)
    start = match.start(color[0])
    end = match.end(color[0])
    return (line[:start]
            + color[1] + line[start:end]
            + colorama.Fore.RESET + colorama.Style.RESET_ALL
            + line[end:])

  def ApplyColor(line):
    if warning_re.match(line):
      line = Colorize(line, warning_re, warning_color)
    elif error_re.match(line):
      line = Colorize(line, error_re, error_color)
    elif marker_re.match(line):
      line = Colorize(line, marker_re, marker_color)
    return line

  return '\n'.join(map(ApplyColor, output.split('\n')))
def DoJavac(
    bootclasspath, classpath, classes_dir, chromium_code,
    use_errorprone_path, java_files):
  """Runs javac.

  Builds |java_files| with the provided |classpath| and puts the generated
  .class files into |classes_dir|. If |chromium_code| is true, extra lint
  checking will be enabled.

  Args:
    bootclasspath: Optional list of boot classpath entries; when non-empty,
      also pins -source/-target to 1.7.
    classpath: List of jar/class-dir entries joined with ':' for javac.
    classes_dir: Output directory for .class files (also holds the md5 stamp).
    chromium_code: Truthy to enable stricter lint flags and stdout printing.
    use_errorprone_path: Optional path to the errorprone compiler binary.
    java_files: List of .java files to compile.
  """

  # Prefer a jar's .TOC sibling (interface hash) as the staleness input when
  # one exists, so implementation-only changes don't trigger a recompile.
  jar_inputs = []
  for path in classpath:
    if os.path.exists(path + '.TOC'):
      jar_inputs.append(path + '.TOC')
    else:
      jar_inputs.append(path)

  javac_args = [
      '-g',
      # Chromium only allows UTF8 source files. Being explicit avoids
      # javac pulling a default encoding from the user's environment.
      '-encoding', 'UTF-8',
      '-classpath', ':'.join(classpath),
      '-d', classes_dir]

  if bootclasspath:
    javac_args.extend([
        '-bootclasspath', ':'.join(bootclasspath),
        '-source', '1.7',
        '-target', '1.7',
    ])

  if chromium_code:
    # TODO(aurimas): re-enable '-Xlint:deprecation' checks once they are fixed.
    javac_args.extend(['-Xlint:unchecked'])
  else:
    # XDignore.symbol.file makes javac compile against rt.jar instead of
    # ct.sym. This means that using a java internal package/class will not
    # trigger a compile warning or error.
    javac_args.extend(['-XDignore.symbol.file'])

  if use_errorprone_path:
    javac_cmd = [use_errorprone_path] + ERRORPRONE_OPTIONS
  else:
    javac_cmd = ['javac']

  javac_cmd = javac_cmd + javac_args + java_files

  def Compile():
    build_utils.CheckOutput(
        javac_cmd,
        print_stdout=chromium_code,
        stderr_filter=ColorJavacOutput)

  # Only recompile when inputs or the command line changed since last run.
  record_path = os.path.join(classes_dir, 'javac.md5.stamp')
  md5_check.CallAndRecordIfStale(
      Compile,
      record_path=record_path,
      input_paths=java_files + jar_inputs,
      input_strings=javac_cmd)
# Manifest lines are limited to 72 bytes; the wrapper below uses width - 2
# because each line is terminated with CRLF when joined.
_MAX_MANIFEST_LINE_LEN = 72


def CreateManifest(manifest_path, classpath, main_class=None,
                   manifest_entries=None):
  """Creates a manifest file with the given parameters.

  This generates a manifest file that compiles with the spec found at
  http://docs.oracle.com/javase/7/docs/technotes/guides/jar/jar.html#JAR_Manifest

  Args:
    manifest_path: The path to the manifest file that should be created.
    classpath: The JAR files that should be listed on the manifest file's
      classpath.
    main_class: If present, the class containing the main() function.
    manifest_entries: If present, a list of (key, value) pairs to add to
      the manifest.
  """
  output = ['Manifest-Version: 1.0']
  if main_class:
    output.append('Main-Class: %s' % main_class)
  if manifest_entries:
    for k, v in manifest_entries:
      output.append('%s: %s' % (k, v))
  if classpath:
    # Strip surrounding quotes and keep only basenames — the jars are
    # expected to sit next to the output jar.
    sanitized_paths = []
    for path in classpath:
      sanitized_paths.append(os.path.basename(path.strip('"')))
    output.append('Class-Path: %s' % ' '.join(sanitized_paths))
  output.append('Created-By: ')
  output.append('')

  wrapper = textwrap.TextWrapper(break_long_words=True,
                                 drop_whitespace=False,
                                 subsequent_indent=' ',
                                 width=_MAX_MANIFEST_LINE_LEN - 2)
  output = '\r\n'.join(w for l in output for w in wrapper.wrap(l))

  with open(manifest_path, 'w') as f:
    f.write(output)


def main(argv):
  """Entry point: parses options, compiles, and optionally jars the output."""
  colorama.init()

  argv = build_utils.ExpandFileArgs(argv)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option(
      '--src-gendirs',
      help='Directories containing generated java files.')
  parser.add_option(
      '--java-srcjars',
      action='append',
      default=[],
      help='List of srcjars to include in compilation.')
  parser.add_option(
      '--bootclasspath',
      action='append',
      default=[],
      help='Boot classpath for javac. If this is specified multiple times, '
           'they will all be appended to construct the classpath.')
  parser.add_option(
      '--classpath',
      action='append',
      # FIX: default=[] for consistency with --bootclasspath/--java-srcjars;
      # without it, omitting --classpath made the iteration below raise
      # TypeError on None.
      default=[],
      help='Classpath for javac. If this is specified multiple times, they '
           'will all be appended to construct the classpath.')
  parser.add_option(
      '--javac-includes',
      help='A list of file patterns. If provided, only java files that match'
           'one of the patterns will be compiled.')
  parser.add_option(
      '--jar-excluded-classes',
      default='',
      help='List of .class file patterns to exclude from the jar.')

  parser.add_option(
      '--chromium-code',
      type='int',
      help='Whether code being compiled should be built with stricter '
           'warnings for chromium code.')

  parser.add_option(
      '--use-errorprone-path',
      help='Use the Errorprone compiler at this path.')

  parser.add_option(
      '--classes-dir',
      help='Directory for compiled .class files.')
  parser.add_option('--jar-path', help='Jar output path.')
  parser.add_option(
      '--main-class',
      help='The class containing the main method.')
  parser.add_option(
      '--manifest-entry',
      action='append',
      help='Key:value pairs to add to the .jar manifest.')

  parser.add_option('--stamp', help='Path to touch on success.')

  options, args = parser.parse_args(argv)

  if options.main_class and not options.jar_path:
    parser.error('--main-class requires --jar-path')

  bootclasspath = []
  for arg in options.bootclasspath:
    bootclasspath += build_utils.ParseGypList(arg)

  classpath = []
  for arg in options.classpath:
    classpath += build_utils.ParseGypList(arg)

  java_srcjars = []
  for arg in options.java_srcjars:
    java_srcjars += build_utils.ParseGypList(arg)

  java_files = args
  if options.src_gendirs:
    src_gendirs = build_utils.ParseGypList(options.src_gendirs)
    java_files += build_utils.FindInDirectories(src_gendirs, '*.java')

  input_files = bootclasspath + classpath + java_srcjars + java_files
  with build_utils.TempDir() as temp_dir:
    classes_dir = os.path.join(temp_dir, 'classes')
    os.makedirs(classes_dir)
    if java_srcjars:
      # Extract srcjars into a scratch dir and pick up their .java files.
      java_dir = os.path.join(temp_dir, 'java')
      os.makedirs(java_dir)
      for srcjar in java_srcjars:
        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
      java_files += build_utils.FindInDirectory(java_dir, '*.java')

    if options.javac_includes:
      javac_includes = build_utils.ParseGypList(options.javac_includes)
      filtered_java_files = []
      for f in java_files:
        for include in javac_includes:
          if fnmatch.fnmatch(f, include):
            filtered_java_files.append(f)
            break
      java_files = filtered_java_files

    if len(java_files) != 0:
      DoJavac(
          bootclasspath,
          classpath,
          classes_dir,
          options.chromium_code,
          options.use_errorprone_path,
          java_files)

    if options.jar_path:
      if options.main_class or options.manifest_entry:
        if options.manifest_entry:
          entries = map(lambda e: e.split(":"), options.manifest_entry)
        else:
          entries = []
        manifest_file = os.path.join(temp_dir, 'manifest')
        CreateManifest(manifest_file, classpath, options.main_class, entries)
      else:
        manifest_file = None
      jar.JarDirectory(classes_dir,
                       build_utils.ParseGypList(options.jar_excluded_classes),
                       options.jar_path,
                       manifest_file=manifest_file)

    if options.classes_dir:
      # Delete the old classes directory. This ensures that all .class files in
      # the output are actually from the input .java files. For example, if a
      # .java file is deleted or an inner class is removed, the classes
      # directory should not contain the corresponding old .class file after
      # running this action.
      build_utils.DeleteDirectory(options.classes_dir)
      shutil.copytree(classes_dir, options.classes_dir)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        input_files + build_utils.GetPythonDependencies())

  if options.stamp:
    build_utils.Touch(options.stamp)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
class RecordingFileSystemLoader(jinja2.FileSystemLoader):
  '''A FileSystemLoader that remembers every template it has served.'''

  def __init__(self, searchpath):
    jinja2.FileSystemLoader.__init__(self, searchpath)
    self.loaded_templates = set()

  def get_source(self, environment, template):
    # Delegate to the base loader, then record the template path (relative
    # to cwd) so callers can build a depfile from it.
    source_info = jinja2.FileSystemLoader.get_source(
        self, environment, template)
    contents, filename, uptodate = source_info
    self.loaded_templates.add(os.path.relpath(filename))
    return contents, filename, uptodate

  def get_loaded_templates(self):
    return list(self.loaded_templates)


def ProcessFile(env, input_filename, loader_base_dir, output_filename,
                variables):
  '''Renders a single template to |output_filename| as UTF-8.'''
  rel_template_path = os.path.relpath(input_filename, loader_base_dir)
  rendered = env.get_template(rel_template_path).render(variables)
  with codecs.open(output_filename, 'w', 'utf-8') as output_file:
    output_file.write(rendered)


def ProcessFiles(env, input_filenames, loader_base_dir, inputs_base_dir,
                 outputs_zip, variables):
  '''Renders each input into a temp tree mirroring |inputs_base_dir|,
  then zips the tree into |outputs_zip|.'''
  with build_utils.TempDir() as staging_dir:
    for input_filename in input_filenames:
      relpath = os.path.relpath(os.path.abspath(input_filename),
                                os.path.abspath(inputs_base_dir))
      # Inputs outside the base dir would escape the zip's root.
      if relpath.startswith(os.pardir):
        raise Exception('input file %s is not contained in inputs base dir %s'
                        % (input_filename, inputs_base_dir))

      staged_output = os.path.join(staging_dir, relpath)
      build_utils.MakeDirectory(os.path.dirname(staged_output))
      ProcessFile(env, input_filename, loader_base_dir, staged_output,
                  variables)

    build_utils.ZipDir(outputs_zip, staging_dir)
def main():
  """Parses options and renders templates to a file or an outputs zip."""
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--inputs', help='The template files to process.')
  parser.add_option('--output', help='The output file to generate. Valid '
                    'only if there is a single input.')
  parser.add_option('--outputs-zip', help='A zip file containing the processed '
                    'templates. Required if there are multiple inputs.')
  parser.add_option('--inputs-base-dir', help='A common ancestor directory of '
                    'the inputs. Each output\'s path in the output zip will '
                    'match the relative path from INPUTS_BASE_DIR to the '
                    # FIX: message said '--output-zip'; the flag is
                    # '--outputs-zip'.
                    'input. Required if --outputs-zip is given.')
  parser.add_option('--loader-base-dir', help='Base path used by the template '
                    'loader. Must be a common ancestor directory of '
                    'the inputs. Defaults to CHROMIUM_SRC.',
                    default=build_utils.CHROMIUM_SRC)
  parser.add_option('--variables', help='Variables to be made available in the '
                    'template processing environment, as a GYP list (e.g. '
                    '--variables "channel=beta mstone=39")', default='')
  options, args = parser.parse_args()

  build_utils.CheckOptions(options, parser, required=['inputs'])
  inputs = build_utils.ParseGypList(options.inputs)

  # FIX: these errors previously referred to a nonexistent '--output-zip'
  # flag; the actual option is '--outputs-zip'.
  if (options.output is None) == (options.outputs_zip is None):
    parser.error('Exactly one of --output and --outputs-zip must be given')
  if options.output and len(inputs) != 1:
    parser.error('--output cannot be used with multiple inputs')
  if options.outputs_zip and not options.inputs_base_dir:
    parser.error('--inputs-base-dir must be given when --outputs-zip is used')
  if args:
    parser.error('No positional arguments should be given.')

  variables = {}
  for v in build_utils.ParseGypList(options.variables):
    if '=' not in v:
      parser.error('--variables argument must contain "=": ' + v)
    name, _, value = v.partition('=')
    variables[name] = value

  loader = RecordingFileSystemLoader(options.loader_base_dir)
  env = jinja2.Environment(loader=loader, undefined=jinja2.StrictUndefined,
                           line_comment_prefix='##')
  if options.output:
    ProcessFile(env, inputs[0], options.loader_base_dir, options.output,
                variables)
  else:
    ProcessFiles(env, inputs, options.loader_base_dir, options.inputs_base_dir,
                 options.outputs_zip, variables)

  if options.depfile:
    # Depend on every template the loader actually touched.
    deps = loader.get_loaded_templates() + build_utils.GetPythonDependencies()
    build_utils.WriteDepfile(options.depfile, deps)


if __name__ == '__main__':
  main()
"""Runs Android's lint tool."""


import optparse
import os
import sys
from xml.dom import minidom

from util import build_utils


# Absolute path of the top-level src directory (three levels up from here).
_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                         '..', '..', '..'))


def _RunLint(lint_path, config_path, processed_config_path, manifest_path,
             result_path, product_dir, sources, jar_path, resource_dir=None):
  # Runs lint over |sources| and |jar_path|; returns 0 on success, 1 when
  # lint reported issues or failed. All paths handed to lint are relativized
  # to _SRC_ROOT so the suppression config and output are machine-independent.

  def _RelativizePath(path):
    """Returns relative path to top-level src dir.

    Args:
      path: A path relative to cwd.
    """
    return os.path.relpath(os.path.abspath(path), _SRC_ROOT)

  def _ProcessConfigFile():
    # Regenerates the processed config only when the source config is newer,
    # substituting the literal 'PRODUCT_DIR' placeholder.
    if not build_utils.IsTimeStale(processed_config_path, [config_path]):
      return

    with open(config_path, 'rb') as f:
      content = f.read().replace(
          'PRODUCT_DIR', _RelativizePath(product_dir))

    with open(processed_config_path, 'wb') as f:
      f.write(content)

  def _ProcessResultFile():
    # Inverse of _ProcessConfigFile: rewrites the result XML in place so the
    # real product dir path is replaced by the 'PRODUCT_DIR' placeholder.
    with open(result_path, 'rb') as f:
      content = f.read().replace(
          _RelativizePath(product_dir), 'PRODUCT_DIR')

    with open(result_path, 'wb') as f:
      f.write(content)

  def _ParseAndShowResultFile():
    # Prints each <issue> from lint's XML output to stderr; returns the count.
    dom = minidom.parse(result_path)
    issues = dom.getElementsByTagName('issue')
    print >> sys.stderr
    for issue in issues:
      issue_id = issue.attributes['id'].value
      message = issue.attributes['message'].value
      location_elem = issue.getElementsByTagName('location')[0]
      path = location_elem.attributes['file'].value
      line = location_elem.getAttribute('line')
      if line:
        error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
      else:
        # Issues in class files don't have a line number.
        error = '%s %s: %s [warning]' % (path, message, issue_id)
      print >> sys.stderr, error
      for attr in ['errorLine1', 'errorLine2']:
        error_line = issue.getAttribute(attr)
        if error_line:
          print >> sys.stderr, error_line
    return len(issues)

  with build_utils.TempDir() as temp_dir:
    _ProcessConfigFile()

    cmd = [
        _RelativizePath(lint_path), '-Werror', '--exitcode', '--showall',
        '--config', _RelativizePath(processed_config_path),
        '--classpath', _RelativizePath(jar_path),
        '--xml', _RelativizePath(result_path),
    ]
    if resource_dir:
      cmd.extend(['--resources', _RelativizePath(resource_dir)])

    # There may be multiple source files with the same basename (but in
    # different directories). It is difficult to determine what part of the path
    # corresponds to the java package, and so instead just link the source files
    # into temporary directories (creating a new one whenever there is a name
    # conflict).
    src_dirs = []
    def NewSourceDir():
      # Creates dir number len(src_dirs) under temp_dir and registers it as
      # an extra --sources argument.
      new_dir = os.path.join(temp_dir, str(len(src_dirs)))
      os.mkdir(new_dir)
      src_dirs.append(new_dir)
      cmd.extend(['--sources', _RelativizePath(new_dir)])
      return new_dir

    def PathInDir(d, src):
      return os.path.join(d, os.path.basename(src))

    for src in sources:
      # Place each source in the first dir where its basename is still free.
      src_dir = None
      for d in src_dirs:
        if not os.path.exists(PathInDir(d, src)):
          src_dir = d
          break
      if not src_dir:
        src_dir = NewSourceDir()
      os.symlink(os.path.abspath(src), PathInDir(src_dir, src))

    cmd.append(_RelativizePath(os.path.join(manifest_path, os.pardir)))

    if os.path.exists(result_path):
      os.remove(result_path)

    try:
      build_utils.CheckOutput(cmd, cwd=_SRC_ROOT)
    except build_utils.CalledProcessError as e:
      # There is a problem with lint usage
      if not os.path.exists(result_path):
        print 'Something is wrong:'
        print e
        return 1

      # There are actual lint issues
      else:
        try:
          num_issues = _ParseAndShowResultFile()
        except Exception:
          print 'Lint created unparseable xml file...'
          print 'File contents:'
          with open(result_path) as f:
            print f.read()
          return 1

        _ProcessResultFile()
        msg = ('\nLint found %d new issues.\n'
               ' - For full explanation refer to %s\n'
               ' - Wanna suppress these issues?\n'
               '   1. Read comment in %s\n'
               '   2. Run "python %s %s"\n' %
               (num_issues,
                _RelativizePath(result_path),
                _RelativizePath(config_path),
                _RelativizePath(os.path.join(_SRC_ROOT, 'build', 'android',
                                             'lint', 'suppress.py')),
                _RelativizePath(result_path)))
        print >> sys.stderr, msg
        return 1

  return 0


def main():
  # Entry point: only runs lint when --enable is given; otherwise just
  # handles the depfile/stamp bookkeeping.
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--lint-path', help='Path to lint executable.')
  parser.add_option('--config-path', help='Path to lint suppressions file.')
  parser.add_option('--processed-config-path',
                    help='Path to processed lint suppressions file.')
  parser.add_option('--manifest-path', help='Path to AndroidManifest.xml')
  parser.add_option('--result-path', help='Path to XML lint result file.')
  parser.add_option('--product-dir', help='Path to product dir.')
  parser.add_option('--src-dirs', help='Directories containing java files.')
  parser.add_option('--java-files', help='Paths to java files.')
  parser.add_option('--jar-path', help='Jar file containing class files.')
  parser.add_option('--resource-dir', help='Path to resource dir.')
  parser.add_option('--can-fail-build', action='store_true',
                    help='If set, script will exit with nonzero exit status'
                    ' if lint errors are present')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--enable', action='store_true',
                    help='Run lint instead of just touching stamp.')

  options, _ = parser.parse_args()

  build_utils.CheckOptions(
      options, parser, required=['lint_path', 'config_path',
                                 'processed_config_path', 'manifest_path',
                                 'result_path', 'product_dir',
                                 'jar_path'])

  rc = 0

  if options.enable:
    sources = []
    if options.src_dirs:
      src_dirs = build_utils.ParseGypList(options.src_dirs)
      sources = build_utils.FindInDirectories(src_dirs, '*.java')
    elif options.java_files:
      sources = build_utils.ParseGypList(options.java_files)
    else:
      print 'One of --src-dirs or --java-files must be specified.'
      return 1
    rc = _RunLint(options.lint_path, options.config_path,
                  options.processed_config_path,
                  options.manifest_path, options.result_path,
                  options.product_dir, sources, options.jar_path,
                  options.resource_dir)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        build_utils.GetPythonDependencies())

  if options.stamp and not rc:
    build_utils.Touch(options.stamp)

  # Lint failures only fail the build when --can-fail-build is passed.
  return rc if options.can_fail_build else 0


if __name__ == '__main__':
  sys.exit(main())
def ToResourceFileName(name):
  """Returns the resource-compatible file name for the given file."""
  # Resources file names must consist of [a-z0-9_.].
  # Changes extension to .lpak so that compression can be toggled separately
  # for locale pak files vs other pak files.
  return name.replace('-', '_').replace('.pak', '.lpak').lower()


def CreateLocalePaksXml(names):
  """Creates the contents for the locale-paks.xml files.

  NOTE(review): the XML markup in these templates was stripped by text
  extraction (all '<...>' spans lost). Reconstructed from the surviving
  '@raw/%s' fragment and the docstring above: a values XML declaring a
  'locale_paks' array of @raw items. Confirm against upstream
  locale_pak_resources.py.
  """
  VALUES_FILE_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
<resources>
  <array name="locale_paks">%s
  </array>
</resources>
'''
  VALUES_ITEM_TEMPLATE = '''
    <item>@raw/%s</item>'''

  res_names = (os.path.splitext(name)[0] for name in names)
  items = ''.join((VALUES_ITEM_TEMPLATE % name for name in res_names))
  return VALUES_FILE_TEMPLATE % items


# This should stay in sync with:
# base/android/java/src/org/chromium/base/LocaleUtils.java
# (Same values as the module-level map declared earlier in this file;
# repeated here so this group of helpers is self-contained.)
_CHROME_TO_ANDROID_LOCALE_MAP = {
    'he': 'iw',
    'id': 'in',
    'fil': 'tl',
}


def ComputeMappings(sources):
  """Computes the mappings of sources -> resources.

  Returns a tuple of:
    - mappings: List of (src, dest) paths
    - lang_to_locale_map: Map of language -> list of resource names
      e.g. "en" -> ["en_gb.lpak"]
  """
  lang_to_locale_map = collections.defaultdict(list)
  mappings = []
  for src_path in sources:
    basename = os.path.basename(src_path)
    name = os.path.splitext(basename)[0]
    res_name = ToResourceFileName(basename)
    if name == 'en-US':
      # en-US is the fallback locale and lives in the unqualified raw/ dir.
      dest_dir = 'raw'
    else:
      # Chrome uses different region mapping logic from Android, so include
      # all regions for each language.
      android_locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(name, name)
      lang = android_locale[0:2]
      dest_dir = 'raw-' + lang
      lang_to_locale_map[lang].append(res_name)
    mappings.append((src_path, os.path.join(dest_dir, res_name)))
  return mappings, lang_to_locale_map
def main():
  # Entry point: maps locale .pak files into resource configs, optionally
  # printing the covered languages and/or writing a resources zip.
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--locale-paks', help='List of files for res/raw-LOCALE')
  parser.add_option('--resources-zip', help='Path to output resources.zip')
  parser.add_option('--print-languages',
      action='store_true',
      help='Print out the list of languages that cover the given locale paks '
           '(using Android\'s language codes)')

  options, _ = parser.parse_args()
  build_utils.CheckOptions(options, parser,
                           required=['locale_paks'])

  sources = build_utils.ParseGypList(options.locale_paks)

  if options.depfile:
    deps = sources + build_utils.GetPythonDependencies()
    build_utils.WriteDepfile(options.depfile, deps)

  mappings, lang_to_locale_map = ComputeMappings(sources)
  if options.print_languages:
    print '\n'.join(sorted(lang_to_locale_map))

  if options.resources_zip:
    # ZIP_STORED: paks are left uncompressed in the resources zip.
    with zipfile.ZipFile(options.resources_zip, 'w', zipfile.ZIP_STORED) as out:
      for mapping in mappings:
        out.write(mapping[0], mapping[1])

      # Create TypedArray resources so ResourceExtractor can enumerate files.
      def WriteValuesFile(lang, names):
        dest_dir = 'values'
        if lang:
          dest_dir += '-' + lang
        # Always extract en-US.lpak since it's the fallback.
        xml = CreateLocalePaksXml(names + ['en_us.lpak'])
        out.writestr(os.path.join(dest_dir, 'locale-paks.xml'), xml)

      for lang, names in lang_to_locale_map.iteritems():
        WriteValuesFile(lang, names)
      # Default (unqualified) values/ entry lists only the fallback pak.
      WriteValuesFile(None, [])


if __name__ == '__main__':
  sys.exit(main())


# --- build/android/gyp/pack_relocations.py ---

def PackLibraryRelocations(android_pack_relocations, library_path, output_path):
  # Copies the library, then runs the relocation packer on the copy in place.
  shutil.copy(library_path, output_path)
  pack_command = [android_pack_relocations, output_path]
  build_utils.CheckOutput(pack_command)


def CopyLibraryUnchanged(library_path, output_path):
  # Plain copy for libraries excluded from packing (or when packing is off).
  shutil.copy(library_path, output_path)
def main(args):
  # Entry point: packs relocations in each library (Release + enabled only),
  # otherwise copies them verbatim into the packed-libraries dir.
  args = build_utils.ExpandFileArgs(args)
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--clear-dir', action='store_true',
                    help='If set, the destination directory will be deleted '
                    'before copying files to it. This is highly recommended to '
                    'ensure that no stale files are left in the directory.')

  parser.add_option('--configuration-name',
      default='Release',
      help='Gyp configuration name (i.e. Debug, Release)')
  parser.add_option('--enable-packing',
      choices=['0', '1'],
      help=('Pack relocations if 1 and configuration name is \'Release\','
            ' otherwise plain file copy'))
  parser.add_option('--exclude-packing-list',
      default='',
      help='Names of any libraries explicitly not packed')
  parser.add_option('--android-pack-relocations',
      help='Path to the relocations packer binary')
  parser.add_option('--stripped-libraries-dir',
      help='Directory for stripped libraries')
  parser.add_option('--packed-libraries-dir',
      help='Directory for packed libraries')
  parser.add_option('--libraries', action='append',
      help='List of libraries')
  parser.add_option('--stamp', help='Path to touch on success')

  options, _ = parser.parse_args(args)
  # Packing requires both the flag and a Release configuration.
  enable_packing = (options.enable_packing == '1' and
                    options.configuration_name == 'Release')
  exclude_packing_set = set(shlex.split(options.exclude_packing_list))

  libraries = []
  for libs_arg in options.libraries:
    libraries += build_utils.ParseGypList(libs_arg)

  if options.clear_dir:
    build_utils.DeleteDirectory(options.packed_libraries_dir)

  build_utils.MakeDirectory(options.packed_libraries_dir)

  for library in libraries:
    library_path = os.path.join(options.stripped_libraries_dir, library)
    output_path = os.path.join(
        options.packed_libraries_dir, os.path.basename(library))

    if enable_packing and library not in exclude_packing_set:
      PackLibraryRelocations(options.android_pack_relocations,
                             library_path,
                             output_path)
    else:
      CopyLibraryUnchanged(library_path, output_path)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        libraries + build_utils.GetPythonDependencies())

  if options.stamp:
    build_utils.Touch(options.stamp)

  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
+ 'ldrtl-hdpi-v4', + 'sw600dp-hdpi-v13', + 'ldrtl-hdpi-v17', + 'ldrtl-sw600dp-hdpi-v17', + 'hdpi-v21', + ), + 'xhdpi': ( + 'xhdpi-v4', + 'ldrtl-xhdpi-v4', + 'sw600dp-xhdpi-v13', + 'ldrtl-xhdpi-v17', + 'ldrtl-sw600dp-xhdpi-v17', + 'xhdpi-v21', + ), + 'xxhdpi': ( + 'xxhdpi-v4', + 'ldrtl-xxhdpi-v4', + 'sw600dp-xxhdpi-v13', + 'ldrtl-xxhdpi-v17', + 'ldrtl-sw600dp-xxhdpi-v17', + 'xxhdpi-v21', + ), + 'xxxhdpi': ( + 'xxxhdpi-v4', + 'ldrtl-xxxhdpi-v4', + 'sw600dp-xxxhdpi-v13', + 'ldrtl-xxxhdpi-v17', + 'ldrtl-sw600dp-xxxhdpi-v17', + 'xxxhdpi-v21', + ), + 'tvdpi': ( + 'tvdpi-v4', + 'sw600dp-tvdpi-v13', + 'ldrtl-sw600dp-tvdpi-v17', + ), +} + + +def ParseArgs(): + """Parses command line options. + + Returns: + An options object as from optparse.OptionsParser.parse_args() + """ + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + parser.add_option('--android-sdk', help='path to the Android SDK folder') + parser.add_option('--aapt-path', + help='path to the Android aapt tool') + + parser.add_option('--configuration-name', + help='Gyp\'s configuration name (Debug or Release).') + + parser.add_option('--android-manifest', help='AndroidManifest.xml path') + parser.add_option('--version-code', help='Version code for apk.') + parser.add_option('--version-name', help='Version name for apk.') + parser.add_option( + '--shared-resources', + action='store_true', + help='Make a resource package that can be loaded by a different' + 'application at runtime to access the package\'s resources.') + parser.add_option('--resource-zips', + help='zip files containing resources to be packaged') + parser.add_option('--asset-dir', + help='directories containing assets to be packaged') + parser.add_option('--no-compress', help='disables compression for the ' + 'given comma separated list of extensions') + parser.add_option( + '--create-density-splits', + action='store_true', + help='Enables density splits') + parser.add_option('--language-splits', + help='GYP list of languages to 
def MoveImagesToNonMdpiFolders(res_root):
  """Relocate images from drawable-*-mdpi-* folders into drawable-* folders.

  Why? http://crbug.com/289843
  """
  for entry in os.listdir(res_root):
    parts = entry.split('-')
    is_mdpi_drawable = parts[0] == 'drawable' and 'mdpi' in parts
    if not is_mdpi_drawable:
      continue
    source_dir = os.path.join(res_root, entry)
    if not os.path.isdir(source_dir):
      continue
    kept_parts = [p for p in parts if p != 'mdpi']
    assert kept_parts != parts
    target_dir = os.path.join(res_root, '-'.join(kept_parts))
    build_utils.MakeDirectory(target_dir)
    for name in os.listdir(source_dir):
      if not name.endswith('.png'):
        continue
      destination = os.path.join(target_dir, name)
      # The destination must not already exist; clobbering would hide a
      # resource conflict.
      assert not os.path.lexists(destination)
      shutil.move(os.path.join(source_dir, name), destination)
+ """ + subdirs = [os.path.join(d, s) for s in os.listdir(d)] + subdirs = [s for s in subdirs if os.path.isdir(s)] + is_multi = '0' in [os.path.basename(s) for s in subdirs] + if is_multi: + res_dirs = sorted(subdirs, key=lambda p : int(os.path.basename(p))) + else: + res_dirs = [d] + package_command = [] + for d in res_dirs: + MoveImagesToNonMdpiFolders(d) + package_command += ['-S', d] + return package_command + + +def RenameDensitySplits(apk_path): + """Renames all density splits to have shorter / predictable names.""" + for density, config in DENSITY_SPLITS.iteritems(): + src_path = '%s_%s' % (apk_path, '_'.join(config)) + dst_path = '%s_%s' % (apk_path, density) + if src_path != dst_path: + if os.path.exists(dst_path): + os.unlink(dst_path) + os.rename(src_path, dst_path) + + +def CheckForMissedConfigs(apk_path, check_density, languages): + """Raises an exception if apk_path contains any unexpected configs.""" + triggers = [] + if check_density: + triggers.extend(re.compile('-%s' % density) for density in DENSITY_SPLITS) + if languages: + triggers.extend(re.compile(r'-%s\b' % lang) for lang in languages) + with zipfile.ZipFile(apk_path) as main_apk_zip: + for name in main_apk_zip.namelist(): + for trigger in triggers: + if trigger.search(name) and not 'mipmap-' in name: + raise Exception(('Found config in main apk that should have been ' + + 'put into a split: %s\nYou need to update ' + + 'package_resources.py to include this new ' + + 'config (trigger=%s)') % (name, trigger.pattern)) + + +def main(): + options = ParseArgs() + android_jar = os.path.join(options.android_sdk, 'android.jar') + aapt = options.aapt_path + + with build_utils.TempDir() as temp_dir: + package_command = [aapt, + 'package', + '--version-code', options.version_code, + '--version-name', options.version_name, + '-M', options.android_manifest, + '--no-crunch', + '-f', + '--auto-add-overlay', + '-I', android_jar, + '-F', options.apk_path, + '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN, 
+ ] + + if options.no_compress: + for ext in options.no_compress.split(','): + package_command += ['-0', ext] + if options.shared_resources: + package_command.append('--shared-lib') + + if options.asset_dir and os.path.exists(options.asset_dir): + package_command += ['-A', options.asset_dir] + + if options.resource_zips: + dep_zips = build_utils.ParseGypList(options.resource_zips) + for z in dep_zips: + subdir = os.path.join(temp_dir, os.path.basename(z)) + if os.path.exists(subdir): + raise Exception('Resource zip name conflict: ' + os.path.basename(z)) + build_utils.ExtractAll(z, path=subdir) + package_command += PackageArgsForExtractedZip(subdir) + + if options.create_density_splits: + for config in DENSITY_SPLITS.itervalues(): + package_command.extend(('--split', ','.join(config))) + + language_splits = None + if options.language_splits: + language_splits = build_utils.ParseGypList(options.language_splits) + for lang in language_splits: + package_command.extend(('--split', lang)) + + if 'Debug' in options.configuration_name: + package_command += ['--debug-mode'] + + build_utils.CheckOutput( + package_command, print_stdout=False, print_stderr=False) + + if options.create_density_splits or language_splits: + CheckForMissedConfigs( + options.apk_path, options.create_density_splits, language_splits) + + if options.create_density_splits: + RenameDensitySplits(options.apk_path) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + build_utils.GetPythonDependencies()) + + +if __name__ == '__main__': + main() diff --git a/build/android/gyp/process_resources.py b/build/android/gyp/process_resources.py new file mode 100755 index 00000000000..d227954ae96 --- /dev/null +++ b/build/android/gyp/process_resources.py @@ -0,0 +1,420 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Process Android resources to generate R.java, and prepare for packaging. + +This will crunch images and generate v14 compatible resources +(see generate_v14_compatible_resources.py). +""" + +import codecs +import optparse +import os +import re +import shutil +import sys +import zipfile + +import generate_v14_compatible_resources + +from util import build_utils + +# Import jinja2 from third_party/jinja2 +sys.path.insert(1, + os.path.join(os.path.dirname(__file__), '../../../third_party')) +from jinja2 import Template # pylint: disable=F0401 + + +def ParseArgs(args): + """Parses command line options. + + Returns: + An options object as from optparse.OptionsParser.parse_args() + """ + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + + parser.add_option('--android-sdk', help='path to the Android SDK folder') + parser.add_option('--aapt-path', + help='path to the Android aapt tool') + parser.add_option('--non-constant-id', action='store_true') + + parser.add_option('--android-manifest', help='AndroidManifest.xml path') + parser.add_option('--custom-package', help='Java package for R.java') + parser.add_option( + '--shared-resources', + action='store_true', + help='Make a resource package that can be loaded by a different' + 'application at runtime to access the package\'s resources.') + + parser.add_option('--resource-dirs', + help='Directories containing resources of this target.') + parser.add_option('--dependencies-res-zips', + help='Resources from dependents.') + + parser.add_option('--resource-zip-out', + help='Path for output zipped resources.') + + parser.add_option('--R-dir', + help='directory to hold generated R.java.') + parser.add_option('--srcjar-out', + help='Path to srcjar to contain generated R.java.') + parser.add_option('--r-text-out', + help='Path to store the R.txt file generated by appt.') + + parser.add_option('--proguard-file', + help='Path to proguard.txt generated file') + + parser.add_option( + '--v14-skip', + 
action="store_true", + help='Do not generate nor verify v14 resources') + + parser.add_option( + '--extra-res-packages', + help='Additional package names to generate R.java files for') + parser.add_option( + '--extra-r-text-files', + help='For each additional package, the R.txt file should contain a ' + 'list of resources to be included in the R.java file in the format ' + 'generated by aapt') + parser.add_option( + '--include-all-resources', + action='store_true', + help='Include every resource ID in every generated R.java file ' + '(ignoring R.txt).') + + parser.add_option( + '--all-resources-zip-out', + help='Path for output of all resources. This includes resources in ' + 'dependencies.') + + parser.add_option('--stamp', help='File to touch on success') + + (options, args) = parser.parse_args(args) + + if args: + parser.error('No positional arguments should be given.') + + # Check that required options have been provided. + required_options = ( + 'android_sdk', + 'aapt_path', + 'android_manifest', + 'dependencies_res_zips', + 'resource_dirs', + 'resource_zip_out', + ) + build_utils.CheckOptions(options, parser, required=required_options) + + if (options.R_dir is None) == (options.srcjar_out is None): + raise Exception('Exactly one of --R-dir or --srcjar-out must be specified.') + + return options + + +def CreateExtraRJavaFiles( + r_dir, extra_packages, extra_r_text_files, shared_resources, include_all): + if include_all: + java_files = build_utils.FindInDirectory(r_dir, "R.java") + if len(java_files) != 1: + return + r_java_file = java_files[0] + r_java_contents = codecs.open(r_java_file, encoding='utf-8').read() + + for package in extra_packages: + package_r_java_dir = os.path.join(r_dir, *package.split('.')) + build_utils.MakeDirectory(package_r_java_dir) + package_r_java_path = os.path.join(package_r_java_dir, 'R.java') + new_r_java = re.sub(r'package [.\w]*;', u'package %s;' % package, + r_java_contents) + codecs.open(package_r_java_path, 'w', 
encoding='utf-8').write(new_r_java) + else: + if len(extra_packages) != len(extra_r_text_files): + raise Exception('Need one R.txt file per extra package') + + all_resources = {} + r_txt_file = os.path.join(r_dir, 'R.txt') + if not os.path.exists(r_txt_file): + return + with open(r_txt_file) as f: + for line in f: + m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line) + if not m: + raise Exception('Unexpected line in R.txt: %s' % line) + java_type, resource_type, name, value = m.groups() + all_resources[(resource_type, name)] = (java_type, value) + + for package, r_text_file in zip(extra_packages, extra_r_text_files): + if os.path.exists(r_text_file): + package_r_java_dir = os.path.join(r_dir, *package.split('.')) + build_utils.MakeDirectory(package_r_java_dir) + package_r_java_path = os.path.join(package_r_java_dir, 'R.java') + CreateExtraRJavaFile( + package, package_r_java_path, r_text_file, all_resources, + shared_resources) + + +def CreateExtraRJavaFile( + package, r_java_path, r_text_file, all_resources, shared_resources): + resources = {} + with open(r_text_file) as f: + for line in f: + m = re.match(r'int(?:\[\])? (\w+) (\w+) ', line) + if not m: + raise Exception('Unexpected line in R.txt: %s' % line) + resource_type, name = m.groups() + java_type, value = all_resources[(resource_type, name)] + if resource_type not in resources: + resources[resource_type] = [] + resources[resource_type].append((name, java_type, value)) + + template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. 
*/ + +package {{ package }}; + +public final class R { + {% for resource_type in resources %} + public static final class {{ resource_type }} { + {% for name, java_type, value in resources[resource_type] %} + {% if shared_resources %} + public static {{ java_type }} {{ name }} = {{ value }}; + {% else %} + public static final {{ java_type }} {{ name }} = {{ value }}; + {% endif %} + {% endfor %} + } + {% endfor %} + {% if shared_resources %} + public static void onResourcesLoaded(int packageId) { + {% for resource_type in resources %} + {% for name, java_type, value in resources[resource_type] %} + {% if java_type == 'int[]' %} + for(int i = 0; i < {{ resource_type }}.{{ name }}.length; ++i) { + {{ resource_type }}.{{ name }}[i] = + ({{ resource_type }}.{{ name }}[i] & 0x00ffffff) + | (packageId << 24); + } + {% else %} + {{ resource_type }}.{{ name }} = + ({{ resource_type }}.{{ name }} & 0x00ffffff) + | (packageId << 24); + {% endif %} + {% endfor %} + {% endfor %} + } + {% endif %} +} +""", trim_blocks=True, lstrip_blocks=True) + + output = template.render(package=package, resources=resources, + shared_resources=shared_resources) + with open(r_java_path, 'w') as f: + f.write(output) + + +def CrunchDirectory(aapt, input_dir, output_dir): + """Crunches the images in input_dir and its subdirectories into output_dir. + + If an image is already optimized, crunching often increases image size. In + this case, the crunched image is overwritten with the original image. + """ + aapt_cmd = [aapt, + 'crunch', + '-C', output_dir, + '-S', input_dir, + '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN] + build_utils.CheckOutput(aapt_cmd, stderr_filter=FilterCrunchStderr, + fail_func=DidCrunchFail) + + # Check for images whose size increased during crunching and replace them + # with their originals (except for 9-patches, which must be crunched). 
def FilterCrunchStderr(stderr):
  """Filters out lines from aapt crunch's stderr that can safely be ignored."""
  filtered_lines = []
  for line in stderr.splitlines(True):
    # Ignore this libpng warning, which is a known non-error condition.
    # http://crbug.com/364355
    if ('libpng warning: iCCP: Not recognizing known sRGB profile that has '
        'been edited' in line):
      continue
    filtered_lines.append(line)
  return ''.join(filtered_lines)


def DidCrunchFail(returncode, stderr):
  """Determines whether aapt crunch failed from its return code and output.

  Because aapt's return code cannot be trusted, any output to stderr is
  an indication that aapt has failed (http://crbug.com/314885).
  """
  return returncode != 0 or stderr


def ZipResources(resource_dirs, zip_path):
  """Zips all files under resource_dirs into zip_path.

  Python zipfile does not provide a way to replace a file (it just writes
  another file with the same name). So, first collect all the files to put
  in the zip (with proper overriding: later resource_dirs win), then zip.
  """
  files_to_zip = {}
  for d in resource_dirs:
    for root, _, files in os.walk(d):
      for f in files:
        archive_path = os.path.join(os.path.relpath(root, d), f)
        files_to_zip[archive_path] = os.path.join(root, f)
  with zipfile.ZipFile(zip_path, 'w') as outzip:
    # .items() instead of the Python-2-only .iteritems() so this also runs
    # under Python 3.
    for archive_path, path in files_to_zip.items():
      outzip.write(path, archive_path)


def CombineZips(zip_files, output_path):
  """Merges zip_files into output_path, prefixing entries with 0/, 1/, ...

  When packaging resources, if the top-level directories in the zip file are
  of the form 0, 1, ..., then each subdirectory will be passed to aapt as a
  resources directory. While some resources just clobber others (image files,
  etc), other resources (particularly .xml files) need to be more
  intelligently merged. That merging is left up to aapt.
  """
  with zipfile.ZipFile(output_path, 'w') as outzip:
    for i, z in enumerate(zip_files):
      with zipfile.ZipFile(z, 'r') as inzip:
        for name in inzip.namelist():
          new_name = '%d/%s' % (i, name)
          outzip.writestr(new_name, inzip.read(name))
name conflict: ' + os.path.basename(z)) + build_utils.ExtractAll(z, path=subdir) + dep_subdirs.append(subdir) + + # Generate R.java. This R.java contains non-final constants and is used only + # while compiling the library jar (e.g. chromium_content.jar). When building + # an apk, a new R.java file with the correct resource -> ID mappings will be + # generated by merging the resources from all libraries and the main apk + # project. + package_command = [aapt, + 'package', + '-m', + '-M', options.android_manifest, + '--auto-add-overlay', + '-I', android_jar, + '--output-text-symbols', gen_dir, + '-J', gen_dir, + '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN] + + for d in input_resource_dirs: + package_command += ['-S', d] + + for d in dep_subdirs: + package_command += ['-S', d] + + if options.non_constant_id: + package_command.append('--non-constant-id') + if options.custom_package: + package_command += ['--custom-package', options.custom_package] + if options.proguard_file: + package_command += ['-G', options.proguard_file] + if options.shared_resources: + package_command.append('--shared-lib') + build_utils.CheckOutput(package_command, print_stderr=False) + + if options.extra_res_packages: + CreateExtraRJavaFiles( + gen_dir, + build_utils.ParseGypList(options.extra_res_packages), + build_utils.ParseGypList(options.extra_r_text_files), + options.shared_resources, + options.include_all_resources) + + # This is the list of directories with resources to put in the final .zip + # file. The order of these is important so that crunched/v14 resources + # override the normal ones. + zip_resource_dirs = input_resource_dirs + [v14_dir] + + base_crunch_dir = os.path.join(temp_dir, 'crunch') + + # Crunch image resources. This shrinks png files and is necessary for + # 9-patch images to display correctly. 'aapt crunch' accepts only a single + # directory at a time and deletes everything in the output directory. 
+ for idx, input_dir in enumerate(input_resource_dirs): + crunch_dir = os.path.join(base_crunch_dir, str(idx)) + build_utils.MakeDirectory(crunch_dir) + zip_resource_dirs.append(crunch_dir) + CrunchDirectory(aapt, input_dir, crunch_dir) + + ZipResources(zip_resource_dirs, options.resource_zip_out) + + if options.all_resources_zip_out: + CombineZips([options.resource_zip_out] + dep_zips, + options.all_resources_zip_out) + + if options.R_dir: + build_utils.DeleteDirectory(options.R_dir) + shutil.copytree(gen_dir, options.R_dir) + else: + build_utils.ZipDir(options.srcjar_out, gen_dir) + + if options.r_text_out: + r_text_path = os.path.join(gen_dir, 'R.txt') + if os.path.exists(r_text_path): + shutil.copyfile(r_text_path, options.r_text_out) + else: + open(options.r_text_out, 'w').close() + + if options.depfile: + input_files += build_utils.GetPythonDependencies() + build_utils.WriteDepfile(options.depfile, input_files) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + main() diff --git a/build/android/gyp/proguard.py b/build/android/gyp/proguard.py new file mode 100755 index 00000000000..5127100a890 --- /dev/null +++ b/build/android/gyp/proguard.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
def DoProguard(options):
  """Runs proguard over the input jars as described by |options|.

  Builds a proguard command via proguard_util.ProguardCmdBuilder from the
  gyp-list options (--input-paths, --proguard-configs, --classpath), runs
  it, and returns the inputs proguard depended on (the caller writes these
  into a depfile).

  Returns:
    The list of input paths reported by proguard.GetInputs().
  """
  proguard = proguard_util.ProguardCmdBuilder(options.proguard_path)
  proguard.injars(build_utils.ParseGypList(options.input_paths))
  proguard.configs(build_utils.ParseGypList(options.proguard_configs))
  proguard.outjar(options.output_path)

  if options.mapping:
    proguard.mapping(options.mapping)

  if options.is_test:
    proguard.is_test(True)

  # --classpath may be passed multiple times; each value is itself a gyp
  # list. Flatten, then de-duplicate. NOTE: list(set(...)) does not preserve
  # order.
  classpath = []
  for arg in options.classpath:
    classpath += build_utils.ParseGypList(arg)
  classpath = list(set(classpath))
  proguard.libraryjars(classpath)

  proguard.CheckOutput()

  return proguard.GetInputs()
def DoPush(options):
  """Pushes each listed library to the configured build device.

  Silently returns when no build device configuration is available. Each
  library is only pushed when it (or its destination path) changed since the
  last recorded push for that device.
  """
  libraries = build_utils.ParseGypList(options.libraries)

  device = build_device.GetBuildDeviceFromPath(
      options.build_device_configuration)
  if not device:
    return

  serial_number = device.GetSerialNumber()
  # A mutable list (rather than a bool) so that Push, a closure, can clear it
  # once the device directory has been created.
  needs_directory = [True]
  for lib in libraries:
    device_path = os.path.join(options.device_dir, lib)
    host_path = os.path.join(options.libraries_dir, lib)

    def Push(host_path=host_path, device_path=device_path):
      # Default-argument binding pins the current loop values for this
      # callback.
      if needs_directory:
        device.RunShellCommand('mkdir -p ' + options.device_dir)
        needs_directory[:] = []  # = False
      device.PushChangedFiles([(host_path, device_path)])

    # Per-library, per-device stamp used to skip unchanged pushes.
    record_path = '%s.%s.push.md5.stamp' % (host_path, serial_number)
    md5_check.CallAndRecordIfStale(
        Push,
        record_path=record_path,
        input_paths=[host_path],
        input_strings=[device_path])


def main(args):
  """Parses arguments and pushes the requested libraries."""
  args = build_utils.ExpandFileArgs(args)
  parser = optparse.OptionParser()
  parser.add_option('--libraries-dir',
                    help='Directory that contains stripped libraries.')
  parser.add_option('--device-dir',
                    help='Device directory to push the libraries to.')
  parser.add_option('--libraries',
                    help='List of native libraries.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--build-device-configuration',
                    help='Path to build device configuration.')
  parser.add_option('--configuration-name',
                    help='The build CONFIGURATION_NAME')
  options, _ = parser.parse_args(args)

  # Fixed: the original list was ['libraries', 'device_dir', 'libraries'],
  # which named 'libraries' twice and never validated --libraries-dir.
  required_options = ['libraries', 'device_dir', 'libraries_dir']
  build_utils.CheckOptions(options, parser, required=required_options)
  constants.SetBuildType(options.configuration_name)

  DoPush(options)

  if options.stamp:
    build_utils.Touch(options.stamp)
build_utils.MakeDirectory(options.stripped_libraries_dir) + + for library in libraries: + for base_path in options.libraries_dir.split(','): + library_path = os.path.join(base_path, library) + if (os.path.exists(library_path)): + break + stripped_library_path = os.path.join( + options.stripped_libraries_dir, library) + StripLibrary(options.android_strip, options.android_strip_arg, library_path, + stripped_library_path) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/test/BUILD.gn b/build/android/gyp/test/BUILD.gn new file mode 100644 index 00000000000..2deac1d56f2 --- /dev/null +++ b/build/android/gyp/test/BUILD.gn @@ -0,0 +1,13 @@ +import("//build/config/android/rules.gni") + +java_library("hello_world_java") { + java_files = [ "java/org/chromium/helloworld/HelloWorldPrinter.java" ] +} + +java_binary("hello_world") { + deps = [ + ":hello_world_java", + ] + java_files = [ "java/org/chromium/helloworld/HelloWorldMain.java" ] + main_class = "org.chromium.helloworld.HelloWorldMain" +} diff --git a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java new file mode 100644 index 00000000000..10860d8332d --- /dev/null +++ b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java @@ -0,0 +1,15 @@ +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
def main(argv):
  """Touches every file named on the command line.

  argv[0] is the script name; each remaining argument is a path passed to
  build_utils.Touch (which creates the file if needed and updates its mtime).
  """
  for f in argv[1:]:
    build_utils.Touch(f)
class BuildDevice(object):
  """Simple device interface for build steps, wrapping pylib DeviceUtils.

  Constructed from a configuration dict previously produced by
  GetConfigurationForDevice and saved via WriteConfigurations.
  """

  def __init__(self, configuration):
    self.id = configuration['id']
    self.description = configuration['description']
    # Cached 'ls -l /data/app' output captured when the configuration was
    # generated.
    self.install_metadata = configuration['install_metadata']
    self.device = device_utils.DeviceUtils(self.id)

  def RunShellCommand(self, *args, **kwargs):
    return self.device.RunShellCommand(*args, **kwargs)

  def PushChangedFiles(self, *args, **kwargs):
    return self.device.PushChangedFiles(*args, **kwargs)

  def GetSerialNumber(self):
    return self.id

  def Install(self, *args, **kwargs):
    return self.device.Install(*args, **kwargs)

  def InstallSplitApk(self, *args, **kwargs):
    return self.device.InstallSplitApk(*args, **kwargs)

  def GetInstallMetadata(self, apk_package):
    """Gets the metadata on the device for the apk_package apk.

    Returns the first cached 'ls -l' line for apk_package's apk, or None.
    """
    # Matches lines like:
    # -rw-r--r-- system system 7376582 2013-04-19 16:34 \
    #   org.chromium.chrome.shell.apk
    # -rw-r--r-- system system 7376582 2013-04-19 16:34 \
    #   org.chromium.chrome.shell-1.apk
    # Fixed: the '.' before 'apk' is escaped so names like 'shellXapk' no
    # longer match, and a list comprehension replaces filter() so indexing
    # also works under Python 3.
    apk_matcher = lambda s: re.match(r'.*%s(-[0-9]*)?\.apk$' % apk_package, s)
    matches = [s for s in self.install_metadata if apk_matcher(s)]
    return matches[0] if matches else None


def GetConfigurationForDevice(device_id):
  """Probes device_id and returns (configuration-or-None, is_online, has_root)."""
  device = device_utils.DeviceUtils(device_id)
  configuration = None
  has_root = False
  is_online = device.IsOnline()
  if is_online:
    cmd = 'ls -l /data/app; getprop ro.build.description'
    cmd_output = device.RunShellCommand(cmd)
    has_root = 'Permission denied' not in cmd_output[0]
    if not has_root:
      # Disable warning log messages from EnableRoot()
      logging.getLogger().disabled = True
      try:
        device.EnableRoot()
        has_root = True
      except device_errors.CommandFailedError:
        has_root = False
      finally:
        logging.getLogger().disabled = False
      # Re-run the probe now that root may have been gained.
      cmd_output = device.RunShellCommand(cmd)

    configuration = {
        'id': device_id,
        'description': cmd_output[-1],
        'install_metadata': cmd_output[:-1],
    }
  return configuration, is_online, has_root


def WriteConfigurations(configurations, path):
  """Persists configurations to path as JSON.

  Currently we only support installing to the first device.
  """
  build_utils.WriteJson(configurations[:1], path, only_if_changed=True)


def ReadConfigurations(path):
  """Reads a configuration list previously written by WriteConfigurations."""
  return build_utils.ReadJson(path)


def GetBuildDevice(configurations):
  """Builds a BuildDevice from a single-entry configuration list."""
  assert len(configurations) == 1
  return BuildDevice(configurations[0])


def GetBuildDeviceFromPath(path):
  """Returns a BuildDevice for the configuration stored at path, or None.

  Fixed: the configuration file is now read once instead of twice.
  """
  configurations = ReadConfigurations(path)
  if len(configurations) > 0:
    return GetBuildDevice(configurations)
  return None
def MakeDirectory(dir_path):
  """Creates dir_path (and any missing parents); no-op if it already exists.

  BUG FIX: the previous version swallowed *every* OSError, masking real
  failures such as permission errors. Only the already-exists case (including
  a concurrent-creation race) is now ignored.
  """
  try:
    os.makedirs(dir_path)
  except OSError:
    # Re-raise unless the directory is actually there.
    if not os.path.isdir(dir_path):
      raise
def WriteJson(obj, path, only_if_changed=False):
  """Serializes obj as pretty-printed, key-sorted JSON to path.

  When only_if_changed is True and the serialized form is identical to the
  file's current contents, the file is left untouched (preserving its mtime
  so downstream build steps are not re-triggered).
  """
  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))

  previous_dump = None
  if os.path.exists(path):
    with open(path, 'r') as existing:
      previous_dump = existing.read()

  if only_if_changed and previous_dump == new_dump:
    return

  with open(path, 'w') as outfile:
    outfile.write(new_dump)
def CheckZipPath(name):
  """Raises if name is not a canonical, relative zip entry path."""
  if os.path.normpath(name) != name:
    raise Exception('Non-canonical zip path: %s' % name)
  if os.path.isabs(name):
    raise Exception('Absolute zip path: %s' % name)


def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None):
  """Extracts zip_path into path.

  Args:
    zip_path: path of the zip archive to extract.
    path: destination directory; defaults to the current directory and is
        created if missing.
    no_clobber: when True, raise if an extracted file already exists.
    pattern: optional fnmatch pattern; only matching entries are extracted.
  """
  if path is None:
    path = os.getcwd()
  elif not os.path.exists(path):
    MakeDirectory(path)

  with zipfile.ZipFile(zip_path) as z:
    # First pass: validate entries and apply the pattern filter.
    members = []
    for name in z.namelist():
      if name.endswith('/'):
        continue
      if pattern is not None and not fnmatch.fnmatch(name, pattern):
        continue
      CheckZipPath(name)
      if no_clobber:
        output_path = os.path.join(path, name)
        if os.path.exists(output_path):
          raise Exception(
              'Path already exists from zip: %s %s %s'
              % (zip_path, name, output_path))
      members.append(name)

    # BUG FIX: extractall() previously extracted *every* entry, silently
    # ignoring the pattern filter applied above; restrict extraction to the
    # vetted member list.
    z.extractall(path=path, members=members)
def WriteDepfile(path, dependencies):
  """Writes a Makefile-style depfile: '<path>: dep1 dep2 ...\\n'."""
  with open(path, 'w') as depfile:
    depfile.write('%s: %s\n' % (path, ' '.join(dependencies)))
+ """ + new_args = list(args) + file_jsons = dict() + r = re.compile('@FileArg\((.*?)\)') + for i, arg in enumerate(args): + match = r.search(arg) + if not match: + continue + + if match.end() != len(arg): + raise Exception('Unexpected characters after FileArg: ' + arg) + + lookup_path = match.group(1).split(':') + file_path = lookup_path[0] + if not file_path in file_jsons: + file_jsons[file_path] = ReadJson(file_path) + + expansion = file_jsons[file_path] + for k in lookup_path[1:]: + expansion = expansion[k] + + new_args[i] = arg[:match.start()] + str(expansion) + + return new_args + diff --git a/build/android/gyp/util/md5_check.py b/build/android/gyp/util/md5_check.py new file mode 100644 index 00000000000..9f365aa0816 --- /dev/null +++ b/build/android/gyp/util/md5_check.py @@ -0,0 +1,86 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import hashlib +import os + + +def CallAndRecordIfStale( + function, record_path=None, input_paths=None, input_strings=None, + force=False): + """Calls function if the md5sum of the input paths/strings has changed. + + The md5sum of the inputs is compared with the one stored in record_path. If + this has changed (or the record doesn't exist), function will be called and + the new md5sum will be recorded. + + If force is True, the function will be called regardless of whether the + md5sum is out of date. 
+ """ + if not input_paths: + input_paths = [] + if not input_strings: + input_strings = [] + md5_checker = _Md5Checker( + record_path=record_path, + input_paths=input_paths, + input_strings=input_strings) + if force or md5_checker.IsStale(): + function() + md5_checker.Write() + + +def _UpdateMd5ForFile(md5, path, block_size=2**16): + with open(path, 'rb') as infile: + while True: + data = infile.read(block_size) + if not data: + break + md5.update(data) + + +def _UpdateMd5ForDirectory(md5, dir_path): + for root, _, files in os.walk(dir_path): + for f in files: + _UpdateMd5ForFile(md5, os.path.join(root, f)) + + +def _UpdateMd5ForPath(md5, path): + if os.path.isdir(path): + _UpdateMd5ForDirectory(md5, path) + else: + _UpdateMd5ForFile(md5, path) + + +class _Md5Checker(object): + def __init__(self, record_path=None, input_paths=None, input_strings=None): + if not input_paths: + input_paths = [] + if not input_strings: + input_strings = [] + + assert record_path.endswith('.stamp'), ( + 'record paths must end in \'.stamp\' so that they are easy to find ' + 'and delete') + + self.record_path = record_path + + md5 = hashlib.md5() + for i in sorted(input_paths): + _UpdateMd5ForPath(md5, i) + for s in input_strings: + md5.update(s) + self.new_digest = md5.hexdigest() + + self.old_digest = '' + if os.path.exists(self.record_path): + with open(self.record_path, 'r') as old_record: + self.old_digest = old_record.read() + + def IsStale(self): + return self.old_digest != self.new_digest + + def Write(self): + with open(self.record_path, 'w') as new_record: + new_record.write(self.new_digest) diff --git a/build/android/gyp/util/md5_check_test.py b/build/android/gyp/util/md5_check_test.py new file mode 100644 index 00000000000..4f89fc2be83 --- /dev/null +++ b/build/android/gyp/util/md5_check_test.py @@ -0,0 +1,72 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import tempfile +import unittest + +import md5_check # pylint: disable=W0403 + + +class TestMd5Check(unittest.TestCase): + def setUp(self): + self.called = False + + def testCallAndRecordIfStale(self): + input_strings = ['string1', 'string2'] + input_file1 = tempfile.NamedTemporaryFile() + input_file2 = tempfile.NamedTemporaryFile() + file1_contents = 'input file 1' + file2_contents = 'input file 2' + input_file1.write(file1_contents) + input_file1.flush() + input_file2.write(file2_contents) + input_file2.flush() + input_files = [input_file1.name, input_file2.name] + + record_path = tempfile.NamedTemporaryFile(suffix='.stamp') + + def CheckCallAndRecord(should_call, message, force=False): + self.called = False + def MarkCalled(): + self.called = True + md5_check.CallAndRecordIfStale( + MarkCalled, + record_path=record_path.name, + input_paths=input_files, + input_strings=input_strings, + force=force) + self.failUnlessEqual(should_call, self.called, message) + + CheckCallAndRecord(True, 'should call when record doesn\'t exist') + CheckCallAndRecord(False, 'should not call when nothing changed') + CheckCallAndRecord(True, force=True, message='should call when forced') + + input_file1.write('some more input') + input_file1.flush() + CheckCallAndRecord(True, 'changed input file should trigger call') + + input_files = input_files[::-1] + CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call') + + input_files = input_files[:1] + CheckCallAndRecord(True, 'removing file should trigger call') + + input_files.append(input_file2.name) + CheckCallAndRecord(True, 'added input file should trigger call') + + input_strings[0] = input_strings[0] + ' a bit longer' + CheckCallAndRecord(True, 'changed input string should trigger call') + + input_strings = input_strings[::-1] + CheckCallAndRecord(True, 'reordering of string inputs should trigger call') + + input_strings = input_strings[:1] + CheckCallAndRecord(True, 'removing a string should trigger call') + + 
def FilterProguardOutput(output):
  '''Suppresses ProGuard's stdout when it contains only boilerplate.

  ProGuard outputs boring stuff to stdout (proguard version, jar path, etc)
  as well as interesting stuff (notes, warnings, etc). If every line is
  boilerplate the whole output is suppressed (returns ''); otherwise the
  output is returned unchanged.
  '''
  boring_prefixes = (
      'ProGuard, version ',
      'Reading program jar [',
      'Reading library jar [',
      'Preparing output jar [',
      ' Copying resources from program jar [',
  )
  for line in output.splitlines():
    # str.startswith accepts a tuple of prefixes; any non-boilerplate line
    # makes the full output worth printing.
    if not line.startswith(boring_prefixes):
      return output
  return ''
+ cmd += [ + '-outjars', self._outjar, + '-dump', self._outjar + '.dump', + '-printseeds', self._outjar + '.seeds', + '-printusage', self._outjar + '.usage', + '-printmapping', self._outjar + '.mapping', + ] + return cmd + + def GetInputs(self): + inputs = [self._proguard_jar_path] + self._configs + self._injars + if self._mapping: + inputs.append(self._mapping) + if self._libraries: + inputs += self._libraries + return inputs + + + def CheckOutput(self): + build_utils.CheckOutput(self.build(), print_stdout=True, + stdout_filter=FilterProguardOutput) + diff --git a/build/android/gyp/write_build_config.py b/build/android/gyp/write_build_config.py new file mode 100755 index 00000000000..3773e98bc70 --- /dev/null +++ b/build/android/gyp/write_build_config.py @@ -0,0 +1,357 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Writes a build_config file. + +The build_config file for a target is a json file containing information about +how to build that target based on the target's dependencies. This includes +things like: the javac classpath, the list of android resources dependencies, +etc. It also includes the information needed to create the build_config for +other targets that depend on that one. + +Android build scripts should not refer to the build_config directly, and the +build specification should instead pass information in using the special +file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing +of values in a json dict in a file and looks like this: + --python-arg=@FileArg(build_config_path:javac:classpath) + +Note: If paths to input files are passed in this way, it is important that: + 1. inputs/deps of the action ensure that the files are available the first + time the action runs. + 2. Either (a) or (b) + a. 
inputs/deps ensure that the action runs whenever one of the files changes + b. the files are added to the action's depfile +""" + +import optparse +import os +import sys +import xml.dom.minidom + +from util import build_utils + +import write_ordered_libraries + +class AndroidManifest(object): + def __init__(self, path): + self.path = path + dom = xml.dom.minidom.parse(path) + manifests = dom.getElementsByTagName('manifest') + assert len(manifests) == 1 + self.manifest = manifests[0] + + def GetInstrumentation(self): + instrumentation_els = self.manifest.getElementsByTagName('instrumentation') + if len(instrumentation_els) == 0: + return None + if len(instrumentation_els) != 1: + raise Exception( + 'More than one element found in %s' % self.path) + return instrumentation_els[0] + + def CheckInstrumentation(self, expected_package): + instr = self.GetInstrumentation() + if not instr: + raise Exception('No elements found in %s' % self.path) + instrumented_package = instr.getAttributeNS( + 'http://schemas.android.com/apk/res/android', 'targetPackage') + if instrumented_package != expected_package: + raise Exception( + 'Wrong instrumented package. 
dep_config_cache = {}
def GetDepConfig(path):
  """Reads (and memoizes) the 'deps_info' section of a build_config file."""
  if path not in dep_config_cache:
    dep_config_cache[path] = build_utils.ReadJson(path)['deps_info']
  return dep_config_cache[path]


def DepsOfType(wanted_type, configs):
  """Returns the configs whose 'type' field equals wanted_type."""
  return [c for c in configs if c['type'] == wanted_type]


def GetAllDepsConfigsInOrder(deps_config_paths):
  """Returns all transitive dep config paths in dependency order."""
  def GetDeps(path):
    return set(GetDepConfig(path)['deps_configs'])
  return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)


class Deps(object):
  """Query helper over a target's direct and transitive dependency configs."""

  def __init__(self, direct_deps_config_paths):
    self.all_deps_config_paths = GetAllDepsConfigsInOrder(
        direct_deps_config_paths)
    self.direct_deps_configs = [
        GetDepConfig(p) for p in direct_deps_config_paths]
    self.all_deps_configs = [
        GetDepConfig(p) for p in self.all_deps_config_paths]

  def All(self, wanted_type=None):
    """All transitive dep configs, optionally filtered by type.

    BUG FIX: this previously tested `type is None` — `type` is the builtin
    and is never None, so All() with no argument fell through to
    DepsOfType(None, ...) and always returned []. Test wanted_type instead.
    """
    if wanted_type is None:
      return self.all_deps_configs
    return DepsOfType(wanted_type, self.all_deps_configs)

  def Direct(self, wanted_type=None):
    """Direct dep configs only, optionally filtered by type."""
    if wanted_type is None:
      return self.direct_deps_configs
    return DepsOfType(wanted_type, self.direct_deps_configs)

  def AllConfigPaths(self):
    return self.all_deps_config_paths
Missing build_config ' + 'files are handled differently based on the type of this target.') + + # android_resources options + parser.add_option('--srcjar', help='Path to target\'s resources srcjar.') + parser.add_option('--resources-zip', help='Path to target\'s resources zip.') + parser.add_option('--r-text', help='Path to target\'s R.txt file.') + parser.add_option('--package-name', + help='Java package name for these resources.') + parser.add_option('--android-manifest', help='Path to android manifest.') + + # java library options + parser.add_option('--jar-path', help='Path to target\'s jar output.') + parser.add_option('--supports-android', action='store_true', + help='Whether this library supports running on the Android platform.') + parser.add_option('--requires-android', action='store_true', + help='Whether this library requires running on the Android platform.') + parser.add_option('--bypass-platform-checks', action='store_true', + help='Bypass checks for support/require Android platform.') + + # android library options + parser.add_option('--dex-path', help='Path to target\'s dex output.') + + # native library options + parser.add_option('--native-libs', help='List of top-level native libs.') + parser.add_option('--readelf-path', help='Path to toolchain\'s readelf.') + + parser.add_option('--tested-apk-config', + help='Path to the build config of the tested apk (for an instrumentation ' + 'test apk).') + + options, args = parser.parse_args(argv) + + if args: + parser.error('No positional arguments should be given.') + + + if not options.type in [ + 'java_library', 'android_resources', 'android_apk', 'deps_dex']: + raise Exception('Unknown type: <%s>' % options.type) + + required_options = ['build_config'] + { + 'java_library': ['jar_path'], + 'android_resources': ['resources_zip'], + 'android_apk': ['jar_path', 'dex_path', 'resources_zip'], + 'deps_dex': ['dex_path'] + }[options.type] + + if options.native_libs: + required_options.append('readelf_path') + 
+ build_utils.CheckOptions(options, parser, required_options) + + if options.type == 'java_library': + if options.supports_android and not options.dex_path: + raise Exception('java_library that supports Android requires a dex path.') + + if options.requires_android and not options.supports_android: + raise Exception( + '--supports-android is required when using --requires-android') + + possible_deps_config_paths = build_utils.ParseGypList( + options.possible_deps_configs) + + allow_unknown_deps = (options.type == 'android_apk' or + options.type == 'android_resources') + unknown_deps = [ + c for c in possible_deps_config_paths if not os.path.exists(c)] + if unknown_deps and not allow_unknown_deps: + raise Exception('Unknown deps: ' + str(unknown_deps)) + + direct_deps_config_paths = [ + c for c in possible_deps_config_paths if not c in unknown_deps] + + deps = Deps(direct_deps_config_paths) + direct_library_deps = deps.Direct('java_library') + all_library_deps = deps.All('java_library') + + direct_resources_deps = deps.Direct('android_resources') + all_resources_deps = deps.All('android_resources') + # Resources should be ordered with the highest-level dependency first so that + # overrides are done correctly. + all_resources_deps.reverse() + + if options.type == 'android_apk' and options.tested_apk_config: + tested_apk_deps = Deps([options.tested_apk_config]) + tested_apk_resources_deps = tested_apk_deps.All('android_resources') + all_resources_deps = [ + d for d in all_resources_deps if not d in tested_apk_resources_deps] + + # Initialize some common config. 
+ config = { + 'deps_info': { + 'name': os.path.basename(options.build_config), + 'path': options.build_config, + 'type': options.type, + 'deps_configs': direct_deps_config_paths, + } + } + deps_info = config['deps_info'] + + if options.type == 'java_library' and not options.bypass_platform_checks: + deps_info['requires_android'] = options.requires_android + deps_info['supports_android'] = options.supports_android + + deps_require_android = (all_resources_deps + + [d['name'] for d in all_library_deps if d['requires_android']]) + deps_not_support_android = ( + [d['name'] for d in all_library_deps if not d['supports_android']]) + + if deps_require_android and not options.requires_android: + raise Exception('Some deps require building for the Android platform: ' + + str(deps_require_android)) + + if deps_not_support_android and options.supports_android: + raise Exception('Not all deps support the Android platform: ' + + str(deps_not_support_android)) + + if options.type in ['java_library', 'android_apk']: + javac_classpath = [c['jar_path'] for c in direct_library_deps] + java_full_classpath = [c['jar_path'] for c in all_library_deps] + deps_info['resources_deps'] = [c['path'] for c in all_resources_deps] + deps_info['jar_path'] = options.jar_path + if options.type == 'android_apk' or options.supports_android: + deps_info['dex_path'] = options.dex_path + config['javac'] = { + 'classpath': javac_classpath, + } + config['java'] = { + 'full_classpath': java_full_classpath + } + + if options.type == 'java_library': + # Only resources might have srcjars (normal srcjar targets are listed in + # srcjar_deps). A resource's srcjar contains the R.java file for those + # resources, and (like Android's default build system) we allow a library to + # refer to the resources in any of its dependents. 
+ config['javac']['srcjars'] = [ + c['srcjar'] for c in direct_resources_deps if 'srcjar' in c] + + if options.type == 'android_apk': + # Apks will get their resources srcjar explicitly passed to the java step. + config['javac']['srcjars'] = [] + + if options.type == 'android_resources': + deps_info['resources_zip'] = options.resources_zip + if options.srcjar: + deps_info['srcjar'] = options.srcjar + if options.android_manifest: + manifest = AndroidManifest(options.android_manifest) + deps_info['package_name'] = manifest.GetPackageName() + if options.package_name: + deps_info['package_name'] = options.package_name + if options.r_text: + deps_info['r_text'] = options.r_text + + if options.type == 'android_resources' or options.type == 'android_apk': + config['resources'] = {} + config['resources']['dependency_zips'] = [ + c['resources_zip'] for c in all_resources_deps] + config['resources']['extra_package_names'] = [] + config['resources']['extra_r_text_files'] = [] + + if options.type == 'android_apk': + config['resources']['extra_package_names'] = [ + c['package_name'] for c in all_resources_deps if 'package_name' in c] + config['resources']['extra_r_text_files'] = [ + c['r_text'] for c in all_resources_deps if 'r_text' in c] + + if options.type in ['android_apk', 'deps_dex']: + deps_dex_files = [c['dex_path'] for c in all_library_deps] + + # An instrumentation test apk should exclude the dex files that are in the apk + # under test. 
+ if options.type == 'android_apk' and options.tested_apk_config: + tested_apk_deps = Deps([options.tested_apk_config]) + tested_apk_library_deps = tested_apk_deps.All('java_library') + tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps] + deps_dex_files = [ + p for p in deps_dex_files if not p in tested_apk_deps_dex_files] + + tested_apk_config = GetDepConfig(options.tested_apk_config) + expected_tested_package = tested_apk_config['package_name'] + AndroidManifest(options.android_manifest).CheckInstrumentation( + expected_tested_package) + + # Dependencies for the final dex file of an apk or a 'deps_dex'. + if options.type in ['android_apk', 'deps_dex']: + config['final_dex'] = {} + dex_config = config['final_dex'] + # TODO(cjhopman): proguard version + dex_config['dependency_dex_files'] = deps_dex_files + + if options.type == 'android_apk': + config['dist_jar'] = { + 'dependency_jars': [ + c['jar_path'] for c in all_library_deps + ] + } + manifest = AndroidManifest(options.android_manifest) + deps_info['package_name'] = manifest.GetPackageName() + if not options.tested_apk_config and manifest.GetInstrumentation(): + # This must then have instrumentation only for itself. + manifest.CheckInstrumentation(manifest.GetPackageName()) + + library_paths = [] + java_libraries_list = [] + if options.native_libs: + libraries = build_utils.ParseGypList(options.native_libs) + if libraries: + libraries_dir = os.path.dirname(libraries[0]) + write_ordered_libraries.SetReadelfPath(options.readelf_path) + write_ordered_libraries.SetLibraryDirs([libraries_dir]) + all_native_library_deps = ( + write_ordered_libraries.GetSortedTransitiveDependenciesForBinaries( + libraries)) + # Create a java literal array with the "base" library names: + # e.g. 
libfoo.so -> foo + java_libraries_list = '{%s}' % ','.join( + ['"%s"' % s[3:-3] for s in all_native_library_deps]) + library_paths = map( + write_ordered_libraries.FullLibraryPath, all_native_library_deps) + + config['native'] = { + 'libraries': library_paths, + 'java_libraries_list': java_libraries_list + } + + build_utils.WriteJson(config, options.build_config, only_if_changed=True) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + deps.AllConfigPaths() + build_utils.GetPythonDependencies()) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/build/android/gyp/write_ordered_libraries.py b/build/android/gyp/write_ordered_libraries.py new file mode 100755 index 00000000000..0fc9a8ca3ef --- /dev/null +++ b/build/android/gyp/write_ordered_libraries.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Writes dependency ordered list of native libraries. + +The list excludes any Android system libraries, as those are not bundled with +the APK. + +This list of libraries is used for several steps of building an APK. +In the component build, the --input-libraries only needs to be the top-level +library (i.e. libcontent_shell_content_view). This will then use readelf to +inspect the shared libraries and determine the full list of (non-system) +libraries that should be included in the APK. +""" + +# TODO(cjhopman): See if we can expose the list of library dependencies from +# gyp, rather than calculating it ourselves. 
+# http://crbug.com/225558 + +import optparse +import os +import re +import sys + +from util import build_utils + +_readelf = None +_library_dirs = None + +_library_re = re.compile( + '.*NEEDED.*Shared library: \[(?P.+)\]') + + +def SetReadelfPath(path): + global _readelf + _readelf = path + + +def SetLibraryDirs(dirs): + global _library_dirs + _library_dirs = dirs + + +def FullLibraryPath(library_name): + assert _library_dirs is not None + for directory in _library_dirs: + path = '%s/%s' % (directory, library_name) + if os.path.exists(path): + return path + return library_name + + +def IsSystemLibrary(library_name): + # If the library doesn't exist in the libraries directory, assume that it is + # an Android system library. + return not os.path.exists(FullLibraryPath(library_name)) + + +def CallReadElf(library_or_executable): + assert _readelf is not None + readelf_cmd = [_readelf, + '-d', + FullLibraryPath(library_or_executable)] + return build_utils.CheckOutput(readelf_cmd) + + +def GetDependencies(library_or_executable): + elf = CallReadElf(library_or_executable) + return set(_library_re.findall(elf)) + + +def GetNonSystemDependencies(library_name): + all_deps = GetDependencies(library_name) + return set((lib for lib in all_deps if not IsSystemLibrary(lib))) + + +def GetSortedTransitiveDependencies(libraries): + """Returns all transitive library dependencies in dependency order.""" + return build_utils.GetSortedTransitiveDependencies( + libraries, GetNonSystemDependencies) + + +def GetSortedTransitiveDependenciesForBinaries(binaries): + if binaries[0].endswith('.so'): + libraries = [os.path.basename(lib) for lib in binaries] + else: + assert len(binaries) == 1 + all_deps = GetDependencies(binaries[0]) + libraries = [lib for lib in all_deps if not IsSystemLibrary(lib)] + + return GetSortedTransitiveDependencies(libraries) + + +def main(): + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + + parser.add_option('--input-libraries', + 
def main():
  """Computes the ordered transitive library list and writes it as JSON.

  Reads --input-libraries, resolves their transitive non-system dependencies
  via readelf, and writes libraries / lib_paths / java_libraries_list to the
  --output JSON file.
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option('--input-libraries',
                    help='A list of top-level input libraries.')
  parser.add_option('--libraries-dir',
                    help='The directory which contains shared libraries.')
  parser.add_option('--readelf', help='Path to the readelf binary.')
  parser.add_option('--output', help='Path to the generated .json file.')
  parser.add_option('--stamp', help='Path to touch on success.')

  options, _ = parser.parse_args()

  SetReadelfPath(options.readelf)
  SetLibraryDirs(options.libraries_dir.split(','))

  libraries = build_utils.ParseGypList(options.input_libraries)
  # Idiom fix: truthiness test instead of len() — an empty list stays empty.
  if libraries:
    libraries = GetSortedTransitiveDependenciesForBinaries(libraries)

  # Convert to "base" library names: e.g. libfoo.so -> foo
  java_libraries_list = (
      '{%s}' % ','.join(['"%s"' % s[3:-3] for s in libraries]))

  out_json = {
      'libraries': libraries,
      'lib_paths': [FullLibraryPath(l) for l in libraries],
      'java_libraries_list': java_libraries_list
  }
  build_utils.WriteJson(
      out_json,
      options.output,
      only_if_changed=True)

  if options.stamp:
    build_utils.Touch(options.stamp)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        libraries + build_utils.GetPythonDependencies())


if __name__ == '__main__':
  sys.exit(main())
+""" + +import optparse +import sys + +from util import build_utils + +def main(): + parser = optparse.OptionParser() + parser.add_option('--input-dir', help='Directory of files to archive.') + parser.add_option('--output', help='Path to output archive.') + options, _ = parser.parse_args() + + inputs = build_utils.FindInDirectory(options.input_dir, '*') + build_utils.DoZip(inputs, options.output, options.input_dir) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/host_heartbeat.py b/build/android/host_heartbeat.py new file mode 100755 index 00000000000..6a7cdd1d8e6 --- /dev/null +++ b/build/android/host_heartbeat.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Sends a heart beat pulse to the currently online Android devices. +This heart beat lets the devices know that they are connected to a host. +""" +# pylint: disable=W0702 + +import sys +import time + +from pylib.device import device_utils + +PULSE_PERIOD = 20 + +def main(): + while True: + try: + devices = device_utils.DeviceUtils.HealthyDevices() + for d in devices: + d.RunShellCommand(['touch', '/sdcard/host_heartbeat'], + check_return=True) + except: + # Keep the heatbeat running bypassing all errors. + pass + time.sleep(PULSE_PERIOD) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/increase_size_for_speed.gypi b/build/android/increase_size_for_speed.gypi new file mode 100644 index 00000000000..48d17f5f19c --- /dev/null +++ b/build/android/increase_size_for_speed.gypi @@ -0,0 +1,42 @@ +# Copyright (c) 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included to optimize a target for speed +# rather than for size on Android. 
+# This is used in some carefully tailored targets and is not meant +# to be included everywhere. Before adding the template to another target, +# please ask in chromium-dev@. See crbug.com/411909 + +{ + 'configurations': { + 'Release': { + 'target_conditions': [ + ['_toolset=="target"', { + 'conditions': [ + ['OS=="android"', { + 'cflags!': ['-Os'], + 'cflags': ['-O2'], + }], + # Do not merge -Os and -O2 in LTO. + # LTO merges all optimization options at link-time. -O2 takes + # precedence over -Os. Avoid using LTO simultaneously + # on -Os and -O2 parts for that reason. + ['OS=="android" and use_lto==1', { + 'cflags!': [ + '-flto', + '-ffat-lto-objects', + ], + }], + ['OS=="android" and use_lto_o2==1', { + 'cflags': [ + '-flto', + '-ffat-lto-objects', + ], + }], + ], + }], + ], + }, + }, +} diff --git a/build/android/insert_chromium_version.gypi b/build/android/insert_chromium_version.gypi new file mode 100644 index 00000000000..a6ff9081086 --- /dev/null +++ b/build/android/insert_chromium_version.gypi @@ -0,0 +1,53 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to provide a rule that +# inserts a chromium version string into native libraries. 
+# +# To use this, create a gyp target with the following form: +# { +# 'action_name': 'insert_chromium_version', +# 'actions': [ +# 'variables': { +# 'ordered_libraries_file': 'file generated by write_ordered_libraries' +# 'stripped_libraries_dir': 'the directory contains native libraries' +# 'input_paths': 'files to be added to the list of inputs' +# 'stamp': 'file to touch when the action is complete' +# 'version_string': 'chromium version string to be inserted' +# 'includes': [ '../../build/android/insert_chromium_version.gypi' ], +# ], +# }, +# + +{ + 'message': 'Inserting chromium version string into native libraries', + 'variables': { + 'input_paths': [], + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/insert_chromium_version.py', + '<(ordered_libraries_file)', + '>@(input_paths)', + ], + 'outputs': [ + '<(stamp)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/insert_chromium_version.py', + '--android-objcopy=<(android_objcopy)', + '--stripped-libraries-dir=<(stripped_libraries_dir)', + '--libraries=@FileArg(<(ordered_libraries_file):libraries)', + '--version-string=<(version_string)', + '--stamp=<(stamp)', + ], + 'conditions': [ + ['component == "shared_library"', { + # Add a fake output to force the build to always re-run this step. This + # is required because the real inputs are not known at gyp-time and + # changing base.so may not trigger changes to dependent libraries. + 'outputs': [ '<(stamp).fake' ] + }], + ], +} diff --git a/build/android/install_emulator_deps.py b/build/android/install_emulator_deps.py new file mode 100755 index 00000000000..82d1c751373 --- /dev/null +++ b/build/android/install_emulator_deps.py @@ -0,0 +1,277 @@ +#!/usr/bin/env python +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Installs deps for using SDK emulator for testing. 
def CheckSDK():
  """Reports whether the emulator SDK has already been fetched.

  Returns:
    True if the emulator SDK directory (src/android_emulator_sdk/) exists.
  """
  sdk_root = constants.EMULATOR_SDK_ROOT
  return os.path.exists(sdk_root)
+ """ + android_binary = os.path.join(constants.EMULATOR_SDK_ROOT, + 'sdk', 'tools', 'android') + pattern = re.compile('id: [0-9]+ or "android-%d"' % api_level) + try: + exit_code, stdout = cmd_helper.GetCmdStatusAndOutput( + [android_binary, 'list']) + if exit_code != 0: + raise Exception('\'android list\' command failed') + for line in stdout.split('\n'): + if pattern.match(line): + return True + return False + except OSError: + logging.exception('Unable to execute \'android list\'') + return False + + +def CheckX86Image(api_level=DEFAULT_ANDROID_API_LEVEL): + """Check if Android system images have been installed. + + Args: + api_level: the Android API level to check for; defaults to the latest API. + + Returns: + True if sdk/system-images/android-/x86 exists inside + EMULATOR_SDK_ROOT. + """ + api_target = 'android-%d' % api_level + return os.path.exists(os.path.join(constants.EMULATOR_SDK_ROOT, + 'sdk', 'system-images', + api_target, 'x86')) + + +def CheckKVM(): + """Quickly check whether KVM is enabled. + + Returns: + True iff /dev/kvm exists (Linux only). + """ + return os.path.exists('/dev/kvm') + + +def RunKvmOk(): + """Run kvm-ok as root to check that KVM is properly enabled after installation + of the required packages. + + Returns: + True iff KVM is enabled (/dev/kvm exists). On failure, returns False + but also print detailed information explaining why KVM isn't enabled + (e.g. CPU doesn't support it, or BIOS disabled it). + """ + try: + # Note: kvm-ok is in /usr/sbin, so always use 'sudo' to run it. + return not cmd_helper.RunCmd(['sudo', 'kvm-ok']) + except OSError: + logging.info('kvm-ok not installed') + return False + + +def GetSDK(): + """Download the SDK and unzip it into EMULATOR_SDK_ROOT.""" + logging.info('Download Android SDK.') + sdk_url = '%s/%s' % (SDK_BASE_URL, SDK_ZIP) + try: + cmd_helper.RunCmd(['curl', '-o', '/tmp/sdk.zip', sdk_url]) + print 'curled unzipping...' 
def InstallKVM():
  """Installs and loads the KVM packages, logging critical errors on failure."""
  if cmd_helper.RunCmd(['sudo', 'apt-get', 'install', 'kvm']):
    logging.critical('ERROR: Did not install KVM. Make sure hardware '
                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
                     'AMD SVM).')
  # TODO(navabi): Use modprobe kvm-amd on AMD processors.
  if cmd_helper.RunCmd(['sudo', 'modprobe', 'kvm-intel']):
    logging.critical('ERROR: Did not add KVM module to Linux Kernel. Make sure '
                     'hardware virtualization is enabled in BIOS.')
  # Now check to ensure KVM acceleration can be used.
  if not RunKvmOk():
    logging.critical('ERROR: Can not use KVM acceleration. Make sure hardware '
                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
                     'AMD SVM).')
+ """ + logging.info('Download x86 system image directory into sdk directory.') + # TODO(andrewhayden): Use python tempfile lib instead + temp_file = '/tmp/x86_img_android-%d.zip' % api_level + if api_level not in X86_IMG_URLS: + raise Exception('ERROR: no URL known for x86 image for android-%s' % + api_level) + try: + cmd_helper.RunCmd(['curl', '-o', temp_file, X86_IMG_URLS[api_level]]) + rc = cmd_helper.RunCmd(['unzip', '-o', temp_file, '-d', '/tmp/']) + if rc: + raise Exception('ERROR: Could not download/unzip image zip.') + api_target = 'android-%d' % api_level + sys_imgs = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk', + 'system-images', api_target, 'x86') + logging.info('Deploying system image to %s' % sys_imgs) + shutil.move('/tmp/x86', sys_imgs) + finally: + os.unlink(temp_file) + + +def GetSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL): + """Update the SDK to include the platform specified. + + Args: + api_level: the Android API level to download + """ + android_binary = os.path.join(constants.EMULATOR_SDK_ROOT, + 'sdk', 'tools', 'android') + pattern = re.compile( + r'\s*([0-9]+)- SDK Platform Android [\.,0-9]+, API %d.*' % api_level) + # Example: + # 2- SDK Platform Android 4.3, API 18, revision 2 + exit_code, stdout = cmd_helper.GetCmdStatusAndOutput( + [android_binary, 'list', 'sdk']) + if exit_code != 0: + raise Exception('\'android list sdk\' command return %d' % exit_code) + for line in stdout.split('\n'): + match = pattern.match(line) + if match: + index = match.group(1) + print 'package %s corresponds to platform level %d' % (index, api_level) + # update sdk --no-ui --filter $INDEX + update_command = [android_binary, + 'update', 'sdk', '--no-ui', '--filter', index] + update_command_str = ' '.join(update_command) + logging.info('running update command: %s' % update_command_str) + update_process = pexpect.spawn(update_command_str) + # TODO(andrewhayden): Do we need to bug the user about this? 
def main(argv):
  """Installs the emulator SDK, target platform, x86 image and KVM as needed.

  Each step is skipped if the corresponding component is already present.
  """
  opt_parser = optparse.OptionParser(
      description='Install dependencies for running the Android emulator')
  opt_parser.add_option('--api-level', dest='api_level',
      help='The API level (e.g., 19 for Android 4.4) to ensure is available',
      type='int', default=DEFAULT_ANDROID_API_LEVEL)
  opt_parser.add_option('-v', dest='verbose', action='store_true',
                        help='enable verbose logging')
  options, _ = opt_parser.parse_args(argv[1:])

  # run_tests_helper will set logging to INFO or DEBUG.
  # We achieve verbose output by configuring it with 2 (==DEBUG).
  verbosity = 2 if options.verbose else 1
  logging.basicConfig(level=logging.INFO,
                      format='# %(asctime)-15s: %(message)s')
  run_tests_helper.SetLogLevel(verbose_count=verbosity)

  # Calls below will download emulator SDK and/or system images only if needed.
  if CheckSDK():
    logging.info('android_emulator_sdk/ already exists, skipping download.')
  else:
    GetSDK()

  # Check target. The target has to be installed in order to run the emulator.
  # Idiom fix throughout: lazy %-args to logging instead of eager
  # string interpolation with the % operator.
  if CheckSDKPlatform(options.api_level):
    logging.info('SDK platform android-%d already present, skipping.',
                 options.api_level)
  else:
    logging.info('SDK platform android-%d not present, installing.',
                 options.api_level)
    GetSDKPlatform(options.api_level)

  # Download the x86 system image only if needed.
  if CheckX86Image(options.api_level):
    logging.info('x86 image for android-%d already present, skipping.',
                 options.api_level)
  else:
    GetX86Image(options.api_level)

  # Make sure KVM packages are installed and enabled.
  if CheckKVM():
    logging.info('KVM already installed and enabled.')
  else:
    InstallKVM()


if __name__ == '__main__':
  sys.exit(main(sys.argv))
100644 index 00000000000..d4abafa812f --- /dev/null +++ b/build/android/java_cpp_enum.gypi @@ -0,0 +1,64 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into a target to provide an action +# to generate Java source files from a C++ header file containing annotated +# enum definitions using a Python script. +# +# To use this, create a gyp target with the following form: +# { +# 'target_name': 'bitmap_format_java', +# 'type': 'none', +# 'variables': { +# 'source_file': 'ui/android/bitmap_format.h', +# }, +# 'includes': [ '../build/android/java_cpp_enum.gypi' ], +# }, +# +# Then have the gyp target which compiles the java code depend on the newly +# created target. + +{ + 'variables': { + # Location where all generated Java sources will be placed. + 'output_dir': '<(SHARED_INTERMEDIATE_DIR)/enums/<(_target_name)', + 'generator_path': '<(DEPTH)/build/android/gyp/java_cpp_enum.py', + 'generator_args': '<(output_dir) <(source_file)', + }, + 'direct_dependent_settings': { + 'variables': { + # Ensure that the output directory is used in the class path + # when building targets that depend on this one. + 'generated_src_dirs': [ + '<(output_dir)/', + ], + # Ensure that the targets depending on this one are rebuilt if the sources + # of this one are modified. + 'additional_input_paths': [ + '<(source_file)', + ], + }, + }, + 'actions': [ + { + 'action_name': 'generate_java_constants', + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(generator_path)', + '<(source_file)', + ], + 'outputs': [ + # This is the main reason this is an action and not a rule. Gyp doesn't + # properly expand RULE_INPUT_PATH here and so it's impossible to + # calculate the list of outputs. + '//ClassName.java. 
The files which +# template dependents on and typically included by the template should be listed +# in template_deps variables. Any change to them will force a rebuild of +# the template, and hence of any source that depends on it. +# + +{ + # Location where all generated Java sources will be placed. + 'variables': { + 'include_path%': '<(DEPTH)', + 'output_dir': '<(SHARED_INTERMEDIATE_DIR)/templates/<(_target_name)/<(package_name)', + }, + 'direct_dependent_settings': { + 'variables': { + # Ensure that the output directory is used in the class path + # when building targets that depend on this one. + 'generated_src_dirs': [ + '<(output_dir)/', + ], + # Ensure dependents are rebuilt when sources for this rule change. + 'additional_input_paths': [ + '<@(_sources)', + '<@(template_deps)', + ], + }, + }, + # Define a single rule that will be apply to each .template file + # listed in 'sources'. + 'rules': [ + { + 'rule_name': 'generate_java_constants', + 'extension': 'template', + # Set template_deps as additional dependencies. + 'variables': { + 'output_path': '<(output_dir)/<(RULE_INPUT_ROOT).java', + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/gcc_preprocess.py', + '<@(template_deps)' + ], + 'outputs': [ + '<(output_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/gcc_preprocess.py', + '--include-path=<(include_path)', + '--output=<(output_path)', + '--template=<(RULE_INPUT_PATH)', + ], + 'message': 'Generating Java from cpp template <(RULE_INPUT_PATH)', + } + ], +} diff --git a/build/android/jinja_template.gypi b/build/android/jinja_template.gypi new file mode 100644 index 00000000000..9c49360dc6e --- /dev/null +++ b/build/android/jinja_template.gypi @@ -0,0 +1,85 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +# This file is meant to be included into a target to process one or more +# Jinja templates. +# +# To process a single template file, create a gyp target with the following +# form: +# { +# 'target_name': 'chrome_shell_manifest', +# 'type': 'none', +# 'variables': { +# 'jinja_inputs': ['android/shell/java/AndroidManifest.xml'], +# 'jinja_output': '<(SHARED_INTERMEDIATE_DIR)/chrome_shell_manifest/AndroidManifest.xml', +# 'jinja_variables': ['app_name=ChromeShell'], +# }, +# 'includes': [ '../build/android/jinja_template.gypi' ], +# }, +# +# To process multiple template files and package the results into a zip file, +# create a gyp target with the following form: +# { +# 'target_name': 'chrome_template_resources', +# 'type': 'none', +# 'variables': { +# 'jinja_inputs_base_dir': 'android/shell/java/res_template', +# 'jinja_inputs': [ +# '<(jinja_inputs_base_dir)/xml/searchable.xml', +# '<(jinja_inputs_base_dir)/xml/syncadapter.xml', +# ], +# 'jinja_outputs_zip': '<(PRODUCT_DIR)/res.java/<(_target_name).zip', +# 'jinja_variables': ['app_name=ChromeShell'], +# }, +# 'includes': [ '../build/android/jinja_template.gypi' ], +# }, +# + +{ + 'actions': [ + { + 'action_name': '<(_target_name)_jinja_template', + 'message': 'processing jinja template', + 'variables': { + 'jinja_output%': '', + 'jinja_outputs_zip%': '', + 'jinja_inputs_base_dir%': '', + 'jinja_includes%': [], + 'jinja_variables%': [], + 'jinja_args': [], + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/jinja_template.py', + '<@(jinja_inputs)', + '<@(jinja_includes)', + ], + 'conditions': [ + ['jinja_output != ""', { + 'outputs': [ '<(jinja_output)' ], + 'variables': { + 'jinja_args': ['--output', '<(jinja_output)'], + }, + }], + ['jinja_outputs_zip != ""', { + 'outputs': [ '<(jinja_outputs_zip)' ], + 'variables': { + 'jinja_args': ['--outputs-zip', '<(jinja_outputs_zip)'], + }, + }], + ['jinja_inputs_base_dir != ""', { + 'variables': { + 'jinja_args': 
['--inputs-base-dir', '<(jinja_inputs_base_dir)'], + }, + }], + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/jinja_template.py', + '--inputs', '<(jinja_inputs)', + '--variables', '<(jinja_variables)', + '<@(jinja_args)', + ], + }, + ], +} diff --git a/build/android/lighttpd_server.py b/build/android/lighttpd_server.py new file mode 100755 index 00000000000..a5195ac815e --- /dev/null +++ b/build/android/lighttpd_server.py @@ -0,0 +1,256 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Provides a convenient wrapper for spawning a test lighttpd instance. + +Usage: + lighttpd_server PATH_TO_DOC_ROOT +""" + +import codecs +import contextlib +import httplib +import os +import random +import shutil +import socket +import subprocess +import sys +import tempfile +import time + +from pylib import constants +from pylib import pexpect + +class LighttpdServer(object): + """Wraps lighttpd server, providing robust startup. + + Args: + document_root: Path to root of this server's hosted files. + port: TCP port on the _host_ machine that the server will listen on. If + ommitted it will attempt to use 9000, or if unavailable it will find + a free port from 8001 - 8999. + lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries. + base_config_path: If supplied this file will replace the built-in default + lighttpd config file. + extra_config_contents: If specified, this string will be appended to the + base config (default built-in, or from base_config_path). + config_path, error_log, access_log: Optional paths where the class should + place temprary files for this session. 
+ """ + + def __init__(self, document_root, port=None, + lighttpd_path=None, lighttpd_module_path=None, + base_config_path=None, extra_config_contents=None, + config_path=None, error_log=None, access_log=None): + self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android') + self.document_root = os.path.abspath(document_root) + self.fixed_port = port + self.port = port or constants.LIGHTTPD_DEFAULT_PORT + self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999)) + self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd' + self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd' + self.base_config_path = base_config_path + self.extra_config_contents = extra_config_contents + self.config_path = config_path or self._Mktmp('config') + self.error_log = error_log or self._Mktmp('error_log') + self.access_log = access_log or self._Mktmp('access_log') + self.pid_file = self._Mktmp('pid_file') + self.process = None + + def _Mktmp(self, name): + return os.path.join(self.temp_dir, name) + + @staticmethod + def _GetRandomPort(): + # The ports of test server is arranged in constants.py. + return random.randint(constants.LIGHTTPD_RANDOM_PORT_FIRST, + constants.LIGHTTPD_RANDOM_PORT_LAST) + + def StartupHttpServer(self): + """Starts up a http server with specified document root and port.""" + # If we want a specific port, make sure no one else is listening on it. 
+ if self.fixed_port: + self._KillProcessListeningOnPort(self.fixed_port) + while True: + if self.base_config_path: + # Read the config + with codecs.open(self.base_config_path, 'r', 'utf-8') as f: + config_contents = f.read() + else: + config_contents = self._GetDefaultBaseConfig() + if self.extra_config_contents: + config_contents += self.extra_config_contents + # Write out the config, filling in placeholders from the members of |self| + with codecs.open(self.config_path, 'w', 'utf-8') as f: + f.write(config_contents % self.__dict__) + if (not os.path.exists(self.lighttpd_path) or + not os.access(self.lighttpd_path, os.X_OK)): + raise EnvironmentError( + 'Could not find lighttpd at %s.\n' + 'It may need to be installed (e.g. sudo apt-get install lighttpd)' + % self.lighttpd_path) + self.process = pexpect.spawn(self.lighttpd_path, + ['-D', '-f', self.config_path, + '-m', self.lighttpd_module_path], + cwd=self.temp_dir) + client_error, server_error = self._TestServerConnection() + if not client_error: + assert int(open(self.pid_file, 'r').read()) == self.process.pid + break + self.process.close() + + if self.fixed_port or not 'in use' in server_error: + print 'Client error:', client_error + print 'Server error:', server_error + return False + self.port = self._GetRandomPort() + return True + + def ShutdownHttpServer(self): + """Shuts down our lighttpd processes.""" + if self.process: + self.process.terminate() + shutil.rmtree(self.temp_dir, ignore_errors=True) + + def _TestServerConnection(self): + # Wait for server to start + server_msg = '' + for timeout in xrange(1, 5): + client_error = None + try: + with contextlib.closing(httplib.HTTPConnection( + '127.0.0.1', self.port, timeout=timeout)) as http: + http.set_debuglevel(timeout > 3) + http.request('HEAD', '/') + r = http.getresponse() + r.read() + if (r.status == 200 and r.reason == 'OK' and + r.getheader('Server') == self.server_tag): + return (None, server_msg) + client_error = ('Bad response: %s %s version 
%s\n ' % + (r.status, r.reason, r.version) + + '\n '.join([': '.join(h) for h in r.getheaders()])) + except (httplib.HTTPException, socket.error) as client_error: + pass # Probably too quick connecting: try again + # Check for server startup error messages + ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'], + timeout=timeout) + if ix == 2: # stdout spew from the server + server_msg += self.process.match.group(0) + elif ix == 1: # EOF -- server has quit so giveup. + client_error = client_error or 'Server exited' + break + return (client_error or 'Timeout', server_msg) + + @staticmethod + def _KillProcessListeningOnPort(port): + """Checks if there is a process listening on port number |port| and + terminates it if found. + + Args: + port: Port number to check. + """ + if subprocess.call(['fuser', '-kv', '%d/tcp' % port]) == 0: + # Give the process some time to terminate and check that it is gone. + time.sleep(2) + assert subprocess.call(['fuser', '-v', '%d/tcp' % port]) != 0, \ + 'Unable to kill process listening on port %d.' % port + + @staticmethod + def _GetDefaultBaseConfig(): + return """server.tag = "%(server_tag)s" +server.modules = ( "mod_access", + "mod_accesslog", + "mod_alias", + "mod_cgi", + "mod_rewrite" ) + +# default document root required +#server.document-root = "." 
+ +# files to check for if .../ is requested +index-file.names = ( "index.php", "index.pl", "index.cgi", + "index.html", "index.htm", "default.htm" ) +# mimetype mapping +mimetype.assign = ( + ".gif" => "image/gif", + ".jpg" => "image/jpeg", + ".jpeg" => "image/jpeg", + ".png" => "image/png", + ".svg" => "image/svg+xml", + ".css" => "text/css", + ".html" => "text/html", + ".htm" => "text/html", + ".xhtml" => "application/xhtml+xml", + ".xhtmlmp" => "application/vnd.wap.xhtml+xml", + ".js" => "application/x-javascript", + ".log" => "text/plain", + ".conf" => "text/plain", + ".text" => "text/plain", + ".txt" => "text/plain", + ".dtd" => "text/xml", + ".xml" => "text/xml", + ".manifest" => "text/cache-manifest", + ) + +# Use the "Content-Type" extended attribute to obtain mime type if possible +mimetype.use-xattr = "enable" + +## +# which extensions should not be handle via static-file transfer +# +# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi +static-file.exclude-extensions = ( ".php", ".pl", ".cgi" ) + +server.bind = "127.0.0.1" +server.port = %(port)s + +## virtual directory listings +dir-listing.activate = "enable" +#dir-listing.encoding = "iso-8859-2" +#dir-listing.external-css = "style/oldstyle.css" + +## enable debugging +#debug.log-request-header = "enable" +#debug.log-response-header = "enable" +#debug.log-request-handling = "enable" +#debug.log-file-not-found = "enable" + +#### SSL engine +#ssl.engine = "enable" +#ssl.pemfile = "server.pem" + +# Autogenerated test-specific config follows. 
+ +cgi.assign = ( ".cgi" => "/usr/bin/env", + ".pl" => "/usr/bin/env", + ".asis" => "/bin/cat", + ".php" => "/usr/bin/php-cgi" ) + +server.errorlog = "%(error_log)s" +accesslog.filename = "%(access_log)s" +server.upload-dirs = ( "/tmp" ) +server.pid-file = "%(pid_file)s" +server.document-root = "%(document_root)s" + +""" + + +def main(argv): + server = LighttpdServer(*argv[1:]) + try: + if server.StartupHttpServer(): + raw_input('Server running at http://127.0.0.1:%s -' + ' press Enter to exit it.' % server.port) + else: + print 'Server exit code:', server.process.exitstatus + finally: + server.ShutdownHttpServer() + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/build/android/lint/OWNERS b/build/android/lint/OWNERS new file mode 100644 index 00000000000..cd396e7e571 --- /dev/null +++ b/build/android/lint/OWNERS @@ -0,0 +1,2 @@ +newt@chromium.org +aurimas@chromium.org diff --git a/build/android/lint/suppress.py b/build/android/lint/suppress.py new file mode 100755 index 00000000000..52d7579b964 --- /dev/null +++ b/build/android/lint/suppress.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python +# +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Add all generated lint_result.xml files to suppressions.xml""" + + +import collections +import optparse +import os +import sys +from xml.dom import minidom + +_BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..') +sys.path.append(_BUILD_ANDROID_DIR) + +from pylib import constants + + +_THIS_FILE = os.path.abspath(__file__) +_CONFIG_PATH = os.path.join(os.path.dirname(_THIS_FILE), 'suppressions.xml') +_DOC = ( + '\nSTOP! 
It looks like you want to suppress some lint errors:\n' + '- Have you tried identifing the offending patch?\n' + ' Ask the author for a fix and/or revert the patch.\n' + '- It is preferred to add suppressions in the code instead of\n' + ' sweeping it under the rug here. See:\n\n' + ' http://developer.android.com/tools/debugging/improving-w-lint.html\n' + '\n' + 'Still reading?\n' + '- You can edit this file manually to suppress an issue\n' + ' globally if it is not applicable to the project.\n' + '- You can also automatically add issues found so for in the\n' + ' build process by running:\n\n' + ' ' + os.path.relpath(_THIS_FILE, constants.DIR_SOURCE_ROOT) + '\n\n' + ' which will generate this file (Comments are not preserved).\n' + ' Note: PRODUCT_DIR will be substituted at run-time with actual\n' + ' directory path (e.g. out/Debug)\n' +) + + +_Issue = collections.namedtuple('Issue', ['severity', 'paths']) + + +def _ParseConfigFile(config_path): + print 'Parsing %s' % config_path + issues_dict = {} + dom = minidom.parse(config_path) + for issue in dom.getElementsByTagName('issue'): + issue_id = issue.attributes['id'].value + severity = issue.getAttribute('severity') + paths = set( + [p.attributes['path'].value for p in + issue.getElementsByTagName('ignore')]) + issues_dict[issue_id] = _Issue(severity, paths) + return issues_dict + + +def _ParseAndMergeResultFile(result_path, issues_dict): + print 'Parsing and merging %s' % result_path + dom = minidom.parse(result_path) + for issue in dom.getElementsByTagName('issue'): + issue_id = issue.attributes['id'].value + severity = issue.attributes['severity'].value + path = issue.getElementsByTagName('location')[0].attributes['file'].value + if issue_id not in issues_dict: + issues_dict[issue_id] = _Issue(severity, set()) + issues_dict[issue_id].paths.add(path) + + +def _WriteConfigFile(config_path, issues_dict): + new_dom = minidom.getDOMImplementation().createDocument(None, 'lint', None) + top_element = 
new_dom.documentElement + top_element.appendChild(new_dom.createComment(_DOC)) + for issue_id in sorted(issues_dict.keys()): + severity = issues_dict[issue_id].severity + paths = issues_dict[issue_id].paths + issue = new_dom.createElement('issue') + issue.attributes['id'] = issue_id + if severity: + issue.attributes['severity'] = severity + if severity == 'ignore': + print 'Warning: [%s] is suppressed globally.' % issue_id + else: + for path in sorted(paths): + ignore = new_dom.createElement('ignore') + ignore.attributes['path'] = path + issue.appendChild(ignore) + top_element.appendChild(issue) + + with open(config_path, 'w') as f: + f.write(new_dom.toprettyxml(indent=' ', encoding='utf-8')) + print 'Updated %s' % config_path + + +def _Suppress(config_path, result_path): + issues_dict = _ParseConfigFile(config_path) + _ParseAndMergeResultFile(result_path, issues_dict) + _WriteConfigFile(config_path, issues_dict) + + +def main(): + parser = optparse.OptionParser(usage='%prog RESULT-FILE') + _, args = parser.parse_args() + + if len(args) != 1 or not os.path.exists(args[0]): + parser.error('Must provide RESULT-FILE') + + _Suppress(_CONFIG_PATH, args[0]) + + +if __name__ == '__main__': + main() diff --git a/build/android/lint/suppressions.xml b/build/android/lint/suppressions.xml new file mode 100644 index 00000000000..cb77c1f1e1f --- /dev/null +++ b/build/android/lint/suppressions.xml @@ -0,0 +1,111 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/build/android/lint_action.gypi b/build/android/lint_action.gypi new file mode 100644 index 00000000000..e1adf1f842c --- /dev/null +++ b/build/android/lint_action.gypi @@ -0,0 +1,43 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +# This file is meant to be included into an action to provide a rule to +# run lint on java/class files. + +{ + 'action_name': 'lint_<(_target_name)', + 'message': 'Linting <(_target_name)', + 'variables': { + 'conditions': [ + ['chromium_code != 0 and android_lint != 0 and never_lint == 0', { + 'is_enabled': '--enable', + }, { + 'is_enabled': '', + }] + ], + 'android_manifest_path%': '<(DEPTH)/build/android/AndroidManifest.xml', + 'resource_dir%': '<(DEPTH)/build/android/ant/empty/res', + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/lint.py', + '<(DEPTH)/build/android/lint/suppressions.xml', + '<(lint_jar_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/lint.py', + '--lint-path=<(android_sdk_root)/tools/lint', + '--config-path=<(DEPTH)/build/android/lint/suppressions.xml', + '--processed-config-path=<(config_path)', + '--manifest-path=<(android_manifest_path)', + '--result-path=<(result_path)', + '--resource-dir=<(resource_dir)', + '--product-dir=<(PRODUCT_DIR)', + '--src-dirs=>(src_dirs)', + '--jar-path=<(lint_jar_path)', + '--can-fail-build', + '--stamp=<(stamp_path)', + '<(is_enabled)', + ], +} diff --git a/build/android/locale_pak_resources.gypi b/build/android/locale_pak_resources.gypi new file mode 100644 index 00000000000..6f8e56f8f15 --- /dev/null +++ b/build/android/locale_pak_resources.gypi @@ -0,0 +1,52 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Creates a resources.zip with locale.pak files placed into appropriate +# resource configs (e.g. en-GB.pak -> res/raw-en/en_gb.pak). Also generates +# a locale_paks TypedArray so that resource files can be enumerated at runtime. +# +# If this target is included in the deps of an android resources/library/apk, +# the resources will be included with that target. 
+# +# Variables: +# locale_pak_files - List of .pak files to process. +# Names must be of the form "en.pak" or "en-US.pak". +# +# Example +# { +# 'target_name': 'my_locale_resources', +# 'type': 'none', +# 'variables': { +# 'locale_paks_files': ['path1/fr.pak'], +# }, +# 'includes': [ '../build/android/locale_pak_resources.gypi' ], +# }, +# +{ + 'variables': { + 'resources_zip_path': '<(PRODUCT_DIR)/res.java/<(_target_name).zip', + }, + 'all_dependent_settings': { + 'variables': { + 'additional_input_paths': ['<(resources_zip_path)'], + 'dependencies_res_zip_paths': ['<(resources_zip_path)'], + }, + }, + 'actions': [{ + 'action_name': '<(_target_name)_locale_pak_resources', + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/locale_pak_resources.py', + '<@(locale_pak_files)', + ], + 'outputs': [ + '<(resources_zip_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/locale_pak_resources.py', + '--locale-paks', '<(locale_pak_files)', + '--resources-zip', '<(resources_zip_path)', + ], + }], +} diff --git a/build/android/method_count.py b/build/android/method_count.py new file mode 100755 index 00000000000..93250b5c542 --- /dev/null +++ b/build/android/method_count.py @@ -0,0 +1,55 @@ +#! /usr/bin/env python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import argparse +import os +import re +import sys + +from pylib import constants +from pylib.sdk import dexdump + +sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib', + 'common')) +import perf_tests_results_helper + + +_METHOD_IDS_SIZE_RE = re.compile(r'^method_ids_size +: +(\d+)$') + +def MethodCount(dexfile): + for line in dexdump.DexDump(dexfile, file_summary=True): + m = _METHOD_IDS_SIZE_RE.match(line) + if m: + return m.group(1) + raise Exception('"method_ids_size" not found in dex dump of %s' % dexfile) + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + '--apk-name', help='Name of the APK to which the dexfile corresponds.') + parser.add_argument('dexfile') + + args = parser.parse_args() + + if not args.apk_name: + dirname, basename = os.path.split(args.dexfile) + while basename: + if 'apk' in basename: + args.apk_name = basename + break + dirname, basename = os.path.split(dirname) + else: + parser.error( + 'Unable to determine apk name from %s, ' + 'and --apk-name was not provided.' % args.dexfile) + + method_count = MethodCount(args.dexfile) + perf_tests_results_helper.PrintPerfResult( + '%s_methods' % args.apk_name, 'total', [method_count], 'methods') + return 0 + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/build/android/native_app_dependencies.gypi b/build/android/native_app_dependencies.gypi new file mode 100644 index 00000000000..6032274dbba --- /dev/null +++ b/build/android/native_app_dependencies.gypi @@ -0,0 +1,67 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into a target to provide a rule +# to strip and place dependent shared libraries required by a native binary in a +# single folder that can later be pushed to the device. +# +# NOTE: consider packaging your binary as an apk instead of running a native +# library. 
+# +# To use this, create a gyp target with the following form: +# { +# 'target_name': 'target_that_depends_on_my_binary', +# 'type': 'none', +# 'dependencies': [ +# 'my_binary', +# ], +# 'variables': { +# 'native_binary': '<(PRODUCT_DIR)/my_binary', +# 'output_dir': 'location to place binary and dependent libraries' +# }, +# 'includes': [ '../../build/android/native_app_dependencies.gypi' ], +# }, +# + +{ + 'variables': { + 'include_main_binary%': 1, + }, + 'conditions': [ + ['component == "shared_library"', { + 'dependencies': [ + '<(DEPTH)/build/android/setup.gyp:copy_system_libraries', + ], + 'variables': { + 'intermediate_dir': '<(PRODUCT_DIR)/<(_target_name)', + 'ordered_libraries_file': '<(intermediate_dir)/native_libraries.json', + }, + 'actions': [ + { + 'variables': { + 'input_libraries': ['<(native_binary)'], + }, + 'includes': ['../../build/android/write_ordered_libraries.gypi'], + }, + { + 'action_name': 'stripping native libraries', + 'variables': { + 'stripped_libraries_dir%': '<(output_dir)', + 'input_paths': ['<(native_binary)'], + 'stamp': '<(intermediate_dir)/strip.stamp', + }, + 'includes': ['../../build/android/strip_native_libraries.gypi'], + }, + ], + }], + ['include_main_binary==1', { + 'copies': [ + { + 'destination': '<(output_dir)', + 'files': [ '<(native_binary)' ], + } + ], + }], + ], +} diff --git a/build/android/ndk.gyp b/build/android/ndk.gyp new file mode 100644 index 00000000000..2838a9857e9 --- /dev/null +++ b/build/android/ndk.gyp @@ -0,0 +1,20 @@ +# Copyright (c) 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +{ + 'targets': [ + { + 'target_name': 'cpu_features', + 'type': 'static_library', + 'direct_dependent_settings': { + 'include_dirs': [ + '<(android_ndk_root)/sources/android/cpufeatures', + ], + }, + 'sources': [ + '<(android_ndk_root)/sources/android/cpufeatures/cpu-features.c', + ], + }, + ], +} diff --git a/build/android/pack_relocations.gypi b/build/android/pack_relocations.gypi new file mode 100644 index 00000000000..8567fa6e45f --- /dev/null +++ b/build/android/pack_relocations.gypi @@ -0,0 +1,77 @@ +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to provide a rule that +# packs relocations in Release builds of native libraries. +# +# To use this, create a gyp target with the following form: +# { +# 'action_name': 'pack_relocations', +# 'actions': [ +# 'variables': { +# 'enable_packing': 'pack relocations if 1, plain file copy if 0' +# 'exclude_packing_list': 'names of libraries explicitly not packed', +# 'ordered_libraries_file': 'file generated by write_ordered_libraries' +# 'input_paths': 'files to be added to the list of inputs' +# 'stamp': 'file to touch when the action is complete' +# 'stripped_libraries_dir': 'directory holding stripped libraries', +# 'packed_libraries_dir': 'directory holding packed libraries', +# 'includes': [ '../../build/android/pack_relocations.gypi' ], +# ], +# }, +# + +{ + 'variables': { + 'input_paths': [], + }, + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/pack_relocations.py', + '<(ordered_libraries_file)', + '>@(input_paths)', + ], + 'outputs': [ + '<(stamp)', + ], + 'conditions': [ + ['enable_packing == 1', { + 'message': 'Packing relocations for <(_target_name)', + 'dependencies': [ + '<(DEPTH)/third_party/android_platform/relocation_packer.gyp:android_relocation_packer#host', + ], + 'inputs': [ + 
'<(PRODUCT_DIR)/android_relocation_packer', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/pack_relocations.py', + '--configuration-name=<(CONFIGURATION_NAME)', + '--enable-packing=1', + '--exclude-packing-list=<@(exclude_packing_list)', + '--android-pack-relocations=<(PRODUCT_DIR)/android_relocation_packer', + '--stripped-libraries-dir=<(stripped_libraries_dir)', + '--packed-libraries-dir=<(packed_libraries_dir)', + '--libraries=@FileArg(<(ordered_libraries_file):libraries)', + '--stamp=<(stamp)', + ], + }, { + 'message': 'Copying libraries (no relocation packing) for <(_target_name)', + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/pack_relocations.py', + '--configuration-name=<(CONFIGURATION_NAME)', + '--enable-packing=0', + '--stripped-libraries-dir=<(stripped_libraries_dir)', + '--packed-libraries-dir=<(packed_libraries_dir)', + '--libraries=@FileArg(<(ordered_libraries_file):libraries)', + '--stamp=<(stamp)', + ], + }], + ['component == "shared_library"', { + # Add a fake output to force the build to always re-run this step. This + # is required because the real inputs are not known at gyp-time and + # changing base.so may not trigger changes to dependent libraries. + 'outputs': [ '<(stamp).fake' ] + }], + ], +} diff --git a/build/android/package_resources_action.gypi b/build/android/package_resources_action.gypi new file mode 100644 index 00000000000..eb608718951 --- /dev/null +++ b/build/android/package_resources_action.gypi @@ -0,0 +1,97 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is a helper to java_apk.gypi. It should be used to create an +# action that runs ApkBuilder via ANT. +# +# Required variables: +# apk_name - File name (minus path & extension) of the output apk. +# android_manifest_path - Path to AndroidManifest.xml. +# app_manifest_version_name - set the apps 'human readable' version number. 
+# app_manifest_version_code - set the apps version number. +# Optional variables: +# asset_location - The directory where assets are located (if any). +# create_density_splits - Whether to create density-based apk splits. Splits +# are supported only for minSdkVersion >= 21. +# language_splits - List of languages to create apk splits for. +# resource_zips - List of paths to resource zip files. +# shared_resources - Make a resource package that can be loaded by a different +# application at runtime to access the package's resources. +# extensions_to_not_compress - E.g.: 'pak,dat,bin' +# extra_inputs - List of extra action inputs. +{ + 'variables': { + 'asset_location%': '', + 'create_density_splits%': 0, + 'resource_zips%': [], + 'shared_resources%': 0, + 'extensions_to_not_compress%': '', + 'extra_inputs%': [], + 'resource_packaged_apk_name': '<(apk_name)-resources.ap_', + 'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)', + }, + 'action_name': 'package_resources_<(apk_name)', + 'message': 'packaging resources for <(apk_name)', + 'inputs': [ + # TODO: This isn't always rerun correctly, http://crbug.com/351928 + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/package_resources.py', + '<(android_manifest_path)', + '<@(extra_inputs)', + ], + 'outputs': [ + '<(resource_packaged_apk_path)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/package_resources.py', + '--android-sdk', '<(android_sdk)', + '--aapt-path', '<(android_aapt_path)', + '--configuration-name', '<(CONFIGURATION_NAME)', + '--android-manifest', '<(android_manifest_path)', + '--version-code', '<(app_manifest_version_code)', + '--version-name', '<(app_manifest_version_name)', + '--no-compress', '<(extensions_to_not_compress)', + '--apk-path', '<(resource_packaged_apk_path)', + ], + 'conditions': [ + ['shared_resources == 1', { + 'action': [ + '--shared-resources', + ], + }], + ['asset_location != ""', { + 'action': [ + 
'--asset-dir', '<(asset_location)', + ], + }], + ['create_density_splits == 1', { + 'action': [ + '--create-density-splits', + ], + 'outputs': [ + '<(resource_packaged_apk_path)_hdpi', + '<(resource_packaged_apk_path)_xhdpi', + '<(resource_packaged_apk_path)_xxhdpi', + '<(resource_packaged_apk_path)_xxxhdpi', + '<(resource_packaged_apk_path)_tvdpi', + ], + }], + ['language_splits != []', { + 'action': [ + '--language-splits=<(language_splits)', + ], + 'outputs': [ + "(resource_zips)', + ], + 'inputs': [ + '>@(resource_zips)', + ], + }], + ], +} diff --git a/build/android/preprocess_google_play_services.config.json b/build/android/preprocess_google_play_services.config.json new file mode 100644 index 00000000000..8b3198be7ae --- /dev/null +++ b/build/android/preprocess_google_play_services.config.json @@ -0,0 +1,90 @@ +{ + "lib_version": "7.3.0", + "clients": [ + "play-services-base", + "play-services-cast", + "play-services-identity" + ], + "client_filter": [ + "res/drawable*", + "res/values-af", + "res/values-az", + "res/values-be", + "res/values-bn", + "res/values-bn-rBD", + "res/values-de-rAT", + "res/values-de-rCH", + "res/values-en-rIE", + "res/values-en-rIN", + "res/values-en-rSG", + "res/values-en-rZA", + "res/values-es-rAR", + "res/values-es-rBO", + "res/values-es-rCL", + "res/values-es-rCO", + "res/values-es-rCR", + "res/values-es-rDO", + "res/values-es-rEC", + "res/values-es-rGT", + "res/values-es-rHN", + "res/values-es-rMX", + "res/values-es-rNI", + "res/values-es-rPA", + "res/values-es-rPE", + "res/values-es-rPR", + "res/values-es-rPY", + "res/values-es-rSV", + "res/values-es-rUS", + "res/values-es-rUY", + "res/values-es-rVE", + "res/values-eu-rES", + "res/values-fr-rCA", + "res/values-fr-rCH", + "res/values-gl", + "res/values-gl-rES", + "res/values-gu", + "res/values-he", + "res/values-hy", + "res/values-hy-rAM", + "res/values-in", + "res/values-is", + "res/values-is-rIS", + "res/values-ka", + "res/values-ka-rGE", + "res/values-kk-rKZ", + 
"res/values-km", + "res/values-km-rKH", + "res/values-kn", + "res/values-kn-rIN", + "res/values-ky", + "res/values-ky-rKG", + "res/values-lo", + "res/values-lo-rLA", + "res/values-mk-rMK", + "res/values-ml", + "res/values-ml-rIN", + "res/values-mn", + "res/values-mn-rMN", + "res/values-mo", + "res/values-mr", + "res/values-mr-rIN", + "res/values-ms", + "res/values-ms-rMY", + "res/values-my-rMM", + "res/values-nb", + "res/values-ne", + "res/values-ne-rNP", + "res/values-si", + "res/values-si-rLK", + "res/values-ta", + "res/values-ta-rIN", + "res/values-te", + "res/values-te-rIN", + "res/values-tl", + "res/values-ur-rPK", + "res/values-uz-rUZ", + "res/values-zh", + "res/values-zh-rHK", + "res/values-zu" + ] +} diff --git a/build/android/preprocess_google_play_services.py b/build/android/preprocess_google_play_services.py new file mode 100755 index 00000000000..85d239ad302 --- /dev/null +++ b/build/android/preprocess_google_play_services.py @@ -0,0 +1,238 @@ +#!/usr/bin/env python +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +'''Prepares the Google Play services split client libraries before usage by +Chrome's build system. + +We need to preprocess Google Play services before using it in Chrome +builds for 2 main reasons: + +- Getting rid of unused resources: unsupported languages, unused +drawables, etc. + +- Merging the differents jars so that it can be proguarded more +easily. This is necessary since debug and test apks get very close +to the dex limit. + +The script is supposed to be used with the maven repository that can be obtained +by downloading the "extra-google-m2repository" from the Android SDK Manager. It +also supports importing from already extracted AAR files using the +--is-extracted-repo flag. + +The json config (see the -c argument) file should provide the following fields: + +- lib_version: String. 
Used when building from the maven repository. It should
  be the package's version (e.g. "7.3.0")

- clients: String array. List of clients to pick. For example, when building
  from the maven repository, it's the artifactId (e.g. "play-services-base")
  of each client.

- client_filter: String array. Pattern of files to prune from the clients once
  extracted. Metacharacters are allowed. (e.g. "res/drawable*")

The output is a directory with the following structure:

  OUT_DIR
  +-- google-play-services.jar
  +-- res
  |   +-- CLIENT_1
  |   |   +-- color
  |   |   +-- values
  |   |   +-- etc.
  |   +-- CLIENT_2
  |       +-- ...
  +-- stub
      +-- res/[.git-keep-directory]
      +-- src/android/UnusedStub.java

Requires the `jar` utility in the path.

'''

import argparse
import glob
import itertools
import json
import os
import shutil
import stat
import sys

from pylib import cmd_helper
from pylib import constants

sys.path.append(
    os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'android', 'gyp'))
from util import build_utils


# Maven group layout for com.google.android.gms artifacts.
M2_PKG_PATH = os.path.join('com', 'google', 'android', 'gms')


def main():
  """Parses command-line arguments and runs the preprocessing."""
  parser = argparse.ArgumentParser(description=("Prepares the Google Play "
      "services split client libraries before usage by Chrome's build system"))
  parser.add_argument('-r',
                      '--repository',
                      help='The Google Play services repository location',
                      required=True,
                      metavar='FILE')
  parser.add_argument('-o',
                      '--out-dir',
                      help='The output directory',
                      required=True,
                      metavar='FILE')
  parser.add_argument('-c',
                      '--config-file',
                      help='Config file path',
                      required=True,
                      metavar='FILE')
  parser.add_argument('-g',
                      '--git-friendly',
                      action='store_true',
                      default=False,
                      help='Add a .gitkeep file to the empty directories')
  parser.add_argument('-x',
                      '--is-extracted-repo',
                      action='store_true',
                      default=False,
                      help='The provided repository is not made of AAR files.')

  args = parser.parse_args()

  ProcessGooglePlayServices(args.repository,
                            args.out_dir,
                            args.config_file,
                            args.git_friendly,
                            args.is_extracted_repo)


def ProcessGooglePlayServices(repo, out_dir, config_path, git_friendly,
                              is_extracted_repo):
  """Drives the whole import inside a temporary working directory.

  Extracts the configured clients, merges their jars, prunes filtered
  resources, then assembles the output directory.
  """
  with open(config_path, 'r') as json_file:
    config = json.load(json_file)

  with build_utils.TempDir() as tmp_root:
    tmp_paths = _SetupTempDir(tmp_root)

    if is_extracted_repo:
      _ImportFromExtractedRepo(config, tmp_paths, repo)
    else:
      _ImportFromAars(config, tmp_paths, repo)

    _GenerateCombinedJar(tmp_paths)
    _ProcessResources(config, tmp_paths)
    _BuildOutput(config, tmp_paths, out_dir, git_friendly)


def _SetupTempDir(tmp_root):
  # Creates the working sub-directories and returns a dict of their paths.
  tmp_paths = {
      'root': tmp_root,
      'imported_clients': os.path.join(tmp_root, 'imported_clients'),
      'extracted_jars': os.path.join(tmp_root, 'jar'),
      'combined_jar': os.path.join(tmp_root, 'google-play-services.jar'),
  }
  os.mkdir(tmp_paths['imported_clients'])
  os.mkdir(tmp_paths['extracted_jars'])

  return tmp_paths


def _SetupOutputDir(out_dir):
  # Computes the output layout and clears stale artifacts from a previous
  # run. NOTE(review): out_paths['jar'] is a *file*, so rmtree fails on it;
  # the failure is swallowed by ignore_errors and the later copyfile in
  # _BuildOutput overwrites the file anyway.
  out_paths = {
      'root': out_dir,
      'res': os.path.join(out_dir, 'res'),
      'jar': os.path.join(out_dir, 'google-play-services.jar'),
      'stub': os.path.join(out_dir, 'stub'),
  }

  shutil.rmtree(out_paths['jar'], ignore_errors=True)
  shutil.rmtree(out_paths['res'], ignore_errors=True)
  shutil.rmtree(out_paths['stub'], ignore_errors=True)

  return out_paths


def _MakeWritable(dir_path):
  # Adds user-write permission to everything under |dir_path|.
  for root, dirs, files in os.walk(dir_path):
    for path in itertools.chain(dirs, files):
      st = os.stat(os.path.join(root, path))
      os.chmod(os.path.join(root, path), st.st_mode | stat.S_IWUSR)


def _ImportFromAars(config, tmp_paths, repo):
  # Extracts each client AAR from the maven repository layout, then unpacks
  # its classes.jar into the shared extracted_jars directory (clobbering
  # duplicates so classes are merged across clients).
  for client in config['clients']:
    aar_name = '%s-%s.aar' % (client, config['lib_version'])
    aar_path = os.path.join(repo, M2_PKG_PATH, client,
                            config['lib_version'], aar_name)
    aar_out_path = os.path.join(tmp_paths['imported_clients'], client)
    build_utils.ExtractAll(aar_path, aar_out_path)

    client_jar_path = os.path.join(aar_out_path, 'classes.jar')
    build_utils.ExtractAll(client_jar_path, tmp_paths['extracted_jars'],
                           no_clobber=False)


def _ImportFromExtractedRepo(config, tmp_paths, repo):
  # Import the clients. copytree preserves the source's permission bits, so
  # even on failure we restore write permission to let TempDir clean up.
  try:
    for client in config['clients']:
      client_out_dir = os.path.join(tmp_paths['imported_clients'], client)
      shutil.copytree(os.path.join(repo, client), client_out_dir)

      client_jar_path = os.path.join(client_out_dir, 'classes.jar')
      build_utils.ExtractAll(client_jar_path, tmp_paths['extracted_jars'],
                             no_clobber=False)
  finally:
    _MakeWritable(tmp_paths['imported_clients'])


def _GenerateCombinedJar(tmp_paths):
  # Repackages the merged class files into a single jar. Requires the `jar`
  # utility on PATH (see module docstring).
  out_file_name = tmp_paths['combined_jar']
  working_dir = tmp_paths['extracted_jars']
  cmd_helper.Call(['jar', '-cf', out_file_name, '-C', working_dir, '.'])


def _ProcessResources(config, tmp_paths):
  # Prune unused resources: each filter is globbed under every imported
  # client directory and matches are deleted wholesale.
  for res_filter in config['client_filter']:
    glob_pattern = os.path.join(tmp_paths['imported_clients'], '*', res_filter)
    for prune_target in glob.glob(glob_pattern):
      shutil.rmtree(prune_target)


def _BuildOutput(config, tmp_paths, out_dir, git_friendly):
  out_paths = _SetupOutputDir(out_dir)

  # Copy the resources to the output dir, one subdirectory per client that
  # actually has resources left after pruning.
  for client in config['clients']:
    res_in_tmp_dir = os.path.join(tmp_paths['imported_clients'], client, 'res')
    if os.path.isdir(res_in_tmp_dir) and os.listdir(res_in_tmp_dir):
      res_in_final_dir = os.path.join(out_paths['res'], client)
      shutil.copytree(res_in_tmp_dir, res_in_final_dir)

  # Copy the jar
  shutil.copyfile(tmp_paths['combined_jar'], out_paths['jar'])

  # Write the java dummy stub.
Needed for gyp to create the resource jar + stub_location = os.path.join(out_paths['stub'], 'src', 'android') + os.makedirs(stub_location) + with open(os.path.join(stub_location, 'UnusedStub.java'), 'w') as stub: + stub.write('package android;' + 'public final class UnusedStub {' + ' private UnusedStub() {}' + '}') + + # Create the main res directory. Will be empty but is needed by gyp + stub_res_location = os.path.join(out_paths['stub'], 'res') + os.makedirs(stub_res_location) + if git_friendly: + build_utils.Touch(os.path.join(stub_res_location, '.git-keep-directory')) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/android/provision_devices.py b/build/android/provision_devices.py new file mode 100755 index 00000000000..a5f8fc67236 --- /dev/null +++ b/build/android/provision_devices.py @@ -0,0 +1,349 @@ +#!/usr/bin/env python +# +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Provisions Android devices with settings required for bots. + +Usage: + ./provision_devices.py [-d ] +""" + +import argparse +import json +import logging +import os +import posixpath +import re +import subprocess +import sys +import time + +from pylib import constants +from pylib import device_settings +from pylib.device import battery_utils +from pylib.device import device_blacklist +from pylib.device import device_errors +from pylib.device import device_utils +from pylib.utils import run_tests_helper +from pylib.utils import timeout_retry + +sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, + 'third_party', 'android_testrunner')) +import errors + + +class _DEFAULT_TIMEOUTS(object): + # L can take a while to reboot after a wipe. 
+ LOLLIPOP = 600 + PRE_LOLLIPOP = 180 + + HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP) + + +class _PHASES(object): + WIPE = 'wipe' + PROPERTIES = 'properties' + FINISH = 'finish' + + ALL = [WIPE, PROPERTIES, FINISH] + + +def ProvisionDevices(options): + devices = device_utils.DeviceUtils.HealthyDevices() + if options.device: + devices = [d for d in devices if d == options.device] + if not devices: + raise device_errors.DeviceUnreachableError(options.device) + + parallel_devices = device_utils.DeviceUtils.parallel(devices) + parallel_devices.pMap(ProvisionDevice, options) + if options.auto_reconnect: + _LaunchHostHeartbeat() + blacklist = device_blacklist.ReadBlacklist() + if options.output_device_blacklist: + with open(options.output_device_blacklist, 'w') as f: + json.dump(blacklist, f) + if all(d in blacklist for d in devices): + raise device_errors.NoDevicesError + return 0 + + +def ProvisionDevice(device, options): + if options.reboot_timeout: + reboot_timeout = options.reboot_timeout + elif (device.build_version_sdk >= + constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP): + reboot_timeout = _DEFAULT_TIMEOUTS.LOLLIPOP + else: + reboot_timeout = _DEFAULT_TIMEOUTS.PRE_LOLLIPOP + + def should_run_phase(phase_name): + return not options.phases or phase_name in options.phases + + def run_phase(phase_func, reboot=True): + try: + device.WaitUntilFullyBooted(timeout=reboot_timeout, retries=0) + except device_errors.CommandTimeoutError: + logging.error('Device did not finish booting. 
Will try to reboot.') + device.Reboot(timeout=reboot_timeout) + phase_func(device, options) + if reboot: + device.Reboot(False, retries=0) + device.adb.WaitForDevice() + + try: + if should_run_phase(_PHASES.WIPE): + run_phase(WipeDevice) + + if should_run_phase(_PHASES.PROPERTIES): + run_phase(SetProperties) + + if should_run_phase(_PHASES.FINISH): + run_phase(FinishProvisioning, reboot=False) + + except (errors.WaitForResponseTimedOutError, + device_errors.CommandTimeoutError): + logging.exception('Timed out waiting for device %s. Adding to blacklist.', + str(device)) + device_blacklist.ExtendBlacklist([str(device)]) + + except device_errors.CommandFailedError: + logging.exception('Failed to provision device %s. Adding to blacklist.', + str(device)) + device_blacklist.ExtendBlacklist([str(device)]) + + +def WipeDevice(device, options): + """Wipes data from device, keeping only the adb_keys for authorization. + + After wiping data on a device that has been authorized, adb can still + communicate with the device, but after reboot the device will need to be + re-authorized because the adb keys file is stored in /data/misc/adb/. + Thus, adb_keys file is rewritten so the device does not need to be + re-authorized. + + Arguments: + device: the device to wipe + """ + if options.skip_wipe: + return + + try: + device.EnableRoot() + device_authorized = device.FileExists(constants.ADB_KEYS_FILE) + if device_authorized: + adb_keys = device.ReadFile(constants.ADB_KEYS_FILE, + as_root=True).splitlines() + device.RunShellCommand(['wipe', 'data'], + as_root=True, check_return=True) + device.adb.WaitForDevice() + + if device_authorized: + adb_keys_set = set(adb_keys) + for adb_key_file in options.adb_key_files or []: + try: + with open(adb_key_file, 'r') as f: + adb_public_keys = f.readlines() + adb_keys_set.update(adb_public_keys) + except IOError: + logging.warning('Unable to find adb keys file %s.' 
% adb_key_file) + _WriteAdbKeysFile(device, '\n'.join(adb_keys_set)) + except device_errors.CommandFailedError: + logging.exception('Possible failure while wiping the device. ' + 'Attempting to continue.') + + +def _WriteAdbKeysFile(device, adb_keys_string): + dir_path = posixpath.dirname(constants.ADB_KEYS_FILE) + device.RunShellCommand(['mkdir', '-p', dir_path], + as_root=True, check_return=True) + device.RunShellCommand(['restorecon', dir_path], + as_root=True, check_return=True) + device.WriteFile(constants.ADB_KEYS_FILE, adb_keys_string, as_root=True) + device.RunShellCommand(['restorecon', constants.ADB_KEYS_FILE], + as_root=True, check_return=True) + + +def SetProperties(device, options): + try: + device.EnableRoot() + except device_errors.CommandFailedError as e: + logging.warning(str(e)) + + _ConfigureLocalProperties(device, options.enable_java_debug) + device_settings.ConfigureContentSettings( + device, device_settings.DETERMINISTIC_DEVICE_SETTINGS) + if options.disable_location: + device_settings.ConfigureContentSettings( + device, device_settings.DISABLE_LOCATION_SETTINGS) + else: + device_settings.ConfigureContentSettings( + device, device_settings.ENABLE_LOCATION_SETTINGS) + + if options.disable_mock_location: + device_settings.ConfigureContentSettings( + device, device_settings.DISABLE_MOCK_LOCATION_SETTINGS) + else: + device_settings.ConfigureContentSettings( + device, device_settings.ENABLE_MOCK_LOCATION_SETTINGS) + + device_settings.SetLockScreenSettings(device) + if options.disable_network: + device_settings.ConfigureContentSettings( + device, device_settings.NETWORK_DISABLED_SETTINGS) + +def _ConfigureLocalProperties(device, java_debug=True): + """Set standard readonly testing device properties prior to reboot.""" + local_props = [ + 'persist.sys.usb.config=adb', + 'ro.monkey=1', + 'ro.test_harness=1', + 'ro.audio.silent=1', + 'ro.setupwizard.mode=DISABLED', + ] + if java_debug: + local_props.append( + '%s=all' % 
device_utils.DeviceUtils.JAVA_ASSERT_PROPERTY) + local_props.append('debug.checkjni=1') + try: + device.WriteFile( + constants.DEVICE_LOCAL_PROPERTIES_PATH, + '\n'.join(local_props), as_root=True) + # Android will not respect the local props file if it is world writable. + device.RunShellCommand( + ['chmod', '644', constants.DEVICE_LOCAL_PROPERTIES_PATH], + as_root=True, check_return=True) + except device_errors.CommandFailedError: + logging.exception('Failed to configure local properties.') + + +def FinishProvisioning(device, options): + if options.min_battery_level is not None: + try: + battery = battery_utils.BatteryUtils(device) + battery.ChargeDeviceToLevel(options.min_battery_level) + except device_errors.CommandFailedError: + logging.exception('Unable to charge device to specified level.') + + if options.max_battery_temp is not None: + try: + battery = battery_utils.BatteryUtils(device) + battery.LetBatteryCoolToTemperature(options.max_battery_temp) + except device_errors.CommandFailedError: + logging.exception('Unable to let battery cool to specified temperature.') + + device.RunShellCommand( + ['date', '-s', time.strftime('%Y%m%d.%H%M%S', time.gmtime())], + as_root=True, check_return=True) + props = device.RunShellCommand('getprop', check_return=True) + for prop in props: + logging.info(' %s' % prop) + if options.auto_reconnect: + _PushAndLaunchAdbReboot(device, options.target) + + +def _PushAndLaunchAdbReboot(device, target): + """Pushes and launches the adb_reboot binary on the device. + + Arguments: + device: The DeviceUtils instance for the device to which the adb_reboot + binary should be pushed. + target: The build target (example, Debug or Release) which helps in + locating the adb_reboot binary. + """ + logging.info('Will push and launch adb_reboot on %s' % str(device)) + # Kill if adb_reboot is already running. 
+ device.KillAll('adb_reboot', blocking=True, timeout=2, quiet=True) + # Push adb_reboot + logging.info(' Pushing adb_reboot ...') + adb_reboot = os.path.join(constants.DIR_SOURCE_ROOT, + 'out/%s/adb_reboot' % target) + device.PushChangedFiles([(adb_reboot, '/data/local/tmp/')]) + # Launch adb_reboot + logging.info(' Launching adb_reboot ...') + device.RunShellCommand( + ['/data/local/tmp/adb_reboot'], + check_return=True) + + +def _LaunchHostHeartbeat(): + # Kill if existing host_heartbeat + KillHostHeartbeat() + # Launch a new host_heartbeat + logging.info('Spawning host heartbeat...') + subprocess.Popen([os.path.join(constants.DIR_SOURCE_ROOT, + 'build/android/host_heartbeat.py')]) + + +def KillHostHeartbeat(): + ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE) + stdout, _ = ps.communicate() + matches = re.findall('\\n.*host_heartbeat.*', stdout) + for match in matches: + logging.info('An instance of host heart beart running... will kill') + pid = re.findall(r'(\S+)', match)[1] + subprocess.call(['kill', str(pid)]) + + +def main(): + # Recommended options on perf bots: + # --disable-network + # TODO(tonyg): We eventually want network on. However, currently radios + # can cause perfbots to drain faster than they charge. + # --min-battery-level 95 + # Some perf bots run benchmarks with USB charging disabled which leads + # to gradual draining of the battery. We must wait for a full charge + # before starting a run in order to keep the devices online. + + parser = argparse.ArgumentParser( + description='Provision Android devices with settings required for bots.') + parser.add_argument('-d', '--device', metavar='SERIAL', + help='the serial number of the device to be provisioned' + ' (the default is to provision all devices attached)') + parser.add_argument('--phase', action='append', choices=_PHASES.ALL, + dest='phases', + help='Phases of provisioning to run. 
' + '(If omitted, all phases will be run.)') + parser.add_argument('--skip-wipe', action='store_true', default=False, + help="don't wipe device data during provisioning") + parser.add_argument('--reboot-timeout', metavar='SECS', type=int, + help='when wiping the device, max number of seconds to' + ' wait after each reboot ' + '(default: %s)' % _DEFAULT_TIMEOUTS.HELP_TEXT) + parser.add_argument('--min-battery-level', type=int, metavar='NUM', + help='wait for the device to reach this minimum battery' + ' level before trying to continue') + parser.add_argument('--disable-location', action='store_true', + help='disable Google location services on devices') + parser.add_argument('--disable-mock-location', action='store_true', + default=False, help='Set ALLOW_MOCK_LOCATION to false') + parser.add_argument('--disable-network', action='store_true', + help='disable network access on devices') + parser.add_argument('--disable-java-debug', action='store_false', + dest='enable_java_debug', default=True, + help='disable Java property asserts and JNI checking') + parser.add_argument('-t', '--target', default='Debug', + help='the build target (default: %(default)s)') + parser.add_argument('-r', '--auto-reconnect', action='store_true', + help='push binary which will reboot the device on adb' + ' disconnections') + parser.add_argument('--adb-key-files', type=str, nargs='+', + help='list of adb keys to push to device') + parser.add_argument('-v', '--verbose', action='count', default=1, + help='Log more information.') + parser.add_argument('--max-battery-temp', type=int, metavar='NUM', + help='Wait for the battery to have this temp or lower.') + parser.add_argument('--output-device-blacklist', + help='Json file to output the device blacklist.') + args = parser.parse_args() + constants.SetBuildType(args.target) + + run_tests_helper.SetLogLevel(args.verbose) + + return ProvisionDevices(args) + + +if __name__ == '__main__': + sys.exit(main()) diff --git 
a/build/android/push_libraries.gypi b/build/android/push_libraries.gypi new file mode 100644 index 00000000000..773c44f8ab1 --- /dev/null +++ b/build/android/push_libraries.gypi @@ -0,0 +1,49 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an action to provide a rule that +# pushes stripped shared libraries to the attached Android device. This should +# only be used with the gyp_managed_install flag set. +# +# To use this, create a gyp target with the following form: +# { +# 'actions': [ +# 'variables': { +# 'ordered_libraries_file': 'file generated by write_ordered_libraries' +# 'strip_stamp': 'stamp from strip action to block on' +# 'libraries_source_dir': 'location where stripped libraries are stored' +# 'device_library_dir': 'location on the device where to put pushed libraries', +# 'push_stamp': 'file to touch when the action is complete' +# 'configuration_name': 'The build CONFIGURATION_NAME' +# }, +# 'includes': [ '../../build/android/push_libraries.gypi' ], +# ], +# }, +# + +{ + 'action_name': 'push_libraries_<(_target_name)', + 'message': 'Pushing libraries to device for <(_target_name)', + 'inputs': [ + '<(DEPTH)/build/android/gyp/util/build_utils.py', + '<(DEPTH)/build/android/gyp/util/md5_check.py', + '<(DEPTH)/build/android/gyp/push_libraries.py', + '<(strip_stamp)', + '<(strip_additional_stamp)', + '<(build_device_config_path)', + '<(pack_relocations_stamp)', + ], + 'outputs': [ + '<(push_stamp)', + ], + 'action': [ + 'python', '<(DEPTH)/build/android/gyp/push_libraries.py', + '--build-device-configuration=<(build_device_config_path)', + '--libraries-dir=<(libraries_source_dir)', + '--device-dir=<(device_library_dir)', + '--libraries=@FileArg(<(ordered_libraries_file):libraries)', + '--stamp=<(push_stamp)', + '--configuration-name=<(configuration_name)', + ], +} diff --git 
a/build/android/pylib/OWNERS b/build/android/pylib/OWNERS new file mode 100644 index 00000000000..dbbbba7f275 --- /dev/null +++ b/build/android/pylib/OWNERS @@ -0,0 +1,4 @@ +jbudorick@chromium.org +klundberg@chromium.org +navabi@chromium.org +skyostil@chromium.org diff --git a/build/android/pylib/__init__.py b/build/android/pylib/__init__.py new file mode 100644 index 00000000000..96196cffb27 --- /dev/null +++ b/build/android/pylib/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/build/android/pylib/android_commands.py b/build/android/pylib/android_commands.py new file mode 100644 index 00000000000..f7191f7935d --- /dev/null +++ b/build/android/pylib/android_commands.py @@ -0,0 +1,1976 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Provides an interface to communicate with the device via the adb command. + +Assumes adb binary is currently on system path. + +Note that this module is deprecated. +""" +# TODO(jbudorick): Delete this file once no clients use it. 
+ +# pylint: skip-file + +import collections +import datetime +import inspect +import logging +import os +import random +import re +import shlex +import signal +import subprocess +import sys +import tempfile +import time + +import cmd_helper +import constants +import system_properties +from utils import host_utils + +try: + from pylib import pexpect +except ImportError: + pexpect = None + +sys.path.append(os.path.join( + constants.DIR_SOURCE_ROOT, 'third_party', 'android_testrunner')) +import adb_interface +import am_instrument_parser +import errors + +from pylib.device import device_blacklist +from pylib.device import device_errors + +# Pattern to search for the next whole line of pexpect output and capture it +# into a match group. We can't use ^ and $ for line start end with pexpect, +# see http://www.noah.org/python/pexpect/#doc for explanation why. +PEXPECT_LINE_RE = re.compile('\n([^\r]*)\r') + +# Set the adb shell prompt to be a unique marker that will [hopefully] not +# appear at the start of any line of a command's output. +SHELL_PROMPT = '~+~PQ\x17RS~+~' + +# Java properties file +LOCAL_PROPERTIES_PATH = constants.DEVICE_LOCAL_PROPERTIES_PATH + +# Property in /data/local.prop that controls Java assertions. +JAVA_ASSERT_PROPERTY = 'dalvik.vm.enableassertions' + +# Keycode "enum" suitable for passing to AndroidCommands.SendKey(). 
+KEYCODE_HOME = 3 +KEYCODE_BACK = 4 +KEYCODE_DPAD_UP = 19 +KEYCODE_DPAD_DOWN = 20 +KEYCODE_DPAD_RIGHT = 22 +KEYCODE_ENTER = 66 +KEYCODE_MENU = 82 + +MD5SUM_DEVICE_FOLDER = constants.TEST_EXECUTABLE_DIR + '/md5sum/' +MD5SUM_DEVICE_PATH = MD5SUM_DEVICE_FOLDER + 'md5sum_bin' + +PIE_WRAPPER_PATH = constants.TEST_EXECUTABLE_DIR + '/run_pie' + +CONTROL_USB_CHARGING_COMMANDS = [ + { + # Nexus 4 + 'witness_file': '/sys/module/pm8921_charger/parameters/disabled', + 'enable_command': 'echo 0 > /sys/module/pm8921_charger/parameters/disabled', + 'disable_command': + 'echo 1 > /sys/module/pm8921_charger/parameters/disabled', + }, + { + # Nexus 5 + # Setting the HIZ bit of the bq24192 causes the charger to actually ignore + # energy coming from USB. Setting the power_supply offline just updates the + # Android system to reflect that. + 'witness_file': '/sys/kernel/debug/bq24192/INPUT_SRC_CONT', + 'enable_command': ( + 'echo 0x4A > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && ' + 'echo 1 > /sys/class/power_supply/usb/online'), + 'disable_command': ( + 'echo 0xCA > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && ' + 'chmod 644 /sys/class/power_supply/usb/online && ' + 'echo 0 > /sys/class/power_supply/usb/online'), + }, +] + +class DeviceTempFile(object): + def __init__(self, android_commands, prefix='temp_file', suffix=''): + """Find an unused temporary file path in the devices external directory. + + When this object is closed, the file will be deleted on the device. + """ + self.android_commands = android_commands + while True: + # TODO(cjhopman): This could actually return the same file in multiple + # calls if the caller doesn't write to the files immediately. This is + # expected to never happen. 
+ i = random.randint(0, 1000000) + self.name = '%s/%s-%d-%010d%s' % ( + android_commands.GetExternalStorage(), + prefix, int(time.time()), i, suffix) + if not android_commands.FileExistsOnDevice(self.name): + break + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + def close(self): + self.android_commands.RunShellCommand('rm ' + self.name) + + +def GetAVDs(): + """Returns a list of AVDs.""" + re_avd = re.compile('^[ ]+Name: ([a-zA-Z0-9_:.-]+)', re.MULTILINE) + avds = re_avd.findall(cmd_helper.GetCmdOutput(['android', 'list', 'avd'])) + return avds + +def ResetBadDevices(): + """Removes the blacklist that keeps track of bad devices for a current + build. + """ + device_blacklist.ResetBlacklist() + +def ExtendBadDevices(devices): + """Adds devices to the blacklist that keeps track of bad devices for a + current build. + + The devices listed in the bad devices file will not be returned by + GetAttachedDevices. + + Args: + devices: list of bad devices to be added to the bad devices file. + """ + device_blacklist.ExtendBlacklist(devices) + + +def GetAttachedDevices(hardware=True, emulator=True, offline=False): + """Returns a list of attached, android devices and emulators. + + If a preferred device has been set with ANDROID_SERIAL, it will be first in + the returned list. The arguments specify what devices to include in the list. + + Example output: + + * daemon not running. starting it now on port 5037 * + * daemon started successfully * + List of devices attached + 027c10494100b4d7 device + emulator-5554 offline + + Args: + hardware: Include attached actual devices that are online. + emulator: Include emulators (i.e. AVD's) currently on host. + offline: Include devices and emulators that are offline. + + Returns: List of devices. 
+ """ + adb_devices_output = cmd_helper.GetCmdOutput([constants.GetAdbPath(), + 'devices']) + + re_device = re.compile('^([a-zA-Z0-9_:.-]+)\tdevice$', re.MULTILINE) + online_devices = re_device.findall(adb_devices_output) + + re_device = re.compile('^(emulator-[0-9]+)\tdevice', re.MULTILINE) + emulator_devices = re_device.findall(adb_devices_output) + + re_device = re.compile('^([a-zA-Z0-9_:.-]+)\t(?:offline|unauthorized)$', + re.MULTILINE) + offline_devices = re_device.findall(adb_devices_output) + + devices = [] + # First determine list of online devices (e.g. hardware and/or emulator). + if hardware and emulator: + devices = online_devices + elif hardware: + devices = [device for device in online_devices + if device not in emulator_devices] + elif emulator: + devices = emulator_devices + + # Now add offline devices if offline is true + if offline: + devices = devices + offline_devices + + # Remove any devices in the blacklist. + blacklist = device_blacklist.ReadBlacklist() + if len(blacklist): + logging.info('Avoiding bad devices %s', ' '.join(blacklist)) + devices = [device for device in devices if device not in blacklist] + + preferred_device = os.environ.get('ANDROID_SERIAL') + if preferred_device in devices: + devices.remove(preferred_device) + devices.insert(0, preferred_device) + return devices + + +def IsDeviceAttached(device): + """Return true if the device is attached and online.""" + return device in GetAttachedDevices() + + +def _GetFilesFromRecursiveLsOutput(path, ls_output, re_file, utc_offset=None): + """Gets a list of files from `ls` command output. + + Python's os.walk isn't used because it doesn't work over adb shell. + + Args: + path: The path to list. + ls_output: A list of lines returned by an `ls -lR` command. + re_file: A compiled regular expression which parses a line into named groups + consisting of at minimum "filename", "date", "time", "size" and + optionally "timezone". 
+ utc_offset: A 5-character string of the form +HHMM or -HHMM, where HH is a + 2-digit string giving the number of UTC offset hours, and MM is a + 2-digit string giving the number of UTC offset minutes. If the input + utc_offset is None, will try to look for the value of "timezone" if it + is specified in re_file. + + Returns: + A dict of {"name": (size, lastmod), ...} where: + name: The file name relative to |path|'s directory. + size: The file size in bytes (0 for directories). + lastmod: The file last modification date in UTC. + """ + re_directory = re.compile('^%s/(?P[^:]+):$' % re.escape(path)) + path_dir = os.path.dirname(path) + + current_dir = '' + files = {} + for line in ls_output: + directory_match = re_directory.match(line) + if directory_match: + current_dir = directory_match.group('dir') + continue + file_match = re_file.match(line) + if file_match: + filename = os.path.join(current_dir, file_match.group('filename')) + if filename.startswith(path_dir): + filename = filename[len(path_dir) + 1:] + lastmod = datetime.datetime.strptime( + file_match.group('date') + ' ' + file_match.group('time')[:5], + '%Y-%m-%d %H:%M') + if not utc_offset and 'timezone' in re_file.groupindex: + utc_offset = file_match.group('timezone') + if isinstance(utc_offset, str) and len(utc_offset) == 5: + utc_delta = datetime.timedelta(hours=int(utc_offset[1:3]), + minutes=int(utc_offset[3:5])) + if utc_offset[0:1] == '-': + utc_delta = -utc_delta + lastmod -= utc_delta + files[filename] = (int(file_match.group('size')), lastmod) + return files + + +def _ParseMd5SumOutput(md5sum_output): + """Returns a list of tuples from the provided md5sum output. + + Args: + md5sum_output: output directly from md5sum binary. + + Returns: + List of namedtuples with attributes |hash| and |path|, where |path| is the + absolute path to the file with an Md5Sum of |hash|. 
+ """ + HashAndPath = collections.namedtuple('HashAndPath', ['hash', 'path']) + split_lines = [line.split(' ') for line in md5sum_output] + return [HashAndPath._make(s) for s in split_lines if len(s) == 2] + + +def _HasAdbPushSucceeded(command_output): + """Returns whether adb push has succeeded from the provided output.""" + # TODO(frankf): We should look at the return code instead of the command + # output for many of the commands in this file. + if not command_output: + return True + # Success looks like this: "3035 KB/s (12512056 bytes in 4.025s)" + # Errors look like this: "failed to copy ... " + if not re.search('^[0-9]', command_output.splitlines()[-1]): + logging.critical('PUSH FAILED: ' + command_output) + return False + return True + + +def GetLogTimestamp(log_line, year): + """Returns the timestamp of the given |log_line| in the given year.""" + try: + return datetime.datetime.strptime('%s-%s' % (year, log_line[:18]), + '%Y-%m-%d %H:%M:%S.%f') + except (ValueError, IndexError): + logging.critical('Error reading timestamp from ' + log_line) + return None + + +class AndroidCommands(object): + """Helper class for communicating with Android device via adb.""" + + def __init__(self, device=None): + """Constructor. + + Args: + device: If given, adb commands are only send to the device of this ID. + Otherwise commands are sent to all attached devices. 
+ """ + self._adb = adb_interface.AdbInterface(constants.GetAdbPath()) + if device: + self._adb.SetTargetSerial(device) + self._device = device + self._logcat = None + self.logcat_process = None + self._logcat_tmpoutfile = None + self._pushed_files = [] + self._device_utc_offset = None + self._potential_push_size = 0 + self._actual_push_size = 0 + self._external_storage = '' + self._util_wrapper = '' + self._system_properties = system_properties.SystemProperties(self.Adb()) + self._push_if_needed_cache = {} + self._control_usb_charging_command = { + 'command': None, + 'cached': False, + } + self._protected_file_access_method_initialized = None + self._privileged_command_runner = None + self._pie_wrapper = None + + @property + def system_properties(self): + return self._system_properties + + def _LogShell(self, cmd): + """Logs the adb shell command.""" + if self._device: + device_repr = self._device[-4:] + else: + device_repr = '????' + logging.info('[%s]> %s', device_repr, cmd) + + def Adb(self): + """Returns our AdbInterface to avoid us wrapping all its methods.""" + # TODO(tonyg): Goal should be to git rid of this method by making this API + # complete and alleviating the need. + return self._adb + + def GetDevice(self): + """Returns the device serial.""" + return self._device + + def IsOnline(self): + """Checks whether the device is online. + + Returns: + True if device is in 'device' mode, False otherwise. + """ + # TODO(aurimas): revert to using adb get-state when android L adb is fixed. 
+ #out = self._adb.SendCommand('get-state') + #return out.strip() == 'device' + + out = self._adb.SendCommand('devices') + for line in out.split('\n'): + if self._device in line and 'device' in line: + return True + return False + + def IsRootEnabled(self): + """Checks if root is enabled on the device.""" + root_test_output = self.RunShellCommand('ls /root') or [''] + return not 'Permission denied' in root_test_output[0] + + def EnableAdbRoot(self): + """Enables adb root on the device. + + Returns: + True: if output from executing adb root was as expected. + False: otherwise. + """ + if self.GetBuildType() == 'user': + logging.warning("Can't enable root in production builds with type user") + return False + else: + return_value = self._adb.EnableAdbRoot() + # EnableAdbRoot inserts a call for wait-for-device only when adb logcat + # output matches what is expected. Just to be safe add a call to + # wait-for-device. + self._adb.SendCommand('wait-for-device') + return return_value + + def GetDeviceYear(self): + """Returns the year information of the date on device.""" + return self.RunShellCommand('date +%Y')[0] + + def GetExternalStorage(self): + if not self._external_storage: + self._external_storage = self.RunShellCommand('echo $EXTERNAL_STORAGE')[0] + if not self._external_storage: + raise device_errors.CommandFailedError( + ['shell', "'echo $EXTERNAL_STORAGE'"], + 'Unable to find $EXTERNAL_STORAGE') + return self._external_storage + + def WaitForDevicePm(self, timeout=120): + """Blocks until the device's package manager is available. + + To workaround http://b/5201039, we restart the shell and retry if the + package manager isn't back after 120 seconds. + + Raises: + errors.WaitForResponseTimedOutError after max retries reached. 
+ """ + last_err = None + retries = 3 + while retries: + try: + self._adb.WaitForDevicePm(wait_time=timeout) + return # Success + except errors.WaitForResponseTimedOutError as e: + last_err = e + logging.warning('Restarting and retrying after timeout: %s', e) + retries -= 1 + self.RestartShell() + raise last_err # Only reached after max retries, re-raise the last error. + + def RestartShell(self): + """Restarts the shell on the device. Does not block for it to return.""" + self.RunShellCommand('stop') + self.RunShellCommand('start') + + def Reboot(self, full_reboot=True): + """Reboots the device and waits for the package manager to return. + + Args: + full_reboot: Whether to fully reboot the device or just restart the shell. + """ + # TODO(torne): hive can't reboot the device either way without breaking the + # connection; work out if we can handle this better + if os.environ.get('USING_HIVE'): + logging.warning('Ignoring reboot request as we are on hive') + return + if full_reboot or not self.IsRootEnabled(): + self._adb.SendCommand('reboot') + self._system_properties = system_properties.SystemProperties(self.Adb()) + timeout = 300 + retries = 1 + # Wait for the device to disappear. + while retries < 10 and self.IsOnline(): + time.sleep(1) + retries += 1 + else: + self.RestartShell() + timeout = 120 + # To run tests we need at least the package manager and the sd card (or + # other external storage) to be ready. + self.WaitForDevicePm(timeout) + self.WaitForSdCardReady(timeout) + + def Shutdown(self): + """Shuts down the device.""" + self._adb.SendCommand('reboot -p') + self._system_properties = system_properties.SystemProperties(self.Adb()) + + def Uninstall(self, package): + """Uninstalls the specified package from the device. + + Args: + package: Name of the package to remove. 
    Returns:
      A status string returned by adb uninstall
    """
    uninstall_command = 'uninstall %s' % package

    self._LogShell(uninstall_command)
    return self._adb.SendCommand(uninstall_command, timeout_time=60)

  def Install(self, package_file_path, reinstall=False):
    """Installs the specified package to the device.

    Args:
      package_file_path: Path to .apk file to install.
      reinstall: Reinstall an existing apk, keeping the data.

    Returns:
      A status string returned by adb install
    """
    assert os.path.isfile(package_file_path), ('<%s> is not file' %
                                               package_file_path)

    install_cmd = ['install']

    if reinstall:
      # -r keeps the existing application data while replacing the apk.
      install_cmd.append('-r')

    install_cmd.append(package_file_path)
    install_cmd = ' '.join(install_cmd)

    self._LogShell(install_cmd)
    # Installs can be slow; allow 2 minutes and never retry (a retried
    # install could mask a partially-applied first attempt).
    return self._adb.SendCommand(install_cmd,
                                 timeout_time=2 * 60,
                                 retry_count=0)

  def ManagedInstall(self, apk_path, keep_data=False, package_name=None,
                     reboots_on_timeout=2):
    """Installs specified package and reboots device on timeouts.

    If package_name is supplied, checks if the package is already installed and
    doesn't reinstall if the apk md5sums match.

    Args:
      apk_path: Path to .apk file to install.
      keep_data: Reinstalls instead of uninstalling first, preserving the
        application data.
      package_name: Package name (only needed if keep_data=False).
      reboots_on_timeout: number of time to reboot if package manager is frozen.
    """
    # Check if package is already installed and up to date.
    if package_name:
      installed_apk_path = self.GetApplicationPath(package_name)
      if (installed_apk_path and
          not self.GetFilesChanged(apk_path, installed_apk_path,
                                   ignore_filenames=True)):
        logging.info('Skipped install: identical %s APK already installed' %
                     package_name)
        return
    # Install.
    reboots_left = reboots_on_timeout
    while True:
      try:
        if not keep_data:
          # A clean install requires knowing which package to uninstall first.
          assert package_name
          self.Uninstall(package_name)
        install_status = self.Install(apk_path, reinstall=keep_data)
        if 'Success' in install_status:
          return
        else:
          raise Exception('Install failure: %s' % install_status)
      except errors.WaitForResponseTimedOutError:
        # NOTE(review): presumably a buildbot step annotation emitted for the
        # CI log parser — confirm before changing/removing.
        print '@@@STEP_WARNINGS@@@'
        logging.info('Timeout on installing %s on device %s', apk_path,
                     self._device)

        if reboots_left <= 0:
          raise Exception('Install timed out')

        # Force a hard reboot on last attempt
        self.Reboot(full_reboot=(reboots_left == 1))
        reboots_left -= 1

  def MakeSystemFolderWritable(self):
    """Remounts the /system folder rw."""
    out = self._adb.SendCommand('remount')
    if out.strip() != 'remount succeeded':
      raise errors.MsgException('Remount failed: %s' % out)

  def RestartAdbdOnDevice(self):
    """Restarts adbd on the device by pushing and sourcing restart_adbd.sh."""
    logging.info('Restarting adbd on the device...')
    with DeviceTempFile(self, suffix=".sh") as temp_script_file:
      host_script_path = os.path.join(constants.DIR_SOURCE_ROOT,
                                      'build',
                                      'android',
                                      'pylib',
                                      'restart_adbd.sh')
      self._adb.Push(host_script_path, temp_script_file.name)
      self.RunShellCommand('. %s' % temp_script_file.name)
      # Restarting adbd drops the connection; block until it comes back.
      self._adb.SendCommand('wait-for-device')

  def RestartAdbServer(self):
    """Restart the adb server."""
    ret = self.KillAdbServer()
    if ret != 0:
      raise errors.MsgException('KillAdbServer: %d' % ret)

    ret = self.StartAdbServer()
    if ret != 0:
      raise errors.MsgException('StartAdbServer: %d' % ret)

  @staticmethod
  def KillAdbServer():
    """Kill adb server.

    Returns:
      0 once pgrep no longer finds an adb process; otherwise the last
      pgrep status after 3 polls.
    """
    adb_cmd = [constants.GetAdbPath(), 'kill-server']
    ret = cmd_helper.RunCmd(adb_cmd)
    retry = 0
    while retry < 3:
      ret, _ = cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb'])
      if ret != 0:
        # pgrep didn't find adb, kill-server succeeded.
        return 0
      retry += 1
      # Back off a little longer on each poll (1s, 2s, 3s).
      time.sleep(retry)
    return ret

  def StartAdbServer(self):
    """Start adb server.

    Returns:
      0 once pgrep finds an adb process and the device reconnects;
      otherwise the last pgrep status after 3 polls.
    """
    # NOTE(review): taskset pins the server to CPU 0 — presumably a
    # workaround for an adb affinity issue; confirm it is still needed.
    adb_cmd = ['taskset', '-c', '0', constants.GetAdbPath(), 'start-server']
    ret, _ = cmd_helper.GetCmdStatusAndOutput(adb_cmd)
    retry = 0
    while retry < 3:
      ret, _ = cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb'])
      if ret == 0:
        # pgrep found adb, start-server succeeded.
        # Waiting for device to reconnect before returning success.
        self._adb.SendCommand('wait-for-device')
        return 0
      retry += 1
      time.sleep(retry)
    return ret

  def WaitForSystemBootCompleted(self, wait_time):
    """Waits for targeted system's boot_completed flag to be set.

    Args:
      wait_time: time in seconds to wait

    Raises:
      WaitForResponseTimedOutError if wait_time elapses and flag still not
        set.
    """
    logging.info('Waiting for system boot completed...')
    self._adb.SendCommand('wait-for-device')
    # Now the device is there, but system not boot completed.
    # Query the sys.boot_completed flag with a basic command
    boot_completed = False
    attempts = 0
    wait_period = 5
    while not boot_completed and (attempts * wait_period) < wait_time:
      output = self.system_properties['sys.boot_completed']
      output = output.strip()
      if output == '1':
        boot_completed = True
      else:
        # If 'error: xxx' returned when querying the flag, it means
        # adb server lost the connection to the emulator, so restart the adb
        # server.
        if 'error:' in output:
          self.RestartAdbServer()
        time.sleep(wait_period)
        attempts += 1
    if not boot_completed:
      raise errors.WaitForResponseTimedOutError(
          'sys.boot_completed flag was not set after %s seconds' % wait_time)

  def WaitForSdCardReady(self, timeout_time):
    """Wait for the SD card ready before pushing data into it."""
    logging.info('Waiting for SD card ready...')
    sdcard_ready = False
    attempts = 0
    wait_period = 5
    external_storage = self.GetExternalStorage()
    # A non-empty `ls` of external storage is used as the readiness signal.
    while not sdcard_ready and attempts * wait_period < timeout_time:
      output = self.RunShellCommand('ls ' + external_storage)
      if output:
        sdcard_ready = True
      else:
        time.sleep(wait_period)
        attempts += 1
    if not sdcard_ready:
      raise errors.WaitForResponseTimedOutError(
          'SD card not ready after %s seconds' % timeout_time)

  def GetAndroidToolStatusAndOutput(self, command, lib_path=None, *args, **kw):
    """Runs a native Android binary, wrapping the command as necessary.

    This is a specialization of GetShellCommandStatusAndOutput, which is meant
    for running tools/android/ binaries and handle properly: (1) setting the
    lib path (for component=shared_library), (2) using the PIE wrapper on ICS.
    See crbug.com/373219 for more context.

    Args:
      command: String containing the command to send.
      lib_path: (optional) path to the folder containing the dependent libs.
      Same other arguments of GetCmdStatusAndOutput.
    """
    # The first time this command is run the device is inspected to check
    # whether a wrapper for running PIE executable is needed (only Android ICS)
    # or not. The results is cached, so the wrapper is pushed only once.
    if self._pie_wrapper is None:
      # None: did not check; '': did check and not needed; '/path': use /path.
      self._pie_wrapper = ''
      if self.GetBuildId().startswith('I'):  # Ixxxx = Android ICS.
        run_pie_dist_path = os.path.join(constants.GetOutDirectory(), 'run_pie')
        assert os.path.exists(run_pie_dist_path), 'Please build run_pie'
        # The PIE loader must be pushed manually (i.e. no PushIfNeeded) because
        # PushIfNeeded requires md5sum and md5sum requires the wrapper as well.
        adb_command = 'push %s %s' % (run_pie_dist_path, PIE_WRAPPER_PATH)
        assert _HasAdbPushSucceeded(self._adb.SendCommand(adb_command))
        self._pie_wrapper = PIE_WRAPPER_PATH

    if self._pie_wrapper:
      command = '%s %s' % (self._pie_wrapper, command)
    if lib_path:
      command = 'LD_LIBRARY_PATH=%s %s' % (lib_path, command)
    return self.GetShellCommandStatusAndOutput(command, *args, **kw)

  # It is tempting to turn this function into a generator, however this is not
  # possible without using a private (local) adb_shell instance (to ensure no
  # other command interleaves usage of it), which would defeat the main aim of
  # being able to reuse the adb shell instance across commands.
  def RunShellCommand(self, command, timeout_time=20, log_result=False):
    """Send a command to the adb shell and return the result.

    Args:
      command: String containing the shell command to send.
      timeout_time: Number of seconds to wait for command to respond before
        retrying, used by AdbInterface.SendShellCommand.
      log_result: Boolean to indicate whether we should log the result of the
        shell command.

    Returns:
      list containing the lines of output received from running the command
    """
    self._LogShell(command)
    # The whole command is single-quoted before being sent; embedded single
    # quotes are escaped with the standard '\'' shell idiom.
    if "'" in command:
      command = command.replace('\'', '\'\\\'\'')
    result = self._adb.SendShellCommand(
        "'%s'" % command, timeout_time).splitlines()
    # TODO(b.kelemen): we should really be able to drop the stderr of the
    # command or raise an exception based on what the caller wants.
    result = [ l for l in result if not l.startswith('WARNING') ]
    if ['error: device not found'] == result:
      raise errors.DeviceUnresponsiveError('device not found')
    if log_result:
      self._LogShell('\n'.join(result))
    return result

  def GetShellCommandStatusAndOutput(self, command, timeout_time=20,
                                     log_result=False):
    """See RunShellCommand() above.

    Returns:
      The tuple (exit code, list of output lines).
    """
    # '%' is appended as a sentinel so the exit status can be found even when
    # the command's own output does not end with a newline.
    lines = self.RunShellCommand(
        command + '; echo %$?', timeout_time, log_result)
    last_line = lines[-1]
    status_pos = last_line.rfind('%')
    assert status_pos >= 0
    status = int(last_line[status_pos + 1:])
    if status_pos == 0:
      # The sentinel was on its own line; drop it entirely.
      lines = lines[:-1]
    else:
      # The sentinel shared a line with real output; keep the prefix.
      lines = lines[:-1] + [last_line[:status_pos]]
    return (status, lines)

  def KillAll(self, process, signum=9, with_su=False):
    """Android version of killall, connected via adb.

    Args:
      process: name of the process to kill off.
      signum: signal to use, 9 (SIGKILL) by default.
      with_su: wether or not to use su to kill the processes.

    Returns:
      the number of processes killed
    """
    pids = self.ExtractPid(process)
    if pids:
      cmd = 'kill -%d %s' % (signum, ' '.join(pids))
      if with_su:
        self.RunShellCommandWithSU(cmd)
      else:
        self.RunShellCommand(cmd)
    return len(pids)

  def KillAllBlocking(self, process, timeout_sec, signum=9, with_su=False):
    """Blocking version of killall, connected via adb.

    This waits until no process matching the corresponding name appears in ps'
    output anymore.

    Args:
      process: name of the process to kill off
      timeout_sec: the timeout in seconds
      signum: same as |KillAll|
      with_su: same as |KillAll|
    Returns:
      the number of processes killed
    """
    processes_killed = self.KillAll(process, signum=signum, with_su=with_su)
    if processes_killed:
      elapsed = 0
      wait_period = 0.1
      # Note that this doesn't take into account the time spent in ExtractPid().
      while self.ExtractPid(process) and elapsed < timeout_sec:
        time.sleep(wait_period)
        elapsed += wait_period
      if elapsed >= timeout_sec:
        # NOTE(review): ExtractPid() returns a list elsewhere in this file
        # (see KillAll above), so int - list would raise TypeError here;
        # this likely intends len(self.ExtractPid(process)). Confirm against
        # ExtractPid's definition before fixing.
        return processes_killed - self.ExtractPid(process)
    return processes_killed

  @staticmethod
  def _GetActivityCommand(package, activity, wait_for_completion, action,
                          category, data, extras, trace_file_name, force_stop,
                          flags):
    """Creates command to start |package|'s activity on the device.

    Args - as for StartActivity

    Returns:
      the command to run on the target to start the activity
    """
    cmd = 'am start -a %s' % action
    if force_stop:
      cmd += ' -S'
    if wait_for_completion:
      cmd += ' -W'
    if category:
      cmd += ' -c %s' % category
    if package and activity:
      cmd += ' -n %s/%s' % (package, activity)
    if data:
      cmd += ' -d "%s"' % data
    if extras:
      for key in extras:
        value = extras[key]
        # bool is tested before int on purpose: bool is a subclass of int,
        # so the int branch would otherwise swallow booleans.
        if isinstance(value, str):
          cmd += ' --es'
        elif isinstance(value, bool):
          cmd += ' --ez'
        elif isinstance(value, int):
          cmd += ' --ei'
        else:
          raise NotImplementedError(
              'Need to teach StartActivity how to pass %s extras' % type(value))
        cmd += ' %s %s' % (key, value)
    if trace_file_name:
      cmd += ' --start-profiler ' + trace_file_name
    if flags:
      cmd += ' -f %s' % flags
    return cmd

  def StartActivity(self, package, activity, wait_for_completion=False,
                    action='android.intent.action.VIEW',
                    category=None, data=None,
                    extras=None, trace_file_name=None,
                    force_stop=False, flags=None):
    """Starts |package|'s activity on the device.

    Args:
      package: Name of package to start (e.g. 'com.google.android.apps.chrome').
      activity: Name of activity (e.g. '.Main' or
        'com.google.android.apps.chrome.Main').
      wait_for_completion: wait for the activity to finish launching (-W flag).
      action: string (e.g. "android.intent.action.MAIN"). Default is VIEW.
      category: string (e.g. "android.intent.category.HOME")
      data: Data string to pass to activity (e.g. 'http://www.example.com/').
      extras: Dict of extras to pass to activity. Values are significant.
      trace_file_name: If used, turns on and saves the trace to this file name.
      force_stop: force stop the target app before starting the activity (-S
        flag).
    Returns:
      The output of the underlying command as a list of lines.
    """
    cmd = self._GetActivityCommand(package, activity, wait_for_completion,
                                   action, category, data, extras,
                                   trace_file_name, force_stop, flags)
    return self.RunShellCommand(cmd)

  def StartActivityTimed(self, package, activity, wait_for_completion=False,
                         action='android.intent.action.VIEW',
                         category=None, data=None,
                         extras=None, trace_file_name=None,
                         force_stop=False, flags=None):
    """Starts |package|'s activity on the device, returning the start time

    Args - as for StartActivity

    Returns:
      A tuple containing:
        - the output of the underlying command as a list of lines, and
        - a timestamp string for the time at which the activity started
    """
    cmd = self._GetActivityCommand(package, activity, wait_for_completion,
                                   action, category, data, extras,
                                   trace_file_name, force_stop, flags)
    self.StartMonitoringLogcat()
    # 'log starting activity' writes a marker into logcat just before the
    # activity launches; its logcat timestamp is taken as the start time.
    out = self.RunShellCommand('log starting activity; ' + cmd)
    activity_started_re = re.compile('.*starting activity.*')
    m = self.WaitForLogMatch(activity_started_re, None)
    assert m
    start_line = m.group(0)
    return (out, GetLogTimestamp(start_line, self.GetDeviceYear()))

  def StartCrashUploadService(self, package):
    # TODO(frankf): We really need a python wrapper around Intent
    # to be shared with StartActivity/BroadcastIntent.
    cmd = (
        'am startservice -a %s.crash.ACTION_FIND_ALL -n '
        '%s/%s.crash.MinidumpUploadService' %
        (constants.PACKAGE_INFO['chrome'].package,
         package,
         constants.PACKAGE_INFO['chrome'].package))
    am_output = self.RunShellCommandWithSU(cmd)
    assert am_output and 'Starting' in am_output[-1], (
        'Service failed to start: %s' % am_output)
    # NOTE(review): fixed 15s sleep presumably gives the upload service time
    # to run; confirm whether a completion signal could replace it.
    time.sleep(15)

  def BroadcastIntent(self, package, intent, *args):
    """Send a broadcast intent.

    Args:
      package: Name of package containing the intent.
      intent: Name of the intent.
      args: Optional extra arguments for the intent.
    """
    cmd = 'am broadcast -a %s.%s %s' % (package, intent, ' '.join(args))
    self.RunShellCommand(cmd)

  def GoHome(self):
    """Tell the device to return to the home screen. Blocks until completion."""
    self.RunShellCommand('am start -W '
                         '-a android.intent.action.MAIN -c android.intent.category.HOME')

  def CloseApplication(self, package):
    """Attempt to close down the application, using increasing violence.

    Args:
      package: Name of the process to kill off, e.g.
        com.google.android.apps.chrome
    """
    self.RunShellCommand('am force-stop ' + package)

  def GetApplicationPath(self, package):
    """Get the installed apk path on the device for the given package.

    Args:
      package: Name of the package.

    Returns:
      Path to the apk on the device if it exists, None otherwise.
    """
    pm_path_output = self.RunShellCommand('pm path ' + package)
    # The path output contains anything if and only if the package
    # exists.
    if pm_path_output:
      # pm_path_output is of the form: "package:/path/to/foo.apk"
      return pm_path_output[0].split(':')[1]
    else:
      return None

  def ClearApplicationState(self, package):
    """Closes and clears all state for the given |package|."""
    # Check that the package exists before clearing it. Necessary because
    # calling pm clear on a package that doesn't exist may never return.
    pm_path_output = self.RunShellCommand('pm path ' + package)
    # The path output only contains anything if and only if the package exists.
    if pm_path_output:
      self.RunShellCommand('pm clear ' + package)

  def SendKeyEvent(self, keycode):
    """Sends keycode to the device.

    Args:
      keycode: Numeric keycode to send (see "enum" at top of file).
    """
    self.RunShellCommand('input keyevent %d' % keycode)

  def _RunMd5Sum(self, host_path, device_path):
    """Gets the md5sum of a host path and device path.

    Args:
      host_path: Path (file or directory) on the host.
      device_path: Path on the device.

    Returns:
      A tuple containing lists of the host and device md5sum results as
      created by _ParseMd5SumOutput().
    """
    md5sum_dist_path = os.path.join(constants.GetOutDirectory(),
                                    'md5sum_dist')
    assert os.path.exists(md5sum_dist_path), 'Please build md5sum.'
    md5sum_dist_mtime = os.stat(md5sum_dist_path).st_mtime
    # Re-push the md5sum binary only when the host copy's mtime changed since
    # the last push (mtime cached in _push_if_needed_cache).
    if (md5sum_dist_path not in self._push_if_needed_cache or
        self._push_if_needed_cache[md5sum_dist_path] != md5sum_dist_mtime):
      command = 'push %s %s' % (md5sum_dist_path, MD5SUM_DEVICE_FOLDER)
      assert _HasAdbPushSucceeded(self._adb.SendCommand(command))
      self._push_if_needed_cache[md5sum_dist_path] = md5sum_dist_mtime

    (_, md5_device_output) = self.GetAndroidToolStatusAndOutput(
        self._util_wrapper + ' ' + MD5SUM_DEVICE_PATH + ' ' + device_path,
        lib_path=MD5SUM_DEVICE_FOLDER,
        timeout_time=2 * 60)
    device_hash_tuples = _ParseMd5SumOutput(md5_device_output)
    assert os.path.exists(host_path), 'Local path not found %s' % host_path
    md5sum_output = cmd_helper.GetCmdOutput(
        [os.path.join(constants.GetOutDirectory(), 'md5sum_bin_host'),
         host_path])
    host_hash_tuples = _ParseMd5SumOutput(md5sum_output.splitlines())
    return (host_hash_tuples, device_hash_tuples)

  def GetFilesChanged(self, host_path, device_path, ignore_filenames=False):
    """Compares the md5sum of a host path against a device path.

    Note: Ignores extra files on the device.

    Args:
      host_path: Path (file or directory) on the host.
      device_path: Path on the device.
      ignore_filenames: If True only the file contents are considered when
        checking whether a file has changed, otherwise the relative path
        must also match.

    Returns:
      A list of tuples of the form (host_path, device_path) for files whose
      md5sums do not match.
    """

    # Md5Sum resolves symbolic links in path names so the calculation of
    # relative path names from its output will need the real path names of the
    # base directories. Having calculated these they are used throughout the
    # function since this makes us less subject to any future changes to Md5Sum.
    real_host_path = os.path.realpath(host_path)
    real_device_path = self.RunShellCommand('realpath "%s"' % device_path)[0]

    host_hash_tuples, device_hash_tuples = self._RunMd5Sum(
        real_host_path, real_device_path)

    if len(host_hash_tuples) > len(device_hash_tuples):
      logging.info('%s files do not exist on the device' %
                   (len(host_hash_tuples) - len(device_hash_tuples)))

    host_rel = [(os.path.relpath(os.path.normpath(t.path), real_host_path),
                 t.hash)
                for t in host_hash_tuples]

    if os.path.isdir(real_host_path):
      def RelToRealPaths(rel_path):
        return (os.path.join(real_host_path, rel_path),
                os.path.join(real_device_path, rel_path))
    else:
      # For a single file the md5sum output yields exactly one entry and the
      # real paths are used directly.
      assert len(host_rel) == 1
      def RelToRealPaths(_):
        return (real_host_path, real_device_path)

    if ignore_filenames:
      # If we are ignoring file names, then we want to push any file for which
      # a file with an equivalent MD5 sum does not exist on the device.
      device_hashes = set([h.hash for h in device_hash_tuples])
      ShouldPush = lambda p, h: h not in device_hashes
    else:
      # Otherwise, we want to push any file on the host for which a file with
      # an equivalent MD5 sum does not exist at the same relative path on the
      # device.
      device_rel = dict([(os.path.relpath(os.path.normpath(t.path),
                                          real_device_path),
                          t.hash)
                         for t in device_hash_tuples])
      ShouldPush = lambda p, h: p not in device_rel or h != device_rel[p]

    return [RelToRealPaths(path) for path, host_hash in host_rel
            if ShouldPush(path, host_hash)]

  def PushIfNeeded(self, host_path, device_path):
    """Pushes |host_path| to |device_path|.

    Works for files and directories. This method skips copying any paths in
    |test_data_paths| that already exist on the device with the same hash.

    All pushed files can be removed by calling RemovePushedFiles().
    """
    MAX_INDIVIDUAL_PUSHES = 50
    if not os.path.exists(host_path):
      raise device_errors.CommandFailedError(
          'Local path not found %s' % host_path, device=str(self))

    # See if the file on the host changed since the last push (if any) and
    # return early if it didn't. Note that this shortcut assumes that the tests
    # on the device don't modify the files.
    if not os.path.isdir(host_path):
      if host_path in self._push_if_needed_cache:
        host_path_mtime = self._push_if_needed_cache[host_path]
        if host_path_mtime == os.stat(host_path).st_mtime:
          return

    size = host_utils.GetRecursiveDiskUsage(host_path)
    self._pushed_files.append(device_path)
    self._potential_push_size += size

    if os.path.isdir(host_path):
      self.RunShellCommand('mkdir -p "%s"' % device_path)

    changed_files = self.GetFilesChanged(host_path, device_path)
    logging.info('Found %d files that need to be pushed to %s',
                 len(changed_files), device_path)
    if not changed_files:
      return

    def Push(host, device):
      # NOTE: We can't use adb_interface.Push() because it hardcodes a timeout
      # of 60 seconds which isn't sufficient for a lot of users of this method.
      push_command = 'push %s %s' % (host, device)
      self._LogShell(push_command)

      # Retry push with increasing backoff if the device is busy.
      retry = 0
      while True:
        output = self._adb.SendCommand(push_command, timeout_time=30 * 60)
        if _HasAdbPushSucceeded(output):
          if not os.path.isdir(host_path):
            # Record the mtime so the early-return shortcut above can skip
            # this file next time.
            self._push_if_needed_cache[host] = os.stat(host).st_mtime
          return
        if retry < 3:
          retry += 1
          wait_time = 5 * retry
          logging.error('Push failed, retrying in %d seconds: %s' %
                        (wait_time, output))
          time.sleep(wait_time)
        else:
          raise Exception('Push failed: %s' % output)

    diff_size = 0
    if len(changed_files) <= MAX_INDIVIDUAL_PUSHES:
      diff_size = sum(host_utils.GetRecursiveDiskUsage(f[0])
                      for f in changed_files)

    # TODO(craigdh): Replace this educated guess with a heuristic that
    # approximates the push time for each method.
    if len(changed_files) > MAX_INDIVIDUAL_PUSHES or diff_size > 0.5 * size:
      # One bulk push of the whole tree is assumed cheaper than many small
      # pushes once the diff exceeds half the total size (or the file count
      # cap).
      self._actual_push_size += size
      Push(host_path, device_path)
    else:
      for f in changed_files:
        Push(f[0], f[1])
      self._actual_push_size += diff_size

  def GetPushSizeInfo(self):
    """Get total size of pushes to the device done via PushIfNeeded()

    Returns:
      A tuple:
        1. Total size of push requests to PushIfNeeded (MB)
        2. Total size that was actually pushed (MB)
    """
    return (self._potential_push_size, self._actual_push_size)

  def GetFileContents(self, filename, log_result=False):
    """Gets contents from the file specified by |filename|."""
    return self.RunShellCommand('cat "%s" 2>/dev/null' % filename,
                                log_result=log_result)

  def SetFileContents(self, filename, contents):
    """Writes |contents| to the file specified by |filename|."""
    with tempfile.NamedTemporaryFile() as f:
      f.write(contents)
      # Flush before pushing so adb sees the full contents on disk.
      f.flush()
      self._adb.Push(f.name, filename)

  def RunShellCommandWithSU(self, command, timeout_time=20, log_result=False):
    """Runs |command| on the device via 'su -c'. See RunShellCommand()."""
    return self.RunShellCommand('su -c %s' % command, timeout_time, log_result)

  def CanAccessProtectedFileContents(self):
    """Returns True if Get/SetProtectedFileContents would work via "su" or adb
    shell running as root.

    Devices running user builds don't have adb root, but may provide "su" which
    can be used for accessing protected files.
    """
    return (self._GetProtectedFileCommandRunner() != None)

  def _GetProtectedFileCommandRunner(self):
    """Finds the best method to access protected files on the device.

    Returns:
      1. None when privileged files cannot be accessed on the device.
      2. Otherwise: A function taking a single parameter: a string with command
         line arguments. Running that function executes the command with
         the appropriate method.
    """
    # Result is computed once and cached; the initialized flag distinguishes
    # "not yet probed" from "probed, no access".
    if self._protected_file_access_method_initialized:
      return self._privileged_command_runner

    self._privileged_command_runner = None
    self._protected_file_access_method_initialized = True

    for cmd in [self.RunShellCommand, self.RunShellCommandWithSU]:
      # Get contents of the auxv vector for the init(8) process from a small
      # binary file that always exists on linux and is always read-protected.
      contents = cmd('cat /proc/1/auxv')
      # The leading 4 or 8-bytes of auxv vector is a_type. There are not many
      # reserved a_type values, hence byte 2 must always be '\0' for a realistic
      # auxv. See /usr/include/elf.h.
      if len(contents) > 0 and (contents[0][2] == '\0'):
        self._privileged_command_runner = cmd
        break
    return self._privileged_command_runner

  def GetProtectedFileContents(self, filename):
    """Gets contents from the protected file specified by |filename|.

    This is potentially less efficient than GetFileContents.

    Returns:
      The file's lines, or [] (with a warning logged) when no privileged
      access method is available.
    """
    command = 'cat "%s" 2> /dev/null' % filename
    command_runner = self._GetProtectedFileCommandRunner()
    if command_runner:
      return command_runner(command)
    else:
      logging.warning('Could not access protected file: %s' % filename)
      return []

  def SetProtectedFileContents(self, filename, contents):
    """Writes |contents| to the protected file specified by |filename|.

    This is less efficient than SetFileContents.
    """
    with DeviceTempFile(self) as temp_file:
      with DeviceTempFile(self, suffix=".sh") as temp_script:
        # Put the contents in a temporary file
        self.SetFileContents(temp_file.name, contents)
        # Create a script to copy the file contents to its final destination
        self.SetFileContents(temp_script.name,
                             'cat %s > %s' % (temp_file.name, filename))

        command = 'sh %s' % temp_script.name
        command_runner = self._GetProtectedFileCommandRunner()
        if command_runner:
          return command_runner(command)
        else:
          logging.warning(
              'Could not set contents of protected file: %s' % filename)


  def RemovePushedFiles(self):
    """Removes all files pushed with PushIfNeeded() from the device."""
    for p in self._pushed_files:
      self.RunShellCommand('rm -r %s' % p, timeout_time=2 * 60)

  def ListPathContents(self, path):
    """Lists files in all subdirectories of |path|.

    Args:
      path: The path to list.

    Returns:
      A dict of {"name": (size, lastmod), ...}.
    """
    # Example output:
    # /foo/bar:
    # -rw-r----- user group 102 2011-05-12 12:29:54.131623387 +0100 baz.txt
    # NOTE(review): the named groups below read '(?P' with no '<name>' part,
    # which is not valid regex syntax — the names (perms/user/group/size/...)
    # appear to have been stripped by the same mangling that broke this
    # file's line structure. Restore them from upstream before running.
    re_file = re.compile('^-(?P[^\s]+)\s+'
                         '(?P[^\s]+)\s+'
                         '(?P[^\s]+)\s+'
                         '(?P[^\s]+)\s+'
                         '(?P[^\s]+)\s+'
                         '(?P