1
0
mirror of https://github.com/dart-lang/sdk synced 2024-07-03 00:08:46 +00:00

Starting work on full GN build

This change:
- Sucks in gn binaries
- Sucks in a version of llvm that the GN build likes
- Adds tools/gn.py to invoke gn
- Adds a root BUILD.gn and .gn file
- Removes chrome boilerplate we don't need
- etc.

This lets us do a standalone build of the 'runtime'
target for x64, ia32, arm, arm64, mips, and the
simulators on Linux, and arm and arm64 on Android.

It does not include tcmalloc, and hasn't been tested
on Mac or Windows. That work and more cleanup of
chrome boilerplate will come in follow-up CLs.

R=johnmccutchan@google.com

Review URL: https://codereview.chromium.org/2350583002 .
This commit is contained in:
Zachary Anderson 2016-09-23 07:47:36 -07:00
parent f87e62ef14
commit 7e1b7e54d7
51 changed files with 2630 additions and 5653 deletions

14
.gn Normal file
View File

@ -0,0 +1,14 @@
# Copyright 2016 The Dart project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file is used by the gn meta-buildsystem to find the root of the source
# tree and to set startup options.
# The location of the build configuration file, executed by GN before any
# BUILD.gn file to set up the default toolchain and target defaults.
buildconfig = "//build/config/BUILDCONFIG.gn"
# The secondary source root is a parallel directory tree where
# GN build files are placed when they can not be placed directly
# in the source tree, e.g. for third party source trees.
secondary_source = "//build/secondary/"

21
BUILD.gn Normal file
View File

@ -0,0 +1,21 @@
# Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
# This target will be built if no target is specified when invoking ninja.
# This target will be built if no target is specified when invoking ninja.
# It only forwards to the ":runtime" group below.
group("default") {
deps = [
":runtime",
]
}
# Aggregates the standalone Dart VM binaries and the test/sample native
# extensions built from //runtime/bin.
group("runtime") {
deps = [
"//runtime/bin:dart",
# dart_bootstrap runs on the build machine during the build, so it is
# built with the host toolchain even when cross-compiling.
"//runtime/bin:dart_bootstrap($host_toolchain)",
"//runtime/bin:run_vm_tests",
"//runtime/bin:process_test",
"//runtime/bin:test_extension",
"//runtime/bin:sample_extension",
]
}

94
DEPS
View File

@ -30,8 +30,7 @@ vars = {
"gyp_rev": "@6ee91ad8659871916f9aa840d42e1513befdf638",
"co19_rev": "@d4767b4caea3c5828ad8e053cd051d44a59061af",
# Revisions of GN/Mojo/Flutter related dependencies.
"base_revision": "@672b04e54b937ec899429a6bd5409c5a6300d151",
# Revisions of GN related dependencies.
"buildtools_revision": "@565d04e8741429fb1b4f26d102f2c6c3b849edeb",
"gperftools_revision": "@7822b5b0b9fa7e016e1f6b46ea86f26f4691a457",
@ -45,7 +44,7 @@ vars = {
"barback_tag" : "@0.15.2+9",
"bazel_worker_tag": "@0.1.1",
"boolean_selector_tag" : "@1.0.2",
"boringssl_gen_rev": "@ef64e76254a12ee08a7142c825aa59ea32005958",
"boringssl_gen_rev": "@1e8e5da213d0d5b1d50fcc1356c4783091bcc20d",
"boringssl_rev" : "@8d343b44bbab829d1a28fdef650ca95f7db4412e",
"charcode_tag": "@1.1.0",
"chrome_rev" : "@19997",
@ -131,11 +130,7 @@ deps = {
Var("dart_root") + "/third_party/gyp":
Var('chromium_git') + '/external/gyp.git' + Var("gyp_rev"),
# Stuff needed for GN/Mojo/Flutter.
Var("dart_root") + "/base":
Var('chromium_git') + '/external/github.com/domokit/base'
+ Var('base_revision'),
# Stuff needed for GN build.
Var("dart_root") + "/buildtools":
Var('chromium_git') + '/chromium/buildtools.git' +
Var('buildtools_revision'),
@ -363,6 +358,83 @@ deps_os = {
# TODO(iposva): Move the necessary tools so that hooks can be run
# without the runtime being available.
hooks = [
# Pull GN binaries. This needs to be before running GYP below.
{
'name': 'gn_linux64',
'pattern': '.',
'action': [
'download_from_google_storage',
'--no_auth',
'--no_resume',
'--quiet',
'--platform=linux*',
'--bucket',
'chromium-gn',
'-s',
Var('dart_root') + '/buildtools/linux64/gn.sha1',
],
},
{
'name': 'gn_mac',
'pattern': '.',
'action': [
'download_from_google_storage',
'--no_auth',
'--no_resume',
'--quiet',
'--platform=darwin',
'--bucket',
'chromium-gn',
'-s',
Var('dart_root') + '/buildtools/mac/gn.sha1',
],
},
{
'name': 'gn_win',
'pattern': '.',
'action': [
'download_from_google_storage',
'--no_auth',
'--no_resume',
'--quiet',
'--platform=win*',
'--bucket',
'chromium-gn',
'-s',
Var('dart_root') + '/buildtools/win/gn.exe.sha1',
],
},
# Pull clang-format binaries using checked-in hashes.
{
'name': 'clang_format_linux',
'pattern': '.',
'action': [
'download_from_google_storage',
'--no_auth',
'--no_resume',
'--quiet',
'--platform=linux*',
'--bucket',
'chromium-clang-format',
'-s',
Var('dart_root') + '/buildtools/linux64/clang-format.sha1',
],
},
{
'name': 'clang_format_mac',
'pattern': '.',
'action': [
'download_from_google_storage',
'--no_auth',
'--no_resume',
'--quiet',
'--platform=darwin',
'--bucket',
'chromium-clang-format',
'-s',
Var('dart_root') + '/buildtools/mac/clang-format.sha1',
],
},
{
'name': 'd8_testing_binaries',
'pattern': '.',
@ -497,6 +569,12 @@ hooks = [
Var('dart_root') + "/third_party/clang.tar.gz.sha1",
],
},
{
# Pull clang if needed or requested via GYP_DEFINES.
'name': 'gn_clang',
'pattern': '.',
'action': ['python', 'sdk/tools/clang/scripts/update.py', '--if-needed'],
},
{
"pattern": ".",
"action": ["python", Var("dart_root") + "/tools/gyp_dart.py"],

View File

@ -2,433 +2,23 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/allocator.gni")
import("//build/config/chrome_build.gni")
import("//build/config/crypto.gni")
import("//build/config/features.gni")
import("//build/config/ui.gni")
import("//build/module_args/v8.gni")
declare_args() {
# When set, turns off the (normally-on) iterator debugging and related stuff
# that is normally turned on for Debug builds. These are generally useful for
# catching bugs but in some cases may cause conflicts or excessive slowness.
disable_iterator_debugging = false
# Set to true to not store any build metadata, e.g. ifdef out all __DATE__
# and __TIME__. Set to 0 to reenable the use of these macros in the code
# base. See http://crbug.com/314403.
#
# Continue to embed build meta data in Official builds, basically the
# time it was built.
# TODO(maruel): This decision should be revisited because having an
# official deterministic build has high value too but MSVC toolset can't
# generate anything deterministic with WPO enabled AFAIK.
dont_embed_build_metadata = !is_official_build
# Set to true to enable dcheck in Release builds.
dcheck_always_on = false
# Set to true to compile with the OpenGL ES 2.0 conformance tests.
internal_gles2_conform_tests = false
}
# TODO(brettw) Most of these should be removed. Instead of global feature
# flags, we should have more modular flags that apply only to a target and its
# dependents. For example, depending on the "x11" meta-target should define
# USE_X11 for all dependents so that everything that could use X11 gets the
# define, but anything that doesn't depend on X11 doesn't see it.
#
# For now we define these globally to match the current GYP build.
config("feature_flags") {
# TODO(brettw) this probably needs to be parameterized.
defines = [ "V8_DEPRECATION_WARNINGS" ] # Don't use deprecated V8 APIs anywhere.
if (cld_version > 0) {
defines += [ "CLD_VERSION=$cld_version" ]
}
if (enable_mdns) {
defines += [ "ENABLE_MDNS=1" ]
}
if (enable_notifications) {
defines += [ "ENABLE_NOTIFICATIONS" ]
}
if (enable_pepper_cdms) {
# TODO(brettw) should probably be "=1"
defines += [ "ENABLE_PEPPER_CDMS" ]
}
if (enable_browser_cdms) {
# TODO(brettw) should probably be "=1"
defines += [ "ENABLE_BROWSER_CDMS" ]
}
if (enable_plugins) {
defines += [ "ENABLE_PLUGINS=1" ]
}
if (enable_basic_printing || enable_print_preview) {
# Convenience define for ENABLE_BASIC_PRINTING || ENABLE_PRINT_PREVIEW.
defines += [ "ENABLE_PRINTING=1" ]
if (enable_basic_printing) {
# Enable basic printing support and UI.
defines += [ "ENABLE_BASIC_PRINTING=1" ]
}
if (enable_print_preview) {
# Enable printing with print preview.
# Can be defined without ENABLE_BASIC_PRINTING.
defines += [ "ENABLE_PRINT_PREVIEW=1" ]
}
}
if (enable_spellcheck) {
defines += [ "ENABLE_SPELLCHECK=1" ]
}
if (use_platform_spellchecker) {
defines += [ "USE_PLATFORM_SPELLCHECKER=1" ]
}
if (dont_embed_build_metadata) {
defines += [ "DONT_EMBED_BUILD_METADATA" ]
}
if (dcheck_always_on) {
defines += [ "DCHECK_ALWAYS_ON=1" ]
}
if (use_udev) {
# TODO(brettw) should probably be "=1".
defines += [ "USE_UDEV" ]
}
if (ui_compositor_image_transport) {
# TODO(brettw) should probably be "=1".
defines += [ "UI_COMPOSITOR_IMAGE_TRANSPORT" ]
}
if (use_ash) {
defines += [ "USE_ASH=1" ]
}
if (use_aura) {
defines += [ "USE_AURA=1" ]
}
if (use_glfw) {
defines += [ "USE_GLFW=1" ]
}
if (use_pango) {
defines += [ "USE_PANGO=1" ]
}
if (use_cairo) {
defines += [ "USE_CAIRO=1" ]
}
if (use_clipboard_aurax11) {
defines += [ "USE_CLIPBOARD_AURAX11=1" ]
}
if (use_default_render_theme) {
defines += [ "USE_DEFAULT_RENDER_THEME=1" ]
}
if (use_openssl) {
defines += [ "USE_OPENSSL=1" ]
}
if (use_openssl_certs) {
defines += [ "USE_OPENSSL_CERTS=1" ]
}
if (use_nss_certs) {
defines += [ "USE_NSS_CERTS=1" ]
}
if (use_ozone) {
defines += [ "USE_OZONE=1" ]
}
if (use_x11) {
defines += [ "USE_X11=1" ]
}
if (use_allocator != "tcmalloc") {
defines += [ "NO_TCMALLOC" ]
}
if (is_asan || is_lsan || is_tsan || is_msan || is_ios) {
defines += [
"MEMORY_TOOL_REPLACES_ALLOCATOR",
"MEMORY_SANITIZER_INITIAL_SIZE",
]
}
if (is_asan) {
defines += [ "ADDRESS_SANITIZER" ]
}
if (is_lsan) {
defines += [
"LEAK_SANITIZER",
"WTF_USE_LEAK_SANITIZER=1",
]
}
if (is_tsan) {
defines += [
"THREAD_SANITIZER",
"DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1",
"WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1",
]
}
if (is_msan) {
defines += [ "MEMORY_SANITIZER" ]
}
if (enable_webrtc) {
defines += [ "ENABLE_WEBRTC=1" ]
}
if (disable_ftp_support) {
defines += [ "DISABLE_FTP_SUPPORT=1" ]
}
if (!enable_nacl) {
defines += [ "DISABLE_NACL" ]
}
if (enable_extensions) {
defines += [ "ENABLE_EXTENSIONS=1" ]
}
if (enable_configuration_policy) {
defines += [ "ENABLE_CONFIGURATION_POLICY" ]
}
if (enable_task_manager) {
defines += [ "ENABLE_TASK_MANAGER=1" ]
}
if (enable_themes) {
defines += [ "ENABLE_THEMES=1" ]
}
if (enable_captive_portal_detection) {
defines += [ "ENABLE_CAPTIVE_PORTAL_DETECTION=1" ]
}
if (enable_session_service) {
defines += [ "ENABLE_SESSION_SERVICE=1" ]
}
if (enable_rlz) {
defines += [ "ENABLE_RLZ" ]
}
if (enable_plugin_installation) {
defines += [ "ENABLE_PLUGIN_INSTALLATION=1" ]
}
if (enable_app_list) {
defines += [ "ENABLE_APP_LIST=1" ]
}
if (enable_settings_app) {
defines += [ "ENABLE_SETTINGS_APP=1" ]
}
if (enable_supervised_users) {
defines += [ "ENABLE_SUPERVISED_USERS=1" ]
}
if (enable_service_discovery) {
defines += [ "ENABLE_SERVICE_DISCOVERY=1" ]
}
if (enable_autofill_dialog) {
defines += [ "ENABLE_AUTOFILL_DIALOG=1" ]
}
if (enable_wifi_bootstrapping) {
defines += [ "ENABLE_WIFI_BOOTSTRAPPING=1" ]
}
if (enable_image_loader_extension) {
defines += [ "IMAGE_LOADER_EXTENSION=1" ]
}
if (enable_remoting) {
defines += [ "ENABLE_REMOTING=1" ]
}
if (enable_google_now) {
defines += [ "ENABLE_GOOGLE_NOW=1" ]
}
if (enable_one_click_signin) {
defines += [ "ENABLE_ONE_CLICK_SIGNIN" ]
}
if (enable_hidpi) {
defines += [ "ENABLE_HIDPI=1" ]
}
if (enable_topchrome_md) {
defines += [ "ENABLE_TOPCHROME_MD=1" ]
}
if (proprietary_codecs) {
defines += [ "USE_PROPRIETARY_CODECS" ]
}
if (enable_hangout_services_extension) {
defines += [ "ENABLE_HANGOUT_SERVICES_EXTENSION=1" ]
}
if (v8_use_external_startup_data) {
defines += [ "V8_USE_EXTERNAL_STARTUP_DATA" ]
}
if (enable_background) {
defines += [ "ENABLE_BACKGROUND=1" ]
}
if (enable_pre_sync_backup) {
defines += [ "ENABLE_PRE_SYNC_BACKUP" ]
}
if (enable_video_hole) {
defines += [ "VIDEO_HOLE=1" ]
}
if (safe_browsing_mode == 1) {
defines += [ "FULL_SAFE_BROWSING" ]
defines += [ "SAFE_BROWSING_CSD" ]
defines += [ "SAFE_BROWSING_DB_LOCAL" ]
defines += [ "SAFE_BROWSING_SERVICE" ]
} else if (safe_browsing_mode == 2) {
defines += [ "MOBILE_SAFE_BROWSING" ]
defines += [ "SAFE_BROWSING_SERVICE" ]
} else if (safe_browsing_mode == 3) {
defines += [ "MOBILE_SAFE_BROWSING" ]
defines += [ "SAFE_BROWSING_DB_REMOTE" ]
defines += [ "SAFE_BROWSING_SERVICE" ]
}
if (is_official_build) {
defines += [ "OFFICIAL_BUILD" ]
}
if (is_chrome_branded) {
defines += [ "GOOGLE_CHROME_BUILD" ]
} else {
defines += [ "CHROMIUM_BUILD" ]
}
if (enable_media_router) {
defines += [ "ENABLE_MEDIA_ROUTER=1" ]
}
if (enable_webvr) {
defines += [ "ENABLE_WEBVR" ]
}
if (is_fnl) {
defines += [ "HAVE_SYS_QUEUE_H_=0" ]
}
}
# Debug/release ----------------------------------------------------------------
config("debug") {
defines = [
"_DEBUG",
"DYNAMIC_ANNOTATIONS_ENABLED=1",
"WTF_USE_DYNAMIC_ANNOTATIONS=1",
"DEBUG",
]
if (is_nacl) {
defines += [ "DYNAMIC_ANNOTATIONS_PREFIX=NACL_" ]
}
if (is_win) {
if (disable_iterator_debugging) {
# Iterator debugging is enabled by the compiler on debug builds, and we
# have to tell it to turn it off.
defines += [ "_HAS_ITERATOR_DEBUGGING=0" ]
}
} else if (is_linux && !is_android && current_cpu == "x64" &&
!disable_iterator_debugging) {
# Enable libstdc++ debugging facilities to help catch problems early, see
# http://crbug.com/65151 .
# TODO(phajdan.jr): Should we enable this for all of POSIX?
defines += [ "_GLIBCXX_DEBUG=1" ]
}
}
config("release") {
defines = [ "NDEBUG" ]
# Sanitizers.
# TODO(GYP) The GYP build has "release_valgrind_build == 0" for this
# condition. When Valgrind is set up, we need to do the same here.
if (is_tsan) {
defines += [
"DYNAMIC_ANNOTATIONS_ENABLED=1",
"WTF_USE_DYNAMIC_ANNOTATIONS=1",
]
} else {
defines += [ "NVALGRIND" ]
if (!is_nacl) {
# NaCl always enables dynamic annotations. Currently this value is set to
# 1 for all .nexes.
defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=0" ]
}
}
defines = [
"NDEBUG"
]
}
# Default libraries ------------------------------------------------------------
# This config defines the default libraries applied to all targets.
config("default_libs") {
if (is_win) {
# TODO(brettw) this list of defaults should probably be smaller, and
# instead the targets that use the less common ones (e.g. wininet or
# winspool) should include those explicitly.
libs = [
"advapi32.lib",
"comdlg32.lib",
"dbghelp.lib",
"delayimp.lib",
"dnsapi.lib",
"gdi32.lib",
"kernel32.lib",
"msimg32.lib",
"odbc32.lib",
"odbccp32.lib",
"ole32.lib",
"oleaut32.lib",
"psapi.lib",
"shell32.lib",
"shlwapi.lib",
"user32.lib",
"usp10.lib",
"uuid.lib",
"version.lib",
"wininet.lib",
"winmm.lib",
"winspool.lib",
"ws2_32.lib",
# Please don't add more stuff here. We should actually be making this
# list smaller, since all common things should be covered. If you need
# some extra libraries, please just add a libs = [ "foo.lib" ] to your
# target that needs it.
]
} else if (is_android) {
# Android uses -nostdlib so we need to add even libc here.
libs = [
# TODO(brettw) write a version of this, hopefully we can express this
# without forking out to GCC just to get the library name. The android
# toolchain directory should probably be extracted into a .gni file that
# this file and the android toolchain .gn file can share.
# # Manually link the libgcc.a that the cross compiler uses.
# '<!(<(android_toolchain)/*-gcc -print-libgcc-file-name)',
"c",
"dl",
"m",
]
} else if (is_mac) {
libs = [
"AppKit.framework",
"ApplicationServices.framework",
"Carbon.framework",
"CoreFoundation.framework",
"Foundation.framework",
"IOKit.framework",
"Security.framework",
"OpenGL.framework",
]
} else if (is_ios) {
# The libraries listed here will be specified for both the target and the
# host. Only the common ones should be listed here.
libs = [
"CoreFoundation.framework",
"CoreGraphics.framework",
"CoreText.framework",
"Foundation.framework",
]
} else if (is_linux) {
libs = [ "dl" ]
}
}
# Add this config to your target to enable precompiled headers.
#
# On Windows, precompiled headers are done on a per-target basis. If you have
# just a couple of files, the time it takes to precompile (~2 seconds) can
# actually be longer than the time saved. On a Z620, a 100 file target compiles
# about 2 seconds faster with precompiled headers, with greater savings for
# larger targets.
#
# Recommend precompiled headers for targets with more than 50 .cc files.
config("precompiled_headers") {
# TODO(brettw) enable this when GN support in the binary has been rolled.
#if (is_win) {
if (false) {
# This is a string rather than a file GN knows about. It has to match
# exactly what's in the /FI flag below, and what might appear in the source
# code in quotes for an #include directive.
precompiled_header = "build/precompile.h"
# This is a file that GN will compile with the above header. It will be
# implicitly added to the sources (potentially multiple times, with one
# variant for each language used in the target).
precompiled_source = "//build/precompile.cc"
# Force include the header.
cflags = [ "/FI$precompiled_header" ]
}
# Product configuration: a release-style build (NDEBUG) with PRODUCT also
# defined so the VM can compile out development-only features.
config("product") {
defines = [
"NDEBUG",
"PRODUCT",
]
}

View File

@ -38,17 +38,8 @@ if (target_os == "") {
target_os = host_os
}
if (target_cpu == "") {
if (target_os == "android") {
# If we're building for Android, we should assume that we want to
# build for ARM by default, not the host_cpu (which is likely x64).
# This allows us to not have to specify both target_os and target_cpu
# on the command line.
target_cpu = "arm"
} else {
target_cpu = host_cpu
}
}
assert(host_cpu != "")
assert(target_cpu != "")
if (current_cpu == "") {
current_cpu = target_cpu
@ -110,28 +101,23 @@ if (current_os == "") {
# - Don't call exec_script inside declare_args. This will execute the script
# even if the value is overridden, which is wasteful. See first bullet.
# There is no component build for the Dart VM, but build files in some
# dependencies check this.
is_component_build = false
declare_args() {
# How many symbols to include in the build. This affects the performance of
# the build since the symbols are large and dealing with them is slow.
# 2 means regular build with symbols.
# 1 means minimal symbols, usually enough for backtraces only.
# 0 means no symbols.
# -1 means auto-set (off in release, regular in debug).
symbol_level = -1
# Component build.
is_component_build = false
# Debug build.
is_debug = true
# Whether we're a traditional desktop unix.
is_desktop_linux = current_os == "linux" && current_os != "chromeos"
# Release build.
is_release = false
# Product build.
is_product = false
# Set to true when compiling with the Clang compiler. Typically this is used
# to configure warnings.
is_clang = current_os == "mac" || current_os == "ios" ||
current_os == "linux" || current_os == "chromeos"
is_clang = current_os == "mac" || current_os == "linux"
# Compile for Address Sanitizer to find memory bugs.
is_asan = false
@ -144,15 +130,6 @@ declare_args() {
# Compile for Thread Sanitizer to find threading bugs.
is_tsan = false
if (current_os == "chromeos") {
# Allows the target toolchain to be injected as arguments. This is needed
# to support the CrOS build system which supports per-build-configuration
# toolchains.
cros_use_custom_toolchain = false
}
# DON'T ADD MORE FLAGS HERE. Read the comment above.
}
# =============================================================================
@ -176,7 +153,6 @@ declare_args() {
if (current_os == "win") {
is_android = false
is_chromeos = false
is_fnl = false
is_ios = false
is_linux = false
is_mac = false
@ -186,7 +162,6 @@ if (current_os == "win") {
} else if (current_os == "mac") {
is_android = false
is_chromeos = false
is_fnl = false
is_ios = false
is_linux = false
is_mac = true
@ -196,60 +171,15 @@ if (current_os == "win") {
} else if (current_os == "android") {
is_android = true
is_chromeos = false
is_fnl = false
is_ios = false
is_linux = false
is_mac = false
is_nacl = false
is_posix = true
is_win = false
} else if (current_os == "chromeos") {
is_android = false
is_chromeos = true
is_fnl = false
is_ios = false
is_linux = true
is_mac = false
is_nacl = false
is_posix = true
is_win = false
} else if (current_os == "nacl") {
# current_os == "nacl" will be passed by the nacl toolchain definition.
# It is not set by default or on the command line. We treat it as a
# Posix variant.
is_android = false
is_chromeos = false
is_fnl = false
is_ios = false
is_linux = false
is_mac = false
is_nacl = true
is_posix = true
is_win = false
} else if (current_os == "ios") {
is_android = false
is_chromeos = false
is_fnl = false
is_ios = true
is_linux = false
is_mac = false
is_nacl = false
is_posix = true
is_win = false
} else if (current_os == "linux") {
is_android = false
is_chromeos = false
is_fnl = false
is_ios = false
is_linux = true
is_mac = false
is_nacl = false
is_posix = true
is_win = false
} else if (current_os == "fnl") {
is_android = false
is_chromeos = false
is_fnl = true
is_ios = false
is_linux = true
is_mac = false
@ -258,119 +188,6 @@ if (current_os == "win") {
is_win = false
}
# =============================================================================
# SOURCES FILTERS
# =============================================================================
#
# These patterns filter out platform-specific files when assigning to the
# sources variable. The magic variable |sources_assignment_filter| is applied
# to each assignment or appending to the sources variable and matches are
# automatically removed.
#
# Note that the patterns are NOT regular expressions. Only "*" and "\b" (path
# boundary = end of string or slash) are supported, and the entire string
# must match the pattern (so you need "*.cc" to match all .cc files, for
# example).
# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
# below.
sources_assignment_filter = []
if (!is_posix) {
sources_assignment_filter += [
"*_posix.h",
"*_posix.cc",
"*_posix_unittest.h",
"*_posix_unittest.cc",
"*\bposix/*",
]
}
if (!is_win) {
sources_assignment_filter += [
"*_win.cc",
"*_win.h",
"*_win_unittest.cc",
"*\bwin/*",
"*.def",
"*.rc",
]
}
if (!is_mac) {
sources_assignment_filter += [
"*_mac.h",
"*_mac.cc",
"*_mac.mm",
"*_mac_unittest.h",
"*_mac_unittest.cc",
"*_mac_unittest.mm",
"*\bmac/*",
"*_cocoa.h",
"*_cocoa.cc",
"*_cocoa.mm",
"*_cocoa_unittest.h",
"*_cocoa_unittest.cc",
"*_cocoa_unittest.mm",
"*\bcocoa/*",
]
}
if (!is_ios) {
sources_assignment_filter += [
"*_ios.h",
"*_ios.cc",
"*_ios.mm",
"*_ios_unittest.h",
"*_ios_unittest.cc",
"*_ios_unittest.mm",
"*\bios/*",
]
}
if (!is_mac && !is_ios) {
sources_assignment_filter += [ "*.mm" ]
}
if (!is_linux) {
sources_assignment_filter += [
"*_linux.h",
"*_linux.cc",
"*_linux_unittest.h",
"*_linux_unittest.cc",
"*\blinux/*",
]
}
if (!is_android) {
sources_assignment_filter += [
"*_android.h",
"*_android.cc",
"*_android_unittest.h",
"*_android_unittest.cc",
"*\bandroid/*",
]
}
if (!is_chromeos) {
sources_assignment_filter += [
"*_chromeos.h",
"*_chromeos.cc",
"*_chromeos_unittest.h",
"*_chromeos_unittest.cc",
"*\bchromeos/*",
]
}
# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
# below.
# Actually save this list.
#
# These patterns are executed for every file in the source tree of every run.
# Therefore, adding more patterns slows down the build for everybody. We should
# only add automatic patterns for configurations affecting hundreds of files
# across many projects in the tree.
#
# Therefore, we only add rules to this list corresponding to platforms on the
# Chromium waterfall. This is not for non-officially-supported platforms
# (FreeBSD, etc.) toolkits, (X11, GTK, etc.), or features. For these cases,
# write a conditional in the target to remove the file(s) from the list when
# your platform/toolkit/feature doesn't apply.
set_sources_assignment_filter(sources_assignment_filter)
# =============================================================================
# BUILD OPTIONS
# =============================================================================
@ -393,7 +210,6 @@ if (!is_clang && (is_asan || is_lsan || is_tsan || is_msan)) {
# Holds all configs used for making native executables and libraries, to avoid
# duplication in each target below.
_native_compiler_configs = [
"//build/config:feature_flags",
"//build/config/compiler:compiler",
"//build/config/compiler:compiler_arm_fpu",
"//build/config/compiler:chromium_code",
@ -413,18 +229,13 @@ if (is_win) {
if (is_posix) {
_native_compiler_configs += [
"//build/config/gcc:no_exceptions",
"//build/config/gcc:symbol_visibility_hidden",
]
}
if (is_fnl) {
_native_compiler_configs += [ "//build/config/fnl:sdk" ]
} else if (is_linux) {
if (is_linux) {
_native_compiler_configs += [ "//build/config/linux:sdk" ]
} else if (is_mac) {
_native_compiler_configs += [ "//build/config/mac:sdk" ]
} else if (is_ios) {
_native_compiler_configs += [ "//build/config/ios:sdk" ]
} else if (is_android) {
_native_compiler_configs += [ "//build/config/android:sdk" ]
}
@ -440,36 +251,18 @@ if (is_clang) {
if (is_debug) {
_native_compiler_configs += [ "//build/config:debug" ]
_default_optimization_config = "//build/config/compiler:no_optimize"
} else {
} else if (is_release) {
_native_compiler_configs += [ "//build/config:release" ]
_default_optimization_config = "//build/config/compiler:optimize"
} else {
assert(is_product)
_native_compiler_configs += [ "//build/config:product" ]
_default_optimization_config = "//build/config/compiler:optimize"
}
_native_compiler_configs += [ _default_optimization_config ]
# If it wasn't manually set, set to an appropriate default.
if (symbol_level == -1) {
# Linux is slowed by having symbols as part of the target binary, whereas
# Mac and Windows have them separate, so in Release Linux, default them off.
if (is_debug || !is_linux) {
symbol_level = 2
} else if (is_asan || is_lsan || is_tsan || is_msan) {
# Sanitizers require symbols for filename suppressions to work.
symbol_level = 1
} else {
symbol_level = 0
}
}
# Symbol setup.
if (symbol_level == 2) {
_default_symbols_config = "//build/config/compiler:symbols"
} else if (symbol_level == 1) {
_default_symbols_config = "//build/config/compiler:minimal_symbols"
} else if (symbol_level == 0) {
_default_symbols_config = "//build/config/compiler:no_symbols"
} else {
assert(false, "Bad value for symbol_level.")
}
_default_symbols_config = "//build/config/compiler:symbols"
_native_compiler_configs += [ _default_symbols_config ]
# Windows linker setup for EXEs and DLLs.
@ -486,8 +279,7 @@ if (is_win) {
}
# Executable defaults.
_executable_configs =
_native_compiler_configs + [ "//build/config:default_libs" ]
_executable_configs = _native_compiler_configs
if (is_win) {
_executable_configs += _windows_linker_configs
} else if (is_mac) {
@ -511,8 +303,7 @@ set_defaults("static_library") {
}
# Shared library defaults (also for components in component mode).
_shared_library_configs =
_native_compiler_configs + [ "//build/config:default_libs" ]
_shared_library_configs = _native_compiler_configs
if (is_win) {
_shared_library_configs += _windows_linker_configs
} else if (is_mac) {
@ -526,29 +317,13 @@ if (is_win) {
set_defaults("shared_library") {
configs = _shared_library_configs
}
if (is_component_build) {
set_defaults("component") {
configs = _shared_library_configs
}
}
# Source set defaults (also for components in non-component mode).
set_defaults("source_set") {
configs = _native_compiler_configs
}
if (!is_component_build) {
set_defaults("component") {
configs = _native_compiler_configs
}
}
# Test defaults.
set_defaults("test") {
if (is_android) {
configs = _shared_library_configs
} else {
configs = _executable_configs
}
set_defaults("component") {
configs = _native_compiler_configs
}
# ==============================================================================
@ -570,7 +345,6 @@ if (is_win) {
set_default_toolchain("$host_toolchain")
} else if (is_android) {
if (host_os == "linux") {
# Use clang for the x86/64 Linux host builds.
if (host_cpu == "x86" || host_cpu == "x64") {
host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
} else {
@ -594,234 +368,108 @@ if (is_win) {
host_toolchain = "//build/toolchain/linux:$host_cpu"
set_default_toolchain("//build/toolchain/linux:$current_cpu")
}
if (is_chromeos && cros_use_custom_toolchain) {
set_default_toolchain("//build/toolchain/cros:target")
}
if (is_fnl) {
set_default_toolchain("//build/toolchain/fnl:target")
}
} else if (is_mac) {
host_toolchain = "//build/toolchain/mac:clang_x64"
set_default_toolchain(host_toolchain)
} else if (is_ios) {
host_toolchain = "//build/toolchain/mac:clang_x64"
if (use_ios_simulator) {
set_default_toolchain("//build/toolchain/mac:ios_clang_x64")
} else {
set_default_toolchain("//build/toolchain/mac:ios_clang_arm")
}
} else if (is_nacl) {
# TODO(GYP): This will need to change when we get NaCl working
# on multiple platforms, but this whole block of code (how we define
# host_toolchain) needs to be reworked regardless to key off of host_os
# and host_cpu rather than the is_* variables.
host_toolchain = "//build/toolchain/linux:clang_x64"
}
# ==============================================================================
# COMPONENT SETUP
# ==============================================================================
# TODO(brettw) erase this once the built-in "component" function is removed.
if (is_component_build) {
component_mode = "shared_library"
} else {
component_mode = "source_set"
}
# Don't try to do component builds for the standalone Dart VM.
assert(!is_component_build)
component_mode = "source_set"
template("component") {
if (is_component_build) {
shared_library(target_name) {
# Configs will always be defined since we set_defaults for a component
# above. We want to use those rather than whatever came with the nested
# shared/static library inside the component.
configs = [] # Prevent list overwriting warning.
configs = invoker.configs
source_set(target_name) {
# See above.
configs = [] # Prevent list overwriting warning.
configs = invoker.configs
# The sources assignment filter will have already been applied when the
# code was originally executed. We don't want to apply it again, since
# the original target may have overridden it for some assignments.
set_sources_assignment_filter([])
# See above call.
set_sources_assignment_filter([])
if (defined(invoker.all_dependent_configs)) {
all_dependent_configs = invoker.all_dependent_configs
}
if (defined(invoker.allow_circular_includes_from)) {
allow_circular_includes_from = invoker.allow_circular_includes_from
}
if (defined(invoker.cflags)) {
cflags = invoker.cflags
}
if (defined(invoker.cflags_c)) {
cflags_c = invoker.cflags_c
}
if (defined(invoker.cflags_cc)) {
cflags_cc = invoker.cflags_cc
}
if (defined(invoker.cflags_objc)) {
cflags_objc = invoker.cflags_objc
}
if (defined(invoker.cflags_objcc)) {
cflags_objcc = invoker.cflags_objcc
}
if (defined(invoker.check_includes)) {
check_includes = invoker.check_includes
}
if (defined(invoker.data)) {
data = invoker.data
}
if (defined(invoker.data_deps)) {
data_deps = invoker.data_deps
}
if (defined(invoker.datadeps)) {
datadeps = invoker.datadeps
}
if (defined(invoker.defines)) {
defines = invoker.defines
}
# All shared libraries must have the sanitizer deps to properly link in
# asan mode (this target will be empty in other cases).
if (defined(invoker.deps)) {
deps = invoker.deps + [ "//build/config/sanitizers:deps" ]
} else {
deps = [
"//build/config/sanitizers:deps",
]
}
if (defined(invoker.direct_dependent_configs)) {
direct_dependent_configs = invoker.direct_dependent_configs
}
if (defined(invoker.forward_dependent_configs_from)) {
forward_dependent_configs_from = invoker.forward_dependent_configs_from
}
if (defined(invoker.include_dirs)) {
include_dirs = invoker.include_dirs
}
if (defined(invoker.ldflags)) {
ldflags = invoker.ldflags
}
if (defined(invoker.lib_dirs)) {
lib_dirs = invoker.lib_dirs
}
if (defined(invoker.libs)) {
libs = invoker.libs
}
if (defined(invoker.output_extension)) {
output_extension = invoker.output_extension
}
if (defined(invoker.output_name)) {
output_name = invoker.output_name
}
if (defined(invoker.public)) {
public = invoker.public
}
if (defined(invoker.public_configs)) {
public_configs = invoker.public_configs
}
if (defined(invoker.public_deps)) {
public_deps = invoker.public_deps
}
if (defined(invoker.sources)) {
sources = invoker.sources
}
if (defined(invoker.testonly)) {
testonly = invoker.testonly
}
if (defined(invoker.visibility)) {
visibility = invoker.visibility
}
if (defined(invoker.all_dependent_configs)) {
all_dependent_configs = invoker.all_dependent_configs
}
} else {
source_set(target_name) {
# See above.
configs = [] # Prevent list overwriting warning.
configs = invoker.configs
# See above call.
set_sources_assignment_filter([])
if (defined(invoker.all_dependent_configs)) {
all_dependent_configs = invoker.all_dependent_configs
}
if (defined(invoker.allow_circular_includes_from)) {
allow_circular_includes_from = invoker.allow_circular_includes_from
}
if (defined(invoker.cflags)) {
cflags = invoker.cflags
}
if (defined(invoker.cflags_c)) {
cflags_c = invoker.cflags_c
}
if (defined(invoker.cflags_cc)) {
cflags_cc = invoker.cflags_cc
}
if (defined(invoker.cflags_objc)) {
cflags_objc = invoker.cflags_objc
}
if (defined(invoker.cflags_objcc)) {
cflags_objcc = invoker.cflags_objcc
}
if (defined(invoker.check_includes)) {
check_includes = invoker.check_includes
}
if (defined(invoker.data)) {
data = invoker.data
}
if (defined(invoker.data_deps)) {
data_deps = invoker.data_deps
}
if (defined(invoker.datadeps)) {
datadeps = invoker.datadeps
}
if (defined(invoker.defines)) {
defines = invoker.defines
}
if (defined(invoker.deps)) {
deps = invoker.deps
}
if (defined(invoker.direct_dependent_configs)) {
direct_dependent_configs = invoker.direct_dependent_configs
}
if (defined(invoker.forward_dependent_configs_from)) {
forward_dependent_configs_from = invoker.forward_dependent_configs_from
}
if (defined(invoker.include_dirs)) {
include_dirs = invoker.include_dirs
}
if (defined(invoker.ldflags)) {
ldflags = invoker.ldflags
}
if (defined(invoker.lib_dirs)) {
lib_dirs = invoker.lib_dirs
}
if (defined(invoker.libs)) {
libs = invoker.libs
}
if (defined(invoker.output_extension)) {
output_extension = invoker.output_extension
}
if (defined(invoker.output_name)) {
output_name = invoker.output_name
}
if (defined(invoker.public)) {
public = invoker.public
}
if (defined(invoker.public_configs)) {
public_configs = invoker.public_configs
}
if (defined(invoker.public_deps)) {
public_deps = invoker.public_deps
}
if (defined(invoker.sources)) {
sources = invoker.sources
}
if (defined(invoker.testonly)) {
testonly = invoker.testonly
}
if (defined(invoker.visibility)) {
visibility = invoker.visibility
}
if (defined(invoker.allow_circular_includes_from)) {
allow_circular_includes_from = invoker.allow_circular_includes_from
}
if (defined(invoker.cflags)) {
cflags = invoker.cflags
}
if (defined(invoker.cflags_c)) {
cflags_c = invoker.cflags_c
}
if (defined(invoker.cflags_cc)) {
cflags_cc = invoker.cflags_cc
}
if (defined(invoker.cflags_objc)) {
cflags_objc = invoker.cflags_objc
}
if (defined(invoker.cflags_objcc)) {
cflags_objcc = invoker.cflags_objcc
}
if (defined(invoker.check_includes)) {
check_includes = invoker.check_includes
}
if (defined(invoker.data)) {
data = invoker.data
}
if (defined(invoker.data_deps)) {
data_deps = invoker.data_deps
}
if (defined(invoker.datadeps)) {
datadeps = invoker.datadeps
}
if (defined(invoker.defines)) {
defines = invoker.defines
}
if (defined(invoker.deps)) {
deps = invoker.deps
}
if (defined(invoker.direct_dependent_configs)) {
direct_dependent_configs = invoker.direct_dependent_configs
}
if (defined(invoker.forward_dependent_configs_from)) {
forward_dependent_configs_from = invoker.forward_dependent_configs_from
}
if (defined(invoker.include_dirs)) {
include_dirs = invoker.include_dirs
}
if (defined(invoker.ldflags)) {
ldflags = invoker.ldflags
}
if (defined(invoker.lib_dirs)) {
lib_dirs = invoker.lib_dirs
}
if (defined(invoker.libs)) {
libs = invoker.libs
}
if (defined(invoker.output_extension)) {
output_extension = invoker.output_extension
}
if (defined(invoker.output_name)) {
output_name = invoker.output_name
}
if (defined(invoker.public)) {
public = invoker.public
}
if (defined(invoker.public_configs)) {
public_configs = invoker.public_configs
}
if (defined(invoker.public_deps)) {
public_deps = invoker.public_deps
}
if (defined(invoker.sources)) {
sources = invoker.sources
}
if (defined(invoker.testonly)) {
testonly = invoker.testonly
}
if (defined(invoker.visibility)) {
visibility = invoker.visibility
}
}
}

View File

@ -1,16 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# TODO(GYP): Make tcmalloc work on win.
if (is_android || current_cpu == "mipsel" || is_mac || is_ios || is_asan ||
is_lsan || is_tsan || is_msan || is_win) {
_default_allocator = "none"
} else {
_default_allocator = "tcmalloc"
}
declare_args() {
# Memory allocator to use. Set to "none" to use default allocator.
use_allocator = _default_allocator
}

View File

@ -51,10 +51,8 @@ if (is_android) {
# Defines the name the Android build gives to the current host CPU
# architecture, which is different than the names GN uses.
if (host_cpu == "x64") {
if ((host_cpu == "x64") || (host_cpu == "x86")) {
android_host_arch = "x86_64"
} else if (host_cpu == "x86") {
android_host_arch = "x86"
} else {
assert(false, "Need Android toolchain support for your build CPU arch.")
}

View File

@ -1,19 +0,0 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
declare_args() {
# Selects the desired build flavor. Official builds get additional
# processing to prepare for release. Normally you will want to develop and
# test with this flag off.
is_official_build = false
# Select the desired branding flavor. False means normal Chromium branding,
# true means official Google Chrome branding (requires extra Google-internal
# resources).
is_chrome_branded = false
# Break chrome.dll into multple pieces based on process type. Only available
# on Windows.
is_multi_dll_chrome = is_win && !is_component_build
}

View File

@ -5,5 +5,5 @@
declare_args() {
# Indicates if the build should use the Chrome-specific plugins for enforcing
# coding guidelines, etc. Only used when compiling with Clang.
clang_use_chrome_plugins = is_clang && !is_nacl
clang_use_chrome_plugins = false
}

View File

@ -3,7 +3,6 @@
# found in the LICENSE file.
import("//build/config/android/config.gni")
import("//build/config/chrome_build.gni")
if (current_cpu == "arm") {
import("//build/config/arm.gni")
}
@ -21,39 +20,6 @@ import("//build/toolchain/ccache.gni")
import("//build/config/sanitizers/sanitizers.gni")
declare_args() {
# Normally, Android builds are lightly optimized, even for debug builds, to
# keep binary size down. Setting this flag to true disables such optimization
android_full_debug = false
# Whether to use the binary binutils checked into third_party/binutils.
# These are not multi-arch so cannot be used except on x86 and x86-64 (the
# only two architectures that are currently checked in). Turn this off when
# you are using a custom toolchain and need to control -B in cflags.
linux_use_bundled_binutils = is_linux && current_cpu == "x64"
# Compile in such a way as to enable profiling of the generated code. For
# example, don't omit the frame pointer and leave in symbols.
enable_profiling = false
# Compile in such a way as to make it possible for the profiler to unwind full
# stack frames. Setting this flag has a large effect on the performance of the
# generated code than just setting profiling, but gives the profiler more
# information to analyze.
# Requires profiling to be set to true.
enable_full_stack_frames_for_profiling = false
# Use gold for linking on 64-bit Linux only (on 32-bit it runs out of
# address space, and it doesn't support cross-compiling).
use_gold = is_linux && current_cpu == "x64"
# use_debug_fission: whether to use split DWARF debug info
# files. This can reduce link time significantly, but is incompatible
# with some utilities such as icecc and ccache. Requires gold and
# gcc >= 4.8 or clang.
# http://gcc.gnu.org/wiki/DebugFission
use_debug_fission = is_debug && !is_win && use_gold &&
linux_use_bundled_binutils && !use_ccache
if (is_win) {
# Whether the VS xtree header has been patched to disable warning 4702. If
# it has, then we don't need to disable 4702 (unreachable code warning).
@ -143,7 +109,6 @@ config("compiler") {
} else {
# Common GCC compiler flags setup.
# --------------------------------
cflags += [ "-fno-strict-aliasing" ] # See http://crbug.com/32204
common_flags = [
# Not exporting C++ inline functions can generally be applied anywhere
# so we do so here. Normal function visibility is controlled by
@ -153,18 +118,8 @@ config("compiler") {
cflags_cc += common_flags
cflags_objcc += common_flags
# Stack protection.
if (is_mac) {
cflags += [ "-fstack-protector-all" ]
} else if (is_linux) {
cflags += [
"-fstack-protector",
"--param=ssp-buffer-size=4",
]
}
# Linker warnings.
if (!(is_chromeos && current_cpu == "arm") && !is_mac && !is_ios) {
if ((current_cpu != "arm") && !is_mac) {
# TODO(jochen): Enable this on chromeos on arm. http://crbug.com/356580
ldflags += [ "-Wl,--fatal-warnings" ]
}
@ -217,11 +172,6 @@ config("compiler") {
"//buildtools/third_party/libc++abi/trunk/include",
]
}
if (is_fnl) {
# TODO(kulakowski) remove when fnl no longer uses gcc
cflags += [ "-Wno-maybe-uninitialized" ]
}
}
if (is_clang && is_debug) {
@ -237,7 +187,7 @@ config("compiler") {
cflags_objcc += extra_flags
}
if (is_clang && !is_nacl) {
if (is_clang) {
# This is here so that all files get recompiled after a clang roll and
# when turning clang on or off. (defines are passed via the command line,
# and build system rebuild things when their commandline changes). Nothing
@ -250,7 +200,7 @@ config("compiler") {
# Mac-specific compiler flags setup.
# ----------------------------------
if (is_mac || is_ios) {
if (is_mac) {
# These flags are shared between the C compiler and linker.
common_mac_flags = []
@ -310,6 +260,7 @@ config("compiler") {
]
}
} else if (current_cpu == "arm") {
cflags += [
"-march=$arm_arch",
"-mfloat-abi=$arm_float_abi",
@ -388,45 +339,7 @@ config("compiler") {
}
}
defines += [ "_FILE_OFFSET_BITS=64" ]
if (!is_android) {
defines += [
"_LARGEFILE_SOURCE",
"_LARGEFILE64_SOURCE",
]
}
# Omit unwind support in official builds to save space. We can use breakpad
# for these builds.
if (is_chrome_branded && is_official_build) {
cflags += [
"-fno-unwind-tables",
"-fno-asynchronous-unwind-tables",
]
defines += [ "NO_UNWIND_TABLES" ]
} else {
cflags += [ "-funwind-tables" ]
}
}
if (enable_profiling && !is_debug) {
# The GYP build spams this define into every compilation unit, as we do
# here, but it only appears to be used in base and a couple other places.
# TODO(abarth): Should we move this define closer to where it's used?
defines += [ "ENABLE_PROFILING" ]
cflags += [
"-fno-omit-frame-pointer",
"-g",
]
if (enable_full_stack_frames_for_profiling) {
cflags += [
"-fno-inline",
"-fno-optimize-sibling-calls",
]
}
cflags += [ "-funwind-tables" ]
}
# Linux/Android common flags setup.
@ -434,7 +347,6 @@ config("compiler") {
if (is_linux || is_android) {
cflags += [
"-fPIC",
"-pipe", # Use pipes for communicating between sub-processes. Faster.
]
ldflags += [
@ -443,9 +355,6 @@ config("compiler") {
"-Wl,-z,now",
"-Wl,-z,relro",
]
if (!using_sanitizer) {
ldflags += [ "-Wl,-z,defs" ]
}
}
# Linux-specific compiler flags setup.
@ -454,46 +363,6 @@ config("compiler") {
cflags += [ "-pthread" ]
ldflags += [ "-pthread" ]
}
if (use_gold) {
gold_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
root_build_dir)
ldflags += [
"-B$gold_path",
# Newer gccs and clangs support -fuse-ld, use the flag to force gold
# selection.
# gcc -- http://gcc.gnu.org/onlinedocs/gcc-4.8.0/gcc/Optimize-Options.html
"-fuse-ld=gold",
# Experimentation found that using four linking threads
# saved ~20% of link time.
# https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
# Only apply this to the target linker, since the host
# linker might not be gold, but isn't used much anyway.
# TODO(raymes): Disable threading because gold is frequently
# crashing on the bots: crbug.com/161942.
#"-Wl,--threads",
#"-Wl,--thread-count=4",
]
if (!is_asan && !is_msan && !is_lsan && !is_tsan) {
# TODO(brettw) common.gypi has this only for target toolset.
ldflags += [ "-Wl,--icf=all" ]
}
# TODO(thestig): Make this flag work with GN.
#if (!is_official_build && !is_chromeos && !(is_asan || is_lsan || is_tsan || is_msan)) {
# ldflags += [
# "-Wl,--detect-odr-violations",
# ]
#}
}
if (linux_use_bundled_binutils) {
binutils_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
root_build_dir)
cflags += [ "-B$binutils_path" ]
}
# Clang-specific compiler flags setup.
# ------------------------------------
@ -501,20 +370,6 @@ config("compiler") {
cflags += [ "-fcolor-diagnostics" ]
}
# C++11 compiler flags setup.
# ---------------------------
if (is_linux || is_android || is_nacl) {
# gnu++11 instead of c++11 is needed because some code uses typeof() (a
# GNU extension).
# TODO(thakis): Eventually switch this to c++11 instead,
# http://crbug.com/427584
cflags_cc += [ "-std=gnu++11" ]
} else if (!is_win) {
cc_std = [ "-std=c++11" ]
cflags_cc += cc_std
cflags_objcc += cc_std
}
# Android-specific flags setup.
# -----------------------------
if (is_android) {
@ -552,17 +407,10 @@ config("compiler") {
}
ldflags += [
"-Wl,--no-undefined",
# Don't allow visible symbols from libgcc or libc++ to be
# re-exported.
"-Wl,--exclude-libs=libgcc.a",
"-Wl,--exclude-libs=libc++_static.a",
# Don't allow visible symbols from libraries that contain
# assembly code with symbols that aren't hidden properly.
# http://crbug.com/448386
"-Wl,--exclude-libs=libvpx_assembly_arm.a",
]
if (current_cpu == "arm") {
ldflags += [
@ -584,7 +432,7 @@ config("compiler") {
}
config("compiler_arm_fpu") {
if (current_cpu == "arm" && !is_ios) {
if (current_cpu == "arm") {
cflags = [ "-mfpu=$arm_fpu" ]
}
}
@ -833,19 +681,11 @@ if (is_win) {
# TODO(abarth): Re-enable once https://github.com/domokit/mojo/issues/728
# is fixed.
# default_warning_flags += [ "-Wnewline-eof" ]
if (!is_nacl) {
# When compiling Objective-C, warns if a method is used whose
# availability is newer than the deployment target. This is not
# required when compiling Chrome for iOS.
default_warning_flags += [ "-Wpartial-availability" ]
}
}
if (gcc_version >= 48) {
default_warning_flags_cc += [
# See comment for -Wno-c++11-narrowing.
"-Wno-narrowing",
]
# When compiling Objective-C, warns if a method is used whose
# availability is newer than the deployment target. This is not
# required when compiling Chrome for iOS.
default_warning_flags += [ "-Wpartial-availability" ]
}
# Suppress warnings about ABI changes on ARM (Clang doesn't give this
@ -883,43 +723,6 @@ if (is_win) {
default_warning_flags += [ "-Wno-unused-local-typedefs" ]
}
}
if (is_clang) {
default_warning_flags += [
# This warns on using ints as initializers for floats in
# initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
# which happens in several places in chrome code. Not sure if
# this is worth fixing.
"-Wno-c++11-narrowing",
# Don't die on dtoa code that uses a char as an array index.
# This is required solely for base/third_party/dmg_fp/dtoa.cc.
# TODO(brettw) move this to that project then!
"-Wno-char-subscripts",
# Warns on switches on enums that cover all enum values but
# also contain a default: branch. Chrome is full of that.
"-Wno-covered-switch-default",
# Clang considers the `register` keyword as deprecated, but e.g.
# code generated by flex (used in angle) contains that keyword.
# http://crbug.com/255186
"-Wno-deprecated-register",
]
# NaCl's Clang compiler and Chrome's hermetic Clang compiler will almost
# always have different versions. Certain flags may not be recognized by
# one version or the other.
if (!is_nacl) {
# Flags NaCl does not recognize.
default_warning_flags += [
# TODO(hans): Get this cleaned up, http://crbug.com/428099
"-Wno-inconsistent-missing-override",
# TODO(thakis): Enable this, crbug.com/507717
"-Wno-shift-negative-value",
]
}
}
# chromium_code ---------------------------------------------------------------
#
@ -935,14 +738,8 @@ config("chromium_code") {
"-Wextra",
]
# In Chromium code, we define __STDC_foo_MACROS in order to get the
# C99 macros on Mac and Linux.
defines = [
"__STDC_CONSTANT_MACROS",
"__STDC_FORMAT_MACROS",
]
if (!using_sanitizer && (!is_linux || !is_clang || is_official_build)) {
defines = []
if (!using_sanitizer && (!is_linux || !is_clang)) {
# _FORTIFY_SOURCE isn't really supported by Clang now, see
# http://llvm.org/bugs/show_bug.cgi?id=16821.
# It seems to work fine with Ubuntu 12 headers though, so use it in
@ -975,37 +772,6 @@ config("no_chromium_code") {
]
}
if (is_linux) {
# Don't warn about ignoring the return value from e.g. close(). This is
# off by default in some gccs but on by default in others. BSD systems do
# not support this option, since they are usually using gcc 4.2.1, which
# does not have this flag yet.
cflags += [ "-Wno-unused-result" ]
}
if (is_clang) {
cflags += [
# TODO(mgiuca): Move this suppression into individual third-party
# libraries as required. http://crbug.com/505301.
"-Wno-overloaded-virtual",
# Lots of third-party libraries have unused variables. Instead of
# suppressing them individually, we just blanket suppress them here.
"-Wno-unused-variable",
]
}
if (is_linux || is_android) {
cflags += [
# Don't warn about printf format problems. This is off by default in gcc
# but on in Ubuntu's gcc(!).
"-Wno-format",
]
cflags_cc += [
# Don't warn about hash_map in third-party code.
"-Wno-deprecated",
]
}
cflags += default_warning_flags
cflags_cc += default_warning_flags_cc
}
@ -1029,34 +795,6 @@ config("no_rtti") {
}
}
# Warnings ---------------------------------------------------------------------
# This will generate warnings when using Clang if code generates exit-time
# destructors, which will slow down closing the program.
# TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
config("wexit_time_destructors") {
# TODO: Enable on Windows too, http://crbug.com/404525
if (is_clang && !is_win) {
cflags = [ "-Wexit-time-destructors" ]
}
}
# On Windows compiling on x64, VC will issue a warning when converting
# size_t to int because it will truncate the value. Our code should not have
# these warnings and one should use a static_cast or a checked_cast for the
# conversion depending on the case. However, a lot of code still needs to be
# fixed. Apply this config to such targets to disable the warning.
#
# Note that this can be applied regardless of platform and architecture to
# clean up the call sites. This will only apply the flag when necessary.
#
# TODO(jschuh): crbug.com/167187 fix this and delete this config.
config("no_size_t_to_int_warning") {
if (is_win && current_cpu == "x64") {
cflags = [ "/wd4267" ]
}
}
# Optimization -----------------------------------------------------------------
#
# Note that BUILDCONFIG.gn sets up a variable "default_optimization_config"
@ -1066,9 +804,9 @@ config("no_size_t_to_int_warning") {
# add back the one you want to override it with:
#
# configs -= default_optimization_config
# configs += [ "//build/config/compiler/optimize_max" ]
# configs += [ ":optimize_max" ]
# Shared settings for both "optimize" and "optimize_max" configs.
# Shared settings.
# IMPORTANT: On Windows "/O1" and "/O2" must go before the common flags.
if (is_win) {
common_optimize_on_cflags = [
@ -1100,25 +838,15 @@ if (is_win) {
common_optimize_on_ldflags = []
if (is_android) {
if (!using_sanitizer) {
common_optimize_on_cflags += [ "-fomit-frame-pointer" ]
}
# TODO(jdduke) Re-enable on mips after resolving linking
# issues with libc++ (crbug.com/456380).
if (current_cpu != "mipsel" && current_cpu != "mips64el") {
common_optimize_on_ldflags += [
# Warn in case of text relocations.
"-Wl,--warn-shared-textrel",
]
}
common_optimize_on_ldflags += [
# Warn in case of text relocations.
"-Wl,--warn-shared-textrel",
]
}
if (is_mac || is_ios) {
if (symbol_level == 2) {
# Mac dead code stripping requires symbols.
common_optimize_on_ldflags += [ "-Wl,-dead_strip" ]
}
if (is_mac) {
# Mac dead code stripping requires symbols.
common_optimize_on_ldflags += [ "-Wl,-dead_strip" ]
} else {
# Non-Mac Posix linker flags.
common_optimize_on_ldflags += [
@ -1143,10 +871,10 @@ config("optimize") {
# Favor size over speed, /O1 must be before the common flags. The GYP
# build also specifies /Os and /GF but these are implied by /O1.
cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
} else if (is_android || is_ios) {
} else if (is_android) {
cflags = [ "-Os" ] + common_optimize_on_cflags # Favor size over speed.
} else {
cflags = [ "-O2" ] + common_optimize_on_cflags
cflags = [ "-O3" ] + common_optimize_on_cflags
}
ldflags = common_optimize_on_ldflags
}
@ -1159,7 +887,7 @@ config("no_optimize") {
"/Ob0", # Disable all inlining (on by default).
"/RTC1", # Runtime checks for stack frame and uninitialized variables.
]
} else if (is_android && !android_full_debug) {
} else if (is_android) {
# On Android we kind of optimize some things that don't affect debugging
# much even when optimization is disabled to get the binary size down.
cflags = [
@ -1167,41 +895,9 @@ config("no_optimize") {
"-fdata-sections",
"-ffunction-sections",
]
if (!using_sanitizer) {
cflags += [ "-fomit-frame-pointer" ]
}
ldflags = common_optimize_on_ldflags
} else {
cflags = [ "-O0" ]
}
}
# Turns up the optimization level. On Windows, this implies whole program
# optimization and link-time code generation which is very expensive and should
# be used sparingly.
config("optimize_max") {
ldflags = common_optimize_on_ldflags
if (is_win) {
# Favor speed over size, /O2 must be before the common flags. The GYP
# build also specifies /Ot, /Oi, and /GF, but these are implied by /O2.
cflags = [ "/O2" ] + common_optimize_on_cflags
if (is_official_build) {
# TODO(GYP): TODO(dpranke): Should these only be on in an official
# build, or on all the time? For now we'll require official build so
# that the compile is clean.
cflags += [
"/GL", # Whole program optimization.
# Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
# Probably anything that this would catch that wouldn't be caught in a
# normal build isn't going to actually be a bug, so the incremental
# value of C4702 for PGO builds is likely very small.
"/wd4702",
]
ldflags += [ "/LTCG" ]
}
} else {
cflags = [ "-O2" ] + common_optimize_on_cflags
cflags = [ "-O1" ]
}
}
@ -1217,27 +913,9 @@ config("symbols") {
}
ldflags = [ "/DEBUG" ]
} else {
cflags = [ "-g2" ]
if (use_debug_fission) {
cflags += [ "-gsplit-dwarf" ]
}
}
}
config("minimal_symbols") {
if (is_win) {
# Linker symbols for backtraces only.
ldflags = [ "/DEBUG" ]
} else {
cflags = [ "-g1" ]
if (use_debug_fission) {
cflags += [ "-gsplit-dwarf" ]
}
}
}
config("no_symbols") {
if (!is_win) {
cflags = [ "-g0" ]
cflags = [
"-g3",
"-ggdb3",
]
}
}

View File

@ -1,29 +0,0 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file declares build flags for the SSL library configuration.
#
# TODO(brettw) this should probably be moved to src/crypto or somewhere, and
# the global build dependency on it should be removed.
#
# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't
# required. See the declare_args block of BUILDCONFIG.gn for advice on how
# to set up feature flags.
declare_args() {
# Use OpenSSL instead of NSS. This is used for all platforms but iOS. (See
# http://crbug.com/338886).
use_openssl = !is_ios
}
# True when we're using OpenSSL for representing certificates. When targeting
# Android, the platform certificate library is used for certificate
# verification. On other targets, this flag also enables OpenSSL for certificate
# verification, but this configuration is unsupported.
use_openssl_certs = is_android
# True if NSS is used for certificate verification. Note that this is
# independent from use_openssl. It is possible to use OpenSSL for the crypto
# library, but NSS for the platform certificate library.
use_nss_certs = false

View File

@ -1,204 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file contains Chrome-feature-related build flags (see ui.gni for
# UI-related ones). These should theoretically be moved to the build files of
# the features themselves.
#
# However, today we have many "bad" dependencies on some of these flags from,
# e.g. base, so they need to be global to match the GYP configuration. Also,
# anything that needs a grit define must be in either this file or ui.gni.
#
# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't
# required. See the declare_args block of BUILDCONFIG.gn for advice on how
# to set up feature flags.
import("//build/config/chrome_build.gni")
if (is_android) {
import("//build/config/android/config.gni")
}
declare_args() {
# Multicast DNS.
enable_mdns = is_win || is_linux
enable_plugins = !is_android && !is_ios
# Enables Native Client support.
# TODO(GYP): Get NaCl linking on other platforms.
# Also, see if we can always get rid of enable_nacl_untrusted and
# enable_pnacl and always build them if enable_nacl is true.
# The "is_nacl" part of the condition is needed to ensure that
# the untrusted code is built properly; arguably it should be
# guarded by "is_nacl" directly rather than enable_nacl_untrusted, but
# this will go away when Mac and Win are working and we can just use
# the commented out logic.
# Eventually we want this to be:
# enable_nacl = !is_ios && !is_android
enable_nacl = (is_linux && !is_chromeos && current_cpu == "x64") || is_nacl
enable_nacl_untrusted = enable_nacl
enable_pnacl = enable_nacl_untrusted
# If debug_devtools is set to true, JavaScript files for DevTools are stored
# as is and loaded from disk. Otherwise, a concatenated file is stored in
# resources.pak. It is still possible to load JS files from disk by passing
# --debug-devtools cmdline switch.
debug_devtools = false
# Enables WebRTC.
# TODO(GYP) make mac and android work.
enable_webrtc = !is_ios && !is_mac && !is_android
# Enables the Media Router.
enable_media_router = !is_ios && !is_android
# Enables proprietary codecs and demuxers; e.g. H264, MOV, AAC, and MP3.
# Android OS includes support for proprietary codecs regardless of building
# Chromium or Google Chrome. We also ship Google Chrome and Chromecast with
# proprietary codecs.
# TODO(GYP) The GYP build has || chromecast==1 for this:
proprietary_codecs = is_android || is_chrome_branded
enable_configuration_policy = true
# Enables support for background apps.
enable_background = !is_ios && !is_android
enable_captive_portal_detection = !is_android && !is_ios
# Enables use of the session service, which is enabled by default.
# Android stores them separately on the Java side.
enable_session_service = !is_android && !is_ios
enable_plugin_installation = is_win || is_mac
enable_app_list = !is_ios && !is_android
enable_supervised_users = !is_ios
enable_autofill_dialog = !is_ios
enable_google_now = !is_ios && !is_android
enable_one_click_signin = is_win || is_mac || (is_linux && !is_chromeos)
enable_remoting = !is_ios && !is_android
# Enable hole punching for the protected video.
enable_video_hole = is_android
# Enables browser side Content Decryption Modules. Required for embedders
# (e.g. Android and ChromeCast) that use a browser side CDM.
enable_browser_cdms = is_android
# Variable safe_browsing is used to control the build time configuration
# for safe browsing feature. Safe browsing can be compiled in 4 different
# levels: 0 disables it, 1 enables it fully, and 2 enables only UI and
# reporting features for use with Data Saver on Mobile, and 3 enables
# extended mobile protection via an external API. When 3 is fully deployed,
# it will replace 2.
if (is_android) {
safe_browsing_mode = 2
} else if (is_ios) {
safe_browsing_mode = 0
} else {
safe_browsing_mode = 1
}
}
# Additional dependent variables -----------------------------------------------
# Set the version of CLD.
# 0: Don't specify the version. This option is for the Finch testing.
# 1: Use only CLD1.
# 2: Use only CLD2.
if (is_android) {
cld_version = 1
} else {
cld_version = 2
}
# libudev usage. This currently only affects the content layer.
use_udev = is_linux
# Enable the spell checker.
enable_spellcheck = !is_android
# Use the operating system's spellchecker rather than hunspell.
use_platform_spellchecker = is_android || is_mac
enable_pepper_cdms = enable_plugins && (is_linux || is_mac || is_win)
# Enable basic printing support and UI.
enable_basic_printing = !is_chromeos
# Enable printing with print preview. It does not imply
# enable_basic_printing. It's possible to build Chrome with preview only.
enable_print_preview = !is_android
# The seccomp-bpf sandbox is only supported on three architectures
# currently.
# Do not disable seccomp_bpf anywhere without talking to
# security@chromium.org!
use_seccomp_bpf = (is_linux || is_android) &&
(current_cpu == "x86" || current_cpu == "x64" ||
current_cpu == "arm" || current_cpu == "mipsel")
# Enable notifications everywhere except iOS.
enable_notifications = !is_ios
# TODO(brettw) this should be moved to net and only dependents get this define.
disable_ftp_support = is_ios
enable_web_speech = !is_android && !is_ios
use_dbus = is_linux
enable_extensions = !is_android && !is_ios
enable_task_manager = !is_ios && !is_android
use_cups = is_desktop_linux || is_mac
enable_themes = !is_android && !is_ios
# TODO(scottmg) remove this when we've fixed printing.
win_pdf_metafile_for_printing = true
# Whether we are using the rlz library or not. Platforms like Android send
# rlz codes for searches but do not use the library.
enable_rlz_support = is_win || is_mac || is_ios || is_chromeos
enable_rlz = is_chrome_branded && enable_rlz_support
enable_settings_app = enable_app_list && !is_chromeos
enable_service_discovery = enable_mdns || is_mac
enable_wifi_bootstrapping = is_win || is_mac
# Image loader extension is enabled on ChromeOS only.
enable_image_loader_extension = is_chromeos
# Chrome OS: whether to also build the upcoming version of
# ChromeVox, which can then be enabled via a command-line switch.
enable_chromevox_next = false
# Use brlapi from brltty for braille display support.
use_brlapi = is_chromeos
# Option controlling the use of GConf (the classic GNOME configuration
# system).
# TODO(GYP) also require !embedded to enable.
use_gconf = is_linux && !is_chromeos
# Hangout services is an extension that adds extra features to Hangouts.
# For official GYP builds, this flag is set, it will likely need to be
# parameterized in the future for a similar use.
enable_hangout_services_extension = false
# Whether to back up data before sync.
enable_pre_sync_backup = is_win || is_mac || (is_linux && !is_chromeos)
# WebVR support disabled until platform implementations have been added
enable_webvr = false

View File

@ -2,10 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/config/linux/pkg_config.gni")
import("//build/config/features.gni")
import("//build/config/sysroot.gni")
import("//build/config/ui.gni")
config("sdk") {
if (sysroot != "") {
@ -22,41 +19,3 @@ config("sdk") {
"value") ]
}
}
# Link directly against the system fontconfig library.
config("fontconfig") {
  libs = [ "fontconfig" ]
}

# FreeType flags are resolved through pkg-config rather than a bare -l flag.
pkg_config("freetype2") {
  packages = [ "freetype2" ]
}

# Core X11 client library plus the commonly-used extension libraries.
config("x11") {
  libs = [
    "X11",
    "Xcomposite",
    "Xcursor",
    "Xdamage",
    "Xext",
    "Xfixes",
    "Xi",
    "Xrender",
    "Xtst",
  ]
}

# The following configs expose individual X extensions for targets that
# need them without pulling in the whole ":x11" set.
config("xrandr") {
  libs = [ "Xrandr" ]
}

config("xinerama") {
  libs = [ "Xinerama" ]
}

config("xcomposite") {
  libs = [ "Xcomposite" ]
}

config("xext") {
  libs = [ "Xext" ]
}

View File

@ -5,8 +5,6 @@
# This header file defines the "sysroot" variable which is the absolute path
# of the sysroot. If no sysroot applies, the variable will be an empty string.
import("//build/config/chrome_build.gni")
declare_args() {
# The absolute path of the sysroot that is applied when compiling using
# the target toolchain.
@ -32,29 +30,9 @@ if (current_toolchain == default_toolchain && target_sysroot != "") {
} else {
sysroot = ""
}
} else if (is_linux && is_chrome_branded && is_official_build && !is_chromeos) {
# For official builds, use the sysroot checked into the internal source repo
# so that the builds work on older versions of Linux.
if (current_cpu == "x64") {
sysroot = rebase_path("//build/linux/debian_wheezy_amd64-sysroot")
} else if (current_cpu == "x86") {
sysroot = rebase_path("//build/linux/debian_wheezy_i386-sysroot")
} else {
# Any other builds don't use a sysroot.
sysroot = ""
}
} else if (is_linux && !is_chromeos) {
if (current_cpu == "mipsel") {
sysroot = rebase_path("//mipsel-sysroot/sysroot")
} else {
sysroot = ""
}
} else if (is_mac) {
import("//build/config/mac/mac_sdk.gni")
sysroot = mac_sdk_path
} else if (is_ios) {
import("//build/config/ios/ios_sdk.gni")
sysroot = ios_sdk_path
} else {
sysroot = ""
}

View File

@ -1,68 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file contains UI-related build flags. It should theoretically be in the
# src/ui directory and only things that depend on the ui module should get the
# definitions.
#
# However, today we have many "bad" dependencies on some of these flags from,
# e.g. base, so they need to be global.
#
# See also build/config/features.gni
declare_args() {
  # Indicates if Ash is enabled. Ash is the Aura Shell which provides a
  # desktop-like environment for Aura. Requires use_aura = true
  use_ash = is_win || is_linux

  # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux
  # that does not require X11.
  use_ozone = false

  # Indicates if GLFW is enabled. GLFW is an abstraction layer for the
  # windowing system and OpenGL rendering, providing cross-platform support
  # for creating windows and OpenGL surfaces and contexts, and handling
  # window system events and input.
  use_glfw = false

  # Support ChromeOS touchpad gestures with ozone.
  use_evdev_gestures = false

  # Indicates if Aura is enabled. Aura is a low-level windowing library, sort
  # of a replacement for GDI or GTK.
  use_aura = is_win || is_linux

  # True means the UI is built using the "views" framework.
  toolkit_views = is_mac || is_win || is_chromeos || use_aura

  # Whether the entire browser uses toolkit-views on Mac instead of Cocoa.
  mac_views_browser = false

  # Whether we should use glib, a low level C utility library.
  use_glib = is_linux && !use_ozone
}

# Additional dependent variables -----------------------------------------------
#
# These variables depend on other variables and can't be set externally.

use_cairo = false
use_pango = false

# Use GPU accelerated cross process image transport by default on linux builds
# with the Aura window manager.
ui_compositor_image_transport = use_aura && is_linux

use_default_render_theme = use_aura || is_linux

# Indicates if the UI toolkit depends on X11.
use_x11 = is_linux && !use_ozone && !use_glfw

# Evdev input is used whenever Ozone is the windowing layer.
use_ozone_evdev = use_ozone

use_clipboard_aurax11 = is_linux && use_aura && use_x11

enable_hidpi = is_mac || is_chromeos || is_win || is_linux

enable_topchrome_md = false

View File

@ -141,7 +141,7 @@ config("no_incremental_linking") {
# config should be applied to large modules to turn off incremental linking
# when it won't work.
config("default_large_module_incremental_linking") {
if (symbol_level > 0 && (current_cpu == "x86" || !is_component_build)) {
if (current_cpu == "x86" || !is_component_build) {
# When symbols are on, things get so large that the tools fail due to the
# size of the .ilk files.
ldflags = incremental_linking_off_switch

View File

@ -1,333 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script is wrapper for Chromium that adds some support for how GYP
# is invoked by Chromium beyond what can be done in the gclient hooks.
import argparse
import glob
import gyp_environment
import os
import re
import shlex
import subprocess
import string
import sys
import vs_toolchain
# Locate the Chromium source root relative to this script (build/).
script_dir = os.path.dirname(os.path.realpath(__file__))
chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))

# Use the checked-in copy of GYP rather than any system installation.
sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
import gyp

# Assume this file is in a one-level-deep subdirectory of the source root.
# NOTE(review): SRC_DIR is computed the same way as chrome_src above;
# presumably kept for historical callers — confirm before removing.
SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Add paths so that pymod_do_main(...) can import files.
sys.path.insert(1, os.path.join(chrome_src, 'android_webview', 'tools'))
sys.path.insert(1, os.path.join(chrome_src, 'build', 'android', 'gyp'))
sys.path.insert(1, os.path.join(chrome_src, 'chrome', 'tools', 'build'))
sys.path.insert(1, os.path.join(chrome_src, 'chromecast', 'tools', 'build'))
sys.path.insert(1, os.path.join(chrome_src, 'ios', 'chrome', 'tools', 'build'))
sys.path.insert(1, os.path.join(chrome_src, 'native_client', 'build'))
sys.path.insert(1, os.path.join(chrome_src, 'native_client_sdk', 'src',
    'build_tools'))
sys.path.insert(1, os.path.join(chrome_src, 'remoting', 'tools', 'build'))
sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'liblouis'))
sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'WebKit',
    'Source', 'build', 'scripts'))
sys.path.insert(1, os.path.join(chrome_src, 'tools'))
sys.path.insert(1, os.path.join(chrome_src, 'tools', 'generate_shim_headers'))
sys.path.insert(1, os.path.join(chrome_src, 'tools', 'grit'))

# On Windows, Psyco shortens warm runs of build/gyp_chromium by about
# 20 seconds on a z600 machine with 12 GB of RAM, from 90 down to 70
# seconds. Conversely, memory usage of build/gyp_chromium with Psyco
# maxes out at about 158 MB vs. 132 MB without it.
#
# Psyco uses native libraries, so we need to load a different
# installation depending on which OS we are running under. It has not
# been tested whether using Psyco on our Mac and Linux builds is worth
# it (the GYP running time is a lot shorter, so the JIT startup cost
# may not be worth it).
if sys.platform == 'win32':
  try:
    sys.path.insert(0, os.path.join(chrome_src, 'third_party', 'psyco_win32'))
    import psyco
  # Best-effort: any failure to load Psyco simply disables the JIT.
  except:
    psyco = None
else:
  psyco = None
def GetSupplementalFiles():
  """Return the supplement.gypi files included in all GYP sources.

  Scans each immediate child directory of the Chromium source root for a
  file named 'supplement.gypi'.
  """
  pattern = os.path.join(chrome_src, '*', 'supplement.gypi')
  return glob.glob(pattern)
def ProcessGypDefinesItems(items):
  """Convert 'key=value' / bare 'key' strings into (key, value) pairs.

  A bare 'key' with no '=' is treated as a boolean and mapped to the
  string '1', so 'foo' and 'foo=1' yield identical definitions.  Only the
  first '=' splits; the value may itself contain '=' characters.
  """
  pairs = []
  for entry in items:
    key, sep, value = entry.partition('=')
    # partition() splits at most once, matching split('=', 1) semantics.
    pairs.append((key, value if sep else '1'))
  return pairs
def GetGypVars(supplemental_files):
  """Returns a dictionary of all GYP vars.

  Merges variable definitions from three sources, later ones winning:
  supplemental .gypi files (including ~/.gyp/include.gypi if present),
  the GYP_DEFINES environment variable, and -D command line flags.

  Args:
    supplemental_files: list of .gypi paths; may be extended in place
        with the user's include.gypi.
  Returns:
    dict mapping variable name to its string value.
  """
  # Find the .gyp directory in the user's home directory.
  home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
  if home_dot_gyp:
    home_dot_gyp = os.path.expanduser(home_dot_gyp)
  if not home_dot_gyp:
    home_vars = ['HOME']
    if sys.platform in ('cygwin', 'win32'):
      home_vars.append('USERPROFILE')
    for home_var in home_vars:
      home = os.getenv(home_var)
      if home != None:
        home_dot_gyp = os.path.join(home, '.gyp')
        if not os.path.exists(home_dot_gyp):
          home_dot_gyp = None
        else:
          break

  if home_dot_gyp:
    include_gypi = os.path.join(home_dot_gyp, "include.gypi")
    if os.path.exists(include_gypi):
      supplemental_files += [include_gypi]

  # GYP defines from the supplemental.gypi files.
  supp_items = []
  for supplement in supplemental_files:
    with open(supplement, 'r') as f:
      try:
        # NOTE(review): .gypi files are Python dict literals evaluated
        # with builtins disabled; still, only run this on trusted trees.
        file_data = eval(f.read(), {'__builtins__': None}, None)
      # Python 2 syntax; re-raise with the offending file's path attached.
      except SyntaxError, e:
        e.filename = os.path.abspath(supplement)
        raise
      variables = file_data.get('variables', [])
      for v in variables:
        supp_items += [(v, str(variables[v]))]

  # GYP defines from the environment.
  env_items = ProcessGypDefinesItems(
      shlex.split(os.environ.get('GYP_DEFINES', '')))

  # GYP defines from the command line.
  parser = argparse.ArgumentParser()
  parser.add_argument('-D', dest='defines', action='append', default=[])
  cmdline_input_items = parser.parse_known_args()[0].defines
  cmdline_items = ProcessGypDefinesItems(cmdline_input_items)

  vars_dict = dict(supp_items + env_items + cmdline_items)
  return vars_dict
def GetOutputDirectory():
  """Return the output directory GYP will use (default 'out').

  Searches for an 'output_dir=...' generator flag, first among -G
  command line arguments and then in $GYP_GENERATOR_FLAGS; the first
  match wins, so the command line overrides the environment.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('-G', dest='genflags', default=[], action='append')
  flags = list(parser.parse_known_args()[0].genflags)
  flags.extend(shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', '')))
  prefix = 'output_dir='
  for flag in flags:
    if flag.startswith(prefix):
      return flag[len(prefix):]
  return 'out'
def additional_include_files(supplemental_files, args=None):
  """Return the additional (.gypi) files to pass to gyp via -I flags.

  Builds the include list in order: $GYP_INCLUDE_FIRST (if set), then
  build/common.gypi (always), then the supplemental files, then
  $GYP_INCLUDE_LAST (if set).  Files whose real path already appears as
  a -I<path> entry in args are skipped to avoid duplicating flags on the
  automatic makefile regeneration, which only uses that exact format.

  Args:
    supplemental_files: list of supplement.gypi paths to include.
    args: optional command line argument list scanned for -I<path>
        entries (other -I spellings are not recognized).
  Returns:
    list of .gypi paths not already covered by -I flags in args.
  """
  # Avoid the mutable-default-argument pitfall; args is never mutated
  # here, but a None default keeps the signature safe regardless.
  if args is None:
    args = []

  # Determine the include files specified on the command line, keyed by
  # real path so differently-spelled paths to the same file still match.
  specified_includes = set()
  for arg in args:
    if arg.startswith('-I') and len(arg) > 2:
      specified_includes.add(os.path.realpath(arg[2:]))

  result = []

  def AddInclude(path):
    # Append path unless it was already requested via -I.
    if os.path.realpath(path) not in specified_includes:
      result.append(path)

  include_first = os.environ.get('GYP_INCLUDE_FIRST')
  if include_first is not None:
    AddInclude(os.path.join(chrome_src, include_first))

  # Always include common.gypi.
  AddInclude(os.path.join(script_dir, 'common.gypi'))

  # Optionally add supplemental .gypi files if present.
  for supplement in supplemental_files:
    AddInclude(supplement)

  include_last = os.environ.get('GYP_INCLUDE_LAST')
  if include_last is not None:
    AddInclude(os.path.join(chrome_src, include_last))

  return result
if __name__ == '__main__':
  # Disabling garbage collection saves about 1 second out of 16 on a Linux
  # z620 workstation. Since this is a short-lived process it's not a problem to
  # leak a few cyclic references in order to spare the CPU cycles for
  # scanning the heap.
  import gc
  gc.disable()

  args = sys.argv[1:]

  # --analyzer mode: the next two positional args are the analyzer's
  # config path and output path, forwarded as -G generator flags.
  use_analyzer = len(args) and args[0] == '--analyzer'
  if use_analyzer:
    args.pop(0)
    os.environ['GYP_GENERATORS'] = 'analyzer'
    args.append('-Gconfig_path=' + args.pop(0))
    args.append('-Ganalyzer_output_path=' + args.pop(0))

  if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
    print 'Skipping gyp_chromium due to GYP_CHROMIUM_NO_ACTION env var.'
    sys.exit(0)

  # Use the Psyco JIT if available.
  if psyco:
    psyco.profile()
    print "Enabled Psyco JIT."

  # Fall back on hermetic python if we happen to get run under cygwin.
  # TODO(bradnelson): take this out once this issue is fixed:
  #    http://code.google.com/p/gyp/issues/detail?id=177
  if sys.platform == 'cygwin':
    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    python_dir = sorted(glob.glob(os.path.join(depot_tools_path,
        'python2*_bin')))[-1]
    env = os.environ.copy()
    env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
    cmd = [os.path.join(python_dir, 'python.exe')] + sys.argv
    # Re-exec under the hermetic interpreter and propagate its exit code.
    sys.exit(subprocess.call(cmd, env=env))

  # This could give false positives since it doesn't actually do real option
  # parsing. Oh well.
  gyp_file_specified = any(arg.endswith('.gyp') for arg in args)

  gyp_environment.SetEnvironment()

  # If we didn't get a file, check an env var, and then fall back to
  # assuming 'all.gyp' from the same directory as the script.
  if not gyp_file_specified:
    gyp_file = os.environ.get('CHROMIUM_GYP_FILE')
    if gyp_file:
      # Note that CHROMIUM_GYP_FILE values can't have backslashes as
      # path separators even on Windows due to the use of shlex.split().
      args.extend(shlex.split(gyp_file))
    else:
      args.append(os.path.join(script_dir, 'all.gyp'))

  supplemental_includes = GetSupplementalFiles()
  gyp_vars_dict = GetGypVars(supplemental_includes)

  # There shouldn't be a circular dependency relationship between .gyp files,
  # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
  # currently exist. The check for circular dependencies is currently
  # bypassed on other platforms, but is left enabled on iOS, where a violation
  # of the rule causes Xcode to misbehave badly.
  # TODO(mark): Find and kill remaining circular dependencies, and remove this
  # option. http://crbug.com/35878.
  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
  # list.
  if gyp_vars_dict.get('OS') != 'ios':
    args.append('--no-circular-check')

  # libtool on Mac warns about duplicate basenames in static libraries, so
  # they're disallowed in general by gyp. We are lax on this point, so disable
  # this check other than on Mac. GN does not use static libraries as heavily,
  # so over time this restriction will mostly go away anyway, even on Mac.
  # https://code.google.com/p/gyp/issues/detail?id=384
  if sys.platform != 'darwin':
    args.append('--no-duplicate-basename-check')

  # We explicitly don't support the make gyp generator (crbug.com/348686). Be
  # nice and fail here, rather than choking in gyp.
  if re.search(r'(^|,|\s)make($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
    print 'Error: make gyp generator not supported (check GYP_GENERATORS).'
    sys.exit(1)

  # We explicitly don't support the native msvs gyp generator. Be nice and
  # fail here, rather than generating broken projects.
  if re.search(r'(^|,|\s)msvs($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
    print 'Error: msvs gyp generator not supported (check GYP_GENERATORS).'
    print 'Did you mean to use the `msvs-ninja` generator?'
    sys.exit(1)

  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
  # to enforce syntax checking.
  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
  if syntax_check and int(syntax_check):
    args.append('--check')

  # TODO(dmikurube): Remove these checks and messages after a while.
  if ('linux_use_tcmalloc' in gyp_vars_dict or
      'android_use_tcmalloc' in gyp_vars_dict):
    print '*****************************************************************'
    print '"linux_use_tcmalloc" and "android_use_tcmalloc" are deprecated!'
    print '-----------------------------------------------------------------'
    print 'You specify "linux_use_tcmalloc" or "android_use_tcmalloc" in'
    print 'your GYP_DEFINES. Please switch them into "use_allocator" now.'
    print 'See http://crbug.com/345554 for the details.'
    print '*****************************************************************'

  # Automatically turn on crosscompile support for platforms that need it.
  # (The Chrome OS build sets CC_host / CC_target which implicitly enables
  # this mode.)
  if all(('ninja' in os.environ.get('GYP_GENERATORS', ''),
          gyp_vars_dict.get('OS') in ['android', 'ios'],
          'GYP_CROSSCOMPILE' not in os.environ)):
    os.environ['GYP_CROSSCOMPILE'] = '1'
  if gyp_vars_dict.get('OS') == 'android':
    args.append('--check')

  args.extend(
      ['-I' + i for i in additional_include_files(supplemental_includes, args)])

  args.extend(['-D', 'gyp_output_dir=' + GetOutputDirectory()])

  if not use_analyzer:
    print 'Updating projects from gyp files...'
    sys.stdout.flush()

  # Off we go...
  gyp_rc = gyp.main(args)

  if not use_analyzer:
    # Copy the MSVC runtime DLLs next to the outputs on Windows; a no-op
    # (returns a falsy value) on other platforms.
    vs2013_runtime_dll_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
    if vs2013_runtime_dll_dirs:
      x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
      vs_toolchain.CopyVsRuntimeDlls(
          os.path.join(chrome_src, GetOutputDirectory()),
          (x86_runtime, x64_runtime))

  sys.exit(gyp_rc)

View File

@ -1,18 +0,0 @@
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# This file is (possibly, depending on python version) imported by
# gyp_chromium when GYP_PARALLEL=1 and it creates sub-processes
# through the multiprocessing library.

# Importing in Python 2.6 (fixed in 2.7) on Windows doesn't search for
# imports that don't end in .py (and aren't directories with an
# __init__.py). This wrapper makes "import gyp_chromium" work with
# those old versions and makes it possible to execute gyp_chromium.py
# directly on Windows where the extension is useful.

import os

# Run the extensionless 'gyp_chromium' script that sits next to this
# wrapper.  NOTE: execfile is Python 2 only.
path = os.path.abspath(os.path.split(__file__)[0])
execfile(os.path.join(path, 'gyp_chromium'))

View File

@ -1,66 +0,0 @@
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import unittest

SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
SRC_DIR = os.path.dirname(SCRIPT_DIR)

# Use the checked-in pymock rather than a system-installed mock.
sys.path.append(os.path.join(SRC_DIR, 'third_party', 'pymock'))

import mock

# TODO(sbc): Make gyp_chromium more testable by putting the code in
# a .py file.
# __import__ is required because 'gyp_chromium' has no .py extension.
gyp_chromium = __import__('gyp_chromium')
class TestGetOutputDirectory(unittest.TestCase):
  """Tests for gyp_chromium.GetOutputDirectory.

  Each case patches os.environ and sys.argv to isolate the two input
  sources (GYP_GENERATOR_FLAGS and -G command line flags).
  """

  @mock.patch('os.environ', {})
  @mock.patch('sys.argv', [__file__])
  def testDefaultValue(self):
    self.assertEqual(gyp_chromium.GetOutputDirectory(), 'out')

  @mock.patch('os.environ', {'GYP_GENERATOR_FLAGS': 'output_dir=envfoo'})
  @mock.patch('sys.argv', [__file__])
  def testEnvironment(self):
    self.assertEqual(gyp_chromium.GetOutputDirectory(), 'envfoo')

  # Command line -G flags must take precedence over the environment.
  @mock.patch('os.environ', {'GYP_GENERATOR_FLAGS': 'output_dir=envfoo'})
  @mock.patch('sys.argv', [__file__, '-Goutput_dir=cmdfoo'])
  def testGFlagOverridesEnv(self):
    self.assertEqual(gyp_chromium.GetOutputDirectory(), 'cmdfoo')

  @mock.patch('os.environ', {})
  @mock.patch('sys.argv', [__file__, '-G', 'output_dir=foo'])
  def testGFlagWithSpace(self):
    self.assertEqual(gyp_chromium.GetOutputDirectory(), 'foo')
class TestGetGypVars(unittest.TestCase):
  """Tests for gyp_chromium.GetGypVars covering -D flag parsing."""

  @mock.patch('os.environ', {})
  def testDefault(self):
    self.assertEqual(gyp_chromium.GetGypVars([]), {})

  @mock.patch('os.environ', {})
  @mock.patch('sys.argv', [__file__, '-D', 'foo=bar'])
  def testDFlags(self):
    self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': 'bar'})

  # A bare -D name (no '=') is treated as a boolean and set to '1'.
  @mock.patch('os.environ', {})
  @mock.patch('sys.argv', [__file__, '-D', 'foo'])
  def testDFlagsNoValue(self):
    self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': '1'})

  @mock.patch('os.environ', {})
  @mock.patch('sys.argv', [__file__, '-D', 'foo=bar', '-Dbaz'])
  def testDFlagMulti(self):
    self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': 'bar', 'baz': '1'})
# Allow running this test file directly as a script.
if __name__ == '__main__':
  unittest.main()

View File

@ -1,242 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Defines a static library corresponding to the output of schema compiler tools
# over a set of extensions API schemas (IDL or JSON format.) The library target
# has implicit hard dependencies on all schema files listed by the invoker and
# is itself a hard dependency.
#
# Invocations of this template may use the following variables:
#
# sources [required] A list of schema files to be compiled.
#
# root_namespace [required]
# A Python string substitution pattern used to generate the C++
# namespace for each API. Use %(namespace)s to replace with the API
# namespace, like "toplevel::%(namespace)s_api".
#
# schema_include_rules [optional]
# A list of paths to include when searching for referenced objects,
# with the namespace separated by a :.
# Example:
# [ '/foo/bar:Foo::Bar::%(namespace)s' ]
#
# schemas [optional, default = false]
# Boolean indicating if the schema files should be generated.
#
# bundle [optional, default = false]
# Boolean indicating if the schema bundle files should be generated.
#
# bundle_registration [optional, default = false]
# Boolean indicating if the API registration bundle files should be generated.
#
# impl_dir [required if bundle_registration = true, otherwise unused]
# The path containing C++ implementations of API functions. This path is
# used as the root path when looking for {schema}/{schema}_api.h headers
# when generating API registration bundles. Such headers, if found, are
# automatically included by the generated code.
#
# uncompiled_sources [optional, only used when bundle = true or
# bundle_registration = true]
# A list of schema files which should not be compiled, but which should still
# be processed for API bundle generation.
#
# deps [optional]
# If any deps are specified they will be inherited by the static library
# target.
#
# generate_static_library [optional, defaults to false]
# Produces a static library instead of a source_set.
#
# The generated library target also inherits the visibility and output_name
# of its invoker.
template("json_schema_api") {
  assert(defined(invoker.sources),
         "\"sources\" must be defined for the $target_name template.")
  assert(defined(invoker.root_namespace),
         "\"root_namespace\" must be defined for the $target_name template.")

  # Which of the three generation phases the invoker asked for.
  schemas = defined(invoker.schemas) && invoker.schemas
  bundle = defined(invoker.bundle) && invoker.bundle
  bundle_registration =
      defined(invoker.bundle_registration) && invoker.bundle_registration

  schema_include_rules = ""
  if (defined(invoker.schema_include_rules)) {
    schema_include_rules = invoker.schema_include_rules
  }

  # Keep a copy of the target_name here since it will be trampled
  # in nested targets.
  target_visibility = [ ":$target_name" ]

  # Config exposing the generated headers' include root to dependents.
  generated_config_name = target_name + "_generated_config"
  config(generated_config_name) {
    include_dirs = [ root_gen_dir ]
    visibility = target_visibility
  }

  root_namespace = invoker.root_namespace

  compiler_root = "//tools/json_schema_compiler"
  compiler_script = "$compiler_root/compiler.py"
  # Listed as inputs so changes to the compiler re-trigger generation.
  compiler_sources = [
    "$compiler_root/cc_generator.py",
    "$compiler_root/code.py",
    "$compiler_root/compiler.py",
    "$compiler_root/cpp_generator.py",
    "$compiler_root/cpp_type_generator.py",
    "$compiler_root/cpp_util.py",
    "$compiler_root/h_generator.py",
    "$compiler_root/idl_schema.py",
    "$compiler_root/model.py",
    "$compiler_root/util_cc_helper.py",
  ]

  # Phase 1: per-schema .cc/.h generation.
  if (schemas) {
    schema_generator_name = target_name + "_schema_generator"
    action_foreach(schema_generator_name) {
      script = compiler_script
      sources = invoker.sources
      inputs = compiler_sources
      outputs = [
        "$target_gen_dir/{{source_name_part}}.cc",
        "$target_gen_dir/{{source_name_part}}.h",
      ]
      args = [
        "{{source}}",
        "--root=" + rebase_path("//", root_build_dir),
        "--destdir=" + rebase_path(root_gen_dir, root_build_dir),
        "--namespace=$root_namespace",
        "--generator=cpp",
        "--include-rules=$schema_include_rules",
      ]

      if (defined(invoker.visibility)) {
        # If visibility is restricted, add our own target to it.
        visibility = invoker.visibility + target_visibility
      }
    }
  }

  # Phase 2: single bundled generated_schemas.cc/.h over all schemas.
  if (bundle) {
    uncompiled_sources = []
    if (defined(invoker.uncompiled_sources)) {
      uncompiled_sources = invoker.uncompiled_sources
    }

    bundle_generator_schema_name = target_name + "_bundle_generator_schema"
    action(bundle_generator_schema_name) {
      script = compiler_script
      inputs = compiler_sources + invoker.sources + uncompiled_sources
      outputs = [
        "$target_gen_dir/generated_schemas.cc",
        "$target_gen_dir/generated_schemas.h",
      ]
      args = [
               "--root=" + rebase_path("//", root_build_dir),
               "--destdir=" + rebase_path(root_gen_dir, root_build_dir),
               "--namespace=$root_namespace",
               "--generator=cpp-bundle-schema",
               "--include-rules=$schema_include_rules",
             ] + rebase_path(invoker.sources, root_build_dir) +
             rebase_path(uncompiled_sources, root_build_dir)
    }
  }

  # Phase 3: bundled API registration code, placed under impl_dir.
  if (bundle_registration) {
    uncompiled_sources = []
    if (defined(invoker.uncompiled_sources)) {
      uncompiled_sources = invoker.uncompiled_sources
    }

    assert(defined(invoker.impl_dir),
           "\"impl_dir\" must be defined for the $target_name template.")

    # Child directory inside the generated file tree.
    gen_child_dir = rebase_path(invoker.impl_dir, "//")

    bundle_generator_registration_name =
        target_name + "_bundle_generator_registration"
    action(bundle_generator_registration_name) {
      script = compiler_script
      inputs = compiler_sources + invoker.sources + uncompiled_sources
      outputs = [
        "$root_gen_dir/$gen_child_dir/generated_api_registration.cc",
        "$root_gen_dir/$gen_child_dir/generated_api_registration.h",
      ]
      args = [
               "--root=" + rebase_path("//", root_build_dir),
               "--destdir=" + rebase_path(root_gen_dir, root_build_dir),
               "--namespace=$root_namespace",
               "--generator=cpp-bundle-registration",
               "--impl-dir=$gen_child_dir",
               "--include-rules=$schema_include_rules",
             ] + rebase_path(invoker.sources, root_build_dir) +
             rebase_path(uncompiled_sources, root_build_dir)
    }
  }

  # Compute the contents of the library/source set.
  lib_sources = invoker.sources
  lib_deps = []
  lib_public_deps = []
  lib_extra_configs = []

  if (schemas) {
    lib_sources += get_target_outputs(":$schema_generator_name")
    lib_public_deps += [ ":$schema_generator_name" ]
    lib_deps += [ "//tools/json_schema_compiler:generated_api_util" ]
    lib_extra_configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
  }

  if (bundle) {
    lib_sources += get_target_outputs(":$bundle_generator_schema_name")
    lib_deps += [ ":$bundle_generator_schema_name" ]
  }

  if (bundle_registration) {
    lib_sources += get_target_outputs(":$bundle_generator_registration_name")
    lib_deps += [ ":$bundle_generator_registration_name" ]
  }

  if (defined(invoker.deps)) {
    lib_deps += invoker.deps
  }

  # Generate either a static library or a source set.
  if (defined(invoker.generate_static_library) &&
      invoker.generate_static_library) {
    static_library(target_name) {
      sources = lib_sources
      deps = lib_deps
      public_deps = lib_public_deps
      configs += lib_extra_configs
      public_configs = [ ":$generated_config_name" ]
      if (defined(invoker.visibility)) {
        visibility = invoker.visibility
      }
      if (defined(invoker.output_name)) {
        output_name = invoker.output_name
      }
    }
  } else {
    source_set(target_name) {
      sources = lib_sources
      deps = lib_deps
      public_deps = lib_public_deps
      configs += lib_extra_configs
      public_configs = [ ":$generated_config_name" ]
      if (defined(invoker.visibility)) {
        visibility = invoker.visibility
      }
      if (defined(invoker.output_name)) {
        output_name = invoker.output_name
      }
    }
  }
}

View File

@ -1,83 +0,0 @@
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
# When including this gypi, the following variables must be set:
# schema_files:
# An array of json or idl files that comprise the api model.
# schema_include_rules (optional):
# An array of paths to include when searching for referenced objects,
# with the namespace separated by a :.
# Example:
# [ '/foo/bar:Foo::Bar::%(namespace)s' ]
# cc_dir:
# The directory to put the generated code in.
# root_namespace:
# A Python string substitution pattern used to generate the C++
# namespace for each API. Use %(namespace)s to replace with the API
# namespace, like "toplevel::%(namespace)s_api".
#
# Functions and namespaces can be excluded by setting "nocompile" to true.
# The default root path of API implementation sources is
# chrome/browser/extensions/api and can be overridden by setting "impl_dir".
'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
'api_gen': '<(api_gen_dir)/compiler.py',
'generator_files': [
'<(api_gen_dir)/cc_generator.py',
'<(api_gen_dir)/code.py',
'<(api_gen_dir)/compiler.py',
'<(api_gen_dir)/cpp_bundle_generator.py',
'<(api_gen_dir)/cpp_type_generator.py',
'<(api_gen_dir)/cpp_util.py',
'<(api_gen_dir)/h_generator.py',
'<(api_gen_dir)/idl_schema.py',
'<(api_gen_dir)/json_schema.py',
'<(api_gen_dir)/model.py',
'<(api_gen_dir)/util_cc_helper.py',
],
'schema_include_rules': [],
},
'actions': [
{
'action_name': 'genapi_bundle_schema',
'inputs': [
'<@(generator_files)',
'<@(schema_files)',
'<@(non_compiled_schema_files)',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_schemas.h',
'<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_schemas.cc',
],
'action': [
'python',
'<(api_gen)',
'--root=<(DEPTH)',
'--destdir=<(SHARED_INTERMEDIATE_DIR)',
'--namespace=<(root_namespace)',
'--generator=cpp-bundle-schema',
'--include-rules=<(schema_include_rules)',
'<@(schema_files)',
'<@(non_compiled_schema_files)',
],
'message': 'Generating C++ API bundle code for schemas',
'process_outputs_as_sources': 1,
# Avoid running MIDL compiler on IDL input files.
'explicit_idl_action': 1,
},
],
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)',
'<(DEPTH)',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)',
]
},
# This target exports a hard dependency because it generates header
# files.
'hard_dependency': 1,
}

View File

@ -1,78 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
# When including this gypi, the following variables must be set:
# schema_files:
# An array of json or idl files that comprise the api model.
# impl_dir_:
# The root path of API implementations; also used for the
# output location. (N.B. Named as such to prevent gyp from
# expanding it as a relative path.)
# root_namespace:
# A Python string substitution pattern used to generate the C++
# namespace for each API. Use %(namespace)s to replace with the API
# namespace, like "toplevel::%(namespace)s_api".
#
# Functions and namespaces can be excluded by setting "nocompile" to true.
'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
'api_gen': '<(api_gen_dir)/compiler.py',
'generator_files': [
'<(api_gen_dir)/cc_generator.py',
'<(api_gen_dir)/code.py',
'<(api_gen_dir)/compiler.py',
'<(api_gen_dir)/cpp_bundle_generator.py',
'<(api_gen_dir)/cpp_type_generator.py',
'<(api_gen_dir)/cpp_util.py',
'<(api_gen_dir)/h_generator.py',
'<(api_gen_dir)/idl_schema.py',
'<(api_gen_dir)/json_schema.py',
'<(api_gen_dir)/model.py',
'<(api_gen_dir)/util_cc_helper.py',
],
},
'actions': [
{
# GN version: json_schema_api.gni
'action_name': 'genapi_bundle_registration',
'inputs': [
'<@(generator_files)',
'<@(schema_files)',
'<@(non_compiled_schema_files)',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/<(impl_dir_)/generated_api_registration.h',
'<(SHARED_INTERMEDIATE_DIR)/<(impl_dir_)/generated_api_registration.cc',
],
'action': [
'python',
'<(api_gen)',
'--root=<(DEPTH)',
'--destdir=<(SHARED_INTERMEDIATE_DIR)',
'--namespace=<(root_namespace)',
'--generator=cpp-bundle-registration',
'--impl-dir=<(impl_dir_)',
'<@(schema_files)',
'<@(non_compiled_schema_files)',
],
'message': 'Generating C++ API bundle code for function registration',
'process_outputs_as_sources': 1,
# Avoid running MIDL compiler on IDL input files.
'explicit_idl_action': 1,
},
],
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)',
'<(DEPTH)',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)',
]
},
# This target exports a hard dependency because it generates header
# files.
'hard_dependency': 1,
}

View File

@ -1,123 +0,0 @@
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Defines gyp 'rules' that run the json_schema_compiler over each .json and
# .idl schema file in the including target, generating one .cc/.h pair per
# schema under <(SHARED_INTERMEDIATE_DIR)/<(cc_dir).
{
  'variables': {
    # When including this gypi, the following variables must be set:
    #   schema_files:
    #     An array of json or idl files that comprise the api model.
    #   schema_include_rules (optional):
    #     An array of paths to include when searching for referenced objects,
    #     with the namespace separated by a :.
    #     Example:
    #       [ '/foo/bar:Foo::Bar::%(namespace)s' ]
    #   cc_dir:
    #     The directory to put the generated code in.
    #   root_namespace:
    #     A Python string substitution pattern used to generate the C++
    #     namespace for each API. Use %(namespace)s to replace with the API
    #     namespace, like "toplevel::%(namespace)s_api".
    #
    # Functions and namespaces can be excluded by setting "nocompile" to true.
    'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
    'api_gen': '<(api_gen_dir)/compiler.py',
    'schema_include_rules': [],
  },
  'rules': [
    {
      # GN version: json_schema_api.gni
      # Generates C++ bindings from each .json schema file.
      'rule_name': 'genapi',
      'msvs_external_rule': 1,
      'extension': 'json',
      'inputs': [
        '<(api_gen_dir)/cc_generator.py',
        '<(api_gen_dir)/code.py',
        '<(api_gen_dir)/compiler.py',
        '<(api_gen_dir)/cpp_generator.py',
        '<(api_gen_dir)/cpp_type_generator.py',
        '<(api_gen_dir)/cpp_util.py',
        '<(api_gen_dir)/h_generator.py',
        '<(api_gen_dir)/json_schema.py',
        '<(api_gen_dir)/model.py',
        '<(api_gen_dir)/util.cc',
        '<(api_gen_dir)/util.h',
        '<(api_gen_dir)/util_cc_helper.py',
        # TODO(calamity): uncomment this when gyp on windows behaves like other
        # platforms. List expansions of filepaths in inputs expand to different
        # things.
        # '<@(schema_files)',
      ],
      'outputs': [
        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).cc',
        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).h',
      ],
      'action': [
        'python',
        '<(api_gen)',
        '<(RULE_INPUT_PATH)',
        '--root=<(DEPTH)',
        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
        '--namespace=<(root_namespace)',
        '--generator=cpp',
        '--include-rules=<(schema_include_rules)'
      ],
      'message': 'Generating C++ code from <(RULE_INPUT_PATH) json files',
      'process_outputs_as_sources': 1,
    },
    {
      # Same as genapi above, but for .idl schemas (idl_schema.py replaces
      # json_schema.py as the parser input).
      'rule_name': 'genapi_idl',
      'msvs_external_rule': 1,
      'extension': 'idl',
      'inputs': [
        '<(api_gen_dir)/cc_generator.py',
        '<(api_gen_dir)/code.py',
        '<(api_gen_dir)/compiler.py',
        '<(api_gen_dir)/cpp_generator.py',
        '<(api_gen_dir)/cpp_type_generator.py',
        '<(api_gen_dir)/cpp_util.py',
        '<(api_gen_dir)/h_generator.py',
        '<(api_gen_dir)/idl_schema.py',
        '<(api_gen_dir)/model.py',
        '<(api_gen_dir)/util.cc',
        '<(api_gen_dir)/util.h',
        '<(api_gen_dir)/util_cc_helper.py',
        # TODO(calamity): uncomment this when gyp on windows behaves like other
        # platforms. List expansions of filepaths in inputs expand to different
        # things.
        # '<@(schema_files)',
      ],
      'outputs': [
        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).cc',
        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).h',
      ],
      'action': [
        'python',
        '<(api_gen)',
        '<(RULE_INPUT_PATH)',
        '--root=<(DEPTH)',
        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
        '--namespace=<(root_namespace)',
        '--generator=cpp',
        '--include-rules=<(schema_include_rules)'
      ],
      'message': 'Generating C++ code from <(RULE_INPUT_PATH) IDL files',
      'process_outputs_as_sources': 1,
    },
  ],
  'include_dirs': [
    '<(SHARED_INTERMEDIATE_DIR)',
    '<(DEPTH)',
  ],
  'dependencies':[
    '<(DEPTH)/tools/json_schema_compiler/api_gen_util.gyp:api_gen_util',
  ],
  'direct_dependent_settings': {
    'include_dirs': [
      '<(SHARED_INTERMEDIATE_DIR)',
    ]
  },
  # This target exports a hard dependency because it generates header
  # files.
  'hard_dependency': 1,
}

View File

@ -1,53 +0,0 @@
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Defines a gyp rule that turns each .json input file, validated against
# <(schema_file), into C++ static initializers via tools/json_to_struct.
{
  'variables': {
    # When including this gypi, the following variables must be set:
    #   schema_file: a json file that comprises the structure model.
    #   namespace: the C++ namespace that all generated files go under
    #   cc_dir: path to generated files
    # Functions and namespaces can be excluded by setting "nocompile" to true.
    'struct_gen_dir': '<(DEPTH)/tools/json_to_struct',
    'struct_gen%': '<(struct_gen_dir)/json_to_struct.py',
    'output_filename%': '<(RULE_INPUT_ROOT)',
  },
  'rules': [
    {
      # GN version: //tools/json_to_struct/json_to_struct.gni
      'rule_name': 'genstaticinit',
      'extension': 'json',
      'inputs': [
        '<(struct_gen)',
        '<(struct_gen_dir)/element_generator.py',
        '<(struct_gen_dir)/json_to_struct.py',
        '<(struct_gen_dir)/struct_generator.py',
        '<(schema_file)',
      ],
      'outputs': [
        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(output_filename).cc',
        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(output_filename).h',
      ],
      'action': [
        'python',
        '<(struct_gen)',
        '<(RULE_INPUT_PATH)',
        '--destbase=<(SHARED_INTERMEDIATE_DIR)',
        '--destdir=<(cc_dir)',
        '--namespace=<(namespace)',
        '--schema=<(schema_file)',
        '--output=<(output_filename)',
      ],
      'message': 'Generating C++ static initializers from <(RULE_INPUT_PATH)',
      'process_outputs_as_sources': 1,
    },
  ],
  'include_dirs': [
    '<(SHARED_INTERMEDIATE_DIR)',
    '<(DEPTH)',
  ],
  # This target exports a hard dependency because it generates header
  # files.
  'hard_dependency': 1,
}

View File

@ -1,6 +0,0 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Build-argument override file -- presumably imported as a default for a
# declare_args() elsewhere; confirm against the importing .gn files.
# This variable should point to the Dart SDK.
dart_sdk_root = "//third_party/dart-sdk/dart-sdk"

View File

@ -1,16 +0,0 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Build-argument defaults for the Mojo SDK and its services.

# This variable should point to the parent directory of the Mojo SDK.
mojo_sdk_root = "//"

# To build the Mojo shell from source, set this variable to true. To use the
# prebuilt shell, omit this variable or set it to false. Note that the prebuilt
# shell will be used only on platforms for which it is published (currently
# Linux and Android).
mojo_build_mojo_shell_from_source = true

# To build the network service from source, set this variable to true. To use
# the prebuilt network service, omit this variable or set it to false.
mojo_build_network_service_from_source = true

View File

@ -1,6 +0,0 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Override nacl's build directory so the NaCl build uses this tree's //build.
nacl_shared_build_dir = "//build"

View File

@ -1,13 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Secondary-tree overrides consumed by the V8 build.

if (is_android) {
  import("//build/config/android/config.gni")
}

# TODO(sky): nuke this. Temporary while sorting out http://crbug.com/465456.
enable_correct_v8_arch = false

# Startup data is compiled in (not loaded externally) on Chrome OS and Windows.
v8_use_external_startup_data = !(is_chromeos || is_win)
v8_extra_library_files = []

View File

@ -1,54 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

config("gmock_config") {
  # Gmock headers need to be able to find themselves.
  include_dirs = [ "include" ]
}

# The gmock library proper, built from the checked-in gmock sources.
static_library("gmock") {
  # TODO http://crbug.com/412064 enable this flag all the time.
  testonly = !is_component_build
  sources = [
    # Sources based on files in r173 of gmock.
    "include/gmock/gmock-actions.h",
    "include/gmock/gmock-cardinalities.h",
    "include/gmock/gmock-generated-actions.h",
    "include/gmock/gmock-generated-function-mockers.h",
    "include/gmock/gmock-generated-matchers.h",
    "include/gmock/gmock-generated-nice-strict.h",
    "include/gmock/gmock-matchers.h",
    "include/gmock/gmock-spec-builders.h",
    "include/gmock/gmock.h",
    "include/gmock/internal/gmock-generated-internal-utils.h",
    "include/gmock/internal/gmock-internal-utils.h",
    "include/gmock/internal/gmock-port.h",

    #"src/gmock-all.cc",  # Not needed by our build.
    "src/gmock-cardinalities.cc",
    "src/gmock-internal-utils.cc",
    "src/gmock-matchers.cc",
    "src/gmock-spec-builders.cc",
    "src/gmock.cc",
  ]

  # This project includes some stuff from gtest's guts.
  include_dirs = [ "../gtest/include" ]

  public_configs = [
    ":gmock_config",
    "//testing/gtest:gtest_config",
  ]
}

# Optional main() implementation for gmock-based test executables.
static_library("gmock_main") {
  # TODO http://crbug.com/412064 enable this flag all the time.
  testonly = !is_component_build
  sources = [
    "src/gmock_main.cc",
  ]
  deps = [
    ":gmock",
  ]
}

View File

@ -1,135 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Defines shared between gtest itself and everything that includes gtest
# headers (applied via all_dependent_configs below).
config("gtest_config") {
  visibility = [
    ":*",
    "//testing/gmock:*",  # gmock also shares this config.
  ]

  defines = [
    # In order to allow regex matches in gtest to be shared between Windows
    # and other systems, we tell gtest to always use its internal engine.
    "GTEST_HAS_POSIX_RE=0",

    # Chrome doesn't support / require C++11, yet.
    "GTEST_LANG_CXX11=0",
  ]

  # Gtest headers need to be able to find themselves.
  include_dirs = [ "include" ]

  if (is_win) {
    cflags = [ "/wd4800" ]  # Unused variable warning.
  }

  if (is_posix) {
    defines += [
      # gtest isn't able to figure out when RTTI is disabled for gcc
      # versions older than 4.3.2, and assumes it's enabled. Our Mac
      # and Linux builds disable RTTI, and cannot guarantee that the
      # compiler will be 4.3.2. or newer. The Mac, for example, uses
      # 4.2.1 as that is the latest available on that platform. gtest
      # must be instructed that RTTI is disabled here, and for any
      # direct dependents that might include gtest headers.
      "GTEST_HAS_RTTI=0",
    ]
  }

  if (is_android) {
    defines += [
      # We want gtest features that use tr1::tuple, but we currently
      # don't support the variadic templates used by libstdc++'s
      # implementation. gtest supports this scenario by providing its
      # own implementation but we must opt in to it.
      "GTEST_USE_OWN_TR1_TUPLE=1",

      # GTEST_USE_OWN_TR1_TUPLE only works if GTEST_HAS_TR1_TUPLE is set.
      # gtest r625 made it so that GTEST_HAS_TR1_TUPLE is set to 0
      # automatically on android, so it has to be set explicitly here.
      "GTEST_HAS_TR1_TUPLE=1",
    ]
  }
}

# Applied only to direct dependents of :gtest (via public_configs).
config("gtest_direct_config") {
  visibility = [ ":*" ]
  defines = [ "UNIT_TEST" ]
}

static_library("gtest") {
  # TODO http://crbug.com/412064 enable this flag all the time.
  testonly = !is_component_build
  sources = [
    "include/gtest/gtest-death-test.h",
    "include/gtest/gtest-message.h",
    "include/gtest/gtest-param-test.h",
    "include/gtest/gtest-printers.h",
    "include/gtest/gtest-spi.h",
    "include/gtest/gtest-test-part.h",
    "include/gtest/gtest-typed-test.h",
    "include/gtest/gtest.h",
    "include/gtest/gtest_pred_impl.h",
    "include/gtest/internal/gtest-death-test-internal.h",
    "include/gtest/internal/gtest-filepath.h",
    "include/gtest/internal/gtest-internal.h",
    "include/gtest/internal/gtest-linked_ptr.h",
    "include/gtest/internal/gtest-param-util-generated.h",
    "include/gtest/internal/gtest-param-util.h",
    "include/gtest/internal/gtest-port.h",
    "include/gtest/internal/gtest-string.h",
    "include/gtest/internal/gtest-tuple.h",
    "include/gtest/internal/gtest-type-util.h",

    #"gtest/src/gtest-all.cc",  # Not needed by our build.
    "../multiprocess_func_list.cc",
    "../multiprocess_func_list.h",
    "../platform_test.h",
    "src/gtest-death-test.cc",
    "src/gtest-filepath.cc",
    "src/gtest-internal-inl.h",
    "src/gtest-port.cc",
    "src/gtest-printers.cc",
    "src/gtest-test-part.cc",
    "src/gtest-typed-test.cc",
    "src/gtest.cc",
  ]

  if (is_mac) {
    sources += [
      "../gtest_mac.h",
      "../gtest_mac.mm",
      "../platform_test_mac.mm",
    ]
  }

  include_dirs = [ "." ]

  all_dependent_configs = [ ":gtest_config" ]
  public_configs = [ ":gtest_direct_config" ]

  configs -= [ "//build/config/compiler:chromium_code" ]
  configs += [ "//build/config/compiler:no_chromium_code" ]

  # Warning-suppression config declared inline and appended to this target
  # only, just below.
  config("gtest_warnings") {
    if (is_win && is_clang) {
      # The Mutex constructor initializer list in gtest-port.cc is incorrectly
      # ordered. See
      # https://groups.google.com/d/msg/googletestframework/S5uSV8L2TX8/U1FaTDa6J6sJ.
      cflags = [ "-Wno-reorder" ]
    }
  }
  configs += [ ":gtest_warnings" ]
}

# Optional main() implementation for gtest-based test executables.
source_set("gtest_main") {
  # TODO http://crbug.com/412064 enable this flag all the time.
  testonly = !is_component_build
  sources = [
    "src/gtest_main.cc",
  ]
  deps = [
    ":gtest",
  ]
}

View File

@ -1,221 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Do not use the targets in this file unless you need a certain libjpeg
# implementation. Use the meta target //third_party:jpeg instead.

import("//build/config/sanitizers/sanitizers.gni")

if (current_cpu == "arm") {
  import("//build/config/arm.gni")
}

# Hand-written SIMD assembly for x86/x64, assembled with yasm.
if (current_cpu == "x86" || current_cpu == "x64") {
  import("//third_party/yasm/yasm_assemble.gni")

  yasm_assemble("simd_asm") {
    defines = []

    if (current_cpu == "x86") {
      sources = [
        "simd/jccolor-mmx.asm",
        "simd/jccolor-sse2.asm",
        "simd/jcgray-mmx.asm",
        "simd/jcgray-sse2.asm",
        "simd/jchuff-sse2.asm",
        "simd/jcsample-mmx.asm",
        "simd/jcsample-sse2.asm",
        "simd/jdcolor-mmx.asm",
        "simd/jdcolor-sse2.asm",
        "simd/jdmerge-mmx.asm",
        "simd/jdmerge-sse2.asm",
        "simd/jdsample-mmx.asm",
        "simd/jdsample-sse2.asm",
        "simd/jfdctflt-3dn.asm",
        "simd/jfdctflt-sse.asm",
        "simd/jfdctfst-mmx.asm",
        "simd/jfdctfst-sse2.asm",
        "simd/jfdctint-mmx.asm",
        "simd/jfdctint-sse2.asm",
        "simd/jidctflt-3dn.asm",
        "simd/jidctflt-sse.asm",
        "simd/jidctflt-sse2.asm",
        "simd/jidctfst-mmx.asm",
        "simd/jidctfst-sse2.asm",
        "simd/jidctint-mmx.asm",
        "simd/jidctint-sse2.asm",
        "simd/jidctred-mmx.asm",
        "simd/jidctred-sse2.asm",
        "simd/jquant-3dn.asm",
        "simd/jquant-mmx.asm",
        "simd/jquant-sse.asm",
        "simd/jquantf-sse2.asm",
        "simd/jquanti-sse2.asm",
        "simd/jsimdcpu.asm",
      ]
      defines += [
        "__x86__",
        "PIC",
      ]
    } else if (current_cpu == "x64") {
      sources = [
        "simd/jccolor-sse2-64.asm",
        "simd/jcgray-sse2-64.asm",
        "simd/jchuff-sse2-64.asm",
        "simd/jcsample-sse2-64.asm",
        "simd/jdcolor-sse2-64.asm",
        "simd/jdmerge-sse2-64.asm",
        "simd/jdsample-sse2-64.asm",
        "simd/jfdctflt-sse-64.asm",
        "simd/jfdctfst-sse2-64.asm",
        "simd/jfdctint-sse2-64.asm",
        "simd/jidctflt-sse2-64.asm",
        "simd/jidctfst-sse2-64.asm",
        "simd/jidctint-sse2-64.asm",
        "simd/jidctred-sse2-64.asm",
        "simd/jquantf-sse2-64.asm",
        "simd/jquanti-sse2-64.asm",
      ]
      defines += [
        "__x86_64__",
        "PIC",
      ]
    }

    # Object-format / platform defines for yasm.
    if (is_win) {
      defines += [ "MSVC" ]
      include_dirs = [ "win" ]
      if (current_cpu == "x86") {
        defines += [ "WIN32" ]
      } else {
        defines += [ "WIN64" ]
      }
    } else if (is_mac) {
      defines += [ "MACHO" ]
      include_dirs = [ "mac" ]
    } else if (is_linux || is_android) {
      defines += [ "ELF" ]
      include_dirs = [ "linux" ]
    }
  }
}

# C-level SIMD glue; falls back to jsimd_none.c when no SIMD path applies.
source_set("simd") {
  if (current_cpu == "x86") {
    deps = [
      ":simd_asm",
    ]
    sources = [
      "simd/jsimd_i386.c",
    ]
    if (is_win) {
      cflags = [ "/wd4245" ]
    }
  } else if (current_cpu == "x64") {
    deps = [
      ":simd_asm",
    ]
    sources = [
      "simd/jsimd_x86_64.c",
    ]
  } else if (current_cpu == "arm" && arm_version >= 7 &&
             (arm_use_neon || arm_optionally_use_neon)) {
    sources = [
      "simd/jsimd_arm.c",
      "simd/jsimd_arm_neon.S",
    ]
  } else {
    sources = [
      "jsimd_none.c",
    ]
  }

  if (is_win) {
    cflags = [ "/wd4245" ]
  }
}

config("libjpeg_config") {
  include_dirs = [ "." ]
}

source_set("libjpeg") {
  sources = [
    "jcapimin.c",
    "jcapistd.c",
    "jccoefct.c",
    "jccolor.c",
    "jcdctmgr.c",
    "jchuff.c",
    "jchuff.h",
    "jcinit.c",
    "jcmainct.c",
    "jcmarker.c",
    "jcmaster.c",
    "jcomapi.c",
    "jconfig.h",
    "jcparam.c",
    "jcphuff.c",
    "jcprepct.c",
    "jcsample.c",
    "jdapimin.c",
    "jdapistd.c",
    "jdatadst.c",
    "jdatasrc.c",
    "jdcoefct.c",
    "jdcolor.c",
    "jdct.h",
    "jddctmgr.c",
    "jdhuff.c",
    "jdhuff.h",
    "jdinput.c",
    "jdmainct.c",
    "jdmarker.c",
    "jdmaster.c",
    "jdmerge.c",
    "jdphuff.c",
    "jdpostct.c",
    "jdsample.c",
    "jerror.c",
    "jerror.h",
    "jfdctflt.c",
    "jfdctfst.c",
    "jfdctint.c",
    "jidctflt.c",
    "jidctfst.c",
    "jidctint.c",
    "jidctred.c",
    "jinclude.h",
    "jmemmgr.c",
    "jmemnobs.c",
    "jmemsys.h",
    "jmorecfg.h",
    "jpegint.h",
    "jpeglib.h",
    "jpeglibmangler.h",
    "jquant1.c",
    "jquant2.c",
    "jutils.c",
    "jversion.h",
  ]

  defines = [
    "WITH_SIMD",
    "NO_GETENV",
  ]

  configs -= [ "//build/config/compiler:chromium_code" ]
  configs += [ "//build/config/compiler:no_chromium_code" ]

  public_configs = [ ":libjpeg_config" ]

  # MemorySanitizer doesn't support assembly code, so keep it disabled in
  # MSan builds for now.
  if (is_msan) {
    sources += [ "jsimd_none.c" ]
  } else {
    deps = [
      ":simd",
    ]
  }
}

View File

@ -1,391 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

declare_args() {
  # If true, link against the system-installed libsrtp instead of building
  # the bundled copy.
  use_system_libsrtp = false

  # If true, back libsrtp's ciphers/hashes/RNG with BoringSSL (see the
  # sources swap in the static_library below).
  use_srtp_boringssl = true
}

config("libsrtp_config") {
  defines = [
    "HAVE_CONFIG_H",
    "HAVE_STDLIB_H",
    "HAVE_STRING_H",
    "TESTAPP_SOURCE",
  ]

  include_dirs = [
    "config",
    "srtp/include",
    "srtp/crypto/include",
  ]

  if (use_srtp_boringssl) {
    defines += [ "OPENSSL" ]
  }

  if (is_posix) {
    defines += [
      "HAVE_INT16_T",
      "HAVE_INT32_T",
      "HAVE_INT8_T",
      "HAVE_UINT16_T",
      "HAVE_UINT32_T",
      "HAVE_UINT64_T",
      "HAVE_UINT8_T",
      "HAVE_STDINT_H",
      "HAVE_INTTYPES_H",
      "HAVE_NETINET_IN_H",
      "HAVE_ARPA_INET_H",
      "HAVE_UNISTD_H",
    ]
    cflags = [ "-Wno-unused-variable" ]
  }

  if (is_win) {
    defines += [
      "HAVE_BYTESWAP_METHODS_H",

      # All Windows architectures are this way.
      "SIZEOF_UNSIGNED_LONG=4",
      "SIZEOF_UNSIGNED_LONG_LONG=8",
    ]
  }

  if (current_cpu == "x64" || current_cpu == "x86" || current_cpu == "arm") {
    defines += [
      # TODO(leozwang): CPU_RISC doesn't work properly on android/arm
      # platform for unknown reasons, need to investigate the root cause
      # of it. CPU_RISC is used for optimization only, and CPU_CISC should
      # just work just fine, it has been tested on android/arm with srtp
      # test applications and libjingle.
      "CPU_CISC",
    ]
  }

  if (current_cpu == "mipsel") {
    defines += [ "CPU_RISC" ]
  }
}

config("system_libsrtp_config") {
  defines = [ "USE_SYSTEM_LIBSRTP" ]
  include_dirs = [ "/usr/include/srtp" ]
}

if (use_system_libsrtp) {
  # Forwarding target for the system library.
  group("libsrtp") {
    public_configs = [
      ":libsrtp_config",
      ":system_libsrtp_config",
    ]
    libs = [ "-lsrtp" ]
  }
} else {
  static_library("libsrtp") {
    configs -= [ "//build/config/compiler:chromium_code" ]
    configs += [ "//build/config/compiler:no_chromium_code" ]
    public_configs = [ ":libsrtp_config" ]

    sources = [
      # includes
      "srtp/include/ekt.h",
      "srtp/include/getopt_s.h",
      "srtp/include/rtp.h",
      "srtp/include/rtp_priv.h",
      "srtp/include/srtp.h",
      "srtp/include/srtp_priv.h",
      "srtp/include/ut_sim.h",

      # headers
      "srtp/crypto/include/aes.h",
      "srtp/crypto/include/aes_cbc.h",
      "srtp/crypto/include/aes_icm.h",
      "srtp/crypto/include/alloc.h",
      "srtp/crypto/include/auth.h",
      "srtp/crypto/include/cipher.h",
      "srtp/crypto/include/crypto.h",
      "srtp/crypto/include/crypto_kernel.h",
      "srtp/crypto/include/crypto_math.h",
      "srtp/crypto/include/crypto_types.h",
      "srtp/crypto/include/cryptoalg.h",
      "srtp/crypto/include/datatypes.h",
      "srtp/crypto/include/err.h",
      "srtp/crypto/include/gf2_8.h",
      "srtp/crypto/include/hmac.h",
      "srtp/crypto/include/integers.h",
      "srtp/crypto/include/kernel_compat.h",
      "srtp/crypto/include/key.h",
      "srtp/crypto/include/null_auth.h",
      "srtp/crypto/include/null_cipher.h",
      "srtp/crypto/include/prng.h",
      "srtp/crypto/include/rand_source.h",
      "srtp/crypto/include/rdb.h",
      "srtp/crypto/include/rdbx.h",
      "srtp/crypto/include/sha1.h",
      "srtp/crypto/include/stat.h",
      "srtp/crypto/include/xfm.h",

      # sources
      "srtp/crypto/cipher/aes.c",
      "srtp/crypto/cipher/aes_cbc.c",
      "srtp/crypto/cipher/aes_icm.c",
      "srtp/crypto/cipher/cipher.c",
      "srtp/crypto/cipher/null_cipher.c",
      "srtp/crypto/hash/auth.c",
      "srtp/crypto/hash/hmac.c",
      "srtp/crypto/hash/null_auth.c",
      "srtp/crypto/hash/sha1.c",
      "srtp/crypto/kernel/alloc.c",
      "srtp/crypto/kernel/crypto_kernel.c",
      "srtp/crypto/kernel/err.c",
      "srtp/crypto/kernel/key.c",
      "srtp/crypto/math/datatypes.c",
      "srtp/crypto/math/gf2_8.c",
      "srtp/crypto/math/stat.c",
      "srtp/crypto/replay/rdb.c",
      "srtp/crypto/replay/rdbx.c",
      "srtp/crypto/replay/ut_sim.c",
      "srtp/crypto/rng/ctr_prng.c",
      "srtp/crypto/rng/prng.c",
      "srtp/crypto/rng/rand_source.c",
      "srtp/srtp/ekt.c",
      "srtp/srtp/srtp.c",
    ]

    if (is_clang) {
      cflags = [ "-Wno-implicit-function-declaration" ]
    }

    if (use_srtp_boringssl) {
      deps = [
        "//third_party/boringssl:boringssl",
      ]
      public_deps = [
        "//third_party/boringssl:boringssl",
      ]

      # Swap the bundled crypto primitives for their BoringSSL-backed
      # counterparts.
      sources -= [
        "srtp/crypto/cipher/aes_cbc.c",
        "srtp/crypto/cipher/aes_icm.c",
        "srtp/crypto/hash/hmac.c",
        "srtp/crypto/hash/sha1.c",
        "srtp/crypto/rng/ctr_prng.c",
        "srtp/crypto/rng/prng.c",
      ]
      sources += [
        "srtp/crypto/cipher/aes_gcm_ossl.c",
        "srtp/crypto/cipher/aes_icm_ossl.c",
        "srtp/crypto/hash/hmac_ossl.c",
        "srtp/crypto/include/aes_gcm_ossl.h",
        "srtp/crypto/include/aes_icm_ossl.h",
      ]
    }
  }

  # TODO(GYP): A bunch of these tests don't compile (in gyp either). They're
  # not very broken, so could probably be made to work if it's useful.
  if (!is_win) {
    executable("rdbx_driver") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/include/getopt_s.h",
        "srtp/test/getopt_s.c",
        "srtp/test/rdbx_driver.c",
      ]
    }

    executable("srtp_driver") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/include/getopt_s.h",
        "srtp/include/srtp_priv.h",
        "srtp/test/getopt_s.c",
        "srtp/test/srtp_driver.c",
      ]
    }

    executable("roc_driver") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/crypto/include/rdbx.h",
        "srtp/include/ut_sim.h",
        "srtp/test/roc_driver.c",
      ]
    }

    executable("replay_driver") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/crypto/include/rdbx.h",
        "srtp/include/ut_sim.h",
        "srtp/test/replay_driver.c",
      ]
    }

    executable("rtpw") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/crypto/include/datatypes.h",
        "srtp/include/getopt_s.h",
        "srtp/include/rtp.h",
        "srtp/include/srtp.h",
        "srtp/test/getopt_s.c",
        "srtp/test/rtp.c",
        "srtp/test/rtpw.c",
      ]
      if (is_android) {
        defines = [ "HAVE_SYS_SOCKET_H" ]
      }
      if (is_clang) {
        cflags = [ "-Wno-implicit-function-declaration" ]
      }
    }

    executable("srtp_test_cipher_driver") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/crypto/test/cipher_driver.c",
        "srtp/include/getopt_s.h",
        "srtp/test/getopt_s.c",
      ]
    }

    executable("srtp_test_datatypes_driver") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/crypto/test/datatypes_driver.c",
      ]
    }

    executable("srtp_test_stat_driver") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/crypto/test/stat_driver.c",
      ]
    }

    executable("srtp_test_sha1_driver") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/crypto/test/sha1_driver.c",
      ]
    }

    executable("srtp_test_kernel_driver") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/crypto/test/kernel_driver.c",
        "srtp/include/getopt_s.h",
        "srtp/test/getopt_s.c",
      ]
    }

    executable("srtp_test_aes_calc") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/crypto/test/aes_calc.c",
      ]
    }

    executable("srtp_test_rand_gen") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/crypto/test/rand_gen.c",
        "srtp/include/getopt_s.h",
        "srtp/test/getopt_s.c",
      ]
    }

    executable("srtp_test_rand_gen_soak") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/crypto/test/rand_gen_soak.c",
        "srtp/include/getopt_s.h",
        "srtp/test/getopt_s.c",
      ]
    }

    executable("srtp_test_env") {
      configs -= [ "//build/config/compiler:chromium_code" ]
      configs += [ "//build/config/compiler:no_chromium_code" ]
      deps = [
        ":libsrtp",
      ]
      sources = [
        "srtp/crypto/test/env.c",
      ]
    }

    # Convenience group that builds every test driver above.
    group("srtp_runtest") {
      deps = [
        ":rdbx_driver",
        ":srtp_driver",
        ":roc_driver",
        ":replay_driver",
        ":rtpw",
        ":srtp_test_cipher_driver",
        ":srtp_test_datatypes_driver",
        ":srtp_test_stat_driver",
        ":srtp_test_sha1_driver",
        ":srtp_test_kernel_driver",
        ":srtp_test_aes_calc",
        ":srtp_test_rand_gen",
        ":srtp_test_rand_gen_soak",
        ":srtp_test_env",
      ]
    }
  }
}

File diff suppressed because it is too large Load Diff

View File

@ -1,27 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# This target creates a stamp file that depends on all the sources in the grit
# directory. By depending on this, a target can force itself to be rebuilt if
# grit itself changes.
action("grit_sources") {
  depfile = "$target_out_dir/grit_sources.d"
  script = "//build/secondary/tools/grit/stamp_grit_sources.py"

  inputs = [
    "grit.py",
  ]

  # Note that we can't call this "grit_sources.stamp" because that file is
  # implicitly created by GN for script actions.
  outputs = [
    "$target_out_dir/grit_sources.script.stamp",
  ]

  # Args: grit source dir, stamp to write, depfile to write -- all relative
  # to the build dir.
  args = [
    rebase_path("//tools/grit", root_build_dir),
    rebase_path(outputs[0], root_build_dir),
    rebase_path(depfile, root_build_dir),
  ]
}

View File

@ -1,483 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Instantiate grit. This will produce a script target to run grit, and a
# static library that compiles the .cc files.
#
# Parameters
#
# source (required)
# Path to .grd file.
#
# outputs (required)
# List of outputs from grit, relative to the target_gen_dir. Grit will
# verify at build time that this list is correct and will fail if there
# is a mismatch between the outputs specified by the .grd file and the
# outputs list here.
#
# To get this list, you can look in the .grd file for
# <output filename="..." and put those filename here. The base directory
# of the list in Grit and the output list specified in the GN grit target
# are the same (the target_gen_dir) so you can generally copy the names
# exactly.
#
#   To get the list of outputs programmatically, run:
# python tools/grit/grit_info.py --outputs . path/to/your.grd
# And strip the leading "./" from the output files.
#
# defines (optional)
# Extra defines to pass to grit (on top of the global grit_defines list).
#
# grit_flags (optional)
# List of strings containing extra command-line flags to pass to Grit.
#
# resource_ids (optional)
# Path to a grit "firstidsfile". Default is
# //tools/gritsettings/resource_ids. Set to "" to use the value specified
# in the <grit> nodes of the processed files.
#
# output_dir (optional)
# Directory for generated files. If you specify this, you will often
# want to specify output_name if the target name is not particularly
# unique, since this can cause files from multiple grit targets to
# overwrite each other.
#
#   output_name (optional)
# Provide an alternate base name for the generated files, like the .d
# files. Normally these are based on the target name and go in the
# output_dir, but if multiple targets with the same name end up in
# the same output_dir, they can collide.
#
# depfile_dir (optional)
# If set, used to store the depfile and corresponding stamp file.
# Defaults to output_dir
#
# use_qualified_include (optional)
# If set, output_dir is not added to include_dirs.
#
# configs (optional)
# List of additional configs to be applied to the generated target.
# deps (optional)
# inputs (optional)
# List of additional files, required for grit to process source file.
# visibility (optional)
# Normal meaning.
#
# Example
#
# grit("my_resources") {
# # Source and outputs are required.
# source = "myfile.grd"
# outputs = [
# "foo_strings.h",
# "foo_strings.pak",
# ]
#
# grit_flags = [ "-E", "foo=bar" ] # Optional extra flags.
# # You can also put deps here if the grit source depends on generated
# # files.
# }
import("//build/config/chrome_build.gni")
import("//build/config/crypto.gni")
import("//build/config/features.gni")
import("//build/config/ui.gni")

# Global "-D"/"-E"/"-t"/"-w" flags accumulated from build configuration;
# passed to the grit tool on every invocation.
grit_defines = []

# Mac and iOS want Title Case strings.
use_titlecase_in_grd_files = is_mac || is_ios
if (use_titlecase_in_grd_files) {
  grit_defines += [
    "-D",
    "use_titlecase",
  ]
}

if (is_chrome_branded) {
  grit_defines += [
    "-D",
    "_google_chrome",
    "-E",
    "CHROMIUM_BUILD=google_chrome",
  ]
} else {
  grit_defines += [
    "-D",
    "_chromium",
    "-E",
    "CHROMIUM_BUILD=chromium",
  ]
}

if (is_chromeos) {
  grit_defines += [
    "-D",
    "chromeos",
    "-D",
    "scale_factors=2x",
  ]
}

if (is_desktop_linux) {
  grit_defines += [
    "-D",
    "desktop_linux",
  ]
}

if (toolkit_views) {
  grit_defines += [
    "-D",
    "toolkit_views",
  ]
}

if (use_aura) {
  grit_defines += [
    "-D",
    "use_aura",
  ]
}

if (use_ash) {
  grit_defines += [
    "-D",
    "use_ash",
  ]
}

if (use_nss_certs) {
  grit_defines += [
    "-D",
    "use_nss_certs",
  ]
}

if (use_ozone) {
  grit_defines += [
    "-D",
    "use_ozone",
  ]
}

if (enable_image_loader_extension) {
  grit_defines += [
    "-D",
    "image_loader_extension",
  ]
}

if (enable_remoting) {
  grit_defines += [
    "-D",
    "remoting",
  ]
}

if (is_android) {
  grit_defines += [
    "-t",
    "android",
    "-E",
    "ANDROID_JAVA_TAGGED_ONLY=true",
  ]
}

if (is_mac || is_ios) {
  grit_defines += [
    "-D",
    "scale_factors=2x",
  ]
}

if (is_ios) {
  grit_defines += [
    "-t",
    "ios",

    # iOS uses a whitelist to filter resources.
    "-w",
    rebase_path("//build/ios/grit_whitelist.txt", root_build_dir),
  ]
}

if (enable_extensions) {
  grit_defines += [
    "-D",
    "enable_extensions",
  ]
}

if (enable_media_router) {
  grit_defines += [
    "-D",
    "enable_media_router",
  ]
}

if (enable_plugins) {
  grit_defines += [
    "-D",
    "enable_plugins",
  ]
}

if (enable_basic_printing || enable_print_preview) {
  grit_defines += [
    "-D",
    "enable_printing",
  ]
  if (enable_print_preview) {
    grit_defines += [
      "-D",
      "enable_print_preview",
    ]
  }
}

if (enable_themes) {
  grit_defines += [
    "-D",
    "enable_themes",
  ]
}

if (enable_app_list) {
  grit_defines += [
    "-D",
    "enable_app_list",
  ]
}

if (enable_settings_app) {
  grit_defines += [
    "-D",
    "enable_settings_app",
  ]
}

if (enable_google_now) {
  grit_defines += [
    "-D",
    "enable_google_now",
  ]
}

# Note: use_concatenated_impulse_responses is omitted. It is never used and
# should probably be removed from GYP build.

if (enable_webrtc) {
  grit_defines += [
    "-D",
    "enable_webrtc",
  ]
}

if (enable_hangout_services_extension) {
  grit_defines += [
    "-D",
    "enable_hangout_services_extension",
  ]
}

if (enable_task_manager) {
  grit_defines += [
    "-D",
    "enable_task_manager",
  ]
}

if (enable_notifications) {
  grit_defines += [
    "-D",
    "enable_notifications",
  ]
}

if (enable_wifi_bootstrapping) {
  grit_defines += [
    "-D",
    "enable_wifi_bootstrapping",
  ]
}

if (enable_service_discovery) {
  grit_defines += [
    "-D",
    "enable_service_discovery",
  ]
}

if (mac_views_browser) {
  grit_defines += [
    "-D",
    "mac_views_browser",
  ]
}

# Shared defaults consumed by the grit() template defined below in this file.
grit_resource_id_file = "//tools/gritsettings/resource_ids"
grit_info_script = "//tools/grit/grit_info.py"
# Compiles a .grd resource file with grit and wraps the generated sources in
# a source_set named after the invoking target, so other targets can simply
# depend on the template instantiation.
#
# Parameters:
#   source [required]
#     The .grd file to compile.
#   outputs [required]
#     The files grit is expected to produce (verified at runtime).
#   resource_ids [optional]
#     Resource-IDs file; "" disables the ID file entirely.
#   output_dir, output_name, depfile_dir [optional]
#     Override where outputs and the depfile are placed.
#   grit_flags, defines [optional]
#     Extra flags / extra -D defines passed to grit.
#   use_qualified_include [optional]
#     When true, output_dir is not added to include_dirs.
#   deps, inputs, configs, public_configs, visibility [optional]
#     Normal meaning.
template("grit") {
  assert(defined(invoker.source),
         "\"source\" must be defined for the grit template $target_name")

  grit_inputs = [ invoker.source ]

  if (defined(invoker.resource_ids)) {
    resource_ids = invoker.resource_ids
  } else {
    resource_ids = grit_resource_id_file
  }
  if (resource_ids != "") {
    # The script depends on the ID file. Only add this dependency if the ID
    # file is specified.
    grit_inputs += [ resource_ids ]
  }

  if (defined(invoker.output_dir)) {
    output_dir = invoker.output_dir
  } else {
    output_dir = target_gen_dir
  }

  if (defined(invoker.output_name)) {
    grit_output_name = invoker.output_name
  } else {
    grit_output_name = target_name
  }

  if (defined(invoker.depfile_dir)) {
    depfile_dir = invoker.depfile_dir
  } else {
    depfile_dir = output_dir
  }

  # These are all passed as arguments to the script so have to be relative to
  # the build directory.
  if (resource_ids != "") {
    resource_ids = rebase_path(resource_ids, root_build_dir)
  }
  rebased_output_dir = rebase_path(output_dir, root_build_dir)
  source_path = rebase_path(invoker.source, root_build_dir)

  if (defined(invoker.grit_flags)) {
    grit_flags = invoker.grit_flags
  } else {
    grit_flags = []  # These are optional so default to empty list.
  }

  assert_files_flags = []

  # We want to make sure the declared outputs actually match what Grit is
  # writing. We write the list to a file (some of the output lists are long
  # enough to not fit on a Windows command line) and ask Grit to verify those
  # are the actual outputs at runtime.
  asserted_list_file =
      "$target_out_dir/${grit_output_name}_expected_outputs.txt"
  write_file(asserted_list_file,
             rebase_path(invoker.outputs, root_build_dir, output_dir))
  assert_files_flags += [ "--assert-file-list=" +
                          rebase_path(asserted_list_file, root_build_dir) ]
  grit_outputs =
      get_path_info(rebase_path(invoker.outputs, ".", output_dir), "abspath")

  # The config and the action below get this visibility so that only the
  # generated source set can depend on them. The variable "target_name" will
  # get overwritten inside the inner targets so we need to compute it here.
  target_visibility = [ ":$target_name" ]

  # The current grit setup makes a file in $output_dir/grit/foo.h that
  # the source code expects to include via "grit/foo.h". It would be nice to
  # change this to including absolute paths relative to the root gen directory
  # (like "mycomponent/foo.h"). This config sets up the include path.
  grit_config = target_name + "_grit_config"
  config(grit_config) {
    if (!defined(invoker.use_qualified_include) ||
        !invoker.use_qualified_include) {
      include_dirs = [ output_dir ]
    }
    visibility = target_visibility
  }

  grit_custom_target = target_name + "_grit"
  action(grit_custom_target) {
    script = "//tools/grit/grit.py"
    inputs = grit_inputs

    # grit only rewrites outputs that changed (--write-only-new=1), so we
    # depend on a stamp file rather than the outputs themselves to keep
    # ninja's dirty-checking consistent.
    depfile = "$depfile_dir/${grit_output_name}_stamp.d"
    outputs = [ "${depfile}.stamp" ] + grit_outputs

    args = [
      "-i",
      source_path,
      "build",
    ]
    if (resource_ids != "") {
      args += [
        "-f",
        resource_ids,
      ]
    }
    args += [
      "-o",
      rebased_output_dir,
      "--depdir",
      ".",
      "--depfile",
      rebase_path(depfile, root_build_dir),
      "--write-only-new=1",
      "--depend-on-stamp",
    ] + grit_defines

    # Add extra defines with -D flags.
    if (defined(invoker.defines)) {
      foreach(i, invoker.defines) {
        args += [
          "-D",
          i,
        ]
      }
    }

    args += grit_flags + assert_files_flags

    if (defined(invoker.visibility)) {
      # This needs to include both what the invoker specified (since they
      # probably include generated headers from this target), as well as the
      # generated source set (since there's no guarantee that the visibility
      # specified by the invoker includes our target).
      #
      # Only define visibility at all if the invoker specified it. Otherwise,
      # we want to keep the public "no visibility specified" default.
      visibility = target_visibility + invoker.visibility
    }

    deps = [
      "//tools/grit:grit_sources",
    ]
    if (defined(invoker.deps)) {
      deps += invoker.deps
    }
    if (defined(invoker.inputs)) {
      inputs += invoker.inputs
    }
  }

  # This is the thing that people actually link with, it must be named the
  # same as the argument the template was invoked with.
  source_set(target_name) {
    # Since we generate a file, we need to be run before the targets that
    # depend on us.
    sources = grit_outputs

    # Deps set on the template invocation will go on the action that runs
    # grit above rather than this library. This target needs to depend on the
    # action publicly so other scripts can take the outputs from the grit
    # script as inputs.
    public_deps = [
      ":$grit_custom_target",
    ]
    public_configs = [ ":$grit_config" ]
    if (defined(invoker.public_configs)) {
      public_configs += invoker.public_configs
    }

    if (defined(invoker.configs)) {
      configs += invoker.configs
    }

    if (defined(invoker.visibility)) {
      visibility = invoker.visibility
    }
    output_name = grit_output_name
  }
}

View File

@ -1,47 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file defines a template to invoke grit repack in a consistent manner.
#
# Parameters:
# sources [required]
# List of pak files that need to be combined.
#
# output [required]
# File name (single string) of the output file.
#
# repack_options [optional]
# List of extra arguments to pass.
#
# deps [optional]
# visibility [optional]
# Normal meaning.
# Invokes grit's repack.py to combine several .pak files into one.
# See the parameter documentation in the file-header comment above.
template("repack") {
  action(target_name) {
    assert(defined(invoker.sources), "Need sources for $target_name")
    assert(defined(invoker.output), "Need output for $target_name")

    if (defined(invoker.visibility)) {
      visibility = invoker.visibility
    }

    script = "//tools/grit/grit/format/repack.py"

    inputs = invoker.sources
    outputs = [
      invoker.output,
    ]

    args = []
    if (defined(invoker.repack_options)) {
      args += invoker.repack_options
    }

    # repack.py expects "[options] <output> <input>...", with all paths
    # relative to the build directory.
    args += [ rebase_path(invoker.output, root_build_dir) ]
    args += rebase_path(invoker.sources, root_build_dir)

    if (defined(invoker.deps)) {
      deps = invoker.deps
    }
  }
}

View File

@ -1,55 +0,0 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script enumerates the files in the given directory, writing an empty
# stamp file and a .d file listing the inputs required to make the stamp. This
# allows us to dynamically depend on the grit sources without enumerating the
# grit directory for every invocation of grit (which is what adding the source
# files to every .grd file's .d file would entail) or shelling out to grit
# synchronously during GN execution to get the list (which would be slow).
#
# Usage:
# stamp_grit_sources.py <directory> <stamp-file> <.d-file>
import os
import sys
def GritSourceFiles(grit_root_dir):
  """Returns a sorted list of the grit Python sources under grit_root_dir.

  Every '.py' file is included except '*_unittest.py' test files.  Path
  separators are normalized to forward slashes so the resulting .d file is
  the same on every platform.
  """
  collected = []
  for dirpath, _, names in os.walk(grit_root_dir):
    for name in names:
      if name.endswith('.py') and not name.endswith('_unittest.py'):
        collected.append(os.path.join(dirpath, name).replace('\\', '/'))
  return sorted(collected)
def WriteDepFile(dep_file, stamp_file, source_files):
  """Writes a make-style dependency file: '<stamp_file>: <src> <src> ...'."""
  contents = '%s: %s' % (stamp_file, ' '.join(source_files))
  with open(dep_file, 'w') as out:
    out.write(contents)
def WriteStampFile(stamp_file):
  """Creates (or truncates to empty) the stamp file."""
  open(stamp_file, "w").close()
def main(argv):
  """Writes the stamp file and the .d file listing the grit sources.

  Args:
    argv: [script_name, grit_root_dir, stamp_file, dep_file].

  Returns:
    0 on success, 1 when the wrong number of arguments is supplied.
  """
  if len(argv) != 4:
    # Three real arguments plus the script name.  Errors belong on stderr;
    # sys.stderr.write also avoids the Python-2-only `print` statement the
    # original used.
    sys.stderr.write(
        'Usage: stamp_grit_sources.py <directory> <stamp-file> <.d-file>\n')
    return 1
  # Bug fix: read from the argv parameter instead of sys.argv, so callers
  # that pass a different argument list (e.g. tests) are honored.
  grit_root_dir = argv[1]
  stamp_file = argv[2]
  dep_file = argv[3]
  WriteStampFile(stamp_file)
  WriteDepFile(dep_file, stamp_file, GritSourceFiles(grit_root_dir))
  return 0
# Script entry point: forward the full command line to main().
if __name__ == '__main__':
  sys.exit(main(sys.argv))

View File

@ -1,24 +0,0 @@
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/toolchain/gcc_toolchain.gni")
declare_args() {
  # Prefix of the cross-compiler binaries (e.g. "arm-linux-gnueabihf-").
  # Defaults to empty; the "target" toolchain in this file asserts that the
  # invoker set it to a non-empty value.
  toolchain_prefix = ""
}
# A generic GCC cross toolchain whose tool names are derived entirely from
# the user-supplied toolchain_prefix build argument.
gcc_toolchain("target") {
  assert(toolchain_prefix != "", "Must provide toolchain_prefix")

  cc = "${toolchain_prefix}gcc"
  cxx = "${toolchain_prefix}g++"
  ar = "${toolchain_prefix}ar"
  # Link with the C++ driver so the C++ runtime is picked up automatically.
  ld = cxx
  readelf = "${toolchain_prefix}readelf"
  nm = "${toolchain_prefix}nm"

  toolchain_cpu = "${target_cpu}"
  toolchain_os = "linux"
  is_clang = is_clang
}

View File

@ -8,6 +8,10 @@ import("//build/toolchain/clang.gni")
import("//build/toolchain/gcc_toolchain.gni")
import("//build/toolchain/goma.gni")
declare_args() {
toolchain_prefix = ""
}
if (use_goma) {
assert(!use_ccache, "Goma and ccache can't be used together.")
compiler_prefix = "$goma_dir/gomacc "
@ -31,14 +35,23 @@ gcc_toolchain("arm") {
is_clang = false
}
gcc_toolchain("arm64") {
cc = "${compiler_prefix}aarch64-linux-gnu-gcc"
cxx = "${compiler_prefix}aarch64-linux-gnu-g++"
ar = "aarch64-linux-gnu-ar"
ld = cxx
readelf = "aarch64-linux-gnu-readelf"
nm = "aarch64-linux-gnu-nm"
toolchain_cpu = "arm64"
toolchain_os = "linux"
is_clang = false
}
gcc_toolchain("clang_x86") {
if (use_clang_type_profiler) {
prefix = rebase_path("//third_party/llvm-allocated-type/Linux_ia32/bin",
root_build_dir)
} else {
prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
root_build_dir)
}
prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
root_build_dir)
cc = "${compiler_prefix}$prefix/clang"
cxx = "${compiler_prefix}$prefix/clang++"
readelf = "readelf"
@ -53,7 +66,7 @@ gcc_toolchain("clang_x86") {
gcc_toolchain("x86") {
cc = "${compiler_prefix}gcc"
cxx = "$compiler_prefix}g++"
cxx = "${compiler_prefix}g++"
readelf = "readelf"
nm = "nm"
@ -66,13 +79,8 @@ gcc_toolchain("x86") {
}
gcc_toolchain("clang_x64") {
if (use_clang_type_profiler) {
prefix = rebase_path("//third_party/llvm-allocated-type/Linux_x64/bin",
root_build_dir)
} else {
prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
root_build_dir)
}
prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
root_build_dir)
cc = "${compiler_prefix}$prefix/clang"
cxx = "${compiler_prefix}$prefix/clang++"
@ -102,14 +110,14 @@ gcc_toolchain("x64") {
}
gcc_toolchain("mipsel") {
cc = "mipsel-linux-gnu-gcc"
cxx = "mipsel-linux-gnu-g++"
ar = "mipsel-linux-gnu-ar"
cc = "${toolchain_prefix}gcc"
cxx = "${toolchain_prefix}g++"
ar = "${toolchain_prefix}ar"
ld = cxx
readelf = "mipsel-linux-gnu-readelf"
nm = "mipsel-linux-gnu-nm"
readelf = "${toolchain_prefix}readelf"
nm = "${toolchain_prefix}nm"
toolchain_cpu = "mipsel"
toolchain_cpu = "${target_cpu}"
toolchain_os = "linux"
is_clang = false
is_clang = is_clang
}

View File

@ -2,10 +2,6 @@
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
# TODO(zra): These build arguments should likely be moved to a gni file that is
# included in BUILD.gn files that care about the values of the flags. For now,
# since the GN build only happens as part of a Mojo build there is no need for
# the indirection.
declare_args() {
# Instead of using is_debug, we introduce a different flag for specifying a
# Debug build of Dart so that clients can still use a Release build of Dart
@ -20,6 +16,10 @@ declare_args() {
# CPU profiling features enabled.
# 'release' - The VM is built to run with AOT compiled code with no developer
# features enabled.
#
# These settings are only used for Flutter, at the moment. A standalone build
# of the Dart VM should leave this set to "develop", and should set
# 'is_debug', 'is_release', or 'is_product'.
dart_runtime_mode = "develop"
# Explicitly set the target architecture in case of precompilation. Leaving
@ -36,8 +36,8 @@ config("dart_public_config") {
]
}
# Controls PRODUCT #define.
config("dart_product_config") {
# Adds PRODUCT define if Flutter has specified "release" for dart_runtime_mode
config("dart_maybe_product_config") {
defines = []
if ((dart_runtime_mode != "develop") &&
@ -56,8 +56,9 @@ config("dart_product_config") {
}
}
# Controls DART_PRECOMPILED_RUNTIME #define.
config("dart_precompiled_runtime_config") {
# Adds the DART_PRECOMPILED_RUNTIME define if Flutter has specified "profile" or
# "release" for dart_runtime_mode.
config("dart_maybe_precompiled_runtime_config") {
defines = []
if ((dart_runtime_mode != "develop") &&
@ -83,12 +84,22 @@ config("dart_precompiled_runtime_config") {
}
}
config("dart_precompiled_runtime_config") {
defines = []
defines += ["DART_PRECOMPILED_RUNTIME"]
}
# Controls DART_PRECOMPILER #define.
config("dart_precompiler_config") {
defines = []
defines += ["DART_PRECOMPILER"]
}
config("dart_no_snapshot_config") {
defines = []
defines += ["DART_NO_SNAPSHOT"]
}
config("dart_config") {
defines = []
@ -97,23 +108,39 @@ config("dart_config") {
}
if (dart_target_arch != "") {
if (dart_target_arch == "arm") {
if ((dart_target_arch == "arm") ||
(dart_target_arch == "simarm")) {
defines += [ "TARGET_ARCH_ARM" ]
if (target_os == "mac" || target_os == "ios") {
defines += [ "TARGET_ABI_IOS" ]
} else {
defines += [ "TARGET_ABI_EABI" ]
}
} else if (dart_target_arch == "arm64") {
} else if ((dart_target_arch == "armv6") ||
(dart_target_arch == "simarmv6")) {
defines += [ "TARGET_ARCH_ARM" ]
defines += [ "TARGET_ARCH_ARM_6" ]
defines += [ "TARGET_ABI_EABI" ]
} else if ((dart_target_arch == "armv5te") ||
(dart_target_arch == "simarmv5te")) {
defines += [ "TARGET_ARCH_ARM" ]
defines += [ "TARGET_ARCH_ARM_5TE" ]
defines += [ "TARGET_ABI_EABI" ]
} else if ((dart_target_arch == "arm64") ||
(dart_target_arch == "simarm64")) {
defines += [ "TARGET_ARCH_ARM64" ]
} else if (dart_target_arch == "mips") {
} else if ((dart_target_arch == "mips") ||
(dart_target_arch == "simmips")) {
defines += [ "TARGET_ARCH_MIPS" ]
} else if (dart_target_arch == "x64") {
defines += [ "TARGET_ARCH_X64" ]
} else if (dart_target_arch == "ia32") {
defines += [ "TARGET_ARCH_IA32" ]
} else if (dart_target_arch == "dbc") {
} else if ((dart_target_arch == "dbc") ||
(dart_target_arch == "simdbc") ||
(dart_target_arch == "simdbc64")) {
defines += [ "TARGET_ARCH_DBC" ]
defines += [ "USING_SIMULATOR" ]
} else {
print("Invalid |dart_target_arch|")
assert(false)
@ -159,31 +186,97 @@ config("dart_config") {
}
}
static_library("libdart") {
configs += [":dart_config",
":dart_product_config",
":dart_precompiled_runtime_config"]
deps = [
template("libdart_library") {
extra_configs = []
if (defined(invoker.extra_configs)) {
extra_configs += invoker.extra_configs
}
extra_deps = []
if (defined(invoker.extra_deps)) {
extra_deps += invoker.extra_deps
}
static_library(target_name) {
configs += [
":dart_config",
":dart_maybe_product_config"
] + extra_configs
deps = [
"vm:libdart_platform",
"third_party/double-conversion/src:libdouble_conversion",
":generate_version_cc_file",
] + extra_deps
include_dirs = [
".",
]
public_configs = [":dart_public_config"]
sources = [
"include/dart_api.h",
"include/dart_mirrors_api.h",
"include/dart_native_api.h",
"include/dart_tools_api.h",
"vm/dart_api_impl.cc",
"vm/debugger_api_impl.cc",
"vm/mirrors_api_impl.cc",
"vm/native_api_impl.cc",
"vm/version.h",
"$target_gen_dir/version.cc",
]
defines = [
"DART_SHARED_LIB",
]
}
}
libdart_library("libdart") {
extra_configs = [
":dart_maybe_precompiled_runtime_config"
]
extra_deps = [
"vm:libdart_lib",
"vm:libdart_vm",
"third_party/double-conversion/src:libdouble_conversion",
":generate_version_cc_file",
]
include_dirs = [
".",
}
libdart_library("libdart_precompiled_runtime") {
extra_configs = [
":dart_precompiled_runtime_config"
]
public_configs = [":dart_public_config"]
sources = [
"include/dart_api.h",
"include/dart_mirrors_api.h",
"include/dart_native_api.h",
"include/dart_tools_api.h",
"vm/dart_api_impl.cc",
"vm/debugger_api_impl.cc",
"vm/mirrors_api_impl.cc",
"vm/native_api_impl.cc",
"vm/version.h",
"$target_gen_dir/version.cc",
extra_deps = [
"vm:libdart_lib_precompiled_runtime",
"vm:libdart_vm_precompiled_runtime",
]
}
libdart_library("libdart_nosnapshot") {
extra_configs = [
":dart_no_snapshot_config",
":dart_maybe_precompiled_runtime_config"
]
extra_deps = [
"vm:libdart_lib_nosnapshot",
"vm:libdart_vm_nosnapshot",
]
}
libdart_library("libdart_nosnapshot_precompiled_runtime") {
extra_configs = [
":dart_no_snapshot_config",
":dart_precompiled_runtime_config"
]
extra_deps = [
"vm:libdart_lib_nosnapshot_precompiled_runtime",
"vm:libdart_vm_nosnapshot_precompiled_runtime",
]
}
libdart_library("libdart_nosnapshot_with_precompiler") {
extra_configs = [
":dart_no_snapshot_config",
":dart_precompiler_config",
]
extra_deps = [
"vm:libdart_lib_nosnapshot_with_precompiler",
"vm:libdart_vm_nosnapshot_with_precompiler",
]
}
@ -211,8 +304,7 @@ action("generate_version_cc_file") {
executable("libdart_dependency_helper") {
configs += [":dart_config",
":dart_product_config",
":dart_precompiled_runtime_config"]
":dart_maybe_product_config"]
deps = [
"vm:libdart_lib_nosnapshot",
"vm:libdart_lib",

View File

@ -6,6 +6,12 @@ declare_args() {
# Whether to fall back to built-in root certificates when they cannot be
# verified at the operating system level.
dart_use_fallback_root_certificates = false
# The BUILD.gn file that we pull from chromium as part of zlib has a
# dependence on //base, which we don't pull in. In a standalone build of the
# VM, we set this to //runtime/bin/zlib where we have a BUILD.gn file without
# a dependence on //base.
dart_zlib_path = "//third_party/zlib"
}
resources_sources_gypi =
@ -37,7 +43,6 @@ action("gen_resources_cc") {
] + rebase_path(sources, root_build_dir)
}
template("gen_library_src_path") {
assert(defined(invoker.sources), "Need sources in $target_name")
assert(defined(invoker.output), "Need output in $target_name")
@ -51,17 +56,20 @@ template("gen_library_src_path") {
outputs = [ invoker.output, ]
name = invoker.name
kind = invoker.kind
library_name = "dart:${name}"
if (defined(invoker.library_name)) {
library_name = invoker.library_name
}
args = [
"--output", rebase_path(invoker.output, root_build_dir),
"--input_cc", rebase_path("builtin_in.cc", root_build_dir),
"--include", "bin/builtin.h",
"--var_name", "dart::bin::Builtin::${name}_${kind}_paths_",
"--library_name", "dart:${name}",] +
"--library_name", library_name,] +
rebase_path(invoker.sources, root_build_dir)
}
}
builtin_sources_gypi =
exec_script("../../tools/gypi_to_gn.py",
[rebase_path("builtin_sources.gypi")],
@ -75,7 +83,6 @@ gen_library_src_path("generate_builtin_cc_file") {
output = "$target_gen_dir/builtin_gen.cc"
}
sdk_io_sources_gypi =
exec_script("../../tools/gypi_to_gn.py",
[rebase_path("../../sdk/lib/io/io_sources.gypi")],
@ -104,14 +111,105 @@ gen_library_src_path("generate_io_patch_cc_file") {
output = "$target_gen_dir/io_patch_gen.cc"
}
gen_library_src_path("generate_html_cc_file") {
name = "html"
kind = "source"
sources = ["../../sdk/lib/html/dartium/html_dartium.dart"]
output = "$target_gen_dir/html_gen.cc"
}
gen_library_src_path("generate_html_common_cc_file") {
name = "html_common"
kind = "source"
sources = [
"../../sdk/lib/html/html_common/html_common.dart",
"../../sdk/lib/html/html_common/css_class_set.dart",
"../../sdk/lib/html/html_common/device.dart",
"../../sdk/lib/html/html_common/filtered_element_list.dart",
"../../sdk/lib/html/html_common/lists.dart",
"../../sdk/lib/html/html_common/conversions.dart",
"../../sdk/lib/html/html_common/conversions_dartium.dart",
]
output = "$target_gen_dir/html_common_gen.cc"
}
gen_library_src_path("generate_js_cc_file") {
name = "js"
kind = "source"
sources = ["../../sdk/lib/js/dartium/js_dartium.dart"]
output = "$target_gen_dir/js_gen.cc"
}
gen_library_src_path("generate_blink_cc_file") {
name = "_blink"
kind = "source"
sources = ["../../sdk/lib/_blink/dartium/_blink_dartium.dart"]
output = "$target_gen_dir/blink_gen.cc"
}
gen_library_src_path("generate_indexed_db_cc_file") {
name = "indexed_db"
kind = "source"
sources = ["../../sdk/lib/indexed_db/dartium/indexed_db_dartium.dart"]
output = "$target_gen_dir/indexed_db_gen.cc"
}
gen_library_src_path("generate_cached_patches_cc_file") {
name = "cached_patches"
library_name = "cached_patches.dart"
kind = "sources"
sources = ["../../sdk/lib/js/dartium/cached_patches.dart"]
output = "$target_gen_dir/cached_patches_gen.cc"
}
gen_library_src_path("generate_web_gl_cc_file") {
name = "web_gl"
kind = "source"
sources = ["../../sdk/lib/web_gl/dartium/web_gl_dartium.dart"]
output = "$target_gen_dir/web_gl_gen.cc"
}
gen_library_src_path("generate_metadata_cc_file") {
name = "metadata"
library_name = "metadata.dart"
kind = "source"
sources = ["../../sdk/lib/html/html_common/metadata.dart"]
output = "$target_gen_dir/metadata_gen.cc"
}
gen_library_src_path("generate_web_sql_cc_file") {
name = "web_sql"
kind = "source"
sources = ["../../sdk/lib/web_sql/dartium/web_sql_dartium.dart"]
output = "$target_gen_dir/web_sql_gen.cc"
}
gen_library_src_path("generate_svg_cc_file") {
name = "svg"
kind = "source"
sources = ["../../sdk/lib/svg/dartium/svg_dartium.dart"]
output = "$target_gen_dir/svg_gen.cc"
}
gen_library_src_path("generate_web_audio_cc_file") {
name = "web_audio"
kind = "source"
sources = ["../../sdk/lib/web_audio/dartium/web_audio_dartium.dart"]
output = "$target_gen_dir/web_audio_gen.cc"
}
config("libdart_builtin_config") {
libs = [
"dl",
]
if (is_android) {
libs += [
"android",
"log",
]
}
}
builtin_impl_sources_gypi =
exec_script("../../tools/gypi_to_gn.py",
[rebase_path("builtin_impl_sources.gypi")],
@ -119,12 +217,24 @@ builtin_impl_sources_gypi =
["builtin_impl_sources.gypi"])
static_library("libdart_builtin") {
configs += ["..:dart_config", "..:dart_product_config"]
configs += ["..:dart_config",
"..:dart_maybe_product_config"]
public_configs = [":libdart_builtin_config"]
deps = [
":generate_builtin_cc_file",
":generate_io_cc_file",
":generate_io_patch_cc_file",
":generate_html_cc_file",
":generate_html_common_cc_file",
":generate_js_cc_file",
":generate_blink_cc_file",
":generate_indexed_db_cc_file",
":generate_cached_patches_cc_file",
":generate_web_gl_cc_file",
":generate_metadata_cc_file",
":generate_web_sql_cc_file",
":generate_svg_cc_file",
":generate_web_audio_cc_file",
]
include_dirs = [
"..",
@ -139,74 +249,6 @@ static_library("libdart_builtin") {
] + builtin_impl_sources_gypi.sources
}
static_library("libdart_nosnapshot") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_precompiled_runtime_config"]
deps = [
"../vm:libdart_lib_nosnapshot",
"../vm:libdart_vm_nosnapshot",
"../vm:libdart_platform",
"../third_party/double-conversion/src:libdouble_conversion",
"..:generate_version_cc_file",
]
sources = [
"../include/dart_api.h",
"../include/dart_mirrors_api.h",
"../include/dart_native_api.h",
"../include/dart_tools_api.h",
"../vm/dart_api_impl.cc",
"../vm/debugger_api_impl.cc",
"../vm/mirrors_api_impl.cc",
"../vm/native_api_impl.cc",
"$target_gen_dir/../version.cc",
]
include_dirs = [
"..",
]
defines = [
"DART_SHARED_LIB",
]
}
static_library("libdart_nosnapshot_with_precompiler") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_precompiler_config"]
deps = [
"../vm:libdart_lib_nosnapshot_with_precompiler",
"../vm:libdart_vm_nosnapshot_with_precompiler",
"../vm:libdart_platform",
"../third_party/double-conversion/src:libdouble_conversion",
"..:generate_version_cc_file",
]
sources = [
"../include/dart_api.h",
"../include/dart_mirrors_api.h",
"../include/dart_native_api.h",
"../include/dart_tools_api.h",
"../vm/dart_api_impl.cc",
"../vm/debugger_api_impl.cc",
"../vm/mirrors_api_impl.cc",
"../vm/native_api_impl.cc",
"$target_gen_dir/../version.cc",
]
include_dirs = [
"..",
]
defines = [
"DART_SHARED_LIB",
]
}
io_impl_sources_gypi =
exec_script("../../tools/gypi_to_gn.py",
[ rebase_path("io_impl_sources.gypi") ],
@ -215,7 +257,7 @@ io_impl_sources_gypi =
executable("gen_snapshot") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_maybe_product_config",
"..:dart_precompiler_config"]
deps = [
":gen_resources_cc",
@ -224,7 +266,7 @@ executable("gen_snapshot") {
":generate_io_cc_file",
":generate_io_patch_cc_file",
":libdart_builtin",
":libdart_nosnapshot_with_precompiler",
"..:libdart_nosnapshot_with_precompiler",
]
sources = [
@ -260,11 +302,11 @@ executable("gen_snapshot") {
# (without secure sockets) suitable for linking with gen_snapshot.
source_set("gen_snapshot_dart_io") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_maybe_product_config",
"..:dart_precompiler_config"]
deps = [
"//third_party/zlib",
"$dart_zlib_path",
]
custom_sources_filter = [
@ -299,22 +341,16 @@ source_set("gen_snapshot_dart_io") {
source_set("libdart_embedder_noio") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_precompiled_runtime_config"]
"..:dart_maybe_product_config"]
deps = [
"..:libdart",
"../vm:libdart_platform",
]
}
# A source set for the implementation of 'dart:io' library
# (without secure sockets).
# A source set for the implementation of 'dart:io' library.
source_set("embedded_dart_io") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_precompiled_runtime_config"]
"..:dart_maybe_product_config"]
custom_sources_filter = [
"*_test.cc",
"*_test.h",
@ -330,6 +366,7 @@ source_set("embedded_dart_io") {
}
set_sources_assignment_filter(custom_sources_filter)
defines = []
if (is_mac || is_ios) {
libs = [
"CoreFoundation.framework",
@ -337,18 +374,13 @@ source_set("embedded_dart_io") {
"Security.framework",
]
} else if (defined(is_fuchsia) && is_fuchsia) {
defines = [
defines += [
"DART_IO_SECURE_SOCKET_DISABLED"
]
} else {
deps = [
"//third_party/boringssl",
]
if (is_linux && !dart_use_fallback_root_certificates) {
defines = [
"DART_IO_ROOT_CERTS_DISABLED"
]
}
}
sources = io_impl_sources_gypi.sources + builtin_impl_sources_gypi.sources
@ -366,8 +398,14 @@ source_set("embedded_dart_io") {
"log.h",
]
if (is_linux && dart_use_fallback_root_certificates) {
sources += [ "//third_party/root_certificates/root_certificates.cc"]
if (is_linux || is_win) {
if (dart_use_fallback_root_certificates) {
sources += [ "//third_party/root_certificates/root_certificates.cc"]
} else {
defines += [
"DART_IO_ROOT_CERTS_DISABLED",
]
}
}
include_dirs = [
@ -376,7 +414,6 @@ source_set("embedded_dart_io") {
]
}
action("generate_snapshot_bin") {
deps = [
"../bin:gen_snapshot($host_toolchain)",
@ -409,7 +446,6 @@ action("generate_snapshot_bin") {
]
}
action("generate_snapshot_file") {
deps = [
":generate_snapshot_bin",
@ -440,7 +476,6 @@ action("generate_snapshot_file") {
]
}
source_set("dart_snapshot_cc") {
sources = [
"$root_gen_dir/dart_snapshot.cc",
@ -451,123 +486,99 @@ source_set("dart_snapshot_cc") {
]
}
template("dart_executable") {
extra_configs = []
if (defined(invoker.extra_configs)) {
extra_configs += invoker.extra_configs
}
extra_deps = []
if (defined(invoker.extra_deps)) {
extra_deps += invoker.extra_deps
}
extra_defines = []
if (defined(invoker.extra_defines)) {
extra_defines = invoker.extra_defines
}
extra_sources = []
if (defined(invoker.extra_sources)) {
extra_sources += invoker.extra_sources
}
executable(target_name) {
configs += [
"..:dart_config",
"..:dart_maybe_product_config"
] + extra_configs
deps = [
":gen_resources_cc",
":embedded_dart_io",
":libdart_builtin",
"$dart_zlib_path",
] + extra_deps
defines = extra_defines
sources = [
"main.cc",
"vmservice_impl.cc",
"vmservice_impl.h",
"$target_gen_dir/resources_gen.cc",
] + extra_sources
include_dirs = [
"..",
"//third_party",
]
ldflags = [
"-rdynamic",
]
}
}
if (!defined(is_fuchsia) || !is_fuchsia) {
dart_executable("dart") {
extra_deps = [
"..:libdart",
":dart_snapshot_cc",
"../observatory:standalone_observatory_archive",
]
}
dart_executable("dart_precompiled_runtime") {
extra_configs = [
"..:dart_precompiled_runtime_config"
]
extra_deps = [
"..:libdart_precompiled_runtime",
":dart_snapshot_cc",
"../observatory:standalone_observatory_archive",
]
}
}
dart_executable("dart_bootstrap") {
extra_configs = [
"..:dart_precompiler_config",
"..:dart_no_snapshot_config",
]
extra_deps = [
"..:libdart",
]
extra_defines = [
"NO_OBSERVATORY",
]
extra_sources = [
"observatory_assets_empty.cc",
"snapshot_empty.cc",
]
}
if (defined(is_fuchsia) && is_fuchsia) {
copy("hello_fuchsia") {
sources = [ "../tests/vm/dart/hello_fuchsia_test.dart" ]
outputs = [ "$root_out_dir/hello_fuchsia.dart" ]
}
}
executable("dart_no_observatory") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_precompiled_runtime_config",]
deps = [
":gen_resources_cc",
":embedded_dart_io",
":libdart_builtin",
"../vm:libdart_platform",
"..:libdart",
":dart_snapshot_cc",
"//third_party/zlib",
]
if (defined(is_fuchsia) && is_fuchsia) {
deps += [
":hello_fuchsia",
]
}
defines = [
"NO_OBSERVATORY",
]
sources = [
"main.cc",
"observatory_assets_empty.cc",
"vmservice_impl.cc",
"vmservice_impl.h",
"$target_gen_dir/resources_gen.cc",
]
include_dirs = [
"..",
"//third_party",
]
}
if (defined(is_fuchsia) && is_fuchsia) {
action("generate_snapshot_test_dat_file") {
snapshot_test_dat_file = "$root_gen_dir/snapshot_test.dat"
snapshot_test_in_dat_file = "../vm/snapshot_test_in.dat"
snapshot_test_dart_file = "../vm/snapshot_test.dart"
inputs = [
"../tools/create_string_literal.py",
snapshot_test_in_dat_file,
snapshot_test_dart_file,
]
outputs = [
snapshot_test_dat_file,
]
script = "../tools/create_string_literal.py"
args = [
"--output",
rebase_path(snapshot_test_dat_file),
"--input_cc",
rebase_path(snapshot_test_in_dat_file),
"--include",
"INTENTIONALLY_LEFT_BLANK",
"--var_name",
"INTENTIONALLY_LEFT_BLANK_TOO",
rebase_path(snapshot_test_dart_file),
]
}
executable("run_vm_tests") {
testonly = true
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_precompiled_runtime_config",]
deps = [
"..:libdart",
":libdart_builtin",
":embedded_dart_io",
":dart_snapshot_cc",
":generate_snapshot_test_dat_file",
"../vm:libdart_platform",
"//third_party/zlib",
]
include_dirs = [
"..",
"$target_gen_dir",
]
defines = [
"TESTING",
]
vm_tests_list = exec_script("../../tools/gypi_to_gn.py",
[rebase_path("../vm/vm_sources.gypi"),
"--keep_only=_test.cc",
"--keep_only=_test.h",],
"scope",
["../vm/vm_sources.gypi"])
vm_tests = rebase_path(vm_tests_list.sources, ".", "../vm")
builtin_impl_tests_list =
exec_script("../../tools/gypi_to_gn.py",
[rebase_path("builtin_impl_sources.gypi"),
"--keep_only=_test.cc",
"--keep_only=_test.h",],
"scope",
["builtin_impl_sources.gypi"])
sources = [
"run_vm_tests.cc",
] + builtin_impl_tests_list.sources + vm_tests
}
executable("run_vm_tests_fuchsia") {
testonly = true
@ -581,4 +592,138 @@ if (defined(is_fuchsia) && is_fuchsia) {
"runtime",
]
}
} # defined(is_fuchsia) && is_fuchsia
}
# This is only needed for the Fuchsia target build until the Observatory is
# supported.
dart_executable("dart_no_observatory") {
extra_deps = [
"..:libdart",
":dart_snapshot_cc",
]
if (defined(is_fuchsia) && is_fuchsia) {
extra_deps += [ ":hello_fuchsia" ]
}
extra_defines = [
"NO_OBSERVATORY",
]
extra_sources = [
"observatory_assets_empty.cc",
]
}
executable("process_test") {
sources = [
"process_test.cc",
]
}
action("generate_snapshot_test_dat_file") {
snapshot_test_dat_file = "$root_gen_dir/snapshot_test.dat"
snapshot_test_in_dat_file = "../vm/snapshot_test_in.dat"
snapshot_test_dart_file = "../vm/snapshot_test.dart"
inputs = [
"../tools/create_string_literal.py",
snapshot_test_in_dat_file,
snapshot_test_dart_file,
]
outputs = [
snapshot_test_dat_file,
]
script = "../tools/create_string_literal.py"
args = [
"--output",
rebase_path(snapshot_test_dat_file),
"--input_cc",
rebase_path(snapshot_test_in_dat_file),
"--include",
"INTENTIONALLY_LEFT_BLANK",
"--var_name",
"INTENTIONALLY_LEFT_BLANK_TOO",
rebase_path(snapshot_test_dart_file),
]
}
executable("run_vm_tests") {
configs += ["..:dart_config",
"..:dart_maybe_product_config"]
deps = [
"..:libdart",
":libdart_builtin",
":embedded_dart_io",
":dart_snapshot_cc",
":generate_snapshot_test_dat_file",
"$dart_zlib_path",
]
include_dirs = [
"..",
"$target_gen_dir",
]
defines = [
"TESTING",
]
# The VM sources are already included in libdart, so we just want to add in
# the tests here.
vm_tests_list = exec_script("../../tools/gypi_to_gn.py",
[rebase_path("../vm/vm_sources.gypi"),
"--keep_only=_test.cc",
"--keep_only=_test.h",],
"scope",
["../vm/vm_sources.gypi"])
vm_tests = rebase_path(vm_tests_list.sources, ".", "../vm")
builtin_impl_tests_list =
exec_script("../../tools/gypi_to_gn.py",
[rebase_path("builtin_impl_sources.gypi"),
"--keep_only=_test.cc",
"--keep_only=_test.h",],
"scope",
["builtin_impl_sources.gypi"])
sources = [
"run_vm_tests.cc",
] + builtin_impl_tests_list.sources + vm_tests
ldflags = [
"-rdynamic",
]
}
if (!defined(is_fuchsia) || !is_fuchsia) {
shared_library("test_extension") {
deps = [
":dart",
]
sources = [
"test_extension.c",
"test_extension_dllmain_win.cc",
]
include_dirs = [
"..",
]
defines = [
# The only effect of DART_SHARED_LIB is to export the Dart API.
"DART_SHARED_LIB",
]
}
shared_library("sample_extension") {
deps = [
":dart",
]
sources = [
"../../samples/sample_extension/sample_extension.cc",
"../../samples/sample_extension/sample_extension_dllmain_win.cc",
]
include_dirs = [
"..",
]
defines = [
# The only effect of DART_SHARED_LIB is to export the Dart API.
"DART_SHARED_LIB",
]
}
}

View File

@ -290,7 +290,7 @@
'--output', '<(blink_cc_file)',
'--input_cc', '<(builtin_in_cc_file)',
'--include', 'bin/builtin.h',
'--var_name', 'dart::bin::Builtin::blink_source_paths_',
'--var_name', 'dart::bin::Builtin::_blink_source_paths_',
'--library_name', 'dart:_blink',
'<@(_sources)',
],
@ -322,7 +322,7 @@
'--output', '<(indexeddb_cc_file)',
'--input_cc', '<(builtin_in_cc_file)',
'--include', 'bin/builtin.h',
'--var_name', 'dart::bin::Builtin::indexeddb_source_paths_',
'--var_name', 'dart::bin::Builtin::indexed_db_source_paths_',
'--library_name', 'dart:indexed_db',
'<@(_sources)',
],
@ -450,7 +450,7 @@
'--output', '<(websql_cc_file)',
'--input_cc', '<(builtin_in_cc_file)',
'--include', 'bin/builtin.h',
'--var_name', 'dart::bin::Builtin::websql_source_paths_',
'--var_name', 'dart::bin::Builtin::web_sql_source_paths_',
'--library_name', 'dart:web_sql',
'<@(_sources)',
],
@ -514,7 +514,7 @@
'--output', '<(webaudio_cc_file)',
'--input_cc', '<(builtin_in_cc_file)',
'--include', 'bin/builtin.h',
'--var_name', 'dart::bin::Builtin::webaudio_source_paths_',
'--var_name', 'dart::bin::Builtin::web_audio_source_paths_',
'--library_name', 'dart:web_audio',
'<@(_sources)',
],

View File

@ -25,14 +25,14 @@ Builtin::builtin_lib_props Builtin::builtin_libraries_[] = {
{ "dart:html_common", html_common_source_paths_, NULL, NULL, true},
{ "dart:js", js_source_paths_, NULL, NULL, true},
{ "dart:js_util", js_util_source_paths_, NULL, NULL, true},
{ "dart:_blink", blink_source_paths_, NULL, NULL, true },
{ "dart:indexed_db", indexeddb_source_paths_, NULL, NULL, true },
{ "dart:_blink", _blink_source_paths_, NULL, NULL, true },
{ "dart:indexed_db", indexed_db_source_paths_, NULL, NULL, true },
{ "cached_patches.dart", cached_patches_source_paths_, NULL, NULL, true },
{ "dart:web_gl", web_gl_source_paths_, NULL, NULL, true },
{ "metadata.dart", metadata_source_paths_, NULL, NULL, true },
{ "dart:web_sql", websql_source_paths_, NULL, NULL, true },
{ "dart:web_sql", web_sql_source_paths_, NULL, NULL, true },
{ "dart:svg", svg_source_paths_, NULL, NULL, true },
{ "dart:web_audio", webaudio_source_paths_, NULL, NULL, true },
{ "dart:web_audio", web_audio_source_paths_, NULL, NULL, true },
#endif // defined(DART_NO_SNAPSHOT)
// End marker.

View File

@ -75,14 +75,14 @@ class Builtin {
static const char* html_common_source_paths_[];
static const char* js_source_paths_[];
static const char* js_util_source_paths_[];
static const char* blink_source_paths_[];
static const char* indexeddb_source_paths_[];
static const char* _blink_source_paths_[];
static const char* indexed_db_source_paths_[];
static const char* cached_patches_source_paths_[];
static const char* web_gl_source_paths_[];
static const char* metadata_source_paths_[];
static const char* websql_source_paths_[];
static const char* web_sql_source_paths_[];
static const char* svg_source_paths_[];
static const char* webaudio_source_paths_[];
static const char* web_audio_source_paths_[];
static Dart_Port load_port_;
static const int num_libs_;

56
runtime/bin/zlib/BUILD.gn Normal file
View File

@ -0,0 +1,56 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Exports the zlib header location to every target that depends on ":zlib"
# (applied via all_dependent_configs below).
config("zlib_config") {
  include_dirs = [ "//third_party/zlib" ]
}
# Builds zlib from the //third_party/zlib checkout. This file is boilerplate
# inherited from Chromium's zlib GN build.
static_library("zlib") {
  if (!is_win) {
    # Don't stomp on "libzlib" on other platforms.
    output_name = "chrome_zlib"
  }

  zlib_path = "//third_party/zlib"
  sources = [
    "$zlib_path/adler32.c",
    "$zlib_path/compress.c",
    "$zlib_path/crc32.c",
    "$zlib_path/crc32.h",
    "$zlib_path/deflate.c",
    "$zlib_path/deflate.h",
    "$zlib_path/gzclose.c",
    "$zlib_path/gzguts.h",
    "$zlib_path/gzlib.c",
    "$zlib_path/gzread.c",
    "$zlib_path/gzwrite.c",
    "$zlib_path/infback.c",
    "$zlib_path/inffast.c",
    "$zlib_path/inffast.h",
    "$zlib_path/inffixed.h",
    "$zlib_path/inflate.c",
    "$zlib_path/inflate.h",
    "$zlib_path/inftrees.c",
    "$zlib_path/inftrees.h",
    "$zlib_path/mozzconf.h",
    "$zlib_path/trees.c",
    "$zlib_path/trees.h",
    "$zlib_path/uncompr.c",
    "$zlib_path/zconf.h",
    "$zlib_path/zlib.h",
    "$zlib_path/zutil.c",
    "$zlib_path/zutil.h",
  ]

  # Third-party code: opt out of the stricter warnings applied to
  # first-party ("chromium_code") targets.
  configs -= [ "//build/config/compiler:chromium_code" ]
  configs += [ "//build/config/compiler:no_chromium_code" ]

  # Propagate the include path to all dependents.
  all_dependent_configs = [ ":zlib_config" ]

  if (is_clang) {
    cflags = [
      "-Wno-shift-negative-value",
    ]
  }
}

View File

@ -1,4 +1,4 @@
# Generated by pub on 2016-09-22 14:19:01.104115.
# Generated by pub on 2016-09-23 00:19:32.016148.
analyzer:../../third_party/observatory_pub_packages/packages/analyzer/lib/
args:../../third_party/observatory_pub_packages/packages/args/lib/
async:../../third_party/observatory_pub_packages/packages/async/lib/

View File

@ -41,23 +41,24 @@ action("write_observatory_pubspec_yaml") {
rebase_path("pubspec.yaml"),
]
current_dir = rebase_path(".", "//")
args = [
"--silent=True",
"--pub-executable",
dart_host_pub_exe,
"--directory",
rebase_path("$root_gen_dir/observatory_copy/dart/runtime/observatory/"),
rebase_path("$root_gen_dir/observatory_copy/$current_dir/"),
"--command",
"rewrite",
rebase_path("../observatory/pubspec.yaml"),
rebase_path(
"$root_gen_dir/observatory_copy/dart/runtime/observatory/pubspec.yaml"),
"$root_gen_dir/observatory_copy/$current_dir/pubspec.yaml"),
"../../third_party/",
rebase_path("../../third_party/"),
]
outputs = [
"$root_gen_dir/observatory_copy/dart/runtime/observatory/pubspec.yaml",
"$root_gen_dir/observatory_copy/$current_dir/pubspec.yaml",
]
}
@ -68,9 +69,10 @@ action("copy_observatory_deps") {
script = "../../tools/observatory_tool.py"
current_dir = rebase_path(".", "//")
inputs = [
script,
"$root_gen_dir/observatory_copy/dart/runtime/observatory/pubspec.yaml",
"$root_gen_dir/observatory_copy/$current_dir/pubspec.yaml",
]
args = [
@ -78,23 +80,24 @@ action("copy_observatory_deps") {
"--pub-executable",
dart_host_pub_exe,
"--stamp",
rebase_path("$root_gen_dir/observatory_copy/dart/runtime/observatory/packages.stamp"),
rebase_path("$root_gen_dir/observatory_copy/$current_dir/packages.stamp"),
"--directory",
rebase_path("$root_gen_dir/observatory_copy/dart/runtime/observatory/"),
rebase_path("$root_gen_dir/observatory_copy/$current_dir/"),
"--command",
"get",
]
outputs = [
"$root_gen_dir/observatory_copy/dart/runtime/observatory/packages.stamp",
"$root_gen_dir/observatory_copy/$current_dir/packages.stamp",
]
}
action("pub_build_observatory") {
current_dir = rebase_path(".", "//")
sources =
rebase_path(observatory_sources_gypi.sources,
"",
"$root_gen_dir/observatory_copy/dart/runtime/observatory")
"$root_gen_dir/observatory_copy/$current_dir")
deps = [
":copy_observatory",
@ -105,7 +108,7 @@ action("pub_build_observatory") {
inputs = [
script,
"$root_gen_dir/observatory_copy/dart/runtime/observatory/packages.stamp",
"$root_gen_dir/observatory_copy/$current_dir/packages.stamp",
]
inputs += sources
@ -114,7 +117,7 @@ action("pub_build_observatory") {
"--pub-executable",
dart_host_pub_exe,
"--directory",
rebase_path("$root_gen_dir/observatory_copy/dart/runtime/observatory/"),
rebase_path("$root_gen_dir/observatory_copy/$current_dir/"),
"--command",
"build",
rebase_path("$root_out_dir/observatory/build"),
@ -152,41 +155,70 @@ action("deploy_observatory") {
]
}
action("archive_observatory") {
deps = [
":deploy_observatory",
]
template("observatory_archive") {
assert(defined(invoker.inner_namespace),
"Need inner_namespace in $target_name")
assert(defined(invoker.outer_namespace),
"Need outer_namespace in $target_name")
action(target_name) {
deps = [
":deploy_observatory",
]
script = "../tools/create_archive.py"
script = "../tools/create_archive.py"
inputs = [
script,
"$root_out_dir/observatory/deployed/web/main.dart.js",
]
args = [
"--output",
rebase_path("$root_gen_dir/observatory/observatory_archive.cc"),
"--tar_output",
rebase_path("$root_gen_dir/observatory/observatory_archive.tar"),
"--outer_namespace", "dart",
"--inner_namespace", "observatory",
"--name", "observatory_assets_archive",
"--client_root", rebase_path("$root_out_dir/observatory/deployed/web/"),
]
inner_namespace = invoker.inner_namespace
outer_namespace = invoker.outer_namespace
output_name = target_name
args = [
"--output",
rebase_path("$root_gen_dir/observatory/${output_name}.cc"),
"--tar_output",
rebase_path("$root_gen_dir/observatory/${output_name}.tar"),
"--outer_namespace", outer_namespace,
"--inner_namespace", inner_namespace,
"--name", "observatory_assets_archive",
"--client_root", rebase_path("$root_out_dir/observatory/deployed/web/"),
]
outputs = [
"$root_gen_dir/observatory/observatory_archive.cc",
"$root_gen_dir/observatory/observatory_archive.tar",
]
outputs = [
"$root_gen_dir/observatory/${output_name}.cc",
"$root_gen_dir/observatory/${output_name}.tar",
]
}
}
observatory_archive("embedded_archive_observatory") {
outer_namespace = "dart"
inner_namespace = "observatory"
}
source_set("embedded_observatory_archive") {
deps = [
":archive_observatory",
":embedded_archive_observatory",
]
sources = [
rebase_path("$root_gen_dir/observatory/observatory_archive.cc"),
rebase_path("$root_gen_dir/observatory/embedded_archive_observatory.cc"),
]
}
observatory_archive("standalone_archive_observatory") {
outer_namespace = "dart"
inner_namespace = "bin"
}
source_set("standalone_observatory_archive") {
deps = [
":standalone_archive_observatory",
]
sources = [
rebase_path("$root_gen_dir/observatory/standalone_archive_observatory.cc"),
]
}

View File

@ -25,7 +25,8 @@ config("libdart_vm_config") {
static_library("libdart_platform") {
configs += ["..:dart_config", "..:dart_product_config"]
configs += ["..:dart_config",
"..:dart_maybe_product_config"]
public_configs = [":libdart_vm_config"]
platform_headers_gypi =
@ -59,7 +60,20 @@ vm_sources_list = exec_script("../../tools/gypi_to_gn.py",
static_library("libdart_vm") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_maybe_product_config",
"..:dart_maybe_precompiled_runtime_config"]
public_configs = [":libdart_vm_config"]
set_sources_assignment_filter(["*_test.cc", "*_test.h"])
sources = vm_sources_list.sources
include_dirs = [
"..",
]
}
static_library("libdart_vm_precompiled_runtime") {
configs += ["..:dart_config",
"..:dart_maybe_product_config",
"..:dart_precompiled_runtime_config"]
public_configs = [":libdart_vm_config"]
set_sources_assignment_filter(["*_test.cc", "*_test.h"])
@ -72,10 +86,24 @@ static_library("libdart_vm") {
static_library("libdart_vm_nosnapshot") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_precompiled_runtime_config"]
"..:dart_maybe_product_config",
"..:dart_maybe_precompiled_runtime_config",
"..:dart_no_snapshot_config",]
public_configs = [":libdart_vm_config"]
set_sources_assignment_filter(["*_test.cc", "*_test.h"])
sources = vm_sources_list.sources
include_dirs = [
"..",
]
}
static_library("libdart_vm_nosnapshot_precompiled_runtime") {
configs += ["..:dart_config",
"..:dart_maybe_product_config",
"..:dart_precompiled_runtime_config",
"..:dart_no_snapshot_config",]
public_configs = [":libdart_vm_config"]
defines = [ "DART_NO_SNAPSHOT" ]
set_sources_assignment_filter(["*_test.cc", "*_test.h"])
sources = vm_sources_list.sources
include_dirs = [
@ -86,10 +114,10 @@ static_library("libdart_vm_nosnapshot") {
static_library("libdart_vm_nosnapshot_with_precompiler") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_precompiler_config"]
"..:dart_maybe_product_config",
"..:dart_precompiler_config",
"..:dart_no_snapshot_config",]
public_configs = [":libdart_vm_config"]
defines = [ "DART_NO_SNAPSHOT" ]
set_sources_assignment_filter(["*_test.cc", "*_test.h"])
sources = vm_sources_list.sources
include_dirs = [
@ -190,7 +218,17 @@ template("generate_core_libraries") {
static_library("libdart_lib_nosnapshot") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_maybe_product_config",
"..:dart_maybe_precompiled_runtime_config"]
deps = libdeps
sources = libsources + ["bootstrap.cc"] + liboutputs
include_dirs = [
"..",
]
}
static_library("libdart_lib_nosnapshot_precompiled_runtime") {
configs += ["..:dart_config",
"..:dart_maybe_product_config",
"..:dart_precompiled_runtime_config"]
deps = libdeps
sources = libsources + ["bootstrap.cc"] + liboutputs
@ -200,7 +238,7 @@ template("generate_core_libraries") {
}
static_library("libdart_lib_nosnapshot_with_precompiler") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_maybe_product_config",
"..:dart_precompiler_config" ]
deps = libdeps
sources = libsources + [ "bootstrap.cc"] + liboutputs
@ -210,7 +248,16 @@ template("generate_core_libraries") {
}
static_library("libdart_lib") {
configs += ["..:dart_config",
"..:dart_product_config",
"..:dart_maybe_product_config",
"..:dart_maybe_precompiled_runtime_config"]
sources = libsources + [ "bootstrap_nocore.cc"]
include_dirs = [
"..",
]
}
static_library("libdart_lib_precompiled_runtime") {
configs += ["..:dart_config",
"..:dart_maybe_product_config",
"..:dart_precompiled_runtime_config"]
sources = libsources + [ "bootstrap_nocore.cc"]
include_dirs = [

771
tools/clang/scripts/update.py Executable file
View File

@ -0,0 +1,771 @@
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Windows can't run .sh files, so this is a Python implementation of
update.sh. This script should replace update.sh on all platforms eventually."""
import argparse
import contextlib
import cStringIO
import glob
import os
import pipes
import re
import shutil
import subprocess
import stat
import sys
import tarfile
import time
import urllib2
import zipfile
# Do NOT CHANGE this if you don't know what you're doing -- see
# https://code.google.com/p/chromium/wiki/UpdatingClang
# Reverting problematic clang rolls is safe, though.
# Note: this revision is only used for Windows. Other platforms use update.sh.
# TODO(thakis): Use the same revision on Windows and non-Windows.
# TODO(thakis): Remove update.sh, use update.py everywhere.
LLVM_WIN_REVISION = '242415'
use_head_revision = 'LLVM_FORCE_HEAD_REVISION' in os.environ
if use_head_revision:
LLVM_WIN_REVISION = 'HEAD'
# This is incremented when pushing a new build of Clang at the same revision.
CLANG_SUB_REVISION=1
PACKAGE_VERSION = "%s-%s" % (LLVM_WIN_REVISION, CLANG_SUB_REVISION)
# Path constants. (All of these should be absolute paths.)
THIS_DIR = os.path.abspath(os.path.dirname(__file__))
CHROMIUM_DIR = os.path.abspath(os.path.join(THIS_DIR, '..', '..', '..'))
THIRD_PARTY_DIR = os.path.join(CHROMIUM_DIR, 'third_party')
LLVM_DIR = os.path.join(THIRD_PARTY_DIR, 'llvm')
LLVM_BOOTSTRAP_DIR = os.path.join(THIRD_PARTY_DIR, 'llvm-bootstrap')
LLVM_BOOTSTRAP_INSTALL_DIR = os.path.join(THIRD_PARTY_DIR,
'llvm-bootstrap-install')
CHROME_TOOLS_SHIM_DIR = os.path.join(LLVM_DIR, 'tools', 'chrometools')
LLVM_BUILD_DIR = os.path.join(CHROMIUM_DIR, 'third_party', 'llvm-build',
'Release+Asserts')
COMPILER_RT_BUILD_DIR = os.path.join(LLVM_BUILD_DIR, '32bit-compiler-rt')
CLANG_DIR = os.path.join(LLVM_DIR, 'tools', 'clang')
LLD_DIR = os.path.join(LLVM_DIR, 'tools', 'lld')
COMPILER_RT_DIR = os.path.join(LLVM_DIR, 'projects', 'compiler-rt')
LIBCXX_DIR = os.path.join(LLVM_DIR, 'projects', 'libcxx')
LIBCXXABI_DIR = os.path.join(LLVM_DIR, 'projects', 'libcxxabi')
LLVM_BUILD_TOOLS_DIR = os.path.abspath(
os.path.join(LLVM_DIR, '..', 'llvm-build-tools'))
STAMP_FILE = os.path.join(LLVM_DIR, '..', 'llvm-build', 'cr_build_revision')
BINUTILS_DIR = os.path.join(THIRD_PARTY_DIR, 'binutils')
VERSION = '3.8.0'
# URL for pre-built binaries.
CDS_URL = 'https://commondatastorage.googleapis.com/chromium-browser-clang'
LLVM_REPO_URL='https://llvm.org/svn/llvm-project'
if 'LLVM_REPO_URL' in os.environ:
LLVM_REPO_URL = os.environ['LLVM_REPO_URL']
def DownloadUrl(url, output_file):
  """Download url into output_file.

  Streams the HTTP response in CHUNK_SIZE pieces, printing a simple progress
  bar of up to TOTAL_DOTS dots to stdout, followed by ' Done.'.
  """
  CHUNK_SIZE = 4096
  TOTAL_DOTS = 10
  sys.stdout.write('Downloading %s ' % url)
  sys.stdout.flush()
  response = urllib2.urlopen(url)
  # NOTE(review): raises AttributeError if the server omits Content-Length --
  # acceptable here since the fetch would be unusable for progress anyway.
  total_size = int(response.info().getheader('Content-Length').strip())
  bytes_done = 0
  dots_printed = 0
  while True:
    chunk = response.read(CHUNK_SIZE)
    if not chunk:
      break
    output_file.write(chunk)
    bytes_done += len(chunk)
    # Python 2 integer division: num_dots advances in whole steps, so only
    # the newly-earned dots are printed each iteration.
    num_dots = TOTAL_DOTS * bytes_done / total_size
    sys.stdout.write('.' * (num_dots - dots_printed))
    sys.stdout.flush()
    dots_printed = num_dots
  print ' Done.'
def ReadStampFile(path=None):
  """Return the contents of the stamp file, or '' if it doesn't exist.

  Args:
    path: stamp file to read; defaults to the module-level STAMP_FILE.
        The parameter is optional so existing callers are unaffected.
  """
  if path is None:
    path = STAMP_FILE
  try:
    with open(path, 'r') as f:
      return f.read()
  except IOError:
    # Missing (or unreadable) stamp means "no build recorded yet".
    return ''
def WriteStampFile(s, path=None):
  """Write s to the stamp file, creating its directory if needed.

  Args:
    s: string to record (typically the package version, or '' to reset).
    path: stamp file to write; defaults to the module-level STAMP_FILE.
        The parameter is optional so existing callers are unaffected.
  """
  if path is None:
    path = STAMP_FILE
  stamp_dir = os.path.dirname(path)
  # Guard the empty-dirname case (bare filename) -- os.makedirs('') raises.
  if stamp_dir and not os.path.exists(stamp_dir):
    os.makedirs(stamp_dir)
  with open(path, 'w') as f:
    f.write(s)
def GetSvnRevision(svn_repo):
  """Returns current revision of the svn repo at svn_repo.

  Raises AttributeError if 'svn info' output contains no 'Revision:' line.
  """
  # Pass an argument list instead of building a shell string so repo paths
  # containing spaces or shell metacharacters are handled safely.
  svn_info = subprocess.check_output(['svn', 'info', svn_repo],
                                     universal_newlines=True)
  m = re.search(r'Revision: (\d+)', svn_info)
  return m.group(1)
def RmTree(dir):
  """Recursively delete dir, fixing up read-only entries along the way."""
  def _make_writable_and_retry(func, path, _):
    # Subversion can leave read-only files around. If the path is already
    # writable the failure was something else entirely -- re-raise it.
    if os.access(path, os.W_OK):
      raise
    os.chmod(path, stat.S_IWUSR)
    return func(path)
  shutil.rmtree(dir, onerror=_make_writable_and_retry)
def RunCommand(command, msvc_arch=None, env=None, fail_hard=True):
  """Run command and return success (True) or failure; or if fail_hard is
  True, exit on failure. If msvc_arch is set, runs the command in a
  shell with the msvc tools for that architecture."""
  if msvc_arch and sys.platform == 'win32':
    # Prefix the command with the Visual Studio environment-setup script so
    # the MSVC toolchain for the requested architecture is on PATH.
    command = GetVSVersion().SetupScript(msvc_arch) + ['&&'] + command

  # https://docs.python.org/2/library/subprocess.html:
  # "On Unix with shell=True [...] if args is a sequence, the first item
  # specifies the command string, and any additional items will be treated as
  # additional arguments to the shell itself. That is to say, Popen does the
  # equivalent of:
  # Popen(['/bin/sh', '-c', args[0], args[1], ...])"
  #
  # We want to pass additional arguments to command[0], not to the shell,
  # so manually join everything into a single string.
  # Annoyingly, for "svn co url c:\path", pipes.quote() thinks that it should
  # quote c:\path but svn can't handle quoted paths on Windows. Since on
  # Windows follow-on args are passed to args[0] instead of the shell, don't
  # do the single-string transformation there.
  if sys.platform != 'win32':
    command = ' '.join([pipes.quote(c) for c in command])
  print 'Running', command
  if subprocess.call(command, env=env, shell=True) == 0:
    return True
  print 'Failed.'
  if fail_hard:
    sys.exit(1)
  return False
def CopyFile(src, dst):
  """Copy a file from src to dst (dst may be a directory), logging the copy."""
  shutil.copy(src, dst)
  # Logged after the copy, so the message also confirms completion.
  print "Copying %s to %s" % (src, dst)
def CopyDirectoryContents(src, dst, filename_filter=None):
  """Copy every file under *src* into the directory *dst*.

  The destination is flat: files found in subdirectories of *src* all land
  directly in *dst*. When *filename_filter* is given, only file names
  matching that regular expression (anchored at the start, via re.match)
  are copied.
  """
  if not os.path.exists(dst):
    os.makedirs(dst)
  for dirpath, _, filenames in os.walk(src):
    for name in filenames:
      if filename_filter is None or re.match(filename_filter, name):
        CopyFile(os.path.join(dirpath, name), dst)
def Checkout(name, url, dir):
  """Checkout the SVN module at url into dir. Use name for the log message."""
  print "Checking out %s r%s into '%s'" % (name, LLVM_WIN_REVISION, dir)
  command = ['svn', 'checkout', '--force', url + '@' + LLVM_WIN_REVISION, dir]
  if RunCommand(command, fail_hard=False):
    return
  # A partial or corrupt working copy can make 'svn checkout --force' fail;
  # wipe the directory and try once more, fatally this time (fail_hard
  # defaults to True on the retry).
  if os.path.isdir(dir):
    print "Removing %s." % (dir)
    RmTree(dir)
  print "Retrying."
  RunCommand(command)
def RevertPreviouslyPatchedFiles():
  """svn-revert every file that ApplyLocalPatches may have touched.

  Unversioned copies are deleted outright; versioned files are restored via
  'svn revert' so a fresh checkout state precedes re-patching.
  """
  print 'Reverting previously patched files'
  # Paths are templates; the %(...)s placeholders are filled in below with
  # the LLVM/Clang/compiler-rt checkout directories.
  files = [
    '%(clang)s/test/Index/crash-recovery-modules.m',
    '%(clang)s/unittests/libclang/LibclangTest.cpp',
    '%(compiler_rt)s/lib/asan/asan_rtl.cc',
    '%(compiler_rt)s/test/asan/TestCases/Linux/new_array_cookie_test.cc',
    '%(llvm)s/test/DebugInfo/gmlt.ll',
    '%(llvm)s/lib/CodeGen/SpillPlacement.cpp',
    '%(llvm)s/lib/CodeGen/SpillPlacement.h',
    '%(llvm)s/lib/Transforms/Instrumentation/MemorySanitizer.cpp',
    '%(clang)s/test/Driver/env.c',
    '%(clang)s/lib/Frontend/InitPreprocessor.cpp',
    '%(clang)s/test/Frontend/exceptions.c',
    '%(clang)s/test/Preprocessor/predefined-exceptions.m',
    '%(llvm)s/test/Bindings/Go/go.test',
    '%(clang)s/lib/Parse/ParseExpr.cpp',
    '%(clang)s/lib/Parse/ParseTemplate.cpp',
    '%(clang)s/lib/Sema/SemaDeclCXX.cpp',
    '%(clang)s/lib/Sema/SemaExprCXX.cpp',
    '%(clang)s/test/SemaCXX/default2.cpp',
    '%(clang)s/test/SemaCXX/typo-correction-delayed.cpp',
    '%(compiler_rt)s/lib/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc',
    '%(compiler_rt)s/test/tsan/signal_segv_handler.cc',
    '%(compiler_rt)s/lib/sanitizer_common/sanitizer_coverage_libcdep.cc',
    '%(compiler_rt)s/cmake/config-ix.cmake',
    '%(compiler_rt)s/CMakeLists.txt',
    '%(compiler_rt)s/lib/ubsan/ubsan_platform.h',
  ]
  for f in files:
    f = f % {
      'clang': CLANG_DIR,
      'compiler_rt': COMPILER_RT_DIR,
      'llvm': LLVM_DIR,
    }
    if os.path.exists(f):
      os.remove(f)  # For unversioned files.

    RunCommand(['svn', 'revert', f])
def ApplyLocalPatches():
  """Apply the local patch set to the LLVM/Clang/compiler-rt checkouts.

  Each patch is fed to 'patch -p0' on stdin from the raw strings below;
  any non-zero exit aborts the update with a RuntimeError.
  """
  # There's no patch program on Windows by default. We don't need patches on
  # Windows yet, and maybe this not working on Windows will motivate us to
  # remove patches over time.
  assert sys.platform != 'win32'

  # NOTE(review): the embedded patch bodies below appear whitespace-mangled
  # (unified-diff context lines should begin with a single space, and the
  # hunk headers imply blank context lines that are absent here). Verify the
  # literal patch text against upstream Chromium's
  # tools/clang/scripts/update.py before relying on it.

  # Apply patch for tests failing with --disable-pthreads (llvm.org/PR11974)
  clang_patches = [ r"""\
--- test/Index/crash-recovery-modules.m (revision 202554)
+++ test/Index/crash-recovery-modules.m (working copy)
@@ -12,6 +12,8 @@
// REQUIRES: crash-recovery
// REQUIRES: shell
+// XFAIL: *
+// (PR11974)
@import Crash;
""", r"""\
--- unittests/libclang/LibclangTest.cpp (revision 215949)
+++ unittests/libclang/LibclangTest.cpp (working copy)
@@ -431,7 +431,7 @@
EXPECT_EQ(0U, clang_getNumDiagnostics(ClangTU));
}
-TEST_F(LibclangReparseTest, ReparseWithModule) {
+TEST_F(LibclangReparseTest, DISABLED_ReparseWithModule) {
const char *HeaderTop = "#ifndef H\n#define H\nstruct Foo { int bar;";
const char *HeaderBottom = "\n};\n#endif\n";
const char *MFile = "#include \"HeaderFile.h\"\nint main() {"
"""
  ]

  # This Go bindings test doesn't work after bootstrap on Linux, PR21552.
  llvm_patches = [ r"""\
--- test/Bindings/Go/go.test (revision 223109)
+++ test/Bindings/Go/go.test (working copy)
@@ -1,3 +1,3 @@
-; RUN: llvm-go test llvm.org/llvm/bindings/go/llvm
+; RUN: true
; REQUIRES: shell
"""
  ]

  # The UBSan run-time, which is now bundled with the ASan run-time, doesn't
  # work on Mac OS X 10.8 (PR23539).
  compiler_rt_patches = [ r"""\
--- CMakeLists.txt (revision 241602)
+++ CMakeLists.txt (working copy)
@@ -305,6 +305,7 @@
list(APPEND SANITIZER_COMMON_SUPPORTED_OS iossim)
endif()
endif()
+ set(SANITIZER_MIN_OSX_VERSION "10.7")
if(SANITIZER_MIN_OSX_VERSION VERSION_LESS "10.7")
message(FATAL_ERROR "Too old OS X version: ${SANITIZER_MIN_OSX_VERSION}")
endif()
"""
  ]

  for path, patches in [(LLVM_DIR, llvm_patches),
                        (CLANG_DIR, clang_patches),
                        (COMPILER_RT_DIR, compiler_rt_patches)]:
    print 'Applying patches in', path
    for patch in patches:
      print patch
      p = subprocess.Popen( ['patch', '-p0', '-d', path], stdin=subprocess.PIPE)
      (stdout, stderr) = p.communicate(input=patch)
      if p.returncode != 0:
        raise RuntimeError('stdout %s, stderr %s' % (stdout, stderr))
def DeleteChromeToolsShim():
  """Remove the generated Chrome-tools shim directory, ignoring absence."""
  shutil.rmtree(CHROME_TOOLS_SHIM_DIR, ignore_errors=True)
def CreateChromeToolsShim():
  """Hooks the Chrome tools into the LLVM build.

  Several Chrome tools have dependencies on LLVM/Clang libraries. The LLVM
  build detects implicit tools in the tools subdirectory, so this helper
  installs a shim CMakeLists.txt that forwards to the real directory for the
  Chrome tools.

  Note that the shim directory name intentionally has no - or _. The implicit
  tool detection logic munges them in a weird way."""
  assert not any(i in os.path.basename(CHROME_TOOLS_SHIM_DIR) for i in '-_')
  os.mkdir(CHROME_TOOLS_SHIM_DIR)
  # Use open() instead of the Python-2-only file() builtin.
  with open(os.path.join(CHROME_TOOLS_SHIM_DIR, 'CMakeLists.txt'), 'w') as f:
    f.write('# Automatically generated by tools/clang/scripts/update.py. ' +
            'Do not edit.\n')
    f.write('# Since tools/clang is located in another directory, use the \n')
    f.write('# two arg version to specify where build artifacts go. CMake\n')
    f.write('# disallows reuse of the same binary dir for multiple source\n')
    # This comment line was previously written twice in a row; emit it once.
    f.write('# dirs, so the build artifacts need to go into a subdirectory.\n')
    f.write('if (CHROMIUM_TOOLS_SRC)\n')
    f.write('  add_subdirectory(${CHROMIUM_TOOLS_SRC} ' +
            '${CMAKE_CURRENT_BINARY_DIR}/a)\n')
    f.write('endif (CHROMIUM_TOOLS_SRC)\n')
def AddCMakeToPath():
  """Download CMake (if not already unpacked) and add its bin dir to PATH."""
  if sys.platform == 'win32':
    zip_name = 'cmake-3.2.2-win32-x86.zip'
    cmake_dir = os.path.join(LLVM_BUILD_TOOLS_DIR,
                             'cmake-3.2.2-win32-x86', 'bin')
  else:
    suffix = 'Darwin' if sys.platform == 'darwin' else 'Linux'
    zip_name = 'cmake310_%s.tgz' % suffix
    cmake_dir = os.path.join(LLVM_BUILD_TOOLS_DIR, 'cmake310', 'bin')
  if not os.path.exists(cmake_dir):
    if not os.path.exists(LLVM_BUILD_TOOLS_DIR):
      os.makedirs(LLVM_BUILD_TOOLS_DIR)
    # The cmake archive is smaller than 20 MB, small enough to keep in memory:
    with contextlib.closing(cStringIO.StringIO()) as f:
      DownloadUrl(CDS_URL + '/tools/' + zip_name, f)
      f.seek(0)
      if zip_name.endswith('.zip'):
        zipfile.ZipFile(f).extractall(path=LLVM_BUILD_TOOLS_DIR)
      else:
        tarfile.open(mode='r:gz', fileobj=f).extractall(path=
            LLVM_BUILD_TOOLS_DIR)
  # Prepend, so the downloaded cmake wins over any system installation.
  os.environ['PATH'] = cmake_dir + os.pathsep + os.environ.get('PATH', '')
# Cached result of GetVSVersion(); populated on first call.
vs_version = None
def GetVSVersion():
  """Return (and memoize) the gyp MSVS version object for Visual Studio 2013."""
  global vs_version
  if vs_version:
    return vs_version

  # Try using the toolchain in depot_tools.
  # This sets environment variables used by SelectVisualStudioVersion below.
  sys.path.append(os.path.join(CHROMIUM_DIR, 'build'))
  import vs_toolchain
  vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()

  # Use gyp to find the MSVS installation, either in depot_tools as per above,
  # or a system-wide installation otherwise.
  sys.path.append(os.path.join(CHROMIUM_DIR, 'tools', 'gyp', 'pylib'))
  import gyp.MSVSVersion
  vs_version = gyp.MSVSVersion.SelectVisualStudioVersion('2013')
  return vs_version
def UpdateClang(args):
print 'Updating Clang to %s...' % PACKAGE_VERSION
if ReadStampFile() == PACKAGE_VERSION:
print 'Already up to date.'
return 0
# Reset the stamp file in case the build is unsuccessful.
WriteStampFile('')
if not args.force_local_build:
cds_file = "clang-%s.tgz" % PACKAGE_VERSION
cds_full_url = CDS_URL + '/Win/' + cds_file
# Check if there's a prebuilt binary and if so just fetch that. That's
# faster, and goma relies on having matching binary hashes on client and
# server too.
print 'Trying to download prebuilt clang'
# clang packages are smaller than 50 MB, small enough to keep in memory.
with contextlib.closing(cStringIO.StringIO()) as f:
try:
DownloadUrl(cds_full_url, f)
f.seek(0)
tarfile.open(mode='r:gz', fileobj=f).extractall(path=LLVM_BUILD_DIR)
print 'clang %s unpacked' % PACKAGE_VERSION
# Download the gold plugin if requested to by an environment variable.
# This is used by the CFI ClusterFuzz bot.
if 'LLVM_DOWNLOAD_GOLD_PLUGIN' in os.environ:
RunCommand(['python', CHROMIUM_DIR+'/build/download_gold_plugin.py'])
WriteStampFile(PACKAGE_VERSION)
return 0
except urllib2.HTTPError:
print 'Did not find prebuilt clang %s, building locally' % cds_file
AddCMakeToPath()
RevertPreviouslyPatchedFiles()
DeleteChromeToolsShim()
Checkout('LLVM', LLVM_REPO_URL + '/llvm/trunk', LLVM_DIR)
Checkout('Clang', LLVM_REPO_URL + '/cfe/trunk', CLANG_DIR)
if sys.platform == 'win32':
Checkout('LLD', LLVM_REPO_URL + '/lld/trunk', LLD_DIR)
Checkout('compiler-rt', LLVM_REPO_URL + '/compiler-rt/trunk', COMPILER_RT_DIR)
if sys.platform == 'darwin':
# clang needs a libc++ checkout, else -stdlib=libc++ won't find includes
# (i.e. this is needed for bootstrap builds).
Checkout('libcxx', LLVM_REPO_URL + '/libcxx/trunk', LIBCXX_DIR)
# While we're bundling our own libc++ on OS X, we need to compile libc++abi
# into it too (since OS X 10.6 doesn't have libc++abi.dylib either).
Checkout('libcxxabi', LLVM_REPO_URL + '/libcxxabi/trunk', LIBCXXABI_DIR)
if args.with_patches and sys.platform != 'win32':
ApplyLocalPatches()
cc, cxx = None, None
cflags = cxxflags = ldflags = []
# LLVM uses C++11 starting in llvm 3.5. On Linux, this means libstdc++4.7+ is
# needed, on OS X it requires libc++. clang only automatically links to libc++
# when targeting OS X 10.9+, so add stdlib=libc++ explicitly so clang can run
# on OS X versions as old as 10.7.
# TODO(thakis): Some bots are still on 10.6 (nacl...), so for now bundle
# libc++.dylib. Remove this once all bots are on 10.7+, then use
# -DLLVM_ENABLE_LIBCXX=ON and change deployment_target to 10.7.
deployment_target = ''
if sys.platform == 'darwin':
# When building on 10.9, /usr/include usually doesn't exist, and while
# Xcode's clang automatically sets a sysroot, self-built clangs don't.
cflags = ['-isysroot', subprocess.check_output(
['xcrun', '--show-sdk-path']).rstrip()]
cxxflags = ['-stdlib=libc++', '-nostdinc++',
'-I' + os.path.join(LIBCXX_DIR, 'include')] + cflags
if args.bootstrap:
deployment_target = '10.6'
base_cmake_args = ['-GNinja',
'-DCMAKE_BUILD_TYPE=Release',
'-DLLVM_ENABLE_ASSERTIONS=ON',
'-DLLVM_ENABLE_THREADS=OFF',
]
if args.bootstrap:
print 'Building bootstrap compiler'
if not os.path.exists(LLVM_BOOTSTRAP_DIR):
os.makedirs(LLVM_BOOTSTRAP_DIR)
os.chdir(LLVM_BOOTSTRAP_DIR)
bootstrap_args = base_cmake_args + [
'-DLLVM_TARGETS_TO_BUILD=host',
'-DCMAKE_INSTALL_PREFIX=' + LLVM_BOOTSTRAP_INSTALL_DIR,
'-DCMAKE_C_FLAGS=' + ' '.join(cflags),
'-DCMAKE_CXX_FLAGS=' + ' '.join(cxxflags),
]
if cc is not None: bootstrap_args.append('-DCMAKE_C_COMPILER=' + cc)
if cxx is not None: bootstrap_args.append('-DCMAKE_CXX_COMPILER=' + cxx)
RunCommand(['cmake'] + bootstrap_args + [LLVM_DIR], msvc_arch='x64')
RunCommand(['ninja'], msvc_arch='x64')
if args.run_tests:
RunCommand(['ninja', 'check-all'], msvc_arch='x64')
RunCommand(['ninja', 'install'], msvc_arch='x64')
if sys.platform == 'win32':
cc = os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'bin', 'clang-cl.exe')
cxx = os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'bin', 'clang-cl.exe')
# CMake has a hard time with backslashes in compiler paths:
# https://stackoverflow.com/questions/13050827
cc = cc.replace('\\', '/')
cxx = cxx.replace('\\', '/')
else:
cc = os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'bin', 'clang')
cxx = os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'bin', 'clang++')
print 'Building final compiler'
if sys.platform == 'darwin':
# Build libc++.dylib while some bots are still on OS X 10.6.
libcxxbuild = os.path.join(LLVM_BUILD_DIR, 'libcxxbuild')
if os.path.isdir(libcxxbuild):
RmTree(libcxxbuild)
libcxxflags = ['-O3', '-std=c++11', '-fstrict-aliasing']
# libcxx and libcxxabi both have a file stdexcept.cpp, so put their .o files
# into different subdirectories.
os.makedirs(os.path.join(libcxxbuild, 'libcxx'))
os.chdir(os.path.join(libcxxbuild, 'libcxx'))
RunCommand(['c++', '-c'] + cxxflags + libcxxflags +
glob.glob(os.path.join(LIBCXX_DIR, 'src', '*.cpp')))
os.makedirs(os.path.join(libcxxbuild, 'libcxxabi'))
os.chdir(os.path.join(libcxxbuild, 'libcxxabi'))
RunCommand(['c++', '-c'] + cxxflags + libcxxflags +
glob.glob(os.path.join(LIBCXXABI_DIR, 'src', '*.cpp')) +
['-I' + os.path.join(LIBCXXABI_DIR, 'include')])
os.chdir(libcxxbuild)
libdir = os.path.join(LIBCXX_DIR, 'lib')
RunCommand(['cc'] + glob.glob('libcxx/*.o') + glob.glob('libcxxabi/*.o') +
['-o', 'libc++.1.dylib', '-dynamiclib', '-nodefaultlibs',
'-current_version', '1', '-compatibility_version', '1', '-lSystem',
'-install_name', '@executable_path/libc++.dylib',
'-Wl,-unexported_symbols_list,' + libdir + '/libc++unexp.exp',
'-Wl,-force_symbols_not_weak_list,' + libdir + '/notweak.exp',
'-Wl,-force_symbols_weak_list,' + libdir + '/weak.exp'])
if os.path.exists('libc++.dylib'):
os.remove('libc++.dylib')
os.symlink('libc++.1.dylib', 'libc++.dylib')
ldflags += ['-stdlib=libc++', '-L' + libcxxbuild]
if args.bootstrap:
# Now that the libc++ headers have been installed and libc++.dylib is
# built, delete the libc++ checkout again so that it's not part of the
# main build below -- the libc++(abi) tests don't pass on OS X in
# bootstrap builds (http://llvm.org/PR24068)
RmTree(LIBCXX_DIR)
RmTree(LIBCXXABI_DIR)
cxxflags = ['-stdlib=libc++', '-nostdinc++',
'-I' + os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR,
'include/c++/v1')
] + cflags
# Build clang.
binutils_incdir = ''
if sys.platform.startswith('linux'):
binutils_incdir = os.path.join(BINUTILS_DIR, 'Linux_x64/Release/include')
# If building at head, define a macro that plugins can use for #ifdefing
# out code that builds at head, but not at LLVM_WIN_REVISION or vice versa.
if use_head_revision:
cflags += ['-DLLVM_FORCE_HEAD_REVISION']
cxxflags += ['-DLLVM_FORCE_HEAD_REVISION']
CreateChromeToolsShim()
deployment_env = None
if deployment_target:
deployment_env = os.environ.copy()
deployment_env['MACOSX_DEPLOYMENT_TARGET'] = deployment_target
cmake_args = base_cmake_args + [
'-DLLVM_BINUTILS_INCDIR=' + binutils_incdir,
'-DCMAKE_C_FLAGS=' + ' '.join(cflags),
'-DCMAKE_CXX_FLAGS=' + ' '.join(cxxflags),
'-DCMAKE_EXE_LINKER_FLAGS=' + ' '.join(ldflags),
'-DCMAKE_SHARED_LINKER_FLAGS=' + ' '.join(ldflags),
'-DCMAKE_MODULE_LINKER_FLAGS=' + ' '.join(ldflags),
'-DCMAKE_INSTALL_PREFIX=' + LLVM_BUILD_DIR,
'-DCHROMIUM_TOOLS_SRC=%s' % os.path.join(CHROMIUM_DIR, 'tools', 'clang'),
'-DCHROMIUM_TOOLS=%s' % ';'.join(args.tools)]
# TODO(thakis): Unconditionally append this to base_cmake_args instead once
# compiler-rt can build with clang-cl on Windows (http://llvm.org/PR23698)
cc_args = base_cmake_args if sys.platform != 'win32' else cmake_args
if cc is not None: cc_args.append('-DCMAKE_C_COMPILER=' + cc)
if cxx is not None: cc_args.append('-DCMAKE_CXX_COMPILER=' + cxx)
if not os.path.exists(LLVM_BUILD_DIR):
os.makedirs(LLVM_BUILD_DIR)
os.chdir(LLVM_BUILD_DIR)
RunCommand(['cmake'] + cmake_args + [LLVM_DIR],
msvc_arch='x64', env=deployment_env)
RunCommand(['ninja'], msvc_arch='x64')
if args.tools:
# If any Chromium tools were built, install those now.
RunCommand(['ninja', 'cr-install'], msvc_arch='x64')
if sys.platform == 'darwin':
CopyFile(os.path.join(LLVM_BUILD_DIR, 'libc++.1.dylib'),
os.path.join(LLVM_BUILD_DIR, 'bin'))
# See http://crbug.com/256342
RunCommand(['strip', '-x', os.path.join(LLVM_BUILD_DIR, 'bin', 'clang')])
elif sys.platform.startswith('linux'):
RunCommand(['strip', os.path.join(LLVM_BUILD_DIR, 'bin', 'clang')])
# Do an x86 build of compiler-rt to get the 32-bit ASan run-time.
# TODO(hans): Remove once the regular build above produces this.
if not os.path.exists(COMPILER_RT_BUILD_DIR):
os.makedirs(COMPILER_RT_BUILD_DIR)
os.chdir(COMPILER_RT_BUILD_DIR)
# TODO(thakis): Add this once compiler-rt can build with clang-cl (see
# above).
#if args.bootstrap and sys.platform == 'win32':
# The bootstrap compiler produces 64-bit binaries by default.
#cflags += ['-m32']
#cxxflags += ['-m32']
compiler_rt_args = base_cmake_args + [
'-DCMAKE_C_FLAGS=' + ' '.join(cflags),
'-DCMAKE_CXX_FLAGS=' + ' '.join(cxxflags)]
if sys.platform != 'win32':
compiler_rt_args += ['-DLLVM_CONFIG_PATH=' +
os.path.join(LLVM_BUILD_DIR, 'bin', 'llvm-config')]
RunCommand(['cmake'] + compiler_rt_args + [LLVM_DIR],
msvc_arch='x86', env=deployment_env)
RunCommand(['ninja', 'compiler-rt'], msvc_arch='x86')
# TODO(hans): Make this (and the .gypi and .isolate files) version number
# independent.
if sys.platform == 'win32':
platform = 'windows'
elif sys.platform == 'darwin':
platform = 'darwin'
else:
assert sys.platform.startswith('linux')
platform = 'linux'
asan_rt_lib_src_dir = os.path.join(COMPILER_RT_BUILD_DIR, 'lib', 'clang',
VERSION, 'lib', platform)
asan_rt_lib_dst_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang',
VERSION, 'lib', platform)
CopyDirectoryContents(asan_rt_lib_src_dir, asan_rt_lib_dst_dir,
r'^.*-i386\.lib$')
CopyDirectoryContents(asan_rt_lib_src_dir, asan_rt_lib_dst_dir,
r'^.*-i386\.dll$')
CopyFile(os.path.join(asan_rt_lib_src_dir, '..', '..', 'asan_blacklist.txt'),
os.path.join(asan_rt_lib_dst_dir, '..', '..'))
if sys.platform == 'win32':
# Make an extra copy of the sanitizer headers, to be put on the include path
# of the fallback compiler.
sanitizer_include_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang',
VERSION, 'include', 'sanitizer')
aux_sanitizer_include_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang',
VERSION, 'include_sanitizer',
'sanitizer')
if not os.path.exists(aux_sanitizer_include_dir):
os.makedirs(aux_sanitizer_include_dir)
for _, _, files in os.walk(sanitizer_include_dir):
for f in files:
CopyFile(os.path.join(sanitizer_include_dir, f),
aux_sanitizer_include_dir)
# Run tests.
if args.run_tests or use_head_revision:
os.chdir(LLVM_BUILD_DIR)
RunCommand(GetVSVersion().SetupScript('x64') +
['&&', 'ninja', 'cr-check-all'])
if args.run_tests:
os.chdir(LLVM_BUILD_DIR)
RunCommand(GetVSVersion().SetupScript('x64') +
['&&', 'ninja', 'check-all'])
WriteStampFile(PACKAGE_VERSION)
print 'Clang update was successful.'
return 0
def main():
  """Entry point: update/build Clang, or delegate to update.sh off-Windows.

  Returns the process exit code (0 on success).
  """
  # The native Python flow below currently only supports Windows/cygwin.
  if sys.platform not in ['win32', 'cygwin']:
    # For non-Windows, fall back to update.sh.
    # TODO(hans): Make update.py replace update.sh completely.
    # This script is called by gclient. gclient opens its hooks subprocesses
    # with (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does
    # custom output processing that breaks printing '\r' characters for
    # single-line updating status messages as printed by curl and wget.
    # Work around this by setting stderr of the update.sh process to stdin (!):
    # gclient doesn't redirect stdin, and while stdin itself is read-only, a
    # dup()ed sys.stdin is writable, try
    #   fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
    # TODO: Fix gclient instead, http://crbug.com/95350
    if '--no-stdin-hack' in sys.argv:
      sys.argv.remove('--no-stdin-hack')
      stderr = None
    else:
      try:
        stderr = os.fdopen(os.dup(sys.stdin.fileno()))
      except (OSError, IOError, ValueError):
        # Was a bare `except:`; catch only what dup()/fdopen() can raise so
        # KeyboardInterrupt and SystemExit still propagate.
        stderr = sys.stderr
    return subprocess.call(
        [os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
        stderr=stderr)

  parser = argparse.ArgumentParser(description='Build Clang.')
  parser.add_argument('--bootstrap', action='store_true',
                      help='first build clang with CC, then with itself.')
  parser.add_argument('--if-needed', action='store_true',
                      help="run only if the script thinks clang is needed")
  parser.add_argument('--force-local-build', action='store_true',
                      help="don't try to download prebuild binaries")
  parser.add_argument('--print-revision', action='store_true',
                      help='print current clang revision and exit.')
  parser.add_argument('--print-clang-version', action='store_true',
                      help='print current clang version (e.g. x.y.z) and exit.')
  parser.add_argument('--run-tests', action='store_true',
                      help='run tests after building; only for local builds')
  parser.add_argument('--tools', nargs='*',
                      help='select which chrome tools to build',
                      default=['plugins', 'blink_gc_plugin'])
  parser.add_argument('--without-patches', action='store_false',
                      help="don't apply patches (default)", dest='with_patches',
                      default=True)
  # For now, these flags are only used for the non-Windows flow, but argparser
  # gets mad if it sees a flag it doesn't recognize.
  parser.add_argument('--no-stdin-hack', action='store_true')

  args = parser.parse_args()

  if args.if_needed:
    is_clang_required = False
    # clang is always used on Mac and Linux.
    if sys.platform == 'darwin' or sys.platform.startswith('linux'):
      is_clang_required = True
    # clang requested via $GYP_DEFINES.
    if re.search(r'\b(clang|asan|lsan|msan|tsan)=1',
                 os.environ.get('GYP_DEFINES', '')):
      is_clang_required = True
    # clang previously downloaded, keep it up-to-date.
    # If you don't want this, delete third_party/llvm-build on your machine.
    if os.path.isdir(LLVM_BUILD_DIR):
      is_clang_required = True
    if not is_clang_required:
      return 0

  if re.search(r'\b(make_clang_dir)=', os.environ.get('GYP_DEFINES', '')):
    print('Skipping Clang update (make_clang_dir= was set in GYP_DEFINES).')
    return 0

  global LLVM_WIN_REVISION, PACKAGE_VERSION
  if args.print_revision:
    if use_head_revision:
      print(GetSvnRevision(LLVM_DIR))
    else:
      print(PACKAGE_VERSION)
    return 0

  if args.print_clang_version:
    sys.stdout.write(VERSION)
    return 0

  # Don't buffer stdout, so that print statements are immediately flushed.
  # Do this only after --print-revision has been handled, else we'll get
  # an error message when this script is run from gn for some reason.
  sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)

  if use_head_revision:
    # Use a real revision number rather than HEAD to make sure that the stamp
    # file logic works.
    LLVM_WIN_REVISION = GetSvnRevision(LLVM_REPO_URL)
    PACKAGE_VERSION = LLVM_WIN_REVISION + '-0'

    args.force_local_build = True
    # Skip local patches when using HEAD: they probably don't apply anymore.
    args.with_patches = False

  return UpdateClang(args)
if __name__ == '__main__':
  # Propagate main()'s return value as the process exit code.
  sys.exit(main())

724
tools/clang/scripts/update.sh Executable file
View File

@@ -0,0 +1,724 @@
#!/usr/bin/env bash
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# This script will check out llvm and clang into third_party/llvm and build it.

# Do NOT CHANGE this if you don't know what you're doing -- see
# https://code.google.com/p/chromium/wiki/UpdatingClang
# Reverting problematic clang rolls is safe, though.
CLANG_REVISION=241602

# This is incremented when pushing a new build of Clang at the same revision.
CLANG_SUB_REVISION=3

PACKAGE_VERSION="${CLANG_REVISION}-${CLANG_SUB_REVISION}"

# Directory layout: everything is resolved relative to this script's location
# inside tools/clang/scripts/.
THIS_DIR="$(dirname "${0}")"
LLVM_DIR="${THIS_DIR}/../../../third_party/llvm"
LLVM_BUILD_DIR="${LLVM_DIR}/../llvm-build/Release+Asserts"
COMPILER_RT_BUILD_DIR="${LLVM_DIR}/../llvm-build/compiler-rt"
LLVM_BOOTSTRAP_DIR="${LLVM_DIR}/../llvm-bootstrap"
LLVM_BOOTSTRAP_INSTALL_DIR="${LLVM_DIR}/../llvm-bootstrap-install"
CLANG_DIR="${LLVM_DIR}/tools/clang"
COMPILER_RT_DIR="${LLVM_DIR}/compiler-rt"
LIBCXX_DIR="${LLVM_DIR}/projects/libcxx"
LIBCXXABI_DIR="${LLVM_DIR}/projects/libcxxabi"
ANDROID_NDK_DIR="${THIS_DIR}/../../../third_party/android_tools/ndk"
STAMP_FILE="${LLVM_DIR}/../llvm-build/cr_build_revision"
CHROMIUM_TOOLS_DIR="${THIS_DIR}/.."
BINUTILS_DIR="${THIS_DIR}/../../../third_party/binutils"

# Absolute variants of the above, for tools that are run from other cwds.
ABS_CHROMIUM_TOOLS_DIR="${PWD}/${CHROMIUM_TOOLS_DIR}"
ABS_LIBCXX_DIR="${PWD}/${LIBCXX_DIR}"
ABS_LIBCXXABI_DIR="${PWD}/${LIBCXXABI_DIR}"
ABS_LLVM_DIR="${PWD}/${LLVM_DIR}"
ABS_LLVM_BUILD_DIR="${PWD}/${LLVM_BUILD_DIR}"
ABS_COMPILER_RT_DIR="${PWD}/${COMPILER_RT_DIR}"
ABS_BINUTILS_DIR="${PWD}/${BINUTILS_DIR}"

# ${A:-a} returns $A if it's set, a else.
LLVM_REPO_URL=${LLVM_URL:-https://llvm.org/svn/llvm-project}

CDS_URL=https://commondatastorage.googleapis.com/chromium-browser-clang

# Default optional environment inputs so that `set -u` below does not trip
# on them when they are unset.
if [[ -z "$GYP_DEFINES" ]]; then
  GYP_DEFINES=
fi
if [[ -z "$GYP_GENERATORS" ]]; then
  GYP_GENERATORS=
fi
if [[ -z "$LLVM_DOWNLOAD_GOLD_PLUGIN" ]]; then
  LLVM_DOWNLOAD_GOLD_PLUGIN=
fi

# Die if any command dies, error on undefined variable expansions.
set -eu

if [[ -n ${LLVM_FORCE_HEAD_REVISION:-''} ]]; then
  # Use a real revision number rather than HEAD to make sure that the stamp file
  # logic works.
  CLANG_REVISION=$(svn info "$LLVM_REPO_URL" \
    | grep 'Revision:' | awk '{ printf $2; }')
  PACKAGE_VERSION="${CLANG_REVISION}-0"
fi
OS="$(uname -s)"

# Parse command line options.
# Empty string means "off"; "yes" (or any non-empty value) means "on" — the
# rest of the script tests these with [[ -n ... ]] / [[ -z ... ]].
if_needed=
force_local_build=
run_tests=
bootstrap=
with_android=yes
chrome_tools="plugins;blink_gc_plugin"
gcc_toolchain=
with_patches=yes

if [[ "${OS}" = "Darwin" ]]; then
  with_android=
fi

while [[ $# > 0 ]]; do
  case $1 in
    --bootstrap)
      bootstrap=yes
      ;;
    --if-needed)
      if_needed=yes
      ;;
    --force-local-build)
      force_local_build=yes
      ;;
    --print-revision)
      # Print the revision (or the checked-out head revision) and exit early.
      if [[ -n ${LLVM_FORCE_HEAD_REVISION:-''} ]]; then
        svn info "$LLVM_DIR" | grep 'Revision:' | awk '{ printf $2; }'
      else
        echo $PACKAGE_VERSION
      fi
      exit 0
      ;;
    --run-tests)
      run_tests=yes
      ;;
    --without-android)
      with_android=
      ;;
    --without-patches)
      with_patches=
      ;;
    --with-chrome-tools)
      shift
      if [[ $# == 0 ]]; then
        echo "--with-chrome-tools requires an argument."
        exit 1
      fi
      chrome_tools=$1
      ;;
    --gcc-toolchain)
      shift
      if [[ $# == 0 ]]; then
        echo "--gcc-toolchain requires an argument."
        exit 1
      fi
      # Sanity-check that the given prefix actually contains a gcc binary.
      if [[ -x "$1/bin/gcc" ]]; then
        gcc_toolchain=$1
      else
        echo "Invalid --gcc-toolchain: '$1'."
        echo "'$1/bin/gcc' does not appear to be valid."
        exit 1
      fi
      ;;
    --help)
      echo "usage: $0 [--force-local-build] [--if-needed] [--run-tests] "
      echo "--bootstrap: First build clang with CC, then with itself."
      echo "--force-local-build: Don't try to download prebuilt binaries."
      echo "--if-needed: Download clang only if the script thinks it is needed."
      echo "--run-tests: Run tests after building. Only for local builds."
      echo "--print-revision: Print current clang revision and exit."
      echo "--without-android: Don't build ASan Android runtime library."
      echo "--with-chrome-tools: Select which chrome tools to build." \
           "Defaults to plugins;blink_gc_plugin."
      echo "    Example: --with-chrome-tools plugins;empty-string"
      echo "--gcc-toolchain: Set the prefix for which GCC version should"
      echo "    be used for building. For example, to use gcc in"
      echo "    /opt/foo/bin/gcc, use '--gcc-toolchain '/opt/foo"
      echo "--without-patches: Don't apply local patches."
      echo
      exit 1
      ;;
    *)
      echo "Unknown argument: '$1'."
      echo "Use --help for help."
      exit 1
      ;;
  esac
  shift
done
# Extra setup when building LLVM at head: force a local build, drop local
# patches, and fetch newer GCC/CMake toolchains when the host's are too old.
if [[ -n ${LLVM_FORCE_HEAD_REVISION:-''} ]]; then
  force_local_build=yes

  # Skip local patches when using HEAD: they probably don't apply anymore.
  with_patches=

  if ! [[ "$GYP_DEFINES" =~ .*OS=android.* ]]; then
    # Only build the Android ASan rt when targetting Android.
    with_android=
  fi

  LLVM_BUILD_TOOLS_DIR="${ABS_LLVM_DIR}/../llvm-build-tools"

  if [[ "${OS}" == "Linux" ]] && [[ -z "${gcc_toolchain}" ]]; then
    if [[ $(gcc -dumpversion) < "4.7.0" ]]; then
      # We need a newer GCC version.
      if [[ ! -e "${LLVM_BUILD_TOOLS_DIR}/gcc482" ]]; then
        echo "Downloading pre-built GCC 4.8.2..."
        mkdir -p "${LLVM_BUILD_TOOLS_DIR}"
        curl --fail -L "${CDS_URL}/tools/gcc482.tgz" | \
            tar zxf - -C "${LLVM_BUILD_TOOLS_DIR}"
        echo Done
      fi
      gcc_toolchain="${LLVM_BUILD_TOOLS_DIR}/gcc482"
    else
      # Always set gcc_toolchain; llvm-symbolizer needs the bundled libstdc++.
      gcc_toolchain="$(dirname $(dirname $(which gcc)))"
    fi
  fi

  if [[ "${OS}" == "Linux" || "${OS}" == "Darwin" ]]; then
    if [[ $(cmake --version | grep -Eo '[0-9.]+') < "3.0" ]]; then
      # We need a newer CMake version.
      if [[ ! -e "${LLVM_BUILD_TOOLS_DIR}/cmake310" ]]; then
        echo "Downloading pre-built CMake 3.10..."
        mkdir -p "${LLVM_BUILD_TOOLS_DIR}"
        curl --fail -L "${CDS_URL}/tools/cmake310_${OS}.tgz" | \
            tar zxf - -C "${LLVM_BUILD_TOOLS_DIR}"
        echo Done
      fi
      # Put the downloaded cmake first on PATH for the rest of this script.
      export PATH="${LLVM_BUILD_TOOLS_DIR}/cmake310/bin:${PATH}"
    fi
  fi

  echo "LLVM_FORCE_HEAD_REVISION was set; using r${CLANG_REVISION}"
fi
# --if-needed: exit 0 immediately unless something indicates clang is wanted.
if [[ -n "$if_needed" ]]; then
  if [[ "${OS}" == "Darwin" ]]; then
    # clang is always used on Mac.
    true
  elif [[ "${OS}" == "Linux" ]]; then
    # clang is also always used on Linux.
    true
  elif [[ "$GYP_DEFINES" =~ .*(clang|tsan|asan|lsan|msan)=1.* ]]; then
    # clang requested via $GYP_DEFINES.
    true
  elif [[ -d "${LLVM_BUILD_DIR}" ]]; then
    # clang previously downloaded, keep it up-to-date.
    # If you don't want this, delete third_party/llvm-build on your machine.
    true
  else
    # clang wasn't needed, not doing anything.
    exit 0
  fi
fi

# Check if there's anything to be done, exit early if not.
if [[ -f "${STAMP_FILE}" ]]; then
  PREVIOUSLY_BUILT_REVISON=$(cat "${STAMP_FILE}")
  if [[ -z "$force_local_build" ]] && \
       [[ "${PREVIOUSLY_BUILT_REVISON}" = \
          "${PACKAGE_VERSION}" ]]; then
    echo "Clang already at ${PACKAGE_VERSION}"
    exit 0
  fi
fi
# To always force a new build if someone interrupts their build half way.
rm -f "${STAMP_FILE}"
if [[ -z "$force_local_build" ]]; then
  # Check if there's a prebuilt binary and if so just fetch that. That's faster,
  # and goma relies on having matching binary hashes on client and server too.
  CDS_FILE="clang-${PACKAGE_VERSION}.tgz"
  CDS_OUT_DIR=$(mktemp -d -t clang_download.XXXXXX)
  CDS_OUTPUT="${CDS_OUT_DIR}/${CDS_FILE}"
  if [ "${OS}" = "Linux" ]; then
    CDS_FULL_URL="${CDS_URL}/Linux_x64/${CDS_FILE}"
  elif [ "${OS}" = "Darwin" ]; then
    CDS_FULL_URL="${CDS_URL}/Mac/${CDS_FILE}"
  fi
  echo Trying to download prebuilt clang
  # Prefer curl, fall back to wget; on failure remove the temp dir so the
  # existence check below fails and we fall through to a local build.
  if which curl > /dev/null; then
    curl -L --fail "${CDS_FULL_URL}" -o "${CDS_OUTPUT}" || \
        rm -rf "${CDS_OUT_DIR}"
  elif which wget > /dev/null; then
    wget "${CDS_FULL_URL}" -O "${CDS_OUTPUT}" || rm -rf "${CDS_OUT_DIR}"
  else
    echo "Neither curl nor wget found. Please install one of these."
    exit 1
  fi
  if [ -f "${CDS_OUTPUT}" ]; then
    rm -rf "${LLVM_BUILD_DIR}"
    mkdir -p "${LLVM_BUILD_DIR}"
    tar -xzf "${CDS_OUTPUT}" -C "${LLVM_BUILD_DIR}"
    echo clang "${PACKAGE_VERSION}" unpacked
    echo "${PACKAGE_VERSION}" > "${STAMP_FILE}"
    rm -rf "${CDS_OUT_DIR}"
    # Download the gold plugin if requested to by an environment variable.
    # This is used by the CFI ClusterFuzz bot.
    if [[ -n "${LLVM_DOWNLOAD_GOLD_PLUGIN}" ]]; then
      ${THIS_DIR}/../../../build/download_gold_plugin.py
    fi
    exit 0
  else
    echo Did not find prebuilt clang "${PACKAGE_VERSION}", building
  fi
fi
# Pre-flight checks for a local build: Android NDK (if needed), cmake, ninja.
if [[ -n "${with_android}" ]] && ! [[ -d "${ANDROID_NDK_DIR}" ]]; then
  echo "Android NDK not found at ${ANDROID_NDK_DIR}"
  echo "The Android NDK is needed to build a Clang whose -fsanitize=address"
  echo "works on Android. See "
  echo "http://code.google.com/p/chromium/wiki/AndroidBuildInstructions for how"
  echo "to install the NDK, or pass --without-android."
  exit 1
fi

# Check that cmake and ninja are available.
if ! which cmake > /dev/null; then
  echo "CMake needed to build clang; please install"
  exit 1
fi
if ! which ninja > /dev/null; then
  echo "ninja needed to build clang, please install"
  exit 1
fi
# Revert any files that previous runs of this script patched, then (re-)check
# out llvm, clang, compiler-rt and (on Mac) libc++/libc++abi at CLANG_REVISION.
echo Reverting previously patched files
for i in \
      "${CLANG_DIR}/test/Index/crash-recovery-modules.m" \
      "${CLANG_DIR}/unittests/libclang/LibclangTest.cpp" \
      "${COMPILER_RT_DIR}/lib/asan/asan_rtl.cc" \
      "${COMPILER_RT_DIR}/test/asan/TestCases/Linux/new_array_cookie_test.cc" \
      "${LLVM_DIR}/test/DebugInfo/gmlt.ll" \
      "${LLVM_DIR}/lib/CodeGen/SpillPlacement.cpp" \
      "${LLVM_DIR}/lib/CodeGen/SpillPlacement.h" \
      "${LLVM_DIR}/lib/Transforms/Instrumentation/MemorySanitizer.cpp" \
      "${CLANG_DIR}/test/Driver/env.c" \
      "${CLANG_DIR}/lib/Frontend/InitPreprocessor.cpp" \
      "${CLANG_DIR}/test/Frontend/exceptions.c" \
      "${CLANG_DIR}/test/Preprocessor/predefined-exceptions.m" \
      "${LLVM_DIR}/test/Bindings/Go/go.test" \
      "${CLANG_DIR}/lib/Parse/ParseExpr.cpp" \
      "${CLANG_DIR}/lib/Parse/ParseTemplate.cpp" \
      "${CLANG_DIR}/lib/Sema/SemaDeclCXX.cpp" \
      "${CLANG_DIR}/lib/Sema/SemaExprCXX.cpp" \
      "${CLANG_DIR}/test/SemaCXX/default2.cpp" \
      "${CLANG_DIR}/test/SemaCXX/typo-correction-delayed.cpp" \
      "${COMPILER_RT_DIR}/lib/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc" \
      "${COMPILER_RT_DIR}/test/tsan/signal_segv_handler.cc" \
      "${COMPILER_RT_DIR}/lib/sanitizer_common/sanitizer_coverage_libcdep.cc" \
      "${COMPILER_RT_DIR}/cmake/config-ix.cmake" \
      "${COMPILER_RT_DIR}/CMakeLists.txt" \
      "${COMPILER_RT_DIR}/lib/ubsan/ubsan_platform.h" \
      ; do
  if [[ -e "${i}" ]]; then
    rm -f "${i}"  # For unversioned files.
    svn revert "${i}"
  fi;
done

echo Remove the Clang tools shim dir
CHROME_TOOLS_SHIM_DIR=${ABS_LLVM_DIR}/tools/chrometools
rm -rfv ${CHROME_TOOLS_SHIM_DIR}

echo Getting LLVM r"${CLANG_REVISION}" in "${LLVM_DIR}"
if ! svn co --force "${LLVM_REPO_URL}/llvm/trunk@${CLANG_REVISION}" \
                    "${LLVM_DIR}"; then
  echo Checkout failed, retrying
  rm -rf "${LLVM_DIR}"
  svn co --force "${LLVM_REPO_URL}/llvm/trunk@${CLANG_REVISION}" "${LLVM_DIR}"
fi

echo Getting clang r"${CLANG_REVISION}" in "${CLANG_DIR}"
svn co --force "${LLVM_REPO_URL}/cfe/trunk@${CLANG_REVISION}" "${CLANG_DIR}"

# We have moved from building compiler-rt in the LLVM tree, to a separate
# directory. Nuke any previous checkout to avoid building it.
rm -rf "${LLVM_DIR}/projects/compiler-rt"
echo Getting compiler-rt r"${CLANG_REVISION}" in "${COMPILER_RT_DIR}"
svn co --force "${LLVM_REPO_URL}/compiler-rt/trunk@${CLANG_REVISION}" \
               "${COMPILER_RT_DIR}"

# clang needs a libc++ checkout, else -stdlib=libc++ won't find includes
# (i.e. this is needed for bootstrap builds).
if [ "${OS}" = "Darwin" ]; then
  echo Getting libc++ r"${CLANG_REVISION}" in "${LIBCXX_DIR}"
  svn co --force "${LLVM_REPO_URL}/libcxx/trunk@${CLANG_REVISION}" \
                 "${LIBCXX_DIR}"
fi

# While we're bundling our own libc++ on OS X, we need to compile libc++abi
# into it too (since OS X 10.6 doesn't have libc++abi.dylib either).
if [ "${OS}" = "Darwin" ]; then
  echo Getting libc++abi r"${CLANG_REVISION}" in "${LIBCXXABI_DIR}"
  svn co --force "${LLVM_REPO_URL}/libcxxabi/trunk@${CLANG_REVISION}" \
                 "${LIBCXXABI_DIR}"
fi
# Apply the local Chromium patches to the fresh checkouts (skipped with
# --without-patches or when building at head).
# NOTE(review): the extraction this was recovered from stripped leading
# whitespace; unified-diff context lines below were re-prefixed with a single
# space so `patch -p0` can apply them — verify against the pristine script.
if [[ -n "$with_patches" ]]; then
  # Apply patch for tests failing with --disable-pthreads (llvm.org/PR11974)
  pushd "${CLANG_DIR}"
  cat << 'EOF' |
--- test/Index/crash-recovery-modules.m (revision 202554)
+++ test/Index/crash-recovery-modules.m (working copy)
@@ -12,6 +12,8 @@
 // REQUIRES: crash-recovery
 // REQUIRES: shell
+// XFAIL: *
+// (PR11974)
 @import Crash;
EOF
  patch -p0
  popd

  pushd "${CLANG_DIR}"
  cat << 'EOF' |
--- unittests/libclang/LibclangTest.cpp (revision 215949)
+++ unittests/libclang/LibclangTest.cpp (working copy)
@@ -431,7 +431,7 @@
   EXPECT_EQ(0U, clang_getNumDiagnostics(ClangTU));
 }
-TEST_F(LibclangReparseTest, ReparseWithModule) {
+TEST_F(LibclangReparseTest, DISABLED_ReparseWithModule) {
   const char *HeaderTop = "#ifndef H\n#define H\nstruct Foo { int bar;";
   const char *HeaderBottom = "\n};\n#endif\n";
   const char *MFile = "#include \"HeaderFile.h\"\nint main() {"
EOF
  patch -p0
  popd

  # This Go bindings test doesn't work after the bootstrap build on Linux. (PR21552)
  pushd "${LLVM_DIR}"
  cat << 'EOF' |
--- test/Bindings/Go/go.test (revision 223109)
+++ test/Bindings/Go/go.test (working copy)
@@ -1,3 +1,3 @@
-; RUN: llvm-go test llvm.org/llvm/bindings/go/llvm
+; RUN: true
 ; REQUIRES: shell
EOF
  patch -p0
  popd

  # The UBSan run-time, which is now bundled with the ASan run-time, doesn't work
  # on Mac OS X 10.8 (PR23539).
  pushd "${COMPILER_RT_DIR}"
  cat << 'EOF' |
Index: CMakeLists.txt
===================================================================
--- CMakeLists.txt (revision 241602)
+++ CMakeLists.txt (working copy)
@@ -305,6 +305,7 @@
     list(APPEND SANITIZER_COMMON_SUPPORTED_OS iossim)
   endif()
 endif()
+ set(SANITIZER_MIN_OSX_VERSION "10.7")
 if(SANITIZER_MIN_OSX_VERSION VERSION_LESS "10.7")
   message(FATAL_ERROR "Too old OS X version: ${SANITIZER_MIN_OSX_VERSION}")
 endif()
EOF
  patch -p0
  popd
fi
# Echo all commands.
set -x

# Set default values for CC and CXX if they're not set in the environment.
CC=${CC:-cc}
CXX=${CXX:-c++}

if [[ -n "${gcc_toolchain}" ]]; then
  # Use the specified gcc installation for building.
  CC="$gcc_toolchain/bin/gcc"
  CXX="$gcc_toolchain/bin/g++"
  # Set LD_LIBRARY_PATH to make auxiliary targets (tablegen, bootstrap compiler,
  # etc.) find the .so.
  export LD_LIBRARY_PATH="$(dirname $(${CXX} -print-file-name=libstdc++.so.6))"
fi

CFLAGS=""
CXXFLAGS=""
LDFLAGS=""

# LLVM uses C++11 starting in llvm 3.5. On Linux, this means libstdc++4.7+ is
# needed, on OS X it requires libc++. clang only automatically links to libc++
# when targeting OS X 10.9+, so add stdlib=libc++ explicitly so clang can run on
# OS X versions as old as 10.7.
# TODO(thakis): Some bots are still on 10.6 (nacl...), so for now bundle
# libc++.dylib. Remove this once all bots are on 10.7+, then use
# -DLLVM_ENABLE_LIBCXX=ON and change deployment_target to 10.7.
deployment_target=""

if [ "${OS}" = "Darwin" ]; then
  # When building on 10.9, /usr/include usually doesn't exist, and while
  # Xcode's clang automatically sets a sysroot, self-built clangs don't.
  CFLAGS="-isysroot $(xcrun --show-sdk-path)"
  CXXFLAGS="-stdlib=libc++ -nostdinc++ -I${ABS_LIBCXX_DIR}/include ${CFLAGS}"

  if [[ -n "${bootstrap}" ]]; then
    deployment_target=10.6
  fi
fi

# Build bootstrap clang if requested: a first clang built with the host
# compiler, which is then used as CC/CXX for the final build below.
if [[ -n "${bootstrap}" ]]; then
  ABS_INSTALL_DIR="${PWD}/${LLVM_BOOTSTRAP_INSTALL_DIR}"
  echo "Building bootstrap compiler"
  mkdir -p "${LLVM_BOOTSTRAP_DIR}"
  pushd "${LLVM_BOOTSTRAP_DIR}"
  cmake -GNinja \
      -DCMAKE_BUILD_TYPE=Release \
      -DLLVM_ENABLE_ASSERTIONS=ON \
      -DLLVM_TARGETS_TO_BUILD=host \
      -DLLVM_ENABLE_THREADS=OFF \
      -DCMAKE_INSTALL_PREFIX="${ABS_INSTALL_DIR}" \
      -DCMAKE_C_COMPILER="${CC}" \
      -DCMAKE_CXX_COMPILER="${CXX}" \
      -DCMAKE_C_FLAGS="${CFLAGS}" \
      -DCMAKE_CXX_FLAGS="${CXXFLAGS}" \
      ../llvm
  ninja
  if [[ -n "${run_tests}" ]]; then
    ninja check-all
  fi
  ninja install
  if [[ -n "${gcc_toolchain}" ]]; then
    # Copy that gcc's stdlibc++.so.6 to the build dir, so the bootstrap
    # compiler can start.
    cp -v "$(${CXX} -print-file-name=libstdc++.so.6)" \
        "${ABS_INSTALL_DIR}/lib/"
  fi
  popd
  # The final build below now uses the just-built bootstrap clang.
  CC="${ABS_INSTALL_DIR}/bin/clang"
  CXX="${ABS_INSTALL_DIR}/bin/clang++"

  if [[ -n "${gcc_toolchain}" ]]; then
    # Tell the bootstrap compiler to use a specific gcc prefix to search
    # for standard library headers and shared object file.
    CFLAGS="--gcc-toolchain=${gcc_toolchain}"
    CXXFLAGS="--gcc-toolchain=${gcc_toolchain}"
  fi
  echo "Building final compiler"
fi
# Build clang (in a separate directory).
# The clang bots have this path hardcoded in built/scripts/slave/compile.py,
# so if you change it you also need to change these links.
mkdir -p "${LLVM_BUILD_DIR}"
pushd "${LLVM_BUILD_DIR}"

# Build libc++.dylib while some bots are still on OS X 10.6.
if [ "${OS}" = "Darwin" ]; then
  rm -rf libcxxbuild
  LIBCXXFLAGS="-O3 -std=c++11 -fstrict-aliasing"

  # libcxx and libcxxabi both have a file stdexcept.cpp, so put their .o files
  # into different subdirectories.
  mkdir -p libcxxbuild/libcxx
  pushd libcxxbuild/libcxx
  ${CXX:-c++} -c ${CXXFLAGS} ${LIBCXXFLAGS} "${ABS_LIBCXX_DIR}"/src/*.cpp
  popd

  mkdir -p libcxxbuild/libcxxabi
  pushd libcxxbuild/libcxxabi
  ${CXX:-c++} -c ${CXXFLAGS} ${LIBCXXFLAGS} "${ABS_LIBCXXABI_DIR}"/src/*.cpp -I"${ABS_LIBCXXABI_DIR}/include"
  popd

  # Link the combined libc++.1.dylib and expose it under both names.
  pushd libcxxbuild
  ${CC:-cc} libcxx/*.o libcxxabi/*.o -o libc++.1.dylib -dynamiclib \
      -nodefaultlibs -current_version 1 -compatibility_version 1 \
      -lSystem -install_name @executable_path/libc++.dylib \
      -Wl,-unexported_symbols_list,${ABS_LIBCXX_DIR}/lib/libc++unexp.exp \
      -Wl,-force_symbols_not_weak_list,${ABS_LIBCXX_DIR}/lib/notweak.exp \
      -Wl,-force_symbols_weak_list,${ABS_LIBCXX_DIR}/lib/weak.exp
  ln -sf libc++.1.dylib libc++.dylib
  popd
  LDFLAGS+="-stdlib=libc++ -L${PWD}/libcxxbuild"

  if [[ -n "${bootstrap}" ]]; then
    # Now that the libc++ headers have been installed and libc++.dylib is built,
    # delete the libc++ checkout again so that it's not part of the main
    # build below -- the libc++(abi) tests don't pass on OS X in bootstrap
    # builds (http://llvm.org/PR24068)
    rm -rf "${ABS_LIBCXX_DIR}"
    rm -rf "${ABS_LIBCXXABI_DIR}"
    CXXFLAGS="-stdlib=libc++ -nostdinc++ -I${ABS_INSTALL_DIR}/include/c++/v1 ${CFLAGS}"
  fi
fi

# Find the binutils include dir for the gold plugin.
BINUTILS_INCDIR=""
if [ "${OS}" = "Linux" ]; then
  BINUTILS_INCDIR="${ABS_BINUTILS_DIR}/Linux_x64/Release/include"
fi

# If building at head, define a macro that plugins can use for #ifdefing
# out code that builds at head, but not at CLANG_REVISION or vice versa.
if [[ -n ${LLVM_FORCE_HEAD_REVISION:-''} ]]; then
  CFLAGS="${CFLAGS} -DLLVM_FORCE_HEAD_REVISION"
  CXXFLAGS="${CXXFLAGS} -DLLVM_FORCE_HEAD_REVISION"
fi

# Hook the Chromium tools into the LLVM build. Several Chromium tools have
# dependencies on LLVM/Clang libraries. The LLVM build detects implicit tools
# in the tools subdirectory, so install a shim CMakeLists.txt that forwards to
# the real directory for the Chromium tools.
# Note that the shim directory name intentionally has no - or _. The implicit
# tool detection logic munges them in a weird way.
mkdir -v ${CHROME_TOOLS_SHIM_DIR}
cat > ${CHROME_TOOLS_SHIM_DIR}/CMakeLists.txt << EOF
# Since tools/clang isn't actually a subdirectory, use the two argument version
# to specify where build artifacts go. CMake doesn't allow reusing the same
# binary dir for multiple source dirs, so the build artifacts have to go into a
# subdirectory...
add_subdirectory(\${CHROMIUM_TOOLS_SRC} \${CMAKE_CURRENT_BINARY_DIR}/a)
EOF
rm -fv CMakeCache.txt
MACOSX_DEPLOYMENT_TARGET=${deployment_target} cmake -GNinja \
    -DCMAKE_BUILD_TYPE=Release \
    -DLLVM_ENABLE_ASSERTIONS=ON \
    -DLLVM_ENABLE_THREADS=OFF \
    -DLLVM_BINUTILS_INCDIR="${BINUTILS_INCDIR}" \
    -DCMAKE_C_COMPILER="${CC}" \
    -DCMAKE_CXX_COMPILER="${CXX}" \
    -DCMAKE_C_FLAGS="${CFLAGS}" \
    -DCMAKE_CXX_FLAGS="${CXXFLAGS}" \
    -DCMAKE_EXE_LINKER_FLAGS="${LDFLAGS}" \
    -DCMAKE_SHARED_LINKER_FLAGS="${LDFLAGS}" \
    -DCMAKE_MODULE_LINKER_FLAGS="${LDFLAGS}" \
    -DCMAKE_INSTALL_PREFIX="${ABS_LLVM_BUILD_DIR}" \
    -DCHROMIUM_TOOLS_SRC="${ABS_CHROMIUM_TOOLS_DIR}" \
    -DCHROMIUM_TOOLS="${chrome_tools}" \
    "${ABS_LLVM_DIR}"
env
if [[ -n "${gcc_toolchain}" ]]; then
  # Copy in the right stdlibc++.so.6 so clang can start.
  mkdir -p lib
  cp -v "$(${CXX} ${CXXFLAGS} -print-file-name=libstdc++.so.6)" lib/
fi
ninja

# If any Chromium tools were built, install those now.
if [[ -n "${chrome_tools}" ]]; then
  ninja cr-install
fi

STRIP_FLAGS=
if [ "${OS}" = "Darwin" ]; then
  # See http://crbug.com/256342
  STRIP_FLAGS=-x

  cp libcxxbuild/libc++.1.dylib bin/
fi
strip ${STRIP_FLAGS} bin/clang
popd
# Build compiler-rt out-of-tree.
mkdir -p "${COMPILER_RT_BUILD_DIR}"
pushd "${COMPILER_RT_BUILD_DIR}"

rm -fv CMakeCache.txt

MACOSX_DEPLOYMENT_TARGET=${deployment_target} cmake -GNinja \
    -DCMAKE_BUILD_TYPE=Release \
    -DLLVM_ENABLE_ASSERTIONS=ON \
    -DLLVM_ENABLE_THREADS=OFF \
    -DCMAKE_C_COMPILER="${CC}" \
    -DCMAKE_CXX_COMPILER="${CXX}" \
    -DLLVM_CONFIG_PATH="${ABS_LLVM_BUILD_DIR}/bin/llvm-config" \
    "${ABS_COMPILER_RT_DIR}"

ninja

# Copy selected output to the main tree.
# Darwin doesn't support cp --parents, so pipe through tar instead.
CLANG_VERSION=$("${ABS_LLVM_BUILD_DIR}/bin/clang" --version | \
    sed -ne 's/clang version \([0-9]\.[0-9]\.[0-9]\).*/\1/p')
ABS_LLVM_CLANG_LIB_DIR="${ABS_LLVM_BUILD_DIR}/lib/clang/${CLANG_VERSION}"
tar -c *blacklist.txt | tar -C ${ABS_LLVM_CLANG_LIB_DIR} -xv
tar -c include/sanitizer | tar -C ${ABS_LLVM_CLANG_LIB_DIR} -xv
if [[ "${OS}" = "Darwin" ]]; then
  tar -c lib/darwin | tar -C ${ABS_LLVM_CLANG_LIB_DIR} -xv
else
  tar -c lib/linux | tar -C ${ABS_LLVM_CLANG_LIB_DIR} -xv
fi

popd

if [[ -n "${with_android}" ]]; then
  # Make a standalone Android toolchain.
  ${ANDROID_NDK_DIR}/build/tools/make-standalone-toolchain.sh \
      --platform=android-19 \
      --install-dir="${LLVM_BUILD_DIR}/android-toolchain" \
      --system=linux-x86_64 \
      --stl=stlport \
      --toolchain=arm-linux-androideabi-4.9

  # Android NDK r9d copies a broken unwind.h into the toolchain, see
  # http://crbug.com/357890
  rm -v "${LLVM_BUILD_DIR}"/android-toolchain/include/c++/*/unwind.h

  # Build ASan runtime for Android in a separate build tree.
  mkdir -p ${LLVM_BUILD_DIR}/android
  pushd ${LLVM_BUILD_DIR}/android
  rm -fv CMakeCache.txt
  MACOSX_DEPLOYMENT_TARGET=${deployment_target} cmake -GNinja \
      -DCMAKE_BUILD_TYPE=Release \
      -DLLVM_ENABLE_ASSERTIONS=ON \
      -DLLVM_ENABLE_THREADS=OFF \
      -DCMAKE_C_COMPILER=${PWD}/../bin/clang \
      -DCMAKE_CXX_COMPILER=${PWD}/../bin/clang++ \
      -DLLVM_CONFIG_PATH=${PWD}/../bin/llvm-config \
      -DCMAKE_C_FLAGS="--target=arm-linux-androideabi --sysroot=${PWD}/../android-toolchain/sysroot -B${PWD}/../android-toolchain" \
      -DCMAKE_CXX_FLAGS="--target=arm-linux-androideabi --sysroot=${PWD}/../android-toolchain/sysroot -B${PWD}/../android-toolchain" \
      -DANDROID=1 \
      "${ABS_COMPILER_RT_DIR}"

  ninja libclang_rt.asan-arm-android.so

  # And copy it into the main build tree.
  cp "$(find -name libclang_rt.asan-arm-android.so)" "${ABS_LLVM_CLANG_LIB_DIR}/lib/linux/"
  popd
fi

# NOTE(review): inside double quotes, the fallback in "${LLVM_FORCE_HEAD_REVISION:-''}"
# expands to the two literal characters '' when the variable is unset, so this
# -n test may always be true (cr-check-all always runs) — verify intent.
if [[ -n "$run_tests" || -n "${LLVM_FORCE_HEAD_REVISION:-''}" ]]; then
  # Run Chrome tool tests.
  ninja -C "${LLVM_BUILD_DIR}" cr-check-all
fi
if [[ -n "$run_tests" ]]; then
  # Run the LLVM and Clang tests.
  ninja -C "${LLVM_BUILD_DIR}" check-all
fi

# After everything is done, log success for this revision.
echo "${PACKAGE_VERSION}" > "${STAMP_FILE}"

159
tools/gn.py Executable file
View File

@@ -0,0 +1,159 @@
#!/usr/bin/env python
# Copyright 2016 The Dart project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import subprocess
import sys
import os
import utils
# Host-machine facts, detected once at import time via the shared utils module.
HOST_OS = utils.GuessOS()
HOST_ARCH = utils.GuessArchitecture()
HOST_CPUS = utils.GuessCpus()
# This script lives in tools/, so the checkout root is one directory up.
SCRIPT_DIR = os.path.dirname(sys.argv[0])
DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..'))
def get_out_dir(args):
  """Return the build output directory for the requested configuration."""
  build_root = utils.GetBuildRoot(HOST_OS, args.mode, args.arch, args.os)
  return build_root
def to_command_line(gn_args):
  """Render a {name: value} dict of GN arguments as 'name=value' strings.

  Booleans become GN's unquoted true/false literals; every other value is
  rendered as a double-quoted GN string.
  """
  def merge(key, value):
    if type(value) is bool:
      return '%s=%s' % (key, 'true' if value else 'false')
    return '%s="%s"' % (key, value)
  # items() instead of the Python-2-only iteritems(): identical behavior on
  # Python 2 for this use, and keeps the helper working on Python 3.
  return [merge(x, y) for x, y in gn_args.items()]
def host_cpu_for_arch(arch):
  """Return the GN host_cpu needed to build for the given Dart architecture."""
  needs_x86_host = ('ia32', 'arm', 'armv6', 'armv5te', 'mips',
                    'simarm', 'simarmv6', 'simarmv5te', 'simmips', 'simdbc')
  if arch in needs_x86_host:
    return 'x86'
  if arch in ('x64', 'arm64', 'simarm64', 'simdbc64'):
    return 'x64'
def target_cpu_for_arch(arch, os):
  """Return the GN target_cpu for the given Dart architecture and target OS.

  Simulator builds target the host CPU; the simdbc variants additionally
  depend on whether the target OS is Android.
  """
  fixed_mapping = {
      'ia32': 'x86',
      'simarm': 'x86',
      'simarmv6': 'x86',
      'simarmv5te': 'x86',
      'simmips': 'x86',
      'simarm64': 'x64',
      'mips': 'mipsel',
  }
  if arch in fixed_mapping:
    return fixed_mapping[arch]
  if arch == 'simdbc':
    return 'arm' if os == 'android' else 'x86'
  if arch == 'simdbc64':
    return 'arm64' if os == 'android' else 'x64'
  # All remaining architectures (x64, arm, arm64, ...) map to themselves.
  return arch
def to_gn_args(args):
  """Build the dict of GN arguments implied by the parsed command line."""
  gn_args = {}

  gn_args['target_os'] = HOST_OS if args.os == 'host' else args.os

  gn_args['dart_target_arch'] = args.arch
  gn_args['target_cpu'] = target_cpu_for_arch(args.arch, args.os)
  gn_args['host_cpu'] = host_cpu_for_arch(args.arch)

  # TODO(zra): This is for the observatory, which currently builds using the
  # checked-in sdk. If/when the observatory no longer builds with the
  # checked-in sdk, this can be removed.
  pub_exe = os.path.join(
      DART_ROOT, 'tools', 'sdks', HOST_OS, 'dart-sdk', 'bin', 'pub')
  gn_args['dart_host_pub_exe'] = pub_exe

  # For Fuchsia support, the default is to not compile in the root
  # certificates.
  gn_args['dart_use_fallback_root_certificates'] = True

  gn_args['dart_zlib_path'] = "//runtime/bin/zlib"

  gn_args['is_debug'] = args.mode == 'debug'
  gn_args['is_release'] = args.mode == 'release'
  gn_args['is_product'] = args.mode == 'product'
  gn_args['dart_debug'] = args.mode == 'debug'

  # This setting is only meaningful for Flutter. Standalone builds of the VM
  # should leave this set to 'develop', which causes the build to defer to
  # 'is_debug', 'is_release' and 'is_product'.
  gn_args['dart_runtime_mode'] = 'develop'

  gn_args['is_clang'] = args.clang and args.os not in ['android']

  if args.target_sysroot:
    gn_args['target_sysroot'] = args.target_sysroot
  if args.toolchain_prefix:
    gn_args['toolchain_prefix'] = args.toolchain_prefix

  # Prefer $GOMA_DIR, then ~/goma, before falling back to building without
  # goma.
  explicit_goma = os.environ.get('GOMA_DIR')
  default_goma = os.path.join(os.getenv('HOME', ''), 'goma')
  if args.goma and explicit_goma:
    gn_args['use_goma'] = True
    gn_args['goma_dir'] = explicit_goma
  elif args.goma and os.path.exists(default_goma):
    gn_args['use_goma'] = True
    gn_args['goma_dir'] = default_goma
  else:
    gn_args['use_goma'] = False
    gn_args['goma_dir'] = None

  return gn_args
def parse_args(args):
  """Parse the command line (argv-style; args[0] is ignored).

  Returns an argparse.Namespace describing the requested build configuration.
  """
  args = args[1:]
  # Fixed garbled help text: was 'A script run` gn gen`.'.
  parser = argparse.ArgumentParser(description='A script to run `gn gen`.')
  parser.add_argument('--mode', '-m',
                      type=str,
                      choices=['debug', 'release', 'product'],
                      default='debug')
  parser.add_argument('--os',
                      type=str,
                      choices=['host', 'android'],
                      default='host')
  parser.add_argument('--arch', '-a',
                      type=str,
                      choices=['ia32', 'x64', 'simarm', 'arm', 'simarmv6', 'armv6',
                               'simarmv5te', 'armv5te', 'simmips', 'mips', 'simarm64', 'arm64',
                               'simdbc', 'simdbc64'],
                      default='x64')
  # goma and clang default to enabled; the --no-* variants switch them off.
  parser.add_argument('--goma', default=True, action='store_true')
  parser.add_argument('--no-goma', dest='goma', action='store_false')
  parser.add_argument('--clang', default=True, action='store_true')
  parser.add_argument('--no-clang', dest='clang', action='store_false')
  parser.add_argument('--target-sysroot', '-s', type=str)
  parser.add_argument('--toolchain-prefix', '-t', type=str)
  return parser.parse_args(args)
def main(argv):
  """Run `gn gen --check` for the configuration given on the command line.

  Returns gn's exit code.
  """
  args = parse_args(argv)

  # Pick the checked-in gn binary matching the host platform.
  if sys.platform.startswith(('cygwin', 'win')):
    subdir = 'win'
  elif sys.platform == 'darwin':
    subdir = 'mac'
  elif sys.platform.startswith('linux'):
    subdir = 'linux64'
  else:
    # Was `raise Error(...)`, but no `Error` class is defined in this module,
    # so that line would itself have died with a confusing NameError.
    raise ValueError('Unknown platform: ' + sys.platform)

  command = [
    '%s/buildtools/%s/gn' % (DART_ROOT, subdir),
    'gen',
    '--check'
  ]
  gn_args = to_command_line(to_gn_args(args))
  out_dir = get_out_dir(args)
  print("gn gen --check in %s" % out_dir)
  command.append(out_dir)
  command.append('--args=%s' % ' '.join(gn_args))
  return subprocess.call(command, cwd=DART_ROOT)
if __name__ == '__main__':
  # Exit with gn's return code so calling build wrappers see failures.
  sys.exit(main(sys.argv))