mirror of
https://github.com/dart-lang/sdk
synced 2024-10-06 16:31:07 +00:00
[infra,flutter] Adds semantically meaningful custom version strings
See https://github.com/flutter/flutter/issues/14751 Change-Id: I3b35c2a32c0dd1301e92abe23b1c6fe2c2e04a6c Reviewed-on: https://dart-review.googlesource.com/42026 Reviewed-by: Ryan Macnak <rmacnak@google.com> Commit-Queue: Zach Anderson <zra@google.com>
This commit is contained in:
parent
629bef0a91
commit
0d5cf900b0
|
@ -340,6 +340,12 @@ action("generate_version_cc_file") {
|
|||
rebase_path("vm/version_in.cc", root_build_dir),
|
||||
]
|
||||
if (!dart_version_git_info) {
|
||||
args += [ "--ignore_svn_revision" ]
|
||||
args += [ "--no_git_hash" ]
|
||||
}
|
||||
if (dart_custom_version_for_pub != "") {
|
||||
args += [
|
||||
"--custom_for_pub",
|
||||
dart_custom_version_for_pub,
|
||||
]
|
||||
}
|
||||
}
|
||||
|
|
|
@ -56,4 +56,17 @@ declare_args() {
|
|||
# Whether the Dart binary version string should include the git hash and
|
||||
# git commit time.
|
||||
dart_version_git_info = true
|
||||
|
||||
# When this argument is a non-empty string, the version reported by the
|
||||
# Dart VM will be one that is compatible with pub's interpretation of
|
||||
# semantic version strings. The version string will also include the values
|
||||
# of the argument. In particular the version string will read:
|
||||
#
|
||||
# "M.m.p-dev.x.x-$(dart_custom_version_for_pub)-$(short_git_hash)"
|
||||
#
|
||||
# Where 'M', 'm', and 'p' are the major, minor and patch version numbers,
|
||||
# and 'dev.x.x' is the dev version tag most recently preceding the current
|
||||
# revision. The short git hash can be omitted by setting
|
||||
# dart_version_git_info=false
|
||||
dart_custom_version_for_pub = ""
|
||||
}
|
||||
|
|
|
@ -40,14 +40,21 @@ VM_SNAPSHOT_FILES=[
|
|||
'symbols.cc',
|
||||
]
|
||||
|
||||
def makeVersionString(quiet, no_svn):
|
||||
version_string = utils.GetSemanticSDKVersion(ignore_svn_revision=no_svn)
|
||||
def MakeVersionString(quiet, no_git_hash, custom_for_pub=None):
  """Build the Dart version string.

  When custom_for_pub is given, produces a pub-compatible semantic version
  based on the latest dev tag; otherwise delegates to
  utils.GetSemanticSDKVersion. Logs the result unless quiet is set.
  """
  if not custom_for_pub:
    version_string = utils.GetSemanticSDKVersion(no_git_hash=no_git_hash)
  elif no_git_hash:
    version_string = "%s-%s" % (utils.GetLatestDevTag(), custom_for_pub)
  else:
    version_string = "%s-%s-%s" % (
        utils.GetLatestDevTag(), custom_for_pub, utils.GetShortGitHash())
  if not quiet:
    debugLog("Returning version string: %s " % version_string)
  return version_string
|
||||
|
||||
|
||||
def makeSnapshotHashString():
|
||||
def MakeSnapshotHashString():
|
||||
vmhash = hashlib.md5()
|
||||
for vmfilename in VM_SNAPSHOT_FILES:
|
||||
vmfilepath = os.path.join(utils.DART_DIR, 'runtime', 'vm', vmfilename)
|
||||
|
@ -56,17 +63,17 @@ def makeSnapshotHashString():
|
|||
return vmhash.hexdigest()
|
||||
|
||||
|
||||
def makeFile(quiet, output_file, input_file, ignore_svn_revision):
|
||||
def MakeFile(quiet, output_file, input_file, no_git_hash, custom_for_pub):
|
||||
version_cc_text = open(input_file).read()
|
||||
version_string = makeVersionString(quiet, ignore_svn_revision)
|
||||
version_string = MakeVersionString(quiet, no_git_hash, custom_for_pub)
|
||||
version_cc_text = version_cc_text.replace("{{VERSION_STR}}",
|
||||
version_string)
|
||||
version_time = utils.GetGitTimestamp()
|
||||
if ignore_svn_revision or version_time == None:
|
||||
if no_git_hash or version_time == None:
|
||||
version_time = "Unknown timestamp"
|
||||
version_cc_text = version_cc_text.replace("{{COMMIT_TIME}}",
|
||||
version_time)
|
||||
snapshot_hash = makeSnapshotHashString()
|
||||
snapshot_hash = MakeSnapshotHashString()
|
||||
version_cc_text = version_cc_text.replace("{{SNAPSHOT_HASH}}",
|
||||
snapshot_hash)
|
||||
open(output_file, 'w').write(version_cc_text)
|
||||
|
@ -77,18 +84,27 @@ def main(args):
|
|||
try:
|
||||
# Parse input.
|
||||
parser = OptionParser()
|
||||
parser.add_option("-q", "--quiet",
|
||||
action="store_true", default=False,
|
||||
help="disable console output")
|
||||
parser.add_option("--ignore_svn_revision",
|
||||
action="store_true", default=False,
|
||||
help="Don't try to determine svn revision")
|
||||
parser.add_option("--output",
|
||||
action="store", type="string",
|
||||
help="output file name")
|
||||
parser.add_option("--custom_for_pub",
|
||||
action="store",
|
||||
type="string",
|
||||
help=("Generates a version string that works with pub that includes"
|
||||
"the given string"))
|
||||
parser.add_option("--input",
|
||||
action="store", type="string",
|
||||
help="input template file")
|
||||
action="store",
|
||||
type="string",
|
||||
help="input template file")
|
||||
parser.add_option("--no_git_hash",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Don't try to determine svn revision")
|
||||
parser.add_option("--output",
|
||||
action="store",
|
||||
type="string",
|
||||
help="output file name")
|
||||
parser.add_option("-q", "--quiet",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="disable console output")
|
||||
|
||||
(options, args) = parser.parse_args()
|
||||
if not options.output:
|
||||
|
@ -102,8 +118,8 @@ def main(args):
|
|||
for arg in args:
|
||||
files.append(arg)
|
||||
|
||||
if not makeFile(options.quiet, options.output, options.input,
|
||||
options.ignore_svn_revision):
|
||||
if not MakeFile(options.quiet, options.output, options.input,
|
||||
options.no_git_hash, options.custom_for_pub):
|
||||
return -1
|
||||
|
||||
return 0
|
||||
|
|
|
@ -217,7 +217,7 @@ def ExecuteCommand(options, args):
|
|||
options.pub_executable,
|
||||
options.pub_snapshot,
|
||||
['build',
|
||||
'-DOBS_VER=' + utils.GetVersion(ignore_svn_revision=True),
|
||||
'-DOBS_VER=' + utils.GetVersion(no_git_hash=True),
|
||||
'--output', args[0]],
|
||||
options.silent)
|
||||
elif (cmd == 'deploy'):
|
||||
|
|
|
@ -30,10 +30,12 @@ except:
|
|||
DART_DIR = os.path.abspath(
|
||||
os.path.normpath(os.path.join(__file__, '..', '..')))
|
||||
|
||||
|
||||
def GetBotUtils():
  '''Dynamically load the tools/bots/bot_utils.py python module.'''
  bot_utils_path = os.path.join(DART_DIR, 'tools', 'bots', 'bot_utils.py')
  return imp.load_source('bot_utils', bot_utils_path)
|
||||
|
||||
|
||||
class Version(object):
|
||||
def __init__(self, channel, major, minor, patch, prerelease,
|
||||
prerelease_patch):
|
||||
|
@ -44,6 +46,7 @@ class Version(object):
|
|||
self.prerelease = prerelease
|
||||
self.prerelease_patch = prerelease_patch
|
||||
|
||||
|
||||
# Try to guess the host operating system.
|
||||
def GuessOS():
|
||||
os_id = platform.system()
|
||||
|
@ -106,6 +109,7 @@ def GuessCpus():
|
|||
return int(win_cpu_count)
|
||||
return 2
|
||||
|
||||
|
||||
def GetWindowsRegistryKeyName(name):
|
||||
import win32process
|
||||
# Check if python process is 64-bit or if it's 32-bit running in 64-bit OS.
|
||||
|
@ -117,6 +121,7 @@ def GetWindowsRegistryKeyName(name):
|
|||
wow6432Node = ''
|
||||
return r'SOFTWARE\%s%s' % (wow6432Node, name)
|
||||
|
||||
|
||||
# Try to guess Visual Studio location when building on Windows.
|
||||
def GuessVisualStudioPath():
|
||||
defaultPath = r"C:\Program Files (x86)\Microsoft Visual Studio 14.0\Common7" \
|
||||
|
@ -197,6 +202,7 @@ def ReadLinesFrom(name):
|
|||
result.append(line)
|
||||
return result
|
||||
|
||||
|
||||
# Filters out all arguments until the next '--' argument
|
||||
# occurs.
|
||||
def ListArgCallback(option, value, parser):
|
||||
|
@ -265,23 +271,28 @@ BASE_DIR = os.path.abspath(os.path.join(os.curdir, '..'))
|
|||
DART_DIR = os.path.abspath(os.path.join(__file__, '..', '..'))
|
||||
VERSION_FILE = os.path.join(DART_DIR, 'tools', 'VERSION')
|
||||
|
||||
|
||||
def GetBuildbotGSUtilPath():
  """Return the buildbot slave's gsutil location for the current platform."""
  if platform.system() == 'Windows':
    return 'e:\\\\b\\build\\scripts\\slave\\gsutil'
  return '/b/build/scripts/slave/gsutil'
|
||||
|
||||
|
||||
def GetBuildMode(mode):
  """Map a build mode name to its build-directory component via BUILD_MODES."""
  return BUILD_MODES[mode]
|
||||
|
||||
|
||||
def GetArchFamily(arch):
  """Map an architecture name to its family via the ARCH_FAMILY table."""
  return ARCH_FAMILY[arch]
|
||||
|
||||
|
||||
def IsCrossBuild(target_os, arch):
  """Return True when the target arch family or target OS differs from host."""
  # Keep the arch check first so GuessOS() is only consulted when needed.
  if GetArchFamily(ARCH_GUESS) != GetArchFamily(arch):
    return True
  return target_os != GuessOS()
|
||||
|
||||
|
||||
def GetBuildConf(mode, arch, conf_os=None):
|
||||
if conf_os == 'android':
|
||||
return '%s%s%s' % (GetBuildMode(mode), conf_os.title(), arch.upper())
|
||||
|
@ -293,35 +304,41 @@ def GetBuildConf(mode, arch, conf_os=None):
|
|||
cross_build = 'X'
|
||||
return '%s%s%s' % (GetBuildMode(mode), cross_build, arch.upper())
|
||||
|
||||
|
||||
def GetBuildDir(host_os):
  """Look up the build output directory for host_os in BUILD_ROOT."""
  return BUILD_ROOT[host_os]
|
||||
|
||||
|
||||
def GetBuildRoot(host_os, mode=None, arch=None, target_os=None):
  """Return the build output root; adds the configuration subdir when mode is set."""
  root = GetBuildDir(host_os)
  if not mode:
    return root
  return os.path.join(root, GetBuildConf(mode, arch, target_os))
|
||||
|
||||
|
||||
def GetBuildSdkBin(host_os, mode=None, arch=None, target_os=None):
  """Return the path to the built SDK's bin directory for this configuration."""
  return os.path.join(
      GetBuildRoot(host_os, mode, arch, target_os), 'dart-sdk', 'bin')
|
||||
|
||||
|
||||
def GetBaseDir():
  """Return the module-level BASE_DIR path."""
  return BASE_DIR
|
||||
|
||||
|
||||
def GetShortVersion():
  """Return the dotted five-part version read from the VERSION file."""
  v = ReadVersionFile()
  fields = (v.major, v.minor, v.patch, v.prerelease, v.prerelease_patch)
  return '%s.%s.%s.%s.%s' % fields
|
||||
|
||||
def GetSemanticSDKVersion(ignore_svn_revision=False):
|
||||
|
||||
def GetSemanticSDKVersion(no_git_hash=False):
|
||||
version = ReadVersionFile()
|
||||
if not version:
|
||||
return None
|
||||
|
||||
if version.channel == 'be':
|
||||
postfix = '-edge' if ignore_svn_revision else '-edge.%s' % GetGitRevision()
|
||||
postfix = '-edge' if no_git_hash else '-edge.%s' % GetGitRevision()
|
||||
elif version.channel == 'dev':
|
||||
postfix = '-dev.%s.%s' % (version.prerelease, version.prerelease_patch)
|
||||
else:
|
||||
|
@ -330,8 +347,10 @@ def GetSemanticSDKVersion(ignore_svn_revision=False):
|
|||
|
||||
return '%s.%s.%s%s' % (version.major, version.minor, version.patch, postfix)
|
||||
|
||||
def GetVersion(ignore_svn_revision=False):
|
||||
return GetSemanticSDKVersion(ignore_svn_revision)
|
||||
|
||||
def GetVersion(no_git_hash=False):
  """Alias for GetSemanticSDKVersion, kept for callers using the older name."""
  return GetSemanticSDKVersion(no_git_hash=no_git_hash)
|
||||
|
||||
|
||||
# The editor used to produce the VERSION file put on gcs. We now produce this
|
||||
# in the bots archiving the sdk.
|
||||
|
@ -347,16 +366,19 @@ def GetVersionFileContent():
|
|||
"revision": GetGitRevision()}
|
||||
return json.dumps(result, indent=2)
|
||||
|
||||
|
||||
def GetChannel():
  """Return the release channel recorded in the VERSION file."""
  return ReadVersionFile().channel
|
||||
|
||||
|
||||
def GetUserName():
  """Return the current user's name from the environment ('' if unset)."""
  env_key = 'USERNAME' if sys.platform == 'win32' else 'USER'
  return os.environ.get(env_key, '')
|
||||
|
||||
|
||||
def ReadVersionFile():
|
||||
def match_against(pattern, file_content):
|
||||
match = re.search(pattern, file_content, flags=re.MULTILINE)
|
||||
|
@ -425,6 +447,37 @@ def GetGitRevision():
|
|||
return None
|
||||
return output
|
||||
|
||||
|
||||
def GetShortGitHash():
  """Return the abbreviated hash of HEAD in DART_DIR, or None if git fails."""
  git_cmd = ['git', 'log', '-n', '1', '--pretty=format:%h']
  proc = subprocess.Popen(git_cmd,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT,
                          shell=IsWindows(),
                          cwd=DART_DIR)
  output, _ = proc.communicate()
  if proc.wait() != 0:
    return None
  return output
|
||||
|
||||
|
||||
def GetLatestDevTag():
  """Return the most recently created dev tag name, or None if git fails."""
  git_cmd = [
      'git',
      'for-each-ref',
      'refs/tags/*dev*',
      '--sort=-taggerdate',
      "--format=%(refname:lstrip=2)",
      '--count=1',
  ]
  proc = subprocess.Popen(git_cmd,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT,
                          shell=IsWindows(),
                          cwd=DART_DIR)
  output, _ = proc.communicate()
  if proc.wait() != 0:
    return None
  return output.strip()
|
||||
|
||||
|
||||
def GetGitTimestamp():
|
||||
p = subprocess.Popen(['git', 'log', '-n', '1', '--pretty=format:%cd'],
|
||||
stdout = subprocess.PIPE,
|
||||
|
@ -435,6 +488,7 @@ def GetGitTimestamp():
|
|||
return None
|
||||
return output
|
||||
|
||||
|
||||
# To eliminate clashing with older archived builds on bleeding edge we add
|
||||
# a base number bigger the largest svn revision (this also gives us an easy
|
||||
# way of seeing if an archive comes from git based or svn based commits).
|
||||
|
@ -452,6 +506,7 @@ def GetGitNumber():
|
|||
print "Warning: could not parse git count, output was %s" % output
|
||||
return None
|
||||
|
||||
|
||||
def ParseGitInfoOutput(output):
|
||||
"""Given a git log, determine the latest corresponding svn revision."""
|
||||
for line in output.split('\n'):
|
||||
|
@ -460,12 +515,14 @@ def ParseGitInfoOutput(output):
|
|||
return tokens[1].split('@')[1]
|
||||
return None
|
||||
|
||||
|
||||
def ParseSvnInfoOutput(output):
  """Extract the revision number from `svn info` output, or None if absent."""
  match = re.search('Last Changed Rev: (\d+)', output)
  return match.group(1) if match else None
|
||||
|
||||
|
||||
def RewritePathSeparator(path, workspace):
|
||||
# Paths in test files are always specified using '/'
|
||||
# as the path separator. Replace with the actual
|
||||
|
@ -559,6 +616,7 @@ def Main():
|
|||
print "GetVersionFileContent() -> ", GetVersionFileContent()
|
||||
print "GetGitNumber() -> ", GetGitNumber()
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
pass
|
||||
|
||||
|
@ -683,6 +741,7 @@ class TempDir(object):
|
|||
def __exit__(self, *_):
|
||||
shutil.rmtree(self._temp_dir, ignore_errors=True)
|
||||
|
||||
|
||||
class ChangedWorkingDirectory(object):
|
||||
def __init__(self, working_directory):
|
||||
self._working_directory = working_directory
|
||||
|
@ -706,6 +765,7 @@ class UnexpectedCrash(object):
|
|||
def __str__(self):
|
||||
return "Crash(%s: %s %s)" % (self.test, self.binary, self.pid)
|
||||
|
||||
|
||||
class SiteConfigBotoFileDisabler(object):
|
||||
def __init__(self):
|
||||
self._old_aws = None
|
||||
|
@ -726,6 +786,7 @@ class SiteConfigBotoFileDisabler(object):
|
|||
if self._old_boto:
|
||||
os.environ['BOTO_CONFIG'] = self._old_boto
|
||||
|
||||
|
||||
class PosixCoreDumpEnabler(object):
|
||||
def __init__(self):
|
||||
self._old_limits = None
|
||||
|
@ -737,6 +798,7 @@ class PosixCoreDumpEnabler(object):
|
|||
def __exit__(self, *_):
|
||||
resource.setrlimit(resource.RLIMIT_CORE, self._old_limits)
|
||||
|
||||
|
||||
# TODO(whesse): Re-enable after issue #30205 is addressed
|
||||
class LinuxCoreDumpEnabler(PosixCoreDumpEnabler):
|
||||
def __enter__(self):
|
||||
|
@ -750,6 +812,7 @@ class LinuxCoreDumpEnabler(PosixCoreDumpEnabler):
|
|||
# CheckLinuxCoreDumpPattern(fatal=True)
|
||||
# super(LinuxCoreDumpEnabler, self).__exit__(*args)
|
||||
|
||||
|
||||
class WindowsCoreDumpEnabler(object):
|
||||
"""Configure Windows Error Reporting to store crash dumps.
|
||||
|
||||
|
@ -835,6 +898,7 @@ class WindowsCoreDumpEnabler(object):
|
|||
handle.Close()
|
||||
self.winreg.DeleteKeyEx(key, subkey, wowbit, 0)
|
||||
|
||||
|
||||
class BaseCoreDumpArchiver(object):
|
||||
"""This class reads coredumps file written by UnexpectedCrashDumpArchiver
|
||||
into the current working directory and uploads all cores and binaries
|
||||
|
@ -976,14 +1040,17 @@ class PosixCoreDumpArchiver(BaseCoreDumpArchiver):
|
|||
if os.path.exists(core_filename):
|
||||
return core_filename
|
||||
|
||||
|
||||
class LinuxCoreDumpArchiver(PosixCoreDumpArchiver):
|
||||
def __init__(self):
|
||||
super(LinuxCoreDumpArchiver, self).__init__(os.getcwd())
|
||||
|
||||
|
||||
class MacOSCoreDumpArchiver(PosixCoreDumpArchiver):
|
||||
def __init__(self):
|
||||
super(MacOSCoreDumpArchiver, self).__init__('/cores')
|
||||
|
||||
|
||||
class WindowsCoreDumpArchiver(BaseCoreDumpArchiver):
|
||||
def __init__(self):
|
||||
super(WindowsCoreDumpArchiver, self).__init__(os.path.join(
|
||||
|
@ -1030,10 +1097,12 @@ class WindowsCoreDumpArchiver(BaseCoreDumpArchiver):
|
|||
missing_as_string = ', '.join([str(c) for c in missing])
|
||||
raise Exception('Missing crash dumps for: %s' % missing_as_string)
|
||||
|
||||
|
||||
@contextlib.contextmanager
def NooptCoreDumpArchiver():
  """Context manager that archives nothing; used when coredump copying is off."""
  yield
|
||||
|
||||
|
||||
def CoreDumpArchiver(args):
|
||||
enabled = '--copy-coredumps' in args
|
||||
|
||||
|
|
Loading…
Reference in a new issue