mirror of
https://github.com/flatcar/scripts.git
synced 2026-05-04 19:56:32 +02:00
Remove chromite from crosutils.git. It's been moved to chromite.git.
TEST=chromite works in new location
BUG=chromium-os:11507
Change-Id: I8e5dee287dd0120f1cd01966953b79cf53a14790
Review URL: http://codereview.chromium.org/6371018
This commit is contained in:
parent
17d44e3659
commit
ac65e1e20a
@ -1,275 +0,0 @@
|
||||
#!/usr/bin/python2.6
|
||||
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import optparse
|
||||
import os
|
||||
import multiprocessing
|
||||
import sys
|
||||
import tempfile
|
||||
sys.path.insert(0, os.path.abspath(__file__ + "/../../lib"))
|
||||
from cros_build_lib import Die
|
||||
from cros_build_lib import Info
|
||||
from cros_build_lib import RunCommand
|
||||
from cros_build_lib import Warning
|
||||
|
||||
|
||||
def BuildPackages():
  """Build packages according to options specified on command-line.

  Requires superuser access. Parses the command-line options, computes the
  list of packages to install, then either extracts a prebuilt tarball or
  builds everything from source.
  """
  if os.getuid() != 0:
    Die("superuser access required")

  scripts_dir = os.path.abspath(__file__ + "/../../..")
  builder = PackageBuilder(scripts_dir)
  options, _ = builder.ParseArgs()

  # Calculate packages to install, honoring the --nowith* flags.
  # TODO(davidjames): Grab these from a spec file.
  optional_packages = [
      (options.withdev, "chromeos-base/chromeos-dev"),
      (options.withfactory, "chromeos-base/chromeos-factoryinstall"),
      (options.withtest, "chromeos-base/chromeos-test"),
  ]
  packages = ["chromeos-base/chromeos"]
  packages += [pkg for enabled, pkg in optional_packages if enabled]

  # Prebuilt tarball is the fast path; otherwise build it ourselves.
  if options.usetarball:
    builder.ExtractTarball(options, packages)
  else:
    builder.BuildTarball(options, packages)
|
||||
def _Apply(args):
  """Call the function in args[0] with the arguments in args[1:].

  Args:
    args: A sequence whose first element is a callable and whose remaining
      elements are the positional arguments to pass to it.

  Returns:
    Whatever the callable returns.
  """
  # apply() has been deprecated since Python 2.3 (and is removed in
  # Python 3); star-unpacking is the supported equivalent.
  return args[0](*args[1:])
||||
|
||||
def _GetLatestPrebuiltPrefix(board):
  """Get the latest prebuilt prefix for the specified board.

  Args:
    board: The board you want prebuilts for.

  Returns:
    Latest prebuilt prefix (base URL + latest directory name).
  """
  # TODO(davidjames): Also append profile names here.
  prefix = "http://commondatastorage.googleapis.com/chromeos-prebuilt/board"
  # The <board>-latest file on the server names the newest prebuilt dir.
  latest_file = tempfile.NamedTemporaryFile()
  _Run("curl '%s/%s-latest' -o %s" % (prefix, board, latest_file.name),
       retries=3)
  latest_file.seek(0)
  latest = latest_file.read().strip()
  latest_file.close()
  return "%s/%s" % (prefix, latest)
|
||||
def _GetPrebuiltDownloadCommands(prefix):
  """Return a list of commands for grabbing packages.

  There must be a file called "packages/Packages" that contains the list of
  packages. The specified list of commands will fill the packages directory
  with the bzipped packages from the specified prefix. Package directories
  are created eagerly as a side effect.

  Args:
    prefix: Url prefix to download packages from.

  Returns:
    List of curl commands for grabbing packages not already present.
  """
  cmds = []
  # open() replaces the deprecated file() builtin (removed in Python 3),
  # and the with-statement guarantees the index is closed.
  with open("packages/Packages") as pkg_index:
    for line in pkg_index:
      if line.startswith("CPV: "):
        pkgpath, pkgname = line.replace("CPV: ", "").strip().split("/")
        path = "%s/%s.tbz2" % (pkgpath, pkgname)
        url = "%s/%s" % (prefix, path)
        dirname = "packages/%s" % pkgpath
        fullpath = "packages/%s" % path
        if not os.path.exists(dirname):
          os.makedirs(dirname)
        # Skip packages that were already downloaded.
        if not os.path.exists(fullpath):
          cmds.append("curl -s %s -o %s" % (url, fullpath))
  return cmds
||||
|
||||
def _Run(cmd, retries=0):
  """Run the specified shell command, retrying on failure.

  If the command still fails once the retries are exhausted, the program
  exits with an appropriate error message (via Die).

  Args:
    cmd: The command to run.
    retries: If exit code is non-zero, retry this many times.
  """
  # TODO(davidjames): Move this to common library.
  attempts_left = retries + 1
  while attempts_left:
    result = RunCommand(cmd, shell=True, exit_code=True, error_ok=True)
    if result.returncode == 0:
      Info("Command succeeded: %s" % cmd)
      break
    Warning("Command failed: %s" % cmd)
    attempts_left -= 1
  else:
    # Loop exhausted without a successful run.
    Die("Command failed, exiting: %s" % cmd)
|
||||
def _RunManyParallel(cmds, retries=0):
  """Run the provided list of shell commands in parallel.

  To work around a bug in the multiprocessing module, we use map_async
  instead of the usual map function. See http://bugs.python.org/issue9205

  Args:
    cmds: List of commands to run.
    retries: Number of retries per command.
  """
  # TODO(davidjames): Move this to common library.
  pool = multiprocessing.Pool()
  call_specs = [(_Run, cmd, retries) for cmd in cmds]
  async_result = pool.map_async(_Apply, call_specs, chunksize=1)
  # Poll with a one-hour timeout so the wait stays interruptible.
  done = False
  while not done:
    try:
      async_result.get(60*60)
      done = True
    except multiprocessing.TimeoutError:
      pass
|
||||
class PackageBuilder(object):
  """A class for building and extracting tarballs of Chromium OS packages."""

  def __init__(self, scripts_dir):
    # Path to the src/scripts directory; used to locate helper scripts
    # such as setup_board and parallel_emerge.
    self.scripts_dir = scripts_dir

  def BuildTarball(self, options, packages):
    """Build a tarball with the specified packages.

    Args:
      options: Options object, as output by ParseArgs.
      packages: List of packages to build.
    """
    board = options.board

    # Run setup_board. TODO(davidjames): Integrate the logic used in
    # setup_board into chromite.
    _Run("%s/setup_board --force --board=%s" % (self.scripts_dir, board))

    # Create complete build directory
    _Run(self._EmergeBoardCmd(options, packages))

    # Archive build directory as tarballs: one for the build root (minus
    # debug symbols and packages) and one for the debug symbols.
    os.chdir("/build/%s" % board)
    cmds = [
      "tar -c --wildcards --exclude='usr/lib/debug/*' "
      "--exclude='packages/*' * | pigz -c > packages/%s-build.tgz" % board,
      "tar -c usr/lib/debug/* | pigz -c > packages/%s-debug.tgz" % board
    ]

    # Run list of commands.
    _RunManyParallel(cmds)

  def ExtractTarball(self, options, packages):
    """Extract the latest build tarball, then update the specified packages.

    Args:
      options: Options object, as output by ParseArgs.
      packages: List of packages to update.
    """
    board = options.board
    prefix = _GetLatestPrebuiltPrefix(board)

    # If the user doesn't have emerge-${BOARD} setup yet, we need to run
    # setup_board. TODO(davidjames): Integrate the logic used in setup_board
    # into chromite.
    if not os.path.exists("/usr/local/bin/emerge-%s" % board):
      _Run("%s/setup_board --force --board=%s" % (self.scripts_dir, board))

    # Delete old build directory. This process might take a while, so do it
    # in the background (the rm is queued and run in parallel below).
    cmds = []
    if os.path.exists("/build/%s" % board):
      tempdir = tempfile.mkdtemp()
      _Run("mv /build/%s %s" % (board, tempdir))
      cmds.append("rm -rf %s" % tempdir)

    # Create empty build directory, and chdir into it.
    os.makedirs("/build/%s/packages" % board)
    os.chdir("/build/%s" % board)

    # Download and expand build tarball.
    build_url = "%s/%s-build.tgz" % (prefix, board)
    cmds.append("curl -s %s | tar -xz" % build_url)

    # Download and expand debug tarball (if requested).
    if options.debug:
      debug_url = "%s/%s-debug.tgz" % (prefix, board)
      cmds.append("curl -s %s | tar -xz" % debug_url)

    # Download prebuilt packages.
    _Run("curl '%s/Packages' -o packages/Packages" % prefix, retries=3)
    cmds.extend(_GetPrebuiltDownloadCommands(prefix))

    # Run list of commands, with three retries per command, in case the
    # network is flaky.
    _RunManyParallel(cmds, retries=3)

    # Emerge remaining packages.
    _Run(self._EmergeBoardCmd(options, packages))

  def ParseArgs(self):
    """Parse arguments from the command line using optparse.

    Returns:
      The (options, args) tuple from OptionParser.parse_args().
    """
    # TODO(davidjames): We should use spec files for this.
    default_board = self._GetDefaultBoard()
    parser = optparse.OptionParser()
    parser.add_option("--board", dest="board", default=default_board,
                      help="The board to build packages for.")
    parser.add_option("--debug", action="store_true", dest="debug",
                      default=False, help="Include debug symbols.")
    parser.add_option("--nowithdev", action="store_false", dest="withdev",
                      default=True,
                      help="Don't build useful developer friendly utilities.")
    parser.add_option("--nowithtest", action="store_false", dest="withtest",
                      default=True, help="Build packages required for testing.")
    parser.add_option("--nowithfactory", action="store_false",
                      dest="withfactory", default=True,
                      help="Build factory installer")
    parser.add_option("--nousepkg", action="store_false",
                      dest="usepkg", default=True,
                      help="Don't use binary packages.")
    parser.add_option("--nousetarball", action="store_false",
                      dest="usetarball", default=True,
                      help="Don't use tarball.")
    parser.add_option("--nofast", action="store_false", dest="fast",
                      default=True,
                      help="Don't merge packages in parallel.")
    return parser.parse_args()

  def _EmergeBoardCmd(self, options, packages):
    """Calculate board emerge command.

    Args:
      options: Options object, as output by ParseArgs.
      packages: List of packages to emerge.

    Returns:
      The emerge command line as a single shell string.
    """
    board = options.board
    scripts_dir = self.scripts_dir
    emerge_board = "emerge-%s" % board
    if options.fast:
      # parallel_emerge wraps emerge to merge packages in parallel.
      emerge_board = "%s/parallel_emerge --board=%s" % (scripts_dir, board)
    usepkg = ""
    if options.usepkg:
      # Appends emerge's "g" flag (-uDNvg) when binary packages are allowed.
      usepkg = "g"
    return "%s -uDNv%s %s" % (emerge_board, usepkg, " ".join(packages))

  def _GetDefaultBoard(self):
    """Get the default board configured by the user.

    Returns:
      Contents of <scripts_dir>/.default_board (stripped), or None if the
      file doesn't exist.
    """
    default_board_file = "%s/.default_board" % self.scripts_dir
    default_board = None
    if os.path.exists(default_board_file):
      # open() replaces the deprecated file() builtin, and the context
      # manager closes the handle (the original implementation leaked it).
      with open(default_board_file) as board_file:
        default_board = board_file.read().strip()
    return default_board
|
||||
# Entry point when run as a script (rather than imported).
if __name__ == "__main__":
  BuildPackages()
|
||||
@ -1,366 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Helper script for printing differences between tags."""
|
||||
|
||||
import cgi
|
||||
from datetime import datetime
|
||||
import operator
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../lib'))
|
||||
from cros_build_lib import RunCommand
|
||||
|
||||
|
||||
# TODO(dianders):
# We use GData to access the tracker on code.google.com. Eventually, we
# want to create an ebuild and add the ebuild to hard-host-depends
# For now, we'll just include instructions for installing it.
INSTRS_FOR_GDATA = """
To access the tracker you need the GData library. To install in your home dir:

GDATA_INSTALL_DIR=~/gdatalib
mkdir -p "$GDATA_INSTALL_DIR"

TMP_DIR=`mktemp -d`
pushd $TMP_DIR
wget http://gdata-python-client.googlecode.com/files/gdata-2.0.12.zip
unzip gdata-2.0.12.zip
cd gdata-2.0.12/
python setup.py install --home="$GDATA_INSTALL_DIR"
popd

export PYTHONPATH="$GDATA_INSTALL_DIR/lib/python:$PYTHONPATH"

You should add the PYTHONPATH line to your .bashrc file (or equivalent)."""


# Tracker project assumed when a BUG= entry gives only a bare issue number
# (used as the initial "last tracker" in Commit._GetIssues).
DEFAULT_TRACKER = 'chromium-os'
||||
|
||||
def _GrabOutput(cmd):
  """Run cmd in a shell (quietly) and return its stdout as a string."""
  result = RunCommand(cmd, shell=True, print_cmd=False, redirect_stdout=True)
  return result.output
||||
|
||||
def _GrabTags():
  """Returns list of tags from current git repository."""
  # TODO(dianders): replace this with the python equivalent.
  # Shell pipeline: list tag refs, keep the ref name, strip the refs/tags/
  # prefix, and sort by the version fields.
  pipeline = [
      "git for-each-ref refs/tags",
      "awk '{print $3}'",
      "sed 's,refs/tags/,,g'",
      "sort -t. -k3,3rn -k4,4rn",
  ]
  return _GrabOutput(" | ".join(pipeline)).split()
||||
|
||||
def _GrabDirs():
  """Returns list of directories managed by repo."""
  repo_cmd = 'repo forall -c "pwd"'
  return _GrabOutput(repo_cmd).split()
||||
|
||||
class Issue(object):
  """Class for holding info about issues (aka bugs)."""

  def __init__(self, project_name, issue_id, tracker_acc):
    """Constructor for Issue object.

    Args:
      project_name: The tracker project to query.
      issue_id: The ID of the issue to query.
      tracker_acc: A TrackerAccess object, or None.
    """
    self.project_name = project_name
    self.issue_id = issue_id
    self.milestone = ''
    self.priority = ''

    # With tracker access we decorate the issue with its keyed labels;
    # without it the milestone/priority simply stay blank.
    if tracker_acc is not None:
      keyed_labels = tracker_acc.GetKeyedLabels(project_name, issue_id)
      self.milestone = keyed_labels.get('Mstone', '')
      self.priority = keyed_labels.get('Pri', '')

  def GetUrl(self):
    """Returns the URL to access the issue."""
    # Short crosbug.com aliases keep the report a bit more readable.
    short_url_fmts = {
        'chromium-os': 'http://crosbug.com/%s',
        'chrome-os-partner': 'http://crosbug.com/p/%s',
    }
    if self.project_name in short_url_fmts:
      return short_url_fmts[self.project_name] % self.issue_id
    bug_url_fmt = 'http://code.google.com/p/%s/issues/detail?id=%s'
    return bug_url_fmt % (self.project_name, self.issue_id)

  def __str__(self):
    """Provides a string representation of the issue.

    Returns:
      A string that looks something like:

        project:id (milestone, priority)
    """
    details = []
    if self.milestone:
      details.append(self.milestone)
    if self.priority:
      details.append('P%s' % self.priority)
    info_str = ' (%s)' % ', '.join(details) if details else ''
    return '%s:%s%s' % (self.project_name, self.issue_id, info_str)

  def __cmp__(self, other):
    """Compare two Issue objects, case-insensitively by project, then id."""
    ours = (self.project_name.lower(), self.issue_id)
    theirs = (other.project_name.lower(), other.issue_id)
    return cmp(ours, theirs)
||||
|
||||
class Commit(object):
  """Class for tracking git commits."""

  def __init__(self, commit, projectname, commit_email, commit_date, subject,
               body, tracker_acc):
    """Create commit logs.

    Args:
      commit: The commit hash (sha) from git.
      projectname: The project name, from:
        git config --get remote.cros.projectname
      commit_email: The email address associated with the commit (%ce in git
        log)
      commit_date: The date of the commit, like "Mon Nov 1 17:34:14 2010 -0500"
        (%cd in git log))
      subject: The subject of the commit (%s in git log)
      body: The body of the commit (%b in git log)
      tracker_acc: A tracker_access.TrackerAccess object.
    """
    self.commit = commit
    self.projectname = projectname
    self.commit_email = commit_email
    # Parse git's textual date into a datetime for sorting.
    date_fmt = '%a %b %d %H:%M:%S %Y'
    self.commit_date = datetime.strptime(commit_date, date_fmt)
    self.subject = subject
    self.body = body
    self._tracker_acc = tracker_acc
    # Must run last: _GetIssues reads the fields assigned above.
    self._issues = self._GetIssues()

  def _GetIssues(self):
    """Get bug info from commit logs and issue tracker.

    This should be called as the last step of __init__, since it
    assumes that our member variables are already setup.

    Returns:
      A list of Issue objects, each of which holds info about a bug.
    """
    # NOTE: most of this code is copied from bugdroid:
    # <http://src.chromium.org/viewvc/chrome/trunk/tools/bugdroid/bugdroid.py?revision=59229&view=markup>

    # Gather the raw BUG= entries. Handle lots of possibilities:
    # - Multiple "BUG=" lines, with varying amounts of whitespace.
    # - For each BUG= line, bugs can be split by commas _or_ by whitespace (!)
    entries = []
    for line in self.body.split('\n'):
      bug_match = re.match(r'^ *BUG *=(.*)', line)
      if not bug_match:
        continue
      for chunk in bug_match.group(1).split(','):
        entries.extend([tok.strip() for tok in chunk.split() if tok.strip()])

    # Try to parse the bugs. Handle lots of different formats:
    # - The whole URL, from which we parse the project and bug.
    # - A simple string that looks like "project:bug"
    # - A string that looks like "bug", which will always refer to the
    #   previous tracker referenced (defaulting to the default tracker).
    #
    # We will create an "Issue" object for each bug.
    issues = []
    last_tracker = DEFAULT_TRACKER
    regex = (r'http://code.google.com/p/(\S+)/issues/detail\?id=([0-9]+)'
             r'|(\S+):([0-9]+)|(\b[0-9]+\b)')

    for entry in entries:
      for url_proj, url_id, proj, proj_id, bare_id in re.findall(regex, entry):
        if url_proj and url_id:
          issues.append(Issue(url_proj, url_id, self._tracker_acc))
          last_tracker = url_proj
        elif proj and proj_id:
          issues.append(Issue(proj, proj_id, self._tracker_acc))
          last_tracker = proj
        elif bare_id:
          issues.append(Issue(last_tracker, bare_id, self._tracker_acc))

    # Sort the issues and return...
    issues.sort()
    return issues

  def AsHTMLTableRow(self):
    """Returns HTML for this change, for printing as part of a table.

    Columns: Project, Date, Commit, Committer, Bugs, Subject.

    Returns:
      A string usable as an HTML table row, like:

        <tr><td>Blah</td><td>Blah blah</td></tr>
    """
    link_fmt = '<a href="%s">%s</a>'
    bugs = [link_fmt % (issue.GetUrl(), str(issue)) for issue in self._issues]

    url_fmt = 'http://chromiumos-git/git/?p=%s.git;a=commitdiff;h=%s'
    url = url_fmt % (self.projectname, self.commit)
    commit_desc = link_fmt % (url, self.commit[:8])

    bug_str = '<br>'.join(bugs)
    if not bug_str:
      # kernel-next and chrome-bot commits are reported as 'not needed'
      # instead of being flagged in red.
      exempt = (self.projectname == 'kernel-next' or
                self.commit_email == 'chrome-bot@chromium.org')
      bug_str = 'not needed' if exempt else '<font color="red">none</font>'

    cols = [
      cgi.escape(self.projectname),
      str(self.commit_date),
      commit_desc,
      cgi.escape(self.commit_email),
      bug_str,
      cgi.escape(self.subject[:100]),
    ]
    return '<tr><td>%s</td></tr>' % ('</td><td>'.join(cols))

  def __cmp__(self, other):
    """Compare two Commit objects first by project name, then by date."""
    by_project = cmp(self.projectname, other.projectname)
    return by_project or cmp(self.commit_date, other.commit_date)
||||
|
||||
def _GrabChanges(path, tag1, tag2, tracker_acc):
  """Return list of commits to path between tag1 and tag2.

  Args:
    path: One of the directories managed by repo.
    tag1: The first of the two tags to pass to git log.
    tag2: The second of the two tags to pass to git log.
    tracker_acc: A tracker_access.TrackerAccess object.

  Returns:
    A list of "Commit" objects.
  """
  cmd = 'cd %s && git config --get remote.cros.projectname' % path
  projectname = _GrabOutput(cmd).strip()

  # Each record starts with a NUL (%x00) so commits can be split apart
  # reliably even though bodies may contain tabs and newlines.
  log_fmt = '%x00%H\t%ce\t%cd\t%s\t%b'
  cmd_fmt = 'cd %s && git log --format="%s" --date=local "%s..%s"'
  output = _GrabOutput(cmd_fmt % (path, log_fmt, tag1, tag2))

  commits = []
  for record in output.split('\0')[1:]:
    # maxsplit=4 keeps any tabs inside the body intact.
    sha, email, date, subject, body = record.split('\t', 4)
    commits.append(
        Commit(sha, projectname, email, date, subject, body, tracker_acc))
  return commits
||||
|
||||
def _ParseArgs():
  """Parse command-line arguments.

  Returns:
    The (options, args) tuple from OptionParser.parse_args().
  """
  parser = optparse.OptionParser()
  parser.add_option(
      '--sort-by-date', dest='sort_by_date', default=False,
      action='store_true', help='Sort commits by date.')
  # The three tracker options share the same shape (string, default None).
  for flag, dest, help_text in (
      ('--tracker-user', 'tracker_user',
       'Specify a username to login to code.google.com.'),
      ('--tracker-pass', 'tracker_pass',
       'Specify a password to go w/ user.'),
      ('--tracker-passfile', 'tracker_passfile',
       'Specify a file containing a password to go w/ user.')):
    parser.add_option(flag, dest=dest, default=None, help=help_text)
  return parser.parse_args()
||||
|
||||
def main():
  """Print an HTML changelog (to stdout) for commits between two tags."""
  tags = _GrabTags()
  tag1 = None
  options, args = _ParseArgs()
  if len(args) == 2:
    tag1, tag2 = args
  elif len(args) == 1:
    tag2, = args
    # Single-tag form: diff against the tag that follows tag2 in the tag
    # list (tags are sorted newest-first by _GrabTags, so index+1 is the
    # previous release).
    if tag2 in tags:
      tag2_index = tags.index(tag2)
      if tag2_index == len(tags) - 1:
        print >>sys.stderr, 'No previous tag for %s' % tag2
        sys.exit(1)
      tag1 = tags[tag2_index + 1]
    else:
      print >>sys.stderr, 'Unrecognized tag: %s' % tag2
      sys.exit(1)
  else:
    # Wrong argument count: print usage and bail.
    print >>sys.stderr, 'Usage: %s [tag1] tag2' % sys.argv[0]
    print >>sys.stderr, 'If only one tag is specified, we view the differences'
    print >>sys.stderr, 'between that tag and the previous tag. You can also'
    print >>sys.stderr, 'specify cros/master to show differences with'
    print >>sys.stderr, 'tip-of-tree.'
    print >>sys.stderr, 'E.g. %s %s cros/master' % (sys.argv[0], tags[0])
    sys.exit(1)

  if options.tracker_user is not None:
    # TODO(dianders): Once we install GData automatically, move the import
    # to the top of the file where it belongs. It's only here to allow
    # people to run the script without GData.
    try:
      import tracker_access
    except ImportError:
      print >>sys.stderr, INSTRS_FOR_GDATA
      sys.exit(1)
    if options.tracker_passfile is not None:
      # A passfile overrides any --tracker-pass value.
      options.tracker_pass = open(options.tracker_passfile, 'r').read().strip()
    tracker_acc = tracker_access.TrackerAccess(options.tracker_user,
                                               options.tracker_pass)
  else:
    # No tracker credentials: issues are listed without milestone/priority.
    tracker_acc = None

  print >>sys.stderr, 'Finding differences between %s and %s' % (tag1, tag2)
  paths = _GrabDirs()
  changes = []
  for path in paths:
    changes.extend(_GrabChanges(path, tag1, tag2, tracker_acc))

  # Emit the changelog as a single HTML table on stdout.
  title = 'Changelog for %s to %s' % (tag1, tag2)
  print '<html>'
  print '<head><title>%s</title></head>' % title
  print '<h1>%s</h1>' % title
  cols = ['Project', 'Date', 'Commit', 'Committer', 'Bugs', 'Subject']
  print '<table border="1" cellpadding="4">'
  print '<tr><th>%s</th>' % ('</th><th>'.join(cols))
  # Default sort uses Commit.__cmp__ (project, then date).
  if options.sort_by_date:
    changes.sort(key=operator.attrgetter('commit_date'))
  else:
    changes.sort()
  for change in changes:
    print change.AsHTMLTableRow()
  print '</table>'
  print '</html>'
||||
|
||||
# Entry point when run as a script (rather than imported).
if __name__ == '__main__':
  main()
|
||||
@ -1,59 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Chromite"""
|
||||
|
||||
import ConfigParser
|
||||
import optparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../lib'))
|
||||
from cros_build_lib import Die
|
||||
from cros_build_lib import RunCommand
|
||||
|
||||
|
||||
def chromite_chroot(buildconfig):
  """Stub for the chroot stage of a chromite build; currently a no-op."""
  pass
||||
|
||||
def chromite_build(buildconfig):
  """Stub for the build stage of a chromite build; currently a no-op."""
  pass
||||
|
||||
def chromite_image(buildconfig):
  """Stub for the image stage of a chromite build; currently a no-op."""
  pass
||||
|
||||
def main():
  """Parse a build spec file and run the (stub) chromite build stages."""
  parser = optparse.OptionParser(usage='usage: %prog [options] build.spec')
  parser.add_option('-s', '--spec', default=None,
                    help='Build Spec to build to')
  parser.add_option('-o', '--output-dir', default='./build',
                    help='Output directory of build')
  parser.add_option('-i', '--interactive', default=None,
                    help='Run in interactive build mode')
  (options, inputs) = parser.parse_args()

  # A spec file is mandatory; bail out with usage help otherwise.
  if not options.spec:
    parser.print_help()
    Die('Build Spec required')
  else:
    print "Using build spec.." + options.spec

  # Spec files are INI-style; read into a ConfigParser.
  buildconfig = ConfigParser.SafeConfigParser()
  buildconfig.read(options.spec)

  # Echo the parsed config so the user can see what will be built.
  for section in buildconfig.sections():
    print section
    for option in buildconfig.options(section):
      print "  ", option, "=", buildconfig.get(section, option)

  # Run the build stages in order (all currently stubs).
  chromite_chroot(buildconfig)
  chromite_build(buildconfig)
  chromite_image(buildconfig)
|
||||
# Entry point when run as a script (rather than imported).
if __name__ == '__main__':
  main()
|
||||
@ -1,309 +0,0 @@
|
||||
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
#
|
||||
# Adapted from portage/getbinpkg.py -- Portage binary-package helper functions
|
||||
# Copyright 2003-2004 Gentoo Foundation
|
||||
# Distributed under the terms of the GNU General Public License v2
|
||||
|
||||
import operator
|
||||
import os
|
||||
import tempfile
|
||||
import time
|
||||
import urllib
|
||||
import urllib2
|
||||
|
||||
class PackageIndex(object):
|
||||
"""A parser for the Portage Packages index file.
|
||||
|
||||
The Portage Packages index file serves to keep track of what packages are
|
||||
included in a tree. It contains the following sections:
|
||||
1) The header. The header tracks general key/value pairs that don't apply
|
||||
to any specific package. E.g., it tracks the base URL of the packages
|
||||
file, and the number of packages included in the file. The header is
|
||||
terminated by a blank line.
|
||||
2) The body. The body is a list of packages. Each package contains a list
|
||||
of key/value pairs. Packages are either terminated by a blank line or
|
||||
by the end of the file. Every package has a CPV entry, which serves as
|
||||
a unique identifier for the package.
|
||||
"""
|
||||
|
||||
def __init__(self):
  """Constructor.

  Initializes an empty index: no header fields, no packages, unmodified.
  """
  # General key/value pairs that don't apply to any specific package,
  # e.g. the base URL of the packages.
  self.header = {}
  # The package entries (stored as a list of dictionaries).
  self.packages = []
  # Whether the index changed since the last time it was written.
  self.modified = False
|
||||
def _PopulateDuplicateDB(self, db):
  """Populate db with SHA1 -> URL mapping for packages.

  Args:
    db: Dictionary to populate with SHA1 -> URL mapping for packages.
  """
  base_uri = self.header['URI']
  for pkg in self.packages:
    sha1 = pkg.get('SHA1')
    if not sha1:
      continue
    # Fall back to the conventional <CPV>.tbz2 path when the entry
    # carries no explicit PATH.
    path = pkg.get('PATH', urllib.quote(pkg['CPV'] + '.tbz2'))
    db[sha1] = '%s/%s' % (base_uri.rstrip('/'), path)
|
||||
def _ReadPkgIndex(self, pkgfile):
  """Read one header or package entry from the Packages file.

  Both header entries and package entries are lists of key/value pairs,
  so both can be read by this function. Reading stops at the first blank
  line or at end of file. Keys and values are separated by ': '; lines
  without that separator are ignored, for compatibility with the Portage
  package parser and future format extensions.

  Args:
    pkgfile: A python file object.

  Returns:
    The dictionary of key/value pairs read from the file; empty only if
    the end of the file was reached immediately.
  """
  entry = {}
  for raw_line in pkgfile:
    stripped = raw_line.rstrip('\n')
    if not stripped:
      # A blank line terminates the entry; entries must be non-empty.
      assert entry, 'Packages entry must contain at least one key/value pair'
      break
    key, sep, value = stripped.partition(': ')
    if sep:
      entry[key] = value
  return entry
|
||||
def _WritePkgIndex(self, pkgfile, entry):
  """Write one header or package entry to the packages file.

  Keys and values are joined by ': ', written in sorted key order; pairs
  with falsy values are skipped; the entry is terminated by a blank line.

  Args:
    pkgfile: A python file object.
    entry: A dictionary of the key/value pairs to write.
  """
  lines = []
  for key, value in sorted(entry.items()):
    if value:
      lines.append('%s: %s' % (key, value))
  pkgfile.write('%s\n\n' % '\n'.join(lines))
|
||||
def _ReadHeader(self, pkgfile):
  """Read the header of the packages file into self.header.

  Args:
    pkgfile: A python file object.
  """
  # Guard against double-reads clobbering an already-parsed header.
  assert not self.header, 'Should only read header once.'
  self.header = self._ReadPkgIndex(pkgfile)
|
||||
def _ReadBody(self, pkgfile):
  """Read the body of the packages file into self.packages.

  Before calling this function, you must first read the header (using
  _ReadHeader).

  Args:
    pkgfile: A python file object.
  """
  assert self.header, 'Should read header first.'
  assert not self.packages, 'Should only read body once.'

  # Keep pulling entries until _ReadPkgIndex returns an empty dict, which
  # only happens at end of file.
  while True:
    entry = self._ReadPkgIndex(pkgfile)
    if not entry:
      break
    # Only entries with a CPV identifier are real packages.
    if 'CPV' in entry:
      self.packages.append(entry)
|
||||
def Read(self, pkgfile):
  """Read the entire packages file: header first, then the body.

  Args:
    pkgfile: A python file object.
  """
  self._ReadHeader(pkgfile)
  self._ReadBody(pkgfile)
|
||||
def RemoveFilteredPackages(self, filter_fn):
  """Remove packages for which filter_fn returns True.

  Args:
    filter_fn: A function which operates on packages. If it returns True,
      the package should be removed.
  """
  kept = [pkg for pkg in self.packages if not filter_fn(pkg)]
  # Only mark the index dirty when something was actually dropped.
  if kept != self.packages:
    self.modified = True
  self.packages = kept
|
||||
def ResolveDuplicateUploads(self, pkgindexes):
  """Point packages at files that have already been uploaded.

  For each package in our index, check if there is an existing package that
  has already been uploaded to the same base URI. If so, point that package
  at the existing file, so that we don't have to upload the file.

  Args:
    pkgindexes: A list of PackageIndex objects containing info about
        packages that have already been uploaded.

  Returns:
    A list of the packages that still need to be uploaded.
  """
  # Collect SHA1 -> URI mappings from all previously-uploaded indexes.
  db = {}
  for index in pkgindexes:
    index._PopulateDuplicateDB(db)

  base_uri = self.header['URI']
  uploads = []
  for pkg in self.packages:
    sha1 = pkg.get('SHA1')
    existing_uri = db.get(sha1)
    # Reuse the existing upload only when its URI lives under our base.
    if not (sha1 and existing_uri and existing_uri.startswith(base_uri)):
      uploads.append(pkg)
      continue
    pkg['PATH'] = existing_uri[len(base_uri):].lstrip('/')
  return uploads
|
||||
|
||||
def SetUploadLocation(self, base_uri, path_prefix):
  """Set upload location to base_uri + path_prefix.

  Args:
    base_uri: Base URI for all packages in the file. We set
        self.header['URI'] to this value, so all packages must live under
        this directory.
    path_prefix: Path prefix to use for all current packages in the file.
        This will be added to the beginning of the path for every package.
  """
  self.header['URI'] = base_uri
  prefix = path_prefix.rstrip('/')
  for pkg in self.packages:
    # URL-encode the binary package filename derived from its CPV.
    filename = urllib.quote(pkg['CPV'] + '.tbz2')
    pkg['PATH'] = '%s/%s' % (prefix, filename)
|
||||
|
||||
def Write(self, pkgfile):
  """Write a packages file to disk.

  If 'modified' flag is set, the TIMESTAMP and PACKAGES fields in the header
  will be updated before writing to disk.

  Args:
    pkgfile: A python file object.
  """
  if self.modified:
    # int() instead of the Python-2-only long(): the formatted string is
    # identical for a unix timestamp, and the code also runs on Python 3.
    self.header['TIMESTAMP'] = str(int(time.time()))
    self.header['PACKAGES'] = str(len(self.packages))
    self.modified = False
  self._WritePkgIndex(pkgfile, self.header)
  # Packages are written in deterministic (CPV-sorted) order.
  for metadata in sorted(self.packages, key=operator.itemgetter('CPV')):
    self._WritePkgIndex(pkgfile, metadata)
|
||||
|
||||
def WriteToNamedTemporaryFile(self):
  """Write this package index to a temporary file.

  Returns:
    A tempfile.NamedTemporaryFile containing the packages from this index,
    flushed and rewound to offset 0, ready for reading.  The file is
    deleted automatically when closed.
  """
  f = tempfile.NamedTemporaryFile()
  self.Write(f)
  # Flush so readers of f.name see the data, then rewind for callers that
  # read through this handle directly.
  f.flush()
  f.seek(0)
  return f
|
||||
|
||||
|
||||
def _RetryUrlOpen(url, tries=3):
  """Open the specified url, retrying if we run into temporary errors.

  We retry for both network errors and 5xx Server Errors. We do not retry
  for HTTP errors with a non-5xx code.

  Args:
    url: The specified url.
    tries: The number of times to try.

  Returns:
    The result of urllib2.urlopen(url).

  Raises:
    urllib2.HTTPError: immediately for a non-5xx code, or after the final
        attempt for a 5xx code.
    urllib2.URLError: after the final attempt for a network-level error.
  """
  for i in range(tries):
    try:
      return urllib2.urlopen(url)
    except urllib2.HTTPError as e:
      # Only 5xx server errors are considered transient; anything else
      # (or running out of attempts) is re-raised immediately.
      if i + 1 >= tries or e.code < 500:
        raise
      else:
        print 'Cannot GET %s: %s' % (url, str(e))
    except urllib2.URLError as e:
      # Network-level failures are retried until attempts are exhausted.
      if i + 1 >= tries:
        raise
      else:
        print 'Cannot GET %s: %s' % (url, str(e))
    # Fixed back-off before the next attempt.  Note this line is only
    # reached after a retried (non-raising) failure above.
    print 'Sleeping for 10 seconds before retrying...'
    time.sleep(10)
|
||||
|
||||
|
||||
def GrabRemotePackageIndex(binhost_url):
  """Grab the latest binary package database from the specified URL.

  Args:
    binhost_url: Base URL of remote packages (PORTAGE_BINHOST).

  Returns:
    A PackageIndex object, if the Packages file can be retrieved. If the
    server returns status code 404, None is returned.
  """
  url = '%s/Packages' % binhost_url.rstrip('/')
  try:
    remote_file = _RetryUrlOpen(url)
  except urllib2.HTTPError as e:
    # A missing Packages file is reported as "no index", not an error.
    if e.code != 404:
      raise
    return None

  pkgindex = PackageIndex()
  pkgindex.Read(remote_file)
  # Ensure the header records where these packages came from.
  pkgindex.header.setdefault('URI', binhost_url)
  remote_file.close()
  return pkgindex
|
||||
|
||||
|
||||
def GrabLocalPackageIndex(package_path):
  """Read a local packages file from disk into a PackageIndex() object.

  Args:
    package_path: Directory containing Packages file.

  Returns:
    A PackageIndex object.
  """
  # open() instead of the Python-2-only file() builtin, and try/finally so
  # the handle is closed even when parsing raises.
  packages_file = open(os.path.join(package_path, 'Packages'))
  try:
    pkgindex = PackageIndex()
    pkgindex.Read(packages_file)
  finally:
    packages_file.close()
  return pkgindex
|
||||
@ -1,251 +0,0 @@
|
||||
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Common python commands used by various build scripts."""
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
_STDOUT_IS_TTY = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
|
||||
|
||||
|
||||
class CommandResult(object):
  """An object to store various attributes of a child process."""

  def __init__(self):
    # All fields start unset; RunCommand fills them in after the child
    # process completes.
    for attr in ('cmd', 'error', 'output', 'returncode'):
      setattr(self, attr, None)
||||
|
||||
|
||||
class RunCommandError(Exception):
  """Error caught in RunCommand() method."""
|
||||
|
||||
|
||||
def RunCommand(cmd, print_cmd=True, error_ok=False, error_message=None,
               exit_code=False, redirect_stdout=False, redirect_stderr=False,
               cwd=None, input=None, enter_chroot=False, shell=False):
  """Runs a command.

  Args:
    cmd: cmd to run.  Should be input to subprocess.Popen.
    print_cmd: prints the command before running it.
    error_ok: does not raise an exception on error.
    error_message: prints out this message when an error occurs.
    exit_code: returns the return code of the shell command.
    redirect_stdout: returns the stdout.
    redirect_stderr: holds stderr output until input is communicated.
    cwd: the working directory to run this cmd.
    input: input to pipe into this command through stdin.
    enter_chroot: this command should be run from within the chroot.  If set,
      cwd must point to the scripts directory.
    shell: If shell is True, the specified command will be executed through
      the shell.

  Returns:
    A CommandResult object.

  Raises:
    Exception: Raises generic exception on error with optional error_message.
  """
  # Set default for variables.
  stdout = None
  stderr = None
  stdin = None
  cmd_result = CommandResult()

  # Modify defaults based on parameters: only PIPE the streams the caller
  # asked to capture (or feed).
  if redirect_stdout: stdout = subprocess.PIPE
  if redirect_stderr: stderr = subprocess.PIPE
  # TODO(sosa): gpylint complains about redefining built-in 'input'.
  # Can we rename this variable?
  if input: stdin = subprocess.PIPE
  # A string cmd is wrapped/joined as text; a list cmd is wrapped/joined as
  # argv entries.  cmd_str is only used for logging and error messages.
  if isinstance(cmd, basestring):
    if enter_chroot: cmd = './enter_chroot.sh -- ' + cmd
    cmd_str = cmd
  else:
    if enter_chroot: cmd = ['./enter_chroot.sh', '--'] + cmd
    cmd_str = ' '.join(cmd)

  # Print out the command before running.
  if print_cmd:
    Info('RunCommand: %s' % cmd_str)
  cmd_result.cmd = cmd_str

  try:
    proc = subprocess.Popen(cmd, cwd=cwd, stdin=stdin, stdout=stdout,
                            stderr=stderr, shell=shell)
    (cmd_result.output, cmd_result.error) = proc.communicate(input)
    # returncode is only surfaced when explicitly requested.
    if exit_code:
      cmd_result.returncode = proc.returncode

    if not error_ok and proc.returncode:
      msg = ('Command "%s" failed.\n' % cmd_str +
             (error_message or cmd_result.error or cmd_result.output or ''))
      raise RunCommandError(msg)
  # TODO(sosa): is it possible not to use the catch-all Exception here?
  except Exception, e:
    # With error_ok the failure (including the RunCommandError raised
    # above) is downgraded to a warning and a partial result is returned.
    if not error_ok:
      raise
    else:
      Warning(str(e))

  return cmd_result
|
||||
|
||||
|
||||
class Color(object):
  """Conditionally wraps text in ANSI color escape sequences."""
  BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
  BOLD = -1
  COLOR_START = '\033[1;%dm'
  BOLD_START = '\033[1m'
  RESET = '\033[0m'

  def __init__(self, enabled=True):
    # When disabled, Color() passes text through untouched.
    self._enabled = enabled

  def Color(self, color, text):
    """Returns text with conditionally added color escape sequences.

    Args:
      color: Text color -- one of the color constants defined in this class.
      text: The text to color.

    Returns:
      If self._enabled is False, returns the original text. If it's True,
      returns text with color escape sequences based on the value of color.
    """
    if not self._enabled:
      return text
    # ANSI foreground colors start at 30 (black); BOLD has its own prefix.
    prefix = (self.BOLD_START if color == self.BOLD
              else self.COLOR_START % (color + 30))
    return prefix + text + self.RESET
|
||||
|
||||
|
||||
def Die(message):
  """Emits a red error message and halts execution.

  Args:
    message: The message to be emitted before exiting.
  """
  # sys.stderr.write plus an explicit newline is equivalent to the
  # `print >> sys.stderr` statement and is also valid Python 3.
  sys.stderr.write(
      Color(_STDOUT_IS_TTY).Color(Color.RED, '\nERROR: ' + message) + '\n')
  sys.exit(1)
|
||||
|
||||
|
||||
# pylint: disable-msg=W0622
def Warning(message):
  """Emits a yellow warning message and continues execution.

  Args:
    message: The message to be emitted.
  """
  # Equivalent to `print >> sys.stderr`, but also valid Python 3.
  sys.stderr.write(
      Color(_STDOUT_IS_TTY).Color(Color.YELLOW, '\nWARNING: ' + message)
      + '\n')
|
||||
|
||||
|
||||
def Info(message):
  """Emits a blue informational message and continues execution.

  Args:
    message: The message to be emitted.
  """
  # Equivalent to `print >> sys.stderr`, but also valid Python 3.
  sys.stderr.write(
      Color(_STDOUT_IS_TTY).Color(Color.BLUE, '\nINFO: ' + message) + '\n')
|
||||
|
||||
|
||||
def ListFiles(base_dir):
  """Recursively list files in a directory.

  Args:
    base_dir: directory to start recursively listing in.

  Returns:
    A list of file paths under base_dir -- note each entry includes the
    base_dir prefix (the old docstring claimed relative paths, but the
    callers strip the prefix themselves).  An empty list if there are no
    files in the directories.

  Raises:
    OSError: if base_dir (or a subdirectory) cannot be listed.
  """
  # Iterative depth-first traversal using an explicit stack of directories.
  directories = [base_dir]
  files_list = []
  while directories:
    directory = directories.pop()
    for name in os.listdir(directory):
      fullpath = os.path.join(directory, name)
      if os.path.isfile(fullpath):
        files_list.append(fullpath)
      elif os.path.isdir(fullpath):
        directories.append(fullpath)

  return files_list
|
||||
|
||||
|
||||
def IsInsideChroot():
  """Returns True if we are inside chroot."""
  # The chroot setup scripts drop this marker file inside the chroot.
  chroot_marker = '/etc/debian_chroot'
  return os.path.exists(chroot_marker)
|
||||
|
||||
|
||||
def GetSrcRoot():
  """Get absolute path to src/scripts/ directory.

  Assuming test script will always be run from descendent of src/scripts.

  Returns:
    A string, absolute path to src/scripts directory. None if not found.
  """
  src_root = None
  match_str = '/src/scripts/'
  test_script_path = os.path.abspath('.')

  path_list = re.split(match_str, test_script_path)
  # re.split returns the whole input in a single-element list when the
  # separator is absent, so the old `if path_list:` check was always true
  # and the not-found branch was dead code (returning a bogus path).
  # Require that a split actually occurred.
  if len(path_list) > 1:
    src_root = os.path.join(path_list[0], match_str.strip('/'))
    Info('src_root = %r' % src_root)
  else:
    Info('No %r found in %r' % (match_str, test_script_path))

  return src_root
|
||||
|
||||
|
||||
def GetChromeosVersion(str_obj):
  """Helper method to parse output for CHROMEOS_VERSION_STRING.

  Args:
    str_obj: a string, which may contain Chrome OS version info.

  Returns:
    A string, value of CHROMEOS_VERSION_STRING environment variable set by
    chromeos_version.sh. Or None if not found.
  """
  match = None
  if str_obj is not None:
    match = re.search('CHROMEOS_VERSION_STRING=([0-9_.]+)', str_obj)
  if match and match.group(1):
    Info('CHROMEOS_VERSION_STRING = %s' % match.group(1))
    return match.group(1)

  Info('CHROMEOS_VERSION_STRING NOT found')
  return None
|
||||
|
||||
|
||||
def GetOutputImageDir(board, cros_version):
  """Construct absolute path to output image directory.

  Args:
    board: a string.
    cros_version: a string, Chrome OS version.

  Returns:
    a string: absolute path to output directory.
  """
  src_root = GetSrcRoot()
  # ASSUME: --build_attempt always sets to 1
  version_str = cros_version + '-a1'
  output_dir = os.path.join(
      os.path.dirname(src_root), 'build/images/%s' % board, version_str)
  Info('output_dir = %s' % output_dir)
  return output_dir
|
||||
@ -1,230 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
import unittest
|
||||
import cros_build_lib
|
||||
import mox
|
||||
|
||||
|
||||
class TestRunCommand(unittest.TestCase):
  """Unit tests for cros_build_lib.RunCommand with a mocked subprocess."""

  def setUp(self):
    self.mox = mox.Mox()
    # Replace subprocess.Popen so no real child process is ever spawned.
    self.mox.StubOutWithMock(subprocess, 'Popen', use_mock_anything=True)
    self.proc_mock = self.mox.CreateMockAnything()
    # Defaults used by _TestCmd; individual tests override as needed.
    self.cmd = 'test cmd'
    self.error = 'test error'
    self.output = 'test output'

  def tearDown(self):
    self.mox.UnsetStubs()
    # VerifyAll ensures every expected Popen/communicate call happened.
    self.mox.VerifyAll()

  def _AssertCrEqual(self, expected, actual):
    """Helper method to compare two CommandResult objects.

    This is needed since assertEqual does not know how to compare two
    CommandResult objects.

    Args:
      expected: a CommandResult object, expected result.
      actual: a CommandResult object, actual result.
    """
    self.assertEqual(expected.cmd, actual.cmd)
    self.assertEqual(expected.error, actual.error)
    self.assertEqual(expected.output, actual.output)
    self.assertEqual(expected.returncode, actual.returncode)

  def _TestCmd(self, cmd, sp_kv=dict(), rc_kv=dict()):
    """Factor out common setup logic for testing --cmd.

    Args:
      cmd: a string or an array of strings.
      sp_kv: key-value pairs passed to subprocess.Popen().
      rc_kv: key-value pairs passed to RunCommand().
    """
    expected_result = cros_build_lib.CommandResult()
    expected_result.cmd = self.cmd
    expected_result.error = self.error
    expected_result.output = self.output
    if 'exit_code' in rc_kv:
      expected_result.returncode = self.proc_mock.returncode

    # Build the exact keyword-argument dict Popen is expected to receive,
    # filling in RunCommand's defaults for anything not in sp_kv.
    arg_dict = dict()
    for attr in 'cwd stdin stdout stderr shell'.split():
      if attr in sp_kv:
        arg_dict[attr] = sp_kv[attr]
      else:
        if attr == 'shell':
          arg_dict[attr] = False
        else:
          arg_dict[attr] = None

    subprocess.Popen(self.cmd, **arg_dict).AndReturn(self.proc_mock)
    self.proc_mock.communicate(None).AndReturn((self.output, self.error))
    self.mox.ReplayAll()
    actual_result = cros_build_lib.RunCommand(cmd, **rc_kv)
    self._AssertCrEqual(expected_result, actual_result)

  def testReturnCodeZeroWithArrayCmd(self):
    """--enter_chroot=False and --cmd is an array of strings."""
    self.proc_mock.returncode = 0
    cmd_list = ['foo', 'bar', 'roger']
    # RunCommand joins list commands with spaces for logging/result.cmd.
    self.cmd = 'foo bar roger'
    self._TestCmd(cmd_list, rc_kv=dict(exit_code=True))

  def testReturnCodeZeroWithArrayCmdEnterChroot(self):
    """--enter_chroot=True and --cmd is an array of strings."""
    self.proc_mock.returncode = 0
    cmd_list = ['foo', 'bar', 'roger']
    # enter_chroot prefixes the command with the chroot wrapper.
    self.cmd = './enter_chroot.sh -- %s' % ' '.join(cmd_list)
    self._TestCmd(cmd_list, rc_kv=dict(enter_chroot=True))

  def testReturnCodeNotZeroErrorOkNotRaisesError(self):
    """Raise error when proc.communicate() returns non-zero."""
    self.proc_mock.returncode = 1
    self._TestCmd(self.cmd, rc_kv=dict(error_ok=True))

  def testSubprocessCommunicateExceptionRaisesError(self):
    """Verify error raised by communicate() is caught."""
    subprocess.Popen(self.cmd, cwd=None, stdin=None, stdout=None, stderr=None,
                     shell=False).AndReturn(self.proc_mock)
    self.proc_mock.communicate(None).AndRaise(ValueError)
    self.mox.ReplayAll()
    self.assertRaises(ValueError, cros_build_lib.RunCommand, self.cmd)

  def testSubprocessCommunicateExceptionNotRaisesError(self):
    """Don't re-raise error from communicate() when --error_ok=True."""
    expected_result = cros_build_lib.CommandResult()
    cmd_str = './enter_chroot.sh -- %s' % self.cmd
    expected_result.cmd = cmd_str

    subprocess.Popen(cmd_str, cwd=None, stdin=None, stdout=None, stderr=None,
                     shell=False).AndReturn(self.proc_mock)
    self.proc_mock.communicate(None).AndRaise(ValueError)
    self.mox.ReplayAll()
    # error_ok downgrades the failure; only cmd is filled in on the result.
    actual_result = cros_build_lib.RunCommand(self.cmd, error_ok=True,
                                              enter_chroot=True)
    self._AssertCrEqual(expected_result, actual_result)
|
||||
|
||||
|
||||
class TestListFiles(unittest.TestCase):
  """Unit tests for cros_build_lib.ListFiles on a real temp directory."""

  def setUp(self):
    self.root_dir = tempfile.mkdtemp(prefix='listfiles_unittest')

  def tearDown(self):
    shutil.rmtree(self.root_dir)

  def _CreateNestedDir(self, dir_structure):
    """Create dirs/files under root_dir as described by dir_structure.

    Entries ending in '/' create only the directory; other entries create
    their parent directories plus an empty placeholder file.
    """
    for entry in dir_structure:
      full_path = os.path.join(self.root_dir, entry)
      # Ensure parent dirs are created; tolerate pre-existing ones.
      try:
        os.makedirs(os.path.dirname(full_path))
      except OSError as err:
        # we don't care if the dir already exists
        if err.errno != errno.EEXIST:
          raise
      if entry.endswith('/'):
        # Directory-only entry: nothing more to create for it.  (This used
        # to `return`, which silently skipped every later entry, and the
        # endswith check was skipped entirely when the dir already
        # existed.)
        continue
      # create dummy files
      open(full_path, 'w').close()

  def testTraverse(self):
    """Test that we are traversing the directory properly."""
    dir_structure = ['one/two/test.txt', 'one/blah.py',
                     'three/extra.conf']
    self._CreateNestedDir(dir_structure)

    files = cros_build_lib.ListFiles(self.root_dir)
    for f in files:
      f = f.replace(self.root_dir, '').lstrip('/')
      if f not in dir_structure:
        self.fail('%s was not found in %s' % (f, dir_structure))

  def testEmptyFilePath(self):
    """Test that we return nothing when directories are empty."""
    dir_structure = ['one/', 'two/', 'one/a/']
    self._CreateNestedDir(dir_structure)
    files = cros_build_lib.ListFiles(self.root_dir)
    self.assertEqual(files, [])

  def testNoSuchDir(self):
    """A nonexistent path should surface ENOENT."""
    try:
      cros_build_lib.ListFiles('/me/no/existe')
    except OSError as err:
      self.assertEqual(err.errno, errno.ENOENT)
|
||||
|
||||
|
||||
class HelperMethodMoxTests(unittest.TestCase):
  """Tests for various helper methods using mox."""

  def setUp(self):
    self.mox = mox.Mox()
    # os.path.abspath is stubbed so each test controls the "current" path.
    self.mox.StubOutWithMock(os.path, 'abspath')

  def tearDown(self):
    self.mox.UnsetStubs()
    self.mox.VerifyAll()

  def testGetSrcRoot(self):
    # GetSrcRoot should trim everything after the /src/scripts/ component.
    test_path = '/tmp/foo/src/scripts/bar/more'
    expected = '/tmp/foo/src/scripts'
    os.path.abspath('.').AndReturn(test_path)
    self.mox.ReplayAll()
    actual = cros_build_lib.GetSrcRoot()
    self.assertEqual(expected, actual)

  def testGetOutputImageDir(self):
    # Output dir is <src>/build/images/<board>/<version>-a1.
    expected = '/tmp/foo/src/build/images/x86-generic/0.0.1-a1'
    self.mox.StubOutWithMock(cros_build_lib, 'GetSrcRoot')
    cros_build_lib.GetSrcRoot().AndReturn('/tmp/foo/src/scripts')
    self.mox.ReplayAll()
    actual = cros_build_lib.GetOutputImageDir('x86-generic', '0.0.1')
    self.assertEqual(expected, actual)
|
||||
|
||||
|
||||
class HelperMethodSimpleTests(unittest.TestCase):
  """Tests for various helper methods without using mox."""

  def _TestChromeosVersion(self, test_str, expected=None):
    # Shared assertion: parsing test_str yields the expected version.
    self.assertEqual(expected,
                     cros_build_lib.GetChromeosVersion(test_str))

  def testGetChromeosVersionWithValidVersionReturnsValue(self):
    self._TestChromeosVersion(
        ' CHROMEOS_VERSION_STRING=0.8.71.2010_09_10_1530 ',
        '0.8.71.2010_09_10_1530')

  def testGetChromeosVersionWithMultipleVersionReturnsFirstMatch(self):
    test_str = (' CHROMEOS_VERSION_STRING=0.8.71.2010_09_10_1530 '
                ' CHROMEOS_VERSION_STRING=10_1530 ')
    self._TestChromeosVersion(test_str, '0.8.71.2010_09_10_1530')

  def testGetChromeosVersionWithInvalidVersionReturnsDefault(self):
    self._TestChromeosVersion(
        ' CHROMEOS_VERSION_STRING=invalid_version_string ')

  def testGetChromeosVersionWithEmptyInputReturnsDefault(self):
    self._TestChromeosVersion('')

  def testGetChromeosVersionWithNoneInputReturnsDefault(self):
    self._TestChromeosVersion(None)
|
||||
|
||||
|
||||
# Run all unit tests in this module when invoked directly.
if __name__ == '__main__':
  unittest.main()
|
||||
@ -1,166 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Helper functions for accessing the issue tracker in a pythonic way."""
|
||||
|
||||
import os.path
|
||||
import pprint
|
||||
import sys
|
||||
|
||||
# import the GData libraries
|
||||
import gdata.client
|
||||
import gdata.projecthosting.client
|
||||
|
||||
DEFAULT_TRACKER_SOURCE = "chromite-tracker-access-1.0"
|
||||
VERBOSE = True # Set to True to get extra debug info...
|
||||
|
||||
class TrackerAccess(object):
  """Class for accessing the tracker on code.google.com."""

  def __init__(self, email="", password="",
               tracker_source=DEFAULT_TRACKER_SOURCE):
    """TrackerAccess constructor.

    Args:
      email: The email address to Login with; may be "" for anonymous access.
      password: The password that goes with the email address; may be "" if
          the email is "".
      tracker_source: A string describing this program. This can be anything
          you like but should should give some indication of which
          app is making the request.
    """
    self._email = email
    self._password = password
    self._tracker_source = tracker_source
    # Created lazily on first Login().
    self._tracker_client = None

  def Login(self):
    """Login, if needed. This may be safely called more than once.

    Commands will call this function as their first line, so the client
    of this class need not call it themselves unless trying to debug login
    problems.

    This function should be called even if we're accessing anonymously.
    """
    # Bail immediately if we've already logged in...
    if self._tracker_client is not None:
      return

    client = gdata.projecthosting.client.ProjectHostingClient()
    self._tracker_client = client
    # Credentials are optional; anonymous access skips client_login.
    if self._email and self._password:
      client.client_login(self._email, self._password,
                          source=self._tracker_source,
                          service="code", account_type='GOOGLE')

  def GetKeyedLabels(self, project_name, issue_id):
    """Get labels of the form "Key-Value" attached to the given issue.

    Any labels that don't have a dash in them are ignored.

    Args:
      project_name: The tracker project to query.
      issue_id: The ID of the issue to query; should be an int but a string
          will probably work too.

    Returns:
      A dictionary mapping key/value pairs from the issue's labels, like:

      {'Area': 'Build',
       'Iteration': '15',
       'Mstone': 'R9.x',
       'Pri': '1',
       'Type': 'Bug'}
    """
    self.Login()

    query = gdata.projecthosting.client.Query(issue_id=issue_id)
    try:
      feed = self._tracker_client.get_issues(project_name, query=query)
    except gdata.client.RequestError as e:
      # Inaccessible bugs are reported (when verbose) and treated as empty.
      if VERBOSE:
        sys.stderr.write("ERROR: Unable to access bug %s:%s: %s\n" % (
            project_name, issue_id, str(e)))
      return {}

    # There should be exactly one result...
    assert len(feed.entry) == 1, "Expected exactly 1 result"
    (entry,) = feed.entry

    # We only care about labels that look like: Key-Value
    # We'll return a dictionary of those.
    keyed_labels = {}
    for label in entry.label:
      text = label.text
      if "-" not in text:
        continue
      label_key, _sep, label_val = text.partition("-")
      keyed_labels[label_key] = label_val

    return keyed_labels
|
||||
|
||||
|
||||
def _TestGetKeyedLabels(project_name, email, passwordFile, *bug_ids):
  """Test code for GetKeyedLabels().

  Args:
    project_name: The name of the project we're looking at.
    email: The email address to use to login.  May be ""
    passwordFile: A file containing the password for the email address.
        May be "" if email is "" for anon access.
    bug_ids: A list of bug IDs to query.
  """
  # If password was specified as a file, read it.
  if passwordFile:
    password = open(passwordFile, "r").read().strip()
  else:
    password = ""

  ta = TrackerAccess(email, password)

  if not bug_ids:
    print "No bugs were specified"
  else:
    for bug_id in bug_ids:
      # One line per bug: the ID followed by its parsed Key-Value labels.
      print bug_id, ta.GetKeyedLabels(project_name, int(bug_id))
|
||||
|
||||
|
||||
def _DoHelp(commands, *args):
  """Print help for the script.

  Args:
    commands: Dict mapping command name to its handler function.
    args: The command-line arguments after the program name.
  """

  if len(args) >= 2 and args[0] == "help" and args[1] in commands:
    # If called with arguments 'help' and 'command', show that commands's doc.
    command_name = args[1]
    print commands[command_name].__doc__
  else:
    # Something else: show generic help...
    print (
        "Usage %s <command> <command args>\n"
        "\n"
        "Known commands: \n"
        "  %s\n"
    ) % (sys.argv[0], pprint.pformat(["help"] + sorted(commands)))
|
||||
|
||||
|
||||
def main():
  """Main function of the script."""

  commands = {
      "TestGetKeyedLabels": _TestGetKeyedLabels,
  }

  args = sys.argv[1:]
  if args and args[0] in commands:
    # Dispatch to the named command with the remaining arguments.
    commands[args[0]](*args[1:])
  else:
    # Unknown or missing command: show help, forwarding all arguments.
    _DoHelp(commands, *args)


if __name__ == "__main__":
  main()
|
||||
@ -1,63 +0,0 @@
|
||||
# chromite x86-generic target spec file
|
||||
# Use RFC 822 format
|
||||
|
||||
[BUILD]
|
||||
# If the profile is pulled in from an overlay you will need to specify it.
|
||||
# Well known locations of the "src/overlays, /usr/local/portage" etc will
|
||||
# be searched for the overlay
|
||||
overlay: overlay-x86-generic
|
||||
|
||||
# The profile to use for building this target
|
||||
# ALL is a reserved target that builds all specfiles recursively beneath
|
||||
# e.g:
|
||||
# profile: x86-generic/base
|
||||
profile: x86-generic/dev
|
||||
|
||||
# portagechannel specifies which version of the upstream portage is being built
|
||||
# "stable" is the current stable version
|
||||
# "unstable" is the next version of portage we are stabilizing to
|
||||
# "bleedingedge" is up-to-the-minute upstream portage
|
||||
# e.g:
|
||||
# portagechannel: stable
|
||||
portagechannel: stable
|
||||
|
||||
# prebuilt mirror hosts prebuilts for stage4/chroot and per profile prebuilts
|
||||
# e.g:
|
||||
# prebuiltmirror:http://build.chromium.org/mirror/chromiumos/stage4mirror
|
||||
prebuiltmirror:http://build.chromium.org/mirror/chromiumos/stage4mirror
|
||||
|
||||
# stage4 is the Portage Stage3 + any additions deps (hard-host-deps etc)
|
||||
# "latest" tries to fetch the "latest" from the prebuilt mirror (default)
|
||||
# -- This is pulled in from the current portagechannel i.e stable
|
||||
# "nofetch" will prevent fetching stage4 and attempt to compile a stage4
|
||||
# If nofetch is specified a stage3 and portage are required
|
||||
# "version" This will attempt to download a particular prebuilt version
|
||||
# - Version is specified as s<stage3ver>-p<portagever>
|
||||
# e.g:
|
||||
# stage4: stage4-s20100309-p20100310
|
||||
stage4: latest
|
||||
|
||||
# stage3 is the pristine stage3 to use to build your stage4/chroot.
|
||||
# This is ignored if stage4 is latest
|
||||
# "latest" fetches the latest version of upstream stage3
|
||||
# "version" pulls in the specified version of stage3
|
||||
# e.g:
|
||||
# stage3: 20100309
|
||||
stage3: latest
|
||||
|
||||
# portage is the upstream portage version to use to build your stage4/chroot.
|
||||
# "latest" fetches the latest version of upstream portage
|
||||
# "version" pulls in the specified version of portage
|
||||
# e.g:
|
||||
# portage: 20100310
|
||||
|
||||
portage: latest
|
||||
|
||||
|
||||
[IMAGE]
|
||||
# TODO(vince): update the following imaging sections as appropriate.
|
||||
# TODO(anush): figure out how this can work for virtual ALL targets
# since each profile will require partition/filesystem/hook information.
|
||||
p0: ',c,*,,83', 'ext3', '/', 'p0hook'
|
||||
p1: ',c,*,,82', 'ext2', '/boot', 'p1hook'
|
||||
p2: ',c,*,,83', 'ext3', '/var', 'p2hook'
|
||||
@ -1,130 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Tests for build_image shell script.
|
||||
|
||||
Note:
|
||||
This script must be run from INSIDE chroot.
|
||||
|
||||
Sample usage:
|
||||
# (inside chroot) pushd ~/trunk/src/scripts/
|
||||
# run all test cases in this script
|
||||
python chromite/tests/build_image_test.py
|
||||
|
||||
# run all test cases in a test suite
|
||||
python chromite/tests/build_image_test.py BuildImageTest
|
||||
|
||||
# run a specific test
|
||||
python chromite/tests/build_image_test.py BuildImageTest.testWithoutBoardExit
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import unittest
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../lib'))
# Info was missing from this import list even though _RunBuildImageCmd
# calls it, which made every Info() call raise NameError.
from cros_build_lib import (Info, RunCommand, IsInsideChroot,
                            GetChromeosVersion, GetOutputImageDir)
|
||||
|
||||
|
||||
class BuildImageTest(unittest.TestCase):
|
||||
"""Test suite for build_image script."""
|
||||
|
||||
def setUp(self):
  # These tests shell out to build_image, which only works inside the
  # chroot, so refuse to run anywhere else.
  if not IsInsideChroot():
    raise RuntimeError('This script must be run from inside chroot.')
|
||||
|
||||
def _CheckStringPresent(self, query_list, check_stdout=False):
  """Check for presence of specific queries.

  Args:
    query_list: a list of strings to look for.
    check_stdout: a boolean. True == use stdout from child process.
        Otherwise use its stderr.
  """
  # Search the stream captured by the preceding _RunBuildImageCmd call.
  haystack = self.output if check_stdout else self.error
  for query in query_list:
    # Source error string defined in src/scripts/build_image
    self.assertNotEqual(-1, haystack.find(query))
|
||||
|
||||
def _RunBuildImageCmd(self, cmd, assert_success=True):
|
||||
"""Run build_image with flags.
|
||||
|
||||
Args:
|
||||
cmd: a string.
|
||||
assert_success: a boolean. True == check child process return code is 0.
|
||||
False otherwise.
|
||||
"""
|
||||
Info ('About to run command: %s' % cmd)
|
||||
cmd_result = RunCommand(
|
||||
cmd, error_ok=True, exit_code=True, redirect_stdout=True,
|
||||
redirect_stderr=True, shell=True)
|
||||
self.output = cmd_result.output
|
||||
self.error = cmd_result.error
|
||||
Info ('output =\n%r' % self.output)
|
||||
Info ('error =\n%r' % self.error)
|
||||
|
||||
message = 'cmd should have failed! error:\n%s' % self.error
|
||||
if assert_success:
|
||||
self.assertEqual(0, cmd_result.returncode)
|
||||
else:
|
||||
self.assertNotEqual(0, cmd_result.returncode, message)
|
||||
|
||||
def _VerifyOutputImagesExist(self, image_dir, image_list):
|
||||
"""Verify output images exist in image_dir.
|
||||
|
||||
Args:
|
||||
image_dir: a string, absolute path to output directory with images.
|
||||
image_list: a list of strings, names of output images.
|
||||
"""
|
||||
for i in image_list:
|
||||
image_path = os.path.join(image_dir, i)
|
||||
self.assertTrue(os.path.exists(image_path))
|
||||
|
||||
def testWithoutBoardExit(self):
|
||||
"""Fail when no --board is specified."""
|
||||
self._RunBuildImageCmd('./build_image --board=""', assert_success=False)
|
||||
self._CheckStringPresent(['ERROR', '--board is required'])
|
||||
|
||||
def testIncompatibleInstallFlags(self):
|
||||
"""Fail when both --factory_install and --dev_install are set."""
|
||||
cmd = './build_image --board=x86-generic --factory_install --dev_install'
|
||||
self._RunBuildImageCmd(cmd, assert_success=False)
|
||||
self._CheckStringPresent(['ERROR', 'Incompatible flags'])
|
||||
|
||||
def testIncompatibleRootfsFlags(self):
|
||||
"""Fail when rootfs partition is not large enough."""
|
||||
cmd = ('./build_image --board=x86-generic --rootfs_size=100'
|
||||
' --rootfs_hash_pad=10 --rootfs_partition_size=20')
|
||||
self._RunBuildImageCmd(cmd, assert_success=False)
|
||||
self._CheckStringPresent(['ERROR', 'bigger than partition'])
|
||||
|
||||
def _BuildImageForBoard(self, board, image_list):
|
||||
"""Build image for specific board type.
|
||||
|
||||
Args:
|
||||
board: a string.
|
||||
image_list: a list of strings, names of output images.
|
||||
"""
|
||||
cmd = './build_image --board=%s' % board
|
||||
Info ('If all goes well, it takes ~5 min. to build an image...')
|
||||
self._RunBuildImageCmd(cmd)
|
||||
self._CheckStringPresent(['Image created in', 'copy to USB keyfob'],
|
||||
check_stdout=True)
|
||||
chromeos_version_str = GetChromeosVersion(self.output)
|
||||
image_dir = GetOutputImageDir(board, chromeos_version_str)
|
||||
self._VerifyOutputImagesExist(image_dir, image_list)
|
||||
|
||||
def testBuildX86Generic(self):
|
||||
"""Verify we can build an x86-generic image."""
|
||||
self._BuildImageForBoard(
|
||||
'x86-generic', ['chromiumos_image.bin', 'chromiumos_base_image.bin'])
|
||||
|
||||
|
||||
# Allow direct invocation; unittest.main() parses argv so a suite or a single
# test may be selected on the command line.
if __name__ == '__main__':
  unittest.main()
|
||||
Loading…
x
Reference in New Issue
Block a user