Remove buildbot code that has been added to chromite.

Change-Id: I7eb27ac81704de8689a65203a440303a52a01dc2

BUG=chromium-os:11171
TEST=we'll see if things explode

Review URL: http://codereview.chromium.org/6286040
This commit is contained in:
Chris Sosa 2011-02-02 15:39:58 -08:00
parent 2b2f19fc2e
commit 79088f496e
18 changed files with 0 additions and 6373 deletions

View File

@ -1 +0,0 @@
cbuildbot.py

View File

@ -1,817 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""CBuildbot is wrapper around the build process used by the pre-flight queue"""
import errno
import heapq
import re
import optparse
import os
import shutil
import sys
import cbuildbot_comm
from cbuildbot_config import config
sys.path.append(os.path.join(os.path.dirname(__file__), '../lib'))
from cros_build_lib import (Die, Info, ReinterpretPathForChroot, RunCommand,
Warning)
# Number of times to retry a repo sync before giving up.
_DEFAULT_RETRIES = 3
# File (substituted with the buildroot) listing the packages revved this run.
_PACKAGE_FILE = '%(buildroot)s/src/scripts/cbuildbot_package.list'
# Local archive directory served by the buildbot's web server.
ARCHIVE_BASE = '/var/www/archive'
# Maximum number of archived builds to keep locally.
ARCHIVE_COUNT = 10
# Overlay locations; each takes a %(buildroot)s substitution.
PUBLIC_OVERLAY = '%(buildroot)s/src/third_party/chromiumos-overlay'
PRIVATE_OVERLAY = '%(buildroot)s/src/private-overlays/chromeos-overlay'
# Per-board portage keywords file used to unmask the uprevved chrome ebuild.
CHROME_KEYWORDS_FILE = ('/build/%(board)s/etc/portage/package.keywords/chrome')
# Currently, both the full buildbot and the preflight buildbot store their
# data in a variable named PORTAGE_BINHOST, but they're in different files.
# We're planning on joining the two files soon and renaming the full binhost
# to FULL_BINHOST.
_FULL_BINHOST = 'PORTAGE_BINHOST'
_PREFLIGHT_BINHOST = 'PORTAGE_BINHOST'
# ======================== Utility functions ================================
def _PrintFile(path):
"""Prints out the contents of a file to stderr."""
file_handle = open(path)
print >> sys.stderr, file_handle.read()
file_handle.close()
sys.stderr.flush()
def MakeDir(path, parents=False):
  """Basic wrapper around os.makedirs.

  Args:
    path: Path to create (intermediate directories are always created,
        as os.makedirs does).
    parents: Follow mkdir -p logic: do not fail if path already exists.

  Raises:
    OSError: if creation fails for any reason other than an already-existing
        path with parents=True.
  """
  try:
    os.makedirs(path)
  except OSError as e:
    # Only tolerate an already-existing path when parents was requested.
    if not (e.errno == errno.EEXIST and parents):
      raise
def RepoSync(buildroot, retries=_DEFAULT_RETRIES):
  """Uses repo to checkout the source code.

  Args:
    buildroot: Directory containing the repo checkout.
    retries: Number of retries to try before failing on the sync.

  Raises:
    Whatever RunCommand raised on the final attempt, once retries are
    exhausted.
  """
  while retries > 0:
    try:
      # The --trace option ensures that repo shows the output from git. This
      # is needed so that the buildbot can kill us if git is not making
      # progress.
      RunCommand(['repo', '--trace', 'sync'], cwd=buildroot)
      RunCommand(['repo', 'forall', '-c', 'git', 'config',
                  'url.ssh://git@gitrw.chromium.org:9222.insteadof',
                  'http://git.chromium.org/git'], cwd=buildroot)
      retries = 0
    except Exception:
      # Was a bare 'except:', which also swallowed KeyboardInterrupt and
      # SystemExit and so could turn a buildbot kill into a retry.
      retries -= 1
      if retries > 0:
        Warning('CBUILDBOT -- Repo Sync Failed, retrying')
      else:
        Warning('CBUILDBOT -- Retries exhausted')
        raise
  # Log the pinned manifest of what was actually synced.
  RunCommand(['repo', 'manifest', '-r', '-o', '/dev/stderr'], cwd=buildroot)
# =========================== Command Helpers =================================
def _GetAllGitRepos(buildroot, debug=False):
  """Returns a list of tuples containing [git_repo, src_path]."""
  # Dump the full repo manifest, then keep only the <project> lines.
  manifest_xml = RunCommand("repo manifest -o -".split(), cwd=buildroot,
                            redirect_stdout=True, redirect_stderr=True,
                            print_cmd=debug)
  project_lines = RunCommand(['grep', 'project name='], cwd=buildroot,
                             input=manifest_xml, redirect_stdout=True,
                             print_cmd=debug)
  # Pull the (name, path) attribute pair out of each project element.
  pairs = re.findall('.+name=\"([\w-]+)\".+path=\"(\S+)".+', project_lines)
  manifest_tuples = []
  for pair in pairs:
    if len(pair) != 2:
      Warning('Found incorrect xml object %s' % pair)
    else:
      # Remove pre-pended src directory from manifest.
      manifest_tuples.append([pair[0], pair[1].replace('src/', '')])
  return manifest_tuples
def _GetCrosWorkOnSrcPath(buildroot, board, package, debug=False):
  """Returns ${CROS_WORKON_SRC_PATH} for given package."""
  cwd = os.path.join(buildroot, 'src', 'scripts')
  # Locate the package's ebuild; error_ok because it may not exist.
  ebuild_path = RunCommand(('equery-%s which %s' % (board, package)).split(),
                           cwd=cwd, redirect_stdout=True,
                           redirect_stderr=True, enter_chroot=True,
                           error_ok=True, print_cmd=debug)
  if not ebuild_path:
    return None
  # Ask the ebuild for its environment and scrape the workon source dir.
  info_output = RunCommand(('ebuild-%s %s info' % (board, ebuild_path)).split(),
                           cwd=cwd, redirect_stdout=True,
                           redirect_stderr=True, enter_chroot=True,
                           print_cmd=debug)
  matches = re.findall('CROS_WORKON_SRCDIR="(\S+)"', info_output)
  return matches[0] if matches else None
def _CreateRepoDictionary(buildroot, board, debug=False):
  """Returns the repo->list_of_ebuilds dictionary.

  Args:
    buildroot: Directory containing the repo checkout.
    board: Board used to resolve each package's ebuild.
    debug: If True, print the commands being run.
  """
  repo_dictionary = {}
  manifest_tuples = _GetAllGitRepos(buildroot)
  Info('Creating dictionary of git repos to portage packages ...')
  cwd = os.path.join(buildroot, 'src', 'scripts')
  get_all_workon_pkgs_cmd = './cros_workon list --all'.split()
  packages = RunCommand(get_all_workon_pkgs_cmd, cwd=cwd,
                        redirect_stdout=True, redirect_stderr=True,
                        enter_chroot=True, print_cmd=debug)
  for package in packages.split():
    cros_workon_src_path = _GetCrosWorkOnSrcPath(buildroot, board, package)
    if cros_workon_src_path:
      # Renamed from 'tuple', which shadowed the builtin.
      for manifest_tuple in manifest_tuples:
        # This path tends to have the user's home_dir prepended to it.
        if cros_workon_src_path.endswith(manifest_tuple[1]):
          Info('For %s found matching package %s' % (manifest_tuple[0],
                                                     package))
          # setdefault replaces the deprecated has_key() check-then-insert.
          repo_dictionary.setdefault(manifest_tuple[0], []).append(package)
  return repo_dictionary
def _ParseRevisionString(revision_string, repo_dictionary):
"""Parses the given revision_string into a revision dictionary.
Returns a list of tuples that contain [portage_package_name, commit_id] to
update.
Keyword arguments:
revision_string -- revision_string with format
'repo1.git@commit_1 repo2.git@commit2 ...'.
repo_dictionary -- dictionary with git repository names as keys (w/out git)
to portage package names.
"""
# Using a dictionary removes duplicates.
revisions = {}
for revision in revision_string.split():
# Format 'package@commit-id'.
revision_tuple = revision.split('@')
if len(revision_tuple) != 2:
Warning('Incorrectly formatted revision %s' % revision)
repo_name = revision_tuple[0].replace('.git', '')
# Might not have entry if no matching ebuild.
if repo_dictionary.has_key(repo_name):
# May be many corresponding packages to a given git repo e.g. kernel).
for package in repo_dictionary[repo_name]:
revisions[package] = revision_tuple[1]
return revisions.items()
def _UprevFromRevisionList(buildroot, tracking_branch, revision_list, board,
                           overlays):
  """Uprevs based on revision list."""
  if not revision_list:
    Info('No packages found to uprev')
    return
  # Collect package names, rejecting any with an embedded ':' since they
  # are joined with ':' below.
  packages = []
  for package, _revision in revision_list:
    assert ':' not in package, 'Invalid package name: %s' % package
    packages.append(package)
  chroot_overlays = [ReinterpretPathForChroot(path) for path in overlays]
  drop_file = ReinterpretPathForChroot(_PACKAGE_FILE % {'buildroot': buildroot})
  RunCommand(['./cros_mark_as_stable',
              '--board=%s' % board,
              '--tracking_branch=%s' % tracking_branch,
              '--overlays=%s' % ':'.join(chroot_overlays),
              '--packages=%s' % ':'.join(packages),
              '--drop_file=%s' % drop_file,
              'commit'],
             cwd=os.path.join(buildroot, 'src', 'scripts'), enter_chroot=True)
def _MarkChromeAsStable(buildroot, tracking_branch, chrome_rev, board):
  """Returns the portage atom for the revved chrome ebuild - see man emerge."""
  cwd = os.path.join(buildroot, 'src', 'scripts')
  portage_atom_string = RunCommand(['bin/cros_mark_chrome_as_stable',
                                    '--tracking_branch=%s' % tracking_branch,
                                    chrome_rev], cwd=cwd, redirect_stdout=True,
                                   enter_chroot=True).rstrip()
  if not portage_atom_string:
    Info('Found nothing to rev.')
    return None
  # Output is of the form 'something=<atom>'; keep the atom.
  chrome_atom = portage_atom_string.split('=')[1]
  keywords_file = CHROME_KEYWORDS_FILE % {'board': board}
  # TODO(sosa): Workaround to build unstable chrome ebuild we uprevved.
  RunCommand(['sudo', 'mkdir', '-p', os.path.dirname(keywords_file)],
             enter_chroot=True, cwd=cwd)
  RunCommand(['sudo', 'tee', keywords_file], input='=%s\n' % chrome_atom,
             enter_chroot=True, cwd=cwd)
  return chrome_atom
def _UprevAllPackages(buildroot, tracking_branch, board, overlays):
  """Uprevs all packages that have been updated since last uprev."""
  chroot_overlays = [ReinterpretPathForChroot(path) for path in overlays]
  drop_file = ReinterpretPathForChroot(_PACKAGE_FILE % {'buildroot': buildroot})
  RunCommand(['./cros_mark_as_stable', '--all',
              '--board=%s' % board,
              '--overlays=%s' % ':'.join(chroot_overlays),
              '--tracking_branch=%s' % tracking_branch,
              '--drop_file=%s' % drop_file,
              'commit'],
             cwd=os.path.join(buildroot, 'src', 'scripts'), enter_chroot=True)
def _GetVMConstants(buildroot):
  """Returns minimum (vdisk_size, statefulfs_size) recommended for VM's."""
  cwd = os.path.join(buildroot, 'src', 'scripts', 'lib')
  source_cmd = 'source %s/cros_vm_constants.sh' % cwd

  def _ReadConstant(name):
    # Source the constants file in a shell and echo the requested variable.
    return RunCommand(
        ['/bin/bash', '-c', '%s && echo $%s' % (source_cmd, name)],
        redirect_stdout=True).strip()

  return (_ReadConstant('MIN_VDISK_SIZE_FULL'),
          _ReadConstant('MIN_STATEFUL_FS_SIZE_FULL'))
def _GitCleanup(buildroot, board, tracking_branch, overlays):
  """Clean up git branch after previous uprev attempt."""
  cwd = os.path.join(buildroot, 'src', 'scripts')
  # Nothing to clean if the scripts directory is not checked out yet.
  if not os.path.exists(cwd):
    return
  RunCommand(['./cros_mark_as_stable', '--srcroot=..',
              '--board=%s' % board,
              '--overlays=%s' % ':'.join(overlays),
              '--tracking_branch=%s' % tracking_branch, 'clean'],
             cwd=cwd, error_ok=True)
def _CleanUpMountPoints(buildroot):
  """Cleans up any stale mount points from previous runs."""
  all_mounts = RunCommand(['mount'], redirect_stdout=True)
  stale_mounts = RunCommand(['grep', buildroot], input=all_mounts,
                            redirect_stdout=True, error_ok=True)
  for line in stale_mounts.splitlines():
    # Each mount line looks like '<dev> on <mount_pt> type <fs> (<opts>)';
    # extract the text between ' on ' and ' type '.
    mount_pt = line.rpartition(' type ')[0].partition(' on ')[2]
    # Lazy unmount so a busy mount point does not abort the cleanup.
    RunCommand(['sudo', 'umount', '-l', mount_pt], error_ok=True)
def _WipeOldOutput(buildroot):
  """Wipes out build output directories."""
  # Remove previously built images from the source tree.
  RunCommand(['rm', '-rf', 'src/build/images'], cwd=buildroot)
# =========================== Main Commands ===================================
def _PreFlightRinse(buildroot, board, tracking_branch, overlays):
  """Cleans up any leftover state from previous runs."""
  # Abandon any stale uprev branches, detach stale mounts under the
  # buildroot, and kill any VMs left over from earlier test runs.
  _GitCleanup(buildroot, board, tracking_branch, overlays)
  _CleanUpMountPoints(buildroot)
  RunCommand(['sudo', 'killall', 'kvm'], error_ok=True)
def _FullCheckout(buildroot, tracking_branch,
                  retries=_DEFAULT_RETRIES,
                  url='http://git.chromium.org/git/manifest'):
  """Performs a full checkout and clobbers any previous checkouts.

  Args:
    buildroot: Directory to check the source out into (removed first).
    tracking_branch: Branch being tracked, e.g. 'cros/master'; only the
        final component is passed to 'repo init'.
    retries: Number of times to retry the sync.
    url: Manifest repository URL.
  """
  RunCommand(['sudo', 'rm', '-rf', buildroot])
  MakeDir(buildroot, parents=True)
  # 'repo init' wants the bare branch name, not the remote prefix.
  # (Previously this used a stray semicolon and a redundant '%s' % wrap.)
  branch = tracking_branch.split('/')[-1]
  RunCommand(['repo', 'init', '-u', url, '-b', branch],
             cwd=buildroot, input='\n\ny\n')
  RepoSync(buildroot, retries)
def _IncrementalCheckout(buildroot, retries=_DEFAULT_RETRIES):
  """Performs a checkout without clobbering previous checkout."""
  # Just re-sync the existing repo checkout in place.
  RepoSync(buildroot, retries)
def _MakeChroot(buildroot, replace=False):
  """Wrapper around make_chroot."""
  cmd = ['./make_chroot', '--fast']
  if replace:
    # Tear down and rebuild any existing chroot.
    cmd.append('--replace')
  RunCommand(cmd, cwd=os.path.join(buildroot, 'src', 'scripts'))
def _GetPortageEnvVar(buildroot, board, envvar):
  """Get a portage environment variable for the specified board, if any.

  Args:
    buildroot: The root directory where the build occurs. Must be an absolute
        path.
    board: Board type that was built on this machine. E.g. x86-generic. If
        this is None, get the env var from the host.
    envvar: The environment variable to get. E.g. 'PORTAGE_BINHOST'.

  Returns:
    The value of the environment variable, as a string. If no such variable
    can be found, return the empty string.
  """
  cwd = os.path.join(buildroot, 'src', 'scripts')
  # Board-specific wrappers are named portageq-<board>.
  portageq = 'portageq-%s' % board if board else 'portageq'
  binhost = RunCommand([portageq, 'envvar', envvar], cwd=cwd,
                       redirect_stdout=True, enter_chroot=True, error_ok=True)
  return binhost.rstrip('\n')
def _SetupBoard(buildroot, board='x86-generic'):
  """Wrapper around setup_board."""
  RunCommand(['./setup_board', '--fast', '--default', '--board=%s' % board],
             cwd=os.path.join(buildroot, 'src', 'scripts'), enter_chroot=True)
def _Build(buildroot, emptytree, build_autotest=True, usepkg=True):
  """Wrapper around build_packages."""
  if emptytree:
    # --emptytree has to reach emerge via the EXTRA_BOARD_FLAGS env var,
    # hence the shell wrapper.
    cmd = ['sh', '-c', 'EXTRA_BOARD_FLAGS=--emptytree ./build_packages']
  else:
    cmd = ['./build_packages']
  if not build_autotest:
    cmd.append('--nowithautotest')
  if not usepkg:
    cmd.append('--nousepkg')
  RunCommand(cmd, cwd=os.path.join(buildroot, 'src', 'scripts'),
             enter_chroot=True)
def _EnableLocalAccount(buildroot):
  """Enables the chronos local account for test images."""
  cwd = os.path.join(buildroot, 'src', 'scripts')
  # Set local account for test images.
  RunCommand(['./enable_localaccount.sh',
              'chronos'],
             print_cmd=False, cwd=cwd)
def _BuildImage(buildroot):
  """Builds an image, wiping any previous image output first."""
  _WipeOldOutput(buildroot)
  cwd = os.path.join(buildroot, 'src', 'scripts')
  RunCommand(['./build_image', '--replace'], cwd=cwd, enter_chroot=True)
def _BuildVMImageForTesting(buildroot):
  """Converts the latest image into a full test VM image."""
  vdisk_size, statefulfs_size = _GetVMConstants(buildroot)
  RunCommand(['./image_to_vm.sh',
              '--test_image',
              '--full',
              '--vdisk_size=%s' % vdisk_size,
              '--statefulfs_size=%s' % statefulfs_size,
              ], cwd=os.path.join(buildroot, 'src', 'scripts'),
             enter_chroot=True)
def _RunUnitTests(buildroot):
  """Runs unit tests for the packages listed in the drop file."""
  package_file = ReinterpretPathForChroot(_PACKAGE_FILE %
                                          {'buildroot': buildroot})
  RunCommand(['./cros_run_unit_tests',
              '--package_file=%s' % package_file,
              ], cwd=os.path.join(buildroot, 'src', 'scripts'),
             enter_chroot=True)
def _RunSmokeSuite(buildroot, results_dir):
  """Runs the Smoke suite in a VM, clearing any stale results first."""
  results_dir_in_chroot = os.path.join(buildroot, 'chroot',
                                       results_dir.lstrip('/'))
  if os.path.exists(results_dir_in_chroot):
    shutil.rmtree(results_dir_in_chroot)
  RunCommand(['bin/cros_run_vm_test',
              '--no_graphics',
              '--results_dir_root=%s' % results_dir,
              'suite_Smoke',
              ], cwd=os.path.join(buildroot, 'src', 'scripts'),
             error_ok=False)
def _RunAUTest(buildroot, board):
  """Runs a basic update test from the au test harness."""
  image_path = os.path.join(buildroot, 'src', 'build', 'images', board,
                            'latest', 'chromiumos_test_image.bin')
  # Update from the image to itself; no delta payloads are generated.
  RunCommand(['bin/cros_au_test_harness',
              '--no_graphics',
              '--no_delta',
              '--board=%s' % board,
              '--test_prefix=SimpleTest',
              '--verbose',
              '--base_image=%s' % image_path,
              '--target_image=%s' % image_path,
              ], cwd=os.path.join(buildroot, 'src', 'scripts'),
             error_ok=False)
def _UprevPackages(buildroot, tracking_branch, revisionfile, board, overlays):
  """Uprevs a package based on given revisionfile.

  If revisionfile is set to None or does not resolve to an actual file, this
  function will uprev all packages.

  Keyword arguments:
  revisionfile -- string specifying a file that contains a list of revisions
      to uprev.
  """
  # Purposefully set to None as it means Force Build was pressed.
  revisions = 'None'
  if revisionfile:
    try:
      rev_file = open(revisionfile)
      try:
        revisions = rev_file.read()
      finally:
        # The handle was previously leaked if read() raised.
        rev_file.close()
    except Exception:
      # Unused 'e' binding dropped; any failure falls back to revving all.
      Warning('Error reading %s, revving all' % revisionfile)
      revisions = 'None'
  revisions = revisions.strip()
  # TODO(sosa): Un-comment once we close individual trees.
  # revisions == "None" indicates a Force Build.
  #if revisions != 'None':
  #  print >> sys.stderr, 'CBUILDBOT Revision list found %s' % revisions
  #  revision_list = _ParseRevisionString(revisions,
  #      _CreateRepoDictionary(buildroot, board))
  #  _UprevFromRevisionList(buildroot, tracking_branch, revision_list, board,
  #                         overlays)
  #else:
  Info('CBUILDBOT Revving all')
  _UprevAllPackages(buildroot, tracking_branch, board, overlays)
def _UprevPush(buildroot, tracking_branch, board, overlays, dryrun):
  """Pushes uprev changes to the main line."""
  cmd = ['./cros_mark_as_stable',
         '--srcroot=%s' % os.path.join(buildroot, 'src'),
         '--board=%s' % board,
         '--overlays=%s' % ':'.join(overlays),
         '--tracking_branch=%s' % tracking_branch
         ]
  if dryrun:
    # Go through the motions without actually pushing.
    cmd.append('--dryrun')
  cmd.append('push')
  RunCommand(cmd, cwd=os.path.join(buildroot, 'src', 'scripts'))
def _LegacyArchiveBuild(buildroot, bot_id, buildconfig, buildnumber,
                        debug=False):
  """Adds a step to the factory to archive a build.

  Args:
    buildroot: Directory containing the repo checkout.
    bot_id: Name of the bot; used for archive paths.
    buildconfig: Config dictionary for this bot.
    buildnumber: Buildbot build number.
    debug: If True, only log the command instead of running it.
  """
  # Fixed properties
  keep_max = 3
  gsutil_archive = 'gs://chromeos-archive/' + bot_id
  cwd = os.path.join(buildroot, 'src', 'scripts')
  cmd = ['./archive_build.sh',
         '--build_number', str(buildnumber),
         '--to', '/var/www/archive/' + bot_id,
         '--keep_max', str(keep_max),
         '--prebuilt_upload',
         '--board', buildconfig['board'],
         '--acl', '/home/chrome-bot/slave_archive_acl',
         '--gsutil_archive', gsutil_archive,
         '--gsd_gen_index',
         '/b/scripts/gsd_generate_index/gsd_generate_index.py',
         '--gsutil', '/b/scripts/slave/gsutil',
         ]
  # BUG FIX: '--test_mod' was previously also appended unconditionally
  # above, which duplicated the flag and made this config check meaningless.
  if buildconfig.get('test_mod', True):
    cmd.append('--test_mod')
  if buildconfig.get('factory_install_mod', True):
    cmd.append('--factory_install_mod')
  if buildconfig.get('factory_test_mod', True):
    cmd.append('--factory_test_mod')
  if debug:
    Warning('***** ***** LegacyArchiveBuild CMD: ' + ' '.join(cmd))
  else:
    RunCommand(cmd, cwd=cwd)
def _ArchiveTestResults(buildroot, board, test_results_dir,
                        gsutil, archive_dir, acl):
  """Archives the test results into Google Storage

  Takes the results from the test_results_dir and the last qemu image and
  uploads them to Google Storage.

  Arguments:
    buildroot: Root directory where build occurs
    board: Board to find the qemu image.
    test_results_dir: Path from buildroot/chroot to find test results.
        This must a subdir of /tmp.
    gsutil: Location of gsutil
    archive_dir: Google Storage path to store the archive
    acl: ACL to set on archive in Google Storage
  """
  num_gsutil_retries = 5
  results_path = os.path.join(buildroot, 'chroot',
                              test_results_dir.lstrip('/'))
  # Results were written as root inside the VM; make them readable first.
  RunCommand(['sudo', 'chmod', '-R', '+r', results_path])
  try:
    # gsutil has the ability to resume an upload when the command is retried
    RunCommand([gsutil, 'cp', '-R', results_path, archive_dir],
               num_retries=num_gsutil_retries)
    RunCommand([gsutil, 'setacl', acl, archive_dir])
    image_path = os.path.join(buildroot, 'src', 'build', 'images', board,
                              'latest', 'chromiumos_qemu_image.bin')
    RunCommand(['gzip', '-f', '--fast', image_path])
    RunCommand([gsutil, 'cp', image_path + '.gz', archive_dir],
               num_retries=num_gsutil_retries)
  except Exception as e:
    # Archiving is best-effort; a failure here should not fail the build.
    Warning('Could not archive test results (error=%s)' % str(e))
def _GetConfig(config_name):
  """Gets the configuration for the build.

  Args:
    config_name: Name of a configuration in the cbuildbot_config dict.

  Returns:
    The config dictionary for config_name.  Exits the process with an
    error listing valid names if config_name does not exist.
  """
  # Dead 'buildconfig = {}' assignment removed; 'in' and sorted() replace
  # the deprecated has_key() and keys()+sort() idioms.
  if config_name not in config:
    Warning('Non-existent configuration specified.')
    Warning('Please specify one of:')
    for name in sorted(config):
      Warning(' %s' % name)
    sys.exit(1)
  return config[config_name]
def _ResolveOverlays(buildroot, overlays):
  """Return the list of overlays to use for a given buildbot.

  Args:
    buildroot: The root directory where the build occurs. Must be an absolute
        path.
    overlays: A string describing which overlays you want.
        'private': Just the private overlay.
        'public': Just the public overlay.
        'both': Both the public and private overlays.
  """
  public_overlay = PUBLIC_OVERLAY % {'buildroot': buildroot}
  private_overlay = PRIVATE_OVERLAY % {'buildroot': buildroot}
  overlay_map = {
      'private': [private_overlay],
      'public': [public_overlay],
      'both': [public_overlay, private_overlay],
  }
  if overlays not in overlay_map:
    # Unknown selector: warn and use no overlays at all.
    Info('No overlays found.')
    return []
  return overlay_map[overlays]
def _UploadPrebuilts(buildroot, board, overlay_config, binhosts):
  """Upload prebuilts.

  Args:
    buildroot: The root directory where the build occurs.
    board: Board type that was built on this machine
    overlay_config: A string describing which overlays you want.
        'private': Just the private overlay.
        'public': Just the public overlay.
        'both': Both the public and private overlays.
    binhosts: The URLs of the current binhosts. Binaries that are already
        present will not be uploaded twice. Empty URLs will be ignored.
  """
  cwd = os.path.join(buildroot, 'src', 'scripts')
  cmd = [os.path.join(cwd, 'prebuilt.py'),
         '--sync-binhost-conf',
         '--build-path', buildroot,
         '--board', board,
         '--prepend-version', 'preflight',
         '--key', _PREFLIGHT_BINHOST]
  # Empty binhost URLs are skipped.
  for binhost in filter(None, binhosts):
    cmd.extend(['--previous-binhost-url', binhost])
  if overlay_config == 'public':
    cmd.extend(['--upload', 'gs://chromeos-prebuilt'])
  else:
    assert overlay_config in ('private', 'both')
    cmd.extend(['--upload', 'chromeos-images:/var/www/prebuilt/',
                '--binhost-base-url', 'http://chromeos-prebuilt'])
  RunCommand(cmd, cwd=cwd)
def main():
  """Entry point: parse options and run the configured build pipeline."""
  # Parse options
  usage = "usage: %prog [options] cbuildbot_config"
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('-a', '--acl', default='private',
                    help='ACL to set on GSD archives')
  parser.add_option('-r', '--buildroot',
                    help='root directory where build occurs', default=".")
  parser.add_option('-n', '--buildnumber',
                    help='build number', type='int', default=0)
  parser.add_option('--chrome_rev', default=None, type='string',
                    dest='chrome_rev',
                    help=('Chrome_rev of type [tot|latest_release|'
                          'sticky_release]'))
  parser.add_option('-g', '--gsutil', default='', help='Location of gsutil')
  parser.add_option('-c', '--gsutil_archive', default='',
                    help='Datastore archive location')
  parser.add_option('--clobber', action='store_true', dest='clobber',
                    default=False,
                    help='Clobbers an old checkout before syncing')
  parser.add_option('--debug', action='store_true', dest='debug',
                    default=False,
                    help='Override some options to run as a developer.')
  parser.add_option('--nobuild', action='store_false', dest='build',
                    default=True,
                    help="Don't actually build (for cbuildbot dev")
  parser.add_option('--noprebuilts', action='store_false', dest='prebuilts',
                    default=True,
                    help="Don't upload prebuilts.")
  parser.add_option('--nosync', action='store_false', dest='sync',
                    default=True,
                    help="Don't sync before building.")
  parser.add_option('--notests', action='store_false', dest='tests',
                    default=True,
                    help='Override values from buildconfig and run no tests.')
  parser.add_option('-f', '--revisionfile',
                    help='file where new revisions are stored')
  parser.add_option('-t', '--tracking-branch', dest='tracking_branch',
                    default='cros/master', help='Run the buildbot on a branch')
  parser.add_option('-u', '--url', dest='url',
                    default='http://git.chromium.org/git/manifest',
                    help='Run the buildbot on internal manifest')
  (options, args) = parser.parse_args()
  buildroot = os.path.abspath(options.buildroot)
  revisionfile = options.revisionfile
  tracking_branch = options.tracking_branch
  chrome_atom_to_build = None
  # The config name (bot id) is the last positional argument.
  if len(args) >= 1:
    bot_id = args[-1]
    buildconfig = _GetConfig(bot_id)
  else:
    Warning('Missing configuration description')
    parser.print_usage()
    sys.exit(1)
  try:
    # Calculate list of overlay directories.
    rev_overlays = _ResolveOverlays(buildroot, buildconfig['rev_overlays'])
    push_overlays = _ResolveOverlays(buildroot, buildconfig['push_overlays'])
    # We cannot push to overlays that we don't rev.
    assert set(push_overlays).issubset(set(rev_overlays))
    # Either has to be a master or not have any push overlays.
    assert buildconfig['master'] or not push_overlays
    board = buildconfig['board']
    old_binhost = None
    _PreFlightRinse(buildroot, buildconfig['board'], tracking_branch,
                    rev_overlays)
    chroot_path = os.path.join(buildroot, 'chroot')
    boardpath = os.path.join(chroot_path, 'build', board)
    if options.sync:
      if options.clobber or not os.path.isdir(buildroot):
        _FullCheckout(buildroot, tracking_branch, url=options.url)
      else:
        # Remember the binhost before syncing so a change can be detected.
        old_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST)
        _IncrementalCheckout(buildroot)
    new_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST)
    # A binhost change across the sync triggers an --emptytree rebuild.
    emptytree = (old_binhost and old_binhost != new_binhost)
    # Check that all overlays can be found.
    for path in rev_overlays:
      if not os.path.isdir(path):
        Die('Missing overlay: %s' % path)
    if not os.path.isdir(chroot_path) or buildconfig['chroot_replace']:
      _MakeChroot(buildroot, buildconfig['chroot_replace'])
    if not os.path.isdir(boardpath):
      _SetupBoard(buildroot, board=buildconfig['board'])
    # Perform chrome uprev.
    if options.chrome_rev:
      chrome_atom_to_build = _MarkChromeAsStable(buildroot, tracking_branch,
                                                 options.chrome_rev, board)
    # Perform other uprevs.
    if buildconfig['uprev']:
      _UprevPackages(buildroot, tracking_branch, revisionfile,
                     buildconfig['board'], rev_overlays)
    elif options.chrome_rev and not chrome_atom_to_build:
      # We found nothing to rev, we're done here.
      return
    _EnableLocalAccount(buildroot)
    if options.build:
      _Build(buildroot,
             emptytree,
             build_autotest=(buildconfig['vm_tests'] and options.tests),
             usepkg=buildconfig['usepkg'])
    if buildconfig['unittests'] and options.tests:
      _RunUnitTests(buildroot)
    _BuildImage(buildroot)
    if buildconfig['vm_tests'] and options.tests:
      _BuildVMImageForTesting(buildroot)
      test_results_dir = '/tmp/run_remote_tests.%s' % options.buildnumber
      try:
        _RunSmokeSuite(buildroot, test_results_dir)
        _RunAUTest(buildroot, buildconfig['board'])
      finally:
        # Archive whatever results exist, even when a test step failed,
        # unless we are running as a developer.
        if not options.debug:
          archive_full_path = os.path.join(options.gsutil_archive,
                                           str(options.buildnumber))
          _ArchiveTestResults(buildroot, buildconfig['board'],
                              test_results_dir=test_results_dir,
                              gsutil=options.gsutil,
                              archive_dir=archive_full_path,
                              acl=options.acl)
    if buildconfig['uprev']:
      # Don't push changes for developers.
      if buildconfig['master']:
        # Master bot needs to check if the other slaves completed.
        if cbuildbot_comm.HaveSlavesCompleted(config):
          if not options.debug and options.prebuilts:
            _UploadPrebuilts(buildroot, board, buildconfig['rev_overlays'],
                             [new_binhost])
          _UprevPush(buildroot, tracking_branch, buildconfig['board'],
                     push_overlays, options.debug)
        else:
          Die('CBUILDBOT - One of the slaves has failed!!!')
      else:
        # Publish my status to the master if its expecting it.
        if buildconfig['important'] and not options.debug:
          cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE)
    if buildconfig['archive_build']:
      _LegacyArchiveBuild(buildroot,
                          bot_id,
                          buildconfig,
                          options.buildnumber,
                          options.debug)
  except:
    # Send failure to master bot.
    if not buildconfig['master'] and buildconfig['important']:
      cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED)
    raise
if __name__ == '__main__':
  main()

View File

@ -1,195 +0,0 @@
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Module contains communication methods between cbuildbot instances."""
import Queue
import SocketServer
import os
import socket
import sys
import time
sys.path.append(os.path.join(os.path.dirname(__file__), '../lib'))
from cros_build_lib import Info, Warning, RunCommand
# Communication port for master to slave communication.
_COMM_PORT = 32890
# TCP Buffer Size.
_BUFFER = 4096
# Timeout between checks for new status by either end.
_HEARTBEAT_TIMEOUT = 60 # in sec.
# Max Timeout to wait before assuming failure.
_MAX_TIMEOUT = 30 * 60 # in sec.
# Commands - sent to slave from master.
# Report whether you have completed or failed building.
_COMMAND_CHECK_STATUS = 'check-status'
# Return status - response to commands from slaves (self.explanatory)
_STATUS_COMMAND_REJECTED = 'rejected'
_STATUS_TIMEOUT = 'timeout'
# Public for cbuildbot.
STATUS_BUILD_COMPLETE = 'complete'
STATUS_BUILD_FAILED = 'failure'
# Global queues to communicate with server.
# Each queue is bounded to one entry: PublishStatus drains and refills them
# and the request handler consumes them on each incoming command.
_status_queue = Queue.Queue(1)
_receive_queue = Queue.Queue(1)
_command_queue = Queue.Queue(1)
class _TCPServerWithReuse(SocketServer.TCPServer):
  """TCPServer that allows re-use of socket and timed out sockets."""
  # BUG FIX: this was previously written as an assignment to
  # 'SocketServer.TCPServer.allow_reuse_address', which mutated the BASE
  # class and silently changed the behavior of every other TCPServer in the
  # process.  Setting the attribute on this subclass is sufficient.
  allow_reuse_address = True

  def __init__(self, address, handler, timeout):
    """Binds to address and applies a timeout to the listening socket.

    Args:
      address: (host, port) tuple to bind to.
      handler: Request handler class.
      timeout: Socket timeout in seconds, so handle_request() returns
          periodically instead of blocking forever.
    """
    SocketServer.TCPServer.__init__(self, address, handler)
    self.socket.settimeout(timeout)
class _SlaveCommandHandler(SocketServer.BaseRequestHandler):
  """Handles requests from a master pre-flight-queue bot."""

  def _HandleCommand(self, command, args):
    """Handles command and returns status for master.

    Args:
      command: Command string received from the master.
      args: Argument string that accompanied the command.

    Returns:
      The status string to send back, or _STATUS_COMMAND_REJECTED if
      command does not match the expected command from _command_queue.
    """
    Info('(Slave) - Received command %s with args %s' % (command, args))
    # NOTE(review): the expected command is consumed from the queue before
    # validation; PublishStatus appears to re-fill the queues per attempt —
    # confirm against PublishStatus before reordering.
    command_to_expect = _command_queue.get()
    # Check status also adds an entry on the status queue.
    if command_to_expect == _COMMAND_CHECK_STATUS:
      slave_status = _status_queue.get()
    # Safety check to make sure the server is in a good state.
    if command_to_expect != command:
      Warning(
          '(Slave) - Rejecting command %s. Was expecting %s.' % (command,
              command_to_expect))
      return _STATUS_COMMAND_REJECTED
    # Give slave command with optional args.
    _receive_queue.put(args)
    if command == _COMMAND_CHECK_STATUS:
      # Returns status to send.
      return slave_status

  def handle(self):
    """Overriden. Handles commands sent from master."""
    # Wire format is 'command\nargs\n' (see _SendCommand).
    data = self.request.recv(_BUFFER).strip()
    (command, args) = data.split('\n')
    response = self._HandleCommand(command, args)
    self.request.send(response)
def _GetSlaveNames(configuration):
"""Returns an array of slave hostnames that are important."""
slaves = []
for slave_config in configuration.items():
if (not slave_config[1]['master'] and
slave_config[1]['important']):
slaves.append(slave_config[1]['hostname'])
return slaves
def _SendCommand(hostname, command, args):
  """Returns response from host or _STATUS_TIMEOUT on error.

  Args:
    hostname: Host to contact on _COMM_PORT.
    command: Command string to send.
    args: Argument string for the command.
  """
  # Wire format matches what _SlaveCommandHandler.handle() parses.
  data = '%s\n%s\n' % (command, args)
  Info('(Master) - Sending %s %s to %s' % (command, args, hostname))
  # Create a socket (SOCK_STREAM means a TCP socket).
  sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  try:
    # Connect to server and send data
    sock.connect((hostname, _COMM_PORT))
    sock.send(data)
    # Receive data from the server and shut down.
    received = sock.recv(_BUFFER)
  except Exception:
    # Was a bare 'except:', which also reported KeyboardInterrupt as a
    # timeout.  Any connect/send/recv failure is treated as a timeout.
    received = _STATUS_TIMEOUT
  finally:
    sock.close()
  return received
def _CheckSlavesLeftStatus(slaves_to_check):
  """Returns True if remaining slaves have completed.

  Once a slave reports STATUS_BUILD_COMPLETE, removes slave from list. Returns
  True as long as no slave reports STATUS_BUILD_FAILED.

  NOTE: slaves_to_check is modified IN PLACE; the caller
  (HaveSlavesCompleted) relies on completed slaves being removed between
  polling rounds.

  Keyword arguments:
  slaves_to_check -- Array of hostnames to check.
  """
  slaves_to_remove = []
  for slave in slaves_to_check:
    status = _SendCommand(slave, _COMMAND_CHECK_STATUS, 'empty')
    if status == STATUS_BUILD_FAILED:
      # A single failure fails the whole check immediately.
      Warning('(Master) - Slave %s failed' % slave)
      return False
    elif status == STATUS_BUILD_COMPLETE:
      Info('(Master) - Slave %s completed' % slave)
      slaves_to_remove.append(slave)
  # Remove after the loop so the list is not mutated while being iterated.
  for slave in slaves_to_remove:
    slaves_to_check.remove(slave)
  return True
def HaveSlavesCompleted(configuration):
  """Returns True if all other slaves have succeeded.

  Checks other slaves status until either '_MAX_TIMEOUT' has passed,
  at least one slaves reports a failure, or all slaves report success.

  Keyword arguments:
  configuration -- configuration dictionary for slaves.
  """
  not_failed = True
  slaves_to_check = _GetSlaveNames(configuration)
  timeout = 0
  # _CheckSlavesLeftStatus removes completed slaves from slaves_to_check in
  # place, so the loop ends once the list is empty, a slave fails, or the
  # total polling time exceeds _MAX_TIMEOUT.
  while slaves_to_check and not_failed and timeout < _MAX_TIMEOUT:
    not_failed = _CheckSlavesLeftStatus(slaves_to_check)
    if slaves_to_check and not_failed:
      # Wait one heartbeat before polling the stragglers again.
      time.sleep(_HEARTBEAT_TIMEOUT)
      timeout += _HEARTBEAT_TIMEOUT
  return len(slaves_to_check) == 0
def PublishStatus(status):
  """Publishes status and Returns True if master received it.

  This call is blocking until either the master pre-flight-queue bot picks
  up the status, or a '_MAX_TIMEOUT' has passed.

  Keyword arguments:
  status -- should be a string and one of STATUS_BUILD_.*.
  """
  # Clean up queues.
  # Drain any stale entries from a previous attempt; each queue holds one.
  try:
    _command_queue.get_nowait()
  except Queue.Empty: pass
  try:
    _status_queue.get_nowait()
  except Queue.Empty: pass
  # Arm the handler: it will expect a check-status command and will answer
  # with our status.
  _command_queue.put(_COMMAND_CHECK_STATUS)
  _status_queue.put(status)
  server = _TCPServerWithReuse(('localhost', _COMM_PORT),
                               _SlaveCommandHandler, _HEARTBEAT_TIMEOUT)
  timeout = 0
  response = None
  try:
    # handle_request() returns after at most the socket timeout set by
    # _TCPServerWithReuse, so this loop re-checks roughly every heartbeat.
    while not response and timeout < _MAX_TIMEOUT:
      server.handle_request()
      try:
        response = _receive_queue.get_nowait()
      except Queue.Empty:
        Info('(Slave) - Waiting for master to accept %s' % status)
        timeout += _HEARTBEAT_TIMEOUT
        response = None
  except Exception, e:
    Warning('%s' % e)
  server.server_close()
  # A non-None response means the master consumed our status.
  return response != None

View File

@ -1,101 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Units tests for cbuildbot_comm commands."""
import cbuildbot_comm
import sys
import threading
import time
import unittest
# Minimal two-bot configuration used by the tests below: one important slave
# reachable on localhost plus a master entry.
_TEST_CONFIG = {'test_slave' :
                  {'master' : False,
                   'hostname' : 'localhost',
                   'important' : True},
                'test_master' :
                  {'master' : True,
                   'important' : False
                  }
               }

# Reduce timeouts.  Shrink the module-level timeouts so the timeout test
# cases finish in seconds rather than minutes.
cbuildbot_comm._HEARTBEAT_TIMEOUT = 2
cbuildbot_comm._MAX_TIMEOUT = 6
class _MasterSendBadStatus(threading.Thread):
  """Master-side helper thread that sends an unknown command to the slave.

  Asserts (via the owning test case) that the slave rejects the command.
  """

  def __init__(self, test_class):
    threading.Thread.__init__(self)
    self.test_class = test_class

  def run(self):
    # Sleep for one heartbeat interval to let the slave start listening.
    time.sleep(2)
    result = cbuildbot_comm._SendCommand('localhost', 'bad-command', 'args')
    self.test_class.assertEqual(result,
                                cbuildbot_comm._STATUS_COMMAND_REJECTED)
class _MasterCheckStatusThread(threading.Thread):
  """Master-side helper thread that polls slaves for completion.

  Runs cbuildbot_comm.HaveSlavesCompleted() against the given config and
  asserts (via the owning test case) that it returns the expected value.
  """

  def __init__(self, config, expected_return, test_class):
    threading.Thread.__init__(self)
    self.config = config
    self.expected_return = expected_return
    self.test_class = test_class

  def run(self):
    completed = cbuildbot_comm.HaveSlavesCompleted(self.config)
    self.test_class.assertEqual(completed, self.expected_return)
class CBuildBotCommTest(unittest.TestCase):
  """Integration tests pairing a slave PublishStatus() with a master thread."""

  def testSlaveComplete(self):
    """Master accepts a slave's BUILD_COMPLETE status."""
    print >> sys.stderr, '\n>>> Running testSlaveComplete\n'
    # Master should check statuses in another thread.
    master_thread = _MasterCheckStatusThread(_TEST_CONFIG, True, self)
    master_thread.start()
    return_value = cbuildbot_comm.PublishStatus(
        cbuildbot_comm.STATUS_BUILD_COMPLETE)
    self.assertEqual(return_value, True)

  def testMasterTimeout(self):
    """With no slave publishing, the master-side poll times out (False)."""
    print >> sys.stderr, '\n>>> Running testMasterTimeout\n'
    return_value = cbuildbot_comm.HaveSlavesCompleted(_TEST_CONFIG)
    self.assertEqual(return_value, False)

  def testSlaveTimeout(self):
    """With no master polling, the slave-side publish times out (False)."""
    print >> sys.stderr, '\n>>> Running testSlaveTimeout\n'
    return_value = cbuildbot_comm.PublishStatus(
        cbuildbot_comm.STATUS_BUILD_COMPLETE)
    self.assertEqual(return_value, False)

  def testSlaveFail(self):
    """Master observes a slave's BUILD_FAILED status; publish still succeeds."""
    print >> sys.stderr, '\n>>> Running testSlaveFail\n'
    # Master should check statuses in another thread.
    master_thread = _MasterCheckStatusThread(_TEST_CONFIG, False, self)
    master_thread.start()
    return_value = cbuildbot_comm.PublishStatus(
        cbuildbot_comm.STATUS_BUILD_FAILED)
    self.assertEqual(return_value, True)

  def testBadCommand(self):
    """Slave rejects an unknown command from the master."""
    print >> sys.stderr, '\n>>> Running testSendBadCommand\n'
    # Master should check statuses in another thread.
    master_thread = _MasterSendBadStatus(self)
    master_thread.start()
    return_value = cbuildbot_comm.PublishStatus(
        cbuildbot_comm.STATUS_BUILD_COMPLETE)
    self.assertEqual(return_value, False)
# Run the test suite when invoked directly.
if __name__ == '__main__':
  unittest.main()

View File

@ -1,217 +0,0 @@
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Dictionary of configuration types for cbuildbot.
Each dictionary entry is in turn a dictionary of config_param->value.
config_param's:
board -- The board of the image to build.
master -- This bot pushes changes to the overlays.
important -- Master bot uses important bots to determine overall status.
i.e. if master bot succeeds and other important slaves succeed
then the master will uprev packages. This should align
with info vs. closer except for the master.and options.tests
hostname -- Needed for 'important' slaves. The hostname of the bot. Should
match hostname in slaves.cfg in buildbot checkout.
uprev -- Uprevs the local ebuilds to build new changes since last stable.
build. If master then also pushes these changes on success.
rev_overlays -- Select what overlays to look at for revving. This can be
'public', 'private' or 'both'.
push_overlays -- Select what overlays to push at. This should be a subset of
rev_overlays for the particular builder. Must be None if
not a master. There should only be one master bot pushing
changes to each overlay per branch.
unittests -- Runs unittests for packages.
vm_tests -- Runs the smoke suite and au test harness in a qemu-based VM
using KVM.
usepkg -- Use binary packages to bootstrap, when possible. (emerge --usepkg)
chroot_replace -- wipe and replace chroot, but not source.
archive_build -- Do we run archive_build.sh
test_mod -- Create a test mod image for archival.
factory_install_mod -- Create a factory install image for archival.
factory_test_mod -- Create a factory test image for archival.
"""
# TODO(dgarrett) Make test_mod, factory_install_mod, factory_test_mod options
# go away when these options work for arm.
# Baseline settings shared by every builder config; the per-builder dicts
# below start from a copy of this and override individual fields.
# 'board' and 'hostname' have no default and must be supplied per-config.
default = {
  # 'board' No default value
  'master' : False,
  'important' : False,
  # 'hostname' No default value
  'uprev' : False,
  'rev_overlays': 'public',
  'push_overlays': None,
  'unittests' : True,
  'vm_tests' : True,
  'usepkg' : True,
  'chroot_replace' : False,
  'archive_build' : False,
  'test_mod' : True,
  'factory_install_mod' : True,
  'factory_test_mod' : True,
}
# Overrides layered on top of 'default' for ARM-board builders.
arm = {
  # VM/tests are broken on arm.
  'unittests' : False,
  'vm_tests' : False,
  # These images don't work for arm.
  'factory_install_mod' : False,
  'factory_test_mod' : False,
}
# Overrides layered on top of 'default' for full (from-scratch) builders.
full = {
  # Full builds are test build to show that we can build from scratch,
  # so use settings to build from scratch, and archive the results.
  'usepkg' : False,
  'chroot_replace' : True,
  'archive_build' : True
}
# Builder configurations, keyed by builder name.  Each entry is a copy of
# 'default' (plus the 'arm'/'full' mixins where applicable) with a handful
# of per-builder overrides applied.
config = {}

# --- Pre-flight-queue masters ---
config['x86-generic-pre-flight-queue'] = default.copy()
config['x86-generic-pre-flight-queue'].update({
  'board' : 'x86-generic',
  'master' : True,
  'hostname' : 'chromeosbuild2',
  'uprev' : True,
  'rev_overlays': 'public',
  'push_overlays': 'public',
})

config['x86-generic-chrome-pre-flight-queue'] = default.copy()
config['x86-generic-chrome-pre-flight-queue'].update({
  'board' : 'x86-generic',
  'master' : True,
  'uprev' : False,
  'rev_overlays': 'public',
  'push_overlays': 'public',
})

config['x86-mario-pre-flight-queue'] = default.copy()
config['x86-mario-pre-flight-queue'].update({
  'board' : 'x86-mario',
  'master' : True,
  'uprev' : True,
  'rev_overlays': 'both',
  'push_overlays': 'private',
})

config['x86-mario-pre-flight-branch'] = default.copy()
config['x86-mario-pre-flight-branch'].update({
  'board' : 'x86-mario',
  'master' : True,
  'uprev' : True,
  'rev_overlays': 'both',
  'push_overlays': 'both',
})

# --- Binary (non-master) builders ---
config['x86-agz-bin'] = default.copy()
config['x86-agz-bin'].update({
  'board' : 'x86-agz',
  'uprev' : True,
  'rev_overlays': 'both',
  'push_overlays': None,
})

config['x86-dogfood-bin'] = default.copy()
config['x86-dogfood-bin'].update({
  'board' : 'x86-dogfood',
  'uprev' : True,
  'rev_overlays': 'both',
  'push_overlays': None,
})

config['x86-pineview-bin'] = default.copy()
config['x86-pineview-bin'].update({
  'board' : 'x86-pineview',
  'uprev' : True,
  'rev_overlays': 'public',
  'push_overlays': None,
})

config['arm-tegra2-bin'] = default.copy()
config['arm-tegra2-bin'].update(arm)
config['arm-tegra2-bin'].update({
  'board' : 'tegra2_dev-board',
  'uprev' : True,
  'rev_overlays': 'public',
  'push_overlays': None,
})

config['arm-generic-bin'] = default.copy()
config['arm-generic-bin'].update(arm)
config['arm-generic-bin'].update({
  'board' : 'arm-generic',
  'uprev' : True,
  'rev_overlays': 'public',
  'push_overlays': None,
})

# --- Full (from-scratch) builders ---
config['arm-generic-full'] = default.copy()
config['arm-generic-full'].update(arm)
config['arm-generic-full'].update(full)
config['arm-generic-full'].update({
  'board' : 'arm-generic',
})

config['arm-tegra2-full'] = default.copy()
config['arm-tegra2-full'].update(arm)
config['arm-tegra2-full'].update(full)
config['arm-tegra2-full'].update({
  'board' : 'tegra2_dev-board',
})

config['arm-tegra2-seaboard-full'] = default.copy()
config['arm-tegra2-seaboard-full'].update(arm)
config['arm-tegra2-seaboard-full'].update(full)
config['arm-tegra2-seaboard-full'].update({
  'board' : 'tegra2_seaboard',
})

config['x86-generic-full'] = default.copy()
config['x86-generic-full'].update(full)
config['x86-generic-full'].update({
  'board' : 'x86-generic',
})

config['x86-pineview-full'] = default.copy()
config['x86-pineview-full'].update(full)
config['x86-pineview-full'].update({
  'board' : 'x86-pineview',
})

# Legacy underscore-style aliases; these share (not copy) the dicts above.
# TODO(dgarrett) delete when buildbot updated to use new names
config['x86_agz_bin'] = config['x86-agz-bin']
config['x86_dogfood_bin'] = config['x86-dogfood-bin']
config['x86_pineview_bin'] = config['x86-pineview-bin']
config['arm_tegra2_bin'] = config['arm-tegra2-bin']
config['arm_generic_bin'] = config['arm-generic-bin']

View File

@ -1,237 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for cbuildbot. Needs to be run inside of chroot for mox."""
import __builtin__
import mox
import os
import posix
import shutil
import tempfile
import unittest
# Fixes circular dependency error.
import cbuildbot_comm
import cbuildbot
from cros_build_lib import ReinterpretPathForChroot
class CBuildBotTest(mox.MoxTestBase):
  """Mox-based unit tests for the helper functions in cbuildbot."""

  def setUp(self):
    """Create shared fixtures and stub out RunCommand."""
    mox.MoxTestBase.setUp(self)
    # Always stub RunCommmand out as we use it in every method.
    self.mox.StubOutWithMock(cbuildbot, 'RunCommand')
    self.tracking_branch = 'cros/master'
    # [project, path] pairs as returned by _GetAllGitRepos.
    self._test_repos = [['kernel', 'third_party/kernel/files'],
                        ['login_manager', 'platform/login_manager']
                       ]
    self._test_cros_workon_packages = \
        'chromeos-base/kernel\nchromeos-base/chromeos-login\n'
    self._test_board = 'test-board'
    self._buildroot = '.'
    # repo-name -> package-list mapping fed to _ParseRevisionString.
    self._test_dict = {'kernel' : ['chromos-base/kernel', 'dev-util/perf'],
                       'cros' : ['chromos-base/libcros']
                      }
    # Simulated revision string ('repo.git@hash' tokens).
    self._test_string = "kernel.git@12345test cros.git@12333test"
    self._test_string += " crosutils.git@blahblah"
    self._revision_file = 'test-revisions.pfq'
    self._test_parsed_string_array = [
        ['chromeos-base/kernel', '12345test'],
        ['dev-util/perf', '12345test'],
        ['chromos-base/libcros', '12345test']
    ]
    self._overlays = ['%s/src/third_party/chromiumos-overlay' % self._buildroot]
    self._chroot_overlays = [
        ReinterpretPathForChroot(p) for p in self._overlays
    ]

  def testParseRevisionString(self):
    """Test whether _ParseRevisionString parses string correctly."""
    return_array = cbuildbot._ParseRevisionString(self._test_string,
                                                  self._test_dict)
    self.assertEqual(len(return_array), 3)
    # NOTE(review): assertTrue is given two positional args here; the second
    # becomes the failure message, so these three assertions always pass.
    # Looks like a latent test bug -- probably meant membership checks on
    # the (package, revision) pairs.
    self.assertTrue(
        'chromeos-base/kernel', '12345test' in return_array)
    self.assertTrue(
        'dev-util/perf', '12345test' in return_array)
    self.assertTrue(
        'chromos-base/libcros', '12345test' in return_array)

  def testCreateDictionary(self):
    """_CreateRepoDictionary should map repo names to their package lists."""
    self.mox.StubOutWithMock(cbuildbot, '_GetAllGitRepos')
    self.mox.StubOutWithMock(cbuildbot, '_GetCrosWorkOnSrcPath')
    cbuildbot._GetAllGitRepos(mox.IgnoreArg()).AndReturn(self._test_repos)
    cbuildbot.RunCommand(mox.IgnoreArg(),
                         cwd='%s/src/scripts' % self._buildroot,
                         redirect_stdout=True,
                         redirect_stderr=True,
                         enter_chroot=True,
                         print_cmd=False).AndReturn(
                             self._test_cros_workon_packages)
    cbuildbot._GetCrosWorkOnSrcPath(self._buildroot, self._test_board,
                                    'chromeos-base/kernel').AndReturn(
                                        '/home/test/third_party/kernel/files')
    cbuildbot._GetCrosWorkOnSrcPath(self._buildroot, self._test_board,
                                    'chromeos-base/chromeos-login').AndReturn(
                                        '/home/test/platform/login_manager')
    self.mox.ReplayAll()
    repo_dict = cbuildbot._CreateRepoDictionary(self._buildroot,
                                                self._test_board)
    self.assertEqual(repo_dict['kernel'], ['chromeos-base/kernel'])
    self.assertEqual(repo_dict['login_manager'],
                     ['chromeos-base/chromeos-login'])
    self.mox.VerifyAll()

  # TODO(sosa): Re-add once we use cros_mark vs. cros_mark_all.
  #def testUprevPackages(self):
  #  """Test if we get actual revisions in revisions.pfq."""
  #  self.mox.StubOutWithMock(cbuildbot, '_CreateRepoDictionary')
  #  self.mox.StubOutWithMock(cbuildbot, '_ParseRevisionString')
  #  self.mox.StubOutWithMock(cbuildbot, '_UprevFromRevisionList')
  #  self.mox.StubOutWithMock(__builtin__, 'open')
  #
  #  # Mock out file interaction.
  #  m_file = self.mox.CreateMock(file)
  #  __builtin__.open(self._revision_file).AndReturn(m_file)
  #  m_file.read().AndReturn(self._test_string)
  #  m_file.close()
  #
  #  cbuildbot._CreateRepoDictionary(self._buildroot,
  #                                  self._test_board).AndReturn(self._test_dict)
  #  cbuildbot._ParseRevisionString(self._test_string,
  #                                 self._test_dict).AndReturn(
  #                                     self._test_parsed_string_array)
  #  cbuildbot._UprevFromRevisionList(self._buildroot,
  #                                   self._test_parsed_string_array)
  #  self.mox.ReplayAll()
  #  cbuildbot._UprevPackages(self._buildroot, self._revision_file,
  #                           self._test_board)
  #  self.mox.VerifyAll()

  def testArchiveTestResults(self):
    """Test if we can archive the latest results dir to Google Storage."""
    # Set vars for call.
    buildroot = '/fake_dir'
    board = 'fake-board'
    test_results_dir = 'fake_results_dir'
    gsutil_path = '/fake/gsutil/path'
    archive_dir = 1234
    acl = 'fake_acl'
    num_retries = 5
    # Convenience variables to make archive easier to understand.
    path_to_results = os.path.join(buildroot, 'chroot', test_results_dir)
    path_to_image = os.path.join(buildroot, 'src', 'build', 'images', board,
                                 'latest', 'chromiumos_qemu_image.bin')
    # Expected command sequence: make results readable, upload them, set the
    # ACL, then gzip and upload the VM image.
    cbuildbot.RunCommand(['sudo', 'chmod', '-R', '+r', path_to_results])
    cbuildbot.RunCommand([gsutil_path, 'cp', '-R', path_to_results,
                          archive_dir], num_retries=num_retries)
    cbuildbot.RunCommand([gsutil_path, 'setacl', acl, archive_dir])
    cbuildbot.RunCommand(['gzip', '-f', '--fast', path_to_image])
    cbuildbot.RunCommand([gsutil_path, 'cp', path_to_image + '.gz',
                          archive_dir], num_retries=num_retries)
    self.mox.ReplayAll()
    cbuildbot._ArchiveTestResults(buildroot, board, test_results_dir,
                                  gsutil_path, archive_dir, acl)
    self.mox.VerifyAll()

  # TODO(sosa): Remove once we un-comment above.
  def testUprevPackages(self):
    """Test if we get actual revisions in revisions.pfq."""
    self.mox.StubOutWithMock(__builtin__, 'open')
    # Mock out file interaction.
    m_file = self.mox.CreateMock(file)
    __builtin__.open(self._revision_file).AndReturn(m_file)
    m_file.read().AndReturn(self._test_string)
    m_file.close()
    drop_file = cbuildbot._PACKAGE_FILE % {'buildroot': self._buildroot}
    cbuildbot.RunCommand(['./cros_mark_as_stable', '--all',
                          '--board=%s' % self._test_board,
                          '--overlays=%s' % ':'.join(self._chroot_overlays),
                          '--tracking_branch=cros/master',
                          '--drop_file=%s' % ReinterpretPathForChroot(drop_file),
                          'commit'],
                         cwd='%s/src/scripts' % self._buildroot,
                         enter_chroot=True)
    self.mox.ReplayAll()
    cbuildbot._UprevPackages(self._buildroot, self.tracking_branch,
                             self._revision_file, self._test_board,
                             self._overlays)
    self.mox.VerifyAll()

  def testUprevAllPackages(self):
    """Test if we get None in revisions.pfq indicating Full Builds."""
    self.mox.StubOutWithMock(__builtin__, 'open')
    # Mock out file interaction.
    m_file = self.mox.CreateMock(file)
    __builtin__.open(self._revision_file).AndReturn(m_file)
    m_file.read().AndReturn('None')
    m_file.close()
    drop_file = cbuildbot._PACKAGE_FILE % {'buildroot': self._buildroot}
    cbuildbot.RunCommand(['./cros_mark_as_stable', '--all',
                          '--board=%s' % self._test_board,
                          '--overlays=%s' % ':'.join(self._chroot_overlays),
                          '--tracking_branch=cros/master',
                          '--drop_file=%s' % ReinterpretPathForChroot(drop_file),
                          'commit'],
                         cwd='%s/src/scripts' % self._buildroot,
                         enter_chroot=True)
    self.mox.ReplayAll()
    cbuildbot._UprevPackages(self._buildroot, self.tracking_branch,
                             self._revision_file, self._test_board,
                             self._overlays)
    self.mox.VerifyAll()

  def testGetPortageEnvVar(self):
    """Basic test case for _GetPortageEnvVar function."""
    envvar = 'EXAMPLE'
    cbuildbot.RunCommand(mox.And(mox.IsA(list), mox.In(envvar)),
                         cwd='%s/src/scripts' % self._buildroot,
                         redirect_stdout=True, enter_chroot=True,
                         error_ok=True).AndReturn('RESULT\n')
    self.mox.ReplayAll()
    result = cbuildbot._GetPortageEnvVar(self._buildroot, self._test_board,
                                         envvar)
    self.mox.VerifyAll()
    # Trailing newline from the command output should be stripped.
    self.assertEqual(result, 'RESULT')

  def testUploadPublicPrebuilts(self):
    """Test _UploadPrebuilts with a public location."""
    binhost = 'http://www.example.com'
    binhosts = [binhost, None]
    # None entries in binhosts must be filtered out of the command line.
    check = mox.And(mox.IsA(list), mox.In(binhost), mox.Not(mox.In(None)),
                    mox.In('gs://chromeos-prebuilt'))
    cbuildbot.RunCommand(check, cwd='%s/src/scripts' % self._buildroot)
    self.mox.ReplayAll()
    cbuildbot._UploadPrebuilts(self._buildroot, self._test_board, 'public',
                               binhosts)
    self.mox.VerifyAll()

  def testUploadPrivatePrebuilts(self):
    """Test _UploadPrebuilts with a private location."""
    binhost = 'http://www.example.com'
    binhosts = [binhost, None]
    check = mox.And(mox.IsA(list), mox.In(binhost), mox.Not(mox.In(None)),
                    mox.In('chromeos-images:/var/www/prebuilt/'))
    cbuildbot.RunCommand(check, cwd='%s/src/scripts' % self._buildroot)
    self.mox.ReplayAll()
    cbuildbot._UploadPrebuilts(self._buildroot, self._test_board, 'private',
                               binhosts)
    self.mox.VerifyAll()
# Run the test suite when invoked directly.
if __name__ == '__main__':
  unittest.main()

View File

@ -1 +0,0 @@
cros_mark_chrome_as_stable.py

View File

@ -1,357 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This module uprevs Chrome for cbuildbot.
After calling, it prints outs CHROME_VERSION_ATOM=(version atom string). A
caller could then use this atom with emerge to build the newly uprevved version
of Chrome e.g.
./cros_mark_chrome_as_stable tot
Returns chrome-base/chromeos-chrome-8.0.552.0_alpha_r1
emerge-x86-generic =chrome-base/chromeos-chrome-8.0.552.0_alpha_r1
"""
import optparse
import os
import re
import sys
import urllib
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import cros_mark_as_stable
sys.path.append(os.path.join(os.path.dirname(__file__), '../lib'))
from cros_build_lib import RunCommand, Info, Warning
# Root of the Chrome svn repository used for version/revision lookups.
BASE_CHROME_SVN_URL = 'http://src.chromium.org/svn'

# Command for which chrome ebuild to uprev.
TIP_OF_TRUNK, LATEST_RELEASE, STICKY = 'tot', 'latest_release', 'stable_release'
CHROME_REV = [TIP_OF_TRUNK, LATEST_RELEASE, STICKY]

# Helper regex's for finding ebuilds.
_CHROME_VERSION_REGEX = '\d+\.\d+\.\d+\.\d+'
# Matches versions carrying an _rc/_alpha suffix (i.e. non-sticky ebuilds).
_NON_STICKY_REGEX = '%s[(_rc.*)|(_alpha.*)]+' % _CHROME_VERSION_REGEX

# Dir where all the action happens.
_CHROME_OVERLAY_DIR = ('%(srcroot)s/third_party/chromiumos-overlay'
                       '/chromeos-base/chromeos-chrome')

# Template for the git commit message written by CommitChange below.
_GIT_COMMIT_MESSAGE = ('Marking %(chrome_rev)s for chrome ebuild with version '
                       '%(chrome_version)s as stable.')
def _GetSvnUrl():
  """Returns the path to the svn url for the given chrome branch."""
  trunk_url = os.path.join(BASE_CHROME_SVN_URL, 'trunk')
  return trunk_url
def _GetTipOfTrunkSvnRevision():
  """Returns the current svn revision for the chrome tree.

  Runs 'svn info' against the trunk url and extracts the 'Revision:' field.

  Raises:
    Exception: if no Revision line is present in the svn output.
  """
  svn_url = _GetSvnUrl()
  revision_pattern = re.compile(r'^Revision:\s+(\d+).*')
  svn_output = RunCommand(['svn', 'info', svn_url], redirect_stdout=True)
  for info_line in svn_output.splitlines():
    found = revision_pattern.search(info_line)
    if not found:
      continue
    svn_revision = found.group(1)
    Info('Using SVN Revision %s' % svn_revision)
    return svn_revision
  raise Exception('Could not find revision information from %s' % svn_url)
def _GetTipOfTrunkVersion():
  """Returns the current Chrome version.

  Fetches src/chrome/VERSION from the svn tree and joins the values on the
  right of each '=' with dots.
  """
  svn_url = _GetSvnUrl()
  version_url = os.path.join(svn_url, 'src', 'chrome', 'VERSION')
  chrome_version_file = urllib.urlopen(version_url)
  chrome_version_info = chrome_version_file.read()
  chrome_version_file.close()

  # Sanity check.
  if '404 Not Found' in chrome_version_info:
    raise Exception('Url %s does not have version file.' % svn_url)

  return '.'.join(line.rpartition('=')[2]
                  for line in chrome_version_info.splitlines())
def _GetLatestRelease(branch=None):
  """Gets the latest release version from the buildspec_url for the branch.

  Args:
    branch: If set, gets the latest release for branch, otherwise latest
      release.
  Returns:
    Latest version string.
  """
  buildspec_url = 'http://src.chromium.org/svn/releases'
  svn_ls = RunCommand(['svn', 'ls', buildspec_url], redirect_stdout=True)
  # Version-sort ascending so the last matching line below is the highest
  # (latest) release.
  sorted_ls = RunCommand(['sort', '--version-sort'], input=svn_ls,
                         redirect_stdout=True)
  if branch:
    chrome_version_re = re.compile('^%s\.\d+.*' % branch)
  else:
    chrome_version_re = re.compile('^[0-9]+\..*')
  for chrome_version in sorted_ls.splitlines():
    if chrome_version_re.match(chrome_version):
      current_version = chrome_version
  # NOTE(review): if no listing entry matched, current_version is unbound
  # here and this raises UnboundLocalError -- presumably the release listing
  # always contains at least one match; verify against callers.
  return current_version.rstrip('/')
def _GetStickyEBuild(stable_ebuilds):
  """Returns the sticky ebuild.

  The sticky ebuild is the stable ebuild whose version carries no
  _rc/_alpha suffix (i.e. does not match _NON_STICKY_REGEX).

  Raises:
    Exception: if no sticky ebuild is present in stable_ebuilds.
  """
  non_sticky_re = re.compile(_NON_STICKY_REGEX)
  sticky_ebuilds = [ebuild for ebuild in stable_ebuilds
                    if not non_sticky_re.match(ebuild.version)]
  if not sticky_ebuilds:
    raise Exception('No sticky ebuilds found')
  if len(sticky_ebuilds) > 1:
    Warning('More than one sticky ebuild found')
  return cros_mark_as_stable.BestEBuild(sticky_ebuilds)
class ChromeEBuild(cros_mark_as_stable.EBuild):
  """Thin sub-class of EBuild that adds a chrome_version field."""
  # Extracts the chrome version (or the '9999' workon placeholder) embedded
  # in an ebuild path.
  chrome_version_re = re.compile('.*chromeos-chrome-(%s|9999).*' % (
      _CHROME_VERSION_REGEX))
  # Version string parsed from the path; stays '' when the path doesn't match.
  chrome_version = ''

  def __init__(self, path):
    cros_mark_as_stable.EBuild.__init__(self, path)
    re_match = self.chrome_version_re.match(self.ebuild_path_no_revision)
    if re_match:
      self.chrome_version = re_match.group(1)

  def __cmp__(self, other):
    """Use ebuild paths for comparison."""
    # NOTE: __cmp__ is Python 2 only; ordering is plain string comparison
    # of the full ebuild paths.
    if self.ebuild_path == other.ebuild_path:
      return 0
    elif self.ebuild_path > other.ebuild_path:
      return 1
    else:
      return (-1)

  def __str__(self):
    return self.ebuild_path
def FindChromeCandidates(overlay_dir):
  """Return a tuple of chrome's unstable ebuild and stable ebuilds.

  Args:
    overlay_dir: The path to chrome's portage overlay dir.
  Returns:
    Tuple [unstable_ebuild, stable_ebuilds].
  Raises:
    Exception: if no unstable ebuild exists for Chrome.
  """
  stable_ebuilds = []
  unstable_ebuilds = []
  for path in [
      os.path.join(overlay_dir, entry) for entry in os.listdir(overlay_dir)]:
    if path.endswith('.ebuild'):
      ebuild = ChromeEBuild(path)
      if not ebuild.chrome_version:
        Warning('Poorly formatted ebuild found at %s' % path)
      else:
        # A '9999' version marks the unstable (cros-workon) ebuild.
        if '9999' in ebuild.version:
          unstable_ebuilds.append(ebuild)
        else:
          stable_ebuilds.append(ebuild)

  # Apply some sanity checks.
  if not unstable_ebuilds:
    raise Exception('Missing 9999 ebuild for %s' % overlay_dir)
  if not stable_ebuilds:
    Warning('Missing stable ebuild for %s' % overlay_dir)
  return cros_mark_as_stable.BestEBuild(unstable_ebuilds), stable_ebuilds
def FindChromeUprevCandidate(stable_ebuilds, chrome_rev, sticky_branch):
  """Finds the Chrome uprev candidate for the given chrome_rev.

  Using the pre-flight logic, this means the stable ebuild you are uprevving
  from.  The difference here is that the version could be different and in
  that case we want to find it to delete it.

  Args:
    stable_ebuilds: A list of stable ebuilds.
    chrome_rev: The chrome_rev designating which candidate to find.
    sticky_branch: The branch that is currently sticky with Major/Minor
      components.  For example: 9.0.553
  Returns:
    Returns the EBuild, otherwise None if none found.
  """
  candidates = []
  if chrome_rev == TIP_OF_TRUNK:
    # TOT ebuilds carry the _alpha suffix.
    chrome_branch_re = re.compile('%s.*_alpha.*' % _CHROME_VERSION_REGEX)
    for ebuild in stable_ebuilds:
      if chrome_branch_re.search(ebuild.version):
        candidates.append(ebuild)
  elif chrome_rev == STICKY:
    # Sticky candidates are the ones on the sticky branch itself.
    chrome_branch_re = re.compile('%s\..*' % sticky_branch)
    for ebuild in stable_ebuilds:
      if chrome_branch_re.search(ebuild.version):
        candidates.append(ebuild)
  else:
    # LATEST_RELEASE: _rc ebuilds that are NOT on the sticky branch.
    chrome_branch_re = re.compile('%s.*_rc.*' % _CHROME_VERSION_REGEX)
    for ebuild in stable_ebuilds:
      if chrome_branch_re.search(ebuild.version) and (
          not ebuild.chrome_version.startswith(sticky_branch)):
        candidates.append(ebuild)
  if candidates:
    return cros_mark_as_stable.BestEBuild(candidates)
  else:
    return None
def MarkChromeEBuildAsStable(stable_candidate, unstable_ebuild, chrome_rev,
                             chrome_version, commit, overlay_dir,
                             sticky_ebuild):
  """Uprevs the chrome ebuild specified by chrome_rev.

  This is the main function that uprevs the chrome_rev from a stable candidate
  to its new version.

  Args:
    stable_candidate: ebuild that corresponds to the stable ebuild we are
      revving from.  If None, builds a new ebuild given the version
      and logic for chrome_rev type with revision set to 1.
    unstable_ebuild: ebuild corresponding to the unstable ebuild for chrome.
    chrome_rev: one of CHROME_REV
      TIP_OF_TRUNK - Requires commit value.  Revs the ebuild for the TOT
        version and uses the portage suffix of _alpha.
      LATEST_RELEASE - This uses the portage suffix of _rc as they are release
        candidates for the next sticky version.
      STICKY - Revs the sticky version.
    chrome_version: The \d.\d.\d.\d version of Chrome.
    commit: Used with TIP_OF_TRUNK.  The svn revision of chrome.
    overlay_dir: Path to the chromeos-chrome package dir.
    sticky_ebuild: EBuild class for the sticky ebuild.
  Returns:
    Full portage version atom (including rc's, etc) that was revved,
    or None if there was nothing to do.
  """
  base_path = os.path.join(overlay_dir, 'chromeos-chrome-%s' % chrome_version)
  # Case where we have the last stable candidate with same version just rev.
  if stable_candidate and stable_candidate.chrome_version == chrome_version:
    new_ebuild_path = '%s-r%d.ebuild' % (
        stable_candidate.ebuild_path_no_revision,
        stable_candidate.current_revision + 1)
  else:
    # New version: pick the portage suffix by rev type and start at -r1.
    if chrome_rev == TIP_OF_TRUNK:
      portage_suffix = '_alpha'
    else:
      portage_suffix = '_rc'
    new_ebuild_path = base_path + ('%s-r1.ebuild' % portage_suffix)

  # Mark latest release and sticky branches as stable.
  mark_stable = chrome_rev != TIP_OF_TRUNK

  cros_mark_as_stable.EBuildStableMarker.MarkAsStable(
      unstable_ebuild.ebuild_path, new_ebuild_path, 'CROS_SVN_COMMIT', commit,
      make_stable=mark_stable)
  new_ebuild = ChromeEBuild(new_ebuild_path)

  # If the new ebuild is content-identical to the previous one (ignoring
  # blank-line differences), discard it and bail out without committing.
  if stable_candidate and (
      stable_candidate.chrome_version == new_ebuild.chrome_version):
    if 0 == RunCommand(['diff', '-Bu', stable_candidate.ebuild_path,
                        new_ebuild_path],
                       redirect_stderr=True,
                       redirect_stdout=True,
                       exit_code=True):
      Info('Previous ebuild with same version found and no 9999 changes found.'
           ' Nothing to do.')
      os.unlink(new_ebuild_path)
      return None

  # Stage the new ebuild, drop the superseded one (never the sticky ebuild),
  # and commit.
  RunCommand(['git', 'add', new_ebuild_path])
  if stable_candidate and stable_candidate != sticky_ebuild:
    RunCommand(['git', 'rm', stable_candidate.ebuild_path])
  cros_mark_as_stable.EBuildStableMarker.CommitChange(
      _GIT_COMMIT_MESSAGE % {'chrome_rev': chrome_rev,
                             'chrome_version': chrome_version})
  new_ebuild = ChromeEBuild(new_ebuild_path)
  return '%s-%s' % (new_ebuild.package, new_ebuild.version)
def main():
  """Parse options, locate the uprev candidate and mark it stable."""
  usage = '%s OPTIONS [%s]' % (__file__, '|'.join(CHROME_REV))
  parser = optparse.OptionParser(usage)
  parser.add_option('-s', '--srcroot', default=os.path.join(os.environ['HOME'],
                                                            'trunk', 'src'),
                    help='Path to the src directory')
  parser.add_option('-t', '--tracking_branch', default='cros/master',
                    help='Branch we are tracking changes against')
  (options, args) = parser.parse_args()

  if len(args) != 1 or args[0] not in CHROME_REV:
    parser.error('Commit requires arg set to one of %s.' % CHROME_REV)

  overlay_dir = os.path.abspath(_CHROME_OVERLAY_DIR %
                                {'srcroot': options.srcroot})
  chrome_rev = args[0]
  version_to_uprev = None
  commit_to_use = None

  # Derive the sticky branch (Major.Minor.Build) from the sticky ebuild.
  (unstable_ebuild, stable_ebuilds) = FindChromeCandidates(overlay_dir)
  sticky_ebuild = _GetStickyEBuild(stable_ebuilds)
  sticky_version = sticky_ebuild.chrome_version
  sticky_branch = sticky_version.rpartition('.')[0]

  if chrome_rev == TIP_OF_TRUNK:
    version_to_uprev = _GetTipOfTrunkVersion()
    commit_to_use = _GetTipOfTrunkSvnRevision()
  elif chrome_rev == LATEST_RELEASE:
    version_to_uprev = _GetLatestRelease()
    # Don't rev on stable branch for latest_release.
    if re.match('%s\.\d+' % sticky_branch, version_to_uprev):
      Info('Latest release is sticky branch. Nothing to do.')
      return
  else:
    version_to_uprev = _GetLatestRelease(sticky_branch)

  stable_candidate = FindChromeUprevCandidate(stable_ebuilds, chrome_rev,
                                              sticky_branch)

  if stable_candidate:
    Info('Stable candidate found %s' % stable_candidate)
  else:
    Info('No stable candidate found.')

  # All git work happens inside the overlay on a scratch stabilization branch.
  os.chdir(overlay_dir)
  work_branch = cros_mark_as_stable.GitBranch(
      cros_mark_as_stable.STABLE_BRANCH_NAME, options.tracking_branch)
  work_branch.CreateBranch()
  chrome_version_atom = MarkChromeEBuildAsStable(
      stable_candidate, unstable_ebuild, chrome_rev, version_to_uprev,
      commit_to_use, overlay_dir, sticky_ebuild)
  # Explicit print to communicate to caller.
  if chrome_version_atom:
    print 'CHROME_VERSION_ATOM=%s' % chrome_version_atom
# Script entry point.
if __name__ == '__main__':
  main()

View File

@ -1,270 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for cros_mark_chrome_as_stable.py."""
import cros_mark_chrome_as_stable
import mox
import os
import shutil
import sys
import tempfile
import unittest
import urllib
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import cros_mark_as_stable
# KEYWORDS lines used as fake ebuild contents in the tests below; a leading
# '~' marks an unstable keyword.
unstable_data = 'KEYWORDS=~x86 ~arm'
stable_data = 'KEYWORDS=x86 arm'
def _TouchAndWrite(path, data=None):
"""Writes data (if it exists) to the file specified by the path."""
fh = open(path, 'w')
if data:
fh.write(data)
fh.close()
class CrosMarkChromeAsStable(mox.MoxTestBase):
def setUp(self):
"""Setup vars and create mock dir."""
mox.MoxTestBase.setUp(self)
self.tmp_overlay = tempfile.mkdtemp(prefix='chromiumos-overlay')
self.mock_chrome_dir = os.path.join(self.tmp_overlay, 'chromeos-base',
'chromeos-chrome')
os.makedirs(self.mock_chrome_dir)
self.unstable = os.path.join(self.mock_chrome_dir,
'chromeos-chrome-9999.ebuild')
self.sticky_branch = '8.0.224'
self.sticky_version = '%s.503' % self.sticky_branch
self.sticky = os.path.join(self.mock_chrome_dir,
'chromeos-chrome-%s.ebuild' %
self.sticky_version)
self.sticky_rc_version = '%s.504' % self.sticky_branch
self.sticky_rc = os.path.join(self.mock_chrome_dir,
'chromeos-chrome-%s_rc-r1.ebuild' %
self.sticky_rc_version)
self.latest_stable_version = '8.0.300.1'
self.latest_stable = os.path.join(self.mock_chrome_dir,
'chromeos-chrome-%s_rc-r2.ebuild' %
self.latest_stable_version)
self.tot_stable_version = '9.0.305.0'
self.tot_stable = os.path.join(self.mock_chrome_dir,
'chromeos-chrome-%s_alpha-r1.ebuild' %
self.tot_stable_version)
self.sticky_new_rc_version = '%s.520' % self.sticky_branch
self.sticky_new_rc = os.path.join(self.mock_chrome_dir,
'chromeos-chrome-%s_rc-r1.ebuild' %
self.sticky_new_rc_version)
self.latest_new_version = '9.0.305.1'
self.latest_new = os.path.join(self.mock_chrome_dir,
'chromeos-chrome-%s_rc-r1.ebuild' %
self.latest_new_version)
self.tot_new_version = '9.0.306.0'
self.tot_new = os.path.join(self.mock_chrome_dir,
'chromeos-chrome-%s_alpha-r1.ebuild' %
self.tot_new_version)
_TouchAndWrite(self.unstable, unstable_data)
_TouchAndWrite(self.sticky, stable_data)
_TouchAndWrite(self.sticky_rc, stable_data)
_TouchAndWrite(self.latest_stable, stable_data)
_TouchAndWrite(self.tot_stable, stable_data)
# NOTE(review): this chunk is the tail of a mox-based TestCase whose class
# header and the beginning of setUp() are above the visible region; all
# self.* fixtures (mock_chrome_dir, sticky*, latest*, tot*, ...) come from
# that setUp.
def tearDown(self):
  """Cleans up mock dir."""
  # tmp_overlay is the temp root created in setUp; all mock ebuilds live there.
  shutil.rmtree(self.tmp_overlay)
def testFindChromeCandidates(self):
  """Test creation of stable ebuilds from mock dir."""
  unstable, stable_ebuilds = cros_mark_chrome_as_stable.FindChromeCandidates(
      self.mock_chrome_dir)
  # Exactly one unstable (alpha/9999-style) ebuild and four stable ones
  # are created by setUp.
  self.assertEqual(unstable.ebuild_path, self.unstable)
  self.assertEqual(len(stable_ebuilds), 4)
  self.assertTrue(cros_mark_chrome_as_stable.ChromeEBuild(self.sticky) in
                  stable_ebuilds)
  self.assertTrue(cros_mark_chrome_as_stable.ChromeEBuild(self.sticky_rc) in
                  stable_ebuilds)
  self.assertTrue(cros_mark_chrome_as_stable.ChromeEBuild(self.latest_stable)
                  in stable_ebuilds)
  self.assertTrue(cros_mark_chrome_as_stable.ChromeEBuild(self.tot_stable) in
                  stable_ebuilds)
def _GetStableEBuilds(self):
  """Common helper to create a list of stable ebuilds."""
  return [
      cros_mark_chrome_as_stable.ChromeEBuild(self.sticky),
      cros_mark_chrome_as_stable.ChromeEBuild(self.sticky_rc),
      cros_mark_chrome_as_stable.ChromeEBuild(self.latest_stable),
      cros_mark_chrome_as_stable.ChromeEBuild(self.tot_stable),
  ]
def testTOTFindChromeUprevCandidate(self):
  """Tests if we can find tot uprev candidate from our mock dir data."""
  stable_ebuilds = self._GetStableEBuilds()
  candidate = cros_mark_chrome_as_stable.FindChromeUprevCandidate(
      stable_ebuilds, cros_mark_chrome_as_stable.TIP_OF_TRUNK,
      self.sticky_branch)
  self.assertEqual(candidate.ebuild_path, self.tot_stable)
def testLatestFindChromeUprevCandidate(self):
  """Tests if we can find latest uprev candidate from our mock dir data."""
  stable_ebuilds = self._GetStableEBuilds()
  candidate = cros_mark_chrome_as_stable.FindChromeUprevCandidate(
      stable_ebuilds, cros_mark_chrome_as_stable.LATEST_RELEASE,
      self.sticky_branch)
  self.assertEqual(candidate.ebuild_path, self.latest_stable)
def testStickyFindChromeUprevCandidate(self):
  """Tests if we can find sticky uprev candidate from our mock dir data."""
  stable_ebuilds = self._GetStableEBuilds()
  candidate = cros_mark_chrome_as_stable.FindChromeUprevCandidate(
      stable_ebuilds, cros_mark_chrome_as_stable.STICKY,
      self.sticky_branch)
  # STICKY resolves to the rc ebuild on the sticky branch, not the base one.
  self.assertEqual(candidate.ebuild_path, self.sticky_rc)
def testGetTipOfTrunkSvnRevision(self):
  """Tests if we can get the latest svn revision from TOT."""
  self.mox.StubOutWithMock(cros_mark_chrome_as_stable, 'RunCommand')
  cros_mark_chrome_as_stable.RunCommand(
      ['svn', 'info', cros_mark_chrome_as_stable._GetSvnUrl()],
      redirect_stdout=True).AndReturn(
          'Some Junk 2134\nRevision: 12345\nOtherInfo: test_data')
  self.mox.ReplayAll()
  revision = cros_mark_chrome_as_stable._GetTipOfTrunkSvnRevision()
  self.mox.VerifyAll()
  # The revision is parsed out of the 'Revision:' line of `svn info` output.
  self.assertEquals(revision, '12345')
def testGetTipOfTrunkVersion(self):
  """Tests if we get the latest version from TOT."""
  self.mox.StubOutWithMock(urllib, 'urlopen')
  mock_file = self.mox.CreateMock(file)
  urllib.urlopen(os.path.join(cros_mark_chrome_as_stable._GetSvnUrl(), 'src',
                              'chrome', 'VERSION')).AndReturn(mock_file)
  mock_file.read().AndReturn('A=8\nB=0\nC=256\nD=0')
  mock_file.close()
  self.mox.ReplayAll()
  version = cros_mark_chrome_as_stable._GetTipOfTrunkVersion()
  self.mox.VerifyAll()
  # MAJOR.MINOR.BUILD.PATCH assembled from the A/B/C/D keys of VERSION.
  self.assertEquals(version, '8.0.256.0')
def testGetLatestRelease(self):
  """Tests if we can find the latest release from our mock url data."""
  test_data = '\n'.join(['7.0.224.1/',
                         '7.0.224.2/',
                         '8.0.365.5/',
                         'LATEST.txt'])
  self.mox.StubOutWithMock(cros_mark_chrome_as_stable, 'RunCommand')
  cros_mark_chrome_as_stable.RunCommand(
      ['svn', 'ls', 'http://src.chromium.org/svn/releases'],
      redirect_stdout=True).AndReturn('some_data')
  # The listing is piped through `sort --version-sort`; the implementation
  # picks the highest version from the sorted output.
  cros_mark_chrome_as_stable.RunCommand(
      ['sort', '--version-sort'], input='some_data',
      redirect_stdout=True).AndReturn(test_data)
  self.mox.ReplayAll()
  release = cros_mark_chrome_as_stable._GetLatestRelease()
  self.mox.VerifyAll()
  self.assertEqual('8.0.365.5', release)
def testGetLatestStickyRelease(self):
  """Tests if we can find the latest sticky release from our mock url data."""
  test_data = '\n'.join(['7.0.222.1/',
                         '8.0.224.2/',
                         '8.0.365.5/',
                         'LATEST.txt'])
  self.mox.StubOutWithMock(cros_mark_chrome_as_stable, 'RunCommand')
  cros_mark_chrome_as_stable.RunCommand(
      ['svn', 'ls', 'http://src.chromium.org/svn/releases'],
      redirect_stdout=True).AndReturn('some_data')
  cros_mark_chrome_as_stable.RunCommand(
      ['sort', '--version-sort'], input='some_data',
      redirect_stdout=True).AndReturn(test_data)
  self.mox.ReplayAll()
  # Passing the sticky branch restricts the result to that branch's releases.
  release = cros_mark_chrome_as_stable._GetLatestRelease(self.sticky_branch)
  self.mox.VerifyAll()
  self.assertEqual('8.0.224.2', release)
def testStickyEBuild(self):
  """Tests if we can find the sticky ebuild from our mock directories."""
  stable_ebuilds = self._GetStableEBuilds()
  sticky_ebuild = cros_mark_chrome_as_stable._GetStickyEBuild(
      stable_ebuilds)
  self.assertEqual(sticky_ebuild.chrome_version, self.sticky_version)
def testChromeEBuildInit(self):
  """Tests if the chrome_version is set correctly in a ChromeEBuild."""
  ebuild = cros_mark_chrome_as_stable.ChromeEBuild(self.sticky)
  self.assertEqual(ebuild.chrome_version, self.sticky_version)
def _CommonMarkAsStableTest(self, chrome_rev, new_version, old_ebuild_path,
                            new_ebuild_path, commit_string_indicator):
  """Common function used for test functions for MarkChromeEBuildAsStable.
  This function stubs out others calls, and runs MarkChromeEBuildAsStable
  with the specified args.
  Args:
    chrome_rev: standard chrome_rev argument
    new_version: version we are revving up to
    old_ebuild_path: path to the stable ebuild
    new_ebuild_path: path to the to be created path
    commit_string_indicator: a string that the commit message must contain
  """
  self.mox.StubOutWithMock(cros_mark_chrome_as_stable, 'RunCommand')
  self.mox.StubOutWithMock(cros_mark_as_stable.EBuildStableMarker,
                           'CommitChange')
  stable_candidate = cros_mark_chrome_as_stable.ChromeEBuild(old_ebuild_path)
  unstable_ebuild = cros_mark_chrome_as_stable.ChromeEBuild(self.unstable)
  sticky_ebuild = cros_mark_chrome_as_stable.ChromeEBuild(self.sticky)
  chrome_version = new_version
  commit = None
  overlay_dir = self.mock_chrome_dir
  # Expect the new ebuild to be git-added and the old one git-removed.
  cros_mark_chrome_as_stable.RunCommand(['git', 'add', new_ebuild_path])
  cros_mark_chrome_as_stable.RunCommand(['git', 'rm', old_ebuild_path])
  cros_mark_as_stable.EBuildStableMarker.CommitChange(
      mox.StrContains(commit_string_indicator))
  self.mox.ReplayAll()
  cros_mark_chrome_as_stable.MarkChromeEBuildAsStable(
      stable_candidate, unstable_ebuild, chrome_rev, chrome_version, commit,
      overlay_dir, sticky_ebuild)
  self.mox.VerifyAll()
def testStickyMarkAsStable(self):
  """Tests to see if we can mark chrome as stable for a new sticky release."""
  self._CommonMarkAsStableTest(cros_mark_chrome_as_stable.STICKY,
                               self.sticky_new_rc_version, self.sticky_rc,
                               self.sticky_new_rc, 'stable_release')
def testLatestMarkAsStable(self):
  """Tests to see if we can mark chrome for a latest release."""
  self._CommonMarkAsStableTest(cros_mark_chrome_as_stable.LATEST_RELEASE,
                               self.latest_new_version, self.latest_stable,
                               self.latest_new, 'latest_release')
def testTotMarkAsStable(self):
  """Tests to see if we can mark chrome for tot."""
  self._CommonMarkAsStableTest(cros_mark_chrome_as_stable.TIP_OF_TRUNK,
                               self.tot_new_version, self.tot_stable,
                               self.tot_new, 'tot')
if __name__ == '__main__':
  unittest.main()

View File

@ -1 +0,0 @@
cros_repo_sync_all.py

View File

@ -1,38 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Stop gap sync function until cbuildbot is integrated into all builders"""
import cbuildbot_comm
import cbuildbot
import optparse
import sys
"""Number of retries to retry repo sync before giving up"""
_NUMBER_OF_RETRIES = 3
def main():
  """Sync the buildroot: full checkout with --clobber, incremental otherwise."""
  parser = optparse.OptionParser()
  parser.add_option('-r', '--buildroot',
                    help='root directory where sync occurs')
  parser.add_option('-c', '--clobber', action='store_true', default=False,
                    help='clobber build directory and do a full checkout')
  parser.add_option('-t', '--tracking_branch', default='cros/master',
                    help='Branch to sync against for full checkouts.')
  (options, args) = parser.parse_args()
  if options.buildroot:
    if options.clobber:
      # Wipes the buildroot and re-checks-out against the tracking branch.
      cbuildbot._FullCheckout(options.buildroot, options.tracking_branch,
                              retries=_NUMBER_OF_RETRIES)
    else:
      cbuildbot._IncrementalCheckout(options.buildroot,
                                     retries=_NUMBER_OF_RETRIES)
  else:
    # --buildroot is mandatory; bail with a non-zero exit status.
    print >> sys.stderr, 'ERROR: Must set buildroot'
    sys.exit(1)
if __name__ == '__main__':
  main()

View File

@ -1 +0,0 @@
cros_mark_as_stable.py

View File

@ -1,597 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This module uprevs a given package's ebuild to the next revision."""
import fileinput
import gflags
import os
import re
import shutil
import subprocess
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), 'lib'))
from cros_build_lib import Info, RunCommand, Warning, Die
# ---- Command-line flags (python-gflags) -----------------------------------
gflags.DEFINE_boolean('all', False,
                      'Mark all packages as stable.')
gflags.DEFINE_string('board', '',
                     'Board for which the package belongs.', short_name='b')
gflags.DEFINE_string('drop_file', None,
                     'File to list packages that were revved.')
gflags.DEFINE_boolean('dryrun', False,
                      'Passes dry-run to git push if pushing a change.')
gflags.DEFINE_string('overlays', '',
                     'Colon-separated list of overlays to modify.',
                     short_name='o')
gflags.DEFINE_string('packages', '',
                     'Colon-separated list of packages to mark as stable.',
                     short_name='p')
gflags.DEFINE_string('srcroot', '%s/trunk/src' % os.environ['HOME'],
                     'Path to root src directory.',
                     short_name='r')
gflags.DEFINE_string('tracking_branch', 'cros/master',
                     'Used with commit to specify branch to track against.',
                     short_name='t')
gflags.DEFINE_boolean('verbose', False,
                      'Prints out verbose information about what is going on.',
                      short_name='v')
# Takes two strings, package_name and commit_id.
_GIT_COMMIT_MESSAGE = 'Marking 9999 ebuild for %s with commit %s as stable.'
# Dictionary of valid commands with usage information.
COMMAND_DICTIONARY = {
    'clean':
      'Cleans up previous calls to either commit or push',
    'commit':
      'Marks given ebuilds as stable locally',
    'push':
      'Pushes previous marking of ebuilds to remote repo',
}
# Name used for stabilizing branch.
STABLE_BRANCH_NAME = 'stabilizing_branch'
def BestEBuild(ebuilds):
  """Return the newest EBuild in *ebuilds*, compared by Portage version."""
  # Lazy import: portage is only available inside the chroot.
  from portage.versions import vercmp
  best = ebuilds[0]
  for candidate in ebuilds[1:]:
    # vercmp(a, b) < 0 means version a sorts before (is older than) b.
    if vercmp(best.version, candidate.version) < 0:
      best = candidate
  return best
# ======================= Global Helper Functions ========================
def _Print(message):
  """Log *message* through Info(), but only when --verbose is set."""
  if not gflags.FLAGS.verbose:
    return
  Info(message)
def _CleanStalePackages(board, package_atoms):
  """Unmerge the given atoms from board and host, then eclean binaries."""
  Info('Cleaning up stale packages %s.' % package_atoms)
  # Unmerge from the board sysroot first, then from the host.
  RunCommand(['emerge-%s' % board, '--unmerge'] + list(package_atoms))
  RunCommand(['sudo', 'emerge', '--unmerge'] + list(package_atoms))
  # Drop the now-orphaned binary packages for both environments.
  RunCommand(['eclean-%s' % board, '-d', 'packages'], redirect_stderr=True)
  RunCommand(['sudo', 'eclean', '-d', 'packages'], redirect_stderr=True)
def _FindUprevCandidates(files):
  """Return a list of uprev candidates from specified list of files.
  Usually an uprev candidate is a the stable ebuild in a cros_workon directory.
  However, if no such stable ebuild exists (someone just checked in the 9999
  ebuild), this is the unstable ebuild.
  Args:
    files: List of files.
  """
  workon_dir = False
  stable_ebuilds = []
  unstable_ebuilds = []
  # Classify every non-symlink .ebuild in the directory scan.
  for path in files:
    if path.endswith('.ebuild') and not os.path.islink(path):
      ebuild = EBuild(path)
      if ebuild.is_workon:
        workon_dir = True
      if ebuild.is_stable:
        stable_ebuilds.append(ebuild)
      else:
        unstable_ebuilds.append(ebuild)
  # If we found a workon ebuild in this directory, apply some sanity checks.
  # NOTE(review): `path` below is the loop variable left over from the scan;
  # callers pass files from a single package directory, so dirname(path)
  # names that directory — confirm against _BuildEBuildDictionary.
  if workon_dir:
    if len(unstable_ebuilds) > 1:
      Die('Found multiple unstable ebuilds in %s' % os.path.dirname(path))
    if len(stable_ebuilds) > 1:
      stable_ebuilds = [BestEBuild(stable_ebuilds)]
      # Print a warning if multiple stable ebuilds are found in the same
      # directory. Storing multiple stable ebuilds is error-prone because
      # the older ebuilds will not get rev'd.
      #
      # We make a special exception for x11-drivers/xf86-video-msm for legacy
      # reasons.
      if stable_ebuilds[0].package != 'x11-drivers/xf86-video-msm':
        Warning('Found multiple stable ebuilds in %s' % os.path.dirname(path))
    if not unstable_ebuilds:
      Die('Missing 9999 ebuild in %s' % os.path.dirname(path))
    if not stable_ebuilds:
      Warning('Missing stable ebuild in %s' % os.path.dirname(path))
      return unstable_ebuilds[0]
  if stable_ebuilds:
    return stable_ebuilds[0]
  else:
    return None
def _BuildEBuildDictionary(overlays, all, packages):
  """Build a dictionary of the ebuilds in the specified overlays.

  overlays: A map which maps overlay directories to arrays of stable EBuilds
    inside said directories.
  all: Whether to include all ebuilds in the specified directories. If true,
    then we gather all packages in the directories regardless of whether
    they are in our set of packages.
  packages: A set of the packages we want to gather.
  """
  for overlay in overlays:
    for package_dir, _dirs, files in os.walk(overlay):
      # Each package directory yields at most one uprev candidate.
      candidate = _FindUprevCandidates(
          [os.path.join(package_dir, name) for name in files])
      if not candidate:
        continue
      # Unless --all was given, only keep packages the caller asked for.
      if all or candidate.package in packages:
        overlays[overlay].append(candidate)
def _DoWeHaveLocalCommits(stable_branch, tracking_branch):
  """Return True iff stable_branch is checked out and ahead of tracking."""
  current_branch = _SimpleRunCommand('git branch | grep \*').split()[1]
  # Not on the stabilizing branch at all -> nothing local to push.
  if current_branch != stable_branch:
    return False
  head = _SimpleRunCommand('git rev-parse HEAD')
  tracked = _SimpleRunCommand('git rev-parse %s' % tracking_branch)
  return head != tracked
def _CheckSaneArguments(package_list, command):
  """Validate flags and command; dies via _PrintUsageAndDie on bad input."""
  flags = gflags.FLAGS
  if command not in COMMAND_DICTIONARY:
    _PrintUsageAndDie('%s is not a valid command' % command)
  # 'commit' needs either an explicit package list or --all, plus a board.
  if command == 'commit' and not flags.packages and not flags.all:
    _PrintUsageAndDie('Please specify at least one package')
  if command == 'commit' and not flags.board:
    _PrintUsageAndDie('Please specify a board')
  if not os.path.isdir(flags.srcroot):
    _PrintUsageAndDie('srcroot is not a valid path')
  # Normalize srcroot for all later path joins.
  flags.srcroot = os.path.abspath(flags.srcroot)
def _PrintUsageAndDie(error_message=''):
  """Print usage text plus optional error_message, then exit non-zero."""
  commands = sorted(COMMAND_DICTIONARY.keys())
  # Assemble one help line per command.
  command_usage = 'Commands: \n' + ''.join(
      ' %s: %s\n' % (cmd, COMMAND_DICTIONARY[cmd]) for cmd in commands)
  commands_str = '|'.join(commands)
  Warning('Usage: %s FLAGS [%s]\n\n%s\nFlags:%s' % (sys.argv[0], commands_str,
                                                    command_usage, gflags.FLAGS))
  if error_message:
    Die(error_message)
  else:
    sys.exit(1)
def _SimpleRunCommand(command):
  """Run *command* through the shell; return stdout, raise on failure."""
  _Print(' + %s' % command)
  proc = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
  output = proc.communicate()[0]
  status = proc.wait()
  if status != 0:
    # Surface the captured output (verbose mode only) before failing.
    _Print(output)
    raise subprocess.CalledProcessError(status, command)
  return output
# ======================= End Global Helper Functions ========================
def Clean(tracking_branch):
  """Clean up any uncommitted state left over from a previous run.

  Aborts an in-flight rebase (if any), hard-resets the work tree, and
  deletes the stabilizing branch if it exists.

  Args:
    tracking_branch: The tracking branch we want to return to after the call.
  """
  # Safety case in case we got into a bad state with a previous build.
  # Aborting the rebase is best-effort: it fails when no rebase is in
  # progress.  Only swallow that expected failure — the old bare `except:`
  # also ate KeyboardInterrupt/SystemExit.
  try:
    _SimpleRunCommand('git rebase --abort')
  except subprocess.CalledProcessError:
    pass
  _SimpleRunCommand('git reset HEAD --hard')
  branch = GitBranch(STABLE_BRANCH_NAME, tracking_branch)
  if branch.Exists():
    GitBranch.Checkout(branch)
    branch.Delete()
def PushChange(stable_branch, tracking_branch):
  """Pushes commits in the stable_branch to the remote git repository.

  Pushes locals commits from calls to CommitChange to the remote git
  repository specified by current working directory.  The stabilizing
  branch is squash-merged onto a freshly synced merge branch, committed
  with a combined description, and pushed (with retries).

  Args:
    stable_branch: The local branch with commits we want to push.
    tracking_branch: The tracking branch of the local branch.
  Raises:
    OSError: Error occurred while pushing.
  """
  num_retries = 5
  # Sanity check to make sure we're on a stabilizing branch before pushing.
  if not _DoWeHaveLocalCommits(stable_branch, tracking_branch):
    # Message typo fixed: was 'Not work found to push.'
    Info('No work found to push. Exiting')
    return
  description = _SimpleRunCommand('git log --format=format:%s%n%n%b ' +
                                  tracking_branch + '..')
  description = 'Marking set of ebuilds as stable\n\n%s' % description
  Info('Using description %s' % description)
  merge_branch_name = 'merge_branch'
  for push_try in range(num_retries + 1):
    try:
      # Rebuild the merge branch from a fresh sync each attempt.
      merge_branch = GitBranch(merge_branch_name, tracking_branch)
      if merge_branch.Exists():
        merge_branch.Delete()
      _SimpleRunCommand('repo sync .')
      merge_branch.CreateBranch()
      if not merge_branch.Exists():
        Die('Unable to create merge branch.')
      _SimpleRunCommand('git merge --squash %s' % stable_branch)
      _SimpleRunCommand('git commit -m "%s"' % description)
      _SimpleRunCommand('git config push.default tracking')
      if gflags.FLAGS.dryrun:
        _SimpleRunCommand('git push --dry-run')
      else:
        _SimpleRunCommand('git push')
      break
    # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate.
    except Exception:
      if push_try < num_retries:
        Warning('Failed to push change, performing retry (%s/%s)' % (
            push_try + 1, num_retries))
      else:
        raise
class GitBranch(object):
  """Thin wrapper around git branch create/checkout/delete operations."""

  def __init__(self, branch_name, tracking_branch):
    """Record the names only; nothing touches git until Checkout/CreateBranch."""
    self.branch_name = branch_name
    self.tracking_branch = tracking_branch

  def CreateBranch(self):
    GitBranch.Checkout(self)

  @classmethod
  def Checkout(cls, target):
    """Check out *target*, creating it off its tracking branch if needed."""
    needs_create = (target.branch_name != target.tracking_branch and
                    not target.Exists())
    if needs_create:
      git_cmd = 'git checkout -b %s %s -f' % (target.branch_name,
                                              target.tracking_branch)
    else:
      git_cmd = 'git checkout %s -f' % target.branch_name
    _SimpleRunCommand(git_cmd)

  def Exists(self):
    """Returns True if the branch exists."""
    # `git branch` lists local branches; a whitespace split is enough here.
    return self.branch_name in _SimpleRunCommand('git branch').split()

  def Delete(self):
    """Deletes the branch and returns the user to the master branch.
    Returns True on success.
    """
    GitBranch.Checkout(GitBranch(self.tracking_branch, self.tracking_branch))
    _SimpleRunCommand('git branch -D %s' % self.branch_name)
class EBuild(object):
  """Wrapper class for information about an ebuild."""
  def __init__(self, path):
    """Sets up data about an ebuild from its path."""
    # Lazy import: portage is only available inside the chroot.
    from portage.versions import pkgsplit
    # .../<category>/<pkgname>/<filename>.ebuild
    unused_path, self.category, self.pkgname, filename = path.rsplit('/', 3)
    unused_pkgname, self.version_no_rev, rev = pkgsplit(
        filename.replace('.ebuild', ''))
    self.ebuild_path_no_version = os.path.join(
        os.path.dirname(path), self.pkgname)
    self.ebuild_path_no_revision = '%s-%s' % (self.ebuild_path_no_version,
                                              self.version_no_rev)
    self.current_revision = int(rev.replace('r', ''))
    self.version = '%s-%s' % (self.version_no_rev, rev)
    self.package = '%s/%s' % (self.category, self.pkgname)
    self.ebuild_path = path
    self.is_workon = False
    self.is_stable = False
    # Scan the ebuild text: cros-workon ebuilds inherit cros-workon; an
    # ebuild counts as stable when its KEYWORDS line has no '~' and names
    # one of amd64/x86/arm.
    for line in fileinput.input(path):
      if line.startswith('inherit ') and 'cros-workon' in line:
        self.is_workon = True
      elif (line.startswith('KEYWORDS=') and '~' not in line and
            ('amd64' in line or 'x86' in line or 'arm' in line)):
        self.is_stable = True
    fileinput.close()
  def GetCommitId(self):
    """Get the commit id for this ebuild."""
    # Grab and evaluate CROS_WORKON variables from this ebuild.
    unstable_ebuild = '%s-9999.ebuild' % self.ebuild_path_no_version
    cmd = ('export CROS_WORKON_LOCALNAME="%s" CROS_WORKON_PROJECT="%s"; '
           'eval $(grep -E "^CROS_WORKON" %s) && '
           'echo $CROS_WORKON_PROJECT '
           '$CROS_WORKON_LOCALNAME/$CROS_WORKON_SUBDIR'
           % (self.pkgname, self.pkgname, unstable_ebuild))
    project, subdir = _SimpleRunCommand(cmd).split()
    # Calculate srcdir.
    srcroot = gflags.FLAGS.srcroot
    if self.category == 'chromeos-base':
      dir = 'platform'
    else:
      dir = 'third_party'
    srcdir = os.path.join(srcroot, dir, subdir)
    if not os.path.isdir(srcdir):
      Die('Cannot find commit id for %s' % self.ebuild_path)
    # Verify that we're grabbing the commit id from the right project name.
    # NOTE: chromeos-kernel has the wrong project name, so it fails this
    # check.
    # TODO(davidjames): Fix the project name in the chromeos-kernel ebuild.
    cmd = 'cd %s && git config --get remote.cros.projectname' % srcdir
    actual_project = _SimpleRunCommand(cmd).rstrip()
    if project not in (actual_project, 'chromeos-kernel'):
      Die('Project name mismatch for %s (%s != %s)' % (unstable_ebuild, project,
                                                       actual_project))
    # Get commit id.
    output = _SimpleRunCommand('cd %s && git rev-parse HEAD' % srcdir)
    if not output:
      Die('Missing commit id for %s' % self.ebuild_path)
    return output.rstrip()
class EBuildStableMarker(object):
  """Class that revs the ebuild and commits locally or pushes the change."""

  def __init__(self, ebuild):
    assert ebuild
    self._ebuild = ebuild

  @classmethod
  def MarkAsStable(cls, unstable_ebuild_path, new_stable_ebuild_path,
                   commit_keyword, commit_value, redirect_file=None,
                   make_stable=True):
    """Static function that creates a revved stable ebuild.

    This function assumes you have already figured out the name of the new
    stable ebuild path and then creates that file from the given unstable
    ebuild and marks it as stable.  If the commit_value is set, it also
    set the commit_keyword=commit_value pair in the ebuild.

    Args:
      unstable_ebuild_path: The path to the unstable ebuild.
      new_stable_ebuild_path: The path you want to use for the new stable
        ebuild.
      commit_keyword: Optional keyword to set in the ebuild to mark it as
        stable.
      commit_value: Value to set the above keyword to.
      redirect_file: Optionally redirect output of new ebuild somewhere else.
      make_stable: Actually make the ebuild stable.
    """
    shutil.copyfile(unstable_ebuild_path, new_stable_ebuild_path)
    for line in fileinput.input(new_stable_ebuild_path, inplace=1):
      # Has to be done here to get changes to sys.stdout from fileinput.input
      # (with inplace=1, fileinput swaps sys.stdout for the output file).
      if not redirect_file:
        redirect_file = sys.stdout
      if line.startswith('KEYWORDS'):
        # Actually mark this file as stable by removing ~'s.
        if make_stable:
          redirect_file.write(line.replace('~', ''))
        else:
          redirect_file.write(line)
      elif line.startswith('EAPI'):
        # Always add new commit_id after EAPI definition.
        redirect_file.write(line)
        if commit_keyword and commit_value:
          redirect_file.write('%s="%s"\n' % (commit_keyword, commit_value))
      elif commit_keyword and line.startswith(commit_keyword):
        # Skip old commit_keyword definition; it was re-emitted after EAPI.
        pass
      else:
        # BUG FIX: the previous code evaluated line.startswith(commit_keyword)
        # unconditionally, so commit_keyword=None raised TypeError and
        # commit_keyword='' silently dropped every ordinary line.  Ordinary
        # lines are now always copied through.
        redirect_file.write(line)
    fileinput.close()

  def RevWorkOnEBuild(self, commit_id, redirect_file=None):
    """Revs a workon ebuild given the git commit hash.

    By default this class overwrites a new ebuild given the normal
    ebuild rev'ing logic.  However, a user can specify a redirect_file
    to redirect the new stable ebuild to another file.

    Args:
      commit_id: String corresponding to the commit hash of the developer
        package to rev.
      redirect_file: Optional file to write the new ebuild.  By default
        it is written using the standard rev'ing logic.  This file must be
        opened and closed by the caller.
    Raises:
      OSError: Error occurred while creating a new ebuild.
      IOError: Error occurred while writing to the new revved ebuild file.
    Returns:
      If the revved package is different than the old ebuild, return the full
      revved package name, including the version number. Otherwise, return
      None.
    """
    if self._ebuild.is_stable:
      stable_version_no_rev = self._ebuild.version_no_rev
    else:
      # If given unstable ebuild, use 0.0.1 rather than 9999.
      stable_version_no_rev = '0.0.1'
    new_version = '%s-r%d' % (stable_version_no_rev,
                              self._ebuild.current_revision + 1)
    new_stable_ebuild_path = '%s-%s.ebuild' % (
        self._ebuild.ebuild_path_no_version, new_version)
    _Print('Creating new stable ebuild %s' % new_stable_ebuild_path)
    unstable_ebuild_path = ('%s-9999.ebuild' %
                            self._ebuild.ebuild_path_no_version)
    if not os.path.exists(unstable_ebuild_path):
      Die('Missing unstable ebuild: %s' % unstable_ebuild_path)
    self.MarkAsStable(unstable_ebuild_path, new_stable_ebuild_path,
                      'CROS_WORKON_COMMIT', commit_id, redirect_file)
    old_ebuild_path = self._ebuild.ebuild_path
    # A zero diff (ignoring blank lines) means nothing changed: drop the
    # new file and report nothing to commit.
    diff_cmd = ['diff', '-Bu', old_ebuild_path, new_stable_ebuild_path]
    if 0 == RunCommand(diff_cmd, exit_code=True, redirect_stdout=True,
                       redirect_stderr=True, print_cmd=gflags.FLAGS.verbose):
      os.unlink(new_stable_ebuild_path)
      return None
    else:
      _Print('Adding new stable ebuild to git')
      _SimpleRunCommand('git add %s' % new_stable_ebuild_path)
      if self._ebuild.is_stable:
        _Print('Removing old ebuild from git')
        _SimpleRunCommand('git rm %s' % old_ebuild_path)
      return '%s-%s' % (self._ebuild.package, new_version)

  @classmethod
  def CommitChange(cls, message):
    """Commits current changes in git locally with given commit message.

    Args:
      message: the commit string to write when committing to git.
    Raises:
      OSError: Error occurred while committing.
    """
    Info('Committing changes with commit message: %s' % message)
    git_commit_cmd = 'git commit -am "%s"' % message
    _SimpleRunCommand(git_commit_cmd)
def main(argv):
  """Entry point: parse flags, then run clean/commit/push per overlay."""
  try:
    argv = gflags.FLAGS(argv)
    if len(argv) != 2:
      _PrintUsageAndDie('Must specify a valid command')
    else:
      command = argv[1]
  # Python 2 except syntax (this script predates `except ... as e`).
  except gflags.FlagsError, e :
    _PrintUsageAndDie(str(e))
  package_list = gflags.FLAGS.packages.split(':')
  _CheckSaneArguments(package_list, command)
  if gflags.FLAGS.overlays:
    overlays = {}
    for path in gflags.FLAGS.overlays.split(':'):
      # 'clean' may run against overlays that no longer exist on disk.
      if command != 'clean' and not os.path.isdir(path):
        Die('Cannot find overlay: %s' % path)
      overlays[path] = []
  else:
    Warning('Missing --overlays argument')
    # Default to the private and public overlays under srcroot.
    overlays = {
      '%s/private-overlays/chromeos-overlay' % gflags.FLAGS.srcroot: [],
      '%s/third_party/chromiumos-overlay' % gflags.FLAGS.srcroot: []
    }
  if command == 'commit':
    _BuildEBuildDictionary(overlays, gflags.FLAGS.all, package_list)
  for overlay, ebuilds in overlays.items():
    if not os.path.isdir(overlay):
      Warning("Skipping %s" % overlay)
      continue
    # TODO(davidjames): Currently, all code that interacts with git depends on
    # the cwd being set to the overlay directory. We should instead pass in
    # this parameter so that we don't need to modify the cwd globally.
    os.chdir(overlay)
    if command == 'clean':
      Clean(gflags.FLAGS.tracking_branch)
    elif command == 'push':
      PushChange(STABLE_BRANCH_NAME, gflags.FLAGS.tracking_branch)
    elif command == 'commit' and ebuilds:
      work_branch = GitBranch(STABLE_BRANCH_NAME, gflags.FLAGS.tracking_branch)
      work_branch.CreateBranch()
      if not work_branch.Exists():
        Die('Unable to create stabilizing branch in %s' % overlay)
      # Contains the array of packages we actually revved.
      revved_packages = []
      new_package_atoms = []
      for ebuild in ebuilds:
        try:
          _Print('Working on %s' % ebuild.package)
          worker = EBuildStableMarker(ebuild)
          commit_id = ebuild.GetCommitId()
          new_package = worker.RevWorkOnEBuild(commit_id)
          # RevWorkOnEBuild returns None when the rev produced no change.
          if new_package:
            message = _GIT_COMMIT_MESSAGE % (ebuild.package, commit_id)
            worker.CommitChange(message)
            revved_packages.append(ebuild.package)
            new_package_atoms.append('=%s' % new_package)
        except (OSError, IOError):
          Warning('Cannot rev %s\n' % ebuild.package,
                  'Note you will have to go into %s '
                  'and reset the git repo yourself.' % overlay)
          raise
      _CleanStalePackages(gflags.FLAGS.board, new_package_atoms)
      # Optionally record what was revved for downstream consumers.
      if gflags.FLAGS.drop_file:
        fh = open(gflags.FLAGS.drop_file, 'w')
        fh.write(' '.join(revved_packages))
        fh.close()
if __name__ == '__main__':
  main(sys.argv)

View File

@ -1,313 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for cros_mark_as_stable.py."""
import fileinput
import mox
import os
import sys
import unittest
import cros_mark_as_stable
class NonClassTests(mox.MoxTestBase):
  # Tests for the module-level functions of cros_mark_as_stable.
  def setUp(self):
    mox.MoxTestBase.setUp(self)
    # Every test shells out through _SimpleRunCommand, so stub it globally.
    self.mox.StubOutWithMock(cros_mark_as_stable, '_SimpleRunCommand')
    self._branch = 'test_branch'
    self._tracking_branch = 'cros/test'
  def testPushChange(self):
    git_log = 'Marking test_one as stable\nMarking test_two as stable\n'
    fake_description = 'Marking set of ebuilds as stable\n\n%s' % git_log
    self.mox.StubOutWithMock(cros_mark_as_stable, '_DoWeHaveLocalCommits')
    self.mox.StubOutWithMock(cros_mark_as_stable.GitBranch, 'CreateBranch')
    self.mox.StubOutWithMock(cros_mark_as_stable.GitBranch, 'Exists')
    # Expected sequence for a successful push: detect local commits, rebuild
    # the merge branch from a fresh sync, squash-merge, commit, then push.
    cros_mark_as_stable._DoWeHaveLocalCommits(
        self._branch, self._tracking_branch).AndReturn(True)
    cros_mark_as_stable.GitBranch.CreateBranch()
    cros_mark_as_stable.GitBranch.Exists().AndReturn(True)
    cros_mark_as_stable._SimpleRunCommand('git log --format=format:%s%n%n%b ' +
        self._tracking_branch + '..').AndReturn(git_log)
    cros_mark_as_stable._SimpleRunCommand('repo sync .')
    cros_mark_as_stable._SimpleRunCommand('git merge --squash %s' %
                                          self._branch)
    cros_mark_as_stable._SimpleRunCommand('git commit -m "%s"' %
                                          fake_description)
    cros_mark_as_stable._SimpleRunCommand('git config push.default tracking')
    cros_mark_as_stable._SimpleRunCommand('git push')
    self.mox.ReplayAll()
    cros_mark_as_stable.PushChange(self._branch, self._tracking_branch)
    self.mox.VerifyAll()
class GitBranchTest(mox.MoxTestBase):
  def setUp(self):
    mox.MoxTestBase.setUp(self)
    # Always stub RunCommmand out as we use it in every method.
    self.mox.StubOutWithMock(cros_mark_as_stable, '_SimpleRunCommand')
    # A mock branch carrying just the attributes Checkout() reads.
    self._branch = self.mox.CreateMock(cros_mark_as_stable.GitBranch)
    self._branch_name = 'test_branch'
    self._branch.branch_name = self._branch_name
    self._tracking_branch = 'cros/test'
    self._branch.tracking_branch = self._tracking_branch
  def testCheckoutCreate(self):
    # Test init with no previous branch existing.
    # NOTE(review): GitBranch.Checkout appends ' -f' to its checkout
    # commands; this expectation has no '-f' and looks out of date — confirm.
    self._branch.Exists().AndReturn(False)
    cros_mark_as_stable._SimpleRunCommand(
        'git checkout -b %s %s' % (self._branch_name, self._tracking_branch))
    self.mox.ReplayAll()
    cros_mark_as_stable.GitBranch.Checkout(self._branch)
    self.mox.VerifyAll()
  def testCheckoutNoCreate(self):
    # Test init with previous branch existing.
    self._branch.Exists().AndReturn(True)
    cros_mark_as_stable._SimpleRunCommand('git checkout %s' % (
        self._branch_name))
    self.mox.ReplayAll()
    cros_mark_as_stable.GitBranch.Checkout(self._branch)
    self.mox.VerifyAll()
  def testDelete(self):
    self.mox.StubOutWithMock(cros_mark_as_stable.GitBranch, 'Checkout')
    branch = cros_mark_as_stable.GitBranch(self._branch_name,
                                           self._tracking_branch)
    # Delete() first checks out the tracking branch, then force-deletes.
    cros_mark_as_stable.GitBranch.Checkout(mox.IgnoreArg())
    cros_mark_as_stable._SimpleRunCommand('git branch -D ' + self._branch_name)
    self.mox.ReplayAll()
    branch.Delete()
    self.mox.VerifyAll()
  def testExists(self):
    branch = cros_mark_as_stable.GitBranch(self._branch_name,
                                           self._tracking_branch)
    # Test if branch exists that is created
    cros_mark_as_stable._SimpleRunCommand('git branch').AndReturn(
        '%s' % self._branch_name)
    self.mox.ReplayAll()
    self.assertTrue(branch.Exists())
    self.mox.VerifyAll()
class EBuildTest(mox.MoxTestBase):
  # Tests for EBuild path parsing; file contents are mocked out empty so
  # only the rsplit/pkgsplit logic in __init__ is exercised.
  def setUp(self):
    mox.MoxTestBase.setUp(self)
  def testParseEBuildPath(self):
    # Test with ebuild with revision number.
    fake_ebuild_path = '/path/to/test_package/test_package-0.0.1-r1.ebuild'
    self.mox.StubOutWithMock(fileinput, 'input')
    # Empty "file": skips the is_workon/is_stable classification loop.
    fileinput.input(fake_ebuild_path).AndReturn('')
    self.mox.ReplayAll()
    fake_ebuild = cros_mark_as_stable.EBuild(fake_ebuild_path)
    self.mox.VerifyAll()
    self.assertEquals(fake_ebuild.version_no_rev, '0.0.1')
    self.assertEquals(fake_ebuild.ebuild_path_no_revision,
                      '/path/to/test_package/test_package-0.0.1')
    self.assertEquals(fake_ebuild.ebuild_path_no_version,
                      '/path/to/test_package/test_package')
    self.assertEquals(fake_ebuild.current_revision, 1)
  def testParseEBuildPathNoRevisionNumber(self):
    # Test with ebuild without revision number.
    fake_ebuild_path = '/path/to/test_package/test_package-9999.ebuild'
    self.mox.StubOutWithMock(fileinput, 'input')
    fileinput.input(fake_ebuild_path).AndReturn('')
    self.mox.ReplayAll()
    fake_ebuild = cros_mark_as_stable.EBuild(fake_ebuild_path)
    self.mox.VerifyAll()
    self.assertEquals(fake_ebuild.version_no_rev, '9999')
    self.assertEquals(fake_ebuild.ebuild_path_no_revision,
                      '/path/to/test_package/test_package-9999')
    self.assertEquals(fake_ebuild.ebuild_path_no_version,
                      '/path/to/test_package/test_package')
    # Versionless 9999 ebuilds parse as revision 0.
    self.assertEquals(fake_ebuild.current_revision, 0)
class EBuildStableMarkerTest(mox.MoxTestBase):
  """Tests for EBuildStableMarker.RevWorkOnEBuild and CommitChange.

  All filesystem and git interaction is mocked; the mox record/replay
  sections below mirror the exact call sequence the production code is
  expected to make, so statement order is significant.
  """

  def setUp(self):
    mox.MoxTestBase.setUp(self)
    # Stub command execution so no git/diff processes are actually spawned.
    self.mox.StubOutWithMock(cros_mark_as_stable, '_SimpleRunCommand')
    self.mox.StubOutWithMock(cros_mark_as_stable, 'RunCommand')
    self.mox.StubOutWithMock(os, 'unlink')
    # A fake stable ebuild at revision 1; revving it should produce -r2.
    self.m_ebuild = self.mox.CreateMock(cros_mark_as_stable.EBuild)
    self.m_ebuild.is_stable = True
    self.m_ebuild.package = 'test_package/test_package'
    self.m_ebuild.version_no_rev = '0.0.1'
    self.m_ebuild.current_revision = 1
    self.m_ebuild.ebuild_path_no_revision = '/path/test_package-0.0.1'
    self.m_ebuild.ebuild_path_no_version = '/path/test_package'
    self.m_ebuild.ebuild_path = '/path/test_package-0.0.1-r1.ebuild'
    self.revved_ebuild_path = '/path/test_package-0.0.1-r2.ebuild'
    self.unstable_ebuild_path = '/path/test_package-9999.ebuild'

  def testRevWorkOnEBuild(self):
    """A changed ebuild is copied from 9999, rewritten, git add'ed/rm'ed."""
    self.mox.StubOutWithMock(cros_mark_as_stable.fileinput, 'input')
    self.mox.StubOutWithMock(cros_mark_as_stable.os.path, 'exists')
    self.mox.StubOutWithMock(cros_mark_as_stable.shutil, 'copyfile')
    m_file = self.mox.CreateMock(file)
    # Prepare mock fileinput. This tests to make sure both the commit id
    # and keywords are changed correctly.
    mock_file = ['EAPI=2', 'CROS_WORKON_COMMIT=old_id',
                 'KEYWORDS=\"~x86 ~arm\"', 'src_unpack(){}']
    ebuild_9999 = self.m_ebuild.ebuild_path_no_version + '-9999.ebuild'
    cros_mark_as_stable.os.path.exists(ebuild_9999).AndReturn(True)
    cros_mark_as_stable.shutil.copyfile(ebuild_9999, self.revved_ebuild_path)
    cros_mark_as_stable.fileinput.input(self.revved_ebuild_path,
                                        inplace=1).AndReturn(mock_file)
    # Expected rewritten lines: commit id pinned, ~keywords stabilized.
    m_file.write('EAPI=2')
    m_file.write('CROS_WORKON_COMMIT="my_id"\n')
    m_file.write('KEYWORDS="x86 arm"')
    m_file.write('src_unpack(){}')
    # Non-zero diff exit code means the revved ebuild differs and is kept.
    diff_cmd = ['diff', '-Bu', self.m_ebuild.ebuild_path,
                self.revved_ebuild_path]
    cros_mark_as_stable.RunCommand(diff_cmd, exit_code=True,
                                   print_cmd=False, redirect_stderr=True,
                                   redirect_stdout=True).AndReturn(1)
    cros_mark_as_stable._SimpleRunCommand('git add ' + self.revved_ebuild_path)
    cros_mark_as_stable._SimpleRunCommand('git rm ' + self.m_ebuild.ebuild_path)
    self.mox.ReplayAll()
    marker = cros_mark_as_stable.EBuildStableMarker(self.m_ebuild)
    result = marker.RevWorkOnEBuild('my_id', redirect_file=m_file)
    self.mox.VerifyAll()
    self.assertEqual(result, 'test_package/test_package-0.0.1-r2')

  def testRevUnchangedEBuild(self):
    """An unchanged ebuild (empty diff) is deleted and None is returned."""
    self.mox.StubOutWithMock(cros_mark_as_stable.fileinput, 'input')
    self.mox.StubOutWithMock(cros_mark_as_stable.os.path, 'exists')
    self.mox.StubOutWithMock(cros_mark_as_stable.shutil, 'copyfile')
    m_file = self.mox.CreateMock(file)
    # Prepare mock fileinput. This tests to make sure both the commit id
    # and keywords are changed correctly.
    mock_file = ['EAPI=2', 'CROS_WORKON_COMMIT=old_id',
                 'KEYWORDS=\"~x86 ~arm\"', 'src_unpack(){}']
    ebuild_9999 = self.m_ebuild.ebuild_path_no_version + '-9999.ebuild'
    cros_mark_as_stable.os.path.exists(ebuild_9999).AndReturn(True)
    cros_mark_as_stable.shutil.copyfile(ebuild_9999, self.revved_ebuild_path)
    cros_mark_as_stable.fileinput.input(self.revved_ebuild_path,
                                        inplace=1).AndReturn(mock_file)
    m_file.write('EAPI=2')
    m_file.write('CROS_WORKON_COMMIT="my_id"\n')
    m_file.write('KEYWORDS="x86 arm"')
    m_file.write('src_unpack(){}')
    diff_cmd = ['diff', '-Bu', self.m_ebuild.ebuild_path,
                self.revved_ebuild_path]
    # Exit code 0: the revved ebuild is identical to the current one, so it
    # is unlinked rather than committed.
    cros_mark_as_stable.RunCommand(diff_cmd, exit_code=True,
                                   print_cmd=False, redirect_stderr=True,
                                   redirect_stdout=True).AndReturn(0)
    cros_mark_as_stable.os.unlink(self.revved_ebuild_path)
    self.mox.ReplayAll()
    marker = cros_mark_as_stable.EBuildStableMarker(self.m_ebuild)
    result = marker.RevWorkOnEBuild('my_id', redirect_file=m_file)
    self.mox.VerifyAll()
    self.assertEqual(result, None)

  def testRevMissingEBuild(self):
    """Missing 9999 ebuild triggers Die; with Die mocked, flow continues."""
    self.mox.StubOutWithMock(cros_mark_as_stable.fileinput, 'input')
    self.mox.StubOutWithMock(cros_mark_as_stable.os.path, 'exists')
    self.mox.StubOutWithMock(cros_mark_as_stable.shutil, 'copyfile')
    self.mox.StubOutWithMock(cros_mark_as_stable, 'Die')
    m_file = self.mox.CreateMock(file)
    # Start from the unstable -9999 ebuild at revision 0 so the rev lands
    # on -r1.
    revved_ebuild_path = self.m_ebuild.ebuild_path
    self.m_ebuild.ebuild_path = self.unstable_ebuild_path
    self.m_ebuild.is_stable = False
    self.m_ebuild.current_revision = 0
    # Prepare mock fileinput. This tests to make sure both the commit id
    # and keywords are changed correctly.
    mock_file = ['EAPI=2', 'CROS_WORKON_COMMIT=old_id',
                 'KEYWORDS=\"~x86 ~arm\"', 'src_unpack(){}']
    ebuild_9999 = self.m_ebuild.ebuild_path_no_version + '-9999.ebuild'
    cros_mark_as_stable.os.path.exists(ebuild_9999).AndReturn(False)
    # Die is stubbed out, so execution proceeds past it in this test.
    cros_mark_as_stable.Die("Missing unstable ebuild: %s" % ebuild_9999)
    cros_mark_as_stable.shutil.copyfile(ebuild_9999, revved_ebuild_path)
    cros_mark_as_stable.fileinput.input(revved_ebuild_path,
                                        inplace=1).AndReturn(mock_file)
    m_file.write('EAPI=2')
    m_file.write('CROS_WORKON_COMMIT="my_id"\n')
    m_file.write('KEYWORDS="x86 arm"')
    m_file.write('src_unpack(){}')
    diff_cmd = ['diff', '-Bu', self.unstable_ebuild_path, revved_ebuild_path]
    cros_mark_as_stable.RunCommand(diff_cmd, exit_code=True,
                                   print_cmd=False, redirect_stderr=True,
                                   redirect_stdout=True).AndReturn(1)
    cros_mark_as_stable._SimpleRunCommand('git add ' + revved_ebuild_path)
    self.mox.ReplayAll()
    marker = cros_mark_as_stable.EBuildStableMarker(self.m_ebuild)
    result = marker.RevWorkOnEBuild('my_id', redirect_file=m_file)
    self.mox.VerifyAll()
    self.assertEqual(result, 'test_package/test_package-0.0.1-r1')

  def testCommitChange(self):
    """CommitChange shells out a single 'git commit -am <message>'."""
    mock_message = 'Commit me'
    cros_mark_as_stable._SimpleRunCommand(
        'git commit -am "%s"' % mock_message)
    self.mox.ReplayAll()
    marker = cros_mark_as_stable.EBuildStableMarker(self.m_ebuild)
    marker.CommitChange(mock_message)
    self.mox.VerifyAll()
class _Package(object):
def __init__(self, package):
self.package = package
class BuildEBuildDictionaryTest(mox.MoxTestBase):
  """Tests for _BuildEBuildDictionary's overlay -> package-list mapping."""

  def setUp(self):
    mox.MoxTestBase.setUp(self)
    self.mox.StubOutWithMock(cros_mark_as_stable.os, 'walk')
    self.mox.StubOutWithMock(cros_mark_as_stable, 'RunCommand')
    self.package = 'chromeos-base/test_package'
    self.root = '/overlay/chromeos-base/test_package'
    self.package_path = self.root + '/test_package-0.0.1.ebuild'
    # os.walk yields a single package directory under the overlay.
    paths = [[self.root, [], []]]
    cros_mark_as_stable.os.walk("/overlay").AndReturn(paths)
    self.mox.StubOutWithMock(cros_mark_as_stable, '_FindUprevCandidates')

  def testWantedPackage(self):
    """A package named in the wanted list is recorded under its overlay."""
    overlays = {"/overlay": []}
    package = _Package(self.package)
    cros_mark_as_stable._FindUprevCandidates([]).AndReturn(package)
    self.mox.ReplayAll()
    cros_mark_as_stable._BuildEBuildDictionary(overlays, False, [self.package])
    self.mox.VerifyAll()
    self.assertEquals(len(overlays), 1)
    self.assertEquals(overlays["/overlay"], [package])

  def testUnwantedPackage(self):
    """A package absent from the wanted list is not recorded."""
    overlays = {"/overlay": []}
    package = _Package(self.package)
    cros_mark_as_stable._FindUprevCandidates([]).AndReturn(package)
    self.mox.ReplayAll()
    cros_mark_as_stable._BuildEBuildDictionary(overlays, False, [])
    self.assertEquals(len(overlays), 1)
    self.assertEquals(overlays["/overlay"], [])
    self.mox.VerifyAll()
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()

2292
gflags.py

File diff suppressed because it is too large Load Diff

View File

@ -1,564 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import datetime
import multiprocessing
import optparse
import os
import re
import sys
import tempfile
import time
from chromite.lib import cros_build_lib
from chromite.lib.binpkg import (GrabLocalPackageIndex, GrabRemotePackageIndex,
PackageIndex)
"""
This script is used to upload host prebuilts as well as board BINHOSTS.
If the URL starts with 'gs://', we upload using gsutil to Google Storage.
Otherwise, rsync is used.
After a build is successfully uploaded a file is updated with the proper
BINHOST version as well as the target board. This file is defined in GIT_FILE
To read more about prebuilts/binhost binary packages please refer to:
http://sites/chromeos/for-team-members/engineering/releng/prebuilt-binaries-for-streamlining-the-build-process
Example of uploading prebuilt amd64 host files to Google Storage:
./prebuilt.py -p /b/cbuild/build -s -u gs://chromeos-prebuilt
Example of uploading x86-dogfood binhosts to Google Storage:
./prebuilt.py -b x86-dogfood -p /b/cbuild/build/ -u gs://chromeos-prebuilt -g
Example of uploading prebuilt amd64 host files using rsync:
./prebuilt.py -p /b/cbuild/build -s -u codf30.jail:/tmp
"""
# as per http://crosbug.com/5855 always filter the below packages
_FILTER_PACKAGES = set()
# Number of attempts _RetryRun makes before giving up on a command.
_RETRIES = 3
_GSUTIL_BIN = '/b/build/third_party/gsutil/gsutil'
# Location of host binary packages, relative to the build root.
_HOST_PACKAGES_PATH = 'chroot/var/lib/portage/pkgs'
_HOST_TARGET = 'amd64'
_BOARD_PATH = 'chroot/build/%(board)s'
# Boto config holding the Google Storage credentials used by gsutil.
_BOTO_CONFIG = '/home/chrome-bot/external-boto'
# board/board-target/version/packages/'
_REL_BOARD_PATH = 'board/%(board)s/%(version)s/packages'
# host/host-target/version/packages/'
_REL_HOST_PATH = 'host/%(target)s/%(version)s/packages'
# Private overlays to look at for builds to filter
# relative to build path
_PRIVATE_OVERLAY_DIR = 'src/private-overlays'
_BINHOST_BASE_URL = 'http://commondatastorage.googleapis.com/chromeos-prebuilt'
_PREBUILT_BASE_DIR = 'src/third_party/chromiumos-overlay/chromeos/config/'
# Created in the event of new host targets becoming available
_PREBUILT_MAKE_CONF = {'amd64': os.path.join(_PREBUILT_BASE_DIR,
                                             'make.conf.amd64-host')}
_BINHOST_CONF_DIR = 'src/third_party/chromiumos-overlay/chromeos/binhost'
class FiltersEmpty(Exception):
  """Raised when filters are used but none are found."""
class UploadFailed(Exception):
  """Raised when one of the files uploaded failed."""
class UnknownBoardFormat(Exception):
  """Raised when a function finds an unknown board format."""
class GitPushFailed(Exception):
  """Raised when a git push failed after retry."""
def UpdateLocalFile(filename, value, key='PORTAGE_BINHOST'):
  """Update the key in file with the value passed.

  File format:
    key="value"
  Note quotes are added automatically

  Args:
    filename: Name of file to modify.
    value: Value to write with the key.
    key: The variable key to update. (Default: PORTAGE_BINHOST)
  """
  # NOTE(review): quotes are only added on the "key already present" path
  # below; a key appended because it was absent is written unquoted (the
  # unit tests rely on that) -- confirm before changing.
  if os.path.exists(filename):
    file_fh = open(filename)
  else:
    # 'w+' creates the file so the read loop below simply sees no lines.
    file_fh = open(filename, 'w+')
  file_lines = []
  found = False
  keyval_str = '%(key)s=%(value)s'
  for line in file_fh:
    # Strip newlines from end of line. We already add newlines below.
    line = line.rstrip("\n")
    if len(line.split('=')) != 2:
      # Skip any line that doesn't fit key=val.
      file_lines.append(line)
      continue
    file_var, file_val = line.split('=')
    if file_var == key:
      found = True
      print 'Updating %s=%s to %s="%s"' % (file_var, file_val, key, value)
      value = '"%s"' % value
      file_lines.append(keyval_str % {'key': key, 'value': value})
    else:
      # Rewrite unrelated key=val lines unchanged.
      file_lines.append(keyval_str % {'key': file_var, 'value': file_val})
  if not found:
    file_lines.append(keyval_str % {'key': key, 'value': value})
  file_fh.close()
  # write out new file
  new_file_fh = open(filename, 'w')
  new_file_fh.write('\n'.join(file_lines) + '\n')
  new_file_fh.close()
def RevGitPushWithRetry(retries=5):
  """Repo sync and then push git changes in flight.

  Args:
    retries: The number of times to retry before giving up, default: 5

  Raises:
    GitPushFailed if push was unsuccessful after retries
  """
  for retry in range(1, retries+1):
    try:
      # Re-sync first so the push is on top of the latest upstream state.
      cros_build_lib.RunCommand('repo sync .', shell=True)
      cros_build_lib.RunCommand('git push', shell=True)
      break
    except cros_build_lib.RunCommandError:
      if retry < retries:
        print 'Error pushing changes trying again (%s/%s)' % (retry, retries)
        # Back off linearly (5s, 10s, ...) before the next attempt.
        time.sleep(5*retry)
      else:
        raise GitPushFailed('Failed to push change after %s retries' % retries)
def RevGitFile(filename, value, retries=5, key='PORTAGE_BINHOST'):
  """Update and push the git file.

  Args:
    filename: file to modify that is in a git repo already
    value: string representing the version of the prebuilt that has been
      uploaded.
    retries: The number of times to retry before giving up, default: 5
    key: The variable key to update in the git file.
      (Default: PORTAGE_BINHOST)
  """
  prebuilt_branch = 'prebuilt_branch'
  old_cwd = os.getcwd()
  os.chdir(os.path.dirname(filename))
  # Work on a throwaway repo branch so the commit can be pushed and the
  # branch abandoned afterwards regardless of success.
  cros_build_lib.RunCommand('repo sync .', shell=True)
  cros_build_lib.RunCommand('repo start %s .' % prebuilt_branch, shell=True)
  # Rewrite the anonymous http fetch URL to the authenticated ssh push URL.
  git_ssh_config_cmd = (
      'git config url.ssh://git@gitrw.chromium.org:9222.pushinsteadof '
      'http://git.chromium.org/git')
  cros_build_lib.RunCommand(git_ssh_config_cmd, shell=True)
  description = 'Update %s="%s" in %s' % (key, value, filename)
  print description
  try:
    UpdateLocalFile(filename, value, key)
    cros_build_lib.RunCommand('git config push.default tracking', shell=True)
    cros_build_lib.RunCommand('git commit -am "%s"' % description, shell=True)
    RevGitPushWithRetry(retries)
  finally:
    # Always clean up the temporary branch and restore the previous cwd.
    cros_build_lib.RunCommand('repo abandon %s .' % prebuilt_branch, shell=True)
    os.chdir(old_cwd)
def GetVersion():
  """Get the version to put in LATEST and update the git version with.

  Returns:
    A timestamp string in DD.MM.YY.HHMMSS form (local time).
  """
  now = datetime.datetime.now()
  return now.strftime('%d.%m.%y.%H%M%S')
def LoadPrivateFilters(build_path):
  """Load private filters based on ebuilds found under _PRIVATE_OVERLAY_DIR.

  This function adds filters to the global set _FILTER_PACKAGES.

  Args:
    build_path: Path that _PRIVATE_OVERLAY_DIR is in.

  Raises:
    FiltersEmpty: if no package names could be derived from the ebuilds.
  """
  # TODO(scottz): eventually use manifest.xml to find the proper
  # private overlay path.
  filter_path = os.path.join(build_path, _PRIVATE_OVERLAY_DIR)
  files = cros_build_lib.ListFiles(filter_path)
  filters = []
  # Strip the "-<version>.ebuild" suffix to recover each package name.
  # Fixed: the pattern is now a raw string with the ebuild-suffix dot
  # escaped and anchored; the original '(.*?)-\d.*.ebuild' treated both
  # dots as wildcards.  (Loop variable renamed: 'file' shadowed a builtin.)
  ebuild_re = re.compile(r'(.*?)-\d.*\.ebuild$')
  for path in files:
    if path.endswith('.ebuild'):
      basename = os.path.basename(path)
      match = ebuild_re.match(basename)
      if match:
        filters.append(match.group(1))
  if not filters:
    raise FiltersEmpty('No filters were returned')
  _FILTER_PACKAGES.update(filters)
def ShouldFilterPackage(file_path):
"""Skip a particular file if it matches a pattern.
Skip any files that machine the list of packages to filter in
_FILTER_PACKAGES.
Args:
file_path: string of a file path to inspect against _FILTER_PACKAGES
Returns:
True if we should filter the package,
False otherwise.
"""
for name in _FILTER_PACKAGES:
if name in file_path:
print 'FILTERING %s' % file_path
return True
return False
def _RetryRun(cmd, print_cmd=True, shell=False, cwd=None):
"""Run the specified command, retrying if necessary.
Args:
cmd: The command to run.
print_cmd: Whether to print out the cmd.
shell: Whether to treat the command as a shell.
cwd: Working directory to run command in.
Returns:
True if the command succeeded. Otherwise, returns False.
"""
# TODO(scottz): port to use _Run or similar when it is available in
# cros_build_lib.
for attempt in range(_RETRIES):
try:
output = cros_build_lib.RunCommand(cmd, print_cmd=print_cmd, shell=shell,
cwd=cwd)
return True
except cros_build_lib.RunCommandError:
print 'Failed to run %s' % cmd
else:
print 'Retry failed run %s, giving up' % cmd
return False
def _GsUpload(args):
  """Upload one file to Google Storage via gsutil.

  Args:
    args: a (local_file, remote_file) tuple.

  Returns:
    The same (local_file, remote_file) tuple when the upload failed,
    otherwise None.
  """
  local_file, remote_file = args
  gsutil_cmd = ' '.join([_GSUTIL_BIN, 'cp', '-a', 'public-read',
                         local_file, remote_file])
  succeeded = _RetryRun(gsutil_cmd, print_cmd=False, shell=True)
  if succeeded:
    return None
  return (local_file, remote_file)
def RemoteUpload(files, pool=10):
  """Upload to google storage.

  Create a pool of process and call _GsUpload with the proper arguments.

  Args:
    files: dictionary with keys to local files and values to remote path.
    pool: integer of maximum proesses to have at the same time.

  Returns:
    Return a set of tuple arguments of the failed uploads.  Note that
    successful uploads contribute None to the set (_GsUpload returns None
    on success), so callers must account for a None member.
  """
  # TODO(scottz) port this to use _RunManyParallel when it is available in
  # cros_build_lib
  pool = multiprocessing.Pool(processes=pool)
  workers = []
  for local_file, remote_path in files.iteritems():
    workers.append((local_file, remote_path))
  result = pool.map_async(_GsUpload, workers, chunksize=1)
  while True:
    try:
      # Poll with a one-hour timeout instead of blocking forever so signals
      # (e.g. KeyboardInterrupt) are not swallowed by the pool wait.
      return set(result.get(60*60))
    except multiprocessing.TimeoutError:
      pass
def GenerateUploadDict(base_local_path, base_remote_path, pkgs):
  """Build a dictionary of local remote file key pairs to upload.

  Args:
    base_local_path: The base path to the files on the local hard drive.
    base_remote_path: The base path to the remote paths.
    pkgs: The packages to upload.

  Returns:
    Returns a dictionary of local_path/remote_path pairs
  """
  remote_base = base_remote_path.rstrip('/')
  mapping = {}
  for pkg in pkgs:
    tarball = pkg['CPV'] + '.tbz2'
    src = os.path.join(base_local_path, tarball)
    # Every package named in the index must already exist on disk.
    assert os.path.exists(src)
    mapping[src] = '%s/%s' % (remote_base, tarball)
  return mapping
def GetBoardPathFromCrosOverlayList(build_path, target):
  """Use the cros_overlay_list to determine the path to the board overlay.

  Args:
    build_path: The path to the root of the build directory
    target: The target that we are looking for, could consist of board and
      board_variant, we handle that properly

  Returns:
    The last line from cros_overlay_list as a string

  Raises:
    UnknownBoardFormat: if target matches neither the board_variant nor the
      plain-board pattern.
  """
  script_dir = os.path.join(build_path, 'src/scripts/bin')
  cmd = ['./cros_overlay_list']
  # Patterns are raw strings now; '\w' in a plain string only worked by
  # accident of Python's unrecognized-escape passthrough.
  if re.match(r'.*?_.*', target):
    # "board_variant" form, e.g. tegra2_seaboard.
    (board, variant) = target.split('_')
    cmd += ['--board', board, '--variant', variant]
  elif re.match(r'.*?-\w+', target):
    # Plain board form, e.g. x86-generic.
    cmd += ['--board', target]
  else:
    raise UnknownBoardFormat('Unknown format: %s' % target)
  cmd_output = cros_build_lib.RunCommand(cmd, redirect_stdout=True,
                                         cwd=script_dir)
  # We only care about the last entry
  return cmd_output.output.splitlines().pop()
def DeterminePrebuiltConfFile(build_path, target):
  """Determine the prebuilt.conf file that needs to be updated for prebuilts.

  Args:
    build_path: The path to the root of the build directory
    target: String representation of the board. This includes host and board
      targets

  Returns:
    A string path to a prebuilt.conf file to be updated.
  """
  if target == _HOST_TARGET:
    # Host target: resolved through the static make.conf mapping.
    # TODO(Scottz): as new host targets come online expand this to
    # work more like boards.
    return _PREBUILT_MAKE_CONF[target]
  # Board target: the conf file lives inside the board's overlay directory.
  overlay = GetBoardPathFromCrosOverlayList(build_path, target)
  return os.path.join(overlay, 'prebuilt.conf')
def UpdateBinhostConfFile(path, key, value):
  """Update binhost config file file with key=value.

  Args:
    path: Filename to update.
    key: Key to update.
    value: New value for key.
  """
  cwd = os.path.dirname(os.path.abspath(path))
  filename = os.path.basename(path)
  if not os.path.isdir(cwd):
    os.makedirs(cwd)
  # Seed a brand-new conf file with a FULL_BINHOST line so the file is
  # well-formed before UpdateLocalFile edits it.  (file() is the Python 2
  # built-in alias of open().)
  if not os.path.isfile(path):
    config_file = file(path, 'w')
    config_file.write('FULL_BINHOST="$PORTAGE_BINHOST"\n')
    config_file.close()
  UpdateLocalFile(path, value, key)
  # Stage and commit the change locally; pushing is left to the caller's
  # workflow.
  cros_build_lib.RunCommand('git add %s' % filename, cwd=cwd, shell=True)
  description = 'Update %s=%s in %s' % (key, value, filename)
  cros_build_lib.RunCommand('git commit -m "%s"' % description, cwd=cwd,
                            shell=True)
def UploadPrebuilt(build_path, upload_location, version, binhost_base_url,
                   board=None, git_sync=False, git_sync_retries=5,
                   key='PORTAGE_BINHOST', pkg_indexes=[],
                   sync_binhost_conf=False):
  """Upload Host prebuilt files to Google Storage space.

  Args:
    build_path: The path to the root of the chroot.
    upload_location: The upload location.
    version: Version string embedded in the remote path.
    binhost_base_url: Base URL advertised in make.conf / binhost.conf.
    board: The board to upload to Google Storage. If this is None, upload
      host packages.
    git_sync: If set, update make.conf of target to reference the latest
      prebuilt packages generated here.
    git_sync_retries: How many times to retry pushing when updating git files.
      This helps avoid failures when multiple bots are modifying the same Repo.
      default: 5
    key: The variable key to update in the git file. (Default: PORTAGE_BINHOST)
    pkg_indexes: Old uploaded prebuilts to compare against. Instead of
      uploading duplicate files, we just link to the old files.
    sync_binhost_conf: If set, update binhost config file in chromiumos-overlay
      for the current board or host.

  Raises:
    UploadFailed: if uploading via gsutil or rsync fails.
  """
  # NOTE(review): pkg_indexes has a mutable default ([]); it is only passed
  # to ResolveDuplicateUploads here -- confirm callees never mutate it.
  if not board:
    # We are uploading host packages
    # TODO(scottz): eventually add support for different host_targets
    package_path = os.path.join(build_path, _HOST_PACKAGES_PATH)
    url_suffix = _REL_HOST_PATH % {'version': version, 'target': _HOST_TARGET}
    package_string = _HOST_TARGET
    git_file = os.path.join(build_path, _PREBUILT_MAKE_CONF[_HOST_TARGET])
    binhost_conf = os.path.join(build_path, _BINHOST_CONF_DIR, 'host',
                                '%s.conf' % _HOST_TARGET)
  else:
    board_path = os.path.join(build_path, _BOARD_PATH % {'board': board})
    package_path = os.path.join(board_path, 'packages')
    package_string = board
    url_suffix = _REL_BOARD_PATH % {'board': board, 'version': version}
    git_file = DeterminePrebuiltConfFile(build_path, board)
    binhost_conf = os.path.join(build_path, _BINHOST_CONF_DIR, 'target',
                                '%s.conf' % board)
  remote_location = '%s/%s' % (upload_location.rstrip('/'), url_suffix)

  # Process Packages file, removing duplicates and filtered packages.
  pkg_index = GrabLocalPackageIndex(package_path)
  pkg_index.SetUploadLocation(binhost_base_url, url_suffix)
  pkg_index.RemoveFilteredPackages(lambda pkg: ShouldFilterPackage(pkg))
  uploads = pkg_index.ResolveDuplicateUploads(pkg_indexes)

  # Write Packages file.
  tmp_packages_file = pkg_index.WriteToNamedTemporaryFile()

  if upload_location.startswith('gs://'):
    # Build list of files to upload.
    upload_files = GenerateUploadDict(package_path, remote_location, uploads)
    remote_file = '%s/Packages' % remote_location.rstrip('/')
    upload_files[tmp_packages_file.name] = remote_file

    print 'Uploading %s' % package_string
    failed_uploads = RemoteUpload(upload_files)
    # RemoteUpload's result set contains None for each successful upload and
    # a (local, remote) tuple per failure; "only None present" means success.
    if len(failed_uploads) > 1 or (None not in failed_uploads):
      error_msg = ['%s -> %s\n' % args for args in failed_uploads]
      raise UploadFailed('Error uploading:\n%s' % error_msg)
  else:
    # Non-gs destination: create the remote dir over ssh, then rsync the
    # Packages index and (if any) the package tarballs.
    pkgs = ' '.join(p['CPV'] + '.tbz2' for p in uploads)
    ssh_server, remote_path = remote_location.split(':', 1)
    d = { 'pkg_index': tmp_packages_file.name,
          'pkgs': pkgs,
          'remote_packages': '%s/Packages' % remote_location.rstrip('/'),
          'remote_path': remote_path,
          'remote_location': remote_location,
          'ssh_server': ssh_server }
    cmds = ['ssh %(ssh_server)s mkdir -p %(remote_path)s' % d,
            'rsync -av --chmod=a+r %(pkg_index)s %(remote_packages)s' % d]
    if pkgs:
      cmds.append('rsync -Rav %(pkgs)s %(remote_location)s/' % d)
    for cmd in cmds:
      if not _RetryRun(cmd, shell=True, cwd=package_path):
        raise UploadFailed('Could not run %s' % cmd)

  url_value = '%s/%s/' % (binhost_base_url, url_suffix)
  if git_sync:
    RevGitFile(git_file, url_value, retries=git_sync_retries, key=key)
  if sync_binhost_conf:
    UpdateBinhostConfFile(binhost_conf, key, url_value)
def usage(parser, msg):
  """Display usage message and parser help then exit with 1.

  Never returns: terminates the process via sys.exit(1).
  """
  print >> sys.stderr, msg
  parser.print_help()
  sys.exit(1)
def main():
  """Parse command-line flags and run host/board prebuilt uploads."""
  parser = optparse.OptionParser()
  parser.add_option('-H', '--binhost-base-url', dest='binhost_base_url',
                    default=_BINHOST_BASE_URL,
                    help='Base URL to use for binhost in make.conf updates')
  parser.add_option('', '--previous-binhost-url', action='append',
                    default=[], dest='previous_binhost_url',
                    help='Previous binhost URL')
  parser.add_option('-b', '--board', dest='board', default=None,
                    help='Board type that was built on this machine')
  parser.add_option('-p', '--build-path', dest='build_path',
                    help='Path to the chroot')
  parser.add_option('-s', '--sync-host', dest='sync_host',
                    default=False, action='store_true',
                    help='Sync host prebuilts')
  parser.add_option('-g', '--git-sync', dest='git_sync',
                    default=False, action='store_true',
                    help='Enable git version sync (This commits to a repo)')
  parser.add_option('-u', '--upload', dest='upload',
                    default=None,
                    help='Upload location')
  parser.add_option('-V', '--prepend-version', dest='prepend_version',
                    default=None,
                    help='Add an identifier to the front of the version')
  parser.add_option('-f', '--filters', dest='filters', action='store_true',
                    default=False,
                    help='Turn on filtering of private ebuild packages')
  parser.add_option('-k', '--key', dest='key',
                    default='PORTAGE_BINHOST',
                    help='Key to update in make.conf / binhost.conf')
  parser.add_option('', '--sync-binhost-conf', dest='sync_binhost_conf',
                    default=False, action='store_true',
                    help='Update binhost.conf')

  options, args = parser.parse_args()
  # Setup boto environment for gsutil to use
  os.environ['BOTO_CONFIG'] = _BOTO_CONFIG
  # Both a chroot path and an upload destination are mandatory.
  if not options.build_path:
    usage(parser, 'Error: you need provide a chroot path')
  if not options.upload:
    usage(parser, 'Error: you need to provide an upload location using -u')
  if options.filters:
    LoadPrivateFilters(options.build_path)
  version = GetVersion()
  if options.prepend_version:
    version = '%s-%s' % (options.prepend_version, version)
  # Fetch any previous binhost indexes so duplicate packages can be linked
  # instead of re-uploaded.
  pkg_indexes = []
  for url in options.previous_binhost_url:
    pkg_index = GrabRemotePackageIndex(url)
    if pkg_index:
      pkg_indexes.append(pkg_index)
  # Host and board uploads are independent and may both be requested.
  if options.sync_host:
    UploadPrebuilt(options.build_path, options.upload, version,
                   options.binhost_base_url, git_sync=options.git_sync,
                   key=options.key, pkg_indexes=pkg_indexes,
                   sync_binhost_conf=options.sync_binhost_conf)
  if options.board:
    UploadPrebuilt(options.build_path, options.upload, version,
                   options.binhost_base_url, board=options.board,
                   git_sync=options.git_sync, key=options.key,
                   pkg_indexes=pkg_indexes,
                   sync_binhost_conf=options.sync_binhost_conf)

if __name__ == '__main__':
  main()

View File

@ -1,371 +0,0 @@
#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import mox
import os
import prebuilt
import shutil
import tempfile
import unittest
import urllib
from chromite.lib import cros_build_lib
from chromite.lib.binpkg import PackageIndex
# Canned package-index entries used by the tests below: two "public"
# packages (one with an explicit PATH override) and one "private" one.
PUBLIC_PACKAGES = [{'CPV': 'gtk+/public1', 'SHA1': '1'},
                   {'CPV': 'gtk+/public2', 'SHA1': '2',
                    'PATH': 'gtk%2B/foo.tgz'}]
PRIVATE_PACKAGES = [{'CPV': 'private', 'SHA1': '3'}]
def SimplePackageIndex(header=True, packages=True):
  """Construct a canned PackageIndex for the tests.

  Args:
    header: include the URI header entry.
    packages: populate with deep copies of the canned package lists.

  Returns:
    A PackageIndex instance.
  """
  index = PackageIndex()
  if header:
    index.header['URI'] = 'http://www.example.com'
  if packages:
    index.packages = copy.deepcopy(PUBLIC_PACKAGES + PRIVATE_PACKAGES)
  return index
class TestUpdateFile(unittest.TestCase):
  """Tests for prebuilt.UpdateLocalFile against a make.conf-style file."""

  def setUp(self):
    # A small config file with comments, KEY="val" lines and non key=value
    # noise lines.
    self.contents_str = ['# comment that should be skipped',
                         'PKGDIR="/var/lib/portage/pkgs"',
                         'PORTAGE_BINHOST="http://no.thanks.com"',
                         'portage portage-20100310.tar.bz2',
                         'COMPILE_FLAGS="some_value=some_other"',
                         ]
    temp_fd, self.version_file = tempfile.mkstemp()
    os.write(temp_fd, '\n'.join(self.contents_str))
    os.close(temp_fd)

  def tearDown(self):
    os.remove(self.version_file)

  def _read_version_file(self, version_file=None):
    """Read the contents of self.version_file and return as a list."""
    if not version_file:
      version_file = self.version_file
    version_fh = open(version_file)
    try:
      return [line.strip() for line in version_fh.readlines()]
    finally:
      version_fh.close()

  def _verify_key_pair(self, key, val):
    """Fail unless the file contains key="val" (quotes added if absent)."""
    file_contents = self._read_version_file()
    # ensure key for verify is wrapped on quotes
    if '"' not in val:
      val = '"%s"' % val
    for entry in file_contents:
      if '=' not in entry:
        continue
      file_key, file_val = entry.split('=')
      if file_key == key:
        if val == file_val:
          break
    else:
      self.fail('Could not find "%s=%s" in version file' % (key, val))

  def testAddVariableThatDoesNotExist(self):
    """Add in a new variable that was not present in the file."""
    key = 'PORTAGE_BINHOST'
    value = '1234567'
    # Fixed: removed leftover debug prints of self.version_file and an
    # unused read of the file contents.
    prebuilt.UpdateLocalFile(self.version_file, value)
    self._verify_key_pair(key, value)

  def testUpdateVariable(self):
    """Test updating a variable that already exists."""
    key, val = self.contents_str[2].split('=')
    new_val = 'test_update'
    self._verify_key_pair(key, val)
    prebuilt.UpdateLocalFile(self.version_file, new_val)
    self._verify_key_pair(key, new_val)

  def testUpdateNonExistentFile(self):
    """Updating a missing file creates it with a bare key=value line."""
    key = 'PORTAGE_BINHOST'
    value = '1234567'
    non_existent_file = tempfile.mktemp()
    try:
      prebuilt.UpdateLocalFile(non_existent_file, value)
      file_contents = self._read_version_file(non_existent_file)
      self.assertEqual(file_contents, ['%s=%s' % (key, value)])
    finally:
      if os.path.exists(non_existent_file):
        os.remove(non_existent_file)
class TestPrebuiltFilters(unittest.TestCase):
  """Tests for LoadPrivateFilters / ShouldFilterPackage via a fake overlay."""

  def setUp(self):
    self.tmp_dir = tempfile.mkdtemp()
    self.private_dir = os.path.join(self.tmp_dir,
                                    prebuilt._PRIVATE_OVERLAY_DIR)
    self.private_structure_base = 'chromeos-overlay/chromeos-base'
    self.private_pkgs = ['test-package/salt-flavor-0.1.r3.ebuild',
                         'easy/alpha_beta-0.1.41.r3.ebuild',
                         'dev/j-t-r-0.1.r3.ebuild',]
    self.expected_filters = set(['salt-flavor', 'alpha_beta', 'j-t-r'])

  def tearDown(self):
    if self.tmp_dir:
      shutil.rmtree(self.tmp_dir)

  def _CreateNestedDir(self, tmp_dir, dir_structure):
    """Create each entry of dir_structure under tmp_dir as a dummy file.

    Entries ending in '/' only create the directory chain.
    """
    # Fixed: 'errno' was referenced below without ever being imported in
    # this file, so the EEXIST check raised NameError.
    import errno
    for entry in dir_structure:
      # Fixed: redundant nested os.path.join removed.
      full_path = os.path.join(tmp_dir, entry)
      # ensure dirs are created
      try:
        os.makedirs(os.path.dirname(full_path))
        if full_path.endswith('/'):
          # we only want to create directories
          # NOTE(review): this 'return' aborts the remaining entries;
          # 'continue' looks intended -- confirm before changing.
          return
      except OSError as err:  # fixed py2-only 'except OSError, err' syntax
        # we don't care if the dir already exists
        if err.errno != errno.EEXIST:
          raise
      # create dummy files
      open(full_path, 'w').close()

  def _LoadPrivateMockFilters(self):
    """Load mock filters as defined in the setUp function."""
    dir_structure = [os.path.join(self.private_structure_base, entry)
                     for entry in self.private_pkgs]
    self._CreateNestedDir(self.private_dir, dir_structure)
    prebuilt.LoadPrivateFilters(self.tmp_dir)

  def testFilterPattern(self):
    """Check that particular packages are filtered properly."""
    self._LoadPrivateMockFilters()
    packages = ['/some/dir/area/j-t-r-0.1.r3.tbz',
                '/var/pkgs/new/alpha_beta-0.2.3.4.tbz',
                '/usr/local/cache/good-0.1.3.tbz',
                '/usr-blah/b_d/salt-flavor-0.0.3.tbz']
    expected_list = ['/usr/local/cache/good-0.1.3.tbz']
    # Fixed: comprehension variable renamed; 'file' shadowed the builtin.
    filtered_list = [path for path in packages if not
                     prebuilt.ShouldFilterPackage(path)]
    self.assertEqual(expected_list, filtered_list)

  def testLoadPrivateFilters(self):
    """LoadPrivateFilters derives every package name from the ebuilds."""
    self._LoadPrivateMockFilters()
    prebuilt.LoadPrivateFilters(self.tmp_dir)
    self.assertEqual(self.expected_filters, prebuilt._FILTER_PACKAGES)

  def testEmptyFiltersErrors(self):
    """Ensure LoadPrivateFilters errors if an empty list is generated."""
    os.makedirs(os.path.join(self.tmp_dir, prebuilt._PRIVATE_OVERLAY_DIR))
    self.assertRaises(prebuilt.FiltersEmpty, prebuilt.LoadPrivateFilters,
                      self.tmp_dir)
class TestPrebuilt(unittest.TestCase):
  """Mox-based tests for upload-dict generation and conf-file discovery."""

  def setUp(self):
    self.mox = mox.Mox()

  def tearDown(self):
    # NOTE(review): VerifyAll runs after UnsetStubs here; the usual mox
    # order is Verify first, then Unset -- confirm before relying on it.
    self.mox.UnsetStubs()
    self.mox.VerifyAll()

  def testGenerateUploadDict(self):
    """Local package paths map to their remote gs:// destinations."""
    base_local_path = '/b/cbuild/build/chroot/build/x86-dogfood/'
    gs_bucket_path = 'gs://chromeos-prebuilt/host/version'
    local_path = os.path.join(base_local_path, 'public1.tbz2')
    # Stub existence check so no real file is needed.
    self.mox.StubOutWithMock(prebuilt.os.path, 'exists')
    prebuilt.os.path.exists(local_path).AndReturn(True)
    self.mox.ReplayAll()
    pkgs = [{ 'CPV': 'public1' }]
    result = prebuilt.GenerateUploadDict(base_local_path, gs_bucket_path, pkgs)
    expected = { local_path: gs_bucket_path + '/public1.tbz2' }
    self.assertEqual(result, expected)

  def testFailonUploadFail(self):
    """Make sure we fail if one of the upload processes fail."""
    files = {'test': '/uasd'}
    self.assertEqual(prebuilt.RemoteUpload(files), set([('test', '/uasd')]))

  def testDeterminePrebuiltConfHost(self):
    """Test that the host prebuilt path comes back properly."""
    expected_path = os.path.join(prebuilt._PREBUILT_MAKE_CONF['amd64'])
    self.assertEqual(prebuilt.DeterminePrebuiltConfFile('fake_path', 'amd64'),
                     expected_path)

  def testDeterminePrebuiltConf(self):
    """Test the different known variants of boards for proper path discovery."""
    fake_path = '/b/cbuild'
    script_path = os.path.join(fake_path, 'src/scripts/bin')
    public_overlay_path = os.path.join(fake_path, 'src/overlays')
    private_overlay_path = os.path.join(fake_path,
                                        prebuilt._PRIVATE_OVERLAY_DIR)
    path_dict = {'private_overlay_path': private_overlay_path,
                 'public_overlay_path': public_overlay_path}
    # For each target we record the cros_overlay_list command expected to
    # run, its mocked stdout, and the resulting prebuilt.conf path (always
    # derived from the last overlay listed).
    # Mock output from cros_overlay_list
    x86_out = ('%(private_overlay_path)s/chromeos-overlay\n'
               '%(public_overlay_path)s/overlay-x86-generic\n' % path_dict)
    x86_cmd = './cros_overlay_list --board x86-generic'
    x86_expected_path = os.path.join(public_overlay_path, 'overlay-x86-generic',
                                     'prebuilt.conf')
    # Mock output from cros_overlay_list
    tegra2_out = ('%(private_overlay_path)s/chromeos-overlay\n'
                  '%(public_overlay_path)s/overlay-tegra2\n'
                  '%(public_overlay_path)s/overlay-variant-tegra2-seaboard\n'
                  '%(private_overlay_path)s/overlay-tegra2-private\n'
                  '%(private_overlay_path)s/'
                  'overlay-variant-tegra2-seaboard-private\n' % path_dict)
    tegra2_cmd = './cros_overlay_list --board tegra2 --variant seaboard'
    tegra2_expected_path = os.path.join(
        private_overlay_path, 'overlay-variant-tegra2-seaboard-private',
        'prebuilt.conf')
    targets = {'x86-generic': {'cmd': x86_cmd,
                               'output': x86_out,
                               'result': x86_expected_path},
               'tegra2_seaboard': {'cmd': tegra2_cmd,
                                   'output': tegra2_out,
                                   'result': tegra2_expected_path}
               }
    self.mox.StubOutWithMock(prebuilt.cros_build_lib, 'RunCommand')
    for target, expected_results in targets.iteritems():
      # create command object for output
      cmd_result_obj = cros_build_lib.CommandResult()
      cmd_result_obj.output = expected_results['output']
      prebuilt.cros_build_lib.RunCommand(
          expected_results['cmd'].split(), redirect_stdout=True,
          cwd=script_path).AndReturn(cmd_result_obj)
    self.mox.ReplayAll()
    for target, expected_results in targets.iteritems():
      self.assertEqual(
          prebuilt.DeterminePrebuiltConfFile(fake_path, target),
          expected_results['result'])

  def testDeterminePrebuiltConfGarbage(self):
    """Ensure an exception is raised on bad input."""
    self.assertRaises(prebuilt.UnknownBoardFormat,
                      prebuilt.DeterminePrebuiltConfFile,
                      'fake_path', 'asdfasdf')
class TestPackagesFileFiltering(unittest.TestCase):
  """Tests for filtering private packages out of a package index."""

  def testFilterPkgIndex(self):
    """Removing the private packages must leave exactly the public set."""
    pkgindex = SimplePackageIndex()
    is_private = lambda pkg: pkg in PRIVATE_PACKAGES
    pkgindex.RemoveFilteredPackages(is_private)
    self.assertEqual(PUBLIC_PACKAGES, pkgindex.packages)
    self.assertEqual(True, pkgindex.modified)
class TestPopulateDuplicateDB(unittest.TestCase):
  """Tests for PackageIndex._PopulateDuplicateDB.

  _PopulateDuplicateDB is expected to map each package's SHA1 to the URL
  it can be fetched from, skipping packages that lack a SHA1 entry.
  """

  def testEmptyIndex(self):
    """An index with no packages should leave the db untouched."""
    pkgindex = SimplePackageIndex(packages=False)
    db = {}
    pkgindex._PopulateDuplicateDB(db)
    self.assertEqual(db, {})

  def testNormalIndex(self):
    """Each package with a SHA1 is recorded under that SHA1."""
    pkgindex = SimplePackageIndex()
    db = {}
    pkgindex._PopulateDuplicateDB(db)
    self.assertEqual(len(db), 3)
    self.assertEqual(db['1'], 'http://www.example.com/gtk%2B/public1.tbz2')
    self.assertEqual(db['2'], 'http://www.example.com/gtk%2B/foo.tgz')
    self.assertEqual(db['3'], 'http://www.example.com/private.tbz2')

  def testMissingSHA1(self):
    """Packages without a SHA1 entry are silently skipped."""
    pkgindex = SimplePackageIndex()
    del pkgindex.packages[0]['SHA1']
    db = {}
    pkgindex._PopulateDuplicateDB(db)
    self.assertEqual(len(db), 2)
    self.assertEqual(db['2'], 'http://www.example.com/gtk%2B/foo.tgz')
    self.assertEqual(db['3'], 'http://www.example.com/private.tbz2')

  def testFailedPopulate(self):
    """A missing header or a package missing CPV raises KeyError."""
    pkgindex = SimplePackageIndex(header=False)
    # Each assertRaises gets its own fresh dict: reusing one dict could
    # carry partial state from the first failed populate into the second
    # check and mask a problem.
    self.assertRaises(KeyError, pkgindex._PopulateDuplicateDB, {})
    pkgindex = SimplePackageIndex()
    del pkgindex.packages[0]['CPV']
    self.assertRaises(KeyError, pkgindex._PopulateDuplicateDB, {})
class TestResolveDuplicateUploads(unittest.TestCase):
  """Tests for PackageIndex.ResolveDuplicateUploads.

  ResolveDuplicateUploads compares an index against previously uploaded
  indexes and rewrites the PATH of packages that are already available,
  returning the list of packages that still need uploading.
  """

  def testEmptyList(self):
    """Resolving against no other indexes is a no-op."""
    pkgindex = SimplePackageIndex()
    pristine = SimplePackageIndex()
    uploads = pkgindex.ResolveDuplicateUploads([])
    self.assertEqual(uploads, pristine.packages)
    self.assertEqual(pkgindex.packages, pristine.packages)
    self.assertEqual(pkgindex.modified, False)

  def testEmptyIndex(self):
    """Resolving against an index with no packages is a no-op."""
    pkgindex = SimplePackageIndex()
    pristine = SimplePackageIndex()
    empty = SimplePackageIndex(packages=False)
    uploads = pkgindex.ResolveDuplicateUploads([empty])
    self.assertEqual(uploads, pristine.packages)
    self.assertEqual(pkgindex.packages, pristine.packages)
    self.assertEqual(pkgindex.modified, False)

  def testDuplicates(self):
    """Duplicated packages get a PATH pointing at the existing upload."""
    pkgindex = SimplePackageIndex()
    dup_pkgindex = SimplePackageIndex()
    expected_pkgindex = SimplePackageIndex()
    for pkg in expected_pkgindex.packages:
      pkg.setdefault('PATH', urllib.quote(pkg['CPV'] + '.tbz2'))
    # Only the side effect on pkgindex.packages is under test here, so
    # the return value is deliberately ignored.
    pkgindex.ResolveDuplicateUploads([dup_pkgindex])
    self.assertEqual(pkgindex.packages, expected_pkgindex.packages)

  def testMissingSHA1(self):
    """A package without a SHA1 cannot be de-duplicated."""
    pkgindex = SimplePackageIndex()
    dup_pkgindex = SimplePackageIndex()
    expected_pkgindex = SimplePackageIndex()
    del pkgindex.packages[0]['SHA1']
    del expected_pkgindex.packages[0]['SHA1']
    for pkg in expected_pkgindex.packages[1:]:
      pkg.setdefault('PATH', pkg['CPV'] + '.tbz2')
    pkgindex.ResolveDuplicateUploads([dup_pkgindex])
    self.assertEqual(pkgindex.packages, expected_pkgindex.packages)
class TestWritePackageIndex(unittest.TestCase):
  """Tests for PackageIndex.WriteToNamedTemporaryFile."""

  def setUp(self):
    self.mox = mox.Mox()

  def tearDown(self):
    self.mox.UnsetStubs()
    self.mox.VerifyAll()

  def testSimple(self):
    """With Write stubbed out, the returned temp file reads back empty."""
    pkgindex = SimplePackageIndex()
    self.mox.StubOutWithMock(pkgindex, 'Write')
    pkgindex.Write(mox.IgnoreArg())
    self.mox.ReplayAll()
    temp_file = pkgindex.WriteToNamedTemporaryFile()
    self.assertEqual('', temp_file.read())
if __name__ == '__main__':
  # Discover and run every TestCase in this module when executed directly.
  unittest.main()