Update cbuildbot.py and prebuilt.py to deduplicate preflight prebuilts.

Preflight prebuilts reference the last full build, so they should always be complete as long as the last full build is still there.

Also add host prebuilts to the preflight prebuilts.

BUG=chromium-os:5311
TEST=Run unit tests for cbuildbot and prebuilt.py. Test runs of cbuildbot.py with --dryrun.

Review URL: http://codereview.chromium.org/5344002

Change-Id: Id95f94c02cc2f6cbd70a029d4f8b94617b7cc071
David James 2010-11-30 11:45:11 -08:00
parent c509a906da
commit 63c51c4d3c
5 changed files with 497 additions and 147 deletions


@ -317,17 +317,20 @@ def _GetPortageEnvVar(buildroot, board, envvar):
buildroot: The root directory where the build occurs. Must be an absolute
path.
board: Board type that was built on this machine. E.g. x86-generic.
envvar: The environment variable to get. E.g. "PORTAGE_BINHOST".
board: Board type that was built on this machine. E.g. x86-generic. If this
is None, get the env var from the host.
envvar: The environment variable to get. E.g. 'PORTAGE_BINHOST'.
Returns:
The value of the environment variable, as a string. If no such variable
can be found, return the empty string.
"""
cwd = os.path.join(buildroot, 'src', 'scripts')
binhost = RunCommand(['portageq-%s' % board, 'envvar', envvar],
cwd=cwd, redirect_stdout=True, enter_chroot=True,
error_ok=True)
portageq = 'portageq'
if board:
portageq += '-%s' % board
binhost = RunCommand([portageq, 'envvar', envvar], cwd=cwd,
redirect_stdout=True, enter_chroot=True, error_ok=True)
return binhost.rstrip('\n')
@ -533,7 +536,7 @@ def _ResolveOverlays(buildroot, overlays):
return paths
def _UploadPrebuilts(buildroot, board, overlay_config):
def _UploadPrebuilts(buildroot, board, overlay_config, binhosts):
"""Upload prebuilts.
Args:
@ -543,6 +546,8 @@ def _UploadPrebuilts(buildroot, board, overlay_config):
'private': Just the private overlay.
'public': Just the public overlay.
'both': Both the public and private overlays.
binhosts: The URLs of the current binhosts. Binaries that are already
present will not be uploaded twice. Empty URLs will be ignored.
"""
cwd = os.path.join(buildroot, 'src', 'scripts')
@ -552,6 +557,9 @@ def _UploadPrebuilts(buildroot, board, overlay_config):
'--board', board,
'--prepend-version', 'preflight',
'--key', _PREFLIGHT_BINHOST]
for binhost in binhosts:
if binhost:
cmd.extend(['--previous-binhost-url', binhost])
if overlay_config == 'public':
cmd.extend(['--upload', 'gs://chromeos-prebuilt'])
else:
@ -617,6 +625,7 @@ def main():
# Calculate list of overlay directories.
overlays = _ResolveOverlays(buildroot, buildconfig['overlays'])
board = buildconfig['board']
old_binhost = None
_PreFlightRinse(buildroot, buildconfig['board'], tracking_branch, overlays)
chroot_path = os.path.join(buildroot, 'chroot')
@ -627,9 +636,10 @@ def main():
else:
old_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST)
_IncrementalCheckout(buildroot)
new_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST)
if old_binhost != new_binhost:
RunCommand(['sudo', 'rm', '-rf', boardpath])
new_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST)
if old_binhost and old_binhost != new_binhost:
RunCommand(['sudo', 'rm', '-rf', boardpath])
# Check that all overlays can be found.
for path in overlays:
@ -684,7 +694,8 @@ def main():
if buildconfig['master']:
# Master bot needs to check if the other slaves completed.
if cbuildbot_comm.HaveSlavesCompleted(config):
_UploadPrebuilts(buildroot, board, buildconfig['overlays'])
_UploadPrebuilts(buildroot, board, buildconfig['overlays'],
[new_binhost])
_UprevPush(buildroot, tracking_branch, buildconfig['board'],
overlays, options.debug)
else:
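
Since board is now optional, the same helper serves both host and board binhosts. A minimal sketch of the two call styles (the board value is a placeholder):

# With board=None, _GetPortageEnvVar runs plain 'portageq' in the chroot;
# with a board, it runs the board-specific 'portageq-<board>' wrapper.
host_binhost = _GetPortageEnvVar(buildroot, None, _FULL_BINHOST)
board_binhost = _GetPortageEnvVar(buildroot, 'x86-generic', _FULL_BINHOST)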


@ -208,18 +208,26 @@ class CBuildBotTest(mox.MoxTestBase):
def testUploadPublicPrebuilts(self):
"""Test _UploadPrebuilts with a public location."""
check = mox.And(mox.IsA(list), mox.In('gs://chromeos-prebuilt'))
binhost = 'http://www.example.com'
binhosts = [binhost, None]
check = mox.And(mox.IsA(list), mox.In(binhost), mox.Not(mox.In(None)),
mox.In('gs://chromeos-prebuilt'))
cbuildbot.RunCommand(check, cwd='%s/src/scripts' % self._buildroot)
self.mox.ReplayAll()
cbuildbot._UploadPrebuilts(self._buildroot, self._test_board, 'public')
cbuildbot._UploadPrebuilts(self._buildroot, self._test_board, 'public',
binhosts)
self.mox.VerifyAll()
def testUploadPrivatePrebuilts(self):
"""Test _UploadPrebuilts with a private location."""
check = mox.And(mox.IsA(list), mox.In('chromeos-images:/var/www/prebuilt/'))
binhost = 'http://www.example.com'
binhosts = [binhost, None]
check = mox.And(mox.IsA(list), mox.In(binhost), mox.Not(mox.In(None)),
mox.In('chromeos-images:/var/www/prebuilt/'))
cbuildbot.RunCommand(check, cwd='%s/src/scripts' % self._buildroot)
self.mox.ReplayAll()
cbuildbot._UploadPrebuilts(self._buildroot, self._test_board, 'private')
cbuildbot._UploadPrebuilts(self._buildroot, self._test_board, 'private',
binhosts)
self.mox.VerifyAll()

chromite/lib/binpkg.py (new file, 307 lines)

@ -0,0 +1,307 @@
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Adapted from portage/getbinpkg.py -- Portage binary-package helper functions
# Copyright 2003-2004 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import operator
import os
import tempfile
import time
import urllib2
import urlparse
class PackageIndex(object):
"""A parser for the Portage Packages index file.
The Portage Packages index file serves to keep track of what packages are
included in a tree. It contains the following sections:
1) The header. The header tracks general key/value pairs that don't apply
to any specific package. E.g., it tracks the base URL of the packages
file, and the number of packages included in the file. The header is
terminated by a blank line.
2) The body. The body is a list of packages. Each package contains a list
of key/value pairs. Packages are either terminated by a blank line or
by the end of the file. Every package has a CPV entry, which serves as
a unique identifier for the package.
"""
def __init__(self):
"""Constructor."""
# The header tracks general key/value pairs that don't apply to any
# specific package. E.g., it tracks the base URL of the packages.
self.header = {}
# A list of packages (stored as a list of dictionaries).
self.packages = []
# Whether or not the PackageIndex has been modified since the last time it
# was written.
self.modified = False
def _PopulateDuplicateDB(self, db):
"""Populate db with SHA1 -> URL mapping for packages.
Args:
db: Dictionary to populate with SHA1 -> URL mapping for packages.
"""
uri = self.header['URI']
for pkg in self.packages:
cpv, sha1 = pkg['CPV'], pkg['SHA1']
path = pkg.get('PATH', cpv + '.tbz2')
db[sha1] = urlparse.urljoin(uri, path)
def _ReadPkgIndex(self, pkgfile):
"""Read a list of key/value pairs from the Packages file into a dictionary.
Both header entries and package entries are lists of key/value pairs, so
they can both be read by this function. Entries can be terminated by empty
lines or by the end of the file.
This function will read lines from the specified file until it encounters
a blank line or the end of the file.
Keys and values in the Packages file are separated by a colon and a space.
Keys may contain capital letters, numbers, and underscores, but may not
contain colons. Values may contain any character except a newline. In
particular, it is normal for values to contain colons.
Lines that have content, and do not contain a valid key/value pair, are
ignored. This is for compatibility with the Portage package parser, and
to allow for future extensions to the Packages file format.
All entries must contain at least one key/value pair. If the end of the
file is reached, an empty dictionary is returned.
Args:
pkgfile: A python file object.
Returns the dictionary of key-value pairs that was read from the file.
"""
d = {}
for line in pkgfile:
line = line.rstrip('\n')
if not line:
assert d, 'Packages entry must contain at least one key/value pair'
break
line = line.split(': ', 1)
if len(line) == 2:
k, v = line
d[k] = v
return d
def _WritePkgIndex(self, pkgfile, entry):
"""Write header entry or package entry to packages file.
The keys and values will be separated by a colon and a space. The entry
will be terminated by a blank line.
Args:
pkgfile: A python file object.
entry: A dictionary of the key/value pairs to write.
"""
lines = ['%s: %s' % (k, v) for k, v in sorted(entry.items()) if v]
pkgfile.write('%s\n\n' % '\n'.join(lines))
def _ReadHeader(self, pkgfile):
"""Read header of packages file.
Args:
pkgfile: A python file object.
"""
assert not self.header, 'Should only read header once.'
self.header = self._ReadPkgIndex(pkgfile)
def _ReadBody(self, pkgfile):
"""Read body of packages file.
Before calling this function, you must first read the header (using
_ReadHeader).
Args:
pkgfile: A python file object.
"""
assert self.header, 'Should read header first.'
assert not self.packages, 'Should only read body once.'
# Read all of the sections in the body by looping until we reach the end
# of the file.
while True:
d = self._ReadPkgIndex(pkgfile)
if not d:
break
if 'CPV' in d:
self.packages.append(d)
def Read(self, pkgfile):
"""Read the entire packages file.
Args:
pkgfile: A python file object.
"""
self._ReadHeader(pkgfile)
self._ReadBody(pkgfile)
def RemoveFilteredPackages(self, filter_fn):
"""Remove packages which match filter_fn.
Args:
filter_fn: A function which operates on packages. If it returns True,
the package should be removed.
"""
filtered = [p for p in self.packages if not filter_fn(p)]
if filtered != self.packages:
self.modified = True
self.packages = filtered
def ResolveDuplicateUploads(self, pkgindexes):
"""Point packages at files that have already been uploaded.
For each package in our index, check if there is an existing package that
has already been uploaded to the same base URI. If so, point that package
at the existing file, so that we don't have to upload the file.
Args:
pkgindexes: A list of PackageIndex objects containing info about packages
that have already been uploaded.
Returns:
A list of the packages that still need to be uploaded.
"""
db = {}
for pkgindex in pkgindexes:
pkgindex._PopulateDuplicateDB(db)
uploads = []
base_uri = self.header['URI']
for pkg in self.packages:
sha1 = pkg['SHA1']
uri = db.get(sha1)
if uri and uri.startswith(base_uri):
pkg['PATH'] = uri[len(base_uri):].lstrip('/')
else:
uploads.append(pkg)
return uploads
def SetUploadLocation(self, base_uri, path_prefix):
"""Set upload location to base_uri + path_prefix.
Args:
base_uri: Base URI for all packages in the file. We set
self.header['URI'] to this value, so all packages must live under
this directory.
path_prefix: Path prefix to use for all current packages in the file.
This will be added to the beginning of the path for every package.
"""
self.header['URI'] = base_uri
for pkg in self.packages:
pkg['PATH'] = urlparse.urljoin(path_prefix, pkg['CPV'] + '.tbz2')
def Write(self, pkgfile):
"""Write a packages file to disk.
If 'modified' flag is set, the TIMESTAMP and PACKAGES fields in the header
will be updated before writing to disk.
Args:
pkgfile: A python file object.
"""
if self.modified:
self.header['TIMESTAMP'] = str(long(time.time()))
self.header['PACKAGES'] = str(len(self.packages))
self.modified = False
self._WritePkgIndex(pkgfile, self.header)
for metadata in sorted(self.packages, key=operator.itemgetter('CPV')):
self._WritePkgIndex(pkgfile, metadata)
def WriteToNamedTemporaryFile(self):
"""Write pkgindex to a temporary file.
Args:
pkgindex: The PackageIndex object.
Returns:
A temporary file containing the packages from pkgindex.
"""
f = tempfile.NamedTemporaryFile()
self.Write(f)
f.flush()
f.seek(0)
return f
def _RetryUrlOpen(url, tries=3):
"""Open the specified url, retrying if we run into temporary errors.
We retry for both network errors and 5xx Server Errors. We do not retry
for HTTP errors with a non-5xx code.
Args:
url: The specified url.
tries: The number of times to try.
Returns:
The result of urllib2.urlopen(url).
"""
for i in range(tries):
try:
return urllib2.urlopen(url)
except urllib2.HTTPError as e:
if i + 1 >= tries or e.code < 500:
raise
else:
print 'Cannot GET %s: %s' % (url, str(e))
except urllib2.URLError as e:
if i + 1 >= tries:
raise
else:
print 'Cannot GET %s: %s' % (url, str(e))
print 'Sleeping for 10 seconds before retrying...'
time.sleep(10)
def GrabRemotePackageIndex(binhost_url):
"""Grab the latest binary package database from the specified URL.
Args:
binhost_url: Base URL of remote packages (PORTAGE_BINHOST).
Returns:
A PackageIndex object, if the Packages file can be retrieved. If the
server returns status code 404, None is returned.
"""
url = urlparse.urljoin(binhost_url, 'Packages')
try:
f = _RetryUrlOpen(url)
except urllib2.HTTPError as e:
if e.code == 404:
return None
raise
pkgindex = PackageIndex()
pkgindex.Read(f)
pkgindex.header.setdefault('URI', binhost_url)
f.close()
return pkgindex
def GrabLocalPackageIndex(package_path):
"""Read a local packages file from disk into a PackageIndex() object.
Args:
package_path: Directory containing Packages file.
Returns:
A PackageIndex object.
"""
packages_file = file(os.path.join(package_path, 'Packages'))
pkgindex = PackageIndex()
pkgindex.Read(packages_file)
packages_file.close()
return pkgindex
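
Taken together, these helpers support the deduplication flow that prebuilt.py uses below. A minimal sketch, with hypothetical paths and URLs:

from chromite.lib import binpkg

# Hypothetical inputs; the real values come from prebuilt.py's options.
package_path = '/buildroot/chroot/build/x86-generic/packages'
binhost_base_url = 'http://example.com/prebuilt'
previous_binhost = 'http://example.com/prebuilt/full-version'

# Load the freshly built local index and point it at the upload location.
pkg_index = binpkg.GrabLocalPackageIndex(package_path)
pkg_index.SetUploadLocation(binhost_base_url, 'x86-generic/preflight-version')

# Reuse binaries already present in the previous binhost (None on a 404).
old_index = binpkg.GrabRemotePackageIndex(previous_binhost)
uploads = pkg_index.ResolveDuplicateUploads([old_index] if old_index else [])

# Write the rewritten Packages file for upload alongside the new binaries;
# 'uploads' then feeds GenerateUploadDict (see prebuilt.py below).
tmp_packages_file = pkg_index.WriteToNamedTemporaryFile()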


@ -11,8 +11,11 @@ import re
import sys
import tempfile
import time
import urlparse
from chromite.lib import cros_build_lib
from chromite.lib.binpkg import (GrabLocalPackageIndex, GrabRemotePackageIndex,
PackageIndex)
"""
This script is used to upload host prebuilts as well as board BINHOSTS.
@ -228,70 +231,14 @@ def ShouldFilterPackage(file_path):
return False
def _ShouldFilterPackageFileSection(section):
"""Return whether an section in the package file should be filtered out.
Args:
section: The section, as a list of strings.
Returns:
True if the section should be excluded.
"""
for line in section:
if line.startswith("CPV: "):
package = line.replace("CPV: ", "").rstrip()
if ShouldFilterPackage(package):
return True
else:
return False
def FilterPackagesFile(packages_filename):
"""Read a portage Packages file and filter out private packages.
The new, filtered packages file is written to a temporary file.
Args:
packages_filename: The filename of the Packages file.
Returns:
filtered_packages: A filtered Packages file, as a NamedTemporaryFile.
"""
packages_file = open(packages_filename)
filtered_packages = tempfile.NamedTemporaryFile()
section = []
for line in packages_file:
if line == "\n":
if not _ShouldFilterPackageFileSection(section):
# Looks like this section doesn't contain a private package. Write it
# out.
filtered_packages.write("".join(section))
# Start next section.
section = []
section.append(line)
else:
if not _ShouldFilterPackageFileSection(section):
filtered_packages.write("".join(section))
packages_file.close()
# Flush contents to disk.
filtered_packages.flush()
filtered_packages.seek(0)
return filtered_packages
def _RetryRun(cmd, print_cmd=True, shell=False):
def _RetryRun(cmd, print_cmd=True, shell=False, cwd=None):
"""Run the specified command, retrying if necessary.
Args:
cmd: The command to run.
print_cmd: Whether to print out the cmd.
shell: Whether to treat the command as a shell.
cwd: Working directory to run command in.
Returns:
True if the command succeeded. Otherwise, returns False.
@ -301,7 +248,8 @@ def _RetryRun(cmd, print_cmd=True, shell=False):
# cros_build_lib.
for attempt in range(_RETRIES):
try:
output = cros_build_lib.RunCommand(cmd, print_cmd=print_cmd, shell=shell)
output = cros_build_lib.RunCommand(cmd, print_cmd=print_cmd, shell=shell,
cwd=cwd)
return True
except cros_build_lib.RunCommandError:
print 'Failed to run %s' % cmd
@ -320,12 +268,6 @@ def _GsUpload(args):
Return the arg tuple of two if the upload failed
"""
(local_file, remote_file) = args
if ShouldFilterPackage(local_file):
return
if local_file.endswith("/Packages"):
filtered_packages_file = FilterPackagesFile(local_file)
local_file = filtered_packages_file.name
cmd = '%s cp -a public-read %s %s' % (_GSUTIL_BIN, local_file, remote_file)
if not _RetryRun(cmd, print_cmd=False, shell=True):
@ -359,22 +301,24 @@ def RemoteUpload(files, pool=10):
pass
def GenerateUploadDict(local_path, gs_path):
"""Build a dictionary of local remote file key pairs for gsutil to upload.
def GenerateUploadDict(base_local_path, base_remote_path, pkgs):
"""Build a dictionary of local remote file key pairs to upload.
Args:
local_path: A path to the file on the local hard drive.
gs_path: Path to upload in Google Storage.
base_local_path: The base path to the files on the local hard drive.
base_remote_path: The base path to the remote files.
pkgs: The packages to upload.
Returns:
Returns a dictionary of file path/gs_dest_path pairs
Returns a dictionary of local_path/remote_path pairs
"""
files_to_sync = cros_build_lib.ListFiles(local_path)
upload_files = {}
for file_path in files_to_sync:
filename = file_path.replace(local_path, '').lstrip('/')
gs_file_path = os.path.join(gs_path, filename)
upload_files[file_path] = gs_file_path
for pkg in pkgs:
suffix = pkg['CPV'] + '.tbz2'
local_path = os.path.join(base_local_path, suffix)
assert os.path.exists(local_path)
remote_path = urlparse.urljoin(base_remote_path, suffix)
upload_files[local_path] = remote_path
return upload_files
@ -433,13 +377,14 @@ def UpdateBinhostConfFile(path, key, value):
def UploadPrebuilt(build_path, upload_location, version, binhost_base_url,
board=None, git_sync=False, git_sync_retries=5,
key='PORTAGE_BINHOST', sync_binhost_conf=False):
key='PORTAGE_BINHOST', pkg_indexes=[],
sync_binhost_conf=False):
"""Upload Host prebuilt files to Google Storage space.
Args:
build_path: The path to the root of the chroot.
upload_location: The upload location.
board: The board to upload to Google Storage, if this is None upload
board: The board to upload to Google Storage. If this is None, upload
host packages.
git_sync: If set, update make.conf of target to reference the latest
prebuilt packages generated here.
@ -447,6 +392,8 @@ def UploadPrebuilt(build_path, upload_location, version, binhost_base_url,
This helps avoid failures when multiple bots are modifying the same Repo.
default: 5
key: The variable key to update in the git file. (Default: PORTAGE_BINHOST)
pkg_indexes: Old uploaded prebuilts to compare against. Instead of
uploading duplicate files, we just link to the old files.
sync_binhost_conf: If set, update binhost config file in chromiumos-overlay
for the current board or host.
"""
@ -468,10 +415,22 @@ def UploadPrebuilt(build_path, upload_location, version, binhost_base_url,
git_file = os.path.join(build_path, DetermineMakeConfFile(board))
binhost_conf = os.path.join(build_path, _BINHOST_CONF_DIR, 'target',
'%s.conf' % board)
remote_location = os.path.join(upload_location, url_suffix)
remote_location = urlparse.urljoin(upload_location, url_suffix)
# Process Packages file, removing duplicates and filtered packages.
pkg_index = GrabLocalPackageIndex(package_path)
pkg_index.SetUploadLocation(binhost_base_url, url_suffix)
pkg_index.RemoveFilteredPackages(lambda pkg: ShouldFilterPackage(pkg))
uploads = pkg_index.ResolveDuplicateUploads(pkg_indexes)
# Write Packages file.
tmp_packages_file = pkg_index.WriteToNamedTemporaryFile()
if upload_location.startswith('gs://'):
upload_files = GenerateUploadDict(package_path, remote_location)
# Build list of files to upload.
upload_files = GenerateUploadDict(package_path, remote_location, uploads)
remote_file = urlparse.urljoin(remote_location, 'Packages')
upload_files[tmp_packages_file.name] = remote_file
print 'Uploading %s' % package_string
failed_uploads = RemoteUpload(upload_files)
@ -479,11 +438,19 @@ def UploadPrebuilt(build_path, upload_location, version, binhost_base_url,
error_msg = ['%s -> %s\n' % args for args in failed_uploads]
raise UploadFailed('Error uploading:\n%s' % error_msg)
else:
pkgs = ' '.join(p['CPV'] + '.tbz2' for p in uploads)
ssh_server, remote_path = remote_location.split(':', 1)
cmds = ['ssh %s mkdir -p %s' % (ssh_server, remote_path),
'rsync -av %s/ %s/' % (package_path, remote_location)]
d = { 'pkg_index': tmp_packages_file.name,
'pkgs': pkgs,
'remote_path': remote_path,
'remote_location': remote_location,
'ssh_server': ssh_server }
cmds = ['ssh %(ssh_server)s mkdir -p %(remote_path)s' % d,
'rsync -av %(pkg_index)s %(remote_location)s/Packages' % d]
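# 'rsync -Rav' preserves the relative package paths (e.g.
# <category>/<pkg>.tbz2); the command runs with cwd=package_path via the
# _RetryRun call below.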
if pkgs:
cmds.append('rsync -Rav %(pkgs)s %(remote_location)s/' % d)
for cmd in cmds:
if not _RetryRun(cmd, shell=True):
if not _RetryRun(cmd, shell=True, cwd=package_path):
raise UploadFailed('Could not run %s' % cmd)
url_value = '%s/%s/' % (binhost_base_url, url_suffix)
@ -506,6 +473,9 @@ def main():
parser.add_option('-H', '--binhost-base-url', dest='binhost_base_url',
default=_BINHOST_BASE_URL,
help='Base URL to use for binhost in make.conf updates')
parser.add_option('', '--previous-binhost-url', action='append',
default=[], dest='previous_binhost_url',
help='Previous binhost URL')
parser.add_option('-b', '--board', dest='board', default=None,
help='Board type that was built on this machine')
parser.add_option('-p', '--build-path', dest='build_path',
@ -542,26 +512,29 @@ def main():
usage(parser, 'Error: you need to provide an upload location using -u')
if options.filters:
# TODO(davidjames): It might be nice to be able to filter private ebuilds
# from rsync uploads as well, some day. But for now it's not needed.
if not options.upload.startswith("gs://"):
usage(parser, 'Error: filtering only works with gs:// paths')
LoadPrivateFilters(options.build_path)
version = GetVersion()
if options.prepend_version:
version = '%s-%s' % (options.prepend_version, version)
pkg_indexes = []
for url in options.previous_binhost_url:
pkg_index = GrabRemotePackageIndex(url)
if pkg_index:
pkg_indexes.append(pkg_index)
if options.sync_host:
UploadPrebuilt(options.build_path, options.upload, version,
options.binhost_base_url, git_sync=options.git_sync,
key=options.key,
key=options.key, pkg_indexes=pkg_indexes,
sync_binhost_conf=options.sync_binhost_conf)
if options.board:
UploadPrebuilt(options.build_path, options.upload, version,
options.binhost_base_url, board=options.board,
git_sync=options.git_sync, key=options.key,
pkg_indexes=pkg_indexes,
sync_binhost_conf=options.sync_binhost_conf)
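
For reference, the preflight invocation that cbuildbot.py's _UploadPrebuilts builds now looks roughly like this sketch (the script path, board, and binhost URL are placeholders, and 'PREFLIGHT_BINHOST' is the assumed value of _PREFLIGHT_BINHOST):

cmd = ['./prebuilt.py',
       '--build-path', '/buildroot',
       '--board', 'x86-generic',
       '--prepend-version', 'preflight',
       '--key', 'PREFLIGHT_BINHOST',
       '--previous-binhost-url', 'http://example.com/prebuilt/full-version',
       '--upload', 'gs://chromeos-prebuilt']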


@ -3,6 +3,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import mox
import os
import prebuilt
@ -10,6 +11,21 @@ import shutil
import tempfile
import unittest
from chromite.lib import cros_build_lib
from chromite.lib.binpkg import PackageIndex
PUBLIC_PACKAGES = [{'CPV': 'public1', 'SHA1': '1'},
{'CPV': 'public2', 'SHA1': '2', 'PATH': 'foo.tgz'}]
PRIVATE_PACKAGES = [{'CPV': 'private', 'SHA1': '3'}]
def SimplePackageIndex(header=True, packages=True):
pkgindex = PackageIndex()
if header:
pkgindex.header['URI'] = 'http://www.example.com'
if packages:
pkgindex.packages = copy.deepcopy(PUBLIC_PACKAGES + PRIVATE_PACKAGES)
return pkgindex
class TestUpdateFile(unittest.TestCase):
@ -137,14 +153,6 @@ class TestPrebuiltFilters(unittest.TestCase):
class TestPrebuilt(unittest.TestCase):
fake_path = '/b/cbuild/build/chroot/build/x86-dogfood/'
bin_package_mock = ['packages/x11-misc/shared-mime-info-0.70.tbz2',
'packages/x11-misc/util-macros-1.5.0.tbz2',
'packages/x11-misc/xbitmaps-1.1.0.tbz2',
'packages/x11-misc/read-edid-1.4.2.tbz2',
'packages/x11-misc/xdg-utils-1.0.2-r3.tbz2']
files_to_sync = [os.path.join(fake_path, file) for file in bin_package_mock]
def setUp(self):
self.mox = mox.Mox()
@ -153,23 +161,17 @@ class TestPrebuilt(unittest.TestCase):
self.mox.UnsetStubs()
self.mox.VerifyAll()
def _generate_dict_results(self, gs_bucket_path):
"""
Generate a dictionary result similar to GenerateUploadDict
"""
results = {}
for entry in self.files_to_sync:
results[entry] = os.path.join(
gs_bucket_path, entry.replace(self.fake_path, '').lstrip('/'))
return results
def testGenerateUploadDict(self):
base_local_path = '/b/cbuild/build/chroot/build/x86-dogfood/'
gs_bucket_path = 'gs://chromeos-prebuilt/host/version'
self.mox.StubOutWithMock(cros_build_lib, 'ListFiles')
cros_build_lib.ListFiles(self.fake_path).AndReturn(self.files_to_sync)
local_path = os.path.join(base_local_path, 'public1.tbz2')
self.mox.StubOutWithMock(prebuilt.os.path, 'exists')
prebuilt.os.path.exists(local_path).AndReturn(True)
self.mox.ReplayAll()
result = prebuilt.GenerateUploadDict(self.fake_path, gs_bucket_path)
self.assertEqual(result, self._generate_dict_results(gs_bucket_path))
pkgs = [{ 'CPV': 'public1' }]
result = prebuilt.GenerateUploadDict(base_local_path, gs_bucket_path, pkgs)
expected = { local_path: gs_bucket_path + '/public1.tbz2' }
self.assertEqual(result, expected)
def testFailonUploadFail(self):
"""Make sure we fail if one of the upload processes fail."""
@ -195,6 +197,73 @@ class TestPrebuilt(unittest.TestCase):
class TestPackagesFileFiltering(unittest.TestCase):
def testFilterPkgIndex(self):
pkgindex = SimplePackageIndex()
pkgindex.RemoveFilteredPackages(lambda pkg: pkg in PRIVATE_PACKAGES)
self.assertEqual(pkgindex.packages, PUBLIC_PACKAGES)
self.assertEqual(pkgindex.modified, True)
class TestPopulateDuplicateDB(unittest.TestCase):
def testEmptyIndex(self):
pkgindex = SimplePackageIndex(packages=False)
db = {}
pkgindex._PopulateDuplicateDB(db)
self.assertEqual(db, {})
def testNormalIndex(self):
pkgindex = SimplePackageIndex()
db = {}
pkgindex._PopulateDuplicateDB(db)
self.assertEqual(len(db), 3)
self.assertEqual(db['1'], 'http://www.example.com/public1.tbz2')
self.assertEqual(db['2'], 'http://www.example.com/foo.tgz')
self.assertEqual(db['3'], 'http://www.example.com/private.tbz2')
def testFailedPopulate(self):
db = {}
pkgindex = SimplePackageIndex(header=False)
self.assertRaises(KeyError, pkgindex._PopulateDuplicateDB, db)
pkgindex = SimplePackageIndex()
del pkgindex.packages[0]['CPV']
self.assertRaises(KeyError, pkgindex._PopulateDuplicateDB, db)
pkgindex = SimplePackageIndex()
del pkgindex.packages[0]['SHA1']
self.assertRaises(KeyError, pkgindex._PopulateDuplicateDB, db)
class TestResolveDuplicateUploads(unittest.TestCase):
def testEmptyList(self):
pkgindex = SimplePackageIndex()
pristine = SimplePackageIndex()
uploads = pkgindex.ResolveDuplicateUploads([])
self.assertEqual(uploads, pristine.packages)
self.assertEqual(pkgindex.packages, pristine.packages)
self.assertEqual(pkgindex.modified, False)
def testEmptyIndex(self):
pkgindex = SimplePackageIndex()
pristine = SimplePackageIndex()
empty = SimplePackageIndex(packages=False)
uploads = pkgindex.ResolveDuplicateUploads([empty])
self.assertEqual(uploads, pristine.packages)
self.assertEqual(pkgindex.packages, pristine.packages)
self.assertEqual(pkgindex.modified, False)
def testDuplicates(self):
pkgindex = SimplePackageIndex()
dup_pkgindex = SimplePackageIndex()
expected_pkgindex = SimplePackageIndex()
for pkg in expected_pkgindex.packages:
pkg.setdefault('PATH', pkg['CPV'] + '.tbz2')
uploads = pkgindex.ResolveDuplicateUploads([dup_pkgindex])
self.assertEqual(pkgindex.packages, expected_pkgindex.packages)
class TestWritePackageIndex(unittest.TestCase):
def setUp(self):
self.mox = mox.Mox()
@ -202,31 +271,13 @@ class TestPackagesFileFiltering(unittest.TestCase):
self.mox.UnsetStubs()
self.mox.VerifyAll()
def testFilterAllPackages(self):
self.mox.StubOutWithMock(prebuilt, 'ShouldFilterPackage')
prebuilt.ShouldFilterPackage("public1").AndReturn(False)
prebuilt.ShouldFilterPackage("private").AndReturn(True)
prebuilt.ShouldFilterPackage("public2").AndReturn(False)
full_packages_file = [
"foo: bar\n", "\n",
"CPV: public1\n", "foo: bar1\n", "\n",
"CPV: private\n", "foo: bar2\n", "\n",
"CPV: public2\n", "foo: bar3\n", "\n",
]
private_packages_file = [
"foo: bar\n", "\n",
"CPV: public1\n", "foo: bar1\n", "\n",
"CPV: public2\n", "foo: bar3\n", "\n",
]
def testSimple(self):
pkgindex = SimplePackageIndex()
self.mox.StubOutWithMock(pkgindex, 'Write')
pkgindex.Write(mox.IgnoreArg())
self.mox.ReplayAll()
temp_packages_file = tempfile.NamedTemporaryFile()
temp_packages_file.write("".join(full_packages_file))
temp_packages_file.flush()
new_packages_file = prebuilt.FilterPackagesFile(temp_packages_file.name)
new_contents = open(new_packages_file.name).read()
self.assertEqual("".join(private_packages_file), new_contents)
self.assertEqual("".join(private_packages_file), new_packages_file.read())
new_packages_file.close()
f = pkgindex.WriteToNamedTemporaryFile()
self.assertEqual(f.read(), '')
if __name__ == '__main__':