First submission of prebuilt upload script.

This introduces a way to upload prebuilts from the command line.

The following features are available (example invocations below):

  * Multiprocess upload (cuts upload time to roughly a fifth)
  * Versioned uploading, committing the version to a git-tracked file
  * Host prebuilt uploading
  * Board prebuilt uploading

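Example invocations (taken from the script's docstring; the paths and bucket
are illustrative):

  ./prebuilt.py -p /b/cbuild/build -s -u gs://chromeos-prebuilt
  ./prebuilt.py -b x86-dogfood -p /b/cbuild/build/ -u gs://chromeos-prebuilt -g
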
BUG=chromium-os:4843,chromium-os:5855

TEST=./prebuilt_unittest.py
{'/b/cbuild/build/chroot/build/x86-dogfood/packages/x11-misc/util-macros-1.5.0.tbz2':
'gs://chromeos-prebuilt/host/version/packages/x11-misc/util-macros-1.5.0.tbz2',
'/b/cbuild/build/chroot/build/x86-dogf}
.FILTERING /usr/local/package/oob
FILTERING /var/tmp/bibby.file
.....Updating stage 20100309/stage3-amd64-20100309.tar.bz2 to stage test_update
.
----------------------------------------------------------------------
Ran 7 tests in 0.002s

OK

Review URL: http://codereview.chromium.org/3452032

Author: Scott Zawalski
Date:   2010-10-01 11:28:44 -07:00
Parent: 3da3113c01
Commit: 9d892dd99f

4 changed files, 440 additions, 0 deletions:

  chromite/__init__.py       (new file, empty)
  chromite/lib/__init__.py   (new file, empty)
  prebuilt.py                (new executable file, 309 lines)
  prebuilt_unittest.py       (new executable file, 131 lines)

prebuilt.py
@@ -0,0 +1,309 @@
#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import datetime
import multiprocessing
import optparse
import os
import sys

from chromite.lib import cros_build_lib
"""
This script is used to upload host prebuilts as well as board BINHOSTS to
Google Storage.
After a build is successfully uploaded a file is updated with the proper
BINHOST version as well as the target board. This file is defined in GIT_FILE
To read more about prebuilts/binhost binary packages please refer to:
http://sites/chromeos/for-team-members/engineering/releng/prebuilt-binaries-for-streamlining-the-build-process
Example of uploading prebuilt amd64 host files
./prebuilt.py -p /b/cbuild/build -s -u gs://chromeos-prebuilt
Example of uploading x86-dogfood binhosts
./prebuilt.py -b x86-dogfood -p /b/cbuild/build/ -u gs://chromeos-prebuilt -g
"""

VER_FILE = 'src/third_party/chromiumos-overlay/chromeos/config/stable_versions'

# As per http://crosbug.com/5855, always filter the packages below.
_FILTER_PACKAGES = set()
_RETRIES = 3
_HOST_PACKAGES_PATH = 'chroot/var/lib/portage/pkgs'
_HOST_TARGET = 'amd64'
_BOARD_PATH = 'chroot/build/%(board)s'
_BOTO_CONFIG = '/home/chrome-bot/external-boto'
# Board packages land under board/<board>/<version>/.
_GS_BOARD_PATH = 'board/%(board)s/%(version)s/'
# We only support amd64 right now.
_GS_HOST_PATH = 'host/%s' % _HOST_TARGET
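
# With the constants above, packages are uploaded to Google Storage paths of
# the form:
#   gs://<bucket>/host/amd64/<version>/...      (host prebuilts)
#   gs://<bucket>/board/<board>/<version>/...   (board BINHOSTS)
# where <version> is the timestamp returned by GetVersion() below
# (DD.MM.YY.HHMMSS). When git syncing is enabled, RevGitFile() commits a
# '<key> <version>' line (key being the board name, or 'amd64' for host
# packages) to the stable_versions file named by VER_FILE.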


def UpdateLocalFile(filename, key, value):
  """Update the key in file with the value passed.

  File format:
    key value

  Args:
    filename: Name of file to modify.
    key: The variable key to update.
    value: Value to write with the key.
  """
  file_fh = open(filename)
  file_lines = []
  found = False
  for line in file_fh:
    file_var, file_val = line.split()
    if file_var == key:
      found = True
      print 'Updating %s %s to %s %s' % (file_var, file_val, key, value)
      file_lines.append('%s %s' % (key, value))
    else:
      file_lines.append('%s %s' % (file_var, file_val))

  if not found:
    file_lines.append('%s %s' % (key, value))

  file_fh.close()

  # Write out the new file.
  new_file_fh = open(filename, 'w')
  new_file_fh.write('\n'.join(file_lines))
  new_file_fh.close()


def RevGitFile(filename, key, value):
  """Update and push the git file.

  Args:
    filename: file to modify that is in a git repo already.
    key: board or host package type, e.g. x86-dogfood.
    value: string representing the version of the prebuilt that has been
      uploaded.
  """
  prebuilt_branch = 'prebuilt_branch'
  old_cwd = os.getcwd()
  os.chdir(os.path.dirname(filename))
  cros_build_lib.RunCommand('repo start %s .' % prebuilt_branch, shell=True)
  UpdateLocalFile(filename, key, value)
  description = 'Update BINHOST key/value %s %s' % (key, value)
  print description
  git_ssh_config_cmd = (
      'git config url.ssh://git@gitrw.chromium.org:9222.pushinsteadof '
      'http://git.chromium.org/git')
  try:
    cros_build_lib.RunCommand(git_ssh_config_cmd, shell=True)
    cros_build_lib.RunCommand('git pull', shell=True)
    cros_build_lib.RunCommand('git config push.default tracking', shell=True)
    cros_build_lib.RunCommand('git commit -am "%s"' % description, shell=True)
    cros_build_lib.RunCommand('git push', shell=True)
  finally:
    cros_build_lib.RunCommand('repo abandon %s .' % prebuilt_branch,
                              shell=True)
    os.chdir(old_cwd)


def GetVersion():
  """Get the version to put in LATEST and update the git version with."""
  return datetime.datetime.now().strftime('%d.%m.%y.%H%M%S')


def LoadFilterFile(filter_file):
  """Load a file with keywords on a per line basis.

  Args:
    filter_file: file to load into _FILTER_PACKAGES.

  Returns:
    The updated _FILTER_PACKAGES set.
  """
  filter_fh = open(filter_file)
  try:
    _FILTER_PACKAGES.update([keyword.strip() for keyword in filter_fh])
  finally:
    filter_fh.close()

  return _FILTER_PACKAGES
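
# A filter file is plain text with one keyword per line; any package path
# containing one of the keywords is skipped by ShouldFilterPackage() below.
# For example, the unit test uses the made-up keywords 'oob', 'bibby' and
# 'bob', so a path such as /usr/local/package/oob gets filtered out.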


def ShouldFilterPackage(file_path):
  """Skip a particular file if it matches a pattern.

  Skip any files that match the list of packages to filter in
  _FILTER_PACKAGES.

  Args:
    file_path: string of a file path to inspect against _FILTER_PACKAGES.

  Returns:
    True if we should filter the package,
    False otherwise.
  """
  for name in _FILTER_PACKAGES:
    if name in file_path:
      print 'FILTERING %s' % file_path
      return True

  return False


def _GsUpload(args):
  """Upload to GS bucket.

  Args:
    args: a tuple of two arguments that contains local_file and remote_file.
  """
  (local_file, remote_file) = args
  if ShouldFilterPackage(local_file):
    return

  cmd = 'gsutil cp -a public-read %s %s' % (local_file, remote_file)
  # TODO(scottz): port to use _Run or similar when it is available in
  # cros_build_lib.
  for attempt in range(_RETRIES):
    try:
      output = cros_build_lib.RunCommand(cmd, print_cmd=False, shell=True)
      break
    except cros_build_lib.RunCommandError:
      print 'Failed to sync %s -> %s, retrying' % (local_file, remote_file)
  else:
    # The for loop's else clause only runs if every retry failed (no break).
    # TODO(scottz): potentially return what failed so we can do something
    # with it, but for now just print an error.
    print 'Retry failed uploading %s -> %s, giving up' % (
        local_file, remote_file)


def RemoteUpload(files, pool=10):
  """Upload to Google Storage.

  Create a pool of processes and call _GsUpload with the proper arguments.

  Args:
    files: dictionary with keys to local files and values to remote path.
    pool: integer of maximum processes to have at the same time.
  """
  # TODO(scottz) port this to use _RunManyParallel when it is available in
  # cros_build_lib.
  pool = multiprocessing.Pool(processes=pool)
  workers = []
  for local_file, remote_path in files.iteritems():
    workers.append((local_file, remote_path))

  result = pool.map_async(_GsUpload, workers, chunksize=1)
  while True:
    try:
      # Wait with a (long) timeout rather than blocking forever, so the
      # parent process stays interruptible while the uploads run.
      result.get(60*60)
      break
    except multiprocessing.TimeoutError:
      pass


def GenerateUploadDict(local_path, gs_path, strip_str):
  """Build a dictionary of local remote file key pairs for gsutil to upload.

  Args:
    local_path: A path to the file on the local hard drive.
    gs_path: Path to upload in Google Storage.
    strip_str: String to remove from the local_path so that the relative
      file path can be tacked on to the gs_path.

  Returns:
    A dictionary of local_path/gs_dest_path pairs.
  """
  files_to_sync = cros_build_lib.ListFiles(local_path)
  upload_files = {}
  for file_path in files_to_sync:
    filename = file_path.replace(strip_str, '').lstrip('/')
    gs_file_path = os.path.join(gs_path, filename)
    upload_files[file_path] = gs_file_path

  return upload_files


def UploadPrebuilt(build_path, bucket, board=None, git_file=None):
  """Upload host or board prebuilt files to Google Storage space.

  Args:
    build_path: The path to the root of the chroot.
    bucket: The Google Storage bucket to upload to.
    board: The board to upload to Google Storage; if this is None, upload
      host packages.
    git_file: If set, update this file with a host/version combo, commit and
      push it.
  """
  version = GetVersion()

  if not board:
    # We are uploading host packages.
    # TODO(scottz): eventually add support for different host_targets.
    package_path = os.path.join(build_path, _HOST_PACKAGES_PATH)
    gs_path = os.path.join(bucket, _GS_HOST_PATH, version)
    strip_pattern = package_path
    package_string = _HOST_TARGET
  else:
    board_path = os.path.join(build_path, _BOARD_PATH % {'board': board})
    package_path = os.path.join(board_path, 'packages')
    package_string = board
    strip_pattern = board_path
    gs_path = os.path.join(
        bucket, _GS_BOARD_PATH % {'board': board, 'version': version})

  upload_files = GenerateUploadDict(package_path, gs_path, strip_pattern)
  print 'Uploading %s' % package_string
  RemoteUpload(upload_files)

  if git_file:
    RevGitFile(git_file, package_string, version)


def usage(parser, msg):
  """Display usage message and parser help then exit with 1."""
  print >> sys.stderr, msg
  parser.print_help()
  sys.exit(1)


def main():
  parser = optparse.OptionParser()
  parser.add_option('-b', '--board', dest='board', default=None,
                    help='Board type that was built on this machine')
  parser.add_option('-p', '--build-path', dest='build_path',
                    help='Path to the chroot')
  parser.add_option('-s', '--sync-host', dest='sync_host',
                    default=False, action='store_true',
                    help='Sync host prebuilts')
  parser.add_option('-g', '--git-sync', dest='git_sync',
                    default=False, action='store_true',
                    help='Enable git version sync (this commits to a repo)')
  parser.add_option('-u', '--upload', dest='upload',
                    default=None,
                    help='Upload to GS bucket')
  parser.add_option('-f', '--filter', dest='filter_file',
                    default=None,
                    help='File to use for filtering GS bucket uploads')

  options, args = parser.parse_args()

  # Set up the boto environment for gsutil to use.
  os.environ['BOTO_CONFIG'] = _BOTO_CONFIG

  if not options.build_path:
    usage(parser, 'Error: you need to provide a chroot path')
  if not options.upload:
    usage(parser, 'Error: you need to provide a gsutil upload bucket -u')
  if options.filter_file:
    LoadFilterFile(options.filter_file)

  git_file = None
  if options.git_sync:
    git_file = os.path.join(options.build_path, VER_FILE)

  if options.sync_host:
    UploadPrebuilt(options.build_path, options.upload, git_file=git_file)

  if options.board:
    UploadPrebuilt(options.build_path, options.upload, board=options.board,
                   git_file=git_file)


if __name__ == '__main__':
  main()

prebuilt_unittest.py
@@ -0,0 +1,131 @@
#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import mox
import os
import prebuilt
import tempfile
import unittest

from chromite.lib import cros_build_lib


class TestUpdateFile(unittest.TestCase):

  def setUp(self):
    self.contents_str = ['stage 20100309/stage3-amd64-20100309.tar.bz2',
                         'portage portage-20100310.tar.bz2']
    temp_fd, self.version_file = tempfile.mkstemp()
    os.write(temp_fd, '\n'.join(self.contents_str))
    os.close(temp_fd)

  def tearDown(self):
    os.remove(self.version_file)

  def _read_version_file(self):
    """Read the contents of self.version_file and return as a list."""
    version_fh = open(self.version_file)
    try:
      return [line.strip() for line in version_fh.readlines()]
    finally:
      version_fh.close()

  def _verify_key_pair(self, key, val):
    file_contents = self._read_version_file()
    for entry in file_contents:
      file_key, file_val = entry.split()
      if file_key == key:
        if val == file_val:
          break
    else:
      # The for loop's else clause runs only if we never hit the break above.
      self.fail('Could not find "%s %s" in version file' % (key, val))

  def testAddVariableThatDoesNotExist(self):
    """Add in a new variable that was not present in the file."""
    key = 'x86-testcase'
    value = '1234567'
    prebuilt.UpdateLocalFile(self.version_file, key, value)
    self._verify_key_pair(key, value)

  def testUpdateVariable(self):
    """Test updating a variable that already exists."""
    # Take the first entry in contents.
    key, val = self.contents_str[0].split()
    new_val = 'test_update'
    self._verify_key_pair(key, val)
    prebuilt.UpdateLocalFile(self.version_file, key, new_val)
    self._verify_key_pair(key, new_val)


class TestPrebuiltFilters(unittest.TestCase):

  def setUp(self):
    self.FAUX_FILTERS = set(['oob', 'bibby', 'bob'])
    temp_fd, self.filter_filename = tempfile.mkstemp()
    os.write(temp_fd, '\n'.join(self.FAUX_FILTERS))
    os.close(temp_fd)

  def tearDown(self):
    os.remove(self.filter_filename)

  def testLoadFilterFile(self):
    """Load a filter file and ensure all of its keywords are picked up."""
    loaded_filters = prebuilt.LoadFilterFile(self.filter_filename)
    self.assertEqual(self.FAUX_FILTERS, loaded_filters)

  def testFilterPattern(self):
    """Check that particular packages are filtered properly."""
    prebuilt.LoadFilterFile(self.filter_filename)
    file_list = ['/usr/local/package/oob',
                 '/usr/local/package/other/path/valid',
                 '/var/tmp/bibby.file',
                 '/tmp/b/o/b']
    expected_list = ['/usr/local/package/other/path/valid',
                     '/tmp/b/o/b']
    filtered_list = [file for file in file_list if not
                     prebuilt.ShouldFilterPackage(file)]
    self.assertEqual(expected_list, filtered_list)


class TestPrebuilt(unittest.TestCase):

  fake_path = '/b/cbuild/build/chroot/build/x86-dogfood/'
  bin_package_mock = ['packages/x11-misc/shared-mime-info-0.70.tbz2',
                      'packages/x11-misc/util-macros-1.5.0.tbz2',
                      'packages/x11-misc/xbitmaps-1.1.0.tbz2',
                      'packages/x11-misc/read-edid-1.4.2.tbz2',
                      'packages/x11-misc/xdg-utils-1.0.2-r3.tbz2']
  files_to_sync = [os.path.join(fake_path, file) for file in bin_package_mock]

  def setUp(self):
    self.mox = mox.Mox()

  def tearDown(self):
    self.mox.UnsetStubs()
    self.mox.VerifyAll()

  def _generate_dict_results(self, gs_bucket_path):
    """Generate a dictionary result similar to GenerateUploadDict."""
    results = {}
    for entry in self.files_to_sync:
      results[entry] = os.path.join(
          gs_bucket_path, entry.replace(self.fake_path, '').lstrip('/'))
    return results

  def testGenerateUploadDict(self):
    gs_bucket_path = 'gs://chromeos-prebuilt/host/version'
    self.mox.StubOutWithMock(cros_build_lib, 'ListFiles')
    cros_build_lib.ListFiles(' ').AndReturn(self.files_to_sync)
    self.mox.ReplayAll()
    result = prebuilt.GenerateUploadDict(' ', gs_bucket_path, self.fake_path)
    self.assertEqual(result, self._generate_dict_results(gs_bucket_path))


if __name__ == '__main__':
  unittest.main()