A few more style changes and logic changes from code review
This commit is contained in:
parent
6f0584cd2d
commit
6332272fa7
prebuilt.py: 35
prebuilt.py: 35
@@ -11,8 +11,8 @@ from multiprocessing import Pool
 
 from chromite.lib import cros_build_lib
 """
 This script is used to upload host prebuilts as well as board BINHOSTS to
 Google Storage.
 
 After a build is successfully uploaded a file is updated with the proper
 BINHOST version as well as the target board. This file is defined in GIT_FILE
@@ -96,6 +96,7 @@ def RevGitFile(filename, key, value):
                                       'http://git.chromium.org/git')
   try:
     cros_build_lib.RunCommand(git_ssh_config_cmd, shell=True)
+    cros_build_lib.RunCommand('git pull', shell=True)
     cros_build_lib.RunCommand('git config push.default tracking', shell=True)
     cros_build_lib.RunCommand('git commit -am "%s"' % description, shell=True)
     cros_build_lib.RunCommand('git push', shell=True)
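The added `git pull` runs right after the SSH config setup and before the commit and push, so the checkout holding the BINHOST file is current when RevGitFile commits the new version. A minimal sketch of that tail end of the flow, using subprocess in place of cros_build_lib.RunCommand (the helper name and its description argument are illustrative, not part of the commit):

import subprocess

def push_version_update(description):
  # Illustrative stand-in for the tail of RevGitFile: pull first (the line this
  # hunk adds), then commit every tracked change and push it upstream.
  for cmd in ('git pull',
              'git config push.default tracking',
              'git commit -am "%s"' % description,
              'git push'):
    subprocess.check_call(cmd, shell=True)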
@@ -116,13 +117,12 @@ def LoadFilterFile(filter_file):
     filter_file: file to load into FILTER_PACKAGES
   """
   filter_fh = open(filter_file)
-  global FILTER_PACKAGES
   try:
-    FILTER_PACKAGES.update(set([filter.strip() for filter in filter_fh]))
+    FILTER_PACKAGES.update([filter.strip() for filter in filter_fh])
   finally:
     filter_fh.close()
   return FILTER_PACKAGES
 
 
 def ShouldFilterPackage(file_path):
   """Skip a particular file if it matches a pattern.
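Both removals in this hunk lean on the same two Python facts: set.update() accepts any iterable, so the explicit set() wrapper was redundant, and a `global` statement is only needed to rebind a name, not to mutate the existing set. A small sketch of the equivalent, assuming the same one-pattern-per-line file format (the function name here is illustrative):

FILTER_PACKAGES = set()

def load_filter_file(filter_file):
  # Illustrative equivalent of LoadFilterFile: one package pattern per line.
  # set.update() takes any iterable, so no set() wrapper is needed, and no
  # `global` statement is needed because the set is mutated, never rebound.
  with open(filter_file) as filter_fh:
    FILTER_PACKAGES.update(line.strip() for line in filter_fh)
  return FILTER_PACKAGES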
@@ -155,30 +155,31 @@ def _GsUpload(args):
     return
 
   cmd = 'gsutil cp -a public-read %s %s' % (local_file, remote_file)
-  # TODO: port to use _Run or similar when it is available in cros_build_lib.
+  # TODO(scottz): port to use _Run or similar when it is available in
+  # cros_build_lib.
   for attempt in range(_RETRIES):
     try:
       output = cros_build_lib.RunCommand(cmd, print_cmd=False, shell=True)
       break
     except cros_build_lib.RunCommandError:
       print 'Failed to sync %s -> %s, retrying' % (local_file, remote_file)
   else:
-    # TODO: potentially return what failed so we can do something with it but
-    # for now just print an error.
+    # TODO(scottz): potentially return what failed so we can do something with
+    # with it but for now just print an error.
     print 'Retry failed uploading %s -> %s, giving up' % (local_file,
                                                           remote_file)
 
 
 def RemoteUpload(files, pool=10):
   """Upload to google storage.
 
   Create a pool of process and call _GsUpload with the proper arguments.
 
   Args:
     files: dictionary with keys to local files and values to remote path.
     pool: integer of maximum proesses to have at the same time.
   """
-  # TODO port this to use _RunManyParallel when it is available in
+  # TODO(scottz) port this to use _RunManyParallel when it is available in
   # cros_build_lib
   pool = Pool(processes=pool)
   workers = []
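The retry loop above relies on Python's for/else: the else block runs only when the loop finishes without hitting `break`, i.e. every attempt failed. A self-contained sketch of the same pattern, with subprocess standing in for cros_build_lib and an assumed value for the _RETRIES constant:

import subprocess

_RETRIES = 3  # assumed value; the real constant is defined elsewhere in prebuilt.py

def gs_upload(local_file, remote_file):
  # Illustrative per-file copy mirroring _GsUpload's retry loop.
  cmd = 'gsutil cp -a public-read %s %s' % (local_file, remote_file)
  for _attempt in range(_RETRIES):
    try:
      subprocess.check_call(cmd, shell=True)
      break  # success, so the else clause below is skipped
    except subprocess.CalledProcessError:
      print('Failed to sync %s -> %s, retrying' % (local_file, remote_file))
  else:
    print('Retry failed uploading %s -> %s, giving up' % (local_file, remote_file))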
@@ -194,8 +195,6 @@ def RemoteUpload(files, pool=10):
     pass
 
 
-
-
 def GenerateUploadDict(local_path, gs_path, strip_str):
   """Build a dictionary of local remote file key pairs for gsutil to upload.
 
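RemoteUpload's docstring above describes the shape of the fan-out: a dictionary of local-to-remote paths handed to a pool of worker processes, each running the single-file upload. A rough sketch of that shape with multiprocessing.Pool; the helper here is a placeholder, not the real _GsUpload:

from multiprocessing import Pool

def _upload_one(local_file, remote_file):
  # Placeholder for the per-file gsutil copy; see the _GsUpload sketch above.
  print('would upload %s -> %s' % (local_file, remote_file))

def remote_upload(files, pool=10):
  # files: dict mapping a local package path to its remote gs:// destination;
  # each pair goes to a worker process, mirroring the docstring above.
  with Pool(processes=pool) as p:
    p.starmap(_upload_one, files.items())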
@@ -224,12 +223,14 @@ def UploadPrebuilt(build_path, bucket, board=None, git_file=None):
   Args:
     build_path: The path to the root of the chroot.
     bucket: The Google Storage bucket to upload to.
+    board: The board to upload to Google Storage, if this is None upload
+      host packages.
     git_file: If set, update this file with a host/version combo, commit and
       push it.
   """
   version = GetVersion()
 
-  if board is None:
+  if not board:
     # We are uploading host packages
     # TODO: eventually add support for different host_targets
     package_path = os.path.join(build_path, _HOST_PACKAGES_PATH)
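The docstring now spells out the board argument, and the guard loosens from `is None` to falsy, so an empty board string is also treated as a host upload. A small sketch of that branch; the path constants and the board package layout are assumed placeholders, only the `if not board` check and the _GS_BOARD_PATH substitution come from the hunks:

import os

_HOST_PACKAGES_PATH = 'var/lib/portage/pkgs'      # assumed placeholder
_GS_BOARD_PATH = 'board/%(board)s/%(version)s/'   # assumed placeholder

def pick_package_paths(build_path, bucket, version, board=None):
  if not board:
    # Host packages: both None and '' land here, which is the point of the
    # `if not board` change.
    package_path = os.path.join(build_path, _HOST_PACKAGES_PATH)
    gs_path = os.path.join(bucket, version)
  else:
    package_path = os.path.join(build_path, 'chroot', 'build', board, 'packages')
    gs_path = os.path.join(bucket, _GS_BOARD_PATH % {'board': board,
                                                     'version': version})
  return package_path, gs_path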
@@ -243,7 +244,7 @@ def UploadPrebuilt(build_path, bucket, board=None, git_file=None):
     strip_pattern = board_path
     gs_path = os.path.join(bucket, _GS_BOARD_PATH % {'board': board,
                                                      'version': version})
 
   upload_files = GenerateUploadDict(package_path, gs_path, strip_pattern)
 
   print 'Uploading %s' % package_string
@@ -278,7 +279,7 @@ def main():
   parser.add_option('-f', '--filter', dest='filter_file',
                     default=None,
                     help='File to use for filtering GS bucket uploads')
 
   options, args = parser.parse_args()
   # Setup boto environment for gsutil to use
   os.environ['BOTO_CONFIG'] = _BOTO_CONFIG
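For reference, the -f/--filter option and the BOTO_CONFIG export in main() fit together as below; the surrounding parser setup and the config path are illustrative, only the option definition and the environment variable come from the hunk:

import os
import optparse

def parse_options(argv=None):
  parser = optparse.OptionParser()
  parser.add_option('-f', '--filter', dest='filter_file', default=None,
                    help='File to use for filtering GS bucket uploads')
  options, args = parser.parse_args(argv)
  # gsutil reads its credentials from the file named by BOTO_CONFIG.
  os.environ['BOTO_CONFIG'] = os.path.expanduser('~/.boto')  # placeholder path
  return options, args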
@@ -40,7 +40,7 @@ class TestUpdateFile(unittest.TestCase):
     else:
       self.fail('Could not find "%s %s" in version file' % (key, val))
 
-  def testAddVariableThatDoesnotExist(self):
+  def testAddVariableThatDoesNotExist(self):
     """Add in a new variable that was no present in the file."""
     key = 'x86-testcase'
     value = '1234567'
@@ -56,7 +56,7 @@ class TestUpdateFile(unittest.TestCase):
     self._verify_key_pair(key, val)
     prebuilt.UpdateLocalFile(self.version_file, key, new_val)
     self._verify_key_pair(key, new_val)
 
 
 class TestPrebuiltFilters(unittest.TestCase):
 
@@ -92,15 +92,14 @@ class TestPrebuiltFilters(unittest.TestCase):
 
 
 class TestPrebuilt(unittest.TestCase):
+  fake_path = '/b/cbuild/build/chroot/build/x86-dogfood/'
+  bin_package_mock = ['packages/x11-misc/shared-mime-info-0.70.tbz2',
+                      'packages/x11-misc/util-macros-1.5.0.tbz2',
+                      'packages/x11-misc/xbitmaps-1.1.0.tbz2',
+                      'packages/x11-misc/read-edid-1.4.2.tbz2',
+                      'packages/x11-misc/xdg-utils-1.0.2-r3.tbz2']
 
-  files_to_sync = ['/b/cbuild/build/chroot/build/x86-dogfood/packages/x11-misc/shared-mime-info-0.70.tbz2',
-                   '/b/cbuild/build/chroot/build/x86-dogfood/packages/x11-misc/util-macros-1.5.0.tbz2',
-                   '/b/cbuild/build/chroot/build/x86-dogfood/packages/x11-misc/xbitmaps-1.1.0.tbz2',
-                   '/b/cbuild/build/chroot/build/x86-dogfood/packages/x11-misc/read-edid-1.4.2.tbz2',
-                   '/b/cbuild/build/chroot/build/x86-dogfood/packages/x11-misc/xdg-utils-1.0.2-r3.tbz2']
+  files_to_sync = [os.path.join(fake_path, file) for file in bin_package_mock]
 
-  strip_path = '/b/cbuild/build/chroot/build/x86-dogfood/'
 
 
   def setUp(self):
     self.mox = mox.Mox()
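The hard-coded absolute paths become a comprehension over bin_package_mock; joining each relative package path onto fake_path reproduces exactly the strings the old list spelled out. A standalone check of that equivalence, using the values from the hunk:

import os

fake_path = '/b/cbuild/build/chroot/build/x86-dogfood/'
bin_package_mock = ['packages/x11-misc/shared-mime-info-0.70.tbz2',
                    'packages/x11-misc/util-macros-1.5.0.tbz2',
                    'packages/x11-misc/xbitmaps-1.1.0.tbz2',
                    'packages/x11-misc/read-edid-1.4.2.tbz2',
                    'packages/x11-misc/xdg-utils-1.0.2-r3.tbz2']
files_to_sync = [os.path.join(fake_path, f) for f in bin_package_mock]
# e.g. files_to_sync[0] ==
# '/b/cbuild/build/chroot/build/x86-dogfood/packages/x11-misc/shared-mime-info-0.70.tbz2'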
@@ -109,25 +108,22 @@ class TestPrebuilt(unittest.TestCase):
     self.mox.UnsetStubs()
     self.mox.VerifyAll()
 
 
   def _generate_dict_results(self, gs_bucket_path):
     """
     Generate a dictionary result similar to GenerateUploadDict
     """
     results = {}
     for entry in self.files_to_sync:
-      results[entry] = os.path.join(gs_bucket_path,
-                                    entry.replace(self.strip_path, '').lstrip('/'))
+      results[entry] = os.path.join(
+          gs_bucket_path, entry.replace(self.fake_path, '').lstrip('/'))
     return results
 
 
   def testGenerateUploadDict(self):
     gs_bucket_path = 'gs://chromeos-prebuilt/host/version'
     self.mox.StubOutWithMock(cros_build_lib, 'ListFiles')
     cros_build_lib.ListFiles(' ').AndReturn(self.files_to_sync)
     self.mox.ReplayAll()
-    result = prebuilt.GenerateUploadDict(' ', gs_bucket_path, self.strip_path)
-    print result
+    result = prebuilt.GenerateUploadDict(' ', gs_bucket_path, self.fake_path)
     self.assertEqual(result, self._generate_dict_results(gs_bucket_path))
 
 
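With strip_path folded into fake_path, the expected mapping the test builds is simply: strip the local prefix from each file and append the remainder to the gs:// bucket path. A standalone sketch of that mapping (the names mirror the test, the free function itself is illustrative):

import os

def expected_upload_dict(files_to_sync, fake_path, gs_bucket_path):
  # Mirrors _generate_dict_results: local absolute path -> gs:// destination.
  results = {}
  for entry in files_to_sync:
    results[entry] = os.path.join(gs_bucket_path,
                                  entry.replace(fake_path, '').lstrip('/'))
  return results

# expected_upload_dict(files_to_sync, fake_path, 'gs://chromeos-prebuilt/host/version')
# maps each local .tbz2 to gs://chromeos-prebuilt/host/version/packages/x11-misc/...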