Columns: rem | add | context
RunCommand(['repo', 'init', '-u', 'http://src.chromium.org/git/manifest'],
RunCommand(['repo', 'init', '-u', 'http://git.chromium.org/git/manifest'],
def _FullCheckout(buildroot, rw_checkout=True, retries=_DEFAULT_RETRIES):
  """Performs a full checkout and clobbers any previous checkouts."""
  RunCommand(['sudo', 'rm', '-rf', buildroot])
  MakeDir(buildroot, parents=True)
  RunCommand(['repo', 'init', '-u', 'http://src.chromium.org/git/manifest'],
             cwd=buildroot, input='\n\ny\n')
  RepoSync(buildroot, rw_checkout, retries)
return GetNewestLinkFromZipBase(board, channel, zip_server_base)
try:
  return GetNewestLinkFromZipBase(board, channel, zip_server_base)
except:
  Warning('Failed to get url from standard zip base. Trying rc.')
  return GetNewestLinkFromZipBase(board + '-rc', channel, zip_server_base)
def GetLatestZipUrl(board, channel, latest_url_base, zip_server_base):
  """Returns the url of the latest image zip for the given arguments.

  Args:
    board: board for the image zip.
    channel: channel for the image zip.
    latest_url_base: base url for latest links.
    zip_server_base: base url for zipped images.
  """
  if latest_url_base:
    try:
      # Grab the latest image info.
      latest_file_url = os.path.join(latest_url_base, channel,
                                     'LATEST-%s' % board)
      latest_image_file = urllib.urlopen(latest_file_url)
      latest_image = latest_image_file.read()
      latest_image_file.close()
      # Convert bin.gz into zip.
      latest_image = latest_image.replace('.bin.gz', '.zip')
      version = latest_image.split('-')[1]
      zip_base = os.path.join(zip_server_base, channel, board)
      return os.path.join(zip_base, version, latest_image)
    except IOError:
      Warning(('Could not use latest link provided, defaulting to parsing'
               ' latest from zip url base.'))

  return GetNewestLinkFromZipBase(board, channel, zip_server_base)
num_procs = int(RunCommand('grep -c processor /proc/cpuinfo'.split(),
                           print_cmd=False, redirect_stdout=True))
def RepoSync(buildroot, rw_checkout=False, retries=_DEFAULT_RETRIES):
  """Uses repo to checkout the source code.

  Keyword arguments:
  rw_checkout -- Reconfigure repo after sync'ing to read-write.
  retries -- Number of retries to try before failing on the sync.
  """
  # Get the number of processors to use with repo sync.
  num_procs = int(RunCommand('grep -c processor /proc/cpuinfo'.split(),
                             print_cmd=False, redirect_stdout=True))
  while retries > 0:
    try:
      RunCommand(['repo', 'sync', '--jobs=%d' % (num_procs)], cwd=buildroot)
      if rw_checkout:
        # Always re-run in case of new git repos or repo sync
        # failed in a previous run because of a forced Stop Build.
        RunCommand(['repo', 'forall', '-c', 'git', 'config',
                    'url.ssh://git@gitrw.chromium.org:9222.pushinsteadof',
                    'http://git.chromium.org/git'], cwd=buildroot)
      retries = 0
    except:
      retries -= 1
      if retries > 0:
        Warning('CBUILDBOT -- Repo Sync Failed, retrying')
      else:
        Warning('CBUILDBOT -- Retries exhausted')
        raise
RunCommand(['repo', 'sync', '--jobs=%d' % (num_procs)], cwd=buildroot)
RunCommand(['repo', '--trace', 'sync'], cwd=buildroot)
def RepoSync(buildroot, rw_checkout=False, retries=_DEFAULT_RETRIES):
  """Uses repo to checkout the source code.

  Keyword arguments:
  rw_checkout -- Reconfigure repo after sync'ing to read-write.
  retries -- Number of retries to try before failing on the sync.
  """
  # Get the number of processors to use with repo sync.
  num_procs = int(RunCommand('grep -c processor /proc/cpuinfo'.split(),
                             print_cmd=False, redirect_stdout=True))
  while retries > 0:
    try:
      RunCommand(['repo', 'sync', '--jobs=%d' % (num_procs)], cwd=buildroot)
      if rw_checkout:
        # Always re-run in case of new git repos or repo sync
        # failed in a previous run because of a forced Stop Build.
        RunCommand(['repo', 'forall', '-c', 'git', 'config',
                    'url.ssh://git@gitrw.chromium.org:9222.pushinsteadof',
                    'http://git.chromium.org/git'], cwd=buildroot)
      retries = 0
    except:
      retries -= 1
      if retries > 0:
        Warning('CBUILDBOT -- Repo Sync Failed, retrying')
      else:
        Warning('CBUILDBOT -- Retries exhausted')
        raise
passwd = urllib.quote_plus(getpass.getpass(prompt=''))
passwd = getpass.getpass(prompt='')
def Authenticate():
  default_username = getpass.getuser()
  username = os.environ.get('GSDCURL_USERNAME')
  if username is None:
    sys.stderr.write('Username [' + default_username + ']: ')
    username = raw_input()
    if username == '':
      username = default_username + '@google.com'
    elif '@' not in username:
      username = username + '@google.com'
  passwd = os.environ.get('GSDCURL_PASSWORD')
  if passwd is None:
    sys.stderr.write('Password: ')
    passwd = urllib.quote_plus(getpass.getpass(prompt=''))
  cmd = [
      'curl', '--silent',
      'https://www.google.com/accounts/ClientLogin',
      '-d', 'Email=' + username,
      '-d', 'Passwd=' + passwd,
      '-d', 'accountType=GOOGLE',
      '-d', 'source=Google-gsdcurl-ver1',
      '-d', 'service=cds',
  ]
  p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
  (p_stdout, _) = p.communicate()
  assert p.returncode == 0
  m = re.search('\nAuth=([^\n]+)\n', p_stdout)
  if not m:
    sys.stderr.write('BAD LOGIN\n')
    sys.exit(1)
  auth = m.group(1)
  return auth
'-d', 'Passwd=' + passwd,
'-d', 'Passwd=' + urllib.quote_plus(passwd),
def Authenticate():
  default_username = getpass.getuser()
  username = os.environ.get('GSDCURL_USERNAME')
  if username is None:
    sys.stderr.write('Username [' + default_username + ']: ')
    username = raw_input()
    if username == '':
      username = default_username + '@google.com'
    elif '@' not in username:
      username = username + '@google.com'
  passwd = os.environ.get('GSDCURL_PASSWORD')
  if passwd is None:
    sys.stderr.write('Password: ')
    passwd = urllib.quote_plus(getpass.getpass(prompt=''))
  cmd = [
      'curl', '--silent',
      'https://www.google.com/accounts/ClientLogin',
      '-d', 'Email=' + username,
      '-d', 'Passwd=' + passwd,
      '-d', 'accountType=GOOGLE',
      '-d', 'source=Google-gsdcurl-ver1',
      '-d', 'service=cds',
  ]
  p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
  (p_stdout, _) = p.communicate()
  assert p.returncode == 0
  m = re.search('\nAuth=([^\n]+)\n', p_stdout)
  if not m:
    sys.stderr.write('BAD LOGIN\n')
    sys.exit(1)
  auth = m.group(1)
  return auth
cmd = ['./build_packages']
def _Build(buildroot, emptytree):
  """Wrapper around build_packages."""
  cwd = os.path.join(buildroot, 'src', 'scripts')
  cmd = ['./build_packages']
  if emptytree:
    cmd.insert(0, 'EXTRA_BOARD_FLAGS=--emptytree')
  RunCommand(cmd, cwd=cwd, enter_chroot=True)
cmd.insert(0, 'EXTRA_BOARD_FLAGS=--emptytree')
  cmd = ['sh', '-c', 'EXTRA_BOARD_FLAGS=--emptytree ./build_packages']
else:
  cmd = ['./build_packages']
def _Build(buildroot, emptytree):
  """Wrapper around build_packages."""
  cwd = os.path.join(buildroot, 'src', 'scripts')
  cmd = ['./build_packages']
  if emptytree:
    cmd.insert(0, 'EXTRA_BOARD_FLAGS=--emptytree')
  RunCommand(cmd, cwd=cwd, enter_chroot=True)
RunCommand(unmerge_board_cmd, env=env)
RunCommand(unmerge_board_cmd)
def _CleanStalePackages(board, package_array):
  """Cleans up stale package info from a previous build."""
  Info('Cleaning up stale packages %s.' % package_array)
  unmerge_board_cmd = ['emerge-%s' % board, '--unmerge']
  unmerge_board_cmd.extend(package_array)
  RunCommand(unmerge_board_cmd, env=env)

  unmerge_host_cmd = ['sudo', 'emerge', '--unmerge']
  unmerge_host_cmd.extend(package_array)
  RunCommand(unmerge_host_cmd, env=env)

  RunCommand(['eclean-%s' % board, '-d', 'packages'], redirect_stderr=True)
  RunCommand(['sudo', 'eclean', '-d', 'packages'], redirect_stderr=True)
RunCommand(unmerge_host_cmd, env=env)
RunCommand(unmerge_host_cmd)
def _CleanStalePackages(board, package_array):
  """Cleans up stale package info from a previous build."""
  Info('Cleaning up stale packages %s.' % package_array)
  unmerge_board_cmd = ['emerge-%s' % board, '--unmerge']
  unmerge_board_cmd.extend(package_array)
  RunCommand(unmerge_board_cmd, env=env)

  unmerge_host_cmd = ['sudo', 'emerge', '--unmerge']
  unmerge_host_cmd.extend(package_array)
  RunCommand(unmerge_host_cmd, env=env)

  RunCommand(['eclean-%s' % board, '-d', 'packages'], redirect_stderr=True)
  RunCommand(['sudo', 'eclean', '-d', 'packages'], redirect_stderr=True)
(_CHROMIUMOS_OVERLAYS_DIRECTORY, _STABLE_BRANCH_NAME))
(os.getcwd(), _STABLE_BRANCH_NAME))
def _PushChange():
  """Pushes changes to the git repository.

  Pushes local commits from calls to CommitChange to the remote git
  repository specified by os.pwd.

  Raises:
    OSError: Error occurred while pushing.
  """
  # TODO(sosa) - Add logic for buildbot to check whether other slaves have
  # completed and push this change only if they have.

  # Sanity check to make sure we're on a stabilizing branch before pushing.
  if not _CheckOnStabilizingBranch():
    generate_test_report.Die('Expected %s to be on branch "%s"' %
                             (_CHROMIUMOS_OVERLAYS_DIRECTORY,
                              _STABLE_BRANCH_NAME))

  _RunCommand('git cl upload --desc_from_logs -m "%s"' %
              'Marking set of ebuilds as stable')
  _RunCommand('git remote update')
  _RunCommand('git rebase origin/master')
  _RunCommand('git cl push %s' % gflags.FLAGS.push_options)
os.chdir(_CHROMIUMOS_OVERLAYS_DIRECTORY)
overlay_directory = '%s/third_party/chromiumos-overlay' % gflags.FLAGS.srcroot
os.chdir(overlay_directory)
def main(argv):
  try:
    argv = gflags.FLAGS(argv)
    if len(argv) != 2:
      _PrintUsageAndDie('Must specify a valid command')
    else:
      command = argv[1]
  except gflags.FlagsError, e:
    _PrintUsageAndDie(str(e))

  package_list = gflags.FLAGS.packages.split()
  if gflags.FLAGS.commit_ids:
    commit_id_list = gflags.FLAGS.commit_ids.split()
  else:
    commit_id_list = None
  _CheckSaneArguments(package_list, commit_id_list, command)

  os.chdir(_CHROMIUMOS_OVERLAYS_DIRECTORY)

  if command == 'clean':
    _Clean()
  elif command == 'commit':
    work_branch = _GitBranch(_STABLE_BRANCH_NAME)
    work_branch.CreateBranch()
    if not work_branch.Exists():
      generate_test_report.Die('Unable to create stabilizing branch in %s' %
                               _CHROMIUMOS_OVERLAYS_DIRECTORY)
    index = 0
    try:
      for index in range(len(package_list)):
        # Gather the package and optional commit id to work on.
        package = package_list[index]
        commit_id = ""
        if commit_id_list:
          commit_id = commit_id_list[index]
        _Print('Working on %s' % package)
        worker = EBuildStableMarker(_EBuild(package, commit_id))
        worker.RevEBuild(commit_id)
        worker.CommitChange(_GIT_COMMIT_MESSAGE % (package, commit_id))
    except (OSError, IOError), e:
      print ('An exception occurred\n'
             'Only the following packages were revved: %s\n'
             'Note you will have to go into %s '
             'and reset the git repo yourself.' %
             (package_list[:index], _CHROMIUMOS_OVERLAYS_DIRECTORY))
      raise e
  elif command == 'push':
    _PushChange()
_CHROMIUMOS_OVERLAYS_DIRECTORY)
overlay_directory)
def main(argv):
  try:
    argv = gflags.FLAGS(argv)
    if len(argv) != 2:
      _PrintUsageAndDie('Must specify a valid command')
    else:
      command = argv[1]
  except gflags.FlagsError, e:
    _PrintUsageAndDie(str(e))

  package_list = gflags.FLAGS.packages.split()
  if gflags.FLAGS.commit_ids:
    commit_id_list = gflags.FLAGS.commit_ids.split()
  else:
    commit_id_list = None
  _CheckSaneArguments(package_list, commit_id_list, command)

  os.chdir(_CHROMIUMOS_OVERLAYS_DIRECTORY)

  if command == 'clean':
    _Clean()
  elif command == 'commit':
    work_branch = _GitBranch(_STABLE_BRANCH_NAME)
    work_branch.CreateBranch()
    if not work_branch.Exists():
      generate_test_report.Die('Unable to create stabilizing branch in %s' %
                               _CHROMIUMOS_OVERLAYS_DIRECTORY)
    index = 0
    try:
      for index in range(len(package_list)):
        # Gather the package and optional commit id to work on.
        package = package_list[index]
        commit_id = ""
        if commit_id_list:
          commit_id = commit_id_list[index]
        _Print('Working on %s' % package)
        worker = EBuildStableMarker(_EBuild(package, commit_id))
        worker.RevEBuild(commit_id)
        worker.CommitChange(_GIT_COMMIT_MESSAGE % (package, commit_id))
    except (OSError, IOError), e:
      print ('An exception occurred\n'
             'Only the following packages were revved: %s\n'
             'Note you will have to go into %s '
             'and reset the git repo yourself.' %
             (package_list[:index], _CHROMIUMOS_OVERLAYS_DIRECTORY))
      raise e
  elif command == 'push':
    _PushChange()
(package_list[:index], _CHROMIUMOS_OVERLAYS_DIRECTORY))
(package_list[:index], overlay_directory))
def main(argv):
  try:
    argv = gflags.FLAGS(argv)
    if len(argv) != 2:
      _PrintUsageAndDie('Must specify a valid command')
    else:
      command = argv[1]
  except gflags.FlagsError, e:
    _PrintUsageAndDie(str(e))

  package_list = gflags.FLAGS.packages.split()
  if gflags.FLAGS.commit_ids:
    commit_id_list = gflags.FLAGS.commit_ids.split()
  else:
    commit_id_list = None
  _CheckSaneArguments(package_list, commit_id_list, command)

  os.chdir(_CHROMIUMOS_OVERLAYS_DIRECTORY)

  if command == 'clean':
    _Clean()
  elif command == 'commit':
    work_branch = _GitBranch(_STABLE_BRANCH_NAME)
    work_branch.CreateBranch()
    if not work_branch.Exists():
      generate_test_report.Die('Unable to create stabilizing branch in %s' %
                               _CHROMIUMOS_OVERLAYS_DIRECTORY)
    index = 0
    try:
      for index in range(len(package_list)):
        # Gather the package and optional commit id to work on.
        package = package_list[index]
        commit_id = ""
        if commit_id_list:
          commit_id = commit_id_list[index]
        _Print('Working on %s' % package)
        worker = EBuildStableMarker(_EBuild(package, commit_id))
        worker.RevEBuild(commit_id)
        worker.CommitChange(_GIT_COMMIT_MESSAGE % (package, commit_id))
    except (OSError, IOError), e:
      print ('An exception occurred\n'
             'Only the following packages were revved: %s\n'
             'Note you will have to go into %s '
             'and reset the git repo yourself.' %
             (package_list[:index], _CHROMIUMOS_OVERLAYS_DIRECTORY))
      raise e
  elif command == 'push':
    _PushChange()
_RunCommand('git cl upload --desc_from_logs -m "%s"' %
            'Marking set of ebuilds as stable')
description = _RunCommand('git log --format=format:%s%n%n%b ' +
                          gflags.FLAGS.tracking_branch + '..')
description = 'Marking set of ebuilds as stable\n\n%s' % description
merge_branch_name = 'merge_branch'
def _PushChange():
  """Pushes changes to the git repository.

  Pushes local commits from calls to CommitChange to the remote git
  repository specified by os.pwd.

  Raises:
    OSError: Error occurred while pushing.
  """
  # TODO(sosa) - Add logic for buildbot to check whether other slaves have
  # completed and push this change only if they have.

  # Sanity check to make sure we're on a stabilizing branch before pushing.
  if not _CheckOnStabilizingBranch():
    generate_test_report.Die('Expected %s to be on branch "%s"' %
                             (os.getcwd(), _STABLE_BRANCH_NAME))

  _RunCommand('git cl upload --desc_from_logs -m "%s"' %
              'Marking set of ebuilds as stable')
  _RunCommand('git remote update')
  _RunCommand('git rebase %s' % gflags.FLAGS.tracking_branch)
  _RunCommand('git cl push %s' % gflags.FLAGS.push_options)
_RunCommand('git rebase %s' % gflags.FLAGS.tracking_branch)
_RunCommand('git cl push %s' % gflags.FLAGS.push_options)
_RunCommand('git checkout -b %s %s' % (
    merge_branch_name, gflags.FLAGS.tracking_branch))
try:
  _RunCommand('git merge --squash %s' % _STABLE_BRANCH_NAME)
  _RunCommand('git commit -m "%s"' % description)
  _RunCommand('git config push.default tracking')
  _RunCommand('git push')
finally:
  _RunCommand('git checkout %s' % _STABLE_BRANCH_NAME)
  _RunCommand('git branch -D %s' % merge_branch_name)
def _PushChange():
  """Pushes changes to the git repository.

  Pushes local commits from calls to CommitChange to the remote git
  repository specified by os.pwd.

  Raises:
    OSError: Error occurred while pushing.
  """
  # TODO(sosa) - Add logic for buildbot to check whether other slaves have
  # completed and push this change only if they have.

  # Sanity check to make sure we're on a stabilizing branch before pushing.
  if not _CheckOnStabilizingBranch():
    generate_test_report.Die('Expected %s to be on branch "%s"' %
                             (os.getcwd(), _STABLE_BRANCH_NAME))

  _RunCommand('git cl upload --desc_from_logs -m "%s"' %
              'Marking set of ebuilds as stable')
  _RunCommand('git remote update')
  _RunCommand('git rebase %s' % gflags.FLAGS.tracking_branch)
  _RunCommand('git cl push %s' % gflags.FLAGS.push_options)
mock_file = ['EAPI=2', 'EGIT_COMMIT=old_id', 'KEYWORDS=\"~x86 ~arm\"', 'src_unpack(){}']
mock_file = ['EAPI=2', 'CROS_WORKON_COMMIT=old_id', 'KEYWORDS=\"~x86 ~arm\"', 'src_unpack(){}']
def testRevEBuild(self):
  self.mox.StubOutWithMock(cros_mark_as_stable.fileinput, 'input')
  self.mox.StubOutWithMock(cros_mark_as_stable.shutil, 'copyfile')
  m_file = self.mox.CreateMock(file)
m_file.write('EGIT_COMMIT="my_id"')
m_file.write('CROS_WORKON_COMMIT="my_id"')
def testRevEBuild(self):
  self.mox.StubOutWithMock(cros_mark_as_stable.fileinput, 'input')
  self.mox.StubOutWithMock(cros_mark_as_stable.shutil, 'copyfile')
  m_file = self.mox.CreateMock(file)
_SimpleRunCommand('git remote update')
_SimpleRunCommand('repo sync .')
def PushChange(stable_branch, tracking_branch):
  """Pushes commits in the stable_branch to the remote git repository.

  Pushes local commits from calls to CommitChange to the remote git
  repository specified by current working directory.

  Args:
    stable_branch: The local branch with commits we want to push.
    tracking_branch: The tracking branch of the local branch.

  Raises:
    OSError: Error occurred while pushing.
  """
  num_retries = 5

  # Sanity check to make sure we're on a stabilizing branch before pushing.
  if not _CheckOnStabilizingBranch(stable_branch):
    Info('Not on branch %s so no work found to push. Exiting' % stable_branch)
    return

  description = _SimpleRunCommand('git log --format=format:%s%n%n%b ' +
                                  tracking_branch + '..')
  description = 'Marking set of ebuilds as stable\n\n%s' % description
  Info('Using description %s' % description)
  merge_branch_name = 'merge_branch'
  for push_try in range(num_retries + 1):
    try:
      _SimpleRunCommand('git remote update')
      merge_branch = GitBranch(merge_branch_name, tracking_branch)
      merge_branch.CreateBranch()
      if not merge_branch.Exists():
        Die('Unable to create merge branch.')
      _SimpleRunCommand('git merge --squash %s' % stable_branch)
      _SimpleRunCommand('git commit -m "%s"' % description)
      _SimpleRunCommand('git config push.default tracking')
      if gflags.FLAGS.dryrun:
        _SimpleRunCommand('git push --dry-run')
      else:
        _SimpleRunCommand('git push')
      break
    except:
      if push_try < num_retries:
        Warning('Failed to push change, performing retry (%s/%s)' % (
            push_try + 1, num_retries))
      else:
        raise
'http://src.chromium.org/git'], cwd=buildroot)
'http://git.chromium.org/git'], cwd=buildroot)
def RepoSync(buildroot, rw_checkout, retries=_DEFAULT_RETRIES):
  while retries > 0:
    try:
      RunCommand(['repo', 'sync'], cwd=buildroot)
      if rw_checkout:
        # Always re-run in case of new git repos or repo sync
        # failed in a previous run because of a forced Stop Build.
        RunCommand(['repo', 'forall', '-c', 'git', 'config',
                    'url.ssh://git@gitrw.chromium.org:9222.pushinsteadof',
                    'http://src.chromium.org/git'], cwd=buildroot)
      retries = 0
    except:
      retries -= 1
      if retries > 0:
        print >> sys.stderr, 'CBUILDBOT -- Repo Sync Failed, retrying'
      else:
        print >> sys.stderr, 'CBUILDBOT -- Retries exhausted'
        raise
_DumpManifest(buildroot, options.url)
if not options.chrome_rev:
  _DumpManifest(buildroot, options.url)
def main():
  # Parse options
  usage = "usage: %prog [options] cbuildbot_config"
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('-a', '--acl', default='private',
                    help='ACL to set on GSD archives')
  parser.add_option('-r', '--buildroot',
                    help='root directory where build occurs', default=".")
  parser.add_option('-n', '--buildnumber',
                    help='build number', type='int', default=0)
  parser.add_option('--chrome_rev', default=None, type='string',
                    dest='chrome_rev',
                    help=('Chrome_rev of type [tot|latest_release|'
                          'sticky_release]'))
  parser.add_option('-g', '--gsutil', default='', help='Location of gsutil')
  parser.add_option('-c', '--gsutil_archive', default='',
                    help='Datastore archive location')
  parser.add_option('--clobber', action='store_true', dest='clobber',
                    default=False,
                    help='Clobbers an old checkout before syncing')
  parser.add_option('--debug', action='store_true', dest='debug',
                    default=False,
                    help='Override some options to run as a developer.')
  parser.add_option('--noprebuilts', action='store_false', dest='prebuilts',
                    help="Don't upload prebuilts.")
  parser.add_option('--nosync', action='store_false', dest='sync',
                    default=True,
                    help="Don't sync before building.")
  parser.add_option('--notests', action='store_false', dest='tests',
                    default=True,
                    help='Override values from buildconfig and run no tests.')
  parser.add_option('-f', '--revisionfile',
                    help='file where new revisions are stored')
  parser.add_option('-t', '--tracking-branch', dest='tracking_branch',
                    default='cros/master', help='Run the buildbot on a branch')
  parser.add_option('-u', '--url', dest='url',
                    default='http://git.chromium.org/git/manifest',
                    help='Run the buildbot on internal manifest')

  (options, args) = parser.parse_args()

  buildroot = os.path.abspath(options.buildroot)
  revisionfile = options.revisionfile
  tracking_branch = options.tracking_branch
  chrome_atom_to_build = None

  if len(args) >= 1:
    buildconfig = _GetConfig(args[-1])
  else:
    Warning('Missing configuration description')
    parser.print_usage()
    sys.exit(1)

  try:
    # Calculate list of overlay directories.
    rev_overlays = _ResolveOverlays(buildroot, buildconfig['rev_overlays'])
    push_overlays = _ResolveOverlays(buildroot, buildconfig['push_overlays'])
    # We cannot push to overlays that we don't rev.
    assert set(push_overlays).issubset(set(rev_overlays))
    # Either has to be a master or not have any push overlays.
    assert buildconfig['master'] or not push_overlays

    board = buildconfig['board']
    old_binhost = None

    _PreFlightRinse(buildroot, buildconfig['board'], tracking_branch,
                    rev_overlays)
    chroot_path = os.path.join(buildroot, 'chroot')
    boardpath = os.path.join(chroot_path, 'build', board)
    if options.sync:
      if options.clobber or not os.path.isdir(buildroot):
        _FullCheckout(buildroot, tracking_branch, url=options.url)
      else:
        old_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST)
        _IncrementalCheckout(buildroot)

    new_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST)
    emptytree = (old_binhost and old_binhost != new_binhost)

    # Check that all overlays can be found.
    for path in rev_overlays:
      if not os.path.isdir(path):
        Die('Missing overlay: %s' % path)

    _DumpManifest(buildroot, options.url)

    if not os.path.isdir(chroot_path):
      _MakeChroot(buildroot)

    if not os.path.isdir(boardpath):
      _SetupBoard(buildroot, board=buildconfig['board'])

    # Perform uprev. If chrome_uprev is set, rev Chrome ebuilds.
    if options.chrome_rev:
      chrome_atom_to_build = _MarkChromeAsStable(buildroot, tracking_branch,
                                                 options.chrome_rev)
      # If we found nothing to rev, we're done here.
      if not chrome_atom_to_build:
        return
    elif buildconfig['uprev']:
      _UprevPackages(buildroot, tracking_branch, revisionfile,
                     buildconfig['board'], rev_overlays)

    _EnableLocalAccount(buildroot)
    # Doesn't rebuild without acquiring more source.
    if options.sync:
      _Build(buildroot, emptytree)

    if chrome_atom_to_build:
      _BuildChrome(buildroot, buildconfig['board'], chrome_atom_to_build)

    if buildconfig['unittests'] and options.tests:
      _RunUnitTests(buildroot)

    _BuildImage(buildroot)

    if buildconfig['smoke_bvt'] and options.tests:
      _BuildVMImageForTesting(buildroot)
      test_results_dir = '/tmp/run_remote_tests.%s' % options.buildnumber
      try:
        _RunSmokeSuite(buildroot, test_results_dir)
      finally:
        if not options.debug:
          archive_full_path = os.path.join(options.gsutil_archive,
                                           str(options.buildnumber))
          _ArchiveTestResults(buildroot, buildconfig['board'],
                              test_results_dir=test_results_dir,
                              gsutil=options.gsutil,
                              archive_dir=archive_full_path,
                              acl=options.acl)

    if buildconfig['uprev']:
      # Don't push changes for developers.
      if buildconfig['master']:
        # Master bot needs to check if the other slaves completed.
        if cbuildbot_comm.HaveSlavesCompleted(config):
          if not options.debug and options.prebuilts:
            _UploadPrebuilts(buildroot, board, buildconfig['rev_overlays'],
                             [new_binhost])
          _UprevPush(buildroot, tracking_branch, buildconfig['board'],
                     push_overlays, options.debug)
        else:
          Die('CBUILDBOT - One of the slaves has failed!!!')
      else:
        # Publish my status to the master if its expecting it.
        if buildconfig['important'] and not options.debug:
          cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE)
  except:
    # Send failure to master bot.
    if not buildconfig['master'] and buildconfig['important']:
      cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED)
    raise
if _CheckOnStabilizingBranch():
  _RunCommand('git reset HEAD --hard')
  _RunCommand('git checkout master')
def _Clean():
  """Cleans up uncommitted changes on either stabilizing branch or master."""
  if _CheckOnStabilizingBranch():
    _RunCommand('git reset HEAD --hard')
    _RunCommand('git checkout master')
  _RunCommand('git reset HEAD --hard')
self._Checkout('master', create=False)
self._Checkout(gflags.FLAGS.tracking_branch, create=False)
def Delete(self):
  """Deletes the branch and returns the user to the master branch.
Return a list of tuple arguments of the failed uploads
Return a set of tuple arguments of the failed uploads
def RemoteUpload(files, pool=10):
  """Upload to google storage.

  Create a pool of processes and call _GsUpload with the proper arguments.

  Args:
    files: dictionary with keys to local files and values to remote path.
    pool: integer of maximum processes to have at the same time.

  Returns:
    Return a list of tuple arguments of the failed uploads
  """
  # TODO(scottz) port this to use _RunManyParallel when it is available in
  # cros_build_lib
  pool = multiprocessing.Pool(processes=pool)
  workers = []
  for local_file, remote_path in files.iteritems():
    workers.append((local_file, remote_path))

  result = pool.map_async(_GsUpload, workers, chunksize=1)
  while True:
    try:
      return result.get(60*60)
    except multiprocessing.TimeoutError:
      pass
return result.get(60*60)
return set(result.get(60*60))
def RemoteUpload(files, pool=10):
  """Upload to google storage.

  Create a pool of processes and call _GsUpload with the proper arguments.

  Args:
    files: dictionary with keys to local files and values to remote path.
    pool: integer of maximum processes to have at the same time.

  Returns:
    Return a list of tuple arguments of the failed uploads
  """
  # TODO(scottz) port this to use _RunManyParallel when it is available in
  # cros_build_lib
  pool = multiprocessing.Pool(processes=pool)
  workers = []
  for local_file, remote_path in files.iteritems():
    workers.append((local_file, remote_path))

  result = pool.map_async(_GsUpload, workers, chunksize=1)
  while True:
    try:
      return result.get(60*60)
    except multiprocessing.TimeoutError:
      pass
if failed_uploads:
  raise UploadFailed('Error uploading:\n%s' % '\n'.join(failed_uploads))
if len(failed_uploads) > 1 or (None not in failed_uploads):
  error_msg = ['%s -> %s\n' % args for args in failed_uploads]
  raise UploadFailed('Error uploading:\n%s' % error_msg)
def UploadPrebuilt(build_path, bucket, board=None, git_file=None):
  """Upload Host prebuilt files to Google Storage space.

  Args:
    build_path: The path to the root of the chroot.
    bucket: The Google Storage bucket to upload to.
    board: The board to upload to Google Storage, if this is None upload
      host packages.
    git_file: If set, update this file with a host/version combo, commit and
      push it.
  """
  version = GetVersion()

  if not board:
    # We are uploading host packages
    # TODO(scottz): eventually add support for different host_targets
    package_path = os.path.join(build_path, _HOST_PACKAGES_PATH)
    gs_path = os.path.join(bucket, _GS_HOST_PATH, version)
    strip_pattern = package_path
    package_string = _HOST_TARGET
  else:
    board_path = os.path.join(build_path, _BOARD_PATH % {'board': board})
    package_path = os.path.join(board_path, 'packages')
    package_string = board
    strip_pattern = board_path
    gs_path = os.path.join(bucket, _GS_BOARD_PATH % {'board': board,
                                                     'version': version})

  upload_files = GenerateUploadDict(package_path, gs_path, strip_pattern)

  print 'Uploading %s' % package_string
  failed_uploads = RemoteUpload(upload_files)
  if failed_uploads:
    raise UploadFailed('Error uploading:\n%s' % '\n'.join(failed_uploads))

  if git_file:
    RevGitFile(git_file, package_string, version)
def RevGitFile(filename, value):
def RevGitPushWithRetry(retries=5):
  """Repo sync and then push git changes in flight.

  Args:
    retries: The number of times to retry before giving up, default: 5

  Raises:
    GitPushFailed if push was unsuccessful after retries
  """
  for retry in range(retries+1):
    try:
      cros_build_lib.RunCommand('repo sync .', shell=True)
      cros_build_lib.RunCommand('git push', shell=True)
      break
    except cros_build_lib.RunCommandError:
      print 'Error pushing changes trying again (%s/%s)' % (retry, retries)
  else:
    raise GitPushFailed('Failed to push change after %s retries' % retries)


def RevGitFile(filename, value, retries=5):
def RevGitFile(filename, value):
  """Update and push the git file.

  Args:
    filename: file to modify that is in a git repo already
    key: board or host package type e.g. x86-dogfood
    value: string representing the version of the prebuilt that has been
      uploaded.
  """
  prebuilt_branch = 'prebuilt_branch'
  old_cwd = os.getcwd()
  os.chdir(os.path.dirname(filename))

  cros_build_lib.RunCommand('repo sync', shell=True)
  cros_build_lib.RunCommand('repo start %s .' % prebuilt_branch, shell=True)
  git_ssh_config_cmd = (
      'git config url.ssh://git@gitrw.chromium.org:9222.pushinsteadof '
      'http://git.chromium.org/git')
  cros_build_lib.RunCommand(git_ssh_config_cmd, shell=True)
  description = 'Update PORTAGE_BINHOST="%s" in %s' % (value, filename)
  print description
  try:
    UpdateLocalFile(filename, value)
    cros_build_lib.RunCommand('git config push.default tracking', shell=True)
    cros_build_lib.RunCommand('git commit -am "%s"' % description, shell=True)
    cros_build_lib.RunCommand('repo sync', shell=True)
    cros_build_lib.RunCommand('git push', shell=True)
  finally:
    cros_build_lib.RunCommand('repo abandon %s .' % prebuilt_branch,
                              shell=True)
    os.chdir(old_cwd)
Args:
  filename: file to modify that is in a git repo already
  key: board or host package type e.g. x86-dogfood
  value: string representing the version of the prebuilt that has been
    uploaded.
Args:
  filename: file to modify that is in a git repo already
  value: string representing the version of the prebuilt that has been
    uploaded.
  retries: The number of times to retry before giving up, default: 5
def RevGitFile(filename, value):
  """Update and push the git file.

  Args:
    filename: file to modify that is in a git repo already
    key: board or host package type e.g. x86-dogfood
    value: string representing the version of the prebuilt that has been
      uploaded.
  """
  prebuilt_branch = 'prebuilt_branch'
  old_cwd = os.getcwd()
  os.chdir(os.path.dirname(filename))

  cros_build_lib.RunCommand('repo sync', shell=True)
  cros_build_lib.RunCommand('repo start %s .' % prebuilt_branch, shell=True)
  git_ssh_config_cmd = (
      'git config url.ssh://git@gitrw.chromium.org:9222.pushinsteadof '
      'http://git.chromium.org/git')
  cros_build_lib.RunCommand(git_ssh_config_cmd, shell=True)
  description = 'Update PORTAGE_BINHOST="%s" in %s' % (value, filename)
  print description
  try:
    UpdateLocalFile(filename, value)
    cros_build_lib.RunCommand('git config push.default tracking', shell=True)
    cros_build_lib.RunCommand('git commit -am "%s"' % description, shell=True)
    cros_build_lib.RunCommand('repo sync', shell=True)
    cros_build_lib.RunCommand('git push', shell=True)
  finally:
    cros_build_lib.RunCommand('repo abandon %s .' % prebuilt_branch,
                              shell=True)
    os.chdir(old_cwd)
cros_build_lib.RunCommand('repo sync', shell=True)
cros_build_lib.RunCommand('repo start %s .' % prebuilt_branch, shell=True)
cros_build_lib.RunCommand('repo sync .', shell=True)
cros_build_lib.RunCommand('repo start %s .' % prebuilt_branch, shell=True)
def RevGitFile(filename, value):
  """Update and push the git file.

  Args:
    filename: file to modify that is in a git repo already
    key: board or host package type e.g. x86-dogfood
    value: string representing the version of the prebuilt that has been
      uploaded.
  """
  prebuilt_branch = 'prebuilt_branch'
  old_cwd = os.getcwd()
  os.chdir(os.path.dirname(filename))

  cros_build_lib.RunCommand('repo sync', shell=True)
  cros_build_lib.RunCommand('repo start %s .' % prebuilt_branch, shell=True)
  git_ssh_config_cmd = (
      'git config url.ssh://git@gitrw.chromium.org:9222.pushinsteadof '
      'http://git.chromium.org/git')
  cros_build_lib.RunCommand(git_ssh_config_cmd, shell=True)
  description = 'Update PORTAGE_BINHOST="%s" in %s' % (value, filename)
  print description
  try:
    UpdateLocalFile(filename, value)
    cros_build_lib.RunCommand('git config push.default tracking', shell=True)
    cros_build_lib.RunCommand('git commit -am "%s"' % description, shell=True)
    cros_build_lib.RunCommand('repo sync', shell=True)
    cros_build_lib.RunCommand('git push', shell=True)
  finally:
    cros_build_lib.RunCommand('repo abandon %s .' % prebuilt_branch,
                              shell=True)
    os.chdir(old_cwd)
cros_build_lib.RunCommand('repo sync', shell=True)
cros_build_lib.RunCommand('git push', shell=True)
RevGitPushWithRetry(retries)
def RevGitFile(filename, value):
  """Update and push the git file.

  Args:
    filename: file to modify that is in a git repo already
    key: board or host package type e.g. x86-dogfood
    value: string representing the version of the prebuilt that has been
      uploaded.
  """
  prebuilt_branch = 'prebuilt_branch'
  old_cwd = os.getcwd()
  os.chdir(os.path.dirname(filename))

  cros_build_lib.RunCommand('repo sync', shell=True)
  cros_build_lib.RunCommand('repo start %s .' % prebuilt_branch, shell=True)
  git_ssh_config_cmd = (
      'git config url.ssh://git@gitrw.chromium.org:9222.pushinsteadof '
      'http://git.chromium.org/git')
  cros_build_lib.RunCommand(git_ssh_config_cmd, shell=True)
  description = 'Update PORTAGE_BINHOST="%s" in %s' % (value, filename)
  print description
  try:
    UpdateLocalFile(filename, value)
    cros_build_lib.RunCommand('git config push.default tracking', shell=True)
    cros_build_lib.RunCommand('git commit -am "%s"' % description, shell=True)
    cros_build_lib.RunCommand('repo sync', shell=True)
    cros_build_lib.RunCommand('git push', shell=True)
  finally:
    cros_build_lib.RunCommand('repo abandon %s .' % prebuilt_branch,
                              shell=True)
    os.chdir(old_cwd)
board=None, git_sync=False):
board=None, git_sync=False, git_sync_retries=5):
def UploadPrebuilt(build_path, upload_location, version, binhost_base_url,
                   board=None, git_sync=False):
  """Upload Host prebuilt files to Google Storage space.

  Args:
    build_path: The path to the root of the chroot.
    upload_location: The upload location.
    board: The board to upload to Google Storage, if this is None upload
      host packages.
    git_sync: If set, update make.conf of target to reference the latest
      prebuilt packages generated here.
  """
  if not board:
    # We are uploading host packages
    # TODO(scottz): eventually add support for different host_targets
    package_path = os.path.join(build_path, _HOST_PACKAGES_PATH)
    url_suffix = _REL_HOST_PATH % {'version': version, 'target': _HOST_TARGET}
    package_string = _HOST_TARGET
    git_file = os.path.join(build_path, _PREBUILT_MAKE_CONF[_HOST_TARGET])
  else:
    board_path = os.path.join(build_path, _BOARD_PATH % {'board': board})
    package_path = os.path.join(board_path, 'packages')
    package_string = board
    url_suffix = _REL_BOARD_PATH % {'board': board, 'version': version}
    git_file = os.path.join(build_path, DetermineMakeConfFile(board))
  remote_location = os.path.join(upload_location, url_suffix)

  if upload_location.startswith('gs://'):
    upload_files = GenerateUploadDict(package_path, remote_location)

    print 'Uploading %s' % package_string
    failed_uploads = RemoteUpload(upload_files)
    if len(failed_uploads) > 1 or (None not in failed_uploads):
      error_msg = ['%s -> %s\n' % args for args in failed_uploads]
      raise UploadFailed('Error uploading:\n%s' % error_msg)
  else:
    ssh_server, remote_path = remote_location.split(':', 1)
    cmds = ['ssh %s mkdir -p %s' % (ssh_server, remote_path),
            'rsync -av %s/ %s/' % (package_path, remote_location)]
    for cmd in cmds:
      if not _RetryRun(cmd, shell=True):
        raise UploadFailed('Could not run %s' % cmd)

  if git_sync:
    url_value = '%s/%s/' % (binhost_base_url, url_suffix)
    RevGitFile(git_file, url_value)
RevGitFile(git_file, url_value)
RevGitFile(git_file, url_value, retries=git_sync_retries)
def UploadPrebuilt(build_path, upload_location, version, binhost_base_url,
                   board=None, git_sync=False):
  """Upload Host prebuilt files to Google Storage space.

  Args:
    build_path: The path to the root of the chroot.
    upload_location: The upload location.
    board: The board to upload to Google Storage, if this is None upload
      host packages.
    git_sync: If set, update make.conf of target to reference the latest
      prebuilt packages generated here.
  """
  if not board:
    # We are uploading host packages
    # TODO(scottz): eventually add support for different host_targets
    package_path = os.path.join(build_path, _HOST_PACKAGES_PATH)
    url_suffix = _REL_HOST_PATH % {'version': version, 'target': _HOST_TARGET}
    package_string = _HOST_TARGET
    git_file = os.path.join(build_path, _PREBUILT_MAKE_CONF[_HOST_TARGET])
  else:
    board_path = os.path.join(build_path, _BOARD_PATH % {'board': board})
    package_path = os.path.join(board_path, 'packages')
    package_string = board
    url_suffix = _REL_BOARD_PATH % {'board': board, 'version': version}
    git_file = os.path.join(build_path, DetermineMakeConfFile(board))
  remote_location = os.path.join(upload_location, url_suffix)

  if upload_location.startswith('gs://'):
    upload_files = GenerateUploadDict(package_path, remote_location)

    print 'Uploading %s' % package_string
    failed_uploads = RemoteUpload(upload_files)
    if len(failed_uploads) > 1 or (None not in failed_uploads):
      error_msg = ['%s -> %s\n' % args for args in failed_uploads]
      raise UploadFailed('Error uploading:\n%s' % error_msg)
  else:
    ssh_server, remote_path = remote_location.split(':', 1)
    cmds = ['ssh %s mkdir -p %s' % (ssh_server, remote_path),
            'rsync -av %s/ %s/' % (package_path, remote_location)]
    for cmd in cmds:
      if not _RetryRun(cmd, shell=True):
        raise UploadFailed('Could not run %s' % cmd)

  if git_sync:
    url_value = '%s/%s/' % (binhost_base_url, url_suffix)
    RevGitFile(git_file, url_value)
def __init__(self, tests, results_dir_root=None):
def __init__(self, tests, base_ssh_port=_DEFAULT_BASE_SSH_PORT, board=None,
             image_path=None, results_dir_root=None):
def __init__(self, tests, results_dir_root=None):
  """Constructs and initializes the test runner class.
ssh_port = self._DEFAULT_START_SSH_PORT
ssh_port = self._base_ssh_port
def _SpawnTests(self):
  """Spawns VMs and starts the test runs on them.
parser.add_option('--results_dir_root', help='Root results directory.')
parser.add_option('--base_ssh_port', type='int',
                  default=_DEFAULT_BASE_SSH_PORT,
                  help='Base SSH port. Spawned VMs listen to localhost SSH '
                       'ports incrementally allocated starting from the base one. '
                       '[default: %default]')
parser.add_option('--board',
                  help='The target board. If none specified, '
                       'cros_run_vm_test will use the default board.')
parser.add_option('--image_path',
                  help='Full path to the VM image. If none specified, '
                       'cros_run_vm_test will use the latest image.')
parser.add_option('--results_dir_root',
                  help='Root results directory. If none specified, each test '
                       'will store its results in a separate /tmp directory.')
def main():
  usage = 'Usage: %prog [options] tests...'
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('--results_dir_root', help='Root results directory.')
  (options, args) = parser.parse_args()
  if not args:
    parser.print_help()
    Die('no tests provided')
  runner = ParallelTestRunner(args, options.results_dir_root)
  runner.Run()
runner = ParallelTestRunner(args, options.results_dir_root)
runner = ParallelTestRunner(args, options.base_ssh_port, options.board,
                            options.image_path, options.results_dir_root)
def main():
  usage = 'Usage: %prog [options] tests...'
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('--results_dir_root', help='Root results directory.')
  (options, args) = parser.parse_args()
  if not args:
    parser.print_help()
    Die('no tests provided')
  runner = ParallelTestRunner(args, options.results_dir_root)
  runner.Run()
cbuildbot._FullCheckout(options.buildroot, rw_checkout=False,
                        retries=_NUMBER_OF_RETRIES)
cbuildbot._FullCheckout(options.buildroot, options.tracking_branch,
                        rw_checkout=False, retries=_NUMBER_OF_RETRIES)
def main():
  parser = optparse.OptionParser()
  parser.add_option('-r', '--buildroot',
                    help='root directory where sync occurs')
  parser.add_option('-c', '--clobber', action='store_true', default=False,
                    help='clobber build directory and do a full checkout')
  (options, args) = parser.parse_args()
  if options.buildroot:
    if options.clobber:
      cbuildbot._FullCheckout(options.buildroot, rw_checkout=False,
                              retries=_NUMBER_OF_RETRIES)
    else:
      cbuildbot._IncrementalCheckout(options.buildroot, rw_checkout=False,
                                     retries=_NUMBER_OF_RETRIES)
  else:
    print >>sys.stderr, 'ERROR: Must set buildroot'
    sys.exit(1)
logging.info('Using tempdir = %s', temp_dir)
logging.debug('Using tempdir = %s', temp_dir)
def CreateTempDir():
  """Creates a tempdir and returns the name of the tempdir."""
  temp_dir = tempfile.mkdtemp(suffix='au', prefix='tmp')
  logging.info('Using tempdir = %s', temp_dir)
  return temp_dir
logging.info('Running ldd on %s', file_name)
logging.debug('Running ldd on %s', file_name)
def DepsToCopy(ldd_files, black_list):
  """Returns a list of deps for a given dynamic executables list.

  Args:
    ldd_files: List of dynamic files that needs to have the deps evaluated
    black_list: List of files that we should ignore

  Returns:
    library_list: List of files that are dependencies
  """
  for file_name in ldd_files:
    logging.info('Running ldd on %s', file_name)
    cmd = ['/usr/bin/ldd', file_name]
    stdout_data = ''
    stderr_data = ''
    try:
      proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
      (stdout_data, stderr_data) = proc.communicate(input=None)
    except subprocess.CalledProcessError, e:
      logging.error('Command %s failed', cmd)
      logging.error('error code %s', e.returncode)
      logging.error('output %s', e.output)
      raise

    library_list = []
    if not stdout_data:
      return library_list

    logging.debug('ldd for %s = stdout = %s stderr =%s', file_name,
                  stdout_data, stderr_data)

  library_list = _SplitAndStrip(stdout_data)
  return _ExcludeBlacklist(library_list, black_list)
logging.info('Copying file %s to %s', file_name, dest_files_root)
logging.debug('Copying file %s to %s', file_name, dest_files_root)
def CopyRequiredFiles(dest_files_root):
  """Generates a list of files that are required for au-generator zip file

  Args:
    dest_files_root: location of the directory where we should copy the files
  """
  if not dest_files_root:
    logging.error('Invalid option passed for dest_files_root')
    sys.exit(1)

  # Files that need to go through ldd
  ldd_files = ['/usr/bin/delta_generator', '/usr/bin/bsdiff',
               '/usr/bin/bspatch', '/usr/bin/cgpt']

  # statically linked files and scripts etc.,
  static_files = ['~/trunk/src/scripts/common.sh',
                  '~/trunk/src/scripts/cros_generate_update_payload',
                  '~/trunk/src/scripts/chromeos-common.sh']

  # We need directories to be copied recursively to a destination within
  # tempdir
  recurse_dirs = {'~/trunk/src/scripts/lib/shflags': 'lib/shflags'}

  black_list = [
      'linux-vdso.so',
      'libgcc_s.so',
      'libgthread-2.0.so',
      'libpthread.so',
      'librt.so',
      'libstdc',
      'libgcc_s.so',
      'libc.so',
      'ld-linux-x86-64',
      'libm.so',
      'libdl.so',
      'libresolv.so',
  ]

  all_files = ldd_files + static_files
  all_files = map(os.path.expanduser, all_files)

  for file_name in all_files:
    if not os.path.isfile(file_name):
      logging.error('file = %s does not exist', file_name)
      sys.exit(1)

  logging.debug('Given files that need to be copied = %s' %
                ''.join(all_files))
  all_files += DepsToCopy(ldd_files=ldd_files, black_list=black_list)

  for file_name in all_files:
    logging.info('Copying file %s to %s', file_name, dest_files_root)
    shutil.copy2(file_name, dest_files_root)

  for source_dir, target_dir in recurse_dirs.iteritems():
    logging.info('Processing directory %s', source_dir)
    full_path = os.path.expanduser(source_dir)
    if not os.path.isdir(full_path):
      logging.error("Directory given for %s expanded to %s doesn't exist.",
                    source_dir, full_path)
      sys.exit(1)
    dest = os.path.join(dest_files_root, target_dir)
    logging.info('Copying directory %s to %s.', full_path, target_dir)
    shutil.copytree(full_path, dest)
logging.info('Processing directory %s', source_dir)
logging.debug('Processing directory %s', source_dir)
def CopyRequiredFiles(dest_files_root):
  """Generates a list of files that are required for au-generator zip file

  Args:
    dest_files_root: location of the directory where we should copy the files
  """
  if not dest_files_root:
    logging.error('Invalid option passed for dest_files_root')
    sys.exit(1)

  # Files that need to go through ldd
  ldd_files = ['/usr/bin/delta_generator', '/usr/bin/bsdiff',
               '/usr/bin/bspatch', '/usr/bin/cgpt']

  # statically linked files and scripts etc.,
  static_files = ['~/trunk/src/scripts/common.sh',
                  '~/trunk/src/scripts/cros_generate_update_payload',
                  '~/trunk/src/scripts/chromeos-common.sh']

  # We need directories to be copied recursively to a destination within
  # tempdir
  recurse_dirs = {'~/trunk/src/scripts/lib/shflags': 'lib/shflags'}

  black_list = [
      'linux-vdso.so',
      'libgcc_s.so',
      'libgthread-2.0.so',
      'libpthread.so',
      'librt.so',
      'libstdc',
      'libgcc_s.so',
      'libc.so',
      'ld-linux-x86-64',
      'libm.so',
      'libdl.so',
      'libresolv.so',
  ]

  all_files = ldd_files + static_files
  all_files = map(os.path.expanduser, all_files)

  for file_name in all_files:
    if not os.path.isfile(file_name):
      logging.error('file = %s does not exist', file_name)
      sys.exit(1)

  logging.debug('Given files that need to be copied = %s' %
                ''.join(all_files))
  all_files += DepsToCopy(ldd_files=ldd_files, black_list=black_list)

  for file_name in all_files:
    logging.info('Copying file %s to %s', file_name, dest_files_root)
    shutil.copy2(file_name, dest_files_root)

  for source_dir, target_dir in recurse_dirs.iteritems():
    logging.info('Processing directory %s', source_dir)
    full_path = os.path.expanduser(source_dir)
    if not os.path.isdir(full_path):
      logging.error("Directory given for %s expanded to %s doesn't exist.",
                    source_dir, full_path)
      sys.exit(1)
    dest = os.path.join(dest_files_root, target_dir)
    logging.info('Copying directory %s to %s.', full_path, target_dir)
    shutil.copytree(full_path, dest)
logging.info('Copying directory %s to %s.', full_path, target_dir)
logging.debug('Copying directory %s to %s.', full_path, target_dir)
def CopyRequiredFiles(dest_files_root):
  """Generates a list of files that are required for au-generator zip file

  Args:
    dest_files_root: location of the directory where we should copy the files
  """
  if not dest_files_root:
    logging.error('Invalid option passed for dest_files_root')
    sys.exit(1)

  # Files that need to go through ldd
  ldd_files = ['/usr/bin/delta_generator', '/usr/bin/bsdiff',
               '/usr/bin/bspatch', '/usr/bin/cgpt']

  # statically linked files and scripts etc.,
  static_files = ['~/trunk/src/scripts/common.sh',
                  '~/trunk/src/scripts/cros_generate_update_payload',
                  '~/trunk/src/scripts/chromeos-common.sh']

  # We need directories to be copied recursively to a destination within
  # tempdir
  recurse_dirs = {'~/trunk/src/scripts/lib/shflags': 'lib/shflags'}

  black_list = [
      'linux-vdso.so',
      'libgcc_s.so',
      'libgthread-2.0.so',
      'libpthread.so',
      'librt.so',
      'libstdc',
      'libgcc_s.so',
      'libc.so',
      'ld-linux-x86-64',
      'libm.so',
      'libdl.so',
      'libresolv.so',
  ]

  all_files = ldd_files + static_files
  all_files = map(os.path.expanduser, all_files)

  for file_name in all_files:
    if not os.path.isfile(file_name):
      logging.error('file = %s does not exist', file_name)
      sys.exit(1)

  logging.debug('Given files that need to be copied = %s' %
                ''.join(all_files))
  all_files += DepsToCopy(ldd_files=ldd_files, black_list=black_list)

  for file_name in all_files:
    logging.info('Copying file %s to %s', file_name, dest_files_root)
    shutil.copy2(file_name, dest_files_root)

  for source_dir, target_dir in recurse_dirs.iteritems():
    logging.info('Processing directory %s', source_dir)
    full_path = os.path.expanduser(source_dir)
    if not os.path.isdir(full_path):
      logging.error("Directory given for %s expanded to %s doesn't exist.",
                    source_dir, full_path)
      sys.exit(1)
    dest = os.path.join(dest_files_root, target_dir)
    logging.info('Copying directory %s to %s.', full_path, target_dir)
    shutil.copytree(full_path, dest)
logging.info('Removed tempdir = %s', temp_dir)
logging.debug('Removed tempdir = %s', temp_dir)
def CleanUp(temp_dir):
  """Cleans up the tempdir

  Args:
    temp_dir = name of the directory to cleanup
  """
  if os.path.exists(temp_dir):
    shutil.rmtree(temp_dir, ignore_errors=True)
    logging.info('Removed tempdir = %s', temp_dir)
logging.info('Generating zip file %s with contents from %s', base_name,
logging.debug('Generating zip file %s with contents from %s', base_name,
def GenerateZipFile(base_name, root_dir):
  """Returns true if able to generate zip file

  Args:
    base_name: name of the zip file
    root_dir: location of the directory that we should zip

  Returns:
    True if successfully generates the zip file otherwise False
  """
  logging.info('Generating zip file %s with contents from %s', base_name,
               root_dir)
  current_dir = os.getcwd()
  os.chdir(root_dir)
  try:
    subprocess.Popen(['zip', '-r', '-9', base_name, '.'],
                     stdout=subprocess.PIPE).communicate()[0]
  except OSError, e:
    logging.error('Execution failed:%s', e.strerror)
    return False
  finally:
    os.chdir(current_dir)

  return True
logging.info('Copying %s to %s', zip_file_name, output_dir)
logging.debug('Copying %s to %s', zip_file_name, output_dir)
def CopyZipToFinalDestination(output_dir, zip_file_name):
  """Copies the generated zip file to a final destination

  Args:
    output_dir: Directory where the file should be copied to
    zip_file_name: name of the zip file that should be copied

  Returns:
    True on Success False on Failure
  """
  if not os.path.isfile(zip_file_name):
    logging.error("Zip file %s doesn't exist. Returning False", zip_file_name)
    return False

  if not os.path.isdir(output_dir):
    logging.debug('Creating %s', output_dir)
    os.makedirs(output_dir)

  logging.info('Copying %s to %s', zip_file_name, output_dir)
  shutil.copy2(zip_file_name, output_dir)
  return True
RunCommand(['repo', 'sync'], cwd=buildroot)
RunCommand(['repo', 'sync', '--jobs=%d' % (num_procs)], cwd=buildroot)
def RepoSync(buildroot, rw_checkout=False, retries=_DEFAULT_RETRIES):
  """Uses repo to checkout the source code.

  Keyword arguments:
  rw_checkout -- Reconfigure repo after sync'ing to read-write.
  retries -- Number of retries to try before failing on the sync.
  """
  while retries > 0:
    try:
      RunCommand(['repo', 'sync'], cwd=buildroot)
      if rw_checkout:
        # Always re-run in case of new git repos or repo sync
        # failed in a previous run because of a forced Stop Build.
        RunCommand(['repo', 'forall', '-c', 'git', 'config',
                    'url.ssh://git@gitrw.chromium.org:9222.pushinsteadof',
                    'http://git.chromium.org/git'], cwd=buildroot)
      retries = 0
    except:
      retries -= 1
      if retries > 0:
        Warning('CBUILDBOT -- Repo Sync Failed, retrying')
      else:
        Warning('CBUILDBOT -- Retries exhausted')
        raise
self.UpdateImage(target_image_path, 'clean')
try:
  self.UpdateImage(target_image_path, 'clean')
except:
  if self.use_delta_updates:
    Warning('Delta update failed, disabling delta updates and retrying.')
    self.use_delta_updates = False
    self.source_image = ''
    self.UpdateImage(target_image_path)
  else:
    raise
def testFullUpdateWipeStateful(self):
  """Tests if we can update after cleaning the stateful partition.
def _BuildChrome(buildroot, board, chrome_atom_to_build):
  """Wrapper for emerge call to build Chrome."""
  cwd = os.path.join(buildroot, 'src', 'scripts')
  RunCommand(['emerge-%s' % board, '=%s' % chrome_atom_to_build],
             cwd=cwd, enter_chroot=True)
def _Build(buildroot, emptytree):
  """Wrapper around build_packages."""
  cwd = os.path.join(buildroot, 'src', 'scripts')
  if emptytree:
    cmd = ['sh', '-c', 'EXTRA_BOARD_FLAGS=--emptytree ./build_packages']
  else:
    cmd = ['./build_packages']
  RunCommand(cmd, cwd=cwd, enter_chroot=True)
if chrome_atom_to_build:
  _BuildChrome(buildroot, buildconfig['board'], chrome_atom_to_build)
def main():
  # Parse options
  usage = "usage: %prog [options] cbuildbot_config"
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('-a', '--acl', default='private',
                    help='ACL to set on GSD archives')
  parser.add_option('-r', '--buildroot',
                    help='root directory where build occurs', default=".")
  parser.add_option('-n', '--buildnumber',
                    help='build number', type='int', default=0)
  parser.add_option('--chrome_rev', default=None, type='string',
                    dest='chrome_rev',
                    help=('Chrome_rev of type [tot|latest_release|'
                          'sticky_release]'))
  parser.add_option('-g', '--gsutil', default='', help='Location of gsutil')
  parser.add_option('-c', '--gsutil_archive', default='',
                    help='Datastore archive location')
  parser.add_option('--clobber', action='store_true', dest='clobber',
                    default=False,
                    help='Clobbers an old checkout before syncing')
  parser.add_option('--debug', action='store_true', dest='debug',
                    default=False,
                    help='Override some options to run as a developer.')
  parser.add_option('--noprebuilts', action='store_false', dest='prebuilts',
                    default=True,
                    help="Don't upload prebuilts.")
  parser.add_option('--nosync', action='store_false', dest='sync',
                    default=True,
                    help="Don't sync before building.")
  parser.add_option('--notests', action='store_false', dest='tests',
                    default=True,
                    help='Override values from buildconfig and run no tests.')
  parser.add_option('-f', '--revisionfile',
                    help='file where new revisions are stored')
  parser.add_option('-t', '--tracking-branch', dest='tracking_branch',
                    default='cros/master', help='Run the buildbot on a branch')
  parser.add_option('-u', '--url', dest='url',
                    default='http://git.chromium.org/git/manifest',
                    help='Run the buildbot on internal manifest')

  (options, args) = parser.parse_args()

  buildroot = os.path.abspath(options.buildroot)
  revisionfile = options.revisionfile
  tracking_branch = options.tracking_branch
  chrome_atom_to_build = None

  if len(args) >= 1:
    buildconfig = _GetConfig(args[-1])
  else:
    Warning('Missing configuration description')
    parser.print_usage()
    sys.exit(1)

  try:
    # Calculate list of overlay directories.
    rev_overlays = _ResolveOverlays(buildroot, buildconfig['rev_overlays'])
    push_overlays = _ResolveOverlays(buildroot, buildconfig['push_overlays'])
    # We cannot push to overlays that we don't rev.
    assert set(push_overlays).issubset(set(rev_overlays))
    # Either has to be a master or not have any push overlays.
    assert buildconfig['master'] or not push_overlays

    board = buildconfig['board']
    old_binhost = None

    _PreFlightRinse(buildroot, buildconfig['board'], tracking_branch,
                    rev_overlays)
    chroot_path = os.path.join(buildroot, 'chroot')
    boardpath = os.path.join(chroot_path, 'build', board)
    if options.sync:
      if options.clobber or not os.path.isdir(buildroot):
        _FullCheckout(buildroot, tracking_branch, url=options.url)
      else:
        old_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST)
        _IncrementalCheckout(buildroot)

    new_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST)
    emptytree = (old_binhost and old_binhost != new_binhost)

    # Check that all overlays can be found.
    for path in rev_overlays:
      if not os.path.isdir(path):
        Die('Missing overlay: %s' % path)

    if not os.path.isdir(chroot_path):
      _MakeChroot(buildroot)

    if not os.path.isdir(boardpath):
      _SetupBoard(buildroot, board=buildconfig['board'])

    # Perform uprev. If chrome_uprev is set, rev Chrome ebuilds.
    if options.chrome_rev:
      chrome_atom_to_build = _MarkChromeAsStable(buildroot, tracking_branch,
                                                 options.chrome_rev, board)
      # If we found nothing to rev, we're done here.
      if not chrome_atom_to_build:
        return
    elif buildconfig['uprev']:
      _UprevPackages(buildroot, tracking_branch, revisionfile,
                     buildconfig['board'], rev_overlays)

    _EnableLocalAccount(buildroot)
    # Doesn't rebuild without acquiring more source.
    if options.sync:
      _Build(buildroot, emptytree)

    if chrome_atom_to_build:
      _BuildChrome(buildroot, buildconfig['board'], chrome_atom_to_build)

    if buildconfig['unittests'] and options.tests:
      _RunUnitTests(buildroot)

    _BuildImage(buildroot)

    if buildconfig['smoke_bvt'] and options.tests:
      _BuildVMImageForTesting(buildroot)
      test_results_dir = '/tmp/run_remote_tests.%s' % options.buildnumber
      try:
        _RunSmokeSuite(buildroot, test_results_dir)
      finally:
        if not options.debug:
          archive_full_path = os.path.join(options.gsutil_archive,
                                           str(options.buildnumber))
          _ArchiveTestResults(buildroot, buildconfig['board'],
                              test_results_dir=test_results_dir,
                              gsutil=options.gsutil,
                              archive_dir=archive_full_path,
                              acl=options.acl)

    if buildconfig['uprev']:
      # Don't push changes for developers.
      if buildconfig['master']:
        # Master bot needs to check if the other slaves completed.
        if cbuildbot_comm.HaveSlavesCompleted(config):
          if not options.debug and options.prebuilts:
            _UploadPrebuilts(buildroot, board, buildconfig['rev_overlays'],
                             [new_binhost])
          _UprevPush(buildroot, tracking_branch, buildconfig['board'],
                     push_overlays, options.debug)
        else:
          Die('CBUILDBOT - One of the slaves has failed!!!')
      else:
        # Publish my status to the master if its expecting it.
        if buildconfig['important'] and not options.debug:
          cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE)
  except:
    # Send failure to master bot.
    if not buildconfig['master'] and buildconfig['important']:
      cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED)
    raise
def RevGitFile(filename, value):
def RevGitPushWithRetry(retries=5):
  """Repo sync and then push git changes in flight.

  Args:
    retries: The number of times to retry before giving up, default: 5

  Raises:
    GitPushFailed if push was unsuccessful after retries
  """
  for retry in range(1, retries+1):
    try:
      cros_build_lib.RunCommand('repo sync .', shell=True)
      cros_build_lib.RunCommand('git push', shell=True)
      break
    except cros_build_lib.RunCommandError:
      if retry < retries:
        print 'Error pushing changes trying again (%s/%s)' % (retry, retries)
        time.sleep(5*retry)
      else:
        raise GitPushFailed('Failed to push change after %s retries' %
                            retries)


def RevGitFile(filename, value, retries=5):
def RevGitFile(filename, value): """Update and push the git file. Args: filename: file to modify that is in a git repo already value: string representing the version of the prebuilt that has been uploaded. """ prebuilt_branch = 'prebuilt_branch' old_cwd = os.getcwd() os.chdir(os.path.dirname(filename)) cros_build_lib.RunCommand('repo sync', shell=True) cros_build_lib.RunCommand('repo start %s .' % prebuilt_branch, shell=True) git_ssh_config_cmd = ( 'git config url.ssh://git@gitrw.chromium.org:9222.pushinsteadof ' 'http://git.chromium.org/git') cros_build_lib.RunCommand(git_ssh_config_cmd, shell=True) description = 'Update PORTAGE_BINHOST="%s" in %s' % (value, filename) print description try: UpdateLocalFile(filename, value) cros_build_lib.RunCommand('git config push.default tracking', shell=True) cros_build_lib.RunCommand('git commit -am "%s"' % description, shell=True) cros_build_lib.RunCommand('repo sync', shell=True) cros_build_lib.RunCommand('git push', shell=True) finally: cros_build_lib.RunCommand('repo abandon %s .' % prebuilt_branch, shell=True) os.chdir(old_cwd)
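A quick check of the backoff schedule in RevGitPushWithRetry above: a sleep only happens while retry < retries, so the default of five attempts pauses four times, for 5*retry seconds each. A minimal sketch:

# With retries=5, attempts 1-4 sleep before the next try; attempt 5 raises.
delays = [5 * retry for retry in range(1, 5)]
assert delays == [5, 10, 15, 20]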
return max(url_parser.link_list)
return reduce(_GreaterVersion, url_parser.link_list)
def GetLatestLinkFromPage(url, regex): """Returns the latest link from the given url that matches regex. Args: url: Url to download and parse. regex: Regular expression to match links against. """ url_file = urllib.urlopen(url) url_html = url_file.read() url_file.close() # Parses links with versions embedded. url_parser = HTMLDirectoryParser(regex=regex) url_parser.feed(url_html) return max(url_parser.link_list)
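The max() to reduce(_GreaterVersion, ...) change above matters because max() on strings compares lexicographically, so a link containing '0.9.74.1' sorts above one containing '0.10.2.0'. _GreaterVersion itself is not shown in these hunks; the comparator below is a hypothetical stand-in, illustrated on bare version strings rather than full link names:

def _GreaterVersion(a, b):
  """Hypothetical stand-in: return the newer of two dotted version strings."""
  if [int(x) for x in a.split('.')] >= [int(x) for x in b.split('.')]:
    return a
  return b

links = ['0.9.74.1', '0.10.2.0', '0.9.100.0']
assert max(links) == '0.9.74.1'                      # lexicographic surprise
assert reduce(_GreaterVersion, links) == '0.10.2.0'  # numeric comparison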
RunCommand(['gzip', '-f', image_path])
RunCommand(['gzip', '-f', '--fast', image_path])
def _ArchiveTestResults(buildroot, board, archive_dir, test_results_dir): """Archives the test results into the www dir for later use. Takes the results from the test_results_dir and dumps them into the archive dir specified. This also archives the last qemu image. Args: board: Board to find the qemu image. archive_dir: Path from ARCHIVE_BASE to store image. test_results_dir: Path from buildroot/chroot to find test results. This must be a subdir of /tmp. """ test_results_dir = test_results_dir.lstrip('/') if not os.path.exists(ARCHIVE_BASE): os.makedirs(ARCHIVE_BASE) else: dir_entries = os.listdir(ARCHIVE_BASE) if len(dir_entries) >= ARCHIVE_COUNT: oldest_dirs = heapq.nsmallest((len(dir_entries) - ARCHIVE_COUNT) + 1, [filename for filename in dir_entries], key=lambda fn: os.stat(fn).st_mtime) Info('Removing archive dirs %s' % oldest_dirs) for oldest_dir in oldest_dirs: shutil.rmtree(os.path.join(ARCHIVE_BASE, oldest_dir)) archive_target = os.path.join(ARCHIVE_BASE, str(archive_dir)) if os.path.exists(archive_target): shutil.rmtree(archive_target) results_path = os.path.join(buildroot, 'chroot', test_results_dir) RunCommand(['sudo', 'chmod', '-R', '+r', results_path]) try: shutil.copytree(results_path, archive_target) except: Warning('Some files could not be copied') image_name = 'chromiumos_qemu_image.bin' image_path = os.path.join(buildroot, 'src', 'build', 'images', board, 'latest', image_name) RunCommand(['gzip', '-f', image_path]) shutil.copyfile(image_path + '.gz', os.path.join(archive_target, image_name + '.gz'))
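The pruning logic above keeps at most ARCHIVE_COUNT archives, removing one extra entry to make room for the incoming one. A standalone sketch of the same heapq idiom, with one caveat flagged in a comment: os.listdir() yields bare names, so the stat key should join them back onto the base directory (the inlined version above relies on the working directory for this):

import heapq
import os
import shutil

def PruneOldestDirs(base, keep):
  """Remove the oldest subdirectories of base, leaving room for one more."""
  entries = os.listdir(base)
  if len(entries) >= keep:
    # Join base back on before stat'ing; bare names only resolve if cwd == base.
    oldest = heapq.nsmallest(
        (len(entries) - keep) + 1, entries,
        key=lambda fn: os.stat(os.path.join(base, fn)).st_mtime)
    for name in oldest:
      shutil.rmtree(os.path.join(base, name))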
cros_mark_as_stable._SimpleRunCommand( 'equery-x86-generic which %s 2> /dev/null' % self.package).AndReturn( self.ebuild_path)
cmd = ('ACCEPT_KEYWORDS="x86 arm amd64" ' 'equery-x86-generic which %s 2> /dev/null') cros_mark_as_stable._SimpleRunCommand(cmd % self.package).AndReturn( self.ebuild_path)
def testFindEBuildPath(self): self.mox.StubOutWithMock(cros_mark_as_stable, '_SimpleRunCommand') cros_mark_as_stable._SimpleRunCommand( 'equery-x86-generic which %s 2> /dev/null' % self.package).AndReturn( self.ebuild_path) self.mox.ReplayAll() path = cros_mark_as_stable._EBuild._FindEBuildPath(self.package) self.mox.VerifyAll() self.assertEquals(path, self.ebuild_path)
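For readers unfamiliar with mox, the test above follows its record/replay/verify cycle. A self-contained sketch of the same flow, assuming the same mox library (the Calculator class is purely illustrative):

import mox

class Calculator(object):
  def Add(self, a, b):
    return a + b

m = mox.Mox()
calc = Calculator()
m.StubOutWithMock(calc, 'Add')   # record mode: calls log expectations
calc.Add(2, 3).AndReturn(5)      # expect one call, with a canned result
m.ReplayAll()                    # switch to replay mode
assert calc.Add(2, 3) == 5       # code under test runs here
m.VerifyAll()                    # fail if any expected call never happened
m.UnsetStubs()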
def ModifyBootDesc(download_folder, redirect_file=None): """Modifies the boot description of a downloaded image to work with the download path.
def testGetLatestZipUrl(self): """Test case that tests GetLatestZipUrl with test urls.""" self.mox.StubOutWithMock(urllib, 'urlopen') m_file = self.mox.CreateMock(file)
def ModifyBootDesc(download_folder, redirect_file=None): """Modifies the boot description of a downloaded image to work with the download path. The default boot.desc from another system is specific to the directory it was created in. This modifies the boot description to be compatible with the download folder. Args: download_folder: Absolute path to the download folder. redirect_file: For testing. Where to copy new boot desc. """ boot_desc_path = os.path.join(download_folder, 'boot.desc') in_chroot_folder = ReinterpretPathForChroot(download_folder) for line in fileinput.input(boot_desc_path, inplace=1): # Has to be done here to get changes to sys.stdout from fileinput.input. if not redirect_file: redirect_file = sys.stdout split_line = line.split('=') if len(split_line) > 1: var_part = split_line[0] potential_path = split_line[1].replace('"', '').strip() if potential_path.startswith('/home') and not 'output_dir' in var_part: new_path = os.path.join(in_chroot_folder, os.path.basename(potential_path)) new_line = '%s="%s"' % (var_part, new_path) Info('Replacing line %s with %s' % (line, new_line)) redirect_file.write('%s\n' % new_line) continue elif 'output_dir' in var_part: # Special case for output_dir. new_line = '%s="%s"' % (var_part, in_chroot_folder) Info('Replacing line %s with %s' % (line, new_line)) redirect_file.write('%s\n' % new_line) continue # Line does not need to be modified. redirect_file.write(line) fileinput.close()
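One subtlety in ModifyBootDesc: with inplace=1, fileinput redirects sys.stdout into the file being rewritten, which is why redirect_file is defaulted to sys.stdout inside the loop rather than in the signature; the redirection is not in effect until iteration begins. A minimal sketch of the idiom (file name and paths are illustrative):

import fileinput
import sys

for line in fileinput.input('boot.desc', inplace=1):
  # sys.stdout now points at the rewritten boot.desc, not the console.
  sys.stdout.write(line.replace('/home/olduser/trunk', '/new/root'))
fileinput.close()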
The default boot.desc from another system is specific to the directory it was created in. This modifies the boot description to be compatible with the download folder.
urllib.urlopen('%s/%s/LATEST-%s' % (self.latestbase, self.channel, self.board)).AndReturn(m_file) m_file.read().AndReturn('%s.bin.gz' % self.image_name) m_file.close()
Args: download_folder: Absolute path to the download folder. redirect_file: For testing. Where to copy new boot desc. """ boot_desc_path = os.path.join(download_folder, 'boot.desc') in_chroot_folder = ReinterpretPathForChroot(download_folder)
self.mox.ReplayAll() self.assertEquals(ctest.GetLatestZipUrl(self.board, self.channel, self.latestbase, self.zipbase), self.image_url) self.mox.VerifyAll()
for line in fileinput.input(boot_desc_path, inplace=1): if not redirect_file: redirect_file = sys.stdout split_line = line.split('=') if len(split_line) > 1: var_part = split_line[0] potential_path = split_line[1].replace('"', '').strip()
def testGrabZipAndExtractImageUseCached(self): """Test case where cache holds our image.""" self.mox.StubOutWithMock(os.path, 'exists') self.mox.StubOutWithMock(__builtins__, 'open') m_file = self.mox.CreateMock(file)
if potential_path.startswith('/home') and not 'output_dir' in var_part: new_path = os.path.join(in_chroot_folder, os.path.basename(potential_path)) new_line = '%s="%s"' % (var_part, new_path) Info('Replacing line %s with %s' % (line, new_line)) redirect_file.write('%s\n' % new_line) continue elif 'output_dir' in var_part: new_line = '%s="%s"' % (var_part, in_chroot_folder) Info('Replacing line %s with %s' % (line, new_line)) redirect_file.write('%s\n' % new_line) continue
os.path.exists('%s/%s' % ( self.download_folder, 'download_url')).AndReturn(True)
redirect_file.write(line)
open('%s/%s' % (self.download_folder, 'download_url')).AndReturn(m_file) m_file.read().AndReturn(self.image_url) m_file.close()
fileinput.close()
os.path.exists('%s/%s' % ( self.download_folder, ctest._IMAGE_TO_EXTRACT)).AndReturn(True) self.mox.ReplayAll() ctest.GrabZipAndExtractImage(self.image_url, self.download_folder, ctest._IMAGE_TO_EXTRACT) self.mox.VerifyAll() def CommonDownloadAndExtractImage(self): """Common code to mock downloading image, unzipping it and setting url.""" zip_path = os.path.join(self.download_folder, 'image.zip') m_file = self.mox.CreateMock(file) ctest.RunCommand(['rm', '-rf', self.download_folder], print_cmd=False) os.mkdir(self.download_folder) urllib.urlretrieve(self.image_url, zip_path) ctest.RunCommand(['unzip', '-d', self.download_folder, zip_path], print_cmd=False, error_message=mox.IgnoreArg()) ctest.ModifyBootDesc(self.download_folder) open('%s/%s' % (self.download_folder, 'download_url'), 'w+').AndReturn(m_file) m_file.write(self.image_url) m_file.close() self.mox.ReplayAll() ctest.GrabZipAndExtractImage(self.image_url, self.download_folder, ctest._IMAGE_TO_EXTRACT) self.mox.VerifyAll() def testGrabZipAndExtractImageNoCache(self): """Test case where download_url doesn't exist.""" self.mox.StubOutWithMock(os.path, 'exists') self.mox.StubOutWithMock(os, 'mkdir') self.mox.StubOutWithMock(__builtins__, 'open') self.mox.StubOutWithMock(ctest, 'RunCommand') self.mox.StubOutWithMock(urllib, 'urlretrieve') self.mox.StubOutWithMock(ctest, 'ModifyBootDesc') m_file = self.mox.CreateMock(file) os.path.exists('%s/%s' % ( self.download_folder, 'download_url')).AndReturn(False) self.CommonDownloadAndExtractImage()
def GetLatestZipUrl(board, channel, latest_url_base, zip_server_base): """Returns the url of the latest image zip for the given arguments.
def testGrabZipAndExtractImageWrongCache(self): """Test case where download_url exists but doesn't match our url.""" self.mox.StubOutWithMock(os.path, 'exists') self.mox.StubOutWithMock(os, 'mkdir') self.mox.StubOutWithMock(__builtins__, 'open') self.mox.StubOutWithMock(ctest, 'RunCommand') self.mox.StubOutWithMock(urllib, 'urlretrieve') self.mox.StubOutWithMock(ctest, 'ModifyBootDesc')
def GetLatestZipUrl(board, channel, latest_url_base, zip_server_base): """Returns the url of the latest image zip for the given arguments. Args: board: board for the image zip. channel: channel for the image zip. latest_url_base: base url for latest links. zip_server_base: base url for zipped images. """ # Grab the latest image info. latest_file_url = os.path.join(latest_url_base, channel, 'LATEST-%s' % board) latest_image_file = urllib.urlopen(latest_file_url) latest_image = latest_image_file.read() latest_image_file.close() # Convert bin.gz into zip. latest_image = latest_image.replace('.bin.gz', '.zip') version = latest_image.split('-')[1] zip_base = os.path.join(zip_server_base, channel, board) return os.path.join(zip_base, version, latest_image)
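Concretely, here is the string surgery GetLatestZipUrl performs on a hypothetical LATEST file (every name below is made up):

latest_image = 'ChromeOS-0.9.74.1-r3b445c04-b123-x86-generic.bin.gz'
latest_image = latest_image.replace('.bin.gz', '.zip')
# -> 'ChromeOS-0.9.74.1-r3b445c04-b123-x86-generic.zip'
version = latest_image.split('-')[1]  # -> '0.9.74.1'
# Final url: <zip_server_base>/<channel>/<board>/0.9.74.1/ChromeOS-....zip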
Args: board: board for the image zip. channel: channel for the image zip. latest_url_base: base url for latest links. zip_server_base: base url for zipped images. """ latest_file_url = os.path.join(latest_url_base, channel, 'LATEST-%s' % board) latest_image_file = urllib.urlopen(latest_file_url) latest_image = latest_image_file.read() latest_image_file.close()
m_file = self.mox.CreateMock(file)
latest_image = latest_image.replace('.bin.gz', '.zip') version = latest_image.split('-')[1] zip_base = os.path.join(zip_server_base, channel, board) return os.path.join(zip_base, version, latest_image)
os.path.exists('%s/%s' % ( self.download_folder, 'download_url')).AndReturn(True)
def GrabZipAndExtractImage(zip_url, download_folder, image_name): """Downloads the zip and extracts the given image.
os.path.exists('%s/%s' % ( self.download_folder, ctest._IMAGE_TO_EXTRACT)).AndReturn(False)
def GrabZipAndExtractImage(zip_url, download_folder, image_name): """Downloads the zip and extracts the given image. Doesn't re-download if matching version found already in download folder. Args: zip_url - url for the image. download_folder - download folder to store zip file and extracted images. image_name - name of the image to extract from the zip file. """ zip_path = os.path.join(download_folder, 'image.zip') versioned_url_path = os.path.join(download_folder, 'download_url') found_cached = False if os.path.exists(versioned_url_path): fh = open(versioned_url_path) version_url = fh.read() fh.close() if version_url == zip_url and os.path.exists(os.path.join(download_folder, image_name)): Info('Using cached %s' % image_name) found_cached = True if not found_cached: Info('Downloading %s' % zip_url) RunCommand(['rm', '-rf', download_folder], print_cmd=False) os.mkdir(download_folder) urllib.urlretrieve(zip_url, zip_path) # Use the unzip binary: Python's own zip support is implemented in pure # Python, so extraction is really slow. Info('Unzipping image %s' % image_name) RunCommand(['unzip', '-d', download_folder, zip_path], print_cmd=False, error_message='Failed to download %s' % zip_url) ModifyBootDesc(download_folder) # Put url in version file so we don't have to do this every time. fh = open(versioned_url_path, 'w+') fh.write(zip_url) fh.close()
Doesn't re-download if matching version found already in download folder. Args: zip_url - url for the image. download_folder - download folder to store zip file and extracted images. image_name - name of the image to extract from the zip file. """ zip_path = os.path.join(download_folder, 'image.zip') versioned_url_path = os.path.join(download_folder, 'download_url') found_cached = False if os.path.exists(versioned_url_path): fh = open(versioned_url_path) version_url = fh.read() fh.close() if version_url == zip_url and os.path.exists(os.path.join(download_folder, image_name)): Info('Using cached %s' % image_name) found_cached = True if not found_cached: Info('Downloading %s' % zip_url) RunCommand(['rm', '-rf', download_folder], print_cmd=False) os.mkdir(download_folder) urllib.urlretrieve(zip_url, zip_path) Info('Unzipping image %s' % image_name) RunCommand(['unzip', '-d', download_folder, zip_path], print_cmd=False, error_message='Failed to download %s' % zip_url) ModifyBootDesc(download_folder) fh = open(versioned_url_path, 'w+') fh.write(zip_url) fh.close() def RunAUTestHarness(board, channel, latest_url_base, zip_server_base): """Runs the auto update test harness. The auto update test harness encapsulates testing the auto-update mechanism for the latest image against the latest official image from the channel. This also tests images with suite_Smoke (built-in as part of its verification process). Args: board: the board for the latest image. channel: the channel to run the au test harness against. latest_url_base: base url for getting latest links. zip_server_base: base url for zipped images. """ crosutils_root = os.path.join(os.path.dirname(__file__), '..') download_folder = os.path.abspath('latest_download') zip_url = GetLatestZipUrl(board, channel, latest_url_base, zip_server_base) GrabZipAndExtractImage(zip_url, download_folder, _IMAGE_TO_EXTRACT) latest_image = RunCommand(['./get_latest_image.sh', '--board=%s' % board], cwd=crosutils_root, redirect_stdout=True, print_cmd=True) RunCommand(['bin/cros_au_test_harness', '--base_image=%s' % os.path.join(download_folder, _IMAGE_TO_EXTRACT), '--target_image=%s' % latest_image, '--board=%s' % board], cwd=crosutils_root) def main(): parser = optparse.OptionParser() parser.add_option('-b', '--board', help='board for the image to compare against.') parser.add_option('-c', '--channel', help='channel for the image to compare against.') parser.add_option('-l', '--latestbase', help='Base url for latest links.') parser.add_option('-z', '--zipbase', help='Base url for hosted images.') parser.set_usage(parser.format_help()) (options, args) = parser.parse_args() if args: parser.error('Extra args found %s.' % args) if not options.board: parser.error('Need board for image to compare against.') if not options.channel: parser.error('Need channel for image to compare against.') if not options.latestbase: parser.error('Need latest url base to get images.') if not options.zipbase: parser.error('Need zip url base to get images.') RunAUTestHarness(options.board, options.channel, options.latestbase, options.zipbase)
self.CommonDownloadAndExtractImage()
main()
unittest.main()
def main(): parser = optparse.OptionParser() parser.add_option('-b', '--board', help='board for the image to compare against.') parser.add_option('-c', '--channel', help='channel for the image to compare against.') parser.add_option('-l', '--latestbase', help='Base url for latest links.') parser.add_option('-z', '--zipbase', help='Base url for hosted images.') # Set the usage to include flags. parser.set_usage(parser.format_help()) (options, args) = parser.parse_args() if args: parser.error('Extra args found %s.' % args) if not options.board: parser.error('Need board for image to compare against.') if not options.channel: parser.error('Need channel for image to compare against.') if not options.latestbase: parser.error('Need latest url base to get images.') if not options.zipbase: parser.error('Need zip url base to get images.') RunAUTestHarness(options.board, options.channel, options.latestbase, options.zipbase)
if self.use_delta_updates: self.source_image = base_image_path
def testFullUpdateKeepStateful(self): """Tests if we can update normally.
try: self.UpdateImage(target_image_path) except: if self.use_delta_updates: Warning('Delta update failed, disabling delta updates and retrying.') self.use_delta_updates = False self.source_image = '' self.UpdateImage(target_image_path) else: raise
self.TryDeltaAndFallbackToFull(base_image_path, target_image_path)
if self.use_delta_updates: self.source_image = target_image_path
self.UpdateImage(base_image_path)
self.TryDeltaAndFallbackToFull(target_image_path, base_image_path)
if self.use_delta_updates: self.source_image = base_image_path
def testFullUpdateWipeStateful(self): """Tests if we can update after cleaning the stateful partition.
try: self.UpdateImage(target_image_path, 'clean') except: if self.use_delta_updates: Warning('Delta update failed, disabling delta updates and retrying.') self.use_delta_updates = False self.source_image = '' self.UpdateImage(target_image_path) else: raise
self.TryDeltaAndFallbackToFull(base_image_path, target_image_path, 'clean')
if self.use_delta_updates: self.source_image = target_image_path
self.UpdateImage(base_image_path, 'clean')
self.TryDeltaAndFallbackToFull(target_image_path, base_image_path, 'clean')
cwd=cwd)
cwd=cwd, enter_chroot=True)
def _SetupBoard(buildroot, board='x86-generic'): """Wrapper around setup_board.""" cwd = os.path.join(buildroot, 'src', 'scripts') RunCommand(['./setup_board', '--fast', '--default', '--board=%s' % board], cwd=cwd)
RunCommand(['./build_image'], cwd=cwd)
RunCommand(['./build_image'], cwd=cwd, enter_chroot=True)
def _BuildImage(buildroot): cwd = os.path.join(buildroot, 'src', 'scripts') RunCommand(['./build_image'], cwd=cwd)
RunCommand(['./cros_run_unit_tests'], cwd=cwd)
RunCommand(['./cros_run_unit_tests'], cwd=cwd, enter_chroot=True)
def _RunUnitTests(buildroot): cwd = os.path.join(buildroot, 'src', 'scripts') RunCommand(['./cros_run_unit_tests'], cwd=cwd)
rev_file = revisionfile.open(revisionfile) revisions = rev_file.read() rev_file.close()
try: rev_file = open(revisionfile) revisions = rev_file.read() rev_file.close() except: print >> sys.stderr, 'Error reading %s' % revisionfile revisions = None
def _UprevPackages(buildroot, revisionfile): revisions = None if (revisionfile): rev_file = revisionfile.open(revisionfile) revisions = rev_file.read() rev_file.close() # Note: Revisions == "None" indicates a Force Build. if revisions and revisions != 'None': print 'CBUILDBOT - Revision list found %s' % revisions print 'Revision list not yet propagating to build, marking all instead' _UprevAllPackages(buildroot)
filter_file: file to load into FILTER_PACKAGES
filter_file: file to load into _FILTER_PACKAGES
def LoadFilterFile(filter_file): """Load a file of keywords, one per line. Args: filter_file: file to load into FILTER_PACKAGES """ filter_fh = open(filter_file) try: FILTER_PACKAGES.update([filter.strip() for filter in filter_fh]) finally: filter_fh.close() return FILTER_PACKAGES
FILTER_PACKAGES.update([filter.strip() for filter in filter_fh])
_FILTER_PACKAGES.update([filter.strip() for filter in filter_fh])
return FILTER_PACKAGES
return _FILTER_PACKAGES
Skip any files that match the list of packages to filter in FILTER_PACKAGES. Args: file_path: string of a file path to inspect against FILTER_PACKAGES
Skip any files that match the list of packages to filter in _FILTER_PACKAGES. Args: file_path: string of a file path to inspect against _FILTER_PACKAGES
def ShouldFilterPackage(file_path): """Skip a particular file if it matches a pattern. Skip any files that match the list of packages to filter in FILTER_PACKAGES. Args: file_path: string of a file path to inspect against FILTER_PACKAGES Returns: True if we should filter the package, False otherwise. """ for name in FILTER_PACKAGES: if name in file_path: print 'FILTERING %s' % file_path return True return False
for name in FILTER_PACKAGES:
for name in _FILTER_PACKAGES:
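A hypothetical round trip through the two helpers above (file name, contents, and ebuild paths are made up; the check is a plain substring match):

fh = open('filter_packages.txt', 'w')
fh.write('chromeos-base/chromeos-chrome\n')
fh.close()

LoadFilterFile('filter_packages.txt')
assert ShouldFilterPackage('x86/chromeos-base/chromeos-chrome-9.0.ebuild')
assert not ShouldFilterPackage('x86/dev-util/perf-1.0.ebuild')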
pool = Pool(processes=pool)
pool = multiprocessing.Pool(processes=pool)
def RemoteUpload(files, pool=10): """Upload to google storage. Create a pool of processes and call _GsUpload with the proper arguments. Args: files: dictionary with keys to local files and values to remote path. pool: maximum number of processes to have running at the same time. """ # TODO(scottz) port this to use _RunManyParallel when it is available in # cros_build_lib pool = Pool(processes=pool) workers = [] for local_file, remote_path in files.iteritems(): workers.append((local_file, remote_path)) result = pool.map_async(_GsUpload, workers, chunksize=1) while True: try: result.get(60*60) break except multiprocessing.TimeoutError: pass
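The while True loop around result.get(60*60) above is a known Python 2 workaround: waiting without a timeout can block in a way that never services KeyboardInterrupt, so polling with a long timeout keeps the parent process killable. Reduced to a runnable sketch with a stand-in worker (_Square plays the role of _GsUpload):

import multiprocessing

def _Square(n):
  return n * n

if __name__ == '__main__':
  pool = multiprocessing.Pool(processes=4)
  result = pool.map_async(_Square, range(10), chunksize=1)
  while True:
    try:
      print result.get(60 * 60)  # wait up to an hour per poll
      break
    except multiprocessing.TimeoutError:
      pass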
print msg
print >> sys.stderr, msg
def usage(parser, msg): """Display usage message and parser help then exit with 1.""" print msg parser.print_help() sys.exit(1)
if rw_checkout: RunCommand(['repo', 'forall', '-c', 'git', 'config', 'url.ssh://git@gitrw.chromium.org:9222.pushinsteadof', 'http://git.chromium.org/git'], cwd=buildroot)
def RepoSync(buildroot, rw_checkout=False, retries=_DEFAULT_RETRIES): """Uses repo to checkout the source code. Keyword arguments: rw_checkout -- Reconfigure repo after sync'ing to read-write. retries -- Number of retries to try before failing on the sync. """ while retries > 0: try: # The --trace option ensures that repo shows the output from git. This # is needed so that the buildbot can kill us if git is not making # progress. RunCommand(['repo', '--trace', 'sync'], cwd=buildroot) if rw_checkout: # Always re-run in case of new git repos or repo sync # failed in a previous run because of a forced Stop Build. RunCommand(['repo', 'forall', '-c', 'git', 'config', 'url.ssh://git@gitrw.chromium.org:9222.pushinsteadof', 'http://git.chromium.org/git'], cwd=buildroot) retries = 0 except: retries -= 1 if retries > 0: Warning('CBUILDBOT -- Repo Sync Failed, retrying') else: Warning('CBUILDBOT -- Retries exhausted') raise # Output manifest RunCommand(['repo', 'manifest', '-r', '-o', '-'], cwd=buildroot)
default='http://git.chromium.org/git/manifest',
default='ssh://git@gitrw.chromium.org:9222/manifest',
def main(): # Parse options usage = "usage: %prog [options] cbuildbot_config" parser = optparse.OptionParser(usage=usage) parser.add_option('-r', '--buildroot', help='root directory where build occurs', default=".") parser.add_option('-n', '--buildnumber', help='build number', type='int', default=0) parser.add_option('-f', '--revisionfile', help='file where new revisions are stored') parser.add_option('--clobber', action='store_true', dest='clobber', default=False, help='Clobbers an old checkout before syncing') parser.add_option('--debug', action='store_true', dest='debug', default=False, help='Override some options to run as a developer.') parser.add_option('-t', '--tracking-branch', dest='tracking_branch', default='cros/master', help='Run the buildbot on a branch') parser.add_option('-u', '--url', dest='url', default='http://git.chromium.org/git/manifest', help='Run the buildbot on internal manifest') (options, args) = parser.parse_args() buildroot = os.path.abspath(options.buildroot) revisionfile = options.revisionfile tracking_branch = options.tracking_branch if len(args) >= 1: buildconfig = _GetConfig(args[-1]) else: Warning('Missing configuration description') parser.print_usage() sys.exit(1) # Calculate list of overlay directories. overlays = _ResolveOverlays(buildroot, buildconfig['overlays']) try: _PreFlightRinse(buildroot, buildconfig['board'], tracking_branch, overlays) if options.clobber or not os.path.isdir(buildroot): _FullCheckout(buildroot, tracking_branch, url=options.url) else: _IncrementalCheckout(buildroot) # Check that all overlays can be found. for path in overlays: assert ':' not in path, 'Overlay must not contain colons: %s' % path if not os.path.isdir(path): Die('Missing overlay: %s' % path) chroot_path = os.path.join(buildroot, 'chroot') if not os.path.isdir(chroot_path): _MakeChroot(buildroot) boardpath = os.path.join(chroot_path, 'build', buildconfig['board']) if not os.path.isdir(boardpath): _SetupBoard(buildroot, board=buildconfig['board']) if buildconfig['uprev']: _UprevPackages(buildroot, tracking_branch, revisionfile, buildconfig['board'], overlays) _EnableLocalAccount(buildroot) _Build(buildroot) if buildconfig['unittests']: _RunUnitTests(buildroot) _BuildImage(buildroot) if buildconfig['smoke_bvt']: _BuildVMImageForTesting(buildroot) test_results_dir = '/tmp/run_remote_tests.%s' % options.buildnumber try: _RunSmokeSuite(buildroot, test_results_dir) finally: _ArchiveTestResults(buildroot, buildconfig['board'], archive_dir=options.buildnumber, test_results_dir=test_results_dir) if buildconfig['uprev']: # Don't push changes for developers. if not options.debug: if buildconfig['master']: # Master bot needs to check if the other slaves completed. if cbuildbot_comm.HaveSlavesCompleted(config): _UprevPush(buildroot, tracking_branch, buildconfig['board'], overlays) else: Die('CBUILDBOT - One of the slaves has failed!!!') else: # Publish my status to the master if it's expecting it. if buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE) except: # Send failure to master bot. if not buildconfig['master'] and buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED) raise
def ResolveOverlays(overlays):
def ResolveOverlays(buildroot, overlays):
def ResolveOverlays(overlays): """Return the list of overlays to use for a given buildbot. Args: overlays: A string describing which overlays you want. 'private': Just the private overlay. 'public': Just the public overlay. 'both': Both the public and private overlays. """ public_overlay = '%s/src/third_party/chromiumos-overlay' % buildroot private_overlay = '%s/src/private-overlays/chromeos-overlay' % buildroot if overlays == 'private': dirs = [private_overlay] elif overlays == 'public': dirs = [public_overlay] elif overlays == 'both': dirs = [public_overlay, private_overlay] else: Die('Incorrect overlay configuration: %s' % overlays) for dir in dirs: assert ':' not in dir, 'Overlay must not contain colons: %s' % dir if not os.path.exists(dir): Die('Missing overlay: %s' % dir) return dirs
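The hunk above threads buildroot through as a parameter; the removed signature interpolated buildroot into the overlay paths without ever receiving it. Expected behaviour after the fix, with an illustrative buildroot and assuming both overlay directories exist:

overlays = ResolveOverlays('/b/buildroot', 'both')
# -> ['/b/buildroot/src/third_party/chromiumos-overlay',
#     '/b/buildroot/src/private-overlays/chromeos-overlay']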
overlays = ResolveOverlays(buildconfig['overlays'])
overlays = ResolveOverlays(buildroot, buildconfig['overlays'])
def main(): # Parse options usage = "usage: %prog [options] cbuildbot_config" parser = optparse.OptionParser(usage=usage) parser.add_option('-r', '--buildroot', help='root directory where build occurs', default=".") parser.add_option('-n', '--buildnumber', help='build number', type='int', default=0) parser.add_option('-f', '--revisionfile', help='file where new revisions are stored') parser.add_option('--clobber', action='store_true', dest='clobber', default=False, help='Clobbers an old checkout before syncing') parser.add_option('--debug', action='store_true', dest='debug', default=False, help='Override some options to run as a developer.') parser.add_option('-t', '--tracking-branch', dest='tracking_branch', default='cros/master', help='Run the buildbot on a branch') parser.add_option('-u', '--url', dest='url', default='http://git.chromium.org/git/manifest', help='Run the buildbot on internal manifest') (options, args) = parser.parse_args() buildroot = options.buildroot revisionfile = options.revisionfile tracking_branch = options.tracking_branch if len(args) >= 1: buildconfig = _GetConfig(args[-1]) else: Warning('Missing configuration description') parser.print_usage() sys.exit(1) # Calculate list of overlay directories. overlays = ResolveOverlays(buildconfig['overlays']) try: _PreFlightRinse(buildroot, buildconfig['board'], tracking_branch, overlays) if options.clobber or not os.path.isdir(buildroot): _FullCheckout(buildroot, tracking_branch, url=options.url) else: _IncrementalCheckout(buildroot) chroot_path = os.path.join(buildroot, 'chroot') if not os.path.isdir(chroot_path): _MakeChroot(buildroot) boardpath = os.path.join(chroot_path, 'build', buildconfig['board']) if not os.path.isdir(boardpath): _SetupBoard(buildroot, board=buildconfig['board']) if buildconfig['uprev']: _UprevPackages(buildroot, tracking_branch, revisionfile, buildconfig['board'], overlays) _EnableLocalAccount(buildroot) _Build(buildroot) if buildconfig['unittests']: _RunUnitTests(buildroot) _BuildImage(buildroot) if buildconfig['smoke_bvt']: _BuildVMImageForTesting(buildroot) test_results_dir = '/tmp/run_remote_tests.%s' % options.buildnumber try: _RunSmokeSuite(buildroot, test_results_dir) finally: _ArchiveTestResults(buildroot, buildconfig['board'], archive_dir=options.buildnumber, test_results_dir=test_results_dir) if buildconfig['uprev']: # Don't push changes for developers. if not options.debug: if buildconfig['master']: # Master bot needs to check if the other slaves completed. if cbuildbot_comm.HaveSlavesCompleted(config): _UprevPush(buildroot, tracking_branch, buildconfig['board'], overlays) else: Die('CBUILDBOT - One of the slaves has failed!!!') else: # Publish my status to the master if it's expecting it. if buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE) except: # Send failure to master bot. if not buildconfig['master'] and buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED) raise
self.UpdateImage(image)
self._UpdateImageReportError(image)
def TryDeltaAndFallbackToFull(self, src_image, image, stateful_change='old'): """Tries the delta update first if set and falls back to full update.""" if self.use_delta_updates: try: self.source_image = src_image self.UpdateImage(image) except: Warning('Delta update failed, disabling delta updates and retrying.') self.use_delta_updates = False self.source_image = '' self.UpdateImage(image) else: self.UpdateImage(image)
self.UpdateImage(image) def PrepareBase(self):
self._UpdateImageReportError(image) def _UpdateImageReportError(self, image_path, stateful_change='old'): """Calls UpdateImage and reports any error to the console. Still throws the exception. """ try: self.UpdateImage(image_path, stateful_change) except UpdateException as err: Warning(err.stdout) raise def _AttemptUpdateWithPayloadExpectedFailure(self, payload, expected_msg): try: self.UpdateUsingPayload(payload) except UpdateException as err: if re.search(re.escape(expected_msg), err.stdout, re.MULTILINE): return Warning("Didn't find '%s' in:" % expected_msg) Warning(err.stdout) self.fail('We managed to update when failure was expected') def PrepareBase(self, image_path):
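These hunks raise and catch UpdateException(code, stdout), but its definition is not part of any hunk shown here. A plausible minimal shape, together with the message check _AttemptUpdateWithPayloadExpectedFailure performs, might look like this (an assumption, not the project's actual class):

import re

class UpdateException(Exception):
  """Assumed shape: carries the exit code and captured stdout."""
  def __init__(self, code, stdout):
    Exception.__init__(self, 'update failed with code %s' % code)
    self.code = code
    self.stdout = stdout

def FailedWithMessage(err, expected_msg):
  """True if the captured output contains the expected failure message."""
  return re.search(re.escape(expected_msg), err.stdout, re.MULTILINE) is not None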
self.PrepareBase()
self.PrepareBase(image_path=base_image_path)
def testFullUpdateKeepStateful(self): """Tests if we can update normally.
self.PrepareBase()
self.PrepareBase(image_path=base_image_path)
def testFullUpdateWipeStateful(self): """Tests if we can update after cleaning the stateful partition.
def PrepareBase(self):
def PrepareBase(self, image_path):
def PrepareBase(self): """Auto-update to base image to prepare for test.""" self.UpdateImage(base_image_path)
self.UpdateImage(base_image_path)
self._UpdateImageReportError(image_path)
RunCommand([
(code, stdout, stderr) = RunCommandCaptureOutput([
def UpdateImage(self, image_path, stateful_change='old'): """Updates a remote image using image_to_live.sh.""" stateful_change_flag = self.GetStatefulChangeFlag(stateful_change)
'--src_image=%s' % self.source_image, ], enter_chroot=False)
'--src_image=%s' % self.source_image ]) if code != 0: raise UpdateException(code, stdout) def UpdateUsingPayload(self, update_path, stateful_change='old'): """Updates a remote image using image_to_live.sh.""" stateful_change_flag = self.GetStatefulChangeFlag(stateful_change) (code, stdout, stderr) = RunCommandCaptureOutput([ '%s/image_to_live.sh' % self.crosutils, '--payload=%s' % update_path, '--remote=%s' % remote, stateful_change_flag, '--verify', ]) if code != 0: raise UpdateException(code, stdout)
def PrepareBase(self):
def PrepareBase(self, image_path):
def PrepareBase(self): """Creates an update-able VM based on base image.""" self.vm_image_path = '%s/chromiumos_qemu_image.bin' % os.path.dirname( base_image_path)
base_image_path)
image_path) Info('Creating: %s' % self.vm_image_path)
os.path.dirname(base_image_path)),
os.path.dirname(image_path)),
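The net effect of the base_image_path to image_path change, traced with an illustrative path:

import os

image_path = '/b/images/x86-generic/0.9.74.1/chromiumos_image.bin'
vm_image_path = '%s/chromiumos_qemu_image.bin' % os.path.dirname(image_path)
# -> '/b/images/x86-generic/0.9.74.1/chromiumos_qemu_image.bin'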