rem
stringlengths
0
322k
add
stringlengths
0
2.05M
context
stringlengths
8
228k
def GetVersion(): """Get the version to put in LATEST and update the git version with.""" return datetime.datetime.now().strftime('%d.%m.%y.%H%M%S')
def _generate_dict_results(self, gs_bucket_path): """ Generate a dictionary result similar to GenerateUploadDict """ results = {} for entry in self.files_to_sync: results[entry] = os.path.join(gs_bucket_path, entry.replace(self.strip_path, '').lstrip('/')) return results
def GetVersion(): """Get the version to put in LATEST and update the git version with.""" return datetime.datetime.now().strftime('%d.%m.%y.%H%M%S')
def LoadFilterFile(filter_file): """Load a file with keywords on a perline basis.
def testGenerateUploadDict(self): gs_bucket_path = 'gs://chromeos-prebuilt/host/version' self.mox.StubOutWithMock(cros_build_lib, 'ListFiles') cros_build_lib.ListFiles(' ').AndReturn(self.files_to_sync) self.mox.ReplayAll() result = prebuilt.GenerateUploadDict(' ', gs_bucket_path, self.strip_path) print result self.assertEqual(result, self._generate_dict_results(gs_bucket_path))
def LoadFilterFile(filter_file): """Load a file with keywords on a perline basis. Args: filter_file: file to load into FILTER_PACKAGES """ filter_fh = open(filter_file) global FILTER_PACKAGES FILTER_PACKAGES = [filter.strip() for filter in filter_fh.readlines()] return FILTER_PACKAGES
Args: filter_file: file to load into FILTER_PACKAGES """ filter_fh = open(filter_file) global FILTER_PACKAGES FILTER_PACKAGES = [filter.strip() for filter in filter_fh.readlines()] return FILTER_PACKAGES def FilterPackage(file_path): """Skip a particular file if it matches a pattern. Skip any files that machine the list of packages to filter in FILTER_PACKAGES. Args: file_path: string of a file path to inspect against FILTER_PACKAGES Returns: True if we should filter the package. False otherwise """ for name in FILTER_PACKAGES: if name in file_path: print 'FILTERING %s' % file_path return True return False def _GsUpload(args): """Upload to GS bucket. Args: args: a set of arguments that contains local_file and remote_file. """ (local_file, remote_file) = args if FilterPackage(local_file): return cmd = 'gsutil cp -a public-read %s %s' % (local_file, remote_file) for attempt in range(_RETRIES): try: output = cros_build_lib.RunCommand(cmd, print_cmd=False, shell=True) break except cros_build_lib.RunCommandError: print 'Failed to sync %s -> %s, retryings' % (local_file, remote_file) else: print 'Retry failed we should probably return the file that faild?' def RemoteUpload(files, pool=10): """Upload to google storage. Create a pool of process and call _GsUpload with the proper arguments. Args: files: dictionary with keys to local files and values to remote path. pool: integer of maximum proesses to have at the same time """ pool = Pool(processes=pool) workers = [] for local_file, remote_path in files.iteritems(): workers.append((local_file, remote_path)) pool.map(_GsUpload, workers) def GenerateUploadDict(local_path, gs_path, strip_str): """Build a dictionary of local remote file key pairs for gsutil to upload. Args: local_path: A path to the file on the local hard drive. gs_path: Path to upload in Google Storage. strip_str: String to remove from the local_path so that the relative file path can be tacked on to the gs_path. Returns: Returns a dictionary of file path/gs_dest_path pairs """ files_to_sync = cros_build_lib.ListFiles(local_path) upload_files = {} for file_path in files_to_sync: filename = file_path.replace(strip_str, '').lstrip('/') gs_file_path = os.path.join(gs_path, filename) upload_files[file_path] = gs_file_path return upload_files def HostPrebuilt(build_path, bucket, git_file=None): """Upload Host prebuilt files to Google Storage space. Args: build_path: The path to the root of the chroot. bucket: The Google Storage bucket to upload to. git_file: If set, update this file with a host/version combo, commit and push it. """ host_package_path = os.path.join(build_path, _HOST_PACKAGES_PATH) version = GetVersion() gs_path = os.path.join(bucket, _GS_HOST_PATH, version) upload_files = GenerateUploadDict(host_package_path, gs_path, host_package_path) print 'Uploading host to %s' % bucket RemoteUpload(upload_files) if git_file: RevGitFile(git_file, 'amd64', version) def BoardPackages(board, build_path, bucket, git_file=None): """Upload board packages to Google Storage. Args: board: The board type. build_path: Path to the Chrome build directory. bucket: The Google Storage bucket to upload to. git_file: If set, update this file with a host/version combo, commit and push it. """ board_path = os.path.join(build_path, _BOARD_PATH % board) board_packages_path = os.path.join(board_path, 'packages') version = GetVersion() gs_path = os.path.join(bucket, _GS_BOARD_PATH % (board, version)) upload_files = GenerateUploadDict(board_packages_path, gs_path, board_path) print 'Uploading board %s to %s' % (board, bucket) RemoteUpload(upload_files) if git_file: RevGitFile(git_file, board, version) def usage(parser, msg): """Display usage message and parser help then exit with 1.""" print msg parser.print_help() sys.exit(1) def main(): parser = optparse.OptionParser() parser.add_option('-b', '--board', dest='board', default=None, help='Board type that was built on this machine') parser.add_option('-p', '--build-path', dest='build_path', help='Path to the chroot') parser.add_option('-s', '--sync-host', dest='sync_host', default=False, action='store_true', help='Sync host prebuilts') parser.add_option('-g', '--git-sync', dest='git_sync', default=False, action='store_true', help='Enable git version sync (This commits to a repo)') parser.add_option('-u', '--upload', dest='upload', default=None, help='Upload to GS bucket') parser.add_option('-f', '--filter', dest='filter_file', default=None, help='File to use for filtering GS bucket uploads') options, args = parser.parse_args() if not options.build_path: usage(parser, 'Error: you need provide a chroot path') if not options.upload: usage(parser, 'Error: you need to provide a gsutil upload bucket -u') git_file = None if options.git_sync: git_file = os.path.join(options.build_path, VER_FILE) if options.sync_host: HostPrebuilt(options.build_path, options.upload, git_file=git_file) if options.board: BoardPackages(options.board, options.build_path, options.upload, git_file=git_file)
def LoadFilterFile(filter_file): """Load a file with keywords on a perline basis. Args: filter_file: file to load into FILTER_PACKAGES """ filter_fh = open(filter_file) global FILTER_PACKAGES FILTER_PACKAGES = [filter.strip() for filter in filter_fh.readlines()] return FILTER_PACKAGES
main()
unittest.main()
def main(): parser = optparse.OptionParser() parser.add_option('-b', '--board', dest='board', default=None, help='Board type that was built on this machine') parser.add_option('-p', '--build-path', dest='build_path', help='Path to the chroot') parser.add_option('-s', '--sync-host', dest='sync_host', default=False, action='store_true', help='Sync host prebuilts') parser.add_option('-g', '--git-sync', dest='git_sync', default=False, action='store_true', help='Enable git version sync (This commits to a repo)') parser.add_option('-u', '--upload', dest='upload', default=None, help='Upload to GS bucket') parser.add_option('-f', '--filter', dest='filter_file', default=None, help='File to use for filtering GS bucket uploads') options, args = parser.parse_args() if not options.build_path: usage(parser, 'Error: you need provide a chroot path') if not options.upload: usage(parser, 'Error: you need to provide a gsutil upload bucket -u') git_file = None if options.git_sync: git_file = os.path.join(options.build_path, VER_FILE) if options.sync_host: HostPrebuilt(options.build_path, options.upload, git_file=git_file) if options.board: BoardPackages(options.board, options.build_path, options.upload, git_file=git_file)
percent_passed = self.VerifyImage(42)
percent_passed = self.VerifyImage(10)
def testFullUpdateKeepStateful(self): """Tests if we can update normally.
def NotestFullUpdateWipeStateful(self):
def testFullUpdateWipeStateful(self):
def NotestFullUpdateWipeStateful(self): """Tests if we can update after cleaning the stateful partition.
percent_passed = self.VerifyImage(42)
percent_passed = self.VerifyImage(10)
def NotestFullUpdateWipeStateful(self): """Tests if we can update after cleaning the stateful partition.
print ('\n========== DEBUG FILE %s FOR TEST %s ==============\n' % ( path, test)) print fh.read() print('\n=========== END DEBUG %s FOR TEST %s ===============\n' % ( path, test))
print >> sys.stderr, ( '\n========== DEBUG FILE %s FOR TEST %s ==============\n' % ( path, test)) out = fh.read() while out: print >> sys.stderr, out out = fh.read() print >> sys.stderr, ( '\n=========== END DEBUG %s FOR TEST %s ===============\n' % ( path, test))
def _GenerateReportText(self): """Prints a result report to stdout.
output = RunCommand(['%s/cros_run_vm_test' % self.crosutilsbin, '--image_path=%s' % self.vm_image_path, '--snapshot', '--persist', vm_graphics_flag, '--kvm_pid=%s' % _KVM_PID_FILE, '--test_case=%s' % _VERIFY_SUITE, ], error_ok=True, enter_chroot=False, redirect_stdout=True)
commandWithArgs = ['%s/cros_run_vm_test' % self.crosutilsbin, '--image_path=%s' % self.vm_image_path, '--snapshot', '--persist', '--kvm_pid=%s' % _KVM_PID_FILE, _VERIFY_SUITE, ] if vm_graphics_flag: commandWithArgs.append(vm_graphics_flag) output = RunCommand(commandWithArgs, error_ok=True, enter_chroot=False, redirect_stdout=True)
def VerifyImage(self, percent_required_to_pass): """Runs vm smoke suite to verify image.""" # image_to_live already verifies lsb-release matching. This is just # for additional steps. output = RunCommand(['%s/cros_run_vm_test' % self.crosutilsbin, '--image_path=%s' % self.vm_image_path, '--snapshot', '--persist', vm_graphics_flag, '--kvm_pid=%s' % _KVM_PID_FILE, '--test_case=%s' % _VERIFY_SUITE, ], error_ok=True, enter_chroot=False, redirect_stdout=True) return self.CommonVerifyImage(self, output, percent_required_to_pass)
def NotVerifyImage(self):
def VerifyImage(self):
def NotVerifyImage(self): """Verifies an image using run_remote_tests.sh with verification suite.""" RunCommand([ '%s/run_remote_tests.sh' % self.crosutils, '--remote=%s' % remote, _VERIFY_SUITE, ], error_ok=False, enter_chroot=False)
unittest.TextTestRunner(verbosity=2).run(suite)
return_code = unittest.TextTestRunner(verbosity=2).run(suite)
def VerifyImage(self): """Runs vm smoke suite to verify image.""" # image_to_live already verifies lsb-release matching. This is just # for additional steps. RunCommand(['%s/cros_run_vm_test' % self.crosutilsbin, '--image_path=%s' % self.vm_image_path, '--snapshot', '--persist', '--kvm_pid=%s' % _KVM_PID_FILE, '--test_case=%s' % _VERIFY_SUITE, ], error_ok=False, enter_chroot=False)
Die('Found multiple unstable ebuilds in %s' % root)
Die('Found multiple unstable ebuilds in %s' % os.path.dirname(path))
def _FindStableEBuilds(files): """Return a list of stable ebuilds from specified list of files. Args: files: List of files. """ workon_dir = False stable_ebuilds = [] unstable_ebuilds = [] for path in files: if path.endswith('.ebuild') and not os.path.islink(path): ebuild = _EBuild(path) if ebuild.is_workon: workon_dir = True if ebuild.is_stable: stable_ebuilds.append(ebuild) else: unstable_ebuilds.append(ebuild) # If we found a workon ebuild in this directory, apply some sanity checks. if workon_dir: if len(unstable_ebuilds) > 1: Die('Found multiple unstable ebuilds in %s' % root) if len(stable_ebuilds) > 1: stable_ebuilds = [_BestEBuild(stable_ebuilds)] # Print a warning if multiple stable ebuilds are found in the same # directory. Storing multiple stable ebuilds is error-prone because # the older ebuilds will not get rev'd. # # We make a special exception for x11-drivers/xf86-video-msm for legacy # reasons. if stable_ebuilds[0].package != 'x11-drivers/xf86-video-msm': Warning('Found multiple stable ebuilds in %s' % root) if not unstable_ebuilds: Die('Missing 9999 ebuild in %s' % root) if not stable_ebuilds: Die('Missing stable ebuild in %s' % root) if stable_ebuilds: return stable_ebuilds[0] else: return None
Warning('Found multiple stable ebuilds in %s' % root)
Warning('Found multiple stable ebuilds in %s' % os.path.dirname(path))
def _FindStableEBuilds(files): """Return a list of stable ebuilds from specified list of files. Args: files: List of files. """ workon_dir = False stable_ebuilds = [] unstable_ebuilds = [] for path in files: if path.endswith('.ebuild') and not os.path.islink(path): ebuild = _EBuild(path) if ebuild.is_workon: workon_dir = True if ebuild.is_stable: stable_ebuilds.append(ebuild) else: unstable_ebuilds.append(ebuild) # If we found a workon ebuild in this directory, apply some sanity checks. if workon_dir: if len(unstable_ebuilds) > 1: Die('Found multiple unstable ebuilds in %s' % root) if len(stable_ebuilds) > 1: stable_ebuilds = [_BestEBuild(stable_ebuilds)] # Print a warning if multiple stable ebuilds are found in the same # directory. Storing multiple stable ebuilds is error-prone because # the older ebuilds will not get rev'd. # # We make a special exception for x11-drivers/xf86-video-msm for legacy # reasons. if stable_ebuilds[0].package != 'x11-drivers/xf86-video-msm': Warning('Found multiple stable ebuilds in %s' % root) if not unstable_ebuilds: Die('Missing 9999 ebuild in %s' % root) if not stable_ebuilds: Die('Missing stable ebuild in %s' % root) if stable_ebuilds: return stable_ebuilds[0] else: return None
Die('Missing 9999 ebuild in %s' % root)
Die('Missing 9999 ebuild in %s' % os.path.dirname(path))
def _FindStableEBuilds(files): """Return a list of stable ebuilds from specified list of files. Args: files: List of files. """ workon_dir = False stable_ebuilds = [] unstable_ebuilds = [] for path in files: if path.endswith('.ebuild') and not os.path.islink(path): ebuild = _EBuild(path) if ebuild.is_workon: workon_dir = True if ebuild.is_stable: stable_ebuilds.append(ebuild) else: unstable_ebuilds.append(ebuild) # If we found a workon ebuild in this directory, apply some sanity checks. if workon_dir: if len(unstable_ebuilds) > 1: Die('Found multiple unstable ebuilds in %s' % root) if len(stable_ebuilds) > 1: stable_ebuilds = [_BestEBuild(stable_ebuilds)] # Print a warning if multiple stable ebuilds are found in the same # directory. Storing multiple stable ebuilds is error-prone because # the older ebuilds will not get rev'd. # # We make a special exception for x11-drivers/xf86-video-msm for legacy # reasons. if stable_ebuilds[0].package != 'x11-drivers/xf86-video-msm': Warning('Found multiple stable ebuilds in %s' % root) if not unstable_ebuilds: Die('Missing 9999 ebuild in %s' % root) if not stable_ebuilds: Die('Missing stable ebuild in %s' % root) if stable_ebuilds: return stable_ebuilds[0] else: return None
Die('Missing stable ebuild in %s' % root)
Die('Missing stable ebuild in %s' % os.path.dirname(path))
def _FindStableEBuilds(files): """Return a list of stable ebuilds from specified list of files. Args: files: List of files. """ workon_dir = False stable_ebuilds = [] unstable_ebuilds = [] for path in files: if path.endswith('.ebuild') and not os.path.islink(path): ebuild = _EBuild(path) if ebuild.is_workon: workon_dir = True if ebuild.is_stable: stable_ebuilds.append(ebuild) else: unstable_ebuilds.append(ebuild) # If we found a workon ebuild in this directory, apply some sanity checks. if workon_dir: if len(unstable_ebuilds) > 1: Die('Found multiple unstable ebuilds in %s' % root) if len(stable_ebuilds) > 1: stable_ebuilds = [_BestEBuild(stable_ebuilds)] # Print a warning if multiple stable ebuilds are found in the same # directory. Storing multiple stable ebuilds is error-prone because # the older ebuilds will not get rev'd. # # We make a special exception for x11-drivers/xf86-video-msm for legacy # reasons. if stable_ebuilds[0].package != 'x11-drivers/xf86-video-msm': Warning('Found multiple stable ebuilds in %s' % root) if not unstable_ebuilds: Die('Missing 9999 ebuild in %s' % root) if not stable_ebuilds: Die('Missing stable ebuild in %s' % root) if stable_ebuilds: return stable_ebuilds[0] else: return None
for root_dir, dirs, files in os.walk(overlay):
for package_dir, dirs, files in os.walk(overlay):
def _BuildEBuildDictionary(overlays, all, packages): """Build a dictionary of the ebuilds in the specified overlays. overlays: A map which maps overlay directories to arrays of stable EBuilds inside said directories. all: Whether to include all ebuilds in the specified directories. If true, then we gather all packages in the directories regardless of whether they are in our set of packages. packages: A set of the packages we want to gather. """ for overlay in overlays: for root_dir, dirs, files in os.walk(overlay): # Add stable ebuilds to overlays[overlay]. paths = [os.path.join(root_dir, path) for path in files] ebuild = _FindStableEBuilds(paths) # If the --all option isn't used, we only want to update packages that # are in packages. if ebuild and (all or ebuild.package in packages): overlays[overlay].append(ebuild)
paths = [os.path.join(root_dir, path) for path in files]
paths = [os.path.join(package_dir, path) for path in files]
def _BuildEBuildDictionary(overlays, all, packages): """Build a dictionary of the ebuilds in the specified overlays. overlays: A map which maps overlay directories to arrays of stable EBuilds inside said directories. all: Whether to include all ebuilds in the specified directories. If true, then we gather all packages in the directories regardless of whether they are in our set of packages. packages: A set of the packages we want to gather. """ for overlay in overlays: for root_dir, dirs, files in os.walk(overlay): # Add stable ebuilds to overlays[overlay]. paths = [os.path.join(root_dir, path) for path in files] ebuild = _FindStableEBuilds(paths) # If the --all option isn't used, we only want to update packages that # are in packages. if ebuild and (all or ebuild.package in packages): overlays[overlay].append(ebuild)
def RevEBuild(self, commit_id="", redirect_file=None):
def RevEBuild(self, commit_id='', redirect_file=None):
def RevEBuild(self, commit_id="", redirect_file=None): """Revs an ebuild given the git commit id.
redirect_file.write(line.replace("~", ""))
redirect_file.write(line.replace('~', ''))
def RevEBuild(self, commit_id="", redirect_file=None): """Revs an ebuild given the git commit id.
if 0 == RunCommand(diff_cmd, exit_code=True, print_cmd=gflags.FLAGS.verbose):
if 0 == RunCommand(diff_cmd, exit_code=True, redirect_stdout=True, redirect_stderr=True, print_cmd=gflags.FLAGS.verbose):
def RevEBuild(self, commit_id="", redirect_file=None): """Revs an ebuild given the git commit id.
all = gflags.FLAGS.all
def main(argv): try: argv = gflags.FLAGS(argv) if len(argv) != 2: _PrintUsageAndDie('Must specify a valid command') else: command = argv[1] except gflags.FlagsError, e : _PrintUsageAndDie(str(e)) package_list = gflags.FLAGS.packages.split() _CheckSaneArguments(package_list, command) overlays = { '%s/private-overlays/chromeos-overlay' % gflags.FLAGS.srcroot: [], '%s/third_party/chromiumos-overlay' % gflags.FLAGS.srcroot: [] } all = gflags.FLAGS.all if command == 'commit': _BuildEBuildDictionary(overlays, all, package_list) for overlay, ebuilds in overlays.items(): if not os.path.exists(overlay): continue os.chdir(overlay) if command == 'clean': _Clean() elif command == 'push': _PushChange() elif command == 'commit' and ebuilds: for ebuild in ebuilds: try: _Print('Working on %s' % ebuild.package) worker = EBuildStableMarker(ebuild) commit_id = ebuild.GetCommitId() if worker.RevEBuild(commit_id): if not _CheckOnStabilizingBranch(): work_branch = _GitBranch(_STABLE_BRANCH_NAME) work_branch.CreateBranch() if not work_branch.Exists(): Die('Unable to create stabilizing branch in %s' % overlay) message = _GIT_COMMIT_MESSAGE % (ebuild.package, commit_id) worker.CommitChange(message) except (OSError, IOError): Warning('Cannot rev %s\n' % ebuild.package, 'Note you will have to go into %s ' 'and reset the git repo yourself.' % overlay) raise
_BuildEBuildDictionary(overlays, all, package_list)
_BuildEBuildDictionary(overlays, gflags.FLAGS.all, package_list)
def main(argv): try: argv = gflags.FLAGS(argv) if len(argv) != 2: _PrintUsageAndDie('Must specify a valid command') else: command = argv[1] except gflags.FlagsError, e : _PrintUsageAndDie(str(e)) package_list = gflags.FLAGS.packages.split() _CheckSaneArguments(package_list, command) overlays = { '%s/private-overlays/chromeos-overlay' % gflags.FLAGS.srcroot: [], '%s/third_party/chromiumos-overlay' % gflags.FLAGS.srcroot: [] } all = gflags.FLAGS.all if command == 'commit': _BuildEBuildDictionary(overlays, all, package_list) for overlay, ebuilds in overlays.items(): if not os.path.exists(overlay): continue os.chdir(overlay) if command == 'clean': _Clean() elif command == 'push': _PushChange() elif command == 'commit' and ebuilds: for ebuild in ebuilds: try: _Print('Working on %s' % ebuild.package) worker = EBuildStableMarker(ebuild) commit_id = ebuild.GetCommitId() if worker.RevEBuild(commit_id): if not _CheckOnStabilizingBranch(): work_branch = _GitBranch(_STABLE_BRANCH_NAME) work_branch.CreateBranch() if not work_branch.Exists(): Die('Unable to create stabilizing branch in %s' % overlay) message = _GIT_COMMIT_MESSAGE % (ebuild.package, commit_id) worker.CommitChange(message) except (OSError, IOError): Warning('Cannot rev %s\n' % ebuild.package, 'Note you will have to go into %s ' 'and reset the git repo yourself.' % overlay) raise
if not _CheckOnStabilizingBranch(): work_branch = _GitBranch(_STABLE_BRANCH_NAME) work_branch.CreateBranch() if not work_branch.Exists(): Die('Unable to create stabilizing branch in %s' % overlay)
def main(argv): try: argv = gflags.FLAGS(argv) if len(argv) != 2: _PrintUsageAndDie('Must specify a valid command') else: command = argv[1] except gflags.FlagsError, e : _PrintUsageAndDie(str(e)) package_list = gflags.FLAGS.packages.split() _CheckSaneArguments(package_list, command) overlays = { '%s/private-overlays/chromeos-overlay' % gflags.FLAGS.srcroot: [], '%s/third_party/chromiumos-overlay' % gflags.FLAGS.srcroot: [] } all = gflags.FLAGS.all if command == 'commit': _BuildEBuildDictionary(overlays, all, package_list) for overlay, ebuilds in overlays.items(): if not os.path.exists(overlay): continue os.chdir(overlay) if command == 'clean': _Clean() elif command == 'push': _PushChange() elif command == 'commit' and ebuilds: for ebuild in ebuilds: try: _Print('Working on %s' % ebuild.package) worker = EBuildStableMarker(ebuild) commit_id = ebuild.GetCommitId() if worker.RevEBuild(commit_id): if not _CheckOnStabilizingBranch(): work_branch = _GitBranch(_STABLE_BRANCH_NAME) work_branch.CreateBranch() if not work_branch.Exists(): Die('Unable to create stabilizing branch in %s' % overlay) message = _GIT_COMMIT_MESSAGE % (ebuild.package, commit_id) worker.CommitChange(message) except (OSError, IOError): Warning('Cannot rev %s\n' % ebuild.package, 'Note you will have to go into %s ' 'and reset the git repo yourself.' % overlay) raise
for package in repo_dictionary[repo_name]: revisions[package] = revision_tuple[1]
if repo_dictionary.has_key(repo_name): for package in repo_dictionary[repo_name]: revisions[package] = revision_tuple[1]
def _ParseRevisionString(revision_string, repo_dictionary): """Parses the given revision_string into a revision dictionary. Returns a list of tuples that contain [portage_package_name, commit_id] to update. Keyword arguments: revision_string -- revision_string with format 'repo1.git@commit_1 repo2.git@commit2 ...'. repo_dictionary -- dictionary with git repository names as keys (w/out git) to portage package names. """ # Using a dictionary removes duplicates. revisions = {} for revision in revision_string.split(): # Format 'package@commit-id'. revision_tuple = revision.split('@') if len(revision_tuple) != 2: print >> sys.stderr, 'Incorrectly formatted revision %s' % revision repo_name = revision_tuple[0].replace('.git', '') # May be many corresponding packages to a given git repo e.g. kernel) for package in repo_dictionary[repo_name]: revisions[package] = revision_tuple[1] return revisions.items()
def __init__(self, tests):
def __init__(self, tests, results_dir_root=None): """Constructs and initializes the test runner class. Args: tests: A list of test names (see run_remote_tests.sh). results_dir_root: The results directory root. If provided, the results directory root for each test will be created under it with the SSH port appended to the test name. """
def __init__(self, tests): self._tests = tests
Arguments:
Args:
def _WaitForCompletion(self, spawned_tests): """Waits for tests to complete and returns a list of failed tests.
runner = ParallelTestRunner(args)
runner = ParallelTestRunner(args, options.results_dir_root)
def main(): usage = 'Usage: %prog [options] tests...' parser = optparse.OptionParser(usage=usage) (options, args) = parser.parse_args() if not args: parser.print_help() Die('no tests provided') runner = ParallelTestRunner(args) runner.Run()
cros_mark_as_stable._SimpleRunCommand('git remote update')
cros_mark_as_stable._SimpleRunCommand('repo sync .')
def testPushChange(self): git_log = 'Marking test_one as stable\nMarking test_two as stable\n' fake_description = 'Marking set of ebuilds as stable\n\n%s' % git_log self.mox.StubOutWithMock(cros_mark_as_stable, '_CheckOnStabilizingBranch') self.mox.StubOutWithMock(cros_mark_as_stable.GitBranch, 'CreateBranch') self.mox.StubOutWithMock(cros_mark_as_stable.GitBranch, 'Exists')
url_suffix = '%s/%s' % (_GS_HOST_PATH, version)
url_suffix = '%s/%s/' % (_GS_HOST_PATH, version)
def UploadPrebuilt(build_path, bucket, version, board=None, git_sync=False): """Upload Host prebuilt files to Google Storage space. Args: build_path: The path to the root of the chroot. bucket: The Google Storage bucket to upload to. board: The board to upload to Google Storage, if this is None upload host packages. git_sync: If set, update make.conf of target to reference the latest prebuilt packages genereated here. """ if not board: # We are uploading host packages # TODO(scottz): eventually add support for different host_targets package_path = os.path.join(build_path, _HOST_PACKAGES_PATH) gs_path = os.path.join(bucket, _GS_HOST_PATH, version) strip_pattern = package_path package_string = _HOST_TARGET git_file = os.path.join(build_path, _PREBUILT_MAKE_CONF[_HOST_TARGET]) url_suffix = '%s/%s' % (_GS_HOST_PATH, version) else: board_path = os.path.join(build_path, _BOARD_PATH % {'board': board}) package_path = os.path.join(board_path, 'packages') package_string = board strip_pattern = board_path remote_board_path = _GS_BOARD_PATH % {'board': board, 'version': version} gs_path = os.path.join(bucket, remote_board_path) git_file = os.path.join(build_path, DetermineMakeConfFile(board)) url_suffix = remote_board_path upload_files = GenerateUploadDict(package_path, gs_path, strip_pattern) print 'Uploading %s' % package_string failed_uploads = RemoteUpload(upload_files) if len(failed_uploads) > 1 or (None not in failed_uploads): error_msg = ['%s -> %s\n' % args for args in failed_uploads] raise UploadFailed('Error uploading:\n%s' % error_msg) if git_sync: url_value = '%s/%s' % (_BINHOST_BASE_URL, url_suffix) RevGitFile(git_file, url_value)
remote_path = urlparse.urljoin(base_remote_path, suffix)
remote_path = '%s/%s' % (base_remote_path.rstrip('/'), suffix)
def GenerateUploadDict(base_local_path, base_remote_path, pkgs): """Build a dictionary of local remote file key pairs to upload. Args: base_local_path: The base path to the files on the local hard drive. remote_path: The base path to the remote paths. pkgs: The packages to upload. Returns: Returns a dictionary of local_path/remote_path pairs """ upload_files = {} for pkg in pkgs: suffix = pkg['CPV'] + '.tbz2' local_path = os.path.join(base_local_path, suffix) assert os.path.exists(local_path) remote_path = urlparse.urljoin(base_remote_path, suffix) upload_files[local_path] = remote_path return upload_files
remote_location = urlparse.urljoin(upload_location, url_suffix)
remote_location = '%s/%s' % (upload_location.rstrip('/'), url_suffix)
def UploadPrebuilt(build_path, upload_location, version, binhost_base_url, board=None, git_sync=False, git_sync_retries=5, key='PORTAGE_BINHOST', pkg_indexes=[], sync_binhost_conf=False): """Upload Host prebuilt files to Google Storage space. Args: build_path: The path to the root of the chroot. upload_location: The upload location. board: The board to upload to Google Storage. If this is None, upload host packages. git_sync: If set, update make.conf of target to reference the latest prebuilt packages generated here. git_sync_retries: How many times to retry pushing when updating git files. This helps avoid failures when multiple bots are modifying the same Repo. default: 5 key: The variable key to update in the git file. (Default: PORTAGE_BINHOST) pkg_indexes: Old uploaded prebuilts to compare against. Instead of uploading duplicate files, we just link to the old files. sync_binhost_conf: If set, update binhost config file in chromiumos-overlay for the current board or host. """ if not board: # We are uploading host packages # TODO(scottz): eventually add support for different host_targets package_path = os.path.join(build_path, _HOST_PACKAGES_PATH) url_suffix = _REL_HOST_PATH % {'version': version, 'target': _HOST_TARGET} package_string = _HOST_TARGET git_file = os.path.join(build_path, _PREBUILT_MAKE_CONF[_HOST_TARGET]) binhost_conf = os.path.join(build_path, _BINHOST_CONF_DIR, 'host', '%s.conf' % _HOST_TARGET) else: board_path = os.path.join(build_path, _BOARD_PATH % {'board': board}) package_path = os.path.join(board_path, 'packages') package_string = board url_suffix = _REL_BOARD_PATH % {'board': board, 'version': version} git_file = os.path.join(build_path, DetermineMakeConfFile(board)) binhost_conf = os.path.join(build_path, _BINHOST_CONF_DIR, 'target', '%s.conf' % board) remote_location = urlparse.urljoin(upload_location, url_suffix) # Process Packages file, removing duplicates and filtered packages. pkg_index = GrabLocalPackageIndex(package_path) pkg_index.SetUploadLocation(binhost_base_url, url_suffix) pkg_index.RemoveFilteredPackages(lambda pkg: ShouldFilterPackage(pkg)) uploads = pkg_index.ResolveDuplicateUploads(pkg_indexes) # Write Packages file. tmp_packages_file = pkg_index.WriteToNamedTemporaryFile() if upload_location.startswith('gs://'): # Build list of files to upload. upload_files = GenerateUploadDict(package_path, remote_location, uploads) remote_file = urlparse.urljoin(remote_location, 'Packages') upload_files[tmp_packages_file.name] = remote_file print 'Uploading %s' % package_string failed_uploads = RemoteUpload(upload_files) if len(failed_uploads) > 1 or (None not in failed_uploads): error_msg = ['%s -> %s\n' % args for args in failed_uploads] raise UploadFailed('Error uploading:\n%s' % error_msg) else: pkgs = ' '.join(p['CPV'] + '.tbz2' for p in uploads) ssh_server, remote_path = remote_location.split(':', 1) d = { 'pkg_index': tmp_packages_file.name, 'pkgs': pkgs, 'remote_path': remote_path, 'remote_location': remote_location, 'ssh_server': ssh_server } cmds = ['ssh %(ssh_server)s mkdir -p %(remote_path)s' % d, 'rsync -av %(pkg_index)s %(remote_location)s/Packages' % d] if pkgs: cmds.append('rsync -Rav %(pkgs)s %(remote_location)s/' % d) for cmd in cmds: if not _RetryRun(cmd, shell=True, cwd=package_path): raise UploadFailed('Could not run %s' % cmd) url_value = '%s/%s/' % (binhost_base_url, url_suffix) if git_sync: RevGitFile(git_file, url_value, retries=git_sync_retries, key=key) if sync_binhost_conf: UpdateBinhostConfFile(binhost_conf, key, url_value)
remote_file = urlparse.urljoin(remote_location, 'Packages')
remote_file = '%s/Packages' % remote_location.rstrip('/')
def UploadPrebuilt(build_path, upload_location, version, binhost_base_url, board=None, git_sync=False, git_sync_retries=5, key='PORTAGE_BINHOST', pkg_indexes=[], sync_binhost_conf=False): """Upload Host prebuilt files to Google Storage space. Args: build_path: The path to the root of the chroot. upload_location: The upload location. board: The board to upload to Google Storage. If this is None, upload host packages. git_sync: If set, update make.conf of target to reference the latest prebuilt packages generated here. git_sync_retries: How many times to retry pushing when updating git files. This helps avoid failures when multiple bots are modifying the same Repo. default: 5 key: The variable key to update in the git file. (Default: PORTAGE_BINHOST) pkg_indexes: Old uploaded prebuilts to compare against. Instead of uploading duplicate files, we just link to the old files. sync_binhost_conf: If set, update binhost config file in chromiumos-overlay for the current board or host. """ if not board: # We are uploading host packages # TODO(scottz): eventually add support for different host_targets package_path = os.path.join(build_path, _HOST_PACKAGES_PATH) url_suffix = _REL_HOST_PATH % {'version': version, 'target': _HOST_TARGET} package_string = _HOST_TARGET git_file = os.path.join(build_path, _PREBUILT_MAKE_CONF[_HOST_TARGET]) binhost_conf = os.path.join(build_path, _BINHOST_CONF_DIR, 'host', '%s.conf' % _HOST_TARGET) else: board_path = os.path.join(build_path, _BOARD_PATH % {'board': board}) package_path = os.path.join(board_path, 'packages') package_string = board url_suffix = _REL_BOARD_PATH % {'board': board, 'version': version} git_file = os.path.join(build_path, DetermineMakeConfFile(board)) binhost_conf = os.path.join(build_path, _BINHOST_CONF_DIR, 'target', '%s.conf' % board) remote_location = urlparse.urljoin(upload_location, url_suffix) # Process Packages file, removing duplicates and filtered packages. pkg_index = GrabLocalPackageIndex(package_path) pkg_index.SetUploadLocation(binhost_base_url, url_suffix) pkg_index.RemoveFilteredPackages(lambda pkg: ShouldFilterPackage(pkg)) uploads = pkg_index.ResolveDuplicateUploads(pkg_indexes) # Write Packages file. tmp_packages_file = pkg_index.WriteToNamedTemporaryFile() if upload_location.startswith('gs://'): # Build list of files to upload. upload_files = GenerateUploadDict(package_path, remote_location, uploads) remote_file = urlparse.urljoin(remote_location, 'Packages') upload_files[tmp_packages_file.name] = remote_file print 'Uploading %s' % package_string failed_uploads = RemoteUpload(upload_files) if len(failed_uploads) > 1 or (None not in failed_uploads): error_msg = ['%s -> %s\n' % args for args in failed_uploads] raise UploadFailed('Error uploading:\n%s' % error_msg) else: pkgs = ' '.join(p['CPV'] + '.tbz2' for p in uploads) ssh_server, remote_path = remote_location.split(':', 1) d = { 'pkg_index': tmp_packages_file.name, 'pkgs': pkgs, 'remote_path': remote_path, 'remote_location': remote_location, 'ssh_server': ssh_server } cmds = ['ssh %(ssh_server)s mkdir -p %(remote_path)s' % d, 'rsync -av %(pkg_index)s %(remote_location)s/Packages' % d] if pkgs: cmds.append('rsync -Rav %(pkgs)s %(remote_location)s/' % d) for cmd in cmds: if not _RetryRun(cmd, shell=True, cwd=package_path): raise UploadFailed('Could not run %s' % cmd) url_value = '%s/%s/' % (binhost_base_url, url_suffix) if git_sync: RevGitFile(git_file, url_value, retries=git_sync_retries, key=key) if sync_binhost_conf: UpdateBinhostConfFile(binhost_conf, key, url_value)
if options.debug:
if not options.debug:
def main(): # Parse options usage = "usage: %prog [options] cbuildbot_config" parser = optparse.OptionParser(usage=usage) parser.add_option('-r', '--buildroot', help='root directory where build occurs', default=".") parser.add_option('-n', '--buildnumber', help='build number', type='int', default=0) parser.add_option('-f', '--revisionfile', help='file where new revisions are stored') parser.add_option('--clobber', action='store_true', dest='clobber', default=False, help='Clobbers an old checkout before syncing') parser.add_option('--debug', action='store_true', dest='debug', default=False, help='Override some options to run as a developer.') (options, args) = parser.parse_args() buildroot = options.buildroot revisionfile = options.revisionfile # Passed option to clobber. if options.clobber: RunCommand(['sudo', 'rm', '-rf', buildroot]) if len(args) >= 1: buildconfig = _GetConfig(args[-1]) else: Warning('Missing configuration description') parser.print_usage() sys.exit(1) try: if not os.path.isdir(buildroot): _FullCheckout(buildroot) else: _PreFlightRinse(buildroot) _IncrementalCheckout(buildroot) chroot_path = os.path.join(buildroot, 'chroot') if not os.path.isdir(chroot_path): _MakeChroot(buildroot) boardpath = os.path.join(chroot_path, 'build', buildconfig['board']) if not os.path.isdir(boardpath): _SetupBoard(buildroot, board=buildconfig['board']) if buildconfig['uprev']: _UprevPackages(buildroot, revisionfile, board=buildconfig['board']) _EnableLocalAccount(buildroot) _Build(buildroot) if buildconfig['unittests']: _RunUnitTests(buildroot) _BuildImage(buildroot) if buildconfig['smoke_bvt']: _BuildVMImageForTesting(buildroot) _RunSmokeSuite(buildroot) if buildconfig['uprev']: # Don't push changes for developers. if options.debug: if buildconfig['master']: # Master bot needs to check if the other slaves completed. if cbuildbot_comm.HaveSlavesCompleted(config): _UprevPush(buildroot) else: # At least one of the slaves failed or we timed out. _UprevCleanup(buildroot) Die('CBUILDBOT - One of the slaves has failed!!!') else: # Publish my status to the master if its expecting it. if buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE) _UprevCleanup(buildroot) except: # Send failure to master bot. if not buildconfig['master'] and buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED) raise
m_file.write('CROS_WORKON_COMMIT="my_id"')
m_file.write('CROS_WORKON_COMMIT="my_id"\n')
def testRevEBuild(self): self.mox.StubOutWithMock(cros_mark_as_stable.fileinput, 'input') self.mox.StubOutWithMock(cros_mark_as_stable.shutil, 'copyfile') m_file = self.mox.CreateMock(file)
make_stable=False)
make_stable=mark_stable)
def MarkChromeEBuildAsStable(stable_candidate, unstable_ebuild, chrome_rev, chrome_version, commit, overlay_dir, sticky_ebuild): """Uprevs the chrome ebuild specified by chrome_rev. This is the main function that uprevs the chrome_rev from a stable candidate to its new version. Args: stable_candidate: ebuild that corresponds to the stable ebuild we are revving from. If None, builds the a new ebuild given the version and logic for chrome_rev type with revision set to 1. unstable_ebuild: ebuild corresponding to the unstable ebuild for chrome. chrome_rev: one of CHROME_REV TIP_OF_TRUNK - Requires commit value. Revs the ebuild for the TOT version and uses the portage suffix of _alpha. LATEST_RELEASE - This uses the portage suffix of _rc as they are release candidates for the next sticky version. STICKY - Revs the sticky version. chrome_version: The \d.\d.\d.\d version of Chrome. commit: Used with TIP_OF_TRUNK. The svn revision of chrome. overlay_dir: Path to the chromeos-chrome package dir. sticky_ebuild: EBuild class for the sticky ebuild. Returns: Full portage version atom (including rc's, etc) that was revved. """ base_path = os.path.join(overlay_dir, 'chromeos-chrome-%s' % chrome_version) # Case where we have the last stable candidate with same version just rev. if stable_candidate and stable_candidate.chrome_version == chrome_version: new_ebuild_path = '%s-r%d.ebuild' % ( stable_candidate.ebuild_path_no_revision, stable_candidate.current_revision + 1) else: if chrome_rev == TIP_OF_TRUNK: portage_suffix = '_alpha' else: portage_suffix = '_rc' new_ebuild_path = base_path + ('%s-r1.ebuild' % portage_suffix) cros_mark_as_stable.EBuildStableMarker.MarkAsStable( unstable_ebuild.ebuild_path, new_ebuild_path, 'CROS_SVN_COMMIT', commit, make_stable=False) new_ebuild = ChromeEBuild(new_ebuild_path) if stable_candidate and ( stable_candidate.chrome_version == new_ebuild.chrome_version): if 0 == RunCommand(['diff', '-Bu', stable_candidate.ebuild_path, new_ebuild_path], redirect_stderr=True, redirect_stdout=True, exit_code=True): Info('Previous ebuild with same version found and no 9999 changes found.' ' Nothing to do.') os.unlink(new_ebuild_path) return None RunCommand(['git', 'add', new_ebuild_path]) if stable_candidate and stable_candidate != sticky_ebuild: RunCommand(['git', 'rm', stable_candidate.ebuild_path]) cros_mark_as_stable.EBuildStableMarker.CommitChange( _GIT_COMMIT_MESSAGE % {'chrome_rev': chrome_rev, 'chrome_version': chrome_version}) new_ebuild = ChromeEBuild(new_ebuild_path) return '%s-%s' % (new_ebuild.package, new_ebuild.version)
unittest.main()
main(sys.argv)
def testTotMarkAsStable(self): """Tests to see if we can mark chrome for tot.""" self._CommonMarkAsStableTest(cros_mark_chrome_as_stable.TIP_OF_TRUNK, self.tot_new_version, self.tot_stable, self.tot_new, 'tot')
self.mox.StubOutWithMock(cbuildbot, '_CreateRepoDictionary') self.mox.StubOutWithMock(cbuildbot, '_ParseRevisionString') self.mox.StubOutWithMock(cbuildbot, '_UprevFromRevisionList')
def testUprevPackages(self): self.mox.StubOutWithMock(cbuildbot, '_CreateRepoDictionary') self.mox.StubOutWithMock(cbuildbot, '_ParseRevisionString') self.mox.StubOutWithMock(cbuildbot, '_UprevFromRevisionList') self.mox.StubOutWithMock(__builtin__, 'open')
cbuildbot._CreateRepoDictionary(self._buildroot, self._test_board).AndReturn(self._test_dict) cbuildbot._ParseRevisionString(self._test_string, self._test_dict).AndReturn( self._test_parsed_string_array) cbuildbot._UprevFromRevisionList(self._buildroot, self._test_parsed_string_array)
cbuildbot.RunCommand(['./cros_mark_all_as_stable', '--tracking_branch="cros/master"'], cwd='%s/src/scripts' % self._buildroot, enter_chroot=True) self.mox.ReplayAll() cbuildbot._UprevPackages(self._buildroot, self._revision_file, self._test_board) self.mox.VerifyAll() def testUprevAllPackages(self): """Test if we get None in revisions.pfq indicating Full Builds.""" self.mox.StubOutWithMock(__builtin__, 'open') m_file = self.mox.CreateMock(file) __builtin__.open(self._revision_file).AndReturn(m_file) m_file.read().AndReturn('None') m_file.close() cbuildbot.RunCommand(['./cros_mark_all_as_stable', '--tracking_branch="cros/master"'], cwd='%s/src/scripts' % self._buildroot, enter_chroot=True)
def testUprevPackages(self): self.mox.StubOutWithMock(cbuildbot, '_CreateRepoDictionary') self.mox.StubOutWithMock(cbuildbot, '_ParseRevisionString') self.mox.StubOutWithMock(cbuildbot, '_UprevFromRevisionList') self.mox.StubOutWithMock(__builtin__, 'open')
self.vm_image_path = ('%s/chromiumos_qemu_image.bin' % os.path.dirname( base_image_path))
self.vm_image_path = '%s/chromiumos_qemu_image.bin' % os.path.dirname( base_image_path)
def PrepareBase(self): """Creates an update-able VM based on base image."""
Info('Qemu image not found, creating one.')
Info('Qemu image %s not found, creating one.' % self.vm_image_path)
def PrepareBase(self): """Creates an update-able VM based on base image."""
Info('Using existing VM image')
Info('Using existing VM image %s' % self.vm_image_path)
def PrepareBase(self): """Creates an update-able VM based on base image."""
if options.quick_test: _VERIFY_SUITE = 'build_RootFilesystemSize'
def VerifyImage(self, percent_required_to_pass): """Runs vm smoke suite to verify image.""" # image_to_live already verifies lsb-release matching. This is just # for additional steps. output = RunCommand(['%s/cros_run_vm_test' % self.crosutilsbin, '--image_path=%s' % self.vm_image_path, '--snapshot', '--persist', vm_graphics_flag, '--kvm_pid=%s' % _KVM_PID_FILE, '--test_case=%s' % _VERIFY_SUITE, ], error_ok=True, enter_chroot=False, redirect_stdout=True) return self.CommonVerifyImage(self, output, percent_required_to_pass)
def setUp(self): self.utf8 = "<?xml version='1.0' encoding='UTF-8'?>\n" self.tiny_manifest = '<manifest>\n</manifest>'
def setUp(self): self.utf8 = "<?xml version='1.0' encoding='UTF-8'?>\n" self.tiny_manifest = '<manifest>\n</manifest>'
def testSimpleParse(self): ptree = loman.LocalManifest() ptree.Parse()
class LocalManifest: """Class which provides an abstraction for manipulating the local manifest."""
def testSimpleParse(self): ptree = loman.LocalManifest() ptree.Parse()
def testParse(self): ptree = loman.LocalManifest(self.tiny_manifest) ptree.Parse() self.assertEqual(ptree.ToString(), self.utf8 + self.tiny_manifest)
def __init__(self, text=None): self._text = text or '<manifest>\n</manifest>'
def testParse(self): ptree = loman.LocalManifest(self.tiny_manifest) ptree.Parse() self.assertEqual(ptree.ToString(), self.utf8 + self.tiny_manifest)
def testUTF8Parse(self): ptree = loman.LocalManifest(self.utf8 + self.tiny_manifest) ptree.Parse() self.assertEqual(ptree.ToString(), self.utf8 + self.tiny_manifest)
def Parse(self): """Parse the manifest.""" self._root = ElementTree.fromstring(self._text)
def testUTF8Parse(self): ptree = loman.LocalManifest(self.utf8 + self.tiny_manifest) ptree.Parse() self.assertEqual(ptree.ToString(), self.utf8 + self.tiny_manifest)
def testAddNew(self): ptree = loman.LocalManifest('<manifest>\n</manifest>') ptree.Parse() self.assertTrue(ptree.AddWorkonProject('foo', 'path/to/foo')) self.assertEqual( ptree.ToString(), self.utf8 + '<manifest>\n' '<project name="foo" path="path/to/foo" workon="True" />\n' '</manifest>')
def AddWorkonProject(self, name, path): """Add a new workon project if it is not already in the manifest.
def testAddNew(self): ptree = loman.LocalManifest('<manifest>\n</manifest>') ptree.Parse() self.assertTrue(ptree.AddWorkonProject('foo', 'path/to/foo')) self.assertEqual( ptree.ToString(), self.utf8 + '<manifest>\n' '<project name="foo" path="path/to/foo" workon="True" />\n' '</manifest>')
def testAddDup(self): ptree = loman.LocalManifest('<manifest>\n</manifest>') ptree.Parse() ptree.AddWorkonProject('foo', 'path/to/foo') self.assertTrue(not ptree.AddWorkonProject('foo', 'path/to/foo')) self.assertTrue(not ptree.AddWorkonProject('foo', 'path/foo')) self.assertTrue(not ptree.AddWorkonProject('foobar', 'path/to/foo'))
Returns: True on success. """
def testAddDup(self): ptree = loman.LocalManifest('<manifest>\n</manifest>') ptree.Parse() ptree.AddWorkonProject('foo', 'path/to/foo') self.assertTrue(not ptree.AddWorkonProject('foo', 'path/to/foo')) self.assertTrue(not ptree.AddWorkonProject('foo', 'path/foo')) self.assertTrue(not ptree.AddWorkonProject('foobar', 'path/to/foo'))
class MainTest(unittest.TestCase):
for project in self._root.findall('project'): if project.attrib['path'] == path or project.attrib['name'] == name: return False self._AddProject(name, path, workon='True') return True
def testAddDup(self): ptree = loman.LocalManifest('<manifest>\n</manifest>') ptree.Parse() ptree.AddWorkonProject('foo', 'path/to/foo') self.assertTrue(not ptree.AddWorkonProject('foo', 'path/to/foo')) self.assertTrue(not ptree.AddWorkonProject('foo', 'path/foo')) self.assertTrue(not ptree.AddWorkonProject('foobar', 'path/to/foo'))
def setUp(self): self.utf8 = "<?xml version='1.0' encoding='UTF-8'?>\n" self.tiny_manifest = '<manifest>\n</manifest>' self.stderr = sys.stderr sys.stderr = StringIO.StringIO()
def _AddProject(self, name, path, workon='False'): element = ElementTree.Element('project', name=name, path=path, workon=workon) element.tail = '\n' self._root.append(element)
def setUp(self): self.utf8 = "<?xml version='1.0' encoding='UTF-8'?>\n" self.tiny_manifest = '<manifest>\n</manifest>' self.stderr = sys.stderr sys.stderr = StringIO.StringIO()
def tearDown(self): sys.stderr = self.stderr
def ToString(self): return ElementTree.tostring(self._root, encoding='UTF-8')
def tearDown(self): sys.stderr = self.stderr
def testNotEnoughArgs(self): err_msg = 'Not enough arguments\n' self.assertRaises(SystemExit, loman.main, ['loman']) self.assertTrue(sys.stderr.getvalue().endswith(err_msg))
def testNotEnoughArgs(self): err_msg = 'Not enough arguments\n' self.assertRaises(SystemExit, loman.main, ['loman']) self.assertTrue(sys.stderr.getvalue().endswith(err_msg))
def testNotWorkon(self): err_msg = 'Adding of non-workon projects is currently unsupported.\n' self.assertRaises(SystemExit, loman.main, ['loman', 'add', 'foo', 'path']) self.assertTrue(sys.stderr.getvalue().endswith(err_msg))
def main(argv): usage = 'usage: %prog add [options] <name> <path>' parser = optparse.OptionParser(usage=usage) parser.add_option('-w', '--workon', action='store_true', dest='workon', default=False, help='Is this a workon package?') parser.add_option('-f', '--file', dest='manifest', help='Non-default manifest file to read.') (options, args) = parser.parse_args(argv[2:]) if len(args) < 2: parser.error('Not enough arguments') if argv[1] not in ['add']: parser.error('Unsupported command: %s.' % argv[1]) if not options.workon: parser.error('Adding of non-workon projects is currently unsupported.') (name, path) = (args[0], args[1])
def testNotWorkon(self): err_msg = 'Adding of non-workon projects is currently unsupported.\n' self.assertRaises(SystemExit, loman.main, ['loman', 'add', 'foo', 'path']) self.assertTrue(sys.stderr.getvalue().endswith(err_msg))
def testBadCommand(self): err_msg = 'Unsupported command: bad.\n' self.assertRaises(SystemExit, loman.main, ['loman', 'bad', 'foo', 'path']) self.assertTrue(sys.stderr.getvalue().endswith(err_msg)) def testSimpleAdd(self): temp = tempfile.NamedTemporaryFile('w') print >> temp, '<manifest>\n</manifest>' temp.flush() os.fsync(temp.fileno()) loman.main(['loman', 'add', '--workon', '-f', temp.name, 'foo', 'path/to/foo']) self.assertEqual( open(temp.name, 'r').read(), self.utf8 + '<manifest>\n' '<project name="foo" path="path/to/foo" workon="True" />\n' '</manifest>\n') def testAddDup(self): temp = tempfile.NamedTemporaryFile('w') print >> temp, '<manifest>\n</manifest>' temp.flush() os.fsync(temp.fileno()) loman.main(['loman', 'add', '--workon', '-f', temp.name, 'foo', 'path/to/foo']) self.assertRaises(SystemExit, loman.main, ['loman', 'add', '--workon', '-f', temp.name, 'foo', 'path/to/foo'])
repo_dir = _FindRepoDir() if not repo_dir: Die("Unable to find repo dir.") local_manifest = options.manifest or \ os.path.join(_FindRepoDir(), 'local_manifest.xml') if os.path.isfile(local_manifest): ptree = LocalManifest(open(local_manifest).read()) else: ptree = LocalManifest() ptree.Parse() if not ptree.AddWorkonProject(name, path): Die('Path "%s" or name "%s" already exits in the manifest.' % (path, name)) try: print >> open(local_manifest, 'w'), ptree.ToString() except Exception, e: Die('Error writing to manifest: %s' % e)
def testBadCommand(self): err_msg = 'Unsupported command: bad.\n' self.assertRaises(SystemExit, loman.main, ['loman', 'bad', 'foo', 'path']) self.assertTrue(sys.stderr.getvalue().endswith(err_msg))
unittest.main()
main(sys.argv)
def testAddDup(self): temp = tempfile.NamedTemporaryFile('w') print >> temp, '<manifest>\n</manifest>' temp.flush() os.fsync(temp.fileno()) loman.main(['loman', 'add', '--workon', '-f', temp.name, 'foo', 'path/to/foo']) self.assertRaises(SystemExit, loman.main, ['loman', 'add', '--workon', '-f', temp.name, 'foo', 'path/to/foo'])
_UploadPrebuilts(buildroot, board, buildconfig['rev_overlays'], [new_binhost])
if not options.debug: _UploadPrebuilts(buildroot, board, buildconfig['rev_overlays'], [new_binhost])
def main(): # Parse options usage = "usage: %prog [options] cbuildbot_config" parser = optparse.OptionParser(usage=usage) parser.add_option('-r', '--buildroot', help='root directory where build occurs', default=".") parser.add_option('-n', '--buildnumber', help='build number', type='int', default=0) parser.add_option('--chrome_rev', default=None, type='string', dest='chrome_rev', help=('Chrome_rev of type [tot|latest_release|' 'sticky_release]')) parser.add_option('-f', '--revisionfile', help='file where new revisions are stored') parser.add_option('--clobber', action='store_true', dest='clobber', default=False, help='Clobbers an old checkout before syncing') parser.add_option('--debug', action='store_true', dest='debug', default=False, help='Override some options to run as a developer.') parser.add_option('--nosync', action='store_false', dest='sync', default=True, help="Don't sync before building.") parser.add_option('--notests', action='store_false', dest='tests', default=True, help='Override values from buildconfig and run no tests.') parser.add_option('-t', '--tracking-branch', dest='tracking_branch', default='cros/master', help='Run the buildbot on a branch') parser.add_option('-u', '--url', dest='url', default='http://git.chromium.org/git/manifest', help='Run the buildbot on internal manifest') parser.add_option('-g', '--gsutil', default='', help='Location of gsutil') parser.add_option('-c', '--gsutil_archive', default='', help='Datastore archive location') parser.add_option('-a', '--acl', default='private', help='ACL to set on GSD archives') (options, args) = parser.parse_args() buildroot = os.path.abspath(options.buildroot) revisionfile = options.revisionfile tracking_branch = options.tracking_branch chrome_atom_to_build = None if len(args) >= 1: buildconfig = _GetConfig(args[-1]) else: Warning('Missing configuration description') parser.print_usage() sys.exit(1) try: # Calculate list of overlay directories. rev_overlays = _ResolveOverlays(buildroot, buildconfig['rev_overlays']) push_overlays = _ResolveOverlays(buildroot, buildconfig['push_overlays']) # We cannot push to overlays that we don't rev. assert set(push_overlays).issubset(set(rev_overlays)) # Either has to be a master or not have any push overlays. assert buildconfig['master'] or not push_overlays board = buildconfig['board'] old_binhost = None _PreFlightRinse(buildroot, buildconfig['board'], tracking_branch, rev_overlays) chroot_path = os.path.join(buildroot, 'chroot') boardpath = os.path.join(chroot_path, 'build', board) if options.sync: if options.clobber or not os.path.isdir(buildroot): _FullCheckout(buildroot, tracking_branch, url=options.url) else: old_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST) _IncrementalCheckout(buildroot) new_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST) if old_binhost and old_binhost != new_binhost: RunCommand(['sudo', 'rm', '-rf', boardpath]) # Check that all overlays can be found. for path in rev_overlays: if not os.path.isdir(path): Die('Missing overlay: %s' % path) if not os.path.isdir(chroot_path): _MakeChroot(buildroot) if not os.path.isdir(boardpath): _SetupBoard(buildroot, board=buildconfig['board']) # Perform uprev. If chrome_uprev is set, rev Chrome ebuilds. if options.chrome_rev: chrome_atom_to_build = _MarkChromeAsStable(buildroot, tracking_branch, options.chrome_rev) elif buildconfig['uprev']: _UprevPackages(buildroot, tracking_branch, revisionfile, buildconfig['board'], rev_overlays) _EnableLocalAccount(buildroot) # Doesn't rebuild without acquiring more source. if options.sync: _Build(buildroot) if chrome_atom_to_build: _BuildChrome(buildroot, buildconfig['board'], chrome_atom_to_build) if buildconfig['unittests'] and options.tests: _RunUnitTests(buildroot) _BuildImage(buildroot) if buildconfig['smoke_bvt'] and options.tests: _BuildVMImageForTesting(buildroot) test_results_dir = '/tmp/run_remote_tests.%s' % options.buildnumber try: _RunSmokeSuite(buildroot, test_results_dir) finally: if not options.debug: archive_full_path = os.path.join(options.gsutil_archive, str(options.buildnumber)) _ArchiveTestResults(buildroot, buildconfig['board'], test_results_dir=test_results_dir, gsutil=options.gsutil, archive_dir=archive_full_path, acl=options.acl) if buildconfig['uprev']: # Don't push changes for developers. if buildconfig['master']: # Master bot needs to check if the other slaves completed. if cbuildbot_comm.HaveSlavesCompleted(config): _UploadPrebuilts(buildroot, board, buildconfig['rev_overlays'], [new_binhost]) _UprevPush(buildroot, tracking_branch, buildconfig['board'], push_overlays, options.debug) else: Die('CBUILDBOT - One of the slaves has failed!!!') else: # Publish my status to the master if its expecting it. if buildconfig['important'] and not options.debug: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE) except: # Send failure to master bot. if not buildconfig['master'] and buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED) raise
if options.sync: _Build(buildroot, emptytree)
_Build(buildroot, emptytree)
def main(): # Parse options usage = "usage: %prog [options] cbuildbot_config" parser = optparse.OptionParser(usage=usage) parser.add_option('-a', '--acl', default='private', help='ACL to set on GSD archives') parser.add_option('-r', '--buildroot', help='root directory where build occurs', default=".") parser.add_option('-n', '--buildnumber', help='build number', type='int', default=0) parser.add_option('--chrome_rev', default=None, type='string', dest='chrome_rev', help=('Chrome_rev of type [tot|latest_release|' 'sticky_release]')) parser.add_option('-g', '--gsutil', default='', help='Location of gsutil') parser.add_option('-c', '--gsutil_archive', default='', help='Datastore archive location') parser.add_option('--clobber', action='store_true', dest='clobber', default=False, help='Clobbers an old checkout before syncing') parser.add_option('--debug', action='store_true', dest='debug', default=False, help='Override some options to run as a developer.') parser.add_option('--noprebuilts', action='store_false', dest='prebuilts', default=True, help="Don't upload prebuilts.") parser.add_option('--nosync', action='store_false', dest='sync', default=True, help="Don't sync before building.") parser.add_option('--notests', action='store_false', dest='tests', default=True, help='Override values from buildconfig and run no tests.') parser.add_option('-f', '--revisionfile', help='file where new revisions are stored') parser.add_option('-t', '--tracking-branch', dest='tracking_branch', default='cros/master', help='Run the buildbot on a branch') parser.add_option('-u', '--url', dest='url', default='http://git.chromium.org/git/manifest', help='Run the buildbot on internal manifest') (options, args) = parser.parse_args() buildroot = os.path.abspath(options.buildroot) revisionfile = options.revisionfile tracking_branch = options.tracking_branch chrome_atom_to_build = None if len(args) >= 1: buildconfig = _GetConfig(args[-1]) else: Warning('Missing configuration description') parser.print_usage() sys.exit(1) try: # Calculate list of overlay directories. rev_overlays = _ResolveOverlays(buildroot, buildconfig['rev_overlays']) push_overlays = _ResolveOverlays(buildroot, buildconfig['push_overlays']) # We cannot push to overlays that we don't rev. assert set(push_overlays).issubset(set(rev_overlays)) # Either has to be a master or not have any push overlays. assert buildconfig['master'] or not push_overlays board = buildconfig['board'] old_binhost = None _PreFlightRinse(buildroot, buildconfig['board'], tracking_branch, rev_overlays) chroot_path = os.path.join(buildroot, 'chroot') boardpath = os.path.join(chroot_path, 'build', board) if options.sync: if options.clobber or not os.path.isdir(buildroot): _FullCheckout(buildroot, tracking_branch, url=options.url) else: old_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST) _IncrementalCheckout(buildroot) new_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST) emptytree = (old_binhost and old_binhost != new_binhost) # Check that all overlays can be found. for path in rev_overlays: if not os.path.isdir(path): Die('Missing overlay: %s' % path) if not os.path.isdir(chroot_path): _MakeChroot(buildroot) if not os.path.isdir(boardpath): _SetupBoard(buildroot, board=buildconfig['board']) # Perform uprev. If chrome_uprev is set, rev Chrome ebuilds. if options.chrome_rev: chrome_atom_to_build = _MarkChromeAsStable(buildroot, tracking_branch, options.chrome_rev, board) # If we found nothing to rev, we're done here. if not chrome_atom_to_build: return elif buildconfig['uprev']: _UprevPackages(buildroot, tracking_branch, revisionfile, buildconfig['board'], rev_overlays) _EnableLocalAccount(buildroot) # Doesn't rebuild without acquiring more source. if options.sync: _Build(buildroot, emptytree) if buildconfig['unittests'] and options.tests: _RunUnitTests(buildroot) _BuildImage(buildroot) if buildconfig['smoke_bvt'] and options.tests: _BuildVMImageForTesting(buildroot) test_results_dir = '/tmp/run_remote_tests.%s' % options.buildnumber try: _RunSmokeSuite(buildroot, test_results_dir) finally: if not options.debug: archive_full_path = os.path.join(options.gsutil_archive, str(options.buildnumber)) _ArchiveTestResults(buildroot, buildconfig['board'], test_results_dir=test_results_dir, gsutil=options.gsutil, archive_dir=archive_full_path, acl=options.acl) if buildconfig['uprev']: # Don't push changes for developers. if buildconfig['master']: # Master bot needs to check if the other slaves completed. if cbuildbot_comm.HaveSlavesCompleted(config): if not options.debug and options.prebuilts: _UploadPrebuilts(buildroot, board, buildconfig['rev_overlays'], [new_binhost]) _UprevPush(buildroot, tracking_branch, buildconfig['board'], push_overlays, options.debug) else: Die('CBUILDBOT - One of the slaves has failed!!!') else: # Publish my status to the master if its expecting it. if buildconfig['important'] and not options.debug: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE) except: # Send failure to master bot. if not buildconfig['master'] and buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED) raise
if not os.path.isdir(path):
if command != 'clean' and not os.path.isdir(path):
def main(argv): try: argv = gflags.FLAGS(argv) if len(argv) != 2: _PrintUsageAndDie('Must specify a valid command') else: command = argv[1] except gflags.FlagsError, e : _PrintUsageAndDie(str(e)) package_list = gflags.FLAGS.packages.split(':') _CheckSaneArguments(package_list, command) if gflags.FLAGS.overlays: overlays = {} for path in gflags.FLAGS.overlays.split(':'): if not os.path.isdir(path): Die('Cannot find overlay: %s' % path) overlays[path] = [] else: Warning('Missing --overlays argument') overlays = { '%s/private-overlays/chromeos-overlay' % gflags.FLAGS.srcroot: [], '%s/third_party/chromiumos-overlay' % gflags.FLAGS.srcroot: [] } if command == 'commit': _BuildEBuildDictionary(overlays, gflags.FLAGS.all, package_list) for overlay, ebuilds in overlays.items(): if not os.path.isdir(overlay): Warning("Skipping %s" % overlay) continue # TODO(davidjames): Currently, all code that interacts with git depends on # the cwd being set to the overlay directory. We should instead pass in # this parameter so that we don't need to modify the cwd globally. os.chdir(overlay) if command == 'clean': _Clean() elif command == 'push': _PushChange() elif command == 'commit' and ebuilds: work_branch = _GitBranch(_STABLE_BRANCH_NAME) work_branch.CreateBranch() if not work_branch.Exists(): Die('Unable to create stabilizing branch in %s' % overlay) # Contains the array of packages we actually revved. revved_packages = [] for ebuild in ebuilds: try: _Print('Working on %s' % ebuild.package) worker = EBuildStableMarker(ebuild) commit_id = ebuild.GetCommitId() if worker.RevEBuild(commit_id): message = _GIT_COMMIT_MESSAGE % (ebuild.package, commit_id) worker.CommitChange(message) revved_packages.append(ebuild.package) except (OSError, IOError): Warning('Cannot rev %s\n' % ebuild.package, 'Note you will have to go into %s ' 'and reset the git repo yourself.' % overlay) raise if revved_packages: _CleanStalePackages(gflags.FLAGS.board, revved_packages) else: work_branch.Delete()
'rsync -av %(pkg_index)s %(remote_location)s/Packages' % d]
'rsync -av --chmod=a+r %(pkg_index)s %(remote_packages)s' % d]
def UploadPrebuilt(build_path, upload_location, version, binhost_base_url, board=None, git_sync=False, git_sync_retries=5, key='PORTAGE_BINHOST', pkg_indexes=[], sync_binhost_conf=False): """Upload Host prebuilt files to Google Storage space. Args: build_path: The path to the root of the chroot. upload_location: The upload location. board: The board to upload to Google Storage. If this is None, upload host packages. git_sync: If set, update make.conf of target to reference the latest prebuilt packages generated here. git_sync_retries: How many times to retry pushing when updating git files. This helps avoid failures when multiple bots are modifying the same Repo. default: 5 key: The variable key to update in the git file. (Default: PORTAGE_BINHOST) pkg_indexes: Old uploaded prebuilts to compare against. Instead of uploading duplicate files, we just link to the old files. sync_binhost_conf: If set, update binhost config file in chromiumos-overlay for the current board or host. """ if not board: # We are uploading host packages # TODO(scottz): eventually add support for different host_targets package_path = os.path.join(build_path, _HOST_PACKAGES_PATH) url_suffix = _REL_HOST_PATH % {'version': version, 'target': _HOST_TARGET} package_string = _HOST_TARGET git_file = os.path.join(build_path, _PREBUILT_MAKE_CONF[_HOST_TARGET]) binhost_conf = os.path.join(build_path, _BINHOST_CONF_DIR, 'host', '%s.conf' % _HOST_TARGET) else: board_path = os.path.join(build_path, _BOARD_PATH % {'board': board}) package_path = os.path.join(board_path, 'packages') package_string = board url_suffix = _REL_BOARD_PATH % {'board': board, 'version': version} git_file = os.path.join(build_path, DetermineMakeConfFile(board)) binhost_conf = os.path.join(build_path, _BINHOST_CONF_DIR, 'target', '%s.conf' % board) remote_location = '%s/%s' % (upload_location.rstrip('/'), url_suffix) # Process Packages file, removing duplicates and filtered packages. pkg_index = GrabLocalPackageIndex(package_path) pkg_index.SetUploadLocation(binhost_base_url, url_suffix) pkg_index.RemoveFilteredPackages(lambda pkg: ShouldFilterPackage(pkg)) uploads = pkg_index.ResolveDuplicateUploads(pkg_indexes) # Write Packages file. tmp_packages_file = pkg_index.WriteToNamedTemporaryFile() if upload_location.startswith('gs://'): # Build list of files to upload. upload_files = GenerateUploadDict(package_path, remote_location, uploads) remote_file = '%s/Packages' % remote_location.rstrip('/') upload_files[tmp_packages_file.name] = remote_file print 'Uploading %s' % package_string failed_uploads = RemoteUpload(upload_files) if len(failed_uploads) > 1 or (None not in failed_uploads): error_msg = ['%s -> %s\n' % args for args in failed_uploads] raise UploadFailed('Error uploading:\n%s' % error_msg) else: pkgs = ' '.join(p['CPV'] + '.tbz2' for p in uploads) ssh_server, remote_path = remote_location.split(':', 1) d = { 'pkg_index': tmp_packages_file.name, 'pkgs': pkgs, 'remote_path': remote_path, 'remote_location': remote_location, 'ssh_server': ssh_server } cmds = ['ssh %(ssh_server)s mkdir -p %(remote_path)s' % d, 'rsync -av %(pkg_index)s %(remote_location)s/Packages' % d] if pkgs: cmds.append('rsync -Rav %(pkgs)s %(remote_location)s/' % d) for cmd in cmds: if not _RetryRun(cmd, shell=True, cwd=package_path): raise UploadFailed('Could not run %s' % cmd) url_value = '%s/%s/' % (binhost_base_url, url_suffix) if git_sync: RevGitFile(git_file, url_value, retries=git_sync_retries, key=key) if sync_binhost_conf: UpdateBinhostConfFile(binhost_conf, key, url_value)
equery_cmd = 'equery-%s which %s 2> /dev/null' \ % (gflags.FLAGS.board, package)
equery_cmd = ( 'ACCEPT_KEYWORDS="x86 arm amd64" equery-%s which %s 2> /dev/null' % (gflags.FLAGS.board, package))
def _FindEBuildPath(cls, package): """Static method that returns the full path of an ebuild.""" _Print('Looking for unstable ebuild for %s' % package) equery_cmd = 'equery-%s which %s 2> /dev/null' \ % (gflags.FLAGS.board, package) path = _SimpleRunCommand(equery_cmd) if path: _Print('Unstable ebuild found at %s' % path) return path
], cwd=cwd, error_ok=True)
], cwd=cwd, error_ok=False)
def _RunSmokeSuite(buildroot): cwd = os.path.join(buildroot, 'src', 'scripts') RunCommand(['bin/cros_run_vm_test', '--no_graphics', '--test_case', 'suite_Smoke', ], cwd=cwd, error_ok=True)
image_path=None, results_dir_root=None):
image_path=None, order_output=False, results_dir_root=None):
def __init__(self, tests, base_ssh_port=_DEFAULT_BASE_SSH_PORT, board=None, image_path=None, results_dir_root=None): """Constructs and initializes the test runner class.
proc = subprocess.Popen(args, stdin=dev_null)
output = None if self._order_output: output = tempfile.NamedTemporaryFile(prefix='parallel_vm_test_') Info('Piping output to %s.' % output.name) proc = subprocess.Popen(args, stdin=dev_null, stdout=output, stderr=output)
def _SpawnTests(self): """Spawns VMs and starts the test runs on them.
'proc': proc }
'proc': proc, 'output': output }
def _SpawnTests(self): """Spawns VMs and starts the test runs on them.
options.image_path, options.results_dir_root)
options.image_path, options.order_output, options.results_dir_root)
def main(): usage = 'Usage: %prog [options] tests...' parser = optparse.OptionParser(usage=usage) parser.add_option('--base_ssh_port', type='int', default=_DEFAULT_BASE_SSH_PORT, help='Base SSH port. Spawned VMs listen to localhost SSH ' 'ports incrementally allocated starting from the base one. ' '[default: %default]') parser.add_option('--board', help='The target board. If none specified, ' 'cros_run_vm_test will use the default board.') parser.add_option('--image_path', help='Full path to the VM image. If none specified, ' 'cros_run_vm_test will use the latest image.') parser.add_option('--results_dir_root', help='Root results directory. If none specified, each test ' 'will store its results in a separate /tmp directory.') (options, args) = parser.parse_args() if not args: parser.print_help() Die('no tests provided') runner = ParallelTestRunner(args, options.base_ssh_port, options.board, options.image_path, options.results_dir_root) runner.Run()
self.download_folder = os.path.join(self.crosutilsbin, 'latest_download')
self.download_folder = os.path.join(self.crosutils, 'latest_download') if not os.path.exists(self.download_folder): os.makedirs(self.download_folder)
def setUp(self): unittest.TestCase.setUp(self) # Set these up as they are used often. self.crosutils = os.path.join(os.path.dirname(__file__), '..') self.crosutilsbin = os.path.join(os.path.dirname(__file__)) self.download_folder = os.path.join(self.crosutilsbin, 'latest_download')
def BuildStateful(self, src, dst):
def BuildStateful(self, src, dst_dir, dst_file):
def BuildStateful(self, src, dst): """Create a stateful partition update image."""
if self.GetCached(src, dst): self.Info('Using cached stateful %s' % dst)
if self.GetCached(src, dst_file): self.Info('Using cached stateful %s' % dst_file)
def BuildStateful(self, src, dst): """Create a stateful partition update image."""
cgpt = self.ChrootPath('/usr/bin/cgpt') offset = self.cmd.OutputOneLine(cgpt, 'show', '-b', '-i', '1', src) size = self.cmd.OutputOneLine(cgpt, 'show', '-s', '-i', '1', src) if None in (size, offset): self.Error('Unable to use cgpt to get image geometry') return False return self.cmd.RunPipe([['dd', 'if=%s' % src, 'bs=512', 'skip=%s' % offset, 'count=%s' % size], ['gzip', '-c']], outfile=dst)
return self.cmd.Run(self.CrosUtilsPath( 'cros_generate_stateful_update_payload'), '--image=%s' % src, '--output=%s' % dst_dir)
def BuildStateful(self, src, dst): """Create a stateful partition update image."""
not cros_env.BuildStateful(image_file, stateful_file)):
not cros_env.BuildStateful(image_file, image_directory, stateful_file)):
def main(argv): usage = 'usage: %prog [options]' parser = optparse.OptionParser(usage=usage) parser.add_option('--board', dest='board', default=None, help='Board platform type') parser.add_option('--force-mismatch', dest='force_mismatch', default=False, action='store_true', help='Upgrade even if client arch does not match') parser.add_option('--from', dest='src', default=None, help='Source image to install') parser.add_option('--image-name', dest='image_name', default=DEFAULT_IMAGE_NAME, help='Filename within image directory to load') parser.add_option('--port', dest='port', default=8081, type='int', help='TCP port to serve from and tunnel through') parser.add_option('--remote', dest='remote', default=None, help='Remote device-under-test IP address') parser.add_option('--server-only', dest='server_only', default=False, action='store_true', help='Do not start client') parser.add_option('--verbose', dest='verbose', default=False, action='store_true', help='Display running commands') (options, args) = parser.parse_args(argv) cros_env = CrosEnv(verbose=options.verbose) if not options.board: options.board = cros_env.GetDefaultBoard() if not options.src: options.src = cros_env.GetLatestImage(options.board) if options.src is None: parser.error('No --from argument given and no default image found') cros_env.Info('Performing update from %s' % options.src) if not os.path.exists(options.src): parser.error('Path %s does not exist' % options.src) if os.path.isdir(options.src): image_directory = options.src image_file = os.path.join(options.src, options.image_name) if not os.path.exists(image_file): parser.error('Image file %s does not exist' % image_file) else: image_file = options.src image_directory = os.path.dirname(options.src) if options.remote: cros_env.SetRemote(options.remote) rel = cros_env.GetRemoteRelease() if not rel: cros_env.Fatal('Could not retrieve remote lsb-release') board = rel.get('CHROMEOS_RELEASE_BOARD', '(None)') if board != options.board and not options.force_mismatch: cros_env.Error('Board %s does not match expected %s' % (board, options.board)) cros_env.Error('(Use --force-mismatch option to override this)') cros_env.Fatal() elif not options.server_only: parser.error('Either --server-only must be specified or ' '--remote=<client> needs to be given') update_file = os.path.join(image_directory, UPDATE_FILENAME) stateful_file = os.path.join(image_directory, STATEFUL_FILENAME) if (not cros_env.GenerateUpdatePayload(image_file, update_file) or not cros_env.BuildStateful(image_file, stateful_file)): cros_env.Fatal() cros_env.CreateServer(options.port, update_file, stateful_file) exit_status = 1 if options.server_only: child = None else: # Start an "image-to-live" instance that will pull bits from the server child = os.fork() if child: signal.signal(signal.SIGCHLD, ChildFinished(child).SigHandler) else: try: time.sleep(SERVER_STARTUP_WAIT) if cros_env.StartClient(options.port): exit_status = 0 except KeyboardInterrupt: cros_env.Error('Client Exiting on Control-C') except: cros_env.Error('Exception in client code:') traceback.print_exc(file=sys.stdout) cros_env.ssh_cmd.Cleanup() cros_env.Info('Client exiting with status %d' % exit_status) sys.exit(exit_status) try: cros_env.StartServer() except KeyboardInterrupt: cros_env.Info('Server Exiting on Control-C') exit_status = 0 except ChildFinished, e: cros_env.Info('Server Exiting on Client Exit (%d)' % e.status) exit_status = e.status child = None except: cros_env.Error('Exception in server code:') traceback.print_exc(file=sys.stdout) if child: os.kill(child, 15) cros_env.Info('Server exiting with status %d' % exit_status) sys.exit(exit_status)
_SimpleRunCommand('git remote update') merge_branch = _GitBranch(merge_branch_name) merge_branch.CreateBranch() if not merge_branch.Exists(): Die('Unable to create merge branch.') _SimpleRunCommand('git merge --squash %s' % _STABLE_BRANCH_NAME) _SimpleRunCommand('git commit -m "%s"' % description) _SimpleRunCommand('git config push.default tracking') _SimpleRunCommand('git push')
for push_try in range(num_retries + 1): try: _SimpleRunCommand('git remote update') merge_branch = _GitBranch(merge_branch_name) merge_branch.CreateBranch() if not merge_branch.Exists(): Die('Unable to create merge branch.') _SimpleRunCommand('git merge --squash %s' % _STABLE_BRANCH_NAME) _SimpleRunCommand('git commit -m "%s"' % description) _SimpleRunCommand('git config push.default tracking') _SimpleRunCommand('git push') break except: if push_try < num_retries: Warning('Failed to push change, performing retry (%s/%s)' % ( push_try + 1, num_retries)) else: raise
def _PushChange(): """Pushes changes to the git repository. Pushes locals commits from calls to CommitChange to the remote git repository specified by os.pwd. Raises: OSError: Error occurred while pushing. """ # TODO(sosa) - Add logic for buildbot to check whether other slaves have # completed and push this change only if they have. # Sanity check to make sure we're on a stabilizing branch before pushing. if not _CheckOnStabilizingBranch(): Info('Not on branch %s so no work found to push. Exiting' % \ _STABLE_BRANCH_NAME) return description = _SimpleRunCommand('git log --format=format:%s%n%n%b ' + gflags.FLAGS.tracking_branch + '..') description = 'Marking set of ebuilds as stable\n\n%s' % description merge_branch_name = 'merge_branch' _SimpleRunCommand('git remote update') merge_branch = _GitBranch(merge_branch_name) merge_branch.CreateBranch() if not merge_branch.Exists(): Die('Unable to create merge branch.') _SimpleRunCommand('git merge --squash %s' % _STABLE_BRANCH_NAME) _SimpleRunCommand('git commit -m "%s"' % description) # Ugh. There has got to be an easier way to push to a tracking branch _SimpleRunCommand('git config push.default tracking') _SimpleRunCommand('git push')
capture=False, oneline=False): """Perform a command pipeline, with optional input/output filenames."""
capture=False, oneline=False, hide_stderr=False): """ Perform a command pipeline, with optional input/output filenames. hide_stderr Don't allow output of stderr (default False) """
def RunPipe(self, pipeline, infile=None, outfile=None, capture=False, oneline=False): """Perform a command pipeline, with optional input/output filenames."""
self.env.Info('Running: %s' % ' '.join(cmd))
if hide_stderr: kwargs['stderr'] = open('/dev/null', 'wb') self.env.Debug('Running: %s' % ' '.join(cmd))
def RunPipe(self, pipeline, infile=None, outfile=None, capture=False, oneline=False): """Perform a command pipeline, with optional input/output filenames."""
def __init__(self, verbose=False):
SILENT = 0 INFO = 1 DEBUG = 2 def __init__(self, verbose=SILENT):
def __init__(self, verbose=False): self.cros_root = os.path.dirname(os.path.abspath(sys.argv[0])) parent = os.path.dirname(self.cros_root) if os.path.exists(os.path.join(parent, 'chromeos-common.sh')): self.cros_root = parent self.cmd = Command(self) self.verbose = verbose
if self.verbose:
if self.verbose >= CrosEnv.INFO:
def Info(self, msg): if self.verbose: print 'INFO: %s' % msg
verbose=self.verbose))
verbose=self.verbose, have_pv=self.have_pv))
def CreateServer(self, port, update_file, stateful_file): """Start the devserver clone."""
print 'Client update completed successfully!'
self.Info('Client update completed successfully!')
def StartClient(self, port): """Ask the client machine to update from our server."""
handler.send_Error(404, 'File not found')
handler.send_error(404, 'File not found')
def Reply(self, handler, send_content=True, post_data=None): handler.send_Error(404, 'File not found') return None
verbose=False, blocksize=16*1024):
verbose=False, blocksize=16*1024, have_pv=False):
def __init__(self, filename, content_type='application/octet-stream', verbose=False, blocksize=16*1024): self.filename = filename self.content_type = content_type self.verbose = verbose self.blocksize = blocksize
if not send_content: return if filesize <= self.blocksize: handler.wfile.write(f.read()) else:
if send_content:
def Reply(self, handler, send_content=True, post_data=None): """Return file contents to the client. Optionally display progress."""
default=DEFAULT_IMAGE_NAME,
def main(argv): usage = 'usage: %prog [options]' parser = optparse.OptionParser(usage=usage) parser.add_option('--board', dest='board', default=None, help='Board platform type') parser.add_option('--force-mismatch', dest='force_mismatch', default=False, action='store_true', help='Upgrade even if client arch does not match') parser.add_option('--from', dest='src', default=None, help='Source image to install') parser.add_option('--image-name', dest='image_name', default=DEFAULT_IMAGE_NAME, help='Filename within image directory to load') parser.add_option('--port', dest='port', default=8081, type='int', help='TCP port to serve from and tunnel through') parser.add_option('--remote', dest='remote', default=None, help='Remote device-under-test IP address') parser.add_option('--server-only', dest='server_only', default=False, action='store_true', help='Do not start client') parser.add_option('--verbose', dest='verbose', default=False, action='store_true', help='Display running commands') (options, args) = parser.parse_args(argv) cros_env = CrosEnv(verbose=options.verbose) if not options.board: options.board = cros_env.GetDefaultBoard() if not options.src: options.src = cros_env.GetLatestImage(options.board) if options.src is None: parser.error('No --from argument given and no default image found') cros_env.Info('Performing update from %s' % options.src) if not os.path.exists(options.src): parser.error('Path %s does not exist' % options.src) if os.path.isdir(options.src): image_directory = options.src image_file = os.path.join(options.src, options.image_name) if not os.path.exists(image_file): parser.error('Image file %s does not exist' % image_file) else: image_file = options.src image_directory = os.path.dirname(options.src) if options.remote: cros_env.SetRemote(options.remote) rel = cros_env.GetRemoteRelease() if not rel: cros_env.Fatal('Could not retrieve remote lsb-release') board = rel.get('CHROMEOS_RELEASE_BOARD', '(None)') if board != options.board and not options.force_mismatch: cros_env.Error('Board %s does not match expected %s' % (board, options.board)) cros_env.Error('(Use --force-mismatch option to override this)') cros_env.Fatal() elif not options.server_only: parser.error('Either --server-only must be specified or ' '--remote=<client> needs to be given') update_file = os.path.join(image_directory, UPDATE_FILENAME) stateful_file = os.path.join(image_directory, STATEFUL_FILENAME) if (not cros_env.GenerateUpdatePayload(image_file, update_file) or not cros_env.BuildStateful(image_file, image_directory, stateful_file)): cros_env.Fatal() cros_env.CreateServer(options.port, update_file, stateful_file) exit_status = 1 if options.server_only: child = None else: # Start an "image-to-live" instance that will pull bits from the server child = os.fork() if child: signal.signal(signal.SIGCHLD, ChildFinished(child).SigHandler) else: try: time.sleep(SERVER_STARTUP_WAIT) if cros_env.StartClient(options.port): exit_status = 0 except KeyboardInterrupt: cros_env.Error('Client Exiting on Control-C') except: cros_env.Error('Exception in client code:') traceback.print_exc(file=sys.stdout) cros_env.ssh_cmd.Cleanup() cros_env.Info('Client exiting with status %d' % exit_status) sys.exit(exit_status) try: cros_env.StartServer() except KeyboardInterrupt: cros_env.Info('Server Exiting on Control-C') exit_status = 0 except ChildFinished, e: cros_env.Info('Server Exiting on Client Exit (%d)' % e.status) exit_status = e.status child = None except: cros_env.Error('Exception in server code:') traceback.print_exc(file=sys.stdout) if child: os.kill(child, 15) cros_env.Info('Server exiting with status %d' % exit_status) sys.exit(exit_status)
cros_env = CrosEnv(verbose=options.verbose)
build_test_image = False verbosity = CrosEnv.SILENT if options.verbose: verbosity = CrosEnv.INFO if options.debug: verbosity = CrosEnv.DEBUG cros_env = CrosEnv(verbose=verbosity)
def main(argv): usage = 'usage: %prog [options]' parser = optparse.OptionParser(usage=usage) parser.add_option('--board', dest='board', default=None, help='Board platform type') parser.add_option('--force-mismatch', dest='force_mismatch', default=False, action='store_true', help='Upgrade even if client arch does not match') parser.add_option('--from', dest='src', default=None, help='Source image to install') parser.add_option('--image-name', dest='image_name', default=DEFAULT_IMAGE_NAME, help='Filename within image directory to load') parser.add_option('--port', dest='port', default=8081, type='int', help='TCP port to serve from and tunnel through') parser.add_option('--remote', dest='remote', default=None, help='Remote device-under-test IP address') parser.add_option('--server-only', dest='server_only', default=False, action='store_true', help='Do not start client') parser.add_option('--verbose', dest='verbose', default=False, action='store_true', help='Display running commands') (options, args) = parser.parse_args(argv) cros_env = CrosEnv(verbose=options.verbose) if not options.board: options.board = cros_env.GetDefaultBoard() if not options.src: options.src = cros_env.GetLatestImage(options.board) if options.src is None: parser.error('No --from argument given and no default image found') cros_env.Info('Performing update from %s' % options.src) if not os.path.exists(options.src): parser.error('Path %s does not exist' % options.src) if os.path.isdir(options.src): image_directory = options.src image_file = os.path.join(options.src, options.image_name) if not os.path.exists(image_file): parser.error('Image file %s does not exist' % image_file) else: image_file = options.src image_directory = os.path.dirname(options.src) if options.remote: cros_env.SetRemote(options.remote) rel = cros_env.GetRemoteRelease() if not rel: cros_env.Fatal('Could not retrieve remote lsb-release') board = rel.get('CHROMEOS_RELEASE_BOARD', '(None)') if board != options.board and not options.force_mismatch: cros_env.Error('Board %s does not match expected %s' % (board, options.board)) cros_env.Error('(Use --force-mismatch option to override this)') cros_env.Fatal() elif not options.server_only: parser.error('Either --server-only must be specified or ' '--remote=<client> needs to be given') update_file = os.path.join(image_directory, UPDATE_FILENAME) stateful_file = os.path.join(image_directory, STATEFUL_FILENAME) if (not cros_env.GenerateUpdatePayload(image_file, update_file) or not cros_env.BuildStateful(image_file, image_directory, stateful_file)): cros_env.Fatal() cros_env.CreateServer(options.port, update_file, stateful_file) exit_status = 1 if options.server_only: child = None else: # Start an "image-to-live" instance that will pull bits from the server child = os.fork() if child: signal.signal(signal.SIGCHLD, ChildFinished(child).SigHandler) else: try: time.sleep(SERVER_STARTUP_WAIT) if cros_env.StartClient(options.port): exit_status = 0 except KeyboardInterrupt: cros_env.Error('Client Exiting on Control-C') except: cros_env.Error('Exception in client code:') traceback.print_exc(file=sys.stdout) cros_env.ssh_cmd.Cleanup() cros_env.Info('Client exiting with status %d' % exit_status) sys.exit(exit_status) try: cros_env.StartServer() except KeyboardInterrupt: cros_env.Info('Server Exiting on Control-C') exit_status = 0 except ChildFinished, e: cros_env.Info('Server Exiting on Client Exit (%d)' % e.status) exit_status = e.status child = None except: cros_env.Error('Exception in server code:') traceback.print_exc(file=sys.stdout) if child: os.kill(child, 15) cros_env.Info('Server exiting with status %d' % exit_status) sys.exit(exit_status)
update_file = os.path.join(image_directory, UPDATE_FILENAME) stateful_file = os.path.join(image_directory, STATEFUL_FILENAME)
def main(argv): usage = 'usage: %prog [options]' parser = optparse.OptionParser(usage=usage) parser.add_option('--board', dest='board', default=None, help='Board platform type') parser.add_option('--force-mismatch', dest='force_mismatch', default=False, action='store_true', help='Upgrade even if client arch does not match') parser.add_option('--from', dest='src', default=None, help='Source image to install') parser.add_option('--image-name', dest='image_name', default=DEFAULT_IMAGE_NAME, help='Filename within image directory to load') parser.add_option('--port', dest='port', default=8081, type='int', help='TCP port to serve from and tunnel through') parser.add_option('--remote', dest='remote', default=None, help='Remote device-under-test IP address') parser.add_option('--server-only', dest='server_only', default=False, action='store_true', help='Do not start client') parser.add_option('--verbose', dest='verbose', default=False, action='store_true', help='Display running commands') (options, args) = parser.parse_args(argv) cros_env = CrosEnv(verbose=options.verbose) if not options.board: options.board = cros_env.GetDefaultBoard() if not options.src: options.src = cros_env.GetLatestImage(options.board) if options.src is None: parser.error('No --from argument given and no default image found') cros_env.Info('Performing update from %s' % options.src) if not os.path.exists(options.src): parser.error('Path %s does not exist' % options.src) if os.path.isdir(options.src): image_directory = options.src image_file = os.path.join(options.src, options.image_name) if not os.path.exists(image_file): parser.error('Image file %s does not exist' % image_file) else: image_file = options.src image_directory = os.path.dirname(options.src) if options.remote: cros_env.SetRemote(options.remote) rel = cros_env.GetRemoteRelease() if not rel: cros_env.Fatal('Could not retrieve remote lsb-release') board = rel.get('CHROMEOS_RELEASE_BOARD', '(None)') if board != options.board and not options.force_mismatch: cros_env.Error('Board %s does not match expected %s' % (board, options.board)) cros_env.Error('(Use --force-mismatch option to override this)') cros_env.Fatal() elif not options.server_only: parser.error('Either --server-only must be specified or ' '--remote=<client> needs to be given') update_file = os.path.join(image_directory, UPDATE_FILENAME) stateful_file = os.path.join(image_directory, STATEFUL_FILENAME) if (not cros_env.GenerateUpdatePayload(image_file, update_file) or not cros_env.BuildStateful(image_file, image_directory, stateful_file)): cros_env.Fatal() cros_env.CreateServer(options.port, update_file, stateful_file) exit_status = 1 if options.server_only: child = None else: # Start an "image-to-live" instance that will pull bits from the server child = os.fork() if child: signal.signal(signal.SIGCHLD, ChildFinished(child).SigHandler) else: try: time.sleep(SERVER_STARTUP_WAIT) if cros_env.StartClient(options.port): exit_status = 0 except KeyboardInterrupt: cros_env.Error('Client Exiting on Control-C') except: cros_env.Error('Exception in client code:') traceback.print_exc(file=sys.stdout) cros_env.ssh_cmd.Cleanup() cros_env.Info('Client exiting with status %d' % exit_status) sys.exit(exit_status) try: cros_env.StartServer() except KeyboardInterrupt: cros_env.Info('Server Exiting on Control-C') exit_status = 0 except ChildFinished, e: cros_env.Info('Server Exiting on Client Exit (%d)' % e.status) exit_status = e.status child = None except: cros_env.Error('Exception in server code:') traceback.print_exc(file=sys.stdout) if child: os.kill(child, 15) cros_env.Info('Server exiting with status %d' % exit_status) sys.exit(exit_status)
'url.ssh://gitrw.chromium.org.pushinsteadof',
'url.ssh://git@gitrw.chromium.org:9222.pushinsteadof',
def _FullCheckout(buildroot): MakeDir(buildroot, parents=True) RunCommand(['repo', 'init', '-u', 'http://src.chromium.org/git/manifest'], cwd=buildroot, input='\n\ny\n') RunCommand(['repo', 'sync'], cwd=buildroot) RunCommand(['repo', 'forall', '-c', 'git', 'config', 'url.ssh://gitrw.chromium.org.pushinsteadof', 'http://src.chromium.org/git'], cwd=buildroot)
dev_null = open('/dev/null')
def _SpawnTests(self): """Spawns VMs and starts the test runs on them.
proc = subprocess.Popen(args, stdin=dev_null, stdout=output, stderr=output)
proc = subprocess.Popen(args, stdout=output, stderr=output)
def _SpawnTests(self): """Spawns VMs and starts the test runs on them.
RunCommand(['repo', '--trace', 'sync'], cwd=buildroot)
def RepoSync(buildroot, retries=_DEFAULT_RETRIES): """Uses repo to checkout the source code. Keyword arguments: retries -- Number of retries to try before failing on the sync. """ while retries > 0: try: # The --trace option ensures that repo shows the output from git. This # is needed so that the buildbot can kill us if git is not making # progress. RunCommand(['repo', 'forall', '-c', 'git', 'config', 'url.ssh://git@gitrw.chromium.org:9222.insteadof', 'http://git.chromium.org/git'], cwd=buildroot) RunCommand(['repo', '--trace', 'sync'], cwd=buildroot) retries = 0 except: retries -= 1 if retries > 0: Warning('CBUILDBOT -- Repo Sync Failed, retrying') else: Warning('CBUILDBOT -- Retries exhausted') raise RunCommand(['repo', 'manifest', '-r', '-o', '/dev/stderr'], cwd=buildroot)
def _Build(buildroot):
def _Build(buildroot, emptytree):
def _Build(buildroot): """Wrapper around build_packages.""" cwd = os.path.join(buildroot, 'src', 'scripts') RunCommand(['./build_packages'], cwd=cwd, enter_chroot=True)
RunCommand(['./build_packages'], cwd=cwd, enter_chroot=True)
cmd = ['./build_packages'] if emptytree: cmd.insert(0, 'EXTRA_BOARD_FLAGS=--emptytree') RunCommand(cmd, cwd=cwd, enter_chroot=True)
def _Build(buildroot): """Wrapper around build_packages.""" cwd = os.path.join(buildroot, 'src', 'scripts') RunCommand(['./build_packages'], cwd=cwd, enter_chroot=True)
if old_binhost and old_binhost != new_binhost: RunCommand(['sudo', 'rm', '-rf', boardpath])
emptytree = (old_binhost and old_binhost != new_binhost)
def main(): # Parse options usage = "usage: %prog [options] cbuildbot_config" parser = optparse.OptionParser(usage=usage) parser.add_option('-r', '--buildroot', help='root directory where build occurs', default=".") parser.add_option('-n', '--buildnumber', help='build number', type='int', default=0) parser.add_option('--chrome_rev', default=None, type='string', dest='chrome_rev', help=('Chrome_rev of type [tot|latest_release|' 'sticky_release]')) parser.add_option('-f', '--revisionfile', help='file where new revisions are stored') parser.add_option('--clobber', action='store_true', dest='clobber', default=False, help='Clobbers an old checkout before syncing') parser.add_option('--debug', action='store_true', dest='debug', default=False, help='Override some options to run as a developer.') parser.add_option('--nosync', action='store_false', dest='sync', default=True, help="Don't sync before building.") parser.add_option('--notests', action='store_false', dest='tests', default=True, help='Override values from buildconfig and run no tests.') parser.add_option('-t', '--tracking-branch', dest='tracking_branch', default='cros/master', help='Run the buildbot on a branch') parser.add_option('-u', '--url', dest='url', default='http://git.chromium.org/git/manifest', help='Run the buildbot on internal manifest') parser.add_option('-g', '--gsutil', default='', help='Location of gsutil') parser.add_option('-c', '--gsutil_archive', default='', help='Datastore archive location') parser.add_option('-a', '--acl', default='private', help='ACL to set on GSD archives') (options, args) = parser.parse_args() buildroot = os.path.abspath(options.buildroot) revisionfile = options.revisionfile tracking_branch = options.tracking_branch chrome_atom_to_build = None if len(args) >= 1: buildconfig = _GetConfig(args[-1]) else: Warning('Missing configuration description') parser.print_usage() sys.exit(1) try: # Calculate list of overlay directories. rev_overlays = _ResolveOverlays(buildroot, buildconfig['rev_overlays']) push_overlays = _ResolveOverlays(buildroot, buildconfig['push_overlays']) # We cannot push to overlays that we don't rev. assert set(push_overlays).issubset(set(rev_overlays)) # Either has to be a master or not have any push overlays. assert buildconfig['master'] or not push_overlays board = buildconfig['board'] old_binhost = None _PreFlightRinse(buildroot, buildconfig['board'], tracking_branch, rev_overlays) chroot_path = os.path.join(buildroot, 'chroot') boardpath = os.path.join(chroot_path, 'build', board) if options.sync: if options.clobber or not os.path.isdir(buildroot): _FullCheckout(buildroot, tracking_branch, url=options.url) else: old_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST) _IncrementalCheckout(buildroot) new_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST) if old_binhost and old_binhost != new_binhost: RunCommand(['sudo', 'rm', '-rf', boardpath]) # Check that all overlays can be found. for path in rev_overlays: if not os.path.isdir(path): Die('Missing overlay: %s' % path) if not os.path.isdir(chroot_path): _MakeChroot(buildroot) if not os.path.isdir(boardpath): _SetupBoard(buildroot, board=buildconfig['board']) # Perform uprev. If chrome_uprev is set, rev Chrome ebuilds. if options.chrome_rev: chrome_atom_to_build = _MarkChromeAsStable(buildroot, tracking_branch, options.chrome_rev) elif buildconfig['uprev']: _UprevPackages(buildroot, tracking_branch, revisionfile, buildconfig['board'], rev_overlays) _EnableLocalAccount(buildroot) # Doesn't rebuild without acquiring more source. if options.sync: _Build(buildroot) if chrome_atom_to_build: _BuildChrome(buildroot, buildconfig['board'], chrome_atom_to_build) if buildconfig['unittests'] and options.tests: _RunUnitTests(buildroot) _BuildImage(buildroot) if buildconfig['smoke_bvt'] and options.tests: _BuildVMImageForTesting(buildroot) test_results_dir = '/tmp/run_remote_tests.%s' % options.buildnumber try: _RunSmokeSuite(buildroot, test_results_dir) finally: if not options.debug: archive_full_path = os.path.join(options.gsutil_archive, str(options.buildnumber)) _ArchiveTestResults(buildroot, buildconfig['board'], test_results_dir=test_results_dir, gsutil=options.gsutil, archive_dir=archive_full_path, acl=options.acl) if buildconfig['uprev']: # Don't push changes for developers. if buildconfig['master']: # Master bot needs to check if the other slaves completed. if cbuildbot_comm.HaveSlavesCompleted(config): if not options.debug: _UploadPrebuilts(buildroot, board, buildconfig['rev_overlays'], [new_binhost]) _UprevPush(buildroot, tracking_branch, buildconfig['board'], push_overlays, options.debug) else: Die('CBUILDBOT - One of the slaves has failed!!!') else: # Publish my status to the master if its expecting it. if buildconfig['important'] and not options.debug: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE) except: # Send failure to master bot. if not buildconfig['master'] and buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED) raise
_Build(buildroot)
_Build(buildroot, emptytree)
def main(): # Parse options usage = "usage: %prog [options] cbuildbot_config" parser = optparse.OptionParser(usage=usage) parser.add_option('-r', '--buildroot', help='root directory where build occurs', default=".") parser.add_option('-n', '--buildnumber', help='build number', type='int', default=0) parser.add_option('--chrome_rev', default=None, type='string', dest='chrome_rev', help=('Chrome_rev of type [tot|latest_release|' 'sticky_release]')) parser.add_option('-f', '--revisionfile', help='file where new revisions are stored') parser.add_option('--clobber', action='store_true', dest='clobber', default=False, help='Clobbers an old checkout before syncing') parser.add_option('--debug', action='store_true', dest='debug', default=False, help='Override some options to run as a developer.') parser.add_option('--nosync', action='store_false', dest='sync', default=True, help="Don't sync before building.") parser.add_option('--notests', action='store_false', dest='tests', default=True, help='Override values from buildconfig and run no tests.') parser.add_option('-t', '--tracking-branch', dest='tracking_branch', default='cros/master', help='Run the buildbot on a branch') parser.add_option('-u', '--url', dest='url', default='http://git.chromium.org/git/manifest', help='Run the buildbot on internal manifest') parser.add_option('-g', '--gsutil', default='', help='Location of gsutil') parser.add_option('-c', '--gsutil_archive', default='', help='Datastore archive location') parser.add_option('-a', '--acl', default='private', help='ACL to set on GSD archives') (options, args) = parser.parse_args() buildroot = os.path.abspath(options.buildroot) revisionfile = options.revisionfile tracking_branch = options.tracking_branch chrome_atom_to_build = None if len(args) >= 1: buildconfig = _GetConfig(args[-1]) else: Warning('Missing configuration description') parser.print_usage() sys.exit(1) try: # Calculate list of overlay directories. rev_overlays = _ResolveOverlays(buildroot, buildconfig['rev_overlays']) push_overlays = _ResolveOverlays(buildroot, buildconfig['push_overlays']) # We cannot push to overlays that we don't rev. assert set(push_overlays).issubset(set(rev_overlays)) # Either has to be a master or not have any push overlays. assert buildconfig['master'] or not push_overlays board = buildconfig['board'] old_binhost = None _PreFlightRinse(buildroot, buildconfig['board'], tracking_branch, rev_overlays) chroot_path = os.path.join(buildroot, 'chroot') boardpath = os.path.join(chroot_path, 'build', board) if options.sync: if options.clobber or not os.path.isdir(buildroot): _FullCheckout(buildroot, tracking_branch, url=options.url) else: old_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST) _IncrementalCheckout(buildroot) new_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST) if old_binhost and old_binhost != new_binhost: RunCommand(['sudo', 'rm', '-rf', boardpath]) # Check that all overlays can be found. for path in rev_overlays: if not os.path.isdir(path): Die('Missing overlay: %s' % path) if not os.path.isdir(chroot_path): _MakeChroot(buildroot) if not os.path.isdir(boardpath): _SetupBoard(buildroot, board=buildconfig['board']) # Perform uprev. If chrome_uprev is set, rev Chrome ebuilds. if options.chrome_rev: chrome_atom_to_build = _MarkChromeAsStable(buildroot, tracking_branch, options.chrome_rev) elif buildconfig['uprev']: _UprevPackages(buildroot, tracking_branch, revisionfile, buildconfig['board'], rev_overlays) _EnableLocalAccount(buildroot) # Doesn't rebuild without acquiring more source. if options.sync: _Build(buildroot) if chrome_atom_to_build: _BuildChrome(buildroot, buildconfig['board'], chrome_atom_to_build) if buildconfig['unittests'] and options.tests: _RunUnitTests(buildroot) _BuildImage(buildroot) if buildconfig['smoke_bvt'] and options.tests: _BuildVMImageForTesting(buildroot) test_results_dir = '/tmp/run_remote_tests.%s' % options.buildnumber try: _RunSmokeSuite(buildroot, test_results_dir) finally: if not options.debug: archive_full_path = os.path.join(options.gsutil_archive, str(options.buildnumber)) _ArchiveTestResults(buildroot, buildconfig['board'], test_results_dir=test_results_dir, gsutil=options.gsutil, archive_dir=archive_full_path, acl=options.acl) if buildconfig['uprev']: # Don't push changes for developers. if buildconfig['master']: # Master bot needs to check if the other slaves completed. if cbuildbot_comm.HaveSlavesCompleted(config): if not options.debug: _UploadPrebuilts(buildroot, board, buildconfig['rev_overlays'], [new_binhost]) _UprevPush(buildroot, tracking_branch, buildconfig['board'], push_overlays, options.debug) else: Die('CBUILDBOT - One of the slaves has failed!!!') else: # Publish my status to the master if its expecting it. if buildconfig['important'] and not options.debug: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE) except: # Send failure to master bot. if not buildconfig['master'] and buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED) raise
pid = RunCommand(['sudo', 'cat', pid_file], redirect_stdout=True, enter_chroot=False) if pid: RunCommand(['sudo', 'kill', pid.strip()], error_ok=True, enter_chroot=False) RunCommand(['sudo', 'rm', pid_file], enter_chroot=False)
RunCommand(['./cros_stop_vm', '--kvm_pid=%s' % pid_file], cwd=self.crosutilsbin) assert not os.path.exists(pid_file)
def _KillExistingVM(self, pid_file): if os.path.exists(pid_file): Warning('Existing %s found. Deleting and killing process' % pid_file) pid = RunCommand(['sudo', 'cat', pid_file], redirect_stdout=True, enter_chroot=False) if pid: RunCommand(['sudo', 'kill', pid.strip()], error_ok=True, enter_chroot=False) RunCommand(['sudo', 'rm', pid_file], enter_chroot=False)
def _PreFlightRinse(buildroot): """Cleans up any leftover state from previous runs.""" RunCommand(['sudo', 'killall', 'kvm'], error_ok=True) _UprevCleanup(buildroot, error_ok=True)
def _PreFlightRinse(buildroot): """Cleans up any leftover state from previous runs.""" RunCommand(['sudo', 'killall', 'kvm'], error_ok=True) _UprevCleanup(buildroot, error_ok=True)
def _WipeOldOutput(buildroot): RunCommand(['rm', '-rf', 'src/build/images'], cwd=buildroot)
def _Build(buildroot): """Wrapper around build_packages.""" cwd = os.path.join(buildroot, 'src', 'scripts') RunCommand(['./build_packages'], cwd=cwd, enter_chroot=True)
def _UprevCleanup(buildroot, error_ok=False): """Clean up after a previous uprev attempt.""" cwd = os.path.join(buildroot, 'src', 'scripts') RunCommand(['./cros_mark_as_stable', '--srcroot=..', '--tracking_branch="cros/master"', 'clean'], cwd=cwd, error_ok=error_ok)
def _UprevPackages(buildroot, revisionfile, board): """Uprevs a package based on given revisionfile. If revisionfile is set to None or does not resolve to an actual file, this function will uprev all packages. Keyword arguments: revisionfile -- string specifying a file that contains a list of revisions to uprev. """ # Purposefully set to None as it means Force Build was pressed. revisions = 'None' if (revisionfile): try: rev_file = open(revisionfile) revisions = rev_file.read() rev_file.close() except Exception, e: Warning('Error reading %s, revving all' % revisionfile) revisions = 'None' revisions = revisions.strip() # TODO(sosa): Un-comment once we close individual trees. # revisions == "None" indicates a Force Build. #if revisions != 'None': # print >> sys.stderr, 'CBUILDBOT Revision list found %s' % revisions # revision_list = _ParseRevisionString(revisions, # _CreateRepoDictionary(buildroot, board)) # _UprevFromRevisionList(buildroot, revision_list) #else: Info('CBUILDBOT Revving all') _UprevAllPackages(buildroot)
if options.clobber: RunCommand(['sudo', 'rm', '-rf', buildroot])
def main(): # Parse options usage = "usage: %prog [options] cbuildbot_config" parser = optparse.OptionParser(usage=usage) parser.add_option('-r', '--buildroot', help='root directory where build occurs', default=".") parser.add_option('-n', '--buildnumber', help='build number', type='int', default=0) parser.add_option('-f', '--revisionfile', help='file where new revisions are stored') parser.add_option('--clobber', action='store_true', dest='clobber', default=False, help='Clobbers an old checkout before syncing') parser.add_option('--debug', action='store_true', dest='debug', default=False, help='Override some options to run as a developer.') (options, args) = parser.parse_args() buildroot = options.buildroot revisionfile = options.revisionfile # Passed option to clobber. if options.clobber: RunCommand(['sudo', 'rm', '-rf', buildroot]) if len(args) >= 1: buildconfig = _GetConfig(args[-1]) else: Warning('Missing configuration description') parser.print_usage() sys.exit(1) try: if not os.path.isdir(buildroot): _FullCheckout(buildroot) else: _PreFlightRinse(buildroot) _IncrementalCheckout(buildroot) chroot_path = os.path.join(buildroot, 'chroot') if not os.path.isdir(chroot_path): _MakeChroot(buildroot) boardpath = os.path.join(chroot_path, 'build', buildconfig['board']) if not os.path.isdir(boardpath): _SetupBoard(buildroot, board=buildconfig['board']) if buildconfig['uprev']: _UprevPackages(buildroot, revisionfile, board=buildconfig['board']) _EnableLocalAccount(buildroot) _Build(buildroot) if buildconfig['unittests']: _RunUnitTests(buildroot) _BuildImage(buildroot) if buildconfig['smoke_bvt']: _BuildVMImageForTesting(buildroot) _RunSmokeSuite(buildroot) if buildconfig['uprev']: # Don't push changes for developers. if not options.debug: if buildconfig['master']: # Master bot needs to check if the other slaves completed. if cbuildbot_comm.HaveSlavesCompleted(config): _UprevPush(buildroot) else: # At least one of the slaves failed or we timed out. _UprevCleanup(buildroot) Die('CBUILDBOT - One of the slaves has failed!!!') else: # Publish my status to the master if its expecting it. if buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE) _UprevCleanup(buildroot) except: # Send failure to master bot. if not buildconfig['master'] and buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED) raise
if not os.path.isdir(buildroot):
_PreFlightRinse(buildroot) if options.clobber or not os.path.isdir(buildroot):
def main(): # Parse options usage = "usage: %prog [options] cbuildbot_config" parser = optparse.OptionParser(usage=usage) parser.add_option('-r', '--buildroot', help='root directory where build occurs', default=".") parser.add_option('-n', '--buildnumber', help='build number', type='int', default=0) parser.add_option('-f', '--revisionfile', help='file where new revisions are stored') parser.add_option('--clobber', action='store_true', dest='clobber', default=False, help='Clobbers an old checkout before syncing') parser.add_option('--debug', action='store_true', dest='debug', default=False, help='Override some options to run as a developer.') (options, args) = parser.parse_args() buildroot = options.buildroot revisionfile = options.revisionfile # Passed option to clobber. if options.clobber: RunCommand(['sudo', 'rm', '-rf', buildroot]) if len(args) >= 1: buildconfig = _GetConfig(args[-1]) else: Warning('Missing configuration description') parser.print_usage() sys.exit(1) try: if not os.path.isdir(buildroot): _FullCheckout(buildroot) else: _PreFlightRinse(buildroot) _IncrementalCheckout(buildroot) chroot_path = os.path.join(buildroot, 'chroot') if not os.path.isdir(chroot_path): _MakeChroot(buildroot) boardpath = os.path.join(chroot_path, 'build', buildconfig['board']) if not os.path.isdir(boardpath): _SetupBoard(buildroot, board=buildconfig['board']) if buildconfig['uprev']: _UprevPackages(buildroot, revisionfile, board=buildconfig['board']) _EnableLocalAccount(buildroot) _Build(buildroot) if buildconfig['unittests']: _RunUnitTests(buildroot) _BuildImage(buildroot) if buildconfig['smoke_bvt']: _BuildVMImageForTesting(buildroot) _RunSmokeSuite(buildroot) if buildconfig['uprev']: # Don't push changes for developers. if not options.debug: if buildconfig['master']: # Master bot needs to check if the other slaves completed. if cbuildbot_comm.HaveSlavesCompleted(config): _UprevPush(buildroot) else: # At least one of the slaves failed or we timed out. _UprevCleanup(buildroot) Die('CBUILDBOT - One of the slaves has failed!!!') else: # Publish my status to the master if its expecting it. if buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE) _UprevCleanup(buildroot) except: # Send failure to master bot. if not buildconfig['master'] and buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED) raise
_PreFlightRinse(buildroot)
def main(): # Parse options usage = "usage: %prog [options] cbuildbot_config" parser = optparse.OptionParser(usage=usage) parser.add_option('-r', '--buildroot', help='root directory where build occurs', default=".") parser.add_option('-n', '--buildnumber', help='build number', type='int', default=0) parser.add_option('-f', '--revisionfile', help='file where new revisions are stored') parser.add_option('--clobber', action='store_true', dest='clobber', default=False, help='Clobbers an old checkout before syncing') parser.add_option('--debug', action='store_true', dest='debug', default=False, help='Override some options to run as a developer.') (options, args) = parser.parse_args() buildroot = options.buildroot revisionfile = options.revisionfile # Passed option to clobber. if options.clobber: RunCommand(['sudo', 'rm', '-rf', buildroot]) if len(args) >= 1: buildconfig = _GetConfig(args[-1]) else: Warning('Missing configuration description') parser.print_usage() sys.exit(1) try: if not os.path.isdir(buildroot): _FullCheckout(buildroot) else: _PreFlightRinse(buildroot) _IncrementalCheckout(buildroot) chroot_path = os.path.join(buildroot, 'chroot') if not os.path.isdir(chroot_path): _MakeChroot(buildroot) boardpath = os.path.join(chroot_path, 'build', buildconfig['board']) if not os.path.isdir(boardpath): _SetupBoard(buildroot, board=buildconfig['board']) if buildconfig['uprev']: _UprevPackages(buildroot, revisionfile, board=buildconfig['board']) _EnableLocalAccount(buildroot) _Build(buildroot) if buildconfig['unittests']: _RunUnitTests(buildroot) _BuildImage(buildroot) if buildconfig['smoke_bvt']: _BuildVMImageForTesting(buildroot) _RunSmokeSuite(buildroot) if buildconfig['uprev']: # Don't push changes for developers. if not options.debug: if buildconfig['master']: # Master bot needs to check if the other slaves completed. if cbuildbot_comm.HaveSlavesCompleted(config): _UprevPush(buildroot) else: # At least one of the slaves failed or we timed out. _UprevCleanup(buildroot) Die('CBUILDBOT - One of the slaves has failed!!!') else: # Publish my status to the master if its expecting it. if buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE) _UprevCleanup(buildroot) except: # Send failure to master bot. if not buildconfig['master'] and buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED) raise
_UprevCleanup(buildroot)
def main(): # Parse options usage = "usage: %prog [options] cbuildbot_config" parser = optparse.OptionParser(usage=usage) parser.add_option('-r', '--buildroot', help='root directory where build occurs', default=".") parser.add_option('-n', '--buildnumber', help='build number', type='int', default=0) parser.add_option('-f', '--revisionfile', help='file where new revisions are stored') parser.add_option('--clobber', action='store_true', dest='clobber', default=False, help='Clobbers an old checkout before syncing') parser.add_option('--debug', action='store_true', dest='debug', default=False, help='Override some options to run as a developer.') (options, args) = parser.parse_args() buildroot = options.buildroot revisionfile = options.revisionfile # Passed option to clobber. if options.clobber: RunCommand(['sudo', 'rm', '-rf', buildroot]) if len(args) >= 1: buildconfig = _GetConfig(args[-1]) else: Warning('Missing configuration description') parser.print_usage() sys.exit(1) try: if not os.path.isdir(buildroot): _FullCheckout(buildroot) else: _PreFlightRinse(buildroot) _IncrementalCheckout(buildroot) chroot_path = os.path.join(buildroot, 'chroot') if not os.path.isdir(chroot_path): _MakeChroot(buildroot) boardpath = os.path.join(chroot_path, 'build', buildconfig['board']) if not os.path.isdir(boardpath): _SetupBoard(buildroot, board=buildconfig['board']) if buildconfig['uprev']: _UprevPackages(buildroot, revisionfile, board=buildconfig['board']) _EnableLocalAccount(buildroot) _Build(buildroot) if buildconfig['unittests']: _RunUnitTests(buildroot) _BuildImage(buildroot) if buildconfig['smoke_bvt']: _BuildVMImageForTesting(buildroot) _RunSmokeSuite(buildroot) if buildconfig['uprev']: # Don't push changes for developers. if not options.debug: if buildconfig['master']: # Master bot needs to check if the other slaves completed. if cbuildbot_comm.HaveSlavesCompleted(config): _UprevPush(buildroot) else: # At least one of the slaves failed or we timed out. _UprevCleanup(buildroot) Die('CBUILDBOT - One of the slaves has failed!!!') else: # Publish my status to the master if its expecting it. if buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE) _UprevCleanup(buildroot) except: # Send failure to master bot. if not buildconfig['master'] and buildconfig['important']: cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED) raise
main()
try: main() except Exception: print "Got exception." traceback.print_exc(file=sys.stdout)
def main(): parser = optparse.OptionParser() parser.add_option('-b', '--board', help='board for the image to compare against.') parser.add_option('-c', '--channel', help='channel for the image to compare against.') parser.add_option('-l', '--latestbase', help='Base url for latest links.') parser.add_option('-z', '--zipbase', help='Base url for hosted images.') parser.add_option('--no_graphics', action='store_true', default=False, help='Disable graphics for the vm test.') parser.add_option('--type', default='vm', help='type of test to run: [vm, real]. Default: vm.') parser.add_option('--remote', default='0.0.0.0', help='For real tests, ip address of the target machine.') # Set the usage to include flags. parser.set_usage(parser.format_help()) (options, args) = parser.parse_args() if args: parser.error('Extra args found %s.' % args) if not options.board: parser.error('Need board for image to compare against.') if not options.channel: parser.error('Need channel for image to compare against.') if not options.latestbase: parser.error('Need latest url base to get images.') if not options.zipbase: parser.error('Need zip url base to get images.') RunAUTestHarness(options.board, options.channel, options.latestbase, options.zipbase, options.no_graphics, options.type, options.remote)