Example #1
def archive(options, args):
    build_dir, _ = chromium_utils.ConvertBuildDirToLegacy(
        options.build_dir, use_out=chromium_utils.IsLinux())
    build_dir = os.path.join(build_dir, options.target)
    src_dir = os.path.abspath(os.path.dirname(options.build_dir))

    staging_dir = slave_utils.GetStagingDir(src_dir)
    build_revision = slave_utils.SubversionRevision(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    print 'Staging in %s' % build_dir

    # Build the list of files to archive.
    zip_file_list = [
        f for f in os.listdir(build_dir)
        if ShouldPackageFile(f, options.target)
    ]

    subdir = None

    # TODO(nsylvain): We need to move linux to a subdir as well, but aarya is not
    # ready with the server-side change.
    if chromium_utils.IsMac():
        subdir = '%s-%s' % (chromium_utils.PlatformName(),
                            options.target.lower())

    prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
    zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                     options.target.lower(), build_revision)

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    gs_acl = options.factory_properties.get('gs_acl', None)
    status = slave_utils.GSUtilCopyFile(zip_file,
                                        gs_bucket,
                                        subdir=subdir,
                                        gs_acl=gs_acl)
    if status:
        raise StagingError('Failed to upload %s to %s. Error %d' %
                           (zip_file, gs_bucket, status))
    else:
        # Delete the file, it is not needed anymore.
        os.remove(zip_file)

    return status
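A hedged invocation sketch (not from the source): archive() expects an optparse-style options object, and the values below are hypothetical. Running it for real also requires a Chromium checkout plus the chromium_utils/slave_utils modules from the build tools.

import optparse

parser = optparse.OptionParser()
parser.add_option('--build-dir', dest='build_dir')
parser.add_option('--target', default='Release')
options, args = parser.parse_args(['--build-dir', 'src/build'])
# factory_properties is normally handed in by the buildbot harness;
# these values are hypothetical.
options.factory_properties = {
    'cf_archive_name': 'cf_archive',
    'gs_bucket': 'gs://chromium-fake-archive',
}
archive(options, args)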
Example #2
def UploadGomaCompilerProxyInfo(override_gsutil=None,
                                builder='unknown',
                                master='unknown',
                                slave='unknown',
                                clobber=''):
    """Upload compiler_proxy{,-subproc}.INFO and gomacc.INFO to Google Storage.

  Args:
    override_gsutil: gsutil path to override.
    builder: a string name of a builder.
    master: a string name of a master.
    slave: a string name of a slave.
    clobber: set to a truthy value for clobber builds (to be removed)
  """
    latest_subproc_info = GetLatestGomaCompilerProxySubprocInfo()

    builderinfo = {
        'builder': builder,
        'master': master,
        'slave': slave,
        'clobber': True if clobber else False,
        'os': chromium_utils.PlatformName(),
    }
    # Needs to begin with x-goog-meta for custom metadata.
    # https://cloud.google.com/storage/docs/gsutil/addlhelp/WorkingWithObjectMetadata#custom-metadata
    metadata = {'x-goog-meta-builderinfo': json.dumps(builderinfo)}

    if latest_subproc_info:
        UploadToGomaLogGS(latest_subproc_info,
                          os.path.basename(latest_subproc_info),
                          metadata=metadata,
                          override_gsutil=override_gsutil)
    else:
        print 'No compiler_proxy-subproc.INFO to upload'
    latest_info = GetLatestGomaCompilerProxyInfo()
    if not latest_info:
        print 'No compiler_proxy.INFO to upload'
        return
    # Since the filename of compiler_proxy.INFO is fairly unique,
    # we might be able to upload it as-is.
    log_path = UploadToGomaLogGS(latest_info,
                                 os.path.basename(latest_info),
                                 metadata=metadata,
                                 override_gsutil=override_gsutil)
    viewer_url = (
        'http://chromium-build-stats.appspot.com/compiler_proxy_log/' +
        log_path)
    print 'Visualization at %s' % viewer_url

    gomacc_logs = GetListOfGomaccInfoAfterCompilerProxyStart()
    if gomacc_logs:
        for log in gomacc_logs:
            UploadToGomaLogGS(log,
                              os.path.basename(log),
                              metadata=metadata,
                              override_gsutil=override_gsutil)

    return viewer_url
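A minimal usage sketch, reusing the BUILDBOT_CLOBBER environment variable seen elsewhere in this module; the builder/master/slave names are hypothetical:

viewer_url = UploadGomaCompilerProxyInfo(
    builder='linux-rel',          # hypothetical
    master='chromium.fake',       # hypothetical
    slave='build1-a1',            # hypothetical
    clobber=os.environ.get('BUILDBOT_CLOBBER', ''))
if viewer_url:
    print 'compiler_proxy log: %s' % viewer_url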
Example #3
def main(argv):
    o3d_dir = os.path.join(os.getcwd(), 'o3d')
    staging_dir = slave_utils.GetStagingDir(o3d_dir)

    # Find builder name and revision #s.
    builder_name = slave_utils.SlaveBuildName(o3d_dir)
    o3d_rev = str(slave_utils.SubversionRevision(o3d_dir))
    platform = chromium_utils.PlatformName()

    # Upload zip.
    local_zip = os.path.join(staging_dir,
                             'full-build-' + platform + '_' + o3d_rev + '.zip')
    remote_zip = 'snapshots/o3d/' + o3d_rev + '/' + builder_name + '.zip'

    archive_file.UploadFile(local_zip, remote_zip)
    return 0
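The module presumably ends with the usual entry-point guard (assuming sys is imported at module scope); a sketch:

if __name__ == '__main__':
    sys.exit(main(sys.argv))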
Example #4
def MakeGomaExitStatusCounter(goma_stats_file,
                              goma_crash_report,
                              builder='unknown',
                              master='unknown',
                              slave='unknown',
                              clobber=''):
    """Make Goma exit status counter. This counter indicates compiler_proxy
     has finished without problem, crashed, or killed. This counter will
     be used to alert to goma team.

  Args:
    goma_stats_file: path to goma stats file if any
    goma_crash_report: path to goma crash report file if any
    builder: builder name
    master: master name
    slave: slave name
    clobber: truthy if this is a clobber build
  """

    try:
        counter = {
            'name': 'goma/status',
            'value': 1,
            'builder': builder,
            'master': master,
            'slave': slave,
            'clobber': 1 if clobber else 0,
            'os': chromium_utils.PlatformName(),
        }
        if goma_stats_file and os.path.exists(goma_stats_file):
            counter['status'] = 'success'
        elif goma_crash_report and os.path.exists(goma_crash_report):
            counter['status'] = 'crashed'
        elif IsCompilerProxyKilledByFatalError():
            counter['status'] = 'killed'
        else:
            counter['status'] = 'unknown'

        start_time = GetCompilerProxyStartTime()
        if start_time:
            counter['start_time'] = int(time.mktime(start_time.timetuple()))

        return counter
    except Exception as ex:
        print('error while generating status counter: %s' % ex)
        return None
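A hedged call sketch; the stats-file path is hypothetical, the module's json import is assumed, and the counter is just printed here rather than sent to ts_mon:

counter = MakeGomaExitStatusCounter(
    goma_stats_file='/tmp/goma_stats.binaryproto',  # hypothetical path
    goma_crash_report=None,
    builder='linux-rel', master='chromium.fake', slave='build1-a1')
if counter:
    print json.dumps(counter)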
Example #5
def archive(options, args):
    src_dir = os.path.abspath(os.path.dirname(options.build_dir))
    build_dir = os.path.join(src_dir, 'out', options.target)
    staging_dir = slave_utils.GetStagingDir(src_dir)
    build_revision = slave_utils.SubversionRevision(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    print 'Staging in %s' % build_dir

    # Build the list of files to archive.
    zip_file_list = [
        f for f in os.listdir(build_dir)
        if ShouldPackageFile(f, options.target)
    ]

    prefix = options.factory_properties.get('asan_archive_name', 'asan')
    zip_file_name = '%s-%s-%s-%d' % (prefix, chromium_utils.PlatformName(),
                                     options.target.lower(), build_revision)

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    gs_acl = options.factory_properties.get('gs_acl', None)
    status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, gs_acl=gs_acl)
    if status:
        raise StagingError('Failed to upload %s to %s. Error %d' %
                           (zip_file, gs_bucket, status))
    else:
        # Delete the file, it is not needed anymore.
        os.remove(zip_file)

    return status
Example #6
def GetZipFileNames(build_properties, src_dir, webkit_dir=None,
                    extract=False):
  base_name = 'full-build-%s' % chromium_utils.PlatformName()

  chromium_revision = GetHashOrRevision(src_dir)
  if 'try' in build_properties.get('mastername', ''):
    if extract:
      if not build_properties.get('parent_buildnumber'):
        raise Exception('build_props does not have parent data: %s' %
                        build_properties)
      version_suffix = '_%(parent_buildnumber)s' % build_properties
    else:
      version_suffix = '_%(buildnumber)s' % build_properties
  elif webkit_dir:
    webkit_revision = SubversionRevision(webkit_dir)
    version_suffix = '_wk%d_%s' % (webkit_revision, chromium_revision)
  else:
    version_suffix = '_%s' % chromium_revision

  return base_name, version_suffix
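A hedged sketch of how the two return values compose into a final archive name. The build properties are hypothetical, and src_dir must point at a real checkout for the unconditional GetHashOrRevision() call to succeed:

base_name, version_suffix = GetZipFileNames(
    {'mastername': 'tryserver.fake', 'buildnumber': 42}, 'src')
print base_name + version_suffix + '.zip'  # e.g. full-build-linux_42.zip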
Example #7
def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list):
    """Creates an unversioned full build archive.
  Returns the path of the created archive."""
    zip_file_name = 'full-build-%s' % chromium_utils.PlatformName()
    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big and
    # can cause bot failures from timeouts during downloads to testers.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    return zip_file
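A hedged sketch; the directories and file list are hypothetical, and the filtering that normally produces zip_file_list is assumed to have run already:

zip_file = MakeUnversionedArchive('src/out/Release', '/tmp/staging',
                                  ['chrome', 'resources.pak'])
print 'Archive staged at %s' % zip_file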
Example #8
def GetZipFileNames(build_properties,
                    build_revision,
                    webkit_revision=None,
                    extract=False,
                    use_try_buildnumber=True):
    base_name = 'full-build-%s' % chromium_utils.PlatformName()

    if 'try' in build_properties.get('mastername', '') and use_try_buildnumber:
        if extract:
            if not build_properties.get('parent_buildnumber'):
                raise Exception('build_props does not have parent data: %s' %
                                build_properties)
            version_suffix = '_%(parent_buildnumber)s' % build_properties
        else:
            version_suffix = '_%(buildnumber)s' % build_properties
    elif webkit_revision:
        version_suffix = '_wk%s_%s' % (webkit_revision, build_revision)
    else:
        version_suffix = '_%s' % build_revision

    return base_name, version_suffix
Example #9
def GetZipFileNames(mastername,
                    buildnumber,
                    parent_buildnumber,
                    build_revision,
                    webkit_revision=None,
                    extract=False,
                    use_try_buildnumber=True):
    base_name = 'full-build-%s' % chromium_utils.PlatformName()

    if 'try' in mastername and use_try_buildnumber:
        if extract:
            if not parent_buildnumber:
                raise Exception('missing parent_buildnumber')
            version_suffix = '_%s' % parent_buildnumber
        else:
            version_suffix = '_%s' % buildnumber
    elif webkit_revision:
        version_suffix = '_wk%s_%s' % (webkit_revision, build_revision)
    else:
        version_suffix = '_%s' % build_revision

    return base_name, version_suffix
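Unlike the earlier variants, this signature takes plain scalars, so a sketch can run without a checkout; all values are hypothetical:

base_name, version_suffix = GetZipFileNames(
    mastername='tryserver.fake',
    buildnumber=42,
    parent_buildnumber=None,
    build_revision=123456)
print base_name + version_suffix  # e.g. full-build-linux_42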
Example #10
def GetBuildUrl(abs_build_dir, options):
    """Compute the url to download the build from.  This will use as a base
     string, in order of preference:
     1) options.build_url
     2) options.factory_properties.build_url
     3) build url constructed from build_properties

     Args:
       abs_build_dir: Full path to source directory.
       options: options object as specified by parser below.
   """
    url = options.build_url or options.factory_properties.get('build_url')
    if not url:
        url = 'http://%s/b/build/slave/%s/chrome_staging/full-build-%s.zip' % (
            options.build_properties.parent_slavename,
            options.build_properties.parent_builddir,
            chromium_utils.PlatformName())

    if 'parentslavename' in url:
        parentslavename = options.build_properties.get('parentslavename', '')
        url = url % {'parentslavename': parentslavename}

    base_url = url
    versioned_url = url

    if options.webkit_dir:
        webkit_revision = slave_utils.SubversionRevision(
            os.path.join(abs_build_dir, '..', options.webkit_dir))
        versioned_url = versioned_url.replace('.zip',
                                              '_wk%d.zip' % webkit_revision)

    # Find the revision that we need to download.
    chromium_revision = slave_utils.SubversionRevision(abs_build_dir)
    versioned_url = versioned_url.replace('.zip',
                                          '_%d.zip' % chromium_revision)

    return base_url, versioned_url
Example #11
def archive(options, args):
    # Disable 'unused argument' warning for 'args' | pylint: disable=W0613
    build_dir = build_directory.GetBuildOutputDirectory()
    src_dir = os.path.abspath(os.path.dirname(build_dir))
    build_dir = os.path.join(build_dir, options.target)

    revision_dir = options.factory_properties.get('revision_dir')
    primary_project = chromium_utils.GetPrimaryProject(options)

    build_sortkey_branch, build_sortkey_value = GetBuildSortKey(
        options, primary_project)
    build_git_commit = GetGitCommit(options, primary_project)

    staging_dir = slave_utils.GetStagingDir(src_dir)
    chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

    print 'Staging in %s' % build_dir

    # Build the list of files to archive.
    zip_file_list = [
        f for f in os.listdir(build_dir)
        if ShouldPackageFile(f, options.target)
    ]

    subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix',
                                                   '')
    pieces = [chromium_utils.PlatformName(), options.target.lower()]
    if subdir_suffix:
        pieces.append(subdir_suffix)
    subdir = '-'.join(pieces)

    # Components like v8 get a <name>-v8-component-<revision> infix.
    component = ''
    if revision_dir:
        component = '-%s-component' % revision_dir

    prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
    sortkey_path = chromium_utils.GetSortableUploadPathForSortKey(
        build_sortkey_branch, build_sortkey_value)
    zip_file_name = '%s-%s-%s%s-%s' % (prefix, chromium_utils.PlatformName(),
                                       options.target.lower(), component,
                                       sortkey_path)

    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    chromium_utils.MakeWorldReadable(zip_file)

    # Report the size of the zip file to help catch when it gets too big.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size

    gs_bucket = options.factory_properties.get('gs_bucket', None)
    gs_acl = options.factory_properties.get('gs_acl', None)

    gs_metadata = {
        GS_COMMIT_POSITION_NUMBER_KEY: build_sortkey_value,
    }
    if build_sortkey_branch:
        gs_metadata[
            GS_COMMIT_POSITION_KEY] = chromium_utils.BuildCommitPosition(
                build_sortkey_branch, build_sortkey_value)
    if build_git_commit:
        gs_metadata[GS_GIT_COMMIT_KEY] = build_git_commit

    status = slave_utils.GSUtilCopyFile(zip_file,
                                        gs_bucket,
                                        subdir=subdir,
                                        gs_acl=gs_acl,
                                        metadata=gs_metadata)
    if status:
        raise StagingError('Failed to upload %s to %s. Error %d' %
                           (zip_file, gs_bucket, status))
    else:
        # Delete the file, it is not needed anymore.
        os.remove(zip_file)

    return status
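For reference, a hedged sketch of the object name this variant produces, using hypothetical values; the exact output format of GetSortableUploadPathForSortKey is an assumption:

prefix, platform, target = 'cf_archive', 'linux', 'release'
component = '-v8-component'              # hypothetical revision_dir infix
sortkey_path = 'refs_heads_main-123456'  # hypothetical sortable path
print '%s-%s-%s%s-%s' % (prefix, platform, target, component, sortkey_path)
# -> cf_archive-linux-release-v8-component-refs_heads_main-123456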
Example #12
    def UploadTests(self, www_dir, gs_base, gs_acl):
        test_file_list = self._test_files
        if not test_file_list:
            return

        # Make test_file_list contain absolute paths.
        test_file_list = [
            os.path.join(self._build_dir, f) for f in test_file_list
        ]
        UPLOAD_DIR = 'chrome-%s.test' % chromium_utils.PlatformName()

        # Filter out those files that don't exist.
        base_src_dir = os.path.join(self._build_dir, '')

        for test_file in test_file_list[:]:
            if os.path.exists(test_file):
                relative_dir = os.path.dirname(test_file[len(base_src_dir):])
                test_dir = os.path.join(www_dir, UPLOAD_DIR, relative_dir)
                print 'chromium_utils.CopyFileToDir(%s, %s)' % (test_file,
                                                                test_dir)
            else:
                print '%s does not exist and is skipped.' % test_file
                test_file_list.remove(test_file)

        # Extract the list of test paths that will be created. These paths need
        # to be relative to the archive dir. We have to rebuild the relative
        # list from the now-pruned absolute test_file_list.
        relative_file_list = [tf[len(base_src_dir):] for tf in test_file_list]
        test_dirs = archive_utils.ExtractDirsFromPaths(relative_file_list)
        test_dirs = [os.path.join(www_dir, UPLOAD_DIR, d) for d in test_dirs]

        root_test_dir = os.path.join(www_dir, UPLOAD_DIR)
        print 'chromium_utils.MaybeMakeDirectory(%s)' % root_test_dir
        for test_dir in test_dirs:
            print 'chromium_utils.MaybeMakeDirectory(%s)' % test_dir

        if not self.options.dry_run:
            if chromium_utils.IsWindows():
                # Use Samba on Windows.
                self.MyMaybeMakeDirectory(root_test_dir, gs_base)
                for test_dir in test_dirs:
                    self.MyMaybeMakeDirectory(test_dir, gs_base)
                for test_file in test_file_list:
                    # TODO(robertshield): binaries and symbols are stored in a zip file
                    # via CreateArchiveFile. Tests should be too.
                    relative_dir = os.path.dirname(
                        test_file[len(base_src_dir):])
                    test_dir = os.path.join(www_dir, UPLOAD_DIR, relative_dir)
                    self.MyCopyFileToDir(test_file,
                                         test_dir,
                                         gs_base,
                                         gs_subdir='/'.join(
                                             [UPLOAD_DIR, relative_dir]),
                                         gs_acl=gs_acl)
            else:
                # Otherwise use scp.
                self.MySshMakeDirectory(self.options.archive_host,
                                        root_test_dir, gs_base)
                for test_dir in test_dirs:
                    self.MySshMakeDirectory(self.options.archive_host,
                                            test_dir, gs_base)
                for test_file in test_file_list:
                    self.MyMakeWorldReadable(test_file, gs_base)
                    # TODO(robertshield): binaries and symbols are stored in a zip file
                    # via CreateArchiveFile. Tests should be too.
                    relative_dir = os.path.dirname(
                        test_file[len(base_src_dir):])
                    test_dir = os.path.join(www_dir, UPLOAD_DIR, relative_dir)
                    self.MySshCopyFiles(test_file,
                                        self.options.archive_host,
                                        test_dir,
                                        gs_base,
                                        gs_subdir='/'.join(
                                            [UPLOAD_DIR, relative_dir]),
                                        gs_acl=gs_acl)
Example #13
def real_main(options):
    """ Download a build, extract it to build\BuildDir\full-build-win32
      and rename it to build\BuildDir\Target
  """
    abs_build_dir = os.path.abspath(
        build_directory.GetBuildOutputDirectory(options.src_dir))
    target_build_output_dir = os.path.join(abs_build_dir, options.target)

    # Generic name for the archive.
    archive_name = 'full-build-%s.zip' % chromium_utils.PlatformName()

    # Just take the zip off the name for the output directory name.
    output_dir = os.path.join(abs_build_dir, archive_name.replace('.zip', ''))

    src_dir = os.path.dirname(abs_build_dir)
    if not options.build_revision:
        (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
            src_dir, options.webkit_dir, options.revision_dir)
    else:
        build_revision = options.build_revision
        webkit_revision = options.webkit_revision
    base_url, url = GetBuildUrl(options, build_revision, webkit_revision)
    archive_name = os.path.basename(base_url)

    if url.startswith('gs://'):
        handler = GSHandler(url=url, archive_name=archive_name)
    else:
        handler = WebHandler(url=url, archive_name=archive_name)

    # We try to download and extract 3 times.
    for tries in range(1, 4):
        print 'Try %d: Fetching build from %s...' % (tries, url)

        failure = False

        # Check if the url exists.
        if not handler.is_present():
            print '%s is not found' % url
            failure = True

            # When 'halt_on_missing_build' is present in factory_properties and if
            # 'revision' is set in build properties, we assume the build is
            # triggered automatically and so we halt on a missing build zip.  The
            # other case is if the build is forced, in which case we keep trying
            # later by looking for the latest build that's available.
            if (options.factory_properties.get('halt_on_missing_build', False)
                    and 'revision' in options.build_properties
                    and options.build_properties['revision'] != ''):
                return slave_utils.ERROR_EXIT_CODE

        # If the url is valid, we download the file.
        if not failure:
            if not handler.download():
                failure = True

        # If the versioned url failed, we try to get the latest build.
        if failure:
            if url.startswith('gs://'):
                continue
            else:
                print 'Fetching latest build at %s' % base_url
                handler.url = base_url
                if not handler.download():
                    continue

        print 'Extracting build %s to %s...' % (archive_name, abs_build_dir)
        try:
            chromium_utils.RemoveDirectory(target_build_output_dir)
            chromium_utils.ExtractZip(archive_name, abs_build_dir)
            # For Chrome builds, the build will be stored in chrome-win32.
            if 'full-build-win32' in output_dir:
                chrome_dir = output_dir.replace('full-build-win32',
                                                'chrome-win32')
                if os.path.exists(chrome_dir):
                    output_dir = chrome_dir

            print 'Moving build from %s to %s' % (output_dir,
                                                  target_build_output_dir)
            shutil.move(output_dir, target_build_output_dir)
        except (OSError, IOError, chromium_utils.ExternalError):
            print 'Failed to extract the build.'
            # Print out the traceback in a nice format
            traceback.print_exc()
            # Try again...
            continue

        # If we got the latest build, then figure out its revision number.
        if failure:
            print "Trying to determine the latest build's revision number..."
            try:
                build_revision_file_name = os.path.join(
                    target_build_output_dir,
                    chromium_utils.FULL_BUILD_REVISION_FILENAME)
                build_revision_file = open(build_revision_file_name, 'r')
                print 'Latest build is revision: %s' % build_revision_file.read()
                build_revision_file.close()
            except IOError:
                print "Could not determine the latest build's revision number"

        if failure:
            # We successfully extracted the archive, but it was the generic one.
            return slave_utils.WARNING_EXIT_CODE
        return 0

    # If we get here, that means that it failed 3 times. We return a failure.
    return slave_utils.ERROR_EXIT_CODE
Example #14
def SendGomaTsMon(json_file, exit_status):
    """Send latest Goma status to ts_mon.

  Args:
    json_file: json filename string that has goma_ctl.py jsonstatus.
    exit_status: integer exit status of the build.
  """
    json_statuses = {}
    try:
        with open(json_file) as f:
            json_statuses = json.load(f)

        if not json_statuses:
            print('no json status is recorded in %s' % json_file)
            return

        if len(json_statuses.get('notice', [])) != 1:
            print('unknown json statuses style: %s' % json_statuses)
            return

        json_status = json_statuses['notice'][0]
        if json_status['version'] != 1:
            print('unknown version: %s' % json_status)
            return

        infra_status = json_status.get('infra_status')

        result = 'success'
        if exit_status != 0:
            result = 'failure'
            if (exit_status < 0 or not infra_status
                    or infra_status['ping_status_code'] != 200
                    or infra_status.get('num_user_error', 0) > 0):
                result = 'exception'

        num_failure = 0
        ping_status_code = 0
        if infra_status:
            num_failure = infra_status['num_exec_compiler_proxy_failure']
            ping_status_code = infra_status['ping_status_code']

        clobber = 0
        if os.environ.get('BUILDBOT_CLOBBER'):
            clobber = 1

        counter = {
            'name': 'goma/failure',
            'value': num_failure,
            'builder': os.environ.get('BUILDBOT_BUILDERNAME', 'unknown'),
            'master': os.environ.get('BUILDBOT_MASTERNAME', 'unknown'),
            'slave': os.environ.get('BUILDBOT_SLAVENAME', 'unknown'),
            'clobber': clobber,
            'os': chromium_utils.PlatformName(),
            'ping_status_code': ping_status_code,
            'result': result
        }
        start_time = GetCompilerProxyStartTime()
        if start_time:
            counter['start_time'] = int(time.mktime(start_time.timetuple()))
        run_cmd = PLATFORM_RUN_CMD.get(os.name)
        if not run_cmd:
            print 'Unknown os.name: %s' % os.name
            return

        counter_json = json.dumps(counter)
        # base64 encode on windows because it doesn't like json on the command-line.
        if os.name == 'nt':
            counter_json = base64.b64encode(counter_json)
        cmd = [
            sys.executable, run_cmd, 'infra.tools.send_ts_mon_values',
            '--verbose', '--ts-mon-target-type', 'task',
            '--ts-mon-task-service-name', 'goma-client',
            '--ts-mon-task-job-name', 'default', '--counter', counter_json
        ]
        cmd_filter = chromium_utils.FilterCapture()
        retcode = chromium_utils.RunCommand(cmd,
                                            filter_obj=cmd_filter,
                                            max_time=30)
        if retcode:
            print('Execution of send_ts_mon_values failed with code %s' %
                  retcode)
            print '\n'.join(cmd_filter.text)

    except Exception as ex:
        print('error while sending ts mon json_file=%s: %s' % (json_file, ex))
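A hedged call sketch; the jsonstatus path is whatever `goma_ctl.py jsonstatus` wrote and is hypothetical here:

SendGomaTsMon('/tmp/goma_ctl_jsonstatus.json', exit_status=0)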
Example #15
def MakeGomaStatusCounter(json_file,
                          exit_status,
                          builder='unknown',
                          master='unknown',
                          slave='unknown',
                          clobber=''):
    """Make latest Goma status counter which will be sent to ts_mon.

  Args:
    json_file: json filename string that has goma_ctl.py jsonstatus.
    exit_status: integer exit status of the build.

  Returns:
    counter dict if succeeded. None if failed.
  """
    json_statuses = {}
    try:
        with open(json_file) as f:
            json_statuses = json.load(f)

        if not json_statuses:
            print('no json status is recorded in %s' % json_file)
            return None

        if len(json_statuses.get('notice', [])) != 1:
            print('unknown json statuses style: %s' % json_statuses)
            return None

        json_status = json_statuses['notice'][0]
        if json_status['version'] != 1:
            print('unknown version: %s' % json_status)
            return None

        infra_status = json_status.get('infra_status')

        result = 'success'
        if exit_status is None:
            result = 'exception'
        elif exit_status != 0:
            result = 'failure'
            if (exit_status < 0 or not infra_status
                    or infra_status['ping_status_code'] != 200
                    or infra_status.get('num_user_error', 0) > 0):
                result = 'exception'

        num_failure = 0
        ping_status_code = 0
        if infra_status:
            num_failure = infra_status['num_exec_compiler_proxy_failure']
            ping_status_code = infra_status['ping_status_code']

        counter = {
            'name': 'goma/failure',
            'value': num_failure,
            'builder': builder,
            'master': master,
            'slave': slave,
            'clobber': 1 if clobber else 0,
            'os': chromium_utils.PlatformName(),
            'ping_status_code': ping_status_code,
            'result': result
        }
        start_time = GetCompilerProxyStartTime()
        if start_time:
            counter['start_time'] = int(time.mktime(start_time.timetuple()))
        return counter

    except Exception as ex:
        print(
            'error while making goma status counter for ts_mon: json_file=%s: %s'
            % (json_file, ex))
        return None
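A hedged sketch mirroring the previous example, but through this counter-returning variant; the path and names are hypothetical and the module's json import is assumed:

counter = MakeGomaStatusCounter(
    '/tmp/goma_ctl_jsonstatus.json',  # hypothetical jsonstatus path
    exit_status=0,
    builder='linux-rel', master='chromium.fake', slave='build1-a1')
if counter:
    print json.dumps(counter)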
Example #16
def archive(options, args):
  build_dir = build_directory.GetBuildOutputDirectory()
  src_dir = os.path.abspath(os.path.dirname(build_dir))
  build_dir = os.path.join(build_dir, options.target)

  revision_dir = options.factory_properties.get('revision_dir')
  (build_revision, _) = slave_utils.GetBuildRevisions(
      src_dir, None, revision_dir)

  staging_dir = slave_utils.GetStagingDir(src_dir)
  chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)

  print 'Staging in %s' % build_dir

  # Build the list of files to archive.
  zip_file_list = [f for f in os.listdir(build_dir)
                   if ShouldPackageFile(f, options.target)]

  subdir_suffix = options.factory_properties.get('cf_archive_subdir_suffix',
                                                 '')
  pieces = [chromium_utils.PlatformName(), options.target.lower()]
  if subdir_suffix:
    pieces.append(subdir_suffix)
  subdir = '-'.join(pieces)

  # Components like v8 get a <name>-v8-component-<revision> infix.
  component = ''
  if revision_dir:
    component = '-%s-component' % revision_dir

  prefix = options.factory_properties.get('cf_archive_name', 'cf_archive')
  zip_file_name = '%s-%s-%s%s-%s' % (prefix,
                                   chromium_utils.PlatformName(),
                                   options.target.lower(),
                                   component,
                                   build_revision)

  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                               zip_file_name,
                                               zip_file_list,
                                               build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print 'Zip file is %ld bytes' % zip_size

  gs_bucket = options.factory_properties.get('gs_bucket', None)
  gs_acl = options.factory_properties.get('gs_acl', None)
  status = slave_utils.GSUtilCopyFile(zip_file, gs_bucket, subdir=subdir,
                                      gs_acl=gs_acl)
  if status:
    raise StagingError('Failed to upload %s to %s. Error %d' % (zip_file,
                                                                gs_bucket,
                                                                status))
  else:
    # Delete the file, it is not needed anymore.
    os.remove(zip_file)

  return status
Example #17
    def ArchiveBuild(self):
        """Zips build files and uploads them, their symbols, and a change log."""
        result = 0
        if self._build_revision is None:
            raise archive_utils.StagingError('No build revision was provided')
        print 'Staging in %s' % self._staging_dir

        arch = archive_utils.BuildArch()
        fparser = archive_utils.FilesCfgParser(self._files_file,
                                               self.options.mode, arch)
        files_list = fparser.ParseLegacyList()
        self._archive_files = archive_utils.ExpandWildcards(
            self._build_dir, files_list)
        archives_list = fparser.ParseArchiveLists()
        # Check files and revision numbers.
        all_files_list = self._archive_files + [
            item['filename'] for sublist in archives_list.values()
            for item in sublist
        ]
        all_files_list.append(self._version_file)
        not_found = archive_utils.VerifyFiles(all_files_list, self._build_dir,
                                              self.options.ignore)
        not_found_optional = []
        for bad_fn in not_found[:]:
            if fparser.IsOptional(bad_fn):
                not_found_optional.append(bad_fn)
                not_found.remove(bad_fn)
                # Remove it from all file lists so we don't try to process it.
                if bad_fn in self._archive_files:
                    self._archive_files.remove(bad_fn)
                for archive_list in archives_list.values():
                    archive_list[:] = [
                        x for x in archive_list if bad_fn != x['filename']
                    ]
        # TODO(mmoss): Now that we can declare files optional in FILES.cfg, should
        # we only allow not_found_optional, and fail on any leftover not_found
        # files?

        print 'last change: %d' % self._build_revision
        previous_revision = self.GetLastBuildRevision()
        if self._build_revision <= previous_revision:
            # If there have been no changes, report it but don't raise an exception.
            # Someone might have pushed the "force build" button.
            print 'No changes since last build (r%d <= r%d)' % (
                self._build_revision, previous_revision)
            return 0

        print 'build name: %s' % self._build_name

        archive_name = 'chrome-%s.zip' % chromium_utils.PlatformName()
        archive_file = self.CreateArchiveFile(archive_name,
                                              self._archive_files)[1]

        # Handle any custom archives.
        # TODO(mmoss): Largely copied from stage_build.py. Maybe refactor more of
        # this into archive_utils.py.
        archive_files = [archive_file]
        for archive_name in archives_list:
            if fparser.IsDirectArchive(archives_list[archive_name]):
                fileobj = archives_list[archive_name][0]
                # Copy the file to the path specified in archive_name, which might be
                # different than the dirname or basename in 'filename' (allowed by
                # 'direct_archive').
                stage_subdir = os.path.dirname(archive_name)
                stage_fn = os.path.basename(archive_name)
                chromium_utils.MaybeMakeDirectory(
                    os.path.join(self._staging_dir, stage_subdir))
                print 'chromium_utils.CopyFileToDir(%s, %s, dest_fn=%s)' % (
                    os.path.join(self._build_dir, fileobj['filename']),
                    os.path.join(self._staging_dir, stage_subdir), stage_fn)
                if not self.options.dry_run:
                    chromium_utils.CopyFileToDir(
                        os.path.join(self._build_dir, fileobj['filename']),
                        os.path.join(self._staging_dir, stage_subdir),
                        dest_fn=stage_fn)
                archive_files.append(
                    os.path.join(self._staging_dir, archive_name))
            else:
                custom_archive = self.CreateArchiveFile(
                    archive_name,
                    [f['filename'] for f in archives_list[archive_name]])[1]
                print 'Adding %s to be archived.' % (custom_archive)
                archive_files.append(custom_archive)

        # Generate a change log or an error message if no previous revision.
        changelog_path = os.path.join(self._staging_dir, 'changelog.xml')
        self._GenerateChangeLog(previous_revision, changelog_path)

        # Generate a revisions file which contains the Chromium/WebKit/V8
        # revision information.
        self.GenerateRevisionFile()

        www_dir = os.path.join(self._www_dir_base, str(self._build_revision))
        gs_bucket = self.options.factory_properties.get('gs_bucket', None)
        gs_acl = self.options.factory_properties.get('gs_acl', None)
        gs_base = None
        if gs_bucket:
            gs_base = '/'.join(
                [gs_bucket, self._build_name,
                 str(self._build_revision)])
        self._UploadBuild(www_dir, changelog_path, self.revisions_path,
                          archive_files, gs_base, gs_acl)

        # Archive Linux packages (if any -- only created for Chrome builds).
        if chromium_utils.IsLinux():
            linux_packages = (glob.glob(
                os.path.join(self._build_dir,
                             '*-r%s_*.deb' % self._chromium_revision)))
            linux_packages.extend(
                glob.glob(
                    os.path.join(self._build_dir,
                                 '*-%s.*.rpm' % self._chromium_revision)))
            for package_file in linux_packages:
                print 'SshCopyFiles(%s, %s, %s)' % (
                    package_file, self.options.archive_host, www_dir)
            if not self.options.dry_run:
                print 'SshMakeDirectory(%s, %s)' % (self.options.archive_host,
                                                    www_dir)
                self.MySshMakeDirectory(self.options.archive_host, www_dir,
                                        gs_base)

                for package_file in linux_packages:
                    self.MyMakeWorldReadable(package_file, gs_base)
                    self.MySshCopyFiles(package_file,
                                        self.options.archive_host,
                                        www_dir,
                                        gs_base,
                                        gs_acl=gs_acl)
                    # Clean up archived packages, otherwise they keep accumulating
                    # since they have different filenames with each build.
                    os.unlink(package_file)

        self.UploadTests(www_dir, gs_base, gs_acl)

        if not self.options.dry_run:
            # Save the current build revision locally so we can compute a changelog
            # next time
            self.SaveBuildRevisionToSpecifiedFile(self.last_change_file)

            # Record the latest revision in the developer archive directory.
            latest_file_path = os.path.join(self._www_dir_base, 'LATEST')
            if chromium_utils.IsWindows():
                print 'Saving revision to %s' % latest_file_path
                if gs_base:
                    MyCopyFileToGS(self.last_change_file,
                                   gs_base,
                                   '..',
                                   mimetype='text/plain',
                                   gs_acl=gs_acl)
                if not gs_base or self._dual_upload:
                    self.SaveBuildRevisionToSpecifiedFile(latest_file_path)
            elif chromium_utils.IsLinux() or chromium_utils.IsMac():
                # Files are created umask 077 by default, so make it world-readable
                # before pushing to web server.
                self.MyMakeWorldReadable(self.last_change_file, gs_base)
                print 'Saving revision to %s:%s' % (self.options.archive_host,
                                                    latest_file_path)
                self.MySshCopyFiles(self.last_change_file,
                                    self.options.archive_host,
                                    latest_file_path,
                                    gs_base,
                                    '..',
                                    mimetype='text/plain',
                                    gs_acl=gs_acl)
            else:
                raise NotImplementedError(
                    'Platform "%s" is not currently supported.' % sys.platform)

        if len(not_found_optional):
            sys.stderr.write('\n\nINFO: Optional File(s) not found: %s\n' %
                             ', '.join(not_found_optional))
        if len(not_found):
            sys.stderr.write('\n\nWARNING: File(s) not found: %s\n' %
                             ', '.join(not_found))
        return result
Example #18
  Args:
    outdir: a directory that contains .ninja_log.
    compiler: compiler used for the build.
    command: command line.
    exit_status: ninja's exit status.
  """
    ninja_log_path = os.path.join(outdir, '.ninja_log')
    try:
        st = os.stat(ninja_log_path)
        mtime = datetime.datetime.fromtimestamp(st.st_mtime)
    except OSError as e:
        print e
        return

    cwd = os.getcwd()
    platform = chromium_utils.PlatformName()

    # info['cmdline'] should be a list of strings for the Go struct on
    # chromium-build-stats.
    if isinstance(command, (str, unicode)):
        command = [command]

    info = {
        'cmdline': command,
        'cwd': cwd,
        'platform': platform,
        'exit': exit_status,
        'env': {}
    }
    for k, v in os.environ.iteritems():
        info['env'][k] = v
Example #19
def real_main(options):
  """ Download a build, extract it to build\\BuildDir\\full-build-win32
      and rename it to build\\BuildDir\\Target
  """
  abs_build_dir = os.path.abspath(
      build_directory.GetBuildOutputDirectory(options.src_dir))
  target_build_output_dir = os.path.join(abs_build_dir, options.target)

  # Generic name for the archive.
  archive_name = 'full-build-%s.zip' % chromium_utils.PlatformName()

  # Just take the zip off the name for the output directory name.
  output_dir = os.path.join(abs_build_dir, archive_name.replace('.zip', ''))

  src_dir = os.path.dirname(abs_build_dir)
  if not options.build_revision and not options.build_archive_url:
    (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
        src_dir, options.webkit_dir, options.revision_dir)
  else:
    build_revision = options.build_revision
    webkit_revision = options.webkit_revision
  url, archive_name = GetBuildUrl(options, build_revision, webkit_revision)
  if archive_name is None:
    archive_name = 'build.zip'
    base_url = None
  else:
    base_url = '/'.join(url.split('/')[:-1] + [archive_name])

  if url.startswith('gs://'):
    handler = GSHandler(url=url, archive_name=archive_name)
  else:
    handler = WebHandler(url=url, archive_name=archive_name)

  # We try to download and extract 3 times.
  for tries in range(1, 4):
    print 'Try %d: Fetching build from %s...' % (tries, url)

    failure = False

    # If the url is valid, we download the file.
    if not failure:
      if not handler.download():
        if options.halt_on_missing_build:
          return slave_utils.ERROR_EXIT_CODE
        failure = True

    # If the versioned url failed, we try to get the latest build.
    if failure:
      if url.startswith('gs://') or not base_url:
        continue
      else:
        print 'Fetching latest build at %s' % base_url
        base_handler = handler.__class__(base_url, handler.archive_name)
        if not base_handler.download():
          continue

    print 'Extracting build %s to %s...' % (archive_name, abs_build_dir)
    try:
      chromium_utils.RemoveDirectory(target_build_output_dir)
      chromium_utils.ExtractZip(archive_name, abs_build_dir)
      # For Chrome builds, the build will be stored in chrome-win32.
      if 'full-build-win32' in output_dir:
        chrome_dir = output_dir.replace('full-build-win32', 'chrome-win32')
        if os.path.exists(chrome_dir):
          output_dir = chrome_dir

      print 'Moving build from %s to %s' % (output_dir, target_build_output_dir)
      shutil.move(output_dir, target_build_output_dir)
    except (OSError, IOError, chromium_utils.ExternalError):
      print 'Failed to extract the build.'
      # Print out the traceback in a nice format
      traceback.print_exc()
      # Try again...
      continue

    # If we got the latest build, then figure out its revision number.
    if failure:
      print "Trying to determine the latest build's revision number..."
      try:
        build_revision_file_name = os.path.join(
            target_build_output_dir,
            chromium_utils.FULL_BUILD_REVISION_FILENAME)
        build_revision_file = open(build_revision_file_name, 'r')
        print 'Latest build is revision: %s' % build_revision_file.read()
        build_revision_file.close()
      except IOError:
        print "Could not determine the latest build's revision number"

    if failure:
      # We successfully extracted the archive, but it was the generic one.
      return slave_utils.WARNING_EXIT_CODE
    return 0

  # If we get here, that means that it failed 3 times. We return a failure.
  return slave_utils.ERROR_EXIT_CODE
Example #20
def real_main(options):
    """Download a build and extract.

  Extract to build\\BuildDir\\full-build-win32 and rename it to
  build\\BuildDir\\Target
  """
    target_build_output_dir = os.path.join(options.build_dir, options.target)
    platform = chromium_utils.PlatformName()

    revision = options.revision
    if not revision:
        revision = GetLatestRevision(options.build_url, platform)
        if not revision:
            print 'Failed to get revision number.'
            return slave_utils.ERROR_EXIT_CODE

    archive_url = GetBuildUrl(options.build_url, platform, revision)
    archive_name = 'dynamorio.' + os.path.basename(archive_url).split('.', 1)[1]

    temp_dir = tempfile.mkdtemp()
    try:
        # We try to download and extract 3 times.
        for tries in range(1, 4):
            print 'Try %d: Fetching build from %s' % (tries, archive_url)

            failure = False
            try:
                print '%s/%s' % (archive_url, archive_name)
                urllib.urlretrieve(archive_url, archive_name)
                print '\nDownload complete'
            except IOError:
                print '\nFailed to download build'
                failure = True
                if options.halt_on_missing_build:
                    return slave_utils.ERROR_EXIT_CODE
            if failure:
                continue

            print 'Extracting build %s to %s...' % (archive_name,
                                                    options.build_dir)
            try:
                chromium_utils.RemoveDirectory(target_build_output_dir)
                chromium_utils.ExtractZip(archive_name, temp_dir)

                # Look for the top level directory from extracted build.
                entries = os.listdir(temp_dir)
                output_dir = temp_dir
                if (len(entries) == 1 and os.path.isdir(
                        os.path.join(output_dir, entries[0]))):
                    output_dir = os.path.join(output_dir, entries[0])

                print 'Moving build from %s to %s' % (output_dir,
                                                      target_build_output_dir)
                shutil.move(output_dir, target_build_output_dir)
            except (OSError, IOError, chromium_utils.ExternalError):
                print 'Failed to extract the build.'
                # Print out the traceback in a nice format
                traceback.print_exc()
                # Try again...
                time.sleep(3)
                continue
            return 0
    finally:
        chromium_utils.RemoveDirectory(temp_dir)

    # If we get here, that means that it failed 3 times. We return a failure.
    return slave_utils.ERROR_EXIT_CODE
Example #21
    def TargetPlatformName(self):
        return self.options.factory_properties.get(
            'target_os', chromium_utils.PlatformName())
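This helper lets factory properties pin a cross-compile target (for example, target_os='android') while falling back to the host's PlatformName() when unset.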
Example #22
    ]

    # Write out the revision number so we can figure it out in extract_build.py.
    build_revision_file_name = 'FULL_BUILD_REVISION'
    build_revision_path = os.path.join(build_dir, build_revision_file_name)
    try:
        build_revision_file = open(build_revision_path, 'w')
        build_revision_file.write('%d' % build_revision)
        build_revision_file.close()
        if chromium_utils.IsMac() or chromium_utils.IsLinux():
            os.chmod(build_revision_path, 0644)
        zip_file_list.append(build_revision_file_name)
    except IOError:
        print 'Writing to revision file %s failed ' % build_revision_path

    zip_file_name = 'full-build-%s' % chromium_utils.PlatformName()
    (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                                 zip_file_name,
                                                 zip_file_list,
                                                 build_dir,
                                                 raise_error=True)
    chromium_utils.RemoveDirectory(zip_dir)
    if not os.path.exists(zip_file):
        raise StagingError('Failed to make zip package %s' % zip_file)
    if chromium_utils.IsMac() or chromium_utils.IsLinux():
        os.chmod(zip_file, 0644)

    # Report the size of the zip file to help catch when it gets too big and
    # can cause bot failures from timeouts during downloads to testers.
    zip_size = os.stat(zip_file)[stat.ST_SIZE]
    print 'Zip file is %ld bytes' % zip_size