def Compile(self, target):
     """ Compile the Skia executables. """
     os.environ['SKIA_ANDROID_VERBOSE_SETUP'] = '1'
     os.environ['PATH'] = os.path.abspath(
         os.path.join(os.pardir, os.pardir, os.pardir, os.pardir,
                      'third_party',
                      'gsutil')) + os.pathsep + os.environ['PATH']
     os.environ['BOTO_CONFIG'] = os.path.abspath(
         os.path.join(os.pardir, os.pardir, os.pardir, os.pardir,
                      'site_config', '.boto'))
     os.environ['ANDROID_SDK_ROOT'] = self._step.args['android_sdk_root']
     gyp_defines = self._step.args['gyp_defines']
     os.environ['GYP_DEFINES'] = gyp_defines
     print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
     os.environ['BUILDTYPE'] = self._step.configuration
     print 'BUILDTYPE="%s"' % os.environ['BUILDTYPE']
     cmd = [
         os.path.join('platform_tools', 'android', 'bin', 'android_ninja'),
         target,
         '-d',
         self._step.args['device'],
     ]
     cmd.extend(self._step.default_ninja_flags)
     if os.name != 'nt':
         try:
             ccache = shell_utils.run(['which', 'ccache']).rstrip()
             if ccache:
                 os.environ['ANDROID_MAKE_CCACHE'] = ccache
         except Exception:
             pass
     cmd.extend(self._step.make_flags)
     shell_utils.run(cmd)
  def Compile(self, target):
    """ Compile the Skia executables. """
    # TODO(borenet): It would be nice to increase code sharing here.
    if 'Win8' in self._step.builder_name:
      os.environ['GYP_MSVS_VERSION'] = '2012'
      print 'GYP_MSVS_VERSION="%s"' % os.environ['GYP_MSVS_VERSION']

    os.environ['CHROME_PATH'] = os.path.join(os.path.expanduser('~'), 'src')
    print 'CHROME_PATH="%s"' % os.environ['CHROME_PATH']

    os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
    print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
    make_cmd = 'make'
    if os.name == 'nt':
      make_cmd = 'make.bat'
    cmd = [make_cmd,
           target,
           'BUILDTYPE=%s' % self._step.configuration,
           ]
    cmd.extend(self._step.default_make_flags)
    cmd.extend(self._step.make_flags)

    # TODO(epoger): Maybe remove this once we fix the underlying problem in
    # https://code.google.com/p/skia/issues/detail?id=2393 ('recurring RunGYP
    # failures on multiple Test-Win7-ShuttleA-HD2000-* bots')
    print 'about to run cmd %s' % cmd
    cwd = os.getcwd()
    print 'cwd is %s' % cwd
    print 'contents of cwd are %s' % os.listdir(cwd)

    shell_utils.run(cmd)
def delete_storage_object(object_name):
  """Delete an object on Google Storage."""
  gsutil = slave_utils.GSUtilSetup()
  command = [gsutil]
  command.extend(['rm', '-R', object_name])
  print 'Running command: %s' % command
  shell_utils.run(command)
 def Compile(self, target):
   """ Compile the Skia executables. """
   os.environ['SKIA_ANDROID_VERBOSE_SETUP'] = '1'
   os.environ['PATH'] = os.path.abspath(
       os.path.join(os.pardir, os.pardir, os.pardir, os.pardir, 'third_party',
                    'gsutil')) + os.pathsep + os.environ['PATH']
   os.environ['BOTO_CONFIG'] = os.path.abspath(os.path.join(
       os.pardir, os.pardir, os.pardir, os.pardir, 'site_config', '.boto'))
   os.environ['ANDROID_SDK_ROOT'] = self._step.args['android_sdk_root']
   gyp_defines = self._step.args['gyp_defines']
   os.environ['GYP_DEFINES'] = gyp_defines
   print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
   os.environ['BUILDTYPE'] = self._step.configuration
   print 'BUILDTYPE="%s"' % os.environ['BUILDTYPE']
   cmd = [os.path.join('platform_tools', 'android', 'bin', 'android_ninja'),
          target,
          '-d', self._step.args['device'],
          ]
   cmd.extend(self._step.default_ninja_flags)
   if os.name != 'nt':
     try:
       ccache = shell_utils.run(['which', 'ccache']).rstrip()
       if ccache:
         os.environ['ANDROID_MAKE_CCACHE'] = ccache
     except Exception:
       pass
   cmd.extend(self._step.make_flags)
   shell_utils.run(cmd)
# Example #5
# 0
def delete_storage_object(object_name):
    """Delete an object on Google Storage."""
    gsutil = slave_utils.GSUtilSetup()
    command = [gsutil]
    command.extend(['rm', '-R', object_name])
    print 'Running command: %s' % command
    shell_utils.run(command)
# Example #6
# 0
def move_storage_directory(src_dir, dest_dir):
    """Move a directory on Google Storage."""
    gsutil = slave_utils.GSUtilSetup()
    command = [gsutil]
    command.extend(['mv', '-p', src_dir, dest_dir])
    print 'Running command: %s' % command
    shell_utils.run(command)
def copy_dir_contents(remote_src_dir, remote_dest_dir, gs_acl='private',
                      http_header_lines=None):
  """Copy contents of one Google Storage directory to another.

  params:
    remote_src_dir: source GS URL (gs://BUCKETNAME/PATH)
    remote_dest_dir: dest GS URL (gs://BUCKETNAME/PATH)
    gs_acl: which predefined ACL to apply to the new files; see
        https://developers.google.com/storage/docs/accesscontrol#extension
    http_header_lines: a list of HTTP header strings to add, if any

  The copy operates as a "merge with overwrite": any files in src_dir will be
  "overlaid" on top of the existing content in dest_dir.  Existing files with
  the same names will be overwritten.

  Performs the copy in multithreaded mode, in case there are a large number of
  files.
  """
  gsutil = slave_utils.GSUtilSetup()
  command = [gsutil, '-m']
  if http_header_lines:
    for http_header_line in http_header_lines:
      command.extend(['-h', http_header_line])
  command.extend(['cp', '-a', gs_acl, '-R', remote_src_dir, remote_dest_dir])
  print 'Running command: %s' % command
  shell_utils.run(command)
def move_storage_directory(src_dir, dest_dir):
  """Move a directory on Google Storage."""
  gsutil = slave_utils.GSUtilSetup()
  command = [gsutil]
  command.extend(['mv', '-p', src_dir, dest_dir])
  print 'Running command: %s' % command
  shell_utils.run(command)
# Example #9
# 0
def copy_dir_contents(remote_src_dir,
                      remote_dest_dir,
                      gs_acl='private',
                      http_header_lines=None):
    """Copy contents of one Google Storage directory to another.

  params:
    remote_src_dir: source GS URL (gs://BUCKETNAME/PATH)
    remote_dest_dir: dest GS URL (gs://BUCKETNAME/PATH)
    gs_acl: which predefined ACL to apply to the new files; see
        https://developers.google.com/storage/docs/accesscontrol#extension
    http_header_lines: a list of HTTP header strings to add, if any

  The copy operates as a "merge with overwrite": any files in src_dir will be
  "overlaid" on top of the existing content in dest_dir.  Existing files with
  the same names will be overwritten.

  Performs the copy in multithreaded mode, in case there are a large number of
  files.
  """
    gsutil = slave_utils.GSUtilSetup()
    command = [gsutil, '-m']
    if http_header_lines:
        for http_header_line in http_header_lines:
            command.extend(['-h', http_header_line])
    command.extend(['cp', '-a', gs_acl, '-R', remote_src_dir, remote_dest_dir])
    print 'Running command: %s' % command
    shell_utils.run(command)
# Example #10
# 0
  def _Run(self):
    # Build the Skia libraries in Release mode.
    os.environ['GYP_DEFINES'] = 'skia_static_initializers=0'
    shell_utils.run(['python', 'gyp_skia'])
    shell_utils.run(['make', 'skia_lib', 'BUILDTYPE=Release', '--jobs'])

    # Obtain the dump-static-initializers script.
    print 'Downloading %s' % DUMP_STATIC_INITIALIZERS_URL
    dl = urllib2.urlopen(DUMP_STATIC_INITIALIZERS_URL)
    with open(DUMP_STATIC_INITIALIZERS_FILENAME, 'wb') as f:
      f.write(dl.read())

    # Run the script over the compiled files.
    results = []
    for built_file_name in os.listdir(os.path.join('out', 'Release')):
      if built_file_name.endswith('.a') or built_file_name.endswith('.so'):
        output = shell_utils.run(['python', DUMP_STATIC_INITIALIZERS_FILENAME,
                                  os.path.join('out', 'Release',
                                               built_file_name)])
        matches = re.search('Found (\d+) static initializers', output)
        if matches:
          num_found = int(matches.groups()[0])
          if num_found:
            results.append((built_file_name, num_found))
    if results:
      print
      print 'Found static initializers:'
      print
      for result in results:
        print '  %s: %d' % result
      print
      # TODO(borenet): Make this an error once we have no static initializers.
      raise BuildStepWarning('Static initializers found!')
# Example #11
# 0
def upload_dir_contents(local_src_dir,
                        remote_dest_dir,
                        gs_acl='private',
                        http_header_lines=None):
    """Upload contents of a local directory to Google Storage.

  Merge-with-overwrite semantics: files in local_src_dir are overlaid on top
  of any existing content in remote_dest_dir, overwriting same-named files.

  Each file is uploaded with its own gsutil invocation.  That is slower than
  'gsutil -m cp -R <source> <dest>', but in http://skbug.com/2618 ('The Case
  of the Missing Mandrills') the parallel form was found to fail silently in
  some cases.

  params:
    local_src_dir: directory on local disk to upload contents of
    remote_dest_dir: GS URL (gs://BUCKETNAME/PATH)
    gs_acl: which predefined ACL to apply to the files on Google Storage; see
        https://developers.google.com/storage/docs/accesscontrol#extension
    http_header_lines: a list of HTTP header strings to add, if any

  TODO(epoger): Use the google-api-python-client API rather than the gsutil
  tool; see http://skbug.com/2618.  Also consider uploading files in
  parallel, adding a "noclobber" mode, and an optional gzip
  "Content-Encoding" for automatic decompression on HTTP download.
  """
    base_cmd = [slave_utils.GSUtilSetup()]
    for header_line in (http_header_lines or []):
        base_cmd.extend(['-h', header_line])
    base_cmd.extend(['cp', '-a', gs_acl])

    src_root = os.path.abspath(local_src_dir)
    for dirpath, _, filenames in os.walk(src_root):
        if dirpath == src_root:
            # Top-level file: upload directly into remote_dest_dir.
            dest_dir = remote_dest_dir
        else:
            # Mirror the subdirectory structure under remote_dest_dir.
            rel = os.path.relpath(dirpath, src_root)
            dest_dir = posixpath.join(remote_dest_dir,
                                      _convert_to_posixpath(rel))
        for name in sorted(filenames):
            shell_utils.run(base_cmd + [os.path.join(dirpath, name),
                                        posixpath.join(dest_dir, name)])
# Example #12
# 0
    def Compile(self, target):
        platform_bin = os.path.join('platform_tools', 'barelinux', 'bin')
        # If working_dir doesn't exist, arm64_download will create it.
        # this script should download everything we need to start the
        # virtual machine, and then boot it up.  If it fails it will
        # return a non-zero exit status and shell_utils.run will throw an
        # exception.  We do not catch this exception.
        print 'Installing build tools and VM to', self._working_dir
        self.AddGsutilToPath()  # needed by arm64_download
        shell_utils.run(
            [os.path.join(platform_bin, 'arm64_download'), self._working_dir])

        assert os.path.isdir(self._working_dir)

        toolchain_bin = os.path.join(
            self._working_dir,
            'gcc-linaro-aarch64-linux-gnu-4.8-2013.12_linux', 'bin')
        assert os.path.isdir(toolchain_bin)

        make_cmd = [
            'sh',
            '-x',
            os.path.join(platform_bin, 'arm64_make'),
            '-o',
            self._build_dir,
            '-c',
            os.path.join(toolchain_bin, 'aarch64-linux-gnu-gcc'),
            '-x',
            os.path.join(toolchain_bin, 'aarch64-linux-gnu-g++'),
            '-t',
            self._step.configuration,
        ]
        shell_utils.run(make_cmd)
# Example #13
# 0
    def _Run(self):
        """Generate documentation with doxygen, adding the Skia footers."""
        # Start from a clean working dir containing static_footer.txt.
        file_utils.create_clean_local_dir(DOXYGEN_WORKING_DIR)
        static_footer_path = os.path.join(DOXYGEN_WORKING_DIR,
                                          'static_footer.txt')
        shutil.copyfile(os.path.join('tools', 'doxygen_footer.txt'),
                        static_footer_path)

        # Copy the doxygen config, appending our overrides at the end, then
        # run doxygen with the modified config.
        file_utils.create_clean_local_dir(DOXYGEN_CONFIG_DIR)
        modified_doxyfile = os.path.join(DOXYGEN_CONFIG_DIR, DOXYFILE_BASENAME)
        with open(DOXYFILE_BASENAME, 'r') as reader, \
             open(modified_doxyfile, 'w') as writer:
            shutil.copyfileobj(reader, writer)
            writer.write('OUTPUT_DIRECTORY = %s\n' % DOXYGEN_WORKING_DIR)
            writer.write('HTML_FOOTER = %s\n' % static_footer_path)
        shell_utils.run([DOXYGEN_BINARY, modified_doxyfile])

        # Create iframe_footer.html with a timestamp and doxygen version.
        footer_path = os.path.join(DOXYGEN_WORKING_DIR, 'iframe_footer.html')
        with open(footer_path, 'w') as fh:
            fh.write(IFRAME_FOOTER_TEMPLATE %
                     (datetime.datetime.now().isoformat(' '),
                      shell_utils.run([DOXYGEN_BINARY, '--version'])))
 def RunFlavoredCmd(self, app, args):
   """ Override this in new BuildStepUtils flavors. """
   # Wrap the binary with catchsegv on 64-bit Linux builders (except TSAN
   # ones) so a segfault produces a stack trace.
   use_catchsegv = (sys.platform == 'linux2'
                    and 'x86_64' in self._step.builder_name
                    and 'TSAN' not in self._step.builder_name)
   if use_catchsegv:
     cmd = ['catchsegv', self._PathToBinary(app)]
   else:
     cmd = [self._PathToBinary(app)]
   shell_utils.run(cmd + args)
# Example #15
# 0
def upload_dir_contents(local_src_dir, remote_dest_dir, gs_acl='private',
                        http_header_lines=None):
  """Upload the contents of a local directory to Google Storage.

  Acts as a merge-with-overwrite: files in local_src_dir are overlaid onto
  the existing contents of remote_dest_dir, overwriting same-named files.

  Each file is copied with a separate gsutil call.  Slower than a single
  'gsutil -m cp -R <source> <dest>', but that parallel form was found to be
  silently failing in some cases; see http://skbug.com/2618 ('The Case of
  the Missing Mandrills').

  params:
    local_src_dir: directory on local disk to upload contents of
    remote_dest_dir: GS URL (gs://BUCKETNAME/PATH)
    gs_acl: which predefined ACL to apply to the files on Google Storage; see
        https://developers.google.com/storage/docs/accesscontrol#extension
    http_header_lines: a list of HTTP header strings to add, if any

  TODO(epoger): switch to the google-api-python-client API (see
  http://skbug.com/2618); upload files concurrently; add a "noclobber"
  mode; consider gzip-compressing files and setting "Content-Encoding:gzip"
  so HTTP downloads are unzipped automatically.
  """
  cmd_prefix = [slave_utils.GSUtilSetup()]
  if http_header_lines:
    for line in http_header_lines:
      cmd_prefix += ['-h', line]
  cmd_prefix += ['cp', '-a', gs_acl]

  root = os.path.abspath(local_src_dir)
  for cur_dir, _, files in os.walk(root):
    # Files directly under local_src_dir go straight into remote_dest_dir;
    # anything deeper mirrors its relative path on the remote side.
    if cur_dir == root:
      remote_dir = remote_dest_dir
    else:
      remote_dir = posixpath.join(
          remote_dest_dir,
          _convert_to_posixpath(os.path.relpath(cur_dir, root)))
    for fname in sorted(files):
      shell_utils.run(cmd_prefix +
                      [os.path.join(cur_dir, fname),
                       posixpath.join(remote_dir, fname)])
# Example #16
# 0
  def _Run(self):
    """Roll Skia DEPS into Chromium via auto_roll.py and publish status.

    Runs the roller inside the Chromium checkout, then uploads the created
    issue link and a human-readable roll status to Google Storage.  Roller
    failures other than "refusing to roll backwards" are re-raised at the
    end, after the status files have been uploaded.
    """
    chrome_path = os.path.join(os.pardir, 'src')
    with misc.ChDir(chrome_path):
      shell_utils.run(['git', 'config', '--local', 'user.name', DEPS_ROLL_NAME])
      shell_utils.run(['git', 'config', '--local', 'user.email',
                       DEPS_ROLL_AUTHOR])

      auto_roll = os.path.join(misc.BUILDBOT_PATH, 'third_party',
                               'chromium_buildbot_tot', 'scripts', 'tools',
                               'blink_roller', 'auto_roll.py')

      # python auto_roll.py <project> <author> <path to chromium/src>
      cmd = ['python', auto_roll, 'skia', DEPS_ROLL_AUTHOR, chrome_path]

      exception = None
      try:
        output = shell_utils.run(cmd)
      except shell_utils.CommandFailedException as e:
        output = e.output
        # Suppress failure for "refusing to roll backwards."
        if not re.search(REGEXP_ROLL_TOO_OLD, output):
          exception = e

    bucket_url = gs_utils.GSUtils.with_gs_prefix(
        skia_vars.GetGlobalVariable('googlestorage_bucket'))

    # If the roller created an issue, publish a page linking to it.
    match = re.search(REGEXP_ISSUE_CREATED, output)
    if match:
      issue = match.group('issue')
      print 'Found issue #', issue
      with open(FILENAME_CURRENT_ATTEMPT, 'w') as f:
        f.write(HTML_CONTENT % (ISSUE_URL_TEMPLATE % {'issue': issue}))
      slave_utils.GSUtilCopyFile(
          filename=FILENAME_CURRENT_ATTEMPT,
          gs_base=bucket_url,
          subdir=None,
          gs_acl='public-read')

    # Map the roller's output to the first matching status message.
    roll_status = None
    for regexp, status_msg in ROLL_STATUSES:
      match = re.search(regexp, output)
      if match:
        roll_status = status_msg % match.groupdict()
        break

    if roll_status:
      with open(FILENAME_ROLL_STATUS, 'w') as f:
        f.write(roll_status)
      slave_utils.GSUtilCopyFile(
          filename=FILENAME_ROLL_STATUS,
          gs_base=bucket_url,
          subdir=None,
          gs_acl='public-read')

    # Raise only now so the status uploads above still happen on failure.
    #pylint: disable=E0702
    if exception:
      raise exception
# Example #17
# 0
    def _Run(self):
        """Download and apply the patch specified in self._args['patch'].

        Fetches the diff (via 'svn cat' or HTTP), writes it to a temp file,
        verifies it applies at the given -p level (falling back to the
        other of 0/1), then applies it with 'git apply'.
        """
        if self._args['patch'] == 'None':
            raise BuildStepFailure('No patch given!')

        # patch is a tuple of the form (int, str), where patch[0] is the "level" of
        # the patch and patch[1] is the URL of the diff.
        patch_level, encoded_patch_url = literal_eval(
            self._args['patch'].decode())
        patch_url = urllib.quote(encoded_patch_url, safe="%/:=&?~+!$,;'@()*[]")
        print 'Patch level: %d' % patch_level
        print 'Diff file URL:'
        print patch_url

        # Write the patch file into a temporary directory. Unfortunately, temporary
        # files created by the tempfile module don't behave properly on Windows, so
        # we create a temporary directory and write the file inside it.
        temp_dir = tempfile.mkdtemp()
        try:
            patch_file_name = os.path.join(temp_dir, 'skiabot_patch')
            patch_file = open(patch_file_name, 'wb')
            try:
                if 'svn' in patch_url:
                    # TODO(borenet): Create an svn_utils module and use it instead.  It
                    # would be nice to find a way to share
                    # https://skia.googlesource.com/skia/+/master/tools/svn.py
                    patch_contents = shell_utils.run([SVN, 'cat', patch_url],
                                                     echo=False)
                else:
                    patch_contents = urllib2.urlopen(patch_url).read()
                if not patch_contents:
                    raise Exception('Got an empty patch!')
                patch_file.write(patch_contents)
            finally:
                patch_file.close()
            print 'Saved patch to %s' % patch_file.name

            # Build the 'git apply' invocation for a given -p level.
            def get_patch_cmd(level, patch_filename):
                return [
                    GIT, 'apply',
                    '-p%d' % level, '-v', '--ignore-space-change',
                    '--ignore-whitespace', patch_filename
                ]

            try:
                # First, check that the patch can be applied at the given level.
                shell_utils.run(
                    get_patch_cmd(patch_level, patch_file.name) + ['--check'])
            except shell_utils.CommandFailedException as e:
                # If the patch can't be applied at the requested level, try 0 or 1,
                # depending on what we just tried.
                print e
                patch_level = (patch_level + 1) % 2
                print 'Trying patch level %d instead...' % patch_level
            shell_utils.run(get_patch_cmd(patch_level, patch_file.name))

        finally:
            shutil.rmtree(temp_dir)
# Example #18
# 0
    def _Run(self):
        """Roll Skia DEPS into Chromium via auto_roll.py and publish status.

        Runs the roller inside the Chromium checkout, then uploads the
        created issue link and a roll status message to Google Storage.
        Roller failures other than "refusing to roll backwards" are
        re-raised at the end, after the status files have been uploaded.
        """
        chrome_path = os.path.join(os.pardir, 'src')
        with misc.ChDir(chrome_path):
            shell_utils.run(
                ['git', 'config', '--local', 'user.name', DEPS_ROLL_NAME])
            shell_utils.run(
                ['git', 'config', '--local', 'user.email', DEPS_ROLL_AUTHOR])

            auto_roll = os.path.join(misc.BUILDBOT_PATH, 'third_party',
                                     'chromium_buildbot_tot', 'scripts',
                                     'tools', 'blink_roller', 'auto_roll.py')

            # python auto_roll.py <project> <author> <path to chromium/src>
            cmd = ['python', auto_roll, 'skia', DEPS_ROLL_AUTHOR, chrome_path]

            exception = None
            try:
                output = shell_utils.run(cmd)
            except shell_utils.CommandFailedException as e:
                output = e.output
                # Suppress failure for "refusing to roll backwards."
                if not re.search(REGEXP_ROLL_TOO_OLD, output):
                    exception = e

        bucket_url = gs_utils.GSUtils.with_gs_prefix(
            skia_vars.GetGlobalVariable('googlestorage_bucket'))

        # If the roller created an issue, publish a page linking to it.
        match = re.search(REGEXP_ISSUE_CREATED, output)
        if match:
            issue = match.group('issue')
            print 'Found issue #', issue
            with open(FILENAME_CURRENT_ATTEMPT, 'w') as f:
                f.write(HTML_CONTENT % (ISSUE_URL_TEMPLATE % {'issue': issue}))
            slave_utils.GSUtilCopyFile(filename=FILENAME_CURRENT_ATTEMPT,
                                       gs_base=bucket_url,
                                       subdir=None,
                                       gs_acl='public-read')

        # Map the roller's output to the first matching status message.
        roll_status = None
        for regexp, status_msg in ROLL_STATUSES:
            match = re.search(regexp, output)
            if match:
                roll_status = status_msg % match.groupdict()
                break

        if roll_status:
            with open(FILENAME_ROLL_STATUS, 'w') as f:
                f.write(roll_status)
            slave_utils.GSUtilCopyFile(filename=FILENAME_ROLL_STATUS,
                                       gs_base=bucket_url,
                                       subdir=None,
                                       gs_acl='public-read')

        # Raise only now so the status uploads above happen even on failure.
        #pylint: disable=E0702
        if exception:
            raise exception
# Example #19
# 0
 def Compile(self, target):
   """Build the given target with ninja, running GYP first if needed."""
   # No 'out' directory means GYP has not generated build files yet.
   if not os.path.isdir('out'):
     self.RunGYP()
   cmd = ['ninja',
          '-C', os.path.join('out', self._step.configuration),
          target]
   cmd.extend(self._step.make_flags)
   shell_utils.run(cmd)
# Example #20
# 0
  def _SetGoogleReadACLs(self, gs_dir):
    """Sets the ACLs of all objects in the directory to google read-only.

    This method assumes that there is a gsutil in the system PATH that is
    recent enough to run the 'acl ch' command. The gsutil in
    chromium_buildbot is old and cannot run this command.
    """
    shell_utils.run(['gsutil', 'acl', 'ch', '-g', 'google.com:READ',
                     posixpath.join(gs_dir, '*')])
# Example #21
# 0
 def __enter__(self):
   shell_utils.run([GIT, 'config', 'user.email',
                    '"*****@*****.**"'])
   shell_utils.run([GIT, 'config', 'user.name',
                    '"Skia_Android Canary Bot"'])
   # Authenticate. This is only required on the actual build slave - not on
   # a test slave on someone's machine, where the file does not exist.
   if os.path.exists(GIT_COOKIE_AUTHDAEMON):
     output = shell_utils.run([GIT_COOKIE_AUTHDAEMON])
     self._auth_daemon_pid = shlex.split(output)[-1]
   else:
     print 'No authentication file. Did you authenticate?'
# Example #22
# 0
  def _Run(self):
    """Download and apply the patch specified in self._args['patch'].

    Fetches the diff (via 'svn cat' or HTTP), writes it to a temp file,
    verifies it applies at the given -p level (falling back to the other of
    0/1), then applies it with 'git apply'.
    """
    if self._args['patch'] == 'None':
      raise BuildStepFailure('No patch given!')

    # patch is a tuple of the form (int, str), where patch[0] is the "level" of
    # the patch and patch[1] is the URL of the diff.
    patch_level, encoded_patch_url = literal_eval(self._args['patch'].decode())
    patch_url = urllib.quote(encoded_patch_url, safe="%/:=&?~+!$,;'@()*[]")
    print 'Patch level: %d' % patch_level
    print 'Diff file URL:'
    print patch_url

    # Write the patch file into a temporary directory. Unfortunately, temporary
    # files created by the tempfile module don't behave properly on Windows, so
    # we create a temporary directory and write the file inside it.
    temp_dir = tempfile.mkdtemp()
    try:
      patch_file_name = os.path.join(temp_dir, 'skiabot_patch')
      patch_file = open(patch_file_name, 'wb')
      try:
        if 'svn' in patch_url:
          # TODO(borenet): Create an svn_utils module and use it instead.  It
          # would be nice to find a way to share
          # https://skia.googlesource.com/skia/+/master/tools/svn.py
          patch_contents = shell_utils.run([SVN, 'cat', patch_url], echo=False)
        else:
          patch_contents = urllib2.urlopen(patch_url).read()
        if not patch_contents:
          raise Exception('Got an empty patch!')
        patch_file.write(patch_contents)
      finally:
        patch_file.close()
      print 'Saved patch to %s' % patch_file.name

      # Build the 'git apply' invocation for a given -p level.
      def get_patch_cmd(level, patch_filename):
        return [GIT, 'apply', '-p%d' % level, '-v', '--ignore-space-change',
                '--ignore-whitespace', patch_filename]

      try:
        # First, check that the patch can be applied at the given level.
        shell_utils.run(get_patch_cmd(patch_level, patch_file.name) +
                        ['--check'])
      except shell_utils.CommandFailedException as e:
        # If the patch can't be applied at the requested level, try 0 or 1,
        # depending on what we just tried.
        print e
        patch_level = (patch_level + 1) % 2
        print 'Trying patch level %d instead...' % patch_level
      shell_utils.run(get_patch_cmd(patch_level, patch_file.name))

    finally:
      shutil.rmtree(temp_dir)
# Example #23
# 0
 def RunFlavoredCmd(self, app, args):
   """Run the executable."""
   # runtest.py doesn't work on Windows for some reason (see
   # http://skbug.com/2520), so run the binary directly there; everywhere
   # else go through runtest.py.
   if os.name == 'nt':
     cmd = [self._PathToBinary(app)] + args
   else:
     runtest_path = os.path.join(misc.BUILDBOT_PATH, 'third_party',
                                 'chromium_buildbot', 'scripts', 'slave',
                                 'runtest.py')
     cmd = (['python', runtest_path, '--target', self._step.configuration,
             app, '--xvfb', '--build-dir', 'out'] + args)
   shell_utils.run(cmd)
 def Compile(self, target):
   os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
   print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
   make_cmd = 'make'
   if os.name == 'nt':
     make_cmd = 'make.bat'
   cmd = [make_cmd,
          target,
          'BUILDTYPE=%s' % self._step.configuration,
          ]
   cmd.extend(self._step.default_make_flags)
   cmd.extend(self._step.make_flags)
   shell_utils.run(cmd)
  def Compile(self, target):
    # Run the xsan_build script.
    os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
    print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
    cmd = [
        os.path.join('tools', 'xsan_build'),
        self._step.args['sanitizer'],
        target,
        'BUILDTYPE=%s' % self._step.configuration,
    ]

    cmd.extend(self._step.default_make_flags)
    cmd.extend(self._step.make_flags)
    shell_utils.run(cmd)
# Example #26
# 0
    def Compile(self, target):
        # Run the xsan_build script.
        os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
        print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
        cmd = [
            os.path.join('tools', 'xsan_build'),
            self._step.args['sanitizer'],
            target,
            'BUILDTYPE=%s' % self._step.configuration,
        ]

        cmd.extend(self._step.default_make_flags)
        cmd.extend(self._step.make_flags)
        shell_utils.run(cmd)
# Example #27
# 0
def does_storage_object_exist(object_name):
  """Checks if an object exists on Google Storage.

  Returns True if it exists else returns False.
  """
  gsutil = slave_utils.GSUtilSetup()
  command = [gsutil]
  command.extend(['ls', object_name])
  print 'Running command: %s' % command
  try:
    shell_utils.run(command)
    return True
  except shell_utils.CommandFailedException:
    return False
# Example #28
# 0
def does_storage_object_exist(object_name):
    """Checks if an object exists on Google Storage.

  Returns True if it exists else returns False.
  """
    gsutil = slave_utils.GSUtilSetup()
    command = [gsutil]
    command.extend(['ls', object_name])
    print 'Running command: %s' % command
    try:
        shell_utils.run(command)
        return True
    except shell_utils.CommandFailedException:
        return False
def GetGitRepoPOSIXTimestamp():
  """Returns the POSIX timestamp for the current Skia commit as in int."""
  # `git show --format=%at -s` prints only the author timestamp of HEAD.
  output = shell_utils.run([GIT, 'show', '--format=%at', '-s'],
                           log_in_real_time=False, echo=False,
                           print_timestamps=False)
  return int(output)
Example #30
0
 def RunFlavoredCmd(self, app, args):
     """ Run `app` with `args` under the sanitizer runtime configuration. """
     # Configure the ASan/LSan/TSan runtimes before launching the binary.
     sanitizer_env = {
         'ASAN_SYMBOLIZER_PATH': '/usr/bin/llvm-symbolizer-3.5',
         'ASAN_OPTIONS': 'symbolize=1 detect_leaks=1',
         'LSAN_OPTIONS': ('symbolize=1 suppressions=tools/lsan.supp '
                          'print_suppressions=1'),
         'TSAN_OPTIONS': 'suppressions=tools/tsan.supp',
     }
     os.environ.update(sanitizer_env)
     return shell_utils.run([self._PathToBinary(app)] + args)
 def RunFlavoredCmd(self, app, args):
   """ Run the given app under ASan/LSan/TSan runtime configuration. """
   # Point ASan at the matching llvm symbolizer so reports are readable.
   os.environ['ASAN_SYMBOLIZER_PATH'] = '/usr/bin/llvm-symbolizer-3.5'
   os.environ['ASAN_OPTIONS'] = 'symbolize=1 detect_leaks=1'
   os.environ['LSAN_OPTIONS'] = \
           'symbolize=1 suppressions=tools/lsan.supp print_suppressions=1'
   os.environ['TSAN_OPTIONS'] = 'suppressions=tools/tsan.supp'
   return shell_utils.run([self._PathToBinary(app)] + args)
 def GetDeviceDirs(self):
     """ Set the directories which will be used by the BuildStep. """
     if not self._serial:
         # No attached device; nothing to report.
         return None
     # Ask the device where external storage is mounted; the last line of
     # adb's output is the path itself.
     scratch = shell_utils.run(
         '%s -s %s shell echo \$EXTERNAL_STORAGE' %
         (android_utils.PATH_TO_ADB, self._serial),
         echo=True,
         shell=True).rstrip().split('\n')[-1]
     base = posixpath.join(scratch, 'skiabot', 'skia_')
     dirs = {
         'perf_data_dir': base + 'perf',
         'gm_actual_dir': base + 'gm_actual',
         'gm_expected_dir': base + 'gm_expected',
         'dm_dir': base + 'dm',
         'resource_dir': base + 'resources',
         'skimage_in_dir': base + 'skimage_in',
         'skimage_expected_dir': base + 'skimage_expected',
         'skimage_out_dir': base + 'skimage_out',
         'skp_dir': base + 'skp',
         'skp_perf_dir': base + 'skp_perf',
         'playback_actual_images_dir': base + 'playback_actual_images',
         'playback_actual_summaries_dir': base + 'playback_actual_summaries',
         'playback_expected_summaries_dir': (
             base + 'playback_expected_summaries'),
         'tmp_dir': base + 'tmp_dir',
     }
     return DeviceDirs(**dirs)
 def Compile(self, target):
     """ Build `target` for NaCl via the nacl_make wrapper script. """
     # The wrapper script needs to know where the NaCl SDK lives.
     os.environ[ENV_VAR] = self._step.args['nacl_sdk_root']
     nacl_make = os.path.join('platform_tools', 'nacl', 'nacl_make')
     cmd = [nacl_make, target, 'BUILDTYPE=%s' % self._step.configuration]
     cmd += self._step.default_make_flags
     if os.name != 'nt':
         # Best effort: turn on ccache when it is installed.
         try:
             if shell_utils.run(['which', 'ccache'], echo=False):
                 cmd.append('--use-ccache')
         except Exception:
             pass
     cmd += self._step.make_flags
     shell_utils.run(cmd)
Example #34
0
def GetCheckedOutHash():
    """ Determine what commit we actually got. If there are local modifications,
  raise an exception. """
    checkout_root, config_dict = _GetLocalConfig()

    # Get the checked-out commit hash for the first gclient solution.
    with misc.ChDir(os.path.join(checkout_root, config_dict[0]['name'])):
        # First, print out the remote from which we synced, just for debugging.
        cmd = [GIT, 'remote', '-v']
        # A failure to list remotes is non-fatal; it is debug output only.
        try:
            shell_utils.run(cmd)
        except shell_utils.CommandFailedException as e:
            print e

        # "git rev-parse HEAD" returns the commit hash for HEAD.
        return shell_utils.run([GIT, 'rev-parse', 'HEAD'],
                               log_in_real_time=False).rstrip('\n')
def GetCheckedOutHash():
  """ Determine what commit we actually got. If there are local modifications,
  raise an exception. """
  checkout_root, config_dict = _GetLocalConfig()

  # The first gclient solution holds the checkout we care about.
  with misc.ChDir(os.path.join(checkout_root, config_dict[0]['name'])):
    # Print the remote we synced from; purely informational, so a failure
    # here must not abort the step.
    try:
      shell_utils.run([GIT, 'remote', '-v'])
    except shell_utils.CommandFailedException as e:
      print(e)
    # Resolve HEAD to its commit hash.
    head_hash = shell_utils.run([GIT, 'rev-parse', 'HEAD'],
                                log_in_real_time=False)
    return head_hash.rstrip('\n')
 def Install(self):
   """ Install the Skia executables. """
   # Rooted devices: remount writable and make sure no lingering skia
   # process is running before pushing new binaries.
   if self._has_root:
     android_utils.RunADB(self._serial, ['root'])
     android_utils.RunADB(self._serial, ['remount'])
     # As an experiment, don't change CPU scaling mode.
     #android_utils.SetCPUScalingMode(self._serial, 'performance')
     try:
       android_utils.ADBKill(self._serial, 'skia')
     except Exception:
       # If we fail to kill the process, try rebooting the device, and wait for
       # it to come back up.
       shell_utils.run([android_utils.PATH_TO_ADB, '-s', self._serial,
                        'reboot'])
       time.sleep(60)
     android_utils.StopShell(self._serial)
   else:
     # Unrooted devices run Skia inside the app; kill the app instead.
     android_utils.ADBKill(self._serial, 'com.skia', kill_app=True)
 def Install(self):
     """ Install the Skia executables. """
     if not self._has_root:
         # Without root we can only kill the Skia app itself.
         android_utils.ADBKill(self._serial, 'com.skia', kill_app=True)
         return
     android_utils.RunADB(self._serial, ['root'])
     android_utils.RunADB(self._serial, ['remount'])
     # As an experiment, don't change CPU scaling mode.
     #android_utils.SetCPUScalingMode(self._serial, 'performance')
     try:
         android_utils.ADBKill(self._serial, 'skia')
     except Exception:
         # Killing the process failed; reboot the device instead and give
         # it a minute to come back up.
         reboot_cmd = [android_utils.PATH_TO_ADB, '-s', self._serial,
                       'reboot']
         shell_utils.run(reboot_cmd)
         time.sleep(60)
     android_utils.StopShell(self._serial)
def maybe_fix_identity(username='******', email='*****@*****.**'):
  """If either of user.name or user.email is not defined, define it.

  Args:
    username: value stored as user.name when it is unset.
    email: value stored as user.email when it is unset.
  """
  try:
    shell_utils.run([GIT, 'config', '--get', 'user.name'])
  except shell_utils.CommandFailedException:
    # The command runs via an argv list (no shell), so the value must NOT be
    # wrapped in quotes: '"%s"' would store literal double quotes in the
    # config value.
    shell_utils.run([GIT, 'config', 'user.name', username])

  try:
    shell_utils.run([GIT, 'config', '--get', 'user.email'])
  except shell_utils.CommandFailedException:
    shell_utils.run([GIT, 'config', 'user.email', email])
Example #39
0
def download_dir_contents(remote_src_dir, local_dest_dir, multi=True):
  """Download contents of a Google Storage directory to local disk.

  params:
    remote_src_dir: GS URL (gs://BUCKETNAME/PATH)
    local_dest_dir: directory on local disk to write the contents into
    multi: boolean; whether to perform the copy in multithreaded mode.

  The copy operates as a "merge with overwrite": any files in src_dir will be
  "overlaid" on top of the existing content in dest_dir.  Existing files with
  the same names will be overwritten.
  """
  command = [slave_utils.GSUtilSetup()]
  if multi:
    # -m parallelizes the copy.
    command.append('-m')
  command += ['cp', '-R', remote_src_dir, local_dest_dir]
  print('Running command: %s' % command)
  shell_utils.run(command)
  def RunFlavoredCmd(self, app, args):
    """ Override this in new BuildStep flavors. """
    # Leak checking is off; --track-origins gives useful uninitialized-value
    # reports and --error-exitcode makes valgrind fail the run on any error.
    cmd = ['valgrind', '--gen-suppressions=all', '--leak-check=no',
           '--track-origins=yes', '--error-exitcode=1']
    if self._suppressions_file:
      cmd.append('--suppressions=%s' % self._suppressions_file)
    cmd += [self._PathToBinary(app)] + args
    return shell_utils.run(cmd)
Example #41
0
def download_dir_contents(remote_src_dir, local_dest_dir, multi=True):
    """Download contents of a Google Storage directory to local disk.

  params:
    remote_src_dir: GS URL (gs://BUCKETNAME/PATH)
    local_dest_dir: directory on local disk to write the contents into
    multi: boolean; whether to perform the copy in multithreaded mode.

  The copy operates as a "merge with overwrite": any files in src_dir will be
  "overlaid" on top of the existing content in dest_dir.  Existing files with
  the same names will be overwritten.
  """
    gsutil = slave_utils.GSUtilSetup()
    command = [gsutil]
    if multi:
        # -m runs the copy in parallel.
        command.append('-m')
    command.extend(['cp', '-R', remote_src_dir, local_dest_dir])
    print 'Running command: %s' % command
    shell_utils.run(command)
def GetGitNumber(commit_hash):
  """Returns the GIT number for the current Skia commit as in int."""
  # NOTE(review): commit_hash is unused here; kept for interface
  # compatibility with callers.
  try:
    # `git number` may be unavailable; fall back to -1 in that case.
    output = shell_utils.run([GIT, 'number'], log_in_real_time=False,
                             echo=False, print_timestamps=False)
    return int(output)
  except shell_utils.CommandFailedException:
    print('GetGitNumber: Unable to get git number, returning -1')
    return -1
Example #43
0
    def Compile(self, target):
        """ Compile the Skia executables. """
        # Add gsutil to PATH
        gsutil = slave_utils.GSUtilSetup()
        os.environ['PATH'] += os.pathsep + os.path.dirname(gsutil)

        # Build through the chromeos_make wrapper for the requested board.
        make_cmd = os.path.join('platform_tools', 'chromeos', 'bin',
                                'chromeos_make')
        cmd = [make_cmd, '-d', self._step.args['board'], target,
               'BUILDTYPE=%s' % self._step.configuration]
        cmd += self._step.default_make_flags
        cmd += self._step.make_flags
        shell_utils.run(cmd)
  def _Run(self):
    """Tar the result dir, upload the tarball to Google Storage, and print
    the gsutil command for downloading it."""
    # Tar up the results.
    result_tarball = '%s_%s.tgz' % (self._builder_name,
                                    self._got_revision)
    shell_utils.run(['tar', '-cvzf', os.path.join(os.pardir, result_tarball),
                     self._flavor_utils.result_dir])

    # Upload to Google Storage
    bucket_url = gs_utils.GSUtils.with_gs_prefix(
        skia_vars.GetGlobalVariable('googlestorage_bucket'))
    upload_to_bucket.upload_to_bucket(
        os.path.join(os.pardir, result_tarball),
        bucket_url,
        subdir=GS_DRT_SUBDIR)

    print 'To download the tarball, run this command:'
    gs_url = posixpath.join(
        bucket_url,
        GS_DRT_SUBDIR,
        result_tarball)
    print 'gsutil cp %s <local_dir>' % gs_url
Example #45
0
  def _Run(self):
    """Check out (or update) the Android sources in ANDROID_CHECKOUT_PATH.

    Downloads the `repo` launcher if it is not present, then runs
    `repo init` / `repo sync` against the Skia Android branch.
    """
    try:
      os.makedirs(ANDROID_CHECKOUT_PATH)
    except OSError:
      # The directory already exists.
      pass
    with misc.ChDir(ANDROID_CHECKOUT_PATH):
      if not os.path.exists(REPO):
        # Download repo. Use curl's -o to write the output file: a '>'
        # element in an argv list is NOT shell redirection and would be
        # passed to curl as an argument, so the file would never be written.
        shell_utils.run(['curl', REPO_URL, '-o', REPO])
        shell_utils.run(['chmod', 'a+x', REPO])

      with GitAuthenticate():
        shell_utils.run([REPO, 'init', '-u', ANDROID_REPO_URL, '-g',
                         'all,-notdefault,-darwin', '-b', 'master-skia'])
        shell_utils.run([REPO, 'sync', '-j32'])
  def _Run(self):
    """Upload a CL that updates SKP_VERSION in the Skia repo.

    Configures the committer identity, normalizes the origin remote, then
    writes the new version into the SKP_VERSION file on a fresh branch
    managed by git_utils.GitBranch (which handles commit/upload).
    """
    with misc.ChDir(PATH_TO_SKIA):
      # Commit as the Skia committer identity, local to this repo only.
      shell_utils.run([GIT, 'config', '--local', 'user.name',
                       SKIA_COMMITTER_NAME])
      shell_utils.run([GIT, 'config', '--local', 'user.email',
                       SKIA_COMMITTER_EMAIL])
      # Make sure origin points at the canonical Skia git URL.
      if CHROMIUM_SKIA in shell_utils.run([GIT, 'remote', '-v']):
        shell_utils.run([GIT, 'remote', 'set-url', 'origin', SKIA_GIT_URL,
                         CHROMIUM_SKIA])

      version_file = 'SKP_VERSION'
      skp_version = self._args.get('skp_version')
      # Only send to the commit queue when this is not a trybot run.
      with git_utils.GitBranch(branch_name='update_skp_version',
                               commit_msg=COMMIT_MSG % skp_version,
                               commit_queue=not self._is_try):

        # First, upload a version of the CL with just the SKP version changed.
        with open(version_file, 'w') as f:
          f.write(skp_version)
 def GetDeviceDirs(self):
   """ Set the directories which will be used by the BuildStep. """
   # Returns None when no device serial is attached.
   if self._serial:
     # Ask the device where external storage is mounted; the last line of
     # adb's output is the path itself.
     device_scratch_dir = shell_utils.run(
         '%s -s %s shell echo \$EXTERNAL_STORAGE' % (
             android_utils.PATH_TO_ADB, self._serial),
         echo=True, shell=True).rstrip().split('\n')[-1]
     prefix = posixpath.join(device_scratch_dir, 'skiabot', 'skia_')
     return DeviceDirs(
         perf_data_dir=prefix + 'perf',
         gm_actual_dir=prefix + 'gm_actual',
         gm_expected_dir=prefix + 'gm_expected',
         dm_dir=prefix + 'dm',
         resource_dir=prefix + 'resources',
         skimage_in_dir=prefix + 'skimage_in',
         skimage_expected_dir=prefix + 'skimage_expected',
         skimage_out_dir=prefix + 'skimage_out',
         skp_dir=prefix + 'skp',
         skp_perf_dir=prefix + 'skp_perf',
         playback_actual_images_dir=prefix + 'playback_actual_images',
         playback_actual_summaries_dir=prefix + 'playback_actual_summaries',
         playback_expected_summaries_dir=(
             prefix + 'playback_expected_summaries'),
         tmp_dir=prefix + 'tmp_dir')
  def _Run(self):
    """Capture SKPs by replaying the configured page sets and upload them.

    Starts an X server if needed, runs webpages_playback.py with the
    configured browser, and always attempts to clean up stray browser
    processes afterwards.
    """
    skp_version = self._get_skp_version()
    print 'SKP_VERSION=%d' % skp_version

    try:
      # Start Xvfb on the bot.
      shell_utils.run('sudo Xvfb :0 -screen 0 1280x1024x24 &', shell=True)
    except Exception:
      # It is ok if the above command fails, it just means that DISPLAY=:0
      # is already up.
      pass

    full_path_browser_executable = os.path.join(
        os.getcwd(), self._args['browser_executable'])

    # Results are uploaded under a directory named for the SKP version.
    upload_dir = 'playback_%d' % skp_version
    webpages_playback_cmd = [
      'python', os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             'webpages_playback.py'),
      '--page_sets', self._args['page_sets'],
      '--browser_executable', full_path_browser_executable,
      '--non-interactive',
      '--upload_to_gs',
      '--alternate_upload_dir', upload_dir,
    ]

    try:
      shell_utils.run(webpages_playback_cmd)
    finally:
      # Clean up any leftover browser instances. This can happen if there are
      # telemetry crashes, processes are not always cleaned up appropriately by
      # the webpagereplay and telemetry frameworks.
      cleanup_cmd = [
        'pkill', '-9', '-f', full_path_browser_executable
      ]
      try:
        shell_utils.run(cleanup_cmd)
      except Exception:
        # Do not fail the build step if the cleanup command fails.
        pass