def upload(png_path):

  # Creating a list of files which need to be uploaded to Google Storage:
  # all .png files from the directory containing golden screenshots.
  target = []
  for file in os.listdir(png_path):
    if file.endswith('.png'):
      target.append(os.path.join(png_path, file))

  # Creating a standard gsutil object, assuming there are depot_tools
  # and everything related is set up already.
  gsutil_path = os.path.abspath(os.path.join(src_path, '..', 'depot_tools',
                                             'third_party', 'gsutil',
                                             'gsutil'))
  gsutil = download_from_google_storage.Gsutil(gsutil_path,
                                               boto_path=None,
                                               bypass_prodaccess=True)

  # URL of the bucket used for storing screenshots.
  bucket_url = 'gs://chrome-os-oobe-ui-screenshot-testing'

  # Uploading using the most simple way,
  # see depot_tools/upload_to_google_storage.py to have better understanding
  # of this False and 1 arguments.
  upload_to_google_storage.upload_to_google_storage(target, bucket_url, gsutil,
                                                    False, False, 1, False)

  print 'All images are uploaded to Google Storage.'
# Example #2
def upload(png_path):

    # Creating a list of files which need to be uploaded to Google Storage:
    # all .png files from the directory containing golden screenshots.
    target = []
    for file in os.listdir(png_path):
        if file.endswith('.png'):
            target.append(os.path.join(png_path, file))

    # Creating a standard gsutil object, assuming there are depot_tools
    # and everything related is set up already.
    gsutil_path = os.path.abspath(
        os.path.join(src_path, '..', 'depot_tools', 'third_party', 'gsutil',
                     'gsutil'))
    gsutil = download_from_google_storage.Gsutil(gsutil_path,
                                                 boto_path=None,
                                                 bypass_prodaccess=True)

    # URL of the bucket used for storing screenshots.
    bucket_url = 'gs://chrome-os-oobe-ui-screenshot-testing'

    # Uploading using the most simple way,
    # see depot_tools/upload_to_google_storage.py to have better understanding
    # of this False and 1 arguments.
    upload_to_google_storage.upload_to_google_storage(target, bucket_url,
                                                      gsutil, False, False, 1,
                                                      False)

    print 'All images are uploaded to Google Storage.'
# Example #3
def _UploadToBucket(bucket_path, files_to_upload, is_dry_run):
  '''Uploads the files designated by the provided paths to a cloud bucket. '''

  # Conservative settings: no forced re-upload, no MD5, a single thread,
  # hashing enabled, no gzip compression.
  upload_args = {
      'input_filenames': files_to_upload,
      'base_url': bucket_path,
      'gsutil': _InitGsutil(is_dry_run),
      'force': False,
      'use_md5': False,
      'num_threads': 1,
      'skip_hashing': False,
      'gzip': None,
  }
  upload_to_google_storage.upload_to_google_storage(**upload_args)
# Example #4
def _UploadToBucket(bucket_path, files_to_upload, is_dry_run):
  '''Uploads the files designated by the provided paths to a cloud bucket. '''

  # Delegates to depot_tools' upload_to_google_storage with conservative
  # settings: no forced re-upload (force=False), no MD5 (use_md5=False), a
  # single upload thread, hashing enabled (skip_hashing=False) and no gzip
  # compression. The gsutil wrapper honors dry-run mode via _InitGsutil.
  upload_to_google_storage.upload_to_google_storage(
      input_filenames=files_to_upload,
      base_url=bucket_path,
      gsutil=_InitGsutil(is_dry_run),
      force=False,
      use_md5=False,
      num_threads=1,
      skip_hashing=False,
      gzip=None)
def _UpdateReferenceApks(milestones):
    """Updates reference APKs and creates .sha1 files ready for commit.

    Will fail if perf builders were broken for the given milestone (use next
    passing build in this case).

    Args:
      milestones: Iterable of (milestone, crrev) pairs identifying which
        perf-builder builds to pull reference APKs from.
    """
    with build_utils.TempDir() as temp_dir:
        for milestone, crrev in milestones:
            # Python 2 only: dict.iteritems().
            for builder, apks in _ALL_BUILDER_APKS.iteritems():
                tools_builder_path = builder.replace(' ', '_')
                zip_path = os.path.join(temp_dir, 'build_product.zip')
                # NOTE(review): 'git crrev-parse' appears to be a depot_tools
                # git extension resolving a crrev reference to a commit hash
                # — confirm it is available in the environment.
                commit = build_utils.CheckOutput(['git', 'crrev-parse',
                                                  crrev]).strip()
                # Download build product from perf builders.
                build_utils.CheckOutput([
                    'gsutil', 'cp',
                    'gs://chrome-perf/%s/full-build-linux_%s.zip' %
                    (builder, commit), zip_path
                ])

                # Extract desired .apks.
                with zipfile.ZipFile(zip_path) as z:
                    in_zip_paths = z.namelist()
                    # Common prefix shared by all entries in the zip; APKs
                    # live under '<prefix>/apks/'.
                    out_dir = os.path.commonprefix(in_zip_paths)
                    for apk_name in apks:
                        output_path = os.path.join(DEFAULT_DOWNLOAD_PATH,
                                                   tools_builder_path,
                                                   milestone)
                        apk_path = os.path.join(out_dir, 'apks', apk_name)
                        zip_info = z.getinfo(apk_path)
                        # Rewrite the entry name to the bare APK name (the
                        # replace() collapses the whole path to apk_name) so
                        # extract() does not recreate the zip's internal
                        # directory structure under output_path.
                        zip_info.filename = apk_path.replace(
                            apk_path, apk_name)
                        z.extract(zip_info, output_path)
                        input_files = [os.path.join(output_path, apk_name)]
                        bucket_path = os.path.join(DEFAULT_BUCKET,
                                                   tools_builder_path,
                                                   milestone)

                        # Upload .apks to chromium-android-tools so that they aren't
                        # automatically removed in the future.
                        upload_to_google_storage.upload_to_google_storage(
                            input_files,
                            bucket_path,
                            upload_to_google_storage.Gsutil(
                                upload_to_google_storage.GSUTIL_DEFAULT_PATH),
                            False,  # force
                            False,  # use_md5
                            10,  # num_threads
                            False,  # skip_hashing
                            None)  # gzip
# Example #6
def Upload(arguments):
  """Upload files in a third_party directory to google storage"""
  # Validate the destination bucket and the local directory first.
  validated_bucket, validated_dir = _CheckPaths(arguments.bucket_path,
                                                arguments.local_path)
  # Validate the requested file names against the local directory contents.
  files = _CheckFileList(validated_dir, arguments.file_list)
  # Conservative upload: single thread, no force, no MD5, hashing on, no gzip.
  upload_to_google_storage.upload_to_google_storage(
      input_filenames=files,
      base_url=validated_bucket,
      gsutil=arguments.gsutil,
      force=False,
      use_md5=False,
      num_threads=1,
      skip_hashing=False,
      gzip=None)
# Example #7
def main():
    """Uploads updated translation screenshots and git-adds their .sha1 files.

    Interactive Python 2 script: prompts before uploading and before staging
    the generated .sha1 signature files; exits 0 on user abort, 1 on upload
    failure.
    """
    parser = argparse.ArgumentParser(
        description='Upload translation screenshots to Google Cloud Storage')
    parser.add_argument('-n',
                        '--dry-run',
                        action='store_true',
                        help='Don\'t actually upload the images')
    args = parser.parse_args()

    # Find screenshots that changed relative to the translation expectations.
    screenshots = find_screenshots(
        src_path, os.path.join(src_path, TRANSLATION_EXPECTATIONS_PATH))
    if not screenshots:
        print 'No screenshots found, exiting.'
        exit(0)

    print 'Found %d updated screenshot(s): ' % len(screenshots)
    for s in screenshots:
        print '  %s' % s
    print
    # Confirm with the user before any network traffic happens.
    if not query_yes_no(
            'Do you want to upload these to Google Cloud Storage?'):
        exit(0)

    # Creating a standard gsutil object, assuming there are depot_tools
    # and everything related is set up already.
    gsutil_path = os.path.abspath(os.path.join(depot_tools_path, 'gsutil.py'))
    gsutil = download_from_google_storage.Gsutil(gsutil_path, boto_path=None)

    if not args.dry_run:
        # A non-zero return code indicates the upload failed.
        if upload_to_google_storage.upload_to_google_storage(
                input_filenames=screenshots,
                base_url=BUCKET_URL,
                gsutil=gsutil,
                force=False,
                use_md5=False,
                num_threads=1,
                skip_hashing=False,
                gzip=None) != 0:
            print 'Error uploading screenshots, exiting.'
            exit(1)

    print
    print 'Images are uploaded and their signatures are calculated:'

    # upload_to_google_storage writes a .sha1 signature next to each image.
    signatures = ['%s.sha1' % s for s in screenshots]
    for s in signatures:
        print '  %s' % s
    print

    # Always ask if the .sha1 files should be added to the CL, even if they are
    # already part of the CL. If the files are not modified, adding again is a
    # no-op.
    if not query_yes_no('Do you want to add these files to your CL?'):
        exit(0)

    if not args.dry_run:
        git_add(signatures, src_path)

    print 'DONE.'
# Example #8
def _UpdateReferenceApks(milestones):
  """Update reference APKs and creates .sha1 files ready for commit.

  Will fail if perf builders were broken for the given milestone (use next
  passing build in this case).

  Args:
    milestones: Iterable of (milestone, crrev) pairs identifying which
      perf-builder builds to pull reference APKs from.
  """
  with build_utils.TempDir() as temp_dir:
    for milestone, crrev in milestones:
      # Python 2 only: dict.iteritems().
      for builder, apks in _ALL_BUILDER_APKS.iteritems():
        tools_builder_path = builder.replace(' ', '_')
        zip_path = os.path.join(temp_dir, 'build_product.zip')
        # NOTE(review): 'git crrev-parse' appears to be a depot_tools git
        # extension resolving a crrev reference to a commit hash — confirm.
        commit = build_utils.CheckOutput(['git', 'crrev-parse', crrev]).strip()
        # Download build product from perf builders.
        build_utils.CheckOutput([
            'gsutil', 'cp', 'gs://chrome-perf/%s/full-build-linux_%s.zip' % (
            builder, commit), zip_path])

        # Extract desired .apks.
        with zipfile.ZipFile(zip_path) as z:
          in_zip_paths = z.namelist()
          # Common prefix shared by all zip entries; APKs live under
          # '<prefix>/apks/'.
          out_dir = os.path.commonprefix(in_zip_paths)
          for apk_name in apks:
            output_path = os.path.join(
                DEFAULT_DOWNLOAD_PATH, tools_builder_path, milestone)
            apk_path = os.path.join(out_dir, 'apks', apk_name)
            zip_info = z.getinfo(apk_path)
            # Rewrite the entry name to the bare APK name (the replace()
            # collapses the whole path to apk_name) so extract() does not
            # recreate the zip's internal directory structure.
            zip_info.filename = apk_path.replace(apk_path, apk_name)
            z.extract(zip_info, output_path)
            input_files = [os.path.join(output_path, apk_name)]
            bucket_path = os.path.join(
                DEFAULT_BUCKET, tools_builder_path, milestone)

            # Upload .apks to chromium-android-tools so that they aren't
            # automatically removed in the future.
            upload_to_google_storage.upload_to_google_storage(
                input_files,
                bucket_path,
                upload_to_google_storage.Gsutil(
                    upload_to_google_storage.GSUTIL_DEFAULT_PATH),
                False,  # force
                False,  # use_md5
                10,  # num_threads
                False,  # skip_hashing
                None)  # gzip
 def test_upload_single_file(self):
     # A forced upload (force=True) of one file should probe the bucket
     # with `ls` and then copy the file with `cp -q`, writing a .sha1
     # signature file locally and returning 0.
     upload_list = [self.lorem_ipsum]
     expected_sha1_file = '%s.sha1' % self.lorem_ipsum
     ret = upload_to_google_storage.upload_to_google_storage(
         upload_list, self.base_url, self.gsutil, True, False, 1, False)
     gs_url = '%s/%s' % (self.base_url, self.lorem_ipsum_sha1)
     self.assertEqual(self.gsutil.history,
                      [('check_call', ('ls', gs_url)),
                       ('check_call', ('cp', '-q', upload_list[0], gs_url))])
     self.assertTrue(os.path.exists(expected_sha1_file))
     with open(expected_sha1_file, 'rb') as f:
         self.assertEqual(f.read(),
                          '7871c8e24da15bad8b0be2c36edc9dc77e37727f')
     os.remove(expected_sha1_file)
     self.assertEqual(ret, 0)
 def test_skip_hashing(self):
     # With skip_hashing=True the uploader must trust the pre-existing
     # .sha1 file instead of re-hashing the local file: it probes the
     # remote with `ls` / `ls -L` using the fake hash, copies the file,
     # and leaves the (wrong) .sha1 content untouched.
     filenames = [self.lorem_ipsum]
     output_filename = '%s.sha1' % self.lorem_ipsum
     fake_hash = '6871c8e24da15bad8b0be2c36edc9dc77e37727f'
     with open(output_filename, 'wb') as f:
         f.write(fake_hash)  # Fake hash.
     code = upload_to_google_storage.upload_to_google_storage(
         filenames, self.base_url, self.gsutil, False, False, 1, True)
     self.assertEqual(self.gsutil.history,
                      [('check_call',
                        ('ls', '%s/%s' % (self.base_url, fake_hash))),
                       ('check_call',
                        ('ls', '-L', '%s/%s' % (self.base_url, fake_hash))),
                       ('check_call', ('cp', '-q', filenames[0], '%s/%s' %
                                       (self.base_url, fake_hash)))])
     # The .sha1 file was not rewritten with the real hash.
     self.assertEqual(open(output_filename, 'rb').read(), fake_hash)
     os.remove(output_filename)
     self.assertEqual(code, 0)
 def test_upload_single_file(self):
   # Force an upload (force=True): expect one existence probe (`ls`)
   # followed by one copy (`cp -q`) into the bucket, plus a local .sha1
   # signature file and a zero return code.
   to_upload = [self.lorem_ipsum]
   sha1_path = '%s.sha1' % self.lorem_ipsum
   result = upload_to_google_storage.upload_to_google_storage(
       to_upload, self.base_url, self.gsutil, True, False, 1, False)
   remote = '%s/%s' % (self.base_url, self.lorem_ipsum_sha1)
   self.assertEqual(self.gsutil.history,
                    [('check_call', ('ls', remote)),
                     ('check_call', ('cp', '-q', to_upload[0], remote))])
   self.assertTrue(os.path.exists(sha1_path))
   with open(sha1_path, 'rb') as f:
     self.assertEqual(f.read(),
                      '7871c8e24da15bad8b0be2c36edc9dc77e37727f')
   os.remove(sha1_path)
   self.assertEqual(result, 0)
 def test_upload_single_file_remote_exists(self):
     # If `ls` finds the remote object and `ls -L` returns an ETag, the
     # uploader must skip the copy entirely: only the two `ls` calls
     # appear in the history, no `cp`.
     filenames = [self.lorem_ipsum]
     output_filename = '%s.sha1' % self.lorem_ipsum
     etag_string = 'ETag: 634d7c1ed3545383837428f031840a1e'
     # Queue fake gsutil results: first `ls` succeeds, then `ls -L`
     # returns the ETag, signalling the object already exists remotely.
     self.gsutil.add_expected(0, '', '')
     self.gsutil.add_expected(0, etag_string, '')
     code = upload_to_google_storage.upload_to_google_storage(
         filenames, self.base_url, self.gsutil, False, False, 1, False)
     self.assertEqual(
         self.gsutil.history,
         [('check_call',
           ('ls', '%s/%s' % (self.base_url, self.lorem_ipsum_sha1))),
          ('check_call',
           ('ls', '-L', '%s/%s' % (self.base_url, self.lorem_ipsum_sha1)))])
     # The local .sha1 signature is still written.
     self.assertTrue(os.path.exists(output_filename))
     self.assertEqual(
         open(output_filename, 'rb').read(),
         '7871c8e24da15bad8b0be2c36edc9dc77e37727f')
     os.remove(output_filename)
     self.assertEqual(code, 0)
 def test_upload_single_file_remote_exists(self):
   # When the remote object already exists (`ls` succeeds and `ls -L`
   # returns an ETag), the uploader must not issue any `cp` call but
   # still writes the local .sha1 signature and returns 0.
   to_upload = [self.lorem_ipsum]
   sha1_path = '%s.sha1' % self.lorem_ipsum
   self.gsutil.add_expected(0, '', '')
   self.gsutil.add_expected(0, 'ETag: 634d7c1ed3545383837428f031840a1e', '')
   result = upload_to_google_storage.upload_to_google_storage(
       to_upload, self.base_url, self.gsutil, False, False, 1, False)
   remote = '%s/%s' % (self.base_url, self.lorem_ipsum_sha1)
   self.assertEqual(self.gsutil.history,
                    [('check_call', ('ls', remote)),
                     ('check_call', ('ls', '-L', remote))])
   self.assertTrue(os.path.exists(sha1_path))
   with open(sha1_path, 'rb') as f:
     self.assertEqual(f.read(),
                      '7871c8e24da15bad8b0be2c36edc9dc77e37727f')
   os.remove(sha1_path)
   self.assertEqual(result, 0)
 def test_skip_hashing(self):
   # With skip_hashing=True the uploader must trust the pre-existing
   # .sha1 file instead of re-hashing the local file: it probes the
   # remote with `ls` / `ls -L` using the fake hash, copies the file,
   # and leaves the (wrong) .sha1 content untouched.
   filenames = [self.lorem_ipsum]
   output_filename = '%s.sha1' % self.lorem_ipsum
   fake_hash = '6871c8e24da15bad8b0be2c36edc9dc77e37727f'
   with open(output_filename, 'wb') as f:
     f.write(fake_hash)  # Fake hash.
   code = upload_to_google_storage.upload_to_google_storage(
       filenames, self.base_url, self.gsutil, False, False, 1, True)
   self.assertEqual(
       self.gsutil.history,
       [('check_call',
         ('ls', '%s/%s' % (self.base_url, fake_hash))),
        ('check_call',
         ('ls', '-L', '%s/%s' % (self.base_url, fake_hash))),
        ('check_call',
         ('cp', '-q', filenames[0], '%s/%s' % (self.base_url, fake_hash)))])
   # The .sha1 file was not rewritten with the real hash.
   self.assertEqual(
       open(output_filename, 'rb').read(), fake_hash)
   os.remove(output_filename)
   self.assertEqual(code, 0)
# Example #15
def main():
    """Uploads updated translation screenshots and git-adds their .sha1 files.

    Interactive script (Python 3): prompts before uploading (the files
    become public) and before staging the generated .sha1 signature files
    in the current CL. Exits 0 on user abort, 1 on upload failure.
    """
    parser = argparse.ArgumentParser(
        description='Upload translation screenshots to Google Cloud Storage')
    parser.add_argument('-n',
                        '--dry-run',
                        action='store_true',
                        help='Don\'t actually upload the images')
    parser.add_argument(
        '-c',
        '--clank_internal',
        action='store_true',
        help='Upload screenshots for strings in the downstream clank directory'
    )
    args = parser.parse_args()
    # Downstream (clank) strings live in a separate tree with their own
    # translation expectations file.
    if args.clank_internal:
        screenshots = find_screenshots(
            os.path.join(src_path, "clank"),
            os.path.join(src_path, INTERNAL_TRANSLATION_EXPECTATIONS_PATH))

    else:
        screenshots = find_screenshots(
            src_path, os.path.join(src_path, TRANSLATION_EXPECTATIONS_PATH))
    if not screenshots:
        print(
            "No screenshots found.\n\n"
            "- Screenshots must be located in the correct directory.\n"
            "  E.g. For IDS_HELLO_WORLD message in path/to/file.grd, save the "
            "screenshot at path/to/file_grd/IDS_HELLO_WORLD.png.\n"
            "- If you added a new, uncommitted .grd file, `git add` it so that "
            "this script can pick up its screenshot directory.")
        exit(0)

    print('Found %d updated screenshot(s): ' % len(screenshots))
    for s in screenshots:
        print('  %s' % s)
    print()
    # Confirm with the user before any network traffic happens.
    if not query_yes_no(
            'Do you want to upload these to Google Cloud Storage?\n\n'
            'FILES WILL BE PUBLIC, DO NOT UPLOAD ANYTHING CONFIDENTIAL.'):
        exit(0)

    # Creating a standard gsutil object, assuming there are depot_tools
    # and everything related is set up already.
    gsutil_path = os.path.abspath(os.path.join(depot_tools_path, 'gsutil.py'))
    gsutil = download_from_google_storage.Gsutil(gsutil_path, boto_path=None)

    if not args.dry_run:
        # A non-zero return code indicates the upload failed.
        if upload_to_google_storage.upload_to_google_storage(
                input_filenames=screenshots,
                base_url=BUCKET_URL,
                gsutil=gsutil,
                force=False,
                use_md5=False,
                num_threads=10,
                skip_hashing=False,
                gzip=None) != 0:
            print('Error uploading screenshots. Try running '
                  '`download_from_google_storage --config`.')
            exit(1)

    print()
    print('Images are uploaded and their signatures are calculated:')

    # upload_to_google_storage writes a .sha1 signature next to each image.
    signatures = ['%s.sha1' % s for s in screenshots]
    for s in signatures:
        print('  %s' % s)
    print()

    # Always ask if the .sha1 files should be added to the CL, even if they are
    # already part of the CL. If the files are not modified, adding again is a
    # no-op.
    if not query_yes_no('Do you want to add these files to your CL?',
                        default='yes'):
        exit(0)

    if not args.dry_run:
        git_helper.git_add(signatures, src_path)

    print('DONE.')