예제 #1
0
    def CopyTarballToGCS(self, storage_client, gcs_object):
        """Upload a tarball of this snapshot's source directory to GCS.

    Args:
      storage_client: storage_api.StorageClient, The storage client to use for
                      uploading.
      gcs_object: storage.objects Resource, The GCS object to write.

    Returns:
      storage_v1_messages.Object, The written GCS object.
    """
        # Work from src_dir while building the tarball (presumably so archive
        # member paths are relative — _MakeTarball is defined elsewhere).
        with files.ChDir(self.src_dir), files.TemporaryDirectory() as temp_dir:
            archive_path = os.path.join(temp_dir, 'file.tgz')
            # Close the tarball so all bytes are flushed before uploading.
            tarball = self._MakeTarball(archive_path)
            tarball.close()
            ignore_file_path = os.path.join(self.src_dir,
                                            gcloudignore.IGNORE_FILE_NAME)
            if self.any_files_ignored:
                if os.path.exists(ignore_file_path):
                    log.info('Using gcloudignore file [{}]'.format(
                        ignore_file_path))
                else:
                    # Files were skipped without an ignore file on disk;
                    # point the user at the log for details.
                    log.status.Print(
                        _IGNORED_FILE_MESSAGE.format(
                            log_file=log.GetLogFilePath()))
            log.status.write(
                'Uploading tarball of [{src_dir}] to '
                '[gs://{bucket}/{object}]\n'.format(
                    src_dir=self.src_dir,
                    bucket=gcs_object.bucket,
                    object=gcs_object.object,
                ), )
            return storage_client.CopyFileToGCS(archive_path, gcs_object)
예제 #2
0
def _UploadToGcsGsutil(local_path, dest_path):
    """Upload a local file to GCS with gsutil; return the destination path."""
    # gsutil reports failure through a non-zero exit code.
    retcode = storage_util.RunGsutilCommand('cp', [local_path, dest_path])
    if retcode == 0:
        return dest_path
    log.err.Print('Failed to upload file. See {} for details.'.format(
        log.GetLogFilePath()))
    raise exceptions.FailedSubCommand(
        ['gsutil', 'cp', local_path, dest_path], retcode)
예제 #3
0
def CopyFilesToCodeBucket(modules, bucket, source_contexts):
    """Examines modules and copies files to a Google Cloud Storage bucket.

  Args:
    modules: [(str, ModuleYamlInfo)] List of pairs of module name, and parsed
      module information.
    bucket: str A URL to the Google Cloud Storage bucket where the files will be
      uploaded.
    source_contexts: [dict] List of json-serializable source contexts
      associated with the modules.
  Returns:
    A lookup from module name to a dictionary representing the manifest. See
    _BuildStagingDirectory.
  """
    manifests = {}
    with file_utils.TemporaryDirectory() as staging_directory:
        for (module, info) in modules:
            source_directory = os.path.dirname(info.file)
            excluded_files_regex = info.parsed.skip_files.regex

            manifest = _BuildStagingDirectory(source_directory,
                                              staging_directory, bucket,
                                              excluded_files_regex,
                                              source_contexts)
            manifests[module] = manifest

        # BUG FIX: dict.itervalues() exists only on Python 2 and raises
        # AttributeError on Python 3; values() behaves the same on both.
        # Skip the sync entirely when every manifest is empty.
        if any(manifests.values()):
            log.status.Print('Copying files to Google Cloud Storage...')
            log.status.Print('Synchronizing files to [{b}].'.format(b=bucket))
            try:
                # Silence normal user output during the rsync; restored in
                # the `finally` below.
                log.SetUserOutputEnabled(False)

                # Invoked by the retryer after each failed attempt.
                def _StatusUpdate(result, unused_retry_state):
                    log.info('Error synchronizing files. Return code: {0}. '
                             'Retrying.'.format(result))

                retryer = retry.Retryer(max_retrials=3,
                                        status_update_func=_StatusUpdate)

                # gsutil signals failure with a non-zero exit code.
                def _ShouldRetry(return_code, unused_retry_state):
                    return return_code != 0

                try:
                    retryer.RetryOnResult(cloud_storage.Rsync,
                                          (staging_directory, bucket),
                                          should_retry_if=_ShouldRetry)
                except retry.RetryException as e:
                    # Retries exhausted: surface the last gsutil exit status.
                    raise exceptions.ToolException((
                        'Could not synchronize files. The gsutil command exited with '
                        'status [{s}]. Command output is available in [{l}].'
                    ).format(s=e.last_result, l=log.GetLogFilePath()))
            finally:
                # Reset to the standard log level.
                log.SetUserOutputEnabled(None)

    return manifests
예제 #4
0
def FileIterator(base, skip_files):
    """Walks a directory tree, returning all the files. Follows symlinks.

  Args:
    base: The base path to search for files under.
    skip_files: A regular expression object for files/directories to skip.

  Yields:
    Paths of files found, relative to base.
  """
    # Depth-first traversal using an explicit stack of relative directories.
    pending_dirs = ['']
    skipped_anything = False

    while pending_dirs:
        rel_dir = pending_dirs.pop()
        for entry in sorted(set(os.listdir(os.path.join(base, rel_dir)))):
            true_name = os.path.join(rel_dir, entry)
            fullname = os.path.join(base, true_name)

            # skip_files patterns always use '/', but os.path.join produces
            # '\' separators on Windows, so normalize before matching.
            if os.path.sep == '\\':
                name = true_name.replace('\\', '/')
            else:
                name = true_name

            if os.path.isfile(fullname):
                if skip_files.match(name):
                    log.info('Ignoring file [%s]: File matches ignore regex.',
                             true_name)
                    skipped_anything = True
                else:
                    yield true_name
            elif os.path.isdir(fullname):
                if skip_files.match(name):
                    log.info(
                        'Ignoring directory [%s]: Directory matches ignore regex.',
                        true_name)
                    skipped_anything = True
                else:
                    pending_dirs.append(true_name)

    if skipped_anything:
        log.status.Print(
            'Some files were skipped. Pass `--verbosity=info` to see which ones.'
        )
        log_path = log.GetLogFilePath()
        if log_path:
            log.status.Print(
                ('You may also view the gcloud log file, found at\n'
                 '[{0}].').format(log_path))
예제 #5
0
def CopyFilesToCodeBucket(service, source_dir, bucket_ref):
  """Examines services and copies files to a Google Cloud Storage bucket.

  Args:
    service: ServiceYamlInfo, The parsed service information.
    source_dir: str, path to the service's source directory
    bucket_ref: str A reference to a GCS bucket where the files will be
      uploaded.

  Returns:
    A dictionary representing the manifest. See _BuildStagingDirectory.
  """
  with file_utils.TemporaryDirectory() as staging_directory:
    excluded_files_regex = service.parsed.skip_files.regex
    manifest = _BuildStagingDirectory(source_dir,
                                      staging_directory,
                                      bucket_ref,
                                      excluded_files_regex)
    # A falsy manifest means nothing was staged, so skip the upload entirely.
    if manifest:
      log.status.Print('Copying files to Google Cloud Storage...')
      log.status.Print('Synchronizing files to [{b}].'
                       .format(b=bucket_ref.bucket))
      try:
        # Suppress user-visible output for the duration of the rsync;
        # restored in the `finally` below.
        log.SetUserOutputEnabled(False)

        # Called by the retryer after each failed attempt; logs the bad
        # return code at info level.
        def _StatusUpdate(result, unused_retry_state):
          log.info('Error synchronizing files. Return code: {0}. '
                   'Retrying.'.format(result))

        retryer = retry.Retryer(max_retrials=3,
                                status_update_func=_StatusUpdate)
        # Retry whenever the rsync exits non-zero.
        def _ShouldRetry(return_code, unused_retry_state):
          return return_code != 0

        # gsutil expects a trailing /
        dest_dir = bucket_ref.ToBucketUrl()
        try:
          retryer.RetryOnResult(
              storage_api.Rsync,
              (staging_directory, dest_dir),
              should_retry_if=_ShouldRetry)
        except retry.RetryException as e:
          # All retries exhausted; report the last gsutil exit status.
          raise exceptions.StorageError(
              ('Could not synchronize files. The gsutil command exited with '
               'status [{s}]. Command output is available in [{l}].').format(
                   s=e.last_result, l=log.GetLogFilePath()))
      finally:
        # Reset to the standard log level.
        log.SetUserOutputEnabled(None)
      log.status.Print('File upload done.')

  return manifest
예제 #6
0
def _UploadToGcs(is_async, local_path, daisy_bucket, image_uuid):
    """Upload a local image file to GCS; return the gs:// URI to that file."""
    # Replace spaces so the resulting object name is a clean gs:// path.
    sanitized_name = os.path.basename(local_path).replace(' ', '-')
    dest_path = 'gs://{0}/tmpimage/{1}-{2}'.format(daisy_bucket, image_uuid,
                                                   sanitized_name)
    if is_async:
        log.status.Print('Async: Once upload is complete, your image will be '
                         'imported from Cloud Storage asynchronously.')
    tracker_message = 'Copying [{0}] to [{1}]'.format(local_path, dest_path)
    with progress_tracker.ProgressTracker(tracker_message):
        retcode = storage_util.RunGsutilCommand('cp', [local_path, dest_path])
    if retcode == 0:
        return dest_path
    log.err.Print('Failed to upload file. See {} for details.'.format(
        log.GetLogFilePath()))
    raise exceptions.FailedSubCommand(
        ['gsutil', 'cp', local_path, dest_path], retcode)
예제 #7
0
File: util.py  Project: barber223/AudioApp
def FileIterator(base, skip_files):
    """Walks a directory tree, returning all the files. Follows symlinks.

  Args:
    base: The base path to search for files under.
    skip_files: A regular expression object for files/directories to skip.

  Yields:
    Paths of files found, relative to base.
  """
    # Depth-first traversal with an explicit stack of relative directories.
    pending_dirs = ['']
    skipped_anything = False

    while pending_dirs:
        rel_dir = pending_dirs.pop()
        for entry in sorted(set(os.listdir(os.path.join(base, rel_dir)))):
            name = os.path.join(rel_dir, entry)
            fullname = os.path.join(base, name)

            if os.path.isfile(fullname):
                if ShouldSkip(skip_files, name):
                    log.info('Ignoring file [%s]: File matches ignore regex.',
                             name)
                    skipped_anything = True
                else:
                    yield name
            elif os.path.isdir(fullname):
                if ShouldSkip(skip_files, name):
                    log.info(
                        'Ignoring directory [%s]: Directory matches ignore regex.',
                        name)
                    skipped_anything = True
                else:
                    pending_dirs.append(name)

    if skipped_anything:
        log.status.Print(
            'Some files were skipped. Pass `--verbosity=info` to see which ones.'
        )
        log_path = log.GetLogFilePath()
        if log_path:
            log.status.Print(
                ('You may also view the gcloud log file, found at\n'
                 '[{0}].').format(log_path))
예제 #8
0
    def CopyTarballToGCS(self,
                         storage_client,
                         gcs_object,
                         ignore_file=None,
                         hide_logs=False):
        """Copy a tarball of the snapshot to GCS.

    Args:
      storage_client: storage_api.StorageClient, The storage client to use for
        uploading.
      gcs_object: storage.objects Resource, The GCS object to write.
      ignore_file: Override .gcloudignore file to specify skip files.
      hide_logs: boolean, not print the status message if the flag is true.

    Returns:
      storage_v1_messages.Object, The written GCS object.
    """
        # Record how long the whole source upload takes.
        with metrics.RecordDuration(metric_names.UPLOAD_SOURCE):
            # Work from src_dir while building the tarball (presumably so
            # archive member paths are relative — _MakeTarball is defined
            # elsewhere; verify there).
            with files.ChDir(self.src_dir):
                with files.TemporaryDirectory() as tmp:
                    archive_path = os.path.join(tmp, 'file.tgz')
                    tf = self._MakeTarball(archive_path)
                    # Close to flush the tarball before uploading it.
                    tf.close()
                    # An explicit ignore_file overrides the default
                    # .gcloudignore name.
                    ignore_file_path = os.path.join(
                        self.src_dir, ignore_file
                        or gcloudignore.IGNORE_FILE_NAME)
                    if self.any_files_ignored:
                        if os.path.exists(ignore_file_path):
                            log.info('Using ignore file [{}]'.format(
                                ignore_file_path))
                        elif not hide_logs:
                            # Files were skipped without an ignore file on
                            # disk; point the user at the log for details.
                            log.status.Print(
                                _IGNORED_FILE_MESSAGE.format(
                                    log_file=log.GetLogFilePath()))
                    if not hide_logs:
                        log.status.write(
                            'Uploading tarball of [{src_dir}] to '
                            '[gs://{bucket}/{object}]\n'.format(
                                src_dir=self.src_dir,
                                bucket=gcs_object.bucket,
                                object=gcs_object.object,
                            ), )
                    return storage_client.CopyFileToGCS(
                        archive_path, gcs_object)
def CopyFilesToCodeBucket(modules, bucket):
  """Examines modules and copies files to a Google Cloud Storage bucket.

  Args:
    modules: [(str, ModuleYamlInfo)] List of pairs of module name, and parsed
      module information.
    bucket: str A URL to the Google Cloud Storage bucket where the files will be
      uploaded.
  Returns:
    A lookup from module name to a dictionary representing the manifest. See
    _BuildStagingDirectory.
  """
  manifests = {}
  with file_utils.TemporaryDirectory() as staging_directory:
    for (module, info) in modules:
      source_directory = os.path.dirname(info.file)
      excluded_files_regex = info.parsed.skip_files.regex

      manifest = _BuildStagingDirectory(source_directory,
                                        staging_directory,
                                        bucket,
                                        excluded_files_regex)
      manifests[module] = manifest

    # BUG FIX: dict.itervalues() exists only on Python 2 and raises
    # AttributeError on Python 3; values() behaves the same on both.
    # Skip the rsync entirely when every manifest is empty.
    if any(manifests.values()):
      log.status.Print('Copying files to Google Cloud Storage...')
      log.status.Print('Synchronizing files to [{b}].'.format(b=bucket))
      try:
        # Silence per-file output while syncing; restored in `finally`.
        log.SetUserOutputEnabled(False)
        exit_code = cloud_storage.Rsync(staging_directory, bucket)
        if exit_code:
          raise exceptions.ToolException(
              ('Could not synchronize files. The gsutil command exited with '
               'status [{s}]. Command output is available in [{l}].').format(
                   s=exit_code, l=log.GetLogFilePath()))
      finally:
        # Reset to the standard log level.
        log.SetUserOutputEnabled(None)

  return manifests
예제 #10
0

def _UploadToGcs(is_async, local_path, daisy_bucket, image_uuid):
    """Uploads a local file to GCS. Returns the gs:// URI to that file.

    BUG FIX: the first parameter was named `async`, which is a reserved
    keyword since Python 3.7 and made this function a SyntaxError there.
    Renamed to `is_async` (matching the sibling implementation in this
    file); positional callers are unaffected.

    Args:
      is_async: bool, if True, print a note that the import continues
        asynchronously after the upload.
      local_path: str, path of the local file to upload.
      daisy_bucket: str, name of the scratch GCS bucket.
      image_uuid: str, unique id used to namespace the object name.

    Returns:
      str, the gs:// URI of the uploaded file.

    Raises:
      exceptions.FailedSubCommand: if the gsutil cp command fails.
    """
    # Replace spaces so the resulting object name is a clean gs:// path.
    file_name = os.path.basename(local_path).replace(' ', '-')
    dest_path = 'gs://{0}/tmpimage/{1}-{2}'.format(daisy_bucket, image_uuid,
                                                   file_name)
    log.status.Print('\nCopying [{0}] to [{1}]'.format(local_path, dest_path))
    if is_async:
        log.status.Print(
            'Once completed, your image will be imported from Cloud'
            ' Storage asynchronously.')
    retcode = storage_util.RunGsutilCommand('cp', [local_path, dest_path])
    if retcode != 0:
        log.err.Print('Failed to upload file. See {} for details.'.format(
            log.GetLogFilePath()))
        raise exceptions.FailedSubCommand(
            ['gsutil', 'cp', local_path, dest_path], retcode)
    return dest_path


def _CopyToScratchBucket(source_uri, image_uuid, storage_client, daisy_bucket):
    """Copy image from source_uri to daisy scratch bucket."""
    image_file = os.path.basename(source_uri)
    dest_uri = 'gs://{0}/tmpimage/{1}-{2}'.format(daisy_bucket, image_uuid,
                                                  image_file)
    src_object = resources.REGISTRY.Parse(source_uri,
                                          collection='storage.objects')
    dest_object = resources.REGISTRY.Parse(dest_uri,
                                           collection='storage.objects')
    log.status.Print('\nCopying [{0}] to [{1}]'.format(source_uri, dest_uri))