Code Example #1
def _AlterRuntime(config_filename, runtime):
    try:
        # 0. Take backup
        with tempfile.NamedTemporaryFile(prefix='app.yaml.') as f:
            backup_fname = f.name
        log.status.Print(
            'Copying original config [{0}] to backup location [{1}].'.format(
                config_filename, backup_fname))
        shutil.copyfile(config_filename, backup_fname)
        # 1. Open and parse file using ruamel
        with files.FileReader(config_filename) as yaml_file:
            encoding = yaml_file.encoding
            config = yaml.load(yaml_file, yaml.RoundTripLoader)
        # 2. Alter the ruamel in-memory object representing the yaml file
        config['runtime'] = runtime
        # 3. Create an in-memory file buffer and write yaml file to it
        raw_buf = io.BytesIO()
        tmp_yaml_buf = io.TextIOWrapper(raw_buf, encoding)
        yaml.dump(config, tmp_yaml_buf, Dumper=yaml.RoundTripDumper)
        # 4. Overwrite the original app.yaml
        with files.BinaryFileWriter(config_filename) as yaml_file:
            tmp_yaml_buf.seek(0)
            yaml_file.write(raw_buf.getvalue())
    except Exception as e:
        raise fingerprinter.AlterConfigFileError(e)
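These examples come from the Google Cloud SDK, where files.BinaryFileWriter appears to live in googlecloudsdk.core.util.files (an assumption based on the imports used here); it opens a path for binary writing and works as a context manager. A minimal sketch of the basic call:

from googlecloudsdk.core.util import files  # assumed import path

with files.BinaryFileWriter('/tmp/app.yaml') as f:
    f.write(b'runtime: python39\n')  # bytes, not str: the writer is binary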
Code Example #2
def _ungzip_file(file_path):
    """Unzips gzip file."""
    temporary_file_path = file_path + '.tmp'
    with gzip.open(file_path, 'rb') as gzipped_file:
        with files.BinaryFileWriter(temporary_file_path) as ungzipped_file:
            shutil.copyfileobj(gzipped_file, ungzipped_file)
    shutil.move(temporary_file_path, file_path)
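The same swap-in-place pattern works with the standard library alone; a sketch with plain open() standing in for files.BinaryFileWriter:

import gzip
import shutil

def ungzip_in_place(file_path):
    # Decompress next to the original, then replace it in one move.
    temporary_file_path = file_path + '.tmp'
    with gzip.open(file_path, 'rb') as gzipped_file:
        with open(temporary_file_path, 'wb') as ungzipped_file:
            shutil.copyfileobj(gzipped_file, ungzipped_file)
    shutil.move(temporary_file_path, file_path)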
Code Example #3
def UploadSource(upload_dir, source_files, object_ref, gen_files=None):
    """Upload a gzipped tarball of the source directory to GCS.

  Note: To provide parity with docker's behavior, we must respect .dockerignore.

  Args:
    upload_dir: the directory to be archived.
    source_files: [str], relative paths to upload.
    object_ref: storage_util.ObjectReference, the Cloud Storage location to
      upload the source tarball to.
    gen_files: dict of filename to (str) contents of generated config and
      source context files.
  """
    gen_files = gen_files or {}
    dockerignore_contents = _GetDockerignoreExclusions(upload_dir, gen_files)
    included_paths = _GetIncludedPaths(upload_dir, source_files,
                                       dockerignore_contents)

    # We can't use tempfile.NamedTemporaryFile here because ... Windows.
    # See https://bugs.python.org/issue14243. There are small cleanup races
    # during process termination that will leave artifacts on the filesystem.
    # eg, CTRL-C on windows leaves both the directory and the file. Unavoidable.
    # On Posix, `kill -9` has similar behavior, but CTRL-C allows cleanup.
    with files.TemporaryDirectory() as temp_dir:
        f = files.BinaryFileWriter(os.path.join(temp_dir, 'src.tgz'))
        with gzip.GzipFile(mode='wb', fileobj=f) as gz:
            _CreateTar(upload_dir, gen_files, included_paths, gz)
        f.close()
        storage_client = storage_api.StorageClient()
        storage_client.CopyFileToGCS(object_ref.bucket_ref, f.name,
                                     object_ref.name)
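_CreateTar and the storage client are SDK-internal, but the write path itself (a tar stream compressed through a gzip wrapper into a binary file object) can be sketched with the standard library; 'my_source_dir' is a hypothetical directory:

import gzip
import os
import tarfile
import tempfile

with tempfile.TemporaryDirectory() as temp_dir:
    tgz_path = os.path.join(temp_dir, 'src.tgz')
    with open(tgz_path, 'wb') as f:
        with gzip.GzipFile(mode='wb', fileobj=f) as gz:
            with tarfile.open(mode='w|', fileobj=gz) as tar:
                tar.add('my_source_dir', arcname='.')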
Code Example #4
def decompress_gzip_if_necessary(source_resource,
                                 gzipped_path,
                                 destination_path,
                                 do_not_decompress_flag=False):
    """Checks if file is elligible for decompression and decompresses if true.

  Args:
    source_resource (ObjectResource): May contain encoding metadata.
    gzipped_path (str): File path to unzip.
    destination_path (str): File path to write unzipped file to.
    do_not_decompress_flag (bool): User flag that blocks decompression.

  Returns:
    (bool) True if file was successfully decompressed, else False.
  """
    content_encoding = getattr(source_resource.metadata, 'contentEncoding', '')
    if (do_not_decompress_flag or content_encoding is None
            or 'gzip' not in content_encoding.split(',')):
        return False

    try:
        with gzip.open(gzipped_path, 'rb') as gzipped_file:
            with files.BinaryFileWriter(destination_path,
                                        create_path=True) as ungzipped_file:
                shutil.copyfileobj(gzipped_file, ungzipped_file)
        return True
    except OSError:
        # May indicate trying to decompress non-gzipped file. Clean up.
        os.remove(destination_path)

    return False
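A quick illustration of the encoding gate above, with hypothetical header values. Note that split(',') does not strip whitespace, so only an exact 'gzip' token matches:

assert 'gzip' in 'gzip,identity'.split(',')
assert 'gzip' not in 'identity, gzip'.split(',')  # token is ' gzip', with a space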
Code Example #5
    def _perform_download(self, digesters, progress_callback,
                          download_strategy, start_byte, end_byte):
        """Prepares file stream, calls API, and validates hash."""
        mode = (files.BinaryFileWriterMode.MODIFY
                if start_byte else files.BinaryFileWriterMode.TRUNCATE)
        with files.BinaryFileWriter(
                self._destination_resource.storage_url.object_name,
                create_path=True,
                mode=mode) as download_stream:
            download_stream.seek(start_byte)
            provider = self._source_resource.storage_url.scheme
            # TODO(b/162264437): Support all of download_object's parameters.
            api_factory.get_api(provider).download_object(
                self._source_resource,
                download_stream,
                digesters=digesters,
                download_strategy=download_strategy,
                progress_callback=progress_callback,
                start_byte=start_byte,
                end_byte=end_byte)

        # TODO(b/172048376): Add crc32c, and make this a loop.
        if util.HashAlgorithms.MD5 in digesters:
            calculated_digest = util.get_base64_hash_digest_string(
                digesters[util.HashAlgorithms.MD5])
            util.validate_object_hashes_match(
                self._source_resource.storage_url,
                self._source_resource.md5_hash, calculated_digest)
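The MODIFY/TRUNCATE switch is what makes resuming possible: MODIFY reopens the partial file without clearing it, while TRUNCATE starts over. A minimal sketch, assuming BinaryFileWriterMode behaves as used above:

start_byte = 1024  # hypothetical resume offset
mode = (files.BinaryFileWriterMode.MODIFY
        if start_byte else files.BinaryFileWriterMode.TRUNCATE)
with files.BinaryFileWriter('partial.bin', mode=mode) as stream:
    stream.seek(start_byte)  # continue where the previous attempt stopped
    stream.write(b'...remaining bytes...')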
Code Example #6
def _PossiblyWriteRedactedResponseToOutputFile(value, parsed_args):
  """Helper function for writing redacted contents to an output file."""
  if not parsed_args.output_file:
    return
  with files.BinaryFileWriter(parsed_args.output_file) as outfile:
    outfile.write(value)
  log.status.Print('The redacted contents can be viewed in [{}]'.format(
      parsed_args.output_file))
Code Example #7
    def execute(self, callback=None):
        with files.BinaryFileWriter(
                self._destination_resource.storage_url.object_name,
                create_path=True) as download_stream:
            provider = self._source_resource.storage_url.scheme
            bucket_name = self._source_resource.storage_url.bucket_name
            object_name = self._source_resource.storage_url.object_name

            # TODO(b/162264437): Support all of download_object's parameters.
            api_factory.get_api(provider).download_object(
                bucket_name, object_name, download_stream)
Code Example #8
File: file_parsers.py, Project: saranraju90/multik8s
  def WriteToDisk(self):
    """Overwrite Original Yaml File."""
    # Only write if file_path is specified.
    if not self.file_path:
      raise YamlConfigFileError('Could Not Write To Config File: Path Is Empty')
    out_file_buf = io.BytesIO()
    tmp_yaml_buf = io.TextIOWrapper(out_file_buf, newline='\n',
                                    encoding='utf-8')
    yaml.dump_all_round_trip([x.content for x in self.data],
                             stream=tmp_yaml_buf)
    with files.BinaryFileWriter(self.file_path) as f:
      tmp_yaml_buf.seek(0)
      f.write(out_file_buf.getvalue())
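The BytesIO/TextIOWrapper pair pins down the encoding and newline style regardless of platform; the wrapper must be flushed (a seek also flushes) before the raw bytes are read. A standard-library illustration:

import io

raw = io.BytesIO()
text = io.TextIOWrapper(raw, newline='\n', encoding='utf-8')
text.write(u'key: value\n')
text.flush()  # push buffered text into the underlying BytesIO
assert raw.getvalue() == b'key: value\n'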
Code Example #9
    def execute(self, callback=None):
        with files.BinaryFileWriter(
                self._destination_resource.storage_url.object_name,
                create_path=True) as download_stream:
            provider = self._source_resource.storage_url.scheme

            # TODO(b/162264437): Support all of download_object's parameters.
            api_factory.get_api(provider).download_object(
                self._source_resource, download_stream)

        with files.BinaryFileReader(
                self._destination_resource.storage_url.object_name
        ) as completed_download_stream:
            downloaded_file_hash = util.get_hash_digest_from_file_stream(
                completed_download_stream, util.HashAlgorithms.MD5)
            util.validate_object_hashes_match(
                self._source_resource.storage_url,
                self._source_resource.md5_hash, downloaded_file_hash)
Code Example #10
def create_file_if_needed(source_resource, destination_resource):
    """Creates new file if none exists or one that is too large exists at path.

  Args:
    source_resource (ObjectResource): Contains size metadata for target file.
    destination_resource (FileObjectResource|UnknownResource): Contains path to
      create file at.
  """
    file_path = destination_resource.storage_url.object_name
    if (os.path.exists(file_path)
            and os.path.getsize(file_path) <= source_resource.size):
        return

    with files.BinaryFileWriter(file_path,
                                create_path=True,
                                mode=files.BinaryFileWriterMode.TRUNCATE):
        # Wipe or create file.
        pass
Code Example #11
  def Run(self, args):
    holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
    instance_ref = self._GetInstanceRef(holder, args)
    request = holder.client.messages.ComputeInstancesGetScreenshotRequest(
        **instance_ref.AsDict())

    response = holder.client.MakeRequests([
        (holder.client.apitools_client.instances, 'GetScreenshot', request)
    ])[0]

    self._display_file_output = False
    if args.IsSpecified('destination'):
      with files.BinaryFileWriter(args.destination) as output:
        output.write(base64.b64decode(response.contents))
      self._resource_name = instance_ref.instance
      self._destination = args.destination
      self._display_file_output = True
    else:
      self._response_contents = response.contents
    return
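The screenshot endpoint returns base64-encoded image bytes, which is why a binary writer rather than a text writer is needed. The decode-and-write step, with a stand-in payload and the standard library alone:

import base64

contents = base64.b64encode(b'\x89PNG...')  # stand-in for response.contents
with open('screenshot.png', 'wb') as output:
    output.write(base64.b64decode(contents))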
Code Example #12
    def _perform_download(self, request_config, progress_callback,
                          download_strategy, start_byte, end_byte, write_mode,
                          digesters):
        """Prepares file stream, calls API, and validates hash."""
        with files.BinaryFileWriter(
                self._destination_resource.storage_url.object_name,
                create_path=True,
                mode=write_mode) as download_stream:
            download_stream.seek(start_byte)
            provider = self._source_resource.storage_url.scheme
            # TODO(b/162264437): Support all of download_object's parameters.
            api_download_result = api_factory.get_api(
                provider).download_object(
                    self._source_resource,
                    download_stream,
                    request_config,
                    digesters=digesters,
                    do_not_decompress=self._do_not_decompress,
                    download_strategy=download_strategy,
                    progress_callback=progress_callback,
                    start_byte=start_byte,
                    end_byte=end_byte)

        if hash_util.HashAlgorithm.MD5 in digesters:
            calculated_digest = hash_util.get_base64_hash_digest_string(
                digesters[hash_util.HashAlgorithm.MD5])
            download_util.validate_download_hash_and_delete_corrupt_files(
                self._destination_resource.storage_url.object_name,
                self._source_resource.md5_hash, calculated_digest)
        # Only for one-shot downloads: the final CRC32C of sliced (composite)
        # downloads is validated in FinalizeSlicedDownloadTask.
        elif (hash_util.HashAlgorithm.CRC32C in digesters
              and self._component_number is None):
            calculated_digest = crc32c.get_hash(
                digesters[hash_util.HashAlgorithm.CRC32C])
            download_util.validate_download_hash_and_delete_corrupt_files(
                self._destination_resource.storage_url.object_name,
                self._source_resource.crc32c_hash, calculated_digest)

        return api_download_result
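The crc32c module here is SDK-internal, and CRC32C (the Castagnoli polynomial) is not the same checksum as zlib.crc32. A rough equivalent of the digest step, assuming the separate google-crc32c package is installed:

import base64
import google_crc32c

checksum = google_crc32c.Checksum()
checksum.update(b'downloaded bytes')  # fed chunk by chunk during the download
calculated_digest = base64.b64encode(checksum.digest()).decode('ascii')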
Code Example #13
    def execute(self, callback=None):
        if self._source_resource.md5_hash:
            digesters = {util.HashAlgorithms.MD5: util.get_md5_hash()}
        else:
            digesters = {}

        with files.BinaryFileWriter(
                self._destination_resource.storage_url.object_name,
                create_path=True) as download_stream:
            provider = self._source_resource.storage_url.scheme

            # TODO(b/162264437): Support all of download_object's parameters.
            api_factory.get_api(provider).download_object(
                self._source_resource, download_stream, digesters=digesters)

        # TODO(b/172048376): Add crc32c, and make this a loop.
        if util.HashAlgorithms.MD5 in digesters:
            calculated_digest = util.get_base64_hash_digest_string(
                digesters[util.HashAlgorithms.MD5])
            util.validate_object_hashes_match(
                self._source_resource.storage_url,
                self._source_resource.md5_hash, calculated_digest)
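The util helpers are SDK-internal, but Cloud Storage's md5Hash metadata is the base64 encoding of the raw MD5 digest, so the comparison value can be reproduced with the standard library (a sketch):

import base64
import hashlib

digester = hashlib.md5()
digester.update(b'downloaded bytes')  # updated chunk by chunk during download
calculated_digest = base64.b64encode(digester.digest()).decode('ascii')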
Code Example #14
def EncryptedSSLCredentials(config_path):
  """Generates the encrypted client SSL credentials.

  The encrypted client SSL credentials are stored in a file which is returned
  along with the password.

  Args:
    config_path: path to the context aware configuration file.

  Raises:
    CertProvisionException: if the cert could not be provisioned.
    ConfigException: if there is an issue in the context aware config.

  Returns:
    Tuple[str, bytes]: cert and key file path and passphrase bytes.
  """
  try:
    (
        has_cert,
        cert_bytes,
        key_bytes,
        passphrase_bytes
    ) = _mtls_helper.get_client_ssl_credentials(
        generate_encrypted_key=True,
        context_aware_metadata_path=config_path)
    if has_cert:
      cert_path = os.path.join(
          config.Paths().global_config_dir, 'caa_cert.pem')
      with files.BinaryFileWriter(cert_path) as f:
        f.write(cert_bytes)
        f.write(key_bytes)
      return cert_path, passphrase_bytes
  except google_auth_exceptions.ClientCertError as caught_exc:
    new_exc = CertProvisionException(caught_exc)
    six.raise_from(new_exc, caught_exc)
  except files.Error as e:
    log.debug('context aware settings discovery file %s - %s', config_path, e)

  raise ConfigException()
Code Example #15
  def DownloadAndExtractTar(url, download_dir, extract_dir,
                            progress_callback=None, command_path='unknown'):
    """Download and extract the given tar file.

    Args:
      url: str, The URL to download.
      download_dir: str, The path to put the temporary download file into.
      extract_dir: str, The path to extract the tar into.
      progress_callback: f(float), A function to call with the fraction of
        completeness.
      command_path: the command path to include in the User-Agent header if the
        URL is HTTP

    Returns:
      [str], The files that were extracted from the tar file.

    Raises:
      URLFetchError: If there is a problem fetching the given URL.
    """
    for d in [download_dir, extract_dir]:
      if not os.path.exists(d):
        file_utils.MakeDir(d)
    download_file_path = os.path.join(download_dir, os.path.basename(url))
    if os.path.exists(download_file_path):
      os.remove(download_file_path)

    (download_callback, install_callback) = (
        console_io.SplitProgressBar(progress_callback, [1, 1]))

    try:
      req = ComponentInstaller.MakeRequest(url, command_path)
      try:
        total_size = float(req.info().get('Content-length', '0'))
      # pylint: disable=broad-except, We never want progress bars to block an
      # update.
      except Exception:
        total_size = 0

      with file_utils.BinaryFileWriter(download_file_path) as fp:
        # This is the buffer size that shutil.copyfileobj uses.
        buf_size = 16*1024
        total_written = 0

        while True:
          buf = req.read(buf_size)
          if not buf:
            break
          fp.write(buf)
          total_written += len(buf)
          if total_size:
            download_callback(total_written / total_size)

      download_callback(1)

    except (urllib.error.HTTPError,
            urllib.error.URLError,
            ssl.SSLError) as e:
      raise URLFetchError(e)

    with tarfile.open(name=download_file_path) as tar:
      members = tar.getmembers()
      total_files = len(members)

      files = []
      for num, member in enumerate(members, start=1):
        files.append(member.name + '/' if member.isdir() else member.name)
        tar.extract(member, extract_dir)
        install_callback(num / total_files)

      install_callback(1)

    os.remove(download_file_path)
    return files
Code Example #16
def DownloadAndExtractTar(url,
                          download_dir,
                          extract_dir,
                          progress_callback=None,
                          command_path='unknown'):
    """Download and extract the given tar file.

  Args:
    url: str, The URL to download.
    download_dir: str, The path to put the temporary download file into.
    extract_dir: str, The path to extract the tar into.
    progress_callback: f(float), A function to call with the fraction of
      completeness.
    command_path: the command path to include in the User-Agent header if the
      URL is HTTP

  Returns:
    [str], The files that were extracted from the tar file.

  Raises:
    URLFetchError: If there is a problem fetching the given URL.
  """
    for d in [download_dir, extract_dir]:
        if not os.path.exists(d):
            file_utils.MakeDir(d)
    download_file_path = os.path.join(download_dir, os.path.basename(url))
    if os.path.exists(download_file_path):
        os.remove(download_file_path)

    (download_callback,
     install_callback) = (console_io.SplitProgressBar(progress_callback,
                                                      [1, 1]))

    try:
        response = MakeRequest(url, command_path)
        with file_utils.BinaryFileWriter(download_file_path) as fp:
            total_written = 0
            total_size = len(response.content)
            for chunk in response.iter_content(chunk_size=WRITE_BUFFER_SIZE):
                fp.write(chunk)
                total_written += len(chunk)
                download_callback(total_written / total_size)
        download_callback(1)
    except (requests.exceptions.HTTPError, OSError) as e:
        raise URLFetchError(e)

    with tarfile.open(name=download_file_path) as tar:
        members = tar.getmembers()
        total_files = len(members)

        files = []
        for num, member in enumerate(members, start=1):
            files.append(member.name + '/' if member.isdir() else member.name)
            tar.extract(member, extract_dir)
            full_path = os.path.join(extract_dir, member.name)
            # Ensure read-and-write permission for all files
            if os.path.isfile(full_path) and not os.access(full_path, os.W_OK):
                os.chmod(full_path, stat.S_IWUSR | stat.S_IREAD)
            install_callback(num / total_files)

        install_callback(1)

    os.remove(download_file_path)
    return files
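One caveat in the example above: len(response.content) forces the entire body into memory before the chunked writes begin. A fully streaming variant (a sketch assuming MakeRequest wraps the requests library and the server sends Content-Length):

import requests

response = requests.get('https://example.com/file.tgz', stream=True)
total_size = int(response.headers.get('Content-Length', 0))
with open('file.tgz', 'wb') as fp:
    total_written = 0
    for chunk in response.iter_content(chunk_size=16 * 1024):
        fp.write(chunk)
        total_written += len(chunk)
        if total_size:
            print(total_written / total_size)  # progress fraction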
Code Example #17
    def Run(self, args):
        if args.no_clobber and args.if_generation_match:
            raise ValueError(
                'Cannot specify both generation precondition and no-clobber.')

        encryption_util.initialize_key_store(args)

        source_expansion_iterator = name_expansion.NameExpansionIterator(
            args.source,
            all_versions=args.all_versions,
            recursion_requested=args.recursive,
            ignore_symlinks=args.ignore_symlinks)
        task_status_queue = task_graph_executor.multiprocessing_context.Queue()

        raw_destination_url = storage_url.storage_url_from_string(
            args.destination)
        if (isinstance(raw_destination_url, storage_url.FileUrl)
                and args.storage_class):
            raise ValueError(
                'Cannot specify storage class for a non-cloud destination: {}'.
                format(raw_destination_url))

        parallelizable = True
        shared_stream = None
        if (args.all_versions
                and (properties.VALUES.storage.process_count.GetInt() != 1
                     or properties.VALUES.storage.thread_count.GetInt() != 1)):
            log.warning(
                'Using sequential instead of parallel task execution. This will'
                ' maintain version ordering when copying all versions of an object.'
            )
            parallelizable = False
        if (isinstance(raw_destination_url, storage_url.FileUrl)
                and raw_destination_url.is_pipe):
            log.warning('Downloading to a pipe.'
                        ' This command may stall until the pipe is read.')
            parallelizable = False
            shared_stream = files.BinaryFileWriter(args.destination)

        user_request_args = (
            user_request_args_factory.get_user_request_args_from_command_args(
                args,
                metadata_type=user_request_args_factory.MetadataType.OBJECT))
        task_iterator = copy_task_iterator.CopyTaskIterator(
            source_expansion_iterator,
            args.destination,
            custom_md5_digest=args.content_md5,
            do_not_decompress=args.do_not_decompress,
            print_created_message=args.print_created_message,
            shared_stream=shared_stream,
            skip_unsupported=args.skip_unsupported,
            task_status_queue=task_status_queue,
            user_request_args=user_request_args,
        )
        self.exit_code = task_executor.execute_tasks(
            task_iterator,
            parallelizable=parallelizable,
            task_status_queue=task_status_queue,
            progress_manager_args=task_status.ProgressManagerArgs(
                task_status.IncrementType.FILES_AND_BYTES,
                manifest_path=user_request_args.manifest_path,
            ),
            continue_on_error=args.continue_on_error,
        )

        if shared_stream:
            shared_stream.close()
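The pipe check relies on the SDK's storage_url.is_pipe; with the standard library alone, a named pipe (FIFO) can be detected roughly like this:

import os
import stat

def is_fifo(path):
    # True for named pipes; False for regular files and missing paths.
    try:
        return stat.S_ISFIFO(os.stat(path).st_mode)
    except OSError:
        return False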