Code example #1
def _CreateSourcesZipFile(zip_dir, source_path, ignore_file=None):
    """Prepare zip file with source of the function to upload.

  Args:
    zip_dir: str, directory in which the zip file will be created. The file
             will be named `fun.zip`.
    source_path: str, directory containing the sources to be zipped.
    ignore_file: custom ignore_file name.
        Override .gcloudignore file to customize files to be skipped.
  Returns:
    Path to the zip file (str).
  Raises:
    FunctionsError
  """
    api_util.ValidateDirectoryExistsOrRaiseFunctionError(source_path)
    if ignore_file and not os.path.exists(
            os.path.join(source_path, ignore_file)):
        raise exceptions.FileNotFoundError(
            'File {0} referenced by --ignore-file '
            'does not exist.'.format(ignore_file))
    _ValidateUnpackedSourceSize(source_path, ignore_file)
    zip_file_name = os.path.join(zip_dir, 'fun.zip')
    try:
        chooser = _GetChooser(source_path, ignore_file)
        predicate = chooser.IsIncluded
        archive.MakeZipFromDir(zip_file_name, source_path, predicate=predicate)
    except ValueError as e:
        raise exceptions.FunctionsError(
            'Error creating a ZIP archive with the source code '
            'for directory {0}: {1}'.format(source_path, six.text_type(e)))
    return zip_file_name
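A minimal usage sketch of the helper above, staging the archive in a temporary directory via Python's standard tempfile module; the source path is a placeholder.

import tempfile

# Hypothetical call: zip the sources under ./my-function into a temporary
# staging directory. The helper returns the path to <zip_dir>/fun.zip.
with tempfile.TemporaryDirectory() as zip_dir:
    zip_path = _CreateSourcesZipFile(zip_dir, './my-function')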
Code example #2
def _UploadFileToGeneratedUrl(source, messages, service, function_ref):
    """Upload function source to URL generated by API."""
    url = _GetUploadUrl(messages, service, function_ref)
    upload = transfer.Upload.FromFile(source, mime_type='application/zip')
    try:
        upload_request = http_wrapper.Request(
            url,
            http_method='PUT',
            headers={
                'content-type': 'application/zip',
                # Magic header: the request fails without it.
                # Not documented at the time this comment was written.
                'x-goog-content-length-range': '0,104857600',
                'Content-Length': '{0:d}'.format(upload.total_size)
            })
        upload_request.body = upload.stream.read()
    finally:
        upload.stream.close()
    response = http_wrapper.MakeRequest(
        transports.GetApitoolsTransport(),
        upload_request,
        retry_func=lambda ra: _UploadFileToGeneratedUrlRetryFunc(  # pylint: disable=g-long-lambda
            upload.retry_func, ra),
        check_response_func=lambda r: _UploadFileToGeneratedUrlCheckResponse(  # pylint: disable=g-long-lambda
            http_wrapper.CheckResponse, r),
        retries=upload.num_retries)
    if not _CheckUploadStatus(response.status_code):
        raise exceptions.FunctionsError(
            'Failed to upload the function source code to the signed URL: {url}. '
            'Status: [{code}:{detail}]'.format(url=url,
                                               code=response.status_code,
                                               detail=response.content))
    return url
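The _CheckUploadStatus helper used above is not included in this collection; a minimal sketch, assuming it only needs to treat any 2xx HTTP status as a successful upload:

def _CheckUploadStatus(status_code):
    """Assumed sketch: returns True if the HTTP status code is 2xx."""
    return 200 <= status_code < 300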
Code example #3
def ValidateTriggerArgs(trigger_event, trigger_resource, retry_specified,
                        trigger_http_specified):
  """Check if args related function triggers are valid.

  Args:
    trigger_event: The trigger event
    trigger_resource: The trigger resource
    retry_specified: Whether or not `--retry` was specified
    trigger_http_specified: Whether or not `--trigger-http` was specified

  Raises:
    FunctionsError.
  """
  # Check that Event Type is valid
  trigger_provider = triggers.TRIGGER_PROVIDER_REGISTRY.ProviderForEvent(
      trigger_event)
  trigger_provider_label = trigger_provider.label
  if trigger_provider_label != triggers.UNADVERTISED_PROVIDER_LABEL:
    resource_type = triggers.TRIGGER_PROVIDER_REGISTRY.Event(
        trigger_provider_label, trigger_event).resource_type
    if trigger_resource is None and resource_type != triggers.Resources.PROJECT:
      raise exceptions.FunctionsError(
          'You must provide --trigger-resource when using '
          '--trigger-event={}'.format(trigger_event))
  if retry_specified and trigger_http_specified:
    raise calliope_exceptions.ConflictingArgumentsException(
        '--trigger-http', '--retry')
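A hedged usage sketch of the check above; the event name is illustrative and the outcome depends on the contents of the trigger provider registry:

# A storage event requires --trigger-resource (the bucket), so a call like
# this is expected to raise FunctionsError.
ValidateTriggerArgs(
    trigger_event='google.storage.object.finalize',
    trigger_resource=None,
    retry_specified=False,
    trigger_http_specified=False)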
Code example #4
def _GetOperationStatus(client,
                        get_request,
                        progress_tracker=None,
                        try_set_invoker=None,
                        on_every_poll=None):
    """Helper function for getting the status of an operation.

  Args:
    client: The client used to make requests.
    get_request: A GetOperationRequest message.
    progress_tracker: progress_tracker.ProgressTracker, A reference for the
        progress tracker to tick, in case this function is used in a Retryer.
    try_set_invoker: function to try setting invoker, see above TODO.
    on_every_poll: list of functions to execute every time we poll.
                   Functions should take in Operation as an argument.

  Returns:
    True if the operation succeeded without error.
    False if the operation is not yet done.

  Raises:
    FunctionsError: If the operation is finished with error.
  """
    if try_set_invoker:
        try_set_invoker()
    if progress_tracker:
        progress_tracker.Tick()
    op = client.operations.Get(get_request)
    if op.error:
        raise exceptions.FunctionsError(OperationErrorToString(op.error))
    if on_every_poll:
        for function in on_every_poll:
            function(op)
    return op.done
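The on_every_poll hook above accepts a list of callables, each receiving the Operation message; a minimal sketch of such a callback (the bookkeeping is illustrative):

# Illustrative poll callback: collect the name of each polled operation.
polled_operation_names = []

def _RecordPolledOperation(operation):
    """Example on_every_poll callback; receives the Operation message."""
    polled_operation_names.append(operation.name)

# Passed along as, for example: on_every_poll=[_RecordPolledOperation]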
Code example #5
def _GetUploadUrl(messages, service, function_ref, kms_key):
    """Retrieves the upload url to upload source code."""
    generate_upload_url_request = None
    if kms_key:
        generate_upload_url_request = messages.GenerateUploadUrlRequest(
            kmsKeyName=kms_key)
    request = (
        messages.
        CloudfunctionsProjectsLocationsFunctionsGenerateUploadUrlRequest)(
            parent='projects/{}/locations/{}'.format(function_ref.projectsId,
                                                     function_ref.locationsId),
            generateUploadUrlRequest=generate_upload_url_request)
    try:
        response = service.GenerateUploadUrl(request)
        return response.uploadUrl
    except http_exceptions.HttpError as e:
        # TODO(b/223631733): Check for a specific error once the backend supports it.
        if kms_key and e.status_code == http_client.INTERNAL_SERVER_ERROR:
            raise exceptions.FunctionsError(
                'An error occurred. Ensure that the KMS key {kms_key} exists and the '
                'Cloud Functions service account has encrypter/decrypter permissions '
                '(roles/cloudkms.cryptoKeyEncrypterDecrypter) on the key. If you '
                'have recently made changes to the IAM config, wait a few minutes '
                'for the config to propagate and try again.'.format(
                    kms_key=kms_key))
        raise e
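A hedged usage sketch of _GetUploadUrl with a customer-managed key; the project, location, key ring, and key names are placeholders following the standard Cloud KMS resource-name format:

kms_key = ('projects/my-project/locations/us-central1/'
           'keyRings/my-keyring/cryptoKeys/my-key')
upload_url = _GetUploadUrl(messages, service, function_ref, kms_key)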
Code example #6
File: util.py  Project: PrateekKhatri/gcloud_cli
def ValidateDirectoryExistsOrRaiseFunctionError(directory):
    """Checks if a source directory exists.

  Args:
    directory: str, a local path to the directory provided by the user.
  Returns:
    The argument provided, if found valid.
  Raises:
    FunctionsError: If the user provided a path that is not a valid directory.
  """
    if not os.path.exists(directory):
        raise exceptions.FunctionsError(
            'argument `--source`: Provided directory does not exist')
    if not os.path.isdir(directory):
        raise exceptions.FunctionsError(
            'argument `--source`: Provided path does not point to a directory')
    return directory
Code example #7
File: command.py  Project: PrateekKhatri/gcloud_cli
def Run(args):
    """Delete a Google Cloud Function."""
    client = util.GetApiClientInstance()
    messages = client.MESSAGES_MODULE
    function_ref = args.CONCEPTS.name.Parse()
    function_url = function_ref.RelativeName()
    prompt_message = 'Resource [{0}] will be deleted.'.format(function_url)
    if not console_io.PromptContinue(message=prompt_message):
        raise exceptions.FunctionsError('Deletion aborted by user.')
    op = client.projects_locations_functions.Delete(
        messages.CloudfunctionsProjectsLocationsFunctionsDeleteRequest(
            name=function_url))
    operations.Wait(op, messages, client)
    log.DeletedResource(function_url)
Code example #8
def _WaitForOperation(client,
                      get_request,
                      message,
                      try_set_invoker=None,
                      on_every_poll=None):
    """Wait for an operation to complete.

  No operation is done instantly. Wait for it to finish following this logic:
  * we wait 1s (jitter is also 1s)
  * we query service
  * if the operation is not finished we loop to first point
  * wait limit is 1820s - if we get to that point it means something is wrong
        and we can throw an exception

  Args:
    client:  The client used to make requests.
    get_request: A GetOperationRequest message.
    message: str, The string to print while polling.
    try_set_invoker: function to try setting invoker, see above TODO.
    on_every_poll: list of functions to execute every time we poll.
                   Functions should take in Operation as an argument.

  Returns:
    True if the operation succeeded without error.

  Raises:
    FunctionsError: If the operation takes more than 1820s.
  """

    with console_progress_tracker.ProgressTracker(message,
                                                  autotick=False) as pt:
        # This is actually linear retryer.
        retryer = retry.Retryer(exponential_sleep_multiplier=1,
                                max_wait_ms=MAX_WAIT_MS,
                                wait_ceiling_ms=WAIT_CEILING_MS)
        try:
            retryer.RetryOnResult(_GetOperationStatus, [client, get_request], {
                'progress_tracker': pt,
                'try_set_invoker': try_set_invoker,
                'on_every_poll': on_every_poll
            },
                                  should_retry_if=None,
                                  sleep_ms=SLEEP_MS)
        except retry.WaitException:
            raise exceptions.FunctionsError(
                'Operation {0} is taking too long'.format(get_request.name))
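The SLEEP_MS, WAIT_CEILING_MS, and MAX_WAIT_MS constants are defined elsewhere in the module; as an assumption, values consistent with the docstring (1 s polling, effectively linear backoff, 1820 s limit) would be:

# Assumed values, implied by the docstring above rather than shown here.
SLEEP_MS = 1000            # poll roughly every second
WAIT_CEILING_MS = 1000     # cap the backoff so the retryer stays linear
MAX_WAIT_MS = 1820 * 1000  # give up after 1820 seconds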
Code example #9
def _ApplySecretsArgsToFunction(function, args):
    """Populates cloud function message with secrets payload if applicable.

  It compares the CLI args with the existing secrets configuration to compute
  the effective secrets configuration.

  Args:
    function: Cloud function message to be checked and populated.
    args: All CLI arguments.

  Returns:
    updated_fields: update mask containing the list of fields to be updated.
  """
    if not secrets_config.IsArgsSpecified(args):
        return []

    old_secrets = secrets_util.GetSecretsAsDict(
        function.secretEnvironmentVariables, function.secretVolumes)
    new_secrets = {}
    try:
        new_secrets = secrets_config.ApplyFlags(
            old_secrets, args, _GetProject(),
            project_util.GetProjectNumber(_GetProject()))
    except ArgumentTypeError as error:
        exceptions.reraise(function_exceptions.FunctionsError(error))

    if new_secrets:
        _LogSecretsPermissionMessage(_GetProject(),
                                     function.serviceAccountEmail)

    old_secret_env_vars, old_secret_volumes = secrets_config.SplitSecretsDict(
        old_secrets)
    new_secret_env_vars, new_secret_volumes = secrets_config.SplitSecretsDict(
        new_secrets)

    updated_fields = []
    if old_secret_env_vars != new_secret_env_vars:
        function.secretEnvironmentVariables = secrets_util.SecretEnvVarsToMessages(
            new_secret_env_vars, api_util.GetApiMessagesModule())
        updated_fields.append('secretEnvironmentVariables')
    if old_secret_volumes != new_secret_volumes:
        function.secretVolumes = secrets_util.SecretVolumesToMessages(
            new_secret_volumes, api_util.GetApiMessagesModule())
        updated_fields.append('secretVolumes')
    return updated_fields
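A hedged sketch of how the returned list is typically consumed; joining the field names into an update mask for the patch request is an assumption here, not the exact call used by the deploy command:

updated_fields = _ApplySecretsArgsToFunction(function, args)
if updated_fields:
    # Illustrative: field names become a comma-separated update mask.
    update_mask = ','.join(sorted(updated_fields))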
Code example #10
def _ValidateUnpackedSourceSize(path, ignore_file=None):
    """Validate size of unpacked source files."""
    chooser = _GetChooser(path, ignore_file)
    predicate = chooser.IsIncluded
    try:
        size_b = file_utils.GetTreeSizeBytes(path, predicate=predicate)
    except OSError as e:
        raise exceptions.FunctionsError(
            'Error building source archive from path [{path}]. '
            'Could not validate source files: [{error}]. '
            'Please ensure that path [{path}] contains function code or '
            'specify another directory with --source'.format(path=path,
                                                             error=e))
    size_limit_mb = 512
    size_limit_b = size_limit_mb * 2**20
    if size_b > size_limit_b:
        raise exceptions.OversizedDeployment(
            six.text_type(size_b) + 'B',
            six.text_type(size_limit_b) + 'B')
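For reference, the two size limits appearing in these snippets are related by plain arithmetic on the values shown: code example #10 allows 512 MiB of unpacked sources, while the 'x-goog-content-length-range' header in code example #2 caps the uploaded archive at 100 MiB.

assert 512 * 2**20 == 536870912    # unpacked source limit, in bytes
assert 100 * 2**20 == 104857600    # the '0,104857600' range in the header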
Code example #11
def _UploadFileToGcs(source, function_ref, stage_bucket):
    """Upload local source files to GCS staging bucket."""
    zip_file = _GenerateRemoteZipFileName(function_ref.RelativeName())
    bucket_ref = storage_util.BucketReference.FromArgument(stage_bucket)
    dest_object = storage_util.ObjectReference.FromBucketRef(
        bucket_ref, zip_file)

    # TODO(b/109938541): Remove the gsutil implementation once the new
    # implementation is stable.
    use_gsutil = properties.VALUES.storage.use_gsutil.GetBool()
    if use_gsutil:
        upload_success = _UploadFileToGcsGsutil(source, dest_object)
    else:
        upload_success = _UploadFileToGcsStorageApi(source, dest_object)

    if not upload_success:
        raise exceptions.FunctionsError(
            'Failed to upload the function source code to the bucket {0}'.
            format(stage_bucket))
    return dest_object.ToUrl()
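The _GenerateRemoteZipFileName helper is not shown either; a minimal sketch, assuming it only needs to derive a unique object name from the function's relative resource name (the exact naming scheme is a guess):

import uuid

def _GenerateRemoteZipFileName(function_relative_name):
    """Hypothetical sketch: build a unique GCS object name for the archive."""
    return '{}-{}.zip'.format(
        function_relative_name.replace('/', '-'), uuid.uuid4().hex)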