Example #1
def TektonYamlDataToTaskRun(data):
  """Convert Tekton yaml file into TaskRun message."""
  _VersionCheck(data)
  metadata = _MetadataTransform(data)
  spec = data["spec"]
  if "taskSpec" in spec:
    _TaskSpecTransform(spec["taskSpec"])
    managed_sidecars = _MetadataToSidecar(metadata)
    if managed_sidecars:
      spec["taskSpec"]["managedSidecars"] = managed_sidecars
  elif "taskRef" not in spec:
    raise cloudbuild_exceptions.InvalidYamlError(
        "TaskSpec or TaskRef is required.")

  for workspace in spec.get("workspaces", []):
    _WorkspaceTransform(workspace)
  _ServiceAccountTransform(spec)
  input_util.ParamDictTransform(spec.get("params", []))

  discarded_fields = _CheckUnsupportedFields(spec, _TASKRUN_UNSUPPORTED_FIELDS)
  messages = client_util.GetMessagesModule()
  schema_message = encoding.DictToMessage(spec, messages.TaskRun)

  input_util.UnrecognizedFields(schema_message)
  return schema_message, discarded_fields
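A hedged sketch of the input this function expects, inferred only from the checks visible above; the annotation key and version value are placeholders, since _WORKER_POOL_ANNOTATION and _VersionCheck are not shown here.

# Illustrative input shape only. The real annotation key and the apiVersion
# accepted by _VersionCheck are not shown above, so those values are placeholders.
task_run_data = {
    "apiVersion": "<version accepted by _VersionCheck>",
    "kind": "TaskRun",
    "metadata": {
        "name": "my-task-run",
        "annotations": {"<worker-pool annotation key>": "my-worker-pool"},
    },
    # Either an inline taskSpec or a taskRef is required (see the check above).
    "spec": {"taskRef": {"name": "my-task"}},
}
# task_run_message, discarded = TektonYamlDataToTaskRun(task_run_data)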
Example #2
def TektonYamlDataToPipelineRun(data):
  """Convert Tekton yaml file into PipelineRun message."""
  _VersionCheck(data)
  _MetadataTransform(data)
  spec = data["spec"]
  if "pipelineSpec" in spec:
    _PipelineSpecTransform(spec["pipelineSpec"])
  elif "pipelineRef" not in spec:
    raise cloudbuild_exceptions.InvalidYamlError(
        "PipelineSpec or PipelineRef is required.")

  if "resources" in spec:
    spec.pop("resources")
    log.warning(
        "PipelineResources are dropped because they are deprecated: "
        "https://github.com/tektoncd/pipeline/blob/main/docs/resources.md")

  for workspace in spec.get("workspaces", []):
    _WorkspaceTransform(workspace)
  _ServiceAccountTransform(spec)
  input_util.ParamDictTransform(spec.get("params", []))

  discarded_fields = _CheckUnsupportedFields(spec,
                                             _PIPELINERUN_UNSUPPORTED_FIELDS)
  messages = client_util.GetMessagesModule()
  schema_message = encoding.DictToMessage(spec, messages.PipelineRun)

  input_util.UnrecognizedFields(schema_message)
  return schema_message, discarded_fields
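The PipelineRun conversion mirrors the TaskRun one; the extra behavior visible here is the handling of the deprecated resources block, sketched below with illustrative field values.

# Illustrative fragment only; other keys go through the same transforms shown above.
pipeline_run_spec = {
    "pipelineRef": {"name": "my-pipeline"},  # or an inline "pipelineSpec"
    "resources": [{"name": "src", "resourceRef": {"name": "git-source"}}],
}
# After TektonYamlDataToPipelineRun runs, spec["resources"] is popped and only a
# deprecation warning is logged; nothing from that block reaches the PipelineRun
# message.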
Example #3
def ParamValueTransform(param_value):
    """Transform a param value into its typed (STRING or ARRAY) dict form."""
    if isinstance(param_value, (str, float)):
        return {"type": "STRING", "stringVal": str(param_value)}
    elif isinstance(param_value, list):
        return {"type": "ARRAY", "arrayVal": param_value}
    else:
        raise cloudbuild_exceptions.InvalidYamlError(
            "Unsupported param value type. {msg_type}".format(
                msg_type=type(param_value)))
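The mapping above is small enough to show end to end; these input/output pairs follow directly from the three branches.

# Follows directly from the branches above; not an exhaustive test.
assert ParamValueTransform("hello") == {"type": "STRING", "stringVal": "hello"}
assert ParamValueTransform(1.5) == {"type": "STRING", "stringVal": "1.5"}
assert ParamValueTransform(["a", "b"]) == {"type": "ARRAY", "arrayVal": ["a", "b"]}
# ParamValueTransform(42) raises InvalidYamlError: ints match neither branch,
# so integer params must be quoted as strings in the yaml.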
Example #4
def _MetadataTransform(data):
  """Helper funtion to transform the metadata."""
  spec = data["spec"]
  if not spec:
    raise cloudbuild_exceptions.InvalidYamlError("spec is empty.")

  metadata = data.pop("metadata")
  if not metadata:
    raise cloudbuild_exceptions.InvalidYamlError("Metadata is missing in yaml.")
  annotations = metadata.get("annotations", {})
  labels = metadata.get("labels", {})
  if _WORKER_POOL_ANNOTATION not in annotations:
    raise cloudbuild_exceptions.InvalidYamlError(
        "Workerpool needs to be specified in metadata.annotations.")
  spec["workerPool"] = annotations[_WORKER_POOL_ANNOTATION]
  spec["annotations"] = annotations
  if labels:
    spec["labels"] = labels
  return metadata
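A before/after sketch of what _MetadataTransform does to the input dict; the annotation key is a placeholder for _WORKER_POOL_ANNOTATION, whose actual value is not shown here.

# Before (placeholder annotation key):
data = {
    "metadata": {
        "name": "my-run",
        "annotations": {"<worker-pool annotation key>": "my-pool"},
        "labels": {"team": "ci"},
    },
    "spec": {"taskRef": {"name": "my-task"}},
}
# After _MetadataTransform(data):
#   - "metadata" is popped from data and returned to the caller
#   - data["spec"]["workerPool"]  == "my-pool"
#   - data["spec"]["annotations"] == the full annotations dict
#   - data["spec"]["labels"]      == {"team": "ci"} (only set when labels exist)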
Example #5
def _WorkflowTransform(workflow):
    """Transform workflow message."""

    for param_spec in workflow.get("params", []):
        input_util.ParamSpecTransform(param_spec)

    pipeline = workflow.pop("pipeline")
    if "spec" in pipeline:
        _PipelineSpecTransform(pipeline["spec"])
        workflow["pipelineSpec"] = pipeline["spec"]
    elif "bundle" in pipeline:
        workflow["bundle"] = pipeline["bundle"]
    else:
        raise cloudbuild_exceptions.InvalidYamlError(
            "PipelineSpec or Bundle is required.")

    for workspace_binding in workflow.get("workspaces", []):
        _WorkspaceBindingTransform(workspace_binding)
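A sketch of the reshaping performed above: the nested pipeline key is popped and its spec (or bundle) is hoisted onto the workflow itself. The field values are placeholders, since _PipelineSpecTransform and _WorkspaceBindingTransform are not shown here.

# Illustrative only; params and workspaces are transformed by helpers not shown here.
workflow = {
    "params": [],
    "pipeline": {"spec": {"tasks": []}},
    "workspaces": [],
}
# After _WorkflowTransform(workflow):
#   - "pipeline" is removed
#   - workflow["pipelineSpec"] == the transformed pipeline "spec"
#   (a "bundle" key would be copied to workflow["bundle"] instead; having
#    neither raises InvalidYamlError)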
Example #6
  def Run(self, args):
    """This is what gets called when the user runs this command."""
    client = client_util.GetClientInstance()
    messages = client_util.GetMessagesModule()

    yaml_data = input_util.LoadYamlFromPath(args.file)
    run_type = yaml_data['kind']
    run_id = yaml_data['metadata']['name']

    parent = args.CONCEPTS.region.Parse().RelativeName()

    if run_type == 'PipelineRun':
      pipeline_run, discarded_fields = pipeline_input_util.TektonYamlDataToPipelineRun(
          yaml_data)
      self._CheckDiscardedFields(discarded_fields)
      operation = client.projects_locations_pipelineRuns.Create(
          messages.CloudbuildProjectsLocationsPipelineRunsCreateRequest(
              parent=parent,
              pipelineRun=pipeline_run,
              pipelineRunId=run_id,
          ))
      operation_ref = resources.REGISTRY.ParseRelativeName(
          operation.name, collection='cloudbuild.projects.locations.operations')
      created_pipeline_run = waiter.WaitFor(
          waiter.CloudOperationPoller(client.projects_locations_pipelineRuns,
                                      client.projects_locations_operations),
          operation_ref, 'Creating PipelineRun')

      pipeline_run_ref = resources.REGISTRY.Parse(
          created_pipeline_run.name,
          collection='cloudbuild.projects.locations.pipelineRuns',
          api_version=client_util.RELEASE_TRACK_TO_API_VERSION[
              self.ReleaseTrack()],
      )

      log.CreatedResource(pipeline_run_ref)
      return created_pipeline_run
    elif run_type == 'TaskRun':
      task_run, discarded_fields = pipeline_input_util.TektonYamlDataToTaskRun(
          yaml_data)
      self._CheckDiscardedFields(discarded_fields)
      operation = client.projects_locations_taskRuns.Create(
          messages.CloudbuildProjectsLocationsTaskRunsCreateRequest(
              parent=parent,
              taskRun=task_run,
              taskRunId=run_id,
          ))
      operation_ref = resources.REGISTRY.ParseRelativeName(
          operation.name, collection='cloudbuild.projects.locations.operations')
      created_task_run = waiter.WaitFor(
          waiter.CloudOperationPoller(client.projects_locations_taskRuns,
                                      client.projects_locations_operations),
          operation_ref, 'Creating TaskRun')

      task_run_ref = resources.REGISTRY.Parse(
          created_task_run.name,
          collection='cloudbuild.projects.locations.taskRuns',
          api_version=client_util.RELEASE_TRACK_TO_API_VERSION[
              self.ReleaseTrack()],
      )

      log.CreatedResource(task_run_ref)
      return created_task_run
    else:
      raise cloudbuild_exceptions.InvalidYamlError(
          'Requested resource type {r} not supported'.format(r=run_type))
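Before dispatching, the command only inspects two top-level keys of the loaded yaml: kind selects the branch and metadata.name becomes the run ID. A sketch of that minimal shape, with placeholder values:

# Placeholder values; the shape is inferred from the lookups in Run() above.
yaml_data = {
    "kind": "PipelineRun",           # or "TaskRun"; anything else raises InvalidYamlError
    "metadata": {"name": "my-run"},  # becomes pipelineRunId / taskRunId
    "spec": {},                      # converted by the TektonYamlDataTo* helpers above
}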
Example #7
def UnrecognizedFields(message):
    """Raise InvalidYamlError if the yaml had fields the message does not define."""
    unrecognized_fields = message.all_unrecognized_fields()
    if unrecognized_fields:
        raise cloudbuild_exceptions.InvalidYamlError(
            "Unrecognized fields in yaml: {f}".format(
                f=", ".join(unrecognized_fields)))