Example 1
def register_pipeline(pb_pipeline, pipeline_id):
    """
    Register the pipeline with the backend.
    """
    request = service_pb2.RegisterPipelineRequest()
    request.pipeline.CopyFrom(pb_pipeline)
    request.pipeline_id = pipeline_id

    response = _service.request(request, "register_pipeline")

    # The backend replies with a JSON-encoded VoidResponse.
    import google.protobuf.json_format as json_format
    res = json_format.Parse(response, service_pb2.VoidResponse())
    return None, res.status
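
All of these helpers share one shape: build a request message, send it through the module-level _service client, and parse the JSON reply into a service_pb2.VoidResponse. That last step is the standard google.protobuf.json_format API. Below is a minimal, self-contained sketch of the same round trip using the stock Struct message (an assumption standing in for the bigflow-specific VoidResponse, which is parsed the same way):

from google.protobuf import json_format, struct_pb2

# Serialize a message to JSON, then parse it back -- the same
# Parse(json_text, message) call the helpers above apply to VoidResponse.
msg = struct_pb2.Struct()
msg.fields["success"].bool_value = True

json_text = json_format.MessageToJson(msg)
parsed = json_format.Parse(json_text, struct_pb2.Struct())
assert parsed.fields["success"].bool_value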
Example 2
def unregister_pipeline(pipeline_id):
    """
    unregister pipeline to backend
    """
    request = service_pb2.UnRegisterPipelineRequest()
    request.pipeline_id = pipeline_id

    response = _service.request(request, "unregister_pipeline")

    import google.protobuf.json_format as json_format
    res = json_format.Parse(response, service_pb2.VoidResponse())
    return None, res.status
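
A typical call site pairs the two helpers around a pipeline's lifetime. The sketch below is hypothetical: pipeline_pb is assumed to be a pipeline protobuf message built elsewhere, and the generated id format is purely illustrative:

import uuid

# Hypothetical usage: register, work, then unregister.
pipeline_id = str(uuid.uuid4())
_, status = register_pipeline(pipeline_pb, pipeline_id)
assert status.success

# ... run jobs against the registered pipeline ...

_, status = unregister_pipeline(pipeline_id)
assert status.success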
Example 3
def write_record(file_path, records, objector):
    """
    write records to file path with objector
    """
    request = service_pb2.WriteLocalSeqFileRequest()
    request.file_path = file_path
    for record in records:
        request.key.append("")  # keys are left empty; only values carry data
        request.value.append(objector.serialize(record))

    response = _service.request(request, "write_local_seqfile")

    import google.protobuf.json_format as json_format
    res = json_format.Parse(response, service_pb2.VoidResponse())

    return None, res.status
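
The objector argument only needs a serialize(record) method returning bytes. A minimal pickle-based serde (a hypothetical stand-in, not bigflow's real objector) would satisfy that contract:

import pickle

class PickleObjector(object):
    """Hypothetical serde: turns any picklable record into bytes."""

    def serialize(self, record):
        return pickle.dumps(record)

    def deserialize(self, buf):
        return pickle.loads(buf)

# Keys are written empty, so only the serialized values carry data:
# _, status = write_record("/tmp/out.seq", [{"k": 1}, {"k": 2}], PickleObjector())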
Example 4
def hadoop_commit(args, hadoop_client_path=None, hadoop_config_path=None):
    """
    Run a Hadoop client command through the backend.
    Returns True if the command succeeded.
    """
    if hadoop_client_path is None:
        hadoop_client_path = default_hadoop_client_path()

    if hadoop_config_path is None:
        hadoop_config_path = default_hadoop_config_path()

    request = service_pb2.HadoopCommitRequest()
    request.hadoop_client_path = hadoop_client_path
    request.hadoop_config_path = hadoop_config_path
    request.args.extend(args)

    response = _service.request(request, "hadoop_commit")

    import google.protobuf.json_format as json_format
    res = json_format.Parse(response, service_pb2.VoidResponse())
    return res.status.success
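
Note that hadoop_commit is the odd one out: it returns a bare boolean instead of the (None, status) tuple the other helpers use. A hypothetical invocation (the paths and arguments below are illustrative, not defaults from the project):

# Hypothetical: run "hadoop fs -mkdir /tmp/bigflow_out" through the backend.
ok = hadoop_commit(
    ["fs", "-mkdir", "/tmp/bigflow_out"],
    hadoop_client_path="/opt/hadoop/bin/hadoop",
    hadoop_config_path="/opt/hadoop/etc/hadoop",
)
if not ok:
    raise RuntimeError("hadoop command failed")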
Example 5
def launch(pipeline_id,
           logical_plan_message,
           resource_message,
           commit_args=None,
           context=None):
    """
    Send the rpc command to the other end to launch the logical plan

    Args:

    Raises:
      error.BigflowRPCException:  if any error happened
    """

    request = service_pb2.LaunchRequest()
    request.pipeline_id = pipeline_id
    request.logical_plan.CopyFrom(logical_plan_message)

    request.resource.CopyFrom(resource_message)

    if commit_args is not None:
        request.hadoop_commit_args.extend(commit_args)

    if context is not None:
        assert isinstance(context, str)
        request.pipeline_context = context

    response = _service.request(request, "launch")

    import google.protobuf.json_format as json_format
    res = json_format.Parse(response, service_pb2.VoidResponse())

    if not res.status.success:
        # _message is a module-level list of messages reported by the backend
        backend_log_path = os.getenv("BIGFLOW_LOG_FILE_BACKEND", "")
        error_message = "Job failed"
        if len(_message) > 0:
            error_message += ", possible reason:\n" + "".join(_message)
        if backend_log_path:
            error_message += "\nPlease check backend log['%s.log'] for details" % backend_log_path
        raise error.BigflowRuntimeException(error_message)
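
Because launch raises on failure instead of returning a status tuple, callers wrap it in try/except; the backend log hint in the error message comes from the BIGFLOW_LOG_FILE_BACKEND environment variable. A hedged sketch of a call site (plan_pb and resource_pb are assumed to be the logical plan and resource messages built by the planner; the commit args are illustrative):

import os

os.environ.setdefault("BIGFLOW_LOG_FILE_BACKEND", "/tmp/bigflow/backend")
try:
    launch("pipeline-1", plan_pb, resource_pb,
           commit_args=["-D", "mapred.job.queue.name=default"])
except error.BigflowRuntimeException as e:
    print("launch failed: %s" % e)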
Example 6
def kill(pipeline_id, logical_plan_message, commit_args=None):
    """
    Send the rpc command to the other end to kill the logical plan

    Args:

    Raises:
      error.BigflowRPCException:  if any error happened
    """

    request = service_pb2.KillRequest()
    request.pipeline_id = pipeline_id
    request.logical_plan.CopyFrom(logical_plan_message)

    if commit_args is not None:
        request.hadoop_commit_args.extend(commit_args)

    response = _service.request(request, "kill")

    import google.protobuf.json_format as json_format
    res = json_format.Parse(response, service_pb2.VoidResponse())
    return None, res.status
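
Unlike launch, kill follows the (None, status) return convention of the other helpers, so the caller checks status.success explicitly. A hypothetical teardown (plan_pb as in the previous sketch):

# Hypothetical: tear down a running pipeline and check the backend's reply.
_, status = kill("pipeline-1", plan_pb)
if not status.success:
    raise RuntimeError("backend refused to kill pipeline-1")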