Code example #1
import json

from flask import request
# PipelineGenerator is project-specific; import it from wherever it is
# defined in your codebase.


# Flask view: reads the graph definition and DAG properties from the request
# body, runs the pipeline generator, and returns the generated code together
# with the result status and any errors as a JSON string.
def extract():
    json_data = request.get_json(force=True)
    graph = json_data['graph']
    dag_properties = json_data['dag_properties']
    code_info, success, errors, additional_info = PipelineGenerator.generate_pipeline(
        graph, dag_properties)
    result = {
        "codes": code_info,
        "result_code": success,
        "errors": errors,
        "additional_info": additional_info
    }
    json_string = json.dumps(result)
    return json_string
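
The snippet above is only the view function; a minimal sketch of wiring it into a Flask application follows. The `app` object, the "/extract" route path, and the `app.run` call are assumptions for illustration, not part of the original code.

from flask import Flask

app = Flask(__name__)
# The route path "/extract" is an assumed name for this sketch.
app.add_url_rule("/extract", view_func=extract, methods=["POST"])

if __name__ == "__main__":
    app.run(debug=True)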
Code example #2
# Variant of the handler above with debug logging: the parsed JSON body is
# first passed through the project's `convert` helper, and the intermediate
# values are printed before the pipeline is generated.
def interpret_graph():
    json_data = request.get_json(force=True)
    json_data = convert(json_data)
    # Debug output: inspect the converted payload and its parts.
    print(json_data)
    print(type(json_data))
    graph = json_data['graph']
    print(graph)
    print("------------")
    dag_properties = json_data['dag_properties']
    print(dag_properties)
    print("------------")
    code_info, success, errors, additional_info = PipelineGenerator.generate_pipeline(
        graph, dag_properties)
    result = {
        "codes": code_info,
        "result_code": success,
        "errors": errors,
        "additional_info": additional_info
    }
    json_string = json.dumps(result)
    return json_string
Code example #3
# NOTE: this excerpt begins part-way through the `data` dictionary; the
# opening "graph" -> "nodes" entries for node1-node3 are not shown here.
                    }
                }
            },
            "task1": {
                "id": "task1",
                "parent": None,
                "node_type": 1
            }
        },
        "edges": {
            "node1-node2": {
                "type": "dataframe"
            },
            "node1-node3": {
                "type": "dataframe"
            }
        }
    },
    "dag_properties": {
        "app_id": "MyFirstApp",
        "bash_command": "sh /usr/local/shell_scripts/run.sh",
        "schedule_interval": "@once",
        "default_args": {
            "owner": "airflow",
            "start_date": "01/01/2018"
        }
    }
}

# Call the generator directly with the in-memory payload instead of going
# through an HTTP endpoint.
code_info, success, errors, additional_info = PipelineGenerator.generate_pipeline(
    data["graph"], data["dag_properties"])