Code example #1
0
File: qds.py  Project: rahul26goyal/qds-sdk-py
def clustermainv2(args):
    """Dispatch a v2 cluster subcommand to its handler.

    ``args[0]`` names the action; the full ``args`` list is forwarded to
    the chosen handler.  Unknown actions write an error to stderr and
    show usage.  The handler's result (or ``None``) is printed.
    """
    action = args[0]
    known_actions = {
        "create", "delete", "update", "list", "clone", "start",
        "terminate", "status", "reassign_label", "add_node",
        "remove_node", "update_node", "snapshot", "restore_point",
        "get_snapshot_schedule", "update_snapshot_schedule",
    }

    result = None
    if action not in known_actions:
        sys.stderr.write("action must be one of <%s>\n" % "|".join(known_actions))
        usage()
    elif action in {"create", "update", "clone", "list"}:
        # These four are handled generically by the v2 command-line layer.
        result = ClusterCmdLine.run(args)
    else:
        # All other actions map to a module-level cluster_<action>_action helper.
        handler = globals()["cluster_%s_action" % action]
        result = handler(Cluster, args)
    print(result)
Code example #2
0
def clustermainv2(args):
    """Route a cluster v2 CLI action (``args[0]``) to its implementation.

    Invalid actions report the allowed set on stderr and invoke
    :func:`usage`; otherwise the result of the matching handler is
    printed.
    """
    action = args[0]
    supported = frozenset([
        "create", "delete", "update", "list", "clone", "start", "terminate",
        "status", "reassign_label", "add_node", "remove_node", "update_node",
        "snapshot", "restore_point", "get_snapshot_schedule",
        "update_snapshot_schedule",
    ])
    # Actions handled generically by ClusterCmdLine rather than a
    # dedicated cluster_<action>_action function.
    generic = frozenset(["create", "update", "clone", "list"])

    result = None
    if action not in supported:
        sys.stderr.write("action must be one of <%s>\n" % "|".join(supported))
        usage()
    elif action in generic:
        result = ClusterCmdLine.run(args)
    else:
        result = globals()["cluster_" + action + "_action"](Cluster, args)
    print(result)
Code example #3
0
def _create_spark_cluster_info(config):
    """Build the create-cluster request payload for a Spark cluster.

    Reads instance types, node counts, network placement, and the
    cluster name from *config* (a dict; key names follow the project's
    convention — e.g. ``spark_cluster_name``, ``hadoop_max_nodes_count``)
    and returns the combined request produced by
    ``ClusterCmdLine.get_cluster_request_parameters``.
    """
    # Core cluster sizing/instance settings.  Slaves are requested as
    # spot instances with a fixed minimum of one node.
    info = ClusterInfoV2(config['spark_cluster_name'])
    info.set_cluster_info(
        master_instance_type=config['hadoop_master_instance_type'],
        slave_instance_type=config['hadoop_slave_instance_type'],
        min_nodes=1,
        max_nodes=config['hadoop_max_nodes_count'],
        slave_request_type='spot',
    )

    # AWS placement: region/VPC/subnet come from config, AZ is left open.
    cloud = Qubole.get_cloud(cloud_name='aws')
    cloud.set_cloud_config(
        aws_region=config['region_name'],
        aws_availability_zone='Any',
        vpc_id=config['cluster_vpc_id'],
        subnet_id=config['cluster_subnet_id'],
    )

    # Engine is pinned to Spark 2.1.0.
    engine = Engine(flavour='spark')
    engine.set_engine_config(spark_version='2.1.0')

    return ClusterCmdLine.get_cluster_request_parameters(info, cloud, engine)