def use_and_generate(host, port, bucket_name, frequency_width, volume,
                     add_shutdown, iterations):
    """
    Build the concatenation graph and deploy it on the data island manager
    already running at host:port.
    """
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
                host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphConcatenation(bucket_name, volume,
                                            PARALLEL_STREAMS, nodes_running,
                                            add_shutdown, frequency_width,
                                            iterations, session_id, host)
            graph.build_graph()

            LOG.info('Connecting to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, graph.start_oids)

        else:
            LOG.warning('No nodes are running')
    else:
        LOG.error('Unable to find the AWS credentials')
def use_and_generate(host, port, bucket_name, frequency_width, volume,
                     add_shutdown, iterations, concatenation_type):
    """
    Build the concatenation graph for the given concatenation type and deploy
    it on the data island manager already running at host:port.
    """
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
                host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphConcatenation(bucket_name, volume,
                                            PARALLEL_STREAMS, nodes_running,
                                            add_shutdown, frequency_width,
                                            iterations, concatenation_type,
                                            session_id)
            graph.build_graph()

            LOG.info('Connecting to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, graph.start_oids)

        else:
            LOG.warning('No nodes are running')
    else:
        LOG.error('Unable to find the AWS credentials')
def command_json(args):
    """
    Build the concatenation graph from the command line arguments and write it
    out as JSON to /tmp/json_split.txt.
    """
    node_details = {
        'number_instances': 1,
        'instance_type': 'm4.large',
        'spot_price': 0.99
    }

    graph = BuildGraphConcatenation(args.bucket, args.volume,
                                    args.parallel_streams, node_details,
                                    args.shutdown, args.width, args.iterations,
                                    args.concatenation_type, 'session_id')
    graph.build_graph()
    json_dumps = json.dumps(graph.drop_list, indent=2)
    LOG.info(json_dumps)
    with open("/tmp/json_split.txt", "w") as json_file:
        json_file.write(json_dumps)
def command_json(args):
    """
    Build the concatenation graph from the command line arguments and write it
    out as JSON to /tmp/json_split.txt.
    """
    node_details = {
        'number_instances': 1,
        'instance_type': 'm4.large',
        'spot_price': 0.99
    }

    graph = BuildGraphConcatenation(args.bucket, args.volume,
                                    args.parallel_streams, node_details,
                                    args.shutdown, args.width, args.iterations,
                                    'session_id', '1.2.3.4')
    graph.build_graph()
    json_dumps = json.dumps(graph.drop_list, indent=2)
    LOG.info(json_dumps)
    with open("/tmp/json_split.txt", "w") as json_file:
        json_file.write(json_dumps)
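
# A minimal sketch, assuming command_json() is driven from the command line;
# the parser below is illustrative only (the option names, defaults and the
# helper name parse_json_arguments_sketch are assumptions, not part of the
# original module) and would need to match the project's real argument parser.
def parse_json_arguments_sketch():
    import argparse
    parser = argparse.ArgumentParser(description='Dump the concatenation graph as JSON')
    # Positional arguments read by command_json() via args.bucket and args.volume
    parser.add_argument('bucket', help='the S3 bucket holding the measurement sets')
    parser.add_argument('volume', help='the host directory the drops work in')
    # Optional arguments matching the remaining attributes command_json() reads
    parser.add_argument('--parallel_streams', type=int, default=4, help='number of parallel streams')
    parser.add_argument('--width', type=int, default=4, help='frequency width to concatenate')
    parser.add_argument('--iterations', type=int, default=1, help='number of iterations')
    parser.add_argument('--concatenation_type', default='concatenate', help='type of concatenation to build')
    parser.add_argument('--shutdown', action='store_true', help='add a shutdown drop to the graph')
    return parser.parse_args()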
def create_and_generate(bucket_name, frequency_width, ami_id, spot_price,
                        volume, add_shutdown, iterations):
    """
    Start the EC2 node manager and data island manager instances, then build
    the concatenation graph and deploy it on the new data island manager.
    """
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        uuid = get_uuid()
        ec2_data = EC2Controller(
            ami_id,
            [
                {
                    'number_instances': 1,
                    'instance_type': 'i2.4xlarge',
                    'spot_price': spot_price
                }
            ],
            get_node_manager_user_data(boto_data, uuid),
            AWS_REGION,
            tags=[
                {
                    'Key': 'Owner',
                    'Value': getpass.getuser(),
                },
                {
                    'Key': 'Name',
                    'Value': 'Daliuge NM - Concatenate',
                },
                {
                    'Key': 'uuid',
                    'Value': uuid,
                },
            ]
        )
        ec2_data.start_instances()

        reported_running = get_reported_running(uuid, 1, wait=600)
        hosts = build_hosts(reported_running)

        # Create the Data Island Manager
        data_island_manager = EC2Controller(
            ami_id, [{
                'number_instances': 1,
                'instance_type': 'm4.large',
                'spot_price': spot_price
            }],
            get_data_island_manager_user_data(boto_data, hosts, uuid),
            AWS_REGION,
            tags=[
                {
                    'Key': 'Owner',
                    'Value': getpass.getuser(),
                },
                {
                    'Key': 'Name',
                    'Value': 'Daliuge DIM - Concatenate',
                },
                {
                    'Key': 'uuid',
                    'Value': uuid,
                },
            ])
        data_island_manager.start_instances()
        data_island_manager_running = get_reported_running(uuid, 1, wait=600)

        if len(data_island_manager_running['m4.large']) == 1:
            # Now build the graph
            session_id = get_session_id()
            instance_details = data_island_manager_running['m4.large'][0]
            host = instance_details['ip_address']
            graph = BuildGraphConcatenation(bucket_name, volume,
                                            PARALLEL_STREAMS, reported_running,
                                            add_shutdown, frequency_width,
                                            iterations, session_id, host)
            graph.build_graph()

            LOG.info('Connecting to {0}:{1}'.format(host, DIM_PORT))
            client = DataIslandManagerClient(host, DIM_PORT)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, graph.start_oids)
    else:
        LOG.error('Unable to find the AWS credentials')
def create_and_generate(bucket_name, frequency_width, ami_id, spot_price,
                        volume, add_shutdown, iterations, concatenation_type):
    """
    Start the EC2 node manager and data island manager instances, then build
    the concatenation graph for the given concatenation type and deploy it on
    the new data island manager.
    """
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        uuid = get_uuid()
        ec2_data = EC2Controller(
            ami_id,
            [
                {
                    'number_instances': 1,
                    'instance_type': 'i2.4xlarge',
                    'spot_price': spot_price
                }
            ],
            get_node_manager_user_data(boto_data, uuid),
            AWS_REGION,
            tags=[
                {
                    'Key': 'Owner',
                    'Value': getpass.getuser(),
                },
                {
                    'Key': 'Name',
                    'Value': 'DFMS Node - Concatenate',
                },
                {
                    'Key': 'uuid',
                    'Value': uuid,
                }
            ]
        )
        ec2_data.start_instances()

        reported_running = get_reported_running(
            uuid,
            1,
            wait=600
        )
        hosts = build_hosts(reported_running)

        # Create the Data Island Manager
        data_island_manager = EC2Controller(
            ami_id,
            [
                {
                    'number_instances': 1,
                    'instance_type': 'm4.large',
                    'spot_price': spot_price
                }
            ],
            get_data_island_manager_user_data(boto_data, hosts, uuid),
            AWS_REGION,
            tags=[
                {
                    'Key': 'Owner',
                    'Value': getpass.getuser(),
                },
                {
                    'Key': 'Name',
                    'Value': 'Data Island Manager - Concatenate',
                },
                {
                    'Key': 'uuid',
                    'Value': uuid,
                },
            ]
        )
        data_island_manager.start_instances()
        data_island_manager_running = get_reported_running(
            uuid,
            1,
            wait=600
        )

        if len(data_island_manager_running['m4.large']) == 1:
            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphConcatenation(bucket_name, volume,
                                            PARALLEL_STREAMS, reported_running,
                                            add_shutdown, frequency_width,
                                            iterations, concatenation_type,
                                            session_id)
            graph.build_graph()

            instance_details = data_island_manager_running['m4.large'][0]
            host = instance_details['ip_address']
            LOG.info('Connecting to {0}:{1}'.format(host, DIM_PORT))
            client = DataIslandManagerClient(host, DIM_PORT)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, graph.start_oids)
    else:
        LOG.error('Unable to find the AWS credentials')
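
# An illustrative entry point (an assumption, not taken from the original
# code) that feeds the sketch parser above into command_json().
if __name__ == '__main__':
    command_json(parse_json_arguments_sketch())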