def use_and_generate(host, port, bucket_name, frequency_width, volume, add_shutdown, iterations):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
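        # Query the Data Island Manager's REST API for the hosts it knows about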
        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

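        # Keep only the hosts that report a running node manager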
        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphJpeg2000(bucket_name, volume, PARALLEL_STREAMS, nodes_running, add_shutdown, frequency_width, iterations, session_id, host)
            graph.build_graph()

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

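            # Create a session on the island manager, attach the physical graph, then deploy it from its start OIDs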
            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, graph.start_oids)

        else:
            LOG.warning('No nodes are running')
def use_and_generate(host, port, bucket_name, frequency_width, volume,
                     add_shutdown, iterations):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
                host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphConcatenation(bucket_name, volume,
                                            PARALLEL_STREAMS, nodes_running,
                                            add_shutdown, frequency_width,
                                            iterations, session_id, host)
            graph.build_graph()

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, graph.start_oids)

        else:
            LOG.warning('No nodes are running')
Example 3
def command_run(args):
    LOG.info(args)
    graph = BuildGraph()
    graph.build_graph()

    client = NodeManagerClient(args.host, args.port)

    session_id = get_session_id()
    client.create_session(session_id)
    client.append_graph(session_id, graph.drop_list)
    client.deploy_session(session_id, graph.start_oids)
def use_and_generate(host, port, bucket_name, frequency_width, volume, add_shutdown, password):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        database_connection, map_day_name, database_ip = setup_database(password, bucket_name)

        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
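            # Work out what processing remains for this frequency width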
            work_to_do = WorkToDo(frequency_width, bucket_name, get_s3_uvsub_name(frequency_width), database_connection)
            work_to_do.calculate_work_to_do()

            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphStats(
                work_to_do.work_to_do,
                bucket_name,
                volume,
                PARALLEL_STREAMS,
                nodes_running,
                add_shutdown,
                frequency_width,
                session_id,
                map_day_name,
                password,
                database_ip,
                host
            )
            graph.build_graph()

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, graph.start_oids)

        else:
            LOG.warning('No nodes are running')
def use_and_generate(host, port, bucket_name, frequency_width, volume, add_shutdown):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            work_to_do = WorkToDo(
                width=frequency_width,
                bucket_name=bucket_name,
                s3_split_name=get_s3_split_name(frequency_width))
            work_to_do.calculate_work_to_do()

            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphMsTransform(
                work_to_do=work_to_do.work_to_do,
                bucket_name=bucket_name,
                volume=volume,
                parallel_streams=7,
                node_details=nodes_running,
                shutdown=add_shutdown,
                width=frequency_width,
                session_id=session_id,
                dim_ip=host,
            )
            graph.build_graph()

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, get_roots(graph.drop_list))

        else:
            LOG.warning('No nodes are running')
Example 7
def use_and_generate(host, port, bucket_name, frequency_width, volume,
                     add_shutdown, password):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        database_connection, map_day_name, database_ip = setup_database(
            password, bucket_name)

        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
                host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            work_to_do = WorkToDo(frequency_width, bucket_name,
                                  get_s3_uvsub_name(frequency_width),
                                  database_connection)
            work_to_do.calculate_work_to_do()

            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphStats(work_to_do.work_to_do, bucket_name, volume,
                                    PARALLEL_STREAMS, nodes_running,
                                    add_shutdown, frequency_width, session_id,
                                    map_day_name, password, database_ip, host)
            graph.build_graph()

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, graph.start_oids)

        else:
            LOG.warning('No nodes are running')
Example 8
def create_and_generate(**kw_args):
    # Do we have boto data
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        session_id = get_session_id()
        # Start by creating the EFS
        client = boto3.client('efs', region_name=AWS_REGION)

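        # CreationToken is EFS's idempotency token, so retries with the same session id will not create a second file system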
        create_file_system = client.create_file_system(CreationToken=session_id)

        life_cycle_state = create_file_system['LifeCycleState']
        while life_cycle_state == 'creating':
            # Wait for AWS to provision the EFS
            sleep(10)

            describe_file_systems = client.describe_file_systems(
                CreationToken=create_file_system['CreationToken']
            )
            file_systems = describe_file_systems['FileSystems']
            life_cycle_state = file_systems[0]['LifeCycleState']

        if life_cycle_state != 'available':
            LOG.error('Could not create the EFS')
Example 9
def create_and_generate(
        bucket_name,
        frequency_width,
        w_projection_planes,
        ami_id,
        spot_price,
        volume,
        nodes,
        add_shutdown,
        min_frequency,
        max_frequency,
        scan_statistics,
        dump_json):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        work_to_do = WorkToDo(
            width=frequency_width,
            bucket_name=bucket_name,
            s3_uvsub_name=get_s3_uvsub_name(frequency_width),
            s3_split_name=get_s3_split_name(frequency_width),
            min_frequency=min_frequency,
            max_frequency=max_frequency)
        work_to_do.calculate_work_to_do()

        nodes_required, node_count = get_nodes_required(nodes, spot_price)

        if len(nodes_required) > 0:
            uuid = get_uuid()
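            # Launch the node manager instances as spot requests, tagged with this run's uuid so they can be found later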
            ec2_data = EC2Controller(
                ami_id,
                nodes_required,
                get_node_manager_user_data(boto_data, uuid, max_request_size=50),
                AWS_REGION,
                tags=[
                    {
                        'Key': 'Owner',
                        'Value': getpass.getuser(),
                    },
                    {
                        'Key': 'Name',
                        'Value': 'DALiuGE NM - Uvsub',
                    },
                    {
                        'Key': 'uuid',
                        'Value': uuid,
                    }
                ]
            )
            ec2_data.start_instances()

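            # Wait for the launched node managers to report that they are ready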
            reported_running = get_reported_running(
                uuid,
                node_count,
                wait=600
            )

            if len(reported_running) == 0:
                LOG.error('Nothing has reported ready')
            else:
                hosts = build_hosts(reported_running)

                # Create the Data Island Manager
                data_island_manager = EC2Controller(
                    ami_id,
                    [
                        {
                            'number_instances': 1,
                            'instance_type': 'm4.large',
                            'spot_price': spot_price
                        }
                    ],
                    get_data_island_manager_user_data(boto_data, hosts, uuid, max_request_size=50),
                    AWS_REGION,
                    tags=[
                        {
                            'Key': 'Owner',
                            'Value': getpass.getuser(),
                        },
                        {
                            'Key': 'Name',
                            'Value': 'DALiuGE DIM - Uvsub',
                        },
                        {
                            'Key': 'uuid',
                            'Value': uuid,
                        }
                    ]
                )
                data_island_manager.start_instances()
                data_island_manager_running = get_reported_running(
                        uuid,
                        1,
                        wait=600
                )

                if len(data_island_manager_running['m4.large']) == 1:
                    # Now build the graph
                    session_id = get_session_id()
                    instance_details = data_island_manager_running['m4.large'][0]
                    host = instance_details['ip_address']
                    graph = BuildGraphUvsub(
                        work_to_do=work_to_do.work_to_do,
                        bucket_name=bucket_name,
                        volume=volume,
                        parallel_streams=PARALLEL_STREAMS,
                        node_details=reported_running,
                        shutdown=add_shutdown,
                        scan_statistics=scan_statistics,
                        width=frequency_width,
                        w_projection_planes=w_projection_planes,
                        session_id=session_id,
                        dim_ip=host)
                    graph.build_graph()

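                    # Optionally dump the physical graph to disk for inspection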
                    if dump_json:
                        json_dumps = json.dumps(graph.drop_list, indent=2)
                        with open("/tmp/json_uvsub.txt", "w") as json_file:
                            json_file.write(json_dumps)

                    LOG.info('Connection to {0}:{1}'.format(host, DIM_PORT))
                    client = DataIslandManagerClient(host, DIM_PORT)

                    client.create_session(session_id)
                    client.append_graph(session_id, graph.drop_list)
                    client.deploy_session(session_id, get_roots(graph.drop_list))
    else:
        LOG.error('Unable to find the AWS credentials')
Example 10
def create_and_generate(bucket_name, frequency_width, ami_id, spot_price,
                        volume, nodes, add_shutdown, password, log_level):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        database_connection, map_day_name, database_ip = setup_database(
            password, bucket_name)

        work_to_do = WorkToDo(frequency_width, bucket_name,
                              get_s3_uvsub_name(frequency_width),
                              database_connection)
        work_to_do.calculate_work_to_do()

        nodes_required, node_count = get_nodes_required(nodes, spot_price)

        if len(nodes_required) > 0:
            uuid = get_uuid()
            ec2_data = EC2Controller(ami_id,
                                     nodes_required,
                                     get_node_manager_user_data(
                                         boto_data,
                                         uuid,
                                         max_request_size=50,
                                         log_level=log_level),
                                     AWS_REGION,
                                     tags=[{
                                         'Key': 'Owner',
                                         'Value': getpass.getuser(),
                                     }, {
                                         'Key': 'Name',
                                         'Value': 'Daliuge NM - Stats',
                                     }, {
                                         'Key': 'uuid',
                                         'Value': uuid,
                                     }])
            ec2_data.start_instances()

            reported_running = get_reported_running(uuid, node_count, wait=600)

            if len(reported_running) == 0:
                LOG.error('Nothing has reported ready')
            else:
                hosts = build_hosts(reported_running)

                # Create the Data Island Manager
                data_island_manager = EC2Controller(
                    ami_id, [{
                        'number_instances': 1,
                        'instance_type': 'm4.large',
                        'spot_price': spot_price
                    }],
                    get_data_island_manager_user_data(boto_data,
                                                      hosts,
                                                      uuid,
                                                      need_node_manager=True,
                                                      max_request_size=50),
                    AWS_REGION,
                    tags=[{
                        'Key': 'Owner',
                        'Value': getpass.getuser(),
                    }, {
                        'Key': 'Name',
                        'Value': 'Daliuge DIM - Stats',
                    }, {
                        'Key': 'uuid',
                        'Value': uuid,
                    }])
                data_island_manager.start_instances()
                data_island_manager_running = get_reported_running(
                    uuid,
                    1,
                    wait=600,
                )

                if len(data_island_manager_running['m4.large']) == 1:
                    # Now build the graph
                    session_id = get_session_id()
                    instance_details = data_island_manager_running['m4.large'][0]
                    host = instance_details['ip_address']
                    graph = BuildGraphStats(work_to_do.work_to_do, bucket_name,
                                            volume, PARALLEL_STREAMS,
                                            reported_running, add_shutdown,
                                            frequency_width, session_id,
                                            map_day_name, password,
                                            database_ip, host)
                    graph.build_graph()

                    LOG.info('Connection to {0}:{1}'.format(host, DIM_PORT))
                    client = DataIslandManagerClient(host, DIM_PORT)

                    client.create_session(session_id)
                    client.append_graph(session_id, graph.drop_list)
                    client.deploy_session(session_id, graph.start_oids)
    else:
        LOG.error('Unable to find the AWS credentials')
def create_and_generate(bucket_name, frequency_width, ami_id, spot_price, volume, frequencies_per_node, add_shutdown, iterations):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        work_to_do = WorkToDo(frequency_width, bucket_name, get_s3_clean_name(frequency_width, iterations))
        work_to_do.calculate_work_to_do()

        nodes_required, node_count = get_nodes_required(work_to_do.work_to_do, frequencies_per_node, spot_price)

        if len(nodes_required) > 0:
            uuid = get_uuid()
            ec2_data = EC2Controller(
                ami_id,
                nodes_required,
                get_node_manager_user_data(boto_data, uuid),
                AWS_REGION,
                tags=[
                    {
                        'Key': 'Owner',
                        'Value': getpass.getuser(),
                    },
                    {
                        'Key': 'Name',
                        'Value': 'DFMS Node - Clean',
                    },
                    {
                        'Key': 'uuid',
                        'Value': uuid,
                    }
                ]
            )
            ec2_data.start_instances()

            reported_running = get_reported_running(
                uuid,
                node_count,
                wait=600
            )

            if len(reported_running) == 0:
                LOG.error('Nothing has reported ready')
            else:
                hosts = build_hosts(reported_running)

                # Create the Data Island Manager
                data_island_manager = EC2Controller(
                    ami_id,
                    [
                        {
                            'number_instances': 1,
                            'instance_type': 'm4.large',
                            'spot_price': spot_price
                        }
                    ],
                    get_data_island_manager_user_data(boto_data, hosts, uuid),
                    AWS_REGION,
                    tags=[
                        {
                            'Key': 'Owner',
                            'Value': getpass.getuser(),
                        },
                        {
                            'Key': 'Name',
                            'Value': 'Data Island Manager - Clean',
                        },
                        {
                            'Key': 'uuid',
                            'Value': uuid,
                        }
                    ]
                )
                data_island_manager.start_instances()
                data_island_manager_running = get_reported_running(
                        uuid,
                        1,
                        wait=600
                )

                if len(data_island_manager_running['m4.large']) == 1:
                    # Now build the graph
                    session_id = get_session_id()
                    graph = BuildGraphClean(work_to_do.work_to_do, bucket_name, volume, PARALLEL_STREAMS, reported_running, add_shutdown, frequency_width, iterations, session_id)
                    graph.build_graph()

                    instance_details = data_island_manager_running['m4.large'][0]
                    host = instance_details['ip_address']
                    LOG.info('Connection to {0}:{1}'.format(host, DIM_PORT))
                    client = DataIslandManagerClient(host, DIM_PORT)

                    client.create_session(session_id)
                    client.append_graph(session_id, graph.drop_list)
                    client.deploy_session(session_id, graph.start_oids)
    else:
        LOG.error('Unable to find the AWS credentials')
Example 12
def create_and_generate(bucket_name, frequency_width, ami_id, spot_price, volume, nodes, add_shutdown, password, log_level):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        database_connection, map_day_name, database_ip = setup_database(password, bucket_name)

        work_to_do = WorkToDo(frequency_width, bucket_name, get_s3_uvsub_name(frequency_width), database_connection)
        work_to_do.calculate_work_to_do()

        nodes_required, node_count = get_nodes_required(nodes, spot_price)

        if len(nodes_required) > 0:
            uuid = get_uuid()
            ec2_data = EC2Controller(
                ami_id,
                nodes_required,
                get_node_manager_user_data(boto_data, uuid, max_request_size=50, log_level=log_level),
                AWS_REGION,
                tags=[
                    {
                        'Key': 'Owner',
                        'Value': getpass.getuser(),
                    },
                    {
                        'Key': 'Name',
                        'Value': 'DALiuGE NM - Stats',
                    },
                    {
                        'Key': 'uuid',
                        'Value': uuid,
                    }
                ]
            )
            ec2_data.start_instances()

            reported_running = get_reported_running(
                uuid,
                node_count,
                wait=600
            )

            if len(reported_running) == 0:
                LOG.error('Nothing has reported ready')
            else:
                hosts = build_hosts(reported_running)

                # Create the Data Island Manager
                data_island_manager = EC2Controller(
                    ami_id,
                    [
                        {
                            'number_instances': 1,
                            'instance_type': 'm4.large',
                            'spot_price': spot_price
                        }
                    ],
                    get_data_island_manager_user_data(
                        boto_data,
                        hosts,
                        uuid,
                        need_node_manager=True,
                        max_request_size=50
                    ),
                    AWS_REGION,
                    tags=[
                        {
                            'Key': 'Owner',
                            'Value': getpass.getuser(),
                        },
                        {
                            'Key': 'Name',
                            'Value': 'DALiuGE DIM - Stats',
                        },
                        {
                            'Key': 'uuid',
                            'Value': uuid,
                        }
                    ]
                )
                data_island_manager.start_instances()
                data_island_manager_running = get_reported_running(
                        uuid,
                        1,
                        wait=600,
                )

                if len(data_island_manager_running['m4.large']) == 1:
                    # Now build the graph
                    session_id = get_session_id()
                    instance_details = data_island_manager_running['m4.large'][0]
                    host = instance_details['ip_address']
                    graph = BuildGraphStats(
                        work_to_do.work_to_do,
                        bucket_name,
                        volume,
                        PARALLEL_STREAMS,
                        reported_running,
                        add_shutdown,
                        frequency_width,
                        session_id,
                        map_day_name,
                        password,
                        database_ip,
                        host
                    )
                    graph.build_graph()

                    LOG.info('Connection to {0}:{1}'.format(host, DIM_PORT))
                    client = DataIslandManagerClient(host, DIM_PORT)

                    client.create_session(session_id)
                    client.append_graph(session_id, graph.drop_list)
                    client.deploy_session(session_id, get_roots(graph.drop_list))
    else:
        LOG.error('Unable to find the AWS credentials')
Example 13
def use_and_generate(
        host,
        port,
        bucket_name,
        frequency_width,
        volume,
        add_shutdown,
        iterations,
        arcsec,
        w_projection_planes,
        robust,
        image_size,
        clean_channel_average,
        min_frequency,
        max_frequency,
        clean_directory_name,
        only_image):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
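            # The clean output directory defaults to a name derived from the width, iterations and arcsec unless one is given explicitly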
            work_to_do = WorkToDo(
                frequency_width,
                bucket_name,
                get_s3_clean_name(frequency_width, iterations, arcsec) if clean_directory_name is None else clean_directory_name,
                min_frequency,
                max_frequency
            )
            work_to_do.calculate_work_to_do()

            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphClean(
                work_to_do=work_to_do.work_to_do,
                bucket_name=bucket_name,
                volume=volume,
                parallel_streams=PARALLEL_STREAMS,
                node_details=nodes_running,
                shutdown=add_shutdown,
                width=frequency_width,
                iterations=iterations,
                arcsec=arcsec,
                w_projection_planes=w_projection_planes,
                robust=robust,
                image_size=image_size,
                clean_channel_average=clean_channel_average,
                clean_directory_name=clean_directory_name,
                only_image=only_image,
                session_id=session_id,
                dim_ip=host)
            graph.build_graph()

            # TODO: Save the run parameters

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, get_roots(graph.drop_list))

        else:
            LOG.warning('No nodes are running')
Example 14
def create_and_generate(
        bucket_name,
        frequency_width,
        ami_id,
        spot_price,
        volume,
        frequencies_per_node,
        add_shutdown,
        iterations,
        arcsec,
        w_projection_planes,
        robust,
        image_size,
        clean_channel_average,
        min_frequency,
        max_frequency,
        clean_directory_name,
        only_image,
        log_level):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        work_to_do = WorkToDo(
            frequency_width,
            bucket_name,
            get_s3_clean_name(frequency_width, iterations, arcsec) if clean_directory_name is None else clean_directory_name,
            min_frequency,
            max_frequency
        )
        work_to_do.calculate_work_to_do()

        nodes_required, node_count = get_nodes_required(work_to_do.work_to_do, frequencies_per_node, spot_price)

        if len(nodes_required) > 0:
            uuid = get_uuid()
            ec2_data = EC2Controller(
                ami_id,
                nodes_required,
                get_node_manager_user_data(boto_data, uuid, log_level=log_level),
                AWS_REGION,
                tags=[
                    {
                        'Key': 'Owner',
                        'Value': getpass.getuser(),
                    },
                    {
                        'Key': 'Name',
                        'Value': 'DALiuGE NM - Clean',
                    },
                    {
                        'Key': 'uuid',
                        'Value': uuid,
                    }
                ]
            )
            ec2_data.start_instances()

            reported_running = get_reported_running(
                uuid,
                node_count,
                wait=600
            )

            if len(reported_running) == 0:
                LOG.error('Nothing has reported ready')
            else:
                hosts = build_hosts(reported_running)

                # Create the Data Island Manager
                data_island_manager = EC2Controller(
                    ami_id,
                    [
                        {
                            'number_instances': 1,
                            'instance_type': 'm4.large',
                            'spot_price': spot_price
                        }
                    ],
                    get_data_island_manager_user_data(boto_data, hosts, uuid, need_node_manager=True, log_level=log_level),
                    AWS_REGION,
                    tags=[
                        {
                            'Key': 'Owner',
                            'Value': getpass.getuser(),
                        },
                        {
                            'Key': 'Name',
                            'Value': 'DALiuGE DIM - Clean',
                        },
                        {
                            'Key': 'uuid',
                            'Value': uuid,
                        }
                    ]
                )
                data_island_manager.start_instances()
                data_island_manager_running = get_reported_running(
                        uuid,
                        1,
                        wait=600
                )

                if len(data_island_manager_running['m4.large']) == 1:
                    # Now build the graph
                    session_id = get_session_id()
                    instance_details = data_island_manager_running['m4.large'][0]
                    host = instance_details['ip_address']
                    graph = BuildGraphClean(
                        work_to_do=work_to_do.work_to_do,
                        bucket_name=bucket_name,
                        volume=volume,
                        parallel_streams=PARALLEL_STREAMS,
                        node_details=reported_running,
                        shutdown=add_shutdown,
                        width=frequency_width,
                        iterations=iterations,
                        arcsec=arcsec,
                        w_projection_planes=w_projection_planes,
                        robust=robust,
                        image_size=image_size,
                        clean_channel_average=clean_channel_average,
                        clean_directory_name=clean_directory_name,
                        only_image=only_image,
                        session_id=session_id,
                        dim_ip=host)
                    graph.build_graph()

                    # TODO: Save the run parameters

                    LOG.info('Connection to {0}:{1}'.format(host, DIM_PORT))
                    client = DataIslandManagerClient(host, DIM_PORT)

                    client.create_session(session_id)
                    client.append_graph(session_id, graph.drop_list)
                    client.deploy_session(session_id, get_roots(graph.drop_list))
    else:
        LOG.error('Unable to find the AWS credentials')
def create_and_generate(bucket_name, frequency_width, ami_id, spot_price, volume, add_shutdown, iterations):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        uuid = get_uuid()
        ec2_data = EC2Controller(
            ami_id,
            [
                {
                    'number_instances': 1,
                    'instance_type': 'i2.4xlarge',
                    'spot_price': spot_price
                }
            ],
            get_node_manager_user_data(boto_data, uuid),
            AWS_REGION,
            tags=[
                {
                    'Key': 'Owner',
                    'Value': getpass.getuser(),
                },
                {
                    'Key': 'Name',
                    'Value': 'DALiuGE NM - Concatenate',
                },
                {
                    'Key': 'uuid',
                    'Value': uuid,
                }
            ]
        )
        ec2_data.start_instances()

        reported_running = get_reported_running(
            uuid,
            1,
            wait=600
        )
        hosts = build_hosts(reported_running)

        # Create the Data Island Manager
        data_island_manager = EC2Controller(
            ami_id,
            [
                {
                    'number_instances': 1,
                    'instance_type': 'm4.large',
                    'spot_price': spot_price
                }
            ],
            get_data_island_manager_user_data(boto_data, hosts, uuid),
            AWS_REGION,
            tags=[
                {
                    'Key': 'Owner',
                    'Value': getpass.getuser(),
                },
                {
                    'Key': 'Name',
                    'Value': 'DALiuGE DIM - Concatenate',
                },
                {
                    'Key': 'uuid',
                    'Value': uuid,
                },
            ]
        )
        data_island_manager.start_instances()
        data_island_manager_running = get_reported_running(
            uuid,
            1,
            wait=600
        )

        if len(data_island_manager_running['m4.large']) == 1:
            # Now build the graph
            session_id = get_session_id()
            instance_details = data_island_manager_running['m4.large'][0]
            host = instance_details['ip_address']
            graph = BuildGraphConcatenation(
                bucket_name,
                volume,
                PARALLEL_STREAMS,
                reported_running,
                add_shutdown,
                frequency_width,
                iterations,
                '2arcsec',  # TODO: Pass as a parameter
                session_id,
                host)
            graph.build_graph()

            LOG.info('Connection to {0}:{1}'.format(host, DIM_PORT))
            client = DataIslandManagerClient(host, DIM_PORT)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, get_roots(graph.drop_list))
    else:
        LOG.error('Unable to find the AWS credentials')
Example 16
def use_and_generate(
        host,
        port,
        bucket_name,
        frequency_width,
        w_projection_planes,
        volume,
        add_shutdown,
        min_frequency,
        max_frequency,
        scan_statistics,
        dump_json):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            work_to_do = WorkToDo(
                width=frequency_width,
                bucket_name=bucket_name,
                s3_uvsub_name=get_s3_uvsub_name(frequency_width),
                s3_split_name=get_s3_split_name(frequency_width),
                min_frequency=min_frequency,
                max_frequency=max_frequency)
            work_to_do.calculate_work_to_do()

            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphUvsub(
                work_to_do=work_to_do.work_to_do,
                bucket_name=bucket_name,
                volume=volume,
                parallel_streams=PARALLEL_STREAMS,
                node_details=nodes_running,
                shutdown=add_shutdown,
                scan_statistics=scan_statistics,
                width=frequency_width,
                w_projection_planes=w_projection_planes,
                session_id=session_id,
                dim_ip=host)
            graph.build_graph()

            if dump_json:
                json_dumps = json.dumps(graph.drop_list, indent=2)
                with open("/tmp/json_uvsub.txt", "w") as json_file:
                    json_file.write(json_dumps)

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, get_roots(graph.drop_list))

        else:
            LOG.warning('No nodes are running')
def create_and_generate(bucket_name, frequency_width, ami_id, spot_price, volume, bottom_frequency, nodes, add_shutdown):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        uuid = get_uuid()
        ec2_data = EC2Controller(
            ami_id,
            [
                {
                    'number_instances': nodes,
                    'instance_type': 'i2.2xlarge',
                    'spot_price': spot_price
                }
            ],
            get_node_manager_user_data(boto_data, uuid),
            AWS_REGION,
            tags=[
                {
                    'Key': 'Owner',
                    'Value': getpass.getuser(),
                },
                {
                    'Key': 'Name',
                    'Value': 'Daliuge NM - Find bad MS',
                },
                {
                    'Key': 'uuid',
                    'Value': uuid,
                }
            ]
        )
        ec2_data.start_instances()

        reported_running = get_reported_running(
            uuid,
            nodes,
            wait=600
        )

        if len(reported_running) == 0:
            LOG.error('Nothing has reported ready')
        else:
            hosts = build_hosts(reported_running)

            # Create the Data Island Manager
            data_island_manager = EC2Controller(
                ami_id,
                [
                    {
                        'number_instances': 1,
                        'instance_type': 'm4.large',
                        'spot_price': spot_price
                    }
                ],
                get_data_island_manager_user_data(boto_data, hosts, uuid),
                AWS_REGION,
                tags=[
                    {
                        'Key': 'Owner',
                        'Value': getpass.getuser(),
                    },
                    {
                        'Key': 'Name',
                        'Value': 'Daliuge DIM - Find bad MS',
                    },
                    {
                        'Key': 'uuid',
                        'Value': uuid,
                    }
                ]
            )
            data_island_manager.start_instances()
            data_island_manager_running = get_reported_running(
                    uuid,
                    1,
                    wait=600
            )

            if len(data_island_manager_running['m4.large']) == 1:
                # Now build the graph
                session_id = get_session_id()
                instance_details = data_island_manager_running['m4.large'][0]
                host = instance_details['ip_address']
                graph = BuildGraphFindBadMeasurementSet(
                    bucket_name,
                    volume,
                    PARALLEL_STREAMS,
                    reported_running,
                    add_shutdown,
                    frequency_width,
                    bottom_frequency,
                    session_id,
                    host,
                )
                graph.build_graph()

                LOG.info('Connection to {0}:{1}'.format(host, DIM_PORT))
                client = DataIslandManagerClient(host, DIM_PORT)

                client.create_session(session_id)
                client.append_graph(session_id, graph.drop_list)
                client.deploy_session(session_id, graph.start_oids)
    else:
        LOG.error('Unable to find the AWS credentials')
def create_and_generate(bucket_name, frequency_width, ami_id, spot_price1, spot_price2, volume, days_per_node, add_shutdown):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        work_to_do = WorkToDo(frequency_width, bucket_name, get_s3_split_name(frequency_width))
        work_to_do.calculate_work_to_do()

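        # The work is keyed by observation day; days_per_node controls how many days each node is expected to handle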
        days = work_to_do.work_to_do.keys()
        nodes_required, node_count = get_nodes_required(days, days_per_node, spot_price1, spot_price2)

        if len(nodes_required) > 0:
            uuid = get_uuid()
            ec2_data = EC2Controller(
                ami_id,
                nodes_required,
                get_node_manager_user_data(boto_data, uuid),
                AWS_REGION,
                tags=[
                    {
                        'Key': 'Owner',
                        'Value': getpass.getuser(),
                    },
                    {
                        'Key': 'Name',
                        'Value': 'Daliuge NM - MsTransform',
                    },
                    {
                        'Key': 'uuid',
                        'Value': uuid,
                    }
                ]
            )
            ec2_data.start_instances()

            reported_running = get_reported_running(
                uuid,
                node_count,
                wait=600
            )
            hosts = build_hosts(reported_running)

            # Create the Data Island Manager
            data_island_manager = EC2Controller(
                ami_id,
                [
                    {
                        'number_instances': 1,
                        'instance_type': 'm4.large',
                        'spot_price': spot_price1
                    }
                ],
                get_data_island_manager_user_data(boto_data, hosts, uuid),
                AWS_REGION,
                tags=[
                    {
                        'Key': 'Owner',
                        'Value': getpass.getuser(),
                    },
                    {
                        'Key': 'Name',
                        'Value': 'Daliuge DIM - MsTransform',
                    },
                    {
                        'Key': 'uuid',
                        'Value': uuid,
                    }
                ]
            )
            data_island_manager.start_instances()
            data_island_manager_running = get_reported_running(
                uuid,
                1,
                wait=600
            )

            if len(data_island_manager_running['m4.large']) == 1:
                # Now build the graph
                session_id = get_session_id()
                instance_details = data_island_manager_running['m4.large'][0]
                host = instance_details['ip_address']
                graph = BuildGraphMsTransform(
                    work_to_do.work_to_do,
                    bucket_name,
                    volume,
                    7,
                    reported_running,
                    add_shutdown,
                    frequency_width,
                    session_id,
                    hosts,
                )
                graph.build_graph()
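                # Tag every application drop with the session id (presumably so this run's drops can be traced later)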
                graph.tag_all_app_drops({
                    "session_id": session_id,
                })

                LOG.info('Connection to {0}:{1}'.format(host, DIM_PORT))
                client = DataIslandManagerClient(host, DIM_PORT)

                client.create_session(session_id)
                client.append_graph(session_id, graph.drop_list)
                client.deploy_session(session_id, graph.start_oids)
    else:
        LOG.error('Unable to find the AWS credentials')