Example #1
def use_and_generate(host, port, bucket_name, frequency_width, volume,
                     add_shutdown, iterations):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        # Ask the Data Island Manager (DIM) which nodes it knows about
        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
                host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        # Keep only the nodes that are actually running
        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphConcatenation(bucket_name, volume,
                                            PARALLEL_STREAMS, nodes_running,
                                            add_shutdown, frequency_width,
                                            iterations, session_id, host)
            graph.build_graph()

            # Hand the graph to the DIM: create a session, attach the drops, deploy
            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, graph.start_oids)

        else:
            LOG.warning('No nodes are running')
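
These examples use the Python 2 httplib module to probe the Data Island Manager's /api endpoint before building and deploying a graph. A minimal sketch of the same probe under Python 3, where http.client replaces httplib, is shown below; the host and port values are placeholders, and the expected {'hosts': [...]} response layout is taken from the examples in this listing.

# Sketch only: Python 3 equivalent of the /api probe used by the examples above.
# The DIM address used at the bottom is a placeholder, not a value from the project.
import http.client
import json


def fetch_host_list(host, port):
    connection = http.client.HTTPConnection(host, port)
    connection.request('GET', '/api')
    response = connection.getresponse()
    if response.status != http.client.OK:
        raise Exception(
            'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
                host, port, response.status, response.read()))
    return json.loads(response.read())['hosts']


if __name__ == '__main__':
    print(fetch_host_list('127.0.0.1', 8001))  # placeholder host and port
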
Example #2
def use_and_generate(host, port, bucket_name, frequency_width, volume, add_shutdown, iterations):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
                host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphJpeg2000(
                bucket_name, volume, PARALLEL_STREAMS, nodes_running,
                add_shutdown, frequency_width, iterations, session_id, host)
            graph.build_graph()

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, graph.start_oids)

        else:
            LOG.warning('No nodes are running')
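
A call site for one of these iteration-based variants might look like the sketch below. Every argument value is an illustrative placeholder, not a configuration taken from the project.

# Hypothetical invocation of the variant above; all values are placeholders.
use_and_generate(
    host='127.0.0.1',                # Data Island Manager address (placeholder)
    port=8001,                       # DIM REST port (placeholder)
    bucket_name='my-chiles-bucket',  # S3 bucket holding the data (placeholder)
    frequency_width=4,               # frequency split width (placeholder)
    volume='/mnt/daliuge/dlg_root',  # volume mounted into the drops (placeholder)
    add_shutdown=True,               # forwarded as the shutdown flag to the graph builder
    iterations=10,                   # iterations value forwarded to the graph builder (placeholder)
)
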
Example #3
def use_and_generate(host, port, bucket_name, frequency_width, volume, add_shutdown, password):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        database_connection, map_day_name, database_ip = setup_database(password, bucket_name)

        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
                host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            work_to_do = WorkToDo(frequency_width, bucket_name,
                                  get_s3_uvsub_name(frequency_width),
                                  database_connection)
            work_to_do.calculate_work_to_do()

            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphStats(
                work_to_do.work_to_do,
                bucket_name,
                volume,
                PARALLEL_STREAMS,
                nodes_running,
                add_shutdown,
                frequency_width,
                session_id,
                map_day_name,
                password,
                database_ip,
                host
            )
            graph.build_graph()

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, graph.start_oids)

        else:
            LOG.warning('No nodes are running')
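
The stats variant also needs the password for the database opened by setup_database. A hypothetical call site that reads the password from the environment rather than hard-coding it could look like this; the environment variable name and every other value are placeholders.

# Sketch only: call the stats variant with a password taken from the environment.
import os

use_and_generate(
    '127.0.0.1', 8001,                 # DIM address and port (placeholders)
    'my-chiles-bucket',                # S3 bucket (placeholder)
    4,                                 # frequency width (placeholder)
    '/mnt/daliuge/dlg_root',           # volume (placeholder)
    True,                              # add_shutdown
    os.environ['CHILES_DB_PASSWORD'],  # hypothetical environment variable
)
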
Example #4
def use_and_generate(host, port, bucket_name, frequency_width, volume, add_shutdown):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
                host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            work_to_do = WorkToDo(
                width=frequency_width,
                bucket_name=bucket_name,
                s3_split_name=get_s3_split_name(frequency_width))
            work_to_do.calculate_work_to_do()

            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphMsTransform(
                work_to_do=work_to_do.work_to_do,
                bucket_name=bucket_name,
                volume=volume,
                parallel_streams=7,
                node_details=nodes_running,
                shutdown=add_shutdown,
                width=frequency_width,
                session_id=session_id,
                dim_ip=host,
            )
            graph.build_graph()

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, get_roots(graph.drop_list))

        else:
            LOG.warning('No nodes are running')
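
Unlike the earlier variants, this one deploys the session with get_roots(graph.drop_list) instead of graph.start_oids. The helper itself is not part of this listing; a minimal sketch of one way it could work, assuming each entry in drop_list is a drop-specification dictionary with an 'oid' key and an 'inputs' list that is empty or absent for root drops (an assumption, not something shown above), would be:

# Sketch only: hypothetical stand-in for get_roots(); the 'oid'/'inputs' layout
# of the drop specifications is an assumption.
def find_root_oids(drop_list):
    return [drop['oid'] for drop in drop_list if not drop.get('inputs')]
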
Example #5
def use_and_generate(host, port, bucket_name, frequency_width, volume,
                     add_shutdown, password):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        database_connection, map_day_name, database_ip = setup_database(
            password, bucket_name)

        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
                host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            work_to_do = WorkToDo(frequency_width, bucket_name,
                                  get_s3_uvsub_name(frequency_width),
                                  database_connection)
            work_to_do.calculate_work_to_do()

            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphStats(work_to_do.work_to_do, bucket_name, volume,
                                    PARALLEL_STREAMS, nodes_running,
                                    add_shutdown, frequency_width, session_id,
                                    map_day_name, password, database_ip, host)
            graph.build_graph()

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, graph.start_oids)

        else:
            LOG.warning('No nodes are running')
Example #6
def use_and_generate(
        host,
        port,
        bucket_name,
        frequency_width,
        w_projection_planes,
        volume,
        add_shutdown,
        min_frequency,
        max_frequency,
        scan_statistics,
        dump_json):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
                host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            work_to_do = WorkToDo(
                width=frequency_width,
                bucket_name=bucket_name,
                s3_uvsub_name=get_s3_uvsub_name(frequency_width),
                s3_split_name=get_s3_split_name(frequency_width),
                min_frequency=min_frequency,
                max_frequency=max_frequency)
            work_to_do.calculate_work_to_do()

            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphUvsub(
                work_to_do=work_to_do.work_to_do,
                bucket_name=bucket_name,
                volume=volume,
                parallel_streams=PARALLEL_STREAMS,
                node_details=nodes_running,
                shutdown=add_shutdown,
                scan_statistics=scan_statistics,
                width=frequency_width,
                w_projection_planes=w_projection_planes,
                session_id=session_id,
                dim_ip=host)
            graph.build_graph()

            if dump_json:
                json_dumps = json.dumps(graph.drop_list, indent=2)
                with open("/tmp/json_uvsub.txt", "w") as json_file:
                    json_file.write(json_dumps)

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, get_roots(graph.drop_list))

        else:
            LOG.warning('No nodes are running')
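
When dump_json is set, the uvsub variant writes the physical graph to /tmp/json_uvsub.txt before deploying it. A quick sanity check of that dump could look like the sketch below; the file path comes from the example, while treating the dump as a flat list of drop specifications is an assumption.

# Sketch only: reload the graph dumped by the uvsub variant and report its size.
import json

with open('/tmp/json_uvsub.txt') as json_file:
    dumped_drops = json.load(json_file)
print('{0} drops in the dumped graph'.format(len(dumped_drops)))
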
Example #7
def use_and_generate(
        host,
        port,
        bucket_name,
        frequency_width,
        volume,
        add_shutdown,
        iterations,
        arcsec,
        w_projection_planes,
        robust,
        image_size,
        clean_channel_average,
        min_frequency,
        max_frequency,
        clean_directory_name,
        only_image):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is not None:
        connection = httplib.HTTPConnection(host, port)
        connection.request('GET', '/api', None, {})
        response = connection.getresponse()
        if response.status != httplib.OK:
            msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
                host, port, response.status, response.read())
            raise Exception(msg)

        json_data = response.read()
        message_details = json.loads(json_data)
        host_list = message_details['hosts']

        nodes_running = get_nodes_running(host_list)
        if len(nodes_running) > 0:
            work_to_do = WorkToDo(
                frequency_width,
                bucket_name,
                get_s3_clean_name(frequency_width, iterations, arcsec)
                if clean_directory_name is None else clean_directory_name,
                min_frequency,
                max_frequency
            )
            work_to_do.calculate_work_to_do()

            # Now build the graph
            session_id = get_session_id()
            graph = BuildGraphClean(
                work_to_do=work_to_do.work_to_do,
                bucket_name=bucket_name,
                volume=volume,
                parallel_streams=PARALLEL_STREAMS,
                node_details=nodes_running,
                shutdown=add_shutdown,
                width=frequency_width,
                iterations=iterations,
                arcsec=arcsec,
                w_projection_planes=w_projection_planes,
                robust=robust,
                image_size=image_size,
                clean_channel_average=clean_channel_average,
                clean_directory_name=clean_directory_name,
                only_image=only_image,
                session_id=session_id,
                dim_ip=host)
            graph.build_graph()

            # TODO: Save the run parameters

            LOG.info('Connection to {0}:{1}'.format(host, port))
            client = DataIslandManagerClient(host, port)

            client.create_session(session_id)
            client.append_graph(session_id, graph.drop_list)
            client.deploy_session(session_id, get_roots(graph.drop_list))

        else:
            LOG.warning('No nodes are running')
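
All seven variants repeat the same preamble: check the AWS credentials, probe the Data Island Manager over HTTP, and keep only the nodes that are running. A sketch of how that shared code could be factored into a single helper is shown below; get_aws_credentials, get_nodes_running and LOG are the module's own names as used in the examples, while the helper itself is hypothetical.

# Sketch only: hypothetical helper collecting the preamble shared by every variant above.
def get_running_nodes(host, port):
    boto_data = get_aws_credentials('aws-chiles02')
    if boto_data is None:
        return None

    connection = httplib.HTTPConnection(host, port)
    connection.request('GET', '/api', None, {})
    response = connection.getresponse()
    if response.status != httplib.OK:
        msg = 'Error while processing GET request for {0}:{1}/api (status {2}): {3}'.format(
            host, port, response.status, response.read())
        raise Exception(msg)

    message_details = json.loads(response.read())
    nodes_running = get_nodes_running(message_details['hosts'])
    if len(nodes_running) == 0:
        LOG.warning('No nodes are running')
        return None
    return nodes_running
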