Ejemplo n.º 1
0
def main():
    """Sum the total bytes of all flows matching a filter over the last 15 days."""
    search_api = swagger_client.SearchApi(api_client=api_client)

    # TODO: Add/Change filter to get valid results. Examples are shown below
    # filter_string = "destination_ip.ip_address = '192.168.21.20'"
    # filter_string = "port = 123"
    # filter_string = "source_cluster.name = 'HaaS-Cluster-6'"
    filter_string = "destination_l2_network.name = 'vlan-1014'"

    logger.info(
        "Getting total bytes for {} in last 15 days".format(filter_string))

    # Aggregate by summing each matching flow's byte counter.
    byte_sum = swagger_client.Aggregation(
        field="flow.totalBytes.delta.summation.bytes",
        aggregation_type="SUM")

    # Time range is last 15 days
    last_15_days = swagger_client.TimeRange(
        start_time=utilities.get_start_time(15),
        end_time=utilities.get_end_time())

    request = swagger_client.AggregationRequest(
        entity_type=swagger_client.EntityType.FLOW,
        aggregations=[byte_sum],
        time_range=last_15_days,
        filter=filter_string)

    response = search_api.aggregate_search_results(body=request)
    # Value of sum bytes
    logger.info(response.aggregations[0].value)
Ejemplo n.º 2
0
def main():
    """Page through every VM matching a search filter and log each VM's name."""

    # Create search API client object
    search_api = swagger_client.SearchApi()

    # TODO: Add/Change filter to get valid results
    filter_string = "vcenter_manager.name = '10.197.17.43'"
    # Create request parameters required for search APIs
    public_api_search_request_params = dict(entity_type=swagger_client.EntityType.VIRTUALMACHINE,
                                            filter=filter_string,
                                            size=100)
    logger.info("Get all VMs with filter = [{}]".format(filter_string))

    # Create payload from search parameters required for calling the search API
    search_payload = swagger_client.SearchRequest(
        **public_api_search_request_params)

    # Fix: the entities client is loop-invariant — build it once instead of
    # constructing a new EntitiesApi for every single result.
    entities_api = swagger_client.EntitiesApi(api_client=api_client)

    while True:
        # Call the search API
        api_response = search_api.search_entities(body=search_payload)
        logger.info("Response attributes: Total Count: {} Cursor : {} "
                    "Time: {}".format(api_response.total_count, api_response.cursor,
                                      time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(api_response.end_time))))
        for result in api_response.results:
            # Resolve each search hit to its VM entity to obtain the name.
            logger.info("VM Name: {}".format(
                entities_api.get_vm(id=result.entity_id).name))
            # make sure we don't hit the vRNI throttle and start getting 429 errors
            time.sleep(0.025)
        # An empty cursor marks the last results page.
        if not api_response.cursor:
            break
        # Request the next page on the following iteration.
        search_payload.cursor = api_response.cursor
Ejemplo n.º 3
0
def main():
    """Bulk-fetch flows for a datacenter filter and export them to flows_to_internet.csv."""

    # Create search API client object
    search_api = swagger_client.SearchApi()

    # TODO: Add/Change filter to get valid results
    filter_string = "((source_datacenter.name = 'washington-dc-delta-1'))"

    # Create request parameters required for search APIs.
    # Fix: filter_string was built and logged but never passed to the search,
    # so the query silently ignored the datacenter filter.
    public_api_search_request_params = dict(entity_type=swagger_client.EntityType.FLOW,
                                            filter=filter_string,
                                            size=3)
    logger.info("Get all VMs with filter = [{}]".format(filter_string))

    # Create payload from search parameters required for calling the search API
    search_payload = swagger_client.SearchRequest(**public_api_search_request_params)

    # Fix: the entities client is loop-invariant — build it once.
    entities_api = swagger_client.EntitiesApi(api_client=api_client)

    # Context manager guarantees the CSV file is closed even if an API call raises.
    with open('flows_to_internet.csv', 'w') as f_csv:
        fields = ['src_ip', 'dst_ip', 'src_vm', 'src_security_groups', 'port']
        writer = csv.DictWriter(f_csv, fieldnames=fields, delimiter=":")
        writer.writeheader()

        while True:
            # Call the search API
            search_response = search_api.search_entities(body=search_payload)
            logger.info("Response attributes: Total Count: {} "
                        "Time: {}".format(search_response.total_count,
                                          time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(search_response.end_time))))
            logger.info("Result list : {} ".format(search_response.results))

            # payload for bulk fetch
            payload = {"entity_ids": search_response.results}
            # bulk fetching the entities
            fetch_response = entities_api.entities_fetch_post(body=payload)
            time.sleep(0.025)  # make sure we don't hit the vRNI throttle and start getting 429 errors

            for result in fetch_response.results:
                # Get Source VM Name
                src_vm_name = get_referenced_entity_name(entity_id=result.entity.source_vm.entity_id,
                                                         entity_type=result.entity.source_vm.entity_type,
                                                         entities_api=entities_api)
                time.sleep(0.025)  # make sure we don't hit the vRNI throttle and start getting 429 errors
                # Get Source security groups
                sec_group_names = []
                for src_sec_group in result.entity.source_security_groups:
                    name = get_referenced_entity_name(entity_id=src_sec_group.entity_id,
                                                      entity_type=src_sec_group.entity_type,
                                                      entities_api=entities_api)
                    if name:
                        sec_group_names.append(name)
                    time.sleep(0.025)  # make sure we don't hit the vRNI throttle and start getting 429 errors
                # Write it to csv file
                flow_fields = dict(src_ip=result.entity.source_ip.ip_address,
                                   dst_ip=result.entity.destination_ip.ip_address,
                                   port=result.entity.port.iana_port_display,
                                   src_vm=src_vm_name,
                                   src_security_groups=",".join(sec_group_names))
                writer.writerow(flow_fields)

            # Fix: paginate on the *search* response. Previously the bulk-fetch
            # response overwrote api_response, so the cursor check inspected the
            # wrong object and pagination was broken.
            if not search_response.cursor:
                break
            search_payload.cursor = search_response.cursor
def main():
    """Export internet flows (with VM, security-group and security-tag names) to CSV,
    writing each unique destination ip/protocol/port combination only once."""

    # Create search API client object
    search_api = swagger_client.SearchApi()
    logger = logging.getLogger("vrni_sdk")
    # Alternative filter kept for reference (the original overwrote it immediately):
    # "(source_security_tags.name = 'OPI' or destination_security_tags.name='OPI') "
    # "and (flow_tag != TAG_INTERNET_TRAFFIC) and"
    # "(source_security_tags.name != AD ) and (destination_security_tags.name != AD)"
    filter_string = "((flow_tag = TAG_INTERNET_TRAFFIC) and (source_datacenter.name = 'HaaS-1'))"

    # Create request parameters required for search APIs
    public_api_search_request_params = dict(
        entity_type=swagger_client.EntityType.FLOW,
        filter=filter_string,
        size=100)
    logger.info("Get all VMs with filter = [{}]".format(filter_string))

    search_payload = swagger_client.SearchRequest(
        **public_api_search_request_params)

    # Fix: the entities client is loop-invariant — build it once instead of
    # once per result.
    entities_api = swagger_client.EntitiesApi()

    # Context manager guarantees the CSV file is closed even if an API call raises.
    with open('flows_to_internet.csv', 'w') as f_csv:
        fields = [
            'source_sec_tag', 'destination_sec_tag', 'src_security_groups',
            'dst_security_groups', 'src_vm', 'src_ip', 'destination_vm', 'dst_ip',
            'protocol', 'port'
        ]
        writer = csv.DictWriter(f_csv, fieldnames=fields, delimiter=":")
        writer.writeheader()

        # Destination ip/protocol/port combos already exported, for de-duplication.
        destination_ip_port_protocol = []

        while True:
            # Call the search API
            api_response = search_api.search_entities(body=search_payload)
            logger.info("Response attributes: Total Count: {} "
                        "Time: {}".format(
                            api_response.total_count,
                            time.strftime('%Y-%m-%d %H:%M:%S',
                                          time.localtime(api_response.end_time))))

            for result in api_response.results:
                flow_details = entities_api.get_flow(id=result.entity_id)
                logger.info("Flow: {}".format(flow_details.name))

                # Skip destination ip/protocol/port combos we have already written.
                dest_ip_port_protocol = '{}-{}--{}-{}'.format(
                    flow_details.destination_ip.ip_address, flow_details.protocol,
                    flow_details.port.start, flow_details.port.end)
                if dest_ip_port_protocol in destination_ip_port_protocol:
                    continue
                destination_ip_port_protocol.append(dest_ip_port_protocol)

                # Get Source VM Name (None when the flow has no source VM)
                src_vm_name = None if flow_details.source_vm is None else get_referenced_entity_name(
                    referenced_entity=flow_details.source_vm)
                # TODO: resolve a VM name another way when src_vm_name is None

                # Get Destination VM Name (None when the flow has no destination VM)
                dst_vm_name = None if flow_details.destination_vm is None else get_referenced_entity_name(
                    referenced_entity=flow_details.destination_vm)
                # TODO: resolve a VM name another way when dst_vm_name is None

                # Get Source security groups
                src_group_names = []
                for src_sec_group in flow_details.source_security_groups:
                    name = get_referenced_entity_name(
                        referenced_entity=src_sec_group)
                    if name:
                        src_group_names.append(name)

                # Get Destination security groups
                dst_group_names = []
                for dst_sec_group in flow_details.destination_security_groups:
                    name = get_referenced_entity_name(
                        referenced_entity=dst_sec_group)
                    if name:
                        dst_group_names.append(name)

                # Get Source security tags
                src_security_tag_names = []
                for src_sec_tag in flow_details.source_security_tags:
                    name = get_referenced_entity_name(
                        referenced_entity=src_sec_tag)
                    if name:
                        src_security_tag_names.append(name)

                # Fix: destination security *tags* were previously read from
                # destination_security_groups (copy-paste bug), so the
                # destination_sec_tag column duplicated the group names.
                dst_security_tag_names = []
                for dst_sec_tag in flow_details.destination_security_tags:
                    name = get_referenced_entity_name(
                        referenced_entity=dst_sec_tag)
                    if name:
                        dst_security_tag_names.append(name)

                # Write it to csv file
                flow_fields = dict(
                    src_ip=flow_details.source_ip.ip_address,
                    dst_ip=flow_details.destination_ip.ip_address,
                    port=flow_details.port.iana_port_display,
                    protocol=flow_details.protocol,
                    src_vm=src_vm_name,
                    destination_vm=dst_vm_name,
                    source_sec_tag=",".join(src_security_tag_names),
                    destination_sec_tag=",".join(dst_security_tag_names),
                    src_security_groups=",".join(src_group_names),
                    dst_security_groups=",".join(dst_group_names))
                writer.writerow(flow_fields)

            # An empty cursor marks the last results page.
            if not api_response.cursor:
                break
            search_payload.cursor = api_response.cursor
def main(args):
    """Search flows matching CONFIG_FILTER_STRING and forward each unique one
    to vRLI as a syslog message with structured fields.

    args: forwarded to send_vrli_message (presumably vRLI connection settings —
    TODO confirm against the caller).
    """
    # Create search API client object
    search_api = swagger_client.SearchApi()
    logger = logging.getLogger("vrni_sdk")
    filter_string = CONFIG_FILTER_STRING

    # Create request parameters required for search APIs
    public_api_search_request_params = dict(
        entity_type=swagger_client.EntityType.FLOW,
        filter=filter_string,
        size=100)
    logger.info("Get all flows with filter = [{}]".format(filter_string))
    search_payload = swagger_client.SearchRequest(
        **public_api_search_request_params)

    # to prevent default lookups, keep a record
    destination_ip_port_protocol = []

    # Fix: the entities client is loop-invariant — build it once instead of
    # once per result.
    entities_api = swagger_client.EntitiesApi()

    while True:
        # Call the search API
        api_response = search_api.search_entities(body=search_payload)
        logger.info(
            "Response attributes: Total Count: {} Start Time: {} End Time: {}".
            format(api_response.total_count,
                   datetime.fromtimestamp(api_response.start_time),
                   datetime.fromtimestamp(api_response.end_time)))

        for result in api_response.results:
            flow_timestamp = result.time
            flow_details = entities_api.get_flow(id=result.entity_id,
                                                 time=flow_timestamp)
            flow_name = flow_details.name.encode('utf-8').strip()
            logger.info("Flow: {}".format(flow_name))

            # Ignore flows we've already seen
            dest_ip_port_protocol = '{}-{}--{}-{}'.format(
                flow_details.destination_ip.ip_address, flow_details.protocol,
                flow_details.port.start, flow_details.port.end)
            if dest_ip_port_protocol in destination_ip_port_protocol:
                continue
            destination_ip_port_protocol.append(dest_ip_port_protocol)

            # get source VM Name, if any
            src_vm_name = None
            if flow_details.source_vm is not None:
                src_vm_name = lookup_vm_name(flow_details.source_vm)

            # get destination VM Name, if any
            dst_vm_name = None
            if flow_details.destination_vm is not None:
                dst_vm_name = lookup_vm_name(flow_details.destination_vm)

            # create syslog message
            datetime_str = datetime.fromtimestamp(flow_timestamp)
            syslog_msg = 'vRNI-Flow: {} {} {} {}'.format(
                datetime_str, flow_details.firewall_action,
                flow_details.protocol, flow_name)

            print(syslog_msg)

            # form the fields parameter, which will show up as 'Fields' in vRLI
            log_fields = {
                "__vrni_flow_firewall_action": flow_details.firewall_action,
                "__vrni_flow_firewall_rule_id": flow_details.firewall_rule_id,
                "__vrni_flow_traffic_type": flow_details.traffic_type,
                "__vrni_flow_tag": flow_details.flow_tag,
                "__vrni_flow_source_ip": flow_details.source_ip.ip_address,
                "__vrni_flow_destination_ip": flow_details.destination_ip.ip_address,
                "__vrni_flow_port": flow_details.port.display,
                "__vrni_flow_port_name": flow_details.port.iana_name,
                "__vrni_flow_protocol": flow_details.protocol,
                "__vrni_flow_timestamp": flow_timestamp,
            }

            # vRLI takes milliseconds as the ts
            flow_timestamp_ms = flow_timestamp * 1000
            # Best-effort delivery: a vRLI outage should not stop the export.
            # Fix: narrowed from a bare `except:`, which also swallowed
            # KeyboardInterrupt and SystemExit.
            try:
                send_vrli_message(syslog_msg, flow_timestamp_ms, log_fields,
                                  args)
            except Exception:
                print("Failure sending to vRLI")

            # make sure we don't hit the vRNI throttle and start getting 429 errors
            time.sleep(0.025)
        # break from the loop if this was the last results page
        if not api_response.cursor:
            break
        # otherwise save the cursor of the next page and move on
        search_payload.cursor = api_response.cursor
Ejemplo n.º 6
0
import swagger_client
from swagger_client.models import *
from swagger_client.rest import ApiException
import sys

# Configuration
# NOTE(review): SSL verification disabled and a localhost host — fine for a
# local example, not for production use.
configuration = swagger_client.configuration.Configuration()
configuration.verify_ssl = False
configuration.host = "http://localhost:7761/v1.0"
api_config = swagger_client.ApiClient(configuration)

# API clients for the boards, datasets and search endpoints, sharing one session.
gsi_boards_apis = swagger_client.BoardsApi(api_config)
gsi_datasets_apis = swagger_client.DatasetsApi(api_config)
gsi_search_apis = swagger_client.SearchApi(api_config)

# Inputs
num_of_boards = 1
#dataset_path = "/efs/data/public/fp32_datasets/dataset.npy"# "/path/to/dataset/npy/file"
dataset_path = "/efs/data/public/fp32_datasets/bit_vector_train50_padded256_f32.npy"
metadata_path = None  #"/path/to/metadata/pkl/file"
#queries_path = "/efs/data/public/fp32_queries/queries_1.npy"# "/path/to/queries/npy/file"
queries_path = "/efs/data/public/fp32_queries/bit_vector_test50_padded256_f32_50.npy"  # "/path/to/queries/npy/file"

# Example
allocation_id = None
try:

    # Import Dataset
    # NOTE(review): `if False:` permanently disables the import step, and the
    # call below is cut off mid-statement — this example appears truncated.
    if False:
        print("about to import")
        response = gsi_datasets_apis.apis_import_dataset(
def main():
    """Export internet flows for one datacenter to flows_to_internet.csv,
    including source VM and source security-group names."""

    # Create search API client object
    search_api = swagger_client.SearchApi()
    logger = logging.getLogger("vrni_sdk")

    # TODO: Add/Change filter to get valid results
    filter_string = "((flow_tag = TAG_INTERNET_TRAFFIC) and (source_datacenter.name = 'HaaS-1'))"
    # Create request parameters required for search APIs
    public_api_search_request_params = dict(
        entity_type=swagger_client.EntityType.FLOW,
        filter=filter_string,
        size=100)
    logger.info("Get all VMs with filter = [{}]".format(filter_string))

    # Create payload from search parameters required for calling the search API
    search_payload = swagger_client.SearchRequest(
        **public_api_search_request_params)

    # Fix: the entities client is loop-invariant — build it once instead of
    # constructing a new EntitiesApi for every result.
    entities_api = swagger_client.EntitiesApi()

    # Context manager guarantees the CSV file is closed even if an API call raises.
    with open('flows_to_internet.csv', 'w') as f_csv:
        fields = ['src_ip', 'dst_ip', 'src_vm', 'src_security_groups', 'port']
        writer = csv.DictWriter(f_csv, fieldnames=fields, delimiter=":")
        writer.writeheader()

        while True:
            # Call the search API
            api_response = search_api.search_entities(body=search_payload)
            logger.info("Response attributes: Total Count: {} "
                        "Time: {}".format(
                            api_response.total_count,
                            time.strftime('%Y-%m-%d %H:%M:%S',
                                          time.localtime(api_response.end_time))))
            for result in api_response.results:
                internet_flow = entities_api.get_flow(id=result.entity_id)
                logger.info("Flow: {}".format(internet_flow.name))
                # make sure we don't hit the vRNI throttle and start getting 429 errors
                time.sleep(0.025)

                # Get Source VM Name
                src_vm_name = get_referenced_entity_name(
                    referenced_entity=internet_flow.source_vm)
                # make sure we don't hit the vRNI throttle and start getting 429 errors
                time.sleep(0.025)

                # Get Source security groups
                sec_group_names = []
                for src_sec_group in internet_flow.source_security_groups:
                    name = get_referenced_entity_name(
                        referenced_entity=src_sec_group)
                    if name:
                        sec_group_names.append(name)

                # Write it to csv file
                flow_fields = dict(src_ip=internet_flow.source_ip.ip_address,
                                   dst_ip=internet_flow.destination_ip.ip_address,
                                   port=internet_flow.port.iana_port_display,
                                   src_vm=src_vm_name,
                                   src_security_groups=",".join(sec_group_names))
                writer.writerow(flow_fields)

            # An empty cursor marks the last results page.
            if not api_response.cursor:
                break
            search_payload.cursor = api_response.cursor