def main(api_client, args):

    # Create data source API client object
    data_source_api = swagger_client.DataSourcesApi(api_client=api_client)
    with open("{}".format(args.data_sources_csv), 'rb') as csvFile:
        data_sources = csv.DictReader(csvFile)
        for data_source in data_sources:
            data_source_type = data_source['DataSourceType']

            logger.info("Adding: <{}> <{}>".format(data_source_type, data_source['IP']))
            # Get the Data source add api fn
            data_source_api_name = get_api_function_name(data_source_type)
            get_datasource_fn = getattr(data_source_api, data_source_api_name["get"])
            update_datasource_fn = getattr(data_source_api, data_source_api_name["update"])
            try:
                list_datasource_api_fn = getattr(data_source_api, data_source_api_name["list"])
                data_source_list = list_datasource_api_fn()
                logger.info("Successfully got list of: {} : Response : {}".format(data_source_type, data_source_list))
                entity_id = get_data_source_entity_id(data_source_api, get_datasource_fn, data_source_list, data_source)
                if not entity_id:
                    print("Failed getting data source type : {}: {}".format(data_source_type, data_source['IP']))
                    return
                response = get_datasource_fn(id=entity_id)
                update_request_body = get_update_request_body(response, data_source)
                update_response = update_datasource_fn(id=entity_id, body=update_request_body)
                logger.info("Successfully updated: {} : Response : {}".format(data_source_type, update_response))
                if data_source['snmp_version']:
                    update_snmp_config(entity_id, data_source_api, data_source_api_name, data_source)
            except ApiException as e:
                print("Failed updating of data source type: {} : Error : {} ".format(data_source_type, json.loads(e.body)))
def main(api_client, args):

    # Create data source API client object
    datasource_api = swagger_client.DataSourcesApi(api_client=api_client)
    with open("{}".format(args.data_sources_csv), 'w') as csvFile:
        fields = [
            "DataSourceType", "IP", "fqdn", "Username", "Password",
            "CSPRefreshToken", "NickName", "CentralCliEnabled", "IPFixEnabled",
            "SwitchType", "ParentvCenter", "IsVMC", "snmp_version",
            "snmp_community_string", "ProxyIP"
        ]
        writer = csv.DictWriter(csvFile, fieldnames=fields)
        writer.writeheader()
        data = []
        for data_source_type in DATASOURCES_LIST:
            data_source_api_name = get_api_function_name(data_source_type)
            # Get the list and get functions for this data source type
            list_datasource_api_fn = getattr(datasource_api,
                                             data_source_api_name["list"])
            get_datasource_fn = getattr(datasource_api,
                                        data_source_api_name["get"])
            time.sleep(
                0.025
            )  # make sure we don't hit the vRNI throttle and start getting 429 errors
            try:
                data_source_list = list_datasource_api_fn()
                logger.info(
                    "Successfully got list of: {} : Response : {}".format(
                        data_source_type, data_source_list))
                for data_source in data_source_list.results:
                    datasource = get_datasource_fn(id=data_source.entity_id)
                    logger.info("Successfully got {} : Response : {}".format(
                        data_source_type, datasource))
                    data_dict = get_data(api_client, datasource_api,
                                         datasource)
                    if data_source_type in SNMP_CONFIG_LIST:
                        get_snmp_api_fn = getattr(
                            datasource_api,
                            data_source_api_name['snmp_config'])
                        response = get_snmp_api_fn(id=datasource.entity_id)
                        time.sleep(
                            0.025
                        )  # make sure we don't hit the vRNI throttle and start getting 429 errors
                        if response.snmp_version == 'v2c':
                            data_dict['snmp_version'] = response.snmp_version
                            logger.info(
                                "Successfully got: {} {} snmp : Response : {}".
                                format(data_source_type, datasource.ip,
                                       response))
                    data.append(data_dict)
            except ApiException as e:
                print(
                    "Failed getting list of data source type: {} : Error : {} "
                    .format(data_source_type, json.loads(e.body)))
        writer.writerows(data)
def main(api_client, args):
    return_code = SUCCESS
    # Create data source API client object
    data_source_api = swagger_client.DataSourcesApi(api_client=api_client)
    with open("{}".format(args.data_sources_csv), 'rb') as csvFile:
        data_sources = csv.DictReader(csvFile)
        for data_source in data_sources:
            data_source_type = data_source['DataSourceType']

            # Get the Proxy ID from Proxy IP
            if data_source['ProxyIP'] not in proxy_ip_to_id:
                proxy_id = get_node_entity_id(
                    api_client, data_source['ProxyIP'])
                if not proxy_id:
                    logger.info("Incorrect Proxy IP {}".format(
                        data_source['ProxyIP']))
                    continue
                proxy_ip_to_id[data_source['ProxyIP']] = proxy_id
            else:
                proxy_id = proxy_ip_to_id[data_source['ProxyIP']]

            # Get vCenter ID for vCenter manager required for adding NSX
            vcenter_id = get_vcenter_manager_entity_id(
                data_source_api, data_source['ParentvCenter'])
            logger.info("Adding: <{}> <{}>".format(
                data_source_type, data_source['IP']))
            # Get the Data source add api fn
            data_source_api_name = get_api_function_name(data_source_type)
            add_data_source_api_fn = getattr(
                data_source_api, data_source_api_name['add'])
            try:
                response = add_data_source_api_fn(
                    body=get_add_request_body(data_source, proxy_id, vcenter_id))
                logger.info(
                    "Successfully added: {} {} : Response : {}".format(data_source_type, data_source['IP'], response))
                time.sleep(1)
                if data_source['snmp_version']:
                    add_snmp_api_fn = getattr(
                        data_source_api, data_source_api_name['snmp_config'])
                    response = add_snmp_api_fn(
                        id=response.entity_id, body=get_snmp_request_body(data_source))
                    logger.info(
                        "Successfully added: {} {} snmp : Response : {}".format(data_source_type, data_source['IP'],
                                                                                response))
                    time.sleep(1)
            except ApiException as e:
                logger.exception(
                    "Failed adding data source: {} : Error : {} ".format(data_source['IP'], json.loads(e.body)))
                return_code = ERROR

    init_api_client.delete_token(args, api_client)
    return return_code
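# The add flow above posts an SNMP configuration when the CSV row sets snmp_version.
# A minimal hypothetical sketch of the request-body helper it assumes (the real
# swagger_client model fields may differ; these keys are assumptions):
def get_snmp_request_body(data_source):
    # The CSV only carries a v2c community string (snmp_version / snmp_community_string columns)
    return {
        "snmp_enabled": True,
        "snmp_version": data_source['snmp_version'],
        "community_string": data_source['snmp_community_string'],
    }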
def main(api_client, args):
    return_code = SUCCESS
    # Create data source API client object
    data_source_api = swagger_client.DataSourcesApi(api_client=api_client)
    with open("{}".format(args.data_sources_csv), 'rb') as csvFile:
        data_sources = csv.DictReader(csvFile)
        for data_source in data_sources:
            data_source_type = data_source['DataSourceType']

            logger.info("Deleting: <{}> <{}>".format(data_source_type,
                                                     data_source['IP']))
            # Get the data source "get" and "delete" API functions
            data_source_api_name = get_datasource_details.get_api_function_name(
                data_source_type)
            get_datasource_fn = getattr(data_source_api,
                                        data_source_api_name["get"])
            delete_datasource_fn = getattr(data_source_api,
                                           data_source_api_name["delete"])
            try:
                list_datasource_api_fn = getattr(data_source_api,
                                                 data_source_api_name["list"])
                data_source_list = list_datasource_api_fn()
                logger.info(
                    "Successfully got list of: {} : Response : {}".format(
                        data_source_type, data_source_list))
                entity_id = get_data_source_entity_id(get_datasource_fn,
                                                      data_source_list,
                                                      data_source)
                if not entity_id:
                    print("Failed getting data source type : {}: {}".format(
                        data_source_type, data_source['IP']))
                    return_code = ERROR
                    continue
                delete_datasource_fn(id=entity_id)
                logger.info("Successfully deleted: {} : {}".format(
                    data_source_type, entity_id))
            except ApiException as e:
                print("Failed deleting data source type: {} : Error : {} ".
                      format(data_source_type, json.loads(e.body)))
                return_code = ERROR

    init_api_client.delete_token(args, api_client)
    return return_code
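# The delete flow above assumes a lookup helper that matches a CSV row to an existing
# entity by IP or FQDN. A hypothetical sketch, using only the attribute names already
# relied on above (results, entity_id, ip, fqdn):
def get_data_source_entity_id(get_datasource_fn, data_source_list, data_source):
    for result in data_source_list.results:
        details = get_datasource_fn(id=result.entity_id)
        # Match on either the IP or the FQDN column of the CSV row
        if details.ip == data_source['IP'] or getattr(details, 'fqdn', None) == data_source['fqdn']:
            return result.entity_id
    return None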
def main(api_client, args):

    # Create data source API client object
    data_source_api = swagger_client.DataSourcesApi(api_client=api_client)
    proxy_id = get_node_entity_id(api_client, args.proxy_ip)
    try:
        response = get_uani_datasource(data_source_api, args.device_ip_or_fqdn)
        if not response:
            response = data_source_api.add_generic_switch(body=get_add_request_body(args, proxy_id))
            logger.info(
                    "Successfully added generic switch: {} : Response : {}".format(args.device_ip_or_fqdn, response))
        data_source_api.file_upload(id=response.entity_id, file=args.zip_file_path)

        logger.info(
                "Successfully uploaded zip file: {}".format(args.zip_file_path))
    except ApiException as e:
        logger.exception(
                "Failed adding data source: {} : Error : {} ".format(args.device_ip_or_fqdn, json.loads(e.body)))
    finally:
        logger.info("Deleting API token")
        auth_api = swagger_client.AuthenticationApi(api_client=api_client)
        auth_api.delete()
def main(api_client, args):

    failure_log = []
    notfound_log = []
    # Create data source API client object
    start_time = datetime.now()
    data_source_api = swagger_client.DataSourcesApi(api_client=api_client)
    with open(args.data_sources_csv) as f:
        total_lines = sum(1 for _ in f) - 1  # subtract header row
    with open(args.data_sources_csv, 'rt') as csvFile:
        data_sources = csv.DictReader(csvFile)
        for csv_row in data_sources:
            data_source_type = csv_row['DataSourceType']
            # Tokens expire; refresh the API client roughly every 25 minutes
            if (datetime.now() - start_time).total_seconds() > 1500:
                init_api_client.delete_token(args, api_client)
                api_client = init_api_client.get_api_client(args)
                data_source_api = swagger_client.DataSourcesApi(
                    api_client=api_client)
                start_time = datetime.now()

            logger.info("Adding: {} [{} of {}]".format(
                _get_label(csv_row), data_sources.line_num - 1, total_lines))
            # Get the data source "get" and "update" API functions
            get_datasource_fn = getattr(
                data_source_api, get_api_function_name(data_source_type,
                                                       'get'))
            update_datasource_fn = getattr(
                data_source_api,
                get_api_function_name(data_source_type, 'update'))
            try:
                data_source_list = get_entities_by_type(
                    data_source_type, data_source_api)
                logger.info(
                    "Successfully got list of {} : Total Count : {}".format(
                        data_source_type, data_source_list.total_count))

                entity = get_data_source_entity(get_datasource_fn,
                                                data_source_list, csv_row)
                if not entity:
                    logger.error("Could not find datasource entity: {}".format(
                        _get_label(csv_row)))
                    notfound_log.append("Line {}: {}".format(
                        data_sources.line_num,
                        ';'.join(list(csv_row.values()))))
                    continue

                logger.info("Attempting to update {}".format(
                    _get_label(csv_row)))
                updated_request_body = get_update_request_body(entity, csv_row)
                retry(update_datasource_fn,
                      id=entity.entity_id,
                      body=updated_request_body)
                logger.info("Successfully updated {}".format(
                    _get_label(csv_row)))

                if csv_row['snmp_version']:
                    try:
                        logger.info(
                            "Attempting to update SNMP Config for {}".format(
                                _get_label(csv_row)))
                        retry(update_snmp_config, entity.entity_id,
                              data_source_api, csv_row)
                        logger.info(
                            "Successfully updated SNMP Config for {}".format(
                                _get_label(csv_row)))
                    except ApiException as e:
                        logger.error(
                            "Failed to update snmp config for {}: Error : {} ".
                            format(_get_label(csv_row), e.reason))
                        failure_log.append("Line {}: {}".format(
                            data_sources.line_num,
                            ';'.join(list(csv_row.values()))))

                # modifying credentials via API doesn't restart datasource with those new credentials prior to vRNI 6.0
                if args.restart:
                    logger.info("Stopping datasource: {}".format(
                        _get_label(csv_row)))
                    disable_datasource_fn = getattr(
                        data_source_api,
                        get_api_function_name(data_source_type, 'disable'))
                    disable_datasource_fn(entity.entity_id)
                    time.sleep(1)
                    logger.info("Starting datasource: {}".format(
                        _get_label(csv_row)))
                    enable_datasource_fn = getattr(
                        data_source_api,
                        get_api_function_name(data_source_type, 'enable'))
                    enable_datasource_fn(entity.entity_id)
                    logger.info("Successfully Restarted: {}".format(
                        _get_label(csv_row)))
            except ApiException as e:
                logger.error("Failed to update {}: Error : {} ".format(
                    _get_label(csv_row), e.reason))
                failure_log.append("Line {}: {}".format(
                    data_sources.line_num, ';'.join(list(csv_row.values()))))
    return (failure_log, notfound_log)
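# The last example wraps update calls in a retry helper to ride out transient failures
# such as HTTP 429 throttling. A minimal hypothetical sketch of such a wrapper:
def retry(fn, *args, retries=3, delay=1.0, **kwargs):
    for attempt in range(1, retries + 1):
        try:
            return fn(*args, **kwargs)
        except ApiException:
            if attempt == retries:
                raise
            time.sleep(delay * attempt)  # simple linear backoff between attempts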