def main(args):
    """Dispatch on args.script_mode and run the selected pipeline step(s).

    Supported modes:
      EXPORT        - connect to the vRNI API and export application
                      definitions to a JSON file.
      IMPORT        - import a previously exported definition into an
                      NSX-T NSGroup expression.
      EXPORT_IMPORT - both of the above, export first.

    Any other mode is logged as an error; the function always returns None.
    """

    def _export():
        # Connection to vRNI API (needed by EXPORT and EXPORT_IMPORT);
        # factored out so both branches share one copy of the sequence.
        logger.info("Connecting to vRNI API")
        if args.verbose:
            print("Connecting to vRNI API...")
        init_api_client.get_api_client(args)
        # Function to export vRNI App def. to JSON file
        export_to_json(args)

    # ONLY Run Export to JSON
    if args.script_mode == 'EXPORT':
        _export()
        return
    # ONLY Run Import to NSX-T
    elif args.script_mode == 'IMPORT':
        # Import vRNI App definition to NSG Expression
        import_to_nsx(args)
        return
    # Export vRNI App Def + Import def in NSG
    elif args.script_mode == 'EXPORT_IMPORT':
        _export()
        # Import vRNI App definition to NSG Expression
        import_to_nsx(args)
        return
    else:
        logger.error("script_mode " + args.script_mode + " Unknown ! ")
        if args.verbose:
            print("script_mode " + args.script_mode + " Unknown ! ")
        return
# Example #2  (scraped-snippet separator; original marker: "예제 #2" / "0")
                # NOTE(review): fragment — the enclosing function/try opens
                # before this excerpt; left byte-identical, comments only.
                # If the CSV row carries an SNMP version, also push an SNMP
                # config for the data source that was just added.
                if data_source['snmp_version']:
                    # Resolve the per-type "add snmp config" API method by
                    # its name from the lookup table.
                    add_snmp_api_fn = getattr(
                        data_source_api, data_source_api_name['snmp_config'])
                    response = add_snmp_api_fn(
                        id=response.entity_id,
                        body=get_snmp_request_body(data_source))
                    logger.info(
                        "Successfully added: {} {} snmp : Response : {}".
                        format(data_source_type, data_source['IP'], response))
            except ApiException as e:
                # Log the decoded API error body (with traceback) and keep
                # going — a single bad row must not abort the whole run.
                logger.exception(
                    "Failed adding data source: {} : Error : {} ".format(
                        data_source['IP'], json.loads(e.body)))


def parse_arguments():
    """Build on the shared init_api_client parser and add the CSV option.

    Returns the parsed argparse.Namespace, including --data_sources_csv
    (defaults to 'data_sources.csv').
    """
    arg_parser = init_api_client.parse_arguments()
    arg_parser.add_argument(
        "--data_sources_csv",
        action="store",
        default='data_sources.csv',
        help="csv file with your own data sources")
    return arg_parser.parse_args()


if __name__ == '__main__':
    # Script entry point: parse CLI options, configure file logging under
    # /tmp, authenticate against the vRNI API, then run main() with both
    # the API client and the parsed args.
    args = parse_arguments()
    utilities.configure_logging("/tmp")
    api_client = init_api_client.get_api_client(args)
    main(api_client, args)
# Example #3  (scraped-snippet separator; original marker: "예제 #3" / "0")
def main(api_client, args):
    """Update every data source listed in args.data_sources_csv via the vRNI API.

    For each CSV row: look up the existing data-source entity, push an
    updated request body, optionally update its SNMP config, and (when
    args.restart is set) disable/enable the data source so new credentials
    take effect. The API token is refreshed every ~25 minutes.

    Returns a tuple (failure_log, notfound_log) of "Line N: row" strings
    describing rows that failed to update or could not be found.
    """
    failure_log = []
    notfound_log = []
    # Create data source API client object
    start_time = datetime.now()
    data_source_api = swagger_client.DataSourcesApi(api_client=api_client)
    # Count rows for progress reporting. Use a context manager so the file
    # handle is closed (the previous open(...).readlines() leaked it), and
    # a generator so the file is never fully materialized in memory.
    with open(args.data_sources_csv) as line_count_file:
        total_lines = sum(1 for _ in line_count_file) - 1  # subtract header row
    with open(args.data_sources_csv, 'rt') as csvFile:
        data_sources = csv.DictReader(csvFile)
        for csv_row in data_sources:
            data_source_type = csv_row['DataSourceType']
            # Tokens expire; proactively re-authenticate after 1500 s
            # (25 min) so long runs don't fail mid-way.
            if (datetime.now() - start_time).total_seconds() > 1500:
                init_api_client.delete_token(args, api_client)
                api_client = init_api_client.get_api_client(args)
                data_source_api = swagger_client.DataSourcesApi(
                    api_client=api_client)
                start_time = datetime.now()

            logger.info("Adding: {} [{} of {}]".format(
                _get_label(csv_row), data_sources.line_num - 1, total_lines))
            # Resolve the per-type get/update API methods by name.
            get_datasource_fn = getattr(
                data_source_api, get_api_function_name(data_source_type,
                                                       'get'))
            update_datasource_fn = getattr(
                data_source_api,
                get_api_function_name(data_source_type, 'update'))
            try:
                data_source_list = get_entities_by_type(
                    data_source_type, data_source_api)
                logger.info(
                    "Successfully got list of {} : Total Count : {}".format(
                        data_source_type, data_source_list.total_count))

                entity = get_data_source_entity(get_datasource_fn,
                                                data_source_list, csv_row)
                if not entity:
                    # Row references a data source vRNI doesn't know about;
                    # record it and move on.
                    logger.error("Could not find datasource entity: {}".format(
                        _get_label(csv_row)))
                    notfound_log.append("Line {}: {}".format(
                        data_sources.line_num,
                        ';'.join(list(csv_row.values()))))
                    continue

                logger.info("Attempting to update {}".format(
                    _get_label(csv_row)))
                updated_request_body = get_update_request_body(entity, csv_row)
                retry(update_datasource_fn,
                      id=entity.entity_id,
                      body=updated_request_body)
                logger.info("Successfully updated {}".format(
                    _get_label(csv_row)))

                if csv_row['snmp_version']:
                    # SNMP failure is non-fatal for the row: log it, record
                    # it, but keep the main update as successful.
                    try:
                        logger.info(
                            "Attempting to update SNMP Config for {}".format(
                                _get_label(csv_row)))
                        retry(update_snmp_config, entity.entity_id,
                              data_source_api, csv_row)
                        logger.info(
                            "Successfully updated SNMP Config for {}".format(
                                _get_label(csv_row)))
                    except ApiException as e:
                        logger.error(
                            "Failed to update snmp config for {}: Error : {} ".
                            format(_get_label(csv_row), e.reason))
                        failure_log.append("Line {}: {}".format(
                            data_sources.line_num,
                            ';'.join(list(csv_row.values()))))

                # modifying credentials via API doesn't restart datasource with those new credentials prior to vRNI 6.0
                if args.restart:
                    logger.info("Stopping datasource: {}".format(
                        _get_label(csv_row)))
                    disable_datasource_fn = getattr(
                        data_source_api,
                        get_api_function_name(data_source_type, 'disable'))
                    disable_datasource_fn(entity.entity_id)
                    # Brief pause so the disable settles before re-enabling.
                    time.sleep(1)
                    logger.info("Starting datasource: {}".format(
                        _get_label(csv_row)))
                    enable_datasource_fn = getattr(
                        data_source_api,
                        get_api_function_name(data_source_type, 'enable'))
                    enable_datasource_fn(entity.entity_id)
                    logger.info("Successfully Restarted: {}".format(
                        _get_label(csv_row)))
            except ApiException as e:
                # Any API failure for this row: log, record, continue with
                # the next row.
                logger.error("Failed to update {}: Error : {} ".format(
                    _get_label(csv_row), e.reason))
                failure_log.append("Line {}: {}".format(
                    data_sources.line_num, ';'.join(list(csv_row.values()))))
    return (failure_log, notfound_log)