def perform_bulk_delete_vlans(agent, org_name, fname, fn_deploy, vlans_only=False):
    """Run a VLAN-delete deployment against every store listed in *fname*.

    Reads the store list from the "templates" path, asks the operator to
    confirm, then selects/loads each store and invokes *fn_deploy* on it.
    The whole run is aborted (early return) on the first store that fails
    to load. After each successful deployment the list is re-written and
    its CSV mirror regenerated.

    Args:
        agent: orchestration agent handed to auto_globals and fn_deploy.
        org_name: organization name (not referenced here; kept for callers).
        fname: name of the JSON store-list file under "templates".
        fn_deploy: callable(agent, vlans_only) performing the deployment.
        vlans_only: forwarded to fn_deploy; when True only VLANs are touched.
    """
    store_list = Json.reader(fname, "templates")
    if not goahead_confirm("stores"):
        return
    for store in store_list:
        store_name = store.get("name", None)
        l.runlogs_logger.info("deploying network: {}".format(store_name))
        if store_name is None:
            # Use `msg`, not `str` — the original shadowed the builtin.
            msg = "fname: {} ::: store_name field was not found for store {}".format(
                fname, store)
            l.logger.error(msg)
            l.runlogs_logger.error(msg)
            gv.fake_assert()
        l.logger.info("deploying store : {}".format(store_name))
        auto_globals.select_store(store_name)
        if auto_globals.load_store(agent, store_name) is False:
            l.logger.error("failed deploying network: {}".format(store_name))
            l.runlogs_logger.error(
                "failed deploying network: {}".format(store_name))
            # NOTE(review): aborts the remaining stores on first load failure;
            # preserved as-is — confirm this is intended batch semantics.
            return
        fn_deploy(agent, vlans_only)
        l.logger.info("deployed store : {}".format(store_name))
        # Persist after every store so a later failure loses no progress,
        # and keep the CSV mirror in sync with the JSON.
        Json.writer(fname, store_list, "templates")
        Csv.transform_to_csv(fname, None, path="templates")
        l.runlogs_logger.info("deployed network: {} netid: {}".format(
            store_name, settings["netid"]))
def get_store_lists(agent):
    """Build per-group store-list JSON files for the org named in CLI settings.

    Resolves the org from settings["CLI"]["store-lists-org"], fetches its
    network list, filters to valid store names, and writes one
    "store-list-<org>-<group>" file per group plus an aggregate "ALL" list
    under "../templates".

    Args:
        agent: orchestration agent, recorded into settings.

    Returns:
        The raw network list on success, or False when the network list
        could not be fetched.
    """
    settings["orchestration-agent"] = agent
    org_name = settings["CLI"]["store-lists-org"].split("org-")[1]
    settings["org-name"] = org_name
    auto_globals.set_settings()

    l.logger.info("creating store lists for org {}".format(org_name))
    l.runlogs_logger.info("creating store lists for org {}".format(org_name))

    org_id = auto_globals.get_orgid(org_name)
    success, store_list = network_list(org_id)
    if success is False:
        return False

    stores = {"ALL": []}
    for item in store_list:
        valid, store_name, group, store_number = is_valid_store_name(item["name"])
        if not valid:
            continue
        # Build a fresh dict per store. The original mutated one shared
        # dict and deep-copied it on every append; fresh literals make the
        # deepcopy (and its import) unnecessary.
        stores["ALL"].append({"name": store_name})
        stores.setdefault(group, []).append({"name": store_name})

    for group, group_stores in stores.items():
        fname = "store-list-{}-{}".format(org_name, group)
        Json.writer(fname, data=group_stores, path="../templates")
        msg = "created {} with {} stores.".format(fname, len(group_stores))
        l.logger.info(msg)
        l.runlogs_logger.info(msg)
    return store_list
def perform_bulk_update_firewall(agent, fn_deploy, fw_rules, store_list):
    """Deploy L3 firewall rules to every store in the given store-list file.

    Normalizes the list on disk (JSON re-write + CSV mirror), confirms with
    the operator, then loads each store and applies *fw_rules* via
    *fn_deploy*. Failures on one store are logged and reported through
    gv.fake_assert(); the loop continues with the next store.

    Args:
        agent: orchestration agent.
        fn_deploy: callable(agent, fw_rules) performing the deployment.
        fw_rules: firewall rules passed through to fn_deploy.
        store_list: name of the JSON store-list file under "templates".
    """
    fname = store_list
    store_list = Json.reader(fname, "templates")
    # Re-write the list so every entry carries all fields (normalized),
    # otherwise the CSV conversion is upset.
    Json.writer(fname, store_list, "templates")
    Csv.transform_to_csv(fname, None, path="templates")

    show_store_list(store_list)
    if not goahead_confirm("l3fwrules"):
        return

    for store in store_list:
        store_name = store.get("name", None)
        if store_name is None:
            # `msg`, not `str` — the original shadowed the builtin.
            msg = "fname: {} ::: name field was not found for store {}".format(
                fname, store)
            l.runlogs_logger.error(msg)
            l.logger.error(msg)
            gv.fake_assert()
        auto_globals.select_store(store_name)
        try:
            # Explicit raise instead of `assert`: asserts vanish under -O,
            # silently skipping the load check.
            if not auto_globals.load_store(agent, store_name):
                raise RuntimeError("load_store failed for {}".format(store_name))
            msg = 'deploying l3fwrules to {}'.format(store_name)
            l.runlogs_logger.info(msg)
            fn_deploy(agent, fw_rules)
            msg = 'deployed l3fwrules to {}'.format(store_name)
            l.logger.info(msg)
            l.runlogs_logger.info(msg)
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C / SystemExit still
            # propagate; any deployment error is logged and flagged.
            msg = "failed deployment for store : {}".format(store_name)
            l.logger.error(msg)
            l.runlogs_logger.error(msg)
            gv.fake_assert()
def writer(fname, data, path="data", header=None, logPath=False):
    """Persist *data* under *fname*: emit the JSON file, then regenerate
    its CSV counterpart via transform_to_csv. A None *data* is a no-op.

    Args:
        fname: base file name (no extension).
        data: serializable payload; nothing is written when None.
        path: directory the files live under.
        header: optional CSV header forwarded to transform_to_csv.
        logPath: forwarded to Json.writer to control path logging.
    """
    if data is not None:
        Json.writer(fname, data, path=path, absolute_path=None, logPath=logPath)
        transform_to_csv(fname, path=path, header=header)
def writer(fname, data, path="data", header=None):
    """Write *data* to a JSON file and mirror it to CSV.

    Skips all work when *data* is None.

    Args:
        fname: base file name (no extension).
        data: serializable payload; nothing is written when None.
        path: directory the files live under.
        header: optional CSV header forwarded to transform_to_csv.
    """
    if data is not None:
        Json.writer(fname, data, path=path)
        transform_to_csv(fname, path=path, header=header)