Example #1
    def to_json(self, fname, path="data", absolute_path=None):
        self.item = {}
        if absolute_path:
            fname_csv = "{}/{}.csv".format(absolute_path, fname)
            fname_json = "{}/{}.json".format(absolute_path, fname)
        else:
            fname_csv = utils.get_path(fname, path, "csv")
        entries = []
        try:
            with open(fname_csv, encoding="windows-1251",
                      newline='') as csv_file:
                reader = csv.DictReader(csv_file, skipinitialspace=True)
                for entry in reader:
                    entries.append(entry)
                    # DictReader yields strings; coerce "true"/"false" to a real boolean.
                    item = entry.get("syslogEnabled")
                    if item:
                        entry["syslogEnabled"] = (item.lower() == "true")

                    # self.Schema.validate(entry)
            if absolute_path:
                Json().writer_full_path(fname_json, entries)
            else:
                Json().writer(fname, entries, path)
        except Exception as err:
            l.logger.error("fname: {} could not be read: {}".format(fname, err))
            l.runlogs_logger.error(
                "fname: {} could not be read: {}".format(fname, err))
            gv.EOM()
            gv.fake_assert()
        return entries
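The only per-row transformation above is turning the CSV's "true"/"false" strings into real booleans before the rows are written out as JSON. A minimal, self-contained sketch of that coercion (standard library only, with a hypothetical two-row CSV in place of the real file) looks like this:

import csv
import io

# Hypothetical CSV payload standing in for the file read by to_json().
raw = "comment,syslogEnabled\nallow dns,TRUE\nblock all,false\n"

entries = []
for entry in csv.DictReader(io.StringIO(raw), skipinitialspace=True):
    value = entry.get("syslogEnabled")
    if value:
        # DictReader always yields strings; normalize to a real boolean.
        entry["syslogEnabled"] = (value.lower() == "true")
    entries.append(entry)

print(entries)
# [{'comment': 'allow dns', 'syslogEnabled': True},
#  {'comment': 'block all', 'syslogEnabled': False}]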
Example #2
def perform_bulk_delete_vlans(agent,
                              org_name,
                              fname,
                              fn_deploy,
                              vlans_only=False):
    store_list = Json.reader(fname, "templates")

    if not goahead_confirm("stores"):
        return

    for store in store_list:
        store_name = store.get("name", None)
        if store_name is None:
            msg = "fname: {} ::: store_name field was not found for store {}".format(
                fname, store)
            l.logger.error(msg)
            l.runlogs_logger.error(msg)
            gv.fake_assert()
        l.runlogs_logger.info("deploying network: {}".format(store_name))
        l.logger.info("deploying store : {}".format(store_name))
        auto_globals.select_store(store_name)
        if auto_globals.load_store(agent, store_name) is False:
            l.logger.error("failed deploying network: {}".format(store_name))
            l.runlogs_logger.error(
                "failed deploying network: {}".format(store_name))
            return

        fn_deploy(agent, vlans_only)
        l.logger.info("deployed store : {}".format(store_name))
        Json.writer(fname, store_list, "templates")
        Csv.transform_to_csv(fname, None, path="templates")
        l.runlogs_logger.info("deployed network: {}  netid: {}".format(
            store_name, settings["netid"]))
Example #3
def get_store_lists(agent):
    settings["orchestration-agent"] = agent
    org_name = settings["CLI"]["store-lists-org"].split("org-")[1]
    settings["org-name"] = org_name
    auto_globals.set_settings()
    l.logger.info("creating store lists for org {}".format(org_name))
    l.runlogs_logger.info("creating store lists for org {}".format(org_name))
    org_id = auto_globals.get_orgid(org_name)
    success, store_list = network_list(org_id)
    if success is False:
        return False
    stores = {"ALL": []}
    for item in store_list:
        name = item["name"]
        valid, store_name, group, store_number = is_valid_store_name(name)
        if not valid:
            continue
        # Append a fresh dict per group so the lists stay independent.
        stores["ALL"].append({"name": store_name})
        if group not in stores:
            stores[group] = []
        stores[group].append({"name": store_name})
    for group in stores:
        fname = "store-list-{}-{}".format(org_name, group)
        Json.writer(fname, data=stores[group], path="../templates")
        l.logger.info("created {} with {} stores.".format(fname, len(stores[group])))
        l.runlogs_logger.info("created {} with {} stores.".format(fname, len(stores[group])))

    return store_list
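The grouping above leans on is_valid_store_name() to split a network name into a group and a store number. A stand-alone sketch of the same bucketing, with a hypothetical parse_store_name() helper and naming convention standing in for the real validator, could look like this:

import re
from collections import defaultdict

# Hypothetical convention: "<GROUP>-<NUMBER> <anything>", e.g. "EMEA-0042 Lyon".
def parse_store_name(name):
    match = re.match(r"^([A-Z]+)-(\d+)", name)
    if not match:
        return False, None, None, None
    return True, match.group(0), match.group(1), match.group(2)

def group_stores(store_list):
    stores = defaultdict(list)
    for item in store_list:
        valid, store_name, group, _number = parse_store_name(item["name"])
        if not valid:
            continue
        stores["ALL"].append({"name": store_name})
        stores[group].append({"name": store_name})
    return dict(stores)

print(group_stores([{"name": "EMEA-0042 Lyon"}, {"name": "lab network"}]))
# {'ALL': [{'name': 'EMEA-0042'}], 'EMEA': [{'name': 'EMEA-0042'}]}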
Example #4
def perform_bulk_get_firewall(agent, fn_get, org_name, store_list):
    fname = store_list
    store_list = Json.reader(fname, "templates")

    for store in store_list:
        store_name = store.get("name", None)
        if store_name is None:
            str = "fname: {} ::: name field was not found for store {}".format(
                fname, store)
            l.runlogs_logger.error(str)
            l.logger.error(str)
            gv.fake_assert()
        auto_globals.select_store(store_name)
        try:
            success = auto_globals.load_store(agent, store_name)
            assert success
            msg = 'getting l3fwrules for {}'.format(store_name)
            l.logger.info(msg)
            l.runlogs_logger.info(msg)
            fn_get(agent)
            msg = 'got l3fwrules for {}'.format(store_name)
            l.logger.info(msg)
            l.runlogs_logger.info(msg)
        except Exception:
            msg = "failed getting l3fwrules for store : {}".format(store_name)
            l.logger.error(msg)
            l.runlogs_logger.error(msg)
            gv.fake_assert()
Example #5
def perform_bulk_get_vpn_firewall(agent, org_list_fname, fn_get):
    l.runlogs_logger.info("downloading {}".format(agent))
    org_list = Json.reader(org_list_fname, "templates")
    Csv.transform_to_csv(org_list_fname, None, path="templates")

    org_name = org_list[0].get("org_name", None)
    if org_name is None:
        msg = "org_name field not found in {}".format(org_list_fname)
        l.logger.error(msg)
        l.runlogs_logger.error(msg)
        gv.fake_assert()
    auto_globals.load_org(agent, org_name)

    try:
        msg = "downloading s2svpnrules for org: {}".format(org_name)
        l.logger.info(msg)
        l.runlogs_logger.info(msg)
        fn_get(agent)
    except Exception:
        msg = "failed obtaining s2svpnrules for org: {}".format(org_name)
        l.logger.error(msg)
        l.runlogs_logger.error(msg)
        gv.fake_assert()
    msg = 'downloaded s2svpnrules for org : "{}"'.format(org_name)
    l.logger.info(msg)
    l.runlogs_logger.info(msg)
Example #6
def bulkremove(fname):
    from utils.low_json import Json
    items = Json.reader(fname)
    for item in items:
        netid = item["netid"]
        serial = item["serial"]
        meraki.removedevfromnet(config.api_key, netid, serial)
Example #7
def perform_bulk_update_firewall(agent, fn_deploy, fw_rules, store_list):
    fname = store_list
    store_list = Json.reader(fname, "templates")
    """Ensure that the list now has always all the three fields
    in short that it is normalized so csv conversion is not upset"""
    Json.writer(fname, store_list, "templates")
    Csv.transform_to_csv(fname, None, path="templates")

    show_store_list(store_list)

    if not goahead_confirm("l3fwrules"):
        return

    for store in store_list:
        store_name = store.get("name", None)

        if store_name is None:
            msg = "fname: {} ::: name field was not found for store {}".format(
                fname, store)
            l.runlogs_logger.error(msg)
            l.logger.error(msg)
            gv.fake_assert()
        auto_globals.select_store(store_name)
        try:
            assert auto_globals.load_store(agent, store_name)
            msg = 'deploying l3fwrules to {}'.format(store_name)
            l.logger.info(msg)
            l.runlogs_logger.info(msg)
            fn_deploy(agent, fw_rules)
            msg = 'deployed l3fwrules to {}'.format(store_name)
            l.logger.info(msg)
            l.runlogs_logger.info(msg)
        except Exception:
            msg = "failed deployment for store : {}".format(store_name)
            l.logger.error(msg)
            l.runlogs_logger.error(msg)
            gv.fake_assert()
Example #8
def perform_bulk_update_vpn_firewall(agent, fname, fn_deploy, rules=None):
    l.runlogs_logger.info("deploying {}".format(agent))
    org_list = Json.reader(fname, "templates")
    Csv.transform_to_csv(fname, None, path="templates")
    org_name = org_list[0].get("org_name", None)
    if org_name is None:
        msg = "failed deploying s2svpnrules: org_name not found in {}".format(
            fname)
        l.logger.error(msg)
        l.runlogs_logger.error(msg)
        gv.fake_assert()
    settings["org-name"] = org_name
    auto_globals.load_org(agent, org_name)
    msg = "selected org: {}".format(org_name)
    l.logger.info(msg)
    l.runlogs_logger.info(msg)

    vpn_rules = settings["CLI"]["s2svpnrules-version"]
    msg = "s2svpnrules version: {}".format(vpn_rules)
    l.logger.info(msg)
    l.runlogs_logger.info(msg)

    if not goahead_confirm("s2svpnrules"):
        return
    try:
        msg = "deploying s2svpnrules {} to org: {}".format(vpn_rules, org_name)
        l.logger.info(msg)
        l.runlogs_logger.info(msg)
        fn_deploy(agent, rules)
    except Exception:
        msg = "failed deploying s2svpnrules {} to org: {}".format(
            rules, org_name)
        l.logger.error(msg)
        l.runlogs_logger.error(msg)
        gv.fake_assert()
    msg = 'deployed s2svpnrules: "{}" to org: "{}"'.format(rules, org_name)
    l.logger.info(msg)
    l.runlogs_logger.info(msg)
Example #9
def perform_bulk_deploy_networks(agent, fn_deploy, fn_deploy_serials,
                                 store_list_file):

    store_list = Json.reader(store_list_file, "templates")
    show_store_list(store_list)
    serials_list_file = settings.get("CLI").get("networks-serials")
    if serials_list_file:
        serials_list = Json.reader(serials_list_file, "templates")
    else:
        serials_list = None
    if not goahead_confirm("stores"):
        return

    for store in store_list:
        store_name = store.get("name", None)
        if store_name is None:
            l.logger.error(
                "fname: {} ::: name field was not found for store {}".format(
                    store_list_file, store))
            l.runlogs_logger.error(
                "fname: {} ::: name field was not found for store {}".format(
                    store_list_file, store))
            gv.fake_assert()
        l.runlogs_logger.info("creating network : {}".format(store_name))
        l.logger.info("creating network : {}".format(store_name))

        auto_globals.select_store(store_name)
        assert auto_globals.load_empty_store(agent, store_name)
        if gv.use_serials:
            if serials_list is None:
                l.logger.error("failed no serials file provided")
                l.runlogs_logger.error("failed no serials file provided")
                return

        if not fn_deploy(agent):
            l.logger.error("failed to create network : {}".format(store_name))
            l.runlogs_logger.error(
                "failed to create network : {}".format(store_name))
            return

        if gv.use_serials:
            serial_count = auto_globals.load_store_serials(
                store_name, serials_list)
            for count in range(1, serial_count + 1):
                settings["serial"] = settings["serial{}".format(count)]
                settings["device-name"] = settings["device-name{}".format(
                    count)]
                l.runlogs_logger.info("adding serial {} to {}".format(
                    settings["serial"], store_name))
                l.logger.info("adding serial {} to {}".format(
                    settings["serial"], store_name))
                if not fn_deploy_serials():
                    if gv.serial_not_available_revert_clone:
                        destroy(netid=settings["netid"])
                    l.logger.error(
                        "failed adding serial {} to network : {}".format(
                            settings["serial"], store_name))
                    l.runlogs_logger.error(
                        "failed adding serial {} to network : {}".format(
                            settings["serial"], store_name))
                    return
                l.runlogs_logger.info("added serial {} to {}".format(
                    settings["serial"], store_name))
                l.logger.info("added serial {} to {}".format(
                    settings["serial"], store_name))
Example #10
    def to_csv(self, fname, header=None, path="data"):
        fname_csv = utils.get_path(fname, path, 'csv')
        obj = Json()
        json_data = obj.reader(fname, path)
        self.data_to_csv(json_data, fname_csv, header)
Example #11
    def to_json_and_validate(self, fname, input_path=None, output_path=None):
        json_data = Json.reader("valid_inputs", path="templates")
        self.schemas = json_data[0]

        self.item = {}
        fname_csv = "{}/{}.csv".format(input_path, fname)
        fname_json = "{}/{}.json".format(output_path, fname)
        entries = []

        # Find schema to use in validation
        file_type = None
        schema = None
        for entry in self.schemas:
            item = self.schemas[entry]
            fname_pattern = item["fname_pattern"]
            if fname.startswith(fname_pattern):
                file_type = entry
                schema = item["json_schema"]
                break

        if schema is None:
            print("No valid schema match\n"
                  "check file name follows correct pattern: {}\n"
                  "store-list-* / org-* / l3fwrules_template_ / s2svpnrules_".
                  format(fname))
            gv.EOM()
            gv.fake_assert()

        with open(fname_csv, encoding="windows-1251", newline='') as csv_file:
            entries = csv.DictReader(csv_file, skipinitialspace=True)
            line_count = 0
            for entry in entries:
                line_count += 1
                try:
                    validate(entry, schema)
                    # Validate Fields are the same and same order
                    schema_keys = list(schema["properties"].keys())
                    item_keys = list(entry.keys())
                    result = [
                        i for i, j in zip(schema_keys, item_keys) if i != j
                    ]
                    if len(result) != 0:
                        print("line #:{} - mismatch schema keys: {}".format(
                            line_count, schema_keys))
                        print("line #:{} - mismatch item   keys: {}".format(
                            line_count, item_keys))
                        gv.EOM()
                        gv.fake_assert()
                except Exception:
                    print("invalid schema at line {}: {}".format(
                        line_count, entry))
                    gv.EOM()
                    gv.fake_assert()

        is_firewall = (file_type == "l3fwrules" or file_type == "s2svpnrules")
        is_networks_serials = (file_type == "networks-serials")

        json_data = []

        line_count = 0
        with open(fname_csv, encoding="windows-1251", newline='') as csv_file:
            entries = csv.DictReader(csv_file, skipinitialspace=True)
            if is_networks_serials:
                from utils.auto_utils import is_valid_store_name
                line_count = 0
                for entry in entries:
                    line_count += 1
                    store_name = entry["Network name"]
                    success, _, _, _ = is_valid_store_name(store_name)
                    if success is False:
                        print("invalid Network name {} line number :{}".format(
                            store_name, line_count))
                        gv.EOM()
                        gv.fake_assert()
                    serial = entry["Serial"]
                    success = is_valid_serial_number(serial)
                    if success is False:
                        print(
                            "invalid serial number {} line number :{}".format(
                                serial, line_count))
                        gv.EOM()
                        gv.fake_assert()

                    json_data.append(entry)
            elif is_firewall is False:
                for entry in entries:
                    json_data.append(entry)
            else:
                for entry in entries:
                    line_count += 1
                    # Firewall rules: coerce the syslogEnabled string to a boolean
                    item = entry.get("syslogEnabled")
                    if item:
                        if item.lower() == "false":
                            entry["syslogEnabled"] = False
                        elif item.lower() == "true":
                            entry["syslogEnabled"] = True

                    # Eliminates \n chars from string fields
                    for field in [
                            "srcCidr", "destCidr", "comment", "srcPort",
                            "destPort"
                    ]:
                        entry[field] = entry[field].replace("\n", "")

                    # Normalize "Any" to lowercase "any" in the protocol/CIDR/port/comment fields
                    for field in [
                            "protocol", "srcCidr", "destCidr", "comment",
                            "srcPort", "destPort"
                    ]:
                        entry[field] = entry[field].replace("Any", "any")

                    # validate src and dest ports
                    self.validate_port(entry, "srcPort", line_count)
                    self.validate_port(entry, "destPort", line_count)

                    # For l3fwrules only ensure VLAN Fields are valid
                    if file_type == "l3fwrules":
                        self.validate_vlan_and_ip(entry, "srcCidr", line_count)
                        self.validate_vlan_and_ip(entry, "destCidr",
                                                  line_count)
                    else:
                        self.validate_ip_range(entry, "srcCidr", line_count)
                        self.validate_ip_range(entry, "destCidr", line_count)

                    item = entry.get("comment")
                    if item == "Default rule":
                        continue

                    json_data.append(entry)

            Json().writer_full_path(fname_json, json_data)

        return json_data
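The first validation pass above pairs a schema check (the validate() call, presumably jsonschema's) with a column-order comparison against the schema's "properties". A minimal self-contained sketch of that idea, assuming the jsonschema package and a hypothetical two-column schema, could look like this:

import csv
import io
from jsonschema import validate, ValidationError

# Hypothetical schema standing in for one entry of valid_inputs.json.
schema = {
    "type": "object",
    "properties": {
        "name": {"type": "string"},
        "netid": {"type": "string"},
    },
    "required": ["name", "netid"],
}

raw = "name,netid\nstore-42,N_123\n"
for line_count, entry in enumerate(csv.DictReader(io.StringIO(raw)), start=1):
    try:
        validate(entry, schema)
        # Columns must match the schema's properties, in the same order.
        mismatches = [(i, j) for i, j in zip(schema["properties"], entry) if i != j]
        if mismatches:
            print("line #:{} - mismatched keys: {}".format(line_count, mismatches))
    except ValidationError as err:
        print("invalid schema at line {}: {}".format(line_count, err.message))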
Example #12
def reader(fname, configDir="data"):
    """Read a JSON file via the Json helper."""
    return Json.reader(fname, configDir)
Example #13
def make_pretty(my_json):
    return Json.make_pretty(my_json)
Example #14
def writer(fname, data, path="data", header=None, logPath=False):
    if data is None:
        return
    Json.writer(fname, data, path=path, absolute_path=None, logPath=logPath)
    transform_to_csv(fname, path=path, header=header)
Example #15
def writer(fname, data, path="data", header=None):
    if data is None:
        return
    Json.writer(fname, data, path=path)
    transform_to_csv(fname, path=path, header=header)
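Both writer() wrappers above write the JSON file first and then mirror it to CSV. A stand-alone sketch of that two-step write, using only the standard library in place of the project's Json and transform_to_csv helpers, might look like this:

import csv
import json

def write_json_and_csv(fname, data, path="data", header=None):
    """Write data as <path>/<fname>.json, then mirror it to <path>/<fname>.csv."""
    if data is None:
        return
    with open("{}/{}.json".format(path, fname), "w") as json_file:
        json.dump(data, json_file, indent=4)
    # Assumes data is a non-empty list of flat dicts, as in the store lists above.
    fieldnames = header or list(data[0].keys())
    with open("{}/{}.csv".format(path, fname), "w", newline="") as csv_file:
        writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(data)

# write_json_and_csv("store-list-demo", [{"name": "store-42", "netid": "N_123"}])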