Example #1
 async def startup(self):
     """
     Startup a chord node. Here, the Event Loop from async.io is already set up.
     """
     # Startup a chord node.
     debug_log("ChordNode.startup", self.chord_addr)
     await self.router.startup(self.another_node)
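
A minimal driver sketch for the example above, assuming a hypothetical ChordNode constructor (the real signature is not shown here):

import asyncio

async def main():
    # Hypothetical arguments; substitute the node's own address and a known peer.
    node = ChordNode(chord_addr=5000, another_node=None)
    await node.startup()

asyncio.run(main())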
Example #2
 async def set(self, key, value):
     """
     Set the key to value in the cluster.
     """
     assert self.is_leader()
     debug_log("RaftPersistentKeyValueStore", self.addr, key, value)
     await self.raft_node.set_value(key, value)
Example #3
    async def copy_key_values_to(self, to_node: int, interval: Interval,
                                 dict_to_transfer: Dict[str, str]):
        """
        Copy key_values in the `interval` from persistent_storage to `to_node`
        """
        keys_to_transfer = dict_to_transfer.keys()
        msg_to_transfer = self.message_sender.create_message(
            MessageType.TRANSFER_KEYS, to_node, {
                'interval': interval.to_string(),
                'data_dict': dict_to_transfer,
                'copy_node': self.chord_addr,
                'receiving_node': to_node
            })
        received_msg = await self.send_message_and_await_response(
            msg_to_transfer, MessageType.TRANSFER_KEYS_CONFIRMED)

        # The misspelled message keys ('keys_transfered') are kept as-is,
        # since they are part of the wire format.
        keys_transferred, interval_transferred = received_msg.get_args(
            ['keys_transfered', 'interval'])
        assert set(keys_transferred) == set(keys_to_transfer)
        trace_log(colorama.Fore.MAGENTA + "KeyTransferer", self.chord_addr,
                  ": Copy successfully completed from", self.chord_addr, "to",
                  to_node, "of interval", interval.to_string())
        if debug_log_key_transfers:
            debug_log("KeyTransferer", self.chord_addr,
                      ": Copy successfully completed from", self.chord_addr,
                      "to", to_node, "of interval", interval.to_string())
Example #4
def replace_rule_field_uids_by_name(rule, general_objects):
    # Guard against the rare case where this method is called on the same rule more than once.
    if "position" in rule:
        return
    debug_log("Updating data for rule #" + str(rule["rule-number"]))
    rule["position"] = rule["rule-number"]
    rule.pop("rule-number")
    replace_data(rule, general_objects)
Example #5
def remove_row_by_pk(table, pks):
    where_clause = utils.generate_where_from_pk(pks)

    conn = db.connect()
    # Note: identifiers cannot be bound as parameters, so `table` and the
    # generated WHERE clause must come from trusted input.
    query = f'DELETE FROM {table} WHERE {where_clause};'
    utils.debug_log(query)
    conn.execute(query)
    conn.close()
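
Since SQL identifiers cannot be bound as parameters, a hedged hardening sketch could validate the table name against an allow-list and bind only the values, assuming pks is a {column: value} dict and db.connect() returns a SQLAlchemy connection:

from sqlalchemy import text

ALLOWED_TABLES = {'matchHistory', 'champions', 'summoners'}  # hypothetical allow-list

def remove_row_by_pk_safe(table, pks):
    if table not in ALLOWED_TABLES:
        raise ValueError(f'unknown table: {table}')
    # Bind each primary-key value as a named parameter.
    where = ' AND '.join(f'{col} = :{col}' for col in pks)
    conn = db.connect()
    conn.execute(text(f'DELETE FROM {table} WHERE {where};'), pks)
    conn.close()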
Example #6
 async def unregister_client(self, client_obj: tuple):
     # client_obj is a tuple whose second element is the user object.
     self.__connected.remove(client_obj)
     user = client_obj[1]
     disconnect_message = {
         'type': 'user_disconnected',
         'info': user.serialize()
     }
     debug_log(disconnect_message)
     await self.send_to_all_clients(disconnect_message)
Example #7
 async def message_handler(self, message: str, user: 'User'):
     message_obj = Message.create(user, message)
     serialized_message = message_obj.serialize()
     message_to_send = {
         'type': 'receive_message',
         'message': serialized_message
     }
     debug_log(message_to_send)
     await self.send_to_all_clients(message_to_send)
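
send_to_all_clients itself is not shown in these examples; one plausible shape (an assumption), given that client_obj[1] is the user above, so each entry in the connected set would be a (connection, user) pair:

import json

 async def send_to_all_clients(self, message: dict):
     payload = json.dumps(message)
     # Iterate over a copy so disconnects during sending don't mutate the set.
     for connection, _user in list(self.__connected):
         await connection.send(payload)  # websockets-style send; an assumption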
Example #8
def get_group_objects(data_dict, api_type, group, client, unexportable_objects):
    group_object_reply = client.api_call("show-" + api_type, {"uid": group["uid"], "details-level": "full"})
    if not group_object_reply.success:
        debug_log("Failed to retrieve group named '" +
                  group["name"] + "'! Error: " + str(group_object_reply.error_message) +
                  ". Group was not exported!", True, True)
        return []

    group_object = group_object_reply.data

    if api_type == "group-with-exclusion":
        include_group_object = None
        exclude_group_object = None
        if "include" in group_object:
            if group_object["include"]["type"] != "CpmiAnyObject":
                include_group_object = get_group_objects(data_dict, group_object["include"]["type"],
                                                         group_object["include"], client, unexportable_objects)
            group_object["include"] = group_object["include"]["name"]
        if "except" in group_object:
            if group_object["except"]["type"] != "CpmiAnyObject":
                exclude_group_object = get_group_objects(data_dict, group_object["except"]["type"],
                                                         group_object["except"], client, unexportable_objects)
            group_object["except"] = group_object["except"]["name"]
        return_list = [group_object]
        if include_group_object:
            return_list.extend(include_group_object)
        if exclude_group_object:
            return_list.extend(exclude_group_object)
        return return_list

    member_objects = []
    for container in group_objects_field[api_type]:
        member_objects.extend(group_object[container])

    object_dictionary, group_unexportable_objects, exportable_types = \
        get_objects(member_objects, client.api_version)

    for member_object in member_objects:
        if should_export(member_object):
            check_for_export_error(member_object, client)

    merge_data(unexportable_objects, group_unexportable_objects)

    for unexportable_object in unexportable_objects:
        for container in group_objects_field[api_type]:
            for member in group_object[container]:
                if unexportable_object["uid"] == member["uid"]:
                    member["name"] = unexportable_object["name"]
                    break

    # This loop variable shadows the api_type parameter, which is not used again.
    for api_type in exportable_types:
        debug_log("Exporting " + singular_to_plural_dictionary[client.api_version][api_type] +
                  " from group [" + group["name"] + "]", True)
        export_general_objects(data_dict, api_type, object_dictionary[api_type], unexportable_objects, client)

    return [group_object]
Example #9
def create():
    """ recieves post requests to add new task """
    data = request.get_json()
    utils.debug_log(str(data))

    try:
        db_helper.create_row(data)
        result = {'success': True, 'response': 'Done'}
    except Exception as e:
        utils.debug_log(str(e))
        result = {'success': False, 'response': 'Something went wrong'}

    return jsonify(result)
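
These handlers read like Flask view functions; a hedged wiring sketch (the route path is hypothetical, and the real app may use decorators instead):

from flask import Flask

app = Flask(__name__)
app.add_url_rule('/create', view_func=create, methods=['POST'])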
Example #10
def delete(keys):
    """ recieved post requests for entry delete """
    utils.debug_log(str(keys))
    split = keys.split('|')
    table = utils.hyphen_to_camel(split[0])
    try:
        db_helper.remove_row_by_pk(table, split[1])
        result = {'success': True, 'response': 'Removed row'}
    except Exception as e:
        utils.debug_log(str(e))
        result = {'success': False, 'response': 'Something went wrong'}

    return jsonify(result)
Example #11
def update():
    """ recieved post requests for entry updates """
    utils.debug_log('here')
    data = request.get_json()
    utils.debug_log(str(data))

    try:
        db_helper.update_row(data)
        result = {'success': True, 'response': 'Status Updated'}
    except Exception as e:
        utils.debug_log(str(e))
        result = {'success': False, 'response': 'Something went wrong'}

    return jsonify(result)
Example #12
def adv_query_match_history():
    utils.debug_log("here2")
    conn = db.connect()
    # Correlated subquery: look up each account's summoner name inline.
    result = conn.execute(
        'SELECT (SELECT Name FROM summoners s WHERE s.AccountId = m.AccountId) AS Name, '
        'COUNT(DISTINCT Champion) AS Num_Champions '
        'FROM matchHistory m GROUP BY AccountId LIMIT 100;'
    )
    # Fetch the rows before closing the connection.
    result = result.fetchall()
    conn.close()

    keys = ['Name', 'Num_Champions']
    items = [dict(zip(keys, row)) for row in result]
    for i in items:
        utils.debug_log(str(i))
    return keys, items
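
The correlated subquery above can equivalently be written as a join, which many engines plan more efficiently (assuming AccountId uniquely identifies a summoners row):

QUERY = (
    'SELECT s.Name, COUNT(DISTINCT m.Champion) AS Num_Champions '
    'FROM matchHistory m JOIN summoners s ON s.AccountId = m.AccountId '
    'GROUP BY m.AccountId LIMIT 100;'
)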
Example #13
def create_row(data):
    data = utils.fix_nesting(data)
    data = json.loads(data)

    table = data['table']
    table = utils.hyphen_to_camel(table)
    fields, keys, vals = utils.generate_fields(data)
    utils.debug_log(str(fields))

    conn = db.connect()
    query = f'INSERT INTO {table} ({keys}) VALUES ({vals})'
    utils.debug_log(query)
    conn.execute(query)
    conn.close()
Example #14
def format_objects(objects):
    formatted_objects = []

    for obj in objects:
        api_type = obj["type"]
        if api_type in special_treatment_types:
            handle_fields(obj)
        flat_json = flatten_json(obj)

        string = u"Exporting {0} with uid {1} named {2}" if "name" in obj else u"Exporting {0} with uid {1}"
        message = string.format(api_type, obj["uid"], obj.get("name", "")).encode("utf-8")
        debug_log(message)

        formatted_objects.append(flat_json)

    return formatted_objects
Example #15
def replace_exception_data(exception, general_objects, layer=None,
                           rule_number=None, group=None, position_in_group=None):
    if "position" in exception:
        return
    position = position_in_group if not layer else exception["exception-number"]
    debug_log("Updating data for rule #" + str(position))
    exception["position"] = position
    if not layer:
        exception["exception-group-name"] = group
        if "rule-number" in exception:
            exception.pop("rule-number")
    elif "exception-group-name" not in exception:
        exception["rule-number"] = rule_number
    if "exception-number" in exception:
        exception.pop("exception-number")
    replace_data(exception, general_objects)
Example #16
def check_duplicate_layer(payload, changed_layer_names, api_type, client):
    layer_name = payload["name"]
    new_layer_name = payload["name"]

    i = 0
    while True:
        show_layer = client.api_call("show-" + api_type, payload={"name": new_layer_name})

        if "code" in show_layer.data and "not_found" in show_layer.data["code"]:
            if layer_name != new_layer_name:
                debug_log("A layer named \"%s\" already exists. Name was changed to \"%s\""
                          % (layer_name, new_layer_name))
                changed_layer_names[layer_name] = new_layer_name
                payload["name"] = new_layer_name
            break

        new_layer_name = "IMPORTED LAYER" + (" " if i == 0 else " %s " % i) + layer_name
        i += 1
Example #17
 async def replicate_our_data_to(self, nodes):
     for new_successor in nodes:
         if new_successor is None:
             continue
         # With no predecessor, replicate everything from our successor around
         # to us; otherwise only the range we are responsible for.
         start = (self.router.successor if self.router.predecessor is None
                  else self.router.predecessor)
         debug_log('SuccessorList.Stabilize', self.addr,
                   ':new successor so transferring keys', new_successor)
         await self.key_transferer.copy_to(
             new_successor, Interval(start, False, self.addr, True))
Example #18
    async def stabilize(self):
        trace_log("SuccRouter.stabilize: addr ", self.addr, " Stabilizing...")
        if self.successor == self.addr:
            # We were the first node to start; nothing to stabilize yet.
            return

        # Find our successor's predecessor.
        # (Older message-based implementation kept for reference:)
        # request_predecessor = self.message_sender.create_message(MessageType.FIND_PREDECESSOR, node_id)
        # predecessor_msg = await self.send_message_and_await(request_predecessor,
        #     lambda msg: msg.has_type(MessageType.FIND_PREDECESSOR_REPLY) and msg.src == self.successor)
        # assert ('predecessor' in predecessor_msg.args)

        # N-node network:
        # 1. Find the predecessor of our successor and wait for the response.
        successor_predecessor = await self.get_predecessor(self.successor)
        trace_log("SuccRouter.stabilize: addr ", self.addr,
                  "found predecessor ", successor_predecessor)

        # UPDATE NODES
        # 1. If our successor has no predecessor, we may be its predecessor.
        if successor_predecessor is None:
            self.notify_i_might_be_your_predecessor(self.successor)
            return

        # 2. If node < successor's predecessor < successor, then our
        #    successor's predecessor is most likely adjacent to us on the
        #    ring, so it becomes our new successor.
        if in_interval(self.addr, self.successor, successor_predecessor):
            self.successor = successor_predecessor

        # 3. Otherwise, if our successor's predecessor is not us, notify our
        #    successor that we might be its predecessor.
        if successor_predecessor != self.addr:
            self.notify_i_might_be_your_predecessor(self.successor)

        debug_log("SuccRouter.stabilize Complete: addr: ", self.addr,
                  "predecessor: ", self.predecessor, " successor: ",
                  self.successor)
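
in_interval is assumed to test ring membership with wrap-around, i.e. whether x lies strictly between start and end on the identifier circle; one plausible definition (an assumption, not necessarily the project's actual helper):

def in_interval(start, end, x):
    if start < end:
        return start < x < end
    # The interval wraps past the top of the identifier space.
    return x > start or x < end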
Example #19
def export_nat_rulebase(package, client):
    data_dict = {}

    rulebase_rules, general_objects = get_query_nat_rulebase_data(
        client, {"package": package})

    object_dictionary, unexportable_objects, exportable_types = get_objects(
        general_objects, client.api_version)

    debug_log("Processing rules and sections", True)

    for rule in rulebase_rules:
        replace_rule_field_uids_by_name(rule, general_objects)

    cleanse_object_dictionary(object_dictionary)

    for api_type in exportable_types:
        debug_log(
            "Exporting " +
            singular_to_plural_dictionary[client.api_version][api_type], True)
        export_general_objects(data_dict, api_type,
                               object_dictionary[api_type],
                               unexportable_objects, client)

    debug_log("Exporting NAT rules", True)

    format_and_merge_data(data_dict, rulebase_rules)

    debug_log(
        "Exporting placeholders for unexportable objects from NAT rulebase",
        True)

    format_and_merge_unexportable_objects(data_dict, unexportable_objects)

    debug_log("Done exporting NAT rulebase.\n", True)

    clean_objects(data_dict)

    return data_dict, unexportable_objects
Example #20
def search(data):
    table = data['table']
    table = utils.hyphen_to_camel(table)
    keyword = data['keyword']

    keys = data['keys']
    searches = utils.generate_searches(keys, keyword)

    conn = db.connect()
    query = f'SELECT * FROM {table} WHERE {searches};'
    utils.debug_log(query)
    result = conn.execute(query)

    pk = []
    if table == 'matchHistory':
        pk = match_history_pks
    elif table == 'champions':
        pk = champions_pks

    # Consume the result before closing the connection.
    k, i = utils.result_to_dict(result, pk)
    conn.close()

    return k, i
Example #21
    async def handle_i_might_be_your_predecessor(self, msg: Message):
        assert ('addr' in msg.args)
        candidate_predecessor = msg.args['addr']

        # If we are the first node.
        if (self.successor == self.addr):
            self.successor = candidate_predecessor
            trace_log("SuccRouter addr:", self.addr, "Setting new successor ",
                      candidate_predecessor)
            # transfer the stuff from (self.addr, candidate_predecessor] because we are no longer responsible for that
            #   half of addr space
            # debug_log(self.addr, "Transfer because self.successor == self.addr, pred", self.predecessor, 'suc', self.successor)
            # await self.key_transferer.copy_to(candidate_predecessor, Interval(self.addr, False, candidate_predecessor, True))

        # We know there are at least two nodes in the system.
        if self.predecessor is None:
            # From our point of view, we used to be responsible for (self.sucessor, self.addr]
            # Now, candidate_predecessor is coming in and taking (self.successor, candidate_predecessor] from us
            # So we transfer (self.successor, candidate_predecessor] to them.
            if self.successor != candidate_predecessor:
                debug_log(self.addr,
                          "Transfer because self.predecessor == None, pred",
                          self.predecessor, 'suc', self.successor)
                await self.key_transferer.copy_to(
                    candidate_predecessor,
                    Interval(self.successor, False, candidate_predecessor,
                             True))
            else:
                # There is only one other node, so transfer it the other side.
                debug_log(self.addr,
                          "Transfer because self.predecessor == None, pred",
                          self.predecessor, 'suc', self.successor)
                await self.key_transferer.copy_to(
                    candidate_predecessor,
                    Interval(self.addr, False, candidate_predecessor, True))

            # Normally we wouldn't need this, but it fixes an edge case
            # involving starting up with one node and two concurrent joins.

            self.predecessor = candidate_predecessor
            trace_log("SuccRouter addr:", self.addr,
                      "No current predecessor. Setting predecessor ",
                      candidate_predecessor)
        elif in_interval(self.predecessor, self.addr, candidate_predecessor):
            # From our point of view, we are responsible for (self.predecessor, self.addr]
            # However, now we are only responsible for (candidate_predecessor, self.addr]
            # So we transfer out the difference: (self.predecessor, candidate_predecessor]
            debug_log(
                self.addr,
                "Transfer because candidate_predecessor \in (self.predecessor, self.addr), pred",
                self.predecessor, 'suc', self.successor)
            await self.key_transferer.copy_to(
                candidate_predecessor,
                Interval(self.predecessor, False, candidate_predecessor, True))
            self.predecessor = candidate_predecessor
            trace_log("SuccRouter addr:", self.addr,
                      "Setting new predecessor ", candidate_predecessor)
Example #22
def format_objects(objects):
    formatted_objects = []

    for obj in objects:
        api_type = obj["type"]
        if api_type in special_treatment_types:
            handle_fields(obj)
        flat_json = flatten_json(obj)

        # Special handling for data-center-object types - prepare the data for the import!
        if "data-center-object" in api_type:
            if "data-center.name" in flat_json.keys():
                flat_json["data-center-name"] = flat_json["data-center.name"]

        string = u"Exporting {0} with uid {1} named {2}" if "name" in obj else u"Exporting {0} with uid {1}"
        message = string.format(api_type, obj["uid"], obj.get("name", "")).encode("utf-8")
        debug_log(message)

        formatted_objects.append(flat_json)

    return formatted_objects
Example #23
def search():
    data = request.get_json()
    utils.debug_log(str(data))
    data = utils.fix_nesting(data)
    data = json.loads(data)
    
    try:
        keys, items = db_helper.search(data)
        global search_results
        search_results = (data['table'], keys, items)
        utils.debug_log(str(search_results))
        result = {'success': True, 'response': 'Done'}
    except Exception as e:
        utils.debug_log(str(e))
        result = {'success': False, 'response': 'Something went wrong'}

    return jsonify(result)
Example #24
def update_row(data):
    data = utils.fix_nesting(data)
    data = json.loads(data)

    pks = data['pk']
    where_clause = utils.generate_where_from_pk(pks)
    utils.debug_log(where_clause)

    table = data['table']
    table = utils.hyphen_to_camel(table)
    fields, k, v = utils.generate_fields(data)
    utils.debug_log(str(fields))

    conn = db.connect()
    query = f'UPDATE {table} SET {fields} WHERE {where_clause};'
    utils.debug_log(query)
    conn.execute(query)
    conn.close()
Example #25
def export_threat_exception_rulebase(package, layer, threat_rule,
                                     exception_groups, client):
    data_dict = {}

    debug_log(
        "Exporting Exception-Rulebase from Threat-Rule #" +
        str(threat_rule["position"]) + " in Threat-Layer[" + layer + "]", True)

    layer_settings, rulebase_sections, rulebase_rules, general_objects = \
        get_query_rulebase_data(client, "threat-rule-exception-rulebase",
                                {"name": layer, "package": package, "rule-uid": threat_rule["uid"]})

    if not layer_settings:
        return None, None

    object_dictionary, unexportable_objects, exportable_types = \
        get_objects(general_objects, client.api_version)

    to_position = None

    debug_log("Processing exceptions", True)

    for rulebase_object in rulebase_sections + rulebase_rules:
        if "exception" in rulebase_object["type"]:
            replace_exception_data(rulebase_object,
                                   general_objects,
                                   layer=layer,
                                   rule_number=threat_rule["position"])
        elif "section" in rulebase_object["type"]:
            position_in_group = 1
            for rule in rulebase_object["rulebase"]:
                replace_exception_data(rule,
                                       general_objects,
                                       group=rulebase_object["name"],
                                       position_in_group=position_in_group)
                position_in_group += 1
            if rulebase_object["name"] == "Global Exceptions":
                continue
            show_group_reply = client.api_call(
                "show-exception-group",
                payload={"name": rulebase_object["name"]})
            if rulebase_object["from"]:
                group_position = rulebase_object["from"]
            else:
                group_position = to_position if to_position else "top"
            to_position = rulebase_object["to"] if rulebase_object[
                "to"] else to_position
            if rulebase_object["name"] not in [
                    x["name"] for x in exception_groups
            ]:
                show_group_reply.data["positions"] = []
                if show_group_reply.data[
                        "apply-on"] == "manually-select-threat-rules":
                    show_group_reply.data["applied-threat-rules"] = []
                exception_groups.append(show_group_reply.data)
            group_index = next(index
                               for (index, d) in enumerate(exception_groups)
                               if d['name'] == show_group_reply.data['name'])
            exception_groups[group_index]["positions"].append(group_position)
            if exception_groups[group_index][
                    "apply-on"] == "manually-select-threat-rules":
                exception_groups[group_index]["applied-threat-rules"].append({
                    "layer":
                    layer,
                    "rule-number":
                    str(threat_rule["position"])
                })

    cleanse_object_dictionary(object_dictionary)

    for api_type in exportable_types:
        debug_log(
            "Exporting " +
            singular_to_plural_dictionary[client.api_version][api_type] +
            " from layer [" + layer + "]", True)
        export_general_objects(data_dict, api_type,
                               object_dictionary[api_type],
                               unexportable_objects, client)

    debug_log("Exporting threat exceptions from layer [" + layer + "]", True)

    format_and_merge_data(data_dict, rulebase_rules)

    debug_log(
        "Exporting placeholders for unexportable objects from layer [" +
        layer + "]", True)

    format_and_merge_unexportable_objects(data_dict, unexportable_objects)

    debug_log("Exporting layer settings of layer [" + layer + "]", True)

    format_and_merge_data(data_dict, [layer_settings])

    debug_log("Done exporting layer '" + layer + "'.\n", True)

    clean_objects(data_dict)

    return data_dict, unexportable_objects
Example #26
def export_access_rulebase(package, layer, layer_uid, client, timestamp,
                           tar_file):
    data_dict = {}

    debug_log("Exporting Access Layer [" + layer + "]", True)

    layer_settings, rulebase_sections, rulebase_rules, general_objects = \
        get_query_rulebase_data(client, "access-rulebase", {"name": layer, "uid": layer_uid, "package": package})

    if not layer_settings:
        return None, None

    object_dictionary, unexportable_objects, exportable_types = \
        get_objects(general_objects, client.api_version)

    to_position = None

    debug_log("Processing rules and sections", True)

    for rulebase_item in rulebase_sections + rulebase_rules:
        if "rule" in rulebase_item["type"]:
            replace_rule_field_uids_by_name(rulebase_item, general_objects)
        elif "section" in rulebase_item["type"]:
            if "from" in rulebase_item:
                rulebase_item["position"] = rulebase_item["from"]
            else:
                rulebase_item[
                    "position"] = to_position if to_position else "top"
            to_position = rulebase_item[
                "to"] if "to" in rulebase_item else to_position

    cleanse_object_dictionary(object_dictionary)

    if "access-layer" in object_dictionary:
        for access_layer in object_dictionary["access-layer"]:
            debug_log("Exporting Inline-Layer [" + access_layer["name"] + "]",
                      True)
            inner_data_dict, inner_unexportable_objects = \
                export_access_rulebase(package, access_layer["name"], access_layer["uid"], client, timestamp, tar_file)
            layer_tar_name = \
                create_tar_file(access_layer, inner_data_dict,
                                timestamp, ["access-rule", "access-section"], client.api_version)
            inner_data_dict.pop("access-rule", None)
            inner_data_dict.pop("access-section", None)
            merge_data(data_dict, inner_data_dict)
            merge_data(unexportable_objects, inner_unexportable_objects)
            tar_file.add(layer_tar_name)
            os.remove(layer_tar_name)

    for api_type in exportable_types:
        debug_log(
            "Exporting " +
            singular_to_plural_dictionary[client.api_version][api_type] +
            " from layer [" + layer + "]", True)
        export_general_objects(data_dict, api_type,
                               object_dictionary[api_type],
                               unexportable_objects, client)

    debug_log("Exporting access rules from layer [" + layer + "]", True)

    format_and_merge_data(data_dict, rulebase_rules)

    debug_log("Exporting access sections from layer [" + layer + "]", True)

    for rulebase_section in rulebase_sections:
        debug_log("rulebase_sections contains: " +
                  (rulebase_section["name"] if "name" in
                   rulebase_section else "no-name section"))
    format_and_merge_data(data_dict, rulebase_sections)

    debug_log(
        "Exporting placeholders for unexportable objects from layer [" +
        layer + "]", True)

    format_and_merge_unexportable_objects(data_dict, unexportable_objects)

    debug_log("Exporting layer settings of layer [" + layer + "]", True)

    format_and_merge_data(data_dict, [layer_settings])

    debug_log("Done exporting layer '" + layer + "'.\n", True)

    clean_objects(data_dict)

    return data_dict, unexportable_objects
Example #27
def get_query_nat_rulebase_data(client, payload):
    rulebase_items = []
    rulebase_rules = []
    general_objects = []
    seen_object_uids = []
    before_auto_rules = True

    debug_log("Getting information from show-nat-rulebase", True)

    rulebase_replies = client.gen_api_query("show-nat-rulebase",
                                            details_level="full",
                                            container_keys=["rulebase"],
                                            payload=payload)

    for rulebase_reply in rulebase_replies:
        if not rulebase_reply.success:
            debug_log(
                "Failed to retrieve NAT rulebase! Error: " +
                str(rulebase_reply.error_message) +
                ". NAT rulebase was not exported!", True, True)
            return None, None
        rulebase_data = rulebase_reply.data
        if "total" not in rulebase_data or rulebase_data["total"] == 0:
            break
        percentage_complete = int(
            (float(rulebase_data["to"]) / float(rulebase_data["total"])) * 100)
        debug_log(
            "Retrieved " + str(rulebase_data["to"]) + " out of " +
            str(rulebase_data["total"]) + " rules (" +
            str(percentage_complete) + "%)", True)

        non_empty_rulebase_items = []
        for rulebase_item in rulebase_data["rulebase"]:
            if "nat-section" in rulebase_item["type"]:
                # Skip system auto generated section
                if "Automatic Generated Rules : " in rulebase_item["name"]:
                    before_auto_rules = False
                    continue
                # Skip empty section (no rules inside...)
                if "from" not in rulebase_item:
                    continue
            rulebase_item["__before_auto_rules"] = before_auto_rules
            non_empty_rulebase_items.append(rulebase_item)
            if ("to" in rulebase_item
                    and rulebase_item["to"] == rulebase_data["to"]):
                break

        # If the first new item continues the last section we already have,
        # merge their rulebases instead of appending a duplicate section.
        if (non_empty_rulebase_items and rulebase_items
                and non_empty_rulebase_items[0]["uid"] == rulebase_items[-1]["uid"]):
            rulebase_items[-1]["rulebase"].extend(non_empty_rulebase_items[0]["rulebase"])
            rulebase_items[-1]["to"] = non_empty_rulebase_items[0]["to"]
            non_empty_rulebase_items = non_empty_rulebase_items[1:]
        rulebase_items.extend(non_empty_rulebase_items)

        new_objects = [
            x for x in rulebase_data["objects-dictionary"]
            if x["uid"] not in seen_object_uids
        ]
        seen_object_uids.extend([x["uid"] for x in new_objects])
        general_objects.extend(new_objects)

    for general_object in general_objects:
        string = (
            u"##Show presented object of type {0} " +
            (u"with name {1}" if "name" in general_object else u"with no name")
        ).format(general_object["type"],
                 general_object["name"] if "name" in general_object else "")
        debug_log(string)
        if should_export(general_object):
            check_for_export_error(general_object, client)

    debug_log("Analysing rulebase items...")
    for rulebase_item in rulebase_items:
        if "nat-rule" in rulebase_item["type"]:
            string = (u"##Show presented independent rule of type {0}").format(
                rulebase_item["type"])
            debug_log(string)
            rulebase_item.pop("auto-generated", None)
            rulebase_rules.append(rulebase_item)
        elif "nat-section" in rulebase_item["type"]:
            # !!! Attention: exporting only NAT rules, without sections !!!
            for rule in rulebase_item["rulebase"]:
                string = (
                    u"##Show presented dependent rule of type {0} under section {1}"
                ).format(
                    rule["type"], rulebase_item["name"]
                    if "name" in rulebase_item else "???")
                debug_log(string)
                rule.pop("auto-generated", None)
                rule["__before_auto_rules"] = rulebase_item[
                    "__before_auto_rules"]
                rulebase_rules.append(rule)

            string = (u"##Show presented section of type {0} " +
                      (u"with name {1}" if "name" in rulebase_item else
                       u"with no name")).format(
                           rulebase_item["type"], rulebase_item["name"]
                           if "name" in rulebase_item else "")
            debug_log(string)
        else:
            debug_log("Unsupported NAT rulebase object type - '" +
                      rulebase_item["type"] + "'. Continue...",
                      print_to_error_log=True)

    return rulebase_rules, general_objects
Example #28
def import_objects(file_name,
                   client,
                   changed_layer_names,
                   layer=None,
                   args=None):
    global position_decrements_for_sections

    export_tar = tarfile.open(file_name, "r:gz")
    export_tar.extractall()
    tar_files = export_tar.getmembers()

    general_object_files = [
        general_object_file for general_object_file in tar_files
        if os.path.splitext(general_object_file.name)[1] == ".csv"
        or os.path.splitext(general_object_file.name)[1] == ".json"
    ]

    rulebase_object_files = [
        general_object_file for general_object_file in tar_files
        if os.path.splitext(general_object_file.name)[1] == ".gz"
    ]

    general_object_files.sort(compare_general_object_files)

    layers_to_attach = {"access": [], "threat": []}

    if not general_object_files:
        debug_log("Nothing to import...", True)

    version_file_name = [f for f in tar_files if f.name == "version.txt"][0]
    with open(version_file_name.name, 'rb') as version_file:
        version = version_file.readline()
        api_versions = client.api_call("show-api-versions")
        if not api_versions.success:
            debug_log(
                "Error getting versions! Aborting import. " +
                str(api_versions), True, True)
            sys.exit(1)
        if version in api_versions.data["supported-versions"]:
            client.api_version = version
        else:
            debug_log(
                "The version of the imported package doesn't exist on this machine! "
                "Importing with this machine's latest version.", True, True)

    for general_object_file in general_object_files:
        _, file_extension = os.path.splitext(general_object_file.name)
        if file_extension != ".csv":
            os.remove(general_object_file.name)
            continue
        api_call = general_object_file.name.split('__')[2]
        counter = 1
        position_decrement_due_to_rules = 0
        position_decrement_due_to_sections = 0
        generic_type = None
        data = []
        if "generic" in api_call:
            generic_type = api_call.split("-")[3]
            api_call = "-".join(api_call.split("-")[0:3])
        api_type = generic_type if generic_type else '-'.join(
            api_call.split('-')[1:])
        if api_type == "access-rule":
            position_decrements_for_sections = []
        debug_log(
            "Adding " + (singular_to_plural_dictionary[
                client.api_version][api_type].replace('_', ' ') if api_type
                         in singular_to_plural_dictionary[client.api_version]
                         else "generic objects of type " + api_type), True)

        with open(general_object_file.name, 'rb') as csv_file:
            reader = csv.reader(csv_file)
            num_objects = len(list(reader)) - 1
            csv_file.seek(0)

            fields = next(reader)

            for line in reader:
                line = [unicode(item, 'utf-8') for item in line]
                data.append(line)

        os.remove(general_object_file.name)

        for line in data:
            counter, position_decrement_due_to_rules = add_object(
                line, counter, position_decrement_due_to_rules,
                position_decrement_due_to_sections, fields, api_type,
                generic_type, layer, layers_to_attach, changed_layer_names,
                api_call, num_objects, client, args)

    for rulebase_object_file in rulebase_object_files:
        layer_type = rulebase_object_file.name.split("__")[1]
        layer_name = '__'.join(rulebase_object_file.name.split('__')[2:-1])
        if layer_name in changed_layer_names:
            layer_name = changed_layer_names[layer_name]
        debug_log(
            "Importing " + layer_type.split('_')[0].capitalize() + "_" +
            layer_type.split('_')[1].capitalize() + " [" + layer_name + "]",
            True)
        import_objects(rulebase_object_file.name, client, changed_layer_names,
                       layer_name, args)
        os.remove(rulebase_object_file.name)

    return layers_to_attach
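
Note that extractall() trusts the archive contents; the tarfile documentation warns that member names can escape the target directory. A hedged guard sketch:

import os
import tarfile

def safe_extractall(tar, path='.'):
    base = os.path.realpath(path)
    for member in tar.getmembers():
        # Reject members whose resolved path falls outside the target directory.
        target = os.path.realpath(os.path.join(path, member.name))
        if not (target == base or target.startswith(base + os.sep)):
            raise ValueError('unsafe path in archive: ' + member.name)
    tar.extractall(path)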
Example #29
def add_object(line, counter, position_decrement_due_to_rule,
               position_decrement_due_to_section, fields, api_type,
               generic_type, layer, layers_to_attach, changed_layer_names,
               api_call, num_objects, client, args):
    global duplicates_dict
    global position_decrements_for_sections
    global missing_parameter_set
    global should_create_imported_nat_top_section
    global should_create_imported_nat_bottom_section
    global imported_nat_top_section_uid

    if "access-rule" in api_type:
        position_decrements_for_sections.append(position_decrement_due_to_rule)

    payload, _ = create_payload(fields, line, 0, api_type, client.api_version)

    # for objects that had collisions, use new name in the imported package
    for field in ["members", "source", "destination"]:
        if field in payload:
            for i, member in enumerate(payload[field]):
                if member in name_collision_map:
                    payload[field][i] = name_collision_map[member]

    # Useful e.g. when creating two hosts with the same IP.
    payload["ignore-warnings"] = True

    if "nat-rule" in api_type:
        # For NAT rules, the 'layer' parameter is the name of the policy package!!!
        payload["package"] = layer
        # --- NAT rules specific logic ---
        # Importing only rules, without sections.
        # Rules marked as "__before_auto_rules = TRUE" will be imported at the TOP of the rulebase, inside a new section "IMPORTED UPPER RULES".
        # There is an additional new section "Original Upper Rules" at the bottom of "IMPORTED UPPER RULES".
        # Rules marked as "__before_auto_rules = FALSE" will be imported at the BOTTOM of the rulebase, inside a new section "IMPORTED LOWER RULES".
        # There will be no rule merges!!!
        before_auto_rules = payload["__before_auto_rules"]
        payload.pop("__before_auto_rules", None)
        if "true" in before_auto_rules:
            if should_create_imported_nat_top_section:
                should_create_imported_nat_top_section = False
                nat_section_payload = {}
                nat_section_payload["package"] = layer
                nat_section_payload["position"] = "top"
                # --> we add the footer section first!!!
                nat_section_payload["name"] = "Original Upper Rules"
                client.api_call("add-nat-section", nat_section_payload)
                # <--
                nat_section_payload["name"] = "IMPORTED UPPER RULES"
                nat_section_reply = client.api_call("add-nat-section",
                                                    nat_section_payload)
                if nat_section_reply.success:
                    imported_nat_top_section_uid = nat_section_reply.data[
                        "uid"]
            if imported_nat_top_section_uid is None:
                payload["position"] = "bottom"
            else:
                sub_payload = {}
                sub_payload["bottom"] = imported_nat_top_section_uid
                payload["position"] = sub_payload
        else:
            if should_create_imported_nat_bottom_section:
                should_create_imported_nat_bottom_section = False
                nat_section_payload = {}
                nat_section_payload["package"] = layer
                nat_section_payload["position"] = "bottom"
                nat_section_payload["name"] = "IMPORTED LOWER RULES"
                client.api_call("add-nat-section", nat_section_payload)
            payload["position"] = "bottom"
    else:
        if "position" in payload:
            if "rule" in api_type:
                payload["position"] = str(
                    int(payload["position"]) - position_decrement_due_to_rule)
                if payload["action"] == "Drop":
                    if "action-settings" in payload:
                        payload.pop("action-settings")
                    if "user-check" in payload:
                        if "frequency" in payload["user-check"]:
                            payload["user-check"].pop("frequency")
                        if "custom-frequency" in payload["user-check"]:
                            payload["user-check"].pop("custom-frequency")
                        if "confirm" in payload["user-check"]:
                            payload["user-check"].pop("confirm")
            if "section" in api_type:
                section_position_decrement = (
                    position_decrements_for_sections[int(payload["position"]) -
                                                     1]
                    if len(position_decrements_for_sections) > 0 else
                    0) + position_decrement_due_to_section
                payload["position"] = str(
                    int(payload["position"]) - section_position_decrement)
        if generic_type:
            payload["create"] = generic_type
        if "layer" in api_type:
            check_duplicate_layer(payload, changed_layer_names, api_type,
                                  client)
            if compare_versions(client.api_version, "1.1") != -1:
                payload["add-default-rule"] = "false"
            if layer is None:
                if "access-layer" in api_type:
                    #---> This code segment distinguishes between an inline layer and an ordered layer during import
                    is_ordered_access_control_layer = payload[
                        "__ordered_access_control_layer"]
                    payload.pop("__ordered_access_control_layer", None)
                    if "true" in is_ordered_access_control_layer:
                        layers_to_attach["access"].append(
                            payload["name"])  # ordered access layer
                    #<--- end of code segment
                else:
                    layers_to_attach["threat"].append(payload["name"])
        elif "rule" in api_type or "section" in api_type or \
                (api_type == "threat-exception" and "exception-group-name" not in payload):
            payload["layer"] = layer
            if client.api_version != "1" and api_type == "access-rule" and "track-alert" in payload:
                payload["track"] = {}
                payload["track"]["alert"] = payload["track-alert"]
                payload.pop("track-alert", None)
        elif api_type == "exception-group" and "applied-threat-rules" in payload:
            for applied_rule in payload["applied-threat-rules"]:
                if applied_rule["layer"] in changed_layer_names.keys():
                    applied_rule["layer"] = changed_layer_names[
                        applied_rule["layer"]]

    api_reply = client.api_call(api_call, payload)

    if not api_reply.success and "name" in payload and "More than one object" in api_reply.error_message:
        i = 0
        original_name = payload["name"]
        while not api_reply.success:
            payload["name"] = "NAME_COLLISION_RESOLVED" + (
                "_" if i == 0 else "_%s_" % i) + original_name
            api_reply = client.api_call(api_call, payload)
            i += 1

            if i > 100:
                payload["name"] = original_name
                break

        if api_reply.success:
            debug_log(
                "Object \"%s\" was renamed to \"%s\" to resolve the name collision"
                % (original_name, payload["name"]), True, True)
            name_collision_map[original_name] = payload["name"]

    if not api_reply.success:
        if api_reply.data and "errors" in api_reply.data:
            error_msg = api_reply.data["errors"][0]["message"]
        elif api_reply.data and "warnings" in api_reply.data:
            error_msg = api_reply.data["warnings"][0]["message"]
        else:
            error_msg = api_reply.error_message
        log_err_msg = "Failed to import {0}{1}. Error: {2}".format(
            api_type, " with name [" + payload["name"] +
            "]" if "name" in payload else "", error_msg)

        if "More than one object" in api_reply.error_message:
            log_err_msg = api_reply.error_message + ". Cannot import this object"

        if "rule" in api_type and ("Requested object"
                                   in api_reply.error_message
                                   and "not found" in api_reply.error_message):
            field_value = api_reply.error_message.split("[")[1].split("]")[0]
            indices_of_field = [
                i for i, x in enumerate(line) if x == field_value
            ]
            field_keys = [
                x for x in fields if fields.index(x) in indices_of_field
            ]
            for field_key in field_keys:
                if field_key.split(".")[0] in generic_objects_for_rule_fields:
                    missing_obj_data = generic_objects_for_rule_fields[
                        field_key.split(".")[0]]
                    missing_type = missing_obj_data[0]
                    mandatory_field = missing_obj_data[1] if len(
                        missing_obj_data) > 1 else None
                    add_missing_command = "add-" + missing_type
                    new_name = "import_error_due_to_missing_fields_" + field_value.replace(
                        " ", "_")
                    add_succeeded = True
                    if new_name not in missing_parameter_set:
                        missing_parameter_set.add(new_name)
                        add_missing_payload = {"name": new_name}
                        if mandatory_field == "port":
                            add_missing_payload["port"] = "8080"
                        elif mandatory_field == "ip-address":
                            add_missing_payload[
                                "ip-address"] = generate_new_dummy_ip_address(
                                )
                        add_missing_reply = client.api_call(
                            add_missing_command, add_missing_payload)
                        if not add_missing_reply.success:
                            log_err_msg += "\nAlso failed to generate placeholder object: {0}".format(
                                add_missing_reply.error_message)
                            add_succeeded = False
                    if add_succeeded:
                        line[fields.index(field_key)] = new_name
                        return add_object(line, counter,
                                          position_decrement_due_to_rule,
                                          position_decrement_due_to_section,
                                          fields, api_type, generic_type,
                                          layer, layers_to_attach,
                                          changed_layer_names, api_call,
                                          num_objects, client, args)
        if "Invalid parameter for [position]" in api_reply.error_message:
            if "access-rule" in api_type:
                position_decrement_due_to_rule += adjust_position_decrement(
                    int(payload["position"]), api_reply.error_message)
            elif "access-section" in api_type:
                position_decrement_due_to_section += adjust_position_decrement(
                    int(payload["position"]), api_reply.error_message)
            return add_object(line, counter, position_decrement_due_to_rule,
                              position_decrement_due_to_section, fields,
                              api_type, generic_type, layer, layers_to_attach,
                              changed_layer_names, api_call, num_objects,
                              client, args)
        elif "is not unique" in api_reply.error_message and "name" in api_reply.error_message:
            field_value = api_reply.error_message.partition("name")[2].split(
                "[")[1].split("]")[0]
            debug_log(
                "Not unique name problem \"%s\" - changing payload to use UID instead."
                % field_value, True, True)
            obj_uid_found_and_used = False
            if field_value not in duplicates_dict:
                show_objects_reply = client.api_query(
                    "show-objects",
                    payload={"in": ["name", "\"" + field_value + "\""]})
                if show_objects_reply.success:
                    for obj in show_objects_reply.data:
                        if obj["name"] == field_value:
                            duplicates_dict[field_value] = obj["uid"]
                            obj_uid_found_and_used = True
            if obj_uid_found_and_used:
                indices_of_field = [
                    i for i, x in enumerate(line) if x == field_value
                ]
                field_keys = [
                    x for x in fields if fields.index(x) in indices_of_field
                ]
                for field_key in field_keys:
                    line[fields.index(
                        field_key)] = duplicates_dict[field_value]
                return add_object(line, counter,
                                  position_decrement_due_to_rule,
                                  position_decrement_due_to_section, fields,
                                  api_type, generic_type, layer,
                                  layers_to_attach, changed_layer_names,
                                  api_call, num_objects, client, args)
            else:
                debug_log(
                    "Not unique name problem \"%s\" - cannot change payload to use UID instead of name."
                    % field_value, True, True)
        elif "will place the exception in an Exception-Group" in api_reply.error_message:
            return add_object(line, counter,
                              position_decrement_due_to_rule - 1,
                              position_decrement_due_to_section, fields,
                              api_type, generic_type, layer, layers_to_attach,
                              changed_layer_names, api_call, num_objects,
                              client, args)

        position_decrement_due_to_rule += 1

        debug_log(log_err_msg, True, True)
        if args is not None and args.strict:
            discard_reply = client.api_call("discard")
            if not discard_reply.success:
                debug_log(
                    "Failed to discard changes! Terminating. Error: " +
                    discard_reply.error_message, True, True)
            exit(1)
    else:
        imported_name = payload["name"] if "name" in payload else ""
        debug_log("Imported {0}{1}".format(
            api_type, " with name [" + imported_name.encode("utf-8") + "]"))
        if counter % 20 == 0 or counter == num_objects:
            percentage = int(float(counter) / float(num_objects) * 100)
            debug_log(
                "Imported {0} out of {1} {2} ({3}%)".format(
                    counter, num_objects,
                    singular_to_plural_dictionary[client.api_version][api_type]
                    if api_type
                    in singular_to_plural_dictionary[client.api_version] else
                    "generic objects", percentage), True)
            if counter % 100 == 0 or counter == num_objects:
                publish_reply = client.api_call("publish", wait_for_task=True)
                if not publish_reply.success:
                    plural = singular_to_plural_dictionary[client.api_version][api_type].replace('_', ' ') \
                        if api_type in singular_to_plural_dictionary[client.api_version] \
                        else "generic objects of type " + api_type
                    try:
                        debug_log(
                            "Failed to publish import of " + plural +
                            " from tar file #" + str((counter / 100) + 1) +
                            "! " + plural.capitalize() +
                            " from said file were not imported!. Error: " +
                            str(publish_reply.error_message), True, True)
                    except UnicodeEncodeError:
                        try:
                            debug_log(
                                "UnicodeEncodeError: " +
                                str(publish_reply.error_message), True, True)
                        except:
                            debug_log(
                                "UnicodeEncodeError: .encode('utf-8') FAILED",
                                True, True)

                    discard_reply = client.api_call("discard")
                    if not discard_reply.success:
                        debug_log(
                            "Failed to discard changes of unsuccessful publish! Terminating. Error: "
                            + discard_reply.error_message, True, True)
                        exit(1)

    return counter + 1, position_decrement_due_to_rule
Example #30
def export_package(client, args):
    timestamp = time.strftime("%Y_%m_%d_%H_%M")
    debug_log("Checking existence of package [" + args.name + "]")
    show_package = client.api_call("show-package", {"name": args.name, "details-level": "full"})
    if not show_package.success:
        debug_log("No package named '" + args.name + "' found. Cannot export.", True, True)
        sys.exit(1)

    tar_file_name = args.output_file if args.output_file else "exported__package__" + args.name + "__" + timestamp
    tar_file = tarfile.open(tar_file_name + ".tar.gz", "w:gz")

    access = args.access
    threat = args.threat
    if args.all:
        access = True
        threat = True

    data_dict = {}
    unexportable_objects = {}

    if access:
        if show_package.data["access"]:
            debug_log("Exporting Access Control layers", True)
            for access_layer in show_package.data["access-layers"]:
                access_data_dict, access_unexportable_objects \
                    = export_access_rulebase(show_package.data["name"], access_layer["name"], client, timestamp, tar_file)
                if not access_data_dict:
                    continue
                #---> This code segment distinguishes between an inline layer and an ordered layer during export
                access_layers = access_data_dict.get("access-layer")
                if access_layers is not None:
                    for layer in access_layers:
                        layer["__ordered_access_control_layer"] = (layer["name"] == access_layer["name"])
                #<--- end of code segment
                layer_tar_name = \
                    create_tar_file(access_layer, access_data_dict,
                                    timestamp, ["access-rule", "access-section"], client.api_version)
                merge_data(data_dict, access_data_dict)
                merge_data(unexportable_objects, access_unexportable_objects)
                tar_file.add(layer_tar_name)
                os.remove(layer_tar_name)

        # NAT policy should be exported as a part of Access policy
        if show_package.data["nat-policy"]:
            debug_log("Exporting NAT policy", True)
            nat_data_dict, nat_unexportable_objects = export_nat_rulebase(show_package.data["name"], client)
            if nat_data_dict:
                nat_tar_name = "exported__nat_layer__" + show_package.data["name"] + "__" + timestamp + ".tar.gz"
                with tarfile.open(nat_tar_name, "w:gz") as tar:
                    export_to_tar(nat_data_dict, timestamp, tar, ["nat-rule", "nat-section"], client.api_version)
                merge_data(data_dict, nat_data_dict)
                merge_data(unexportable_objects, nat_unexportable_objects)
                tar_file.add(nat_tar_name)
                os.remove(nat_tar_name)

    if threat:
        if show_package.data["threat-prevention"]:
            debug_log("Exporting Threat-Prevention layers", True)
            for threat_layer in show_package.data["threat-layers"]:
                threat_data_dict, threat_unexportable_objects \
                    = export_threat_rulebase(show_package.data["name"], threat_layer["name"], client)
                if not threat_data_dict:
                    continue
                layer_tar_name = \
                    create_tar_file(threat_layer, threat_data_dict,
                                    timestamp, ["threat-rule", "exception-group", "threat-exception"],
                                    client.api_version)
                merge_data(data_dict, threat_data_dict)
                merge_data(unexportable_objects, threat_unexportable_objects)
                tar_file.add(layer_tar_name)
                os.remove(layer_tar_name)

    for obj_type in data_dict:
        if obj_type not in singular_to_plural_dictionary[client.api_version]:
            singular_to_plural_dictionary[client.api_version][obj_type] = "generic-object"

    debug_log("Exporting general objects to TAR...")
    export_to_tar(data_dict, timestamp, tar_file, singular_to_plural_dictionary[client.api_version].keys(),
                  client.api_version,
                  ignore_list=["rule", "section", "threat-exception", "exception-group"])

    generate_export_error_report()

    tar_file.close()