Example no. 1
async def update_schema_v_1_2_5_0() -> bool:
    column_definitions = [('dailynotifyid', 'TEXT', False, False),
                          ('dailynotifytype', 'TEXT', False, False)]

    schema_version = await get_schema_version()
    if schema_version:
        compare_1250 = utils.compare_versions(schema_version, '1.2.5.0')
        compare_1240 = utils.compare_versions(schema_version, '1.2.4.0')
        if compare_1250 <= 0:
            return True
        elif compare_1240 > 0:
            return False

    print(
        f'[update_schema_v_1_2_5_0] Updating database schema from v1.2.4.0 to v1.2.5.0'
    )

    query_lines = []
    for (column_name, column_type, column_is_primary,
         column_not_null) in column_definitions:
        column_definition = utils.database.get_column_definition(
            column_name,
            column_type,
            is_primary=column_is_primary,
            not_null=column_not_null)
        query_lines.append(
            f'ALTER TABLE serversettings ADD COLUMN IF NOT EXISTS {column_definition};'
        )

    query = '\n'.join(query_lines)
    success = await try_execute(query)
    if success:
        success = await try_set_schema_version('1.2.5.0')
    return success
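
Note: each codebase in this collection ships its own compare_versions, and the snippets do not agree on one sign convention. A minimal three-way comparator sketch, assuming plain dotted-integer version strings; the convention below matches the schema-update examples, where a positive result means the first argument is the older version:

def compare_versions(version_a: str, version_b: str) -> int:
    parts_a = [int(part) for part in version_a.split('.')]
    parts_b = [int(part) for part in version_b.split('.')]
    # Pad with zeroes so '1.2' and '1.2.0' compare as equal.
    length = max(len(parts_a), len(parts_b))
    parts_a += [0] * (length - len(parts_a))
    parts_b += [0] * (length - len(parts_b))
    if parts_a == parts_b:
        return 0
    # Positive: version_a is older; negative: version_a is newer.
    return 1 if parts_a < parts_b else -1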
Example no. 2
async def update_schema_v_1_3_1_0() -> bool:
    schema_version = await get_schema_version()
    if schema_version:
        compare_1300 = utils.compare_versions(schema_version, '1.3.1.0')
        compare_1290 = utils.compare_versions(schema_version, '1.3.0.0')
        if compare_1300 <= 0:
            return True
        elif compare_1290 > 0:
            return False

    print(
        f'[update_schema_v_1_3_1_0] Updating database schema from v1.3.0.0 to v1.3.1.0'
    )

    query_add_column = f'ALTER TABLE serversettings ADD COLUMN useembeds BOOL;'
    success_add_column = await try_execute(query_add_column)

    if not success_add_column:
        print(
            f'[update_schema_v_1_3_1_0] ERROR: Failed to add column \'useembeds\' to table \'serversettings\'!'
        )
        return False

    success = await try_set_schema_version('1.3.1.0')
    return success
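
Note: try_execute itself is not shown in these snippets. A plausible sketch, assuming an asyncpg connection pool named POOL (an assumption; the raise_db_error flag appears in example no. 20):

import asyncpg

async def try_execute(query: str, raise_db_error: bool = False) -> bool:
    """Run a query, swallowing database errors unless asked to re-raise."""
    try:
        async with POOL.acquire() as connection:  # POOL is an assumption
            await connection.execute(query)
        return True
    except asyncpg.PostgresError as error:
        if raise_db_error:
            raise
        print(f'[try_execute] Query failed: {error}')
        return False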
Example no. 3
async def update_schema_v_1_2_9_0() -> bool:
    schema_version = await get_schema_version()
    if schema_version:
        compare_1290 = utils.compare_versions(schema_version, '1.2.9.0')
        compare_1280 = utils.compare_versions(schema_version, '1.2.8.0')
        if compare_1290 <= 0:
            return True
        elif compare_1280 > 0:
            return False

    print(
        f'[update_schema_v_1_2_9_0] Updating database schema from v1.2.8.0 to v1.2.9.0'
    )

    query_add_column = f'ALTER TABLE serversettings ADD COLUMN dailychangemode INT;'
    success_add_column = await try_execute(query_add_column)
    if not success_add_column:
        print(
            f'[update_schema_v_1_2_9_0] ERROR: Failed to add column \'dailychangemode\' to table \'serversettings\'!'
        )
        return False

    query_lines_move_data = [
        f'UPDATE serversettings SET dailychangemode = 1 WHERE dailydeleteonchange IS NULL;'
    ]
    query_lines_move_data.append(
        f'UPDATE serversettings SET dailychangemode = 2 WHERE dailydeleteonchange = {utils.database.convert_boolean(True)};'
    )
    query_lines_move_data.append(
        f'UPDATE serversettings SET dailychangemode = 3 WHERE dailydeleteonchange = {utils.database.convert_boolean(False)};'
    )
    query_move_data = '\n'.join(query_lines_move_data)
    success_move_data = await try_execute(query_move_data)
    if not success_move_data:
        print(
            f'[update_schema_v_1_2_9_0] ERROR: Failed to convert and copy data from column \'dailydeleteonchange\' into column \'dailychangemode\'!'
        )
        return False

    query_drop_column = f'ALTER TABLE serversettings DROP COLUMN IF EXISTS dailydeleteonchange;'
    success_drop_column = await try_execute(query_drop_column)
    if not success_drop_column:
        print(
            f'[update_schema_v_1_2_9_0] ERROR: Failed to drop column \'dailydeleteonchange\'!'
        )
        return False

    success = await try_set_schema_version('1.2.9.0')
    return success
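
Note: utils.database.convert_boolean only needs to render a Python bool as a SQL literal here; a one-line sketch (an assumption, not the source implementation):

def convert_boolean(value: bool) -> str:
    # Render a Python bool as a PostgreSQL literal, e.g. True -> 'TRUE'.
    return 'TRUE' if value else 'FALSE'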
Example no. 4
async def update_schema_v_1_2_4_0() -> bool:
    column_definitions = [('dailydeleteonchange', 'BOOLEAN', False, False,
                           None)]

    schema_version = await get_schema_version()
    if schema_version:
        compare_1240 = utils.compare_versions(schema_version, '1.2.4.0')
        compare_1220 = utils.compare_versions(schema_version, '1.2.2.0')
        if compare_1240 <= 0:
            return True
        elif compare_1220 > 0:
            return False

    print(
        f'[update_schema_v_1_2_4_0] Updating database schema from v1.2.2.0 to v1.2.4.0'
    )

    query_lines = []
    for (column_name, column_type, column_is_primary, column_not_null,
         column_default) in column_definitions:
        column_definition = utils.database.get_column_definition(
            column_name,
            column_type,
            is_primary=column_is_primary,
            not_null=column_not_null,
            default=column_default)
        query_lines.append(
            f'ALTER TABLE serversettings ADD COLUMN IF NOT EXISTS {column_definition}'
        )

    query = '\n'.join(query_lines)
    success = await try_execute(query)
    if success:
        utc_now = utils.get_utc_now()
        daily_info = await daily.get_daily_info()

        __settings = {
            daily.__get_daily_info_setting_name(key): (value, utc_now)
            for key, value in daily_info.items()
        }
        success = await set_settings(__settings)
        if success:
            await daily.__update_db_daily_info_cache()

        # success = await daily.db_set_daily_info(daily_info, utc_now)
        # (that function has since been updated; the old methods above must be used instead)
        if success:
            success = await try_set_schema_version('1.2.4.0')
    return success
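
Note: utils.database.get_column_definition is not shown either; a sketch inferred from its call sites (the parameter order matches the positional call in example no. 20, the defaults are assumptions):

def get_column_definition(column_name: str, column_type: str,
                          is_primary: bool = False, not_null: bool = False,
                          default=None) -> str:
    # Build a column clause such as 'guildid BIGINT PRIMARY KEY NOT NULL'.
    parts = [column_name, column_type]
    if is_primary:
        parts.append('PRIMARY KEY')
    if not_null:
        parts.append('NOT NULL')
    if default is not None:
        parts.append(f'DEFAULT {default}')
    return ' '.join(parts)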
Example no. 5
def homepage():
    pkg_info = {}
    status_order = {
        '🤔🤔🤔': 1,
        '🤔🤔': 2,
        '🤔': 3,
        '✓': 4,
        '🎉': 5
    }
    for channel in CHANNELS:
        res = REDIS_CONN.hgetall(channel)
        res = {
            k.decode(): (v.decode().split('#')[0], v.decode().split('#')[1])
            for k, v in res.items()
        }
        pkg_info[channel] = []
        for k, v in res.items():
            pkg_info[channel].append({
                'pkg_name': k,
                'pkg_status': compare_versions(v[0], v[1]),
                'pkg_ver': v[0],
                'pip_ver': v[1]
            })
        pkg_info[channel].sort(key=lambda x: status_order[x['pkg_status']])

    return render_template("index.html", pkg_info=pkg_info)
Example no. 6
def channeldiff(ch1, ch2):
    pkg_info = []

    if not all(REDIS_CONN.exists("{}|repodata".format(x)) for x in [ch1, ch2]):
        return ":'("

    pkg1_dict = {
        k.decode(): v.decode()
        for k, v in REDIS_CONN.hgetall("{}|repodata".format(ch1)).items()
    }
    pkg2_dict = {
        k.decode(): v.decode()
        for k, v in REDIS_CONN.hgetall("{}|repodata".format(ch2)).items()
    }

    common_pkgs = list(set(pkg1_dict.keys()).intersection(pkg2_dict.keys()))

    for pkg in sorted(common_pkgs):
        pkg_info.append({
            'pkg_name': pkg,
            'pkg_status': compare_versions(pkg1_dict[pkg], pkg2_dict[pkg]),
            'ch1_ver': pkg1_dict[pkg],
            'ch2_ver': pkg2_dict[pkg]
        })

    pkg_info.sort(key=lambda x: status_order[x['pkg_status']])

    return render_template("channeldiff.html",
                           pkg_info=pkg_info,
                           ch1=ch1,
                           ch2=ch2)
Example no. 7
def archlinux():
    pkg_info = {}
    for channel in CHANNELS:
        res = REDIS_CONN.hgetall("{}|{}".format(channel, 'archlinux'))
        if len(res) == 0:
            continue
        split_this = lambda x: tuple(x.decode().split('#'))
        res = {split_this(k): split_this(v) for k, v in res.items()}
        pkg_info[channel] = []
        for k, v in res.items():
            pkg_info[channel].append({
                'conda_pkg_name': k[0],
                'archlinux_pkg_name': k[1],
                'conda_pkg_status': compare_versions(v[0], v[1]),
                'conda_pkg_ver': v[0],
                'archlinux_pkg_ver': v[1]
            })
        pkg_info[channel].sort(
            key=lambda x: status_order[x['conda_pkg_status']])

    return render_template("archlinux.html", pkg_info=pkg_info)
Example no. 8
def has_dependencies_installed():
    try:
        import z3
        import z3.z3util
        z3_version = z3.get_version_string()
        tested_z3_version = '4.5.1'
        if compare_versions(z3_version, tested_z3_version) > 0:
            logging.warning(
                "You are using an untested version of z3. %s is the officially tested version"
                % tested_z3_version)
    except ImportError:
        logging.critical(
            "Z3 is not available. Please install z3 from https://github.com/Z3Prover/z3."
        )
        return False

    if not cmd_exists("evm"):
        logging.critical(
            "Please install evm from go-ethereum and make sure it is in the path."
        )
        return False
    else:
        cmd = "evm --version"
        out = run_command(cmd).strip()
        evm_version = re.findall(r"evm version (\d+\.\d+\.\d+)", out)[0]
        tested_evm_version = '1.7.3'
        if compare_versions(evm_version, tested_evm_version) > 0:
            logging.warning(
                "You are using evm version %s. The supported version is %s" %
                (evm_version, tested_evm_version))

    if not cmd_exists("solc"):
        logging.critical(
            "solc is missing. Please install the solidity compiler and make sure solc is in the path."
        )
        return False
    else:
        cmd = "solc --version"
        out = run_command(cmd).strip()
        solc_version = re.findall(r"Version: (\d+\.\d+\.\d+)", out)[0]
        tested_solc_version = '0.4.19'
        if compare_versions(solc_version, tested_solc_version) > 0:
            logging.warning(
                "You are using solc version %s, The latest supported version is %s"
                % (solc_version, tested_solc_version))

    return True
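
Note: cmd_exists and run_command are imported from elsewhere; plausible standard-library sketches (assumptions, not the source):

import shutil
import subprocess

def cmd_exists(command: str) -> bool:
    # True if the executable is found on PATH.
    return shutil.which(command) is not None

def run_command(cmd: str) -> str:
    # Capture the stdout of a command as text.
    return subprocess.check_output(cmd.split(), text=True)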
Example no. 9
async def update_schema_v_1_3_0_0() -> bool:
    schema_version = await get_schema_version()
    if schema_version:
        compare_1300 = utils.compare_versions(schema_version, '1.3.0.0')
        compare_1290 = utils.compare_versions(schema_version, '1.2.9.0')
        if compare_1300 <= 0:
            return True
        elif compare_1290 > 0:
            return False

    print(
        f'[update_schema_v_1_3_0_0] Updating database schema from v1.2.9.0 to v1.3.0.0'
    )

    query_add_column = f'ALTER TABLE serversettings ADD COLUMN botnewschannelid BIGINT;'
    success_add_column = await try_execute(query_add_column)
    if not success_add_column:
        print(
            f'[update_schema_v_1_3_0_0] ERROR: Failed to add column \'botnewschannelid\' to table \'serversettings\'!'
        )
        return False

    column_definitions_sales = [
        ('id', 'SERIAL', True, True),
        ('limitedcatalogargument', 'INT', False, False),
        ('limitedcatalogtype', 'TEXT', False, False),
        ('limitedcatalogcurrencytype', 'TEXT', False, False),
        ('limitedcatalogcurrencyamount', 'INT', False, False),
        ('limitedcatalogmaxtotal', 'INT', False, False),
        ('limitedcatalogexpirydate', 'TIMESTAMPTZ', False, False)
    ]
    success_create_table = await try_create_table('sales',
                                                  column_definitions_sales)
    if not success_create_table:
        print(
            f'[update_schema_v_1_3_0_0] ERROR: Failed to add table \'sales\'!')
        return False

    success = await try_set_schema_version('1.3.0.0')
    return success
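
Note: try_create_table receives a table name plus 4-tuples like the ones above; a sketch composed from the same assumed helpers (get_column_definition and try_execute from the earlier sketches):

async def try_create_table(table_name: str, column_definitions: list) -> bool:
    columns = ', '.join(
        utils.database.get_column_definition(name, column_type,
                                             is_primary=is_primary,
                                             not_null=not_null)
        for name, column_type, is_primary, not_null in column_definitions)
    query = f'CREATE TABLE IF NOT EXISTS {table_name} ({columns});'
    return await try_execute(query)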
Example no. 10
async def db_update_schema_v_1_2_7_0() -> bool:
    column_definitions_devices = [('key', 'TEXT', True, True),
                                  ('checksum', 'TEXT', False, False),
                                  ('loginuntil', 'TIMESTAMPTZ', False, False)]

    schema_version = await get_schema_version()
    if schema_version:
        compare_1270 = utils.compare_versions(schema_version, '1.2.7.0')
        compare_1260 = utils.compare_versions(schema_version, '1.2.6.0')
        if compare_1270 <= 0:
            return True
        elif compare_1260 > 0:
            return False

    print(
        f'[db_update_schema_v_1_2_7_0] Updating database schema from v1.2.6.0 to v1.2.7.0'
    )

    success = await try_create_table('devices', column_definitions_devices)
    if success:
        success = await try_set_schema_version('1.2.7.0')
    return success
Example no. 11
async def update_schema_v_1_2_8_0() -> bool:
    column_definitions_serversettings = [
        ('guildid', 'BIGINT', True, True),
        ('dailychannelid', 'BIGINT', False, False),
        ('dailylatestmessageid', 'BIGINT', False, False),
        ('dailynotifyid', 'BIGINT', False, False),
        ('dailynotifytype', 'INT', False, False)
    ]

    schema_version = await get_schema_version()
    if schema_version:
        compare_1280 = utils.compare_versions(schema_version, '1.2.8.0')
        compare_1270 = utils.compare_versions(schema_version, '1.2.7.0')
        if compare_1280 <= 0:
            return True
        elif compare_1270 > 0:
            return False

    print(
        f'[update_schema_v_1_2_8_0] Updating database schema from v1.2.7.0 to v1.2.8.0'
    )

    query_lines = ['ALTER TABLE serversettings']
    for column_name, new_column_type, _, _ in column_definitions_serversettings:
        if new_column_type in USING_LOOKUP:
            using = f' USING {column_name}::{USING_LOOKUP[new_column_type]}'
        else:
            using = ''
        query_lines.append(
            f'ALTER COLUMN {column_name} SET DATA TYPE {new_column_type}{using},'
        )
    query_lines[-1] = query_lines[-1][:-1] + ';'  # swap only the trailing comma for a semicolon

    query = '\n'.join(query_lines)
    success = await try_execute(query)
    if success:
        success = await try_set_schema_version('1.2.8.0')
    return success
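
Note: USING_LOOKUP is referenced but never shown. A minimal assumed shape that yields clauses such as 'USING guildid::bigint' for the TEXT-to-integer conversions this migration performs:

USING_LOOKUP = {
    'BIGINT': 'bigint',
    'INT': 'int',
}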
Example no. 12
async def create_schema() -> bool:
    column_definitions_settings = [('settingname', 'TEXT', True, True),
                                   ('modifydate', 'TIMESTAMPTZ', False, True),
                                   ('settingboolean', 'BOOLEAN', False, False),
                                   ('settingfloat', 'FLOAT', False, False),
                                   ('settingint', 'INT', False, False),
                                   ('settingtext', 'TEXT', False, False),
                                   ('settingtimestamp', 'TIMESTAMPTZ', False,
                                    False)]
    column_definitions_daily = [('guildid', 'TEXT', True, True),
                                ('channelid', 'TEXT', False, True),
                                ('canpost', 'BOOLEAN', False, False)]
    query_server_settings = 'SELECT * FROM serversettings'

    schema_version = await get_schema_version()
    if schema_version:
        compare_1000 = utils.compare_versions(schema_version, '1.0.0.0')
        if compare_1000 <= 0:
            return True

    print(f'[create_schema] Creating database schema v1.0.0.0')

    success_settings = await try_create_table('settings',
                                              column_definitions_settings)
    if not success_settings:
        print(
            '[create_schema] DB initialization failed upon creating the table \'settings\'.'
        )

    create_daily = False
    try:
        _ = await fetchall(query_server_settings)
    except asyncpg.exceptions.UndefinedTableError:
        create_daily = True

    if create_daily:
        success_daily = await try_create_table('daily',
                                               column_definitions_daily)
    else:
        success_daily = True

    if success_daily is False:
        print(
            '[create_schema] DB initialization failed upon creating the table \'daily\'.'
        )

    success = success_settings and success_daily
    if success:
        success = await try_set_schema_version('1.0.0.0')
    return success
Example no. 13
def generate_new_tag(repository, temp_dir, specified_crate):
    versions = {}
    version = None
    # In some repositories (like sys), there are more than one crates. In such case, we try to
    # get the most common version number and then we create the tag from there.
    #
    # First, we get all versions.
    for crate in consts.CRATE_LIST:
        if crate['repository'] == repository:
            versions[crate['crate']] = CRATES_VERSION[crate['crate']]
            if crate['crate'].endswith('-sys') or crate['crate'].endswith(
                    '-sys-rs'):
                version = CRATES_VERSION[crate['crate']]
    if (specified_crate is not None
            and (specified_crate.endswith('-sys')
                 or specified_crate.endswith('-sys-rs'))):
        write_msg('Seems like "{}" is part of a repository with multiple '
                  'crates so no tag generation this time...'.format(specified_crate))
        return

    if version is None:
        most_common = {}
        # Now we get how many crates have this version.
        for crate in versions:
            if versions[crate] in most_common:
                most_common[versions[crate]] += 1
            else:
                most_common[versions[crate]] = 1
        # Now we get the "lowest" version that will be used as default tag name.
        for common in most_common:
            if version is None or compare_versions(common, version) < 0:
                version = common
        # And now we get the most common tag name.
        for common in most_common:
            if version is None or most_common[version] < most_common[common]:
                version = common
        if version is None:
            write_error(
                'Something impossible happened for "{}": no version can be tagged...'
                .format(repository))
            input(
                'If you think you can do better, go ahead! (In "{}") Then press ENTER to continue'
                .format(join(temp_dir, repository)))
            return
    write_msg('==> Creating new tag "{}" for repository "{}"...'.format(
        version, repository))
    create_tag_and_push(version, repository, temp_dir)
Example no. 14
def pypi():
    pkg_info = {}
    for channel in CHANNELS:
        res = REDIS_CONN.hgetall("{}|{}".format(channel, 'pypi'))
        if len(res) == 0:
            continue
        res = {
            k.decode(): (v.decode().split('#')[0], v.decode().split('#')[1])
            for k, v in res.items()
        }
        pkg_info[channel] = []
        for k, v in res.items():
            pkg_info[channel].append({
                'pkg_name': k,
                'pkg_status': compare_versions(v[0], v[1]),
                'pkg_ver': v[0],
                'pip_ver': v[1]
            })
        pkg_info[channel].sort(key=lambda x: status_order[x['pkg_status']])

    return render_template("pypi.html", pkg_info=pkg_info)
Example no. 15
"""
Monkey Patch and feature map for Python Paramiko

"""

import paramiko
import re
try:
    from paramiko.config import SSH_PORT
except ImportError:
    SSH_PORT = 22
import platform
from utils import compare_versions

PARAMIKO_VERSION = paramiko.__version__.split()[0]
PARAMIKO_FEATURE = {
    'forward-ssh-agent': compare_versions(PARAMIKO_VERSION, ">=", '1.8.0') and (platform.system() != "Windows"),
    'use-compression': compare_versions(PARAMIKO_VERSION, ">=", '1.7.7.1'),
    'hash-host-entries': compare_versions(PARAMIKO_VERSION, ">=", '99'),
    'host-entries-reloadable': compare_versions(PARAMIKO_VERSION, ">=", '1.11.0'),
    'preserve-known-hosts': compare_versions(PARAMIKO_VERSION, ">=", '1.11.0'),
    'ecdsa-hostkeys': compare_versions(PARAMIKO_VERSION, ">=", '1.11.6'),
}
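
# Usage sketch (not in the source): callers would gate optional behaviour
# on the feature map, e.g. with an established paramiko.Transport named
# 'transport' (a hypothetical call site):
#
#     if PARAMIKO_FEATURE['use-compression']:
#         transport.use_compression(compress=True)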

def _SSHClient_save_host_keys(self, filename):
    """\
    Available since paramiko 1.11.0...

    This method has been taken from SSHClient class in Paramiko and
    has been improved and adapted to latest SSH implementations.

    Save the host keys back to a file.
Example no. 16
def add_object(line, counter, position_decrement_due_to_rule,
               position_decrement_due_to_section, fields, api_type,
               generic_type, layer, layers_to_attach, changed_layer_names,
               api_call, num_objects, client, args):
    global duplicates_dict
    global position_decrements_for_sections
    global missing_parameter_set
    global should_create_imported_nat_top_section
    global should_create_imported_nat_bottom_section
    global imported_nat_top_section_uid

    if "access-rule" in api_type:
        position_decrements_for_sections.append(position_decrement_due_to_rule)

    payload, _ = create_payload(fields, line, 0, api_type, client.api_version)

    # for objects that had collisions, use new name in the imported package
    for field in ["members", "source", "destination"]:
        if field in payload:
            for i, member in enumerate(payload[field]):
                if member in name_collision_map:
                    payload[field][i] = name_collision_map[member]

    payload["ignore-warnings"] = True  # Useful for example when creating two hosts with the same IP

    if "nat-rule" in api_type:
        # For NAT rules, the 'layer' parameter is the name of the policy package!!!
        payload["package"] = layer
        # --- NAT rules specific logic ---
        # Importing only rules, without sections.
        # Rules marked as "__before_auto_rules = TRUE" will be imported at the TOP of the rulebase, inside a new section "IMPORTED UPPER RULES".
        # There is an additional new section "Original Upper Rules" at the bottom of "IMPORTED UPPER RULES".
        # Rules marked as "__before_auto_rules = FALSE" will be imported at the BOTTOM of the rulebase, inside a new section "IMPORTED LOWER RULES".
        # There will be no rule merges!!!
        before_auto_rules = payload["__before_auto_rules"]
        payload.pop("__before_auto_rules", None)
        if "true" in before_auto_rules:
            if should_create_imported_nat_top_section:
                should_create_imported_nat_top_section = False
                nat_section_payload = {}
                nat_section_payload["package"] = layer
                nat_section_payload["position"] = "top"
                # --> we add the footer section first!!!
                nat_section_payload["name"] = "Original Upper Rules"
                client.api_call("add-nat-section", nat_section_payload)
                # <--
                nat_section_payload["name"] = "IMPORTED UPPER RULES"
                nat_section_reply = client.api_call("add-nat-section",
                                                    nat_section_payload)
                if nat_section_reply.success:
                    imported_nat_top_section_uid = nat_section_reply.data[
                        "uid"]
            if imported_nat_top_section_uid is None:
                payload["position"] = "bottom"
            else:
                sub_payload = {}
                sub_payload["bottom"] = imported_nat_top_section_uid
                payload["position"] = sub_payload
        else:
            if should_create_imported_nat_bottom_section:
                should_create_imported_nat_bottom_section = False
                nat_section_payload = {}
                nat_section_payload["package"] = layer
                nat_section_payload["position"] = "bottom"
                nat_section_payload["name"] = "IMPORTED LOWER RULES"
                client.api_call("add-nat-section", nat_section_payload)
            payload["position"] = "bottom"
    else:
        if "position" in payload:
            if "rule" in api_type:
                payload["position"] = str(
                    int(payload["position"]) - position_decrement_due_to_rule)
                if payload["action"] == "Drop":
                    if "action-settings" in payload:
                        payload.pop("action-settings")
                    if "user-check" in payload:
                        if "frequency" in payload["user-check"]:
                            payload["user-check"].pop("frequency")
                        if "custom-frequency" in payload["user-check"]:
                            payload["user-check"].pop("custom-frequency")
                        if "confirm" in payload["user-check"]:
                            payload["user-check"].pop("confirm")
            if "section" in api_type:
                section_position_decrement = (
                    position_decrements_for_sections[int(payload["position"]) -
                                                     1]
                    if len(position_decrements_for_sections) > 0 else
                    0) + position_decrement_due_to_section
                payload["position"] = str(
                    int(payload["position"]) - section_position_decrement)
        if generic_type:
            payload["create"] = generic_type
        if "layer" in api_type:
            check_duplicate_layer(payload, changed_layer_names, api_type,
                                  client)
            if compare_versions(client.api_version, "1.1") != -1:
                payload["add-default-rule"] = "false"
            if layer is None:
                if "access-layer" in api_type:
                    #---> This code segment distinguishes between an inline layer and an ordered layer during import
                    is_ordered_access_control_layer = payload[
                        "__ordered_access_control_layer"]
                    payload.pop("__ordered_access_control_layer", None)
                    if "true" in is_ordered_access_control_layer:
                        layers_to_attach["access"].append(
                            payload["name"])  # ordered access layer
                    #<--- end of code segment
                else:
                    layers_to_attach["threat"].append(payload["name"])
        elif "rule" in api_type or "section" in api_type or \
                (api_type == "threat-exception" and "exception-group-name" not in payload):
            payload["layer"] = layer
            if client.api_version != "1" and api_type == "access-rule" and "track-alert" in payload:
                payload["track"] = {}
                payload["track"]["alert"] = payload["track-alert"]
                payload.pop("track-alert", None)
        elif api_type == "exception-group" and "applied-threat-rules" in payload:
            for applied_rule in payload["applied-threat-rules"]:
                if applied_rule["layer"] in changed_layer_names.keys():
                    applied_rule["layer"] = changed_layer_names[
                        applied_rule["layer"]]

    api_reply = client.api_call(api_call, payload)

    if not api_reply.success and "name" in payload and "More than one object" in api_reply.error_message:
        i = 0
        original_name = payload["name"]
        while not api_reply.success:
            payload["name"] = "NAME_COLLISION_RESOLVED" + (
                "_" if i == 0 else "_%s_" % i) + original_name
            api_reply = client.api_call(api_call, payload)
            i += 1

            if i > 100:
                payload["name"] = original_name
                break

        if api_reply.success:
            debug_log(
                "Object \"%s\" was renamed to \"%s\" to resolve the name collision"
                % (original_name, payload["name"]), True, True)
            name_collision_map[original_name] = payload["name"]

    if not api_reply.success:
        if api_reply.data and "errors" in api_reply.data:
            error_msg = api_reply.data["errors"][0]["message"]
        elif api_reply.data and "warnings" in api_reply.data:
            error_msg = api_reply.data["warnings"][0]["message"]
        else:
            error_msg = api_reply.error_message
        log_err_msg = "Failed to import {0}{1}. Error: {2}".format(
            api_type, " with name [" + payload["name"] +
            "]" if "name" in payload else "", error_msg)

        if "More than one object" in api_reply.error_message:
            log_err_msg = api_reply.error_message + ". Cannot import this object"

        if "rule" in api_type and ("Requested object"
                                   in api_reply.error_message
                                   and "not found" in api_reply.error_message):
            field_value = api_reply.error_message.split("[")[1].split("]")[0]
            indices_of_field = [
                i for i, x in enumerate(line) if x == field_value
            ]
            field_keys = [
                x for x in fields if fields.index(x) in indices_of_field
            ]
            for field_key in field_keys:
                if field_key.split(".")[0] in generic_objects_for_rule_fields:
                    missing_obj_data = generic_objects_for_rule_fields[
                        field_key.split(".")[0]]
                    missing_type = missing_obj_data[0]
                    mandatory_field = missing_obj_data[1] if len(
                        missing_obj_data) > 1 else None
                    add_missing_command = "add-" + missing_type
                    new_name = "import_error_due_to_missing_fields_" + field_value.replace(
                        " ", "_")
                    add_succeeded = True
                    if new_name not in missing_parameter_set:
                        missing_parameter_set.add(new_name)
                        add_missing_payload = {"name": new_name}
                        if mandatory_field == "port":
                            add_missing_payload["port"] = "8080"
                        elif mandatory_field == "ip-address":
                            add_missing_payload[
                                "ip-address"] = generate_new_dummy_ip_address(
                                )
                        add_missing_reply = client.api_call(
                            add_missing_command, add_missing_payload)
                        if not add_missing_reply.success:
                            log_err_msg += "\nAlso failed to generate placeholder object: {0}".format(
                                add_missing_reply.error_message)
                            add_succeeded = False
                    if add_succeeded:
                        line[fields.index(field_key)] = new_name
                        return add_object(line, counter,
                                          position_decrement_due_to_rule,
                                          position_decrement_due_to_section,
                                          fields, api_type, generic_type,
                                          layer, layers_to_attach,
                                          changed_layer_names, api_call,
                                          num_objects, client, args)
        if "Invalid parameter for [position]" in api_reply.error_message:
            if "access-rule" in api_type:
                position_decrement_due_to_rule += adjust_position_decrement(
                    int(payload["position"]), api_reply.error_message)
            elif "access-section" in api_type:
                position_decrement_due_to_section += adjust_position_decrement(
                    int(payload["position"]), api_reply.error_message)
            return add_object(line, counter, position_decrement_due_to_rule,
                              position_decrement_due_to_section, fields,
                              api_type, generic_type, layer, layers_to_attach,
                              changed_layer_names, api_call, num_objects,
                              client, args)
        elif "is not unique" in api_reply.error_message and "name" in api_reply.error_message:
            field_value = api_reply.error_message.partition("name")[2].split(
                "[")[1].split("]")[0]
            debug_log(
                "Not unique name problem \"%s\" - changing payload to use UID instead."
                % field_value, True, True)
            obj_uid_found_and_used = False
            if field_value not in duplicates_dict:
                show_objects_reply = client.api_query(
                    "show-objects",
                    payload={"in": ["name", "\"" + field_value + "\""]})
                if show_objects_reply.success:
                    for obj in show_objects_reply.data:
                        if obj["name"] == field_value:
                            duplicates_dict[field_value] = obj["uid"]
                            obj_uid_found_and_used = True
            if obj_uid_found_and_used:
                indices_of_field = [
                    i for i, x in enumerate(line) if x == field_value
                ]
                field_keys = [
                    x for x in fields if fields.index(x) in indices_of_field
                ]
                for field_key in field_keys:
                    line[fields.index(
                        field_key)] = duplicates_dict[field_value]
                return add_object(line, counter,
                                  position_decrement_due_to_rule,
                                  position_decrement_due_to_section, fields,
                                  api_type, generic_type, layer,
                                  layers_to_attach, changed_layer_names,
                                  api_call, num_objects, client, args)
            else:
                debug_log(
                    "Not unique name problem \"%s\" - cannot change payload to use UID instead of name."
                    % field_value, True, True)
        elif "will place the exception in an Exception-Group" in api_reply.error_message:
            return add_object(line, counter,
                              position_decrement_due_to_rule - 1,
                              position_decrement_due_to_section, fields,
                              api_type, generic_type, layer, layers_to_attach,
                              changed_layer_names, api_call, num_objects,
                              client, args)

        position_decrement_due_to_rule += 1

        debug_log(log_err_msg, True, True)
        if args is not None and args.strict:
            discard_reply = client.api_call("discard")
            if not discard_reply.success:
                debug_log(
                    "Failed to discard changes! Terminating. Error: " +
                    discard_reply.error_message, True, True)
            exit(1)
    else:
        imported_name = payload["name"] if "name" in payload else ""
        debug_log("Imported {0}{1}".format(
            api_type, " with name [" + imported_name.encode("utf-8") + "]"))
        if counter % 20 == 0 or counter == num_objects:
            percentage = int(float(counter) / float(num_objects) * 100)
            debug_log(
                "Imported {0} out of {1} {2} ({3}%)".format(
                    counter, num_objects,
                    singular_to_plural_dictionary[client.api_version][api_type]
                    if api_type
                    in singular_to_plural_dictionary[client.api_version] else
                    "generic objects", percentage), True)
            if counter % 100 == 0 or counter == num_objects:
                publish_reply = client.api_call("publish", wait_for_task=True)
                if not publish_reply.success:
                    plural = singular_to_plural_dictionary[client.api_version][api_type].replace('_', ' ') \
                        if api_type in singular_to_plural_dictionary[client.api_version] \
                        else "generic objects of type " + api_type
                    try:
                        debug_log(
                            "Failed to publish import of " + plural +
                            " from tar file #" + str((counter / 100) + 1) +
                            "! " + plural.capitalize() +
                            " from said file were not imported!. Error: " +
                            str(publish_reply.error_message), True, True)
                    except UnicodeEncodeError:
                        try:
                            debug_log(
                                "UnicodeEncodeError: " +
                                str(publish_reply.error_message), True, True)
                        except:
                            debug_log(
                                "UnicodeEncodeError: .encode('utf-8') FAILED",
                                True, True)

                    discard_reply = client.api_call("discard")
                    if not discard_reply.success:
                        debug_log(
                            "Failed to discard changes of unsuccessful publish! Terminating. Error: "
                            + discard_reply.error_message, True, True)
                        exit(1)

    return counter + 1, position_decrement_due_to_rule
Example no. 17
def get_query_rulebase_data(client, api_type, payload):
    rulebase_items = []
    rulebase_sections = []
    rulebase_rules = []
    general_objects = []

    debug_log("Getting layer information for layer [" + payload["name"] + "]")
    # We use here uid instead of name for supporting MDS env.
    layer_reply = client.api_call("show-" + api_type.split("-")[0] + "-layer",
                                  {"uid": payload["uid"]})
    if not layer_reply.success:
        debug_log(
            "Failed to retrieve layer named '" + payload["name"] +
            "'! Error: " + str(layer_reply.error_message) +
            ". Layer was not exported!", True, True)
        return None, None, None, None

    layer_data = layer_reply.data

    if layer_data["type"] == "access-layer":
        layer_settings = {
            "name": layer_data["name"],
            "uid": layer_data["uid"],
            "color": layer_data["color"],
            "comments": layer_data["comments"],
            "applications-and-url-filtering": 'True',
            "mobile-access": layer_data["mobile-access"],
            "firewall": layer_data["firewall"],
            "type": "access-layer"
        }
        if compare_versions(client.api_version, "1.1") != -1:
            layer_settings["shared"] = layer_data["shared"]
            layer_settings["content-awareness"] = layer_data[
                "content-awareness"]
        else:
            layer_settings["data-awareness"] = layer_data["data-awareness"]
    else:
        layer_settings = {
            "name": layer_data["name"],
            "uid": layer_data["uid"],
            "color": layer_data["color"],
            "comments": layer_data["comments"],
            "type": "threat-layer"
        }

    if "detect-using-x-forward-for" in layer_data:
        layer_settings["detect-using-x-forward-for"] = layer_data[
            "detect-using-x-forward-for"]

    debug_log("Getting information from show-" + api_type)

    seen_object_uids = []

    # We use here uid instead of name for supporting MDS env.
    queryPayload = {"uid": payload["uid"], "package": payload["package"]}
    if api_type == "threat-rule-exception-rulebase":
        queryPayload = {
            "uid": payload["uid"],
            "package": payload["package"],
            "rule-uid": payload["rule-uid"]
        }

    rulebase_replies = client.gen_api_query("show-" + api_type,
                                            details_level="full",
                                            container_keys=["rulebase"],
                                            payload=queryPayload)

    for rulebase_reply in rulebase_replies:
        if not rulebase_reply.success:
            debug_log(
                "Failed to retrieve layer named '" + payload["name"] +
                "'! Error: " + str(rulebase_reply.error_message) +
                ". Layer was not exported!", True, True)
            return None, None, None, None
        rulebase_data = rulebase_reply.data
        if "total" not in rulebase_data or rulebase_data["total"] == 0:
            break
        if rulebase_data["to"] == rulebase_data["total"]:
            done = True
        percentage_complete = int(
            (float(rulebase_data["to"]) / float(rulebase_data["total"])) * 100)
        debug_log(
            "Retrieved " + str(rulebase_data["to"]) + " out of " +
            str(rulebase_data["total"]) + " rules (" +
            str(percentage_complete) + "%)", True)

        non_empty_rulebase_items = []
        skipped_first_empty_section = False
        for rulebase_item in rulebase_data["rulebase"]:
            if not skipped_first_empty_section and "rule-number" not in rulebase_item and "to" not in rulebase_item:
                continue
            else:
                skipped_first_empty_section = True
            non_empty_rulebase_items.append(rulebase_item)
            if ("rule-number" in rulebase_item and rulebase_item["rule-number"]
                    == rulebase_data["to"]) or ("to" in rulebase_item
                                                and rulebase_item["to"]
                                                == rulebase_data["to"]):
                break

        if non_empty_rulebase_items and rulebase_items and non_empty_rulebase_items[0]["uid"] == \
                rulebase_items[len(rulebase_items) - 1]["uid"]:
            rulebase_items[len(rulebase_items) - 1]["rulebase"].extend(
                non_empty_rulebase_items[0]["rulebase"])
            rulebase_items[len(rulebase_items) -
                           1]["to"] = non_empty_rulebase_items[0]["to"]
            non_empty_rulebase_items = non_empty_rulebase_items[1:]
        rulebase_items.extend(non_empty_rulebase_items)

        new_objects = [
            x for x in rulebase_data["objects-dictionary"]
            if x["uid"] not in seen_object_uids
        ]
        seen_object_uids.extend([x["uid"] for x in new_objects])
        general_objects.extend(new_objects)

    for general_object in general_objects:
        string = (
            u"##Show presented object of type {0} " +
            (u"with name {1}" if "name" in general_object else u"with no name")
        ).format(general_object["type"],
                 general_object["name"] if "name" in general_object else "")
        debug_log(string)
        if should_export(general_object):
            check_for_export_error(general_object, client)

    debug_log("Analysing rulebase items...")
    for rulebase_item in rulebase_items:
        if any(x in rulebase_item["type"]
               for x in ["access-rule", "threat-rule", "threat-exception"]):
            string = (u"##Show presented independent rule of type {0} " +
                      (u"with name {1}" if "name" in rulebase_item else
                       u"with no name")).format(
                           rulebase_item["type"], rulebase_item["name"]
                           if "name" in rulebase_item else "")
            debug_log(string)
            rulebase_rules.append(rulebase_item)
        elif "section" in rulebase_item["type"]:
            for rule in rulebase_item["rulebase"]:
                string = (
                    u"##Show presented dependent rule of type {0} under section {1} "
                    + (u"with name {2}" if "name" in rule else u"with no name")
                ).format(
                    rule["type"], rulebase_item["name"]
                    if "name" in rulebase_item else "???",
                    rule["name"] if "name" in rule else "")
                debug_log(string)
                rulebase_rules.append(rule)

            # Because of 50 items chunks per API query reply, one rule section may spread over several chunks!!!
            if rulebase_sections and rulebase_sections[
                    len(rulebase_sections) - 1]["uid"] == rulebase_item["uid"]:
                if "to" in rulebase_item:
                    rulebase_sections[len(rulebase_sections) -
                                      1]["to"] = rulebase_item["to"]
                continue

            string = (u"##Show presented section of type {0} " +
                      (u"with name {1}" if "name" in rulebase_item else
                       u"with no name")).format(
                           rulebase_item["type"], rulebase_item["name"]
                           if "name" in rulebase_item else "")
            debug_log(string)
            rulebase_sections.append(rulebase_item)
        else:
            debug_log("Unsupported rulebase object type - '" +
                      rulebase_item["type"] + "'. Continue...",
                      print_to_error_log=True)

    return layer_settings, rulebase_sections, rulebase_rules, general_objects
Example no. 18
        debug ('  %s tanglu=%s debian=%s upstream=%s' % (thread.package.source, thread.package.tanglu_version, thread.package.debian_version, thread.package.upstream_version))
        count -= 1
for i in xrange (count):
    thread = queue.get ()
    debug ('  %s tanglu=%s debian=%s upstream=%s' % (thread.package.source, thread.package.tanglu_version, thread.package.debian_version, thread.package.upstream_version))

if 'DEBUG' in os.environ and os.environ['DEBUG'] == 'get':
    sys.exit(0)

debug('Comparing versions...')

for name in package_names:
    package = packages[name]
    if package.debian_version == package.tanglu_version:
        package.is_synchronised = True
    if compare_versions(package.debian_version, package.tanglu_version) > 0:
        package.is_debian_newer = True
    if package.upstream_version is None:
        debug ("Upstream version of %s was None!" % (name))
    else:
        if package.tanglu_version is None or compare_versions(apt_pkg.upstream_version(package.upstream_version), apt_pkg.upstream_version(package.tanglu_version)) > 0:
            package.is_upstream_newer = True
    if package.upstream_unstable_version is not None and \
       compare_versions(package.upstream_unstable_version, package.tanglu_version) > 0:
        # HACK? don't list gnome3 ppa version as uptodate, we don't want to overlook things
        # because they are only in the ppa, could be yet another section though
        # and (package.ubuntu_unstable_version is None or compare_versions(package.upstream_unstable_version, package.ubuntu_unstable_version) > 0):
        package.is_upstream_unstable_newer = True

def get_package_class(package):
    if package.stable_url == UNTRACKED or package.upstream_version == '':
Example no. 19
 def __call__(self, status_update_callback, completion_callback):
     uc = status_update_callback
     try:
         home_path = task.get_home_directory(self, logger)
         djm_path = os.path.join(home_path, "djm")
         python_ve_path = os.path.join(djm_path, "python")
         virtualenv_worker = os.path.join(python_ve_path, "bin/djm-worker")
         # we go through a bunch of stuff to see if we can reuse the
         # existing worker
         can_use_existing_worker = False
         if task.check_for_path(self, virtualenv_worker, logger=logger):
             existing_worker_path = virtualenv_worker
         else:
             existing_worker_path = task.find_exe(self, "djm-worker", logger)
         if existing_worker_path:
             # a worker already exists on this node. See if it has a new enough
             # version. We upgrade if the version is older than this one.
             result = task.run_command(self, [existing_worker_path, "version"],
                                       logger=logger)
             version_str = result[len(result)-1] if len(result)>0 else ""
             if not version_pattern.match(version_str):
                 raise TaskError("Unable to determine version of worker running on node %s. Version string obtained was '%s'" %
                                 (self.worker_node["node"], version_str))
             if utils.compare_versions(VERSION, version_str)>=0:
                 can_use_existing_worker = True
             else:
                 virtualenv_exe = task.find_exe(self, "virtualenv", logger)
                 if not virtualenv_exe:
                     raise TaskError("Unable to bootstrap node %s: cannot find virtualenv executable" %
                                     self.worker_node["name"])
                 logger.debug("making sure old DJM on %s is stopped" %
                              self.worker_node["name"])
                 task.run_command(self, [existing_worker_path, "stop"],
                                  logger)
                 if task.check_for_path(self, python_ve_path):
                     logger.debug("Remove old DJM install from %s" %
                                  self.worker_node["name"])
                     task.run_command(self, ["/bin/rm", "-rf",
                                             python_ve_path],
                                      logger)
         else: # no existing worker path
             virtualenv_exe = task.find_exe(self, "virtualenv", logger)
             if not virtualenv_exe:
                 raise TaskError("Unable to bootstrap node %s: cannot find virtualenv executable" %
                                 self.worker_node["name"])
             
         if can_use_existing_worker:
             logger.info("Using existing DJM worker on node %s" %
                         self.worker_node["name"])
         else:
             # setup a virtualenv
             task.run_command(self, ["/bin/mkdir", "-p", python_ve_path],
                              shell=True)
             result = task.run_command(self, [virtualenv_exe, "--version"])
             virtualenv_version = result[len(result)-1]
             logger.debug("Virtualenv version on %s is %s" %
                          (self.worker_node["name"],
                           virtualenv_version))
             if utils.compare_versions("1.7", virtualenv_version)>=0:
                 # F'ing virtualenv options changed in 1.7!
                 task.run_command(self, [virtualenv_exe, "--system-site-packages",
                                         python_ve_path],
                                  shell=True, logger=logger)
             else:
                 task.run_command(self, [virtualenv_exe, python_ve_path],
                                  shell=True, logger=logger)
             # get the package to the node if necessary
             djm_package = self.server_config["djm_package"]
             if (not djm_package.startswith("http://")) and \
                (not djm_package.startswith("https://")):
                 target_djm_package_path = os.path.join(djm_path,
                                                        os.path.basename(djm_package))
                 if (not task.local_node(self.worker_node)) or \
                    (djm_package != target_djm_package_path):
                     task.copy(self.worker_node, djm_package,
                               target_djm_package_path,
                               log_tag=self.get_log_tag())
             else:
                 target_djm_package_path = djm_package
             pip_exe = os.path.join(python_ve_path, "bin/pip")
             task.run_command(self, [pip_exe, "install",
                                     target_djm_package_path],
                              shell=True, logger=logger)
     except TaskError, e:
         logger.exception("Task %s failed due to error: %s" %
                          (self.get_log_tag(), e))
         completion_callback(self, TaskStatus.TASK_FAILED,
                             reason="Got error: %s" % e)
         return 1
Example no. 20
async def update_schema_v_1_2_2_0() -> bool:
    query_lines = []
    rename_columns = {'channelid': 'dailychannelid', 'canpost': 'dailycanpost'}
    column_definitions = [('guildid', 'TEXT', True, True),
                          ('dailychannelid', 'TEXT', False, False),
                          ('dailycanpost', 'BOOLEAN', False, False),
                          ('dailylatestmessageid', 'TEXT', False, False),
                          ('usepagination', 'BOOLEAN', False, False),
                          ('prefix', 'TEXT', False, False)]

    schema_version = await get_schema_version()
    if schema_version:
        compare_1220 = utils.compare_versions(schema_version, '1.2.2.0')
        compare_1000 = utils.compare_versions(schema_version, '1.0.0.0')
        if compare_1220 <= 0:
            return True
        elif compare_1000 > 0:
            return False

    print(
        f'[update_schema_v_1_2_2_0] Updating database schema from v1.0.0.0 to v1.2.2.0'
    )

    query = 'ALTER TABLE IF EXISTS daily RENAME TO serversettings'
    try:
        success = await try_execute(query, raise_db_error=True)
    except Exception as error:
        success = False
        print_db_query_error('update_schema_v_1_2_2_0', query, None, error)
    if success:
        column_names = await get_column_names('serversettings')
        column_names = [column_name.lower() for column_name in column_names]
        for name_from, name_to in rename_columns.items():
            if name_from in column_names:
                query_lines.append(
                    f'ALTER TABLE IF EXISTS serversettings RENAME COLUMN {name_from} TO {name_to};'
                )

        for (column_name, column_type, column_is_primary,
             column_not_null) in column_definitions:
            if column_name in rename_columns.values() or column_name in column_names:
                query_lines.append(
                    f'ALTER TABLE IF EXISTS serversettings ALTER COLUMN {column_name} TYPE {column_type};'
                )
                if column_not_null:
                    not_null_toggle = 'SET'
                else:
                    not_null_toggle = 'DROP'
                query_lines.append(
                    f'ALTER TABLE IF EXISTS serversettings ALTER COLUMN {column_name} {not_null_toggle} NOT NULL;'
                )

        query = '\n'.join(query_lines)
        if query:
            success = await try_execute(query)
        else:
            success = True
        if success:
            query_lines = []
            column_names = await get_column_names('serversettings')
            column_names = [
                column_name.lower() for column_name in column_names
            ]
            for (column_name, column_type, column_is_primary,
                 column_not_null) in column_definitions:
                if column_name not in column_names:
                    query_lines.append(
                        f'ALTER TABLE IF EXISTS serversettings ADD COLUMN IF NOT EXISTS {utils.database.get_column_definition(column_name, column_type, column_is_primary, column_not_null)};'
                    )
            query = '\n'.join(query_lines)
            if query:
                success = await try_execute(query)
            else:
                success = True
            if success:
                success = await try_set_schema_version('1.2.2.0')
    return success
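
Note: get_column_names is the remaining unshown helper in this migration; a sketch against the PostgreSQL information schema, reusing the assumed POOL from the try_execute sketch under example no. 2:

async def get_column_names(table_name: str) -> list:
    query = ('SELECT column_name FROM information_schema.columns '
             'WHERE table_name = $1;')
    async with POOL.acquire() as connection:
        records = await connection.fetch(query, table_name)
    return [record['column_name'] for record in records]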