Example #1
def exportDomains(path_domain,
                  path_wdomain,
                  collapse=True,
                  asterize_wc=False,
                  sort=False,
                  **apiconf):
    """
    :param collapse: Collapse domains matching wdomains
    :param asterize_wc: Add *. to wdomains
    """
    domains, wdomains = webconn.call(module='api.restrictions',
                                     method='getBlockedDomains',
                                     collapse=collapse,
                                     **apiconf)
    if sort:
        domains = list(domains)
        domains.sort()
    wb = exportToFileFormatted(path_domain, domains)
    if path_wdomain is None:
        # No wildcard output was requested; processing stops here.
        return wb

    if sort:
        wdomains = list(wdomains)
        wdomains.sort()

    if asterize_wc:
        wdomains = {'*.' + dom for dom in wdomains}

    return wb + exportToFileFormatted(path_wdomain, wdomains)
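A minimal usage sketch for the exporter above; the connection settings and output paths are hypothetical placeholders, and the actual keys expected in **apiconf come from the project's API configuration.

# Hypothetical invocation (paths and API settings are placeholders).
apiconf = {'url': 'http://127.0.0.1:8080/rpc', 'token': 'secret'}  # assumed keys
written = exportDomains(path_domain='/tmp/domains.txt',
                        path_wdomain='/tmp/wdomains.txt',
                        asterize_wc=True,
                        sort=True,
                        **apiconf)
# exportToFileFormatted is assumed to return a byte count, as suggested by the
# 'bytes written' logging in Example #12.
print(str(written) + ' bytes written')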
Example #2
def exportHTTPS(path, only_sni=True, from_domains=True, **apiconf):
    """
    :param only_sni: Write only the SNI (hostname) parsed from each URL using urllib
    :param from_domains: Fetch SNIs from the domain list instead of HTTPS URLs
    """
    if from_domains:
        dataset = webconn.call(module='api.restrictions',
                               method='getBlockedDomains',
                               collapse=False,
                               **apiconf)
    else:
        dataset = webconn.call(module='api.restrictions',
                               method='getBlockedHTTPS',
                               **apiconf)
        if only_sni:
            dataset = {getdomain(url) for url in dataset}

    return exportToFileFormatted(path, dataset)
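getdomain is not defined in these examples; the docstring only says the SNI is parsed from the URL with urllib. Below is a minimal sketch of such a helper, assuming it extracts the hostname; the project's real implementation may differ.

# Minimal sketch of the assumed getdomain helper.
from urllib.parse import urlparse

def getdomain(url):
    # urlparse only fills the hostname when a scheme is present, so add one if missing
    if '://' not in url:
        url = 'https://' + url
    return urlparse(url).hostname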
Example #3
def exportDomains(path, collapse=True, extra=None, **apiconf):
    """
    :param collapse: Collapse excessive entries.
    """
    dataset = webconn.call(module='api.restrictions',
                           method='getBlockedDNS',
                           collapse=collapse,
                           srcenttys=extra,
                           **apiconf)
    return exportToFileFormatted(path, dataset)
Example #4
def exportHTTPS(path, cutproto=False, extra=None, **apiconf):
    """
    :param cutproto: Truncates 'https://'
    """
    dataset = webconn.call(module='api.restrictions',
                           method='getBlockedHTTPS',
                           cutproto=cutproto,
                           srcenttys=extra,
                           **apiconf)
    return exportToFileFormatted(path, dataset)
Example #5
def exportHTTP(path, truncate_proto=False, **apiconf):
    """
    :param truncate_proto: Cuts the scheme prefix (e.g. 'http://')
    """
    dataset = webconn.call(module='api.restrictions',
                           method='getBlockedHTTP',
                           **apiconf)
    if truncate_proto:
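        # find('://') + 3 skips past the scheme separator; if '://' is absent,
        # find() returns -1 and the slice starts at index 2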
        dataset = {url[url.find('://') + 3:] for url in dataset}

    return exportToFileFormatted(path, dataset)
Example #6
def exportIPs(path, collapse=True, subnet_fmt=False, ipv6=False, **apiconf):
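    """
    :param collapse: Collapse excessive entries
    :param subnet_fmt: Keep the CIDR suffix; if False, /32 and /128 are stripped
    :param ipv6: Export IPv6 entries instead of IPv4
    """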
    dataset, totalblocked = webconn.call(module='api.restrictions',
                                         method='getBlockedIPs',
                                         collapse=collapse,
                                         ipv6=ipv6,
                                         **apiconf)
    if not subnet_fmt:
        if ipv6:
            # Truncating /128
            dataset = [ip.split('/128')[0] for ip in dataset]
        else:
            # Truncating /32
            dataset = [ip.split('/32')[0] for ip in dataset]

    return exportToFileFormatted(path, dataset)
Example #7
def exportWDomains(path,
                   collapse=True,
                   wc_asterize=False,
                   extra=None,
                   **apiconf):
    """
    :param collapse: Collapse excessive entries.
    :param wc_asterize: Prepends '*.' to each wildcard domain.
    """
    dataset = webconn.call(module='api.restrictions',
                           method='getBlockedWildcardDNS',
                           collapse=collapse,
                           wc_asterize=wc_asterize,
                           srcenttys=extra,
                           **apiconf)
    return exportToFileFormatted(path, dataset)
Example #8
def main():
    parsedargs = parseArgs()
    if parsedargs is None:
        # Invalid arguments: parseArgs() returned None, so exit with code 2.
        return 2
    args = vars(parsedargs)

    config = initConf(args['confpath'])
    try:
        print(
            str(
                webconn.call(
                    module=CLI_MOD,
                    method=CLI_DICT[args['subject']][args['action']]['func'],
                    **config['API'],
                    **args)))
    except Exception as e:
        print(e)
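The dispatch above indexes CLI_DICT by subject and action and reads a 'func' key, but the mapping itself is not shown. A minimal sketch of the shape it assumes; the subjects, actions and API method names below are hypothetical placeholders.

# Hypothetical shape of CLI_DICT; the real subjects, actions and method names differ.
CLI_DICT = {
    'dump': {
        'check': {'func': 'getLastDumpTS'},
    },
    'resource': {
        'unblock': {'func': 'unblockSet'},
    },
}
CLI_MOD = 'api.monitoring'  # also assumed; main() only needs a module name here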
Example #9
def main():
    configPath = utils.confpath_argv()
    if configPath is None:
        utils.print_help()
        return 0

    config = utils.initConf(configPath, __file__)

    logger = utils.initLog(**config['Logging'])
    logger.debug('Starting with config:\n' + str(config))

    try:
        running = webconn.call(module='api.procutils',
                               method='checkRunning',
                               procname=config['Global']['procname'],
                               **config['API'])
    except Exception as e:
        logger.critical('Couldn\'t obtain information from the database\n' +
                        str(e))
        return 9
    if running and not config['Global'].get('forcerun'):
        logger.critical(
            'The same program is running at this moment. Halting...')
        return 0
    # Registering the job and getting its log entry ID
    log_id = webconn.call(module='api.procutils',
                          method='addLogEntry',
                          procname=config['Global']['procname'],
                          **config['API'])
    try:
        # Fetching http restrictions
        logger.info('Fetching restrictions list from DB')
        # Don't use lstrip('http://') here: lstrip() strips characters, not a prefix.
        # HTTP entries are fetched through the dedicated getBlockedHTTP call below.
        urlsSet = set()
        urlsSet.update(
            webconn.call(module='api.restrictions',
                         method='getBlockedHTTP',
                         cutproto=True,
                         **config['API']))
        if config['Extra']['https']:
            # Using particular case for https
            urlsSet.update(
                webconn.call(module='api.restrictions',
                             method='getBlockedHTTPS',
                             cutproto=True,
                             **config['API']))
        if config['Extra']['domain']:
            urlsSet.update(
                webconn.call(module='api.restrictions',
                             method='getBlockedDataSet',
                             entitytypes='domain',
                             blocktypes='domain',
                             **config['API']))
        if config['Extra']['domain-mask']:
            urlsSet.update(
                webconn.call(module='api.restrictions',
                             method='getBlockedDataSet',
                             entitytypes='domain-mask',
                             blocktypes='domain-mask',
                             **config['API']))
        # Truncating entries if too many.
        if len(urlsSet) > config['Extra']['truncate-after']:
            logger.debug('Truncating entries: ' +
                         str(len(urlsSet) - config['Extra']['truncate-after']))
            # Note: urlsSet becomes a list here (sets are unordered, so the list form is sliced).
            urlsSet = list(urlsSet)[-config['Extra']['truncate-after']:]

        logger.info('Entries being blocked: ' + str(len(urlsSet)))
        logger.info('Updating F5 configuration...')
        result = ['URLs to restrict: ' + str(len(urlsSet))]

        for host in config['F5']:
            logger.info('Host: ' + host['host'])
            saved = False
            # Putting URLs to F5
            success, strcode = updateF5datagroup(urls=urlsSet, **host)
            if success:
                # Then saving
                saved = saveF5config(**host)
                if saved:
                    logger.info('Configuration is up to date and saved')
                else:
                    logger.warning('Configuration was updated but could not be saved')
            else:
                logger.warning('Couldn\'t update')
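            # bool indexing: False selects 'ERROR '/' not ', True selects 'OK '/' '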
            res = 'F5 ' + host['host'] + ' status: ' + \
                  ['ERROR ', 'OK '][success] + strcode + \
                  [' not ', ' '][saved] + 'saved'

            logger.debug(res)
            result.append(res)

        # Updating the state in the database
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=0,
                     result='\n'.join(result),
                     **config['API'])
        logger.info('Blocking was finished, enjoy your 1984th')

    except Exception as e:
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=1,
                     result=str(e),
                     **config['API'])
        logger.error(str(e))
        return getattr(e, 'errno', 1)

    return 0
Example #10
def main():
    configPath = utils.confpath_argv()
    if configPath is None:
        utils.print_help()
        return 0

    config = utils.initConf(configPath, __file__)

    logger = utils.initLog(**config['Logging'])
    logger.debug('Starting with config:\n' + str(config))

    utils.createFolders(config['Global']['tmppath'])

    try:
        running = webconn.call(module='api.procutils',
                               method='checkRunning',
                               procname=config['Global']['procname'],
                               **config['API'])
    except Exception as e:
        logger.critical('Couldn\'t obtain information from the database\n' +
                        str(e))
        return 9
    if running and not config['Global'].get('forcerun'):
        logger.critical(
            'The same program is running at this moment. Halting...')
        return 0
    # Registering the job and getting its log entry ID
    log_id = webconn.call(module='api.procutils',
                          method='addLogEntry',
                          procname=config['Global']['procname'],
                          **config['API'])

    try:
        # Fetching ip restrictions
        logger.info('Fetching restrictions list from DB')
        ipsublist, totalblocked = webconn.call(
            module='api.restrictions',
            method='getBlockedIPs',
            collapse=config['Bird']['collapse'],
            ipv6=config['Bird']['ipv6'],
            **config['API'])
        # Checking limit
        if len(ipsublist) > config['Bird']['limit']:
            logger.warning('Limit exceeded: ' + str(len(ipsublist)) +
                           ' routes')
            ipsublist = ipsublist[-config['Bird']['limit']:]

        logger.info('Updating bird configuration and restarting daemon...')
        # Updating BGP casts
        updateBirdConfig(ipsublist=ipsublist, **config['Bird'])
        # Saving the conf to another place
        if config['Global']['saveconf']:
            shutil.copy(config['Bird']['confpath'],
                        config['Global']['tmppath'])

        # Updating the state in the database
        result = [
            str(totalblocked) + ' ip entries are routed to blackhole',
            str(len(ipsublist)) + ' entries are announced by BGP daemon'
        ]
        logger.info(', '.join(result))
        # Updating the state in the database
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=0,
                     result='\n'.join(result),
                     **config['API'])
        logger.info('Blocking was finished, enjoy your 1984th')

    except Exception as e:
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=1,
                     result=str(e),
                     **config['API'])
        logger.error(str(e))
        return getattr(e, 'errno', 1)

    return 0
Example #11
def main():
    configPath = utils.confpath_argv()
    if configPath is None:
        utils.print_help()
        return 0

    config = utils.initConf(configPath, __file__)

    logger = utils.initLog(**config['Logging'])
    logger.debug('Starting with config:\n' + str(config))

    utils.createFolders(config['Global']['tmppath'])

    try:
        running = webconn.call(module='api.procutils',
                               method='checkRunning',
                               procname=config['Global']['procname'],
                               **config['API'])
    except Exception as e:
        logger.critical('Couldn\'t obtain information from the database\n' +
                        str(e))
        return 9
    if running and not config['Global'].get('forcerun'):
        logger.critical('The same program is running at this moment. Halting...')
        return 0
    # Registering the job and getting its log entry ID
    log_id = webconn.call(module='api.procutils',
                          method='addLogEntry',
                          procname=config['Global']['procname'],
                          **config['API'])

    if config['Unbound'].get('stubip') is None \
            and config['Unbound'].get('stubipv6') is None:
        # Generally acceptable, but this works only for redirect records;
        # transparent records without any info are simply passed through.
        logger.error('No stub is set for either A or AAAA records.')
        logger.warning('Zones will be defined without info entries.')

    try:
        logger.info('Obtaining current domain blocklists on unbound daemon')
        domainUBCSet, wdomainUBCSet = getUnboundLocalDomains(**config['Unbound'])

        logger.info('Fetching restrictions list from DB')
        domainBlockSet, \
        wdomainBlockSet = webconn.call(module='api.restrictions',
                                       method='getBlockedDomains',
                                       collapse=config['Unbound']['collapse'],
                                       **config['API'])
        logger.info('Obtained ' + str(len(domainBlockSet)) + ' strict domains and ' +
                    str(len(wdomainBlockSet)) + ' wildcard domains')
        # Convert the returned lists to sets for the set arithmetic below
        domainBlockSet = set(domainBlockSet)
        wdomainBlockSet = set(wdomainBlockSet)

        logger.info('Banning...')
        result = ['Unbound updates:']

        domainset = domainBlockSet - domainUBCSet
        addUnboundZones(domainset=domainset,
                        zonetype='static',
                        **config['Unbound'])
        logger.debug('Strict banned: ' + ' '.join(map(str, domainset)))
        result.append('Strict banned: ' + str(len(domainset)))

        domainset = wdomainBlockSet - wdomainUBCSet
        addUnboundZones(domainset=domainset,
                        zonetype='redirect',
                        **config['Unbound'])
        logger.debug('Wildcard banned: ' + ' '.join(map(str, domainset)))
        result.append('Wildcard banned: ' + str(len(domainset)))

        logger.info('Unbanning...')

        domainset = domainUBCSet - domainBlockSet
        delUnboundZones(domainset=domainset,
                        zonetype='static',
                        **config['Unbound'])
        logger.debug('Strict unbanned: ' + ' '.join(map(str, domainset)))
        result.append('Strict unbanned: ' + str(len(domainset)))

        domainset = wdomainUBCSet - wdomainBlockSet
        delUnboundZones(domainset=domainset,
                        zonetype='redirect',
                        **config['Unbound'])
        logger.debug('Wildcard unbanned: ' + ' '.join(map(str, domainset)))
        result.append('Wildcard unbanned: ' + str(len(domainset)))

        logger.info('Generating permanent config...')
        buildUnboundConfig(domainset=domainBlockSet,
                           wdomainset=wdomainBlockSet,
                           **config['Unbound']
                           )
        if config['Global']['saveconf']:
            shutil.copy(config['Unbound']['confpath'],
                        config['Global']['tmppath'])

        logger.info(', '.join(result))
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=0,
                     result='\n'.join(result),
                     **config['API'])
        logger.info('Blocking was finished, enjoy your 1984th')

    except Exception as e:
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=1,
                     result=str(e),
                     **config['API'])
        logger.error(str(e))
        return getattr(e, 'errno', 1)

    return 0
Example #12
def main():
    configPath = utils.confpath_argv()
    if configPath is None:
        utils.print_help()
        return 0

    config = utils.initConf(configPath, __file__)

    logger = utils.initLog(**config['Logging'])
    logger.debug('Starting with config:\n' + str(config))

    utils.createFolders(config['Global']['tmppath'])

    try:
        running = webconn.call(module='api.procutils',
                               method='checkRunning',
                               procname=config['Global']['procname'],
                               **config['API'])
    except Exception as e:
        logger.critical('Couldn\'t obtain information from the database\n' +
                        str(e))
        return 9
    if running and not config['Global'].get('forcerun'):
        logger.critical(
            'The same program is running at this moment. Halting...')
        return 0
    # Registering the job and getting its log entry ID
    log_id = webconn.call(module='api.procutils',
                          method='addLogEntry',
                          procname=config['Global']['procname'],
                          **config['API'])

    try:
        logger.info('Initializing export threads pool...')
        threadpool = Pool(processes=int(config['Global']['threads']))
        resultpool = dict()

        for entity, options in config['Sandvine']['export'].items():
            logger.debug(entity + ' export started...')
            proc = PROC_DICT.get(entity)
            if proc is None:
                logger.warn('No such procedure built-in: ' + entity)
                continue
            resultpool[entity] = threadpool.\
                apply_async(proc, kwds={**options, **config['API']})
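            # apply_async returns an AsyncResult; its ready()/get() are polled below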

        # I know about wait() and get()
        timeout = int(config['Global']['wait_timeout'])
        runtime = 0
        pollsec = int(config['Global']['poll_timeout'])
        ready = dict()

        while runtime < timeout:

            for entity in resultpool.keys():
                ready[entity] = resultpool[entity].ready()

            if reduce(lambda a, b: a and b, ready.values()):
                logger.info('All tasks completed')
                break

            sleep(pollsec)
            runtime = runtime + pollsec
            logger.debug(
                'Progress ' + str(runtime) + '/' + str(timeout) + ' sec: ' +
                '|'.join(
                    [' ' + k + [' -- ', ' OK '][v] for k, v in ready.items()]))

        logger.info('Export results:')
        for entity in resultpool.keys():
            try:
                logger.info(entity + ': ' + str(resultpool[entity].get(1)) +
                            ' bytes written')
            except TimeoutError:
                logger.warn(entity + ' has been timed out')
        threadpool.terminate()

        if not reduce(lambda a, b: a or b, ready.values()):
            result = 'All exports failed'
            logger.error(result)
            err = Exception(result)
            err.errno = 13  # read back via getattr(e, 'errno', 1) in the except handler
            raise err
        if reduce(lambda a, b: a and b, ready.values()):
            result = 'All exports were successful'
            logger.info(result)
        else:
            result = 'Some exports failed'
            logger.warn(result)

        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=0,
                     result=result,
                     **config['API'])

        logger.info('Blocking was finished, enjoy your 1984th')

    except Exception as e:
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=1,
                     result=str(e),
                     **config['API'])
        logger.error(str(e))
        return getattr(e, 'errno', 1)

    return 0
Example #13
def main():
    configPath = utils.confpath_argv()
    if configPath is None:
        utils.print_help()
        return 0

    config = utils.initConf(configPath, __file__)

    logger = utils.initLog(**config['Logging'])
    logger.debug('Starting with config:\n' + str(config))

    utils.createFolders(config['Global']['tmppath'])

    try:
        running = webconn.call(module='api.procutils',
                               method='checkRunning',
                               procname=config['Global']['procname'],
                               **config['API'])
    except Exception as e:
        logger.critical('Couldn\'t obtain information from the database\n' +
                        str(e))
        return 9
    if running and not config['Global'].get('forcerun'):
        logger.critical('The same program is running at this moment. Halting...')
        return 0
    # Registering the job and getting its log entry ID
    log_id = webconn.call(module='api.procutils',
                          method='addLogEntry',
                          procname=config['Global']['procname'],
                          **config['API'])
    try:
        if config['Miscellaneous']['uselocaldump']:
            dumpFile = open(file=config['Global']['dumpPath'],
                            mode='rb').read()
        else:
            # Checking dump info
            logger.debug('Obtaining dumpfile from ' + config['DumpLoader']['url'])
            rknSW = rknsoapwrapper.RknSOAPWrapper(**config['DumpLoader'])
            dumpDate = rknSW.getLastDumpDateEx()
            if not dumpDate:
                err = Exception('Couldn\'t obtain dumpdates info')
                err.errno = 2
                raise err

            # Loaded dump unix timestamp in seconds
            update_ts = max(dumpDate['lastDumpDate'],
                            dumpDate['lastDumpDateUrgently'])/1000
            logger.info('Latest dump timestamp is: ' +
                         str(datetime.fromtimestamp(update_ts))
                         )
            # Timestamp of the last parsed dump, in seconds
            dump_ts = webconn.call(module='api.monitoring',
                                   method='getLastDumpTS',
                                   **config['API'])
            logger.info('Parsed dump timestamp is: ' +
                         str(datetime.fromtimestamp(dump_ts))
                         )
            # 5 seconds rule
            if update_ts < dump_ts + 5:
                result = 'The latest dump is relevant'
                logger.info(result)
                # Updating the state in database
                webconn.call(module='api.dumpparse',
                             method='updateDumpCheckTime',
                             **config['API'])
                # Finalising
                webconn.call(module='api.procutils',
                             method='finishJob',
                             log_id=log_id,
                             exit_code=0,
                             result=result,
                             **config['API'])
                return 0

            # Obtaining dump file
            logger.info('Blocklist is outdated, requesting a new dump')
            dumpFile = rknSW.getDumpFile(open(config['Global']['reqPath'], 'rb').read(),
                                         open(config['Global']['reqPathSig'], 'rb').read()
                                         )
            if config['Global']['savedump']:
                logger.info('Saving file to ' + config['Global']['dumpPath'])
                open(file=config['Global']['dumpPath'], mode='wb').write(dumpFile)

        # Parsing dump file
        logger.info('Parsing the dump')
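        # The dump is a ZIP archive whose dump.xml payload is cp1251-encoded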
        xmldump = zipfile.ZipFile(io.BytesIO(dumpFile)).read('dump.xml').decode('cp1251')
        # Freeing memory
        del dumpFile

        parse_result = webconn.call(module='api.dumpparse',
                                    method='parse',
                                    xmldump=xmldump,
                                    **config['API'])
        if not parse_result:
            err = Exception('Dump hasn\'t been parsed')
            err.errno = 3
            raise err
        # Freeing memory
        del xmldump
        result = 'Dump has been parsed into the database successfully'
        logger.info(result)

        # Updating the state in the database
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=0,
                     result=result,
                     **config['API'])
        logger.info('Blocking was finished, enjoy your 1984th')

    except Exception as e:
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=1,
                     result=str(e),
                     **config['API'])
        logger.error(str(e))
        return getattr(e, 'errno', 1)

    return 0
Example #14
def main():
    configPath = utils.confpath_argv()
    if configPath is None:
        utils.print_help()
        return 0

    config = utils.initConf(configPath, __file__)

    logger = utils.initLog(**config['Logging'])
    logger.debug('Starting with config:\n' + str(config))

    utils.createFolders(config['Global']['tmppath'])

    try:
        running = webconn.call(module='api.procutils',
                               method='checkRunning',
                               procname=config['Global']['procname'],
                               **config['API'])
    except Exception as e:
        logger.critical('Couldn\'t obtain information from the database\n' +
                        str(e))
        return 9
    if running and not config['Global'].get('forcerun'):
        logger.critical('The same program is running at this moment. Halting...')
        return 0
    # Registering the job and getting its log entry ID
    log_id = webconn.call(module='api.procutils',
                          method='addLogEntry',
                          procname=config['Global']['procname'],
                          **config['API'])
    try:
        if config['Miscellaneous']['uselocaldump']:
            dumpFile = open(file=config['Global']['dumpPath'],
                            mode='rb').read()
        else:
            # Checking dump info
            logger.debug('Obtaining dumpfile from ' + config['DumpLoader']['url'])
            rknSW = rknsoapwrapper.RknSOAPWrapper(**config['DumpLoader'])
            dumpDate = rknSW.getLastDumpDateEx()
            if not dumpDate:
                err = Exception('Couldn\'t obtain dumpdates info')
                err.errno = 2
                raise err

            update_time = max(dumpDate['lastDumpDate'],
                              dumpDate['lastDumpDateUrgently'])/1000
            parsed_recently = webconn.call(module='api.dumpparse',
                                           method='parsedRecently',
                                           update_time=update_time,
                                           **config['API'])

            if parsed_recently:
                result = 'The latest dump is relevant'
                logger.info(result)
                # Updating the state in database
                webconn.call(module='api.procutils',
                             method='finishJob',
                             log_id=log_id,
                             exit_code=0,
                             result=result,
                             **config['API'])
                return 0

            # Obtaining dump file
            logger.info('Blocklist is outdated, requesting a new dump')
            dumpFile = rknSW.getDumpFile(open(config['Global']['reqPath'], 'rb').read(),
                                         open(config['Global']['reqPathSig'], 'rb').read()
                                         )
            if config['Global']['savedump']:
                logger.info('Saving file to ' + config['Global']['dumpPath'])
                open(file=config['Global']['dumpPath'], mode='wb').write(dumpFile)

        # Parsing dump file
        logger.info('Parsing dump file...')
        xmldump = zipfile.ZipFile(io.BytesIO(dumpFile)).read('dump.xml').decode('cp1251')
        # Freeing memory
        del dumpFile

        parse_result = webconn.call(module='api.dumpparse',
                                    method='parse',
                                    xmldump=xmldump,
                                    **config['API'])
        if not parse_result:
            err = Exception('Dump hasn\'t been parsed')
            err.errno = 3
            raise err
        # Freeing memory
        del xmldump
        logger.info('Dump has been parsed into the database successfully')

        # Blocking
        rowsdict = dict()
        # It may slow down but is safe
        webconn.call(module='api.blocking',
                     method='unblockResources',
                     **config['API'])
        # Fairly blocking first
        logger.debug('Blocking fairly (as is)')
        rows = webconn.call(module='api.blocking',
                            method='blockResourcesFairly',
                            **config['API'])
        rowsdict['fairly'] = rows
        logger.info('Blocked fairly ' + str(rows) + ' rows')
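        # Each config['Blocking'] entry is a (src_entity, dst_entity) pair for blockResourcesExcessively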
        for src, dst in config['Blocking']:
            logger.info('Blocking ' + str(dst) + ' from ' + str(src))
            rows = webconn.call(module='api.blocking',
                                method='blockResourcesExcessively',
                                src_entity=src,
                                dst_entity=dst,
                                **config['API'])
            if rows is not None:
                logger.info('Blocked ' + str(rows) + ' rows')
                rowsdict[str(dst) + '->' + str(src)] = rows
            else:
                logger.warning('Nothing has been blocked from ' + str(src) + ' to ' + str(dst))
        # Blocking custom resources
        if config['Miscellaneous']['custom']:
            logger.info('Blocking custom resources')
            rows = webconn.call(module='api.blocking',
                                method='blockCustom',
                                **config['API'])
            logger.info('Blocked ' + str(rows))
            rowsdict['Custom'] = rows

        # Unblocking
        whitelist = config['Miscellaneous']['whitelist']
        if whitelist is not None:
            logger.info('Unblocking whitelist')
            rows = webconn.call(module='api.blocking',
                                method='unblockSet',
                                resSet=whitelist,
                                **config['API'])
            logger.info('Unblocked ' + str(rows))
            rowsdict['Undone'] = rows

        # Summarising the blocking results
        result = 'Blocking results\n' + '\n'.join(k + ':' + str(v) for k, v in rowsdict.items())
        # Updating the state in the database
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=0,
                     result=result,
                     **config['API'])
        logger.info('Blocking was finished, enjoy your 1984th')

    except Exception as e:
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=1,
                     result=str(e),
                     **config['API'])
        logger.error(str(e))
        return getattr(e, 'errno', 1)

    return 0