Example #1
def main():
    configPath = utils.confpath_argv()
    if configPath is None:
        utils.print_help()
        return 0

    config = utils.initConf(configPath, __file__)

    logger = utils.initLog(**config['Logging'])
    logger.debug('Starting with config:\n' + str(config))

    try:
        running = webconn.call(module='api.procutils',
                               method='checkRunning',
                               procname=config['Global']['procname'],
                               **config['API'])
    except Exception as e:
        logger.critical('Couldn\'t obtain information from the database\n' +
                        str(e))
        return 9
    if running and not config['Global'].get('forcerun'):
        logger.critical(
            'The same program is running at this moment. Halting...')
        return 0
    # Registering the job and getting its log entry ID
    log_id = webconn.call(module='api.procutils',
                          method='addLogEntry',
                          procname=config['Global']['procname'],
                          **config['API'])
    try:
        # Fetching http restrictions
        logger.info('Fetching restrictions list from DB')
        # Don't use lstrip('http://') here: str.lstrip strips a set of
        # characters, not a prefix. Using a particular case for http instead.
        urlsSet = set()
        urlsSet.update(
            webconn.call(module='api.restrictions',
                         method='getBlockedHTTP',
                         cutproto=True,
                         **config['API']))
        if config['Extra']['https']:
            # Using particular case for https
            urlsSet.update(
                webconn.call(module='api.restrictions',
                             method='getBlockedHTTPS',
                             cutproto=True,
                             **config['API']))
        if config['Extra']['domain']:
            urlsSet.update(
                webconn.call(module='api.restrictions',
                             method='getBlockedDataSet',
                             entitytypes='domain',
                             blocktypes='domain',
                             **config['API']))
        if config['Extra']['domain-mask']:
            urlsSet.update(
                webconn.call(module='api.restrictions',
                             method='getBlockedDataSet',
                             entitytypes='domain-mask',
                             blocktypes='domain-mask',
                             **config['API']))
        # Truncating entries if too many.
        if len(urlsSet) > config['Extra']['truncate-after']:
            logger.debug('Truncating entries: ' +
                         str(len(urlsSet) - config['Extra']['truncate-after']))
            # Keep only the last truncate-after entries; note that urlsSet
            # is a list from this point on.
            urlsSet = list(urlsSet)[-config['Extra']['truncate-after']:]

        logger.info('Entries being blocked: ' + str(len(urlsSet)))
        logger.info('Updating F5 configuration...')
        result = ['URLs to restrict: ' + str(len(urlsSet))]

        for host in config['F5']:
            logger.info('Host: ' + host['host'])
            saved = False
            # Putting URLs to F5
            success, strcode = updateF5datagroup(urls=urlsSet, **host)
            if success:
                # Then saving
                saved = saveF5config(**host)
                if saved:
                    logger.info('Configuration is up to date and saved')
                else:
                    logger.warning('Configuration was not saved, '
                                   'which is unexpected')
            else:
                logger.warning('Couldn\'t update')
            res = ('F5 ' + host['host'] + ' status: ' +
                   ('OK ' if success else 'ERROR ') + strcode +
                   (' ' if saved else ' not ') + 'saved')

            logger.debug(res)
            result.append(res)

        # Updating the state in the database
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=0,
                     result='\n'.join(result),
                     **config['API'])
        logger.info('Blocking was finished, enjoy your 1984th')

    except Exception as e:
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=1,
                     result=str(e),
                     **config['API'])
        logger.error(str(e))
        return getattr(e, 'errno', 1)

    return 0
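
All six examples funnel their database work through webconn.call(module=..., method=..., **config['API']). The wrapper itself is not shown on this page; as a rough sketch, assuming a JSON-over-HTTP dispatcher whose endpoint and token come from hypothetical url and token keys inside config['API'], it could look like this:

import json
import urllib.request

def call(module, method, url, token=None, **kwargs):
    # Serialize the dispatch request; remaining keyword arguments become
    # the remote method's arguments.
    payload = json.dumps({'module': module,
                          'method': method,
                          'args': kwargs}).encode('utf-8')
    req = urllib.request.Request(url, data=payload,
                                 headers={'Content-Type': 'application/json'})
    if token is not None:
        req.add_header('Authorization', 'Bearer ' + token)
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read().decode('utf-8'))

Under that assumption every call site above works unchanged, since **config['API'] supplies url and token.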
Example #2
def main():
    configPath = utils.confpath_argv()
    if configPath is None:
        utils.print_help()
        return 0

    config = utils.initConf(configPath, __file__)

    logger = utils.initLog(**config['Logging'])
    logger.debug('Starting with config:\n' + str(config))

    utils.createFolders(config['Global']['tmppath'])

    try:
        running = webconn.call(module='api.procutils',
                               method='checkRunning',
                               procname=config['Global']['procname'],
                               **config['API'])
    except Exception as e:
        logger.critical('Couldn\'t obtain information from the database\n' +
                        str(e))
        return 9
    if running and not config['Global'].get('forcerun'):
        logger.critical(
            'The same program is running at this moment. Halting...')
        return 0
    # Registering the job and getting its log entry ID
    log_id = webconn.call(module='api.procutils',
                          method='addLogEntry',
                          procname=config['Global']['procname'],
                          **config['API'])

    try:
        # Fetching ip restrictions
        logger.info('Fetching restrictions list from DB')
        ipsublist, totalblocked = webconn.call(
            module='api.restrictions',
            method='getBlockedIPs',
            collapse=config['Bird']['collapse'],
            ipv6=config['Bird']['ipv6'],
            **config['API'])
        # Checking limit
        if len(ipsublist) > config['Bird']['limit']:
            logger.warning('Limit exceeded: ' + str(len(ipsublist)) +
                           ' routes')
            ipsublist = ipsublist[-config['Bird']['limit']:]

        logger.info('Updating bird configuration and restarting daemon...')
        # Updating BGP casts
        updateBirdConfig(ipsublist=ipsublist, **config['Bird'])
        # Saving the conf to another place
        if config['Global']['saveconf']:
            shutil.copy(config['Bird']['confpath'],
                        config['Global']['tmppath'])

        # Composing the result summary
        result = [
            str(totalblocked) + ' ip entries are routed to blackhole',
            str(len(ipsublist)) + ' entries are announced by BGP daemon'
        ]
        logger.info(', '.join(result))
        # Updating the state in the database
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=0,
                     result='\n'.join(result),
                     **config['API'])
        logger.info('Blocking was finished, enjoy your 1984th')

    except Exception as e:
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=1,
                     result=str(e),
                     **config['API'])
        logger.error(str(e))
        return getattr(e, 'errno', 1)

    return 0
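
updateBirdConfig is defined outside this snippet. A minimal sketch, assuming it rewrites a static blackhole-route stanza and asks the daemon to reload (confpath comes from config['Bird'], as the shutil.copy above shows; restartcmd is an invented parameter, and **kwargs absorbs the collapse/ipv6/limit keys the caller also passes):

import subprocess

def updateBirdConfig(ipsublist, confpath, restartcmd='birdc configure',
                     **kwargs):
    # One blackhole route per announced prefix.
    routes = ''.join('    route ' + net + ' blackhole;\n' for net in ipsublist)
    with open(confpath, 'w') as f:
        f.write('protocol static {\n' + routes + '}\n')
    # 'birdc configure' makes the running daemon re-read its config.
    subprocess.check_call(restartcmd.split())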
Example #3
def main():
    configPath = utils.confpath_argv()
    if configPath is None:
        utils.print_help()
        return 0

    config = utils.initConf(configPath, __file__)

    logger = utils.initLog(**config['Logging'])
    logger.debug('Starting with config:\n' + str(config))

    utils.createFolders(config['Global']['tmppath'])

    try:
        running = webconn.call(module='api.procutils',
                               method='checkRunning',
                               procname=config['Global']['procname'],
                               **config['API'])
    except Exception as e:
        logger.critical('Couldn\'t obtain information from the database\n' +
                        str(e))
        return 9
    if running and not config['Global'].get('forcerun'):
        logger.critical('The same program is running at this moment. Halting...')
        return 0
    # Registering the job and getting its log entry ID
    log_id = webconn.call(module='api.procutils',
                          method='addLogEntry',
                          procname=config['Global']['procname'],
                          **config['API'])

    if config['Unbound'].get('stubip') is None \
            and config['Unbound'].get('stubipv6') is None:
        # Generally it's acceptable, but will work only for redirect records
        # Transparent records without any info will be passed through.
        logger.error('The stub is set for neither A nor AAAA records.')
        logger.warning('Zones will be defined without info entries.')

    try:
        logger.info('Obtaining current domain blocklists on unbound daemon')
        domainUBCSet, wdomainUBCSet = getUnboundLocalDomains(**config['Unbound'])

        logger.info('Fetching restrictions list from DB')
        domainBlockSet, wdomainBlockSet = webconn.call(
            module='api.restrictions',
            method='getBlockedDomains',
            collapse=config['Unbound']['collapse'],
            **config['API'])
        logger.info('Obtained ' + str(len(domainBlockSet)) + ' strict domains and ' +
                    str(len(wdomainBlockSet)) + ' wildcard domains')
        # Lists retrieved; converting to sets
        domainBlockSet = set(domainBlockSet)
        wdomainBlockSet = set(wdomainBlockSet)

        logger.info('Banning...')
        result = ['Unbound updates:']

        domainset = domainBlockSet - domainUBCSet
        addUnboundZones(domainset=domainset,
                        zonetype='static',
                        **config['Unbound'])
        logger.debug('Strict banned: ' + ' '.join(map(str, domainset)))
        result.append('Strict banned: ' + str(len(domainset)))

        domainset = wdomainBlockSet - wdomainUBCSet
        addUnboundZones(domainset=domainset,
                        zonetype='redirect',
                        **config['Unbound'])
        logger.debug('Wildcard banned: ' + ' '.join(map(str, domainset)))
        result.append('Wildcard banned: ' + str(len(domainset)))

        logger.info('Unbanning...')

        domainset = domainUBCSet - domainBlockSet
        delUnboundZones(domainset=domainset,
                        zonetype='static',
                        **config['Unbound'])
        logger.debug('Strict unbanned: ' + ' '.join(map(str, domainset)))
        result.append('Strict unbanned: ' + str(len(domainset)))

        domainset = wdomainUBCSet - wdomainBlockSet
        delUnboundZones(domainset=domainset,
                        zonetype='redirect',
                        **config['Unbound'])
        logger.debug('Wildcard unbanned: ' + ' '.join(map(str, domainset)))
        result.append('Wildcard unbanned: ' + str(len(domainset)))

        logger.info('Generating permanent config...')
        buildUnboundConfig(domainset=domainBlockSet,
                           wdomainset=wdomainBlockSet,
                           **config['Unbound'])
        if config['Global']['saveconf']:
            shutil.copy(config['Unbound']['confpath'],
                        config['Global']['tmppath'])

        logger.info(', '.join(result))
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=0,
                     result='\n'.join(result),
                     **config['API'])
        logger.info('Blocking was finished, enjoy your 1984th')

    except Exception as e:
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=1,
                     result=str(e),
                     **config['API'])
        logger.error(str(e))
        return getattr(e, 'errno', 1)

    return 0
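
addUnboundZones and delUnboundZones are likewise external. Assuming they drive a running daemon through unbound-control, whose local_zone and local_zone_remove commands manage exactly these runtime zones, a sketch could be:

import subprocess

def addUnboundZones(domainset, zonetype, **kwargs):
    # Register each domain as a runtime local zone of the given type
    # ('static' refuses resolution, 'redirect' rewrites the subtree).
    for domain in domainset:
        subprocess.check_call(['unbound-control', 'local_zone',
                               domain, zonetype])

def delUnboundZones(domainset, zonetype, **kwargs):
    # local_zone_remove takes only the name; zonetype is accepted here
    # merely to mirror the call sites above.
    for domain in domainset:
        subprocess.check_call(['unbound-control', 'local_zone_remove',
                               domain])

The **kwargs catch-all matters because the callers expand the whole config['Unbound'] section into each call.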
Example #4
def main():
    configPath = utils.confpath_argv()
    if configPath is None:
        utils.print_help()
        return 0

    config = utils.initConf(configPath, __file__)

    logger = utils.initLog(**config['Logging'])
    logger.debug('Starting with config:\n' + str(config))

    utils.createFolders(config['Global']['tmppath'])

    try:
        running = procutils.checkRunning(procname=config['Global']['procname'])
    except Exception as e:
        logger.critical('Couldn\'t obtain information from the database\n' + str(e))
        return 9
    if running and not config['Global'].get('forcerun'):
        logger.critical('The same program is running at this moment. Halting...')
        return 0
    log_id = procutils.addLogEntry(procname=config['Global']['procname'])

    try:
        if config['Miscellaneous']['uselocaldump']:
            with open(config['Global']['dumpPath'], 'rb') as f:
                dumpFile = f.read()
        else:
            # Checking dump info
            logger.debug('Obtaining dumpfile from ' + config['DumpLoader']['url'])
            rknSW = rknsoapwrapper.RknSOAPWrapper(**config['DumpLoader'])
            dumpDate = rknSW.getLastDumpDateEx()
            if not dumpDate:
                # Exception() takes no errno keyword; set the attribute so
                # that getattr(e, 'errno', 1) in the handler picks it up.
                err = Exception('Couldn\'t obtain dumpdates info')
                err.errno = 2
                raise err
            # Loaded dump unix timestamp in seconds
            update_ts = max(dumpDate['lastDumpDate'],
                            dumpDate['lastDumpDateUrgently'])/1000
            logger.info('Latest dump timestamp is: ' +
                        str(datetime.fromtimestamp(update_ts)))
            # Timestamp of the last parsed dump, in seconds
            dump_ts = monitoring.getLastDumpTS()
            logger.info('Parsed dump timestamp is: ' +
                        str(datetime.fromtimestamp(dump_ts)))
            # Five-second rule: treat the dump as fresh if it is at most
            # 5 seconds newer than the one already parsed.
            if update_ts < dump_ts + 5:
                result = 'Last dump is relevant'
                logger.info(result)
                dumpparse.updateDumpCheckTime()
                procutils.finishJob(log_id, 0, result)
                return 0

            # Obtaining dump file
            logger.info('Blocklist is outdated, requesting a new dump')
            with open(config['Global']['reqPath'], 'rb') as req, \
                    open(config['Global']['reqPathSig'], 'rb') as sig:
                dumpFile = rknSW.getDumpFile(req.read(), sig.read())
            if config['Global']['savedump']:
                logger.info('Saving file to ' + config['Global']['dumpPath'])
                with open(config['Global']['dumpPath'], 'wb') as f:
                    f.write(dumpFile)

        # Parsing dump file
        logger.info('Parsing the dump')
        xmldump = zipfile.ZipFile(io.BytesIO(dumpFile)).read('dump.xml').decode('cp1251')
        # Freeing memory
        del dumpFile
        dumpparse.parse(xmldump)
        # Freeing memory
        del xmldump
        result = 'Dump has been parsed into the database successfully'
        logger.info(result)

        # Updating the state in the database
        procutils.finishJob(log_id, 0, result)
        logger.info('Blocking was finished, enjoy your 1984th')

    except Exception as e:
        procutils.finishJob(log_id, 1, str(e))
        logger.error(str(e))
        return getattr(e, 'errno', 1)

    return 0
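
The original line here read raise Exception('...', errno=2), which is itself a TypeError: Exception() accepts no keyword arguments. The fix above attaches the attribute by hand; a small dedicated class, sketched below, would express the intent more cleanly while keeping getattr(e, 'errno', 1) in the handlers working:

class JobError(Exception):
    # Carries the errno that main() returns as the process exit code.
    def __init__(self, message, errno=1):
        super().__init__(message)
        self.errno = errno

# At the call site above this would become:
#     raise JobError('Couldn\'t obtain dumpdates info', errno=2)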
Example #5
def main():
    configPath = utils.confpath_argv()
    if configPath is None:
        utils.print_help()
        return 0

    config = utils.initConf(configPath)

    logger = utils.initLog(**config['Logging'])
    logger.debug('Starting with config:\n' + str(config))

    utils.createFolders(config['Global']['tmppath'])

    try:
        running = procutils.checkRunning(connstr, PROCNAME)
    except Exception as e:
        logger.critical('Couldn\'t obtain information from the database\n' +
                        str(e))
        return 9
    if running and not config['Global'].get('forcerun'):
        logger.critical(
            'The same program is running at this moment. Halting...')
        return 0
    log_id = procutils.addLogEntry(connstr, PROCNAME)

    try:
        if config['Miscellaneous']['uselocaldump']:
            with open(config['Global']['dumpPath'], 'rb') as f:
                dumpFile = f.read()
        else:
            # Checking dump info
            logger.debug('Obtaining dumpfile from ' +
                         config['DumpLoader']['url'])
            rknSW = rknsoapwrapper.RknSOAPWrapper(**config['DumpLoader'])
            dumpDate = rknSW.getLastDumpDateEx()
            if not dumpDate:
                # Exception() takes no errno keyword; set the attribute so
                # that getattr(e, 'errno', 1) in the handler picks it up.
                err = Exception('Couldn\'t obtain dumpdates info')
                err.errno = 2
                raise err
            update_time = max(dumpDate['lastDumpDate'],
                              dumpDate['lastDumpDateUrgently']) / 1000

            parsed_recently = dumpparse.parsedRecently(update_time, connstr)

            if parsed_recently:
                result = 'Last dump is relevant'
                logger.info(result)
                # Updating the state in the database
                procutils.finishJob(connstr, log_id, 0, result)
                return 0

            # Obtaining dump file
            logger.info('Blocklist is outdated, requesting a new dump')
            with open(config['Global']['reqPath'], 'rb') as req, \
                    open(config['Global']['reqPathSig'], 'rb') as sig:
                dumpFile = rknSW.getDumpFile(req.read(), sig.read())
            if config['Global']['savedump']:
                with open(config['Global']['dumpPath'], 'wb') as f:
                    f.write(dumpFile)

        # Parsing dump file
        xmldump = zipfile.ZipFile(
            io.BytesIO(dumpFile)).read('dump.xml').decode('cp1251')
        # Freeing memory
        del dumpFile

        dumpparse.parse(xmldump, connstr)
        # Freeing memory
        del xmldump
        logger.info('Dump has been parsed into the database successfully')

        # Blocking
        rowsdict = dict()
        # It may slow things down, but it is safe
        blocking.unblockResources(connstr)
        # Fairly blocking first
        logger.debug('Blocking fairly (as is)')
        rows = blocking.blockResourcesFairly(connstr)
        rowsdict['fairly'] = rows
        logger.info('Blocked fairly ' + str(rows) + ' rows')
        for src, dst in config['Blocking']:
            logger.info('Blocking ' + str(dst) + ' from ' + str(src))
            rows = blocking.blockResourcesExcessively(connstr, src, dst)
            if rows is not None:
                logger.info('Blocked ' + str(rows) + ' rows')
                rowsdict[str(dst) + '->' + str(src)] = rows
            else:
                logger.warning('Nothing has been blocked from ' + str(src) +
                               ' to ' + str(dst))
        # Blocking custom resources
        if config['Miscellaneous']['custom']:
            logger.info('Blocking custom resources')
            rows = blocking.blockCustom(connstr)
            logger.info('Blocked ' + str(rows))
            rowsdict['Custom'] = rows

        # Unblocking
        whitelist = config['Miscellaneous']['whitelist']
        if whitelist is not None:
            logger.info('Unblocking whitelist')
            rows = blocking.unblockSet(connstr, whitelist)
            logger.info('Unblocked ' + str(rows))
            rowsdict['Undone'] = rows

        # Updating the state in the database
        result = 'Blocking results\n' + '\n'.join(k + ':' + str(v)
                                                  for k, v in rowsdict.items())
        procutils.finishJob(connstr, log_id, 0, result)
        logger.info('Blocking was finished, enjoy your 1984th')

    except Exception as e:
        procutils.finishJob(connstr, log_id, 1, str(e))
        logger.error(str(e))
        return getattr(e, 'errno', 1)

    return 0
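
Unlike Examples #1-3, this variant talks to the database directly through a connstr DSN (a module-level global not shown in the snippet). For illustration only, a hypothetical blocking.unblockSet, assuming psycopg2 and an invented resources table with domain and is_blocked columns, might look like:

import psycopg2

def unblockSet(connstr, whitelist):
    # Clear the blocked flag for every whitelisted domain and return the
    # number of affected rows, which the caller logs as 'Unblocked N'.
    with psycopg2.connect(connstr) as conn:
        with conn.cursor() as cur:
            cur.execute('UPDATE resources SET is_blocked = FALSE'
                        ' WHERE domain = ANY(%s)',
                        (list(whitelist),))
            return cur.rowcount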
Example #6
def main():
    configPath = utils.confpath_argv()
    if configPath is None:
        utils.print_help()
        return 0

    config = utils.initConf(configPath, __file__)

    logger = utils.initLog(**config['Logging'])
    logger.debug('Starting with config:\n' + str(config))

    utils.createFolders(config['Global']['tmppath'])

    try:
        running = webconn.call(module='api.procutils',
                               method='checkRunning',
                               procname=config['Global']['procname'],
                               **config['API'])
    except Exception as e:
        logger.critical('Couldn\'t obtain information from the database\n' +
                        str(e))
        return 9
    if running and not config['Global'].get('forcerun'):
        logger.critical(
            'The same program is running at this moment. Halting...')
        return 0
    # Registering the job and getting its log entry ID
    log_id = webconn.call(module='api.procutils',
                          method='addLogEntry',
                          procname=config['Global']['procname'],
                          **config['API'])

    try:
        logger.info('Initializing export threads pool...')
        threadpool = Pool(processes=int(config['Global']['threads']))
        resultpool = dict()

        for entity, options in config['Sandvine']['export'].items():
            logger.debug(entity + ' export started...')
            proc = PROC_DICT.get(entity)
            if proc is None:
                logger.warning('No such procedure built-in: ' + entity)
                continue
            resultpool[entity] = threadpool.apply_async(
                proc, kwds={**options, **config['API']})

        # I know about wait() and get()
        timeout = int(config['Global']['wait_timeout'])
        runtime = 0
        pollsec = int(config['Global']['poll_timeout'])
        ready = dict()

        while runtime < timeout:

            for entity in resultpool.keys():
                ready[entity] = resultpool[entity].ready()

            if all(ready.values()):
                logger.info('All tasks completed')
                break

            sleep(pollsec)
            runtime = runtime + pollsec
            logger.debug(
                'Progress ' + str(runtime) + '/' + str(timeout) + ' sec: ' +
                '|'.join([' ' + k + (' OK ' if v else ' -- ')
                          for k, v in ready.items()]))

        logger.info('Export results:')
        for entity in resultpool.keys():
            try:
                logger.info(entity + ': ' + str(resultpool[entity].get(1)) +
                            ' bytes written')
            except TimeoutError:
                logger.warning(entity + ' has timed out')
        threadpool.terminate()

        if not any(ready.values()):
            result = 'All exports failed'
            logger.error(result)
            # Exception() takes no errno keyword; set the attribute so
            # that getattr(e, 'errno', 1) in the handler picks it up.
            err = Exception(result)
            err.errno = 13
            raise err
        if all(ready.values()):
            result = 'All exports were successful'
            logger.info(result)
        else:
            result = 'Some exports failed'
            logger.warning(result)

        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=0,
                     result=result,
                     **config['API'])

        logger.info('Blocking was finished, enjoy your 1984th')

    except Exception as e:
        webconn.call(module='api.procutils',
                     method='finishJob',
                     log_id=log_id,
                     exit_code=1,
                     result=str(e),
                     **config['API'])
        logger.error(str(e))
        return getattr(e, 'errno', 1)

    return 0
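
The polling loop is deliberate (the '# I know about wait() and get()' comment says as much). For comparison, a deadline-based variant leaning on AsyncResult.get(timeout) directly; wait_all is a hypothetical helper, not part of the original code:

from multiprocessing import TimeoutError
from time import monotonic

def wait_all(resultpool, timeout):
    # One shared deadline across all AsyncResults; entities that miss it
    # come back as None instead of raising.
    deadline = monotonic() + timeout
    outcomes = {}
    for entity, res in resultpool.items():
        try:
            outcomes[entity] = res.get(max(0.0, deadline - monotonic()))
        except TimeoutError:
            outcomes[entity] = None
    return outcomes

The trade-off is losing the per-poll progress logging, which is presumably why the original polls instead.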