Example #1
import logging


def send(msg):
    # In testing mode, log the outgoing message instead of actually sending it.
    if Config.TESTING:
        logger = logging.getLogger("email")
        for line in str(msg).splitlines():
            logger.info(line)
    else:
        mail.send(msg)
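To run this snippet standalone, `Config` and `mail` must come from the surrounding application (e.g. a Flask-Mail setup). A minimal sketch with hypothetical stand-ins:

import logging

logging.basicConfig(level=logging.INFO)


class Config:          # hypothetical stand-in for the real app config
    TESTING = True


class _ConsoleMail:    # hypothetical stand-in for a mail extension
    def send(self, msg):
        print("would send:", msg)


mail = _ConsoleMail()

# With TESTING enabled, send() above logs each line instead of mailing it.
send("Subject: hi\nBody: hello")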
Example #2
import logging

import nltk
import numpy as np

from config import config  # assumption: project config exposing IGNORED_WORDS


async def document_parser(app, queue):
    log = logging.getLogger('Document parser')
    model = app['data'].get('model')
    index = app['data'].get('index')
    pool = app['db_pool']
    if not model or not index:
        log.error(
            "Word2vec model or document index is not loaded. Aborting...")
    else:
        while True:
            # wait for an item from the producer
            item = await queue.get()

            # process the item
            log.info('Indexing sentence_id: {} ...'.format(item))

            async with pool.acquire() as connection:
                async with connection.transaction():
                    sql = "SELECT text from sentence WHERE id = $1;"
                    text = await connection.fetchval(sql, item)

                word_embeddings = []
                words = filter(lambda x: x not in config.IGNORED_WORDS,
                               nltk.word_tokenize(text))
                for w in words:
                    if w in model:
                        word_embeddings.append(model[w])

                if word_embeddings:
                    sentence_embedding = np.mean(word_embeddings, axis=0)
                    index.add_items(sentence_embedding, item)
                    async with connection.transaction():
                        sql = "UPDATE sentence SET indexed = true WHERE id = $1;"
                        await connection.execute(sql, item)

            queue.task_done()
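A hedged sketch of how such a consumer is typically wired up with asyncio; the producer and entry point below are illustrative, not part of the project:

import asyncio

async def produce(queue, sentence_ids):
    for sid in sentence_ids:
        await queue.put(sid)   # hand each sentence id to document_parser
    await queue.join()         # block until task_done() was called for each item

async def main(app):
    queue = asyncio.Queue()
    consumer = asyncio.create_task(document_parser(app, queue))
    await produce(queue, [1, 2, 3])
    consumer.cancel()          # document_parser loops forever; cancel when done

# asyncio.run(main(app))  # 'app' must supply the 'data' and 'db_pool' keys used above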
Example #3
import xml.etree.ElementTree as ET

import requests

from models import XRate, peewee_datetime
from config import logging, LOGGER_CONFIG

log = logging.getLogger("CbrApi")
fh = logging.FileHandler(LOGGER_CONFIG["file"])
fh.setLevel(LOGGER_CONFIG["level"])
fh.setFormatter(LOGGER_CONFIG["formatter"])
log.addHandler(fh)
log.setLevel(LOGGER_CONFIG["level"])


def update_xrates(from_currency, to_currency):
    log.info("Started update for: %s=>%s" % (from_currency, to_currency))
    xrate = XRate.select().where(XRate.from_currency == from_currency,
                                 XRate.to_currency == to_currency).first()

    log.debug("rate before: %s", xrate)
    xrate.rate = get_cbr_rate(from_currency)
    xrate.updated = peewee_datetime.datetime.now()
    xrate.save()

    log.debug("rate after: %s", xrate)
    log.info("Finished update for: %s=>%s" % (from_currency, to_currency))


def get_cbr_rate(from_currency):
    response = requests.get(
Example #4
import os
import sys

import tensorflow as tf

sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
from config import config, logging

from tools import utils
from learner.feature_extractor import get_droid_feature, FeatureMapping, drebin
from learner.basic_DNN import BasicDNNModel, tester
from learner import model_scope_dict
from defender import defense_model_scope_dict

MODEL_TEMP = BasicDNNModel

models = list(model_scope_dict.values()) + list(defense_model_scope_dict.values())

logger = logging.getLogger("learning.surrogate")

SUR_INFO = {
    'dataset_dir': os.path.join(config.get('DEFAULT', 'database_dir'),
                                config.get('DEFAULT', 'surrogate_dataset')),
    'feature_type': 'drebin',
    'feature_utility_rate': 1.,
    'feature_mapping_type': config.get('feature.drebin', 'feature_mp'),
    'use_interdependent_features': False,
    'learning_algorithm': 'DNN'
}
Example #5
import os
import sys
from datetime import datetime
from timeit import default_timer
import shutil

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.metrics import f1_score

sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
from learner.classification import *
from tools import utils
from learner.feature_extractor import get_droid_feature, FeatureMapping, DREBIN_FEAT_INFO, feature_type_scope_dict
from config import config, logging

logger = logging.getLogger("learning.basic_dnn")

INFO = {
    'dataset_dir': config.get('dataset', 'dataset_root'),
    'feature_type': list(feature_type_scope_dict.keys())[0],  # 'drebin',
    'feature_mapping_type': config.get('feature.drebin', 'feature_mp'),
    'use_interdependent_features': False,
    'learning_algorithm': 'DNN'
}

DNN_HP = {
    'random_seed': 23456,
    'hidden_units': [160, 160],  # two hidden layers with 160 neurons each
    'output_dim': 2,  # malicious vs. benign
    'n_epochs': 150,
Example #6
import requests

from models import XRate, peewee_datetime

from config import logging, LOGGER_CONFIG

log = logging.getLogger("PrivatApi")
fh = logging.FileHandler(LOGGER_CONFIG["file"])
fh.setLevel(LOGGER_CONFIG["level"])
fh.setFormatter(LOGGER_CONFIG["formatter"])
log.addHandler(fh)
log.setLevel(LOGGER_CONFIG["level"])


def update_xrates(from_currency, to_currency):
    log.info("Started update for: %s=>%s" % (from_currency, to_currency))
    xrate = XRate.select().where(XRate.from_currency == from_currency,
                                 XRate.to_currency == to_currency).first()

    log.debug("rate before: %s", xrate)
    xrate.rate = get_privat_rate(from_currency)
    xrate.updated = peewee_datetime.datetime.now()
    xrate.save()

    log.debug("rate after: %s", xrate)
    log.info("Finished update for: %s=>%s" % (from_currency, to_currency))


def get_privat_rate(from_currency):
    response = requests.get(
        "https://api.privatbank.ua/p24api/pubinfo?exchange&json&coursid=11")
Example #7
    def __init__(self, logger_name):
        self.log = logging.getLogger("Api")
        self.log.name = logger_name
Example #8
from models import XRate, peewee_datetime
from config import logging, LOGGER_CONFIG

log = logging.getLogger("TestApi")
fh = logging.FileHandler(LOGGER_CONFIG["file"])
fh.setLevel(LOGGER_CONFIG["level"])
fh.setFormatter(LOGGER_CONFIG["formatter"])
log.addHandler(fh)
log.setLevel(LOGGER_CONFIG["level"])


def update_xrates(from_currency, to_currency):
    log.info("Started update for: %s=>%s" % (from_currency, to_currency))
    xrate = XRate.select().where(XRate.from_currency == from_currency,
                                 XRate.to_currency == to_currency).first()

    log.debug("rate before: %s", xrate)
    xrate.rate += 0.01
    xrate.updated = peewee_datetime.datetime.now()
    xrate.save()

    log.debug("rate after: %s", xrate)
    log.info("Finished update for: %s=>%s" % (from_currency, to_currency))


if __name__ == '__main__':
    update_xrates(840, 980)
Example #9
'''
OPERATOR = {
    # insert
    0: "insertion",
    # delete
    1: "removal"
}
'''
import os
import sys
from abc import ABCMeta, abstractmethod

project_root = os.path.dirname(
    os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
sys.path.append(project_root)
from config import config, logging
logger = logging.getLogger("attacker.modification")

MANIFEST = "AndroidManifest.xml"  # type: str


class DroidModification(object):
    """Abstract base class for all attack classes."""
    __metaclass__ = ABCMeta

    def __init__(self, disassembly_root, verbose):
        self.disassembly_root = disassembly_root
        self.verbose = verbose

    @abstractmethod
    def insert(self, elem_name, mod_count=1):
        """Insert an specified element"""
Example #10
from flask import Flask, request, jsonify, g
from flask_sqlalchemy import SQLAlchemy  # needed for SQLAlchemy(app) below
from config import KEYSTONE, DATABASE, DATABASE_CMDB, DATABASE_CLOUD, logging
import urlparse  # Python 2 module; this snippet predates Python 3
import json
import re

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = DATABASE
app.config['SQLALCHEMY_BINDS'] = {
    'cmdb': DATABASE_CMDB,
    'cloud': DATABASE_CLOUD
}

db = SQLAlchemy(app)
logger = logging.getLogger(__name__)

@app.errorhandler(401)
def page_not_found(error):
    return 'Unauthorized', 401

from apiUtil import http_request

@app.before_request
def before_request():
    token = request.headers.get("X-Auth-Token")
    g.party = request.headers.get("Sp-Agent", "default")
    g.admin_token = KEYSTONE[g.party]['admin_token']
    g.uri = KEYSTONE[g.party]['uri']
    g.admin_proj = KEYSTONE[g.party]['admin_proj']
    # Static files and monitoring data skip auth checks and pass straight through
Example #11
    def __init__(self, logger_name):
        self.log = logging.getLogger(logger_name)
        self.log.addHandler(fh)
        self.log.setLevel(LOGGER_CONFIG["level"])
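Here `fh` and `LOGGER_CONFIG` are expected to exist at module level; a sketch of that surrounding setup, following the pattern of Examples #6 and #8:

import logging

from config import LOGGER_CONFIG  # assumed to define 'file', 'level' and 'formatter'

fh = logging.FileHandler(LOGGER_CONFIG["file"])
fh.setLevel(LOGGER_CONFIG["level"])
fh.setFormatter(LOGGER_CONFIG["formatter"])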
Example #12
import hashlib
import math
import json
from config import CoreConfigure, logging
import os
from django.conf import settings
import datetime
import time

logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(name)s %(levelname)s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S')
logger = logging.getLogger("util")


class Node:
    def __init__(self, info):
        self.methodName = info['name']
        self.executeTime = info['execute_time']
        self.self_time = 0
        self.position = info['position']
        self.p_Father = 0
        self.p_Root = 0
        self.children = []
        self.father = None
        self.depth = 0
        self.isValid = False
        self.hashcode = None
        self.maxTime = info['execute_time']
        self.minTime = info['execute_time']
        self.avgTime = info['execute_time']
        self.percentageChildren = 0
Example #13
        break
            else:
                if any((drinks_amount == [], ingredients_amount == [])):  
                    log.error(f'client has ordered non-existent drink \u2013 {choice_drink=}, {choice_ingredient=}')
                    client_socket.sendall('Sorry, I cannot make it'.encode())
                    break
                else:
                    if all((drinks_amount[0][0], ingredients_amount[0][0])):
                        transfer_data(update_menu, choice_drink, choice_ingredient)
                        event.set()
                        log.info(f'client has ordered {choice_drink} with {choice_ingredient}')
                        client_socket.sendall(f'Take your {choice_drink} with {choice_ingredient}'.encode())
                        break
                    else:
                        log.warning(f'coffee machine has no {choice_drink} or {choice_ingredient}')
                        client_socket.sendall(f'coffee machine has no {choice_drink} or {choice_ingredient}'.encode())
                        break
        
    log.info('client has disconnected')
    client_socket.close()
    

if __name__ == '__main__':
    log = logging.getLogger('Coffee machine')
    log.setLevel(LOGGER_CONFIG['level'])
    fh = logging.FileHandler(LOGGER_CONFIG['file'], 'w', 'utf-8')
    fh.setFormatter(LOGGER_CONFIG['formatter'])
    log.addHandler(fh)
    
    initialize_menu()
    accept_connection(server_socket, lock, event)
Example #14
from reader import Reader                     # used by logProfiler below
from multiprocessing import JoinableQueue, Manager
from multiprocessing.sharedctypes import Value
from multiprocessing.managers import BaseManager, SyncManager
from config import CoreConfigure, logging
from util import Node, Tree, InvertedIndex, DateTimeHashTable, HistroyRecord
import time
import signal
import os
from django.conf import settings
import json

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s %(name)s %(levelname)s %(message)s',
    datefmt='%a, %d %b %Y %H:%M:%S',
)
logger = logging.getLogger("detector")


class logProfiler(object):
    def __init__(self):
        self.reader = Reader()
        self.shareQueue = JoinableQueue()
        self.finalTreeCollection = Manager().Queue()
        self.processPool = []
        self.slaveNumber = 2
        self.count = Value('i', 0)
        self.manager = Manager()
        self.sharedLst = self.manager.list()
        self.invertedIndex = InvertedIndex()
        self.services = {}
        self.final = []
"""Extract various types of features"""
import os
import collections
import warnings

import numpy as np
from sklearn.preprocessing import MinMaxScaler
from collections import defaultdict

from tools import utils
from config import config, COMP, logging

from learner import drebin
from learner.drebin import DREBIN_FEAT_INFO

logger = logging.getLogger('learner.feature')


def normalize_data(X, is_fitting=False, feature_type='drebin'):
    if is_fitting:
        minmax_norm = MinMaxScaler()
        normalizer = minmax_norm.fit(X)
        utils.dump_pickle(normalizer,
                          config.get('feature.' + feature_type, 'normalizer'))
    elif os.path.exists(config.get('feature.' + feature_type,
                                   'normalizer')) and not is_fitting:
        normalizer = utils.read_pickle(
            config.get('feature.' + feature_type, 'normalizer'))
    else:
        raise ValueError("Unable to find the normalizer")
    feat_normalized = normalizer.transform(X)
    return feat_normalized
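A hedged usage sketch, assuming the project's config paths are set up: fit the scaler once on training data, then reuse the pickled normalizer for later transforms. Shapes are illustrative:

import numpy as np

X_train = np.random.rand(100, 8)   # stand-in feature matrices
X_test = np.random.rand(20, 8)

# First call fits the MinMaxScaler and pickles it to the configured path.
X_train_n = normalize_data(X_train, is_fitting=True)
# Later calls reload the pickled normalizer and only transform.
X_test_n = normalize_data(X_test, is_fitting=False)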
Example #16
__date__    = "2016-08-18"

import yaml
import sys
import json
import os
import time
import math
from lib2to3.pgen2 import driver

import requests
from bs4 import BeautifulSoup
from selenium import webdriver
import config
from config import logging
logger = logging.getLogger("test")

class PhantomJsUtils(object):

    __instance = None
    __phantomJsDriver = None

    def __new__(cls, *args, **keys):
        if cls.__instance is None:
            cls.__instance = object.__new__(cls)
            log_name = "/tmp/phantomjs.log"  # use os.path.devnull to disable logging
            userAgent = "Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D257 Safari/9537.53"
            
            timeDriverStart = time.clock()  # removed in Python 3.8; time.perf_counter() on modern Python
            cls.__phantomJsDriver = webdriver.PhantomJS(
                desired_capabilities={
Example #17
import requests

from models import XRate, peewee_datetime

from config import logging, LOGGER_CONFIG

log = logging.getLogger('PrivatApi')
fh = logging.FileHandler(LOGGER_CONFIG['file'])
fh.setLevel(LOGGER_CONFIG['level'])
fh.setFormatter(LOGGER_CONFIG['formatter'])
log.addHandler(fh)
log.setLevel(LOGGER_CONFIG['level'])


def update_xrates(from_currency, to_currency):
    log.info(f'Started update for: {from_currency} => {to_currency}')
    # fetch the current rate from the DB
    xrate = XRate.select().where(XRate.from_currency == from_currency,
                                 XRate.to_currency == to_currency).first()
    log.debug(f'rate before: {xrate}')
    # fetch the new value from Privat and store it on the xrate object
    xrate.rate = get_private_rate(from_currency)
    # refresh the 'updated' field
    xrate.updated = peewee_datetime.datetime.now()
    xrate.save()

    log.debug(f'rate after: {xrate}')
    log.info(f'Finished update for: {from_currency} => {to_currency}')


def get_private_rate(from_currency):
"""
@author  : MG
@Time    : 2020/9/18 8:48
@File    : period_resonance_strategy.py
@contact : [email protected]
@desc    : Multi-period resonance strategy.
First study 1-minute and 5-minute period resonance, then extend to more periods, even daily-level resonance.
"""
from vnpy.app.cta_strategy import (StopOrder, TickData, BarData, TradeData,
                                   OrderData, BarGenerator, ArrayManager,
                                   CtaSignal, TargetPosTemplate)
from config import logging

logger = logging.getLogger()


class MACDSignal(CtaSignal):
    """"""
    def __init__(self,
                 fast_window: int,
                 slow_window: int,
                 signal_period: int,
                 period: int = 30):
        """"""
        super().__init__()

        self.fast_window = fast_window
        self.slow_window = slow_window
        self.signal_period = signal_period

        self.period = period
Example #19
from time import sleep
from defenses import BasicDNN, FeatureBinarizationDNN, AdversarialTrainingRegDNN, \
    DAE_RPST_DNN, JointDefense, RandomSubspaceMethod

from config import logging

logger = logging.getLogger("common")


class Defender(object):
    def __init__(self, defense_method_name='adv_training_dnn'):
        self.defense_method_name = defense_method_name
        self.defense = None
        if self.defense_method_name == 'basic_dnn':
            self.defense = BasicDNN()
        elif self.defense_method_name == 'feature_bnrz_dnn':
            self.defense = FeatureBinarizationDNN()
        elif self.defense_method_name == 'adv_training_dnn':
            self.defense = AdversarialTrainingRegDNN()
        elif self.defense_method_name == 'dae_rpst_dnn':
            self.defense = DAE_RPST_DNN()
        elif self.defense_method_name == 'joint_defense':
            self.defense = JointDefense()
        elif self.defense_method_name == 'random_subspace':
            self.defense = RandomSubspaceMethod()
        else:
            raise ValueError(
                "Please choose method from 'basic_dnn', 'feature_bnrz_dnn', 'adv_training_dnn', 'dae_rpst_dnn', 'joint_defense', and 'random_subspace'."
            )

    def train(self):
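The body of train is cut off above; assuming it runs the selected defense's training loop, a hypothetical driver would be:

defender = Defender(defense_method_name='joint_defense')
defender.train()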
Example #20
import os
import sys

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.metrics import f1_score

sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
from defenses.classification import *
from dataset.dataset import load_trainset, load_testset
from dataset.input_preprocessing import normalize_data, random_over_sampling
from utils import utils
from config import config, logging

import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()

logger = logging.getLogger("basic_dnn")


def graph(x_input, hidden_neurons=[160, 160], output_dim=5, is_training=True, name="BASIC_DNN",
          reuse=False):
    '''
    The defined architecture of the neural network.
    :param x_input: Tensor
    :param hidden_neurons: neurons for hidden layers
    :param output_dim: int
    :param is_training: training or not
    :param name: string, the net's name
    :param reuse: reuse or not
    :return: the defined graph of neural networks
    '''
    with tf.variable_scope("{}".format(name), reuse=reuse):
Example #21
from config import logging

# define name and location of logfile
log_file = "./logfile.log"

# Configure logging
logging.basicConfig(level=logging.DEBUG, filename=log_file, filemode="a",
                    format="%(asctime)-15s %(levelname)-8s %(message)s")
logger = logging.getLogger("logger")

# Configure streamhandler
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
stream_handler.setFormatter(logging.Formatter("%(asctime)-15s %(levelname)-8s %(message)s"))

logger.addHandler(stream_handler)


def wrapper(pre, post):
    """
    Simple wrapping function. Place the @ decorator directly above the function to be wrapped.
    :param pre: Function called before the function that is being wrapped
    :param post: Function called after the function that is being wrapped
    """

    def decorator(function):
        def inner(*args, **kwargs):
            # function that is called before executing wrapped function
            pre(function)
            # execute wrapped function
            result = function(*args, **kwargs)
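The excerpt cuts off before inner finishes. A plausible completion and usage, assuming post also receives the wrapped function and inner returns its result:

def wrapper_complete(pre, post):
    """Same idea as wrapper() above, written out in full for illustration."""
    def decorator(function):
        def inner(*args, **kwargs):
            pre(function)
            result = function(*args, **kwargs)
            post(function)            # called after the wrapped function
            return result
        return inner
    return decorator

@wrapper_complete(lambda f: logger.info("entering %s", f.__name__),
                  lambda f: logger.info("leaving %s", f.__name__))
def add(a, b):
    return a + b

add(2, 3)  # logs entry and exit around the call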
Example #22
import requests

import xml.etree.ElementTree as ET

from models import XRate, peewee_datetime
from config import logging, LOGGER_CONFIG

log = logging.getLogger('CBRApi')
fh = logging.FileHandler(LOGGER_CONFIG['file'])
fh.setLevel(LOGGER_CONFIG['level'])
fh.setFormatter(LOGGER_CONFIG['formatter'])
log.addHandler(fh)
log.setLevel(LOGGER_CONFIG['level'])


def update_xrates(from_currency, to_currency):
    log.info(f'Started update for: {from_currency} => {to_currency}')
    # fetch the current rate from the DB
    xrate = XRate.select().where(XRate.from_currency == from_currency,
                                 XRate.to_currency == to_currency).first()
    log.debug(f'rate before: {xrate}')
    # fetch the new value from the CBR and store it on the xrate object
    xrate.rate = get_cbr_rate(from_currency)
    # refresh the 'updated' field
    xrate.updated = peewee_datetime.datetime.now()
    xrate.save()

    log.debug(f'rate after: {xrate}')
    log.info(f'Finished update for: {from_currency} => {to_currency}')

Example #23
from reader import Reader
from multiprocessing import *
from multiprocessing.sharedctypes import Value
from multiprocessing.managers import BaseManager, SyncManager
from config import CoreConfigure, logging
from util import Node, Tree, InvertedIndex, DateTimeHashTable, HistroyRecord
import time
import signal
import os
from django.conf import settings
import json
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(name)s %(levelname)s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S')
logger = logging.getLogger("detector")


class logProfiler(object):
    def __init__(self):
        self.reader = Reader()
        self.shareQueue = JoinableQueue()
        self.finalTreeCollection = Manager().Queue()
        self.processPool = []
        self.slaveNumber = 2
        self.count = Value('i', 0)
        self.manager = Manager()
        self.sharedLst = self.manager.list()
        self.invertedIndex = InvertedIndex()
        self.services = {}
        self.final = []
Example #24
def main():
    global r

    prawlogger = logging.getLogger('prawcore')
    prawlogger.setLevel(logging.WARN)

    while True:
        # Login retry loop
        try:
            logger.info('Logging in as {0}'
                        .format(cfg_file.get('reddit', 'username')))
            r = praw.Reddit(client_id     = cfg_file.get('reddit', 'client_id'),
                            client_secret = cfg_file.get('reddit', 'client_secret'),
                            user_agent    = cfg_file.get('reddit', 'user_agent'),
                            username      = cfg_file.get('reddit', 'username'),
                            password      = cfg_file.get('reddit', 'password'))
            # break
        except Exception as e:
            logger.error('ERROR: {0}'.format(e))
            logger.debug(traceback.format_exc())
        else:
            sr_dict = get_moderated_subreddits()

            # load conditions from wiki
            rule_dict = load_all_rules(sr_dict)

            pprint.pprint(rule_dict)

            break

    while True:
        # main execution loop

        sleep_after = True
        reload_mod_subs = False

        try:
            # First, process command messages
            for message in unread_messages():
                try:
                    command = message.body.strip().lower()
                    sr_name = clean_sr_name(message.subject).lower()
                    subreddit = r.subreddit(sr_name)
                    # TODO: validate user is moderator
                    if message.author not in subreddit.moderator():
                        message.reply('Error: You do not moderate /r/{0}'.format(subreddit.display_name))
                        continue
                    # OK, validated
                    if command == 'register':
                        # do we know this sub?
                        if sr_name in sr_dict:
                            message.reply("I already moderate /r/{}.\n\n".format(sr_name))
                            continue

                        # otherwise... try to accept mod invite
                        try:
                            subreddit.mod.accept_invite()
                        except Exception:
                            # should be APIException(error_type='NO_INVITE_FOUND')
                            message.reply("You must invite me to moderate /r/{} first."
                                          .format(sr_name))
                            raise
                        else:
                            # get sub from db if previously registered:
                            db_subreddit = None
                            try:
                                db_subreddit = (session.query(Subreddit)
                                               .filter(Subreddit.name == sr_name)
                                               .one())
                            except NoResultFound:
                                # add to DB
                                db_subreddit = Subreddit()
                                db_subreddit.name = subreddit.display_name.lower()
                                db_subreddit.last_submission = datetime.utcnow() - timedelta(days=1)
                                db_subreddit.last_spam = datetime.utcnow() - timedelta(days=1)
                                db_subreddit.last_comment = datetime.utcnow() - timedelta(days=1)
                                db_subreddit.conditions_yaml = ''
                                session.add(db_subreddit)
                            finally:
                                # now that it definitely exists: set enabled
                                # (should we clear old rules from the db?)
                                db_subreddit.enabled = True
                                session.commit()
                            message.reply("I have joined /r/{}".format(db_subreddit.name))
                    elif command in ['update', 'status', 'enable', 'disable', 'leave']:
                        # these require the same database query
                        db_subreddit = None
                        try:
                            db_subreddit = (session.query(Subreddit)
                                           .filter(Subreddit.name == sr_name)
                                           .one())
                        except NoResultFound:
                            message.reply("Subreddit /r/{} is not registered with me."
                                          .format(sr_name))
                        else:
                            # only proceed if we get a database hit.
                            if command == 'update':
                                # refresh configuration for a subreddit
                                # todo: cache duplicate requests from multiple mods
                                reload_mod_subs = True
                                update_from_wiki(db_subreddit, message)
                            elif command == 'status':
                                pass
                            elif command == 'enable':
                                db_subreddit.enabled = True
                                reload_mod_subs = True
                            elif command == 'disable':
                                db_subreddit.enabled = False
                                reload_mod_subs = True
                            elif command == 'leave':
                                # leave moderator of subreddit
                                if db_subreddit.enabled:
                                    message.reply("Please disable me on this subreddit first.")
                                else:
                                    # TODO not implemented yet
                                    reload_mod_subs = True
                                    raise NotImplementedError
                            # the following commands should respond with the enabled status
                            if command in ['status', 'enable', 'disable']:
                                message.reply("Subreddit /r/{} is currently {}abled."
                                              .format(db_subreddit.name,
                                                      'en' if db_subreddit.enabled else 'dis'))
                        finally:
                            session.commit()
                    elif command == 'help':
                        # should this just provide a link, or real command explanations?
                        raise NotImplementedError
                    else:
                        # invalid command
                        message.reply("Invalid command.")
                except NotImplementedError:
                    message.reply("Error: that feature is not yet implemented.")
                except KeyboardInterrupt:
                    raise
                except Exception as e:
                    logger.error('ERROR: {0}'.format(e))
                    logger.debug(traceback.format_exc())
                    message.reply("# ERROR:\n\n{}".format(indent_lines(str(e))))
                finally:
                    message.mark_read()

            # changed mod subs
            if reload_mod_subs:
                sr_dict = get_moderated_subreddits()
                rule_dict = load_all_rules(sr_dict)

            # Then process queues: submission, comment, spam, report, comment reply, username mention
            # TODO: queue for edited items...

            # Queue priority, in increasing specificity:
            # - reports: multi/about/reports?only=(links|comments)
            #   - comment
            #   - submission
            #   - any
            # - spam: multi/about/spam?only=(links|comments)
            #   - comment
            #   - submission
            #   - any
            # - edited: multi/about/edited?only=(links|comments)
            #   - comment
            #   - submission
            #   - any
            # - reply: inbox
            # - mention: inbox
            # - submission: multi/new
            # - comment: multi/comments

            multi_mod_queues = ['reports', 'spam', 'edited'] # r.subreddit().mod.<q>
            multi_queues = ['new', 'comments'] # r.subreddit().<q>
            user_queues = ['comment_replies', 'submission_replies', 'mentions'] # r.user.inbox.<q>

            # proof of concept
            for sr_name, subreddit in sr_dict.items():
                logger.debug("Checking items in /r/{}".format(sr_name))

                sr = r.subreddit(sr_name)

                # mod-only level queues
                for item in sr.mod.spam():
                    for rule in rule_dict[sr_name]:
                        rule.process(item)
                for item in sr.mod.reports():
                    for rule in rule_dict[sr_name]:
                        rule.process(item)
                for item in sr.mod.edited():
                    for rule in rule_dict[sr_name]:
                        rule.process(item)

                # sub-level queues
                for item in sr.mod.new():
                    for rule in rule_dict[sr_name]:
                        rule.process(item)
                for item in sr.mod.comments():
                    for rule in rule_dict[sr_name]:
                        rule.process(item)

                # user queues - not implemented


            # for queue in queue_funcs:
            #     subreddits = [s for s in sr_dict
            #                   if s in cond_dict and len(cond_dict[s][queue]) > 0]
            #     if len(subreddits) == 0:
            #         continue

            #     multireddits = build_multireddit_groups(subreddits)

            #     # fetch and process the items for each multireddit
            #     for multi in multireddits:
            #         if queue == 'report':
            #             limit = cfg_file.get('reddit', 'report_backlog_limit_hours')
            #             stop_time = datetime.utcnow() - timedelta(hours=int(limit))
            #         else:
            #             stop_time = max(getattr(sr, 'last_'+queue)
            #                              for sr in sr_dict.values()
            #                              if sr.name in multi)

            #         queue_subreddit = r.get_subreddit('+'.join(multi))
            #         if queue_subreddit:
            #             queue_func = getattr(queue_subreddit, queue_funcs[queue])
            #             items = queue_func(limit=None)
            #             check_items(queue, items, stop_time, sr_dict, cond_dict)



        except KeyboardInterrupt:
            raise
        except Exception as e:
            logger.error('ERROR: {0}'.format(e))
            logger.debug(traceback.format_exc())
            session.rollback()
        finally:
            if sleep_after:
                logger.info('Sleeping for 10 seconds')
                sleep(10)
                logger.info('Sleep ended, resuming')

        logging.info("Looping")
Example #25
import os
import shutil
import sys
import multiprocessing
import subprocess
import time
from collections import defaultdict

sys.path.append(os.path.dirname(os.getcwd()))
sys.path.append(os.getcwd())

from attacker.modification import *
from tools import utils, progressbar_wrapper
from config import config, COMP, logging, ErrorHandler

logger = logging.getLogger("attacker.modifier")
logger.addHandler(ErrorHandler)

OPERATOR = {
    # insert
    0: "insert",
    # remove
    1: "remove"
}

INSTR_ALLOWED = {
    OPERATOR[0]: [
        COMP['Permission'], COMP['Activity'], COMP['Service'],
        COMP['Receiver'], COMP['Hardware'], COMP['Intentfilter'],
        COMP['Android_API'], COMP['User_String']
    ],
Example #26
from models import XRate, init_db

from config import logging, LOGGER_CONFIG

log = logging.getLogger('TestApi')
fh = logging.FileHandler(LOGGER_CONFIG['file'])
fh.setLevel(LOGGER_CONFIG['level'])
fh.setFormatter(LOGGER_CONFIG['formatter'])
log.addHandler(fh)
log.setLevel(LOGGER_CONFIG['level'])


def update_xrates(from_currency, to_currency):
    log.info(f'Started update for: {from_currency} => {to_currency}')
    xrate = XRate.select().where(XRate.from_currency == from_currency,
                                 XRate.to_currency == to_currency).first()
    log.debug(f'rate before: {xrate}')
    xrate.rate += 0.01
    xrate.save()

    log.debug(f'rate after: {xrate}')
    log.info(f'Finished update for: {from_currency} => {to_currency}')
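A hedged test-run block in the style of Example #8; 840 and 980 are the ISO 4217 codes for USD and UAH used throughout these examples, and init_db (imported above) is assumed to create the tables:

if __name__ == '__main__':
    init_db()                 # assumed to create the XRate tables
    update_xrates(840, 980)   # USD => UAH; bumps the stored rate by 0.01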