Example #1
class TestSetupScheduledJobs(object):
    """Tests for setup function 'schedule_job'"""

    def setUp(self):
        db = getattr(settings_test, 'REDIS_DB', 0)
        pwd = getattr(settings_test, 'REDIS_PWD', None)
        master = getattr(settings_test, 'REDIS_MASTER_NAME', 'mymaster')
        if all(hasattr(settings_test, attr) for attr in ['REDIS_MASTER_DNS', 'REDIS_PORT']):
            self.connection = StrictRedis(host=settings_test.REDIS_MASTER_DNS,
                port=settings_test.REDIS_PORT, db=db, password=pwd)
        else:
            sentinel = Sentinel(settings_test.REDIS_SENTINEL)
            self.connection = sentinel.master_for(master, db=db, password=pwd)
        self.connection.flushall()
        self.scheduler = Scheduler('test_queue', connection=self.connection)

    def test_adds_scheduled_job_with_interval(self):
        a_job['interval'] = 7
        schedule_job(a_job, self.scheduler)
        sched_jobs = list(self.scheduler.get_jobs())

        assert len(sched_jobs) == 1, sched_jobs
        job = sched_jobs[0]
        assert job.meta['interval'] == 7, job.meta
        a_job['interval'] = 1

    def test_adds_several_jobs_(self):
        schedule_job(a_job, self.scheduler)
        schedule_job(another_job, self.scheduler)
        sched_jobs = list(self.scheduler.get_jobs())
        job_func_names = [job.func_name for job in sched_jobs]
        module_name = 'test_jobs.test_schedule_jobs'

        assert len(sched_jobs) == 2, sched_jobs
        assert module_name + '.a_function' in job_func_names, job_func_names
        assert module_name + '.another_function' in job_func_names, job_func_names

    def test_does_not_add_job_if_already_added(self):
        schedule_job(a_job, self.scheduler)
        schedule_job(a_job, self.scheduler)
        sched_jobs = list(self.scheduler.get_jobs())

        assert len(sched_jobs) == 1, sched_jobs

    def test_returns_log_messages(self):
        success_message = schedule_job(a_job, self.scheduler)
        expected = 'Scheduled a_function([], {}) to run every 1 seconds'
        assert success_message == expected, (success_message, expected)

        failure_message = schedule_job(a_job, self.scheduler)
        assert failure_message == 'WARNING: Job a_function([], {}) is already scheduled'

    def test_failed_attempt_to_schedule_does_not_polute_redis(self):
        schedule_job(a_job, self.scheduler)
        schedule_job(a_job, self.scheduler)
        stored_values = self.connection.keys('rq:job*')

        assert len(stored_values) == 1, len(stored_values)
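
These tests read a_job and another_job from the surrounding test module, which this excerpt omits. A minimal sketch of fixtures with the fields the tests touch (names and values are assumptions, not the project's actual fixtures):

def a_function():
    pass

def another_function():
    pass

# Hypothetical job dicts: 'name' is the callable to schedule, 'interval' is
# mutated by test_adds_scheduled_job_with_interval and restored afterwards.
a_job = dict(name=a_function, args=[], kwargs={}, interval=1, timeout=180)
another_job = dict(name=another_function, args=[], kwargs={}, interval=1, timeout=180)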
Example #2
class RedisProxy:
    def __init__(self, client=None, db=2, decode_responses=True):
        # If no client object is passed, fall back to
        # Redis at the default localhost host and port.
        self.client = StrictRedis(
            host='127.0.0.1',
            port=6379,
            db=db,
            decode_responses=decode_responses) if client is None else client

    def add_proxy(self, proxy):
        """
        add proxy to redis list
        :param proxy: new proxy or proxies
        """
        if isinstance(proxy, list):
            self.client.rpush(REDIS_PROXY_KEY, *proxy)
        else:
            self.client.rpush(REDIS_PROXY_KEY, proxy)

    def pop_proxy(self):
        """Randomly pick a proxy from the redis list (the entry is not removed)."""
        return random.choice(self.all_proxies())

    def all_proxies(self):
        return self.client.lrange(REDIS_PROXY_KEY, 0, -1)

    def flushredis(self):
        self.client.flushall()
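
A short usage sketch for RedisProxy (REDIS_PROXY_KEY and the proxy addresses below are illustrative assumptions; the real constant comes from the project's settings):

import random
from redis import StrictRedis

REDIS_PROXY_KEY = 'proxies'  # assumed key name

proxies = RedisProxy()
proxies.add_proxy(['127.0.0.1:8080', '127.0.0.1:8081'])  # push a batch
proxies.add_proxy('127.0.0.1:8082')                      # or a single proxy
print(proxies.pop_proxy())    # random pick; the entry stays in the list
print(proxies.all_proxies())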
Example #3
class TestProjectsStats(Test):

    def setUp(self):
        super(TestProjectsStats, self).setUp()
        self.connection = StrictRedis()
        self.connection.flushall()
        self.scheduler = Scheduler('test_queue', connection=self.connection)


    @with_context
    def test_create_dict_job(self):
        """Test JOB create dict job works."""
        user = UserFactory.create(pro=True)
        app = AppFactory.create(owner=user)
        from sqlalchemy.sql import text
        from pybossa.core import db
        sql = text('''SELECT app.id, app.short_name FROM app, "user"
                   WHERE app.owner_id="user".id AND "user".pro=True;''')
        results = db.slave_session.execute(sql)
        jobs = create_dict_jobs(results, get_app_stats, (10 * 60))

        err_msg = "There should be only one job"
        assert len(jobs) == 1, err_msg

        job = jobs[0]
        assert 'get_app_stats' in job['name'].__name__
        assert job['args'] == [app.id, app.short_name]
        assert job['interval'] == 10 * 60

    @with_context
    def test_get_project_jobs(self):
        """Test JOB get project jobs works."""
        user = UserFactory.create(pro=True)
        app = AppFactory.create(owner=user)
        jobs = get_project_jobs()
        err_msg = "There should be only one job"

        assert len(jobs) == 1, err_msg

        job = jobs[0]
        err_msg = "There should have the same name, but it's: %s" % job['name']
        assert "get_app_stats" == job['name'].__name__, err_msg
        err_msg = "There should have the same args, but it's: %s" % job['args']
        assert [app.id, app.short_name] == job['args'], err_msg
        err_msg = "There should have the same kwargs, but it's: %s" % job['kwargs']
        assert {} == job['kwargs'], err_msg

    @with_context
    def test_get_project_jobs_for_non_pro_users(self):
        """Test JOB get project jobs works for non pro users."""
        AppFactory.create()
        jobs = get_project_jobs()

        err_msg = "There should be only 0 jobs"
        assert len(jobs) == 0, err_msg
Example #4
class TestSetupScheduledJobs(object):
    """Tests for setup function '_schedule_job'"""

    def setUp(self):
        self.connection = StrictRedis()
        self.connection.flushall()
        self.scheduler = Scheduler('test_queue', connection=self.connection)


    def test_adds_scheduled_job_with_interval(self):
        a_job['interval'] = 7
        _schedule_job(a_job, self.scheduler)
        sched_jobs = self.scheduler.get_jobs()

        assert len(sched_jobs) == 1, sched_jobs
        assert sched_jobs[0].meta['interval'] == 7 , sched_jobs[0].meta
        a_job['interval'] = 1


    def test_adds_several_jobs_(self):
        _schedule_job(a_job, self.scheduler)
        _schedule_job(another_job, self.scheduler)
        sched_jobs = self.scheduler.get_jobs()
        job_func_names = [job.func_name for job in sched_jobs]
        module_name = 'test_jobs.test_schedule_jobs'

        assert len(sched_jobs) == 2, sched_jobs
        assert module_name + '.a_function' in job_func_names, job_func_names
        assert module_name + '.another_function' in job_func_names, job_func_names


    def test_does_not_add_job_if_already_added(self):
        _schedule_job(a_job, self.scheduler)
        _schedule_job(a_job, self.scheduler)
        sched_jobs = self.scheduler.get_jobs()

        assert len(sched_jobs) == 1, sched_jobs


    def test_returns_log_messages(self):
        success_message = _schedule_job(a_job, self.scheduler)
        failure_message = _schedule_job(a_job, self.scheduler)

        assert success_message == 'Scheduled a_function([], {}) to run every 1 seconds'
        assert failure_message == 'WARNING: Job a_function([], {}) is already scheduled'


    def test_failed_attempt_to_schedule_does_not_polute_redis(self):
        _schedule_job(a_job, self.scheduler)
        _schedule_job(a_job, self.scheduler)
        stored_values = self.connection.keys('rq:job*')

        assert len(stored_values) == 1, len(stored_values)
Example #5
def redis():
    try:
        from redis import StrictRedis
        from redis.exceptions import ConnectionError
    except ImportError:
        pytest.skip('redis library not installed')
    try:
        r = StrictRedis()
        r.ping()
    except ConnectionError:
        pytest.skip('could not connect to redis')
    r.flushall()
    return r
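
As excerpted, this is a bare function; in a pytest suite it is presumably registered as a fixture so tests can request a clean Redis connection by argument name. A sketch of that wiring (the decorator and the sample test are assumptions):

import pytest

@pytest.fixture
def redis():
    try:
        from redis import StrictRedis
        from redis.exceptions import ConnectionError
    except ImportError:
        pytest.skip('redis library not installed')
    try:
        r = StrictRedis()
        r.ping()
    except ConnectionError:
        pytest.skip('could not connect to redis')
    r.flushall()
    return r

def test_smoke(redis):  # pytest injects the fixture by parameter name
    redis.set('k', 'v')
    assert redis.get('k') == b'v'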
Example #6
def redis():
    try:
        from redis import StrictRedis
        from redis.exceptions import ConnectionError
    except ImportError:
        pytest.skip('redis library not installed')
    try:
        r = StrictRedis()
        r.ping()
    except ConnectionError:
        pytest.skip('could not connect to redis')
    r.flushall()
    return r
class TestMaintenance(Test):
    def setUp(self):
        super(TestMaintenance, self).setUp()
        self.connection = StrictRedis()
        self.connection.flushall()

    @with_context
    def test_get_maintenance_jobs(self):
        """Test get maintenance jobs works."""
        res = get_maintenance_jobs().next()
        assert res['queue'] == 'maintenance'

    @with_context
    @patch('pybossa.jobs.send_mail')
    @patch('rq.requeue_job', autospec=True)
    @patch('rq.get_failed_queue', autospec=True)
    def test_check_failed_variant(self, mock_failed_queue, mock_requeue_job,
                                  mock_send_mail):
        """Test JOB check failed works when no failed jobs."""
        fq = MagicMock()
        fq.job_ids = []
        job = MagicMock()
        fq.fetch_job = job
        mock_failed_queue.return_value = fq
        response = check_failed()
        msg = "You have not failed the system"
        assert msg == response, response

    @with_context
    @patch('pybossa.jobs.send_mail')
    @patch('rq.requeue_job', autospec=True)
    @patch('rq.get_failed_queue', autospec=True)
    def test_check_failed(self, mock_failed_queue, mock_requeue_job,
                          mock_send_mail):
        """Test JOB check failed works."""
        fq = MagicMock()
        fq.job_ids = ['1']
        job = MagicMock()
        fq.fetch_job = job
        mock_failed_queue.return_value = fq
        for i in range(self.flask_app.config.get('FAILED_JOBS_RETRIES') - 1):
            response = check_failed()
            msg = "JOBS: ['1'] You have failed the system."
            assert msg == response, response
            mock_requeue_job.assert_called_with('1')
            assert not mock_send_mail.called
        response = check_failed()
        assert mock_send_mail.called
        mock_send_mail.reset_mock()
        response = check_failed()
        assert not mock_send_mail.called
Example #8
def crawling(root_url, thread_num, reset):
    q = Queue()
    fail_sites = {}

    try:
        f = open('local_test_data/todo_list.txt', 'r')
    except IOError:
        q.put(root_url)
    else:
        while True:
            ss = f.readline()
            if not ss:
                break
            if ss[-1] == '\n':
                ss = ss[:-1]
            ss = quote(ss, safe=punc_en, encoding=None, errors=None)
            q.put(ss)
        f.close()

    visited = StrictRedis(host='localhost', port=6379, decode_responses=True)
    if reset:
        visited.flushall()
    else:
        for key in visited.keys():
            if visited[key] != 'Done' and visited[key] != 'Failed':
                visited.set(key, 'Failed', xx=True)

    if thread_num == 1:
        BFS(q, visited, fail_sites)
    else:
        threads = [threading.Thread(target=BFS, args=(q, visited, fail_sites, ))
                   for i in range(thread_num)]

        threads[0].setDaemon(True)
        threads[0].start()
        sleep(4)
        for td in threads[1:]:
            td.setDaemon(True)
            td.start()
        for td in threads:
            td.join()

    fail_arr = []
    if not q.empty() or len(fail_sites.keys()) > 0:
        while not q.empty():
            fail_arr.append(q.get())
        for cur in fail_sites.keys():
            fail_arr.append(cur)

    return fail_arr
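
One caveat about the resume branch above: keys() fetches the whole keyspace in a single round trip, which can stall a large crawl. A hedged alternative using redis-py's scan_iter, which walks the keyspace incrementally with the same semantics:

for key in visited.scan_iter():
    status = visited.get(key)
    if status not in ('Done', 'Failed'):
        visited.set(key, 'Failed', xx=True)  # only overwrite existing keys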
Example #9
File: test_news.py  Project: fiorda/pybossa
class TestNews(Test):

    def setUp(self):
        super(TestNews, self).setUp()
        self.connection = StrictRedis()
        self.connection.flushall()

    news = dict(updated='2015-01-01')

    @with_context
    def test_get_news_empty(self):
        news = get_news()
        assert len(news) == 0, len(news)

    @with_context
    def test_get_news_with_score_empty(self):
        news = get_news(score=1)
        assert len(news) == 0, len(news)

    @with_context
    def test_get_news(self):
        sentinel.master.zadd(myset, 0, pickle.dumps(self.news))
        news = get_news()
        assert len(news) == 1, len(news)
        assert news[0]['updated'] == self.news['updated'], news

    @with_context
    def test_get_news_with_score(self):
        sentinel.master.zadd(myset, 0, pickle.dumps(self.news))
        news = get_news(score=1)
        assert len(news) == 0, len(news)

    @with_context
    def test_notify_news_admins(self):
        user = UserFactory.create(admin=True)
        notify_news_admins()
        key = "notify:admin:%s" % user.id
        value = sentinel.slave.get(key)
        err_msg = "Key should exist"
        assert value == str(1), err_msg

    @with_context
    def test_notify_news_admins_skips_non_admins(self):
        user = UserFactory.create(admin=False)
        user2 = UserFactory.create(admin=False)
        notify_news_admins()
        key = "notify:admin:%s" % user2.id
        value = sentinel.slave.get(key)
        err_msg = "Key should not exist"
        assert value is None, err_msg
Example #10
class TestNews(Test):
    def setUp(self):
        super(TestNews, self).setUp()
        self.connection = StrictRedis()
        self.connection.flushall()

    news = dict(updated='2015-01-01')

    @with_context
    def test_get_news_empty(self):
        news = get_news()
        assert len(news) == 0, len(news)

    @with_context
    def test_get_news_with_score_empty(self):
        news = get_news(score=1)
        assert len(news) == 0, len(news)

    @with_context
    def test_get_news(self):
        sentinel.master.zadd(myset, 0, pickle.dumps(self.news))
        news = get_news()
        assert len(news) == 1, len(news)
        assert news[0]['updated'] == self.news['updated'], news

    @with_context
    def test_get_news_with_score(self):
        sentinel.master.zadd(myset, 0, pickle.dumps(self.news))
        news = get_news(score=1)
        assert len(news) == 0, len(news)

    @with_context
    def test_notify_news_admins(self):
        user = UserFactory.create(admin=True)
        notify_news_admins()
        key = "notify:admin:%s" % user.id
        value = sentinel.slave.get(key)
        err_msg = "Key should exist"
        assert value == str(1), err_msg

    @with_context
    def test_notify_news_admins_skips_non_admins(self):
        user = UserFactory.create(admin=False)
        user2 = UserFactory.create(admin=False)
        notify_news_admins()
        key = "notify:admin:%s" % user2.id
        value = sentinel.slave.get(key)
        err_msg = "Key should not exist"
        assert value is None, err_msg
class TestSetupScheduledJobs(object):
    """Tests for setup function 'schedule_job'"""
    def setUp(self):
        self.connection = StrictRedis()
        self.connection.flushall()
        self.scheduler = Scheduler('test_queue', connection=self.connection)

    def test_adds_scheduled_job_with_interval(self):
        a_job['interval'] = 7
        schedule_job(a_job, self.scheduler)
        sched_jobs = self.scheduler.get_jobs()

        assert len(sched_jobs) == 1, sched_jobs
        assert sched_jobs[0].meta['interval'] == 7, sched_jobs[0].meta
        a_job['interval'] = 1

    def test_adds_several_jobs_(self):
        schedule_job(a_job, self.scheduler)
        schedule_job(another_job, self.scheduler)
        sched_jobs = self.scheduler.get_jobs()
        job_func_names = [job.func_name for job in sched_jobs]
        module_name = 'test_jobs.test_schedule_jobs'

        assert len(sched_jobs) == 2, sched_jobs
        assert module_name + '.a_function' in job_func_names, job_func_names
        assert module_name + '.another_function' in job_func_names, job_func_names

    def test_does_not_add_job_if_already_added(self):
        schedule_job(a_job, self.scheduler)
        schedule_job(a_job, self.scheduler)
        sched_jobs = self.scheduler.get_jobs()

        assert len(sched_jobs) == 1, sched_jobs

    def test_returns_log_messages(self):
        success_message = schedule_job(a_job, self.scheduler)
        failure_message = schedule_job(a_job, self.scheduler)

        assert success_message == 'Scheduled a_function([], {}) to run every 1 seconds'
        assert failure_message == 'WARNING: Job a_function([], {}) is already scheduled'

    def test_failed_attempt_to_schedule_does_not_polute_redis(self):
        schedule_job(a_job, self.scheduler)
        schedule_job(a_job, self.scheduler)
        stored_values = self.connection.keys('rq:job*')

        assert len(stored_values) == 1, len(stored_values)
Example #12
def main():
    redis = StrictRedis(**config)
    fakeredis = FakeStrictRedis()
    redis.flushall()
    fakeredis.flushall()

    print '[+] FakeRedis BLPOP:'
    now = time()
    popped = fakeredis.blpop('hello_world', 10)
    print '[*] Took %.2f seconds' % (time() - now)
    print '[*] Popped value:', popped
    print

    print '[+] Redis BLPOP:'
    now = time()
    popped = redis.blpop('hello_world', 10)
    print '[*] Took %.2f seconds' % (time() - now)
    print '[*] Popped value:', popped
class TestTasksMarkedForContribution(object):

    def setUp(self):
        self.connection = StrictRedis()
        self.connection.flushall()


    @patch('pybossa.api.get_user_id_or_ip')
    def test_mark_task_as_requested_by_user_creates_key_for_auth(self, user):
        """When an authenticated user requests a task, a key is stored in Redis
        with his id and task id"""
        user.return_value = {'user_id': 33, 'user_ip': None}
        task = Task(id=22)
        key = 'pybossa:task_requested:user:33:task:22'

        mark_task_as_requested_by_user(task, self.connection)

        assert key in self.connection.keys(), self.connection.keys()


    @patch('pybossa.api.get_user_id_or_ip')
    def test_mark_task_as_requested_by_user_creates_key_for_anon(self, user):
        """When an anonymous user requests a task, a key is stored in Redis
        with his IP and task id"""
        user.return_value = {'user_id': None, 'user_ip': '127.0.0.1'}
        task = Task(id=22)
        key = 'pybossa:task_requested:user:127.0.0.1:task:22'

        mark_task_as_requested_by_user(task, self.connection)

        assert key in self.connection.keys(), self.connection.keys()


    @patch('pybossa.api.get_user_id_or_ip')
    def test_mark_task_as_requested_by_user_sets_expiration_for_key(self, user):
        """When a user requests a task, a key is stored with TTL of 1 hour"""
        user.return_value = {'user_id': 33, 'user_ip': None}
        task = Task(id=22)
        key = 'pybossa:task_requested:user:33:task:22'

        mark_task_as_requested_by_user(task, self.connection)

        assert self.connection.ttl(key) == 60 * 60, self.connection.ttl(key)
Example #14
class GraphStorage(object):
    def __init__(self, link_file='links.txt', host='localhost', port=6379):
        self.db = StrictRedis(host, port)
        self.link_file = link_file

    def add_nodes(self, graphDict, nameDict):
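        # NOTE: graphDict and nameDict are accepted but never used below;
        # links come from self.link_file and titles from 'titles.txt'.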
        pipe = self.db.pipeline()
        with open(self.link_file) as link_file:
            for _ in xrange(570607):  # one pass over every line in links.txt
                node, links = self.create_link(next(link_file))
                name = linecache.getline('titles.txt', node)
                pipe.rpush('links-{0}'.format(node), *links)
                pipe.append('name-{0}'.format(node), name)
                pipe.append('id-{0}'.format(name), node)
            pipe.execute()

    def create_link(self, link):
        start, targets = link.rstrip('\n').split(': ')
        return int(start), map(int, targets.split())

    def get(self, kind, value):
        key = '{0}-{1}'.format(kind, value)
        if kind in ('name', 'id'):
            return self.db.get(key)
        elif kind == 'links':
            return self.db.lrange(key, 0, -1)
        raise ValueError

    def is_leaf(self, value):
        return self.db.sismember('leaves', value)

    def flush(self):
        self.db.flushall()

    def __len__(self):
        return self.db.dbsize()


class TestTasksMarkedForContribution(object):
    def setUp(self):
        self.connection = StrictRedis()
        self.connection.flushall()

    @patch('pybossa.api.get_user_id_or_ip')
    def test_mark_task_as_requested_by_user_creates_key_for_auth(self, user):
        """When an authenticated user requests a task, a key is stored in Redis
        with his id and task id"""
        user.return_value = {'user_id': 33, 'user_ip': None}
        task = Task(id=22)
        key = 'pybossa:task_requested:user:33:task:22'

        mark_task_as_requested_by_user(task, self.connection)

        assert key in self.connection.keys(), self.connection.keys()

    @patch('pybossa.api.get_user_id_or_ip')
    def test_mark_task_as_requested_by_user_creates_key_for_anon(self, user):
        """When an anonymous user requests a task, a key is stored in Redis
        with his IP and task id"""
        user.return_value = {'user_id': None, 'user_ip': '127.0.0.1'}
        task = Task(id=22)
        key = 'pybossa:task_requested:user:127.0.0.1:task:22'

        mark_task_as_requested_by_user(task, self.connection)

        assert key in self.connection.keys(), self.connection.keys()

    @patch('pybossa.api.get_user_id_or_ip')
    def test_mark_task_as_requested_by_user_sets_expiration_for_key(
            self, user):
        """When a user requests a task, a key is stored with TTL of 1 hour"""
        user.return_value = {'user_id': 33, 'user_ip': None}
        task = Task(id=22)
        key = 'pybossa:task_requested:user:33:task:22'

        mark_task_as_requested_by_user(task, self.connection)

        assert self.connection.ttl(key) == 60 * 60, self.connection.ttl(key)
Example #16
class Test_CRedisDict(TestCase):
    def setUp(self):
        self.redis = StrictRedis(host='localhost',
                                 port=6379,
                                 db=0,
                                 decode_responses=True)
        if not wait_for_redis(self.redis, 10):
            exit()
        self.redis.flushall()

    # init(redis) + dict()
    def test_initEmpty(self):
        self.redis.flushall()
        d = CRedisDict('d', self.redis)
        self.assertEqual(d.dict(), {})

    def test_initFromCRedisDict(self):
        self.redis.flushall()
        d1 = CRedisDict('d1', self.redis)
        d1['A'] = '1'
        d1['B'] = 'b'
        d1['C'] = {'a': 1, 'b': 2}
        d2 = CRedisDict('d2', self.redis, d1)
        self.assertEqual(d1.dict(), d2.dict())

    def test_initFromDict(self):
        self.redis.flushall()
        d1 = {}
        d1['A'] = '1'
        d1['B'] = 'b'
        d1['C'] = {'a': 1, 'b': 2}
        d2 = CRedisDict('d2', self.redis, d1)
        self.assertEqual(d2.dict(), d1)

    # keys()
    def test_keys(self):
        self.redis.flushall()
        d1 = CRedisDict('d1', self.redis)
        d1['A'] = '1'
        d1['B'] = 'b'
        d1['C'] = {'a': 1, 'b': 2}
        self.assertEqual(d1.keys(), ['A', 'B', 'C'])

    # exists()
    def test_exists(self):
        self.redis.flushall()
        d1 = CRedisDict('d1', self.redis)
        d1['A'] = '1'
        d1['B'] = 'b'
        d1['C'] = {'a': 1, 'b': 2}
        self.assertEqual(CRedisDict.exists(self.redis, 'd1'), True)
        self.assertEqual(CRedisDict.exists(self.redis, 'd2'), False)

    # len()
    def test_len(self):
        self.redis.flushall()
        d1 = CRedisDict('d1', self.redis)
        d1['A'] = '1'
        d1['B'] = 'b'
        d1['C'] = {'a': 1, 'b': 2}
        self.assertEqual(len(d1), 3)
        d2 = CRedisDict('d2', self.redis)
        self.assertEqual(len(d2), 0)

    # is_empty
    def test_empty(self):
        self.redis.flushall()
        d1 = CRedisDict('d1', self.redis)
        d1['A'] = '1'
        d1['B'] = 'b'
        d1['C'] = {'a': 1, 'b': 2}
        self.assertEqual(d1.is_empty(), False)
        d2 = CRedisDict('d2', self.redis)
        self.assertEqual(d2.is_empty(), True)

    # update()
    def test_update(self):
        self.redis.flushall()
        d1 = {}
        d1['A'] = '1'
        d1['B'] = 'b'
        d1['C'] = {'a': 1, 'b': 2}
        d1_r = CRedisDict('d1', self.redis, d1)
        d2 = {}
        d2['D'] = '4'
        d2['E'] = 'e'
        d2['F'] = {'d': 4, 'e': 5}
        d2_r = CRedisDict('d2', self.redis, d2)
        d2_r.update(d1_r)
        d2.update(d1)
        self.assertEqual(d2_r.dict(), d2)

    def test_addInt(self):
        self.redis.flushall()
        d = CRedisDict('d', self.redis)
        d['A'] = 1
        self.assertEqual(d['A'], 1)

    def test_addStr(self):
        self.redis.flushall()
        d = CRedisDict('d', self.redis)
        d['B'] = 'b'
        self.assertEqual(d['B'], 'b')

    def test_addDict(self):
        self.redis.flushall()
        d = CRedisDict('d', self.redis)
        d['C'] = {'a': 1, 'b': 2}
        self.assertEqual(d['C'], {'a': 1, 'b': 2})

    def test_copyCRedisDict(self):
        self.redis.flushall()
        d1 = CRedisDict('d1', self.redis)
        d1['A'] = '1'
        d1['B'] = 'b'
        d1['C'] = {'a': 1, 'b': 2}
        d2 = d1
        self.assertEqual(d1.dict(), d2.dict())

    def test_copyDict(self):
        self.redis.flushall()
        d1 = {}
        d1['A'] = '1'
        d1['B'] = 'b'
        d1['C'] = {'a': 1, 'b': 2}
        d2 = CRedisDict('d2', self.redis)
        d2.copy(d1)
        self.assertEqual(d2.dict(), d1)

    def test_equal(self):
        self.redis.flushall()
        d1 = {}
        d1['A'] = '1'
        d1['B'] = 'b'
        d1['C'] = {'a': 1, 'b': 2}
        d2 = CRedisDict('d2', self.redis, d1)
        self.assertEqual(d2.dict(), d1)
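
wait_for_redis (used in setUp above) is not part of this excerpt; a plausible implementation, offered as an assumption rather than the project's actual helper, pings until Redis answers or the timeout elapses:

import time
from redis.exceptions import ConnectionError

def wait_for_redis(client, timeout):
    """Return True once client.ping() succeeds, False after `timeout` seconds."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            if client.ping():
                return True
        except ConnectionError:
            time.sleep(0.5)
    return False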
Example #17
class TestContributionsGuard(object):

    def setUp(self):
        self.connection = StrictRedis()
        self.connection.flushall()
        self.guard = ContributionsGuard(self.connection)
        self.anon_user = {'user_id': None, 'user_ip': '127.0.0.1'}
        self.auth_user = {'user_id': 33, 'user_ip': None}
        self.task = Task(id=22)

    def test_stamp_registers_specific_user_id_and_task(self):
        key = 'pybossa:task_requested:user:33:task:22'

        self.guard.stamp(self.task, self.auth_user)

        assert key in self.connection.keys(), self.connection.keys()

    def test_stamp_registers_specific_user_ip_and_task_if_no_id_provided(self):
        key = 'pybossa:task_requested:user:127.0.0.1:task:22'

        self.guard.stamp(self.task, self.anon_user)

        assert key in self.connection.keys(), self.connection.keys()

    def test_stamp_expires_in_one_hour(self):
        key = 'pybossa:task_requested:user:33:task:22'
        ONE_HOUR = 60 * 60

        self.guard.stamp(self.task, self.auth_user)

        assert self.connection.ttl(key) == ONE_HOUR, self.connection.ttl(key)

    @patch('pybossa.contributions_guard.make_timestamp')
    def test_stamp_adds_a_timestamp_when_the_task_is_stamped(self, make_timestamp):
        make_timestamp.return_value = "now"
        key = 'pybossa:task_requested:user:127.0.0.1:task:22'

        self.guard.stamp(self.task, self.anon_user)

        assert self.connection.get(key) == 'now'

    def test_check_task_stamped_returns_False_for_non_stamped_task(self):
        assert self.guard.check_task_stamped(self.task, self.auth_user) is False

    def test_check_task_stamped_returns_True_for_auth_user_who_requested_task(self):
        self.guard.stamp(self.task, self.auth_user)

        assert self.guard.check_task_stamped(self.task, self.auth_user) is True

    def test_check_task_stamped_returns_True_for_anon_user_who_requested_task(self):
        self.guard.stamp(self.task, self.anon_user)

        assert self.guard.check_task_stamped(self.task, self.anon_user) is True

    def test_retrieve_timestamp_returns_None_for_non_stamped_task(self):
        assert self.guard.retrieve_timestamp(self.task, self.auth_user) is None

    @patch('pybossa.contributions_guard.make_timestamp')
    def test_retrieve_timestamp_returs_the_timestamp_for_stamped_task(self, make_timestamp):
        make_timestamp.return_value = "now"
        self.guard.stamp(self.task, self.auth_user)

        assert self.guard.retrieve_timestamp(self.task, self.auth_user) == 'now'
Example #18
class DBClient(object):
    """ Class used to abstract the use of the database/cache """
    def __init__(self, db_host, db_port=6379, db_name=None):
        self.db_host = db_host
        self.db_port = db_port
        self.db_conn = None

    def connect(self):
        """ This function inits the connection to the database """
        try:
            self.db_conn = StrictRedis(host=self.db_host, port=self.db_port)
        except Exception as exp:
            logger.error("[SnmpBooster] [code 1302] Redis Connection error:"
                         " %s" % str(exp))
            return False
        return True

    def disconnect(self):
        """ This function kills the connection to the database """
        pass

    @staticmethod
    def build_key(part1, part2):
        """ Build Redis key

        >>> build_key("part1", "part2")
        'part1:part2'
        """
        return ":".join((str(part1), str(part2)))

    def update_service_init(self, host, service, data):
        """ Insert/Update/Upsert service information in Redis by Arbiter """
        # We need to generate key for redis :
        # Like host:3 => ['service', 'service2'] that link
        # check interval to a service list
        key_ci = self.build_key(host, data["check_interval"])
        # Add service in host:interval list
        try:
            self.db_conn.sadd(key_ci, service)
        except Exception as exp:
            logger.error("[SnmpBooster] [code 1303] [%s, %s] "
                         "%s" % (host, service, str(exp)))
            return (None, True)
        # Then update propely host:service key
        self.update_service(host, service, data)

    def update_service(self, host, service, data, force=False):
        """ This function updates/inserts a service
        * It used by Arbiter in hook_late_configuration
          to put the configuration in the database
        * It used by Poller to put collected data in the database
        The 'force' is used to overwrite the service datas (used in
        cache manager)

        Return
        * query_result: None
        * error: bool
        """

        # Get key
        key = self.build_key(host, service)
        if not force:
            old_dict = self.db_conn.get(key)
            if old_dict is not None:
                old_dict = eval(old_dict)
            # Merge old data and new data
            data = merge_dicts(old_dict, data)

        if data is None:
            return (None, True)

        # Save in redis
        try:
            self.db_conn.set(key, data)
        except Exception as exp:
            logger.error("[SnmpBooster] [code 1304] [%s, %s] "
                         "%s" % (host, service, str(exp)))
            return (None, True)

        return (None, False)

    def get_service(self, host, service):
        """ This function gets one service from the database

        Return
        :query_result: dict
        """
        # Get key
        key = self.build_key(host, service)
        # Get service
        try:
            data = self.db_conn.get(key)
        except Exception as exp:
            logger.error("[SnmpBooster] [code 1305] [%s, %s] "
                         "%s" % (host, service, str(exp)))
            return None
        return eval(data) if data is not None else None

    def get_services(self, host, check_interval):
        """ This function Gets all services with the same host
        and check_interval

        Return
        :query_result: list of dicts
        """
        # Get key
        key_ci = self.build_key(host, check_interval)
        # Get services
        try:
            servicelist = self.db_conn.smembers(key_ci)

        except Exception as exp:
            logger.error("[SnmpBooster] [code 1306] [%s] "
                         "%s" % (host, str(exp)))
            return None

        if servicelist is None:
            # TODO : Bailout properly
            return None

        dict_list = []
        for service in servicelist:
            try:
                key = self.build_key(host, service)
                data = self.db_conn.get(key)
                if data is None:
                    logger.error(
                        "[SnmpBooster] [code 1307] [%s] "
                        "Unknown service %s", host, service)
                    continue
                dict_list.append(eval(data))
            except Exception as exp:
                logger.error("[SnmpBooster] [code 1308] [%s] "
                             "%s" % (host, str(exp)))
        return dict_list

    def show_keys(self):
        """ Get all database keys """
        return self.db_conn.keys()

    def get_hosts_from_service(self, service):
        """ List hosts with a service which match with the pattern """
        results = []
        for key in self.db_conn.keys():
            if re.search(":.*" + service, key) is None:
                # Look for service
                continue
            results.append(eval(self.db_conn.get(key)))

        return results

    def get_services_from_host(self, host):
        """ List all services from hosts which match the pattern """
        results = []
        for key in self.db_conn.keys():
            if re.search(host + ".*:", key) is None:
                # Look for host
                continue
            if re.search(":[0-9]+$", key) is not None:
                # we skip host:interval
                continue
            results.append(eval(self.db_conn.get(key)))

        return results

    def clear_cache(self):
        """ Clear all datas in database """
        self.db_conn.flushall()

    def get_all_services(self):
        """ List all services """
        results = []
        for key in self.db_conn.keys():
            if re.search(":[0-9]*$", key) is None:
                host, service = key.split(":", 1)
                results.append(self.get_service(host, service))

        return results

    def get_all_interval_keys(self):
        """ List all host:interval keys which match interval pattern """
        results = []
        for key in self.db_conn.keys():
            if re.search(":[0-9]*$", key) is not None:
                results.append(key)

        return results

    def delete_services(self, key_list):
        """ Delete services which match keys in key_list """
        nb_del = self.db_conn.delete(
            *[self.build_key(host, service) for host, service in key_list])
        if nb_del > 0:
            interval_key = self.get_all_interval_keys()
            for host, service in key_list:
                for key in [
                        key for key in interval_key if key.startswith(host)
                ]:
                    self.db_conn.srem(key, service)
        return nb_del

    def delete_host(self, host):
        """ Delete all services in the specified host """
        to_del = []
        for key in self.db_conn.keys():
            if re.search(host + ":", key) is not None:
                to_del.append(key)
        if len(to_del) > 0:
            return self.db_conn.delete(*to_del)
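
A note on this version: it round-trips stored dicts through eval(), which will execute arbitrary code if the cached content is ever tampered with. The later copy of this class (Example #28 below) uses ast.literal_eval instead, which only parses Python literals. A minimal illustration of the safer call (the stored shape is an assumption):

import ast

raw = "{'host': 'h1', 'check_interval': 5}"  # as stored via db_conn.set(key, data)
service = ast.literal_eval(raw)  # parses literals only, never executes code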
Example #19
File: all.py  Project: alvinr/data-modeling
from redis import StrictRedis, WatchError
import os
import time
import random
import string
import json
from datetime import date

redis = StrictRedis(host=os.environ.get("REDIS_HOST", "localhost"), 
                    port=os.environ.get("REDIS_PORT", 6379),
                    db=0)
redis.flushall()

olympic_stadium = {
    'venue': "Olympic Stadium",
    'capacity': 60000,
    'events': ["Athletics", "Football"],
    'geo': {'long': 139.76632, 'lat': 35.666754},
    'transit': ["Toei Odeo Line", "Chuo Main Line"]
}

nippon_budokan = {
    'venue': "Nippon Budokan",
    'capacity': 12000,
    'events': ["Judo", "Karate"],
    'geo': {'long': 139.75, 'lat': 35.693333},
    'transit': ["Toei Shinjuku Line", "Tozai Line", "Hanzomon Line"]
}

makuhari_messe = {
    'venue': "Makuhari Messe",
Example #20
        ua.insert({
            "userID": line.get('userId'),
            "eventID": line.get('eventId'),
            "appID": line.get('appId')
        }).run()


if __name__ == '__main__':

    # dfile = '~/Projects/unified-ETL-pipeline-/data/appEventSample.txt.gz'
    dfile = '~/Desktop/9_18_appevent_dump_full.txt.gz'
    NUM_REC = 100000
    dupes = []
    dq = CL.deque([], maxlen=1500)
    cxn1 = Redis(db=DB_PARAMS['events_db_id'])
    cxn1.flushall()
    fh = gzip.open(os.path.expanduser(dfile), 'rt', encoding='utf-8')
    # opener(fh, grep1(persist(DB_PARAMS)))
    # opener(fh, grep1(persist_rethink(DB_PARAMS)))
    # opener(fh, grep1(aggregate(DB_PARAMS)))

    opener(fh, grep1(grep2(persist(DB_PARAMS))), num_rec=2 * NUM_REC)
    # opener(fh, grep1(persist(DB_PARAMS)))

    print("number of records persisted: {}".format(len(cxn1.keys('*'))))

    print('file handle closed')

    print("dupes: {}".format(len(dupes)))
    print("deque: {}".format(len(dq)))
Example #21
File: gistio.py  Project: skopp/gistio
        abort(404)
    resp = make_response(content, 200)
    resp.headers['Content-Type'] = 'application/json'
    resp.headers['X-Cache-Hit'] = cache_hit
    resp.headers['X-Expire-TTL-Seconds'] = cache.ttl(id)
    return resp


def fetch_and_render(id):
    """Fetch and render a post from the Github API"""
    r = requests.get('https://api.github.com/gists/{}'.format(id))
    if r.status_code != 200:
        return None
    decoded = r.json()  # in requests >= 1.0 json is a method; older versions exposed it as a property
    for f in decoded['files'].values():
        if f['language'] in RENDERABLE:
            f['rendered'] = bleach.clean(markdown(f['content']),
                                         tags=ALLOWED_TAGS,
                                         attributes=ALLOWED_ATTRIBUTES)
    encoded = json.dumps(decoded)
    cache.setex(id, CACHE_EXPIRATION, encoded)
    return encoded


if __name__ == '__main__':
    if HEROKU:
        app.run(host='0.0.0.0', port=PORT)
    else:
        cache.flushall()
        app.run(host='0.0.0.0', debug=True, port=PORT)
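
The module-level objects this snippet relies on (cache, RENDERABLE, CACHE_EXPIRATION, PORT, HEROKU) are defined earlier in gistio.py and not shown here; a minimal sketch of the assumed setup, with illustrative values only:

from redis import StrictRedis

PORT = 5000
HEROKU = False
CACHE_EXPIRATION = 60 * 60         # assumed TTL in seconds
RENDERABLE = {u'Markdown', None}   # assumed set of gist languages to render
cache = StrictRedis(host='localhost', port=6379, db=0)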
Example #22
class TestContributionsGuard(object):
    def setUp(self):
        self.connection = StrictRedis()
        self.connection.flushall()
        self.guard = ContributionsGuard(self.connection)
        self.anon_user = {'user_id': None, 'user_ip': '127.0.0.1'}
        self.auth_user = {'user_id': 33, 'user_ip': None}
        self.task = Task(id=22)

    def test_stamp_registers_specific_user_id_and_task(self):
        key = 'pybossa:task_requested:user:33:task:22'

        self.guard.stamp(self.task, self.auth_user)

        assert key in self.connection.keys(), self.connection.keys()

    def test_stamp_registers_specific_user_ip_and_task_if_no_id_provided(self):
        key = 'pybossa:task_requested:user:127.0.0.1:task:22'

        self.guard.stamp(self.task, self.anon_user)

        assert key in self.connection.keys(), self.connection.keys()

    def test_stamp_expires_in_one_hour(self):
        key = 'pybossa:task_requested:user:33:task:22'
        ONE_HOUR = 60 * 60

        self.guard.stamp(self.task, self.auth_user)

        assert self.connection.ttl(key) == ONE_HOUR, self.connection.ttl(key)

    @patch('pybossa.contributions_guard.make_timestamp')
    def test_stamp_adds_a_timestamp_when_the_task_is_stamped(
            self, make_timestamp):
        make_timestamp.return_value = "now"
        key = 'pybossa:task_requested:user:127.0.0.1:task:22'

        self.guard.stamp(self.task, self.anon_user)

        assert self.connection.get(key) == 'now'

    def test_check_task_stamped_returns_False_for_non_stamped_task(self):
        assert self.guard.check_task_stamped(self.task,
                                             self.auth_user) is False

    def test_check_task_stamped_returns_True_for_auth_user_who_requested_task(
            self):
        self.guard.stamp(self.task, self.auth_user)

        assert self.guard.check_task_stamped(self.task, self.auth_user) is True

    def test_check_task_stamped_returns_True_for_anon_user_who_requested_task(
            self):
        self.guard.stamp(self.task, self.anon_user)

        assert self.guard.check_task_stamped(self.task, self.anon_user) is True

    def test_retrieve_timestamp_returns_None_for_non_stamped_task(self):
        assert self.guard.retrieve_timestamp(self.task, self.auth_user) is None

    @patch('pybossa.contributions_guard.make_timestamp')
    def test_retrieve_timestamp_returs_the_timestamp_for_stamped_task(
            self, make_timestamp):
        make_timestamp.return_value = "now"
        self.guard.stamp(self.task, self.auth_user)

        assert self.guard.retrieve_timestamp(self.task,
                                             self.auth_user) == 'now'
Example #23
class TestWebHooks(Test):

    @with_context
    def setUp(self):
        super(TestWebHooks, self).setUp()
        self.connection = StrictRedis()
        self.connection.flushall()
        self.project = ProjectFactory.create()
        self.webhook_payload = dict(project_id=self.project.id,
                                    project_short_name=self.project.short_name)


    @with_context
    @patch('pybossa.jobs.requests.post')
    def test_webhooks(self, mock):
        """Test WEBHOOK works."""
        mock.return_value = FakeResponse(text=json.dumps(dict(foo='bar')),
                                         status_code=200)
        err_msg = "The webhook should return True from patched method"
        assert webhook('url', self.webhook_payload), err_msg
        err_msg = "The post method should be called"
        assert mock.called, err_msg

    @with_context
    @patch('pybossa.jobs.requests.post')
    def test_webhooks_connection_error(self, mock):
        """Test WEBHOOK with connection error works."""
        import requests
        from pybossa.core import webhook_repo
        mock.side_effect = requests.exceptions.ConnectionError
        err_msg = "A webhook should be returned"
        res = webhook('url', self.webhook_payload)
        assert res.response == 'Connection Error', err_msg
        assert res.response_status_code is None, err_msg
        wh = webhook_repo.get(1)
        assert wh.response == res.response, err_msg
        assert wh.response_status_code == res.response_status_code, err_msg

    @with_context
    @patch('pybossa.jobs.requests.post')
    def test_webhooks_without_url(self, mock):
        """Test WEBHOOK without url works."""
        mock.post.return_value = True
        err_msg = "The webhook should return Connection Error"
        res = webhook(None, self.webhook_payload, None)
        assert res.response == 'Connection Error', err_msg
        assert res.response_status_code is None, err_msg

    @with_context
    @patch('pybossa.model.event_listeners.webhook_queue', new=queue)
    def test_trigger_webhook_without_url(self):
        """Test WEBHOOK is triggered without url."""
        project = ProjectFactory.create()
        task = TaskFactory.create(project=project, n_answers=1)
        TaskRunFactory.create(project=project, task=task)
        assert queue.enqueue.called is False, queue.enqueue.called
        queue.reset_mock()

    @with_context
    @patch('pybossa.model.event_listeners.webhook_queue', new=queue)
    def test_trigger_webhook_with_url_not_completed_task(self):
        """Test WEBHOOK is not triggered for uncompleted tasks."""
        import random
        project = ProjectFactory.create()
        task = TaskFactory.create(project=project)
        for i in range(1, random.randrange(2, 5)):
            TaskRunFactory.create(project=project, task=task)
        assert queue.enqueue.called is False, queue.enqueue.called
        assert task.state != 'completed'
        queue.reset_mock()


    @with_context
    @patch('pybossa.model.event_listeners.webhook_queue', new=queue)
    def test_trigger_webhook_with_url(self):
        """Test WEBHOOK is triggered with url."""
        url = 'http://server.com'
        project = ProjectFactory.create(webhook=url,)
        task = TaskFactory.create(project=project, n_answers=1)
        TaskRunFactory.create(project=project, task=task)
        result = result_repo.get_by(project_id=project.id, task_id=task.id)
        payload = dict(event='task_completed',
                       project_short_name=project.short_name,
                       project_id=project.id,
                       task_id=task.id,
                       result_id=result.id,
                       fired_at=datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
        assert queue.enqueue.called
        queue.enqueue.assert_called_with(webhook, url, payload)
        queue.reset_mock()

    @with_context
    @patch('pybossa.jobs.send_mail')
    @patch('pybossa.jobs.requests.post')
    def test_trigger_fails_webhook_with_url(self, mock_post, mock_send_mail):
        """Test WEBHOOK fails and sends email is triggered."""
        response = MagicMock()
        response.text = "<html>Something broken</html>"
        response.status_code = 500
        mock_post.return_value = response
        project = ProjectFactory.create(published=True)
        payload = dict(event='task_completed',
                       project_short_name=project.short_name,
                       project_id=project.id,
                       task_id=1,
                       result_id=1,
                       fired_at=datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
        wbh = WebhookFactory.create()
        tmp = webhook('url', payload=payload, oid=wbh.id)
        headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
        mock_post.assert_called_with('url', data=json.dumps(payload), headers=headers)
        subject = "Broken: %s webhook failed" % project.name
        body = 'Sorry, but the webhook failed'
        mail_dict = dict(recipients=self.flask_app.config.get('ADMINS'),
                         subject=subject, body=body, html=tmp.response)
        mock_send_mail.assert_called_with(mail_dict)

    @with_context
    @patch('pybossa.jobs.send_mail')
    @patch('pybossa.jobs.requests.post')
    def test_trigger_fails_webhook_with_no_url(self, mock_post, mock_send_mail):
        """Test WEBHOOK fails and sends email is triggered when no URL or failed connection."""
        mock_post.side_effect = requests.exceptions.ConnectionError('Not URL')
        project = ProjectFactory.create(published=True)
        payload = dict(event='task_completed',
                       project_short_name=project.short_name,
                       project_id=project.id,
                       task_id=1,
                       result_id=1,
                       fired_at=datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
        wbh = WebhookFactory.create()
        tmp = webhook(None, payload=payload, oid=wbh.id)
        headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
        #mock_post.assert_called_with('url', data=json.dumps(payload), headers=headers)
        subject = "Broken: %s webhook failed" % project.name
        body = 'Sorry, but the webhook failed'
        mail_dict = dict(recipients=self.flask_app.config.get('ADMINS'),
                         subject=subject, body=body, html=tmp.response)
        mock_send_mail.assert_called_with(mail_dict)

    @with_context
    @patch('pybossa.jobs.send_mail')
    @patch('pybossa.jobs.requests.post', side_effect=requests.exceptions.ConnectionError())
    def test_trigger_fails_webhook_with_url_connection_error(self, mock_post, mock_send_mail):
        """Test WEBHOOK fails and sends email is triggered when there is a connection error."""
        project = ProjectFactory.create(published=True)
        payload = dict(event='task_completed',
                       project_short_name=project.short_name,
                       project_id=project.id,
                       task_id=1,
                       result_id=1,
                       fired_at=datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
        wbh = WebhookFactory.create()
        tmp = webhook('url', payload=payload, oid=wbh.id)
        headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
        mock_post.assert_called_with('url', data=json.dumps(payload), headers=headers)
        subject = "Broken: %s webhook failed" % project.name
        body = 'Sorry, but the webhook failed'
        mail_dict = dict(recipients=self.flask_app.config.get('ADMINS'),
                         subject=subject, body=body, html=tmp.response)
        mock_send_mail.assert_called_with(mail_dict)
Example #24
File: api.py  Project: sleeka/Textman
def clear_database():
    counter_db = StrictRedis(connection_pool=pool)
    counter_db.flushall()
    counter_db.set('global_message_id', 0)
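
pool is created elsewhere in api.py; a typical definition (an assumption, not the project's code) is a module-level ConnectionPool shared by all handlers:

from redis import ConnectionPool

# Assumed connection settings; the project may use a different host or db.
pool = ConnectionPool(host='localhost', port=6379, db=0)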
Example #25
File: empty.py  Project: wliustc/vaayne.com
# -*- coding:utf-8 -*-
# Created by Vaayne at 2016/09/03 20:21

import pymongo
from redis import StrictRedis

rds = StrictRedis(host='localhost', port=6379, password='******')

mgd = pymongo.MongoClient().blog

r = rds.flushall()
m = mgd.posts.remove({})

print(r)
print(m)
class TestCheckTasksRequestedByUser(object):
    def setUp(self):
        self.connection = StrictRedis()
        self.connection.flushall()

    @patch('pybossa.api.task_run.get_user_id_or_ip')
    def test_check_task_requested_by_user_authenticated_key_exists(self, user):
        """_check_task_requested_by_user should return True for an authorized
        user that requested a task"""
        user.return_value = {'user_id': 33, 'user_ip': None}
        taskrun = TaskRun(task_id=22)
        key = 'pybossa:task_requested:user:33:task:22'
        self.connection.setex(key, 10, True)

        check = _check_task_requested_by_user(taskrun, self.connection)

        assert check is True, check

    @patch('pybossa.api.task_run.get_user_id_or_ip')
    def test_check_task_requested_by_user_anonymous_key_exists(self, user):
        """_check_task_requested_by_user should return True for an anonymous
        user that requested a task"""
        user.return_value = {'user_id': None, 'user_ip': '127.0.0.1'}
        taskrun = TaskRun(task_id=22)
        key = 'pybossa:task_requested:user:127.0.0.1:task:22'
        self.connection.setex(key, 10, True)

        check = _check_task_requested_by_user(taskrun, self.connection)

        assert check is True, check

    @patch('pybossa.api.task_run.get_user_id_or_ip')
    def test_check_task_requested_by_user_wrong_key(self, user):
        """_check_task_requested_by_user should return False for a user that did
        not request a task"""
        user.return_value = {'user_id': 33, 'user_ip': None}
        taskrun = TaskRun(task_id=22)
        key = 'pybossa:task_requested:user:88:task:44'
        self.connection.setex(key, 10, True)

        check = _check_task_requested_by_user(taskrun, self.connection)

        assert check is False, check

    @patch('pybossa.api.task_run.get_user_id_or_ip')
    def test_check_task_requested_by_user_authenticated_deletes_key(
            self, user):
        """_check_task_requested_by_user deletes the key after checking that
        an authenticated user requested the task"""
        user.return_value = {'user_id': 33, 'user_ip': None}
        taskrun = TaskRun(task_id=22)
        key = 'pybossa:task_requested:user:33:task:22'
        self.connection.setex(key, 10, True)

        _check_task_requested_by_user(taskrun, self.connection)
        key_deleted = self.connection.get(key) is None

        assert key_deleted is True, key_deleted

    @patch('pybossa.api.task_run.get_user_id_or_ip')
    def test_check_task_requested_by_user_anonymous_preserves_key(self, user):
        """_check_task_requested_by_user does not delete the key after checking
        that an anonymous user requested the task (in case many simultaneous
        anonymous users are sharing the same IP)"""
        user.return_value = {'user_id': None, 'user_ip': '127.0.0.1'}
        taskrun = TaskRun(task_id=22)
        key = 'pybossa:task_requested:user:127.0.0.1:task:22'
        self.connection.setex(key, 10, True)

        _check_task_requested_by_user(taskrun, self.connection)
        key_deleted = self.connection.get(key) is None

        assert key_deleted is False, key_deleted
Example #27
class TestWebHooks(Test):
    def setUp(self):
        super(TestWebHooks, self).setUp()
        self.connection = StrictRedis()
        self.connection.flushall()
        self.project = ProjectFactory.create()
        self.webhook_payload = dict(project_id=self.project.id,
                                    project_short_name=self.project.short_name)

    @with_context
    @patch('pybossa.jobs.requests.post')
    def test_webhooks(self, mock):
        """Test WEBHOOK works."""
        mock.return_value = FakeResponse(text=json.dumps(dict(foo='bar')),
                                         status_code=200)
        err_msg = "The webhook should return True from patched method"
        assert webhook('url', self.webhook_payload), err_msg
        err_msg = "The post method should be called"
        assert mock.called, err_msg

    @with_context
    @patch('pybossa.jobs.requests.post')
    def test_webhooks_connection_error(self, mock):
        """Test WEBHOOK with connection error works."""
        import requests
        from pybossa.core import webhook_repo
        mock.side_effect = requests.exceptions.ConnectionError
        err_msg = "A webhook should be returned"
        res = webhook('url', self.webhook_payload)
        assert res.response == 'Connection Error', err_msg
        assert res.response_status_code is None, err_msg
        wh = webhook_repo.get(1)
        assert wh.response == res.response, err_msg
        assert wh.response_status_code == res.response_status_code, err_msg

    @with_context
    @patch('pybossa.jobs.requests.post')
    def test_webhooks_without_url(self, mock):
        """Test WEBHOOK without url works."""
        mock.post.return_value = True
        err_msg = "The webhook should return Connection Error"
        res = webhook(None, self.webhook_payload, None)
        assert res.response == 'Connection Error', err_msg
        assert res.response_status_code is None, err_msg

    @with_context
    @patch('pybossa.model.event_listeners.webhook_queue', new=queue)
    def test_trigger_webhook_without_url(self):
        """Test WEBHOOK is triggered without url."""
        project = ProjectFactory.create()
        task = TaskFactory.create(project=project, n_answers=1)
        TaskRunFactory.create(project=project, task=task)
        assert queue.enqueue.called is False, queue.enqueue.called
        queue.reset_mock()

    @with_context
    @patch('pybossa.model.event_listeners.webhook_queue', new=queue)
    def test_trigger_webhook_with_url_not_completed_task(self):
        """Test WEBHOOK is not triggered for uncompleted tasks."""
        import random
        project = ProjectFactory.create()
        task = TaskFactory.create(project=project)
        for i in range(1, random.randrange(2, 5)):
            TaskRunFactory.create(project=project, task=task)
        assert queue.enqueue.called is False, queue.enqueue.called
        assert task.state != 'completed'
        queue.reset_mock()

    @with_context
    @patch('pybossa.model.event_listeners.webhook_queue', new=queue)
    def test_trigger_webhook_with_url(self):
        """Test WEBHOOK is triggered with url."""
        url = 'http://server.com'
        project = ProjectFactory.create(webhook=url, )
        task = TaskFactory.create(project=project, n_answers=1)
        TaskRunFactory.create(project=project, task=task)
        result = result_repo.get_by(project_id=project.id, task_id=task.id)
        payload = dict(
            event='task_completed',
            project_short_name=project.short_name,
            project_id=project.id,
            task_id=task.id,
            result_id=result.id,
            fired_at=datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
        assert queue.enqueue.called
        queue.enqueue.assert_called_with(webhook, url, payload)
        queue.reset_mock()
Example #28
class DBClient(object):
    """ Class used to abstract the use of the database/cache """

    def __init__(self, db_host, db_port=6379, db_name=None):
        self.db_host = db_host
        self.db_port = db_port
        self.db_conn = None

    def connect(self):
        """ This function inits the connection to the database """
        try:
            self.db_conn = StrictRedis(host=self.db_host, port=self.db_port)
        except Exception as exp:
            logger.error("[SnmpBooster] [code 1302] Redis Connection error:"
                         " %s" % str(exp))
            return False
        return True

    def disconnect(self):
        """ This function kills the connection to the database """
        pass

    @staticmethod
    def build_key(part1, part2):
        """ Build Redis key

        >>> build_key("part1", "part2")
        'part1:part2'
        """
        return ":".join((str(part1), str(part2)))

    def update_service_init(self, host, service, data):
        """ Insert/Update/Upsert service information in Redis by Arbiter """
        # We need to generate a Redis key like host:3 => ['service', 'service2']
        # that links a check interval to a list of services
        key_ci = self.build_key(host, data["check_interval"])
        # Add service in host:interval list
        try:
            self.db_conn.sadd(key_ci, service)
        except Exception as exp:
            logger.error("[SnmpBooster] [code 1303] [%s, %s] "
                         "%s" % (host,
                                 service,
                                 str(exp)))
            return (None, True)
        # Then properly update the host:service key
        return self.update_service(host, service, data)

    def update_service(self, host, service, data, force=False):
        """ This function updates/inserts a service
        * It is used by the Arbiter in hook_late_configuration
          to put the configuration in the database
        * It is used by the Poller to put collected data in the database
        The 'force' flag is used to overwrite the service data (used by
        the cache manager)

        Return
        * query_result: None
        * error: bool
        """

        # Get key
        key = self.build_key(host, service)
        if not force:
            old_dict = self.db_conn.get(key)
            if old_dict is not None:
                old_dict = ast.literal_eval(old_dict)
            # Merge old data and new data
            data = merge_dicts(old_dict, data)

        if data is None:
            return (None, True)

        # Save in redis
        try:
            self.db_conn.set(key, data)
        except Exception as exp:
            logger.error("[SnmpBooster] [code 1304] [%s, %s] "
                         "%s" % (host,
                                 service,
                                 str(exp)))
            return (None, True)

        return (None, False)

    def get_service(self, host, service):
        """ This function gets one service from the database

        Return
        :query_result: dict
        """
        # Get key
        key = self.build_key(host, service)
        # Get service
        try:
            data = self.db_conn.get(key)
        except Exception as exp:
            logger.error("[SnmpBooster] [code 1305] [%s, %s] "
                         "%s" % (host,
                                 service,
                                 str(exp)))
            return None
        return ast.literal_eval(data) if data is not None else None

    def get_services(self, host, check_interval):
        """ This function Gets all services with the same host
        and check_interval

        Return
        :query_result: list of dicts
        """
        # Get key
        key_ci = self.build_key(host, check_interval)
        # Get services
        try:
            servicelist = self.db_conn.smembers(key_ci)

        except Exception as exp:
            logger.error("[SnmpBooster] [code 1306] [%s] "
                         "%s" % (host,
                                 str(exp)))
            return None

        if servicelist is None:
            # TODO : Bailout properly
            return None

        dict_list = []
        for service in servicelist:
            try:
                key = self.build_key(host, service)
                data = self.db_conn.get(key)
                if data is None:
                    logger.error("[SnmpBooster] [code 1307] [%s] "
                                 "Unknown service %s", host, service)
                    continue
                dict_list.append(ast.literal_eval(data))
            except Exception as exp:
                logger.error("[SnmpBooster] [code 1308] [%s] "
                             "%s" % (host,
                                     str(exp)))
        return dict_list

    def show_keys(self):
        """ Get all database keys """
        return self.db_conn.keys()

    def get_hosts_from_service(self, service):
        """ List hosts with a service which match with the pattern """
        results = []
        for key in self.db_conn.keys():
            if re.search(":.*"+service, key) is None:
                # Look for service
                continue
            results.append(ast.literal_eval(self.db_conn.get(key)))

        return results

    def get_services_from_host(self, host):
        """ List all services from hosts which match the pattern """
        results = []
        for key in self.db_conn.keys():
            if re.search(host + ".*:", key) is None:
                # Look for host
                continue
            if re.search(":[0-9]+$", key) is not None:
                # we skip host:interval
                continue
            results.append(ast.literal_eval(self.db_conn.get(key)))

        return results

    def clear_cache(self):
        """ Clear all datas in database """
        self.db_conn.flushall()

    def get_all_services(self):
        """ List all services """
        results = []
        for key in self.db_conn.keys():
            if re.search(":[0-9]*$", key) is None:
                host, service = key.split(":", 1)
                results.append(self.get_service(host, service))

        return results

    def get_all_interval_keys(self):
        """ List all host:interval keys which match interval pattern """
        results = []
        for key in self.db_conn.keys():
            if re.search(":[0-9]*$", key) is not None:
                results.append(key)

        return results

    def delete_services(self, key_list):
        """ Delete services which match keys in key_list """
        nb_del = self.db_conn.delete(*[self.build_key(host, service)
                                       for host, service in key_list])
        if nb_del > 0:
            interval_key = self.get_all_interval_keys()
            for host, service in key_list:
                for key in [key for key in interval_key if key.startswith(host)]:
                    self.db_conn.srem(key, service)
        return nb_del

    def delete_host(self, host):
        """ Delete all services in the specified host """
        to_del = []
        for key in self.db_conn.keys():
            if re.search(host+":", key) is not None:
                to_del.append(key)
        if len(to_del) > 0:
            return self.db_conn.delete(*to_del)
        return 0
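
For reference, a minimal usage sketch of DBClient; the host and service names are hypothetical, and it assumes an old redis-py that coerces the dict passed to SET into its repr (which get_service undoes with ast.literal_eval):

db = DBClient(db_host='127.0.0.1')
if db.connect():
    # Register the service under host:interval and store its data
    db.update_service_init('host1', 'ifTraffic', {'check_interval': 300})
    print(db.get_service('host1', 'ifTraffic'))
    print(db.get_services('host1', 300))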
Example #29
0
class NetworkCandidateGraph(network.CandidateGraph):
    node_factory = NetworkNode
    edge_factory = NetworkEdge

    def __init__(self, *args, **kwargs):
        super(NetworkCandidateGraph, self).__init__(*args, **kwargs)
        #self._setup_db_connection()
        self._setup_queues()
        #self._setup_asynchronous_queue_watchers()
        # Job metadata
        self.job_status = defaultdict(dict)

        for i, d in self.nodes(data='data'):
            d.parent = self
        for s, d, e in self.edges(data='data'):
            e.parent = self

        self.processing_queue = config['redis']['processing_queue']

    def _setup_db_connection(self):
        """
        Set up a database connection and session(s)
        """
        try:
            Base.metadata.bind = engine
            Base.metadata.create_all(tables=[Network.__table__, Overlay.__table__,
                                         Edges.__table__, Costs.__table__, Matches.__table__,
                                         Cameras.__table__])
        except ValueError:
            warnings.warn('No SQLAlchemy engine available. Tables not pushed.')

    def _setup_queues(self):
        """
        Set up a two-queue redis connection for pushing and pulling work/results
        """
        conf = config['redis']
        self.redis_queue = StrictRedis(host=conf['host'],
                                       port=conf['port'],
                                       db=0)

    def _setup_asynchronous_queue_watchers(self, nwatchers=3):
        """
        Setup a sentinel class to watch the results queue
        """
        # Set up the consumers of the 'completed' queue
        for i in range(nwatchers):
            # Set up the sentinel class that watches the registered queues for messages
            s = AsynchronousQueueWatcher(self, self.redis_queue, config['redis']['completed_queue'])
            s.setDaemon(True)
            s.start()

        # Setup a watcher on the working queue for jobs that fail
        s = AsynchronousFailedWatcher(self, self.redis_queue, config['redis']['working_queue'])
        s.setDaemon(True)
        s.start()

    def empty_queues(self):
        """
        Delete all messages from the redis queue. This is a convenience method.
        The `redis_queue` object is a redis-py StrictRedis object with API
        documented at: https://redis-py.readthedocs.io/en/latest/#redis.StrictRedis
        """
        return self.redis_queue.flushall()

    def apply(self, function, on='edge', out=None, args=(), walltime='01:00:00', **kwargs):

        options = {
            'edge' : self.edges,
            'edges' : self.edges,
            'e' : self.edges,
            0 : self.edges,
            'node' : self.nodes,
            'nodes' : self.nodes,
            'n' : self.nodes,
            1 : self.nodes
        }

        # Determine which obj will be called
        onobj = options[on]

        res = []
        key = 1
        if isinstance(on, EdgeView):
            key = 2

        for job_counter, elem in enumerate(onobj.data('data')):
            # Determine if we are working with an edge or a node
            if len(elem) > 2:
                id = (elem[0], elem[1])
                image_path = (elem[2].source['image_path'],
                              elem[2].destination['image_path'])
            else:
                id = elem[0]
                image_path = elem[1]['image_path']

            msg = {'id': id,
                   'func': function,
                   'args': args,
                   'kwargs': kwargs,
                   'walltime': walltime,
                   'image_path': image_path,
                   'param_step': 1}

            self.redis_queue.rpush(self.processing_queue, json.dumps(msg))

        # SLURM is 1 based, while enumerate is 0 based
        job_counter += 1

        # Submit the jobs
        spawn_jobarr('acn_submit', job_counter,
                     mem=config['cluster']['processing_memory'],
                     time=walltime,
                     queue=config['cluster']['queue'],
                     outdir=config['cluster']['cluster_log_dir']+'/slurm-%A_%a.out',
                     env=config['python']['env_name'])

        return job_counter

    def generic_callback(self, msg):
        id = msg['id']
        if isinstance(id, (int, float, str)):
            # Working with a node
            obj = self.nodes[id]['data']
        else:
            # Working with an edge
            obj = self.edges[id]['data']

        func = msg['func']
        obj.job_status[func]['success'] = msg['success']

        # If the job was successful, no need to resubmit
        if msg['success']:
            return

    def generate_vrts(self, **kwargs):
        for i, n in self.nodes(data='data'):
            n.generate_vrt(**kwargs)

    def compute_overlaps(self):
        query = """
    SELECT ST_AsEWKB(geom) AS geom FROM ST_Dump((
        SELECT ST_Polygonize(the_geom) AS the_geom FROM (
            SELECT ST_Union(the_geom) AS the_geom FROM (
                SELECT ST_ExteriorRing(footprint_latlon) AS the_geom
                FROM images) AS lines
        ) AS noded_lines
    )
)"""
        session = Session()
        oquery = session.query(Overlay)
        iquery = session.query(Images)

        rows = []
        for q in self._engine.execute(query).fetchall():
            overlaps = []
            b = bytes(q['geom'])
            qgeom = shapely.wkb.loads(b)
            res = iquery.filter(Images.footprint_latlon.ST_Intersects(from_shape(qgeom, srid=949900)))
            for i in res:
                fgeom = to_shape(i.footprint_latlon)
                area = qgeom.intersection(fgeom).area
                if area < 1e-6:
                    continue
                overlaps.append(i.id)
            o = Overlay(geom='SRID=949900;{}'.format(qgeom.wkt), overlaps=overlaps)
            res = oquery.filter(Overlay.overlaps == o.overlaps).first()
            if res is None:
                rows.append(o)

        session.bulk_save_objects(rows)
        session.commit()

        res = oquery.filter(sqlalchemy.func.cardinality(Overlay.overlaps) <= 1)
        res.delete(synchronize_session=False)
        session.commit()
        session.close()

    def create_network(self, nodes=()):  # immutable default avoids shared mutable state
        cmds = 0
        session = Session()
        for res in session.query(Overlay):
            msg = json.dumps({'oid':res.id,'time':time.time()})

            # If nodes are passed, process only those overlaps containing
            # the provided node(s)
            if nodes:
                for r in res.overlaps:
                    if r in nodes:
                        self.redis_queue.rpush(config['redis']['processing_queue'], msg)
                        cmds += 1
                        break
            else:
                self.redis_queue.rpush(config['redis']['processing_queue'], msg)
                cmds += 1
        script = 'acn_create_network'
        spawn_jobarr(script, cmds,
                    mem=config['cluster']['processing_memory'],
                    queue=config['cluster']['queue'],
                    env=config['python']['env_name'])
        session.close()

    @classmethod
    def from_database(cls, query_string='SELECT * FROM public.images'):
        """
        This is a constructor that takes the results from an arbitrary query string,
        uses those as a subquery into a standard polygon overlap query and
        returns a NetworkCandidateGraph object. By default, all images
        in the Images table will be used in the outer query.

        Parameters
        ----------
        query_string : str
                       A valid SQL select statement that targets the Images table

        Usage
        -----
        Here, we provide usage examples for a few, potentially common use cases.

        ## Spatial Query
        This example selects those images that intersect a given bounding polygon.  The polygon is
        specified as a Well Known Text LINESTRING with the first and last points being the same.
        The query says, select the footprint_latlon (the bounding polygons in the database) that
        intersect the user provided polygon (the LINESTRING) in the given spatial reference system
        (SRID), 949900.

        "SELECT * FROM Images WHERE ST_INTERSECTS(footprint_latlon, ST_Polygon(ST_GeomFromText('LINESTRING(159 10, 159 11, 160 11, 160 10, 159 10)'),949900)) = TRUE"
        ## Select from a specific orbit
        This example selects those images that are from a particular orbit. In this case,
        the regex string pulls all P##_* orbits and creates a graph from them. This method
        does not guarantee that the graph is fully connected.

        "SELECT * FROM Images WHERE (split_part(path, '/', 6) ~ 'P[0-9]+_.+') = True"

        """
        composite_query = """WITH
	i as ({})
SELECT i1.id as i1_id,i1.path as i1_path, i2.id as i2_id, i2.path as i2_path
FROM
	i as i1, i as i2
WHERE ST_INTERSECTS(i1.footprint_latlon, i2.footprint_latlon) = TRUE
AND i1.id < i2.id""".format(query_string)
        session = Session()
        res = session.execute(composite_query)

        adjacency = defaultdict(list)
        adjacency_lookup = {}
        for r in res:
            sid, spath, did, dpath = r

            adjacency_lookup[spath] = sid
            adjacency_lookup[dpath] = did
            if spath != dpath:
                adjacency[spath].append(dpath)
        session.close()
        # Add nodes that do not overlap any images
        obj = cls.from_adjacency(adjacency, node_id_map=adjacency_lookup, config=config)

        return obj

    @classmethod
    def from_filelist(cls, filelist, basepath=None):
        """
        This methods instantiates a network candidate graph by first parsing
        the filelist and adding those images to the database. This method then
        dispatches to the from_database cls method to create the network
        candidate graph object.
        """
        if isinstance(filelist, str):
            filelist = io_utils.file_to_list(filelist)

        if basepath:
            filelist = [(f, os.path.join(basepath, f)) for f in filelist]
        else:
            filelist = [(os.path.basename(f), f) for f in filelist]

        parent = Parent(config)
        # Get each of the images added to the DB (duplicates, by PATH, are omitted)
        for f in filelist:
            n = NetworkNode(image_name=f[0], image_path=f[1], parent=parent)
        pathlist = [f[1] for f in filelist]

        qs = 'SELECT * FROM public.Images WHERE public.Images.path IN ({})'.format(','.join("'{0}'".format(p) for p in pathlist))
        return NetworkCandidateGraph.from_database(query_string=qs)
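
A hedged end-to-end sketch of how this class appears intended to be driven; the file names are hypothetical, the database and cluster entries in config are assumed to be in place, and function must be JSON-serializable (in practice the name of a registered function), since apply embeds it in a JSON message:

ncg = NetworkCandidateGraph.from_filelist(['img_a.cub', 'img_b.cub'],
                                          basepath='/data/images')
ncg.compute_overlaps()                 # populate the Overlay table
njobs = ncg.apply('extract_features')  # one queued message per edge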
Example #30
0
# Presumably a pytest fixture: it yields a client bound to the given test
# database and flushes Redis on teardown (an @pytest.fixture decorator
# appears to have been lost in extraction).
def connection(redis_db_num):
    conn = StrictRedis(db=redis_db_num)
    try:
        yield conn
    finally:
        conn.flushall()
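
Assuming the generator above is registered as a pytest fixture (with @pytest.fixture, and redis_db_num supplied by another fixture), a test would use it like this; the flushall in the finally block then runs on teardown:

def test_round_trip(connection):
    connection.set('key', 'value')
    assert connection.get('key') == b'value'
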
class StaleHTTPClientTestCase(AsyncTestCase):

    def setUp(self):
        super(StaleHTTPClientTestCase, self).setUp()
        self.fake_client = FakeClient()
        self.cache = StrictRedis()
        self.cache.flushall()

    @gen_test
    def test_returns_response(self):
        fake_response = self.fake_client.add_response(
            code=200, body=b'fake response', headers={'fake': 'header'})

        client = StaleHTTPClient(cache=self.cache, client=self.fake_client)

        response = yield client.fetch('/url')

        self.assertResponseEqual(response, fake_response)

    @gen_test
    def test_accepts_request_object(self):
        fake_response = self.fake_client.add_response()

        client = StaleHTTPClient(cache=self.cache, client=self.fake_client)

        request = HTTPRequest('/url')
        response = yield client.fetch(request)

        self.assertIs(response, fake_response)

    @gen_test
    def test_returns_real_response(self):
        expected_response = self.fake_client.add_response()

        client = StaleHTTPClient(cache=self.cache, client=self.fake_client)
        response = yield client.fetch('/url')

        self.assertIs(response, expected_response)

    @gen_test
    def test_returns_response_from_primary_cache(self):
        response = self.fake_client.add_response()

        client = StaleHTTPClient(cache=self.cache, client=self.fake_client)
        response = yield client.fetch('/url')
        cached_response = yield client.fetch('/url')

        self.assertIsNot(cached_response, response)
        self.assertResponseEqual(cached_response, response)

    @gen_test
    def test_returns_stale_response_after_error(self):
        expected_response = self.fake_client.add_response(body=b'stale')
        error_response = self.fake_client.add_response(body=b'error', code=500)

        client = StaleHTTPClient(
            cache=self.cache, client=self.fake_client, ttl=0.001)

        yield client.fetch('/url')
        time.sleep(0.002)
        stale_response = yield client.fetch('/url')

        self.assertIsNot(stale_response, error_response)
        self.assertResponseEqual(stale_response, expected_response)

    @gen_test
    def test_raises_error_after_error_with_empty_cache(self):
        self.fake_client.add_response(body=b'error', code=500)

        client = StaleHTTPClient(
            cache=self.cache, client=self.fake_client, ttl=None)

        with self.assertRaises(HTTPError):
            yield client.fetch('/url')

    @gen_test
    def test_returns_error_when_empty_cache_and_raise_error_flag_is_off(self):
        expected_response = self.fake_client.add_response(
            body=b'error', code=500)

        client = StaleHTTPClient(
            cache=self.cache, client=self.fake_client, ttl=None)

        response = yield client.fetch('/url', raise_error=False)

        self.assertIs(response, expected_response)

    @gen_test
    def test_caches_multiple_urls(self):
        first_expected = self.fake_client.add_response()
        second_expected = self.fake_client.add_response()

        client = StaleHTTPClient(
            cache=self.cache, client=self.fake_client, ttl=1)

        # Populate cache
        yield [client.fetch('/first'), client.fetch('/second')]

        # Read from cache
        first_response, second_response = yield [
            client.fetch('/first'), client.fetch('/second')]

        self.assertIsNot(first_response, first_expected)
        self.assertIsNot(second_response, second_expected)

        self.assertResponseEqual(first_response, first_expected)
        self.assertResponseEqual(second_response, second_expected)

    @gen_test
    def test_varies_cache_by_headers(self):
        json_response = self.fake_client.add_response(body=b'{}')
        xml_response = self.fake_client.add_response(body=b'<xml />')

        client = StaleHTTPClient(
            cache=self.cache, client=self.fake_client, ttl=1)

        # Populate and read from cache
        for i in range(2):
            first_response, second_response = yield [
                client.fetch('/url', headers={'Accept': 'application/json'}, vary=['Accept']),
                client.fetch('/url', headers={'Accept': 'text/xml'}, vary=['Accept'])
            ]

        self.assertIsNot(first_response, json_response)
        self.assertIsNot(second_response, xml_response)

        self.assertResponseEqual(first_response, json_response)
        self.assertResponseEqual(second_response, xml_response)

    def assertResponseEqual(self, response, expected_response):
        self.assertEqual(response.body, expected_response.body)
        self.assertEqual(response.code, expected_response.code)
        self.assertEqual(response.headers, expected_response.headers)

        self.assertIsInstance(response.headers, HTTPHeaders)

        self.assertIsInstance(response.request, HTTPRequest)
        self.assertIsInstance(response.request.headers, HTTPHeaders)
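
The class under test is not shown. The following synchronous sketch captures the behaviour the tests pin down, i.e. a two-tier cache with a fresh entry that expires after ttl and a persistent stale copy used as a fallback on 5xx responses. The key scheme, the pickled payloads, and the omission of coroutines and vary-header handling are all simplifications, not the real StaleHTTPClient:

import pickle
from tornado.httpclient import HTTPError

class StaleHTTPClientSketch(object):
    def __init__(self, cache, client, ttl=None):
        self.cache = cache
        self.client = client
        self.ttl = ttl

    def fetch(self, url, raise_error=True):
        fresh = self.cache.get('fresh:' + url)
        if fresh is not None:
            return pickle.loads(fresh)
        response = self.client.fetch(url)
        if response.code >= 500:
            stale = self.cache.get('stale:' + url)
            if stale is not None:
                return pickle.loads(stale)
            if raise_error:
                raise HTTPError(response.code)
            return response
        payload = pickle.dumps(response)
        if self.ttl:
            # px takes milliseconds, so sub-second TTLs survive
            self.cache.set('fresh:' + url, payload, px=int(self.ttl * 1000))
        self.cache.set('stale:' + url, payload)
        return response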
Example #32
0
class TestWebHooks(Test):

    def setUp(self):
        super(TestWebHooks, self).setUp()
        self.connection = StrictRedis()
        self.connection.flushall()


    @with_context
    @patch('pybossa.model.requests')
    def test_webhooks(self, mock):
        """Test WEBHOOK works."""
        mock.post.return_value = True
        err_msg = "The webhook should return True from patched method"
        assert webhook('url'), err_msg
        err_msg = "The post method should be called"
        assert mock.post.called, err_msg

    @with_context
    @patch('pybossa.model.requests')
    def test_webhooks_without_url(self, mock):
        """Test WEBHOOK without url works."""
        mock.post.return_value = True
        err_msg = "The webhook should return False"
        assert webhook(None) is False, err_msg

    @with_context
    @patch('pybossa.model.task_run.webhook_queue', new=queue)
    def test_trigger_webhook_without_url(self):
        """Test WEBHOOK is triggered without url."""
        project = ProjectFactory.create()
        task = TaskFactory.create(project=project, n_answers=1)
        TaskRunFactory.create(project=project, task=task)
        assert queue.enqueue.called is False, queue.enqueue.called
        queue.reset_mock()

    @with_context
    @patch('pybossa.model.task_run.webhook_queue', new=queue)
    def test_trigger_webhook_with_url_not_completed_task(self):
        """Test WEBHOOK is not triggered for uncompleted tasks."""
        import random
        project = ProjectFactory.create()
        task = TaskFactory.create(project=project)
        for i in range(1, random.randrange(2, 5)):
            TaskRunFactory.create(project=project, task=task)
        assert queue.enqueue.called is False, queue.enqueue.called
        assert task.state != 'completed'
        queue.reset_mock()


    @with_context
    @patch('pybossa.model.task_run.webhook_queue', new=queue)
    def test_trigger_webhook_with_url(self):
        """Test WEBHOOK is triggered with url."""
        url = 'http://server.com'
        project = ProjectFactory.create(webhook=url,)
        task = TaskFactory.create(project=project, n_answers=1)
        TaskRunFactory.create(project=project, task=task)
        payload = dict(event='task_completed',
                       project_short_name=project.short_name,
                       project_id=project.id,
                       task_id=task.id,
                       fired_at=datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
        assert queue.enqueue.called
        queue.enqueue.assert_called_with(webhook, url, payload)
        queue.reset_mock()
Example #33
0
from redis import StrictRedis
from pymongo import MongoClient
import json
import random


def get_url():
    with open('./gx_urls.json', 'r') as f:
        temp = json.load(f)

    urls = [x + '&offset={}' for x in temp]
    return urls


if __name__ == "__main__":
    host = '10.3.9.133'
    redis_port = 6379
    start_url = 'hotel:start_urls'
    r = StrictRedis(host=host, port=redis_port, db=0, password='******')
    r.flushall()

    urls = get_url()
    random.shuffle(urls)

    for url in urls:
        r.lpush(start_url, url)
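
On the consuming side, a worker would pop these seed URLs from the same list and fill in the offset placeholder. This is a hedged sketch reusing the connection parameters above:

    consumer = StrictRedis(host=host, port=redis_port, db=0, password='******')
    raw = consumer.rpop(start_url)       # lpush + rpop yields FIFO order
    if raw is not None:
        first_page_url = raw.decode('utf-8').format(0)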
Example #34
0
File: gistio.py Project: blaze33/gistio
    if content is None:
        abort(404)
    resp = make_response(content, 200)
    resp.headers['Content-Type'] = 'application/json'
    resp.headers['X-Cache-Hit'] = cache_hit
    resp.headers['X-Expire-TTL-Seconds'] = cache.ttl(id)
    return resp


def fetch_and_render(id):
    """Fetch and render a post from the Github API"""
    r = requests.get('https://api.github.com/gists/{}'.format(id))
    if r.status_code != 200:
        return None
    decoded = r.json().copy()
    for f in decoded['files'].values():
        if f['language'] in RENDERABLE:
            f['rendered'] = bleach.clean(markdown(f['content']),
                tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES)
    encoded = json.dumps(decoded)
    cache.setex(id, CACHE_EXPIRATION, encoded)
    return encoded


if __name__ == '__main__':
    if HEROKU:
        app.run(host='0.0.0.0', port=PORT)
    else:
        cache.flushall()
        app.run(host='0.0.0.0', debug=True, port=PORT)
Example #35
0
class RedisDatabase:
    """
	docstring for RedisDatabase
	"""
    def __init__(self, password=""):
        self.start_redis(password)

    def is_redis_running(self):
        try:
            if sys.platform == 'win32':
                process = len(
                    os.popen('tasklist | findstr ' +
                             "redis-server.exe").readlines())
                if process >= 1:
                    return True
                else:
                    return False
            elif sys.platform == 'darwin':
                # macOS
                return True
            else:
                # other platform
                return True
        except Exception as e:
            raise Exception(
                'Unable to check redis running state, error message: ' +
                str(e))

    def start_redis(self, password=''):
        try:
            if not self.is_redis_running():
                if sys.platform == 'win32':
                    os.system("e:/redis/redis-server --service-start")
                elif sys.platform == 'darwin':
                    # macOS
                    pass
                else:
                    pass
        except Exception as e:
            raise Exception('Unable to start redis, error message: ' + str(e))
        try:
            self.datadb = StrictRedis(host='localhost', port=6379, db=0)
            self.cachedb = StrictRedis(host='localhost',
                                       port=6379,
                                       db=1,
                                       decode_responses=True)
            self.hashdb = StrictRedis(host='localhost', port=6379, db=2)
        except Exception as e:
            raise Exception('Redis connection failed,error message:' + str(e))

    def stop_redis(self):
        try:
            if self.is_redis_running():
                # self.flushall()
                if sys.platform == 'win32':
                    os.system("e:/redis/redis-server --service-stop")
                elif sys.platform == 'darwin':
                    pass
                else:
                    pass
        except Exception as e:
            raise Exception('Unable to stop redis, error message: ' + str(e))

    def set_value(self, obj, data_source):
        """
		Using a dictionary, a MongoDB object, a Net class, an Attr class, a DynamicNet class or a DynamicAttr class
			to set a new entry in Redis.
		"""
        if type(obj) is dict:
            key = self.generate_static_key(data_source, obj['scan'],
                                           obj['atlas'], obj['feature'])
            self.datadb.set(key, obj['value'], ex=1800)
            return self.trans_netattr(obj['scan'],
                                      obj['atlas'], obj['feature'],
                                      pickle.loads(obj['value']))
        elif type(obj) is pymongo.cursor.Cursor:
            value = []
            scan = obj[0]['scan']
            atlas = obj[0]['atlas']
            feature = obj[0]['feature']
            window_length = obj[0]['window_length']
            step_size = obj[0]['step_size']
            key_all = self.generate_dynamic_key(data_source, scan, atlas,
                                                feature, window_length,
                                                step_size)
            pipe = self.datadb.pipeline()
            length = obj.count()
            try:
                pipe.multi()
                pipe.set(key_all + ':0', length, ex=1600)
                for i in range(length):  # the query key ordering keeps results ascending
                    pipe.set(key_all + ':' + str(i + 1), (obj[i]['value']),
                             ex=1800)
                    value.append(pickle.loads(obj[i]['value']))
                pipe.execute()
            except Exception as e:
                raise Exception(
                    'An error occurred when trying to set a value in redis, '
                    'error message: ' + str(e))
            return self.trans_dynamic_netattr(scan, atlas, feature,
                                              window_length, step_size,
                                              np.array(value))
        elif type(obj) is netattr.Net or type(obj) is netattr.Attr:
            key = self.generate_static_key(data_source, obj.scan,
                                           obj.atlasobj.name, obj.feature_name)
            self.datadb.set(key, pickle.dumps(obj.data))
        elif type(obj) is netattr.DynamicNet or type(
                obj) is netattr.DynamicAttr:
            key_all = self.generate_dynamic_key(data_source, obj.scan,
                                                obj.atlasobj.name,
                                                obj.feature_name,
                                                obj.window_length,
                                                obj.step_size)
            length = obj.data.shape[2]
            pipe = self.datadb.pipeline()
            if type(obj) is netattr.DynamicNet:
                flag = True
            else:
                flag = False
            try:
                pipe.multi()
                pipe.set(key_all + ':0', length, ex=1600)
                for i in range(length):  # the query key ordering keeps results ascending
                    if flag:
                        pipe.set(key_all + ':' + str(i + 1),
                                 pickle.dumps(obj.data[:, :, i]),
                                 ex=1800)
                    else:
                        pipe.set(key_all + ':' + str(i + 1),
                                 obj.data[:, i],
                                 ex=1800)
                pipe.execute()
            except Exception as e:
                raise Exception(
                    'An error occurred when trying to set a value in redis, '
                    'error message: ' + str(e))

    def generate_static_key(self, data_source, subject_scan, atlas_name,
                            feature_name):
        key = data_source + ':' + subject_scan + ':' + atlas_name + ':' + feature_name + ':0'
        return key

    def generate_dynamic_key(self, data_source, subject_scan, atlas_name,
                             feature_name, window_length, step_size):
        key = data_source + ':' + subject_scan + ':' + atlas_name + ':' + feature_name + ':1:' + str(
            window_length) + ':' + str(step_size)
        return key

    def get_static_value(self, data_source, subject_scan, atlas_name,
                         feature_name):
        """
		Using data source, scan name, atlasobj name, feature name to query static networks and attributes from Redis.
		If the query succeeds, return a Net or Attr class; if not, return None.
		"""
        key = self.generate_static_key(data_source, subject_scan, atlas_name,
                                       feature_name)
        res = self.datadb.get(key)
        self.datadb.expire(key, 1800)
        if res is not None:
            return self.trans_netattr(subject_scan, atlas_name, feature_name,
                                      pickle.loads(res))
        else:
            return None

    def trans_netattr(self, subject_scan, atlas_name, feature_name, value):
        if value.ndim == 1:  # TODO: revisit this dimension check
            arr = netattr.Attr(value, atlas.get(atlas_name), subject_scan,
                               feature_name)
            return arr
        else:
            net = netattr.Net(value, atlas.get(atlas_name), subject_scan,
                              feature_name)
            return net

    def get_dynamic_value(self, data_source, subject_scan, atlas_name,
                          feature_name, window_length, step_size):
        """
		Using data source, scan name, atlasobj name, feature name, window length, step size to query dynamic
			networks and attributes from Redis.
		If the query succeeds, return a DynamicNet or DynamicAttr class; if not, return None.
		"""
        key_all = self.generate_dynamic_key(data_source, subject_scan,
                                            atlas_name, feature_name,
                                            window_length, step_size)
        if self.datadb.exists(key_all + ':0'):
            pipe = self.datadb.pipeline()
            try:
                pipe.multi()
                length = int(self.datadb.get(key_all + ':0').decode())
                for i in range(1, length + 1, 1):
                    pipe.get(key_all + ':' + str(i))
                res = pipe.execute()
            except Exception as e:
                raise Exception(
                    'An error occurred when trying to get a value in redis, '
                    'error message: ' + str(e))
            try:
                pipe.multi()
                value = []
                for i in range(length):
                    value.append(pickle.loads(res[i]))
                    pipe.expire(key_all + ':' + str(i + 1), 1800)
                pipe.expire(key_all + ':0', 1600)
                pipe.execute()
            except Exception as e:
                raise Exception(
                    'An error occurred when trying to update expiration time '
                    'in redis, error message: ' + str(e))
            return self.trans_dynamic_netattr(subject_scan, atlas_name,
                                              feature_name, window_length,
                                              step_size, np.array(value))
        else:
            return None

    def trans_dynamic_netattr(self, subject_scan, atlas_name, feature_name,
                              window_length, step_size, value):
        if value.ndim == 2:  # TODO: revisit this dimension check
            arr = netattr.DynamicAttr(value.swapaxes(0, 1),
                                      atlas.get(atlas_name), window_length,
                                      step_size, subject_scan, feature_name)
            return arr
        else:
            net = netattr.DynamicNet(
                value.swapaxes(0, 2).swapaxes(0, 1), atlas.get(atlas_name),
                window_length, step_size, subject_scan, feature_name)
            return net

    def exists_key(self,
                   data_source,
                   subject_scan,
                   atlas_name,
                   feature_name,
                   isdynamic=False,
                   window_length=0,
                   step_size=0):
        """
		Using data source, scan name, atlas name, feature name to check the existence of a static entry in Redis.
		You can add isdynamic(True), window length, step size to check the existence of a dynamic entry in Redis.
		"""
        if isdynamic is False:
            return self.datadb.exists(
                self.generate_static_key(data_source, subject_scan, atlas_name,
                                         feature_name))
        else:
            return self.datadb.exists(
                self.generate_dynamic_key(data_source, subject_scan,
                                          atlas_name, feature_name,
                                          window_length, step_size) + ':0')

    """
	Redis supports storing and querying lists as cache.
	Note: the items in the list must be int or float.
	"""

    def set_list_all_cache(self, key, value):
        """
		Store a list to Redis as cache with cache_key.
		Note: check whether the cache_key already exists, or this will overwrite the existing entry.
		"""
        self.cachedb.delete(key)
        for i in value:
            self.cachedb.rpush(key, i)
        #self.cachedb.save()
        return self.cachedb.llen(key)

    def set_list_cache(self, key, value):
        """
		Append a value to the end of a list stored in Redis under cache_key.
		If the given key is empty in Redis, a new list will be created.
		"""
        self.cachedb.rpush(key, value)
        #self.cachedb.save()
        return self.cachedb.llen(key)

    def get_list_cache(self, key, start=0, end=-1):
        """
		Return a list with given cache_key in Redis.
		"""
        res = self.cachedb.lrange(key, start, end)
        lst = []
        for x in res:
            if x.isdigit():
                lst.append(int(x))
            else:
                lst.append(float(x))
        return lst

    def exists_key_cache(self, key):
        """
		Check the existence of a list in Redis by cache_key.
		"""
        return self.cachedb.exists(key)

    def delete_key_cache(self, key):
        """
		Delete an entry in Redis by cache_key.
		If the given key is empty in Redis, do nothing.
		"""
        value = self.cachedb.delete(key)
        #self.cachedb.save()
        return value

    def clear_cache(self):
        """
		Delete all the entries in Redis.
		"""
        self.cachedb.flushdb()

    """
	Redis supports storing and querying hashes.
	Note: the keys in a hash must be strings.
	"""

    def set_hash_all(self, name, hash):
        """
		Store a hash to Redis with hash_name and a hash.
		Note: check whether the hash_name already exists, or this will overwrite the existing hash.
		"""
        self.hashdb.delete(name)
        for i in hash:
            hash[i] = pickle.dumps(hash[i])
        self.hashdb.hmset(name, hash)

    def set_hash(self, name, item1, item2=''):
        """
		Append an entry/entries to a hash in Redis with hash_name.
		If the given name is empty in Redis, a new hash will be created.
		The input format should be as follows:
			1.A hash
			2.A key and a value
		"""
        if type(item1) is dict:
            for i in item1:
                item1[i] = pickle.dumps(item1[i])
            self.hashdb.hmset(name, item1)
        else:
            self.hashdb.hset(name, item1, pickle.dumps(item2))

    def get_hash(self, name, keys=[]):
        """
		Support three query functions:
			1.Return a hash with a given hash_name in Redis.
			2.Return a value_list with a given hash_name and a key_list in Redis,
				the value_list is the same sequence as key_list.
			3.Return a value with a given hash_name and a key in Redis.
		"""
        if not keys:
            res = self.hashdb.hgetall(name)
            hash = {}
            for i in res:
                hash[i.decode()] = pickle.loads(res[i])
            return hash
        else:
            if type(keys) is list:
                res = self.hashdb.hmget(name, keys)
                for i in range(len(res)):
                    res[i] = pickle.loads(res[i])
                return res
            else:
                return pickle.loads(self.hashdb.hget(name, keys))

    def exists_hash(self, name):
        """
		Check the existence of a hash in Redis by hash_name.
		"""
        return self.hashdb.exists(name)

    def exists_hash_key(self, name, key):
        """
		Check the existence of a key in a given hash by key_name and hash_name.
		"""
        return self.hashdb.hexists(name, key)

    def delete_hash(self, name):
        """
		Delete a hash in Redis by hash_name.
		"""
        self.hashdb.delete(name)

    def delete_hash_key(self, name, key):
        """
		Delete a key in a given hash by key_name and hash_name.
		"""
        self.hashdb.hdel(name, key)

    def clear_hash(self):
        """
		Delete all the hashes in the hash database.
		"""
        self.hashdb.flushdb()

    def flushall(self):
        self.datadb.flushall()
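
A minimal usage sketch for the cache and hash helpers; the keys and values are hypothetical and a local Redis server is assumed to be running:

rdb = RedisDatabase()
rdb.set_list_all_cache('scores', [1, 2.5, 3])
print(rdb.get_list_cache('scores'))            # -> [1, 2.5, 3]
rdb.set_hash('subject:1', 'meta', {'age': 30})
print(rdb.get_hash('subject:1', 'meta'))       # -> {'age': 30}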
Example #36
0
class RedisProvider(object):
    """
    A storage provider that stores data in a Redis database
    """
    CONNECTION_POOLS = {}

    @staticmethod
    def get_pool(host, port):
        """
        Gets an existing connection pool to a given server or creates a new one
        Args:
            host(str): Host of the redis server
            port(int): Port number the redis server is listening on
        Returns:
            ConnectionPool: A redis connection pool
        """
        if not isinstance(host, (str, unicode)) or not host:
            raise ValueError("host argument must be a non empty string")
        if not isinstance(port, int) or port <= 0 or port > 65535:
            raise ValueError(
                "port argument must be an integer between 1 and 65535")
        if host not in RedisProvider.CONNECTION_POOLS:
            RedisProvider.CONNECTION_POOLS[host] = {}
        if port not in RedisProvider.CONNECTION_POOLS[host]:
            RedisProvider.CONNECTION_POOLS[host][port] = ConnectionPool(
                host=host,
                port=port,
                db=0,
                max_connections=128,
                socket_keepalive=True)
        return RedisProvider.CONNECTION_POOLS[host][port]

    def __init__(self, configuration=None):
        configuration = configuration or {}
        host = configuration.get(
            "host", os.getenv("REDIS_PORT_6379_TCP_ADDR", "redis"))
        port = int(
            configuration.get("port",
                              os.getenv("REDIS_PORT_6379_TCP_PORT", 6379)))
        self.host = socket.gethostbyname(host)
        pool = RedisProvider.get_pool(self.host, port)
        self.redis = StrictRedis(connection_pool=pool,
                                 encoding=None,
                                 socket_keepalive=True)

    def get(self, path):
        """
        Fetches data from the database stored under the key <path>
        Args:
            path: Key under which the data is stored
        Returns:
            The data if it was found, None otherwise
        """
        start = time.clock()
        value = self.redis.get(path)
        end = time.clock()
        elapsed = end - start
        LOGGER.debug("Provider {:s} fetched {:s} in {:f} seconds".format(
            self.host, path, elapsed))
        return value

    def put(self, data, path):
        """
        Inserts data in the database under the key <path>
        Args:
            data: Data to store in the database
            path: Key under which the data is stored
        Returns:
            True if the insertion worked
        Raises:
            ConnectionError: If the client cannot connect to the server
        """
        start = time.clock()
        value = self.redis.set(path, data)
        end = time.clock()
        elapsed = end - start
        LOGGER.debug("Provider {:s} stored {:s} in {:f} seconds".format(
            self.host, path, elapsed))
        return value

    def delete(self, path):
        """
        Delete data from the database
        Args:
            path: key of the file to delete
        Returns:
            The number of keys deleted from the database
        """
        return self.redis.delete(path)

    def clear(self):
        """
        Deletes all entries in the redis database
        """
        return self.redis.flushall()

    def list(self):
        """
        Returns:
            list(str): The list of blocks on the storage provider
        """
        return self.redis.keys("*")

    @staticmethod
    def quota():
        """
        A compatibility method that returns sys.maxint.
        Returns:
            The value of sys.maxint
        """
        return sys.maxint
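
A minimal usage sketch with a hypothetical host and key; a reachable redis server is assumed:

provider = RedisProvider({"host": "localhost", "port": 6379})
provider.put(b"block-bytes", "blocks/0001")
print(provider.get("blocks/0001"))             # -> b'block-bytes'
provider.delete("blocks/0001")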
Example #37
0
class TestNews(Test):

    d = MagicMock()
    d.entries = [dict(updated='2015-01-01')]

    def setUp(self):
        super(TestNews, self).setUp()
        self.connection = StrictRedis()
        self.connection.flushall()
        self.user = UserFactory.create(admin=True)

    def get_notify_users(self):
        key = "notify:admin:%s" % self.user.id
        return sentinel.master.get(key)

    def delete_notify(self):
        key = "notify:admin:%s" % self.user.id
        return sentinel.master.delete(key)

    @with_context
    @patch('feedparser.parse')
    def test_news(self, feedparser_mock):
        """Test NEWS works."""
        feedparser_mock.return_value = self.d
        news()
        tmp = get_news()
        assert len(tmp) == 1, len(tmp)
        err_msg = "Notify user should be notified"
        assert self.get_notify_users() == '1', err_msg

    @with_context
    @patch('feedparser.parse')
    def test_news_no_new_items(self, feedparser_mock):
        """Test NEWS no new items works."""
        feedparser_mock.return_value = self.d
        news()
        feedparser_mock.return_value = self.d
        news()
        tmp = get_news()
        assert len(tmp) == 1, len(tmp)
        err_msg = "Notify user should be notified"
        assert self.get_notify_users() == '1', err_msg

    @with_context
    @patch('feedparser.parse')
    def test_news_no_new_items_no_notification(self, feedparser_mock):
        """Test NEWS no new items no notificaton works."""
        feedparser_mock.return_value = self.d
        news()
        self.delete_notify()
        feedparser_mock.return_value = self.d
        news()
        tmp = get_news()
        assert len(tmp) == 1, len(tmp)
        err_msg = "Notify user should NOT be notified"
        assert self.get_notify_users() is None, err_msg

    @with_context
    @patch('feedparser.parse')
    def test_news_check_config_urls(self, feedparser_mock):
        """Test NEWS adds config URLs."""
        urls = ['https://github.com/pybossa/pybossa/releases.atom',
                'http://scifabric.com/blog/all.atom.xml',
                'http://url']

        feedparser_mock.return_value = self.d
        with patch.dict(self.flask_app.config, {'NEWS_URL': ['http://url']}):
            news()
            calls = []
            for url in urls:
                calls.append(call(url))
            feedparser_mock.assert_has_calls(calls, any_order=True)
Example #38
0
class TestOldProjects(Test):

    def setUp(self):
        super(TestOldProjects, self).setUp()
        self.connection = StrictRedis()
        self.connection.flushall()
        self.scheduler = Scheduler('test_queue', connection=self.connection)

    @with_context
    def test_get_non_updated_apps_returns_none(self):
        """Test JOB get non updated returns none."""
        apps = get_non_updated_apps()
        err_msg = "There should not be any outdated project."
        assert len(apps) == 0, err_msg


    @with_context
    def test_get_non_updated_apps_returns_one_project(self):
        """Test JOB get non updated returns one project."""
        app = AppFactory.create(updated='2010-10-22T11:02:00.000000')
        apps = get_non_updated_apps()
        err_msg = "There should be one outdated project."
        assert len(apps) == 1, err_msg
        assert apps[0].name == app.name, err_msg


    @with_context
    @patch('pybossa.core.mail')
    def test_warn_project_owner(self, mail):
        """Test JOB email is sent to warn project owner."""
        # Mock for the send method
        send_mock = MagicMock()
        send_mock.send.return_value = True
        # Mock for the connection method
        connection = MagicMock()
        connection.__enter__.return_value = send_mock
        # Join them
        mail.connect.return_value = connection

        date = '2010-10-22T11:02:00.000000'
        app = AppFactory.create(updated=date)
        app_id = app.id
        warn_old_project_owners()
        err_msg = "mail.connect() should be called"
        assert mail.connect.called, err_msg
        err_msg = "conn.send() should be called"
        assert send_mock.send.called, err_msg
        err_msg = "app.contacted field should be True"
        assert app.contacted, err_msg
        err_msg = "The update date should be different"
        assert app.updated != date, err_msg

    @with_context
    def test_warn_project_owner_two(self):
        """Test JOB email is sent to warn project owner."""
        from pybossa.core import mail
        with mail.record_messages() as outbox:
            date = '2010-10-22T11:02:00.000000'
            app = AppFactory.create(updated=date)
            app_id = app.id
            warn_old_project_owners()
            assert len(outbox) == 1, outbox
            subject = 'Your PyBossa project: %s has been inactive' % app.name
            assert outbox[0].subject == subject
            err_msg = "app.contacted field should be True"
            assert app.contacted, err_msg
            err_msg = "The update date should be different"
            assert app.updated != date, err_msg

    @with_context
    def test_warn_project_owner_limits(self):
        """Test JOB email gets at most 25 projects."""
        from pybossa.core import mail
        # Create 50 projects with old updated dates
        date = '2010-10-22T11:02:00.000000'
        apps = []
        for i in range(0, 50):
            apps.append(AppFactory.create(updated=date))
        # The first day that we run the job only 25 emails should be sent
        with mail.record_messages() as outbox:
            warn_old_project_owners()
            err_msg = "There should be only 25 emails."
            assert len(outbox) == 25, err_msg
        # The second day that we run the job only 25 emails should be sent
        with mail.record_messages() as outbox:
            warn_old_project_owners()
            err_msg = ("There should be only 25 emails, but there are %s."
                       % len(outbox))
            assert len(outbox) == 25, err_msg
        # The third day that we run the job only 0 emails should be sent
        # as the previous projects have been already contacted.
        with mail.record_messages() as outbox:
            warn_old_project_owners()
            err_msg = "There should be only 0 emails."
            assert len(outbox) == 0, err_msg
Example #39
0
class RedisAPICacheStore(APICacheStore):

    def __init__(self, *args, **kwargs):
        self.config = kwargs.get('config', {})
        self.ttl = self.config.get('ttl', 300)

        super(RedisAPICacheStore, self).__init__(*args, **kwargs)
        if self.config.get("use_settings", False):
            redis_settings = settings.CACHE_REDIS
        else:
            redis_settings = self.config.get('parameters')


        host = convert_variable_to_env_setting(redis_settings.get('host', "localhost"))
        port = redis_settings.get('port', 6379)
        db = redis_settings.get('db', 0)
        pw = redis_settings.get('password', None)

        timeout = redis_settings.get('timeout', .3)

        self.redis = StrictRedis(host=host,
                                 port=port,
                                 db=db,
                                 password=pw,
                                 socket_timeout=timeout)

        if self.config.get('use_settings'):
            logger.info("Configuring Face/Off API cache with REDIS using settings.py")
        else:
            logger.info("Configuring Face/Off API cache with REDIS using JSON settings")

        logger.info("Face/off API cache settings: redis://%s:%s/%s with ttl %s" %
                    (host, port, db, self.ttl))

    def retrieve(self, key):
        try:
            resp = self.redis.get(key)
            if resp is not None:
                return pickle.loads(resp)
            else:
                return None
        except ConnectionError as e:
            logger.warning("Got a timeout error trying to get from Redis API Cache", exc_info=True)
            return None

    def store(self, key, value, ttl=None):
        if ttl is None:
            ttl = self.ttl
        try:
            self.redis.set(key, pickle.dumps(value))
            if ttl > 0:
                self.redis.expire(key, ttl)
        except ConnectionError as e:
            logger.warning("Got a timeout error trying to store into Redis API Cache", exc_info=True)

    def invalidate(self, key):
        try:
            self.redis.delete(key)
        except ConnectionError as e:
            logger.warning("Got a timeout error trying to store invalidate Redis API Cache", exc_info=True)

    def flush(self):
        try:
            self.redis.flushall()
        except ConnectionError as e:
            logger.warning("Got a timeout error trying to flush Redis API Cache", exc_info=True)
class StaleHTTPClientTestCase(AsyncTestCase):
    def setUp(self):
        super(StaleHTTPClientTestCase, self).setUp()
        self.fake_client = FakeClient()
        self.cache = StrictRedis()
        self.cache.flushall()

    @gen_test
    def test_returns_response(self):
        fake_response = self.fake_client.add_response(
            code=200, body=b'fake response', headers={'fake': 'header'})

        client = StaleHTTPClient(cache=self.cache, client=self.fake_client)

        response = yield client.fetch('/url')

        self.assertResponseEqual(response, fake_response)

    @gen_test
    def test_accepts_request_object(self):
        fake_response = self.fake_client.add_response()

        client = StaleHTTPClient(cache=self.cache, client=self.fake_client)

        request = HTTPRequest('/url')
        response = yield client.fetch(request)

        self.assertIs(response, fake_response)

    @gen_test
    def test_returns_real_response(self):
        expected_response = self.fake_client.add_response()

        client = StaleHTTPClient(cache=self.cache, client=self.fake_client)
        response = yield client.fetch('/url')

        self.assertIs(response, expected_response)

    @gen_test
    def test_returns_response_from_primary_cache(self):
        response = self.fake_client.add_response()

        client = StaleHTTPClient(cache=self.cache, client=self.fake_client)
        response = yield client.fetch('/url')
        cached_response = yield client.fetch('/url')

        self.assertIsNot(cached_response, response)
        self.assertResponseEqual(cached_response, response)

    @gen_test
    def test_returns_stale_response_after_error(self):
        expected_response = self.fake_client.add_response(body=b'stale')
        error_response = self.fake_client.add_response(body=b'error', code=500)

        client = StaleHTTPClient(cache=self.cache,
                                 client=self.fake_client,
                                 ttl=0.001)

        yield client.fetch('/url')
        time.sleep(0.002)
        stale_response = yield client.fetch('/url')

        self.assertIsNot(stale_response, error_response)
        self.assertResponseEqual(stale_response, expected_response)

    @gen_test
    def test_raises_error_after_error_with_empty_cache(self):
        self.fake_client.add_response(body=b'error', code=500)

        client = StaleHTTPClient(cache=self.cache,
                                 client=self.fake_client,
                                 ttl=None)

        with self.assertRaises(HTTPError):
            yield client.fetch('/url')

    @gen_test
    def test_returns_error_when_empty_cache_and_raise_error_flag_is_off(self):
        expected_response = self.fake_client.add_response(body=b'error',
                                                          code=500)

        client = StaleHTTPClient(cache=self.cache,
                                 client=self.fake_client,
                                 ttl=None)

        response = yield client.fetch('/url', raise_error=False)

        self.assertIs(response, expected_response)

    @gen_test
    def test_caches_multiple_urls(self):
        first_expected = self.fake_client.add_response()
        second_expected = self.fake_client.add_response()

        client = StaleHTTPClient(cache=self.cache,
                                 client=self.fake_client,
                                 ttl=1)

        # Populate cache
        yield [client.fetch('/first'), client.fetch('/second')]

        # Read from cache
        first_response, second_response = yield [
            client.fetch('/first'),
            client.fetch('/second')
        ]

        self.assertIsNot(first_response, first_expected)
        self.assertIsNot(second_response, second_expected)

        self.assertResponseEqual(first_response, first_expected)
        self.assertResponseEqual(second_response, second_expected)

    @gen_test
    def test_varies_cache_by_headers(self):
        json_response = self.fake_client.add_response(body=b'{}')
        xml_response = self.fake_client.add_response(body=b'<xml />')

        client = StaleHTTPClient(cache=self.cache,
                                 client=self.fake_client,
                                 ttl=1)

        # Populate and read from cache
        for _ in range(2):
            first_response, second_response = yield [
                client.fetch('/url',
                             headers={'Accept': 'application/json'},
                             vary=['Accept']),
                client.fetch('/url',
                             headers={'Accept': 'text/xml'},
                             vary=['Accept'])
            ]

        self.assertIsNot(first_response, json_response)
        self.assertIsNot(second_response, xml_response)

        self.assertResponseEqual(first_response, json_response)
        self.assertResponseEqual(second_response, xml_response)

    def assertResponseEqual(self, response, expected_response):
        self.assertEqual(response.body, expected_response.body)
        self.assertEqual(response.code, expected_response.code)
        self.assertEqual(response.headers, expected_response.headers)

        self.assertIsInstance(response.headers, HTTPHeaders)

        self.assertIsInstance(response.request, HTTPRequest)
        self.assertIsInstance(response.request.headers, HTTPHeaders)
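The pattern these tests pin down can be sketched independently of Tornado: keep two Redis entries per URL, a fresh one with a TTL and a stale one without, and fall back to the stale copy when the upstream request fails. A minimal synchronous sketch under those assumptions (the key layout, the names, and the error handling are illustrative, not the StaleHTTPClient internals, and the vary-by-header behavior is ignored):

import pickle
from redis import StrictRedis

def fetch_with_stale_fallback(url, do_request, cache=None, ttl=10):
    """Serve fresh responses while keeping a stale fallback copy.

    ttl is assumed to be a positive integer number of seconds.
    """
    cache = cache or StrictRedis()
    fresh = cache.get('fresh:' + url)
    if fresh is not None:
        return pickle.loads(fresh)           # primary cache hit
    try:
        response = do_request(url)           # any callable that fetches url
    except Exception:
        stale = cache.get('stale:' + url)
        if stale is None:
            raise                            # empty cache: propagate the error
        return pickle.loads(stale)           # upstream failed: serve stale copy
    payload = pickle.dumps(response)
    cache.setex('fresh:' + url, ttl, payload)  # fresh copy expires after ttl
    cache.set('stale:' + url, payload)         # fallback copy has no TTL
    return response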

class TestCheckTasksRequestedByUser(object):

    def setUp(self):
        self.connection = StrictRedis()
        self.connection.flushall()


    @patch('pybossa.api.task_run.get_user_id_or_ip')
    def test_check_task_requested_by_user_authenticated_key_exists(self, user):
        """_check_task_requested_by_user should return True for an authorized
        user that requested a task"""
        user.return_value = {'user_id': 33, 'user_ip': None}
        taskrun = TaskRun(task_id=22)
        key = 'pybossa:task_requested:user:33:task:22'
        self.connection.setex(key, 10, True)

        check = _check_task_requested_by_user(taskrun, self.connection)

        assert check is True, check


    @patch('pybossa.api.task_run.get_user_id_or_ip')
    def test_check_task_requested_by_user_anonymous_key_exists(self, user):
        """_check_task_requested_by_user should return True for an anonymous
        user that requested a task"""
        user.return_value = {'user_id': None, 'user_ip': '127.0.0.1'}
        taskrun = TaskRun(task_id=22)
        key = 'pybossa:task_requested:user:127.0.0.1:task:22'
        self.connection.setex(key, 10, True)

        check = _check_task_requested_by_user(taskrun, self.connection)

        assert check is True, check


    @patch('pybossa.api.task_run.get_user_id_or_ip')
    def test_check_task_requested_by_user_wrong_key(self, user):
        """_check_task_requested_by_user should return False for a user that did
        not request a task"""
        user.return_value = {'user_id': 33, 'user_ip': None}
        taskrun = TaskRun(task_id=22)
        key = 'pybossa:task_requested:user:88:task:44'
        self.connection.setex(key, 10, True)

        check = _check_task_requested_by_user(taskrun, self.connection)

        assert check is False, check


    @patch('pybossa.api.task_run.get_user_id_or_ip')
    def test_check_task_requested_by_user_authenticated_deletes_key(self, user):
        """_check_task_requested_by_user deletes the key after checking that 
        an authenticated user requested the task"""
        user.return_value = {'user_id': 33, 'user_ip': None}
        taskrun = TaskRun(task_id=22)
        key = 'pybossa:task_requested:user:33:task:22'
        self.connection.setex(key, 10, True)

        _check_task_requested_by_user(taskrun, self.connection)
        key_deleted = self.connection.get(key) is None

        assert key_deleted is True, key_deleted


    @patch('pybossa.api.task_run.get_user_id_or_ip')
    def test_check_task_requested_by_user_anonymous_preserves_key(self, user):
        """_check_task_requested_by_user does not delete the key after checking
        that an anonymous user requested the task (in case many simultaneous
        anonymous users are sharing the same IP"""
        user.return_value = {'user_id': None, 'user_ip': '127.0.0.1'}
        taskrun = TaskRun(task_id=22)
        key = 'pybossa:task_requested:user:127.0.0.1:task:22'
        self.connection.setex(key, 10, True)

        _check_task_requested_by_user(taskrun, self.connection)
        key_deleted = self.connection.get(key) is None

        assert key_deleted is False, key_deleted
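Taken together, the five tests above fully determine the function's observable behavior. A sketch consistent with those assertions (a reconstruction, not the actual pybossa source):

def _check_task_requested_by_user(taskrun, redis_conn):
    """Sketch reconstructed from the assertions above."""
    user = get_user_id_or_ip()               # the helper patched in each test
    usr = user['user_id'] or user['user_ip']
    key = 'pybossa:task_requested:user:%s:task:%s' % (usr, taskrun.task_id)
    task_requested = bool(redis_conn.get(key))
    if user['user_id'] is not None:
        # Authenticated users get the key deleted; anonymous users may share
        # an IP, so their key is preserved for concurrent requesters.
        redis_conn.delete(key)
    return task_requested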
Example #42
class TestWebHooks(Test):

    def setUp(self):
        super(TestWebHooks, self).setUp()
        self.connection = StrictRedis()
        self.connection.flushall()
        self.project = ProjectFactory.create()
        self.webhook_payload = dict(project_id=self.project.id,
                                    project_short_name=self.project.short_name)


    @with_context
    @patch('pybossa.jobs.requests.post')
    def test_webhooks(self, mock):
        """Test WEBHOOK works."""
        mock.return_value = FakeResponse(text=json.dumps(dict(foo='bar')),
                                         status_code=200)
        err_msg = "The webhook should return True from patched method"
        assert webhook('url', self.webhook_payload), err_msg
        err_msg = "The post method should be called"
        assert mock.called, err_msg

    @with_context
    @patch('pybossa.jobs.requests.post')
    def test_webhooks_connection_error(self, mock):
        """Test WEBHOOK with connection error works."""
        import requests
        from pybossa.core import webhook_repo
        mock.side_effect = requests.exceptions.ConnectionError
        err_msg = "A webhook should be returned"
        res = webhook('url', self.webhook_payload)
        assert res.response == 'Connection Error', err_msg
        assert res.response_status_code is None, err_msg
        wh = webhook_repo.get(1)
        assert wh.response == res.response, err_msg
        assert wh.response_status_code == res.response_status_code, err_msg

    @with_context
    @patch('pybossa.jobs.requests.post')
    def test_webhooks_without_url(self, mock):
        """Test WEBHOOK without url works."""
        mock.return_value = True
        err_msg = "The webhook should return Connection Error"
        res = webhook(None, self.webhook_payload, None)
        assert res.response == 'Connection Error', err_msg
        assert res.response_status_code is None, err_msg

    @with_context
    @patch('pybossa.model.event_listeners.webhook_queue', new=queue)
    def test_trigger_webhook_without_url(self):
        """Test WEBHOOK is triggered without url."""
        project = ProjectFactory.create()
        task = TaskFactory.create(project=project, n_answers=1)
        TaskRunFactory.create(project=project, task=task)
        assert queue.enqueue.called is False, queue.enqueue.called
        queue.reset_mock()

    @with_context
    @patch('pybossa.model.event_listeners.webhook_queue', new=queue)
    def test_trigger_webhook_with_url_not_completed_task(self):
        """Test WEBHOOK is not triggered for uncompleted tasks."""
        import random
        project = ProjectFactory.create()
        task = TaskFactory.create(project=project)
        for i in range(1, random.randrange(2, 5)):
            TaskRunFactory.create(project=project, task=task)
        assert queue.enqueue.called is False, queue.enqueue.called
        assert task.state != 'completed'
        queue.reset_mock()


    @with_context
    @patch('pybossa.model.event_listeners.webhook_queue', new=queue)
    def test_trigger_webhook_with_url(self):
        """Test WEBHOOK is triggered with url."""
        url = 'http://server.com'
        project = ProjectFactory.create(webhook=url)
        task = TaskFactory.create(project=project, n_answers=1)
        TaskRunFactory.create(project=project, task=task)
        result = result_repo.get_by(project_id=project.id, task_id=task.id)
        payload = dict(event='task_completed',
                       project_short_name=project.short_name,
                       project_id=project.id,
                       task_id=task.id,
                       result_id=result.id,
                       fired_at=datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
        assert queue.enqueue.called
        queue.enqueue.assert_called_with(webhook, url, payload)
        queue.reset_mock()
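The behavior these tests rely on can be sketched as follows. This is a reconstruction from the assertions above, not the real pybossa.jobs code; the Webhook model class is an assumption, and webhook_repo is the repository imported in the connection-error test:

import json
import requests

def webhook(url, payload=None, oid=None):
    """Sketch of the behavior asserted above."""
    wh = Webhook(payload=payload)            # assumed model class
    try:
        if url is None:
            raise requests.exceptions.ConnectionError()
        response = requests.post(url, data=json.dumps(payload),
                                 headers={'Content-type': 'application/json'})
        wh.response = response.text
        wh.response_status_code = response.status_code
    except requests.exceptions.ConnectionError:
        wh.response = 'Connection Error'
        wh.response_status_code = None
    webhook_repo.save(wh)                    # repository as imported in the tests
    return wh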
			"userID": line.get('userId'),
			"eventID": line.get('eventId'),
			"appID": line.get('appId')
		}).run()
		
		

if __name__ == '__main__':
	
	# dfile = '~/Projects/unified-ETL-pipeline-/data/appEventSample.txt.gz'
	dfile = '~/Desktop/9_18_appevent_dump_full.txt.gz'
	NUM_REC = 100000
	dupes = []
	dq = CL.deque([], maxlen=1500)
	cxn1 = Redis(db=DB_PARAMS['events_db_id'])
	cxn1.flushall()
	fh = gzip.open(os.path.expanduser(dfile), 'rt', encoding='utf-8')
	# opener(fh, grep1(persist(DB_PARAMS)))
	# opener(fh, grep1(persist_rethink(DB_PARAMS)))
	# opener(fh, grep1(aggregate(DB_PARAMS)))
	
	opener(fh, grep1(grep2(persist(DB_PARAMS))), num_rec=2*NUM_REC)
	# opener(fh, grep1(persist(DB_PARAMS)))

	print("number of records persisted: {}".format(len(cxn1.keys('*'))))

	fh.close()
	print('file handle closed')
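The call shape opener(fh, grep1(grep2(persist(DB_PARAMS)))) suggests a generator-coroutine pipeline. The bodies of opener, grep1, grep2, and persist are not shown in this excerpt, so the following is only a sketch of that pattern with simplified stand-in stages:

import json

def coroutine(func):
    """Prime a generator so it is ready to accept send() calls."""
    def start(*args, **kwargs):
        gen = func(*args, **kwargs)
        next(gen)
        return gen
    return start

def opener(fh, target, num_rec=None):
    """Push up to num_rec raw lines from an open file into the pipeline."""
    for i, raw in enumerate(fh):
        if num_rec is not None and i >= num_rec:
            break
        target.send(raw)
    target.close()

@coroutine
def filter_stage(target):
    """Stand-in for grep1/grep2: parse JSON, drop records without an eventId."""
    while True:
        line = json.loads((yield))
        if line.get('eventId'):
            target.send(line)

@coroutine
def sink():
    """Stand-in for persist(DB_PARAMS): prints instead of writing to Redis."""
    while True:
        print((yield).get('eventId'))

# Wired together the same way as above:
# opener(fh, filter_stage(sink()), num_rec=100)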
Example #44
def _clear_cache():
    cache = StrictRedis(host=REDIS_HOST, port=REDIS_PORT)
    cache.flushall()
    print('Cache cleared...')
Example #45
    def redis(self):
        redis = StrictRedis()

        yield redis

        redis.flushall()
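This fragment reads like the body of a pytest fixture whose decorator (and enclosing class) were cut off. A self-contained version of the same pattern might look like this; the decorator and the test below are assumptions, not part of the original example:

import pytest
from redis import StrictRedis

@pytest.fixture
def redis():
    client = StrictRedis()
    yield client          # the test runs while the generator is suspended here
    client.flushall()     # teardown: wipe whatever the test wrote

def test_can_write(redis):
    redis.set('k', 'v')
    assert redis.get('k') == b'v'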
Example #46

# Get the TTL of a key, in seconds (-1 means the key never expires)
print(redis.ttl('name'))


# Move a key to another database
redis.move('name', 2)  # 2 is the target database index


# Delete every key in the currently selected database
redis.flushdb()


# Delete every key in all databases
redis.flushall()


#  -------  string operations --------

# Assign a new value to a key and return the previous value
print(redis.getset('name', 'Jerry'))


# Return the values of several keys at once
print(redis.mget(['name', 'nikename']))


# Create the key-value pair only if the key does not exist; otherwise leave it unchanged
redis.setnx('newname', 'James')
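This snippet and the next operate on a module-level redis client created outside the excerpt. A plausible setup, with the host, port, db, and seeded keys all assumptions:

from redis import StrictRedis

# Assumed setup for the surrounding snippets: a local Redis instance with
# decode_responses=True so printed values come back as str, not bytes.
redis = StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
redis.set('name', 'Tom')        # seeded so the ttl/getset calls have a target
redis.set('nikename', 'Tommy')  # key spelled as in the mget() call above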
Example #47
redis.set('gender', 'male')
print(redis.get('gender'))

# 2. Key operations
print(redis.exists('name'))
print(redis.delete('gender'))
print(redis.type('name'))
print(redis.keys('a*'))
print(redis.randomkey())  # Get a random key
print(redis.rename('name', 'nickname'))
print(redis.dbsize())  # Number of keys in the current database
print(redis.expire('nickname', 60))
print(redis.ttl('nickname'))
print(redis.move('nickname', 1))
print(redis.flushdb())  # Remove every key in the current database
print(redis.flushall())  # Remove every key in all databases

# 3. String operations
print()
print(redis.set('emotion', 'smile'))
redis.set('name', 'Leo')
redis.set('age', '19')
print(redis.get('emotion'))
print(redis.getset('emotion', 'humour'))
print(redis.mget(['emotion', 'name', 'age']))
print(redis.setnx('newname', 'James'))  # Only sets the key if it does not already exist
print(redis.setex('country', 1, 'china'))
redis.setrange('name', 3, '2019')
print(redis.get('name'))
print(redis.mset({'name1': 'Modric', 'name2': 'Van Dik'}))
print(redis.msetnx({'name3': 'Salah', 'name4': 'Mane'}))  # Batch set, applied only if none of the keys exist
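One detail worth flagging in the block above: setex here follows the StrictRedis argument order (name, time, value); the legacy Redis client class historically took (name, value, time). A quick expiry check under that assumption:

import time

redis.setex('country', 1, 'china')  # StrictRedis order: name, seconds, value
print(redis.ttl('country'))         # about 1
time.sleep(1.1)
print(redis.get('country'))         # None once the key has expired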