Example #1
    def get_percent_complete(self):
        '''
        Calculate how many sends/queues exist and convert that into a percentage.
        If the percentage reaches 100, the dispatch is saved as completed;
        otherwise the value is cached for 15 seconds.
        '''
        if self.percent_complete == 100:
            return 100
        else:
            key = ('dispatcher_%s_percent' % self.id)
            if redis.exists(key):
                return int(redis.get(key))
            else:
                curr_sent_count = int(self.get_sent())
                queued = int(self.queued_count if self.queued_count else self.get_queued())
                processed = curr_sent_count + int(self.get_skipped())
                percent = int((float(processed) / queued) * 100) if queued > 0 else 0

                # Note: self.queued_count is only set after queuing is done
                # percent only needs to exceed 98 so that old/dead jobs running an hour or two later can still update the data.
                if percent > 98 and self.queued_count:
                    self.sent_count = curr_sent_count
                    self.skipped_count = int(self.get_skipped())
                    self.percent_complete = 100
                    self.state = 10
                    self.save()
                    self.expire_counts(86400)
                else:
                    self.update(sent_count=curr_sent_count)
                    redis.set(key, percent)
                    redis.expire(key, 15)  # cache the current percent for 15 seconds
                return percent
Example #2
    def test_get_percent_complete_cache(self):
        stack = helpers.create_stack()
        d = Dispatcher.find_by_id_anon(stack['dispatcher_id'])
        d.expire_counts()
        # Seed the cache key so get_percent_complete() returns the cached value.
        redis.set('dispatcher_%s_percent' % d.id, 15)
        self.assertEqual(d.get_percent_complete(), 15)
        redis.delete('dispatcher_%s_percent' % d.id)
Example #3
def run_import(list_id):
    from liaison.models.list import List
    from liaison.lib.aws import get_contents_as_string
    from liaison.lib.extensions import redis_store as redis

    key = 'list_%s_import' % list_id
    list_ = List.find_by_id_anon(list_id)
    if list_:
        filename = list_.filename
        logger.info("\n filename: %s \n" % str(filename))
        if "lists/" in filename:
            f = get_contents_as_string(filename)
        else:
            f = open(filename, 'rt')

        data = []
        try:
            data = [
                row for row in csv.DictReader(
                    f.read().splitlines(), restkey='rk', restval='rv')
            ]
            list_.import_data = data
            result = list_.save()
            if result:
                redis.set(key, 'Upload Successful.')
            else:
                redis.set(key,
                          'List failed to save, the data may be corrupted.')
        except Exception as e:
            logger.info("Error: import_failure_list: %s: \n %s" %
                        (e, traceback.format_exc()))
            redis.set(key, 'List failed to save, the data may be corrupted.')
            raise
        finally:
            # Close the file handle if a local file was opened.
            if hasattr(f, 'close'):
                f.close()
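
The importer only communicates its outcome through that 'list_%s_import' key, so the caller has to read it back from redis. A minimal polling sketch, assuming the same redis_store client; the import_status helper itself is hypothetical, not part of the codebase:

def import_status(list_id):
    from liaison.lib.extensions import redis_store as redis

    # Hypothetical helper: return whatever message run_import last wrote
    # for this list, or report that the job is still running.
    key = 'list_%s_import' % list_id
    message = redis.get(key)
    return message if message else 'Import still in progress.'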
Example #4
def run_blacklist_import(account_id, filename):
    from liaison.models.blacklist import Blacklist
    from liaison.lib.aws import get_contents_as_string
    from liaison.lib.extensions import redis_store as redis

    key = 'blacklist_import_%s' % account_id
    if "Users/" in filename:
        f = open(filename, 'rt')
    else:
        f = get_contents_as_string(filename)

    try:
        data = [
            row for row in csv.DictReader(
                f.read().splitlines(), restkey='rk', restval='rv')
        ]
        ok_key = True  # flips to False as soon as a row has no recognizable email column
        for row in data:
            if ok_key:
                spellings = ('email', 'Email', 'email_address',
                             'Email_Address', 'email address', 'Email Address',
                             'EmailAddress')
                intersect = set(spellings).intersection(row.keys())
                intersect = intersect.pop() if intersect else None
                if intersect:
                    email = row.get(intersect)
                    reason = row.get('reason')
                    detail = row.get('detail')
                    Blacklist.insert(account_id, email, reason, detail)
                else:
                    ok_key = False
        if ok_key:
            redis.set(key, 'Upload Successful.')
    except Exception as e:
        logger.info("Error: import_failure_blacklist: %s: \n %s" %
                    (e, traceback.format_exc()))
        redis.set(key, 'Upload failed, the data may be corrupted.')
        raise
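
The header detection above is just a set intersection between each row's keys and a tuple of accepted spellings, which is what lets the importer tolerate differently labelled email columns. A standalone sketch of the same check against a made-up CSV snippet:

import csv

# Hypothetical one-row CSV whose email column uses one of the accepted spellings.
rows = csv.DictReader("Email Address,reason\nuser@example.com,bounced\n".splitlines())

spellings = ('email', 'Email', 'email_address', 'Email_Address',
             'email address', 'Email Address', 'EmailAddress')
for row in rows:
    match = set(spellings).intersection(row.keys())
    column = match.pop() if match else None
    print(column, row.get(column))  # -> Email Address user@example.com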
Example #5
def status():
    """Check the database and cache, and report their status."""
    services = {}
    code = 200

    # DB
    if isinstance(Account.query.all(), list):
        services['db'] = 'ok'
    else:
        services['db'] = 'offline'
        code = 503

    # Cache. Being offline is OK in dev; to test this locally, use SimpleCache instead of NullCache.
    redis.set('status-chck', 'a-ok')
    redis.expire('status-chck', 2)
    if redis.get('status-chck') == 'a-ok':
        services['cache'] = 'ok'
    else:
        services['cache'] = 'offline'
        code = 503
    services['status_code'] = code
    services['redis-online'] = check_redis()

    return jsonify(services), code
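
check_redis() is referenced here and in set_cache() below but not shown. A plausible minimal sketch, assuming it simply pings the same redis client and treats any connection error as "offline" (the body is an assumption, not the project's actual implementation):

def check_redis():
    from redis.exceptions import RedisError

    # Assumed behaviour: report whether the redis server answers a PING.
    try:
        return bool(redis.ping())
    except RedisError:
        return False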
Example #6
def set_cache(key, data, expiring=None):
    """Store a value in redis when it is reachable, optionally with a TTL in seconds."""
    if check_redis():
        response = redis.set(key, data)
        if expiring:
            redis.expire(key, expiring)
        return response
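
Callers would typically serialize their payload before handing it to set_cache, since redis.set stores string values. A usage sketch with a made-up key and payload:

import json

# Cache a hypothetical dispatcher summary for five minutes.
payload = json.dumps({'sent': 120, 'queued': 150, 'percent': 80})
set_cache('dispatcher_42_summary', payload, expiring=300)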