Example #1
def auto_delete_duplicate_crashes():
    logger = logging.getLogger('limitation')
    result = delete_duplicate_crashes()
    if result.get('count', 0):
        log_id = str(uuid.uuid4())
        params = dict(log_id=log_id)
        splunk_url = get_splunk_url(params)
        splunk_filter = 'log_id=%s' % log_id if splunk_url else None
        ids_list = sorted([element['id'] for element in result['elements']])
        raven_extra = {
            "id": log_id,
            "splunk_url": splunk_url,
            "splunk_filter": splunk_filter,
            "crash_list": ids_list
        }
        raven.captureMessage(
            "[Limitation]Periodic task 'Duplicated' cleaned up %d crashes, total size of cleaned space is %s [%d]"
            % (result['count'], filters.filesizeformat(result['size']).replace(
                u'\xa0', u' '), time.time()),
            data=dict(level=20, logger='limitation'),
            extra=raven_extra)
        extra = dict(log_id=log_id,
                     meta=True,
                     count=result['count'],
                     size=filters.filesizeformat(result['size']).replace(
                         u'\xa0', u' '),
                     reason='duplicated',
                     model='Crash')
        logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
        for element in result['elements']:
            element.update({"log_id": log_id, "Crash_id": element.pop('id')})
            logger.info(
                add_extra_to_log_message('Automatic cleanup element',
                                         extra=element))
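
Every example below leans on the same module-level context. A minimal sketch of the assumed imports and of the add_extra_to_log_message helper follows; the helper body is a reconstruction from how it is called, not the project's actual implementation:

import logging
import time
import uuid

from django.template import defaultfilters as filters  # filters.filesizeformat

# `raven` is assumed to be a configured raven.Client instance (the legacy
# Sentry client); captureMessage(message, data=..., extra=...) is its API.

def add_extra_to_log_message(msg, extra):
    # Assumed behavior: flatten the extra dict into sorted "key: value"
    # pairs so each record stays on one greppable line.
    return '%s, %s' % (msg, ', '.join(
        '%s: %s' % (k, v) for k, v in sorted(extra.items())))
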
Example #2
def auto_delete_size_is_exceeded():
    logger = logging.getLogger('limitation')
    model_list = [('crash', 'Crash'), ('feedback', 'Feedback')]
    for model in model_list:
        result = delete_size_is_exceeded(*model)
        if result.get('count', 0):
            result['size'] /= 1024.0 * 1024
            log_id = str(uuid.uuid4())
            params = dict(log_id=log_id)
            splunk_url = get_splunk_url(params)
            splunk_filter = 'log_id=%s' % log_id if splunk_url else None
            raven_extra = dict(id=log_id,
                               splunk_url=splunk_url,
                               splunk_filter=splunk_filter)
            raven.captureMessage(
                "[Limitation]Periodic task 'Size is exceeded' cleaned up %d %s, total size of cleaned space is %.2f Mb[%d]"
                % (result['count'], model[1], result['size'], time.time()),
                data=dict(level=20, logger='limitation'),
                extra=raven_extra)
            extra = dict(log_id=log_id,
                         meta=True,
                         count=result['count'],
                         size=result['size'],
                         model=model[1],
                         reason='size_is_exceeded')
            logger.info(
                add_extra_to_log_message('Automatic cleanup', extra=extra))
            for element in result['elements']:
                element.update(dict(log_id=log_id))
                logger.info(
                    add_extra_to_log_message('Automatic cleanup element',
                                             extra=element))
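
All of the delete_* helpers (delete_duplicate_crashes, delete_older_than, delete_size_is_exceeded) are consumed through the same result shape. A hypothetical stub showing only the contract the callers above rely on:

def delete_size_is_exceeded(app_label, model_name, limit=None):
    # Hypothetical stub: the real helper deletes rows; callers only need
    # 'count', 'size' (in bytes, hence the 1024.0 * 1024 division above),
    # and per-row 'elements' dicts carrying at least an 'id'.
    # delete_duplicate_crashes additionally returns a 'signatures' dict
    # (see Examples #5 and #6).
    return {
        'count': 2,
        'size': 3 * 1024 * 1024,
        'elements': [{'id': 1}, {'id': 2}],
    }
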
Example #3
def auto_delete_dangling_files():
    logger = logging.getLogger('limitation')
    model_kwargs_list = [
        {'model': Crash, 'file_fields': ('upload_file_minidump', 'archive')},
        {'model': Feedback, 'file_fields': ('blackbox', 'system_logs', 'attached_file', 'screenshot')},
        {'model': Symbols, 'file_fields': ('file', )},
        {'model': Version, 'file_fields': ('file', )},
        {'model': SparkleVersion, 'file_fields': ('file', )}
    ]
    for model_kwargs in model_kwargs_list:
        result = handle_dangling_files(
            prefix=get_prefix(model_kwargs['model']),
            **model_kwargs
        )
        if result['mark'] == 'db':
            logger.info('Dangling files detected in db [%d], files path: %s' % (result['count'], result['data']))
            raven.captureMessage(
                "[Limitation]Dangling files detected in db, total: %d" % result['count'],
                data=dict(level=20, logger='limitation')
            )
        elif result['mark'] == 's3':
            logger.info('Dangling files deleted from s3 [%d], files path: %s' % (result['count'], result['data']))
            raven.captureMessage(
                "[Limitation]Dangling files deleted from s3, cleaned up %d files" % result['count'],
                data=dict(level=20, logger='limitation')
            )
        else:
            logger.info('Dangling files not detected')
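
handle_dangling_files is not shown here; judging from the branches above, it reconciles database file references against the S3 bucket and reports one of three outcomes via result['mark']. The assumed return contract:

# Assumed return shape of handle_dangling_files:
#   {'mark': 'db', 'count': N, 'data': [...paths...]}  # referenced in db, missing from s3
#   {'mark': 's3', 'count': N, 'data': [...paths...]}  # orphaned s3 keys, now deleted
#   {'mark': 'ok'}                                     # nothing dangling ('ok' is assumed)
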
Example #4
def auto_delete_duplicate_crashes():
    logger = logging.getLogger('limitation')
    result = delete_duplicate_crashes()
    if result.get('count', 0):
        result['size'] /= 1024.0 * 1024
        log_id = str(uuid.uuid4())
        params = dict(log_id=log_id)
        splunk_url = get_splunk_url(params)
        splunk_filter = 'log_id=%s' % log_id if splunk_url else None
        raven_extra = dict(id=log_id,
                           splunk_url=splunk_url,
                           splunk_filter=splunk_filter)
        raven.captureMessage(
            "[Limitation]Periodic task 'Duplicated' cleaned up %d crashes, total size of cleaned space is %.2f Mb[%d]"
            % (result['count'], result['size'], time.time()),
            data=dict(level=20, logger='limitation'),
            extra=raven_extra)
        extra = dict(log_id=log_id,
                     meta=True,
                     count=result['count'],
                     size=result['size'],
                     reason='duplicated',
                     model='Crash')
        logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
        for element in result['elements']:
            element.update(dict(log_id=log_id))
            logger.info(
                add_extra_to_log_message('Automatic cleanup element',
                                         extra=element))
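
get_splunk_url returns a search link only when Splunk is configured, which is why splunk_filter is gated on it. A hypothetical reconstruction (the setting name and URL layout are assumptions):

from django.conf import settings

def get_splunk_url(params):
    # Hypothetical: build a Splunk search link for the given key=value
    # parameters, or return None when no Splunk host is configured.
    host = getattr(settings, 'SPLUNK_HOST', None)  # assumed setting name
    if not host:
        return None
    query = ' '.join('%s=%s' % (k, v) for k, v in sorted(params.items()))
    return 'https://%s/en-US/app/search/search?q=search %s' % (host, query)
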
Example #5
def deferred_manual_cleanup(model, limit_size=None, limit_days=None, limit_duplicated=None):
    full_result = dict(count=0, size=0, elements=[], signatures={})
    if limit_duplicated:
        result = delete_duplicate_crashes(limit=limit_duplicated)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']
            full_result['signatures'].update(result['signatures'])

    if limit_days:
        result = delete_older_than(*model, limit=limit_days)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_size:
        result = delete_size_is_exceeded(*model, limit=limit_size)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    full_result['size'] /= 1024.0 * 1024
    extra = dict(elements=full_result['elements'])
    extra.update(full_result['signatures'])
    raven.captureMessage("[Limitation]Manual cleanup freed %d %s, total size of cleaned space is %.2f Mb[%d]" %
                         (full_result['count'], model[1], full_result['size'], time.time()),
                         data=dict(level=20, logger='limitation'), extra=extra)
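
The model argument is the same ('app_label', 'ModelName') pair the periodic tasks use, unpacked into delete_older_than and delete_size_is_exceeded. A usage sketch (the limit values and their units are assumptions, interpreted by the helpers):

# Keep at most 10 duplicates per signature, drop Crash rows older than
# 30 days, then enforce a size cap.
deferred_manual_cleanup(('crash', 'Crash'),
                        limit_size=500,
                        limit_days=30,
                        limit_duplicated=10)
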
Example #6
def auto_delete_duplicate_crashes():
    result = delete_duplicate_crashes()
    if result.get('count', 0):
        result['size'] /= 1024.0 * 1024
        raven.captureMessage("[Limitation]Periodic task 'Duplicated' cleaned up %d crashes, total size of cleaned space is %.2f Mb[%d]" %
                             (result['count'], result['size'], time.time()),
                             data=dict(level=20, logger='limitation'), extra=result['signatures'])
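
The data=dict(level=20, logger='limitation') passed to captureMessage mirrors the stdlib logging levels; 20 is logging.INFO, so the Sentry event is filed at the same severity the 'limitation' logger uses:

import logging

assert logging.INFO == 20  # the numeric level sent with every captureMessage above
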
Example #7
def deferred_manual_cleanup(model,
                            limit_size=None,
                            limit_days=None,
                            limit_duplicated=None):
    logger = logging.getLogger('limitation')
    full_result = dict(count=0, size=0, elements=[])
    if limit_duplicated:
        result = delete_duplicate_crashes(limit=limit_duplicated)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_days:
        result = delete_older_than(*model, limit=limit_days)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_size:
        result = delete_size_is_exceeded(*model, limit=limit_size)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    full_result['size'] /= 1024.0 * 1024
    log_id = str(uuid.uuid4())
    params = dict(log_id=log_id)
    splunk_url = get_splunk_url(params)
    splunk_filter = 'log_id=%s' % log_id if splunk_url else None
    raven_extra = dict(id=log_id,
                       splunk_url=splunk_url,
                       splunk_filter=splunk_filter)
    raven.captureMessage(
        "[Limitation]Manual cleanup freed %d %s, total size of cleaned space is %.2f Mb[%s]"
        % (full_result['count'], model[1], full_result['size'], log_id),
        data=dict(level=20, logger='limitation'),
        extra=raven_extra)

    extra = dict(log_id=log_id,
                 meta=True,
                 count=full_result['count'],
                 size=full_result['size'],
                 model=model[1],
                 limit_duplicated=limit_duplicated,
                 limit_size=limit_size,
                 limit_days=limit_days,
                 reason='manual')
    logger.info(add_extra_to_log_message('Manual cleanup', extra=extra))
    for element in full_result['elements']:
        element.update(dict(log_id=log_id))
        logger.info(
            add_extra_to_log_message('Manual cleanup element', extra=element))
Example #8
def auto_delete_size_is_exceeded():
    model_list = [
        ('crash', 'Crash'),
        ('feedback', 'Feedback')
    ]
    for model in model_list:
        result = delete_size_is_exceeded(*model)
        if result.get('count', 0):
            extra = dict(elements=result['elements'])
            result['size'] /= 1024.0 * 1024
            raven.captureMessage("[Limitation]Periodic task 'Size is exceeded' cleaned up %d %s, total size of cleaned space is %.2f Mb[%d]" %
                                 (result['count'], model[1], result['size'], time.time()),
                                 data=dict(level=20, logger='limitation'), extra=extra)
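
The in-place division converts the byte count returned by the helper into megabytes for the %.2f format. A worked example:

size = 5 * 1024 * 1024     # 5242880 bytes, as returned in result['size']
size /= 1024.0 * 1024      # -> 5.0 (float division, hence the 1024.0)
print("%.2f Mb" % size)    # -> "5.00 Mb"
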
Example #9
def auto_delete_duplicate_crashes():
    logger = logging.getLogger('limitation')
    result = delete_duplicate_crashes()
    if result.get('count', 0):
        log_id = str(uuid.uuid4())
        params = dict(log_id=log_id)
        splunk_url = get_splunk_url(params)
        splunk_filter = 'log_id=%s' % log_id if splunk_url else None
        raven_extra = dict(id=log_id, splunk_url=splunk_url, splunk_filter=splunk_filter)
        raven.captureMessage("[Limitation]Periodic task 'Duplicated' cleaned up %d crashes, total size of cleaned space is %s [%d]" %
                             (result['count'], filters.filesizeformat(result['size']).replace(u'\xa0', u' '), time.time()),
                             data=dict(level=20, logger='limitation'), extra=raven_extra)
        extra = dict(log_id=log_id, meta=True, count=result['count'], size=filters.filesizeformat(result['size']).replace(u'\xa0', u' '), reason='duplicated', model='Crash')
        logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
        for element in result['elements']:
            element.update(dict(log_id=log_id))
            logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))
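
filters.filesizeformat is Django's defaultfilters.filesizeformat; it renders a byte count in human-readable form, and Django separates the number from the unit with a non-breaking space, which is what the .replace(u'\xa0', u' ') seen throughout strips out:

from django.template import defaultfilters as filters

filters.filesizeformat(5 * 1024 * 1024)                         # u'5.0\xa0MB'
filters.filesizeformat(5 * 1024 * 1024).replace(u'\xa0', u' ')  # u'5.0 MB'
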
Example #10
def auto_delete_size_is_exceeded():
    logger = logging.getLogger('limitation')
    model_list = [('crash', 'Crash'), ('feedback', 'Feedback')]
    for model in model_list:
        result = delete_size_is_exceeded(*model)
        if result.get('count', 0):
            log_id = str(uuid.uuid4())
            params = dict(log_id=log_id)
            splunk_url = get_splunk_url(params)
            splunk_filter = 'log_id=%s' % log_id if splunk_url else None
            ids_list = sorted(
                [element['id'] for element in result['elements']])
            raven_extra = {
                "id": log_id,
                "splunk_url": splunk_url,
                "splunk_filter": splunk_filter,
                "%s_list" % (model[1]): ids_list
            }
            raven.captureMessage(
                "[Limitation]Periodic task 'Size is exceeded' cleaned up %d %s, total size of cleaned space is %s [%d]"
                % (result['count'], model[1],
                   filters.filesizeformat(result['size']).replace(
                       u'\xa0', u' '), time.time()),
                data=dict(level=20, logger='limitation'),
                extra=raven_extra)
            extra = dict(log_id=log_id,
                         meta=True,
                         count=result['count'],
                         size=filters.filesizeformat(result['size']).replace(
                             u'\xa0', u' '),
                         model=model[1],
                         reason='size_is_exceeded')
            logger.info(
                add_extra_to_log_message('Automatic cleanup', extra=extra))
            for element in result['elements']:
                element.update({
                    "log_id": log_id,
                    "%s_id" % (model[1]): element.pop('id')
                })
                logger.info(
                    add_extra_to_log_message('Automatic cleanup element',
                                             extra=element))
Example #11
def deferred_manual_cleanup(model, limit_size=None, limit_days=None, limit_duplicated=None):
    logger = logging.getLogger('limitation')
    full_result = dict(count=0, size=0, elements=[])
    if limit_duplicated:
        result = delete_duplicate_crashes(limit=limit_duplicated)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_days:
        result = delete_older_than(*model, limit=limit_days)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_size:
        result = delete_size_is_exceeded(*model, limit=limit_size)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    full_result['size'] /= 1024.0 * 1024
    log_id = str(uuid.uuid4())
    params = dict(log_id=log_id)
    splunk_url = get_splunk_url(params)
    splunk_filter = 'log_id=%s' % log_id if splunk_url else None
    raven_extra = dict(id=log_id, splunk_url=splunk_url, splunk_filter=splunk_filter)
    raven.captureMessage("[Limitation]Manual cleanup freed %d %s, total size of cleaned space is %.2f Mb[%s]" %
                         (full_result['count'], model[1], full_result['size'], log_id),
                         data=dict(level=20, logger='limitation'), extra=raven_extra)

    extra = dict(log_id=log_id, meta=True, count=full_result['count'], size=full_result['size'], model=model[1],
                 limit_duplicated=limit_duplicated, limit_size=limit_size, limit_days=limit_days, reason='manual')
    logger.info(add_extra_to_log_message('Manual cleanup', extra=extra))
    for element in full_result['elements']:
        element.update(dict(log_id=log_id))
        logger.info(add_extra_to_log_message('Manual cleanup element', extra=element))
Example #12
def deferred_manual_cleanup(model, limit_size=None, limit_days=None, limit_duplicated=None):
    logger = logging.getLogger('limitation')
    full_result = dict(count=0, size=0, elements=[])
    if limit_duplicated:
        result = delete_duplicate_crashes(limit=limit_duplicated)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_days:
        result = delete_older_than(*model, limit=limit_days)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_size:
        result = delete_size_is_exceeded(*model, limit=limit_size)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    log_id = str(uuid.uuid4())
    params = dict(log_id=log_id)
    splunk_url = get_splunk_url(params)
    splunk_filter = 'log_id=%s' % log_id if splunk_url else None
    ids_list = sorted([element['id'] for element in full_result['elements']])
    raven_extra = {"id": log_id, "splunk_url": splunk_url, "splunk_filter": splunk_filter, "%s_list" % (model[1]): ids_list}
    raven.captureMessage("[Limitation]Manual cleanup freed %d %s, total size of cleaned space is %s [%s]" %
                         (full_result['count'], model[1], filters.filesizeformat(full_result['size']).replace(u'\xa0', u' '), log_id),
                         data=dict(level=20, logger='limitation'), extra=raven_extra)

    extra = dict(log_id=log_id, meta=True, count=full_result['count'], size=filters.filesizeformat(full_result['size']).replace(u'\xa0', u' '), model=model[1],
                 limit_duplicated=limit_duplicated, limit_size=limit_size, limit_days=limit_days, reason='manual')
    logger.info(add_extra_to_log_message('Manual cleanup', extra=extra))
    for element in full_result['elements']:
        element.update({"log_id": log_id, "%s_id" % (model[1]): element.pop('id')})
        logger.info(add_extra_to_log_message('Manual cleanup element', extra=element))
Example #13
def auto_delete_dangling_files():
    logger = logging.getLogger('limitation')
    model_kwargs_list = [
        {'model': Crash, 'file_fields': ('upload_file_minidump', 'archive')},
        {'model': Feedback, 'file_fields': ('blackbox', 'system_logs', 'attached_file', 'screenshot')},
        {'model': Symbols, 'file_fields': ('file',)},
        {'model': Version, 'file_fields': ('file',)},
        {'model': SparkleVersion, 'file_fields': ('file',)}
    ]
    for model_kwargs in model_kwargs_list:
        result = handle_dangling_files(
            prefix=get_prefix(model_kwargs['model']),
            **model_kwargs
        )
        if result['mark'] == 'db':
            logger.info('Dangling files detected in db [%d], files path: %s' %
                        (result['count'], result['data']))
            raven.captureMessage(
                "[Limitation]Dangling files detected in db, total: %d" %
                result['count'],
                data=dict(level=20, logger='limitation'))
        elif result['mark'] == 's3':
            logger.info('Dangling files deleted from s3 [%d], files path: %s' %
                        (result['count'], result['data']))
            raven.captureMessage(
                "[Limitation]Dangling files deleted from s3, cleaned up %d files"
                % result['count'],
                data=dict(level=20, logger='limitation'))
        else:
            logger.info('Dangling files not detected')
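
get_prefix maps a model class to the storage prefix its files live under; a hypothetical sketch (the naming rule is an assumption):

def get_prefix(model):
    # Hypothetical: derive the S3 key prefix from the model class name,
    # e.g. Crash -> 'crash', SparkleVersion -> 'sparkleversion'.
    return model.__name__.lower()
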
Example #14
def auto_delete_older_than():
    logger = logging.getLogger('limitation')
    model_list = [
        ('crash', 'Crash'),
        ('feedback', 'Feedback')
    ]
    for model in model_list:
        result = delete_older_than(*model)
        if result.get('count', 0):
            log_id = str(uuid.uuid4())
            params = dict(log_id=log_id)
            splunk_url = get_splunk_url(params)
            splunk_filter = 'log_id=%s' % log_id if splunk_url else None
            raven_extra = dict(id=log_id, splunk_url=splunk_url, splunk_filter=splunk_filter)
            raven.captureMessage("[Limitation]Periodic task 'Older than' cleaned up %d %s, total size of cleaned space is %s [%d]" %
                                 (result['count'], model[1], filters.filesizeformat(result['size']).replace(u'\xa0', u' '), time.time()),
                                 data=dict(level=20, logger='limitation'), extra=raven_extra)
            extra = dict(log_id=log_id, meta=True, count=result['count'], size=filters.filesizeformat(result['size']).replace(u'\xa0', u' '), model=model[1], reason='old')
            logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
            for element in result['elements']:
                element.update(dict(log_id=log_id))
                logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))
Example #15
def auto_delete_size_is_exceeded():
    logger = logging.getLogger('limitation')
    model_list = [
        ('crash', 'Crash'),
        ('feedback', 'Feedback')
    ]
    for model in model_list:
        result = delete_size_is_exceeded(*model)
        if result.get('count', 0):
            log_id = str(uuid.uuid4())
            params = dict(log_id=log_id)
            splunk_url = get_splunk_url(params)
            splunk_filter = 'log_id=%s' % log_id if splunk_url else None
            ids_list = sorted([element['id'] for element in result['elements']])
            raven_extra = {"id": log_id, "splunk_url": splunk_url, "splunk_filter": splunk_filter, "%s_list" % (model[1]): ids_list}
            raven.captureMessage("[Limitation]Periodic task 'Size is exceeded' cleaned up %d %s, total size of cleaned space is %s [%d]" %
                                 (result['count'], model[1], filters.filesizeformat(result['size']).replace(u'\xa0', u' '), time.time()),
                                 data=dict(level=20, logger='limitation'), extra=raven_extra)
            extra = dict(log_id=log_id, meta=True, count=result['count'], size=filters.filesizeformat(result['size']).replace(u'\xa0', u' '), model=model[1], reason='size_is_exceeded')
            logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
            for element in result['elements']:
                element.update({"log_id": log_id, "%s_id" % (model[1]): element.pop('id')})
                logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))
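
The "Periodic task" wording in the messages suggests these functions run on a schedule. A minimal registration sketch assuming Celery beat (the task path and schedule are illustrative only):

from celery import shared_task
from celery.schedules import crontab

@shared_task
def auto_delete_size_is_exceeded_task():
    auto_delete_size_is_exceeded()

# Hypothetical beat entry in Django settings:
# CELERY_BEAT_SCHEDULE = {
#     'auto-delete-size-is-exceeded': {
#         'task': 'tasks.auto_delete_size_is_exceeded_task',
#         'schedule': crontab(minute=0),  # hourly, as an example
#     },
# }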