Example #1
    def _update_cachefiles(self, project, dsym_files):
        rv = []

        # Find all the known bad files we could not convert last time
        # around
        conversion_errors = {}
        for dsym_file in dsym_files:
            cache_key = 'scbe:%s:%s' % (dsym_file.uuid,
                                        dsym_file.file.checksum)
            err = default_cache.get(cache_key)
            if err is not None:
                conversion_errors[dsym_file.uuid] = err

        for dsym_file in dsym_files:
            dsym_uuid = dsym_file.uuid
            if dsym_uuid in conversion_errors:
                continue

            try:
                with dsym_file.file.getfile(as_tempfile=True) as tf:
                    fo = FatObject.from_path(tf.name)
                    o = fo.get_object(uuid=dsym_file.uuid)
                    if o is None:
                        continue
                    symcache = o.make_symcache()
            except SymbolicError as e:
                default_cache.set(
                    'scbe:%s:%s' % (dsym_uuid, dsym_file.file.checksum),
                    e.message, CONVERSION_ERROR_TTL)
                conversion_errors[dsym_uuid] = e.message
                logger.error('dsymfile.symcache-build-error',
                             exc_info=True,
                             extra=dict(dsym_uuid=dsym_uuid))
                continue

            file = File.objects.create(
                name=dsym_file.uuid,
                type='project.symcache',
            )
            file.putfile(symcache.open_stream())
            try:
                with transaction.atomic():
                    rv.append((dsym_uuid,
                               ProjectSymCacheFile.objects.get_or_create(
                                   project=project,
                                   cache_file=file,
                                   dsym_file=dsym_file,
                                   defaults=dict(
                                       checksum=dsym_file.file.checksum,
                                       version=symcache.file_format_version,
                                   ))[0]))
            except IntegrityError:
                file.delete()
                rv.append((dsym_uuid,
                           ProjectSymCacheFile.objects.get(
                               project=project,
                               dsym_file=dsym_file,
                           )))

        return rv, conversion_errors
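
A minimal caller sketch for this variant (illustrative only: `task` stands for the instance defining the method, and `project` / `dsym_files` are assumed to come from the ORM):

created, errors = task._update_cachefiles(project, dsym_files)
for dsym_uuid, cache_file in created:
    logger.info('symcache ready', extra=dict(dsym_uuid=dsym_uuid))
for dsym_uuid, message in errors.items():
    logger.warning('symcache conversion failed: %s' % message,
                   extra=dict(dsym_uuid=dsym_uuid))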
Example #2
def detect_dif_from_path(path):
    """This detects which kind of dif(Debug Information File) the path
    provided is. It returns an array since a FatObject can contain more than
    on dif.
    """
    # proguard files (proguard/UUID.txt) or
    # (proguard/mapping-UUID.txt).
    proguard_id = _analyze_progard_filename(path)
    if proguard_id is not None:
        data = {'features': ['mapping']}
        return [(
            'proguard',  # dif type
            'any',  # architecture
            proguard_id,  # debug_id
            path,  # basepath
            data,  # extra data
        )]

    # native debug information files (MachO, ELF or Breakpad)
    try:
        fo = FatObject.from_path(path)
    except ObjectErrorUnsupportedObject as e:
        raise BadDif("Unsupported debug information file: %s" % e)
    except SymbolicError as e:
        logger.warning('dsymfile.bad-fat-object', exc_info=True)
        raise BadDif("Invalid debug information file: %s" % e)
    else:
        objs = []
        for obj in fo.iter_objects():
            data = {
                'type': obj.type,
                'features': list(obj.features),
            }
            objs.append((obj.kind, obj.arch, obj.id, path, data))
        return objs
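
A hedged usage sketch for this variant (the path is made up; `BadDif` and the five-tuple layout come from the code above):

try:
    difs = detect_dif_from_path('/tmp/difs/MyApp')  # hypothetical path
except BadDif as e:
    print('not a usable debug information file: %s' % e)
else:
    for dif_type, arch, debug_id, basepath, data in difs:
        print(dif_type, arch, debug_id, data['features'])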
Example #3
def detect_dif_from_filename(filename):
    """This detects which kind of dif (Debug Information File) the filename
    provided is. It returns an array since a FatObject can contain more than
    one dif.
    """
    # proguard files (proguard/UUID.txt) or
    # (proguard/mapping-UUID.txt).
    proguard_uuid = _analyze_progard_filename(filename)
    if proguard_uuid is not None:
        return [('proguard', 'any', six.text_type(proguard_uuid), filename)]

    # macho style debug symbols
    try:
        fo = FatObject.from_path(filename)
    except UnsupportedObjectFile:
        pass
    except SymbolicError:
        # Whatever was contained there, was probably not a
        # macho file.
        # XXX: log?
        logger.warning('dsymfile.bad-fat-object', exc_info=True)
    else:
        objs = []
        for obj in fo.iter_objects():
            objs.append((obj.kind, obj.arch, six.text_type(obj.uuid), filename))
        return objs
Example #4
def detect_dif_from_path(path):
    """This detects which kind of dif(Debug Information File) the path
    provided is. It returns an array since a FatObject can contain more than
    on dif.
    """
    # proguard files (proguard/UUID.txt) or
    # (proguard/mapping-UUID.txt).
    proguard_id = _analyze_progard_filename(path)
    if proguard_id is not None:
        data = {'features': ['mapping']}
        return [(
            'proguard',   # dif type
            'any',        # architecture
            proguard_id,  # debug_id
            path,         # basepath
            data,         # extra data
        )]

    # native debug information files (MachO, ELF or Breakpad)
    try:
        fo = FatObject.from_path(path)
    except ObjectErrorUnsupportedObject as e:
        raise BadDif("Unsupported debug information file: %s" % e)
    except SymbolicError as e:
        logger.warning('dsymfile.bad-fat-object', exc_info=True)
        raise BadDif("Invalid debug information file: %s" % e)
    else:
        objs = []
        for obj in fo.iter_objects():
            data = {
                'type': obj.type,
                'features': list(obj.features),
            }
            objs.append((obj.kind, obj.arch, obj.id, path, data))
        return objs
Example #5
def create_files_from_dsym_zip(fileobj, project,
                               update_symcaches=True):
    """Creates all missing dsym files from the given zip file.  This
    returns a list of all files created.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)

                # proguard files (proguard/UUID.txt) or
                # (proguard/mapping-UUID.txt).
                proguard_uuid = _analyze_progard_filename(fn)
                if proguard_uuid is not None:
                    to_create.append(('proguard', 'any', six.text_type(proguard_uuid), fn))
                    continue

                # macho style debug symbols
                try:
                    fo = FatObject.from_path(fn)
                except UnsupportedObjectFile:
                    pass
                except SymbolicError:
                    # Whatever was contained there, was probably not a
                    # macho file.
                    # XXX: log?
                    logger.warning('dsymfile.bad-fat-object', exc_info=True)
                else:
                    for obj in fo.iter_objects():
                        to_create.append((obj.kind, obj.arch,
                                          six.text_type(obj.uuid), fn))
                    continue

        rv = []
        for dsym_type, cpu, file_uuid, filename in to_create:
            with open(filename, 'rb') as f:
                dsym, created = _create_dsym_from_uuid(
                    project, dsym_type, cpu, file_uuid, f, os.path.basename(filename)
                )
                if created:
                    rv.append(dsym)

        # By default we trigger the symcache generation on upload to avoid
        # some obvious dogpiling.
        if update_symcaches:
            from sentry.tasks.symcache_update import symcache_update
            uuids_to_update = [six.text_type(x.uuid) for x in rv
                               if x.supports_symcache]
            if uuids_to_update:
                symcache_update.delay(project_id=project.id,
                                      uuids=uuids_to_update)

        return rv
    finally:
        shutil.rmtree(scratchpad)
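
A sketch of a call site, assuming `project` is an existing Project instance; the archive path is illustrative:

with open('/tmp/uploaded-dsyms.zip', 'rb') as fileobj:  # hypothetical upload
    created = create_files_from_dsym_zip(fileobj, project,
                                         update_symcaches=False)
for dsym in created:
    print('stored dsym %s' % dsym.uuid)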
Example #6
    def _update_cachefile(self, debug_file, fileobj):
        debug_id = debug_file.debug_id

        # Locate the object inside the FatObject. Since we have keyed debug
        # files by debug_id, we expect a corresponding object. Otherwise, we
        # fail silently, just like with missing symbols.
        try:
            fo = FatObject.from_path(fileobj.name)
            o = fo.get_object(id=debug_id)
            if o is None:
                return None, None, None
            symcache = o.make_symcache()
        except SymbolicError as e:
            if not isinstance(e, (SymCacheErrorMissingDebugSection,
                                  SymCacheErrorMissingDebugInfo)):
                logger.error('dsymfile.symcache-build-error',
                             exc_info=True,
                             extra=dict(debug_id=debug_id))

            return None, None, e.message

        file = File.objects.create(name=debug_id, type='project.symcache')
        file.putfile(symcache.open_stream())

        # Try to insert the new SymCache into the database. This only fails if
        # (1) another process has concurrently added the same sym cache, or if
        # (2) the debug symbol was deleted, either due to a newer upload or via
        # the API.
        try:
            with transaction.atomic():
                return ProjectSymCacheFile.objects.create(
                    project=debug_file.project,
                    cache_file=file,
                    dsym_file=debug_file,
                    checksum=debug_file.file.checksum,
                    version=symcache.file_format_version,
                ), symcache, None
        except IntegrityError:
            file.delete()

        # Check for a concurrently inserted symcache and use that instead. This
        # could have happened (1) due to a concurrent insert, or (2) a new
        # upload that has already succeeded in computing a symcache. The latter
        # case is extremely unlikely.
        cache_file = ProjectSymCacheFile.objects \
            .filter(project=debug_file.project, dsym_file__debug_id=debug_id) \
            .select_related('cache_file') \
            .order_by('-id') \
            .first()

        if cache_file is not None:
            return cache_file, None, None

        # There was no new symcache, indicating that the debug file has been
        # replaced with a newer version. Another job will create the
        # corresponding symcache eventually. To prevent querying the database
        # another time, simply use the in-memory symcache for now:
        return None, symcache, None
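
The three-tuple return value encodes mutually exclusive outcomes (row created or found, in-memory symcache only, or a conversion error). A hedged sketch of how a caller might branch on it; `task`, `conversion_errors`, `rv` and `symcaches` are illustrative names:

cache_file, symcache, error = task._update_cachefile(debug_file, fileobj)
if error is not None:
    conversion_errors[debug_file.debug_id] = error
elif cache_file is not None:
    rv.append((debug_file.debug_id, cache_file))
elif symcache is not None:
    # The DB row was superseded; keep the in-memory symcache for this
    # request only, as the closing comment above suggests.
    symcaches[debug_file.debug_id] = symcache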
Example #7
    def _update_cachefiles(self, project, dsym_files):
        rv = []

        # Find all the known bad files we could not convert last time
        # around
        conversion_errors = {}
        for dsym_file in dsym_files:
            cache_key = 'scbe:%s:%s' % (dsym_file.uuid, dsym_file.file.checksum)
            err = cache.get(cache_key)
            if err is not None:
                conversion_errors[dsym_file.uuid] = err

        for dsym_file in dsym_files:
            dsym_uuid = dsym_file.uuid
            if dsym_uuid in conversion_errors:
                continue

            try:
                with dsym_file.file.getfile(as_tempfile=True) as tf:
                    fo = FatObject.from_path(tf.name)
                    o = fo.get_object(uuid=dsym_file.uuid)
                    if o is None:
                        continue
                    symcache = o.make_symcache()
            except SymbolicError as e:
                cache.set('scbe:%s:%s' % (
                    dsym_uuid, dsym_file.file.checksum), e.message,
                    CONVERSION_ERROR_TTL)
                conversion_errors[dsym_uuid] = e.message
                logger.error('dsymfile.symcache-build-error',
                             exc_info=True, extra=dict(dsym_uuid=dsym_uuid))
                continue

            file = File.objects.create(
                name=dsym_file.uuid,
                type='project.symcache',
            )
            file.putfile(symcache.open_stream())
            try:
                with transaction.atomic():
                    rv.append((dsym_uuid, ProjectSymCacheFile.objects.get_or_create(
                        project=project,
                        cache_file=file,
                        dsym_file=dsym_file,
                        defaults=dict(
                            checksum=dsym_file.file.checksum,
                            version=symcache.file_format_version,
                        )
                    )[0]))
            except IntegrityError:
                file.delete()
                rv.append((dsym_uuid, ProjectSymCacheFile.objects.get(
                    project=project,
                    dsym_file=dsym_file,
                )))

        return rv, conversion_errors
Example #8
    def _update_cachefile(self, debug_file, fileobj):
        debug_id = debug_file.debug_id

        # Locate the object inside the FatObject. Since we have keyed debug
        # files by debug_id, we expect a corresponding object. Otherwise, we
        # fail silently, just like with missing symbols.
        try:
            fo = FatObject.from_path(fileobj.name)
            o = fo.get_object(id=debug_id)
            if o is None:
                return None, None, None
            symcache = o.make_symcache()
        except SymbolicError as e:
            if not isinstance(e, (SymCacheErrorMissingDebugSection, SymCacheErrorMissingDebugInfo)):
                logger.error('dsymfile.symcache-build-error',
                             exc_info=True, extra=dict(debug_id=debug_id))

            return None, None, e.message

        file = File.objects.create(name=debug_id, type='project.symcache')
        file.putfile(symcache.open_stream())

        # Try to insert the new SymCache into the database. This only fails if
        # (1) another process has concurrently added the same sym cache, or if
        # (2) the debug symbol was deleted, either due to a newer upload or via
        # the API.
        try:
            with transaction.atomic():
                return ProjectSymCacheFile.objects.create(
                    project=debug_file.project,
                    cache_file=file,
                    dsym_file=debug_file,
                    checksum=debug_file.file.checksum,
                    version=symcache.file_format_version,
                ), symcache, None
        except IntegrityError:
            file.delete()

        # Check for a concurrently inserted symcache and use that instead. This
        # could have happened (1) due to a concurrent insert, or (2) a new
        # upload that has already succeeded in computing a symcache. The latter
        # case is extremely unlikely.
        cache_file = ProjectSymCacheFile.objects \
            .filter(project=debug_file.project, dsym_file__debug_id=debug_id) \
            .select_related('cache_file') \
            .order_by('-id') \
            .first()

        if cache_file is not None:
            return cache_file, None, None

        # There was no new symcache, indicating that the debug file has been
        # replaced with a newer version. Another job will create the
        # corresponding symcache eventually. To prevent querying the database
        # another time, simply use the in-memory symcache for now:
        return None, symcache, None
Example #9
    def _update_cachefile(self, debug_file, tf):
        try:
            fo = FatObject.from_path(tf.name)
            o = fo.get_object(id=debug_file.debug_id)
            if o is None:
                return None, None
            symcache = o.make_symcache()
        except SymbolicError as e:
            default_cache.set(
                'scbe:%s:%s' % (debug_file.debug_id, debug_file.file.checksum),
                e.message, CONVERSION_ERROR_TTL)

            if not isinstance(e, (SymCacheErrorMissingDebugSection,
                                  SymCacheErrorMissingDebugInfo)):
                logger.error('dsymfile.symcache-build-error',
                             exc_info=True,
                             extra=dict(debug_id=debug_file.debug_id))

            return None, e.message

        # This task may run concurrently, and other tasks might delete
        # symcaches while it is running, which is why this requires a
        # loop instead of just a single retry on get.
        for iteration in range(5):
            file = File.objects.create(
                name=debug_file.debug_id,
                type='project.symcache',
            )
            file.putfile(symcache.open_stream())
            try:
                with transaction.atomic():
                    return ProjectSymCacheFile.objects.get_or_create(
                        project=debug_file.project,
                        cache_file=file,
                        dsym_file=debug_file,
                        defaults=dict(
                            checksum=debug_file.file.checksum,
                            version=symcache.file_format_version,
                        ))[0], None
            except IntegrityError:
                file.delete()
                try:
                    return ProjectSymCacheFile.objects.get(
                        project=debug_file.project,
                        dsym_file=debug_file,
                    ), None
                except ProjectSymCacheFile.DoesNotExist:
                    continue

        raise RuntimeError('Concurrency error on symcache update')
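
A minimal invocation sketch, reusing the tempfile pattern from the sibling variants above (`task` and `conversion_errors` are illustrative names):

with debug_file.file.getfile(as_tempfile=True) as tf:
    cache_file, error = task._update_cachefile(debug_file, tf)
if error is not None:
    conversion_errors[debug_file.debug_id] = error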
Example #10
    def _update_cachefile(self, debug_file, tf):
        try:
            fo = FatObject.from_path(tf.name)
            o = fo.get_object(id=debug_file.debug_id)
            if o is None:
                return None, None
            symcache = o.make_symcache()
        except SymbolicError as e:
            default_cache.set('scbe:%s:%s' % (
                debug_file.debug_id, debug_file.file.checksum), e.message,
                CONVERSION_ERROR_TTL)

            if not isinstance(e, (SymCacheErrorMissingDebugSection, SymCacheErrorMissingDebugInfo)):
                logger.error('dsymfile.symcache-build-error',
                             exc_info=True, extra=dict(debug_id=debug_file.debug_id))

            return None, e.message

        # This task may run concurrently, and other tasks might delete
        # symcaches while it is running, which is why this requires a
        # loop instead of just a single retry on get.
        for iteration in range(5):
            file = File.objects.create(
                name=debug_file.debug_id,
                type='project.symcache',
            )
            file.putfile(symcache.open_stream())
            try:
                with transaction.atomic():
                    return ProjectSymCacheFile.objects.get_or_create(
                        project=debug_file.project,
                        cache_file=file,
                        dsym_file=debug_file,
                        defaults=dict(
                            checksum=debug_file.file.checksum,
                            version=symcache.file_format_version,
                        )
                    )[0], None
            except IntegrityError:
                file.delete()
                try:
                    return ProjectSymCacheFile.objects.get(
                        project=debug_file.project,
                        dsym_file=debug_file,
                    ), None
                except ProjectSymCacheFile.DoesNotExist:
                    continue

        raise RuntimeError('Concurrency error on symcache update')
Example #11
    def _update_cachefiles(self, project, dsym_files):
        rv = []

        for dsym_file in dsym_files:
            dsym_uuid = dsym_file.uuid
            try:
                with dsym_file.file.getfile(as_tempfile=True) as tf:
                    fo = FatObject.from_path(tf.name)
                    o = fo.get_object(uuid=dsym_file.uuid)
                    if o is None:
                        continue
                    cache = o.make_symcache()
            except SymbolicError:
                logger.error('dsymfile.symcache-build-error',
                             exc_info=True, extra=dict(dsym_uuid=dsym_uuid))
                continue

            file = File.objects.create(
                name=dsym_file.uuid,
                type='project.symcache',
            )
            file.putfile(cache.open_stream())
            try:
                with transaction.atomic():
                    rv.append((dsym_uuid, ProjectSymCacheFile.objects.get_or_create(
                        project=project,
                        cache_file=file,
                        dsym_file=dsym_file,
                        defaults=dict(
                            checksum=dsym_file.file.checksum,
                            version=cache.file_format_version,
                        )
                    )[0]))
            except IntegrityError:
                file.delete()
                rv.append((dsym_uuid, ProjectSymCacheFile.objects.get(
                    project=project,
                    dsym_file=dsym_file,
                )))

        return rv
Example #12
    def _update_cachefiles(self, project, dsym_files):
        rv = []

        for dsym_file in dsym_files:
            dsym_uuid = dsym_file.uuid
            try:
                with dsym_file.file.getfile(as_tempfile=True) as tf:
                    fo = FatObject.from_path(tf.name)
                    o = fo.get_object(uuid=dsym_file.uuid)
                    if o is None:
                        continue
                    cache = o.make_symcache()
            except SymbolicError:
                logger.error('dsymfile.symcache-build-error',
                             exc_info=True, extra=dict(dsym_uuid=dsym_uuid))
                continue

            file = File.objects.create(
                name=dsym_file.uuid,
                type='project.symcache',
            )
            file.putfile(cache.open_stream())
            try:
                with transaction.atomic():
                    rv.append((dsym_uuid, ProjectSymCacheFile.objects.get_or_create(
                        project=project,
                        cache_file=file,
                        dsym_file=dsym_file,
                        defaults=dict(
                            checksum=dsym_file.file.checksum,
                            version=cache.file_format_version,
                        )
                    )[0]))
            except IntegrityError:
                file.delete()
                rv.append((dsym_uuid, ProjectSymCacheFile.objects.get(
                    project=project,
                    dsym_file=dsym_file,
                )))

        return rv
Example #13
def detect_dif_from_path(path):
    """This detects which kind of dif (Debug Information File) the path
    provided is. It returns an array since a FatObject can contain more than
    one dif.
    """
    # proguard files (proguard/UUID.txt) or
    # (proguard/mapping-UUID.txt).
    proguard_id = _analyze_progard_filename(path)
    if proguard_id is not None:
        return [('proguard', 'any', proguard_id, path)]

    # macho style debug symbols
    try:
        fo = FatObject.from_path(path)
    except ObjectErrorUnsupportedObject as e:
        raise BadDif("Unsupported debug information file: %s" % e)
    except SymbolicError as e:
        logger.warning('dsymfile.bad-fat-object', exc_info=True)
        raise BadDif("Invalid debug information file: %s" % e)
    else:
        objs = []
        for obj in fo.iter_objects():
            objs.append((obj.kind, obj.arch, obj.id, path))
        return objs
Example #14
def detect_dif_from_path(path):
    """This detects which kind of dif (Debug Information File) the path
    provided is. It returns an array since a FatObject can contain more than
    one dif.
    """
    # proguard files (proguard/UUID.txt) or
    # (proguard/mapping-UUID.txt).
    proguard_uuid = _analyze_progard_filename(path)
    if proguard_uuid is not None:
        return [('proguard', 'any', six.text_type(proguard_uuid), path)]

    # macho style debug symbols
    try:
        fo = FatObject.from_path(path)
    except UnsupportedObjectFile as e:
        raise BadDif("Unsupported debug information file: %s" % e)
    except SymbolicError as e:
        logger.warning('dsymfile.bad-fat-object', exc_info=True)
        raise BadDif("Invalid debug information file: %s" % e)
    else:
        objs = []
        for obj in fo.iter_objects():
            objs.append((obj.kind, obj.arch, six.text_type(obj.uuid), path))
        return objs
Example #15
    def _update_cachefile(self, debug_file, path, cls):
        debug_id = debug_file.debug_id

        # Skip silently if this cache cannot be computed from the given DIF
        if not cls.computes_from(debug_file):
            return None, None, None

        # Locate the object inside the FatObject. Since we have keyed debug
        # files by debug_id, we expect a corresponding object. Otherwise, we
        # fail silently, just like with missing symbols.
        try:
            fo = FatObject.from_path(path)
            o = fo.get_object(id=debug_id)
            if o is None:
                return None, None, None

            # Check features from the actual object file, if this is a legacy
            # DIF where features have not been extracted yet.
            if (debug_file.data or {}).get('features') is None:
                if o.features < set(cls.required_features):
                    return None, None, None

            cache = cls.cache_cls.from_object(o)
        except SymbolicError as e:
            if not isinstance(e, cls.ignored_errors):
                logger.error('dsymfile.%s-build-error' % cls.cache_name,
                             exc_info=True,
                             extra=dict(debug_id=debug_id))

            metrics.incr('%s.failed' % cls.cache_name,
                         tags={
                             'error': e.__class__.__name__,
                         },
                         skip_internal=False)

            return None, None, e.message

        file = File.objects.create(name=debug_id,
                                   type='project.%s' % cls.cache_name)
        file.putfile(cache.open_stream())

        # Try to insert the new Cache into the database. This only fails if
        # (1) another process has concurrently added the same sym cache, or if
        # (2) the debug symbol was deleted, either due to a newer upload or via
        # the API.
        try:
            with transaction.atomic():
                return cls.objects.create(
                    project=debug_file.project,
                    cache_file=file,
                    debug_file=debug_file,
                    checksum=debug_file.file.checksum,
                    version=cache.file_format_version,
                ), cache, None
        except IntegrityError:
            file.delete()

        # Check for a concurrently inserted cache and use that instead. This
        # could have happened (1) due to a concurrent insert, or (2) a new
        # upload that has already succeeded in computing a cache. The latter
        # case is extremely unlikely.
        cache_file = cls.objects \
            .filter(project=debug_file.project, debug_file__debug_id=debug_id) \
            .select_related('cache_file') \
            .order_by('-id') \
            .first()

        if cache_file is not None:
            return cache_file, None, None

        # There was no new cache, indicating that the debug file has been
        # replaced with a newer version. Another job will create the
        # corresponding cache eventually. To prevent querying the database
        # another time, simply use the in-memory cache for now:
        return None, cache, None
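
This generalized variant is parameterized over a cache model class. A sketch of the interface it appears to rely on, inferred purely from the attribute accesses above; the concrete class and its values here are hypothetical:

class ExampleCacheFile(object):  # hypothetical stand-in for a Django model
    cache_name = 'symcache'            # used in File type, logs and metrics
    cache_cls = SymCache               # must expose from_object(obj)
    required_features = ('debug',)     # compared against obj.features
    ignored_errors = (SymCacheErrorMissingDebugInfo,)  # not logged as errors

    @classmethod
    def computes_from(cls, debug_file):
        # Whether this cache kind can be computed from the given DIF.
        return debug_file.supports_symcache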
Example #16
    def _update_cachefile(self, debug_file, path, cls):
        debug_id = debug_file.debug_id

        # Skip silently if this cache cannot be computed from the given DIF
        if not cls.computes_from(debug_file):
            return None, None, None

        # Locate the object inside the FatObject. Since we have keyed debug
        # files by debug_id, we expect a corresponding object. Otherwise, we
        # fail silently, just like with missing symbols.
        try:
            fo = FatObject.from_path(path)
            o = fo.get_object(id=debug_id)
            if o is None:
                return None, None, None

            # Check features from the actual object file, if this is a legacy
            # DIF where features have not been extracted yet.
            if (debug_file.data or {}).get('features') is None:
                if o.features < set(cls.required_features):
                    return None, None, None

            cache = cls.cache_cls.from_object(o)
        except SymbolicError as e:
            if not isinstance(e, cls.ignored_errors):
                logger.error('dsymfile.%s-build-error' % cls.cache_name,
                             exc_info=True, extra=dict(debug_id=debug_id))

            metrics.incr('%s.failed' % cls.cache_name, tags={
                'error': e.__class__.__name__,
            }, skip_internal=False)

            return None, None, e.message

        file = File.objects.create(name=debug_id, type='project.%s' % cls.cache_name)
        file.putfile(cache.open_stream())

        # Try to insert the new Cache into the database. This only fails if
        # (1) another process has concurrently added the same sym cache, or if
        # (2) the debug symbol was deleted, either due to a newer upload or via
        # the API.
        try:
            with transaction.atomic():
                return cls.objects.create(
                    project=debug_file.project,
                    cache_file=file,
                    debug_file=debug_file,
                    checksum=debug_file.file.checksum,
                    version=cache.file_format_version,
                ), cache, None
        except IntegrityError:
            file.delete()

        # Check for a concurrently inserted cache and use that instead. This
        # could have happened (1) due to a concurrent insert, or (2) a new
        # upload that has already succeeded in computing a cache. The latter
        # case is extremely unlikely.
        cache_file = cls.objects \
            .filter(project=debug_file.project, debug_file__debug_id=debug_id) \
            .select_related('cache_file') \
            .order_by('-id') \
            .first()

        if cache_file is not None:
            return cache_file, None, None

        # There was no new cache, indicating that the debug file has been
        # replaced with a newer version. Another job will create the
        # corresponding cache eventually. To prevent querying the database
        # another time, simply use the in-memory cache for now:
        return None, cache, None