Code example #1
0
def create_files_from_dsym_zip(fileobj, project,
                               update_symcaches=True):
    """Extract a zip of debug symbols and create every missing dsym
    file for *project*.

    Returns the list of dsym files that were newly created.  When
    ``update_symcaches`` is true (the default), a symcache update task
    is scheduled for the created files that support it, to avoid
    obvious dogpiling once events arrive.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)

        # Phase 1: walk the extracted tree and collect a
        # (dsym_type, cpu, uuid, path) tuple for everything we recognize.
        pending = []
        for dirpath, _dirnames, filenames in os.walk(scratchpad):
            for basename in filenames:
                path = os.path.join(dirpath, basename)

                # Proguard mappings: proguard/UUID.txt or
                # proguard/mapping-UUID.txt.
                proguard_uuid = _analyze_progard_filename(path)
                if proguard_uuid is not None:
                    pending.append(
                        ('proguard', 'any', six.text_type(proguard_uuid), path))
                    continue

                # Mach-O style debug symbols.
                try:
                    fat = FatObject.from_path(path)
                except UnsupportedObjectFile:
                    continue
                except SymbolicError:
                    # Whatever was contained there was probably not a
                    # macho file.
                    logger.warning('dsymfile.bad-fat-object', exc_info=True)
                    continue
                for obj in fat.iter_objects():
                    pending.append(
                        (obj.kind, obj.arch, six.text_type(obj.uuid), path))

        # Phase 2: create the missing dsym records from the collected
        # candidates.
        created_files = []
        for dsym_type, cpu, file_uuid, path in pending:
            with open(path, 'rb') as f:
                dsym, created = _create_dsym_from_uuid(
                    project, dsym_type, cpu, file_uuid, f,
                    os.path.basename(path)
                )
            if created:
                created_files.append(dsym)

        # By default we trigger the symcache generation on upload to
        # avoid some obvious dogpiling.
        if update_symcaches:
            from sentry.tasks.symcache_update import symcache_update
            uuids_to_update = [six.text_type(x.uuid)
                               for x in created_files
                               if x.supports_symcache]
            if uuids_to_update:
                symcache_update.delay(project_id=project.id,
                                      uuids=uuids_to_update)

        return created_files
    finally:
        shutil.rmtree(scratchpad)
Code example #2
0
def create_files_from_dif_zip(fileobj, project, update_caches=True):
    """Creates all missing debug files from the given zip file.

    Returns a list of all files created.  When ``update_caches`` is
    true (the default), a task is scheduled to generate symcaches and
    cficaches for the newly created files that support them.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)
                try:
                    difs = detect_dif_from_path(fn)
                except BadDif:
                    difs = None

                # extend() instead of rebinding `to_create + difs` avoids
                # quadratic list copying on archives with many files.
                if difs:
                    to_create.extend(difs)

        rv = create_debug_file_from_dif(to_create, project)

        # Trigger generation of symcaches and cficaches to avoid dogpiling when
        # events start coming in.
        if update_caches:
            from sentry.tasks.symcache_update import symcache_update
            ids_to_update = [
                six.text_type(dif.debug_id) for dif in rv
                if dif.supports_caches
            ]
            if ids_to_update:
                symcache_update.delay(project_id=project.id,
                                      debug_ids=ids_to_update)

        # Uploading new dsyms changes the reprocessing revision
        bump_reprocessing_revision(project)

        return rv
    finally:
        shutil.rmtree(scratchpad)
Code example #3
0
File: dsymfile.py  Project: hosmelq/sentry
def create_files_from_dsym_zip(fileobj, project,
                               update_symcaches=True):
    """Creates all missing dsym files from the given zip file.

    Returns a list of all files created.  When ``update_symcaches`` is
    true (the default), a symcache update task is scheduled for the
    created files that support it.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)
                try:
                    difs = detect_dif_from_path(fn)
                except BadDif:
                    difs = None

                # extend() instead of rebinding `to_create + difs` avoids
                # quadratic list copying on archives with many files.
                if difs:
                    to_create.extend(difs)

        rv = create_dsym_from_dif(to_create, project)

        # By default we trigger the symcache generation on upload to avoid
        # some obvious dogpiling.
        if update_symcaches:
            from sentry.tasks.symcache_update import symcache_update
            ids_to_update = [six.text_type(dif.debug_id) for dif in rv
                             if dif.supports_symcache]
            if ids_to_update:
                symcache_update.delay(project_id=project.id,
                                      debug_ids=ids_to_update)

        # Uploading new dsyms changes the reprocessing revision
        bump_reprocessing_revision(project)

        return rv
    finally:
        shutil.rmtree(scratchpad)
Code example #4
0
File: dsymfile.py  Project: zhaokaiju/sentry
def create_files_from_dsym_zip(fileobj, project, update_symcaches=True):
    """Creates all missing dsym files from the given zip file.

    Returns a list of all files created.  When ``update_symcaches`` is
    true (the default), a symcache update task is scheduled for the
    created files that support it.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)
                try:
                    difs = detect_dif_from_path(fn)
                except BadDif:
                    difs = None

                # extend() instead of rebinding `to_create + difs` avoids
                # quadratic list copying on archives with many files.
                if difs:
                    to_create.extend(difs)

        rv = create_dsym_from_dif(to_create, project)

        # By default we trigger the symcache generation on upload to avoid
        # some obvious dogpiling.
        if update_symcaches:
            from sentry.tasks.symcache_update import symcache_update
            uuids_to_update = [
                six.text_type(x.uuid) for x in rv if x.supports_symcache
            ]
            if uuids_to_update:
                symcache_update.delay(project_id=project.id,
                                      uuids=uuids_to_update)

        # Uploading new dsyms changes the reprocessing revision
        bump_reprocessing_revision(project)

        return rv
    finally:
        shutil.rmtree(scratchpad)
Code example #5
0
def assemble_dif(project_id, file_id, file_blob_ids, checksum, **kwargs):
    """Assemble uploaded file chunks into a debug information file.

    If the assembled file is a valid object file, dsym files are
    created from it, a symcache update task is scheduled for those
    that support it, and the original chunk file is deleted.
    Otherwise the file is marked with an error state and the failure
    is logged.
    """
    from sentry.models import ChunkFileState, dsymfile, Project, CHUNK_STATE_HEADER
    with transaction.atomic():

        # Assemble the chunks into a single file.
        file = assemble_chunks(file_id, file_blob_ids, checksum)

        # If an error happened during assembling, we early return here.
        if file.headers.get(CHUNK_STATE_HEADER) == ChunkFileState.ERROR:
            return

        # Idiomatic equivalent of filter(id=...).get(); still raises
        # Project.DoesNotExist when the project is missing.
        project = Project.objects.get(id=project_id)

        with file.getfile(as_tempfile=True) as tf:
            result = dsymfile.detect_dif_from_filename(tf.name)
            if result:
                dsyms = dsymfile.create_dsym_from_dif(result, project,
                                                      file.name)

                from sentry.tasks.symcache_update import symcache_update
                uuids_to_update = [
                    six.text_type(x.uuid) for x in dsyms if x.supports_symcache
                ]
                if uuids_to_update:
                    symcache_update.delay(project_id=project.id,
                                          uuids=uuids_to_update)

                # Uploading new dsyms changes the reprocessing revision
                dsymfile.bump_reprocessing_revision(project)
                # We can delete the original chunk file since we created new dsym files
                file.delete()
            else:
                file.headers[CHUNK_STATE_HEADER] = ChunkFileState.ERROR
                file.headers['error'] = 'Invalid object file'
                file.save()
                logger.error('assemble_chunks.invalid_object_file',
                             extra={
                                 'error': file.headers.get('error', ''),
                                 'file_id': file.id
                             })