Example #1
def create_files_from_macho_zip(fileobj, project=None):
    """Creates all missing dsym files from the given zip file.  This
    returns a list of all files created.
    """
    if not have_symsynd:
        raise RuntimeError('symsynd is unavailable.  Install sentry with '
                           'the dsym feature flag.')
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)
                try:
                    uuids = get_macho_uuids(fn)
                except (IOError, ValueError):
                    # Whatever was contained there was probably not a
                    # Mach-O file.
                    continue
                for cpu, uuid in uuids:
                    to_create.append((cpu, uuid, fn))

        rv = []
        for cpu, uuid, filename in to_create:
            with open(filename, 'rb') as f:
                rv.append(_create_macho_dsym_from_uuid(
                    project, cpu, uuid, f, os.path.basename(filename)))
        return rv
    finally:
        shutil.rmtree(scratchpad)
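A call site is not part of the snippet above. As a minimal sketch, assuming a Django-style upload view (handle_dsym_upload and the request plumbing are illustrative names, not Sentry's actual endpoint), it might be used like this:

def handle_dsym_upload(request, project):
    # Hypothetical view; request.FILES is Django's standard mapping of
    # uploaded files, everything else here is an assumption.
    uploaded = request.FILES.get('file')
    if uploaded is None:
        raise ValueError('no zip file uploaded')
    # The helper extracts the archive, scans it for Mach-O binaries, and
    # registers one dsym file per (cpu, uuid) pair it finds.
    return create_files_from_macho_zip(uploaded, project=project)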
Example #2
def create_files_from_macho_zip(fileobj, project=None):
    """Creates all missing dsym files from the given zip file.  This
    returns a list of all files created.
    """
    if not have_symsynd:
        raise RuntimeError('symsynd is unavailable.  Install sentry with '
                           'the dsym feature flag.')
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)
                try:
                    uuids = get_macho_uuids(fn)
                except (IOError, ValueError):
                    # Whatever was contained there was probably not a
                    # Mach-O file.
                    continue
                for cpu, uuid in uuids:
                    to_create.append((cpu, uuid, fn))

        rv = []
        for cpu, uuid, filename in to_create:
            with open(filename, 'rb') as f:
                rv.append(_create_macho_dsym_from_uuid(
                    project, cpu, uuid, f, os.path.basename(filename)))
        return rv
    finally:
        shutil.rmtree(scratchpad)
Example #3
def create_files_from_dif_zip(fileobj, project):
    """Creates all missing debug files from the given zip file.  This
    returns a list of all files created.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)
                try:
                    difs = detect_dif_from_path(fn) or []
                except BadDif:
                    difs = []
                to_create.extend(difs)

        rv = create_debug_file_from_dif(to_create, project)

        # Uploading new dsyms changes the reprocessing revision
        bump_reprocessing_revision(project)

        return rv
    finally:
        shutil.rmtree(scratchpad)
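All of these helpers delegate extraction to safe_extract_zip, whose implementation is not shown. The sketch below illustrates the zip-slip defense such a helper typically needs; it is an assumption about the helper's intent, not Sentry's actual code:

import os
import zipfile

def safe_extract_zip_sketch(fileobj, path):
    # Illustrative only: refuse any entry whose resolved destination
    # would escape the target directory (the "zip slip" attack).
    root = os.path.realpath(path)
    with zipfile.ZipFile(fileobj) as zf:
        for info in zf.infolist():
            dest = os.path.realpath(os.path.join(root, info.filename))
            if dest != root and not dest.startswith(root + os.sep):
                raise ValueError('entry escapes target: %r' % info.filename)
        zf.extractall(root)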
Example #4
def create_files_from_macho_zip(fileobj, project=None):
    """Creates all missing dsym files from the given zip file.  This
    returns a list of all files created.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)
                try:
                    di = DebugInfo.open_path(fn)
                except DebugInfoError:
                    # Whatever was contained there was probably not a
                    # Mach-O file.
                    continue
                for variant in di.get_variants():
                    to_create.append((
                        variant.cpu_name,
                        str(variant.uuid),  # noqa: B308
                        fn,
                    ))

        rv = []
        for cpu, uuid, filename in to_create:
            with open(filename, 'rb') as f:
                rv.append(_create_macho_dsym_from_uuid(
                    project, cpu, uuid, f, os.path.basename(filename)))
        return rv
    finally:
        shutil.rmtree(scratchpad)
Example #5
def create_files_from_dsym_zip(fileobj, project,
                               update_symcaches=True):
    """Creates all missing dsym files from the given zip file.  This
    returns a list of all files created.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)

                # ProGuard files (proguard/UUID.txt or
                # proguard/mapping-UUID.txt).
                proguard_uuid = _analyze_progard_filename(fn)
                if proguard_uuid is not None:
                    to_create.append(('proguard', 'any', six.text_type(proguard_uuid), fn))
                    continue

                # Mach-O style debug symbols
                try:
                    fo = FatObject.from_path(fn)
                except UnsupportedObjectFile:
                    pass
                except SymbolicError:
                    # Whatever was contained there was probably not a
                    # Mach-O file.
                    logger.warning('dsymfile.bad-fat-object', exc_info=True)
                else:
                    for obj in fo.iter_objects():
                        to_create.append((obj.kind, obj.arch,
                                          six.text_type(obj.uuid), fn))
                    continue

        rv = []
        for dsym_type, cpu, file_uuid, filename in to_create:
            with open(filename, 'rb') as f:
                dsym, created = _create_dsym_from_uuid(
                    project, dsym_type, cpu, file_uuid, f, os.path.basename(filename)
                )
                if created:
                    rv.append(dsym)

        # By default we trigger the symcache generation on upload to avoid
        # some obvious dogpiling.
        if update_symcaches:
            from sentry.tasks.symcache_update import symcache_update
            uuids_to_update = [six.text_type(x.uuid) for x in rv
                               if x.supports_symcache]
            if uuids_to_update:
                symcache_update.delay(project_id=project.id,
                                      uuids=uuids_to_update)

        return rv
    finally:
        shutil.rmtree(scratchpad)
Example #6
    def extract(self) -> TemporaryDirectory:
        """Extract contents to a temporary directory.

        The caller is responsible for cleanup of the temporary files.
        """
        temp_dir = TemporaryDirectory()
        safe_extract_zip(self._fileobj, temp_dir.name, strip_toplevel=False)

        return temp_dir
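Because cleanup is left to the caller, a typical call site uses the returned TemporaryDirectory as a context manager, or calls its .cleanup() method in a finally block. A short sketch, where archive and process are hypothetical stand-ins:

import os

# `archive` is a hypothetical instance of the class defining extract();
# `process` stands in for whatever consumes the extracted files.
with archive.extract() as extracted_path:
    # TemporaryDirectory's context manager returns the directory name
    # and removes the whole tree when the block exits.
    for name in os.listdir(extracted_path):
        process(os.path.join(extracted_path, name))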
Example #7
def create_files_from_dsym_zip(fileobj, project=None):
    """Creates all missing dsym files from the given zip file.  This
    returns a list of all files created.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)

                # ProGuard files (proguard/UUID.txt or
                # proguard/mapping-UUID.txt).
                proguard_uuid = _analyze_progard_filename(fn)
                if proguard_uuid is not None:
                    to_create.append((
                        'proguard',
                        'any',
                        six.text_type(proguard_uuid),
                        fn,
                    ))

                # Mach-O style debug symbols
                try:
                    di = DebugInfo.open_path(fn)
                except DebugInfoError:
                    # Whatever was contained there was probably not a
                    # Mach-O file.
                    pass
                else:
                    for variant in di.get_variants():
                        to_create.append((
                            'macho',
                            variant.cpu_name,
                            six.text_type(variant.uuid),
                            fn,
                        ))
                    continue

        rv = []
        for dsym_type, cpu, file_uuid, filename in to_create:
            with open(filename, 'rb') as f:
                dsym, created = _create_dsym_from_uuid(
                    project, dsym_type, cpu, file_uuid, f,
                    os.path.basename(filename))
                if created:
                    rv.append(dsym)
        return rv
    finally:
        shutil.rmtree(scratchpad)
Example #8
def create_files_from_dif_zip(fileobj, project, update_caches=True):
    """Creates all missing debug files from the given zip file.  This
    returns a list of all files created.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)
                try:
                    difs = detect_dif_from_path(fn) or []
                except BadDif:
                    difs = []
                to_create.extend(difs)

        rv = create_debug_file_from_dif(to_create, project)

        # Trigger generation of symcaches and cficaches to avoid dogpiling when
        # events start coming in.
        if update_caches:
            from sentry.tasks.symcache_update import symcache_update
            ids_to_update = [
                six.text_type(dif.debug_id) for dif in rv
                if dif.supports_caches
            ]
            if ids_to_update:
                symcache_update.delay(project_id=project.id,
                                      debug_ids=ids_to_update)

        # Uploading new dsyms changes the reprocessing revision
        bump_reprocessing_revision(project)

        return rv
    finally:
        shutil.rmtree(scratchpad)
Example #9
def create_files_from_dsym_zip(fileobj, project,
                               update_symcaches=True):
    """Creates all missing dsym files from the given zip file.  This
    returns a list of all files created.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)
                try:
                    difs = detect_dif_from_path(fn) or []
                except BadDif:
                    difs = []
                to_create.extend(difs)

        rv = create_dsym_from_dif(to_create, project)

        # By default we trigger the symcache generation on upload to avoid
        # some obvious dogpiling.
        if update_symcaches:
            from sentry.tasks.symcache_update import symcache_update
            ids_to_update = [six.text_type(dif.debug_id) for dif in rv
                             if dif.supports_symcache]
            if ids_to_update:
                symcache_update.delay(project_id=project.id,
                                      debug_ids=ids_to_update)

        # Uploading new dsyms changes the reprocessing revision
        bump_reprocessing_revision(project)

        return rv
    finally:
        shutil.rmtree(scratchpad)
Example #10
def create_files_from_dsym_zip(fileobj, project, update_symcaches=True):
    """Creates all missing dsym files from the given zip file.  This
    returns a list of all files created.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)
        to_create = []

        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)
                try:
                    difs = detect_dif_from_path(fn) or []
                except BadDif:
                    difs = []
                to_create.extend(difs)

        rv = create_dsym_from_dif(to_create, project)

        # By default we trigger the symcache generation on upload to avoid
        # some obvious dogpiling.
        if update_symcaches:
            from sentry.tasks.symcache_update import symcache_update
            uuids_to_update = [
                six.text_type(x.uuid) for x in rv if x.supports_symcache
            ]
            if uuids_to_update:
                symcache_update.delay(project_id=project.id,
                                      uuids=uuids_to_update)

        # Uploading new dsyms changes the reprocessing revision
        bump_reprocessing_revision(project)

        return rv
    finally:
        shutil.rmtree(scratchpad)
Example #11
def assemble_artifacts(org_id, version, checksum, chunks, **kwargs):
    """
    Creates release files from an uploaded artifact bundle.
    """

    import shutil
    import tempfile
    from sentry.utils.zip import safe_extract_zip
    from sentry.models import File, Organization, Release, ReleaseFile

    organization = Organization.objects.get_from_cache(pk=org_id)

    bind_organization_context(organization)

    set_assemble_status(AssembleTask.ARTIFACTS, org_id, checksum,
                        ChunkFileState.ASSEMBLING)

    # Assemble the chunks into a temporary file
    rv = assemble_file(
        AssembleTask.ARTIFACTS,
        organization,
        "release-artifacts.zip",
        checksum,
        chunks,
        file_type="release.bundle",
    )

    # If no file has been created, the chunks failed to assemble
    # because of bad input data.  Return.
    if rv is None:
        return

    bundle, temp_file = rv
    scratchpad = tempfile.mkdtemp()

    # Initially, always delete the bundle file. Later on, we can start to store
    # the artifact bundle as a release file.
    delete_bundle = True

    try:
        try:
            safe_extract_zip(temp_file, scratchpad, strip_toplevel=False)
        except BaseException:
            raise AssembleArtifactsError("failed to extract bundle")

        try:
            manifest_path = path.join(scratchpad, "manifest.json")
            with open(manifest_path, "rb") as manifest_file:
                manifest = json.loads(manifest_file.read())
        except BaseException:
            raise AssembleArtifactsError("failed to open release manifest")

        org_slug = manifest.get("org")
        if organization.slug != org_slug:
            raise AssembleArtifactsError(
                "organization does not match uploaded bundle")

        release_name = manifest.get("release")
        if release_name != version:
            raise AssembleArtifactsError(
                "release does not match uploaded bundle")

        try:
            release = Release.objects.get(organization_id=organization.id,
                                          version=release_name)
        except Release.DoesNotExist:
            raise AssembleArtifactsError("release does not exist")

        dist_name = manifest.get("dist")
        dist = None
        if dist_name:
            dist = release.add_dist(dist_name)

        artifacts = manifest.get("files", {})
        for rel_path, artifact in six.iteritems(artifacts):
            artifact_url = artifact.get("url", rel_path)
            artifact_basename = artifact_url.rsplit("/", 1)[-1]

            file = File.objects.create(name=artifact_basename,
                                       type="release.file",
                                       headers=artifact.get("headers", {}))

            full_path = path.join(scratchpad, rel_path)
            with open(full_path, "rb") as fp:
                file.putfile(fp, logger=logger)

            kwargs = {
                "organization_id": organization.id,
                "release": release,
                "name": artifact_url,
                "dist": dist,
            }

            # Release files must have unique names within their release
            # and dist. If a matching file already exists, replace its
            # file with the new one; otherwise create it.
            try:
                release_file = ReleaseFile.objects.get(**kwargs)
            except ReleaseFile.DoesNotExist:
                try:
                    with transaction.atomic():
                        ReleaseFile.objects.create(file=file, **kwargs)
                except IntegrityError:
                    # NB: This indicates a race, where another assemble task or
                    # file upload job has just created a conflicting file. Since
                    # we're upserting here anyway, yield to the faster actor and
                    # do not try again.
                    file.delete()
            else:
                old_file = release_file.file
                release_file.update(file=file)
                old_file.delete()

    except AssembleArtifactsError as e:
        set_assemble_status(AssembleTask.ARTIFACTS,
                            org_id,
                            checksum,
                            ChunkFileState.ERROR,
                            detail=six.text_type(e))
    except BaseException:
        logger.error("failed to assemble release bundle", exc_info=True)
        set_assemble_status(
            AssembleTask.ARTIFACTS,
            org_id,
            checksum,
            ChunkFileState.ERROR,
            detail="internal server error",
        )
    else:
        set_assemble_status(AssembleTask.ARTIFACTS, org_id, checksum,
                            ChunkFileState.OK)
    finally:
        shutil.rmtree(scratchpad)
        if delete_bundle:
            bundle.delete()
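The manifest consumed by assemble_artifacts is plain JSON. Only the key names below come from the function above ("org", "release", "dist", "files", and the per-file "url" and "headers"); the values are invented for illustration:

# Illustrative manifest.json contents, shown as a Python literal.
manifest = {
    "org": "acme",              # must match organization.slug
    "release": "1.0.0",         # must match the `version` argument
    "dist": "android",          # optional; registered via release.add_dist()
    "files": {
        "static/app.min.js": {  # path of the file inside the bundle
            "url": "~/static/app.min.js",
            "headers": {"Sourcemap": "app.min.js.map"},
        },
    },
}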