def create_files_from_dsym_zip(fileobj, project, update_symcaches=True):
    """Creates all missing dsym files from the given zip file.  This
    returns a list of all files created.

    :param fileobj: file-like object of the uploaded zip archive.
    :param project: the project the debug files belong to.
    :param update_symcaches: when ``True`` (default), schedule asynchronous
        symcache generation for the newly created files.
    :returns: list of the debug file records that were created.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)

        to_create = []
        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)
                try:
                    difs = detect_dif_from_path(fn)
                except BadDif:
                    # Not a usable debug file; skip it silently.
                    continue
                if difs:
                    # ``extend`` instead of repeated list concatenation to
                    # avoid quadratic behavior on large archives.  Also
                    # guards against a ``None`` return from detection.
                    to_create.extend(difs)

        rv = create_dsym_from_dif(to_create, project)

        # By default we trigger the symcache generation on upload to avoid
        # some obvious dogpiling.
        if update_symcaches:
            from sentry.tasks.symcache_update import symcache_update
            ids_to_update = [six.text_type(dif.debug_id) for dif in rv
                             if dif.supports_symcache]
            if ids_to_update:
                symcache_update.delay(project_id=project.id,
                                      debug_ids=ids_to_update)

        # Uploading new dsyms changes the reprocessing revision.
        bump_reprocessing_revision(project)

        return rv
    finally:
        # Always remove the extraction scratch directory.
        shutil.rmtree(scratchpad)
def assemble_dif(project_id, name, checksum, chunks, **kwargs):
    """Assemble uploaded chunks into a single debug information file (DIF).

    The outcome is communicated exclusively through ``set_assemble_status``
    (``ASSEMBLING`` -> ``OK`` or ``ERROR``); the return value is unused.

    :param project_id: id of the project the DIF belongs to.
    :param name: original file name of the upload.
    :param checksum: checksum identifying this assemble task.
    :param chunks: the uploaded chunks to stitch together.
    """
    # NOTE(review): imported lazily — presumably to avoid an import cycle
    # between task and model modules; confirm before hoisting to top level.
    from sentry.models import ChunkFileState, debugfile, Project, \
        ProjectDebugFile, set_assemble_status, BadDif
    from sentry.reprocessing import bump_reprocessing_revision

    project = Project.objects.filter(id=project_id).get()
    set_assemble_status(project, checksum, ChunkFileState.ASSEMBLING)

    # Assemble the chunks into files
    rv = assemble_file(project, name, checksum, chunks,
                       file_type='project.dif')

    # If no file has been created this means that the file failed to
    # assemble because of bad input data.  Return.
    if rv is None:
        return

    file, temp_file = rv
    # Until a DIF record references ``file`` we own it and must clean it
    # up on every early-exit path (see the ``finally`` below).
    delete_file = True
    try:
        with temp_file:
            # We only permit split difs to hit this endpoint.  The
            # client is required to split them up first or we error.
            try:
                result = debugfile.detect_dif_from_path(temp_file.name)
            except BadDif as e:
                set_assemble_status(project, checksum, ChunkFileState.ERROR,
                                    detail=e.args[0])
                return

            if len(result) != 1:
                set_assemble_status(project, checksum, ChunkFileState.ERROR,
                                    detail='Contained wrong number of '
                                    'architectures (expected one, got %s)'
                                    % len(result))
                return

            dif_type, cpu, file_id, filename, data = result[0]
            dif, created = debugfile.create_dif_from_id(
                project, dif_type, cpu, file_id, data,
                os.path.basename(name), file=file)
            indicate_success = True
            # Ownership of ``file`` has passed to the DIF record; stop
            # deleting it in the ``finally`` block.
            delete_file = False

            if created:
                # Bump the reprocessing revision since the symbol has changed
                # and might resolve processing issues. If the file was not
                # created, someone else has created it and will bump the
                # revision instead.
                bump_reprocessing_revision(project)

            # Try to generate caches from this DIF immediately. If this
            # fails, we can capture the error and report it to the uploader.
            # Also, we remove the file to prevent it from erroring again.
            error = ProjectDebugFile.difcache.generate_caches(
                project, dif, temp_file.name)
            if error is not None:
                set_assemble_status(project, checksum, ChunkFileState.ERROR,
                                    detail=error)
                indicate_success = False
                dif.delete()

            if indicate_success:
                set_assemble_status(project, checksum, ChunkFileState.OK)
    finally:
        if delete_file:
            file.delete()
def assemble_dif(project_id, name, checksum, chunks, **kwargs):
    """Assemble uploaded chunks into a debug information file (DIF).

    All results are reported through ``set_assemble_status``; nothing is
    returned to the caller.
    """
    from sentry.models import ChunkFileState, debugfile, Project, \
        ProjectDebugFile, set_assemble_status, BadDif
    from sentry.reprocessing import bump_reprocessing_revision

    project = Project.objects.filter(id=project_id).get()
    set_assemble_status(project, checksum, ChunkFileState.ASSEMBLING)

    # Stitch the uploaded chunks back together into one file.
    assembled = assemble_file(project, name, checksum, chunks,
                              file_type='project.dif')

    # ``None`` means assembly failed on bad input data; the failure has
    # already been recorded, so simply bail out.
    if assembled is None:
        return

    stored_file, tmp = assembled
    cleanup_file = True
    try:
        with tmp:
            # Only split DIFs may hit this endpoint; clients must split
            # fat objects up front or we report an error.
            try:
                detected = debugfile.detect_dif_from_path(tmp.name)
            except BadDif as exc:
                set_assemble_status(project, checksum, ChunkFileState.ERROR,
                                    detail=exc.args[0])
                return

            if len(detected) != 1:
                set_assemble_status(project, checksum, ChunkFileState.ERROR,
                                    detail='Contained wrong number of '
                                    'architectures (expected one, got %s)'
                                    % len(detected))
                return

            dif_type, cpu, file_id, _filename = detected[0]
            dif, created = debugfile.create_dif_from_id(
                project, dif_type, cpu, file_id,
                os.path.basename(name), file=stored_file)
            # The DIF record now owns the file; stop cleaning it up.
            cleanup_file = False

            bump_reprocessing_revision(project)

            succeeded = True

            # Attempt to write a symcache right away so that any error
            # can be captured and reported back to the uploader.
            if dif.supports_symcache:
                symcache, error = ProjectDebugFile.difcache.generate_symcache(
                    project, dif, tmp)
                if error is not None:
                    set_assemble_status(project, checksum,
                                        ChunkFileState.ERROR, detail=error)
                    succeeded = False
                    dif.delete()

            if succeeded:
                set_assemble_status(project, checksum, ChunkFileState.OK)
    finally:
        if cleanup_file:
            stored_file.delete()
def assemble_dif(project_id, name, checksum, chunks, debug_id=None, **kwargs):
    """
    Assembles uploaded chunks into a ``ProjectDebugFile``.

    The outcome is reported through ``set_assemble_status`` only
    (``ASSEMBLING`` -> ``OK`` with the serialized DIF, or ``ERROR``);
    the function returns ``None`` in every case.
    """
    from sentry.models import debugfile, Project, BadDif
    from sentry.reprocessing import bump_reprocessing_revision

    # Tag the current SDK scope so errors raised below are attributed to
    # the right project.
    with configure_scope() as scope:
        scope.set_tag("project", project_id)

    project = Project.objects.filter(id=project_id).get()
    set_assemble_status(AssembleTask.DIF, project.id, checksum, ChunkFileState.ASSEMBLING)

    # Assemble the chunks into a temporary file
    rv = assemble_file(AssembleTask.DIF, project, name, checksum, chunks, file_type="project.dif")

    # If no file has been created this means that the file failed to
    # assemble because of bad input data. Return.
    if rv is None:
        return

    file, temp_file = rv
    # We own ``file`` until a DIF record references it; the ``finally``
    # block deletes it on every early-exit path.
    delete_file = True

    try:
        with temp_file:
            # We only permit split difs to hit this endpoint.  The
            # client is required to split them up first or we error.
            try:
                result = debugfile.detect_dif_from_path(
                    temp_file.name, name=name, debug_id=debug_id
                )
            except BadDif as e:
                set_assemble_status(
                    AssembleTask.DIF, project.id, checksum, ChunkFileState.ERROR, detail=e.args[0]
                )
                return

            if len(result) != 1:
                detail = "Object contains %s architectures (1 expected)" % len(result)
                set_assemble_status(
                    AssembleTask.DIF, project.id, checksum, ChunkFileState.ERROR, detail=detail
                )
                return

            dif, created = debugfile.create_dif_from_id(project, result[0], file=file)
            # The DIF record now owns ``file``; stop cleaning it up.
            delete_file = False

            if created:
                # Bump the reprocessing revision since the symbol has changed
                # and might resolve processing issues. If the file was not
                # created, someone else has created it and will bump the
                # revision instead.
                bump_reprocessing_revision(project)
    except BaseException:
        # Catch-all boundary: report the failure to the uploader instead
        # of letting the task crash, and log with the full traceback.
        set_assemble_status(
            AssembleTask.DIF,
            project.id,
            checksum,
            ChunkFileState.ERROR,
            detail="internal server error",
        )
        logger.error("failed to assemble dif", exc_info=True)
    else:
        # Only reached when the ``try`` body ran to completion — the early
        # ``return``s above skip this — i.e. a DIF record actually exists.
        set_assemble_status(
            AssembleTask.DIF, project.id, checksum, ChunkFileState.OK, detail=serialize(dif)
        )
    finally:
        if delete_file:
            file.delete()
def create_files_from_dsym_zip(fileobj, project, update_symcaches=True):
    """Creates all missing dsym files from the given zip file.  This
    returns a list of all files created.

    :param fileobj: file-like object of the uploaded zip archive.
    :param project: the project the symbol files belong to.
    :param update_symcaches: when ``True`` (default), schedule
        asynchronous symcache generation for the newly created files.
    :returns: list of the dsym records that were newly created.
    """
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad, strip_toplevel=False)
        to_create = []

        # First pass: walk the extracted tree and collect
        # (type, cpu, uuid, path) tuples for everything that looks like
        # debug material.
        for dirpath, dirnames, filenames in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)

                # proguard files (proguard/UUID.txt) or
                # (proguard/mapping-UUID.txt).
                proguard_uuid = _analyze_progard_filename(fn)
                if proguard_uuid is not None:
                    to_create.append(('proguard', 'any',
                                      six.text_type(proguard_uuid), fn, ))
                    continue

                # macho style debug symbols
                try:
                    fo = FatObject.from_path(fn)
                except UnsupportedObjectFile:
                    # Recognized but unusable object file; skip it.
                    pass
                except SymbolicError:
                    # Whatever was contained there, was probably not a
                    # macho file.
                    # XXX: log?
                    logger.warning('dsymfile.bad-fat-object', exc_info=True)
                else:
                    # A fat object may contain one entry per architecture;
                    # record each of them against the same source path.
                    for obj in fo.iter_objects():
                        to_create.append((obj.kind, obj.arch,
                                          six.text_type(obj.uuid), fn))
                    continue

        # Second pass: create database records for the collected files,
        # keeping only the ones that did not exist before.
        rv = []
        for dsym_type, cpu, file_uuid, filename in to_create:
            with open(filename, 'rb') as f:
                dsym, created = _create_dsym_from_uuid(
                    project, dsym_type, cpu, file_uuid, f,
                    os.path.basename(filename)
                )
                if created:
                    rv.append(dsym)

        # By default we trigger the symcache generation on upload to avoid
        # some obvious dogpiling.
        if update_symcaches:
            from sentry.tasks.symcache_update import symcache_update
            uuids_to_update = [six.text_type(x.uuid) for x in rv
                               if x.supports_symcache]
            if uuids_to_update:
                symcache_update.delay(project_id=project.id,
                                      uuids=uuids_to_update)

        # Uploading new dsyms changes the reprocessing revision.
        bump_reprocessing_revision(project)

        return rv
    finally:
        # Always remove the extraction scratch directory.
        shutil.rmtree(scratchpad)
def assemble_dif(project_id, name, checksum, chunks, **kwargs):
    """Assemble uploaded chunks into a dsym (debug information) file.

    The outcome is reported exclusively via ``set_assemble_status``
    (``ASSEMBLING`` -> ``OK`` or ``ERROR``); nothing is returned.
    """
    from sentry.models import ChunkFileState, dsymfile, Project, \
        ProjectDSymFile, set_assemble_status, BadDif
    from sentry.reprocessing import bump_reprocessing_revision

    project = Project.objects.filter(id=project_id).get()
    set_assemble_status(project, checksum, ChunkFileState.ASSEMBLING)

    # Assemble the chunks into files
    rv = assemble_file(project, name, checksum, chunks,
                       file_type='project.dsym')

    # If no file has been created this means that the file failed to
    # assemble because of bad input data.  Return.
    if rv is None:
        return

    file, temp_file = rv
    # We own ``file`` until the dsym record references it; the ``finally``
    # block cleans it up on all early-exit paths.
    delete_file = True
    try:
        with temp_file:
            # We only permit split difs to hit this endpoint.  The
            # client is required to split them up first or we error.
            try:
                result = dsymfile.detect_dif_from_path(temp_file.name)
            except BadDif as e:
                set_assemble_status(project, checksum, ChunkFileState.ERROR,
                                    detail=e.args[0])
                return

            if len(result) != 1:
                set_assemble_status(project, checksum, ChunkFileState.ERROR,
                                    detail='Contained wrong number of '
                                    'architectures (expected one, got %s)'
                                    % len(result))
                return

            dsym_type, cpu, file_uuid, filename = result[0]
            dsym, created = dsymfile.create_dsym_from_uuid(
                project, dsym_type, cpu, file_uuid,
                os.path.basename(name), file=file)
            # Ownership of ``file`` has passed to the dsym record.
            delete_file = False

            bump_reprocessing_revision(project)

            indicate_success = True

            # If we need to write a symcache we can use the
            # `generate_symcache` method to attempt to write one.
            # This way we can also capture down the error if we need
            # to.
            if dsym.supports_symcache:
                symcache, error = ProjectDSymFile.dsymcache.generate_symcache(
                    project, dsym, temp_file)
                if error is not None:
                    set_assemble_status(project, checksum,
                                        ChunkFileState.ERROR, detail=error)
                    indicate_success = False
                    # Remove the broken dsym record so it does not fail
                    # again on every subsequent symcache attempt; this
                    # matches the other assemble variants in this file.
                    dsym.delete()

            if indicate_success:
                set_assemble_status(project, checksum, ChunkFileState.OK)
    finally:
        if delete_file:
            file.delete()
def assemble_dif(project_id, name, checksum, chunks, **kwargs):
    """Assemble uploaded chunks into a single debug information file.

    Progress and outcome are reported solely through
    ``set_assemble_status``; the return value is never used.
    """
    from sentry.models import ChunkFileState, debugfile, Project, \
        ProjectDebugFile, set_assemble_status, BadDif
    from sentry.reprocessing import bump_reprocessing_revision

    project = Project.objects.filter(id=project_id).get()
    set_assemble_status(project, checksum, ChunkFileState.ASSEMBLING)

    # Stitch the uploaded chunks back together.
    assembled = assemble_file(project, name, checksum, chunks,
                              file_type='project.dif')

    # A missing result means assembly failed on bad input data and the
    # status has already been recorded; nothing left to do.
    if assembled is None:
        return

    stored_file, tmp = assembled
    cleanup_stored_file = True
    try:
        with tmp:
            # Only split DIFs are accepted here; clients must split fat
            # objects before uploading or we report an error.
            try:
                detected = debugfile.detect_dif_from_path(tmp.name)
            except BadDif as exc:
                set_assemble_status(project, checksum, ChunkFileState.ERROR,
                                    detail=exc.args[0])
                return

            if len(detected) != 1:
                set_assemble_status(project, checksum, ChunkFileState.ERROR,
                                    detail='Contained wrong number of '
                                    'architectures (expected one, got %s)'
                                    % len(detected))
                return

            dif_type, cpu, file_id, _filename, data = detected[0]
            dif, created = debugfile.create_dif_from_id(
                project, dif_type, cpu, file_id, data,
                os.path.basename(name), file=stored_file)
            succeeded = True
            # The DIF record now owns the file; stop cleaning it up.
            cleanup_stored_file = False

            if created:
                # A new symbol might resolve outstanding processing
                # issues, so bump the reprocessing revision.  If the
                # record already existed, its creator bumps it instead.
                bump_reprocessing_revision(project)

            # Eagerly build caches for the new DIF so errors surface to
            # the uploader right away; a failing DIF is removed so it
            # does not error over and over again.
            error = ProjectDebugFile.difcache.generate_caches(
                project, dif, tmp.name)
            if error is not None:
                set_assemble_status(project, checksum, ChunkFileState.ERROR,
                                    detail=error)
                succeeded = False
                dif.delete()

            if succeeded:
                set_assemble_status(project, checksum, ChunkFileState.OK)
    finally:
        if cleanup_stored_file:
            stored_file.delete()