def _create_dsym_from_uuid(project, dsym_type, cpu_name, uuid, fileobj, basename):
    """Store a mach dsym file or proguard mapping for ``uuid``.

    The uuid is intentionally not verified here; use
    `create_files_from_dsym_zip` for the complete workflow.  Returns a
    tuple of ``(dsym_file, created)``.
    """
    if dsym_type == 'proguard':
        object_name = 'proguard-mapping'
    elif dsym_type == 'macho':
        object_name = basename
    else:
        raise TypeError('unknown dsym type %r' % (dsym_type, ))

    # Hash the payload so an identical re-upload can be short-circuited,
    # then rewind so the content can still be stored below.
    hasher = hashlib.sha1()
    while True:
        blob = fileobj.read(16384)
        if not blob:
            break
        hasher.update(blob)
    checksum = hasher.hexdigest()
    fileobj.seek(0, 0)

    existing = None
    try:
        existing = ProjectDSymFile.objects.get(uuid=uuid, project=project)
    except ProjectDSymFile.DoesNotExist:
        pass
    if existing is not None:
        if existing.file.checksum == checksum:
            # Same content is already stored for this uuid.
            return existing, False
        # Checksum mismatch: drop the stale record and perform a re-upload.
        existing.delete()

    file = File.objects.create(
        name=uuid,
        type='project.dsym',
        headers={'Content-Type': DSYM_MIMETYPES[dsym_type]},
    )
    file.putfile(fileobj)
    try:
        with transaction.atomic():
            rv = ProjectDSymFile.objects.create(
                file=file,
                uuid=uuid,
                cpu_name=cpu_name,
                object_name=object_name,
                project=project,
            )
    except IntegrityError:
        # Lost a race against a concurrent upload; keep the winner's row.
        file.delete()
        rv = ProjectDSymFile.objects.get(uuid=uuid, project=project)

    resolve_processing_issue(
        project=project,
        scope='native',
        object='dsym:%s' % uuid,
    )
    return rv, True
def _create_macho_dsym_from_uuid(project, cpu_name, uuid, fileobj, object_name):
    """Store a mach dsym file for ``uuid``.

    The uuid is not verified here; `create_files_from_macho_zip` performs
    the full workflow.  When ``project`` is ``None`` the file is stored as
    a global dsym, otherwise it is scoped to the project.  Returns the
    (possibly pre-existing) dsym model instance.
    """
    if project is None:
        cls, file_type, extra = GlobalDSymFile, 'global.dsym', {}
    else:
        cls, file_type, extra = ProjectDSymFile, 'project.dsym', {'project': project}

    # Hash the payload so identical re-uploads can be detected, then
    # rewind for storage.
    hasher = hashlib.sha1()
    while True:
        blob = fileobj.read(16384)
        if not blob:
            break
        hasher.update(blob)
    checksum = hasher.hexdigest()
    fileobj.seek(0, 0)

    existing = None
    try:
        existing = cls.objects.get(uuid=uuid, **extra)
    except cls.DoesNotExist:
        pass
    if existing is not None:
        if existing.file.checksum == checksum:
            # Same content already stored; nothing to do.
            return existing
        # Checksum mismatch: drop the stale record and re-upload.
        existing.delete()

    file = File.objects.create(
        name=uuid,
        type=file_type,
        headers={'Content-Type': 'application/x-mach-binary'},
    )
    file.putfile(fileobj)
    try:
        with transaction.atomic():
            rv = cls.objects.create(file=file, uuid=uuid, cpu_name=cpu_name,
                                    object_name=object_name, **extra)
    except IntegrityError:
        # Lost a race against a concurrent upload; keep the winner's row.
        file.delete()
        rv = cls.objects.get(uuid=uuid, **extra)

    resolve_processing_issue(
        project=project,
        scope='native',
        object='dsym:%s' % uuid,
    )
    return rv
def _create_dsym_from_uuid(project, dsym_type, cpu_name, uuid, fileobj, basename):
    """This creates a mach dsym file or proguard mapping from the given uuid
    and open file object to a dsym file.  This will not verify the uuid
    (intentionally so).  Use `create_files_from_dsym_zip` for doing
    everything.

    Returns a tuple of ``(dsym_file, created)``.
    """
    # Map the dsym type to the stored object name.
    if dsym_type == 'proguard':
        object_name = 'proguard-mapping'
    elif dsym_type == 'macho':
        object_name = basename
    else:
        raise TypeError('unknown dsym type %r' % (dsym_type, ))

    # Compute the SHA1 of the payload so identical re-uploads can be
    # short-circuited, then rewind so the file can still be stored below.
    h = hashlib.sha1()
    while 1:
        chunk = fileobj.read(16384)
        if not chunk:
            break
        h.update(chunk)
    checksum = h.hexdigest()
    fileobj.seek(0, 0)

    try:
        rv = ProjectDSymFile.objects.get(uuid=uuid, project=project)
        if rv.file.checksum == checksum:
            # Identical content already stored for this uuid.
            return rv, False
    except ProjectDSymFile.DoesNotExist:
        pass
    else:
        # The checksum mismatches.  In this case we delete the old object
        # and perform a re-upload.
        rv.delete()

    file = File.objects.create(
        name=uuid,
        type='project.dsym',
        headers={'Content-Type': DSYM_MIMETYPES[dsym_type]},
    )
    file.putfile(fileobj)
    try:
        with transaction.atomic():
            rv = ProjectDSymFile.objects.create(
                file=file,
                uuid=uuid,
                cpu_name=cpu_name,
                object_name=object_name,
                project=project,
            )
    except IntegrityError:
        # Lost a race against a concurrent upload; keep the other row and
        # discard our freshly stored file blob.
        file.delete()
        rv = ProjectDSymFile.objects.get(uuid=uuid, project=project)

    # A dsym for this uuid is now available, so clear any pending
    # processing issues that referenced it.
    resolve_processing_issue(
        project=project,
        scope='native',
        object='dsym:%s' % uuid,
    )
    return rv, True
def create_dif_from_id(project, dif_type, cpu_name, debug_id, data, basename,
                       fileobj=None, file=None):
    """Register a debug information file (DIF) under ``debug_id``.

    The debug id is intentionally not verified against the payload; use
    `create_files_from_dif_zip` for the complete workflow.  Exactly one of
    ``fileobj`` or ``file`` must be provided.  Returns a tuple of
    ``(dif, created)``.
    """
    if dif_type == 'proguard':
        object_name = 'proguard-mapping'
    elif dif_type in ('macho', 'elf'):
        object_name = basename
    elif dif_type == 'breakpad':
        # Breakpad symbol files are stored without their ".sym" suffix.
        object_name = basename[:-4] if basename.endswith('.sym') else basename
    else:
        raise TypeError('unknown dif type %r' % (dif_type, ))

    # Determine the payload checksum, either from the given File row or by
    # hashing the open file object (rewound afterwards for storage).
    if file is not None:
        checksum = file.checksum
    elif fileobj is not None:
        hasher = hashlib.sha1()
        while True:
            blob = fileobj.read(16384)
            if not blob:
                break
            hasher.update(blob)
        checksum = hasher.hexdigest()
        fileobj.seek(0, 0)
    else:
        raise RuntimeError('missing file object')

    # Reuse an identical, fully-populated DIF if one is already stored.
    existing = (
        ProjectDebugFile.objects.select_related('file')
        .filter(
            project=project,
            debug_id=debug_id,
            file__checksum=checksum,
            data__isnull=False,
        )
        .order_by('-id')
        .first()
    )
    if existing is not None:
        return existing, False

    if file is None:
        file = File.objects.create(
            name=debug_id,
            type='project.dif',
            headers={'Content-Type': DIF_MIMETYPES[dif_type]},
        )
        file.putfile(fileobj)
    else:
        file.type = 'project.dif'
        file.headers['Content-Type'] = DIF_MIMETYPES[dif_type]
        file.save()

    dif = ProjectDebugFile.objects.create(
        file=file,
        debug_id=debug_id,
        cpu_name=cpu_name,
        object_name=object_name,
        project=project,
        data=data,
    )

    # The DIF we've just created might actually be removed here again.  But
    # since this can happen at any time in near or distant future, we don't
    # care and assume a successful upload.  The DIF will be reported to the
    # uploader and reprocessing can start.
    clean_redundant_difs(project, debug_id)

    resolve_processing_issue(
        project=project,
        scope='native',
        object='dsym:%s' % debug_id,
    )
    return dif, True
def create_dif_from_id(project, meta, fileobj=None, file=None):
    """This creates a mach dsym file or proguard mapping from the given
    debug id and open file object to a debug file.  This will not verify
    the debug id (intentionally so).  Use `detect_dif_from_path` to do
    that.

    Exactly one of ``fileobj`` or ``file`` must be provided.  Returns a
    tuple of ``(dif, created)``.
    """
    # Map the file format to the stored object name.
    if meta.file_format == "proguard":
        object_name = "proguard-mapping"
    elif meta.file_format in ("macho", "elf", "pdb", "pe", "wasm", "sourcebundle"):
        object_name = meta.name
    elif meta.file_format == "breakpad":
        # Breakpad symbol files drop their ".sym" suffix.
        object_name = meta.name[:-4] if meta.name.endswith(
            ".sym") else meta.name
    else:
        raise TypeError("unknown dif type %r" % (meta.file_format, ))

    # Determine the payload checksum, either from the pre-existing File
    # row or by hashing the open file object (rewound afterwards).
    if file is not None:
        checksum = file.checksum
    elif fileobj is not None:
        h = hashlib.sha1()
        while True:
            chunk = fileobj.read(16384)
            if not chunk:
                break
            h.update(chunk)
        checksum = h.hexdigest()
        fileobj.seek(0, 0)
    else:
        raise RuntimeError("missing file object")

    # Reuse an identical, fully-populated DIF if one is already stored.
    dif = (ProjectDebugFile.objects.select_related("file").filter(
        project=project, debug_id=meta.debug_id, checksum=checksum,
        data__isnull=False).order_by("-id").first())
    if dif is not None:
        return dif, False

    if file is None:
        file = File.objects.create(
            name=meta.debug_id,
            type="project.dif",
            headers={"Content-Type": DIF_MIMETYPES[meta.file_format]},
        )
        file.putfile(fileobj)
    else:
        # Re-tag the handed-in File row as a project DIF.
        file.type = "project.dif"
        file.headers["Content-Type"] = DIF_MIMETYPES[meta.file_format]
        file.save()

    dif = ProjectDebugFile.objects.create(
        file=file,
        checksum=file.checksum,
        debug_id=meta.debug_id,
        code_id=meta.code_id,
        cpu_name=meta.arch,
        object_name=object_name,
        project=project,
        data=meta.data,
    )

    # The DIF we've just created might actually be removed here again.  But
    # since this can happen at any time in near or distant future, we don't
    # care and assume a successful upload.  The DIF will be reported to the
    # uploader and reprocessing can start.
    clean_redundant_difs(project, meta.debug_id)

    resolve_processing_issue(project=project, scope="native",
                             object="dsym:%s" % meta.debug_id)

    return dif, True
def create_dsym_from_id(project, dsym_type, cpu_name, debug_id, basename,
                        fileobj=None, file=None):
    """This creates a mach dsym file or proguard mapping from the given
    debug id and open file object to a dsym file.  This will not verify
    the debug id (intentionally so).  Use `create_files_from_dsym_zip`
    for doing everything.

    Exactly one of ``fileobj`` or ``file`` must be provided.  Returns a
    tuple of ``(dsym_file, created)``.
    """
    # Map the dsym type to the stored object name.
    if dsym_type == 'proguard':
        object_name = 'proguard-mapping'
    elif dsym_type in ('macho', 'elf'):
        object_name = basename
    elif dsym_type == 'breakpad':
        # Breakpad symbol files drop their ".sym" suffix.
        object_name = basename[:-4] if basename.endswith('.sym') else basename
    else:
        raise TypeError('unknown dsym type %r' % (dsym_type, ))

    if file is None:
        # Raw file object given: hash it so identical re-uploads can be
        # detected, then rewind so the payload can be stored below.
        assert fileobj is not None, 'missing file object'
        h = hashlib.sha1()
        while 1:
            chunk = fileobj.read(16384)
            if not chunk:
                break
            h.update(chunk)
        checksum = h.hexdigest()
        fileobj.seek(0, 0)

        try:
            rv = ProjectDSymFile.objects.get(debug_id=debug_id, project=project)
            if rv.file.checksum == checksum:
                # Identical content already stored for this debug id.
                return rv, False
        except ProjectDSymFile.DoesNotExist:
            rv = None

        file = File.objects.create(
            name=debug_id,
            type='project.dsym',
            headers={'Content-Type': DSYM_MIMETYPES[dsym_type]},
        )
        file.putfile(fileobj)
        kwargs = {
            'file': file,
            'debug_id': debug_id,
            'cpu_name': cpu_name,
            'object_name': object_name,
            'project': project
        }

        if rv is None:
            # No previous record: insert, falling back to updating the
            # winner's row if a concurrent upload won the race.
            try:
                with transaction.atomic():
                    rv = ProjectDSymFile.objects.create(**kwargs)
            except IntegrityError:
                rv = ProjectDSymFile.objects.select_related('file') \
                    .get(debug_id=debug_id, project=project)
                oldfile = rv.file
                rv.update(**kwargs)
                oldfile.delete()
        else:
            # Previous record exists with a different checksum: swap in
            # the new file and delete the superseded blob.
            oldfile = rv.file
            rv.update(**kwargs)
            oldfile.delete()
    else:
        # A pre-existing File row was handed in directly.
        try:
            rv = ProjectDSymFile.objects.select_related('file') \
                .get(debug_id=debug_id, project=project)
        except ProjectDSymFile.DoesNotExist:
            try:
                with transaction.atomic():
                    rv = ProjectDSymFile.objects.create(
                        file=file,
                        debug_id=debug_id,
                        cpu_name=cpu_name,
                        object_name=object_name,
                        project=project,
                    )
            except IntegrityError:
                # Lost a race: replace the winner's file with ours.
                rv = ProjectDSymFile.objects.select_related('file') \
                    .get(debug_id=debug_id, project=project)
                oldfile = rv.file
                rv.update(file=file)
                oldfile.delete()
        else:
            # Record exists: swap in the handed-in file.
            oldfile = rv.file
            rv.update(file=file)
            oldfile.delete()

        # Make sure the stored file advertises the correct mimetype.
        rv.file.headers['Content-Type'] = DSYM_MIMETYPES[dsym_type]
        rv.file.save()

    # A dsym for this debug id is now available; clear pending processing
    # issues that referenced it.
    resolve_processing_issue(
        project=project,
        scope='native',
        object='dsym:%s' % debug_id,
    )
    return rv, True
def create_dsym_from_id(project, dsym_type, cpu_name, debug_id, basename,
                        fileobj=None, file=None):
    """This creates a mach dsym file or proguard mapping from the given
    debug id and open file object to a dsym file.  This will not verify
    the debug id (intentionally so).  Use `create_files_from_dsym_zip`
    for doing everything.

    Exactly one of ``fileobj`` or ``file`` must be provided.  Returns a
    tuple of ``(dsym_file, created)``.
    """
    # Map the dsym type to the stored object name.
    if dsym_type == 'proguard':
        object_name = 'proguard-mapping'
    elif dsym_type in ('macho', 'elf'):
        object_name = basename
    elif dsym_type == 'breakpad':
        # Breakpad symbol files drop their ".sym" suffix.
        object_name = basename[:-4] if basename.endswith('.sym') else basename
    else:
        raise TypeError('unknown dsym type %r' % (dsym_type, ))

    if file is None:
        # Raw file object given: hash it so identical re-uploads can be
        # detected, then rewind so the payload can be stored below.
        assert fileobj is not None, 'missing file object'
        h = hashlib.sha1()
        while 1:
            chunk = fileobj.read(16384)
            if not chunk:
                break
            h.update(chunk)
        checksum = h.hexdigest()
        fileobj.seek(0, 0)

        try:
            rv = ProjectDSymFile.objects.select_related('file') \
                .get(debug_id=debug_id, project=project)
            if rv.file.checksum == checksum:
                # Identical content already stored for this debug id.
                return rv, False
        except ProjectDSymFile.DoesNotExist:
            rv = None

        file = File.objects.create(
            name=debug_id,
            type='project.dsym',
            headers={'Content-Type': DSYM_MIMETYPES[dsym_type]},
        )
        file.putfile(fileobj)
        kwargs = {
            'file': file,
            'debug_id': debug_id,
            'cpu_name': cpu_name,
            'object_name': object_name,
            'project': project
        }

        if rv is None:
            # No previous record: insert, falling back to updating the
            # winner's row if a concurrent upload won the race.
            try:
                with transaction.atomic():
                    rv = ProjectDSymFile.objects.create(**kwargs)
            except IntegrityError:
                rv = ProjectDSymFile.objects.select_related('file') \
                    .get(debug_id=debug_id, project=project)
                oldfile = rv.file
                rv.update(**kwargs)
                oldfile.delete()
        else:
            # Previous record exists with a different checksum: swap in
            # the new file and delete the superseded blob.
            oldfile = rv.file
            rv.update(**kwargs)
            oldfile.delete()
    else:
        # A pre-existing File row was handed in directly.
        try:
            rv = ProjectDSymFile.objects.select_related('file') \
                .get(debug_id=debug_id, project=project)
        except ProjectDSymFile.DoesNotExist:
            try:
                with transaction.atomic():
                    rv = ProjectDSymFile.objects.create(
                        file=file,
                        debug_id=debug_id,
                        cpu_name=cpu_name,
                        object_name=object_name,
                        project=project,
                    )
            except IntegrityError:
                # Lost a race: replace the winner's file with ours.
                rv = ProjectDSymFile.objects.select_related('file') \
                    .get(debug_id=debug_id, project=project)
                oldfile = rv.file
                rv.update(file=file)
                oldfile.delete()
        else:
            # Record exists: swap in the handed-in file.
            oldfile = rv.file
            rv.update(file=file)
            oldfile.delete()

        # Make sure the stored file advertises the correct mimetype.
        rv.file.headers['Content-Type'] = DSYM_MIMETYPES[dsym_type]
        rv.file.save()

    # A dsym for this debug id is now available; clear pending processing
    # issues that referenced it.
    resolve_processing_issue(
        project=project,
        scope='native',
        object='dsym:%s' % debug_id,
    )
    return rv, True
def create_dif_from_id(project, meta, fileobj=None, file=None):
    """Creates the :class:`ProjectDebugFile` entry for the provided DIF.

    This creates the :class:`ProjectDebugFile` entry for the DIF provided in
    `meta` (a :class:`DifMeta` object).  If the correct entry already exists
    this simply returns the existing entry.

    It intentionally does not validate the file, only will ensure a
    :class:`File` entry exists and set its `ContentType` according to the
    provided :class:`DifMeta`.

    Returns a tuple of `(dif, created)` where `dif` is the
    `ProjectDebugFile` instance and `created` is a bool.
    """
    # Map the file format to the stored object name.
    if meta.file_format == "proguard":
        object_name = "proguard-mapping"
    elif meta.file_format in (
        "macho",
        "elf",
        "pdb",
        "pe",
        "wasm",
        "sourcebundle",
        "bcsymbolmap",
        "uuidmap",
    ):
        object_name = meta.name
    elif meta.file_format == "breakpad":
        # Breakpad symbol files drop their ".sym" suffix.
        object_name = meta.name[:-4] if meta.name.endswith(
            ".sym") else meta.name
    else:
        raise TypeError(f"unknown dif type {meta.file_format!r}")

    # Determine the payload checksum, either from the pre-existing File
    # row or by hashing the open file object (rewound afterwards).
    if file is not None:
        checksum = file.checksum
    elif fileobj is not None:
        h = hashlib.sha1()
        while True:
            chunk = fileobj.read(16384)
            if not chunk:
                break
            h.update(chunk)
        checksum = h.hexdigest()
        fileobj.seek(0, 0)
    else:
        raise RuntimeError("missing file object")

    # Reuse an identical, fully-populated DIF if one is already stored.
    dif = (ProjectDebugFile.objects.select_related("file").filter(
        project=project, debug_id=meta.debug_id, checksum=checksum,
        data__isnull=False).order_by("-id").first())
    if dif is not None:
        return dif, False

    if file is None:
        file = File.objects.create(
            name=meta.debug_id,
            type="project.dif",
            headers={"Content-Type": DIF_MIMETYPES[meta.file_format]},
        )
        file.putfile(fileobj)
    else:
        # Re-tag the handed-in File row as a project DIF.
        file.type = "project.dif"
        file.headers["Content-Type"] = DIF_MIMETYPES[meta.file_format]
        file.save()

    dif = ProjectDebugFile.objects.create(
        file=file,
        checksum=file.checksum,
        debug_id=meta.debug_id,
        code_id=meta.code_id,
        cpu_name=meta.arch,
        object_name=object_name,
        project=project,
        data=meta.data,
    )

    # The DIF we've just created might actually be removed here again.  But
    # since this can happen at any time in near or distant future, we don't
    # care and assume a successful upload.  The DIF will be reported to the
    # uploader and reprocessing can start.
    clean_redundant_difs(project, meta.debug_id)

    resolve_processing_issue(project=project, scope="native",
                             object="dsym:%s" % meta.debug_id)

    return dif, True
def create_dif_from_id(project, meta, fileobj=None, file=None):
    """Register a debug information file for ``meta.debug_id``.

    The debug id is intentionally not verified against the payload; use
    `detect_dif_from_path` for that.  Exactly one of ``fileobj`` or
    ``file`` must be given.  Returns a tuple of ``(dif, created)``.
    """
    fmt = meta.file_format
    if fmt == 'proguard':
        object_name = 'proguard-mapping'
    elif fmt in ('macho', 'elf'):
        object_name = meta.name
    elif fmt == 'breakpad':
        # Breakpad symbol files are stored without their ".sym" suffix.
        object_name = meta.name[:-4] if meta.name.endswith('.sym') else meta.name
    else:
        raise TypeError('unknown dif type %r' % (fmt, ))

    # Determine the payload checksum, either from the given File row or by
    # hashing the open file object (rewound afterwards for storage).
    if file is not None:
        checksum = file.checksum
    elif fileobj is not None:
        hasher = hashlib.sha1()
        while True:
            blob = fileobj.read(16384)
            if not blob:
                break
            hasher.update(blob)
        checksum = hasher.hexdigest()
        fileobj.seek(0, 0)
    else:
        raise RuntimeError('missing file object')

    # Reuse an identical, fully-populated DIF if one is already stored.
    existing = (
        ProjectDebugFile.objects.select_related('file')
        .filter(
            project=project,
            debug_id=meta.debug_id,
            file__checksum=checksum,
            data__isnull=False,
        )
        .order_by('-id')
        .first()
    )
    if existing is not None:
        return existing, False

    if file is None:
        file = File.objects.create(
            name=meta.debug_id,
            type='project.dif',
            headers={'Content-Type': DIF_MIMETYPES[fmt]},
        )
        file.putfile(fileobj)
    else:
        file.type = 'project.dif'
        file.headers['Content-Type'] = DIF_MIMETYPES[fmt]
        file.save()

    dif = ProjectDebugFile.objects.create(
        file=file,
        debug_id=meta.debug_id,
        code_id=meta.code_id,
        cpu_name=meta.arch,
        object_name=object_name,
        project=project,
        data=meta.data,
    )

    # The DIF we've just created might actually be removed here again.  But
    # since this can happen at any time in near or distant future, we don't
    # care and assume a successful upload.  The DIF will be reported to the
    # uploader and reprocessing can start.
    clean_redundant_difs(project, meta.debug_id)

    resolve_processing_issue(
        project=project,
        scope='native',
        object='dsym:%s' % meta.debug_id,
    )
    return dif, True
def _create_macho_dsym_from_uuid(project, cpu_name, uuid, fileobj, object_name):
    """This creates a mach dsym file from the given uuid and open file
    object to a dsym file.  This will not verify the uuid.  Use
    `create_files_from_macho_zip` for doing everything.

    When ``project`` is ``None`` the dsym is stored globally, otherwise it
    is scoped to the given project.  Returns the dsym model instance.
    """
    # Pick the model class and file type depending on whether this is a
    # project-scoped or a global dsym.
    extra = {}
    if project is None:
        cls = GlobalDSymFile
        file_type = 'global.dsym'
    else:
        cls = ProjectDSymFile
        extra['project'] = project
        file_type = 'project.dsym'

    # Hash the payload so an identical re-upload can be short-circuited,
    # then rewind so the content can still be stored below.
    h = hashlib.sha1()
    while 1:
        chunk = fileobj.read(16384)
        if not chunk:
            break
        h.update(chunk)
    checksum = h.hexdigest()
    fileobj.seek(0, 0)

    try:
        rv = cls.objects.get(uuid=uuid, **extra)
        if rv.file.checksum == checksum:
            # Identical content already stored for this uuid.
            return rv
    except cls.DoesNotExist:
        pass
    else:
        # The checksum mismatches.  In this case we delete the old object
        # and perform a re-upload.
        rv.delete()

    file = File.objects.create(
        name=uuid,
        type=file_type,
        headers={
            'Content-Type': 'application/x-mach-binary'
        },
    )
    file.putfile(fileobj)
    try:
        with transaction.atomic():
            rv = cls.objects.create(
                file=file,
                uuid=uuid,
                cpu_name=cpu_name,
                object_name=object_name,
                **extra
            )
    except IntegrityError:
        # Lost a race against a concurrent upload; keep the other row and
        # discard our freshly stored file blob.
        file.delete()
        rv = cls.objects.get(uuid=uuid, **extra)

    # A dsym for this uuid is now available; clear pending processing
    # issues that referenced it.
    resolve_processing_issue(
        project=project,
        scope='native',
        object='dsym:%s' % uuid,
    )
    return rv