def post(self, request, project):
    """
    Assemble one or multiple chunks (FileBlob) into debug files
    ````````````````````````````````````````````````````````````

    :auth: required
    """
    # JSON schema for the request body: a mapping of 40-hex-char file
    # checksums to {name, debug_id?, chunks} descriptors.  Chunks are
    # themselves 40-hex-char blob checksums.
    schema = {
        "type": "object",
        "patternProperties": {
            "^[0-9a-f]{40}$": {
                "type": "object",
                "required": ["name", "chunks"],
                "properties": {
                    "name": {"type": "string"},
                    "debug_id": {"type": "string"},
                    "chunks": {
                        "type": "array",
                        "items": {"type": "string", "pattern": "^[0-9a-f]{40}$"},
                    },
                },
                "additionalProperties": True,
            }
        },
        "additionalProperties": False,
    }

    try:
        files = json.loads(request.body)
        jsonschema.validate(files, schema)
    except jsonschema.ValidationError as e:
        return Response({"error": str(e).splitlines()[0]}, status=400)
    except Exception:
        # Narrowed from BaseException: KeyboardInterrupt/SystemExit must
        # propagate instead of being turned into a 400 response.
        return Response({"error": "Invalid json body"}, status=400)

    file_response = {}

    for checksum, file_to_assemble in six.iteritems(files):
        name = file_to_assemble.get("name", None)
        debug_id = file_to_assemble.get("debug_id", None)
        chunks = file_to_assemble.get("chunks", [])

        # First, check the cached assemble status. During assembling, a
        # ProjectDebugFile will be created and we need to prevent a race
        # condition.
        state, detail = get_assemble_status(AssembleTask.DIF, project.id, checksum)
        if state == ChunkFileState.OK:
            file_response[checksum] = {
                "state": state,
                "detail": None,
                "missingChunks": [],
                "dif": detail,
            }
            continue
        elif state is not None:
            file_response[checksum] = {
                "state": state,
                "detail": detail,
                "missingChunks": [],
            }
            continue

        # Next, check if this project already owns the ProjectDebugFile.
        # This can under rare circumstances yield more than one file
        # which is why we use first() here instead of get().
        dif = (
            ProjectDebugFile.objects.filter(project=project, file__checksum=checksum)
            .select_related("file")
            .order_by("-id")
            .first()
        )
        if dif is not None:
            file_response[checksum] = {
                "state": ChunkFileState.OK,
                "detail": None,
                "missingChunks": [],
                "dif": serialize(dif),
            }
            continue

        # There is neither a known file nor a cached state, so we will
        # have to create a new file.  Assure that there are checksums.
        # If not, we assume this is a poll and report NOT_FOUND
        if not chunks:
            file_response[checksum] = {
                "state": ChunkFileState.NOT_FOUND,
                "missingChunks": [],
            }
            continue

        # Check if all requested chunks have been uploaded.
        missing_chunks = find_missing_chunks(project.organization, chunks)
        if missing_chunks:
            file_response[checksum] = {
                "state": ChunkFileState.NOT_FOUND,
                "missingChunks": missing_chunks,
            }
            continue

        # We don't have a state yet, this means we can now start
        # an assemble job in the background.
        set_assemble_status(AssembleTask.DIF, project.id, checksum,
                            ChunkFileState.CREATED)

        from sentry.tasks.assemble import assemble_dif
        assemble_dif.apply_async(
            kwargs={
                "project_id": project.id,
                "name": name,
                "debug_id": debug_id,
                "checksum": checksum,
                "chunks": chunks,
            }
        )

        file_response[checksum] = {
            "state": ChunkFileState.CREATED,
            "missingChunks": [],
        }

    return Response(file_response, status=200)
def post(self, request, project):
    """
    Assemble one or multiple chunks (FileBlob) into dsym files
    `````````````````````````````````````````````````````````

    :auth: required
    """
    # Request body: mapping of 40-hex-char checksums to
    # {name, chunks} descriptors.
    schema = {
        "type": "object",
        "patternProperties": {
            "^[0-9a-f]{40}$": {
                "type": "object",
                "required": ["name", "chunks"],
                "properties": {
                    "name": {"type": "string"},
                    "chunks": {
                        "type": "array",
                        "items": {"type": "string"},
                    },
                },
                "additionalProperties": False,
            }
        },
        "additionalProperties": False,
    }

    try:
        files = json.loads(request.body)
        jsonschema.validate(files, schema)
    except jsonschema.ValidationError as e:
        return Response({'error': str(e).splitlines()[0]}, status=400)
    except Exception:
        # Narrowed from BaseException (which also caught
        # KeyboardInterrupt/SystemExit); the bound name was unused.
        return Response({'error': 'Invalid json body'}, status=400)

    file_response = {}

    from sentry.tasks.assemble import assemble_dif
    for checksum, file_to_assemble in six.iteritems(files):
        name = file_to_assemble.get('name', None)
        chunks = file_to_assemble.get('chunks', [])
        try:
            found_files, response = self._check_file_blobs(
                project.organization, checksum, chunks)
            # This either returns a file OK because we already own all chunks
            # OR we return not_found with the missing chunks (or not owned)
            if response is not None:
                # We also found a file, we try to fetch project dsym to return more
                # information in the request
                file_response[checksum] = self._add_project_dsym_to_reponse(
                    found_files, response)
                continue
        except File.DoesNotExist:
            pass

        file, file_blob_ids = self._create_file_for_assembling(name, checksum, chunks)

        # Start the actual worker which does the assembling.
        assemble_dif.apply_async(
            kwargs={
                'project_id': project.id,
                'file_id': file.id,
                'file_blob_ids': file_blob_ids,
                'checksum': checksum,
            }
        )

        file_response[checksum] = self._create_file_response(
            ChunkFileState.CREATED
        )

    return Response(file_response, status=200)
def post(self, request, project):
    """
    Assemble one or multiple chunks (FileBlob) into dsym files
    `````````````````````````````````````````````````````````

    :auth: required
    """
    # Request body: mapping of 40-hex-char checksums to
    # {name, chunks} descriptors.
    schema = {
        "type": "object",
        "patternProperties": {
            "^[0-9a-f]{40}$": {
                "type": "object",
                "required": ["name", "chunks"],
                "properties": {
                    "name": {"type": "string"},
                    "chunks": {
                        "type": "array",
                        "items": {"type": "string"},
                    },
                },
                "additionalProperties": False,
            }
        },
        "additionalProperties": False,
    }

    try:
        files = json.loads(request.body)
        jsonschema.validate(files, schema)
    except jsonschema.ValidationError as e:
        return Response({'error': str(e).splitlines()[0]}, status=400)
    except Exception:
        # Narrowed from BaseException so KeyboardInterrupt/SystemExit
        # are not converted into a 400 response.
        return Response({'error': 'Invalid json body'}, status=400)

    file_response = {}

    from sentry.tasks.assemble import assemble_dif
    for checksum, file_to_assemble in six.iteritems(files):
        name = file_to_assemble.get('name', None)
        chunks = file_to_assemble.get('chunks', [])

        # First, check the cached assemble status. During assembling, a
        # ProjectDSymFile will be created and we need to prevent a race
        # condition.
        state, detail = get_assemble_status(project, checksum)
        if state is not None:
            file_response[checksum] = {
                'state': state,
                'detail': detail,
                'missingChunks': [],
            }
            continue

        # Next, check if this project already owns the DSymFile.
        # This can under rare circumstances yield more than one file
        # which is why we use first() here instead of get().
        dif = ProjectDSymFile.objects.filter(
            project=project, file__checksum=checksum
        ).select_related('file').first()
        if dif is not None:
            file_response[checksum] = {
                'state': ChunkFileState.OK,
                'detail': None,
                'missingChunks': [],
                'dif': serialize(dif),
            }
            continue

        # There is neither a known file nor a cached state, so we will
        # have to create a new file. Assure that there are checksums.
        # If not, we assume this is a poll and report NOT_FOUND
        if not chunks:
            file_response[checksum] = {
                'state': ChunkFileState.NOT_FOUND,
                'missingChunks': [],
            }
            continue

        # Check if all requested chunks have been uploaded.
        missing_chunks = find_missing_chunks(project.organization, chunks)
        if missing_chunks:
            file_response[checksum] = {
                'state': ChunkFileState.NOT_FOUND,
                'missingChunks': missing_chunks,
            }
            continue

        # We don't have a state yet, this means we can now start
        # an assemble job in the background.
        # BUG FIX: the original passed `state`, which is guaranteed to be
        # None on this path (the loop continues above whenever state is
        # not None), so no CREATED status was ever cached and subsequent
        # polls could not observe the in-progress job.
        set_assemble_status(project, checksum, ChunkFileState.CREATED)
        assemble_dif.apply_async(
            kwargs={
                'project_id': project.id,
                'name': name,
                'checksum': checksum,
                'chunks': chunks,
            }
        )

        file_response[checksum] = {
            'state': ChunkFileState.CREATED,
            'missingChunks': [],
        }

    return Response(file_response, status=200)
def post(self, request, project):
    """
    Assemble one or multiple chunks (FileBlob) into dsym files
    `````````````````````````````````````````````````````````

    :auth: required
    """
    # Request body: mapping of 40-hex-char checksums to
    # {name, chunks} descriptors.
    schema = {
        "type": "object",
        "patternProperties": {
            "^[0-9a-f]{40}$": {
                "type": "object",
                "required": ["name", "chunks"],
                "properties": {
                    "name": {"type": "string"},
                    "chunks": {
                        "type": "array",
                        "items": {"type": "string"},
                    },
                },
                "additionalProperties": False,
            }
        },
        "additionalProperties": False,
    }

    try:
        files = json.loads(request.body)
        jsonschema.validate(files, schema)
    except jsonschema.ValidationError as e:
        return Response({'error': str(e).splitlines()[0]}, status=400)
    except Exception:
        # Narrowed from BaseException so KeyboardInterrupt/SystemExit
        # are not converted into a 400 response.
        return Response({'error': 'Invalid json body'}, status=400)

    file_response = {}

    from sentry.tasks.assemble import assemble_dif
    for checksum, file_to_assemble in six.iteritems(files):
        name = file_to_assemble.get('name', None)
        chunks = file_to_assemble.get('chunks', [])

        # First, check if this project already owns the DSymFile.
        # NOTE(review): .get() will raise MultipleObjectsReturned if more
        # than one row matches — presumably rare; confirm whether first()
        # would be safer here.
        try:
            dif = ProjectDSymFile.objects.filter(
                project=project, file__checksum=checksum).get()
        except ProjectDSymFile.DoesNotExist:
            # It does not exist yet. Check the state we have in cache
            # in case this is a retry poll.
            state, detail = get_assemble_status(project, checksum)
            if state is not None:
                file_response[checksum] = {
                    'state': state,
                    'detail': detail,
                    'missingChunks': [],
                }
                continue

            # There is neither a known file nor a cached state, so we will
            # have to create a new file. Assure that there are checksums.
            # If not, we assume this is a poll and report NOT_FOUND
            if not chunks:
                file_response[checksum] = {
                    'state': ChunkFileState.NOT_FOUND,
                    'missingChunks': [],
                }
                continue

            # Check if all requested chunks have been uploaded.
            missing_chunks = find_missing_chunks(project.organization, chunks)
            if missing_chunks:
                file_response[checksum] = {
                    'state': ChunkFileState.NOT_FOUND,
                    'missingChunks': missing_chunks,
                }
                continue

            # We don't have a state yet, this means we can now start
            # an assemble job in the background.
            # BUG FIX: the original passed `state`, which is always None
            # on this path (guarded by the `if state is not None` above),
            # so no CREATED status was cached and retry polls saw nothing.
            set_assemble_status(project, checksum, ChunkFileState.CREATED)
            assemble_dif.apply_async(
                kwargs={
                    'project_id': project.id,
                    'name': name,
                    'checksum': checksum,
                    'chunks': chunks,
                })

            file_response[checksum] = {
                'state': ChunkFileState.CREATED,
                'missingChunks': [],
            }
        else:
            file_response[checksum] = {
                'state': ChunkFileState.OK,
                'detail': None,
                'missingChunks': [],
                'dif': serialize(dif),
            }

    return Response(file_response, status=200)