Code example #1
from typing import Tuple, Union
import codecs
import sys
import traceback
# s3_upload, ChunkRegistry, Participant, and Survey come from the surrounding codebase.

def batch_upload(upload: Tuple[Union[ChunkRegistry, dict], str, bytes, str]):
    """ Used for mapping an s3_upload call over a pool; pool map functions pass a
    single argument, so the four values are packed into one tuple and unpacked here. """
    ret = {'exception': None, 'traceback': None}
    try:
        if len(upload) != 4:
            # upload should have length 4; this is for debugging if it doesn't
            print(upload)
        chunk, chunk_path, new_contents, study_object_id = upload
        del upload  # drop the tuple reference once unpacked

        # guard against a stringified bytes object (e.g. "b'...'") having leaked into the path
        if "b'" in chunk_path:
            raise Exception(chunk_path)

        s3_upload(chunk_path,
                  codecs.decode(new_contents, "zip"),  # "zip" aliases the zlib bytes-to-bytes codec
                  study_object_id,
                  raw_path=True)
        print("data uploaded!", chunk_path)

        if isinstance(chunk, ChunkRegistry):
            # If the contents are being appended to an existing ChunkRegistry object
            chunk.file_size = len(new_contents)
            chunk.update_chunk_hash(new_contents)

        else:
            # If a new ChunkRegistry object is being created
            # Convert the IDs used in the S3 file names into primary keys for making ChunkRegistry FKs
            participant_pk, study_pk = Participant.objects.filter(
                patient_id=chunk['user_id']).values_list('pk',
                                                         'study_id').get()
            if chunk['survey_id']:
                survey_pk = Survey.objects.filter(
                    object_id=chunk['survey_id']).values_list('pk',
                                                              flat=True).get()
            else:
                survey_pk = None

            ChunkRegistry.register_chunked_data(
                chunk['data_type'],
                chunk['time_bin'],
                chunk['chunk_path'],
                new_contents,  # unlikely to be huge
                study_pk,
                participant_pk,
                survey_pk,
            )

    # it broke. print stacktrace for debugging
    except Exception as e:
        traceback.print_exc()
        ret['traceback'] = sys.exc_info()
        ret['exception'] = e

    return ret
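Because pool map functions hand each worker a single argument, a driver typically zips the chunks, paths, contents, and study IDs into tuples and maps batch_upload over them. A minimal sketch of that calling side, assuming a thread pool of four workers and an `uploads` list the caller has already built (neither is part of the original code):

# Hedged sketch of the calling side; the pool size and the shape of `uploads`
# are assumptions, not part of the original code.
from multiprocessing.pool import ThreadPool

def run_batch(uploads):
    # uploads: list of (chunk, chunk_path, new_contents, study_object_id) tuples
    with ThreadPool(4) as pool:
        results = pool.map(batch_upload, uploads)
    # the workers swallow exceptions into their return dicts, so surface them here
    failures = [r for r in results if r['exception'] is not None]
    return results, failures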
Code example #2
from typing import Tuple, Union
import sys
import traceback
# s3_upload, ChunkRegistry, decompress, make_error_sentry, and SentryTypes
# come from the surrounding codebase.

def batch_upload(upload: Tuple[Union[ChunkRegistry, dict], str, bytes, str]):
    """ Used for mapping an s3_upload call over a pool; pool map functions pass a
    single argument, so the four values are packed into one tuple and unpacked here. """

    ret = {'exception': None, 'traceback': None}
    with make_error_sentry(sentry_type=SentryTypes.data_processing):
        try:
            chunk, chunk_path, new_contents, study_object_id = upload
            del upload
            new_contents = decompress(new_contents)

            # guard against a stringified bytes object (e.g. "b'...'") having leaked into the path
            if "b'" in chunk_path:
                raise Exception(chunk_path)

            # for use with test script to avoid network uploads
            # with open("processing_tests/" + GLOBAL_TIMESTAMP, 'ba') as f:
            #     f.write(b"\n\n")
            #     f.write(new_contents)
            #     return ret

            s3_upload(chunk_path, new_contents, study_object_id, raw_path=True)

            # if the chunk object is a chunk registry then we are updating an old one,
            # otherwise we are creating a new one.
            if isinstance(chunk, ChunkRegistry):
                # If the contents are being appended to an existing ChunkRegistry object
                chunk.file_size = len(new_contents)
                chunk.update_chunk(new_contents)
            else:
                ChunkRegistry.register_chunked_data(**chunk,
                                                    file_contents=new_contents)

        # it broke. print stacktrace for debugging
        except Exception as e:
            traceback.print_exc()
            ret['traceback'] = sys.exc_info()
            ret['exception'] = e

            # using an error sentry we can easily report a real error with a real stack trace! :D
            raise

    return ret
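Code example #2 moves decompression into a `decompress` helper defined elsewhere in the codebase. Given the `codecs.decode(new_contents, "zip")` call in example #1, it is presumably a thin zlib wrapper; a minimal sketch under that assumption:

# Assumed implementation of the decompress helper used above; the real one
# lives elsewhere in the codebase. zlib matches the "zip" codec in example #1.
import zlib

def decompress(data: bytes) -> bytes:
    # inverse of codecs.encode(data, "zip"), i.e. plain zlib decompression
    return zlib.decompress(data)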
Code example #3
import codecs
from traceback import format_exc
# s3_upload, ChunkRegistry, Participant, and Survey come from the surrounding codebase.

def batch_upload(upload):
    """ Used for mapping an s3_upload call over a pool. """
    ret = {'exception': None, 'traceback': None}
    try:
        if len(upload) != 4:
            # upload should have length 4; this is for debugging if it doesn't
            print(upload)
        chunk, chunk_path, new_contents, study_object_id = upload
        del upload
        # bytes.decode("zip") only works on Python 2; Python 3 needs the codecs module
        new_contents = codecs.decode(new_contents, "zip")
        s3_upload(chunk_path, new_contents, study_object_id, raw_path=True)
        print("data uploaded!", chunk_path)
        if isinstance(chunk, ChunkRegistry):
            # If the contents are being appended to an existing ChunkRegistry object
            chunk.low_memory_update_chunk_hash(new_contents)
        else:
            # If a new ChunkRegistry object is being created
            # Convert the IDs used in the S3 file names into primary keys for making ChunkRegistry FKs
            participant_pk, study_pk = Participant.objects.filter(
                patient_id=chunk['user_id']).values_list('pk',
                                                         'study_id').get()
            if chunk['survey_id']:
                survey_pk = Survey.objects.filter(
                    object_id=chunk['survey_id']).values_list('pk',
                                                              flat=True).get()
            else:
                survey_pk = None
            ChunkRegistry.register_chunked_data(
                chunk['data_type'],
                chunk['time_bin'],
                chunk['chunk_path'],
                new_contents,  # unlikely to be huge
                study_pk,
                participant_pk,
                survey_pk,
            )
    except Exception as e:
        # format_exc() formats the active exception; it does not take the exception object
        ret['traceback'] = format_exc()
        ret['exception'] = e
    return ret
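All three variants report failure through the returned dict rather than raising (example #2 additionally re-raises into its error sentry), so the caller must inspect the results itself. A sketch of that pattern; `check_results` is an illustrative name, not from the original:

# Hypothetical helper for the calling side: re-raise the first stored exception.
def check_results(results):
    for ret in results:
        if ret['exception'] is not None:
            print(ret['traceback'])  # exc_info tuple or format_exc() string, per variant
            raise ret['exception']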