def get_exam_aggregator(name):
    '''Look up and return the exam aggregator registered under `name`.'''
    if name not in AGGREGATOR_REGISTRY:
        error_msg = NO_AGGREGATOR_ERR.format(name, AGGREGATOR_REGISTRY.keys())
        logger.error(error_msg)
        raise Exception(error_msg)
    logger.info(AGGREGATOR_SUCCESS_MSG.format(name))
    return AGGREGATOR_REGISTRY[name]
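

# get_exam_aggregator above assumes AGGREGATOR_REGISTRY is a module-level dict
# mapping aggregator names to aggregator implementations. The registration
# sketch below is a hypothetical illustration (the decorator name and the
# 'max' key are assumptions, not the project's confirmed API):
def RegisterAggregator(name):
    '''Decorator that records an aggregator under `name` in the registry.'''
    def decorator(aggregator):
        AGGREGATOR_REGISTRY[name] = aggregator
        return aggregator
    return decorator


@RegisterAggregator('max')
def max_exam_aggregator(image_scores):
    '''Hypothetical aggregator: the exam score is the max per-image score.'''
    return max(image_scores)
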
def get_risk_factors(args, ssn, exam, risk_metadata_json, json_dir, logger):
    '''
    args:
        - args: argument namespace; its metadata_path and
            risk_factor_metadata_path fields are set to temporary JSON files.
        - ssn: patient identifier for the request.
        - exam: accession number of the exam.
        - risk_metadata_json: risk factor metadata, keyed by ssn.
        - json_dir: directory in which to write the temporary metadata JSONs.
        - logger: logger for status and error messages.
    returns:
        - risk_factor_vector: risk factor vector for the (ssn, exam) sample.
    '''
    # Create the JSON directory if it does not already exist.
    os.makedirs(json_dir, exist_ok=True)
    args.metadata_path = "{}.json".format(
        os.path.join(json_dir, str(uuid.uuid4())))
    args.risk_factor_metadata_path = "{}.json".format(
        os.path.join(json_dir, str(uuid.uuid4())))

    # Write the current request to a temporary file to serve as the metadata path
    prior_hist = risk_metadata_json[ssn]['any_breast_cancer'] == 1
    metadata_json = [{
        'ssn': ssn,
        'accessions': [{
            'accession': exam,
            'prior_hist': prior_hist
        }]
    }]

    try:
        with open(args.metadata_path, 'w') as metadata_file:
            json.dump(metadata_json, metadata_file)
        with open(args.risk_factor_metadata_path, 'w') as risk_metadata_file:
            json.dump(risk_metadata_json, risk_metadata_file)
    except Exception as e:
        delete_jsons(args)
        err_msg = FAIL_TO_SAVE_METADATA_MESSAGE.format(ssn, exam, e, args)
        logger.error(err_msg)
        raise Exception(err_msg)

    # Load the risk factor vector from the metadata files, then delete the temporary JSONs
    try:
        risk_factor_vectorizer = RiskFactorVectorizer(args)
        sample = {'ssn': ssn, 'exam': exam}
        risk_factor_vector = risk_factor_vectorizer.get_risk_factors_for_sample(
            sample)
        logger.info(SUCCESS_RISK_VEC_MESSAGE.format(ssn, exam, args))
        delete_jsons(args)
        return risk_factor_vector
    except Exception as e:
        delete_jsons(args)
        err_msg = FAIL_TO_GET_RISK_VECTOR_MESSAGE.format(ssn, exam, e, args)
        logger.error(err_msg)
        raise Exception(err_msg)
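

# delete_jsons is used above but its definition is not part of this snippet.
# A minimal sketch, assuming it simply removes the two temporary metadata
# files written by get_risk_factors:
def delete_jsons(args):
    '''Remove the temporary metadata JSONs, ignoring files that are missing.'''
    for path in (args.metadata_path, args.risk_factor_metadata_path):
        try:
            os.remove(path)
        except OSError:
            pass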
def serve():
    '''
        API to serve a model from OncoNet
        Takes a list of dicom files and a dict of optional metadata keys,
        e.g. { dicoms: [bytes, bytes, bytes], data: { optional metadata } },
        and returns:
            { prediction: Y, metadata: {}, model_name: NAME,
            oncoserve_version: X.X.X, onconet_version: X.X.X,
            oncodata_version: X.X.X }

        Prediction is the exam-level prediction over the dicoms.
        Model_name identifies which predictor is running, e.g. density, risk, etc.
        Metadata is meant to contain things like MRN, ACCESSION and any
        additional metadata for object tracking.
        OncoServe_version is the version of the model deployment framework.
        OncoNet_version is the version of the research model framework.
        OncoData_version is the version of the dicom conversion framework.

        The configuration of the model used to produce Y is set in the app
        configuration. See config.py for config objects.
    '''
    logger.info("Serving request...")
    response = {
        'model_name': app.config['NAME'],
        'oncoserve_version': app.config['ONCOSERVE_VERSION'],
        'onconet_version': app.config['ONCONET_VERSION'],
        'oncodata_version': app.config['ONCODATA_VERSION'],
        'log_file': LOG_FILE
    }
    try:
        dicoms = request.files.getlist('dicom')
        metadata = request.form
        response['metadata'] = metadata
        images = oncodata_wrapper.get_pngs(dicoms, oncodata_args, logger)
        logger.info(ONCODATA_SUCCESS_MSG)
        if onconet_args.use_risk_factors:
            assert 'mrn' in metadata
            assert 'accession' in metadata
            risk_factors = json.loads(
                request.files.getlist('risk_factors')[0].read())
            risk_factor_vector = oncoqueries_wrapper.get_risk_factors(
                onconet_args, metadata['mrn'], metadata['accession'],
                risk_factors, oncodata_args.temp_img_dir, logger)
            logger.info(ONCOQUERIES_SUCCESS_MSG)
        else:
            risk_factor_vector = None
        y = onconet.process_exam(images, risk_factor_vector)
        logger.info(ONCONET_SUCCESS_MSG)
        msg = 'OK'
        response['prediction'] = y
        response['msg'] = msg
        return jsonify(response), HTTP_200_OK

    except Exception as e:
        msg = ONCOSERVE_FAIL_MSG.format(str(e))
        response['prediction'] = None
        response['msg'] = msg
        return jsonify(response), HTTP_500_INTERNAL_SERVER_ERROR
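

# A minimal client-side sketch for exercising serve() above, using the
# requests library. The endpoint URL/path is an assumption (the route
# decorator is not shown in this snippet); the 'dicom' and 'risk_factors'
# file fields and the 'mrn'/'accession' form fields mirror what serve()
# reads from the request. The helper name example_client is hypothetical.
def example_client(url, dicom_paths, mrn, accession, risk_factors_path=None):
    import requests

    # One 'dicom' entry per file, matching request.files.getlist('dicom').
    files = [('dicom', open(path, 'rb')) for path in dicom_paths]
    if risk_factors_path is not None:
        # Only needed when the deployed model uses risk factors.
        files.append(('risk_factors', open(risk_factors_path, 'rb')))
    try:
        response = requests.post(
            url, files=files, data={'mrn': mrn, 'accession': accession})
        return response.json()
    finally:
        for _, file_handle in files:
            file_handle.close()
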
    def __init__(self, args, aggregator_name, logger):
        logger.info(INIT_MESSAGE)
        self.args = args
        args.cuda = args.cuda and torch.cuda.is_available()
        args.test_image_transformers = parsing.parse_transformers(
            args.test_image_transformers)
        args.test_tensor_transformers = parsing.parse_transformers(
            args.test_tensor_transformers)
        test_transformers = transformer_factory.get_transformers(
            args.test_image_transformers, args.test_tensor_transformers, args)

        self.transformer = ComposeTrans(test_transformers)
        logger.info(TRANSF_MESSAGE)
        self.model = torch.load(args.snapshot, map_location='cpu')
        # Unpack models that were trained with nn.DataParallel
        if isinstance(self.model, nn.DataParallel):
            self.model = self.model.module
        # Set use_precomputed_hiddens for models trained before it was introduced.
        # Assumes a resnet base backbone.
        try:
            self.model._model.args.use_precomputed_hiddens = args.use_precomputed_hiddens
            self.model._model.args.cuda = args.cuda
        except Exception:
            pass
        # Load calibrator if provided
        if args.callibrator_path is not None:
            self.callibrator = pickle.load(open(args.callibrator_path, 'rb'))
        else:
            self.callibrator = None

        logger.info(MODEL_MESSAGE.format(args.snapshot))
        self.aggregator = aggregator_factory.get_exam_aggregator(
            aggregator_name)

        self.logger = logger
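
    # process_exam is called from serve() above but its body is not part of
    # this snippet. The sketch below is an assumption-laden illustration of
    # how the pieces built in __init__ could fit together (test-time
    # transforms, forward pass, exam-level aggregation, optional
    # calibration). The transformer, model, aggregator, and callibrator
    # interfaces used here are hypothetical, not the project's confirmed API.
    def process_exam(self, images, risk_factor_vector):
        # Apply test-time transforms and stack per-image tensors into a batch.
        batch = torch.stack([self.transformer(image) for image in images])
        if self.args.cuda:
            batch = batch.cuda()
        with torch.no_grad():
            # Hypothetical forward signature; how risk_factor_vector is fed to
            # the model (if at all) depends on args.use_risk_factors.
            logits = self.model(batch)
            image_probs = torch.sigmoid(logits).view(-1).tolist()
        # Aggregate per-image probabilities into a single exam-level score.
        exam_prob = self.aggregator(image_probs)
        if self.callibrator is not None:
            # Assumes an sklearn-style classifier calibrator.
            exam_prob = float(
                self.callibrator.predict_proba([[exam_prob]])[0][1])
        return exam_prob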


if __name__ == '__main__':
    port = app.config['PORT']
    logger.info("Launching app at port {}".format(port))
    app.run(host='0.0.0.0', port=port)