Example 1
    def test_custom_job_normal_workflow(self):
        """End-to-end prediction-job flow: upload a FASTA list, create and
        start a job, save BED results, then read them back unsorted,
        sorted by max value, and paged — finally checking JSON output."""
        short_seq = 'AAACCCGGGGTT'
        long_seq = ('AAACCCGGGGTTAAACCCGGGGTTAAACCCGGGGTTAAACCCGGGGTTAAACCCGGGGTT'
                    'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')
        fasta_data = '\n'.join(['>someseq', short_seq, '>someseq2', long_seq])
        db = create_db_connection(TestWithPostgres.config.dbconfig)
        # Upload the FASTA file and create a prediction job for it.
        sequence_list = SequenceList.create_with_content_and_title(db, fasta_data, "sometitle")
        job = CustomJob.create_job(db, DataType.PREDICTION, sequence_list, model_name="E2f1")
        CustomJob.set_job_running(db, job.uuid)
        # Store the BED-formatted result for the job.
        bed_data = '\n'.join([
            'someseq\t0\t10\t12.5\tAAACCCGGGG',
            'someseq2\t20\t30\t4.5\tGGTTAAACCC',
            'someseq2\t60\t75\t15.5\tAAAAAAAAAAAAAAA',
        ])
        result_uuid = CustomResultData.new_uuid()
        CustomResultData(db, result_uuid, job.uuid, model_name='E2f1', bed_data=bed_data).save()
        self.assertEqual(bed_data, CustomResultData.bed_file_contents(db, result_uuid).strip())

        # Unsorted retrieval preserves input-sequence order.
        unsorted_rows = CustomResultData.get_predictions(db, result_uuid, sort_max_value=False,
                                                         limit=None, offset=None)
        self.assertEqual(2, len(unsorted_rows))
        first, second = unsorted_rows
        self.assertEqual('someseq', first['name'])
        self.assertEqual(12.5, float(first['max']))
        self.assertEqual([{u'start': 0, u'end': 10, u'value': 12.5}], first['values'])
        self.assertEqual(short_seq, first['sequence'])

        self.assertEqual('someseq2', second['name'])
        self.assertEqual(15.5, float(second['max']))
        self.assertEqual(long_seq, second['sequence'])

        # Sorting by max value puts the highest-scoring sequence first.
        sorted_rows = CustomResultData.get_predictions(db, result_uuid, sort_max_value=True,
                                                       limit=None, offset=None)
        self.assertEqual(2, len(sorted_rows))
        self.assertEqual(15.5, float(sorted_rows[0]['max']))
        self.assertEqual(12.5, float(sorted_rows[1]['max']))

        # Paging: limit 1 / offset 1 skips the top row.
        paged_rows = CustomResultData.get_predictions(db, result_uuid, sort_max_value=True,
                                                      limit=1, offset=1)
        self.assertEqual(1, len(paged_rows))
        self.assertEqual(12.5, float(paged_rows[0]['max']))

        # Predictions must be serializable to JSON for the API layer.
        json_version = json.dumps({'data': paged_rows})
        self.assertEqual('{"data', json_version[:6])
    def test_custom_job_normal_workflow(self):
        """Full prediction-job lifecycle: FASTA upload, job creation and
        start, BED result storage, then unsorted, sorted, and paged
        retrieval plus a JSON-serializability check."""
        fasta = ('>someseq\nAAACCCGGGGTT\n>someseq2\n'
                 'AAACCCGGGGTTAAACCCGGGGTTAAACCCGGGGTTAAACCCGGGGTTAAACCCGGGGTT')
        db = create_db_connection(TestWithPostgres.config.dbconfig)
        # Upload FASTA file and create/start the prediction job.
        sequence_list = SequenceList.create_with_content_and_title(db, fasta, "sometitle")
        job = CustomJob.create_job(db, DataType.PREDICTION, sequence_list, model_name="E2f1")
        CustomJob.set_job_running(db, job.uuid)
        # Attach BED results (name, start, end, value per row).
        bed_payload = ('someseq\t0\t10\t12.5\n'
                       'someseq2\t20\t30\t4.5\n'
                       'someseq2\t60\t75\t15.5')
        result_uuid = CustomResultData.new_uuid()
        CustomResultData(db, result_uuid, job.uuid, model_name='E2f1', bed_data=bed_payload).save()
        self.assertEqual(bed_payload, CustomResultData.bed_file_contents(db, result_uuid).strip())

        # Default (unsorted) query returns one row per input sequence.
        rows = CustomResultData.get_predictions(db, result_uuid, sort_max_value=False,
                                                limit=None, offset=None)
        self.assertEqual(2, len(rows))
        self.assertEqual('someseq', rows[0]['name'])
        self.assertEqual(12.5, float(rows[0]['max']))
        self.assertEqual([{u'start': 0, u'end': 10, u'value': 12.5}], rows[0]['values'])
        self.assertEqual('AAACCCGGGGTT', rows[0]['sequence'])
        self.assertEqual('someseq2', rows[1]['name'])
        self.assertEqual(15.5, float(rows[1]['max']))
        self.assertEqual('AAACCCGGGGTTAAACCCGGGGTTAAACCCGGGGTTAAACCCGGGGTTAAACCCGGGGTT', rows[1]['sequence'])

        # Sorted query orders rows by descending max value.
        rows = CustomResultData.get_predictions(db, result_uuid, sort_max_value=True,
                                                limit=None, offset=None)
        self.assertEqual(2, len(rows))
        self.assertEqual(15.5, float(rows[0]['max']))
        self.assertEqual(12.5, float(rows[1]['max']))

        # Paged query (limit 1, offset 1) yields only the second-best row.
        rows = CustomResultData.get_predictions(db, result_uuid, sort_max_value=True,
                                                limit=1, offset=1)
        self.assertEqual(1, len(rows))
        self.assertEqual(12.5, float(rows[0]['max']))

        # Predictions must round-trip through json.dumps for the API.
        json_version = json.dumps({'data': rows})
        self.assertEqual('{"data', json_version[:6])
Example 3
def search_custom_results(result_id):
    """
    Search a result for predictions.
    request['maxPredictionSort'] - when true sort by max prediction
    request['all'] - include values in download
    request['page'] - which page of results to show
    request['per_page'] - items per page to show
    :param result_id: str: uuid of the custom_predictions/custom_preferences we want to search
    :return: json response with 'result' property containing an array of predictions
    """
    args = request.args
    # Renamed from `format` to avoid shadowing the builtin.
    fmt = args.get('format')
    sort_by_max = args.get('maxPredictionSort')
    # Query values arrive as strings; an explicit 'false' means "no sort".
    if sort_by_max == 'false':
        sort_by_max = None
    all_values = args.get('all')
    page = get_optional_int(args, 'page')
    per_page = get_optional_int(args, 'per_page')
    # Only page when both paging parameters are present (pages are 1-based).
    offset = None
    if page and per_page:
        offset = (page - 1) * per_page

    predictions = CustomResultData.get_predictions(get_db(), result_id, sort_by_max, per_page, offset)
    if fmt in ('tsv', 'csv'):
        filename = "custom_result.{}".format(fmt)
        separator = '\t' if fmt == 'tsv' else ','
        return download_file_response(filename, make_download_custom_result(separator, all_values, predictions))
    return make_ok_json_response({
        'result': predictions})
Example 4
def search_custom_results(result_id):
    """
    Search a result for predictions.
    request['maxPredictionSort'] - when true sort by max prediction
    request['all'] - include values in download
    request['page'] - which page of results to show
    request['per_page'] - items per page to show
    :param result_id: str: uuid of the custom_predictions/custom_preferences we want to search
    :return: json response with 'result' property containing an array of predictions
    """
    args = request.args
    # Renamed from `format` to avoid shadowing the builtin.
    fmt = args.get('format')
    sort_by_max = args.get('maxPredictionSort')
    # Query values arrive as strings; an explicit 'false' means "no sort".
    if sort_by_max == 'false':
        sort_by_max = None
    all_values = args.get('all')
    page = get_optional_int(args, 'page')
    per_page = get_optional_int(args, 'per_page')
    # Only page when both paging parameters are present (pages are 1-based).
    offset = None
    if page and per_page:
        offset = (page - 1) * per_page

    predictions = CustomResultData.get_predictions(get_db(), result_id,
                                                   sort_by_max, per_page,
                                                   offset)
    if fmt in ('tsv', 'csv'):
        filename = "custom_result.{}".format(fmt)
        separator = '\t' if fmt == 'tsv' else ','
        return download_file_response(
            filename,
            make_download_custom_result(separator, all_values, predictions))
    return make_ok_json_response({'result': predictions})
    def test_custom_job_no_data(self):
        """A result saved with an empty BED payload still yields one row
        per input sequence, with an empty values list and 'None' max."""
        fasta = '>someseq\nAAACCCGGGGTT'
        db = create_db_connection(TestWithPostgres.config.dbconfig)
        # Upload FASTA file, then create and start a prediction job.
        sequence_list = SequenceList.create_with_content_and_title(db, fasta, "somelist")
        job = CustomJob.create_job(db, DataType.PREDICTION, sequence_list, model_name='E2f1')
        CustomJob.set_job_running(db, job.uuid)
        # Save a result with no BED data at all.
        result_uuid = CustomResultData.new_uuid()
        CustomResultData(db, result_uuid, job.uuid, model_name='E2f1', bed_data='').save()

        rows = CustomResultData.get_predictions(db, result_uuid, sort_max_value=False,
                                                limit=None, offset=None)
        self.assertEqual(1, len(rows))
        only_row = rows[0]
        self.assertEqual('someseq', only_row['name'])
        # With no predictions the max comes back as the string 'None'.
        self.assertEqual('None', only_row['max'])
        self.assertEqual([], only_row['values'])
        self.assertEqual('AAACCCGGGGTT', only_row['sequence'])
        # Predictions must be serializable to JSON for the API layer.
        json_version = json.dumps({'data': rows})
        self.assertEqual('{"data', json_version[:6])
Example 6
    def test_custom_job_no_data(self):
        """Empty BED result: querying still returns the input sequence as a
        single row whose max is the string 'None' and values are empty."""
        db = create_db_connection(TestWithPostgres.config.dbconfig)
        # Upload a one-sequence FASTA list and start a job for it.
        sequence_list = SequenceList.create_with_content_and_title(
            db, '>someseq\nAAACCCGGGGTT', "somelist")
        job_uuid = CustomJob.create_job(
            db, DataType.PREDICTION, sequence_list, model_name='E2f1').uuid
        CustomJob.set_job_running(db, job_uuid)
        # Persist a result whose BED payload is empty.
        result_uuid = CustomResultData.new_uuid()
        result = CustomResultData(db, result_uuid, job_uuid, model_name='E2f1', bed_data='')
        result.save()

        predictions = CustomResultData.get_predictions(
            db, result_uuid, sort_max_value=False, limit=None, offset=None)
        self.assertEqual(1, len(predictions))
        row = predictions[0]
        # No BED rows means no values and a stringified 'None' max.
        self.assertEqual('someseq', row['name'])
        self.assertEqual('None', row['max'])
        self.assertEqual([], row['values'])
        self.assertEqual('AAACCCGGGGTT', row['sequence'])
        # JSON serialization must succeed for the API layer.
        self.assertEqual('{"data', json.dumps({'data': predictions})[:6])