# Example 1 (示例#1)
# 0
def api_upload_query():
    """Accept an uploaded custom query file and load it into ONEX.

    Validates that the multipart request carries the expected file part,
    that a file was actually selected, and that its extension is allowed.
    The upload is saved under a fixed path, any previously loaded custom
    query is unloaded, and every sequence of the new query dataset is
    returned as JSON along with the echoed request id.

    Raises:
        InvalidUsage: if the part is missing, no file was selected, or
            the file type is not allowed.
    """
    global current_q_index

    if UPLOAD_PART_NAME not in request.files:
        raise InvalidUsage("No '{}' part found".format(UPLOAD_PART_NAME))

    upload = request.files[UPLOAD_PART_NAME]
    if upload.filename == '':
        raise InvalidUsage('No selected file')
    if not _allowed_file(upload.filename):
        raise InvalidUsage('File type not allowed')

    # Ensure the destination directory exists before saving.
    if not os.path.exists(UPLOAD_FOLDER):
        os.makedirs(UPLOAD_FOLDER)

    destination = os.path.join(UPLOAD_FOLDER, UPLOAD_FILE_NAME)
    upload.save(destination)
    app.logger.info('Saved custom query to %s', destination)

    request_id = request.args.get('requestID', type=int)

    with lock:
        # Drop the previously loaded custom query (if any) before
        # loading the new one; -1 means nothing is currently loaded.
        if current_q_index != -1:
            onex.unloadDataset(current_q_index)
            app.logger.debug('Unloaded previous custom query')

        current_q_index = onex.loadDataset(destination)
        app.logger.debug('Loaded new custom query')

        return jsonify(queries=onex.getAllSequences(current_q_index, 1),
                       requestID=request_id)
# Example 2 (示例#2)
# 0
def api_dataset_init():
    """Load, normalize, and group the dataset selected by collection index.

    Query parameters:
        requestID (int): echoed back so the client can match the response.
        dsCollectionIndex (int): index into the ``datasets`` collection.
        st (float): similarity threshold for grouping (default 0.2).

    Returns:
        JSON with the dataset sequence count, optional metadata,
        normalization range, number of groups created, and the request id.

    Raises:
        InvalidUsage: if the collection index is missing/out of bounds or
            the similarity threshold is negative.
    """
    global current_collection_index, current_ds_index
    request_id = request.args.get('requestID', type=int)
    ds_collection_index = request.args.get('dsCollectionIndex', type=int)
    st = request.args.get('st', 0.2, type=float)

    with lock:
        # A missing or non-integer dsCollectionIndex parses to None; without
        # this guard the comparison below would raise TypeError (a 500)
        # instead of a clean client error.
        if ds_collection_index is None:
            raise InvalidUsage('Dataset collection index out of bound')

        if ds_collection_index >= len(datasets) or ds_collection_index < 0:
            raise InvalidUsage('Dataset collection index out of bound')

        if st < 0:
            raise InvalidUsage('Invalid similarity threshold value')

        # Unload the current dataset in memory (-1 means none loaded).
        if current_ds_index != -1:
            onex.unloadDataset(current_ds_index)
            app.logger.debug('Unloaded dataset %d', current_collection_index)

        # Load the new dataset.
        current_collection_index = ds_collection_index
        ds_path = str(datasets[current_collection_index].get('path'))
        ds_name = str(datasets[current_collection_index].get('name'))
        ds_metadata = datasets[current_collection_index].get('metadata')
        current_ds_index = onex.loadDataset(ds_path)

        # Metadata is optional; absence is logged but not an error.
        metadata = None
        if ds_metadata:
            with open(ds_metadata) as metadata_file:
                metadata = json.load(metadata_file)
        else:
            app.logger.info('No metadata found for dataset %s', ds_name)

        app.logger.debug('Loaded dataset %d [%s]', current_collection_index,
                         ds_name)

        # Normalize the new dataset; ONEX returns a (max, min) pair.
        app.logger.debug('Normalizing dataset %d', current_collection_index)
        normalization = onex.normalizeDataset(current_ds_index)

        normalization = {'max': normalization[0], 'min': normalization[1]}
        app.logger.info('Normalized dataset %d', current_collection_index)

        # Group the new dataset using the similarity threshold.
        app.logger.debug('Grouping dataset %d with st = %f',
                         current_collection_index, st)
        num_groups = onex.groupDataset(current_ds_index, st)
        app.logger.info('Grouped dataset %d with st = %f. Created %d groups',
                        current_collection_index, st, num_groups)

        # Return number of sequences in the dataset.
        ds_length = onex.getDatasetSeqCount(current_ds_index)

        return jsonify(dsLength=ds_length,
                       metadata=metadata,
                       normalization=normalization,
                       numGroups=num_groups,
                       requestID=request_id)