def prediction():
    """Returns predictions for a given dataset.

    CLI entry point: expects the model id, the output directory and the
    samples file in sys.argv. Prints the result as JSON and exits with
    the result status code.
    """

    # Guard clause: not enough CLI arguments to run.
    if len(sys.argv) < 4:
        error = {
            'runid': str(int(time.time())),
            'status': estimator.GENERAL_ERROR,
            'info': [
                'Missing arguments, you should set:\
    - The model unique identifier\
    - The directory to store all generated outputs\
    - The file with samples to predict\
    Received: ' + ' '.join(sys.argv)
            ],
        }
        print(json.dumps(error))
        sys.exit(error['status'])

    modelid, directory, dataset = sys.argv[1:4]

    # TensorFlow binary classifier - NN.
    nn_classifier = estimator.Classifier(modelid, directory, dataset)
    prediction_result = nn_classifier.predict_dataset(dataset)

    print(json.dumps(prediction_result))
    sys.exit(prediction_result['status'])
# Example 2
def evaluation():
    """Web entry point: evaluates the uploaded dataset and returns JSON."""

    uniquemodelid = get_request_value('uniqueid')
    modeldir = storage.get_model_dir('dirhash')

    # Numeric request parameters, sanitised with allow-list patterns.
    minscore = get_request_value('minscore', pattern='[^0-9.$]')
    maxdeviation = get_request_value('maxdeviation', pattern='[^0-9.$]')
    niterations = get_request_value('niterations', pattern='[^0-9$]')

    datasetpath = get_file_path(storage.get_localbasedir(), 'dataset')

    trainedmodeldirhash = get_request_value('trainedmodeldirhash',
                                            exception=False)
    # When no trained model hash is supplied, evaluate without a
    # pre-trained model; otherwise fetch the namespaced trained model dir
    # (namespaced by uniquemodelid and the hash), which also determines
    # where the results should be stored.
    if trainedmodeldirhash is False:
        trainedmodeldir = False
    else:
        trainedmodeldir = storage.get_model_dir('trainedmodeldirhash',
                                                fetch_model=True)

    classifier = estimator.Classifier(uniquemodelid, modeldir, datasetpath)
    evaluation_result = classifier.evaluate_dataset(
        datasetpath, float(minscore), float(maxdeviation),
        int(niterations), trainedmodeldir)

    return json.dumps(evaluation_result)
# Example 3
def training():
    """Trains a ML classifier.

    CLI entry point: expects the model id, the output directory and the
    training file in sys.argv. Prints the result as JSON and exits with
    the result status code.
    """

    # Not enough CLI arguments: report the error as JSON and stop.
    if len(sys.argv) < 4:
        error = {
            'runid': str(int(time.time())),
            'status': estimator.GENERAL_ERROR,
            'info': [
                'Missing arguments, you should set:\
    - The model unique identifier\
    - The directory to store all generated outputs\
    - The training file\
    Received: ' + ' '.join(sys.argv)
            ],
        }
        print(json.dumps(error))
        sys.exit(error['status'])

    modelid, directory, dataset = sys.argv[1:4]

    trainer = estimator.Classifier(modelid, directory, dataset)
    training_result = trainer.train_dataset(dataset)

    print(json.dumps(training_result))
    sys.exit(training_result['status'])
# Example 4
def prediction():
    """Web entry point: returns predictions for the uploaded dataset."""

    uniquemodelid = get_request_value('uniqueid')
    modeldir = storage.get_model_dir('dirhash')

    datasetpath = get_file_path(storage.get_localbasedir(), 'dataset')

    predictor = estimator.Classifier(uniquemodelid, modeldir, datasetpath)
    prediction_result = predictor.predict_dataset(datasetpath)

    return json.dumps(prediction_result)
# Example 5
def export_classifier():
    """Exports the classifier.

    CLI entry point: expects the model unique identifier, the model
    directory and the export target directory in sys.argv. Prints the
    export directory and exits 0 on success, exits 1 otherwise.
    """

    # Guard against missing CLI arguments; previously an unguarded
    # sys.argv access raised IndexError, unlike the sibling entry points
    # that validate the argument count up front.
    if len(sys.argv) < 4:
        print('Missing arguments, you should set: the model unique '
              'identifier, the model directory and the export directory. '
              'Received: ' + ' '.join(sys.argv))
        sys.exit(1)

    modelid = sys.argv[1]
    directory = sys.argv[2]

    classifier = estimator.Classifier(modelid, directory)
    exportdir = classifier.export_classifier(sys.argv[3])
    if exportdir:
        print(exportdir)
        sys.exit(0)

    # Nothing was exported.
    sys.exit(1)
# Example 6
def import_classifier():
    """Imports a trained classifier.

    CLI entry point: expects the model unique identifier, the model
    directory and the import source directory in sys.argv. Prints 'Ok'
    and exits 0 on success.
    """

    # Guard against missing CLI arguments; previously an unguarded
    # sys.argv access raised IndexError.
    if len(sys.argv) < 4:
        print('Missing arguments, you should set: the model unique '
              'identifier, the model directory and the import directory. '
              'Received: ' + ' '.join(sys.argv))
        sys.exit(1)

    modelid = sys.argv[1]
    directory = sys.argv[2]

    classifier = estimator.Classifier(modelid, directory)
    classifier.import_classifier(sys.argv[3])

    # An exception will be thrown before this point if the model can not
    # be imported, so reaching here means success. (The original printed
    # 'Ok' twice — a copy/paste duplicate — fixed to print it once.)
    print('Ok')
    sys.exit(0)
# Example 7
def import_model():
    """Web entry point: imports a trained model from an uploaded zip.

    Extracts the uploaded zip into a temporary directory and hands that
    directory to the classifier importer. Returns ('Ok', 200) on success;
    the importer is expected to raise on failure.
    """

    uniquemodelid = get_request_value('uniqueid')
    modeldir = storage.get_model_dir('dirhash')

    importzippath = get_file_path(storage.get_localbasedir(), 'importzip')

    # Use the temporary directory as a context manager so it is reliably
    # removed when done (the original relied on garbage collection of the
    # TemporaryDirectory object for cleanup).
    with zipfile.ZipFile(importzippath, 'r') as zipobject, \
            tempfile.TemporaryDirectory() as importtempdir:
        zipobject.extractall(importtempdir)

        classifier = estimator.Classifier(uniquemodelid, modeldir)
        classifier.import_classifier(importtempdir)

    return 'Ok', 200
# Example 8
def export():
    """Web entry point: exports the trained model as a zip download.

    NOTE(review): tempdir and zipf are plain locals rather than context
    managers — presumably so the underlying files outlive this function
    body long enough for send_file to stream the zip; cleanup then happens
    when the objects are garbage collected. Confirm before restructuring.
    """

    uniquemodelid = get_request_value('uniqueid')
    modeldir = storage.get_model_dir('dirhash')

    # We can use a temp directory for the export data
    # as we don't need to keep it forever.
    tempdir = tempfile.TemporaryDirectory()

    classifier = estimator.Classifier(uniquemodelid, modeldir)
    exportdir = classifier.export_classifier(tempdir.name)
    if exportdir is False:
        return Response('There is nothing to export.', 503)

    # Pack the export directory into a single zip file for download.
    zipf = tempfile.NamedTemporaryFile()
    zipdir(exportdir, zipf)

    return send_file(zipf.name, mimetype='application/zip')
def evaluation():
    """Evaluates the provided dataset.

    CLI entry point: expects the model id, the output directory, the
    training file, the minimum score, the maximum deviation and the
    number of iterations in sys.argv, plus an optional trained model
    directory. Prints the result as JSON and exits with its status.
    """

    # Bail out early when the CLI does not provide enough arguments.
    if len(sys.argv) < 7:
        error = dict()
        error['runid'] = str(int(time.time()))
        error['status'] = estimator.GENERAL_ERROR
        error['info'] = ['Missing arguments, you should set:\
    - The model unique identifier\
    - The directory to store all generated outputs\
    - The training file\
    - The minimum score (from 0 to 1) to consider the model as valid (defaults to 0.6)\
    - The minimum deviation to accept the model as valid (defaults to 0.02)\
    - The number of times the evaluation will run (defaults to 100)\
    Received: ' + ' '.join(sys.argv)]

        print(json.dumps(error))
        sys.exit(error['status'])

    modelid, directory, dataset = sys.argv[1:4]

    classifier = estimator.Classifier(modelid, directory, dataset)

    # An optional 8th argument points at a previously trained model dir.
    trained_model_dir = sys.argv[7] if len(sys.argv) > 7 else False

    evaluation_result = classifier.evaluate_dataset(
        dataset, float(sys.argv[4]), float(sys.argv[5]),
        int(sys.argv[6]), trained_model_dir)

    print(json.dumps(evaluation_result))
    sys.exit(evaluation_result['status'])