Code example #1

import numpy as np

# Helper functions (get_sample_data, svd_spectrogram, plot_mode_histogram,
# plot_sv_spectrum, histogram_separability, train_and_evaluate) come from
# the surrounding project.
def score_models(test, separability_measure='MIS'):
    """
    Score the ML models on processed data and plot mode histograms.

    :param test: String indicating specific data set of test.
    :param separability_measure: String indicating the separability measure to
        use.
    :return scores: Numpy array of model accuracies.
    :rtype: array_type
    """
    X, y = get_sample_data(test)
    _, s, vh = svd_spectrogram(X)

    # Plot mode histograms to examine separability.
    plot_mode_histogram(vh[:,:6], range(6), test)
    plot_mode_histogram(vh[:,6:12], range(6,12), test)
    plot_mode_histogram(vh[:,12:18], range(12, 18), test)
    plot_mode_histogram(vh[:,18:24], range(18, 24), test)
    plot_sv_spectrum(s/np.max(s), test)

    # Compute separability measures of histograms.
    hsmodes = histogram_separability(vh, separability_measure)

    # Train and evaluate models on a growing subset of modes, increasing the
    # number of modes by 2 until all modes are used.
    number_clf = 10
    mode_range = range(2, len(hsmodes)+2, 2)
    scores = np.zeros((number_clf, len(mode_range)))
    for j, m in enumerate(mode_range):
        Vh = np.asarray([vh[:,i] for i in hsmodes[:m]])
        scores_struct = train_and_evaluate(Vh.T, y)
        scores[:,j] = scores_struct

    return scores
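
A minimal driver for score_models, assuming the helpers above are importable; the data-set label 'gestures' is a made-up placeholder:

# Hypothetical usage sketch for score_models.
scores = score_models('gestures', separability_measure='MIS')
# Rows are classifiers, columns are mode counts (2, 4, 6, ...); report the
# best-performing mode count for each classifier.
for clf_idx, col in enumerate(scores.argmax(axis=1)):
    print(clf_idx, 2 * (col + 1), scores[clf_idx, col])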
Code example #2

import sys
import time

import numpy as np

# Helper functions (get_unique_dance_names, get_save_path, get_sample_data,
# progressbar, write) and the path/config globals (csv_data_dir, look_back,
# training_filepath, ...) come from the surrounding project.
def aggregate_data(out_file=sys.stdout):
    dances = get_unique_dance_names(csv_data_dir)
    comprehensive_train_X = np.array([])
    comprehensive_train_Y = np.array([])
    comprehensive_validate_X = np.array([])
    comprehensive_validate_Y = np.array([])
    comprehensive_evaluation_X = np.array([])
    comprehensive_evaluation_Y = np.array([])

    comprehensive_train_Class_Y = np.array([])
    comprehensive_validate_Class_Y = np.array([])
    comprehensive_evaluation_Class_Y = np.array([])

    start_time = time.time()
    for dance in progressbar(dances, "Progress: "):
        csv_filename, np_filename = get_save_path(dance)
        train_X, train_Y, validate_X, validate_Y, evaluation_X, evaluation_Y = get_sample_data(
            csv_filename, np_filename, look_back, offset, forecast,
            sample_increment, training_split, validation_split,
            pos_pre_processes, rot_pre_processes)

        sentiment = dance.split('_')[-1]
        train_Class_Y = np.full((train_X.shape[0], 1), int(sentiment))
        validate_Class_Y = np.full((validate_X.shape[0], 1), int(sentiment))
        evaluation_Class_Y = np.full((evaluation_X.shape[0], 1),
                                     int(sentiment))

        if (len(comprehensive_train_X) == 0):
            comprehensive_train_X = train_X
            comprehensive_train_Y = train_Y
            comprehensive_validate_X = validate_X
            comprehensive_validate_Y = validate_Y
            comprehensive_evaluation_X = evaluation_X
            comprehensive_evaluation_Y = evaluation_Y

            comprehensive_train_Class_Y = train_Class_Y
            comprehensive_validate_Class_Y = validate_Class_Y
            comprehensive_evaluation_Class_Y = evaluation_Class_Y
        else:
            comprehensive_train_X = np.vstack((comprehensive_train_X, train_X))
            comprehensive_train_Y = np.vstack((comprehensive_train_Y, train_Y))
            comprehensive_validate_X = np.vstack(
                (comprehensive_validate_X, validate_X))
            comprehensive_validate_Y = np.vstack(
                (comprehensive_validate_Y, validate_Y))
            comprehensive_evaluation_X = np.vstack(
                (comprehensive_evaluation_X, evaluation_X))
            comprehensive_evaluation_Y = np.vstack(
                (comprehensive_evaluation_Y, evaluation_Y))

            comprehensive_train_Class_Y = np.vstack(
                (comprehensive_train_Class_Y, train_Class_Y))
            comprehensive_validate_Class_Y = np.vstack(
                (comprehensive_validate_Class_Y, validate_Class_Y))
            comprehensive_evaluation_Class_Y = np.vstack(
                (comprehensive_evaluation_Class_Y, evaluation_Class_Y))

    write(
        "Fetching and Aggregating Training Data --- {} seconds ---".format(
            time.time() - start_time), out_file)

    np.save(training_filepath + "_X", comprehensive_train_X)
    np.save(training_filepath + "_Y", comprehensive_train_Y)
    np.save(validation_filepath + "_X", comprehensive_validate_X)
    np.save(validation_filepath + "_Y", comprehensive_validate_Y)
    np.save(evaluation_filepath + "_X", comprehensive_evaluation_X)
    np.save(evaluation_filepath + "_Y", comprehensive_evaluation_Y)

    np.save(training_filepath + "_Class_Y", comprehensive_train_Class_Y)
    np.save(validation_filepath + "_Class_Y", comprehensive_validate_Class_Y)
    np.save(evaluation_filepath + "_Class_Y", comprehensive_evaluation_Class_Y)

    print("Saved to", training_filepath + "_Class_Y")
Code example #3

import json
import sys
import time

import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow import keras

# Helper functions (get_unique_dance_names, get_sample_data, write,
# progressbar, save_model_checkpoint, CustomCallback) and the config globals
# (csv_data_dir, weights_file, batch_size, epochs, ...) come from the
# surrounding project.
def train_model(model, out_file=sys.stdout):
    """ Trains the model with the dance data.
        The History object's History.history attribute is a record of training loss values and metrics values at successive epochs, 
            as well as cooresponding validation values (if applicable).  

    :param model: the model to train
    :type keras.Model
    :param out_file: what to display/write the status information to
    :type output stream
    :return: the class containing the training metric information, the trained model, and the comprehensive evaluation data
    :type tuple
    """

    dances = get_unique_dance_names(csv_data_dir)
    checkpoint = keras.callbacks.ModelCheckpoint(filepath=weights_file,
                                                 monitor='val_loss',
                                                 mode='auto',
                                                 save_weights_only=True,
                                                 save_best_only=True)
    early_stopping = tf.keras.callbacks.EarlyStopping(
        monitor='val_loss',
        patience=stopping_patience,
        verbose=2,
        mode='auto',
        restore_best_weights=True)
    callbacks_list = [
        keras.callbacks.TerminateOnNaN(), checkpoint, early_stopping,
        CustomCallback(out_file)
    ]

    comprehensive_train_X = np.array([])
    comprehensive_train_Y = np.array([])
    comprehensive_validate_X = np.array([])
    comprehensive_validate_Y = np.array([])
    comprehensive_evaluation_X = np.array([])
    comprehensive_evaluation_Y = np.array([])

    write("Fetching and Agregating Training Data ...")  #sys.stdout
    start_time = time.time()
    for dance in progressbar(dances, "Progress: "):
        csv_filename, np_filename = get_save_path(dance)
        train_X, train_Y, validate_X, validate_Y, evaluation_X, evaluation_Y = get_sample_data(
            csv_filename, np_filename, look_back, offset, forecast,
            sample_increment, training_split, validation_split,
            convensional_method)
        if (len(comprehensive_train_X) == 0):
            comprehensive_train_X = train_X
            comprehensive_train_Y = train_Y
            comprehensive_validate_X = validate_X
            comprehensive_validate_Y = validate_Y
            comprehensive_evaluation_X = evaluation_X
            comprehensive_evaluation_Y = evaluation_Y
        else:
            comprehensive_train_X = np.vstack((comprehensive_train_X, train_X))
            comprehensive_train_Y = np.vstack((comprehensive_train_Y, train_Y))
            comprehensive_validate_X = np.vstack(
                (comprehensive_validate_X, validate_X))
            comprehensive_validate_Y = np.vstack(
                (comprehensive_validate_Y, validate_Y))
            comprehensive_evaluation_X = np.vstack(
                (comprehensive_evaluation_X, evaluation_X))
            comprehensive_evaluation_Y = np.vstack(
                (comprehensive_evaluation_Y, evaluation_Y))
    write(
        "Fetching and Aggregating Training Data --- {} seconds ---".format(
            time.time() - start_time), out_file)
    start_time = time.time()
    history = model.fit(comprehensive_train_X,
                        comprehensive_train_Y,
                        batch_size=batch_size,
                        callbacks=callbacks_list,
                        validation_data=(comprehensive_validate_X,
                                         comprehensive_validate_Y),
                        epochs=epochs,
                        shuffle=shuffle_data,
                        verbose=1)

    save_model_checkpoint(model, model_file)
    np.save(evaluation_filepath + "_X", comprehensive_evaluation_X)
    np.save(evaluation_filepath + "_Y", comprehensive_evaluation_Y)
    with open(history_train_file, "w") as history_file:
        json.dump(
            pd.DataFrame.from_dict(history.history).to_dict(), history_file)
    write("Saved training metric history to json file:\n\t" +
          history_train_file)  #sys.stdout
    write(
        "Saved training metric history to json file:\n\t" + history_train_file,
        out_file)
    return history, model, comprehensive_evaluation_X, comprehensive_evaluation_Y
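
An end-to-end sketch of calling train_model: the architecture below is purely illustrative (num_features and output_dim are assumed placeholders, not project config), and the evaluation split returned by train_model is scored at the end:

# Illustrative model only; the real architecture comes from the project.
model = keras.Sequential([
    keras.layers.LSTM(64, input_shape=(look_back, num_features)),
    keras.layers.Dense(output_dim),
])
model.compile(optimizer='adam', loss='mse')

history, model, eval_X, eval_Y = train_model(model)
print("evaluation loss:", model.evaluate(eval_X, eval_Y, verbose=0))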
Code example #4
import random

from sample_data import SampleData
from utils import get_sample_data, get_logfile_sample
import pandas as pd
pd.set_option('display.float_format', lambda x: '%.0f' % x)

# Input Data
h_data, f_data, matches = get_sample_data(vehicles=10,
                                          period_of_time_h=3,
                                          plot=True)

# Summary h data
print(h_data.reset_index().groupby("session").agg({
    "timestamp": ['min', 'max'],
    'Speed': 'mean'
}))

# Summary f data
print(f_data.reset_index().groupby(["mac", "session"]).agg({
    "timestamp": ['min', 'max'],
    'Speed': 'mean'
}))
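
Note that agg with a dict of lists produces MultiIndex columns; if the summary feeds further processing, flattening the columns first is often easier to work with (an optional step, reusing h_data from above):

summary = h_data.reset_index().groupby("session").agg({
    "timestamp": ['min', 'max'],
    'Speed': 'mean',
})
# Flatten ('timestamp', 'min') -> 'timestamp_min', ('Speed', 'mean') -> 'Speed_mean'.
summary.columns = ['_'.join(col).rstrip('_') for col in summary.columns]
print(summary)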
Code example #5

import ast
import json
import os

from Bio.Blast import NCBIXML
from django.shortcuts import render_to_response
from django.template import RequestContext

# The project-level `utils` module and the settings constants
# (EVALUE_BLAST_DEFAULT, BLAST_MAX_NUMBER_SEQ_IN_INPUT) come from the
# surrounding project.
def blast(request, username, blast_form, template_init, template_result, blast_commandline, sample_fasta_path, extra_context=None):
    """
    Process blastn/tblastn (blast+) query or set up initial blast form.
    """

    if request.method == 'POST':

        form = blast_form(request.POST)

        if form.is_valid():

            query_file_object_tmp = form.cleaned_data['sequence_in_form']
            evalue = float(form.cleaned_data['evalue_in_form'])
            word_size = int(form.cleaned_data['word_size_in_form'])
            database_path = str(form.cleaned_data['blast_nucl_in_form'])

            standard_opt_dic = {'query': query_file_object_tmp, 'evalue': evalue, 'outfmt': 5, 'db': database_path, 'word_size': word_size}

            # non-standard options:
            try:
                matrix = str(form.cleaned_data['matrix_in_form'])
                standard_opt_dic["matrix"] = matrix
            except:
                pass

            sensitivity_opt_dic = ast.literal_eval(str(form.cleaned_data['search_sensitivity_in_form']))

            # standard_opt_dic = {'query': query_file_object_tmp, 'evalue': evalue, 'outfmt': 5, 'db': nocoblast_db, "matrix": matrix, 'word_size': word_size}

            blast_records__file_xml = None
            try:
                """
                blast search, parse results from temp file, put them into template for rendering.
                """
                #fo.write(str(blast_commandline))
                blast_records__file_xml, blast_error = utils.run_blast_commands(blast_commandline, **dict(standard_opt_dic, **sensitivity_opt_dic))

                if len(blast_error) > 0:
                    return render_to_response(template_result, {"blast_record": '', 'username': username}, context_instance=RequestContext(request))

                else:
                    blast_records = NCBIXML.parse(blast_records__file_xml)
                    #print json.dumps((blast_records__file_xml.name),sort_keys=True, indent=4)
                    #print blast_records__file_xml.name
                    # convert blast results into objects and pack them into a list
                    blast_records_in_object_and_list = utils.blast_records_to_object(list(blast_records))

                    # Holds blast record vars for JSON serialization
                    class myjsonrecord:
                        def __init__(self, contig, query, length, evalue,
                                     score, indent):
                            self.contig = contig
                            self.query = query
                            self.length = length
                            self.evalue = evalue
                            self.score = score
                            self.indent = indent

                    # Holds alignment vars for JSON serialization
                    class myjsonalign:
                        def __init__(self, length, evalue, score, indent,
                                     positives, bits, query_start, query_end,
                                     subject_start, subject_end):
                            self.length = length
                            self.evalue = evalue
                            self.score = score
                            self.indentities = indent
                            self.positives = positives
                            self.bits = bits
                            self.query_start = query_start
                            self.query_end = query_end
                            self.subject_start = subject_start
                            self.subject_end = subject_end


                    # Write blast output to a JSON file
                    THIS_DIR = os.path.dirname(os.path.abspath(__file__))
                    json_temp = os.path.join(THIS_DIR, 'my-output.json')
                    with open(json_temp, 'w') as f:
                        f.write("{ \"Description\": [")
                        for br in blast_records_in_object_and_list:
                            for alignment in br.alignments:
                                myrecord = myjsonrecord(
                                    str(alignment.get_id()), str(br.query),
                                    str(alignment.length),
                                    str(alignment.best_evalue()),
                                    str(alignment.get_id()),
                                    str(alignment.best_identities()))
                                f.write(json.dumps(vars(myrecord)))
                                f.write(",")
                        # drop the trailing comma
                        f.seek(-1, os.SEEK_END)
                        f.truncate()
                        f.write("], \n\"Alignments\": [{")

                        for br in blast_records_in_object_and_list:
                            for alignment in br.alignments:
                                f.write("\"%s\":[" % (alignment.hit_def))
                                for hsp in alignment.hsp_list:
                                    myalign = myjsonalign(
                                        str(hsp.align_length), str(hsp.expect),
                                        str(hsp.score), str(hsp.identities),
                                        str(hsp.positives), str(hsp.bits),
                                        str(hsp.query_start),
                                        str(hsp.query_end),
                                        str(hsp.sbjct_start),
                                        str(hsp.sbjct_end))
                                    f.write(json.dumps(vars(myalign)))
                                    f.write(",")
                                f.seek(-1, os.SEEK_END)
                                f.truncate()
                                f.write("],")
                            f.seek(-1, os.SEEK_END)
                            f.truncate()
                        f.write("}]}")

                    try:
                        '''
                        user defined function to modify blast results
                        e.g. join blast results with external database in template

                        '''
                        if extra_context is not None:
                            blast_records_in_object_and_list = extra_context(blast_records_in_object_and_list)
                    except:
                        pass

                    return render_to_response(template_result,
                                              {'application': blast_records_in_object_and_list[0].application,
                                               'version': blast_records_in_object_and_list[0].version,
                                               'blast_records': blast_records_in_object_and_list,
                                               'username': username, },
                                              context_instance=RequestContext(request))

            finally:
                # remove result - temporary file
                if blast_records__file_xml is not None:
                    os.remove(blast_records__file_xml.name)

    else:
        form = blast_form(initial={'sequence_in_form': '', 'evalue_in_form': EVALUE_BLAST_DEFAULT})

    return render_to_response(template_init, {'form': form, 'sequence_sample_in_fasta': utils.get_sample_data(sample_fasta_path),
                                              "blast_max_number_seq_in_input": BLAST_MAX_NUMBER_SEQ_IN_INPUT,
                                              'username': username,
                                              }, context_instance=RequestContext(request))
Code example #6

import ast
import json
import os

from Bio.Blast import NCBIXML
from django.shortcuts import render_to_response
from django.template import RequestContext

# As in the previous example, `utils` and the settings constants
# (EVALUE_BLAST_DEFAULT, BLAST_MAX_NUMBER_SEQ_IN_INPUT) come from the
# surrounding project.
def blast(request,
          username,
          blast_form,
          template_init,
          template_result,
          blast_commandline,
          sample_fasta_path,
          extra_context=None):
    """
    Process blastn/tblastn (blast+) query or set up initial blast form.
    """

    if request.method == 'POST':

        form = blast_form(request.POST)

        if form.is_valid():

            query_file_object_tmp = form.cleaned_data['sequence_in_form']
            evalue = float(form.cleaned_data['evalue_in_form'])
            word_size = int(form.cleaned_data['word_size_in_form'])
            database_path = str(form.cleaned_data['blast_nucl_in_form'])

            standard_opt_dic = {
                'query': query_file_object_tmp,
                'evalue': evalue,
                'outfmt': 5,
                'db': database_path,
                'word_size': word_size
            }

            # non-standard options:
            try:
                matrix = str(form.cleaned_data['matrix_in_form'])
                standard_opt_dic["matrix"] = matrix
            except:
                pass

            sensitivity_opt_dic = ast.literal_eval(
                str(form.cleaned_data['search_sensitivity_in_form']))

            # standard_opt_dic = {'query': query_file_object_tmp, 'evalue': evalue, 'outfmt': 5, 'db': nocoblast_db, "matrix": matrix, 'word_size': word_size}

            blast_records__file_xml = None
            try:
                """
                blast search, parse results from temp file, put them into template for rendering.
                """
                #fo.write(str(blast_commandline))
                blast_records__file_xml, blast_error = utils.run_blast_commands(
                    blast_commandline,
                    **dict(standard_opt_dic, **sensitivity_opt_dic))

                if len(blast_error) > 0:
                    return render_to_response(
                        template_result, {
                            "blast_record": '',
                            'username': username
                        },
                        context_instance=RequestContext(request))

                else:
                    blast_records = NCBIXML.parse(blast_records__file_xml)
                    #print json.dumps((blast_records__file_xml.name),sort_keys=True, indent=4)
                    #print blast_records__file_xml.name
                    # convert blast results into objects and pack them into a list
                    blast_records_in_object_and_list = utils.blast_records_to_object(
                        list(blast_records))

                    # Holds blast record vars for JSON serialization
                    class myjsonrecord:
                        def __init__(self, contig, query, length, evalue,
                                     score, indent):

                            self.contig = contig
                            self.query = query
                            self.length = length
                            self.evalue = evalue
                            self.score = score
                            self.indent = indent

                    # Holds alignment vars for JSON serialization
                    class myjsonalign:
                        def __init__(self, length, evalue, score, indent,
                                     positives, bits, query_start, query_end,
                                     subject_start, subject_end):

                            self.length = length
                            self.evalue = evalue
                            self.score = score
                            self.indentities = indent
                            self.positives = positives
                            self.bits = bits
                            self.query_start = query_start
                            self.query_end = query_end
                            self.subject_start = subject_start
                            self.subject_end = subject_end


                    # Write blast output to a JSON file
                    THIS_DIR = os.path.dirname(os.path.abspath(__file__))
                    json_temp = os.path.join(THIS_DIR, 'my-output.json')
                    with open(json_temp, 'w') as f:
                        f.write("{ \"Description\": [")
                        for br in blast_records_in_object_and_list:
                            for alignment in br.alignments:
                                myrecord = myjsonrecord(
                                    str(alignment.get_id()), str(br.query),
                                    str(alignment.length),
                                    str(alignment.best_evalue()),
                                    str(alignment.get_id()),
                                    str(alignment.best_identities()))
                                #print vars(myrecord)
                                f.write(json.dumps(vars(myrecord)))
                                f.write(",")
                        f.seek(-1, os.SEEK_END)
                        f.truncate()
                        f.write("], \n\"Alignments\": [{")

                        for br in blast_records_in_object_and_list:
                            for alignment in br.alignments:
                                f.write("\"%s\":[" % (alignment.hit_def))
                                for hsp in alignment.hsp_list:
                                    myalign = myjsonalign(
                                        str(hsp.align_length), str(hsp.expect),
                                        str(hsp.score), str(hsp.identities),
                                        str(hsp.positives), str(hsp.bits),
                                        str(hsp.query_start),
                                        str(hsp.query_end),
                                        str(hsp.sbjct_start),
                                        str(hsp.sbjct_end))
                                    #print vars(myalign)
                                    f.write(json.dumps(vars(myalign)))
                                    f.write(",")
                                f.seek(-1, os.SEEK_END)
                                f.truncate()
                                f.write("],")
                            f.seek(-1, os.SEEK_END)
                            f.truncate()
                        f.write("}]}")

                    try:
                        '''
                        user defined function to modify blast results
                        e.g. join blast results with external database in template

                        '''
                        if extra_context is not None:
                            blast_records_in_object_and_list = extra_context(
                                blast_records_in_object_and_list)
                    except:
                        pass

                    return render_to_response(
                        template_result, {
                            'application':
                            blast_records_in_object_and_list[0].application,
                            'version':
                            blast_records_in_object_and_list[0].version,
                            'blast_records': blast_records_in_object_and_list,
                            'username': username,
                        },
                        context_instance=RequestContext(request))

            finally:
                # remove result - temporary file
                if blast_records__file_xml is not None:
                    os.remove(blast_records__file_xml.name)

    else:
        form = blast_form(initial={
            'sequence_in_form': '',
            'evalue_in_form': EVALUE_BLAST_DEFAULT
        })

    return render_to_response(
        template_init, {
            'form': form,
            'sequence_sample_in_fasta':
            utils.get_sample_data(sample_fasta_path),
            "blast_max_number_seq_in_input": BLAST_MAX_NUMBER_SEQ_IN_INPUT,
            'username': username,
        },
        context_instance=RequestContext(request))
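
Both versions above assemble the JSON by hand, trimming trailing commas with seek/truncate (a trick that Python 3 text-mode files reject, since they disallow nonzero end-relative seeks). A sketch of the same output structure built as plain dicts and serialized with a single json.dump call, assuming myjsonrecord and myjsonalign are lifted to module level:

def blast_results_to_json(blast_records_in_object_and_list, path):
    # Build {"Description": [...], "Alignments": [{...}]} in memory, then
    # serialize once: valid JSON is guaranteed, no comma surgery needed.
    description = []
    alignments = {}
    for br in blast_records_in_object_and_list:
        for alignment in br.alignments:
            description.append(vars(myjsonrecord(
                str(alignment.get_id()), str(br.query), str(alignment.length),
                str(alignment.best_evalue()), str(alignment.get_id()),
                str(alignment.best_identities()))))
            alignments[alignment.hit_def] = [
                vars(myjsonalign(
                    str(hsp.align_length), str(hsp.expect), str(hsp.score),
                    str(hsp.identities), str(hsp.positives), str(hsp.bits),
                    str(hsp.query_start), str(hsp.query_end),
                    str(hsp.sbjct_start), str(hsp.sbjct_end)))
                for hsp in alignment.hsp_list
            ]
    with open(path, 'w') as f:
        json.dump({"Description": description, "Alignments": [alignments]}, f)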
Code example #7
File: instance.py  Project: intk/perry
import sys
import json

from twisted.internet import reactor, defer
from twisted.python import log

from . import web

from utils import get_sample_data

DATA = get_sample_data()

MY_HASH = 'e4fdd677-e34f-4e8f-bec2-725b7bc6def0'


if __name__ == '__main__':

    # PORT and the UDP handler `concierge` are defined in parts of the
    # original file omitted from this snippet.
    reactor.listenUDP(PORT, concierge)
    reactor.listenTCP(8080, web.get_site())

    reactor.run()
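
Since the definition of concierge is elided, note that reactor.listenUDP expects a twisted.internet.protocol.DatagramProtocol instance; a minimal stand-in (purely hypothetical, not the project's actual handler) would look like:

from twisted.internet.protocol import DatagramProtocol

class EchoConcierge(DatagramProtocol):
    """Hypothetical stand-in for the elided concierge object."""

    def datagramReceived(self, data, addr):
        # Log each UDP datagram and echo it back to the sender.
        log.msg("received %r from %s" % (data, addr))
        self.transport.write(data, addr)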

Code example #8
from flask import jsonify, make_response, request, abort, render_template
from flask import current_app as app
import pickle as pkl
import numpy as np
from CustomForm import CustomForm
import pandas as pd
from utils import get_sample_data

with open('wine_clssifier.pkl', 'rb') as model_file:
    model = pkl.load(model_file)
temp_df = get_sample_data()


@app.route('/', methods=['GET'])
def main_page():
    form = CustomForm()
    return render_template('index.html', form=form)


@app.route('/predict', methods=['POST'])
def classify_wine():
    form = request.form
    input_data = {
        'alcohol': form['alcohol'],
        'alcalinity_of_ash': form['alcalinity_of_ash'],
        'total_phenols': form['total_phenols'],
        'flavanoids': form['flavanoids'],
        'nonflavanoid_phenols': form['nonflavanoid_phenols'],
        'proanthocyanins': form['proanthocyanins'],
        'color_intensity': form['color_intensity'],
        'od280/od315_of_diluted_wines': form['od280_od315_of_diluted_wines'],
        'proline': form['proline']
    }
    # The source snippet ends here; the remainder is an assumed sketch:
    # coerce the posted strings to floats and return the predicted class.
    features = pd.DataFrame([input_data]).astype(float)
    prediction = model.predict(features)[0]
    return jsonify({'class': int(prediction)})
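
A quick smoke test of the /predict route could post form-encoded data with requests; the field values below are illustrative, and the host/port assume a local Flask development server:

import requests

payload = {
    'alcohol': 13.2, 'alcalinity_of_ash': 11.2, 'total_phenols': 2.8,
    'flavanoids': 3.0, 'nonflavanoid_phenols': 0.3, 'proanthocyanins': 2.0,
    'color_intensity': 5.1, 'od280_od315_of_diluted_wines': 3.2,
    'proline': 1050,
}
resp = requests.post('http://127.0.0.1:5000/predict', data=payload)
print(resp.json())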