Example #1
0
def online_traning_api(input_file_name,model_id):
    print '%s' %settings.logging_file_training
    utils.setLog(settings.logging_file_training)
    logger=logging.getLogger('model-learner.train')

    #Log start of Full process
    utils.logInfoTime(logger, 'Started')

    # run hbc load data script
    logger.info('==> Load Data.')
    utils.logInfoTime(logger, 'Started Data Load')
    data_np_array, y_np_array = data_load_csv.csv_train_from_one_file(input_file_name);
    utils.logInfoTime(logger, 'Finished Data Load')

    # preprocessing featurizer sample data
    logger.info('==> Preprocessing feature data.')
    utils.logInfoTime(logger, 'Started Preprocessing')

    utils.logInfoTime(logger, 'Finished Preprocessing')

    # build models
    logger.info('==> Build Model.')
    utils.logInfoTime(logger, 'Started Model Building')
    model_building.modelsBuild(data_np_array, y_np_array,model_id,logger)
    utils.logInfoTime(logger, 'Finished Model Building')

    utils.logInfoTime(logger, 'Finished')
Example #2
0
def  offline_train():
#Setup the logger
    print '%s' %settings.logging_file_training
    utils.setLog(settings.logging_file_training)
    logger=logging.getLogger('model-learner.train')

    #Log start of Full process
    utils.logInfoTime(logger, 'Started')

    # run hbc load data script
    logger.info('==> Load Data.')
    utils.logInfoTime(logger, 'Started Data Load')
    data_np_array, y_np_array = data_load_csv.csv_train_file(settings.INPUT_DIR, settings.train_file_name_white, settings.train_file_name_black)
    utils.logInfoTime(logger, 'Finished Data Load')

    # preprocessing featurizer sample data
    logger.info('==> Preprocessing feature data.')
    utils.logInfoTime(logger, 'Started Preprocessing')

    utils.logInfoTime(logger, 'Finished Preprocessing')

    # build models
    logger.info('==> Build Model.')
    utils.logInfoTime(logger, 'Started Model Building')
    model_building.modelsBuild(data_np_array, y_np_array, 'hbc_train_offline.model',logger)
    utils.logInfoTime(logger, 'Finished Model Building')

    utils.logInfoTime(logger, 'Finished')

    print('model training complete')
Example #3
0
def score_one_iterm_online(model_path,feature_string):
    #feature_string =  "1,1,1,-1,1,1";
    logger = utils.setLog(settings.logging_file_scoring, logtype='Exec')
    logger = logging.getLogger('model-learner.test')

    logger.info('Start testing: %s', datetime.datetime.now().time().isoformat())
    # transform string to numpy array
    np_data = numpy.fromstring(feature_string, dtype=int, sep=",")

    np_data = np_data.reshape(1,-1)
    #print  np_data.shape
    output,clf = mexec.applyModel(model_path, np_data, settings.RESULTS_OUTPUT_DIR, settings.MODELS_OUTPUT_DIR + 'test_data.pkl')
    #print np_data
    print "returnValue:",score_normalization(300,900,output[0][0])
    logger.info('Finish testing: %s', datetime.datetime.now().time().isoformat())
Example #4
0
def score_csv():
    #Setup the logger
    print '%s' %settings.logging_file_scoring
    logger = utils.setLog(settings.logging_file_scoring, logtype='Exec')
    logger = logging.getLogger('model-learner.test')

    logger.info('Start testing: %s', datetime.datetime.now().time().isoformat())

    logger.info('==> Load Data.')
    data = data_load_csv.csv_score_file(settings.INPUT_DIR, settings.score_file_name)

    logger.info('==> Preprocessing data.')


    logger.info('==> Apply Data.')
    output,clf = mexec.applyModel(settings.MODELS_OUTPUT_DIR +'model_'+ settings.model_pickle_file, data, settings.RESULTS_OUTPUT_DIR, settings.MODELS_OUTPUT_DIR + 'test_data.pkl')

    logger.info('Finish testing: %s', datetime.datetime.now().time().isoformat())
    #print output[0]
    print score_normalization(300,900,output[0][0])
# import sub scripts and libraries
import logging
import datetime
import data_load
import data_preprocessing
import model_building   
import utils
import pprint as pp # to make log entries nicer to read
# get the settings for the run
import settings


# --- Top-level training driver script -----------------------------------
# Runs at import time: configures logging, loads the input CSV, and runs
# the preprocessing step. The resulting arrays are presumably consumed
# further down the file (beyond this chunk) by the model-building step.

#Setup the logger
utils.setLog(settings.LOGGING_FILE)
logger=logging.getLogger('Master')

#Log start of Full process
utils.logInfoTime(logger, 'Started')


# run data load script
# NOTE(review): the psqlLoad line below is a retained alternative data
# source (database instead of CSV); only the CSV path is active.
logger.info('--------------------------------- Data Load -----------------------------------')
utils.logInfoTime(logger, 'Started Data Load')
# initial_data = data_load.psqlLoad(settings.INPUT_TABLE, settings.INPUT_SCHEMA, columns='*')
initial_data = data_load.csvfile(settings.INPUT_DIR, settings.file_name, settings.RESULTS_OUTPUT_DIR)
utils.logInfoTime(logger, 'Finished Data Load')

# run preprocessing script
# Produces the feature matrix, target vector, and per-variable results.
logger.info('--------------------------------- Data Preprocessing -----------------------------------')
utils.logInfoTime(logger, 'Started Pre-Processing')
data_np_array, y_np_array, var_results = data_preprocessing.main(initial_data)
# --- Top-level model-execution (scoring) driver script -------------------
# NOTE(review): 'dir' shadows the builtin, and 'os' is not imported in this
# visible chunk -- presumably imported earlier in the full file; confirm.
dir = os.path.dirname(__file__)
# NOTE(review): 'pprint' and 'datetime' are imported more than once below;
# harmless in Python but worth deduplicating.
import pprint as pp # to make log entries nicer to read
from sklearn.externals import joblib
import json
import pandas as pd
from sklearn.metrics import roc_curve
import datetime
import pprint as pp # to make log entries nicer to read
import model_building_functions as modFuncs
import utils
import settings
import data_preprocessing as pproc
import model_execution_functions as mexec

#Setup the logger
# setLog's return value is immediately replaced; its side effect (log
# configuration) is what matters here.
logger = utils.setLog(settings.logging_file_exec,logtype='Exec') 
logger = logging.getLogger('Exec.model_execution')
    
logger.info('Started %s', datetime.datetime.now().time().isoformat())
# Load the rows to be scored plus the untouched original data.
logger.info('--------------------------------- Data Load -----------------------------------')
data, initial_data = mexec.loadDataToScore(settings.INPUT_DIR,settings.FILE_NAME_EXEC,settings.RESULTS_OUTPUT_DIR)

# Re-apply the training-time preprocessing in execute mode.
logger.info('--------------------------------- Apply Pre-Processing Steps-----------------------------------')
data, var_results = pproc.main(data,execute=True)

# Optional feature selection, mirrored from the training configuration.
if settings.feature_selection:
    data,var_results = mexec.applyFeatureSelection(data,settings.MODELS_OUTPUT_DIR)
    
# Apply the persisted model to the preprocessed data.
logger.info('--------------------------------- Apply Model -----------------------------------')
output_df,output,clf = mexec.applyModel(settings.model_pickle_file,data,initial_data,settings.RESULTS_OUTPUT_DIR,settings.MODELS_OUTPUT_DIR + '/test_data.pkl')
Example #7
0
import toml
from evdev import InputDevice
from devicer import findDevice, kvc2kv

from utils import setLog, getIP, createData

# Absolute directory containing this script; anchors the logs/ and conf/
# paths below. NOTE(review): 'os' is not imported in this visible chunk --
# presumably imported earlier in the full file; confirm.
RUN_FILE_PATH = os.path.dirname(os.path.abspath(__file__))

# Ensure a logs/ directory exists next to the script before logging starts.
LOGS_DIR = '{}{}{}'.format(RUN_FILE_PATH, os.sep, 'logs')
if not os.path.exists(LOGS_DIR):
    os.mkdir(LOGS_DIR)

# Load the TOML configuration shipped alongside the script.
CONFILE = '{}{}{}'.format(RUN_FILE_PATH, os.sep, 'conf/conf.toml')
CONF = toml.load(CONFILE)

# Configure logging from the [log] section of the config.
setLog(CONF['log'])


class CardScanner:
    """Card scan monitor"""
    def __init__(self):
        """init """
        # run
        self.keyword = keyword = CONF['device'].get('keyword', 'usb')
        self.device = findDevice(keyword)
        self.cardnumber = ''

        # request
        getip_ip = CONF['url'].get('getip_ip', '127.0.0.1')
        getip_port = CONF['url'].get('getip_port', 80)
        self.my_ip = getIP((getip_ip, getip_port))