Exemplo n.º 1
0
def total_report():
    """Run the full define/prepare/fselect/evaluate/improve pipeline for each
    dataset-size variant of the BQ_AQUARIS_E5 CSVs and save its reports.

    Side effects: reads ./uploads/<label><size>.csv and writes the full and
    score reports under ./market/<label><size>.
    """
    # Local imports keep these heavy project modules out of module import time.
    import define
    import prepare
    import fselect
    import evaluate
    import improve

    label = 'BQ_AQUARIS_E5_'

    # One independent pipeline run per dataset-size suffix.
    # (Values are already strings, so no str() conversion is needed.)
    for size in ('20', '30', '40'):
        data_name = "./uploads/" + label + size + ".csv"
        print(data_name)
        class_name = "class"
        definer = define.Define(data_path=data_name,
                                header=None,
                                response=class_name).pipeline()

        preparer = prepare.Prepare(definer).pipeline()
        selector = fselect.Select(definer).pipeline()
        evaluator = evaluate.Evaluate(definer, preparer, selector)
        improver = improve.Improve(evaluator).pipeline()

        # Build the output path once; both reports go to the same prefix.
        report_path = './market/' + label + size
        improver.save_full_report(report_path)
        improver.save_score_report(report_path)
Exemplo n.º 2
0
    def __init__(self, config, db_config):
        """Initialize worker state, locks, queues and counters from *config*.

        config: dict of job settings; keys read below are
            'thread_num_of_download', 'thread_num_of_handle', 'retry_times',
            'tmp_data_dir', 'save_module', 'id', 'start_date', 'end_date'
            and 'input_nodes'.
        db_config: passed through to prepare.Prepare -- presumably database
            connection settings, TODO confirm against Prepare's signature.
        """
        self.__threadNumOfDownload = config['thread_num_of_download']
        self.__threadNumOfHandle = config['thread_num_of_handle']
        self.__retryTimes = config['retry_times']
        self.__tmpDataDir = config['tmp_data_dir']
        self.__saveModule = config['save_module']
        self.__id = config['id']
        # Composite file name: <start_date>_<end_date>_<input_nodes>_<id>.
        self.__fileName = config['start_date'] + '_' + config[
            'end_date'] + '_' + str(config['input_nodes']) + '_' + self.__id

        self.__config = config
        self.__dbConfig = db_config

        # Separate locks for URI, data and file access.
        self.__uriLock = threading.Lock()
        self.__dataLock = threading.Lock()
        self.__fileLock = threading.Lock()

        self.__downloadThreadLock = threading.Lock()

        # Failed downloads queue for retry; Queue.Queue indicates Python 2.
        self.__failQueue = Queue.Queue()
        self.__failUriDict = {}

        self.__com = {}

        # init() is defined elsewhere in this class; it runs before Prepare
        # is constructed -- order is presumably significant, TODO confirm.
        self.init()
        self.o_prepare = prepare.Prepare(self.__config, self.__dbConfig)

        # Accumulators for download (curl) and handling time/counts.
        self.__curlTime = 0
        self.__curlCounts = 0

        self.__handleTime = 0
        self.__handleCount = 0
Exemplo n.º 3
0
def report_improve(data_path, data_name, problem_type, optimizer, modelos):
    """Run the define -> prepare -> select -> evaluate -> improve pipeline
    and return a dict with the improver's plot and report table.
    """
    definer = define.Define(data_path=data_path,
                            data_name=data_name,
                            problem_type=problem_type).pipeline()
    preparer = prepare.Prepare(definer).pipeline()
    selector = fselect.Select(definer).pipeline()
    evaluator = evaluate.Evaluate(definer, preparer, selector)
    improver = improve.Improve(evaluator, optimizer, modelos).pipeline()

    return {'plot': improver.plot_models(), 'table': improver.report}
Exemplo n.º 4
0
def report_model(response, data_path, data_name, problem_type):
    """Evaluate models for a dataset, persist the boxplot and CSV report
    under MARKET_DIR/<dataset>/model, and return both plot and table.
    """
    definer = define.Define(data_path=data_path,
                            data_name=data_name,
                            problem_type=problem_type).pipeline()
    preparer = prepare.Prepare(definer).pipeline()   # scaler
    selector = fselect.Select(definer).pipeline()    # pca
    evaluator = evaluate.Evaluate(definer, preparer, selector).pipeline()

    plot = evaluator.plot_models()
    table = evaluator.report

    # Artifacts live under MARKET_DIR/<dataset-without-.csv>/model.
    base_dir = os.path.join(app.config['MARKET_DIR'],
                            data_name.replace(".csv", ""), 'model')
    tools.path_exists(base_dir)

    evaluator.save_plot(os.path.join(base_dir, 'boxplot.html'))
    evaluator.save_report(os.path.join(base_dir, 'report.csv'))

    return {'plot': plot, 'table': table}
Exemplo n.º 5
0
    def __init__(self,  sample=False,
                        model_name='VGG16',
                        image_dir='data/images',
                        caption_file='data/caption/Flickr8k.token.txt',
                        clean_caption=False,
                        load_pickle=False):
        """Load Flickr8k splits, build the tokenizer, dump tokenizer/detail
        pickles, then define and fit the caption model.
        """
        self.prepare = prepare.Prepare(sample, model_name, image_dir,
                                       caption_file, clean_caption, load_pickle)

        # Image-identifier splits.
        ids_train = self.prepare.load_identifiers('data/caption/Flickr_8k.trainImages.txt')
        ids_test = self.prepare.load_identifiers('data/caption/Flickr_8k.testImages.txt')
        ids_val = self.prepare.load_identifiers('data/caption/Flickr_8k.devImages.txt')

        # Per-split image features and caption descriptions.
        feat_train = self.prepare.load_features(ids_train)
        feat_test = self.prepare.load_features(ids_test)
        feat_val = self.prepare.load_features(ids_val)

        desc_train = self.prepare.load_description(ids_train)
        desc_test = self.prepare.load_description(ids_test)
        desc_val = self.prepare.load_description(ids_val)

        # Tokenizer comes from the full description set; max_length must
        # cover the longest caption seen in any split.
        tokenizer, vocab_size, max_length = self.prepare.create_tokens(self.prepare.description)
        lengths = [max_length]
        for split_desc in (desc_train, desc_test, desc_val):
            _, _, split_len = self.prepare.create_tokens(split_desc)
            lengths.append(split_len)
        max_length = max(lengths)

        detail = {'vocab_size': vocab_size, 'max_length': max_length}
        pickle.dump(tokenizer, open('pkl/tokenizer.pkl', 'wb'))
        pickle.dump(detail, open('pkl/detail.pkl', 'wb'))

        # Training sequences per split.
        X1train, X2train, ytrain = self.prepare.create_sequence(
            desc_train, tokenizer, max_length, vocab_size, feat_train)
        X1test, X2test, ytest = self.prepare.create_sequence(
            desc_test, tokenizer, max_length, vocab_size, feat_test)
        X1validation, X2validation, yvalidation = self.prepare.create_sequence(
            desc_val, tokenizer, max_length, vocab_size, feat_val)

        self.prepare.define_caption_model(max_length, vocab_size)
        self.prepare.checkpoint_prepare()
        self.prepare.model_fit(X1train, X2train, ytrain,
                               X1validation, X2validation, yvalidation)
Exemplo n.º 6
0
    # NOTE(review): fragment of a larger function -- args, sdo, index, LOGGER
    # and the helper modules are defined outside this view.
    search_dirs = [args.dir]

    # Tag the requested directory as SDO or vendor content.
    if sdo:
        stats_list = {'sdo': search_dirs}
    else:
        stats_list = {'vendor': search_dirs}
    if args.run_integrity:
        # Integrity runs override the choice and target the cisco vendor tree.
        stats_list = {
            'vendor': [get_curr_dir(__file__) + '/../../vendor/cisco']
        }
    LOGGER.info('Starting to iterate through files')
    for key in stats_list:
        search_dirs = stats_list[key]
        if key == 'sdo':
            sdo = True
            prepare_sdo = prepare.Prepare("prepare", yangcatalog_api_prefix)
            for search_dir in search_dirs:

                LOGGER.info('Found directory for sdo {}'.format(search_dir))
                integrity = statistics.Statistics(search_dir)

                capability = cap.Capability(search_dir, index, prepare_sdo,
                                            integrity, args.api, sdo,
                                            args.json_dir,
                                            args.result_html_dir,
                                            args.save_file_dir,
                                            private_credentials)
                LOGGER.info('Starting to parse files in sdo directory')
                capability.parse_and_dump_sdo()
                index += 1
            # Persist all parsed SDO modules once every directory is done.
            prepare_sdo.dump_modules(args.json_dir)
Exemplo n.º 7
0
    # NOTE(review): fragment -- stats_file and find_missing_hello are defined
    # earlier in the (unseen) enclosing function/file.
    missing = []
    my_files = find_missing_hello('./../../vendor/', '*.yang')
    for name in set(my_files):
        # Skip incompatible trees and MIB translations.
        if '.incompatible' not in name and 'MIBS' not in name:
            missing.append(name)
    # Strip the relative prefix so the report shows repo-rooted paths.
    missing = ', '.join(missing).replace('./../..', '')
    stats_file.write(
        '<h3>Folders with yang files but missing hello message inside of vendor:</h3><p>'
        + missing + '</p>')
    stats_file.write('</body></html>')
    stats_file.close()


# Module-level driver: walk vendor capability XML files and decide, per file,
# whether it changed since the recorded modification date.
start = time.time()
index = 1
# NOTE(review): this rebinding shadows the imported `prepare` module -- any
# later prepare.Prepare(...) call in this file would fail; consider renaming.
prepare = prepare.Prepare("prepare")
update = True
for filename in find_files('../../vendor/', '*capabilit*.xml'):
    try:
        # NOTE(review): 'rw' is not a valid open() mode in Python 3 (raises
        # ValueError), and even on Python 2 the file opens read-only, so the
        # write/truncate below would fail -- presumably 'r+' was intended.
        file_modification = open(
            'fileModificationDate/' + '-'.join(filename.split('/')[-4:]) +
            '.txt', 'rw')
        time_in_file = file_modification.readline()
        # NOTE(review): strptime returns struct_time while ctime returns str,
        # so this comparison is always False and `update` is never cleared.
        if time.strptime(time_in_file, '%a %b %d %H:%M:%S %Y') == time.ctime(
                os.path.getmtime(filename)):
            update = False
            file_modification.close()
        else:
            # Rewrite the stored timestamp with the file's current mtime.
            file_modification.seek(0)
            file_modification.write(time.ctime(os.path.getmtime(filename)))
            file_modification.truncate()
Exemplo n.º 8
0
    # NOTE(review): fragment -- sdo, search_dirs, args, index and LOGGER come
    # from the enclosing (unseen) function; it ends mid-else below.
    if sdo:
        stats_list = {'sdo': search_dirs}
    else:
        stats_list = {'vendor': search_dirs}
    if args.run_statistics:
        # Statistics runs scan the fixed experimental/standard/vendor trees.
        stats_list = {
            'sdo': ['../../experimental', '../../standard'],
            'vendor': ['../../vendor']
        }
    statistics_in_catalog = statisticsInCatalog.StatisticsInCatalog()
    LOGGER.info('Starting to iterate through files')
    for key in stats_list:
        search_dirs = stats_list[key]
        if key == 'sdo':
            sdo = True
            prepare_sdo = prepare.Prepare("prepare")
            for search_dir in search_dirs:

                LOGGER.info('Found directory for sdo {}'.format(search_dir))
                integrity = statistics.Statistics(search_dir)
                capability = cap.Capability(search_dir, index, prepare_sdo,
                                            integrity, args.api, sdo,
                                            args.json_dir,
                                            statistics_in_catalog)
                LOGGER.info('Starting to parse files in sdo directory')
                capability.parse_and_dump_sdo()
                index += 1
            # Write out collected SDO data once all directories are parsed.
            prepare_sdo.dump_sdo(args.json_dir)
        else:
            sdo = False
            prepare_vendor = prepare.Prepare("prepare")
Exemplo n.º 9
0
    # NOTE(review): fragment -- args, index, LOGGER and helper modules are
    # defined outside this view; the block ends at a dangling else below.
    sdo = args.sdo
    search_dirs = [args.dir]

    # Tag the requested directory as SDO or vendor content.
    if sdo:
        stats_list = {'sdo': search_dirs}
    else:
        stats_list = {'vendor': search_dirs}
    if args.run_integrity:
        # Integrity runs override the choice and target the cisco vendor tree.
        stats_list = {'vendor': [get_curr_dir(__file__) + '/../../vendor/cisco']}
    LOGGER.info('Starting to iterate through files')
    for key in stats_list:
        search_dirs = stats_list[key]
        if key == 'sdo':
            sdo = True
            prepare_sdo = prepare.Prepare("prepare", args.result_html_dir,
                                          args.api_port, args.api_ip,
                                          args.api_protocol)
            for search_dir in search_dirs:

                LOGGER.info('Found directory for sdo {}'.format(search_dir))
                integrity = statistics.Statistics(search_dir)

                capability = cap.Capability(search_dir, index, prepare_sdo,
                                            integrity, args.api, sdo,
                                            args.json_dir, args.result_html_dir,
                                            args.save_file_dir)
                LOGGER.info('Starting to parse files in sdo directory')
                capability.parse_and_dump_sdo()
                index += 1
            # Persist all parsed SDO modules once every directory is done.
            prepare_sdo.dump_modules(args.json_dir)
        else:
Exemplo n.º 10
0
    # NOTE(review): fragment -- sdo, search_dirs, args, index and LOGGER come
    # from the enclosing (unseen) function; it ends mid-else below.
    if sdo:
        stats_list = {'sdo': search_dirs}
    else:
        stats_list = {'vendor': search_dirs}
    if args.run_statistics:
        # Statistics runs scan the fixed experimental/standard/vendor trees.
        stats_list = {
            'sdo': ['../../experimental', '../../standard'],
            'vendor': ['../../vendor']
        }
    LOGGER.info('Starting to iterate through files')
    for key in stats_list:
        search_dirs = stats_list[key]
        if key == 'sdo':
            sdo = True
            prepare_sdo = prepare.Prepare("prepare", args.result_html_dir)
            for search_dir in search_dirs:

                LOGGER.info('Found directory for sdo {}'.format(search_dir))
                integrity = statistics.Statistics(search_dir)

                capability = cap.Capability(search_dir, index, prepare_sdo,
                                            integrity, args.api, sdo,
                                            args.json_dir,
                                            args.result_html_dir)
                LOGGER.info('Starting to parse files in sdo directory')
                capability.parse_and_dump_sdo()
                index += 1
            # Persist all parsed SDO modules once every directory is done.
            prepare_sdo.dump_modules(args.json_dir)
        else:
            sdo = False
Exemplo n.º 11
0
def setup(thing):
    """Attach a Prepare instance to *thing* with its pool mocked out.

    Returns *thing* so the fixture can be used fluently.
    """
    prepare_obj = prepare.Prepare(thing.context)
    prepare_obj._pool = MagicMock()
    thing.prepare = prepare_obj
    return thing