def _init_config(self, db_name_ext=None):
    """Load the app config and point the site URL at the local test server.

    Args:
        db_name_ext: optional suffix (e.g. ``'_test'``) appended to the
            configured database name so tests hit a separate database.
    """
    self.config = load_config()
    self.site = self.config['site']
    self.site['url'] = 'localhost:{0}'.format(options.port)
    # Append the suffix at most once. The original hard-coded '_test' in
    # this guard, which silently broke idempotence for any other suffix;
    # checking against db_name_ext itself keeps '_test' behavior identical.
    if db_name_ext and not self.config['database']['name'].endswith(db_name_ext):
        self.config['database']['name'] += db_name_ext
def setUp(self):
    """Configure debug logging and load the test configuration file."""
    logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
                        level=logging.DEBUG)
    self.config_ = load_config(("/media/sdc1/Aptana Studio 3 Workspace/"
                                "configs/config.yaml"),
                               logger,
                               exit_with_error=True)
    # Defensive: exit_with_error=True should already terminate on failure,
    # but guard anyway. Fixed `== None` to the idiomatic identity check.
    if self.config_ is None:
        logger.error("No config. Exit.")
        sys.exit(1)
def setUp(self):
    """Fill the test database and build a ranker plus a sample article dict."""
    fill_database()
    config_ = load_config(
        file_path="/media/sdc1/Aptana Studio 3 Workspace/configs/config.yaml",
        logger=logger)
    self.feature_extractor = EsaFeatureExtractor(prefix=config_['prefix'])
    self.ranker = ArticleRanker(extractor=self.feature_extractor)
    # Minimal article document in the shape the ranker consumes.
    self.article_as_dict = {
        'news_vendor': 'TechCrunch',
        'author': "MG Siegler",
        'link': "http://www.techcrunch.com",
        'headline': "Again Apple",
        'clean_content': "Fooobaaar!",
        'content': "<p>Fooobaaar!</p>",
        'features': {
            'version': '1.0',
            'data': [(1, 0.5), (3, 0.6)],
        },
    }
def setUp(self):
    """Connect to the local test DB, fill it, and build a Bayes user-model trainer."""
    logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
                        level=logging.DEBUG)
    config = load_config(
        file_path="/home/karten/Programmierung/frontend/config.yaml",
        logger=logger,
        exit_with_error=True)
    # Tests run against a dedicated local database, not the configured one.
    connect("nyan_test", port=27017)
    fill_database()
    self.user_id = User.objects(email=u'*****@*****.**').first().id
    # TF-IDF features are used here (ESA was an earlier alternative).
    feature_extractor = TfidfFeatureExtractor(prefix=config['prefix'])
    self.trainer = UserModelBayes(self.user_id, extractor=feature_extractor)
def setUp(self):
    """Connect to the local test DB, fill it, and build an SVM user-model trainer."""
    logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
                        level=logging.DEBUG)
    config = load_config(
        file_path=("/media/sdc1/Aptana Studio 3 Workspace"
                   "/configs/config.yaml"),
        logger=logger,
        exit_with_error=True)
    # Tests run against a dedicated local database, not the configured one.
    connect("nyan_test", port=20545)
    fill_database()
    self.user_id = User.objects(email=u'*****@*****.**').first().id
    # TF-IDF features are used here (ESA was an earlier alternative).
    feature_extractor = TfidfFeatureExtractor(prefix=config['prefix'])
    self.trainer = UserModelSVM(self.user_id, extractor=feature_extractor)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)

# create formatter and add it to the handlers
# (fh is the file handler created earlier in this file)
formatter = logging.Formatter('%(asctime)s : %(levelname)s in %(module)s ' +
                              '[%(pathname)s:%(lineno)d]: %(message)s')
ch.setFormatter(formatter)
fh.setFormatter(formatter)

# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(fh)

logger.info("running %s" % ' '.join(sys.argv))

# Load config. exit_with_error=True should already terminate on failure;
# the None check is defensive. Fixed `== None` to the identity check.
config_ = load_config(options.config, logger, exit_with_error=True)
if config_ is None:
    logger.error("No config. Exit.")
    sys.exit(1)

# Connect to mongo database
connect(config_['database']['db-name'],
        username=config_['database']['user'],
        password=config_['database']['passwd'],
        port=config_['database']['port'])

# Init clean corpus
# corpus = CleanCorpus()

# save dictionary: word <-> token id map
# NOTE(review): this fragment begins mid-expression — the leading
# `pickle.load(...)))` closes an append/list call that starts outside this
# view, so the code is kept byte-identical rather than reformatted.
# It assembles the test dataset from pickled query tensors, then either
# trains from scratch or loads a pre-trained model. Presumably
# `model_filename[:-2]` strips a "pt"-style suffix before appending
# "config" — TODO confirm against the save path convention.
# HACK: torch.load on an untrusted checkpoint executes pickle — verify the
# model file source is trusted.
pickle.load( open( "{}/{}/{}/query_y_{}.pkl".format(opt["data_dir"], "testing", "log", idx), "rb"))) test_dataset = list(zip(supp_xs_s, supp_ys_s, query_xs_s, query_ys_s)) del (supp_xs_s, supp_ys_s, query_xs_s, query_ys_s) print( "# epoch\ttrain_loss\tprecision5\tNDCG5\tMAP5\tprecision7\tNDCG7\tMAP7\tprecision10\tNDCG10\tMAP10" ) if not os.path.exists(model_filename): print("Start training...") training(trainer, opt, train_dataset, test_dataset, batch_size=opt['batch_size'], num_epoch=opt['num_epoch'], model_save=opt["save"], model_filename=model_filename, logger=file_logger) else: print("Load pre-trained model...") opt = helper.load_config(model_filename[:-2] + "config") helper.print_config(opt) trained_state_dict = torch.load(model_filename) trainer.load_state_dict(trained_state_dict)
# NOTE(review): this fragment begins mid-call — the leading
# `config['batch_size'])` closes an expression that starts outside this
# view, so the code is kept byte-identical rather than reformatted.
# It runs one training step per batch, logs train accuracy/loss every
# `num_print_step` steps, evaluates on the full test set, and writes both
# summaries to the TensorBoard writer; the trailing __main__ guard wires
# CLI args -> config -> CNNRunner. Presumably `i` is the current epoch
# index from an enclosing loop — confirm in the surrounding function.
config['batch_size']) fd = { self.cnn.x: data, self.cnn.y: labels, self.cnn.is_training: True } loss, _, acc, smr = sess.run( [loss_op, train_op, acc_op, merged_summary], feed_dict=fd) if s % config['num_print_step'] == 0: writer.add_summary(smr, global_step) print('{} steps, train accuracy: {:.6f}, loss: {:.6f}'. format(global_step, acc, loss)) test_acc, test_smr = sess.run( [acc_op, test_smr_acc], feed_dict={ self.cnn.x: self.datasource.test_data, self.cnn.y: self.datasource.test_labels, self.cnn.is_training: False }) writer.add_summary(test_smr, global_step) global_step += 1 print('{} steps, test accuracy: {:.6f} ({}/{} epochs)'.format( global_step, test_acc, i, config['num_epoch'])) if __name__ == '__main__': args = helper.get_args() config = helper.load_config(args.config) CNNRunner(config).run()
import ast  # safe literal parsing of the dataset file (see below)

torch.cuda.manual_seed(args.seed)

# make opt
opt = vars(args)
label2id = constant.LABEL_TO_ID
opt['num_class'] = len(label2id)

# print opt
helper.print_config(opt)
helper.ensure_dir(opt['save_dir'], verbose=True)

vocab_file = opt['save_dir'] + '/vocab.pkl'
vocab = Vocab(vocab_file)
opt['vocab_size'] = vocab.size
# NOTE(review): this reload replaces the opt dict built above (including
# the 'vocab_size' just set) with the saved config — confirm intended.
opt = helper.load_config(opt['save_dir'] + '/config.json', verbose=True)
if not torch.cuda.is_available():
    opt['cuda'] = False

# load data
print("Loading data from {} with batch size {} ...".format(
    opt['data_dir'], opt['batch_size']))
# Fixed two defects: the file handle was never closed, and eval() executed
# arbitrary code from the data file. ast.literal_eval is a drop-in for a
# file containing a Python literal (list/dict/tuple) and cannot run code.
with open(opt['data_dir'] + '/test.list', 'r', encoding='utf-8') as f:
    dev_batch = DataLoader(ast.literal_eval(f.read()),
                           opt['batch_size'], opt, vocab)

print('Building model...')
trainer = MyTrainer(opt)
current_lr = opt['lr']
# Configure logger
logging.basicConfig(
    format='-' * 80 + '\n' +
           '%(asctime)s : %(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' +
           '%(message)s\n' + '-' * 80,
    level=logging.DEBUG,
    filename="log.txt")

# Flask app
app = Flask(__name__)

# salt for hashing etc.
SALT = u""

# Load non-FLASK config
config = load_config("config.yaml", app.logger)

# Flask config
try:
    SECRET_KEY = config['flask']['secret_key']
    DEBUG = config['flask']['debug']
except KeyError as e:
    # Fixed message: the two concatenated literals previously ran together
    # with no separating space ("config.Could not..."). Also switched to
    # lazy %-args instead of eager string formatting.
    app.logger.error(
        "Malformed config. Could not get flask secret key and debug option: %s",
        e)
    sys.exit(1)

app.config.from_object(__name__)

# Login manager
login_manager = LoginManager()
def setUp(self):
    """Load the frontend configuration used by these tests."""
    self.config = load_config(
        file_path="/home/karten/Programmierung/frontend/config.yaml",
        logger=logger,
        exit_with_error=True)