def __init__(self, models):
    """
    :param models: either a sklearn model or a dictionary of models and their parameters.
    """
    print('Initializing MultiModel...')
    self._dm = DataManager()
    self._models = []
    init_str = ''
    if isinstance(models, dict):
        if 'load_from_file' in models.keys():
            lff = models['load_from_file']
        else:
            lff = False
        init_str = [mod.__name__ for mod in models['models'].keys()]
        for model, params in models['models'].items():
            self._models.append(
                Model(
                    model,
                    self._dm,
                    params['fixed_parameters'],
                    params['optimizable_parameters']
                )
            )
            if lff:
                self._models[-1].load_from_file()
    # Otherwise check if it's only a sklearn module.
    elif inspect.getmodule(models).__name__.split('.')[0] == 'sklearn':
        init_str = models.__name__
        self._models.append(Model(models, self._dm))
    print(f"Initialized a MultiModel with the following models: {init_str}")
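# --- Hedged usage sketch (not part of the original source): how the constructor
# --- above might be called. The estimator classes are real sklearn classes; the
# --- dict layout ('load_from_file', 'models', 'fixed_parameters',
# --- 'optimizable_parameters') mirrors the keys read by __init__, but the
# --- concrete parameter values and the class name MultiModel are illustrative.
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression

config = {
    'load_from_file': False,
    'models': {
        RandomForestClassifier: {
            'fixed_parameters': {'random_state': 0},
            'optimizable_parameters': {'n_estimators': [50, 100, 200]},
        },
        LogisticRegression: {
            'fixed_parameters': {'max_iter': 1000},
            'optimizable_parameters': {'C': [0.1, 1.0, 10.0]},
        },
    },
}

mm_from_dict = MultiModel(config)                   # dictionary form
mm_from_class = MultiModel(RandomForestClassifier)  # plain sklearn class form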
def test_diet_10n_10i(self):
    from Model.model import Model
    m = Model()
    MIN_REQ = load_obj('diet_10n_min_req')
    ingredients, list_of_ingredients = load_obj('diet_10n_10i_ing'), load_obj('diet_10n_10i_l_o_ing')
    x = []
    for ing in list_of_ingredients:
        x.append(m.add_var("real+", name=ing))
    x = np.array(x)
    m.minimize(sum(get_by_key(ingredients, "price", list_of_ingredients) * x))
    for cst in MIN_REQ:
        left = get_by_key(ingredients, cst, list_of_ingredients)
        m.add_constraint(sum(left * x) >= MIN_REQ[cst])
    m.solve(consider_dual=0)
    try:
        i = 0
        for ing in list_of_ingredients:
            m.add_lazy_constraint(x[i] <= ingredients[ing]['max'])
            i += 1
    except InfeasibleError as e:
        pass
    else:
        self.fail("Should raise InfeasibleError but didn't")
def create_model(sess, FLAGS, embedding_matrix):
    text_model = Model(FLAGS, embedding_matrix)
    ckpt = tf.train.get_checkpoint_state(FLAGS.ckpt_dir)
    if ckpt and ckpt.model_checkpoint_path:
        print("Restoring old model parameters from %s" % ckpt.model_checkpoint_path)
        text_model.saver.restore(sess, ckpt.model_checkpoint_path)
    return text_model
def __init__(self, width, height, fps, speed, player_left, player_bottom):
    self.model = Model(width, height, speed)
    self.view = View(self.model, player_left, player_bottom)
    self.fps = fps
    self.clock = pygame.time.Clock()
    self.running = True
    self.start = True
    self.alive = False
def main():
    dataset, learning_rate, epoch, alpha, beta, gamma, pretrain_epoch = parse_argument()
    random.seed(9001)

    dataset_config = {
        'feature_file': './Database/' + dataset + '/features.txt',
        'graph_file': './Database/' + dataset + '/edges.txt',
        'walks_file': './Database/' + dataset + '/walks.txt',
        'label_file': './Database/' + dataset + '/group.txt'
    }
    graph = Dataset(dataset_config)

    pretrain_config = {
        'net_shape': [6, 1],
        'att_shape': [1],
        'net_input_dim': graph.num_nodes,
        'att_input_dim': graph.num_feas,
        'pretrain_params_path': './Log/' + dataset + '/pretrain_params.pkl'
    }
    model_config = {
        'net_shape': [6, 1],
        'att_shape': [1],
        'net_input_dim': graph.num_nodes,
        'att_input_dim': graph.num_feas,
        'is_init': True,
        'pretrain_params_path': './Log/' + dataset + '/pretrain_params.pkl'
    }
    trainer_config = {
        'net_shape': [6, 1],
        'att_shape': [1],
        'net_input_dim': graph.num_nodes,
        'att_input_dim': graph.num_feas,
        'drop_prob': 0.2,
        'learning_rate': learning_rate,
        'batch_size': 100,
        'num_epochs': epoch,
        'beta': beta,
        'alpha': alpha,
        'gamma': gamma,
        'model_path': './Log/' + dataset + '/test_model.pkl',
    }

    print("=" * 30 + "Starting parameter pretraining" + "=" * 30)
    pretrainer = PreTrainer(pretrain_config)
    pretrainer.pretrain(graph.X, 'net', pretrain_epoch)  # matrix built from walks.txt
    pretrainer.pretrain(graph.Z, 'att', pretrain_epoch)

    model = Model(model_config)
    trainer = Trainer(model, trainer_config)
    trainer.train(graph)

    train_emb = trainer.infer(graph)
    train_emb = np.array(train_emb)
    with open("./Log/" + dataset + "/train_emb.txt", 'w') as f:
        np.savetxt(f, train_emb)
def add(self, model):
    # TODO add the ability to add an already optimized model.
    """
    Adds an sklearn model to the MultiModel.
    :param model: The name of the sklearn model.
    """
    if inspect.getmodule(model).__name__.split('.')[0] == 'sklearn':
        print(f'Adding model {model.__name__}')
        self._models.append(Model(model, self._dm))
    else:
        raise TypeError(f'Parameter model should be an sklearn model, {model.__name__} given')
def main(args):
    path = args.train_path
    path_valid = args.dev_path
    print("read_data....")
    data = read_data(path)              # training set
    data_valid = read_data(path_valid)  # validation set

    # Hyperparameter settings
    hidden_size = args.hidden_size
    char_size = args.convolutions
    embedding_size = args.embedding_size
    char_embed_size = args.char_embedding_size
    dropout = args.dropout
    kernel_size = args.kernel_size1
    type = args.modeling_type

    output, char_set = get_structure(data)
    output_valid, char_set_valid = get_structure(data_valid)
    for char in char_set_valid:
        char_set.add(char)

    print("get char id")
    char_output, char_to_int, int_to_char, n_char = get_index(output, char_set)
    question_set = get_question_set(output)
    char_output_valid, x, y, z = get_index(output_valid, char_set)
    question_set_valid = get_question_set(output_valid)

    print("get word embedding")
    word_dict_question = get_word_embeddings(question_set, embedding_size)
    word_dict_context = get_word_embeddings(output['contexts'], embedding_size)
    word_dict_question_valid = get_word_embeddings(question_set_valid, embedding_size)
    word_dict_context_valid = get_word_embeddings(output_valid['contexts'], embedding_size)

    dataLoader = DataLoader(output, char_output, word_dict_context, word_dict_question)
    dataLoader_valid = DataLoader(output_valid, char_output_valid, word_dict_context_valid, word_dict_question_valid)

    model = Model(embedding_size, char_size, hidden_size, kernel_size, n_char, type, char_embed_size, dropout).cuda()
    epochs = args.epochs
    iterations = len(question_set)
    iterations_validation = len(question_set_valid)
    lossFunction = nn.CrossEntropyLoss()
    optimizer = optim.Adadelta(model.parameters(), lr=args.learning_rate)

    # Train the model
    print("training model")
    train(model, dataLoader, lossFunction, optimizer, epochs, iterations, dataLoader_valid, iterations_validation)
def start(self):
    self.model = Model()
    self.view = View()
    self.view.start_view()
    self.create_a_contact('pedro', 'orozco', '*****@*****.**')
    self.create_a_contact('luis', 'medrano')
    self.create_a_contact('luis', 'medrano')
    # self.delete_a_contact('juan', 'lopez')
    self.show_all_contacts()
    self.update_a_contact('juan', 'perez', 'luis', 'zapata', '*****@*****.**', '4424232312')
    self.show_all_contacts()
    self.show_a_contact('luis', 'zapata')
    self.view.end_view()
def SendRequest(self, url, modelname):
    self.CurrentModule = Model(modelname)
    html = self.urlquest.GetHtml(url)
    htmlcode = self.urlquest.code
    self.CurrentModule.Prepare(url, html, htmlcode)
    diclist = self.GetAnalyzeDiclist(html)
    self.CurrentModule.Set("diclist", diclist)
    # Display
    sm = SimpleModel(modelname, url)
    self.UI.AddModelHistory(sm)
    self.UI.ShowModule(self.CurrentModule)
    # Add a crawl-history record (URL, model name)
    self.AddModelHistory(modelname, url)
def serialInput(book):
    """
    Input for serialization.
    :param book:
    :return:
    """
    read, write = Model().setTypeSerial()
    manager = input("1 - Load from the file\n"
                    "2 - Save to the file\n")
    if manager == "1":
        book.reload(read())
    elif manager == "2":
        write(book)
    else:
        print("Incorrect key pressed.")
def __init__(self, *args, **kwargs):
    self.__confdic = {
        "confFilePath": './Config/MainConf.json',
        "HistoryDicList": []
    }
    self.Conf_load(self.__confdic)
    self.CurrentModule = Model("ModelName")
    # Requesting
    self.Request = Request()
    self.urlquest = uq()
    # Parsing
    self.Analyze = Analyze()
    return super().__init__(*args, **kwargs)
def test_unbound(self):
    m = Model(print_obj={})
    a = m.add_var("real+", name="a")
    b = m.add_var("real+", name="b")
    m.maximize(3 * a - b)
    m.add_constraint(-3 * a + 3 * b <= 6)
    m.add_constraint(-8 * a + 4 * b <= 4)
    try:
        m.solve()
    except Unbounded as e:
        pass
    else:
        self.fail("Should raise Unbounded but didn't")
def test_woody_max_dual_add_variable(self):
    m = Model()
    x1 = m.add_var("real+", name="a")
    x2 = m.add_var("real+", name="b")
    m.maximize(35 * x1 + 60 * x2)
    m.add_constraint(8 * x1 + 12 * x2 <= 120)
    m.add_constraint(15 * x2 <= 60)
    m.add_constraint(3 * x1 + 6 * x2 <= 48)
    m.solve(consider_dual=2)
    m.add_new_variable([16, 20, 9], 75)
    computed_solution = m.get_solution_object()
    real_sol = [12, 2, 0, 540]
    for x_idx in range(len(real_sol)):
        self.assertAlmostEqual(computed_solution[x_idx], real_sol[x_idx])
def test_maximize_4v_3c_3o(self):
    m = Model(print_obj={})
    x = []
    for i in range(1, 5):
        x.append(m.add_var("real+", name="x_%d" % i))
    m.maximize(3 * x[1] + x[2] + 4 * x[3])
    m.add_constraint(x[0] + x[1] + x[2] + x[3] <= 40)
    m.add_constraint(2 * x[0] + x[1] + (-1 * x[2]) + (-1 * x[3]) <= 10)
    m.add_constraint(x[3] + (-1 * x[1]) <= 10)
    m.solve(revised=True)
    computed_solution = m.get_solution_object()
    real_sol = [0, 15.0, 0, 25.0, 145.0]
    for x_idx in range(len(real_sol)):
        self.assertAlmostEqual(computed_solution[x_idx], real_sol[x_idx])
def test_minimize_2v_3c_2o(self):
    m = Model(print_obj={})
    x = []
    for i in range(1, 3):
        x.append(m.add_var("real+", name="x_%d" % i))
    m.minimize(0.12 * x[0] + 0.15 * x[1])
    m.add_constraint(60 * x[0] + 60 * x[1] >= 300)
    m.add_constraint(12 * x[0] + 6 * x[1] >= 36)
    m.add_constraint(10 * x[0] + 30 * x[1] >= 90)
    m.solve()
    computed_solution = m.get_solution_object()
    real_sol = [3, 2, 0.66]
    for x_idx in range(len(real_sol)):
        self.assertAlmostEqual(computed_solution[x_idx], real_sol[x_idx])
def test_revised(self):
    m = Model()
    x1 = m.add_var("real+", name="a")
    x2 = m.add_var("real+", name="b")
    x3 = m.add_var("real+", name="c")
    m.maximize(5 * x1 + 4 * x2 + 3 * x3)
    m.add_constraint(2 * x1 + 3 * x2 + x3 <= 5)
    m.add_constraint(4 * x1 + x2 + 2 * x3 <= 11)
    m.add_constraint(3 * x1 + 4 * x2 + 2 * x3 <= 8)
    m.solve(consider_dual=0, revised=True)
    computed_solution = m.get_solution_object()
    real_sol = [2, 0, 1, 13]
    for x_idx in range(len(real_sol)):
        self.assertAlmostEqual(computed_solution[x_idx], real_sol[x_idx])
def test_maximize_2v_4c_2o(self):
    m = Model(print_obj={})
    x = []
    for i in range(1, 3):
        x.append(m.add_var("real+", name="x_%d" % i))
    m.maximize(4 * x[0] + 3 * x[1])
    m.add_constraint(2 * x[0] + 3 * x[1] <= 6)
    m.add_constraint(-3 * x[0] + 2 * x[1] <= 3)
    m.add_constraint(0 * x[0] + 2 * x[1] <= 5)
    m.add_constraint(2 * x[0] + 1 * x[1] <= 4)
    m.solve()
    computed_solution = m.get_solution_object()
    real_sol = [1.5, 1, 9]
    for x_idx in range(len(real_sol)):
        self.assertAlmostEqual(computed_solution[x_idx], real_sol[x_idx])
def test_woody_min_dual_lazy_constraint(self):
    m = Model()
    x1 = m.add_var("real+", name="a")
    x2 = m.add_var("real+", name="b")
    x3 = m.add_var("real+", name="c")
    m.minimize(35 * x1 + 60 * x2 + 75 * x3)
    m.add_constraint(8 * x1 + 12 * x2 + 16 * x3 >= 120)
    m.add_constraint(15 * x2 + 20 * x3 >= 60)
    m.add_constraint(3 * x1 + 6 * x2 + 9 * x3 >= 48)
    m.solve(consider_dual=2)
    m.add_lazy_constraint(x1 <= 5)
    m.add_lazy_constraint(x3 >= 1)
    computed_solution = m.get_solution_object()
    real_sol = [5, 0, 5, 550]
    for x_idx in range(len(real_sol)):
        self.assertAlmostEqual(computed_solution[x_idx], real_sol[x_idx])
def test_diet(self):
    m = Model()
    a = m.add_var("real+", name="oat")
    b = m.add_var("real+", name="chicken")
    c = m.add_var("real+", name="egg")
    d = m.add_var("real+", name="milk")
    e = m.add_var("real+", name="cake")
    f = m.add_var("real+", name="bean")
    m.minimize(25 * a + 130 * b + 85 * c + 70 * d + 95 * e + 98 * f)
    # calories
    m.add_constraint(110 * a + 205 * b + 160 * c + 160 * d + 420 * e + 260 * f >= 2000)
    # proteins
    m.add_constraint(4 * a + 32 * b + 13 * c + 8 * d + 4 * e + 14 * f >= 55)
    # calcium
    m.add_constraint(2 * a + 12 * b + 54 * c + 285 * d + 22 * e + 80 * f >= 800)
    # oats
    m.add_constraint(a <= 4)
    # chicken
    m.add_constraint(b <= 3)
    # egg
    m.add_constraint(c <= 2)
    # milk
    m.add_constraint(d <= 8)
    # cake
    m.add_constraint(e <= 1)
    # bean
    m.add_constraint(f <= 2)
    m.solve(revised=True, consider_dual=0)
    computed_solution = m.get_solution_object()
    real_sol = [4.0, 0, 0, 3.875, 1, 2, 662.25]
    for x_idx in range(len(real_sol)):
        self.assertAlmostEqual(computed_solution[x_idx], real_sol[x_idx])
def test_diet_integer_more_restrictions(self):
    m = Model(dtype="fraction")
    a = m.add_var("int+", name="oat")
    b = m.add_var("int+", name="chicken")
    c = m.add_var("int+", name="egg")
    d = m.add_var("int+", name="milk")
    e = m.add_var("int+", name="cake")
    f = m.add_var("int+", name="bean")
    m.minimize(25 * a + 130 * b + 85 * c + 70 * d + 95 * e + 98 * f)
    # calories
    m.add_constraint(110 * a + 205 * b + 160 * c + 160 * d + 420 * e + 260 * f >= 2000)
    # proteins
    m.add_constraint(4 * a + 32 * b + 13 * c + 8 * d + 4 * e + 14 * f >= 55)
    # calcium
    m.add_constraint(2 * a + 12 * b + 54 * c + 285 * d + 22 * e + 80 * f >= 800)
    # oats
    m.add_constraint(a <= 2)
    # chicken
    m.add_constraint(b <= 3)
    # egg
    m.add_constraint(c <= 2)
    # milk
    m.add_constraint(d <= 2)
    # cake
    m.add_constraint(e <= 1)
    # bean
    m.add_constraint(f <= 2)
    m.solve()
    computed_solution = m.get_solution_object()
    real_sol = [2, 1, 2, 2, 1, 2, 781]
    for x_idx in range(len(real_sol)):
        self.assertAlmostEqual(computed_solution[x_idx], real_sol[x_idx])
def ShowModule(self, tModel):
    if not isinstance(tModel, Model):
        self.CurrentModule = Model(tModel)
    else:
        self.CurrentModule = tModel
    # Display the HTML and the diclist
    html = self.CurrentModule.Get("html")
    self.Content1.setPlainText(html)  # setPlainText for plain text, setText for rich text
    # Clear the widgets that display the diclist
    if len(self.widgetlist_dic) != 0:
        for w in self.widgetlist_dic:
            w.deleteLater()
        self.widgetlist_dic.clear()
    diclist = self.CurrentModule.Get("diclist")
    print(diclist)
    if diclist != []:
        for i, dic in enumerate(diclist):
            dw = DiclistWidget(dic, i)
            self.widgetlist_dic.append(dw)
            self.ScroWidgetVBox.addWidget(dw)
def __init__(self):
    self.aid = None
    self.um_obj = Model()
    self.uv_obj = View()
def __init__(self):
    self.model = Model()
    self.view = View()
epoch_num = 300
z_dim = 100

save_path = os.getcwd() + '/Save_' + str(z_dim)
if not os.path.exists(save_path):
    os.makedirs(save_path)

# get_data
print('-- get data --')
mnist = input_data.read_data_sets("MNIST_data/", one_hot=False)
input_dim = np.shape(mnist.train.images)[1]

# make model
print('-- make model --')
model = Model(input_dim, z_dim)
model.set_model()

# save model setting
with open(save_path + '/setting.json', 'w') as fp:
    setting = model.setting()
    json.dump(setting, fp)

# training
print('-- begin training --')
data = mnist.train.images
num_one_epoch = np.shape(mnist.train.images)[0] // batch_size

record_rec = np.zeros(epoch_num)
record_gen = np.zeros(epoch_num)
record_disc = np.zeros(epoch_num)
import numpy as np
from time import time
import random, string

from Model.model import Model

m = Model(print_obj={'start_conf': True, 'end_conf': True})


def get_keys(obj):
    keys = []
    for ing in obj:
        keys.append(ing)
    return keys


def get_by_key(obj, key, key_list=False):
    arr = []
    if key_list:
        for ing in key_list:
            arr.append(obj[ing][key])
    else:
        for ing in obj:
            arr.append(obj[ing][key])
    return np.array(arr)


ingredients = {
    "oat": {
        "kcal": 110,
        "protein": 4,
import numpy as np
from time import time
import random, string

from Model.model import Model

m = Model(print_obj={'end_conf': True})

x = []
for i in range(4):
    x.append(m.add_var("real+", name=i))
x = np.array(x)

m.maximize(sum(np.array([4, 1, 5, 3]) * x))
m.add_constraint(x[0] - x[1] - x[2] + 3 * x[3] <= 1)
m.add_constraint(5 * x[0] + x[1] + 3 * x[2] + 8 * x[3] <= 55)
m.add_constraint(-x[0] + 2 * x[1] + 3 * x[2] - 5 * x[3] <= 3)
print("all added")

t0 = time()
m.solve()
print("Solved in %f" % (time() - t0))

m.print_solution(slack=False)
print("Steps: ", m.steps)
    'att_shape': [200, 100],
    'net_input_dim': graph.num_nodes,
    'att_input_dim': graph.num_feas,
    'is_init': True,
    'pretrain_params_path': './Log/cora/pretrain_params.pkl'
}
trainer_config = {
    'net_shape': [200, 100],
    'att_shape': [200, 100],
    'net_input_dim': graph.num_nodes,
    'att_input_dim': graph.num_feas,
    'drop_prob': 0.2,
    'learning_rate': 1e-5,
    'batch_size': 100,
    'num_epochs': 500,
    'beta': 100,
    'alpha': 50,
    'gamma': 500,
    'model_path': './Log/cora/cora_model.pkl',
}

pretrainer = PreTrainer(pretrain_config)
pretrainer.pretrain(graph.X, 'net')
pretrainer.pretrain(graph.Z, 'att')

model = Model(model_config)
trainer = Trainer(model, trainer_config)
trainer.train(graph)
trainer.infer(graph)
import numpy as np
from time import time
import random, string

random.seed(9001)

from Model.model import Model

m = Model(print_obj={
    'timing': True
})


def get_keys(obj):
    keys = []
    for ing in obj:
        keys.append(ing)
    return keys


def get_by_key(obj, key, key_list=False):
    arr = []
    if key_list:
        for ing in key_list:
            arr.append(obj[ing][key])
    else:
        for ing in obj:
            arr.append(obj[ing][key])
    return np.array(arr)


ingredients = {
    "oat": {
        "kcal": 110,
        "protein": 4,
from Model.model import Model
import numpy as np
from time import time

m = Model()

"""
A post office example:
Minimize the number of full-time employees.
Fulltime = 5 consecutive days + 2 free days

Weekday   | Employees needed
Monday    | 17
Tuesday   | 13
Wednesday | 15
Thursday  | 19
Friday    | 14
Saturday  | 16
Sunday    | 11
"""

mon = m.add_var("real+", name="Monday")
tue = m.add_var("real+", name="Tuesday")
wed = m.add_var("real+", name="Wednesday")
thu = m.add_var("real+", name="Thursday")
fri = m.add_var("real+", name="Friday")
sat = m.add_var("real+", name="Saturday")
sun = m.add_var("real+", name="Sunday")
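# --- Hedged sketch (not part of the original script) of how the coverage
# --- constraints described in the docstring above might be written with the
# --- same Model API seen in the other examples: each variable counts employees
# --- whose 5-day stint starts on that weekday, so a given day is covered by
# --- that day's starts plus the four preceding days' starts.
starts = [mon, tue, wed, thu, fri, sat, sun]
demand = [17, 13, 15, 19, 14, 16, 11]

# Objective: total number of employees hired.
m.minimize(mon + tue + wed + thu + fri + sat + sun)

for d in range(7):
    # Sum the start-day variables whose 5-day window includes day d.
    expr = starts[d]
    for k in range(1, 5):
        expr = expr + starts[(d - k) % 7]
    m.add_constraint(expr >= demand[d])

m.solve()
m.print_solution()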
from Model.model import Model
import torch.optim as optim
from torch.optim import lr_scheduler
import torch.nn as nn
from Data.data_preprocessing import Dataset
from Model.data_loader import DL

if __name__ == "__main__":
    csv_path = ''
    mp3_path = ''
    new_images_path = ''
    epochs = 10

    preprocess = Dataset(mp3_path, new_images_path, csv_path)
    preprocess.create_data()

    trainloader, testloader = DL(csv_path, new_images_path).create_dataloader()

    model_object = Model()
    model = model_object.get_model()

    criterion = nn.CrossEntropyLoss()
    optimizer_ft = optim.SGD(model.parameters(), lr=0.0001, momentum=0.9)
    exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=2, gamma=0.1)

    trained_model = model_object.train_model(model, criterion, optimizer_ft, exp_lr_scheduler, epochs, trainloader)