def __init__(self, model_options, input_options, stock_code=None, load=False, saved_model_dir=None, saved_model_path=None): """Initializes the model. Creates a new model or loads a saved model.""" Model.__init__(self, model_options, input_options, stock_code=stock_code) # Please check scipy SVR documentation for details if not load or saved_model_dir is None: self.model = [ SVR(kernel=self.model_options["kernel"], degree=self.model_options["degree"], gamma=self.model_options["gamma"], coef0=self.model_options["coef0"], tol=self.model_options["tol"], C=self.model_options["C"], epsilon=self.model_options["epsilon"], shrinking=self.model_options["shrinking"], max_iter=self.model_options["max_iter"]) for _ in range(model_options["predict_n"]) ] else: model_path = saved_model_path if saved_model_path is not None else self.get_saved_model_path( saved_model_dir) if model_path is not None: self.load_model(path.join(saved_model_dir, model_path), self.SKLEARN_MODEL)
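# A minimal standalone sketch of the "one SVR per prediction step" pattern the
# constructor above builds (assumes scikit-learn and numpy; the data and
# hyperparameter values below are illustrative, not the project's defaults).
import numpy as np
from sklearn.svm import SVR

predict_n = 3                                         # prediction horizon
X = np.random.rand(100, 10)                           # dummy feature matrix
ys = [np.random.rand(100) for _ in range(predict_n)]  # one target per step
models = [SVR(kernel="rbf", C=1.0, epsilon=0.1) for _ in range(predict_n)]
for m, y in zip(models, ys):
    m.fit(X, y)
predictions = np.column_stack([m.predict(X[:5]) for m in models])  # shape (5, predict_n)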
def __init__(self, dataset_name: str, hyperparameters: Dict, infra_s3: Dict,
             features: list, target: str, h2o_ip: str, data_dir: str,
             training_job_dir: str = None, clean: bool = False,
             model_id: str = None):
    Model.__init__(self, dataset_name, hyperparameters, infra_s3, features,
                   target, data_dir, training_job_dir, clean)
    ip_parts = h2o_ip.split(':')
    self.ip = ip_parts[0]
    self.port = ip_parts[1]
    if model_id:
        self.model_id = model_id
        timestamp = self.model_id[-23:]
        self.model_filename = os.path.join(
            *(training_job_dir.split('/')[:-1] + [timestamp, self.MODEL_FILENAME]))
    else:
        self.model_id = '-'.join(training_job_dir.split('/')[1:])
        self.model_filename = os.path.join(training_job_dir, self.MODEL_FILENAME)
    logging.info('======== Model ID ========\n{}'.format(self.model_id))
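# Sketch of the endpoint and ID conventions assumed above: h2o_ip is
# "<ip>:<port>", and the last 23 characters of a model id are taken to be a
# timestamp. The timestamp format and the "model.bin" filename are
# illustrative assumptions, not the project's actual values.
import os

h2o_ip = '127.0.0.1:54321'
ip, port = h2o_ip.split(':')
training_job_dir = 'jobs/mydataset/myconfig/2021-06-01-12-30-45-123'
model_id = '-'.join(training_job_dir.split('/')[1:])
timestamp = model_id[-23:]
model_filename = os.path.join(*(training_job_dir.split('/')[:-1] + [timestamp, 'model.bin']))
print(model_id)        # mydataset-myconfig-2021-06-01-12-30-45-123
print(model_filename)  # jobs/mydataset/myconfig/2021-06-01-12-30-45-123/model.bin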
def __init__(self, gamma_0=None, gamma_f=None, n=None):
    """Initialise the model. gamma_0, gamma_f and n are set if the user provided them."""
    Model.__init__(self, gamma_0, gamma_f, n)
    '''
    Fixed attributes (shared by every equation of this model):
        display  : label shown to the user when asking for the value
        var_name : name of the variable as written in the .conf file
        value    : value of the variable, "" (empty string) by default
    '''
    # FILL HERE (3/4): list the attributes of your model (the model-specific
    # variables the user needs to provide).
    self.fixed_attributes = [
        {"display": "a", "var_name": "a_spec", "value": ""},
        {"display": "b", "var_name": "b_spec", "value": ""},
        {"display": "d", "var_name": "d_spec", "value": ""},
    ]
    # Run the setup function to prompt the user for every attribute.
    self.setup()
def __init__(self, model_options, input_options, stock_code):
    """Initializes the model."""
    Model.__init__(self, model_options, input_options, stock_code=stock_code)
def __init__(self, session, initial_embeddings: Optional[np.ndarray],
             static: str = 'non-static', input_length=20, embeddings_dim=50,
             learning_rate=1, num_filters=100, regularization_rate=0.01,
             ckpt_file: Optional[str] = None):
    Model.__init__(self)
    self.sess = session
    self.learning_rate = learning_rate
    self.input_length = input_length
    assert static in ['non-static', 'static', 'rand', 'both']
    self.static = static
    self.embeddings_dim = embeddings_dim
    if initial_embeddings is None:
        # Without pretrained embeddings, only the 'rand' variant makes sense.
        assert static == 'rand'
        self.initial_embeddings = np.random.rand(
            VOCABULARY_SIZE, self.embeddings_dim).astype(np.float32)
    else:
        self.initial_embeddings = initial_embeddings
    self.num_filters = num_filters
    self.regularization_rate = regularization_rate
    if ckpt_file:
        self.ckpt_file = ckpt_file
    else:
        ckpt_dir = os.path.join('tmp', 'models', str(self))
        os.makedirs(ckpt_dir, exist_ok=True)
        self.ckpt_file = os.path.join(ckpt_dir, 'yoon_kim.ckpt')
    self._build_model()
    self._add_training_objectives()
    self._load_or_init()
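# Sketch of the checkpoint-path convention used above: each model gets its own
# directory under tmp/models named after str(self), created on demand. The
# class and file names here are hypothetical stand-ins.
import os

class DemoModel:
    def __str__(self):
        return 'demo_model'

ckpt_dir = os.path.join('tmp', 'models', str(DemoModel()))
os.makedirs(ckpt_dir, exist_ok=True)
ckpt_file = os.path.join(ckpt_dir, 'demo.ckpt')
print(ckpt_file)  # tmp/models/demo_model/demo.ckpt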
def __init__(self, init_avg=1):
    Model.__init__(self)
    self.VERSION = 1
    self.name = "TimeItem-average"
    self.model_time = True
    self._init_avg = init_avg
def __init__(self, model_options, input_options, stock_code=None, load=False,
             saved_model_dir=None, saved_model_path=None, build_model=True):
    """Initializes the model. Creates a new model or loads a saved model."""
    Model.__init__(self, model_options, input_options, stock_code=stock_code)
    self.input_shape = get_input_shape(input_options)
    if not load or saved_model_dir is None:
        # build_model lets callers defer graph construction.
        if build_model:
            self.build_model()
    else:
        model_path = saved_model_path if saved_model_path is not None \
            else self.get_saved_model_path(saved_model_dir)
        if model_path is not None:
            self.load_model(path.join(saved_model_dir, model_path), Model.KERAS_MODEL)
def __init__(self, feature_set, predict_table_name):
    Model.__init__(self, feature_set)
    self.predict_table_name = predict_table_name
    self.x = None
    self.y = None
    self.clf = None
    self.predict_x = None
    self.predict_id = None
    self.vectorizer = None
def __init__(self, ph):
    Model.__init__(self, ph)
    self.input_shape = INPUT_SHAPES[self.ph['dataset']]
    self.output_shape = OUTPUT_SHAPES[self.ph['dataset']]
    self.pretrained_model_fp = self.ph.setdefault('pretrained_model_fp', None)
    if self.pretrained_model_fp:
        print('training on a pretrained model')
def __init__(self, model_options, load=False, saved_model_dir=None, saved_model_path=None):
    Model.__init__(self, model_options)
    if not load or saved_model_dir is None:
        self.model = linear_model.LinearRegression()
    else:
        model_path = saved_model_path if saved_model_path is not None \
            else self.get_saved_model_path(saved_model_dir)
        if model_path is not None:
            with open(saved_model_dir + "/" + model_path, "rb") as model_file:
                self.model = pickle.load(model_file)
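# Sketch of the pickle round-trip this loader expects: the saved file is a
# scikit-learn estimator serialized in binary mode (the file path below is
# illustrative).
import pickle
from sklearn import linear_model

model = linear_model.LinearRegression()
with open('/tmp/linreg.pkl', 'wb') as model_file:
    pickle.dump(model, model_file)
with open('/tmp/linreg.pkl', 'rb') as model_file:
    restored = pickle.load(model_file)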
def __init__(self, alpha=1.0, beta=0.1, KC=3.5, KI=2.5):
    Model.__init__(self)
    self.VERSION = 1
    self.name = "Hierarchical"
    self._alpha = alpha
    self._beta = beta
    self._KC = KC
    self._KI = KI
    self.decay_function = lambda x: alpha / (1 + beta * x)
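# The decay function shared by this family of models: alpha / (1 + beta * x),
# so an observation's weight shrinks as its distance x grows. Illustrative run:
alpha, beta = 1.0, 0.1
decay = lambda x: alpha / (1 + beta * x)
print([round(decay(x), 3) for x in range(5)])  # [1.0, 0.909, 0.833, 0.769, 0.714]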
def __init__(self, alpha=1.0, beta=0.1, KC=1, KI=1):
    Model.__init__(self)
    self.VERSION = 2
    self.name = "Prior-current"
    self._alpha = alpha
    self._beta = beta
    self._KC = KC
    self._KI = KI
    self.decay_function = lambda x: alpha / (1 + beta * x)
def __init__(self, tensor, keep_prob=1.0, num_classes=1000, retrain_layer=[],
             weights_path='./weights/vgg16.npy'):
    # Call the parent class, which will create the graph
    Model.__init__(self, tensor, keep_prob, num_classes, retrain_layer, weights_path)
    # Call the create function to build the computational graph
    self.final, self.endpoints = self.create()
def __init__(self, alpha=0.8, beta=0.08, KC=0.075, KI=0.1):
    Model.__init__(self)
    self.VERSION = 4
    self.name = "TimeHierarchical"
    self.model_time = True
    self._alpha = alpha
    self._beta = beta
    self._KC = KC
    self._KI = KI
    self.decay_function = lambda x: alpha / (1 + beta * x)
def __init__(self, alpha=1.0, beta=0.1, K=1, init_avg=0, floating_start=True):
    Model.__init__(self)
    self.VERSION = 3
    self.name = "Basic-Time"
    self.model_time = True
    self._alpha = alpha
    self._beta = beta
    self._K = K
    self._init_avg = init_avg
    self._floating_start = floating_start
    self.decay_function = lambda x: alpha / (1 + beta * x)
def __init__(self, alpha=1.0, beta=0.1, K=1, concepts=None):
    Model.__init__(self)
    self.VERSION = 2
    self.name = "TimeConcepts"
    self.model_time = True
    self._alpha = alpha
    self._beta = beta
    self._K = K
    self._concepts = sorted(concepts.keys()) if concepts is not None else "All"
    self._init_concept_map(concepts)
    self.decay_function = lambda x: alpha / (1 + beta * x)
def __init__(self, alpha=1.0, beta=0.1, KC=1, KI=1, init_avg=0, first_level=3):
    Model.__init__(self)
    self.VERSION = 4
    self.name = "Prior-Current-Time"
    self.model_time = True
    self._alpha = alpha
    self._beta = beta
    self._KC = KC
    self._KI = KI
    self._init_avg = init_avg
    self._first_level = first_level
    self.decay_function = lambda x: alpha / (1 + beta * x)
def __init__(self, from_ckpt=False, n_dim=None, r=2, opt_params=default_opt,
             log_prefix='./run'):
    # Perform the usual initialization
    self.r = r
    Model.__init__(self, from_ckpt=from_ckpt, n_dim=n_dim, r=r,
                   opt_params=opt_params, log_prefix=log_prefix)
def __init__(self, tensor, keep_prob=1.0, num_classes=1001, retrain_layer=[],
             weights_path='./weights/resnet_v2_101.ckpt'):
    # Call the parent class
    Model.__init__(self, tensor, keep_prob, num_classes, retrain_layer, weights_path)
    # TODO: this implementation has a problem during validation
    # (is_training stays True whenever layers are being retrained)
    is_training = bool(retrain_layer)
    with slim.arg_scope(resnet_arg_scope()):
        self.final, self.endpoints = resnet_v2_101(
            self.tensor, num_classes=num_classes, is_training=is_training)
def __init__(self, ph):
    Model.__init__(self, ph)
    self.input_size = INPUT_SIZE[self.ph['dataset']]
    self.output_size = OUTPUT_SIZE[self.ph['dataset']]
    self.pretrained_model_fp = self.ph.setdefault('pretrained_model_fp', None)
    self.fine_tune = self.ph['fine_tune']
    if self.pretrained_model_fp:
        print('training on pretrained model')
    else:
        print('training from scratch without pretrained model')
    if self.fine_tune:
        print('fine tuning pretrained model')
    else:
        print('pretrained model weights are frozen')
def __init__(self, from_ckpt=False, n_dim=None, r=2, pool_size=4, strides=4,
             opt_params=default_opt, log_prefix='./run'):
    # Perform the usual initialization
    self.r = r
    self.pool_size = pool_size
    self.strides = strides
    Model.__init__(self, from_ckpt=from_ckpt, n_dim=n_dim, r=r,
                   opt_params=opt_params, log_prefix=log_prefix)
def __init__(self, tensor, keep_prob=1.0, num_classes=1001, retrain_layer=[],
             weights_path='./weights/resnet_v2_101.ckpt'):
    # Call the parent class
    Model.__init__(self, tensor, keep_prob, num_classes, retrain_layer, weights_path)
    # Create the graph
    is_training = bool(retrain_layer)
    with slim.arg_scope(resnet_arg_scope()):
        self.final, self.endpoints = resnet_v2_101(
            self.tensor,
            num_classes=num_classes,
            is_training=is_training,
            global_pool=True  # True: both height_out and width_out equal one
        )
def __init__(self, session, corpus, sampler, k, factor_reg, bias_reg):
    self.sampler = sampler         # sampling strategy
    self.lfactor_reg = factor_reg  # latent-factor regularization rate
    self.bias_reg = bias_reg       # bias regularization rate
    self.K = k                     # dimensionality of the latent factor model
    self.u, self.i, self.j, self.mf_auc, self.bprloss, self.train_op = BPR.bpr_mf(
        corpus.user_count, corpus.item_count, k,
        regulation_rate=factor_reg, bias_reg=bias_reg)
    Model.__init__(self, corpus, session)
    print("bpr's restore...")
    self.restore()
    print("bpr - k=%d, reg_lf=%.2f, reg_bias=%.2f" % (k, factor_reg, bias_reg))
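# Sketch of the (u, i, j) triples a BPR sampler feeds the placeholders above:
# user u, a positive item i from u's history, and a uniformly sampled negative
# item j not in that history. The toy data below is illustrative.
import random

user_items = {0: {1, 3}, 1: {2}}  # user -> set of positively rated items
item_count = 5

def sample_triple():
    u = random.choice(list(user_items))
    i = random.choice(list(user_items[u]))
    j = random.randrange(item_count)
    while j in user_items[u]:
        j = random.randrange(item_count)
    return u, i, j

print(sample_triple())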
def __init__(self, model_options, input_options, stock_code=None, load=False, saved_model_dir=None, saved_model_path=None): """Initializes the model. Creates a new model or loads a saved model.""" Model.__init__(self, model_options, input_options, stock_code=stock_code) if not load or saved_model_dir is None: self.model = linear_model.LinearRegression() else: model_path = saved_model_path if saved_model_path is not None else self.get_saved_model_path( saved_model_dir) if model_path is not None: self.load_model(path.join(saved_model_dir, model_path), self.SKLEARN_MODEL)
def __init__(self, session, corpus, sampler, k, k2, factor_reg, bias_reg):
    self.K = k
    self.K2 = k2
    self.lam = factor_reg
    self.bias_reg = bias_reg
    self.sampler = sampler
    self.u, self.i, self.j, self.iv, self.jv, self.loss, self.auc, self.train_op = VBPR.vbpr(
        corpus.user_count, corpus.item_count, len(corpus.image_features[1]),
        hidden_dim=k, hidden_img_dim=k2,
        l2_regulization=factor_reg, bias_regulization=bias_reg)
    Model.__init__(self, corpus, session)
    print("vbpr's restore...")
    self.restore()
    print("VBPR - K=%d, K2=%d, reg_lf=%.2f, reg_bias=%.2f" % (k, k2, factor_reg, bias_reg))
def __init__(self, model_options, load=False, saved_model_dir=None):
    Model.__init__(self, model_options)
    print(saved_model_dir)
    # See the scikit-learn SVR documentation for parameter details.
    if not load or saved_model_dir is None:
        self.model = SVR(kernel=self.model_options["kernel"],
                         degree=self.model_options["degree"],
                         gamma=self.model_options["gamma"],
                         coef0=self.model_options["coef0"],
                         tol=self.model_options["tol"],
                         C=self.model_options["C"],
                         epsilon=self.model_options["epsilon"],
                         shrinking=self.model_options["shrinking"],
                         cache_size=self.model_options["cache_size"],
                         verbose=self.model_options["verbose"],
                         max_iter=self.model_options["max_iter"])
    else:
        model_path = self.get_saved_model_path(saved_model_dir)
        if model_path is not None:
            with open(saved_model_dir + "/" + model_path, "rb") as model_file:
                self.model = pickle.load(model_file)
def __init__(self, session, learning_rate=0.01, input_length=1014,
             regularization_rate=0.1, alphabet_size=70,
             ckpt_file: Optional[str] = None):
    Model.__init__(self)
    self.sess = session
    self.learning_rate = learning_rate
    self.input_length = input_length
    self.alphabet_size = alphabet_size
    self.reg_rate = regularization_rate
    if ckpt_file:
        self.ckpt_file = ckpt_file
    else:
        ckpt_dir = os.path.join('tmp', 'models', str(self))
        os.makedirs(ckpt_dir, exist_ok=True)
        self.ckpt_file = os.path.join(ckpt_dir, 'char_cnn.ckpt')
    self._build_model()
    self._add_training_objectives()
    self._load_or_init()
def __init__(self, dataset_name: str, hyperparameters: Dict, infra_s3: Dict,
             infra_sm: Dict, features: List, target: str, data_dir: str,
             training_job_common_dir: str, training_job_dir: str,
             aws_model_id: str = None, clean: bool = False):
    Model.__init__(self, dataset_name, hyperparameters, infra_s3, features,
                   target, data_dir, training_job_dir, clean)
    self.infra_sm = infra_sm
    self.aws_model_id = aws_model_id
    if not aws_model_id:
        # data-config-timestamp
        self.aws_model_id = '-'.join(
            training_job_dir.replace('_', '').split('/')[1:])
        s3_timestamp = training_job_dir.split('/')[-1]
    else:
        s3_timestamp = self.aws_model_id[-23:]
    logging.info('======== Model ID ========\n{}'.format(self.aws_model_id))

    self.training_job_common_dir = training_job_common_dir

    # data/ml/<dataset>/<config>
    self.s3_ml_data_common_dir = '/'.join(
        [self.infra_s3['s3_folder_ml']] + training_job_dir.split('/')[1:-1])
    # A new local folder is created for the logs and plots to avoid erasing
    # previous data. However, we need to use the S3 folder previously
    # generated by the prediction step (e.g. if we run only eval).
    # data/ml/<dataset>/<config>/<timestamp>
    self.s3_ml_data_job_dir = '/'.join(
        [self.s3_ml_data_common_dir, s3_timestamp])

    self.csv_training_filename = 'training.csv'
    self.csv_validation_filename = 'validation.csv'
    self.csv_testing_filename = 'testing.csv'
    self.s3_training_csv_path = self.s3_filepath(
        filetype='ml_data', filename=self.csv_training_filename,
        common=True, uri=True)
    self.s3_validation_csv_path = self.s3_filepath(
        filetype='ml_data', filename=self.csv_validation_filename,
        common=True, uri=True)
    self.s3_testing_csv_path = self.s3_filepath(
        filetype='ml_data', filename=self.csv_testing_filename,
        common=True, uri=True)

    self.local_training_preds_file = self.local_filepath('training_preds.csv')
    self.local_validation_preds_file = self.local_filepath('validation_preds.csv')
    self.local_testing_preds_file = self.local_filepath('testing_preds.csv')
    self.s3_preds_folder = self.s3_filepath(filetype='ml_data', uri=True)
    self.s3_training_preds_file = self.s3_filepath(
        filetype='ml_data', filename='training.csv.out')
    self.s3_validation_preds_file = self.s3_filepath(
        filetype='ml_data', filename='validation.csv.out')
    self.s3_testing_preds_file = self.s3_filepath(
        filetype='ml_data', filename='testing.csv.out')

    boto3_sess = boto3.Session()
    self.boto3_sm = boto3_sess.client('sagemaker')
    self.container = self._get_container(boto3_sess)
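# Sketch of the S3 path arithmetic above (folder and job names illustrative):
# the common dir drops the local root and the timestamp; the job dir appends
# the timestamp back.
s3_folder_ml = 'data/ml'
training_job_dir = 'jobs/mydataset/myconfig/2021-06-01-12-30-45-123'
s3_ml_data_common_dir = '/'.join([s3_folder_ml] + training_job_dir.split('/')[1:-1])
s3_ml_data_job_dir = '/'.join([s3_ml_data_common_dir, training_job_dir.split('/')[-1]])
print(s3_ml_data_common_dir)  # data/ml/mydataset/myconfig
print(s3_ml_data_job_dir)     # data/ml/mydataset/myconfig/2021-06-01-12-30-45-123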
def __init__(self, ph):
    Model.__init__(self, ph)
    self.input_size = INPUT_SIZE[self.ph['dataset']]
def __init__(self, prediction_model, time_model):
    Model.__init__(self)
    self._prediction_model = prediction_model
    self._time_model = time_model
def __init__(self, ph):
    Model.__init__(self, ph)
def __init__(self, ph):
    Model.__init__(self, ph)
    # None when no pretrained model path was configured
    self.pretrained_model_fp = self.ph.get('pretrained_model_fp', None)
def __init__(self, pre_word, post_word):
    Model.__init__(self, pre_word, post_word)
    # root = {"word_A": [sub-tree, count], "word_B": [sub-tree, count], ...}
    self.root = {}
    self.sizemo = None
def __init__(self, env):
    Model.__init__(self, env)
    # model_spec has the form "<size>x<num_rocks>", e.g. "11x11"
    size, num_rocks = self.model_spec.split('x')
    self.size = int(size)
    self.num_rocks = int(num_rocks)
def __init__(self, pre_words, post_words):
    Model.__init__(self, pre_words, post_words)
    self.sizemo = None
def __init__(self, ph):
    Model.__init__(self, ph)
    self.input_shape = INPUT_SHAPES[self.ph['dataset']]
def __init__(self, pre_words, post_words):
    Model.__init__(self, pre_words, post_words)