def __init__(self, amt, radius):
    Model.__init__(self)
    self.source = vtk.vtkAppendPolyData()
    for i in range(amt):
        opX = 1.0
        opY = 1.0
        opZ = 1.0
        if random() > 0.5:
            opX *= -1.0
        if random() > 0.5:
            opY *= -1.0
        if random() > 0.5:
            opZ *= -1.0
        sRad = 0.25 + (random() * 0.25)
        x = float(random() * radius) * opX
        y = float(random() * radius) * opY
        z = float(random() * radius) * opZ
        s = vtk.vtkSphereSource()
        s.SetCenter(x, y, z)
        s.SetRadius(float(sRad))
        s.Update()
        self.source.AddInput(s.GetOutput())
    # add center
    s = vtk.vtkSphereSource()
    s.SetCenter(0.0, 0.0, 0.0)
    s.SetRadius(0.5)
    s.Update()
    self.source.AddInput(s.GetOutput())
    self.Update()
def __init__(self):
    self.table_name = 'items'
    self.test_key = {
        'char_name': 'test_char_01',
        'item_id': 'test_item_01'
    }
    Model.__init__(self)
def __init__(self):
    self.table_name = 'guild_member'
    self.test_key = {
        'guild_name': 'test_guild_01',
        'char_name': 'test_char_01'
    }
    Model.__init__(self)
def __init__(self, ID, params):
    Model.__init__(self, ID, params)
    h2o.init()
    datadir = os.path.expanduser('~') + '/FSA/data/'
    trainingFile = datadir + params[1][0]
    valFile = datadir + params[1][1]
    testingFile = datadir + params[1][2]
    self.trainData = h2o.import_file(path=trainingFile)
    self.valData = h2o.import_file(path=valFile)
    # self.valData = self.trainData
    self.testData = h2o.import_file(path=testingFile)
    # print self.trainData.col_names()
    # drop the invalid columns
    self.trainData = self.trainData.drop("away_score").drop("home_score")
    self.valData = self.valData.drop("away_score").drop("home_score")
    self.testData = self.testData.drop("away_score").drop("home_score")
    self.params = params
    if self.params[0] == False:
        self.trainData = self.trainData.drop('spread')
        # self.valData = self.valData.drop('spread')
        self.testData = self.testData.drop('spread')
    # for h2o, creating the model is the same as training the model, so
    # we need to hold off here
    self.model = None
def __init__(self, controller): Model.__init__(self, controller) self._title = "Plot set %d" % self.getID() self._curves = {} # Key: model ID, value: CurveModel self._currentCurve = None self._xlabel = "" self._ylabel = ""
def __init__(self, bactDensity, chemDensity, dt, lamda, d, e):
    self.motility = d
    self.chemSens = lamda * bactDensity / (1 + e * bactDensity)
    self.liveCycle = 0
    self.chemProd = bactDensity
    self.chemDegr = 1
    Model.__init__(self, bactDensity, chemDensity, dt)
def __init__(self):
    Model.__init__(self)
    self.source = vtk.vtkSphereSource()
    self.source.SetCenter(0.0, 0.0, 0.0)
    self.source.SetRadius(0.5)
    self.source.Update()
    self.Update()
def __init__(self, title, body, user_id, category):
    Model.__init__(self, collection=self.collection)
    self.title = str(title)
    self.body = body
    self.published_at = str(datetime.utcnow())
    self.user_id = user_id
    self.category = category
    self.id = uuid.uuid4().hex
def __init__(self):
    Model.__init__(self)
    self.make_param('x', 10)
    self.make_param('y', 10)
    self.make_param('diversity', 10)
    self.make_param('monoculture_level', 0)
    self.make_param('predators', 0.0000)
    self.hives = self.makeHives()
def __init__(self, controller):
    from collections import OrderedDict
    Model.__init__(self, controller)
    self._currentPlotSet = None
    # Key: plotSet ID, value: instance of XYPlotSetModel. We use an OrderedDict so that
    # when removing elements, we can easily re-select the last-but-one.
    self._plotSets = OrderedDict()
    self._lockRepaint = False  # if True, repaint routines are blocked.
    self._plotSetsRepaint = set()  # plots waiting for repaint/update while repaint is locked
def __init__(self):
    Model.__init__(self)
    self.source = vtk.vtkCubeSource()
    self.source.SetCenter(0.0, 0.0, 0.0)
    self.source.SetXLength(1.0)
    self.source.SetYLength(1.0)
    self.source.SetZLength(1.0)
    self.source.Update()
    self.Update()
def __init__(self, options={}):
    Model.__init__(self, options)
    params = {}
    if utils.has(options, 'symbol'):
        params['identifier'] = options['symbol']
    else:
        params = config.get('intrinio.api.url.params')
    self.url = config.get('intrinio.api.url.root') + '?' + urlencode(params)
def __init__(self, controller):
    from collections import OrderedDict
    Model.__init__(self, controller)
    self._currentPlotSet = None
    # Key: plotSet ID, value: instance of XYPlotSetModel. We use an OrderedDict so that
    # when removing elements, we can easily re-select the last-but-one.
    self._plotSets = OrderedDict()
    self._lockRepaint = False  # if True, repaint routines are blocked.
    self._plotSetsRepaint = set()  # plots waiting for repaint/update while repaint is locked
def __init__(self, width: int):
    Model.__init__(self, width)
    self.offProb: int = 0
    self.changeProb: int = 100
    try:
        if sys.argv[3] == 'help':
            self.help()
    except IndexError:
        pass
def __init__(self, root='', database_path='data/', database_name='mydatabase.db'):
    Model.__init__(self, root, database_path, database_name)
    self.name = 'courses'
    self.columns["name"] = 'TEXT'
    self.columns["semester"] = 'TEXT'
    self.columns["type"] = 'TEXT'
    self.columns["lecture_group"] = 'TEXT'
    self.columns["day"] = 'TEXT'
    self.columns["start_time"] = 'TEXT'
    self.columns["end_time"] = 'TEXT'
    self.columns["venue"] = 'TEXT'
def __init__(self, X_train, y_train, X_test, y_test, X_val=None, y_val=None):
    """
    :param X_train: training features
    :param y_train: training labels
    :param X_test: test features
    :param y_test: test labels
    :param X_val: optional validation features
    :param y_val: optional validation labels
    """
    Model.__init__(self, X_train, y_train, X_test, y_test, X_val, y_val)
    self.clf = ClassifierSVM()
def __init__(self):
    Model.__init__(self)
    self.model = tf.keras.models.Sequential([
        tf.keras.layers.InputLayer(input_shape=[224, 224, 3]),
        tf.keras.layers.ZeroPadding2D((1, 1), input_shape=(3, 224, 224)),
        tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),
        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),
        tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),

        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),
        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),
        tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),

        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(256, (3, 3), activation='relu'),
        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(256, (3, 3), activation='relu'),
        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(256, (3, 3), activation='relu'),
        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(256, (3, 3), activation='relu'),
        tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),

        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(512, (3, 3), activation='relu'),
        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(512, (3, 3), activation='relu'),
        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(512, (3, 3), activation='relu'),
        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(512, (3, 3), activation='relu'),
        tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),

        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(512, (3, 3), activation='relu'),
        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(512, (3, 3), activation='relu'),
        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(512, (3, 3), activation='relu'),
        tf.keras.layers.ZeroPadding2D((1, 1)),
        tf.keras.layers.Conv2D(512, (3, 3), activation='relu'),
        tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),

        tf.keras.layers.Flatten(),
        tf.keras.layers.Dense(4096, activation='relu'),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(4096, activation='relu'),
        tf.keras.layers.Dropout(0.4)
        # tf.keras.layers.Dense(4, activation='softmax')
    ])
def __init__(self, ubm, tv_dim=400, tv_iterations=5, tv_update_sigma=True,
             tv_variance_threshold=5e-4):
    Model.__init__(self)
    self.ubm = ubm
    self.tv_dim = tv_dim
    self.tv_iterations = tv_iterations
    self.tv_update_sigma = tv_update_sigma
    self.tv_variance_threshold = tv_variance_threshold
def __init__(self, options={}):
    Model.__init__(self, options)
    params = config.get('yahoo.api.url.params')
    if utils.has(options, 'startdate') and utils.has(options, 'enddate'):
        params['period1'] = options['startdate']
        params['period2'] = options['enddate']
    if utils.has(options, 'granularity'):
        params['interval'] = options['granularity']
    if utils.has(options, 'ticker'):
        ticker = options['ticker']
    else:
        ticker = config.get('yahoo.api.url.ticker')
    self.url = config.get('yahoo.api.url.root') + ticker + '?' + urlencode(params)
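# A minimal, self-contained sketch of the URL construction performed above. The root URL,
# ticker and parameter values here are placeholders (assumptions), not the project's real
# config values; it only illustrates how the options map onto the query string.
from urllib.parse import urlencode

root = 'https://query1.finance.yahoo.com/v7/finance/download/'  # assumed example root
ticker = 'AAPL'
params = {'period1': 1609459200, 'period2': 1640995200, 'interval': '1d'}
url = root + ticker + '?' + urlencode(params)
# url -> '...AAPL?period1=1609459200&period2=1640995200&interval=1d'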
def __init__(self, configModel, utils, config, strTrial):
    Model.__init__(self, configModel, utils, strTrial)
    self.configPath = utils.MODEL_CONFIG_PATH + self.tag + '_t' + strTrial
    self.numIter = config.SVD_NUM_ITER
    self.SVDBufferPath = utils.SVDFEATURE_BUFFER_BINARY
    self.learningRate = config.SVD_LEARNING_RATE
    self.regularizationItem = config.SVD_REGULARIZATION_ITEM
    self.regularizationUser = config.SVD_REGULARIZATION_USER
    self.regularizationGlobal = config.SVD_REGULARIZATION_GLOBAL
    self.numFactor = config.SVD_NUM_FACTOR
    self.activeType = config.SVD_ACTIVE_TYPE
    self.modelOutPath = utils.SVDFEATURE_MODEL_OUT_PATH
    self.SVDFeatureBinary = utils.SVDFEATURE_BINARY
    self.SVDFeatureInferBinary = utils.SVDFEATURE_INFER_BINARY
def __init__(self, *sequences):
    """
    Create a new Polymer.

    @param sequences: sequence, 1-letter coded or list of 3-letter names;
                      several sequences are interpreted as distinct chains
    @type sequences: str or [ str ]
    """
    Model.__init__(self)

    #: list of features mapping into this model
    self.features = []

    if sequences:
        self.addSequences(sequences)

    self.__version__ = self.version()
def __init__(self, episodes, nbIteration=10, nbSimulation=10):
    """
    IC (Independent Cascade) algorithm.
    Sets up the inference mechanism and the learning algorithm for the
    infection probabilities.
    """
    Model.__init__(self, episodes)
    # Number of iterations to reach convergence
    self.nbIteration = nbIteration
    # Number of simulations for the inference
    self.nbSimulation = nbSimulation
    # Set of episodes D-
    self.dMoins = np.zeros((self.nbUser, self.nbUser))
    # Set of episodes D+
    self.dPlus = {(i, j): [] for i in range(0, self.nbUser)
                  for j in range(0, self.nbUser)}
    self.likelyHoods = np.zeros(nbIteration)
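# A generic, self-contained sketch of one Independent Cascade simulation, for intuition only;
# it is not the inference or learning code of the class above. Each newly infected node gets
# a single chance to infect each neighbor j with probability p[i, j].
import numpy as np

def simulate_ic(p, seeds, rng=np.random.default_rng(0)):
    n = p.shape[0]
    infected = set(seeds)
    frontier = set(seeds)
    while frontier:
        new = set()
        for i in frontier:
            for j in range(n):
                if j not in infected and rng.random() < p[i, j]:
                    new.add(j)
        infected |= new
        frontier = new
    return infected

p = np.full((4, 4), 0.3)          # toy infection probabilities
print(simulate_ic(p, seeds=[0]))  # nodes reached from node 0 in one cascade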
def __init__(self, logger=None):
    Model.__init__(self, logger)
    # Every tracked AMI event starts with a zero count.
    self.counter = {event: 0 for event in (
        Event.BRIDGE, Event.CONFBRIDGEEND, Event.CONFBRIDGEJOIN,
        Event.CONFBRIDGELEAVE, Event.CONFBRIDGESTART, Event.DIAL,
        Event.END, Event.HANGUP, Event.LOCALBRIDGE, Event.NEWCHANNEL,
        Event.NEWSTATE, Event.RENAME, Event.SIPCALLID, Event.HANGUPREQUEST,
        Event.START, Event.STOP, Event.NEWCALLERID, Event.SOFTHANGUPREQUEST)}
def __init__(self, n_gaussians, kmeans_iterations=10, gmm_iterations=10, gmm_enrollment_iterations=10,
             training_threshold=5e-4, variance_threshold=5e-4, update_weights=True, update_means=True,
             update_variances=True, relevance_factor=4):
    Model.__init__(self)
    self.gaussians = n_gaussians
    self.kmeans_iterations = kmeans_iterations
    self.gmm_iterations = gmm_iterations
    self.training_threshold = training_threshold
    self.variance_threshold = variance_threshold
    self.update_weights = update_weights
    self.update_means = update_means
    self.update_variances = update_variances
    self.relevance_factor = relevance_factor
    self.gmm_enrollment_iterations = gmm_enrollment_iterations
def __init__(self, D: DataCI, embedding_size: int = 200, optimizer: str = 'Adam', negative_ratio=1,
             nb_epochs: int = 10, batch_size: int = 1, classification: bool = False, kfolds: int = 10,
             model_file: str = 'model.h5', load: bool = False, save: bool = False):
    """
    NNEmbeddings class initialization.

    :param D: DataCI instance holding the data pairs.
    :param model_file: path of the Keras model file to load or save.
    :param embedding_size: dimension of the learned embeddings.
    :param optimizer: Keras optimizer name.
    :param save: if True, save the trained model to model_file.
    :param load: if True, load an existing model from model_file instead of training.
    """
    Model.__init__(self)
    Metrics.__init__(self)
    Visualizer.__init__(self)
    self.Data = D

    # Parameter Grid
    self.param_grid = {'embedding_size': embedding_size,
                       'negative_ratio': negative_ratio,
                       'batch_size': batch_size,
                       'nb_epochs': nb_epochs,
                       'classification': classification,
                       'optimizer': optimizer}

    self.model_file = model_file
    self.nr_revision = len(self.Data.pairs)

    if load:
        self.model = keras.models.load_model(self.model_file)
    else:
        self.model = self.build_model()
        print(self.crossValidation(k_folds=kfolds))

    if save:
        self.model.save(self.model_file)
def __init__(self, u, v, delta, dt, d, lamda, e):
    Model.__init__(self, u, v, delta, dt, d, lamda)
    self.e = c_double(e)
    self.lib.alternVolFill.restype = environment
def __init__(self):
    self.table_name = 'guild'
    self.test_key = {'guild_name': 'test_guild_01'}
    Model.__init__(self)
def __init__(self, controller, table=None, index=-1):
    Model.__init__(self, controller)
    self._title = "Curve %d" % self.getID()
    self._table = table
    self._yaxisIndex = index  # column index in the table
    self._xaxisIndex = 0  # by default the first column of the table is used for the X-s
def __init__(self, u, v, delta, dt, d, lamda, r):
    Model.__init__(self, u, v, delta, dt, d, lamda)
    self.r = c_double(r)
    self.lib.cellKin.restype = environment
def __init__(self, u, v, delta, dt, d, lamda, n):
    Model.__init__(self, u, v, delta, dt, d, lamda)
    self.n = c_double(n)
    self.lib.nonLinDiff.restype = environment
def __init__(self, u, v, delta, dt, d, lamda, alpha):
    Model.__init__(self, u, v, delta, dt, d, lamda)
    self.alpha = c_double(alpha)
    self.lib.receptor.restype = environment
def __init__(self, configModel, utils, config, strTrial):
    # This function is to set up different parameters.
    Model.__init__(self, configModel, utils, strTrial)
    self.configPath = utils.MODEL_CONFIG_PATH + self.tag + '_t' + strTrial

    ### Baidu Specific ###
    ### Implicit Feedback Files ###
    self.userHistoryReindexPath = utils.MODEL_TMP_PATH + self.tag + \
        '_userHistoryReindex' + '_t' + strTrial
    # The following 3 files are implicit feature files
    self.ImfeatTrain = utils.MODEL_FEATURED_PATH + self.tag + '_Imtrain' + '_t' + strTrial
    self.ImfeatCV = utils.MODEL_FEATURED_PATH + self.tag + '_ImCV' + '_t' + strTrial
    self.ImfeatTest = utils.MODEL_FEATURED_PATH + self.tag + '_Imtest' + '_t' + strTrial
    # Gp for group training file; the test file is already in group format, so skip it
    self.tmpGpTrain = utils.MODEL_TMP_PATH + self.tag + '_Gptrain' + '_t' + strTrial
    self.tmpGpCV = utils.MODEL_TMP_PATH + self.tag + '_GpCV' + '_t' + strTrial
    # for storing the line order of the group file
    self.tmpLineOrder = utils.MODEL_TMP_PATH + self.tag + '_LineOrder' + '_t' + strTrial
    ### End Implicit Feature Files ###

    self.regularizationFeedback = config.SVD_REGULARIZATION_FEEDBACK

    ### Neighborhood Model Files ###
    if len(self.misc) > 0:
        if self.misc[0] == "MovieTag":
            self.TagFilePath = self.movieTagPath
            self.TagFileReindexPath = utils.MODEL_TMP_PATH + self.tag + \
                '_' + self.misc[0] + '_t' + strTrial
            self.ShareTagPath = utils.MODEL_TMP_PATH + self.tag + \
                '_share_' + self.misc[0] + '_t' + strTrial
    ### End Neighborhood Model Files ###
    ### End Baidu Specific ###

    self.numIter = config.SVD_NUM_ITER
    self.SVDBufferPath = utils.SVDFEATURE_BUFFER_BINARY
    self.SVDGroupBufferPath = utils.SVDFEATURE_GROUP_BUFFER_BINARY
    self.learningRate = config.SVD_LEARNING_RATE
    self.regularizationItem = config.SVD_REGULARIZATION_ITEM
    self.regularizationUser = config.SVD_REGULARIZATION_USER
    self.regularizationGlobal = config.SVD_REGULARIZATION_GLOBAL
    self.numFactor = config.SVD_NUM_FACTOR
    self.activeType = config.SVD_ACTIVE_TYPE
    self.modelOutPath = utils.SVDFEATURE_MODEL_OUT_PATH
    self.SVDFeatureBinary = utils.SVDFEATURE_BINARY
    self.SVDFeatureInferBinary = utils.SVDFEATURE_INFER_BINARY
    self.SVDFeatureLineReorder = utils.SVDFEATURE_LINE_REORDER
    self.SVDFeatureSVDPPRandOrder = utils.SVDFEATURE_SVDPP_RANDORDER
    self.formatType = 0
    self.numUserFeedback = 0
    self.numUser = 0
    self.numMovie = 0
    self.numGlobal = 0
    self.avg = 0
    self.originDataSet = utils.ORIGINAL_DATA_PATH
def __init__(self, filename):
    Model.__init__(self, filename)
def __init__(self, query_str, im):
    Model.__init__(self, query_str, im)
    self.k1 = 2
    self.k2 = 500
    self.b = 0
    self.threshold = 10000
def __init__(self, u, v, delta, dt, d, lamda):
    Model.__init__(self, u, v, delta, dt, d, lamda)
    self.lib.minimal.restype = environment
def __init__(self, u, v, delta, dt, d, lamda, gamma):
    Model.__init__(self, u, v, delta, dt, d, lamda)
    self.gamma = c_double(gamma)
    self.lib.volFill.restype = environment
def __init__(self, query, im):
    Model.__init__(self, query, im)
    self.threshold = 10000
    self.uniq_doc_tokens = defaultdict(set)
def __init__(self, u, v, delta, dt, d, lamda, beta):
    Model.__init__(self, u, v, delta, dt, d, lamda)
    self.beta = c_double(beta)
    self.lib.logistic.restype = environment
def __init__(self, configModel, utils, config, strTrial):
    # This function is to set up different parameters.
    Model.__init__(self, configModel, utils, strTrial)
    self.configPath = utils.MODEL_CONFIG_PATH + self.tag + '_t' + strTrial

    ### Baidu Specific ###
    ### Implicit Feedback Files ###
    self.userHistoryReindexPath = utils.MODEL_TMP_PATH + self.tag + \
        '_userHistoryReindex' + '_t' + strTrial
    # The following 3 files are implicit feature files
    self.ImfeatTrain = utils.MODEL_FEATURED_PATH + self.tag + '_Imtrain' + '_t' + strTrial
    self.ImfeatCV = utils.MODEL_FEATURED_PATH + self.tag + '_ImCV' + '_t' + strTrial
    self.ImfeatTest = utils.MODEL_FEATURED_PATH + self.tag + '_Imtest' + '_t' + strTrial
    # Gp for group training file; the test file is already in group format, so skip it
    self.tmpGpTrain = utils.MODEL_TMP_PATH + self.tag + '_Gptrain' + '_t' + strTrial
    self.tmpGpCV = utils.MODEL_TMP_PATH + self.tag + '_GpCV' + '_t' + strTrial
    # for storing the line order of the group file
    self.tmpLineOrder = utils.MODEL_TMP_PATH + self.tag + '_LineOrder' + '_t' + strTrial
    ### End Implicit Feature Files ###

    self.regularizationFeedback = config.SVD_REGULARIZATION_FEEDBACK

    ### Neighborhood Model Files ###
    if len(self.misc) > 0:
        if self.misc[0] == "MovieTag":
            self.TagFilePath = self.movieTagPath
            self.TagFileReindexPath = utils.MODEL_TMP_PATH + self.tag + \
                '_' + self.misc[0] + '_t' + strTrial
            self.ShareTagPath = utils.MODEL_TMP_PATH + self.tag + \
                '_share_' + self.misc[0] + '_t' + strTrial
    ### End Neighborhood Model Files ###
    ### End Baidu Specific ###

    self.numIter = config.SVD_NUM_ITER
    self.SVDBufferPath = utils.SVDFEATURE_BUFFER_BINARY
    self.SVDGroupBufferPath = utils.SVDFEATURE_GROUP_BUFFER_BINARY
    self.learningRate = config.SVD_LEARNING_RATE
    self.regularizationItem = config.SVD_REGULARIZATION_ITEM
    self.regularizationUser = config.SVD_REGULARIZATION_USER
    self.regularizationGlobal = config.SVD_REGULARIZATION_GLOBAL
    self.numFactor = config.SVD_NUM_FACTOR
    self.activeType = config.SVD_ACTIVE_TYPE
    self.modelOutPath = utils.SVDFEATURE_MODEL_OUT_PATH
    self.SVDFeatureBinary = utils.SVDFEATURE_BINARY
    self.SVDFeatureInferBinary = utils.SVDFEATURE_INFER_BINARY
    self.SVDFeatureLineReorder = utils.SVDFEATURE_LINE_REORDER
    self.SVDFeatureSVDPPRandOrder = utils.SVDFEATURE_SVDPP_RANDORDER
    self.formatType = 0
    self.numUserFeedback = 0
    self.numUser = 0
    self.numMovie = 0
    self.numGlobal = 0
    self.avg = 0
    self.originDataSet = utils.ORIGINAL_DATA_PATH
def __init__(self):
    self.table_name = 'dungeon'
    self.test_key = {'char_name': 'test_char_01', 'dungeon_mid': 1}
    Model.__init__(self)
def __init__(self):
    # super(Model, self).__init__()
    Model.__init__(self)
    self.names = ['mu', 'sigma']
def __init__(self):
    self.table_name = 'skill'
    self.test_key = {'char_name': 'test_char_01', 'skill_mid': 1}
    Model.__init__(self)
def __init__(self, input=None, eps=.001, diff_order=5, verbose=None):
    if not scipy_imported:
        raise ImportError, 'Scipy must be installed to use NormApprox and MAP.'

    Model.__init__(self, input, verbose=verbose)

    # Allocate memory for internal traces and get stochastic slices
    self._slices = {}
    self.len = 0
    self.stochastic_len = {}
    self.fitted = False

    self.stochastic_list = list(self.stochastics)
    self.N_stochastics = len(self.stochastic_list)
    self.stochastic_indices = []
    self.stochastic_types = []
    self.stochastic_type_dict = {}

    for i in xrange(len(self.stochastic_list)):
        stochastic = self.stochastic_list[i]

        # Check types of all stochastics.
        type_now = check_type(stochastic)[0]
        self.stochastic_type_dict[stochastic] = type_now

        if not type_now is float:
            print "Warning: Stochastic " + stochastic.__name__ + "'s value is neither numerical nor array with " + \
                "floating-point dtype. Recommend fitting method fmin (default)."

        # Inspect shapes of all stochastics and create stochastic slices.
        if isinstance(stochastic.value, ndarray):
            self.stochastic_len[stochastic] = len(ravel(stochastic.value))
        else:
            self.stochastic_len[stochastic] = 1

        self._slices[stochastic] = slice(self.len, self.len + self.stochastic_len[stochastic])
        self.len += self.stochastic_len[stochastic]

        # Record indices that correspond to each stochastic.
        for j in range(len(ravel(stochastic.value))):
            self.stochastic_indices.append((stochastic, j))
            self.stochastic_types.append(type_now)

    self.data_len = 0
    for datum in self.observed_stochastics:
        self.data_len += len(ravel(datum.value))

    # Unpack step
    self.eps = zeros(self.len, dtype=float)
    if isinstance(eps, dict):
        for stochastic in self.stochastics:
            self.eps[self._slices[stochastic]] = eps[stochastic]
    else:
        self.eps[:] = eps

    self.diff_order = diff_order

    self._len_range = arange(self.len)

    # Initialize gradient and Hessian matrix.
    self.grad = zeros(self.len, dtype=float)
    self.hess = asmatrix(zeros((self.len, self.len), dtype=float))

    self._mu = None

    # Initialize NormApproxMu object.
    self.mu = NormApproxMu(self)

    def func_for_diff(val, index):
        """
        The function that gets passed to the derivatives.
        """
        self[index] = val
        return self.i_logp(index)

    self.func_for_diff = func_for_diff
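# For intuition, a minimal standalone sketch (plain NumPy, hypothetical variable names) of the
# slice bookkeeping done in the constructor above: each stochastic gets a contiguous slice of
# one flat parameter vector, so array-valued and scalar variables are addressed uniformly.
import numpy as np

values = {'beta': np.zeros(3), 'sigma': 1.0}   # stand-ins for an array-valued and a scalar stochastic

slices, offset = {}, 0
for name, value in values.items():
    length = np.ravel(value).size if isinstance(value, np.ndarray) else 1
    slices[name] = slice(offset, offset + length)
    offset += length

flat = np.zeros(offset)
flat[slices['beta']] = np.ravel(values['beta'])   # occupies positions 0..2
flat[slices['sigma']] = values['sigma']           # occupies position 3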
def __init__(self, options={}):
    Model.__init__(self, options)
def __init__(self):
    Model.__init__(self)
    # dim = (64, 64, 3)
    dim = (224, 224, 3)

    # Arguments shared by every convolution in this depthwise-separable stack.
    conv_args = dict(padding='same', kernel_initializer='he_normal',
                     activation='relu', use_bias=False)

    def dw(strides=(1, 1)):
        # 3x3 depthwise convolution followed by batch normalization.
        return [tf.keras.layers.DepthwiseConv2D(kernel_size=(3, 3), strides=strides, **conv_args),
                tf.keras.layers.BatchNormalization()]

    def pw(filters):
        # 1x1 pointwise convolution followed by batch normalization.
        return [tf.keras.layers.Conv2D(filters=filters, kernel_size=(1, 1), strides=(1, 1), **conv_args),
                tf.keras.layers.BatchNormalization()]

    layers = [tf.keras.layers.Conv2D(filters=32, kernel_size=(3, 3), strides=(1, 1),
                                     input_shape=dim, **conv_args),
              tf.keras.layers.BatchNormalization()]
    layers += dw() + pw(64)
    layers += dw(strides=(2, 2)) + pw(128)
    layers += dw() + pw(128)
    layers += dw() + pw(256)
    layers += dw() + pw(256)
    layers += dw(strides=(2, 2)) + pw(256)
    for _ in range(5):
        layers += dw() + pw(256)
    layers += dw(strides=(2, 2)) + pw(512)
    layers += dw() + pw(512)
    layers += [tf.keras.layers.GlobalAveragePooling2D(),
               tf.keras.layers.Dense(2, kernel_initializer='he_normal', activation='softmax')]

    self.model = tf.keras.models.Sequential(layers)
def __init__(self, ModelName, ModelParams=(0.01, 0.1)):
    Mod.__init__(self, "{0}_{1}".format("BlackScholes", ModelName), ModelParams)
def __init__(self):
    self.table_name = 'users'
    self.test_key = {'user_id': 'user_01'}
    Model.__init__(self)
def __init__(self):
    self.table_name = 'items'
    self.test_key = {'char_name': 'test_char_01', 'item_id': 'test_item_01'}
    Model.__init__(self)
def __init__(self, controller):
    Model.__init__(self, controller)
    self._columnTitles = {}
    self._data = None
    self._title = ""
def __init__(self, ID, params):
    Model.__init__(self, ID, params)
    self.n_est = params[0]
    self.model = ensemble.RandomForestRegressor(n_estimators=self.n_est, verbose=0, n_jobs=3,
                                                max_depth=10, oob_score=True)
def __init__(self, width: int):
    Model.__init__(self, width)
    self.color = (0, 255, 0)
def __init__(self):
    self.table_name = 'guild_member'
    self.test_key = {'guild_name': 'test_guild_01', 'char_name': 'test_char_01'}
    Model.__init__(self)
def __init__(self, u, v, delta, dt, d, lamda, phi):
    Model.__init__(self, u, v, delta, dt, d, lamda)
    self.phi = c_double(phi)
    self.lib.nonLinSignKin.restype = environment
def __init__(self, query_str, im):
    Model.__init__(self, query_str, im)
    self.alpha = 0.5
    self.beta = 1.5
    self.threshold = 6000
def __init__(self, hwc, ex_dim, action_dim, args):
    Model.__init__(self, hwc, ex_dim, action_dim, args)
def __init__(self, parse_data, feature_extractor, w=None):
    Model.__init__(self, parse_data, feature_extractor)
    if w:
        self.w = w