def __init__(self):
    """Amplifier-selection experiment: maps display names to switch positions."""
    Experiment.__init__(self)
    persistance.Dommable.__init__(self)
    # Display name -> physical switch position.
    self.amplifiers = {'None': 'bridge',
                       'Prana': 'Prana',
                       'Milmega 1': 'Milmega',
                       'Milmega 2': 'Milmega'}
    # BUG FIX: dict.keys() is a view on Python 3 and cannot be concatenated
    # to a list with '+'; list() makes this valid on both Python 2 and 3.
    self.amplifier = EnumerateProperty('None', ['Automatic'] + list(self.amplifiers.keys()))
    self.amplifier.changedTo.connect(self.setAmplifier)
def __init__(self, conn, setName, faasType, arraySize, duration):
    """FaaS experiment; the run name encodes the function type and array size."""
    self.setName = setName
    self.faasType = faasType
    self.arraySize = arraySize
    self.duration = duration
    # e.g. 'faas_python_1024'
    self.name = 'faas_' + faasType + '_' + str(arraySize)
    Experiment.__init__(self, conn, self.name, self.setName)
def __init__(self, learning_task, n_estimators=5000, max_hyperopt_evals=50,
             counters_sort_col=None, holdout_size=0,
             train_path=None, test_path=None, cd_path=None, output_folder_path='./'):
    """CatBoost experiment: hyperopt search space plus untuned default parameters.

    learning_task      -- task identifier forwarded to the Experiment base
    n_estimators       -- number of boosting iterations
    max_hyperopt_evals -- budget for hyperopt trials
    holdout_size       -- must be 0 for CatBoost (enforced below)
    """
    # NOTE(review): assert-based validation is stripped under `python -O`.
    assert holdout_size == 0, 'For Catboost holdout_size must be equal to 0'
    Experiment.__init__(self, learning_task, 'cab', n_estimators, max_hyperopt_evals,
                        False, None, holdout_size,
                        train_path, test_path, cd_path, output_folder_path)
    # Search space; single-element hp.choice entries pin a value while keeping
    # the parameter visible in the trial records.
    self.space = {
        'depth': hp.choice('depth', [6]),
        'ctr_border_count': hp.choice('ctr_border_count', [16]),
        'border_count': hp.choice('border_count', [128]),
        'ctr_description': hp.choice('ctr_description', [['Borders', 'CounterMax']]),
        'learning_rate': hp.loguniform('learning_rate', -5, 0),
        'random_strength': hp.choice('random_strength', [1, 20]),
        'one_hot_max_size': hp.choice('one_hot_max_size', [0, 25]),
        'l2_leaf_reg': hp.loguniform('l2_leaf_reg', 0, np.log(10)),
        'bagging_temperature': hp.uniform('bagging_temperature', 0, 1),
        'used_ram_limit': hp.choice('used_ram_limit', [100000000000]),
    }
    # Classification additionally tunes the number of gradient iterations.
    if learning_task == 'classification':
        self.space.update({
            'gradient_iterations': hp.choice('gradient_iterations', [1, 10])
        })
    # Baseline parameters evaluated without tuning.
    self.default_params = {
        'learning_rate': 0.03,
        'depth': 6,
        'fold_len_multiplier': 2,
        'rsm': 1.0,
        'border_count': 128,
        'ctr_border_count': 16,
        'l2_leaf_reg': 3,
        'leaf_estimation_method': 'Newton',
        'gradient_iterations': 10,
        'ctr_description': ['Borders', 'CounterMax'],
        'used_ram_limit': 100000000000,
    }
    self.default_params = self.preprocess_params(self.default_params)
    self.title = 'CatBoost'
def __init__(self):
    """Immunity-test experiment: pass criterion, transmitted power, power sweep."""
    Experiment.__init__(self)
    changed = self.settingsChanged  # shared change-notification signal
    # Pluggable sub-experiment slots with their usual defaults.
    self.passCriterion = ExperimentSlot(parent=self, defaultValue='VoltageCriterion')
    self.transmittedPower = ExperimentSlot(parent=self, defaultValue='TransmittedPower')
    # Power limits of the stimulus sweep.
    self.powerMinimum = Property(Power(-30., 'dBm'), changedSignal=changed)
    self.powerMaximum = Property(Power(+15., 'dBm'), changedSignal=changed)
    # 21-point frequency sweep from 150 kHz to 1.5 GHz.
    self.frequencies = SweepRange(Frequency(150e3), Frequency(1500e6), 21,
                                  changedSignal=changed)
def __init__(self, specification):
    """Copy the scheduling-simulation settings out of *specification*.

    Raises KeyError if any required key is missing.
    """
    # Pull every required setting; missing keys fail loudly, as before.
    for key in ('num_jobs', 'num_workers', 'prior_list', 'reward_completed',
                'reward_not_completed', 'num_rounds', 'num_simulations'):
        setattr(self, key, specification[key])
    Experiment.__init__(self)
def __init__(self, task, agent, avtable):
    """Either drive *task* through an optimisation agent or defer to Experiment.

    An OptimizationAgent's learner is wired up as the evaluator; any other
    agent goes through the standard Experiment initialiser.
    """
    if not isinstance(agent, OptimizationAgent):
        Experiment.__init__(self, task, agent, avtable)
        return
    # Optimisation path: the learner evaluates the agent's module on the task.
    self.doOptimization = True
    self.optimizer = agent.learner
    self.optimizer.setEvaluator(task, agent.module)
    self.optimizer.maxEvaluations = self.optimizer.numEvaluations
    self.task = task
    self.avtable = avtable
def __init__(self, line_flag=False):
    """Bridged experiment: build the joint model over both corpora and images."""
    Experiment.__init__(self, line_flag)
    # Joint model bridging the English and Japanese corpora through images.
    self.joint = BridgedJoint(self.english_corpus_dir,
                              self.img_features_npy,
                              self.japanese_corpus_dir,
                              self.img_original_dir,
                              self.img_correspondence_path,
                              self.japanese_original_corpus_dir,
                              BridgedExperiment.PCA_COMPRESS_WORD_DIM,
                              BridgedExperiment.PCA_COMPRESS_IMG_DIM,
                              line_flag)
    self.logger.info("<Initilalizing BridgedExperiment>")
    self.distribution_list = []
def __init__(self, learning_task, n_estimators=5000, max_hyperopt_evals=50,
             counters_sort_col=None, holdout_size=0,
             train_path=None, test_path=None, cd_path=None, output_folder_path='./'):
    """XGBoost experiment: hyperopt search space plus library-default parameters.

    learning_task      -- task identifier forwarded to the Experiment base
    n_estimators       -- number of boosting rounds
    max_hyperopt_evals -- budget for hyperopt trials
    """
    Experiment.__init__(self, learning_task, 'xgb', n_estimators, max_hyperopt_evals,
                        True, counters_sort_col, holdout_size,
                        train_path, test_path, cd_path, output_folder_path)
    # Search space; the regularisers mix a point mass at 0 with a log-uniform
    # positive branch via nested hp.choice.
    self.space = {
        'eta': hp.loguniform('eta', -7, 0),
        'max_depth': hp.quniform('max_depth', 2, 10, 1),
        'subsample': hp.uniform('subsample', 0.5, 1),
        'colsample_bytree': hp.uniform('colsample_bytree', 0.5, 1),
        'colsample_bylevel': hp.uniform('colsample_bylevel', 0.5, 1),
        'min_child_weight': hp.loguniform('min_child_weight', -16, 5),
        'alpha': hp.choice('alpha', [0, hp.loguniform('alpha_positive', -16, 2)]),
        'lambda': hp.choice('lambda', [0, hp.loguniform('lambda_positive', -16, 2)]),
        'gamma': hp.choice('gamma', [0, hp.loguniform('gamma_positive', -16, 2)])
    }
    # Untuned baseline (XGBoost library defaults).
    self.default_params = {
        'eta': 0.3,
        'max_depth': 6,
        'subsample': 1.0,
        'colsample_bytree': 1.0,
        'colsample_bylevel': 1.0,
        'min_child_weight': 1,
        'alpha': 0,
        'lambda': 1,
        'gamma': 0
    }
    self.default_params = self.preprocess_params(self.default_params)
    self.title = 'XGBoost'
def __init__(self):
    """Positioning-scan experiment: generator settings and sweep geometry."""
    Experiment.__init__(self)
    changed = self.settingsChanged  # shared change-notification signal
    self.transmittedPower = ExperimentSlot(parent=self, defaultValue='TransmittedPower')
    self.measurement = ExperimentSlot(parent=self, defaultValue='ReceivedPower')  # ,defaultValue='VoltageCriterion')
    # Stimulus settings.
    self.generatorPower = Property(Power(+10, 'dBm'), changedSignal=changed)
    self.generatorFrequency = ScalarProperty(Frequency(1, 'GHz'), changedSignal=changed,
                                             minimum=Frequency(1, 'Hz'))
    # Scan geometry: start/stop along the swept axis plus fixed x/z position.
    self.startPosition = ScalarProperty(Position(22, 'mm'), changedSignal=changed)
    self.stopPosition = ScalarProperty(Position(-24, 'mm'), changedSignal=changed)
    self.xPosition = ScalarProperty(Position(122, 'mm'), changedSignal=changed)
    self.zPosition = ScalarProperty(Position(84, 'mm'), changedSignal=changed)
    self.numberOfSteps = ScalarProperty(Integer(11), minimum=Integer(1),
                                        maximum=Integer(10001), changedSignal=changed)
def __init__(self):
    """Build every derived data set from the raw cluster information."""
    Experiment.__init__(self)
    # Cluster information must be loaded before anything else.
    self.loadClusters()
    # Derivation steps, executed in dependency order.
    build_steps = (
        self.makeSeedUsersData,
        self.makeFriendListData,
        self.makeLikeVectorData,
        self.makeTweetListLikedByEgoNetworkMembers,
        self.makeLikeCountData,
        self.makeMentionCountData,
        self.makeMutualFriendsCountData,
        self.makeClusterData,
    )
    for step in build_steps:
        step()
def __init__(self, learning_task, n_estimators=5000, max_hyperopt_evals=50,
             counters_sort_col=None, holdout_size=0,
             train_path=None, test_path=None, cd_path=None, num_class=2,
             output_folder_path='./'):
    """LightGBM experiment: hyperopt search space plus library-default parameters.

    learning_task      -- task identifier forwarded to the Experiment base
    n_estimators       -- number of boosting rounds
    max_hyperopt_evals -- budget for hyperopt trials
    num_class          -- number of classes (forwarded to the base)
    """
    Experiment.__init__(self, learning_task, 'lgb', n_estimators, max_hyperopt_evals,
                        True, counters_sort_col, holdout_size,
                        train_path, test_path, cd_path, num_class, output_folder_path)
    # Search space; lambda_l1/l2 mix a point mass at 0 with a log-uniform
    # positive branch via nested hp.choice.
    self.space = {
        'learning_rate': hp.loguniform('learning_rate', -7, 0),
        'num_leaves': hp.qloguniform('num_leaves', 0, 7, 1),
        'feature_fraction': hp.uniform('feature_fraction', 0.5, 1),
        'bagging_fraction': hp.uniform('bagging_fraction', 0.5, 1),
        'min_data_in_leaf': hp.qloguniform('min_data_in_leaf', 0, 6, 1),
        'min_sum_hessian_in_leaf': hp.loguniform('min_sum_hessian_in_leaf', -16, 5),
        'lambda_l1': hp.choice('lambda_l1', [0, hp.loguniform('lambda_l1_positive', -16, 2)]),
        'lambda_l2': hp.choice('lambda_l2', [0, hp.loguniform('lambda_l2_positive', -16, 2)]),
    }
    # Untuned baseline parameters.
    self.default_params = {
        'learning_rate': 0.1,
        'num_leaves': 127,
        'feature_fraction': 1.0,
        'bagging_fraction': 1.0,
        'min_data_in_leaf': 100,
        'min_sum_hessian_in_leaf': 10,
        'lambda_l1': 0,
        'lambda_l2': 0
    }
    self.default_params = self.preprocess_params(self.default_params)
    self.title = 'LightGBM'
def __init__(self, conn, conf_file, setName, title, duration, tcpdump_if, tcpdump_port):
    """Capture experiment: derives host roles from *conn* and a timestamped name."""
    self.conn = conn
    self.conf_file = conf_file
    self.setName = setName
    self.title = title
    self.duration = duration
    self.tcpdump_if = tcpdump_if
    self.tcpdump_port = tcpdump_port
    # Unique run name: title plus the wall-clock second the run was created.
    self.exp_name = 'exp_' + title + '_' + str(int(time.time()))
    # All host roles come from the shared connection object.
    c = self.conn
    self.mon_controller = c.monHostName
    self.iot = c.monHostName  # same host as the Lattice controller
    self.edge = c.clusterMgmHostName  # host running the processing functions (cm for faas / containers)
    self.receiver = c.receiverHostName
    Experiment.__init__(self, conn, self.exp_name, self.setName)
    self.init()
def __init__(self, nFolds, outputDirPath):
    """Cross-validation experiment writing results under *outputDirPath*."""
    Experiment.__init__(self)
    self.nFolds = nFolds  # number of cross-validation folds
    # Make sure the output directory exists before any worker writes to it.
    self.outputDirPath = outputDirPath
    if not os.path.exists(self.outputDirPath):
        os.makedirs(self.outputDirPath)
    # Multi-thread synchronisation state.
    self.threadLock = threading.Lock()
    self.threadList = []
    # Cluster information is needed by every fold.
    self.loadClusters()
def __init__(self, outputDirPath):
    """Collaborative-filtering experiment writing results under *outputDirPath*."""
    Experiment.__init__(self)
    # Make sure the output directory exists before any worker writes to it.
    self.outputDirPath = outputDirPath
    if not os.path.exists(self.outputDirPath):
        os.makedirs(self.outputDirPath)
    # Multi-thread synchronisation state.
    self.threadLock = threading.Lock()
    self.threadList = []
    # Cluster information used by the CF runs.
    self.loadClusters()
    # Accumulator for the simple user-based CF baseline results.
    self.result_baseline_simple_userbased_cf = {}
def __init__(self, line_flag=False):
    """Bridged experiment: build the cross-lingual joint model over the corpora."""
    Experiment.__init__(self, line_flag)
    # Joint model bridging the two corpora through the shared image features.
    self.joint = BridgedJoint(self.english_corpus_dir,
                              self.img_features_npy,
                              self.japanese_corpus_dir,
                              self.img_original_dir,
                              self.img_correspondence_path,
                              self.japanese_original_corpus_dir,
                              BridgedExperiment.PCA_COMPRESS_WORD_DIM,
                              BridgedExperiment.PCA_COMPRESS_IMG_DIM,
                              line_flag)
    self.logger.info("<Initilalizing BridgedExperiment>")
    self.distribution_list = []
def __init__(self):
    """Load (or compute and cache) the user lists needed for the plots.

    Improvements over the original: the duplicated load-pickle-or-compute-
    and-dump logic is factored into one helper, files are closed via `with`
    (previously leaked on a pickle error), and `== True` comparisons dropped.
    """
    Experiment.__init__(self)

    def load_or_build(cache_path, build):
        # Return the pickled value at cache_path; on a miss, compute and cache it.
        if os.path.exists(cache_path):
            with open(cache_path, "rb") as f:
                return pickle.load(f)
        result = build()
        with open(cache_path, "wb") as f:
            pickle.dump(result, f)
        return result

    # Set figure save path
    self.save_path = "/home/changuk/Pictures/" + self.tool.getCurrentTime() + "/"
    if not os.path.exists(self.save_path):
        os.makedirs(self.save_path)

    # Get user list who has mention history
    print("Getting user list who has interaction history...")
    FILE_USERLIST_HAVING_INTERACTION_HISTORY = StaticVariable.ROOTPATH + "userlist_having_interaction_history.pickle"
    self.userlist_having_interaction_history = load_or_build(
        FILE_USERLIST_HAVING_INTERACTION_HISTORY,
        self.dbAdapter.getUserListHavingMentionHistory)

    # Get user list who has like history
    print("Getting user list who has like history...")
    FILE_USERLIST_HAVING_LIKE_HISTORY = StaticVariable.ROOTPATH + "userlist_having_like_history.pickle"
    self.userlist_having_like_history = load_or_build(
        FILE_USERLIST_HAVING_LIKE_HISTORY,
        self.dbAdapter.getUserListHavingLikeHistory)

    # Load clusters information
    self.loadClusters()

    # For plotting correlation coefficient
    self.boxplotData = {}
    self.boxPlotingOrder = []
def __init__(self):
    """Power-supply experiment: output limits, channel selection and timing."""
    Experiment.__init__(self)
    changed = self.settingsChanged  # shared change-notification signal
    # Output voltage and current, each bounded to the supply's safe range.
    self.outputVoltage = ScalarProperty(
        quantities.Voltage(3.3, "V"),
        changedSignal=changed,
        minimum=quantities.Voltage(0.0, "V"),
        maximum=quantities.Voltage(20.0, "V"))
    self.outputCurrent = ScalarProperty(
        quantities.Current(100, "mA"),
        changedSignal=changed,
        minimum=quantities.Current(0.0, "A"),
        maximum=quantities.Current(10.0, "A"))
    self.outputChannel = EnumerateProperty("1", ["1", "2", "3", "4"],
                                           changedSignal=changed)
    # Off/on cycle time and start-up settle delay (no change signal attached).
    self.offOnTime = ScalarProperty(
        quantities.Time(0.5, "s"),
        minimum=quantities.Time(0.0, "s"),
        maximum=quantities.Time(10.0, "s"))
    self.startupTime = ScalarProperty(
        quantities.Time(0.5, "s"),
        minimum=quantities.Time(0.0, "s"),
        maximum=quantities.Time(10.0, "s"))
    # Output state toggling goes through its own signal.
    self.outputOn = BooleanProperty(False, changedSignal=self.onOffChanged)
def __init__(self, name='FluoxetineExperiment', IST=20., FBT=30., WBT=30.,
             MBVT=1., MBDT=5., MBTT=0.2, IGNORE_MD=False, FULLNAME=True,
             BIN_SHIFT=False, use_days='acclimated'):
    """Fluoxetine (B6FLX) experiment over three strains."""
    self.exp_dir = dataDirRoot() + 'Experiments/B6FLX/'
    # The superclass initializer has to be invoked explicitly.
    Experiment.__init__(self, name, IST, FBT, WBT, MBVT, MBDT, MBTT,
                        IGNORE_MD, FULLNAME, BIN_SHIFT, use_days)
    self.short_name = 'B6FLX'
    self.num_strains = 3
    self.initialize()
def __init__(self):
    """Amplifier-switch experiment: switch positions, devices and corrections."""
    Experiment.__init__(self)
    persistance.Dommable.__init__(self)
    self._physicalAmplifier = None
    # Display name -> switch position on the bench.
    self.amplifierSwitchPositions = {
        'None (773D)': 'coupler',
        'Amplifier 1 (Prana)': 'Amplifier 1',
        'Amplifier 2 (Milmega band 1)': 'Amplifier 2',
        'Amplifier 2 (Milmega band 2)': 'Amplifier 2'}
    # Display name -> driver object (None means no amplifier in the path).
    self.amplifierDevices = {
        'None (773D)': None,
        'Amplifier 1 (Prana)': knownDevices['Prana'],
        'Amplifier 2 (Milmega band 1)': knownDevices['Milmega'],
        'Amplifier 2 (Milmega band 2)': knownDevices['Milmega']}
    # BUG FIX: dict.keys() is a view on Python 3 and cannot be concatenated
    # to a list with '+'; list() makes this valid on both Python 2 and 3.
    self.amplifier = EnumerateProperty(
        'Automatic (coupler)',
        ['Automatic (coupler)', 'Automatic (amplifier)'] + list(self.amplifierSwitchPositions.keys()))
    self.amplifier.changed.connect(self.setAmplifier)
    # One correction table per selectable amplifier (dict comprehension
    # replaces the original update()-in-a-loop; same keys, same values).
    self.amplifierCorrections = {amplifier: BestBenchCorrections(amplifier)
                                 for amplifier in self.amplifierSwitchPositions.keys()}
def __init__(self, name='StrainSurveyExperiment', IST=20., FBT=30., WBT=30.,
             MBVT=1., MBDT=5., MBTT=0.2, IGNORE_MD=True, FULLNAME=True,
             BIN_SHIFT=False, use_days='acclimated'):
    """Strain-survey (SS) experiment over sixteen strains."""
    self.exp_dir = dataDirRoot() + 'Experiments/SS_Data_051905_FV/'
    # The superclass initializer has to be invoked explicitly.
    Experiment.__init__(self, name, IST, FBT, WBT, MBVT, MBDT, MBTT,
                        IGNORE_MD, FULLNAME, BIN_SHIFT, use_days)
    self.short_name = 'SS'
    self.num_strains = 16
    self.initialize()
def __init__(self, name='HiFat2Experiment', IST=20., FBT=30., WBT=30.,
             MBVT=1., MBDT=5., MBTT=0.2, IGNORE_MD=True, FULLNAME=True,
             BIN_SHIFT=False, use_days='chow'):
    """High-fat-diet (HFD2) experiment over two strains."""
    self.exp_dir = dataDirRoot() + 'Experiments/HiFat2/'
    # The superclass initializer has to be invoked explicitly.
    Experiment.__init__(self, name, IST, FBT, WBT, MBVT, MBDT, MBTT,
                        IGNORE_MD, FULLNAME, BIN_SHIFT, use_days)
    self.short_name = 'HFD2'
    self.num_strains = 2
    self.initialize()
def __init__(self, calanfpga):
    """Initialise the experiment with an FPGA interface object.

    calanfpga -- FPGA wrapper forwarded unchanged to the Experiment base class.
    """
    Experiment.__init__(self, calanfpga)
def __init__(self):
    """Sweep experiment with pluggable stimulus and measurement slots."""
    Experiment.__init__(self)
    # 21-point sweep from 100 kHz to 20 GHz.
    self.stimulusRange = SweepRange(Frequency(100e3), Frequency(20e9), 21,
                                    changedSignal=self.settingsChanged)
    self.stimulus = ExperimentSlot(parent=self)
    self.measurement = ExperimentSlot(parent=self)  # ,defaultValue='TransmittedPower')
def __init__(self):
    """Disturbance criterion: undisturbed output voltage and allowed margin."""
    Experiment.__init__(self)
    volt = quantities.Voltage  # shorthand for the quantity constructor
    self.undisturbedOutputVoltage = ScalarProperty(
        volt(0., 'V'),
        changedSignal=self.settingsChanged,
        minimum=volt(-500., 'V'),
        maximum=volt(500., 'V'))
    self.voltageMargin = ScalarProperty(
        volt(200., 'mV'),
        changedSignal=self.settingsChanged,
        minimum=volt(0., 'V'),
        maximum=volt(500., 'V'))
def __init__(self):
    """Frequency sweep at fixed generator power with a transmitted-power slot."""
    Experiment.__init__(self)
    self.generatorPower = Property(Power(-10., 'dBm'),
                                   changedSignal=self.settingsChanged)
    # 21-point sweep from 100 kHz to 20 GHz.
    self.frequencies = SweepRange(Frequency(100e3), Frequency(20e9), 21,
                                  changedSignal=self.settingsChanged)
    self.transmittedPower = ExperimentSlot(parent=self)  # ,defaultValue='TransmittedPower')
def __init__(self, learning_task, n_estimators=5000, max_hyperopt_evals=50,
             counters_sort_col=None, holdout_size=0,
             train_path=None, test_path=None, cd_path=None, output_folder_path='./'):
    """XGBoost experiment variant exposing a Parameter-based search space.

    Keeps the full hyperopt space in `self.space`, but the Parameter list
    restricts the actual search to max_depth and gamma; the commented-out
    block below preserves the full candidate list for reference.
    """
    Experiment.__init__(self, learning_task, 'xgb', n_estimators, max_hyperopt_evals,
                        True, counters_sort_col, holdout_size,
                        train_path, test_path, cd_path, output_folder_path)
    # Full hyperopt search space (regularisers mix 0 with a log-uniform branch).
    self.space = {
        'eta': hp.loguniform('eta', -7, 0),
        'max_depth': hp.quniform('max_depth', 2, 10, 1),
        'subsample': hp.uniform('subsample', 0.5, 1),
        'colsample_bytree': hp.uniform('colsample_bytree', 0.5, 1),
        'colsample_bylevel': hp.uniform('colsample_bylevel', 0.5, 1),
        'min_child_weight': hp.loguniform('min_child_weight', -16, 5),
        'alpha': hp.choice('alpha', [0, hp.loguniform('alpha_positive', -16, 2)]),
        'lambda': hp.choice('lambda', [0, hp.loguniform('lambda_positive', -16, 2)]),
        'gamma': hp.choice('gamma', [0, hp.loguniform('gamma_positive', -16, 2)])
    }
    #self.hyperparams = [
    # Parameter(name='eta', param_type='continuous', lower=0.001, upper=1),
    # Parameter(name='max_depth', param_type='integer', lower=2, upper=20),
    # Parameter(name='subsample', param_type='continuous', lower=0.5, upper=1),
    # Parameter(name='colsample_bytree', param_type='continuous', lower=0.5, upper=1),
    # Parameter(name='colsample_bylevel', param_type='continuous', lower=0.5, upper=1),
    # Parameter(name='min_child_weight', param_type='continuous', lower=0.001, upper=1),
    # Parameter(name='alpha', param_type='continuous', lower=0.001, upper=1),
    # Parameter(name='lambda', param_type='continuous', lower=0.001, upper=1),
    # Parameter(name='gamma', param_type='continuous', lower=0.001, upper=1),
    # Parameter(name='n_estimators', param_type='integer', lower=10, upper=10)
    #]
    # Restricted search: only tree depth and the split-loss threshold gamma.
    self.hyperparams = [
        Parameter(name='max_depth', param_type='integer', lower=2, upper=20),
        Parameter(name='gamma', param_type='continuous', lower=0., upper=1.)
    ]
    # Untuned baseline (XGBoost library defaults plus the round budget).
    self.default_params = {
        'eta': 0.3,
        'max_depth': 6,
        'subsample': 1.0,
        'colsample_bytree': 1.0,
        'colsample_bylevel': 1.0,
        'min_child_weight': 1,
        'alpha': 0,
        'lambda': 1,
        'gamma': 0,
        'n_estimators': n_estimators
    }
    self.default_params = self.preprocess_params(self.default_params)
    self.title = 'XGBoost'
    # Underlying library module used to build boosters.
    self.model = xgb
def __init__(self, conn, setName, arraySize, duration):
    """Micro-benchmark experiment; the name encodes the array size."""
    self.setName = setName
    self.arraySize = arraySize
    self.duration = duration
    # e.g. 'micro_1024'
    self.name = 'micro_' + str(arraySize)
    Experiment.__init__(self, conn, self.name, self.setName)
def __init__(self):
    """Load (or compute and cache) the per-ego-network interaction counts.

    Improvements over the original: the load-pickle-or-compute-and-dump
    pattern, repeated four times, is factored into one helper; the per-ego
    min-max-scaled count table construction is likewise shared; files are
    closed via `with` (previously leaked on a pickle error) and `== True`
    comparisons are dropped.
    """
    Experiment.__init__(self)

    def load_or_build(cache_path, build):
        # Return the pickled value at cache_path; on a miss, compute and cache it.
        if os.path.exists(cache_path):
            with open(cache_path, "rb") as f:
                return pickle.load(f)
        result = build()
        with open(cache_path, "wb") as f:
            pickle.dump(result, f)
        return result

    def scaled_counts(count):
        # Per-ego-user min-max-scaled list of count(egouser, friend) values.
        table = {}
        for egouser in self.egousers:
            rawValues = [count(egouser, friend)
                         for friend in self.friendList[egouser]]
            table[egouser] = self.tool.minmaxScalingForArray(rawValues)
        return table

    # Set figure save path
    self.save_path = "/home/changuk/Pictures/" + self.tool.getCurrentTime() + "/"
    if not os.path.exists(self.save_path):
        os.makedirs(self.save_path)

    # Get mention count
    print("Getting mention count...")
    self.mention_count = load_or_build(
        StaticVariable.ROOTPATH + "exp1_mentioncount.pickle",
        lambda: scaled_counts(self.dbAdapter.getMentionCount))

    # Get mutual friends count
    print("Getting mutual friends count...")
    self.mutual_friends_count = load_or_build(
        StaticVariable.ROOTPATH + "exp1_mutualfriends.pickle",
        lambda: scaled_counts(
            lambda ego, friend: self.dbAdapter.getMutualFriendsCount(
                ego, friend, self.friendList[ego], self.friendList[friend])))

    # Get Like count
    print("Getting like count for a user...")
    self.like_count = load_or_build(
        StaticVariable.ROOTPATH + "exp1_likecount.pickle",
        lambda: scaled_counts(self.dbAdapter.getLikeCount))

    # Get Liked count (argument order reversed: friends liking the ego user)
    print("Getting liked count for a user...")
    self.liked_count = load_or_build(
        StaticVariable.ROOTPATH + "exp1_likedcount.pickle",
        lambda: scaled_counts(lambda ego, friend: self.dbAdapter.getLikeCount(friend, ego)))

    # For plotting boxplots
    self.boxplotData = []
    self.pearsonData = {}
    self.spearmanData = {}
def __init__(self):
    """Initialise the experiment; training data is attached later."""
    Experiment.__init__(self)
    # Placeholder for the training set, populated by a later loading step.
    self.train = None
def __init__(self):
    """Averaging experiment: measurement span and averaging-point count."""
    Experiment.__init__(self)
    self.span = ScalarProperty(Frequency(100, 'kHz'),
                               changedSignal=self.settingsChanged,
                               minimum=Frequency(1, 'Hz'))
    # Plain (unit-less, unbounded) property for the averaging count.
    self.numberOfAveragingPoints = Property(100, changedSignal=self.settingsChanged)
def __init__(self, image_iter, window_sampler):
    """Experiment over an image iterator using the given window sampler.

    image_iter     -- iterable of images forwarded to the Experiment base
    window_sampler -- object that draws windows from each image
    """
    Experiment.__init__(self, image_iter)
    self.window_sampler = window_sampler
def __init__(self, image_iter, feature_size):
    """Experiment over an image iterator with a fixed feature size.

    image_iter   -- iterable of images forwarded to the Experiment base
    feature_size -- size of the features extracted per image
    """
    # BUG FIX: the unbound base-class call omitted `self`
    # (Experiment.__init__(image_iter)), which bound image_iter as the
    # instance and left this object uninitialised by the base class.
    Experiment.__init__(self, image_iter)
    self.feature_size = feature_size