def main():
    """Resolve the JS search path, locate the requested app, and serve it.

    The search path is JSPATH (colon-separated) plus this package's own
    app directory.
    """
    appPath, host, port = parseOptions()
    # FIX: filter empty entries — ''.split(':') yields [''], which would
    # abspath to the current working directory and pollute the search path.
    jspath = [p for p in os.environ.get('JSPATH', '').split(':') if p]
    jspaths = [os.path.abspath(os.path.expanduser(path)) for path in jspath]
    appjsPath = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
    if appjsPath not in jspaths:
        jspaths.append(appjsPath)
    from Loader import Loader
    loader = Loader(jspaths)
    # XXXjoe Take this list as a command line argument
    loader.transientExtensions = ['.jss']
    app = None
    if appPath:
        app = loader.searchApp(appPath)
        if not app:
            import sys
            sys.stderr.write('Unable to find app "%s"\n' % appPath)
            return
    from WebServer import WebServer
    WebServer.serve(host, port, loader, app)
def main():
    """Run FilterWrapperHeuristicSearch over each configured ARFF dataset."""
    # FIX: removed the unused `filename` local and the long block of
    # commented-out dataset paths (dead code).
    datasets = []
    # Maps dataset file name -> class attribute index, for datasets whose
    # class attribute is not the default one.
    classAttributeIndices = {}
    baseFolder = '/home/itay/Documents/java/ExploreKit/AutomaticFeatureGeneration-master/ML_Background/Datasets/'
    datasets.append("german_credit.arff")
    loader = Loader()
    randomSeed = 42
    for i in range(1):
        for datasetPath in datasets:
            # 0.66 is the train split fraction passed through to the loader.
            if datasetPath not in classAttributeIndices:
                dataset = loader.readArff(baseFolder + datasetPath, randomSeed,
                                          None, None, 0.66)
            else:
                dataset = loader.readArff(baseFolder + datasetPath, randomSeed,
                                          None, classAttributeIndices[datasetPath], 0.66)
            exp = FilterWrapperHeuristicSearch(15)
            exp.run(dataset, "_" + str(i))
def main(args):
    """Log a user in, record a purchase, then print a bundle recommendation."""
    from Loader import Loader

    loader = Loader(new_bd=False, db_name=args.database)
    cursor = loader.cursor

    # Guard clauses: bail out early if login or purchase fails.
    current_user = login(cursor, args.user)
    if not current_user:
        return 0
    bought = buy_product(cursor, args.product)
    if not bought:
        return 0

    # Static recommendation unless a dynamic parameter was supplied.
    if args.dinamic is None:
        result = get_static_bundle_recomendation(
            cursor, current_user, bought, args.num_users, args.bundle_size, 3)
    else:
        result = get_dinamic_bundle_recomendation(
            cursor, current_user, bought, args.num_users, args.bundle_size,
            args.dinamic)

    print("\nYou bought a {0} ({1}), why don't you buy this too:".format(
        bought.title, bought.id))
    for row in result:
        item = Product.get_product_from_asin(cursor, row[0])
        print(" > {0} ({1})".format(item.title, item.id))
    print("")

    loader.disconnect_database()
    return result
class BurpExtender(IBurpExtender):
    """Burp proxy extension ("hiccupy"): routes in-scope proxy traffic
    through a plugin dispatcher.

    NOTE(review): Jython / Python 2 source (print statement below).
    """

    def registerExtenderCallbacks(self, callbacks):
        # Burp entry point: wire up config, plugin loader and dispatcher.
        self.mCallBacks = callbacks
        self.config = Config(callbacks)
        self.loader = Loader(self.config)
        self.dispatcher = Dispatcher(self.config, self.loader.getPlugins())

    def processProxyMessage(self, messageReference, messageIsRequest, remoteHost, remotePort, serviceIsHttps, httpMethod, path, resourceType, statusCode, responseContentType, message, interceptAction):
        # Hot-reload plugins whose files changed since the last message.
        self.loader.reloadIfChanged()
        url = URL("HTTPS" if serviceIsHttps else "HTTP", remoteHost, remotePort, path)
        # Only dispatch traffic that is inside Burp's configured scope.
        if self.mCallBacks.isInScope(url):
            if messageIsRequest:
                self.dispatcher.processProxyRequest(messageReference, messageIsRequest, remoteHost, remotePort, serviceIsHttps, httpMethod, path, resourceType, statusCode, responseContentType, message, interceptAction)
            else:
                self.dispatcher.processProxyResponse(messageReference, messageIsRequest, remoteHost, remotePort, serviceIsHttps, httpMethod, path, resourceType, statusCode, responseContentType, message, interceptAction)
        # Hand the (possibly plugin-modified) message back to Burp.
        return message

    def processHttpMessage(self, toolName, messageIsRequest, message):
        # Log only requests originating from the Intruder tool.
        if toolName == "intruder" and messageIsRequest:
            print "[%s] %s" % (toolName, message.getRequest())
class BurpExtender(IBurpExtender):
    """Burp proxy extension ("hiccupy") dispatching in-scope traffic to plugins.

    NOTE(review): Jython / Python 2 source (print statement below).
    """

    def registerExtenderCallbacks(self, callbacks):
        # Called once by Burp; sets up config, plugin loader and dispatcher.
        self.mCallBacks = callbacks
        self.config = Config(callbacks)
        self.loader = Loader(self.config)
        self.dispatcher = Dispatcher(self.config, self.loader.getPlugins())

    def processProxyMessage(self, messageReference, messageIsRequest, remoteHost, remotePort, serviceIsHttps, httpMethod, path, resourceType, statusCode, responseContentType, message, interceptAction):
        # Pick up any plugin file changes before processing the message.
        self.loader.reloadIfChanged()
        url = URL("HTTPS" if serviceIsHttps else "HTTP", remoteHost, remotePort, path)
        # Scope check keeps out-of-scope traffic away from plugins.
        if self.mCallBacks.isInScope(url):
            if messageIsRequest:
                self.dispatcher.processProxyRequest(
                    messageReference, messageIsRequest, remoteHost, remotePort,
                    serviceIsHttps, httpMethod, path, resourceType, statusCode,
                    responseContentType, message, interceptAction)
            else:
                self.dispatcher.processProxyResponse(
                    messageReference, messageIsRequest, remoteHost, remotePort,
                    serviceIsHttps, httpMethod, path, resourceType, statusCode,
                    responseContentType, message, interceptAction)
        # Return the message so Burp forwards it (plugins may have edited it).
        return message

    def processHttpMessage(self, toolName, messageIsRequest, message):
        # Log Intruder requests only.
        if toolName == "intruder" and messageIsRequest:
            print "[%s] %s" % (toolName, message.getRequest())
def main():
    """Fetch apt trade data for every region code, post each record to the
    data-lake service, and archive the month's results as JSON."""
    PATH = './data/'
    # Default to the month 15 days in the past so the month is complete.
    now = datetime.now() - timedelta(days=15)
    cur_date = now.strftime('%Y%m')

    # argument
    arg = argparse.ArgumentParser()
    arg.add_argument('--date', type=str, default=cur_date)
    args = arg.parse_args()
    print(args.date)

    # Loader
    codes = Loader.get_codes()
    configs = Loader.get_configs()

    # data set
    apt = AptDetailReader(configs['service_key'])
    result = []
    for code in codes:
        items = apt.DataReader(code, args.date)
        if items is None:
            continue
        result.append(items)
        for item in items:
            requests.post("http://localhost:3691/data-lake/apt-trade-info", data=item)

    # FIX: name the archive after the month actually fetched (args.date),
    # not the computed default, so an explicit --date is honored.
    with open(PATH + args.date + ".json", "w", encoding="utf-8") as make_file:
        json.dump(result, make_file, ensure_ascii=False, indent="\t")
def __init__(self, parent, controller):
    """Build the image-review frame: processing spinner, side menu with
    save/recompute/new-images buttons, an image view, and prev/next controls.

    Widgets are only constructed here; layout/placement happens elsewhere.
    """
    self.parent = parent
    self.controller = controller
    self.images = None   # images to display; populated later — TODO confirm by whom
    self.n_image = 0     # index of the currently shown image
    tk.Frame.__init__(self, parent)
    # Centered spinner shown while processing runs.
    self.loader = Loader(self, controller.color_scheme, 'Processing', fontsize=24)
    self.loader.place(relx=0.5, rely=0.5, anchor='center')
    self.loader.start()
    self.menu = tk.Canvas(self, width=150)
    self.save_indices_button = FlatButton(
        self.menu, controller.color_scheme, 'Save indices', 16,
        lambda x: save_indices(self, controller))
    self.recompute_button = FlatButton(
        self.menu, controller.color_scheme, 'Recompute', 16,
        lambda x: recompute(self.controller))
    self.new_images_button = FlatButton(
        self.menu, controller.color_scheme, 'New images', 16,
        lambda x: new_images(self.controller))
    self.view = tk.Canvas(self)
    self.image_view = tk.Label(self.view)
    # Navigation controls for stepping through the loaded images.
    self.controls = tk.Canvas(self, height=80, width=100)
    self.next_button = FlatButton(self.controls, controller.color_scheme, '>', 30,
                                  lambda x: self.show_next())
    self.prev_button = FlatButton(self.controls, controller.color_scheme, '<', 30,
                                  lambda x: self.show_prev())
def main():
    """Train a text classifier on per-topic folders, then classify test files.

    NOTE(review): Python 2 source (dict.iteritems, print statement).
    """
    folders = {}
    folders["politik"] = "data/politik"
    folders["sport"] = "data/sport"
    folders["wirtschaft"] = "data/wirtschaft"
    bank = ClassBank()
    l = Loader()
    # train data: concatenate all training .txt files of a topic into one
    # Class instance (content plus document count).
    for classname, folder in folders.iteritems():
        count = 0
        content = ""
        for file in os.listdir(folder + "/train/"):
            if file.endswith(".txt"):
                count = count + 1
                content = content + " " + l.load_txt(folder + "/train/" + file)
        c = Class(classname, content, count)
        bank.addClass(c)
    bank.train()
    c = Classifier()
    # test data: tokenize each test file and print the predicted class.
    for classname, folder in folders.iteritems():
        print "\n=== Testing",classname, "===\n"
        for file in os.listdir(folder + "/test/"):
            if file.endswith(".txt"):
                tokenizer = Tokenizer(l.load_txt(folder + "/test/" + file))
                classifiedClass = c.classify(tokenizer.getTokens(), bank)
                print file,"=",classifiedClass.getName()
def __init__(self, path):
    """Load the level assets (description, colours, music) found at *path*."""
    self.path = path
    self.right_place = False
    # Pull each asset for this level from its directory.
    asset_loader = Loader()
    self.description = asset_loader.load(self.path, "leveldata")
    self.colours = asset_loader.load(self.path, "entity_colours")
    self.song = asset_loader.compose(self.path, "music")
    self.cat = self.set_cat()
def selector_first_word(self, operation):
    """Dispatch a parsed command (its first word) to the matching Loader action.

    FIX: bare ``except:`` clauses narrowed to ``except Exception:`` so that
    SystemExit/KeyboardInterrupt are no longer swallowed; behavior is
    otherwise unchanged.
    """
    if operation == 'cargar':
        print('usted cargara el/los siguiente(s) archivo(s) json ' + str(self.todos_comandos[1:]))
        self.loader = Loader(self.todos_comandos[1:])
        self.loader.plus_dot_json()
        self.loader.load_file()
    elif operation == 'seleccionar':
        if self.todos_comandos[1] == '*':
            self.loader.print_file()
        else:
            contador = 0
            # Scan for a 'donde' (where) clause; everything after it is the
            # filter condition.
            for element in self.todos_comandos:
                try:
                    if element == 'donde':
                        if type(self.todos_comandos[contador + 3]) == int:
                            pass
                        else:
                            condicion = self.todos_comandos[contador + 3:]
                            condicion_str = ''
                            if len(condicion) >= 2:
                                for elemento in condicion:
                                    condicion_str += elemento + ' '
                                condicion_str = condicion_str[0:-1]
                            else:
                                for elemento in condicion:
                                    condicion_str += '' + elemento
                            self.loader.condition(self.todos_comandos[1:contador],
                                                  self.todos_comandos[contador + 1],
                                                  condicion_str)
                            return
                except Exception:
                    print('Revise sus datos')
                else:
                    contador += 1
            self.loader.print_select(self.todos_comandos[1:])
    elif operation == 'maximo':
        try:
            self.loader.print_maximo(self.todos_comandos[1])
        except Exception:
            print('Estos valores no son numeros')
    elif operation == 'minimo':
        try:
            self.loader.print_minimo(self.todos_comandos[1])
        except Exception:
            print('Estos valores no son numeros')
    elif operation == 'suma':
        try:
            self.loader.print_total_suma(self.todos_comandos[1])
        except Exception:
            print('Estos valores no son numeros')
    elif operation == 'cuenta':
        self.loader.print_cuenta()
    elif operation == 'reportar':
        pass  # not implemented
    elif operation == 'exit':
        print('adios wapo')
    else:
        print('no escogio ningun comando')
def __init__(self, parent, controller):
    """Build the parameter-entry frame: spinner, description, k_ult/k_out
    entries, and a '.process()' button. Widgets are created here only;
    layout happens elsewhere."""
    self.parent = parent
    self.controller = controller
    tk.Frame.__init__(self, parent)
    # Centered spinner shown while loading runs.
    self.loader = Loader(self, controller.color_scheme, 'Loading', fontsize=24)
    self.loader.place(relx=0.5, rely=0.5, anchor='center')
    self.loader.start()
    self.description = tk.Label(self, text=TEXT, font=('Monaco', 13),
                                fg=controller.color_scheme['comments'],
                                justify=tk.LEFT)
    self.k_ult = tk.Label(self, text='k_ult =', font=('Monaco', 16), justify=tk.LEFT)
    self.k_out = tk.Label(self, text='k_out =', font=('Monaco', 16), justify=tk.LEFT)
    self.canvas = tk.Canvas(self, width=400, height=140)
    self.process = FlatButton(self.canvas, controller.color_scheme, '.process()',
                              fontsize=30, target=lambda event: self.target())
    self.pic = tk.Label(self.canvas, text=PIC, font=('Monaco', 10), justify=tk.CENTER)
    # Entry variables with defaults; read back when .process() is clicked.
    self.k_ult_var = tk.StringVar()
    self.ultEntry = tk.Entry(self, width=5, font=('Monaco', 16),
                             textvariable=self.k_ult_var,
                             highlightthickness=0, borderwidth=0)
    self.k_ult_var.set('10.0')
    self.k_out_var = tk.StringVar()
    self.outEntry = tk.Entry(self, width=5, font=('Monaco', 16),
                             textvariable=self.k_out_var,
                             highlightthickness=0, borderwidth=0)
    self.k_out_var.set('3.0')
def generateMetaFeaturesInstances(self, includeValueBased: bool):
    """Create meta-feature instances for each background dataset that has no
    instances folder yet (folder name = dataset file name + random seed)."""
    for datasetForBackgroundModel in self.getOriginalBackgroundDatasets():
        possibleFolderName = (Properties.DatasetInstancesFilesLocation
                              + FileUtils.getFilenameFromPath(datasetForBackgroundModel)
                              + '_' + str(Properties.randomSeed))
        # Guard clause: skip datasets already processed.
        if os.path.isdir(possibleFolderName):
            continue
        Logger.Info("Getting candidate attributes for " + datasetForBackgroundModel)
        backgroundDataset = Loader().readArff(datasetForBackgroundModel,
                                              int(Properties.randomSeed),
                                              None, None, 0.66)
        self.createDatasetMetaFeaturesInstances(backgroundDataset, includeValueBased)
def train():
    """Train the LSTM on padded fbank features with early stopping and
    best-model checkpointing.

    Relies on module-level config: data_folder, validation_size, batch_size,
    epochs, model_name, build_model.
    """
    loader = Loader(data_folder)
    X, X_length, Y, max_length = loader.load_training_data(
        data_folder + '/fbank/train.ark', data_folder + '/label/train.lab')
    print('max length: {}'.format(max_length))
    # Pad all sequences to the longest one (zeros appended at the end).
    X_padded = keras.preprocessing.sequence.pad_sequences(
        X, dtype='float32', maxlen=max_length, padding='post')
    Y_padded = keras.preprocessing.sequence.pad_sequences(
        Y, dtype='float32', maxlen=max_length, padding='post')
    print('X length: {}'.format(len(X_length)))
    print('Y length: {}'.format(len(Y_padded)))
    # Mark every padded timestep with class 0 in the label tensor —
    # presumably class 0 is the blank/silence label; TODO confirm.
    for length, sentence in zip(X_length, Y_padded):
        sentence[length:, 0] = 1.
    # Hold out the last `validation_size` sequences for validation.
    x_val = X_padded[X_padded.shape[0] - validation_size:]
    y_val = Y_padded[X_padded.shape[0] - validation_size:]
    x_train = X_padded[:X_padded.shape[0] - validation_size]
    y_train = Y_padded[:X_padded.shape[0] - validation_size]
    print('X shape: {}'.format(X_padded.shape))
    print('Y shape: {}'.format(Y_padded.shape))
    lstm_model = build_model(X_padded.shape[1], X_padded.shape[2])
    print(lstm_model.summary())
    callbacks = [
        # Stop when val_loss plateaus for 2 epochs.
        keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=0,
                                      patience=2, verbose=0, mode='auto'),
        # Keep only the best model on disk.
        keras.callbacks.ModelCheckpoint(model_name, monitor='val_loss', verbose=0,
                                        save_best_only=True, save_weights_only=False,
                                        mode='auto', period=1)
    ]
    lstm_model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs,
                   validation_data=(x_val, y_val), callbacks=callbacks)
    lstm_model.save(model_name)
def load_mails_info(self):
    """Load the user's labels and mails, then sort label names by frequency."""
    self.load_labels()
    self.loader = Loader(self.service, self.directory, self.label_ids)
    self.load_mails()
    # Most-used labels first.
    self.counts = self.mails['label'].value_counts()
    self.label_names.sort(key=lambda name: self.counts[name], reverse=True)
    DataCleaner.pack(self.mails)
def test():
    """Run the trained LSTM over the test set and write predictions to CSV.

    Relies on module-level config: data_folder, model_name, output_filename.
    """
    loader = Loader(data_folder)
    Test_X, Test_X_length, Test_X_id, max_length = loader.load_testing_data(
        data_folder + '/fbank/test.ark')
    # NOTE(review): maxlen is hardcoded to 777 (presumably the training-time
    # max length the model expects) rather than this set's max_length —
    # confirm it matches the saved model's input size.
    Test_X_padded = keras.preprocessing.sequence.pad_sequences(
        Test_X, dtype='float32', maxlen=777, padding='post')
    # Add a trailing channel axis for the model input.
    Test_X_padded = np.expand_dims(Test_X_padded, axis=3)
    print('max length: {}'.format(max_length))
    print('Test X shape: {}'.format(Test_X_padded.shape))
    lstm_model = load_model(model_name)
    Predict_Y = lstm_model.predict(Test_X_padded)
    loader.transfer_csv(Predict_Y, Test_X_length, Test_X_id, output_filename)
    print('Finished')
def main():
    """Load CSV samples and labels, train Model m1, and report accuracy."""
    # Data
    print("Getting dir infos...")
    files = [f for f in os.listdir(DATA_PATH) if f.rfind('.csv') != -1]
    files = files[:5000]
    # PERF FIX: membership tests against a set are O(1); the original tested
    # against the list inside the comprehension (O(n) per label).
    file_set = set(files)

    print("Loading Labels...")
    # FIX: np.int was deprecated/removed in modern NumPy; use builtin int.
    y_csv = np.loadtxt(LABL_PATH,
                       dtype={'names': ('Id', 'Class'), 'formats': ('|S20', int)},
                       delimiter=',', skiprows=1)
    # Keep only labels whose sample file is actually present.
    y_tmp = [d[1] for d in y_csv if d[0].decode('utf-8') + '.csv' in file_set]
    # One-hot encode the class ids.
    y_data = []
    for t in y_tmp:
        row = [0] * Model.SIZE_Y
        row[t] = 1
        y_data.append(row)
    y_data = np.array(y_data)

    print("Loading Datas...")
    loader = Loader()
    x_data = np.empty([len(files), loader.SIZE**2])
    for i in range(len(files)):
        x_data[i] = loader.load(os.path.join(DATA_PATH, files[i]))

    # make test split: last 30% for test, first 70% for training.
    x_test = x_data[-int(0.3 * len(x_data)):]
    x_data = x_data[:int(0.7 * len(x_data))]
    y_test = y_data[-int(0.3 * len(y_data)):]
    y_data = y_data[:int(0.7 * len(y_data))]

    # Tensorflow
    m1 = Model("m1")
    m1.set(16, 10, 0.01)
    if LOAD or TEST:
        m1.load("./model")
    if not TEST:
        print("Learning Start...")
        for epoch in range(EPOCH):
            # cost, optimizer
            cost, _, _ = m1.train(x_data, y_data)
            if epoch % 100 == 0:
                print("In", str(epoch) + "... cost:", cost)
        print("Learning Finished")
    print("Accuracy :", m1.accuracy(x_test, y_test))
    if SAVE:
        m1.save("./model/ckpt")
class game:
    """Top-level game object: owns the screen stack, sound, and main-loop
    hooks (draw / handle_event / update)."""

    def __init__(self, surface, clock):
        self.loader = Loader()
        self.loader.init_map()
        self.sound = SoundManager()
        self.sound.init_sound_manager()
        self.clock = clock
        self.alive = True
        self.screen = surface
        self.width = surface.get_size()[0]
        self.height = surface.get_size()[1]
        self.screens = [AuthorScreen(3)]
        self.current_screen = 0
        self.screens[self.current_screen].on_focus()

    def reset(self):
        # Jump back to the first screen.
        self.current_screen = 0
        self.screens[self.current_screen].on_focus()
        #self.layers.append(FPSScreen(self.clock))

    def draw(self):
        # INCREDIBLY wasteful... (full clear every frame)
        self.screen.fill((0, 0, 0))
        self.screens[self.current_screen].draw(self.screen)
        return True

    def handle_event(self, event):
        if event.type == QUIT:
            self.alive = False
        elif event.type == KEYDOWN:
            self.changed = True
            if event.key == K_ESCAPE:
                self.alive = False
        try:
            self.screens[self.current_screen].handle_event(event)
        except AttributeError:
            # FIX: original used the bare expression `True` here, which does
            # nothing; screens without handle_event are simply skipped.
            pass

    def update(self):
        # If the current screen returns FALSE we should move to the next
        # screen in the list; return False when the last screen finishes.
        retval = self.screens[self.current_screen].update()
        if retval == False:
            if self.current_screen < len(self.screens) - 1:
                self.current_screen += 1
                self.screens[self.current_screen].on_focus()
                return True
            else:
                return False
        else:
            return True
def read_topology():
    """ Read the whole topology from the file """
    Loader.load_topo_init()
    relays = []
    for _, node in Loader.HOSTS_TABLE.items():
        # Only nodes whose name contains 'relay' are part of the relay list.
        if 'relay' not in node['name'].lower():
            continue
        forwards = []
        for neighbor in node['neighbors']:
            host, port = neighbor.split(':')
            forwards.append({'forward_host': host, 'forward_port': port})
        relays.append((node['host'], node['port'], forwards))
    return relays
class CoverSearch (threading.Thread):
    """Background thread that searches for an album cover and saves it to
    /tmp, then invokes a callback with the saved path (or False)."""

    loader = None
    Result = False
    artist = ""
    album = ""
    callback_fn = False
    AlbumCover = "/tmp/nowplaying-album.jpg"

    def __init__(self):
        self.loader = Loader()
        self.engine = AmazonCoverArtSearch(self.loader)
        self.Result = False
        threading.Thread.__init__(self)

    def initData(self, artist, album, fn):
        # Set the search terms and completion callback before start().
        self.artist = artist
        self.album = album
        self.callback_fn = fn

    def saveimg(self, data):
        # FIX: JPEG data is binary — open in "wb"; text mode corrupts bytes
        # (newline translation) and rejects bytes on Python 3.
        fobj = open(self.AlbumCover, "wb")
        fobj.write(data)
        fobj.close()
        self.Result = True

    def cb(self, itm, artist, album, result, *args):
        # Only accept results that still match the current search terms.
        data = self.engine.get_best_match_urls(result)
        if data and self.artist == artist and self.album == album:
            #print data[0]
            self.loader.get_url(data[0], self.saveimg)

    def run(self):
        if os.path.exists(self.AlbumCover):
            os.remove(self.AlbumCover)
        self.Result = False
        self.engine.search (self.artist, self.album, self.cb)
        # Poll search pages until a cover is saved or results run out.
        while True:
            if self.Result:
                break
            if not self.engine.search_next ():
                break
        cover = False
        if os.path.exists(self.AlbumCover):
            cover = self.AlbumCover
        #print threading.currentThread()
        self.callback_fn(cover)
        return None
def main():
    """Build/restore a user-based CF model (Pearson similarities + user means),
    then produce top-10 predictions.

    NOTE(review): Python 2 source (print statements). Cached matrices are
    pickled under temp/user_base/ and reused when present.
    """
    loader = Loader(FILE_PATH, PREDICT_PATH)
    loader.load_user_base()
    user_base = UserBase(loader, 5)
    # Reuse the cached Pearson matrix if it exists; otherwise compute and cache.
    try:
        user_base.pearson = pickle.load(open('temp/user_base/pearson.matrix', 'rb'))
    except Exception:
        user_base.similarities()
        pickle.dump(user_base.pearson, open('temp/user_base/pearson.matrix', 'wb'),
                    protocol=-1)
        print '> PEARSON SAVE'
    # Same caching scheme for per-user means.
    try:
        user_base.mean = pickle.load(open('temp/user_base/user_mean.p', 'rb'))
    except Exception:
        user_base.generate_mean()
        pickle.dump(user_base.mean, open('temp/user_base/user_mean.p', 'wb'))
    print '> Testing model'
    # Disabled k-fold evaluation experiment, kept for reference.
    # k_values = [5, 10, 20, 30]
    # results = {}
    # results.setdefault('RMSE', [])
    # results.setdefault('Precision', [])
    # results['K'] = k_values
    # user_base.mean_item()
    # kf = cross_validation.KFold(len(user_base.loader.users), n_folds=5)
    # RMSE = 0.
    # precision = 0.
    # for k in k_values:
    #     user_base.k = k
    #     for train_index, test_index in kf:
    #         user_base.test_idx = test_index
    #         user_base.train_idx = train_index
    #         user_base.knn()
    #         RMSE += user_base.test_error(test_index)
    #         precision += user_base.test_topN(test_index)
    #     print str.format('RMSE [k={0}] = {1}', k, RMSE/5)
    #     print str.format('PRECISION@10 [k={0}] = {1}', k, precision/5)
    #     results['RMSE'].append(RMSE / 5)
    #     results['Precision'].append(precision / 5)
    # print results
    print '> Make predictions'
    user_base.mean_item()
    user_base.knn()
    user_base.top10()
class CoverSearch(threading.Thread):
    """Background thread that searches for an album cover, saves it to /tmp,
    and calls back with the saved path (or False on failure)."""

    loader = None
    Result = False
    artist = ""
    album = ""
    callback_fn = False
    AlbumCover = "/tmp/nowplaying-album.jpg"

    def __init__(self):
        self.loader = Loader()
        self.engine = AmazonCoverArtSearch(self.loader)
        self.Result = False
        threading.Thread.__init__(self)

    def initData(self, artist, album, fn):
        # Search terms and completion callback; call before start().
        self.artist = artist
        self.album = album
        self.callback_fn = fn

    def saveimg(self, data):
        # FIX: open in binary mode — JPEG bytes written through text mode are
        # corrupted by newline translation (and rejected on Python 3).
        fobj = open(self.AlbumCover, "wb")
        fobj.write(data)
        fobj.close()
        self.Result = True

    def cb(self, itm, artist, album, result, *args):
        # Ignore stale results from a previous search.
        data = self.engine.get_best_match_urls(result)
        if data and self.artist == artist and self.album == album:
            #print data[0]
            self.loader.get_url(data[0], self.saveimg)

    def run(self):
        if os.path.exists(self.AlbumCover):
            os.remove(self.AlbumCover)
        self.Result = False
        self.engine.search(self.artist, self.album, self.cb)
        # Step through result pages until a cover lands or pages run out.
        while True:
            if self.Result:
                break
            if not self.engine.search_next():
                break
        cover = False
        if os.path.exists(self.AlbumCover):
            cover = self.AlbumCover
        #print threading.currentThread()
        self.callback_fn(cover)
        return None
def select_pool(self, population, fitness, selection_pair, GA, size):
    """Split *size* between the two selection operators of the chosen pair
    ('A' -> operators 1/2 with Config.A, else 3/4 with Config.B) and return
    the combined selection."""
    if selection_pair == 'A':
        first, second = 1, 2
        factor = Config.A
    else:
        first, second = 3, 4
        factor = Config.B
    size_1 = int(factor * size)
    size_2 = size - size_1
    pool = Loader.select(first)(population, fitness, GA, size_1)
    pool.extend(Loader.select(second)(population, fitness, GA, size_2))
    return pool
def __init__(self, filename, k, errorcorrect=False):
    """Read the reads in *filename* and build a k-mer graph from them."""
    # loads file
    reads = Loader.load(filename)
    self.k = k
    # gets graph (optionally with error correction applied)
    self.graph = Graph(reads, k, errorcorrect)
def __init__(self, caption, font, size, color, position, camera):
    """Render *caption* as a font texture on a quad placed at *position*."""
    mesh = Loader("./resources/models/quad.obj")
    self.font = Font(caption, font, color[0], color[1], color[2])
    self.quad = Object(mesh, camera, self.font)
    # Rotate the quad 90 degrees about Y.
    self.quad.model['rotation'] = [0, math.pi / 2, 0]
    self.quad.translate(*position)
    # Width grows with caption length; height with the base size.
    self.quad.scale(size * 0.04 * len(caption) / 10, 1, size * 0.1 / 10)
def importKey(self, acceptKeyId):
    """Import a plugin signing key from the staged zip.

    NOTE(review): the unconditional return below disables this feature
    entirely — everything after it is unreachable dead code. Confirm
    whether it should be deleted or the feature re-enabled.
    """
    return {
        'success': False,
        'msg': 'Importing of custom keys are not allowed'
    }
    filename = '%s/staging.zip' % Board.pluginPath()
    if not os.path.exists(filename):
        return {'success': False, 'msg': 'No plugin uploaded'}
    try:
        gpg = loadGPG()
        with zipfile.ZipFile(filename, 'r') as z:
            # SECURITY: yaml.load without SafeLoader can execute arbitrary
            # constructors on untrusted manifest data — prefer yaml.safe_load.
            cfg = yaml.load(z.read('manifest.yml'))
            k = z.extract(cfg['key'], '/tmp/')
        keys = gpg.scan_keys(k)
        if len(keys) != 1:
            raise Exception(
                'Key must only contain exactly one public key')
        key = keys[0]
        name = key['uids']
        fingerprint = key['fingerprint']
        keyid = key['keyid']
        # If the caller has not yet confirmed this exact key id, return the
        # key details so the UI can ask for confirmation.
        if keyid != acceptKeyId:
            return {
                'name': name,
                'fingerprint': fingerprint,
                'keyid': keyid
            }
        result = gpg.import_keys(open(k).read())
        os.unlink(k)
        # Reload loaded keys
        Loader(self.context).initializeKeychain()
    except Exception as e:
        os.unlink(filename)
        return {'success': False, 'msg': str(e)}
    return {'success': True}
def __init__(self, editor):
    """Initialize the plugin: set up attributes, then construct the GUI
    Manager and the Loader — both are instantiated purely for their side
    effects (they register themselves against this object and the editor)."""
    GObject.__init__(self)
    self.__init_attributes(editor)
    from GUI.Manager import Manager
    Manager(self, editor)
    from Loader import Loader
    Loader(self, editor)
def generateBackgroundARFFFileForDataset(self, dataset: Dataset, backgroundFilePath: str,
                                         candidateAttrDirectories: list, includeValueBased: bool):
    """Concatenate candidate-attribute ARFF content from every directory that
    does not belong to *dataset* into the background file, writing the ARFF
    header only once."""
    addHeader = True
    for candidateAttrDirectory in candidateAttrDirectories:
        # Skip the target dataset's own directory, and directories that do
        # not exist (listFilesInDir returns None for a missing dir).
        if dataset.name in candidateAttrDirectory:
            continue
        if FileUtils.listFilesInDir(candidateAttrDirectory) is None:
            continue
        merged = self.getMergedFile(candidateAttrDirectory, includeValueBased)
        if merged is not None:
            MLAttributeManager.addArffFileContentToTargetFile(
                backgroundFilePath, merged[0].getAbsolutePath(), addHeader)
            addHeader = False
        else:
            # No pre-merged file: read each candidate ARFF and merge ourselves.
            instances = []
            # FIX: use FileUtils.listFilesInDir (the bare name was undefined)
            # and the `in` operator — Python str has no .contains() (Java-ism),
            # so the original raised AttributeError here.
            for file in FileUtils.listFilesInDir(candidateAttrDirectory):
                if ('.arff' in file
                        and not (not includeValueBased and self.VALUES_BASED in file)
                        and 'merged' not in file):
                    absFilePath = os.path.abspath(file)
                    instance = Loader().readArffAsDataframe(absFilePath)
                    instances.append(instance)
                else:
                    Logger.Info(f'Skipping file: {file}')
            mergedFile = self.mergeInstancesToFile(includeValueBased,
                                                   candidateAttrDirectory, instances)
            if mergedFile is None:
                continue
            self.addArffFileContentToTargetFile(backgroundFilePath,
                                                FileUtils.getAbsPath(mergedFile),
                                                addHeader)
            addHeader = False
def writeinput(self, filepath):
    """Write the MacMaille input .dat file: title, option line, optional
    keyword parameters, then the 2-theta/intensity data rows.

    NOTE(review): reconstructed from flattened source — confirm that the
    '!!!' separator and the data loop run regardless of keywordflag.
    """
    # writes the input .dat file
    data = Loader().load_data_mac(self.data)
    data1 = data[0:20, (3, 4)]  # columns 3,4 of the first 20 rows
    # next write dat file named title.dat
    with open(filepath + self.title + '.dat', 'w') as f:
        f.write(self.title + '\n')  # set title
        options = str(self.wavelength) + " " + str(self.zeropoint) + " " + str(self.ngrid) + '\n'
        f.write(options)  # write in file options
        if self.keywordflag == 1:
            # check none of the items are None else return error
            self.check_keywords()
            # after check, write the additional parameters in the format
            # required by macmaille
            d = self.dict_list
            f.write(d['Symmetry codes'] + "\n")
            f.write(d['W'] + ' ' + d['Nind'] + "\n")
            f.write(d['Pmin'] + " " + d['Pmax'] + " " + d['Vmin'] + " " + d['Vmax'] + " " +
                    d['Rmin'] + " " + d['Rmax'] + " " + d['Rmaxref'] + "\n")
            f.write(d['Spar'] + " " + d['Sang'] + "\n")
            f.write(d['Ntests'] + " " + d['Nruns'] + "\n")
        f.write("!!! \n")  # blank line signals beginning of data
        for x in data1:
            a = '%f' % x[0]  # plain float formatting, no scientific notation
            b = '%f' % x[1]
            f.write(a + " " + b + "\n")  # combine values to make one line
    # FIX: removed the redundant f.close() — the with-block closes the file.
class PCCharacter(object):
    """A player character: bio info, the seven core stats, and the plugin
    loaders for races and classes.

    NOTE(review): Python 2 source (print statement in add_level).
    """

    def __init__(self):
        # Basic bio info
        self.name = ""
        self.classPerLevel = []   # one class entry per character level
        self.race = None
        self.age = 0
        self.gender = ""
        self.height = ""
        self.weight = ""
        # Core stat objects; each receives a back-reference to this character.
        self.DEX = PCStatDEX.PCStatDEX(self)
        self.PERC = PCStatPERC.PCStatPERC(self)
        self.VIT = PCStatVIT.PCStatVIT(self)
        self.STR = PCStatSTR.PCStatSTR(self)
        self.INT = PCStatINT.PCStatINT(self)
        self.SCI = PCStatSCI.PCStatSCI(self)
        self.WILL = PCStatWILL.PCStatWILL(self)
        # init races
        self._race_loader = Loader(PCRaceBase.PCRaceBase, "./PCRace")
        # init classes
        self._class_loader = Loader(PCClassBase.PCClassBase, "./PCClass")

    def run(self):
        '''
        Run all necessary calculations :o
        '''
        pass

    def get_class_types(self):
        '''
        Return a list of all valid classes as (name, short) tuples.
        '''
        return [(c.name, c.short) for c in self._class_loader.get_classes()]

    def get_race_types(self):
        '''
        Return a list of all valid races as (name, short) tuples.
        '''
        return [(c.name, c.short) for c in self._race_loader.get_classes()]

    def add_level(self, class_):
        # TODO: not implemented yet — only logs the requested class.
        print "Adding level:", class_
        pass
class AuthorScreen(AbstractScreen):
    """Splash screen showing the framework credits, fading in on focus and
    fading out after *ttl* seconds (or on any key/mouse press)."""

    def __init__(self, ttl):
        AbstractScreen.__init__(self)
        self.loader = Loader()
        self.state = State()
        large_font = self.loader.load_font("slkscr", 72)
        med_font = self.loader.load_font("slkscr", 32)
        small_font = self.loader.load_font("slkscr", 16)
        # Labels are positioned relative to the screen center.
        x = self.state.screen_size[0] / 2
        y = self.state.screen_size[1] / 2
        self.labels = [
            Label("a rapid framework", med_font, (255, 96, 96), [x - 280, y - 30]),
            Label("Bebop", large_font, (255, 255, 255), [x - 180, y - 10]),
            Label("for game compos", small_font, (255, 255, 255), [x - 70, y + 50])
        ]
        # Fade all labels in.
        for label in self.labels:
            label.goal_alpha = 255
        # Set up fade timer
        self.ttl = ttl
        self.sound = SoundManager()

    def on_focus(self):
        # Start the ttl countdown and the music when the screen gains focus.
        self.start = pygame.time.get_ticks()
        self.fade_out = self.start + self.ttl * 1000
        self.alive = True
        self.sound.start_music(0)

    def update(self):
        AbstractScreen.update(self)
        time = pygame.time.get_ticks()
        # First pass after the deadline: begin fading labels out.
        if time >= self.fade_out and self.alive:
            self.alive = False
            for label in self.labels:
                label.goal_alpha = 0
        # Once fully faded, signal the caller to advance (return False).
        elif time >= self.fade_out and self.labels[0].alpha == 0:
            return False
        for label in self.labels:
            label.update()
        return True

    def handle_event(self, event):
        # Any key or click skips the splash by moving the deadline to "now".
        if event.type == pygame.locals.KEYDOWN or event.type == pygame.locals.MOUSEBUTTONDOWN:
            self.fade_out = self.start

    def draw(self, screen):
        for label in self.labels:
            label.draw(screen)
        return True
def train():
    """Train the LSTM on padded fbank features using per-timestep sample
    weights that zero out the padded tail of each sequence.

    Relies on module-level config: data_folder, validation_size, batch_size,
    epochs, model_name, build_model. Saves the model on KeyboardInterrupt
    and again on normal completion.
    """
    loader = Loader(data_folder)
    X, X_length, Y, max_length = loader.load_training_data(
        data_folder + '/fbank/train.ark', data_folder + '/label/train.lab')
    print('max length: {}'.format(max_length))
    # Pad all sequences to the longest one (zeros appended at the end).
    X_padded = keras.preprocessing.sequence.pad_sequences(
        X, dtype='float32', maxlen=max_length, padding='post')
    Y_padded = keras.preprocessing.sequence.pad_sequences(
        Y, dtype='float32', maxlen=max_length, padding='post')
    # Add a trailing channel axis for the model input.
    X_padded = np.expand_dims(X_padded, axis=3)
    print('X length: {}'.format(len(X_length)))
    print('Y length: {}'.format(len(Y_padded)))
    # Per-timestep weights: 1 for real frames, 0 for padding; padded label
    # frames are also forced to class 0.
    sample_weightes = []
    for length, sentence in zip(X_length, Y_padded):
        sentence[length:, 0] = 1.
        weights = [0.] * len(sentence)
        weights[:length] = [1. for _ in range(length)]
        sample_weightes.append(weights)
    sample_weightes = np.array(sample_weightes, dtype='float32')
    sample_weightes = sample_weightes[:X_padded.shape[0] - validation_size]
    # Hold out the last `validation_size` sequences for validation.
    x_val = X_padded[X_padded.shape[0] - validation_size:]
    y_val = Y_padded[X_padded.shape[0] - validation_size:]
    x_train = X_padded[:X_padded.shape[0] - validation_size]
    y_train = Y_padded[:X_padded.shape[0] - validation_size]
    print('X shape: {}'.format(X_padded.shape))
    print('Y shape: {}'.format(Y_padded.shape))
    lstm_model = build_model(X_padded.shape[1], X_padded.shape[2])
    print(lstm_model.summary())
    # NOTE(review): callbacks are built but commented out of fit() below.
    callbacks = [
        keras.callbacks.EarlyStopping(monitor='val_acc', min_delta=0,
                                      patience=2, verbose=0, mode='auto'),
        keras.callbacks.ModelCheckpoint(model_name, monitor='val_acc', verbose=0,
                                        save_best_only=True, save_weights_only=False,
                                        mode='auto', period=1)
    ]
    try:
        lstm_model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs,
                       validation_data=(x_val, y_val),
                       #callbacks=callbacks,
                       sample_weight=sample_weightes
                       )
    except KeyboardInterrupt:
        # Preserve progress if training is interrupted manually.
        lstm_model.save(model_name)
    lstm_model.save(model_name)
def getInstancesFromARFF(self, backgroundFilePath: str):
    """Load '<backgroundFilePath>.arff' into a dataframe and return it.

    FIX: removed the leftover commented-out Java (BufferedReader/ArffLoader)
    from the original port, and log before the (potentially slow) read.
    """
    Logger.Info('reading from file ' + backgroundFilePath + '.arff')
    data = Loader().readArffAsDataframe(backgroundFilePath + '.arff')
    return data
def main():
    """Find the most recent month (2016-01..2019-12) without a local JSON
    archive, fetch its apt trade data for every region code, and save it."""
    dt_index = pandas.date_range(start='20160101', end='20191201')
    dt_list = dt_index.strftime("%Y%m").tolist()
    dt_list = list(set(dt_list))
    dt_list.sort(reverse=True)

    abs_path = os.path.dirname(os.path.realpath(__file__))
    path = os.path.join(abs_path, "data")
    file_list = os.listdir(path)
    file_list_json = [file for file in file_list if file.endswith(".json")]

    # FIX: cur_date was unbound (NameError at the f-string below) when every
    # month already had a JSON file; fail with an explicit message instead.
    cur_date = None
    for dt in dt_list:
        if dt + '.json' not in file_list_json:
            cur_date = dt
            break
    if cur_date is None:
        raise RuntimeError("All months in the configured range are already downloaded")

    print(f"Calling the data with {cur_date}")
    PATH = os.path.join(abs_path, 'data/' + str(cur_date) + '.json')

    # argument
    arg = argparse.ArgumentParser()
    arg.add_argument('--date', type=str, default=cur_date)
    args = arg.parse_args()

    # Loader
    codes = Loader.get_codes()
    configs = Loader.get_configs()

    # data set
    apt = AptDetailReader(configs['service_key'])
    result = []
    print("Running...")
    for code in tqdm(codes):
        items = apt.DataReader(code, args.date)
        if items is None:
            continue
        result += items

    with open(PATH, "w", encoding="utf-8") as make_file:
        json.dump(result, make_file, ensure_ascii=False, indent="\t")
def run(self, pages):
    """Scrape *pages* pages of reviews, then print and log the (up to) three
    longest ones.

    Raises ValueError if *pages* < 1.
    """
    if pages < 1:
        raise ValueError("The number of pages cannot be zero or negative")
    self.logger.log("Start running Scraper")
    try:
        loader = Loader()
        reviews = loader.load(pages)
        # Longest reviews first.
        reviews.sort(key=lambda x: x.length, reverse=True)
        # FIX: iterate at most three reviews; the original's fixed range(0, 3)
        # raised IndexError on short result sets, which the broad except then
        # misreported as a fetch failure.
        for i, review in enumerate(reviews[:3], start=1):
            review.print()
            self.logger.log(str(i) + 100 * "-")
            self.logger.log(review.title + " - " + review.date + " " + review.author)
            self.logger.log(100 * "-" + "\n")
    except Exception:
        # Best-effort by design; narrowed from a bare except.
        print("Unable to fetch review at this moment. Please try again later")
def evaluation(self, loader: Loader, epochs: int, imshow=False):  # for debugging purposes
    """Run validation minibatches through the model and return
    (mean Jaccard index, mean loss) after `epochs` + 1 batches.

    NOTE(review): implicitly returns None if the loader yields fewer than
    `epochs` + 1 batches — confirm get_minibatch is long/infinite enough.
    """
    val_accuracy = []
    loss = []
    for i, (image_batch, seg_batch) in enumerate(loader.get_minibatch(train=False, Aug=False)):
        pred = self.call(image_batch.astype(np.float32), training=False)
        loss.append(self.loss(seg=seg_batch.astype(np.float32), predictions=pred))
        # imshow also renders the comparison when computing the index.
        val_accuracy.append(Jaccard_Index(output_batch=pred, gt_batch=seg_batch, visual=imshow))
        if i == epochs:
            return np.mean(val_accuracy), np.mean(loss)
class Game(object):
    """Game class.

    NOTE(review): this definition is truncated and syntactically broken as
    captured here — the __init__ parameter list is missing commas between
    `height`, `color`, and `fps`, and no method body is visible. Also note
    the default `loader = Loader()` is evaluated once at definition time
    and shared across instances — confirm that is intended.
    """
    __scenes = []  # registered scenes (class-level)

    def __init__(self, width = 640, height = 480 color = (255, 255, 255) fps = 40, loader = Loader()):
def __init__(self, parent, controller):
    """Construct the parameter frame: loading spinner, description text,
    k_ult/k_out labeled entries (with defaults), and the '.process()' button.
    Widgets are only created here; placement happens elsewhere."""
    self.parent = parent
    self.controller = controller
    tk.Frame.__init__(self, parent)
    # Centered spinner shown while loading runs.
    self.loader = Loader(self, controller.color_scheme, 'Loading', fontsize=24)
    self.loader.place(relx=0.5, rely=0.5, anchor='center')
    self.loader.start()
    self.description = tk.Label(self, text=TEXT, font=('Monaco', 13),
                                fg=controller.color_scheme['comments'],
                                justify=tk.LEFT
                                )
    self.k_ult = tk.Label(self, text='k_ult =', font=('Monaco', 16), justify=tk.LEFT)
    self.k_out = tk.Label(self, text='k_out =', font=('Monaco', 16), justify=tk.LEFT)
    self.canvas = tk.Canvas(self, width=400, height=140)
    self.process = FlatButton(self.canvas, controller.color_scheme, '.process()',
                              fontsize=30, target=lambda event: self.target())
    self.pic = tk.Label(self.canvas, text=PIC, font=('Monaco', 10), justify=tk.CENTER)
    # Entry variables with string defaults; parsed when .process() runs.
    self.k_ult_var = tk.StringVar()
    self.ultEntry = tk.Entry(self, width=5, font=('Monaco', 16),
                             textvariable=self.k_ult_var,
                             highlightthickness=0, borderwidth=0
                             )
    self.k_ult_var.set('10.0')
    self.k_out_var = tk.StringVar()
    self.outEntry = tk.Entry(self, width=5, font=('Monaco', 16),
                             textvariable=self.k_out_var,
                             highlightthickness=0, borderwidth=0
                             )
    self.k_out_var.set('3.0')
def evaluation_with_assaf(self, loader: Loader, epochs:int, imshow=True):  # for debugging purposes
    """Validate with the SegMeasure metric; return (mean accuracy, mean loss).

    Iterates validation minibatches and stops after ``epochs`` + 1 batches
    (falls through, returning None, if the generator runs out first).
    """
    seg_measure = SegMeasure()
    accuracies = []
    losses = []
    for step, (images, segs) in enumerate(loader.get_minibatch(train=False)):
        predictions = self.call(images.astype(np.float32), training=False)
        losses.append(self.loss(seg=segs.astype(np.float32), predictions=predictions))
        accuracies.append(seg_measure(segs, predictions).numpy())
        if step == epochs:
            return np.mean(accuracies), np.mean(losses)
def __init__(self, position, power, color, radius, camera):
    """Point light rendered as a textured sphere placed at ``position``."""
    self.position = position
    self.power = power
    self.color = color
    self.radius = radius
    # Build the visual representation: sphere mesh + red triangle texture.
    mesh = Loader("./resources/models/sphere.obj")
    texture = Texture("./resources/textures/triangles_red.png")
    self.sphere = Object(mesh, camera, texture, color=color)
    self.sphere.scale(radius, radius, radius)
    x, y, z = position[0], position[1], position[2]
    self.sphere.translate(x, y, z)
def mutation(self, offspring):
    """Apply the configured mutation operator, adapting the mutation rate.

    When ``Config.mutation_uniformity == 2`` the mutation probability p_m is
    annealed: a "kick" resets it to its initial value and records the current
    generation; otherwise p_m decays quadratically with the number of
    generations since the last kick, floored by the 0.001 term.

    NOTE(review): this mutates global Config / GeneticAlgorithm state as a
    side effect — confirm callers expect that.
    """
    if (Config.mutation_uniformity == 2):
        if Config.kicking_flag == 1 and Config.kicking:
            # Kick: restart the cooling schedule from the initial rate.
            Config.p_m = Config.initial_p_m
            GeneticAlgorithm.kicked_gen = State.generation
        else:
            # Quadratic cooling since the last kick.
            Config.p_m = 0.001 + Config.initial_p_m / (
                1 + Config.p_m_cooling_alpha * pow(
                    (State.generation - GeneticAlgorithm.kicked_gen), 2))
    # Loader.mutation() yields the operator callable; apply it to offspring.
    return Loader.mutation()(offspring)
def __init__(self, surface, clock):
    """Bootstrap the game: resources, sound, and the opening author screen."""
    self.loader = Loader()
    self.loader.init_map()
    self.sound = SoundManager()
    self.sound.init_sound_manager()
    self.clock = clock
    self.alive = True
    self.screen = surface
    # Cache the drawing surface dimensions.
    width, height = surface.get_size()
    self.width = width
    self.height = height
    # Start on the author/splash screen and give it focus.
    self.screens = [AuthorScreen(3)]
    self.current_screen = 0
    self.screens[self.current_screen].on_focus()
def __init__(self):
    """Create a blank player character: bio defaults, stats, and loaders."""
    # Basic bio info
    self.name = ""
    self.classPerLevel = []
    self.race = None
    self.age = 0
    self.gender = ""
    self.height = ""
    self.weight = ""
    # Primary stats — each attribute is backed by its own PCStat* class,
    # constructed with a back-reference to this character.
    stat_factories = (
        ("DEX", PCStatDEX.PCStatDEX),
        ("PERC", PCStatPERC.PCStatPERC),
        ("VIT", PCStatVIT.PCStatVIT),
        ("STR", PCStatSTR.PCStatSTR),
        ("INT", PCStatINT.PCStatINT),
        ("SCI", PCStatSCI.PCStatSCI),
        ("WILL", PCStatWILL.PCStatWILL),
    )
    for attr_name, factory in stat_factories:
        setattr(self, attr_name, factory(self))
    # init races
    self._race_loader = Loader(PCRaceBase.PCRaceBase, "./PCRace")
    # init classes
    self._class_loader = Loader(PCClassBase.PCClassBase, "./PCClass")
def __init__(self):
    """Load the image named on the command line and choose a display scale."""
    AbstractScreen.__init__(self)
    self.loader = Loader()
    self.state = State()
    # First surface returned by the loader for the CLI-supplied path.
    self.raw_image = self.loader.load(sys.argv[1])[0]
    self.image_size = self.raw_image.get_size()
    w, h = self.image_size
    # Double tiny images, halve oversized ones, otherwise keep 1:1.
    if w < ScreenSize[0]/2 and h < ScreenSize[1]/2:
        self.scale = 2.0
    elif w > ScreenSize[0] or h > ScreenSize[1]:
        self.scale = 0.5
    else:
        self.scale = 1.0
    # Flip vertically, then scale to the chosen display size.
    flipped = pygame.transform.flip(self.raw_image, False, True)
    self.image = pygame.transform.scale(flipped, (w*self.scale, h*self.scale))
    self.points = []
    self.offset = (10, 10)
    self.global_scale = 1.0
    self.cursor_point = (0, 0)
def __init__(self, ttl):
    """Splash screen: three fading labels centred on screen, alive for ttl."""
    AbstractScreen.__init__(self)
    self.loader = Loader()
    self.state = State()
    # Load the three font sizes used by the labels (72, 32, 16 pt).
    fonts = {size: self.loader.load_font("slkscr", size) for size in (72, 32, 16)}
    cx = self.state.screen_size[0]/2
    cy = self.state.screen_size[1]/2
    self.labels = [
        Label("a rapid framework", fonts[32], (255, 96, 96), [cx-280, cy-30]),
        Label("Bebop", fonts[72], (255, 255, 255), [cx-180, cy-10]),
        Label("for game compos", fonts[16], (255, 255, 255), [cx-70, cy+50]),
    ]
    # Fade every label in.
    for label in self.labels:
        label.goal_alpha = 255
    # Set up fade timer
    self.ttl = ttl
    self.sound = SoundManager()
def __init__(self, resources_path="resources", max_fps=40, mixer_config=None):
    """Initialise pygame and the main-loop state; events are later
    dispatched to the registered windows.

    @param resources_path: path to the directory containing the resources
    @param max_fps: cap the frame rate at this many frames per second
    @param mixer_config: optional dict of custom pygame.mixer settings
    """
    # PEP 8 idiom: `is not None` rather than the original `not ... is None`.
    if mixer_config is not None:
        # Mixer must be configured before pygame.init() to take effect.
        pygame.mixer.pre_init(**mixer_config)
    pygame.init()
    self._max_fps = max_fps
    self._clock = pygame.time.Clock()
    self._windows = []
    self._loader = Loader(resources_path)
class FPSScreen:
    """Overlay that renders the current FPS in the bottom-right corner."""

    def __init__(self, clock):
        self.font = Loader().load_font("slkscr", 16)
        self.clock = clock
        self.render()
        self.tick = 0
        self.state = State()

    def render(self):
        """Re-render the FPS text surface and cache its size."""
        # Truncate the FPS reading to two decimals before rendering.
        fps_value = int(self.clock.get_fps()*100)/100.0
        self.fps = self.font.render(str(fps_value), False, (255, 255, 255))
        self.size = self.fps.get_size()

    def update(self):
        """Re-render every FPSCounter ticks."""
        self.tick += 1
        if self.tick > FPSCounter:
            self.tick = 0
            self.render()

    def draw(self, surface):
        """Blit the FPS text anchored to the bottom-right of the screen."""
        x = self.state.screen_size[0] - self.size[0]
        y = self.state.screen_size[1] - self.size[1]
        surface.blit(self.fps, (x, y))
def __init__(self, arguments):
    """Boot the game engine: flags, subsystems, managers, then the main loop.

    NOTE(review): construction order matters — screen/audio setup precede
    Loader.preload(), and the managers are built only after preload; do not
    reorder without checking their dependencies.
    """
    # Runtime flags and caches.
    self.debug = False
    self.capturing = False
    self.captureFPSWait = 0
    self.running = True
    self.fullscreen = False
    self.paused = False
    self.cachedScenes = {}
    self.currentScene = None
    self.currentElement = None
    self.currentWindow = None
    self.currentWidget = None
    self.values = {}
    # Asset loader (capitalised attribute matches the manager naming style).
    self.Loader = Loader(self)
    self.setupScreen()
    self.setupAudio()
    self.displayTitle()
    self.Loader.preload()
    # Game subsystem managers; each receives the engine instance.
    self.TitleManager = TitleManager(self)
    self.Cursor = Cursor(self)
    self.Renderer = Renderer(self)
    self.AudioController = AudioController(self)
    self.Inventory = Inventory(self)
    self.TopicMenu = TopicMenu(self)
    self.ScriptManager = ScriptManager(self)
    self.Player = Player(self)
    self.EventManager = EventManager(self)
    # Command-line arguments may override the defaults set above.
    self.parseArguments(arguments)
    # Start in the blacksmith scene and enter the main loop (blocks).
    self.loadScene("blacksmith")
    self.run()
def main():
    """Item-based collaborative-filtering experiment (Python 2 code).

    Builds the model from item/user views of the data, caches the user
    means and the item-item cosine matrix under temp/item_base/ via pickle,
    then runs 5-fold cross-validation for several k values, reporting RMSE
    and precision@10.
    """
    loader = Loader(FILE_PATH, PREDICT_PATH)
    loader.load_item_base()
    item_base = ItemBase(loader, 5)
    # A second loader provides the user-oriented view of the same data.
    loader2 = Loader(FILE_PATH, PREDICT_PATH)
    loader2.load_user_base()
    item_base.user_data = loader2.data
    try:
        # Reuse cached user means if available.
        item_base.mean = pickle.load(open('temp/item_base/user_mean.p', 'rb'))
    except Exception:
        # NOTE(review): item_base.mean is called as a method here but loaded
        # as a plain attribute above — confirm ItemBase.mean() stores its
        # result on the instance, otherwise this pickles the bound method.
        item_base.mean()
        pickle.dump(item_base.mean, open('temp/item_base/user_mean.p', 'wb'))
    try:
        # Same caching pattern for the item-item cosine similarity matrix.
        item_base.cosine = pickle.load(open('temp/item_base/ad_cosine.matrix', 'rb'))
    except Exception:
        item_base.similarities()
        pickle.dump(item_base.cosine, open('temp/item_base/ad_cosine.matrix', 'wb'))
    print '> Testing model'
    k_values = [5, 10, 20, 30]
    results = {}
    results.setdefault('RMSE', [])
    results.setdefault('Precision', [])
    results['K'] = k_values
    # 5-fold cross-validation over the item set.
    kf = cross_validation.KFold(len(item_base.loader.items), n_folds=5)
    for k in k_values:
        item_base.k = k
        RMSE = 0.
        precision = 0.
        for train_index, test_index in kf:
            item_base.train_idx = train_index
            item_base.test_idx = test_index
            item_base.knn()
            RMSE += item_base.test_error(test_index)
            precision += item_base.test_topN(test_index)
        # Averages over the five folds.
        print str.format('RMSE [k={0}] = {1}', k, RMSE/5)
        print str.format('PRECISION@10 [k={0}] = {1}', k, precision/5)
        results['RMSE'].append(RMSE / 5)
        results['Precision'].append(precision / 5)
    print results
class CoverArtDatabase (object):
    """Resolve album cover art: local cache first, then online search.

    Python 2 / gtk code. Failed lookups are recorded as empty "rb-blist"
    marker files so future lookups skip the online search.
    """

    def __init__ (self):
        self.loader = Loader()

    def create_search (self):
        # Factory for the online search backend.
        return AmazonCoverArtSearch (self.loader)

    def build_art_cache_filename (self, album, artist, extension):
        """Return the cache path for (artist, album), creating the folder."""
        art_folder = os.path.expanduser (ART_FOLDER)
        if not os.path.exists (art_folder):
            os.mkdir (art_folder)
        if extension is None:
            extension = "jpg"
        # FIXME: the following block of code is messy and needs to be redone ASAP
        return art_folder + '/%s - %s.%s' % (artist.replace ('/', '-'), album.replace ('/', '-'), extension)

    def get_pixbuf (self, db, entry, callback):
        """Resolve art for a rhythmdb entry; invokes callback(entry, pixbuf-or-None)."""
        if entry is None:
            callback (entry, None)
            return
        st_artist = db.entry_get (entry, rhythmdb.PROP_ARTIST)
        st_album = db.entry_get (entry, rhythmdb.PROP_ALBUM)
        # Handle special case
        if st_album == "":
            st_album = "Unknown"
        if st_artist == "":
            st_artist = "Unknown"
        # If unknown artist and album there is no point continuing
        if st_album == "Unknown" and st_artist == "Unknown":
            callback (entry, None)
            return
        # replace quote characters
        # don't replace single quote: could be important punctuation
        for char in ["\""]:
            st_artist = st_artist.replace (char, '')
            st_album = st_album.replace (char, '')
        art_location = self.build_art_cache_filename (st_album, st_artist, "jpg")
        blist_location = self.build_art_cache_filename (st_album, st_artist, "rb-blist")
        # Check local cache
        if os.path.exists (art_location):
            pixbuf = gtk.gdk.pixbuf_new_from_file (art_location)
            callback (entry, pixbuf)
        # Check for unsuccessful previous image download to prevent overhead search
        elif os.path.exists (blist_location):
            callback (entry, None)
        else:
            # Otherwise spawn (online) search-engine search
            se = self.create_search ()
            se.search (db, entry, self.on_search_engine_results, callback)

    def on_search_engine_results (self, search_engine, entry, results, callback):
        """Pick the best search result and download it, or blacklist on failure."""
        if results is None:
            self._do_blacklist_and_callback (search_engine, callback)
            return
        # Get best match from results
        best_match = search_engine.get_best_match (results)
        if best_match is None:
            self._do_blacklist_and_callback (search_engine, callback)
            return
        # Attempt to download image for best match
        pic_url = str (best_match.ImageUrlLarge)
        self.loader.get_url (pic_url, self.on_image_data_received, search_engine, "large", callback, best_match)

    def _do_blacklist_and_callback (self, search_engine, callback):
        # Record the failed lookup, then notify the caller with no art.
        self._create_blacklist (search_engine.st_artist, search_engine.st_album)
        callback (search_engine.entry, None)

    def _create_blacklist (self, artist, album):
        # Touch an empty marker file so future lookups skip the search.
        location = self.build_art_cache_filename (album, artist, "rb-blist")
        f = file (location, 'w')
        f.close ()
        return location

    def _create_artwork (self, artist, album, image_data):
        # Persist the downloaded image bytes into the local cache.
        location = self.build_art_cache_filename (album, artist, "jpg")
        f = file (location, 'wb')
        f.write (image_data)
        f.close ()
        return location

    def on_image_data_received (self, image_data, search_engine, image_version, callback, best_match):
        """Handle downloaded bytes; falls back to smaller image / next result.

        NOTE(review): responses under 1000 bytes are treated as bogus
        placeholder images — confirm that threshold is still appropriate.
        """
        if image_data is None:
            res = search_engine.search_next ()
            if not res:
                self._do_blacklist_and_callback (search_engine, callback)
            return
        if len (image_data) < 1000:
            if image_version == "large" and best_match is not None:
                # Fallback and try to load medium one
                pic_url = str (best_match.ImageUrlMedium)
                self.loader.get_url (pic_url, self.on_image_data_received, search_engine, "medium", callback, best_match)
                return
            res = search_engine.search_next ()
            if not res:
                # only write the blist if there are no more queries to try
                self._do_blacklist_and_callback (search_engine, callback)
        else:
            # Successful download: write to cache and hand back a pixbuf.
            location = self._create_artwork (search_engine.st_artist, search_engine.st_album, image_data)
            pixbuf = gtk.gdk.pixbuf_new_from_file (location)
            callback (search_engine.entry, pixbuf)
i = self.loader.item_idx(item) u = self.loader.user_idx(user) score = self.predict(u, i) predictions.append((item, score)) predictions = sorted(predictions, key=operator.itemgetter(1)) predictions.reverse() top10[user] = predictions[0:10] file = open("temp/slope_one/top10.txt", "wb") for user,items in top10.items(): file.write(str.format("{0}\n", user)) for data in items: file.write(str.format('\t"{0}"\n', data[0])) if __name__ == '__main__': loader = Loader(FILE_PATH, PREDICT_PATH) loader.load_user_base() slope_one = SlopeOne(loader) loader2 = Loader(FILE_PATH, PREDICT_PATH) loader2.load_item_base() slope_one.item_data = loader2.data try: slope_one.avg = pickle.load(open('temp/slope_one/mean.p', 'rb')) except Exception: slope_one.mean() pickle.dump(slope_one.avg, open('temp/slope_one/mean.p', 'wb')) try: print '> Loading dev matrix' f = open('temp/slope_one/freqs.p', 'rb')
warnings.warn("filename as url is deprecated") url = filename filename, pathinfile = os.path.split(url) if not os.path.exists(filename): msg = "invalid histogram url: %s" % url raise ValueError, url if fs is None: from h5py import File try: fs = File( filename, 'r') except IOError, msg: raise IOError, "unable to load histogram. filename=%s, "\ "pathinfile=%s, kwds=%s" % (filename, pathinfile, kwds) from Loader import Loader loader = Loader(fs, pathinfile) return loader.load(**kwds) def dump( histogram, filename = None, pathinfile = '/', mode = 'c', fs = None, compression = 'lzf'): '''dump( histogram, hdf_filename, path_in_hdf_file, mode ) -> save histogram into a hdf file. histogram: The histogram to be written hdf_filename: The hdf filename in which the histogram will be saved path_in_hdf_file: The path inside the hdf file where the histogram is located. mode: The mode to be used to write to the hdf file.
class Application(object):
    """Pygame application shell: owns the clock, the resource loader and the
    registered windows, and runs the main event loop."""

    def __init__(self, resources_path="resources", max_fps=40, mixer_config=None):
        """Initialise pygame and the main-loop state; events are later
        dispatched to the registered windows.

        @param resources_path: path to the directory containing the resources
        @param max_fps: cap the frame rate at this many frames per second
        @param mixer_config: optional dict of custom pygame.mixer settings
        """
        # PEP 8 idiom: `is not None` rather than `not ... is None`.
        if mixer_config is not None:
            # Mixer must be configured before pygame.init() to take effect.
            pygame.mixer.pre_init(**mixer_config)
        pygame.init()
        self._max_fps = max_fps
        self._clock = pygame.time.Clock()
        self._windows = []
        self._loader = Loader(resources_path)

    def register_window(self, win):
        """Add a window to the application so it receives events.

        @param win: the window to add
        """
        win.set_loader(self._loader)
        self._windows.append(win)

    def get_icon(self):
        """Look for an icon named icon.png in the loader.

        Returns the icon image, or None if it cannot be loaded.
        """
        try:
            self._loader.preload_image(("icon", "icon.png"))
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate; a missing/broken icon simply yields None.
            return None
        return self._loader.images.icon

    def run(self):
        """Run the game's main loop until QUIT, then shut pygame down."""
        quit_requested = False
        while not quit_requested:
            # Dispatch every pending event to all registered windows.
            for event in pygame.event.get():
                if event.type == QUIT:
                    quit_requested = True
                elif event.type == KEYDOWN:
                    for w in self._windows:
                        w.keydown(event.key, event.mod)
                elif event.type == KEYUP:
                    for w in self._windows:
                        w.keyup(event.key, event.mod)
                elif event.type == MOUSEBUTTONDOWN:
                    for w in self._windows:
                        w.mousebuttondown(event.button, event.pos)
                elif event.type == MOUSEBUTTONUP:
                    for w in self._windows:
                        w.mousebuttonup(event.button, event.pos)
            # Redraw and cap the frame rate.
            for w in self._windows:
                w.draw()
            pygame.display.flip()
            self._clock.tick(self._max_fps)
        # Orderly pygame teardown once the loop exits.
        pygame.mixer.quit()
        pygame.font.quit()
        pygame.display.quit()
def __init__(self,clock):
    """Cache the font and clock, then produce the first FPS rendering."""
    self.font = Loader().load_font("slkscr",16)
    self.clock = clock
    # Initial render so draw() has a surface before the first update().
    self.render()
    # Frame counter since the last re-render.
    self.tick = 0
    self.state = State()
class SketchScreen(AbstractScreen):
    """Screen for sketching a polygon over a reference image.

    Click to add points, Space to clear, Escape to print the points
    (centre-relative, un-scaled) to stdout and close the screen.
    """

    def __init__(self):
        AbstractScreen.__init__(self)
        self.loader = Loader()
        self.state = State()
        # First surface returned by the loader for the CLI-supplied path.
        self.raw_image = self.loader.load(sys.argv[1])[0]
        self.image_size = self.raw_image.get_size()
        self.scale = 1.0
        # Double tiny images, halve ones larger than the screen.
        if (self.image_size[0] < ScreenSize[0]/2 and self.image_size[1] < ScreenSize[1]/2):
            self.scale = 2.0
        elif (self.image_size[0] > ScreenSize[0] or self.image_size[1] > ScreenSize[1]):
            self.scale = 0.5
        # Flip vertically, then scale for display.
        self.image = pygame.transform.scale(pygame.transform.flip(self.raw_image, False, True), (self.image_size[0]*self.scale, self.image_size[1]*self.scale))
        self.points = []
        self.offset = (10,10)
        self.global_scale = 1.0
        self.cursor_point = (0, 0)

    def on_focus(self):
        self.alive = True

    def update(self):
        # Screen stays active until Escape handling sets alive to False.
        AbstractScreen.update(self)
        return self.alive

    def handle_event(self,event):
        """Mouse click adds a point; Escape emits coordinates; Space resets."""
        if event.type == pygame.locals.MOUSEBUTTONDOWN:
            # Store points relative to the image offset.
            pos = (event.pos[0] - self.offset[0], event.pos[1] - self.offset[1])
            self.points.append(pos)
        elif event.type == pygame.locals.MOUSEMOTION:
            self.cursor_point = (event.pos[0], event.pos[1])
        elif event.type == pygame.locals.KEYDOWN:
            if event.key == pygame.locals.K_ESCAPE:
                # Emit comma-separated coordinates, centred on the image and
                # mapped back from display scale to source pixels.
                # NOTE(review): global_scale is applied to the first point
                # only — confirm the remaining points are meant to skip it.
                first = self.points[0]
                x = self.global_scale * (first[0]/self.scale - self.image_size[0]/2)
                y = self.global_scale * (first[1]/self.scale - self.image_size[1]/2)
                s = str(x) + "," + str(y)
                for pos in self.points[1:]:
                    x = pos[0]/self.scale - self.image_size[0]/2
                    y = pos[1]/self.scale - self.image_size[1]/2
                    s += "," + str(x) + "," + str(y)
                sys.stdout.write(s)
                self.alive = False;
            elif event.key == pygame.locals.K_SPACE:
                self.points = []

    def draw(self,screen):
        """Draw the image, the clicked polygon, and a rubber-band line."""
        screen.blit(self.image, self.offset)
        prev = -1
        for pos in self.points:
            off = (pos[0] + self.offset[0], pos[1] + self.offset[1])
            pygame.draw.circle(screen, (0, 255, 0), off, 3)
            if prev != -1:
                pygame.draw.line(screen, (0, 255, 0), prev, off)
            prev = off
        if len(self.points) > 0:
            # Rubber-band line from the most recent point to the cursor.
            recent = self.points[len(self.points)-1]
            recent = (recent[0] + self.offset[0], recent[1] + self.offset[1])
            pygame.draw.line(screen, (255, 0, 255), recent, self.cursor_point)
        return True
# Create it if not exists. if not os.path.exists(log_out): os.makedirs(log_out) # Checkpoint directory. checkpoint_dir = os.path.join(FLAGS.tianchi, 'checkpoints', date) checkpoint_prefix = os.path.join(checkpoint_dir, 'ckpt') if not os.path.exists(checkpoint_dir): os.makedirs(checkpoint_dir) # Log the hyperparameters. dump_parameters(os.path.join(checkpoint_dir, 'hyper_params.json')) # Data loader loader = Loader(classes=FLAGS.num_labels, reduction=FLAGS.reduction, batch_size=FLAGS.batch_size, patch_size=FLAGS.patch_size, depth=FLAGS.depth, num_hidden=FLAGS.num_hidden, epoch=FLAGS.num_epochs, img_size=FLAGS.image_size) # Return the container with data/labels for train data. ctrain = loader.run(category='Training') ctest = loader.run(category='Testing') # Variables need to be catched in the environnement. # tf.trainable_variables() to know which variables are being catched. with tf.Graph().as_default(): sess = tf.Session() with sess.as_default(): # Run the default session. # Create the network. cnn = CNN(batch_size=FLAGS.batch_size, image_size=FLAGS.image_size, num_channels=FLAGS.num_channels,
def registerExtenderCallbacks(self, callbacks):
    """Extension entry point: wire up the config, plugin loader and dispatcher."""
    self.mCallBacks = callbacks
    # Build the config once and share it with the loader and dispatcher.
    config = Config(callbacks)
    self.config = config
    self.loader = Loader(config)
    self.dispatcher = Dispatcher(config, self.loader.getPlugins())
score1 += 1 else: # predicted incorrectly if testY[r] > 0: #virus as GTA phage_as_gta.append(testNames[r]) else: #gta as virus gta_as_phage.append(testNames[r]) # if not MINI: # print("\nPhages (%d) misclassified over %d reps: %s" % (len(phage_as_gta), nrep, phage_as_gta)) # print("\nGTA (%d) misclassified over %d reps: %s\n" % (len(gta_as_phage), nrep, gta_as_phage)) return (score0/nrep, score1/nrep) if __name__ == '__main__': # Load profiles gta_profs = Loader.load(GTA_PATH, "GTA") viral_profs = Loader.load(VIRAL_PATH, "virus") # Make features feats = Feature(gta_profs.profiles + viral_profs.profiles) # kmer feats.make_kmer_dict(K) feats.kmer_feat() # pseaac feats.pseaac(lam=LAM, weight=PSE_WEIGHT) # physicochem feats.physicochem() # Xval # predictor = KNeighborsClassifier(n_neighbors=10) predictor = MultinomialNB() result = xval(predictor, gta_profs, viral_profs, NFOLDS, NREPS)
def load_github(environ, start_response):
    """WSGI handler: load tiddlers from GitHub into the store and reply OK.

    Bug fix: per PEP 3333 the status passed to start_response must be a
    "<code> <reason>" string; the bare '200' is invalid and breaks strict
    WSGI servers.
    """
    store = environ['tiddlyweb.store']
    loader = Loader(store)
    loader.load_github()
    start_response('200 OK', [('Content-Type', 'text/html; charset=UTF-8')])
    return SUCCESS_RESPONSE
def __init__ (self):
    """Create this object's Loader instance."""
    self.loader = Loader()