def set_file_handler_access_type(self, access_type):
    """Replace the current file handler with one opened using *access_type*.

    Any previously held handler is closed first.  The replacement tries to
    open a brand-new file and, failing that, opens the existing one.
    """
    old_handler = self.file_handler
    if old_handler is not None:
        old_handler.close_file()
    new_handler = FileHandler(self.filename, access_type=access_type)
    self.file_handler = new_handler
    created = new_handler.open_new_file()
    if not created:
        new_handler.open_existing_file()
class AddressBook(object):
    """In-memory collection of Contact objects with sorting and TSV I/O."""

    def __init__(self):
        self.contacts = []  # holds Contact objects
        self.changed = False  # dirty flag for unsaved modifications
        self.sortMethod = ("name", False)  # (attribute, descending?)
        self.FileHandler = FileHandler()

    def sort(self, attr, isDescending=False):
        """Sort contacts in place by *attr*; names sort last-name-first."""
        # NOTE(review): str.translate(None, punctuation) is the Python 2
        # API; on Python 3 this raises TypeError — confirm the target
        # interpreter before relying on non-"name" sorts.
        contactAttr = lambda contact: contact.getAttr(attr).translate(None, punctuation)
        if attr == "name":
            # puts last name first in split list
            contactAttr = lambda contact: contact.getAttr(attr).split(" ")[::-1]
        self.contacts.sort(key=contactAttr, reverse=isDescending)

    def addContact(self, **attrs):
        """ adds new contact instance to contact list """
        contact = Contact(**attrs)
        self.contacts.append(contact)

    def removeSelected(self, selected):
        """Remove the contacts at the given indices from self.contacts.

        BUG FIX: deleting in ascending order shifted every later index by
        one, so the wrong contacts were removed.  Deleting from the highest
        index down keeps all remaining positions stable.
        """
        for index in sorted(selected, reverse=True):
            del self.contacts[index]

    def loadTSV(self, filepath):
        """Append contacts parsed from a USPS-format TSV file."""
        fromTSV = self.FileHandler.readUSPS(filepath)
        for attr in fromTSV:
            self.contacts.append(Contact(**attr))

    def writeTSV(self, filepath):
        """Write all contacts to *filepath* in USPS TSV format."""
        self.FileHandler.writeUSPS(self.contacts, filepath)
def __init__(self, filename=None, json_data=None):
    """Bind this object to a JSON database file.

    When no file exists yet, *json_data* is written out to seed it;
    otherwise the JSON content already on disk is loaded instead.
    """
    self.json_data = json_data
    # Fall back to the module-wide default database path.
    self.filename = DATABASE_FILENAME if filename is None else filename
    # A FileHandler object performs all of the actual file I/O.
    self.file_handler = FileHandler(self.filename, access_type='r')
    # presumably open_new_file() is falsy when the file did not already
    # exist — TODO confirm against FileHandler's implementation
    if not self.file_handler.open_new_file():
        # Fresh file: keep the supplied data and persist it.
        self.json_data = json_data
        self.output_file_contents()
    else:
        # Existing file: its stored JSON replaces the argument.
        self.read_file_contents()
def __init__(self, pastafile, reffile, seqfile, verbose=False):
    """
    Constructor

    @param pastafile: filename for residue list
    @type pastafile: string
    @param reffile: filename for reference list (eg. BMRB ASCII file)
    @type reffile: string
    @param seqfile: filename for FASTA sequence file
    @type seqfile: string
    """
    handler = FileHandler()
    # Parsed inputs
    self.residues = handler.read_pasta(pastafile, reffile)  # list of Residue.PastaResidue objects
    self.amino_acids = handler.read_preset(reffile)  # list of Residue.AminoAcid objects
    self.seq = handler.read_seq(seqfile, reffile)  # list of Residue.AminoAcid objects
    # Matrices filled in by later processing stages
    self.P = None  # numpy.ndarray of typing posterior probabilities
    self.L = None  # numpy.ndarray of linking constraints
    self.S = None  # numpy.ndarray of aa type in sequence
    self.A = None  # list of assignments and respective similarity score
    # ILP-specific state
    self.B = None  # list of assignments and respective costs
    self.C = None  # cost matrix ILP
    self.Xs = None  # assignment matrices from solution pool
    self.ILP_L = None  # linking matrix of the ILP
    # Timing bookkeeping
    self.typing_elapsed = 0
    self.linking_elapsed = 0
    self.mapping_elapsed = 0
    self.full_running_time = 0
def import_task_data_from_file(self, **kwargs):
    """Import tasks from a JSON schedule file into this schedule.

    Keyword Args:
        filename (str): path of the file to read (default 'schedule.json').
        revert_changes_on_error (bool): when True (default), any task that
            fails to insert rolls the whole schedule back and raises
            PSSInvalidOperationError; when False, failing tasks are
            silently skipped (best-effort import).
    """
    filename = kwargs.get('filename', 'schedule.json')
    revert_changes_on_error = kwargs.get('revert_changes_on_error', True)
    file_handler = FileHandler(filename)
    # Deep copies so the schedule can be restored if the import fails.
    transient_task_backup = copy.deepcopy(self.transient_tasks)
    recurring_task_backup = copy.deepcopy(self.recurring_tasks)
    try:
        for task in file_handler.read_tasks():
            try:
                self.add_task(task)
            except TaskInsertionError as err:
                # Escalate only for all-or-nothing semantics; otherwise
                # the conflicting task is deliberately skipped.
                if revert_changes_on_error:
                    raise PSSInvalidOperationError()
    except PSSInvalidOperationError:
        # Revert changes if revert_changes_on_error
        self.transient_tasks = transient_task_backup
        self.recurring_tasks = recurring_task_backup
        raise PSSInvalidOperationError(
            "Could not complete data import: Data invalid or contains conflict. No changes have been made."
        )
    # Index recurring tasks by name for downstream lookups.
    # NOTE(review): this dict appears unused within the visible span —
    # confirm the function does not continue past this chunk.
    recurring_tasks_by_name = {}
    recurring_tasks_by_name.update([(task.name, task) for task in self.recurring_tasks])
def test_write_file_success(self, tmpdir, capfd):
    """write_file() should report success for a writable path."""
    target = tmpdir.mkdir("sub").join("test.txt")
    handler = FileHandler(target)
    handler.write_file()
    out, err = capfd.readouterr()
    expected = "Data write to '{}' successful\n".format(target)
    assert out == expected
def test_read_file_not_found(self, tmpdir, capfd):
    """read_file() on a missing path should print the 'does not exist' hint."""
    missing = tmpdir.mkdir("sub").join("file_dne.txt")
    handler = FileHandler(missing)
    handler.read_file()
    out, err = capfd.readouterr()
    expected = "File '{}' does not exist. Add and submit data to create it.\n".format(missing)
    assert out == expected
def test_read_file_found(self, tmpdir, capfd):
    """read_file() on an existing path should print the 'found' message."""
    existing = tmpdir.mkdir("sub").join("tester.txt")
    existing.write("")
    handler = FileHandler(existing)
    handler.read_file()
    out, err = capfd.readouterr()
    expected = "File '{}' found. Its data will be used in this application.\n".format(existing)
    assert out == expected
def __init__(self, fileloc, clf, ss=None):
    """Bundle a classifier with file handling and feature extraction.

    Args:
        fileloc: root folder containing the audio files.
        clf: classifier object to train/apply.
        ss: optional standardizer/scaler applied to feature vectors.
    """
    self.clf = clf
    self.standardizer = ss
    self.file_loc = fileloc
    self.file_handler = FileHandler(self.file_loc)
    # BUG FIX: (".wav") is a parenthesized string, not a tuple.  The
    # sibling call site passes a collection of extensions, so a
    # one-element tuple preserves the intended single-.wav filter.
    self.file_handler.set_file_extensions((".wav",))
    self.file_handler.create_all_file_list()
    self.file_handler.split_train_test()
    self.extractor = FeatureExtractor()
def test_input_file_parse():
    """Parsing the fixture file should yield one task of each kind."""
    handler = FileHandler('unit_test_inputs/test_input_file_parse.json')
    parsed = handler.read_tasks()
    assert len(parsed) == 3
    transient, recurring, anti = parsed[0], parsed[1], parsed[2]
    test_equal(transient.__class__, TransientTask)
    test_equal(recurring.__class__, RecurringTask)
    test_equal(anti.__class__, AntiTask)
def run(self):
    """ Run the MapReduce implementation: split the input file, spawn the
    timer/reducer actors locally and one mapper per registered worker,
    then serve forever.  (Python 2 code: print statements.) """
    url_file = 'http://' + self.url_server + '/'
    ip = self.ip
    set_context()
    host = create_host('http://' + str(ip) + ':6002')
    # The registry actor knows every worker host registered in the server.
    registry = host.lookup_url('http://' + str(ip) + ':6000/regis', 'Registry', 'Registry')
    list_workers = registry.get_all_names()
    print 'Workers registered in server: ' + str(list_workers)
    num_workers = len(list_workers)
    print 'Number of workers: ' + str(num_workers)
    print 'Start file splitter...'
    # Split the input into one chunk per worker.
    file_handler = FileHandler(self.input_file_path, self.output_dir)
    file_handler.split_file(num_workers)
    print 'Finish splitting...'
    # Create Timer Actor (reuse an existing one when present)
    if not host.has_actor('timer'):
        timer = host.spawn('timer', 'Timer/Timer')
    else:
        timer = host.lookup('timer')
    # Create reducer (reuse an existing one when present)
    if not host.has_actor('reducer'):
        reducer = host.spawn('reducer', self.reduce_impl)
    else:
        reducer = host.lookup('reducer')
    reducer.set_parameters(num_workers, file_handler, self.output_dir, self.output_filename, timer)
    # Create mapper actors, one per remote worker host
    for i in range(num_workers):
        remote_host = registry.lookup(list_workers[i])
        # print remote_host
        if remote_host is not None:
            if not remote_host.has_actor('mapper'):
                worker = remote_host.spawn('mapper', self.map_impl)
            else:
                worker = remote_host.lookup('mapper')
            print "Mapper created in host -> " + list_workers[i]
            # Each mapper pulls its own chunk file by URL.
            url_file_chank = url_file + "file_" + str(i) + '.txt'
            worker.start_map(url_file_chank, reducer, timer)
    serve_forever()
def getBest(self):
    """Scan every species for the fittest network.

    Side effects: refreshes self.fitness_sums (total fitness per species),
    stores the overall winner in self.best and, when save_to_file is set,
    pickles the best network and the whole generation via FileHandler.
    """
    self.fitness_sums = [0] * self.species
    self.best = self.nets[0][0]
    for species_idx in range(self.species):
        for net in self.nets[species_idx]:
            self.fitness_sums[species_idx] += net.fitness
            if net.fitness > self.best.fitness:
                self.best = net
    if self.save_to_file:
        FileHandler.save(self.best.copy(), 'best.pickle')
        FileHandler.save(self.nets, 'generation.pickle')
def get_data_from_folder(folder, classification, train_p, total_amount, valid_ext, extractor):
    """Collect feature/label matrices for one class of audio files.

    Walks *folder* via a FileHandler split into train/test file lists,
    extracts one feature vector per clip with *extractor*, and labels
    every row with *classification*.

    Args:
        folder: directory containing the raw files.
        classification: label string written into every y row.
        train_p: fraction of files assigned to the training split.
        total_amount: requested total number of samples (train + test).
        valid_ext: file extensions accepted by the FileHandler.
        extractor: object exposing tot_features, set_clip() and extract().

    Returns:
        Tuple (x_train, y_train, x_test, y_test); each array is truncated
        to the number of samples actually collected.
    """
    max_train = round(total_amount * train_p)
    max_test = round(total_amount * (1 - train_p))
    # Pre-size the buffers; unused tail rows are sliced off at the end.
    x_train = np.empty((max_train, extractor.tot_features), float)
    y_train = np.empty((max_train, 1), str)
    x_test = np.empty((max_test, extractor.tot_features), float)
    y_test = np.empty((max_test, 1), str)
    print("Begin %s" % (classification))
    fhandler = FileHandler(folder, training_percent=train_p)
    fhandler.set_file_extensions(valid_ext)
    fhandler.create_all_file_list()
    fhandler.split_train_test()
    count = 0
    print("Get %d training pts" % round(max_train))
    for tf in fhandler.train_files:
        cur_clips = fhandler.file_to_clips(tf)
        for clip in cur_clips:
            extractor.set_clip(clip)
            x_train[count][:] = extractor.extract()
            y_train[count] = classification
            count = count + 1
            # Progress heartbeat every 100 samples.
            if count % 100 == 0:
                print(count)
            if count == max_train:
                break
        # Double break: the quota check must also stop the outer file loop.
        if count == max_train:
            break
    print("Collected %d of %d requested %s samples" % (count, max_train, classification))
    x_train = x_train[0:count, :]
    y_train = y_train[0:count, :]
    print("Get %d testing" % (max_test))
    count = 0
    for tf in fhandler.test_files:
        cur_clips = fhandler.file_to_clips(tf)
        for clip in cur_clips:
            extractor.set_clip(clip)
            x_test[count][:] = extractor.extract()
            y_test[count] = classification
            count = count + 1
            if count % 100 == 0:
                print(count)
            if count == max_test:
                break
        if count == max_test:
            break
    # Trim the test buffers to the rows actually filled.
    x_test = x_test[0:count, :]
    y_test = y_test[0:count, :]
    return x_train, y_train, x_test, y_test
def main():
    """Load the settings file, then build one Generation per configured count."""
    # instantiate global items
    file_handler = FileHandler()
    # First settings entry is the number of generations to create.
    settings_array = file_handler.readFileToArray("setting")
    generation_count = int(settings_array[0])
    # Load Generations keyed "GEN0", "GEN1", ...
    generations = {}
    for index in range(generation_count):
        generations["GEN" + str(index)] = Generation(50)
def __init__(self, *args, obj=None, **kwargs):
    """Build the main window: load config, create helpers, wire up the UI.

    The *obj* keyword is accepted but not used here directly.
    """
    super(MainWindow, self).__init__(*args, **kwargs)
    self.setupUi(self)
    # Configuration must be loaded before the helpers that consume it.
    self.load_config()
    self.file_handler = FileHandler(config=self.config)
    self.options = OptionsView(config=self.config)
    self.loading_message = DialogLoading()
    # UI state, signal wiring, then library availability check — in order.
    self.set_ui()
    self.set_signals()
    self.check_lib()
def __init__(self, path, max_replay_load=10, max_event_parsed=50):
    """Prepare replay containers and helper objects for the folder at *path*."""
    self.factory = SC2Factory()
    self.folder = path
    # One build list per race, kept separately for training and prediction.
    empty_by_race = lambda: {"Protoss": [], "Terran": [], "Zerg": []}
    self.training_builds = empty_by_race()
    self.predict_builds = empty_by_race()
    self.training_replays = []
    self.predict_replays = []
    self.success_replay = []
    # Parsing limits.
    self.max_replay_load = max_replay_load
    self.max_event_parsed = max_event_parsed
    self.dir_list = os.listdir(self.folder)
    self.clustering = Clustering()
    self.file_handler = FileHandler()
def __init__(self, logger, input_folder, exl_type, output_file):
    """Read the input workbook up front and cache its parsed pieces."""
    self.logger = logger
    self.log_ref = ''
    self.input_folder = input_folder
    self.exl_type = exl_type
    # FileHandler does the reading; keep references to what it produced.
    handler = FileHandler(logger, input_folder, exl_type)
    handler.handle_input_file()
    self.fh = handler
    self.df_ifile_read = handler.df_ifile_read  # DataFrame
    self.col_list = handler.col_list
    self.val_conv = handler.val_conv
    self.output_file = output_file
    self.df_new = None
def test_complex_array(self):
    """Compiled VM code for ComplexArrays must match the reference output."""
    root_node = CompilationEngine(COMPLEX_ARRAY).compile()
    generated = VMGenerator().process(root_node)
    expected = FileHandler(ROOT_DIR + '/output/ComplexArrays/Main.vm').fileContent
    self.assertEqual(expected, generated)
def test_average(self):
    """Compiled VM code for Average must match the reference output."""
    root_node = CompilationEngine(AVERAGE).compile()
    generated = VMGenerator().process(root_node)
    expected = FileHandler(ROOT_DIR + '/output/Average/Main.vm').fileContent
    self.assertEqual(expected, generated)
def test_seven(self):
    """Compiled VM code for Seven must match the reference output."""
    root_node = CompilationEngine(FILE_SEVEN).compile()
    generated = VMGenerator().process(root_node)
    expected = FileHandler(ROOT_DIR + '/output/Seven/Main.vm').fileContent
    self.assertEqual(expected, generated)
class Vehicle:
    """CSV-backed vehicle registry with search and test-due helpers."""

    # Class-level handler plus a snapshot of the vehicles CSV, loaded once
    # at import time from a hard-coded path.
    # NOTE(review): instance methods use self.path_to_vehicles_db while
    # this snapshot always comes from 'csv_files/Vehicles.csv' — the two
    # can disagree; confirm which source is authoritative.
    vehicles_db = FileHandler()
    vehicles = vehicles_db.load_from_csv('csv_files/Vehicles.csv')

    def __init__(self, path_to_vehicles_db):
        # Path used by the update/search helpers below.
        self.path_to_vehicles_db = path_to_vehicles_db

    def update_vehicle_with_id(self, id, **kwargs):
        """Update the CSV row whose id matches *id* with the given fields."""
        return self.vehicles_db.update_csv(self.path_to_vehicles_db, id, kwargs)

    def get_car_by_features(self, and_or='and', **kwargs):
        """Return rows matching *kwargs*, combined with AND or OR logic."""
        return self.vehicles_db.get_rows_matching_search_criteria(self.path_to_vehicles_db, kwargs, and_or)

    def get_time_to_test(self, id):
        """Return days until the next test for vehicle *id*, or False when
        the id is unknown.

        A negative result means the test is overdue.  Tests are due every
        180 days after 'last_test' (format dd-mm-YYYY).
        """
        car_matching_id = []
        for car in self.vehicles:
            if car['id'] == id:
                car_matching_id = car
        # Empty list (no match) and empty dict both fail this length check.
        if len(car_matching_id) == 0:
            return False
        else:
            previous_test_date = car_matching_id['last_test']
            previous_test_date = datetime.strptime(previous_test_date, "%d-%m-%Y")
            days_since_last_test = datetime.now() - previous_test_date
            days_until_next_test = 180 - days_since_last_test.days
            if days_until_next_test > 0:
                print("You have {} days until your next test.".format(days_until_next_test))
            else:
                print("You are overdue for your test by {} days.".format(days_until_next_test * -1))
            return days_until_next_test
def test_convert_to_bin(self):
    """Compiled VM code for ConvertToBin must match the reference output."""
    root_node = CompilationEngine(CONVERT_TO_BIN).compile()
    generated = VMGenerator().process(root_node)
    expected = FileHandler(ROOT_DIR + '/output/ConvertToBin/Main.vm').fileContent
    self.assertEqual(expected, generated)
class User():
    """CSV-backed user store with simple authentication."""

    # Shared FileHandler used for all CSV reads/writes.
    user_db = FileHandler()

    def __init__(self, path_to_database):
        self.path_to_database = path_to_database
        # Snapshot of all user rows, loaded once at construction time.
        self.users = self.user_db.load_from_csv(self.path_to_database)

    def user_auth(self, name, password):
        """Return the user's role when *name*/*password* match a row,
        otherwise False.

        Name comparison is case-insensitive against "first last";
        passwords compare exactly.
        NOTE(review): non-string arguments terminate the whole process via
        exit() instead of raising — confirm this is intended behavior.
        """
        try:
            self.validate_argument_is_string(name, password)
        except Exception as e:
            print(
                "Error: The 'user_auth' function only accepts strings as arguments."
            )
            exit()
        user_exists = False
        for row in self.users:
            username_from_db = "{} {}".format(row['first'], row['last'])
            if username_from_db.lower() == name.lower(
            ) and row['password'] == password:
                # Last matching row wins when duplicates exist.
                user_exists = row['role']
        return user_exists

    def add_user(self, **kwargs):
        """Append a new user row built from *kwargs* to the CSV."""
        return self.user_db.append_to_csv(self.path_to_database, kwargs)

    def validate_argument_is_string(self, *args):
        """Raise Exception when any argument is not a str."""
        for argument in args:
            if not isinstance(argument, str):
                raise Exception()
def main():
    """Interactive directory session: load entries, process commands, save."""
    print("Starting session...")
    file_name = input(
        "Enter name of file to read and write to (press 'Enter' to use default — 'directory.txt': "
    )
    reader = FileHandler(file_name)
    collection = Directory()
    reader.read_file(collection)
    print_directions()
    while True:
        command = enter_command()
        lowered = command.lower()
        if lowered == "quit":
            break
        if lowered == "view directory":
            collection.print_directory()
        elif lowered == "entry count":
            collection.print_length()
        # Prefix commands keep the original case-sensitive matching.
        elif first_chars(5, command) == "view ":
            collection.print_entry(remaining_chars(5, command))
        elif first_chars(5, command) == "edit ":
            collection.edit_entry(remaining_chars(5, command))
        elif first_chars(4, command) == "add ":
            name = remaining_chars(4, command)
            email = add_field("email")
            phone = add_field("phone")
            department = add_field("department")
            title = add_field("title")
            education = add_field("education")
            collection.add_employee(
                Employee(name, email, phone, department, title, education))
        elif first_chars(7, command) == "delete ":
            collection.remove_employee(remaining_chars(7, command))
        elif command == "help":
            print_directions()
        else:
            print("Invalid command entered")
    writer = FileHandler(file_name)
    writer.write_file(collection)
    print("Session ended")
def load_partial(self):
    """Solve every board from a dump file, saving partial progress on ^C.

    Prompts (in Spanish) for the input dump path and output file, solves
    each board while streaming solutions through the display callback,
    then writes all results.  KeyboardInterrupt saves a timestamped .save
    dump and exits; a missing input file returns to the menu.
    """
    input_file = str(input("Ingrese la ruta del dump de entrada: "))
    output_file = str(input("Ingrese la ruta del archivo de salida: "))
    file_handler = FileHandler(input_file, output_file)
    results = {}
    try:
        file_handler.load_boards_file_dump()
        for key in range(file_handler.boards_count):
            board = Board(file_handler.get_board(key))
            solver = Solver(board, callback=self.show_solution_callback)
            solver.solve()
            # Keep the first solutions recorded for each board key.
            results.setdefault(key, solver.solutions)
    except FileNotFoundError:
        print("No se encontró el archivo especificado")
        self.display_menu()
    except KeyboardInterrupt:
        # Snapshot progress so the run can be resumed later.
        dump_name = datetime.datetime.now().strftime(
            "%Y-%b-%d_%H-%M-%S") + ".save"
        self.save_partial(file_handler, results, dump_name)
        print("Se ha interrumpido la ejecución, dump guardado en %s" % dump_name)
        exit(0)
    file_handler.write_results_to_file(results)
    print("Ha finalizado la resolución! encontrará en %s los resultados" % output_file)
def asr():
    """Run speech recognition on every uploaded audio blob; return JSON."""
    results = []
    for field_name in request.files:
        # Only well-formed 'audio_blob*' uploads are processed.
        if not field_name.startswith('audio_blob'):
            continue
        uploaded = request.files[field_name]
        if not FileHandler.check_format(uploaded):
            continue
        response_code, filename, response = FileHandler.get_recognized_text(uploaded)
        # A zero response code means recognition produced a media file.
        audio_url = url_for('media_file', filename=filename) if response_code == 0 else None
        results.append({
            'response_audio_url': audio_url,
            'response_code': response_code,
            'response': response,
        })
    return json.dumps({'r': results}, ensure_ascii=False)
def modules_connections(self):
    """Instantiate every editor module against the shared text pad.

    The constructors wire themselves up as a side effect, so the created
    instances do not need to be kept.
    """
    for module in (LineMain, Scrollbar, StationeryFunctions, Popup,
                   FindReplaceFunctions, FileHandler, FontChooser):
        module(self.pad)
def __init__(self):
    """Interactively enrich each record in BooksRecord.txt.

    For every line of the old file the user is prompted for an ISBN,
    publication date and copy count; the combined record is appended to
    UpdatedBooksRecord.txt.
    """
    with FileHandler('BooksRecord.txt', 'r') as oldFile:
        for line in oldFile:
            # BUG FIX: user-facing message said "bool" instead of "book".
            print("Updating details for book: {0}".format(
                self.parse_title(line)))
            with FileHandler('UpdatedBooksRecord.txt', 'a') as newFile:
                isbn: str = self.validate_length(
                    "Please enter isbn number: ",
                    "ISBN should consist of 11 digit", 11)
                date_published: str = self.validate_length(
                    "Please enter the date published: ",
                    "Date published "
                    "should has 8 "
                    "digit!", 8)
                # NOTE(review): annotated int but valid_range's return type
                # is not visible here — confirm.
                num_copies_str: int = self.valid_range(
                    "Please enter number of copies: ",
                    "Number of copies shoud "
                    "be larger than one", 0)
                newFile.write("{0}{1}#{2}#{3}#".format(
                    line, isbn, date_published, num_copies_str))
def add_new_scores():
    """Append member scores for one date to ScoreDetails.txt.

    Prompts repeatedly for membership numbers until a blank entry is
    given; each score must fall in the inclusive range 50..99.
    """
    score_date: str = input("Input the date for the scores")
    with FileHandler("ScoreDetails.txt", 'a') as file:
        while (member_ship_number := input("Input the Membership number")) != "":
            # Re-prompt until the score is inside the accepted range.
            while not (50 <= (score := int(input("Input the score"))) <= 99):
                print("Input a valid score from 50 to 99")
            # NOTE(review): fields are concatenated with no separator —
            # confirm the downstream reader expects this fixed layout.
            file.write("{0}{1}{2}".format(member_ship_number, score_date, score))
def test_pong(self):
    """Every .jack file in the Pong directory must compile to its
    reference .vm file."""
    for filename in os.listdir(PONG_DIR):
        if not filename.endswith('.jack'):
            continue
        root_node = CompilationEngine(PONG_DIR + '/' + filename).compile()
        generated = VMGenerator().process(root_node)
        reference_path = ROOT_DIR + '/output/Pong/' + filename[:-5] + '.vm'
        expected = FileHandler(reference_path).fileContent
        self.assertEqual(expected, generated)
def __init__(self, data, priors):
    # TODO: allow the user to skip the Gelman-Rubin step
    # TODO: revisit the Temporary and showCorner methods of AISampler
    # TODO: revisit the showPDF method
    # TODO: in the notebook, before running the MCMC, present the methods related to data visualisation, ...
    # TODO: add a method that adds or removes an observation point from the data.
    # TODO: the highly probable initial guesses should live in a text file stored in PyAstrOFit/rsc.
    """ The constructor of the class: load observations (falling back to a
    FileHandler-based file load), convert observation times to Julian
    dates, and initialize the prior/initial state for the sampler. """
    # -----------------------------
    # Main part of the constructor
    # -----------------------------
    ## Get the data and convert the time of observation in JD
    self._ob, self._er = get_planet_data(data)
    if self._ob is None and self._er is None:
        # *data* was not a known planet name; treat it as a file path.
        try:
            FileHandler.__init__(self,data,addContent=[])
            (self._ob, self._er) = FileHandler.getInfo(self)
        except:
            raise Exception("Crash during the load of your data. Is the path correct ? --> {}".format(data))
        # NOTE(review): .values()[0] is Python 2 only — dict views are not
        # indexable on Python 3.
        if len(self._ob.values()[0]) == 0:
            raise Exception("The file '{}' seems to be empty.".format(data))
    # Convert every observation timestamp (ISO, UTC) to a Julian date.
    self._ob['timePositionJD'] = [Time(self._ob['timePosition'][k],format='iso',scale='utc').jd for k in range(len(self._ob['timePosition']))]
    self._l = len(self._ob['timePositionJD'])
    self._data = data
    # Six orbital parameters are fitted.
    self._ndim = 6
    self._pKey = ['semiMajorAxis','eccentricity','inclinaison', 'longitudeAscendingNode', 'periastron', 'periastronTime']
    ## Check what contains the attribut priors and initialize the initial state self._theta0
    self._priors, self._theta0 = AISampler.priors_check(self,priors)
    ## List class attributs
    self._list = [s for s in dir(self) if s[0] == '_' and s[1] != '_' and s[1:12] != 'FileHandler' and s[1:4] != 'get']
def WOC_load_test(graph_location, log_location, superiority_tolerance=0.8):
    """Load a saved crowd solution and visualise it as a heat map."""
    # Read in test data and calculate edges.
    test_graph = Graph(FileHandler.read_graph(graph_location))
    test_graph.build_graph()
    crowd = CrowdSolution(test_graph)
    crowd.load(log_location)
    crowd.display()
    crowd.generate_heat_map(superiority_tolerance=superiority_tolerance)
def WOC_load_and_complete_test(graph_location, log_location, superiority_tolerance=0.8):
    """Load a crowd solution, complete it greedily, and plot the route."""
    # Read in test data and calculate edges.
    test_graph = Graph(FileHandler.read_graph(graph_location))
    test_graph.build_graph()
    crowd = CrowdSolution(test_graph)
    crowd.load(log_location)
    crowd.generate_heat_map(superiority_tolerance=superiority_tolerance)
    crowd.complete_graph_greedy_heuristic(superiority_tolerance=superiority_tolerance)
    crowd.route.plot()
def createTable(cls, model):
    """Register *model* as a new table in the "data" page file.

    Appends the model's name and the page number where its rows will
    live, then reserves that page with a "null" placeholder row.
    NOTE(review): .next() and plain / division are Python 2 idioms; on
    Python 3 this needs next(...) and // — confirm target interpreter.
    """
    # write a model to the file
    storage = FileHandler("data")
    page = []
    # TODO warp in try-except
    page = storage.read_page().next()
    tablePage = len(page)/2 + 1  # 2 is the number of fields in a Table entity
    page.append(model.__name__)
    page.append(tablePage)
    storage.write_page(page)
    storage.write_page(["null", ], tablePage, True)
def __init__(self):
    """Set up a new game session: player, menu state, timers and deck."""
    # Initialize the file
    self.__file = FileHandler()
    self.options = self.__file.getOptions()
    # Get the last player
    playerName = self.__getLastPlayer()
    self.loadPlayer(playerName)
    # Set the Game menu
    self.__status = self.IN_MENU
    self.resetTimer()
    # Points
    self.__resetModCounter()
    self.__resetPointModifier()
    # Deck
    self.__initializeDeck()
    # Recorder honours the player's undo preference.
    self.__recorder = Recorder(self.__player.get('options.undo'))
def __init__(self):
    """Start with an empty, unmodified address book sorted by name."""
    self.changed = False  # dirty flag for unsaved modifications
    self.sortMethod = ("name", False)  # (attribute, descending?)
    self.contacts = []  # holds contact objects
    self.FileHandler = FileHandler()
class Game:
    """Card game controller: player persistence, deck state, scoring and
    timing.  The class constants are the values of the game's status
    state machine.
    """

    PAUSED = 0
    IN_GAME = 1
    FINISHED = 2
    IN_MENU = 3
    QUIT = 4

    def __init__(self):
        # Initialize the file
        self.__file = FileHandler()
        self.options = self.__file.getOptions()
        # Get the last player
        playerName = self.__getLastPlayer()
        self.loadPlayer(playerName)
        # Set the Game menu
        self.__status = self.IN_MENU
        self.resetTimer()
        # Points
        self.__resetModCounter()
        self.__resetPointModifier()
        # Deck
        self.__initializeDeck()
        self.__recorder = Recorder(self.__player.get('options.undo'))

    ##
    # Builds the deck and temporary deck piles
    ##
    def __initializeDeck(self):
        # Initialize discard columns
        self.__discardPiles = [Deck(), Deck(), Deck(), Deck()]
        deck = Deck()
        deck.buildFullDeck()
        deck.shuffle()
        self.__deck = deck

    ##
    # Restores the previously recorded state, when undo is allowed
    ##
    def undo(self):
        if self.__recorder.canUndo():
            state = self.__recorder.undo()
            if state is not None:
                self.__applyState(state)

    ##
    # Overwrites score, deck and piles with a recorded snapshot
    ##
    def __applyState(self, state):
        self.__player.set('score', state['score'])
        self.__deck = state['deck']
        self.__discardPiles = state['discardPiles']

    ##
    # Snapshots the mutable game state (deep copies) for later undo
    ##
    def __saveState(self):
        state = {
            'score': self.__player.get('score'),
            'deck': deepcopy(self.__deck),
            'discardPiles': deepcopy(self.__discardPiles)
        }
        self.__recorder.pushState(state)

    ##
    # Sets the current game status
    ##
    def __setGameStatus(self, status):
        self.__status = status

    ##
    # Returns the last recorded player or the default
    ##
    def __getLastPlayer(self):
        name = self.__file.getLatestPlayerByName()
        if name is None:
            return Player.DEFAULT_PLAYER
        return name

    def getPlayer(self):
        return self.__player

    ##
    # Returns [name, value] rows of the player's stats, with the time
    # stat rendered as MM:SS
    ##
    def getPlayerStats(self):
        stats = self.__player.getAllStats(options=False)
        # Pretty print time
        stats['time'] = time.strftime("%M:%S", time.gmtime(int(stats['time'])))
        table = []
        for stat in stats:
            table.append([stat, stats[stat]])
        return table

    ##
    # Sets the current game status to running
    ##
    def startGame(self):
        self.__setGameStatus(Game.IN_GAME)
        self.__initializeDeck()
        self.deal()
        # First deal of the game, reset the state
        self.__recorder.toggleUndos(self.__player.get('options.undo'))
        self.__recorder.reset()
        self.__resetModCounter()
        self.__resetPointModifier()
        self.resetTimer()
        self.__startTimer()

    def isInGame(self):
        return self.getGameStatus() == Game.IN_GAME

    # NOTE(review): name keeps the historical typo ("isPaued") so existing
    # callers continue to work.
    def isPaued(self):
        return self.getGameStatus() == Game.PAUSED

    def isFinished(self):
        return self.getGameStatus() == Game.FINISHED

    def isInMenu(self):
        return self.getGameStatus() == Game.IN_MENU

    def quit(self):
        return self.getGameStatus() == Game.QUIT

    ##
    # Sets the current game status to paused
    ##
    def pauseGame(self):
        self.__player.addTo('time', self.__pauseTimer())
        self.__setGameStatus(Game.PAUSED)

    ##
    # Sets the current game status to finished
    ##
    def finishGame(self):
        self.__setGameStatus(Game.FINISHED)
        if self.didPlayerWin():
            self.__player.addTo('gamesWon', 1)
        else:
            self.__player.addTo('gamesLost', 1)
        self.__player.addTo('time', self.__pauseTimer())
        self.savePlayer(self.__player)

    def quitGame(self):
        self.__setGameStatus(Game.QUIT)
        self.savePlayer(self.__player)

    ##
    # Saves the active Player to file
    ##
    def savePlayer(self, player):
        self.__file.savePlayer(player)

    ##
    # Loads a Player from file, creating and saving a fresh one when the
    # name is unknown
    ##
    def loadPlayer(self, playerName):
        playerInfo = self.__file.loadPlayerByName(playerName)
        if playerInfo is not None:
            self.__player = Player(playerInfo)
            self.__file.saveLatestPlayer(playerName)
        else:
            self.__player = Player(playerName)
            self.__file.savePlayer(self.__player)

    ##
    # Returns the 10 top scores from file
    ##
    def getTopScores(self):
        scores = self.__file.getScores()
        scores.sort(reverse=True)
        # BUG FIX: the original returned scores[0, 10], which raises
        # TypeError on a list; a slice returns the top ten entries.
        return scores[:10]

    ##
    # Returns the 10 top times from file
    ##
    def getTopTimes(self):
        times = self.__file.getTimes()
        times.sort(reverse=True)
        # BUG FIX: same tuple-index problem as getTopScores.
        return times[:10]

    ##
    # Returns all Players from file
    ##
    def getPlayerNames(self):
        return self.__file.listAllPlayers()

    ##
    # Returns the current game status
    ##
    def getGameStatus(self):
        return self.__status

    ##
    # Returns the current facing playable Cards
    ##
    def getCurrentFacingCards(self):
        cards = []
        for deck in self.__discardPiles:
            cards.append(deck.getCardAt(-1))
        return cards

    ##
    # Renders the four piles side by side, one row per card depth
    ##
    def printCards(self):
        printableDecks = []
        for deck in self.__discardPiles:
            printableDecks.append(deck.getDeckPrint())
        output = ''
        found = True
        index = 0
        while found:
            found = False
            row = ''
            for deck in printableDecks:
                row += ' '
                try:
                    card = deck[index]
                except IndexError:
                    card = None
                if card is not None:
                    found = True
                    row += str(card)
                else:
                    # Blank placeholder keeps the columns aligned.
                    row += ' ' * 7
            index += 1
            output += '\n' + row
        return output

    ##
    # Deals four Cards from the deck
    ##
    def deal(self):
        if self.__deck.cardsRemaining() > 0:
            self.__saveState()
            for el in range(len(self.__discardPiles)):
                card = self.__deck.popCard()
                self.__discardPiles[el].pushCard(card)
            self.__resetModCounter()
            self.__resetPointModifier()
            return self.getCurrentFacingCards()
        # no more cards left, game over
        self.finishGame()
        return None

    ##
    # Removes a Card from its pile
    ##
    def rmCard(self, index):
        if self.__canRemoveCardFromPile(index):
            self.__saveState()
            self.incrementModCounter()
            self.__discardPiles[index].popCard()
            self.addRemovedCardPoints()

    ##
    # Moves the facing card at *index* onto the first empty pile
    ##
    def mvCard(self, index):
        if index in range(0, len(self.__discardPiles)):
            cardInQuestion = self.__discardPiles[index].getFacingCard()
            if cardInQuestion is not None:
                for pile in self.__discardPiles:
                    if pile.getFacingCard() is None:
                        self.__saveState()
                        self.__discardPiles[index].popCard()
                        pile.pushCard(cardInQuestion)
                        return True
                print(str(cardInQuestion) + ' cannot be moved')
        print('Invalid column selected')

    ##
    # A card is removable when another pile's facing card dominates it
    ##
    def __canRemoveCardFromPile(self, index):
        if index in range(0, len(self.__discardPiles)):
            cardInQuestion = self.__discardPiles[index].getFacingCard()
            print('card in question: ' + str(cardInQuestion))
            if cardInQuestion is not None:
                for pile in self.__discardPiles:
                    if pile.dominatesCard(cardInQuestion):
                        return True
                print(str(cardInQuestion) + ' cannot be removed')
        print('Invalid column selected')
        return False

    ##
    # Adds points to the Player's score multiplied by the mod counter
    ##
    def addRemovedCardPoints(self):
        # Every second removal bumps the point modifier.
        if self.__modCounter == 2:
            self.__resetModCounter()
            self.__modifier += 1
        updated = self.__player.get('score') + (10 * self.getModifier())
        self.__player.set('score', updated)

    ##
    # Adds one to the mod counter
    ##
    def incrementModCounter(self):
        self.__modCounter += 1

    ##
    # Resets the mod counter back to default (0)
    ##
    def __resetModCounter(self):
        self.__modCounter = 0

    ##
    # Resets the modifier back to default (1)
    ##
    def __resetPointModifier(self):
        self.__modifier = 1

    ##
    # Increases the point modifier by 1
    ##
    def increasePointModifier(self):
        self.__modifier += 1

    def getModifier(self):
        return self.__modifier

    ##
    # Adds a static number of points when winning the game
    ##
    def addGameWonPoints(self):
        # BUG FIX: set() overwrote the score with 100; addTo() awards the
        # bonus on top of the existing score, as documented.
        self.__player.addTo('score', 100)

    def getCurrentTime(self):
        return time.strftime("%M:%S", time.gmtime(int(time.time() - self.__startTime)))

    def resetTimer(self):
        self.__startTime = 0
        self.__pausedTime = 0

    def __startTimer(self):
        # Resuming from a pause keeps the already-elapsed time.
        diff = 0
        if self.__pausedTime > 0:
            diff = int(self.__pausedTime - self.__startTime)
            self.__pausedTime = 0
        self.__startTime = time.time() - diff

    def __pauseTimer(self):
        self.__pausedTime = time.time()
        return int(self.__pausedTime - self.__startTime)

    ##
    # Win = every pile holds exactly one card, and it is position 1
    ##
    def didPlayerWin(self):
        for deck in self.__discardPiles:
            if deck.cardsRemaining() != 1 or deck.getFacingCard().getPosition() != 1:
                return False
        return True
print "Any Mapping? ", len(mapping) > 0 assert not len(mapping) > 0 d = self.diff(residues[i], residues[j], mapping) print i, j for key in d.keys(): print key, d[key], "Within Tolerance? ", abs(d[key]) < tol assert abs(d[key]) < tol print residues[i] print residues[j] if __name__ == '__main__': from FileHandler import FileHandler from Linking import Linking import pylab as pl fh = FileHandler() statsfile = 'shiftPresets/bmrb.shift' folder = "Datasets/Ubiquitin/" fn = folder + "residue_lists/Ub_opt_relabeled.pasta" seqfile = folder + "sequences/Ub.fasta" result_folder = folder + "Results/" tol = .6 strat = "Joint" residues = fh.read_pasta(fn, statsfile) link = Linking(residues) L = link.linking_matrix(strat, tolerance=tol, conservative=True) L2 = link.linking_matrix(strat, tolerance=tol, conservative=False) pl.matshow(L) pl.matshow(L2)
from random import choice n = float(get_no_of_carbon_shifts(residues)) m = float(get_no_of_ambiguous_keys(residues)) while m/n < percentage: print m/n previous = choice([True,False]) i = get_indices(residues, previous) res= get_rnd_residue(residues,i) swap_rnd_atom(res,previous) i = get_indices(residues, previous) m = float(get_no_of_ambiguous_keys(residues)) return residues if __name__ == '__main__': fh = FileHandler() folder = "Datasets/Ubiquitin/" pastafn = "residue_lists/Ub_bmrb_unassigned.pasta" pastafn2 = "residue_lists/Ub_opt_unambiguous_unassigned.pasta" presetfn = 'shiftPresets/bmrb.shift' seqfn = folder + "sequences/Ub_bmrb.fasta" result_folder = folder + "Results/" residues = fh.read_pasta(folder + pastafn, presetfn) residues2 = fh.read_pasta(folder + pastafn2, presetfn) ## ADD NOISE # noise = linspace(0,3,31) # for n in noise: # print residues[0].shifts_i.values() # new_res = add_noise(deepcopy(residues), n)
def createTable(self, model):
    """Append *model*'s name to the storage page and write it back.

    NOTE(review): .next() is the Python 2 iterator protocol; Python 3
    requires next(storage.read_page()).
    """
    # write a model to the file
    storage = FileHandler("data")
    page = storage.read_page().next()
    page.append(model.__name__)
    storage.write_page(page)
def RunTweakerFromProgram(conf):
    """Auto-orient every part of a mesh file for 3D printing.

    Loads the mesh named by ``conf.inputfile``, computes an optimal rotation
    matrix per part with :class:`Tweak` (or the identity matrix when
    ``conf.convert`` is set), optionally prints per-part statistics, and —
    unless ``conf.result`` is set — writes the rotated mesh to
    ``conf.outputfile``.

    Parameters
    ----------
    conf : object
        Configuration namespace providing ``inputfile``, ``outputfile``,
        ``output_type``, ``convert``, ``extended_mode``, ``verbose``,
        ``show_progress``, ``favside``, ``volume`` and ``result``.
    """
    # Get the command line arguments. Run in IDE for demo tweaking.
    stime = time()
    try:
        FileHandler_local = FileHandler()
        print(conf.inputfile)
        objs = FileHandler_local.load_mesh(conf.inputfile)
        if objs is None:
            sys.exit()
    except(KeyboardInterrupt, SystemExit):
        raise SystemExit("Error, loading mesh from file failed!")

    # Start of tweaking.
    if conf.verbose:
        print("Calculating the optimal orientation:\n {}"
              .format(conf.inputfile.split(os.sep)[-1]))

    info = dict()
    for part, content in objs.items():
        mesh = content["mesh"]
        info[part] = dict()
        if conf.convert:
            # Conversion only: keep the original orientation (identity matrix).
            info[part]["matrix"] = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
        else:
            try:
                cstime = time()
                x = Tweak(mesh, conf.extended_mode, conf.verbose,
                          conf.show_progress, conf.favside, conf.volume)
                info[part]["matrix"] = x.matrix
                info[part]["tweaker_stats"] = x
            except (KeyboardInterrupt, SystemExit):
                raise SystemExit("\nError, tweaking process failed!")

            # List tweaking results.
            # Fix: this block references `x` and `cstime`, which only exist in
            # the tweaking branch — in the original it also ran in convert
            # mode and raised NameError. It now lives inside the else branch.
            if conf.result or conf.verbose:
                print("Result-stats:")
                print(" Tweaked Z-axis: \t{}".format(x.alignment))
                print(" Axis, angle:   \t{}".format(x.euler_parameter))
                print(""" Rotation matrix:
    {:2f}\t{:2f}\t{:2f}
    {:2f}\t{:2f}\t{:2f}
    {:2f}\t{:2f}\t{:2f}""".format(x.matrix[0][0], x.matrix[0][1], x.matrix[0][2],
                                  x.matrix[1][0], x.matrix[1][1], x.matrix[1][2],
                                  x.matrix[2][0], x.matrix[2][1], x.matrix[2][2]))
                print(" Unprintability: \t{}".format(x.unprintability))
                print("Found result: \t{:2f} s\n".format(time() - cstime))

    print(conf.result)
    if not conf.result:
        print('writing output file')
        FileHandler_local.write_mesh(objs, info, conf.outputfile, conf.output_type)

    # Success message
    if conf.verbose:
        print("Tweaking took:  \t{:2f} s".format(time() - stime))
        print("Successfully Rotated!")
# non_ambiguous_keys.append(key) if previous: mapping.append((key, self.rules[key[:-3]][0])) else: mapping.append((key, self.rules[key][0])) return mapping if __name__ == "__main__": import pylab as pl from numpy import max, mean, take, zeros from Residue import PastaResidue, AminoAcid from MaxLikelihood import Likelihood from FileHandler import FileHandler fh = FileHandler() fn = "/is/ei/jhooge/EclipseWorkspaces/PASTA/PyPASTA/GMM/src/"\ "Classification/tests/reference_lists/bmrb.shift" # pastalist = 'tests/residue_lists/all_singles.pasta' pastalist = "Datasets/Ubiquitin/residue_lists/"\ "incompleteData/Ub_bmrb_missing_shifts_0.50.pasta" statsfile = fn amino_acids = fh.read_preset(fn) residues = fh.read_pasta(pastalist, statsfile) toAA = AbstractMapping.create("on_amino_acid") toRes = AbstractMapping.create("on_residue") aas = amino_acids del aas[1]
'''
Created on Jun 10, 2012

@author: jhooge
'''

if __name__ == '__main__':
    from FileHandler import FileHandler
    from Mapping import Mapping
    from Mapping2 import AbstractMapping
    from Definitions import three2One
    from MaxLikelihood import Likelihood
    from numpy import array,mean, max
    import pylab as pl

    # Demo setup: load the "all singles" residue list and its sequence,
    # using the BMRB reference statistics.
    fh = FileHandler()
    pairs = 'tests/residue_lists/all_pairs.pasta'
    singles = 'tests/residue_lists/all_singles.pasta'
    seqfile_pairs = 'tests/sequences/all_pairs.fasta'
    seqfile_singles = 'tests/sequences/all_singles.fasta'
    statsfile = 'tests/reference_lists/bmrb.shift'
    single_residues = fh.read_pasta(singles, statsfile)
    amino_acids = fh.read_seq(seqfile_singles, statsfile)
    # Carbon-atom name -> column index lookup table.
    # NOTE(review): the script appears to continue beyond this view.
    atoms = {'CO' :0, 'CA' :1, 'CB' :2, 'CG' :3,
             'CG1':4, 'CG2':5, 'CD' :6, 'CD1':7,
             'CD2':8, 'CE' :9, 'CE1':10, 'CE2':11,
             'CE3':12, 'CZ':13, 'CZ2':14, 'CZ3':15,
             "CH2":16}
# NOTE(review): chunk begins at a bare `else:` of a branch whose `if` lies
# outside the visible region; `residue_list`, `m`, `x`, and `ones_like` are
# also defined earlier. Residues with no carbon shifts get a uniform row
# (ones) in the likelihood matrix, i.e. an uninformative prior.
else:
    i = 0
    for r in residue_list:
        shifts_i, keys_i = m.get_carbons(r, previous=False)
        if len(shifts_i) == 0:
            x[i] = ones_like(x[i])
        i += 1
return x

if __name__ == '__main__':
    from FileHandler import FileHandler
    from numpy import argmax, mean, max
    import pylab as pl

    # Demo: compute typing likelihoods for the "all singles" list with both
    # summarizers (mean vs. max) and for own (i) vs. previous (i-1) shifts.
    fh = FileHandler()
    L = Likelihood()
#    pastafile = 'multiple_test_files/Ubiquitin/Ub_new.pasta'
    pastafile = 'tests/residue_lists/all_singles.pasta'
    statsfile = 'shiftPresets/bmrb.shift'
    residues = fh.read_pasta(pastafile, statsfile)
    amino_acids = fh.read_preset(statsfile)
    print ' '.join([aa.three_let for aa in amino_acids])
#    r = residues[1]
#    print r.name
#    p11 = L.calc_likelihoods(residues, amino_acids, previous=False, summarize=mean)
#    p12 = L.calc_likelihoods(residues, amino_acids, previous=True, summarize=mean)
    p21 = L.calc_likelihoods(residues, amino_acids, previous=False, summarize=max)
    p22 = L.calc_likelihoods(residues, amino_acids, previous=True, summarize=max)
# NOTE(review): chunk begins mid-method — `res`, `centroids`, and `id` are
# defined outside the visible region; this assigns the HN shift from a
# cluster centroid. `id` shadows the builtin in the original code.
res.shifts_i['HN'] = centroids[id][1]

def get_negative_intensities(self, systems):
    """Return the indices of spin systems containing any negative intensity.

    Parameters are assumed to be array-like (the `(system < 0).any()` test
    requires numpy-style elementwise comparison — TODO confirm).
    """
    idx = []
    for i, system in enumerate(systems):
        if (system < 0).any():
            idx.append(i)
    return idx

if __name__ == '__main__':
    from FileHandler import FileHandler
#    from Visualization.Grouping import plot_2d_clustering
    from Pasta import Pasta
    from numpy import take

    # Demo: read 2D and 4D NHSQC peak lists for Ubiquitin.
    group = Grouping()
    fh = FileHandler()
#    data4d = Load('/tmp/ubq4d.pkl')
#    data2d = Load('/tmp/ubq2d.pkl')
    identifiers, data2d = fh.pasta_from_2D_Nhsqc('/is/ei/jhooge/EclipseWorkspaces/PASTA/PyPASTA/GMM/src/Classification/multiple_test_files/Ubiquitin/Nhsqc-Ub.list')
    identifiers, data4d = fh.pasta_from_4D_Nhsqc('/is/ei/jhooge/EclipseWorkspaces/PASTA/PyPASTA/GMM/src/Classification/multiple_test_files/Ubiquitin/CCCANH-4D-ref.list')
#    x = data2d
#    y = take(data4d,(2,3),1)
#
#    tol = (0.3, 0.03)
#    n = 1
#
#    labels, centers = group.assign_amides(x, y, tol, n=n)
def main():
    """Simulated-annealing floorplanner driver (Python 2).

    Reads configuration, power, and constraint files from argv, builds the
    reconfigurable regions, then runs a simulated-annealing loop: each
    iteration proposes a swap move, solves a MILP placement, computes
    temperatures, and accepts/rejects by acceptance probability. When the
    SA temperature cools below 1, the cheapest recorded solution is replayed.
    """
    if len(sys.argv) < 3:
        sys.stderr.write('Usage: python paFloorplanner.py <confu.dat> <power.txt> <const.txt>\n')
        sys.exit(-1)
    fh = FileHandler(sys.argv[1], sys.argv[2], sys.argv[3])

    #Data structures to hold the input information
    rrCount = fh.getRRCount()
    rrList = fh.getRRList()
    thermCond = fh.getThermCond()
    aSect = fh.getASect()
    sliceHeight = fh.getSliceHeight()
    sliceWidth = fh.getSliceWidth()
    airTemp = fh.getAirTemp()
    airResistance = fh.getAirResistance()
    rrManager = RRManager(thermCond, aSect, sliceHeight, sliceWidth, airTemp, airResistance, fh)
    powerDict = fh.getPowerDict()
    for rrNum in xrange(rrCount):
        #what values to give as cx and cy?
        # NOTE(review): backtick repr and `1000*random()` give each region a
        # random initial coordinate-ish value — Python-2-only syntax.
        rr = ReconfigurableRegion("rec" + `rrNum + 1`, 0, 0, powerDict[rrNum], 1000*random(), rrManager)
        rrManager.addRR(rr)
    #print rrManager.getSequence1()
    fh.updateDat(rrManager.sequencePair, rrManager.distanceVector)

    # SA parameters: very large initial temperature, geometric cooling.
    saTemperature = 6161980393991160000
    saCoolingRate = 0.003
    currentSolutionCost = 1000000000000.0
    sequencePair = SequencePair(list(), list())
    distanceVector = [[0 for x in xrange(rrCount)] for x in xrange(rrCount)]
    goodSolutions = []
    fh.incrementalFloorplan(rrManager)
    #while 0:
    while not rrManager.isUniformityReached() and saTemperature > 1:
        '''
        choice = randint(1, 2)
        sequencePair = rrManager.makeSwapMove() #pass this sequence pair to the milp
        if choice == 1:
            sequencePair = rrManager.makeSwapMove() #pass this sequence pair to the milp
        else:
            distanceVector = rrManager.makeDistanceVectorMove()
        '''
        sequencePair = rrManager.makeSwapMove() #pass this sequence pair to the milp
        objVal = rrManager.applyMILP(sequencePair, distanceVector)
        # Magic objective value used by the MILP layer to flag infeasibility.
        if objVal == 817609:
            print "soluzione scartata due to infeasibility"
            continue
        rrManager.calculateTemperatures()
        newSolutionCost = rrManager.getSolutionCost()
        #if it has a better cost, save it in the good solutions array to not lose it
        if currentSolutionCost - newSolutionCost > 0:
            goodSolutions.append(Solution(sequencePair, distanceVector, newSolutionCost))
        if acceptanceProbability(currentSolutionCost, newSolutionCost, saTemperature):
            print("soluzione accettata con "+"Current: "+str(currentSolutionCost)+" New: "+str(newSolutionCost) + " Tmax: "+str(rrManager.getTmax())+" MILP: "+str(rrManager.getMILPObj()))
            rrManager.updateSequencePair(sequencePair)
            #rrManager.updateDistanceVector(distanceVector)
            currentSolutionCost = newSolutionCost
            #rrManager.drawOnBrowser("soluzione accettata")
            rrManager.writeMatlabRegionsFile()
        else:
            print("soluzione scartata con "+"Current: "+str(currentSolutionCost)+" New: "+str(newSolutionCost) + " Tmax: "+str(rrManager.getTmax())+" MILP: "+str(rrManager.getMILPObj()))
        # Geometric cooling schedule.
        saTemperature *= 1 - saCoolingRate
        print("saTemperature: " +str(saTemperature))
    print("SA finito!")

    if saTemperature < 1:
        # Replay the cheapest solution found during the search.
        print "Searching good Solutions for the best one..."
        bestSolution = goodSolutions[0]
        minCost = goodSolutions[0].cost
        for solution in goodSolutions:
            if solution.cost < minCost:
                minCost = solution.cost
                bestSolution = solution
        bestSequencePair = bestSolution.sequencePair
        rrManager.applyMILP(bestSequencePair, distanceVector)
        rrManager.calculateTemperatures()
        rrManager.writeMatlabRegionsFile()
        print " Tmax: "+str(rrManager.getTmax())+" MILP: "+str(rrManager.getMILPObj())