def findEntries():
    """Scan the event frame for neutral-zone entries and append them to listOfEntries."""
    i = 1
    while i < numberOfEvents - 1:
        cur_x = originDataFrame.loc[i, 'x_coord_ft']
        prev_x = originDataFrame.loc[i - 1, 'x_coord_ft']
        # Only events inside the neutral zone can start an entry attempt.
        if 75 <= cur_x <= 125:
            if prev_x - cur_x >= dropPassThreshold:
                # Drop pass: puck moved backwards by at least the threshold.
                new_entry = entry.Entry()
                new_entry.add_style("drop")
                # mapEntry fills in the rest of the entry and returns the
                # index to resume scanning from.
                i = mapEntry(new_entry, i)
                listOfEntries.append(new_entry)
                continue
            elif cur_x - prev_x >= dumpThreshold and 125 <= prev_x <= 150:
                # Dump and chase: puck fired forward from just outside the zone.
                new_entry = entry.Entry()
                new_entry.add_style("dump")
                i = mapEntry(new_entry, i - 1)
                listOfEntries.append(new_entry)
                continue
        i += 1
def testUniqueName(self):
    """Verify Entry.GetUniqueName builds dotted names from the node parent chain."""
    Node = collections.namedtuple('Node', ['name', 'parent'])
    root = Node('root', None)
    self.assertEqual('root', entry.Entry(None, None, root).GetUniqueName())
    child = Node('subnode', root)
    self.assertEqual('root.subnode',
                     entry.Entry(None, None, child).GetUniqueName())
def test_that_backlog_raises_an_overflow_exception_after_inserting_more_than_its_limit(self):
    '''Inserting one item past the backlog's limit must raise MSOverflowException.'''
    def make_item():
        # Fresh Entry per insertion, mirroring real usage.
        return entry.Entry("", "", "", 0.69, 1985, 1, "")

    limit = current_backlog.Current_Backlog._ITEMS_LIMIT
    for _ in range(limit):
        self._bklog1.insert_item(make_item())
    with self.assertRaises(media_sublist.MSOverflowException):
        self._bklog1.insert_item(make_item())
    # The failed insert must not have grown the backlog past its limit.
    self.assertEqual(len(self._bklog1.get_items()), limit)
def __init__(self, tlb_type, entry_num):
    """Initialize a TLB of the given associativity.

    Args:
        tlb_type: one of the Structure.* associativity constants.
        entry_num: total number of TLB entries.

    Raises:
        NotImplementedError: for DIRECT_MAP or any unknown tlb_type.
    """
    self.stats = stats.Statistics()
    self.tlb_type = tlb_type
    if tlb_type == Structure.FULLY_ASSOCIATIVE:
        # A fully associative cache is one set holding every entry, so no
        # index bits are needed.
        self.set_num = entry_num
        self.set_index = 1
        # BUGFIX: [tlb_entry.Entry()] * entry_num would alias a single
        # Entry object across every slot (mutating one slot would mutate
        # them all); build distinct entries instead.
        self.content = [tlb_entry.Entry() for _ in range(entry_num)]
    elif tlb_type == Structure.DIRECT_MAP:
        # Intended layout once implemented: set_num = 1,
        # set_index = entry_num, then init_content_set().
        raise NotImplementedError(
            "The direct map tlb is not implemented yet\n")
    elif tlb_type == Structure.SET2_ASSOCIATIVE:
        self.set_num = 2
        # index count = total entries / number of ways
        self.set_index = entry_num // 2
        self.init_content_set()
    elif tlb_type == Structure.SET4_ASSOCIATIVE:
        self.set_num = 4
        self.set_index = entry_num // 4
        self.init_content_set()
    else:
        raise NotImplementedError("unimplemented tlb type")
def edit_task(self):
    """Replace the task at the current index via interactive entry.

    Resets the index to 0 afterwards and returns (old_task, new_task).
    """
    old_task = self.tasks[self.index]
    updated = entry.Entry().enter_all(old_task)
    self.tasks[self.index] = updated
    self.index = 0
    return old_task, updated
def delete(entry_id, force=False, quiet=False):
    """
    Main function for deleting wallabag entries.
    """
    conf.load()
    if not force:
        # Fetch the entry first so the user can confirm by title.
        try:
            resp = api.api_get_entry(entry_id)
            __handle_request_error(resp)
            entr = entry.Entry(json.loads(resp.response))
            print("Do you really wish to delete the following entry?")
            answer = input(entr.title + " [y/N] ")
            if answer.lower() not in ["y", "yes"]:
                exit(0)
        except api.OAuthException as ex:
            print("Error: {0}".format(ex.text))
            print()
            exit(-1)
    try:
        resp = api.api_delete_entry(entry_id)
        __handle_request_error(resp)
        if not quiet:
            print("Entry successfully deleted.")
            print()
        exit(0)
    except api.OAuthException as ex:
        print("Error: {0}".format(ex.text))
        print()
        exit(-1)
def __init__(self, table, db, input_html_path=None, output_html_path=None,
             html_template_path=None):
    """Set up paths, database identifiers, and helper objects.

    Args:
        table: (str) Name of the database table to work with.
        db: (str) Path to the database file; its basename, with "-" and "_"
            turned into spaces, becomes the display name.
        input_html_path: (str) Optional path to the HTML file read as input.
        output_html_path: (str) Optional path the generated HTML is written to.
        html_template_path: (str) Optional path to the HTML template.
    """
    # NOTE: the previous docstring documented a nonexistent `file_path`
    # argument; it now matches the real signature.
    self.status = ""
    self.result = "None"
    self.input_html_path = input_html_path
    self.output_html_path = output_html_path
    self.html_template_path = html_template_path
    self.table = table
    self.db = db
    # Human-readable database name derived from the file name.
    self.db_name = os.path.split(self.db)[1].replace("-", " ").replace(
        "_", " ")
    self.content = None
    self.entry_list = list()
    # Placeholder entry; fields are presumably filled in later -- confirm.
    self.e = entry.Entry(0, "", "", "", "")
    self.fw_to = flwk.FileWork()
    self.fw_from = flwk.FileWork()
def validate_entry(self, filename):
    """Classify *filename* as a single MFT entry or a partition and ingest it.

    A file of exactly 1024 bytes is treated as one MFT entry; anything else
    is treated as a partition image whose entries are walked one by one.
    """
    if os.path.getsize(filename) == 1024:
        self.filetype = 'entry'
    else:
        self.filetype = 'partition'
    if self.filetype == 'partition':
        p = entry.Partition(filename)
        # BUGFIX: the old loop called p.walk() on EVERY iteration, creating
        # a fresh walker each time and restarting the walk; iterate a single
        # walker instead.
        for item in p.walk():
            self.add_entry(item)
    else:
        with open(filename, 'rb') as data:
            d = data.read(1024)
            e = entry.Entry(d)
            try:
                e.validate()
            except ValidationError:
                showwarning(
                    "Invalid Mft entry",
                    "This file is not a valid MFT entry. Its signature value is %s"
                    % e.signature.raw)
            else:
                self.add_entry(e)
def setUp(self):
    """Create one entry via rdbrecord.CreateEntry against the fixture journal,
    capture the results the test methods compare against, then delete the row
    so the fixture database is left unchanged."""
    # NOTE: removed the unused local `numbers_list` from the original.
    table = "entries"
    db = "../fixtures/test_journal"
    sg = sqlitemgr.SQLiteMgr(db)
    conn = sg.make_conn()
    self.sm = sqliteminor.SQLiteMinor(conn, table)
    test_entry = entry.Entry(
        16,
        "<h2 class='title'>A test Title</h2><p>This is a test entry. No Godlore to speak of.</p>",
        "test godlore", datetime.date(2017, 6, 25), table, db)
    # Expected values for the assertions.
    self.number_added_comp = 1
    self.entry_numbers_comp = [
        1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16
    ]
    self.added_entry_matter_comp = [(
        16,
        "<h2 class='title'>A test Title</h2><p>This is a test entry. No Godlore to speak of.</p>",
        '2017-06-25', 'test godlore')]
    # Exercise the code under test and record its observable results.
    self.ce = rdbrecord.CreateEntry(conn, table, db)
    self.number_added = self.ce.create(test_entry.entry_number,
                                       test_entry.entry_text,
                                       test_entry.entry_keywords,
                                       test_entry.entry_date)
    self.create_result = self.ce.result
    self.entry_numbers = [i[0] for i in self.sm.read_all("entry_number")]
    self.added_entry_matter = self.sm.read_search_rows("entry_number", 16)
    # Clean up the inserted row.
    self.sm.delete("entry_number", 16)
def show(entry_id, colors=True, raw=False, html=False):
    """
    Main function for showing an entry.
    """
    conf.load()
    try:
        resp = api.api_get_entry(entry_id)
        __handle_request_error(resp)
        entr = entry.Entry(json.loads(resp.response))
    except api.OAuthException as ex:
        print("Error: {0}".format(ex.text))
        print()
        exit(-1)
    title = entr.title
    try:
        delimiter = "".ljust(os.get_terminal_size().columns, '=')
    except OSError:
        # Output is piped to a file or another process: no terminal size.
        delimiter = "\n"
    # Keep raw HTML only when explicitly requested.
    article = entr.content if html else html2text(entr.content, colors)
    output = "{0}\n{1}\n{2}".format(title, delimiter, article)
    if not raw:
        output = __format_text(output)
    print(output)
def kariz_cache_file(self, fname, size, score=0):
    # Cache (or grow) file `fname` to `size` bytes on this worker.
    # Returns (entry, evicted, status): entry is None when caching failed.
    evicted = []
    if fname in self.status:
        # Already (partially) cached: reuse the existing bookkeeping entry.
        e = self.status[fname]
    else:
        e = entry.Entry(fname)
        e.parent_id = self.id
    if e.size > size:
        # Cached copy already at least as large as requested; nothing to do.
        return e, evicted, status.SUCCESS
    if self.unpinned_space < (
            size - e.size):  # cannot cache the file /w size on this worker
        return None, evicted, status.NO_SPACE_LEFT
    if self.free_space < (size - e.size):
        # Not enough free room for the growth: try to evict using `score`.
        evicted_size, evicted, estatus = self.kariz_free((size - e.size), score)
        if estatus != status.SUCCESS:
            return None, evicted, estatus
    oldsize = e.size
    e.size = size
    # Account only for the growth beyond what was already cached.
    self.addto_used_space(size - oldsize)
    self.status[fname] = e
    # self.pin_file(fname, size - oldsize)
    return e, evicted, status.SUCCESS
def start():
    """One-time import of the hardcoded registration blocks into the datastore."""
    q = do_run.DoRun.all()
    result = q.get()
    if not result:
        # Record that the import ran so the hardcoded blocks are not re-added.
        hasRun = do_run.DoRun(runTrue=True)
        hasRun.put()
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input; this file ships with the app, but consider
        # yaml.safe_load.
        with open('backend/reg_blocks.yaml', 'r') as f:
            doc = yaml.load(f)
        for x in doc['treeroot']:
            block = doc['treeroot'][x]
            # Assemble start/end times from their hour/minute components.
            sTime = datetime.time(block['shour'], block['smin'])
            eTime = datetime.time(block['ehour'], block['emin'])
            entries = entry.Entry(key_name=block['key_name'],
                                  name=block['name'],
                                  sTime=sTime,
                                  eTime=eTime,
                                  day=block['day'])
            entries.put()
def mrd_cache_file(self, fname, size, distance):
    # Cache file `fname` with MRD (reuse-distance) bookkeeping.
    # Returns (entry, evicted, status) on success.
    # NOTE(review): the failure paths return (0, None, ...) and
    # (None, None, ...) inconsistently -- confirm callers handle both.
    evicted = {}
    if fname in self.status:
        # Already tracked: reuse the existing bookkeeping entry.
        e = self.status[fname]
    else:
        e = entry.Entry(fname)
        e.parent_id = self.id
    if self.unpinned_space < size:
        # cannot cache the file /w size on this worker
        return 0, None, status.NO_SPACE_LEFT
    elif self.free_space < size:
        evicted_size, evicted, estatus = self.mrd_free(
            size - e.size, distance)
        # NOTE(review): compares the freed amount against the full `size`
        # even though only size - e.size was requested -- confirm intended.
        if evicted_size < size:
            return None, None, estatus
    # update sizes here before updating the cache tables
    e.size = size
    e.mrd_distance = distance
    e.touch()
    self.status[fname] = e
    self.used_space += e.size
    self.free_space = self.size - self.used_space
    self.mrd_table_bydistance.append(e)
    self.mrd_update_rf_table()
    return e, evicted, status.SUCCESS
def new_entry(self, input_path):
    """
    Register *input_path* as an additional backup input.

    A new Entry wrapping the path is appended to the internal list. When the
    MAX_ENTRIES cap has already been reached the call is a silent no-op.

    :param input_path: The path to a folder or file to backup.
    """
    if self.num_entries() >= MAX_ENTRIES:
        return
    self._entries.append(entry.Entry(input_path))
def test_collect_new_entry(self):
    """collect_new_entry should leave `entry` equal to the expected Entry."""
    work_log.collect_new_entry(entry)
    expected = entry.Entry(name="sachin",
                           title="cricket",
                           time_spent=230,
                           notes="most number of 100s")
    self.assertEqual(entry, expected)
def load(self, journal):
    """Read a CSV journal: header rows refresh the column map, id rows become
    Entry objects appended to entry_."""
    with open(journal) as handle:
        for row in csv.reader(handle):
            # Pad with one empty cell so row[0] is always safe to read.
            row.append('')
            if entry.is_header(row[0]):
                column_map = entry.column_map(row)
            elif entry.is_id(row[0]):
                self.entry_.append(entry.Entry(row, column_map))
def test_if_a_current_backlog_can_initialize_with_an_overly_long_list_of_entries_and_remain_under_its_limit(self):
    '''A Current_Backlog built from a too-long items list must truncate it
    to _ITEMS_LIMIT when its items member is created.'''
    limit = current_backlog.Current_Backlog._ITEMS_LIMIT
    overfull = [
        entry.Entry("", "", "", 0.69, 1985, 1, "")
        for _ in range(limit + 20)
    ]
    bklog = current_backlog.Current_Backlog(overfull)
    self.assertEqual(len(bklog.get_items()), limit)
def claimEntry(ip):
    """Parse a claim command string into an Entry.

    The first URL found in the input is used as proof; when no URL is
    present the proof is "proofless".
    """
    args = getArgs(ip)
    urls = re.findall("http[\S]+", ip)
    proof = urls[0] if urls else "proofless"
    return entry.Entry('claim', args[0], args[1], None, proof, args[2])
def ledgerEntry(ip):
    """Parse a ledger command string into an Entry.

    The amount (args[1]) is cast to float; the first URL found in the input
    is used as proof, falling back to "proofless".
    """
    args = getArgs(ip)
    urls = re.findall("http[\S]+", ip)
    proof = urls[0] if urls else "proofless"
    return entry.Entry('ledger', args[0], 'dummy', float(args[1]), proof,
                       args[2])
def init_content_set(self):
    """Allocate the set-indexed TLB contents: one list of set_num distinct
    Entry objects for each of the set_index indices."""
    self.content = list()
    for _ in range(self.set_index):
        # BUGFIX: the original used [tlb_entry.Entry()] * self.set_num,
        # which aliases ONE Entry object across the whole set -- the exact
        # problem its own comment warned about. A comprehension creates
        # distinct entries. (Also stopped shadowing the name `entry`.)
        ways = [tlb_entry.Entry() for _ in range(self.set_num)]
        self.content.append(ways)
def get_entries(self):
    """Load every row of the CSV file at file_location as an Entry object."""
    with open(self.file_location, "r") as csv_in:
        reader = csv.DictReader(csv_in, delimiter=',', lineterminator='\n')
        # Each row dict's keys/values become Entry keyword arguments.
        return [entry.Entry(**dict(row)) for row in reader]
def _populate_entry_list_objects(self, rows):
    """
    Compose the Entry objects and populate entry_list with them.
    """
    if not rows:
        return
    for row in rows:
        # row layout: (entry_number, entry_text, entry_keywords, entry_date)
        number, text, keywords, date = row[0], row[1], row[2], row[3]
        self.entry_list.append(
            entry.Entry(number, text, keywords, date, self.db))
def setUp(self):
    '''Sets up the entries and backlogs shared by the unit tests'''
    # TEST ENTRIES -- created one at a time with wait_time() between each,
    # matching the original creation order and pacing.
    specs = [
        ("SATURATION", "BROCKHAMPTON", "Hip-Hop", 9.99, 2017, 5,
         "best boy band in the world"),
        ("A Fever You Can't Sweat Out", "Panic! at the Disco", "Pop Punk",
         9.99, 2005, 4, "I miss the old p!atd"),
        ("In the Aeroplane Over the Sea", "Neutral Milk Hotel", "Indie Rock",
         9.99, 1998, 4,
         "WHAT A BEAUTIFUL PLACE I HAVE FOUND IN THIS PLACE"),
        ("My Beautiful Dark Twisted Fantasy", "Kanye West", "Hip-Hop", 13.99,
         2010, 5, ""),
        ("Whatever People Say I am That's What I'm Not", "Arctic Monkeys",
         "Indie Rock", 7.99, 2006, 2, "the only good arctic monkeys album"),
    ]
    items = []
    for pos, spec in enumerate(specs):
        if pos:
            wait_time()
        items.append(entry.Entry(*spec))
    self._item1, self._item2, self._item3, self._item4, self._item5 = items
    self._items = items
    # TEST BACKLOGS
    self._bklog1 = current_backlog.Current_Backlog()
    self._bklog2 = current_backlog.Current_Backlog(self._items[:5])
def setUp(self):
    '''Sets up the entry lists and shelves needed for testing'''
    # Entry LIST VARIABLES
    self._backlog1 = []
    self._backlog2 = [entry.Entry("", "", "", i, 0, 1, "") for i in range(5)]
    done = [entry.Entry("", "", "", i, 0, 1, "") for i in range(5)]
    for item in done:
        item.set_completed()
    self._completed = done
    # Shelf VARIABLES
    # NOTE(review): shelves "1" and "2" share the SAME _backlog2 list
    # object -- mutating one mutates the other; confirm that is intended.
    self._shelf1 = shelf.Shelf("Empty")
    self._shelf2 = shelf.Shelf(
        "With-Items", {
            "3": ([], []),
            "1": (self._backlog2, []),
            "2": (self._backlog2, self._completed)
        })
def evict_set_lru(self, index, dirty, tag, lru):
    '''
    LRU replacement within one set of a set-associative TLB: overwrite the
    way at `index` holding the smallest LRU stamp with a fresh valid entry.
    (The previous docstring said "fully associative", but this method walks
    the ways of a single indexed set.)
    '''
    min_iset = 0
    # python 3 has no max-integer constant, so start from +infinity
    min_lru = float("inf")
    for iset in range(self.set_num):
        if min_lru > self.content[index][iset].get_lru():
            min_lru = self.content[index][iset].get_lru()
            min_iset = iset
    # BUGFIX: removed a stray trailing line-continuation backslash that
    # followed this statement in the original.
    self.content[index][min_iset] = tlb_entry.Entry(True, dirty, tag, lru)
def read_budget_dictionary(
        budget_dict,
        dictionary_path="C:\\Users\\f-eng\\OneDrive\\Documents\\Budget\\dictionary.xlsx"):
    """Populate *budget_dict* from the ITEM/CAT/SUBCAT/BREAKDOWN spreadsheet.

    Args:
        budget_dict: dict mapping normalized item names to Entry objects;
            mutated in place.
        dictionary_path: path to the Excel dictionary file. Defaults to the
            previously hard-coded location for backward compatibility.
    """
    dictionary = pd.read_excel(dictionary_path, engine="openpyxl")
    for row in range(len(dictionary)):
        # Normalize the lookup key: lowercase with surrounding whitespace
        # stripped, so spreadsheet formatting doesn't break matching.
        key = str(dictionary.loc[row, "ITEM"]).lower().strip()
        new_entry = entry.Entry(key, dictionary.loc[row, "CAT"],
                                dictionary.loc[row, "SUBCAT"],
                                dictionary.loc[row, "BREAKDOWN"])
        budget_dict[new_entry.get_item()] = new_entry
def test_if_marking_an_item_as_incomplete_when_the_backlog_is_at_capacity_raises_an_exception(
        self):
    '''When the backlog is at capacity, mark_incomplete must raise and roll
    back: the Entry is re-marked complete and reinserted into the completed
    list'''
    # Fill the backlog up to its limit.
    backlog = self._mlist2.backlog
    for n in range(self._mlist2.get_backlog_limit() - 5):
        backlog.insert_item(entry.Entry(f"{n}", f"{n}", f"{n}", n, n, 1, ""))
    with self.assertRaises(media_sublist.MSOverflowException):
        self._mlist2.mark_incomplete(0)
    # Both sublists must be exactly at their limits afterwards.
    self.assertEqual(len(self._mlist2.get_backlog()),
                     self._mlist2.get_backlog_limit())
    self.assertEqual(len(self._mlist2.get_completed()),
                     self._mlist2.get_completed_limit())
def lookup_fa(self, dirty=False, tag=0, lru=0):
    """Fully-associative lookup: returns True on a hit (refreshing the LRU
    stamp and, if requested, the dirty bit), False on a miss. A miss fills
    the first invalid slot, or falls back to LRU eviction when all slots
    are valid."""
    for slot in range(self.set_num):
        cur = self.content[slot]
        if not cur.get_valid():
            # First invalid slot: install the new entry here. Miss.
            self.content[slot] = tlb_entry.Entry(True, dirty, tag, lru)
            return False
        if cur.get_tag() == tag:
            cur.update_lru(lru)
            if dirty:
                cur.set_dirty(dirty)
            return True  # hit
    # Every slot valid and no tag matched: evict by LRU. Miss.
    self.evict_fa_lru(dirty, tag, lru)
    return False
def run_dispatch(input_fp, base_url, filters=None, disqus_shortname=None):
    """Parse a MIME-style message from *input_fp* into an Entry and render it.

    Args:
        input_fp: file object containing the RFC-2822-style source document.
        base_url: base URL handed to the Entry.
        filters: optional list of filters; applied to the entry body and the
            generated HTML tree. Defaults to no filters.
        disqus_shortname: optional Disqus shortname handed to the Entry.

    Returns:
        (entry, html) tuple: the Entry object and the filtered HTML tree.
    """
    # BUGFIX: `filters=[]` was a mutable default argument shared across
    # calls; use None as the sentinel instead (backward compatible).
    if filters is None:
        filters = []
    # parse like a MIME document
    msg = email.message_from_file(input_fp)
    meta = dict(msg.items())
    body_unfiltered = msg.get_payload()
    body = body_unfiltered.strip()
    if len(body) == 0:
        body = None
    else:
        body = apply_filters_for(filters, 'entry_body', body)
    e = entry.Entry(meta, body, body_unfiltered, base_url, disqus_shortname)
    html = apply_filters_for(filters, 'html_file', e.to_html_tree())
    return (e, html)
def __init__(self):
    """Load the Qt UI and wire up the database utility, queue, worker
    thread, and the entry/katar sub-applications."""
    super(Window, self).__init__()
    uic.loadUi('material.ui', self)
    # Database and table names shared by both sub-applications.
    self.db = 'patient_try'
    self.identityTable = 'patient'
    self.visitTable = 'visits'
    self.dbu = DB_manager.DatabaseUtility(self.db)
    # Queue passed to both sub-apps and the worker thread.
    self.qu = Queue()
    self.worker = Worker(qu=self.qu, parent=self)
    self.entry_app = entry.Entry(self, self.dbu, self.identityTable,
                                 self.visitTable, self.qu)
    self.katar_app = katar.Katar(self, self.dbu, self.identityTable,
                                 self.visitTable, self.qu)
    # Button click delegates to the katar sub-app's queue refresh.
    self.addPushButton.clicked.connect(self.katar_app.update_queue)
    # Worker is started last -- NOTE(review): presumably so all
    # collaborators above exist before it runs; confirm ordering matters.
    self.worker.start()