def test_save_related_results_missing_fields(self):
    """save_related_results formats related entries even when IDs are missing."""
    user_data = self.data.test_data["user_missing_id"]
    org_data = self.data.test_data["org_all_fields"]
    ticket_data = self.data.test_data["ticket_missing_id"]

    # Expected display strings: a missing "_id" renders as None.
    user_info = f"Name: {user_data['name']} | ID: {None}"
    org_info = f"Name: {org_data['name']} | ID: {org_data['_id']}"
    ticket_info = f"Subject: {ticket_data['subject']} | ID: {None}"
    expected = {
        "users": [user_info],
        "org": [org_info],
        "ticket": [ticket_info],
        "none": None,
    }

    # Seed the entry with dummy related results, then collapse them.
    self.entry.related_results = {
        "users": [Entry(user_data, "test", "_id", "users")],
        "org": [Entry(org_data, "test", "_id", "orgs")],
        "ticket": [Entry(ticket_data, "test", "_id", "tickets")],
        "none": None,
    }
    self.entry.save_related_results()
    self.assertEqual(self.entry.related_results, expected)
def test_update3(self):
    """Two timed steps: each duration is the gap to the following entry."""
    plan = Plan("today")
    plan.update("07:00 aufstehen\n08:00 essen")
    TestHelper.test_listByInstance(self, plan.step_list, "1R")
    root = plan.step_list[0]
    self.assertEqual(root.step_list[0], Entry("01:00", "aufstehen"))
    self.assertEqual(root.step_list[1], Entry("00:00", "essen"))
def load(self, path):
    """Walk *path* recursively and ingest every file found.

    ``.tsv`` files are treated as one entry per row (stored via
    ``self._store.put``); any other file is slurped whole into a single
    Entry whose ``yaml``/``json`` attribute is set from the file suffix
    and stored via ``self.put``.
    """
    import csv  # hoisted out of the per-row loop

    for root, _dirs, files in os.walk(path):
        for name in files:
            # Distinct local name so the *path* argument is not clobbered
            # (the original reused `path`, breaking later walk iterations'
            # readability even though os.walk had already captured it).
            file_path = os.path.join(root, name)
            suffix = os.path.splitext(file_path)[1]
            if suffix == ".tsv":
                # File of many entries: one per TSV row.
                with open(file_path) as f:
                    for row in csv.DictReader(f, delimiter='\t'):
                        if len(row.keys()):
                            entry = Entry()
                            entry.primitive = row
                            self._store.put(entry)
            else:
                # Assumes one entry per file.
                print("slurping %s" % file_path)
                with open(file_path) as f:  # was leaked via open(...).read()
                    text = f.read()
                entry = Entry()
                if suffix == ".yaml":
                    entry.yaml = text
                elif suffix == ".json":
                    entry.json = text
                self.put(entry)
def populate(files):
    """Build the TCS root Entry tree from a list of media file paths.

    Raises ValueError for an empty list or an unrecognised media type.
    Returns the root container Entry with one child per playable file.
    """
    if not files:  # idiomatic emptiness check (was len(files) <= 0)
        raise ValueError("Invalid list")
    manager = OneServerManager()
    tcsRoot = Entry("/tcs", manager.CONTAINER_MIME, None, [], "TCS", "tcs", -1, None)
    idlna = DLNAInterface()
    dlna = manager.dlna
    for f in files:
        profile = idlna.dlna_guess_media_profile(dlna, f)
        manager.log.debug('Profile for %s: %s', f, str(profile))
        if profile is None:
            raise ValueError("Invalid media type on {0}".format(f))
        try:
            profile.contents
        except ValueError:
            OneServerManager().log.debug("Invalid profile object, skipping "+f)
            # NOTE(review): `break` abandons every remaining file, while the
            # log message says "skipping" — `continue` may be intended.
            # Preserved as-is; confirm with the author before changing.
            break
        size = os.path.getsize(f)
        child = Entry(f, profile, tcsRoot, None, f, "", size, createLocalFileHandle)
        tcsRoot.children.append(child)
    return tcsRoot
def set(self, key: str, value: str, abs_time=0, idle_time=0) -> None:
    """Insert or update *key* with *value*, refreshing its LRU position.

    ``abs_time`` / ``idle_time`` (seconds) override the cache-level
    defaults when positive; expiry fields are stored as absolute epoch
    timestamps. Evicts the least-recently-used entry when the cache
    exceeds ``max_size``.
    """
    try:
        old_entry = self.table.find(key)
    except KeyError:
        old_entry = None

    if old_entry:
        # Existing key: detach it so it can be re-appended at the rear.
        entry = old_entry
        self.ll.remove_middle(entry)
        self.table.remove(key)
        # Bug fix: the original reused the old Entry without storing the
        # new value, so set() on an existing key silently kept the stale
        # value. Assumes Entry keeps it in `.value` (matches the
        # Entry(key, value, 0, 0) constructor order) — confirm.
        entry.value = value
    else:
        entry = Entry(key, value, 0, 0)

    curr_time = int(time.time())
    if abs_time > 0 or self.abs_time > 0:
        abs_value = abs_time if abs_time > 0 else self.abs_time
        entry.abs_time = curr_time + abs_value
    if idle_time > 0 or self.idle_time > 0:
        idle_value = idle_time if idle_time > 0 else self.idle_time
        entry.idle_time = curr_time + idle_value

    self.ll.add_rear(entry)
    if self.max_size > 0 and self.ll.size() > self.max_size:
        evicted = self.ll.remove_front()
        self.table.remove(evicted.key)
    self.table.add(key, entry)
def wrapper(self, *args, **kwargs):
    """Convert the wrapped callable's tuple results into Entry objects.

    A list of tuples becomes a list of Entry, a single tuple becomes one
    Entry, and anything else passes through untouched.
    """
    raw = func(self, *args, **kwargs)
    if isinstance(raw, tuple):
        return Entry(*raw)
    if isinstance(raw, list):
        return [Entry(*item) for item in raw]
    return raw
def get_entries(db, date_for_lineup):
    """Return the list of DraftKings Entry objects for *date_for_lineup*.

    Prefers rows already present in daily_draftkings_entries; otherwise
    reads DKEntries_<DDMONYYYY>.csv from ../resources, inserting each row
    into the database as it is read. Returns None when the csv file does
    not exist either.
    """
    entries = []
    # Check if data already exists in database
    db.query("select exists(select 1 from daily_draftkings_entries where date(createdOn) = date(?)) entryExists", (date_for_lineup,))
    if db.fetchone()['entryExists'] == 1:
        logging.info("Entries for DraftKings already exists, grabbing from database.")
        try:
            db.query('''select dde.id, dde.entryId, dde.contestName, dde.contestId, dde.entryFee from daily_draftkings_entries dde where date(dde.createdOn) = date(?)''', (date_for_lineup,))
            for dde in db.fetchall():
                logging.debug(dde)
                entry_info = Entry(db, dde['entryId'], dde['contestName'],
                                   dde['contestId'], dde['entryFee'], dde['id'])
                entries.append(entry_info)
            return entries
        except Exception as e:
            logging.error("Could not find entries for DraftKings on " + str(date_for_lineup))
            logging.error("Got the following error:")
            logging.error(e)
            raise e
    # Doesn't, so check from file
    else:
        try:
            logging.info(
                "Entry information for DraftKings doesn't exist, trying to grab from csv file: DKEntries_"
                + date_for_lineup.strftime("%d%b%Y").upper() + ".csv")
            with open("../resources/DKEntries_" + date_for_lineup.strftime("%d%b%Y").upper() + ".csv", "r") as csvfile:
                reader = csv.DictReader(csvfile)
                for row in reader:
                    if row['Entry ID']:
                        logging.debug(row)
                        # (removed: stray semicolon and csvfile.close() —
                        # the with-block already closes the handle)
                        entry_info = Entry(db, row['Entry ID'], row['Contest Name'],
                                           row['Contest ID'], row['Entry Fee'])
                        entries.append(entry_info)
                        entry_info.insert_entry()
                return entries
        except FileNotFoundError as e:
            logging.info("Did not find entries, could be the initial lineup on " + str(date_for_lineup))
            logging.info("Got the following error:")
            logging.info(e)
            return None
def create_directory(self, directory, name, attrib=0x10):
    """Create a subdirectory *name* inside *directory* on the FAT volume.

    Allocates a free cluster, writes the new directory entry, marks the
    cluster as end-of-chain in the FAT, and bootstraps the new directory
    with its "." and ".." entries. Returns None (also when *name*
    already exists).
    """
    # Refuse duplicates within the target directory.
    if name in [e.name() for e in directory.entries]:
        print(name, " already exists in directory")
        return None
    cluster = self.next_free_cluster()
    filename, extension = short_filename(name, directory)
    # NOTE(review): Python precedence makes this parse as
    # (name == filename.strip() + '.' + extension.strip()) if extension
    # else '' — i.e. with an empty extension the condition is the falsy
    # string '' and the long name is always kept, even when it equals the
    # short name. Possibly unintended; confirm before changing.
    if name == filename.strip() + ('.' + extension.strip()) if len(
            extension.strip()) != 0 else '':
        # Long name matches the generated short name: no LFN needed.
        name = ''
    today = datetime.datetime.today()
    # FAT stores creation time to 10 ms resolution in a 0-199 field;
    # the odd second is folded in via (second % 2) * 100.
    milliseconds = today.microsecond // 10000 + (today.second % 2) * 100
    entry = Entry(filename, extension, Attrib(attrib), 0x00, milliseconds,
                  today.time(), today.date(), today.date(), today.time(),
                  today.date(), cluster, 0, name)
    directory.write(entry)
    self.current_directory.read_from_disk()
    # 0x0FFFFFFF is the FAT32 end-of-chain marker for the new cluster.
    self.write_to_fat(cluster, 0x0FFFFFFF)
    new_directory = Directory(self, cluster)
    # "." entry points at the new directory's own cluster.
    self_entry = Entry(". ", " ", Attrib(0x10), 0x00, milliseconds,
                       today.time(), today.date(), today.date(), today.time(),
                       today.date(), cluster, 0, '')
    new_directory.write(self_entry)
    new_directory.read_from_disk()
    # ".." entry points back at the parent directory's first cluster.
    up_entry = Entry(".. ", " ", Attrib(0x10), 0x00, milliseconds,
                     today.time(), today.date(), today.date(), today.time(),
                     today.date(), directory.first_cluster, 0, '')
    new_directory.write(up_entry)
def init():
    """Load quotes, the counter, and photo-upload users from their files.

    Populates the module-level ``quotes`` dict (keyed by quote key, with
    keyless quotes collected in a list under ``None``), ``count``, and
    the ``users`` mapping read from ``photo_uploads``.
    """
    global quotes
    global count
    global users
    global photo_uploads

    quotes_no_key = []
    # with-blocks guarantee the handles close even on parse errors
    # (the originals leaked on exceptions, and the photo_uploads handle
    # was never closed at all).
    with open(quotes_name) as file:
        for line in file:
            # Split at the first two commas only: key, occurrence count, quote.
            key_occur_val = line.strip().split(',', 2)
            key = key_occur_val[0].strip()
            occurances = key_occur_val[1].strip()
            quote = key_occur_val[2].strip()
            if key != "":
                quotes[key] = Entry(key, occurances, quote)
            else:
                quotes_no_key.append(Entry(key, occurances, quote))
    quotes[None] = quotes_no_key

    with open(count_name) as file:
        count = int(file.readline())

    with open(photo_uploads) as file:
        for line in file:
            fields = line.strip().split(" ")
            users[fields[0]] = fields[1]
def __delitem__(self, key):
    """Removes the entry associated with key.

    NOTE: unlike dict, a missing key is silently ignored rather than
    raising KeyError — preserved for backward compatibility.
    """
    # (removed: a duplicate Entry(key, None) was built unconditionally
    # before the membership test and immediately discarded)
    if key in self:
        # __contains__ leaves self._index pointing at the bucket for key.
        entry = Entry(key, None)
        entry = self._table[self._index].remove(entry)
        self._size -= 1
def get_app_constant(self):
    """Poll the active X11 window and update the time-tracking entries.

    Returns the active window's _NET_WM_NAME value (or None when the X
    query fails). On a focus change the running Entry is archived into
    self.entries and a new current Entry is started; while the focus is
    unchanged the current Entry's elapsed seconds are refreshed.
    """
    self.root.change_attributes(event_mask=Xlib.X.FocusChangeMask)
    try:
        window_id = self.root.get_full_property(
            self.NET_ACTIVE_WINDOW, Xlib.X.AnyPropertyType).value[0]
        window = self.disp.create_resource_object('window', window_id)
        window.change_attributes(event_mask=Xlib.X.PropertyChangeMask)
        window_name = window.get_full_property(self.NET_WM_NAME, 0).value
    except Xlib.error.XError:  # simplify dealing with BadWindow
        window_name = None

    if window_name != self.last_entry and self.last_entry is not None:
        # Focus changed: archive the running entry and start a new one.
        # (removed: an unused data_tuple plus commented-out DB-insert code)
        if self.current is None:
            self.current = Entry(window_name, datetime.datetime.now(), 0, MACHINE)
        else:
            self.entries.append(self.current)
            self.current = Entry(window_name, datetime.datetime.now(), 0, MACHINE)
    elif window_name == self.last_entry:
        # Same window still focused: refresh the elapsed-seconds counter.
        self.current.endTime = (datetime.datetime.now()
                                - self.current.startTime).total_seconds()
    else:
        # First observation (last_entry is None): start tracking.
        self.current = Entry(window_name, datetime.datetime.now(), 0, MACHINE)
    self.last_entry = window_name
    # (removed: `event = self.disp.next_event()` — unreachable after return)
    return window_name
def load_remote(self, url):
    """Fetch a zip archive from *url* and ingest its .yaml / .tsv members.

    Yaml members become one Entry each (via self.put); tsv members yield
    one Entry per row (via self.store.put). Any failure is logged rather
    than raised.
    """
    try:
        payload = urlopen(url).read()
        archive = ZipFile(BytesIO(payload), 'r')
        # TODO - handle json and other formats
        wanted = [
            member for member in archive.namelist()
            if member.endswith('.yaml') or member.endswith('.tsv')
        ]
        for member in wanted:
            with archive.open(member, 'r') as raw:
                text_stream = TextIOWrapper(raw, encoding='utf-8', newline='')
                if member.endswith('.yaml'):
                    entry = Entry()
                    entry.yaml = text_stream.read()
                    self.put(entry)
                elif member.endswith('.tsv'):
                    for row in csv.DictReader(text_stream, delimiter='\t'):
                        if len(row.keys()):
                            entry = Entry()
                            entry.primitive = row
                            self.store.put(entry)
            print('stored', member)
    except Exception as ex:
        log_traceback(logger, ex)
def test_update4(self):
    """Three timed steps: each duration is the gap to the following entry."""
    plan = Plan("today")
    plan.update("07:00 aufstehen\n08:00 essen\n09:00 Zaehneputzen")
    TestHelper.test_listByInstance(self, plan.step_list, "1R")
    root = plan.step_list[0]
    self.assertEqual(root.step_list[0], Entry("01:00", "aufstehen"))
    self.assertEqual(root.step_list[1], Entry("01:00", "essen"))
    self.assertEqual(root.step_list[2], Entry("00:00", "Zaehneputzen"))
def set_patient(self, bundle, prefix=None):
    """Generates and appends the Patient entry to the transaction.

    Skipped entirely unless GENERATION_MAP["patient"] is enabled. Looks
    the patient up in the master patient index, optionally namespaces the
    pid with *prefix*, attaches any photograph documents as Binary
    entries, appends the Patient itself, and — when a gestational age is
    recorded — appends a gestational-age Observation. Returns the bundle.
    """
    if GENERATION_MAP["patient"]:
        patient = Patient.mpi[self.pid]
        if prefix:
            # Namespace the patient id, e.g. "<prefix>-<pid>".
            patient.pid = prefix + "-" + patient.pid
        # look up patient photos
        if self.pid in Document.documents:
            for d in [
                    doc for doc in Document.documents[self.pid]
                    if doc.type == 'photograph'
            ]:
                data = fetch_document(self.pid, d.file_name)
                binary_id = uid(None, "%s-photo" % d.id, prefix)
                # The photo bytes travel as a separate Binary entry that
                # the Patient references via photo_binary_id.
                self.appendEntry(
                    bundle,
                    Binary({
                        "mime_type": d.mime_type,
                        "content": data['base64_content'],
                        "id": binary_id
                    }))
                patient.photo_title = d.title
                patient.photo_code = d.mime_type
                patient.photo_binary_id = binary_id
                patient.photo_hash = data["hash"]
                patient.photo_size = data["size"]
        patientJSON = patient.toJSON(prefix)
        # Documents must be appended before the Patient entry itself.
        bundle = self.set_documents(bundle, prefix)
        self.appendEntry(bundle, Entry(patientJSON))
        if patient.gestage:
            # LOINC 18185-9: gestational age at birth, in weeks.
            self.appendEntry(
                bundle,
                Entry(
                    Observation(
                        {
                            "id": uid(None, "%s-gestage" % self.pid, prefix),
                            "pid": self.pid,
                            "date": patient.dob,
                            "code": "18185-9",
                            "name": "Gestational age at birth",
                            "scale": "Qn",
                            "value": patient.gestage,
                            "units": "weeks",
                            "unitsCode": "wk",
                            "categoryCode": "exam",
                            "categoryDisplay": "Exam"
                        }, prefix)))
    return bundle
def add_item(self, item_data, group_name):
    """Index every field value of a user/ticket/org record into the trie.

    List-valued fields (e.g. tags) contribute one Entry per element; all
    other values contribute a single Entry keyed on the whole value.
    """
    for field, value in item_data.items():
        # isinstance is the idiomatic check (also accepts list subclasses).
        if isinstance(value, list):
            for search_term in value:
                self.trie.add(Entry(item_data, search_term, field, group_name))
        else:
            self.trie.add(Entry(item_data, value, field, group_name))
def main():
    """Build a demo TimeBlock with three entries and print its hash."""
    demo_block = TimeBlock('')
    for index, url in ((0, 'https://www.cnn.com'),
                       (3, 'https://www.reddit.com'),
                       (6, 'https://www.facebook.com')):
        demo_block.add_new_entry(Entry(index, url))
    demo_block.print()
    print('Block Hash:', demo_block.get_block_hash())
def __init__(self, store: StorageManager):
    """Load the outline from *store*, seeding a default tree when empty."""
    self.root = store.read()
    self.dirty = False
    if self.root is None:
        # Nothing persisted yet: build a small starter outline.
        # (Entry appears to register itself with the parent passed as the
        # second argument, which is why a/b/c are otherwise unused —
        # confirm against the Entry constructor.)
        self.root = Entry('root', None)
        a = Entry('Topic 01', self.root)
        b = Entry('Subtopic', a)
        c = Entry('Topic 02', self.root)
        self.dirty = True
    if self.root.children is None:  # fixed: identity check, not == None
        a = Entry(' ', self.root)
        self.dirty = True
    self.selected_row = 0
    self.render()
def get_issue(self, term):
    """Look up *term* in the plugin search index and build an Entry.

    Returns a bare Entry carrying only the issue name when no plugin's
    script_name matches *term* case-insensitively.
    """
    results = self.search(term)
    matches = [
        plugin for plugin in results['props']['pageProps']['plugins']
        if plugin['_source']['script_name'].lower() == term.lower()
    ]
    if not matches:
        return Entry(issue=term)
    first = matches[0]
    return Entry(nessus_id=first['_id'],
                 issue=term,
                 description=first['_source']['description'],
                 solution=first['_source']['solution'])
def get_entries():
    """Returns lists of new released singles and albums with relevant information

    Returns:
        singles_list (tuple[]): list of potential singles picks. Each tuple
            will carry information pertaining to what is needed for making
            tweets. See: (artist, artist_id, title, Spotify link,
            popularity score)
        albums_list (tuple[]): list of potential album picks (see
            singles_list for details)
    """
    singles_list = []
    albums_list = []
    releases = get_new_releases()
    for release in releases:
        artist = release["artists"][0]["name"]
        artist_id = release["artists"][0]["id"]
        title = release["name"]
        link = release["external_urls"]["spotify"]
        genres = get_genres(artist_id)
        # Skip releases already tweeted (fixed: `is None`, not `== None`).
        if used_links.find_one({"link": link}) is None:
            if release["album_type"] == "single":
                # Popularity lives on the track, so fetch the album's
                # first track and then its popularity score.
                tracks_url = "https://api.spotify.com/v1/albums/" + \
                    release["id"] + "/tracks"
                tracks = requests.get(
                    tracks_url, headers=spotify_access_header).json()
                track_id = tracks["items"][0]["id"]
                pop_url = "https://api.spotify.com/v1/tracks/" + track_id
                popularity = requests.get(
                    pop_url, headers=spotify_access_header).json()
                popularity = popularity["popularity"]
                entry = Entry(artist, title, "single", link, popularity, genres)
                singles_list.append(entry)
            elif release["album_type"] == "album":
                # Albums carry their popularity directly.
                pop_url = "https://api.spotify.com/v1/albums/" + release["id"]
                popularity = requests.get(
                    pop_url, headers=spotify_access_header).json()
                popularity = popularity["popularity"]
                entry = Entry(artist, title, "album", link, popularity, genres)
                albums_list.append(entry)
    return singles_list, albums_list
def load(self, f):
    """Parse a race-results file *f* into self.entries.

    Pass 1 finds the maximum lap count; pass 2 builds an Entry per
    9-token line whose QE code matches a known QE. Lines after the
    "<HANDICAPS>" marker are ignored.
    """
    self.f = f[0:-5]
    # Column indices within each comma-separated line.
    QECODE = 0
    HELM = 1
    CREW = 2
    CLASS = 3
    SAILNO = 4
    TIME = 5
    LAPS = 6
    FINCODE = 7
    with open(f, "r") as handle:  # was left open; header line skipped
        lines = handle.readlines()[1:]
    # Pass 1: determine the winning (maximum) lap count.
    for line in lines:
        line = line.replace("\r", "").replace("\n", "")
        if line == "<HANDICAPS>":
            break
        try:
            laps = int(line.split(",")[LAPS])
        except (ValueError, IndexError):  # non-numeric or short line: skip
            continue
        if self.maxLaps < laps:
            self.maxLaps = laps
    # Pass 2: build the entries.
    for line in lines:
        line = line.replace("\r", "").replace("\n", "")
        # print(line)
        if line == "<HANDICAPS>":
            break
        tokens = line.split(",")
        if len(tokens) != 9:
            continue
        qe = tokens[QECODE]
        valid = False
        for QEcode in self.QEs:
            if qe == QEcode.QE:
                QuickE = QEcode
                valid = True
        # Fixed: logical `and` instead of bitwise `&` on booleans.
        if valid and tokens[FINCODE] == "":
            try:
                laps = int(tokens[LAPS])
                time = int(tokens[TIME])
            except ValueError:  # was a bare except hiding real errors
                print("WARNING:", line, "in file: ", f, "PY, LAPS & TIME must all be integers")
            else:
                self.entries.append(Entry(QuickE, laps, time, tokens[FINCODE], self.maxLaps))
        elif valid:
            # Finished with a finish code: laps/time are irrelevant.
            self.entries.append(Entry(QuickE, 0, 0, tokens[FINCODE], self.maxLaps))
        else:
            if not (",,,,,,," in line):
                print("WARNING:", line, "in file: ", f, "unknown QE")
def handle_callback(self, bot, update):
    """Dispatch inline-keyboard callbacks: 'i' = show info, 'd' = delete."""
    query = update.callback_query
    # Remove the keyboard message the user just interacted with.
    bot.delete_message(chat_id=query.message.chat_id,
                       message_id=query.message.message_id)
    action, payload = query.data[0], query.data[1:]
    if action == 'i':
        self.respond_with_movie(bot, query, Entry([payload]))
    if action == 'd':
        movie = Entry([payload])
        if self.delete_movie_from_db(movie, query):
            text_answer = "<< " + movie.name + " >>" + " removed from watchlist!"
        else:
            text_answer = "<< " + movie.name + " >>" + " not in watchlist!"
        bot.sendMessage(chat_id=query.message.chat_id, text=text_answer)
def __init__(self, name):
    """Load the file *name* and parse its lines into Entry objects.

    Entries are delimited by lines starting with "id"; each Entry
    receives its slice of lines plus the index of its first line.
    """
    # name of the associated file
    self.name = name
    # number of entries parsed from the file
    self.numentries = 0
    # Entry objects corresponding to the file's entries
    self.entries = []
    # indexes of the entries selected to be summarized
    self.selected = []
    # values entered for the selected entries' parameters (20 slots)
    self.variables = [set() for _ in range(20)]

    # `with` closes the handle even on errors; a failed open raises
    # OSError on its own, so the old `if fileobj.closed: sys.exit(1)`
    # check could never trigger and has been removed.
    with open(self.name) as fileobj:
        lines = fileobj.readlines()

    # Split the text into entries at each "id" line.
    entbegin = 0
    for linenum, l in enumerate(lines):
        # An "id" line (other than the first line) closes the previous entry.
        if l[0:2] == "id" and linenum != 0:
            self.entries.append(Entry(lines[entbegin:linenum], entbegin))
            self.numentries += 1
            entbegin = linenum
        # The final line flushes whatever remains as the last entry.
        if linenum + 1 == len(lines):
            self.entries.append(Entry(lines[entbegin:], entbegin))
            self.numentries += 1
def read_from_file(filename):
    """Append one Entry per whitespace-separated line of *filename*."""
    with open(filename, "r") as handle:
        for record in handle.readlines():
            fields = record.split()
            entry_list.append(
                Entry(fields[0], fields[1], fields[2], fields[3], fields[4]))
def pluster(pc, delta, tau): f = approximate(pc, 0.1) # sort point_cloud and f by f in ascending order sorted_idxs = argsort(f) f = f[sorted_idxs] pc = pc[sorted_idxs] lims, _, I = rips_graph(pc, delta) entries = Entries() for i in range(len(f)): nbr_idxs = I[lims[i]:lims[i+1]] upper_star_idxs = nbr_idxs[nbr_idxs < i] if upper_star_idxs.size == 0: # i is a local maximum entries.create(Entry(i)) else: # i is not a local maximum entry_idx = entries.find_entry_idx_by_point(upper_star_idxs[0]) entries.attach(entry_idx, i) entries = merge(pc, f, entries, i, upper_star_idxs, tau) return entries
def __contains__(self, key):
    """Return True if key is in the dictionary; return False otherwise.

    Side effects used by other methods: records the probe statistics in
    self._probeCount, the hash slot in self._homeIndex, and the slot
    where the search ended in self._actualIndex.
    """
    entry = Entry(key, None)
    self._probeCount = 0
    # Get the home index
    self._homeIndex = abs(self._hash(key)) % len(self._table)
    rehashAttempt = 0
    index = self._homeIndex
    # Stop searching when an empty cell is encountered
    while rehashAttempt < len(self._table):
        self._probeCount += 1
        if self._table[index] == OpenAddrHashDict.EMPTY:
            # Empty cell: the key cannot lie further along this probe chain.
            self._actualIndex = index
            return False
        elif self._table[index] == entry:
            self._actualIndex = index
            return True
        # Increment the index and wrap around to first
        # position if necessary
        rehashAttempt += 1
        if self._linear:
            index = (self._homeIndex + rehashAttempt) % len(self._table)
        else:
            # Quadratic probing (triangular-number offsets).
            index = (self._homeIndex +
                     (rehashAttempt**2 + rehashAttempt) // 2) % len(
                         self._table)
    # Probed every slot without finding the key or an empty cell:
    # the table is full and the key is absent.
    return False
def __contains__(self, key):
    """Returns True if key is in the dictionary or False otherwise."""
    bucket = abs(hash(key)) % self._capacity
    self._index = bucket  # remembered for callers such as __delitem__
    probe = Entry(key, None)
    return self._table[bucket].search(probe)
def create_date(self):
    """Interactively register an entry date for an existing student.

    Prompts for a student id; when found, prompts for a date, creates an
    Entry with a generated entry number, and reports that number.
    """
    system('cls')
    num_id = int(input("Enter the student's identification: "))
    pos = self.find_student(num_id)
    if pos == -1:
        # Fixed user-facing grammar ("The student not exists").
        print("ERROR - The student does not exist")
        input()
        return
    number = self.generate_number_entry()
    student = self.students[pos]
    year = int(input("Enter the year of the date: "))
    month = int(input("Enter the month of the date: "))  # fixed "mounth" typo
    day = int(input("Enter the day of the date: "))
    new_date = Date(year, month, day)
    self.entrys.append(Entry(number, new_date, student))
    system('cls')
    print("The number of the entry is: ", number)
def get_default_fields(self):
    """Return the default field names of a blank Entry as a list."""
    # list() replaces the manual append loop over the iterable of keys.
    return list(Entry().get_values())
def add_movie(self, bot, update, args):
    """Add the movie named by *args* to the watchlist and report back."""
    chat_id = update.message.chat_id
    if not args:
        bot.sendMessage(
            chat_id=chat_id,
            text='Please enter a name for the movie after the command')
        return
    movie = Entry(args)
    if not self.add_movie_to_db(movie, update):
        bot.sendMessage(
            chat_id=chat_id,
            text="<< " + movie.name + " >>" + " already in your watchlist (or maybe a database problem)!")
        return
    if not movie.movie_found:
        bot.sendMessage(
            chat_id=chat_id,
            text='Unable to find <<' + movie.name + '>> in Internet Movie database, but it has still been added to your list!'
        )
        return
    if movie.category != "null":
        text_answer = "<< " + movie.name + " >>" + " added to your watchlist inside the category: " + movie.category + "!"
    else:
        text_answer = "<< " + movie.name + " >>" + " added to your watchlist without category!"
    bot.sendMessage(chat_id=chat_id, text=text_answer)
def main_menu(self):
    """Main program menu, prompts user for input until the user quits."""
    print("\nWork Logger Application")
    while True:
        print("\nOptions:\n")
        print(" 1. Add a new entry")
        print(" 2. Lookup a previous entry")
        print(" 3. Quit\n")
        get_user_choice = input(
            "Please enter the number of your selection (1-3): ")
        print()
        if get_user_choice == "1":
            entry = Entry()
            self.add_entry(entry)
            print("Entry successfully added.")
            # The while-loop re-displays the menu; the original's recursive
            # self.main_menu() call here grew the stack on every add.
        elif get_user_choice == "2":
            # Re-check on every lookup so entries added during this session
            # are found (the one-time `exists` flag went stale after
            # option 1 created the file).
            if not os.path.isfile(filename):
                print(
                    "There are currently 0 entries. Must add entries, before searching."
                )
            else:
                self.lookup_entry()
        elif get_user_choice == "3":
            print("\nExiting Work Logger")
            exit()
        else:
            print("Invalid choice, please try again.")