def test_write_lines_txt(self):
    """write_lines should produce a non-empty file from a list of lines."""
    target = "test_write_lines.txt"
    lines = ["firstline", "secondline", "thirdline"]
    FileHandler.write_lines(target, lines)
    self.assertTrue(path.exists(target) and path.getsize(target) != 0)
def test_csv_png(self):
    """The parser should yield the expected first record from data.png.

    BUG FIX: this method used to share the name test_csv with the method
    below, so it was silently shadowed and never ran; it also contained
    no assertion.
    """
    fh = FileHandler(Validator())
    actual = fh.open('data.png')[0]
    expected = {
        'EMPID': 'A001', 'GENDER': 'FM', 'AGE': '2w', 'SALES': '001',
        'BMI': 'No34r', 'SALARY': '123', 'BIRTHDAY': '1-1-1996',
    }
    self.assertEqual(actual, expected)

def test_csv_txt(self):
    """The parser should yield the expected first record from help.txt."""
    fh = FileHandler(Validator())
    actual = fh.open('help.txt')[0]
    expected = {
        'EMPID': 'A001', 'GENDER': 'FM', 'AGE': '2w', 'SALES': '001',
        'BMI': 'No34r', 'SALARY': '123', 'BIRTHDAY': '1-1-1996',
    }
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(actual, expected)
class DBOperationsHandler:
    """Orchestrates database setup, data loading and query reporting."""

    def __init__(self):
        self.db = DBHandler()
        self.file_handler = FileHandler()
        # Build schema and indexes before inserting any rows.
        self.db.create_database()
        self.db.create_tables()
        self.db.create_room_birthday_idx()
        self.db.create_sex_index()
        self.insert_data()

    def insert_data(self):
        """Load the rooms and students parsed by the file handler."""
        self.db.insert_rooms(self.file_handler.rooms)
        self.db.insert_students(self.file_handler.students)

    def get_data_from_queries(self):
        """Write the merged query results out, then close the connection."""
        self.file_handler.write(self.queries_merger())
        self.db.close_connection()

    def queries_merger(self):
        """Run every reporting query and return the results as one list."""
        reporting_queries = (
            self.db.get_students_num_in_rooms,
            self.db.get_top_min_avg_age,
            self.db.get_top_max_age_difference,
            self.db.get_mixed_sex_rooms,
        )
        return [query() for query in reporting_queries]
def write_to_file(self, lines):
    """Record the list of definitions in query_history.txt.

    :param lines: list of string
    :return: None
    """
    history_file = 'query_history.txt'
    FileHandler.write_lines(history_file, lines)
def write_search_history(self, lines):
    """Record the results of successful searches in the dictionary's output file.

    :param lines: String
    :return: None
    """
    output_path = self.dictionary.output
    FileHandler.write_lines(output_path, lines)
def main():
    """Parse command-line arguments, score the tasks and print the best ones."""
    parser = argparse.ArgumentParser(
        description="Choosing task file and max velocity")
    parser.add_argument('file', help="type filename with tasks", type=str)
    parser.add_argument('velocity',
                        help="type velocity points of your team", type=int)
    args = parser.parse_args()

    # Load the task list from the file named on the command line.
    task_list = FileHandler().read_file(args.file)

    handler = TaskHandler()
    # Annotate each task with its KSP/storypoints ratio before selecting.
    task_list = handler.update_tasks_ratio(task_list)
    best_tasks = handler.choose_best_tasks(task_list, args.velocity,
                                           best_tasks=[])

    print("Best tasks available for you:")
    print(', '.join(task['task_id'] for task in best_tasks))
def back_up_data(self, path):
    """Write every Card object to the file at *path*, one line per card.

    :param path: String
    """
    for card_text in map(str, self.cards):
        FileHandler.write_line(path, card_text)
def update_salary_by_name(self, name, new_salary):
    """Set the salary of every employee named *name* in users.csv.

    Requires administrator rights.

    :param name: employee name to match exactly
    :param new_salary: the salary value to store
    :return: True when at least one record was updated, otherwise False
    """
    if not self.is_admin():
        # Typo fix: message used to read "adminstrator".
        print(
            "User is not administrator. "
            "Please try to run the function as administrator."
        )
        return False

    employees_csv = FileHandler("users.csv")
    employees = employees_csv.get_csv_data()

    # Collect every matching record; salary is updated in place on the dict.
    same_name_employees = []
    for employee in employees:
        if employee["name"] == name:
            employee["salary"] = new_salary
            same_name_employees.append(employee)

    if not same_name_employees:
        print("No employees with that name. Please enter a valid employee name.")
        return False

    for employee in same_name_employees:
        employees_csv.update_csv(employee)
    print("Updated salaries successfully.")
    return True
def write_query_list(self, path):
    """Write the list of queried words and their definitions to a .txt file."""
    target = Path.cwd() / path
    FileHandler.write_lines(target, self.formatted_query_list())
def __init__(self, title="QuizMaster"):
    """Build the main window and either restore a saved quiz or show the menu."""
    super().__init__()
    self.title(title)
    self.geometry("720x300")
    self.__quiz = None
    self.question = tk.StringVar()
    self.info = tk.StringVar()

    # BUG FIX: tk.Frame(self).grid(...) returns None, so question_frame was
    # None and the question label was silently attached to the root window.
    self.question_frame = tk.Frame(self)
    self.question_frame.grid(row=1, sticky=tk.W + tk.E + tk.S + tk.N)
    tk.Label(self.question_frame, textvariable=self.question).grid(
        padx=75, pady=20, sticky=tk.W + tk.E + tk.S + tk.N, columnspan=5)

    self.buttons = ButtonBlock(self, categories, 2, answers=False)

    self.info_frame = tk.Frame(self)
    self.info_frame.grid(row=6, column=2, padx=20, pady=15,
                         sticky=tk.W + tk.E + tk.S + tk.N)
    tk.Label(self.info_frame, textvariable=self.info).pack(side=tk.LEFT,
                                                           padx=40)

    self.canvas = tk.Canvas(self, width=500, height=200)
    self.canvas.grid(row=12, columnspan=9, padx=120,
                     sticky=tk.W + tk.E + tk.S + tk.N)
    self.protocol("WM_DELETE_WINDOW", self.close)

    # Offer to resume a previously serialized game, if one exists.
    if os.path.exists('db/games/quiz.dat'):
        msq = messagebox.askquestion(
            "Lataa peli", "Haluatko jatkaa viimeksi tallennettua peliä?")
        if msq == 'yes':
            f = FileHandler()
            self.__quiz = f.deserialize()
            self.__quiz.loaded_game_question = self.__quiz.questionToAsk
            self.start_quiz()
        else:
            # User declined: discard the stale save and start fresh.
            os.remove('db/games/quiz.dat')
            self.create_start_menu()
    else:
        self.create_start_menu()
def __init__(self):
    """Create the page soup, parsers and empty word accumulators."""
    self.mjp_soup = jmp_page.MjpPageSoup()
    self.file_handler = FileHandler()
    self.cell_parser = MjpRowParser()
    # Accumulators filled while parsing.
    self.words = []
    self.word_csvs = []
    self.to_save_numbers = []
def create_file(filename):
    """Create *filename* via a FileHandler and return its contents.

    >>> create_file("employeeinfo2.csv")
    []
    """
    handler = FileHandler(filename, True)
    return handler.read()
def __init__(self):
    """Set up the component id, resource path, audio mixer and file handler."""
    # Fall back to a random 16-letter id when none is configured.
    fallback_id = ''.join(random.choices(string.ascii_uppercase, k=16))
    self._component_id = os.environ.get("SCREENCRASH_COMPONENT_ID",
                                        fallback_id)
    self._base_path = Path(__file__).parent.parent / "resources"
    self._mixer = AudioMixerVLC(self._handle_mixer_event)
    self._file_handler = FileHandler(self._base_path)
    self._custom_event_handler = None
    self._sounds = {}
def test_file_handler_non_std_file_types():
    """Processing a non-standard (.psv) file must raise IOError."""
    try:
        fh = FileHandler("Data/Test.psv")
        fh.process_file()
    except IOError:
        pass  # expected
    else:
        # The old version used the no-op `assert (True)` and a bare
        # `assert (False)`; fail with an explicit message instead.
        assert False, "IOError was not raised for a .psv file"
def get_answers(questions):
    """Return the answers for each question, preserving question order."""
    handler = FileHandler()
    return [
        handler.read_answers(question.get_category(), question.get_number())
        for question in questions
    ]
def test_write_data(data):
    """write_data must produce a JSON file that round-trips to *data*."""
    test_file_path = "tests/test_data/test_json.json"
    ecg_file = FileHandler("/test_data/test1.csv", initialize=False)
    ecg_file.write_data(data, test_file_path)
    with open(test_file_path) as json_file:
        assert json.load(json_file) == data
def __init__(self):
    """Create the DB/file handlers, build schema and indexes, load the data."""
    self.db = DBHandler()
    self.file_handler = FileHandler()
    # Schema first, then indexes, then data.
    for setup_step in (
        self.db.create_database,
        self.db.create_tables,
        self.db.create_room_birthday_idx,
        self.db.create_sex_index,
    ):
        setup_step()
    self.insert_data()
def __init__(self):
    """Load the user and vehicle CSV data and prepare empty working lists."""
    self.__list_of_vehicles = []
    self.__filter_list = []
    self.__employee_cars = []
    self.user = User()
    # One handler per backing CSV file.
    self.file_handle = FileHandler('user.csv')
    self.file_handle_vehicle = FileHandler('vehicle.csv')
    self.__data_list = self.file_handle.get_data()
    self.__vehicle_data_list = self.file_handle_vehicle.get_data()
def run_program(root_directory, overwrite_lyrics):
    """Walk *root_directory* and add lyrics to the music files found there."""
    print('Program started')
    file_handler = FileHandler(overwrite_lyrics=overwrite_lyrics)
    file_handler.read_files_and_add_lyrics(root_directory)
    print('Program finished')
def download_file(item: Item):
    """Download the media at *item.url* and return it as a file response."""
    try:
        handler = FileHandler(item.url)
        return FileResponse(handler.down_load_file())
    except Exception as e:
        # Surface any failure as a 500 with the original error attached.
        raise HTTPException(
            status_code=500,
            detail=f"Error Procesando la Peticion. Detalle - {repr(e)}")
def save_queries(queries: dict, file_name: str = "saved_queries.txt") -> None:
    """Save the definitions and words that the user queries to a text file.

    :param queries: the user queries as a dict, keys are words, values are definitions
    :param file_name: the output file txt, default is saved_queries.txt
    """
    # NOTE(review): the dict is handed to write_lines unchanged; if
    # write_lines iterates its argument this writes only the keys (words),
    # not the definitions — confirm against FileHandler.write_lines.
    output_name = file_name
    FileHandler.write_lines(output_name, queries)
def test_write_lines(self):
    """write_lines must append the given lines to the end of test.txt."""
    lines = ["start", "test1", "test2", "test3", "end"]
    FileHandler.write_lines("test.txt", lines)
    with open("test.txt", mode='r', encoding="utf-8") as text_file:
        written = text_file.read().splitlines()
    # Only the tail of the file matters: write_lines appends.
    self.assertEqual(written[-len(lines):], lines)
def print_and_write(word, definition):
    """Print the dictionary definition of the word and append it to definitions.txt.

    :param word: String
    :param definition: String
    """
    print(definition)
    FileHandler.write_line('definitions.txt', f'{word}: {definition}\n')
def test_persist_classified_with_new_records_returns_correct_diff_record_count(
        self):
    """persist_classified must report 144 newly persisted records."""
    data = Data(self.db_name)
    handler = FileHandler()
    # Seed the database, then persist the classified records on top.
    data.seed_transactions(handler.read_seed_csv(self.seed_file))
    new_records = handler.read_classified_csv(self.classified)
    self.assertEqual(144, data.persist_classified(new_records))
def test_write_lines(self):
    """write_lines must append exactly the given text to test.txt."""
    target = Path.cwd() / 'test.txt'
    with open(target, mode='r', encoding='utf-8') as text_file:
        before = text_file.read()
    FileHandler.write_lines(target, 'it worked!')
    with open(target, mode='r', encoding='utf-8') as text_file:
        after = text_file.read()
    self.assertEqual(before + 'it worked!', after)
def create_report(self):
    """Write the daily transaction report to a timestamped DTR file.

    :return: the accumulated transaction report text (self._dtr)
    """
    now = datetime.datetime.now()
    file_path = f"DTR_{now.strftime('%d%m%y')}_{now.strftime('%H%M')}.txt"
    current_date = now.strftime("%d-%m-%Y")
    # BUG FIX: the header time used "%H:%m" — %m is the month, %M is minutes.
    current_time = now.strftime("%H:%M")
    # Typo fix: the acronym was printed as "(DRT)" although it stands for
    # Daily Transaction Report and the filename prefix is "DTR".
    transaction_report = ("HOLIDAY STORE - DAILY TRANSACTION REPORT (DTR)"
                          f"\n{current_date} {current_time}\n")
    FileHandler.write_order(file_path, transaction_report)
    FileHandler.write_order(file_path, self._dtr)
    return self._dtr
def test_csv(self):
    """Opening data.csv must parse the first record into the expected dict."""
    fh = FileHandler(Validator())
    actual = fh.open('data.csv')[0]
    expected = {'EMPID': 'A001', 'GENDER': 'F', 'AGE': '21', 'SALES': '001',
                'BMI': 'Normal', 'SALARY': '12', 'BIRTHDAY': '1-1-1996'}
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(actual, expected)
def test_write_line(self):
    """write_line must create test.txt containing exactly the given line.

    BUG FIX: os.remove used to crash with FileNotFoundError when test.txt
    did not exist, and the file handle was not context-managed.
    """
    line = 'Hello, testing.'
    if os.path.exists('test.txt'):
        os.remove('test.txt')
    FileHandler.write_line('test.txt', line)
    with open('test.txt') as test_file:
        self.assertEqual(test_file.read(), line)
def setup(self):
    """Create the connection/file/data handlers and initialise runtime state."""
    # The server socket becomes the first readable connection.
    self.connection_handler = ConnectionHandler(self.server_socket,
                                                self.buffer, self.local_ip)
    self.file_handler = FileHandler()
    self.data_handler = self.connection_handler.get_data_handler()
    # Runtime state.
    self.run_server = True
    self.session_name = datetime.datetime.now()
def download_file(url: str = Body(..., embed=True, alias="url")):
    """Validate *url*, download the file it points to and return it."""
    if not valid_url(url):
        raise HTTPException(status_code=400, detail="invalid url")
    try:
        handler = FileHandler(url)
        downloaded_path = handler.down_load_file()
    except Exception as e:
        raise HTTPException(status_code=400, detail=str(e))
    return FileResponse(downloaded_path)
def test_extractFiles(self, folder=os.path.dirname(__file__)):
    """Check that extractFiles returns exactly the python/html files.

    Params:
        folder (str) -> tested folder's path; defaults to the directory
        of the current file.
    Return: None

    Every file returned by extractFiles must have a python or html
    extension, and every such file directly inside *folder* must appear
    in the result. Each subfolder is checked recursively.
    """
    files = [os.path.basename(f) for f in FileHandler.extractFiles(folder)]
    for f in files:
        extension = os.path.splitext(f)[1]
        self.assertTrue(extension in (".pyw", ".py", ".html", ".htm"))
    for el in os.listdir(folder):
        # BUG FIX: isfile/isdir were called on the bare entry name, which
        # resolved against the current working directory, not *folder*.
        full_path = os.path.join(folder, el)
        if (os.path.isfile(full_path)
                and os.path.splitext(el)[1] in (".pyw", ".py",
                                                ".html", ".htm")):
            self.assertTrue(el in files)
        if os.path.isdir(full_path):
            self.test_extractFiles(full_path)
class Main(object):
    '''The top level class.

    Read the register csv file and the street_spec csv file into tables
    (lists of dict), build a ward lookup
    {(ward, street_name): [street_number_spec, ...], ...} from the spec,
    update the ward of each register row and write the result to a new
    csv file. The first line of the register csv
    ("Date Published: 01/05/2015") is expected to be skipped by the
    caller's configuration.
    '''

    def __init__(self, fieldnames_register, fieldnames_street_spec):
        self.filehandler = FileHandler()
        self.register_updater = RegisterUpdater()
        self.fieldnames_register = fieldnames_register
        self.fieldnames_street_spec = fieldnames_street_spec

    def register_update(self, csv_register, csv_street_spec,
                        number_fieldname, street_fieldname):
        '''Update ward names in the register and write a new csv file.

        BUG FIXES: the parameter was declared as number_filename while the
        body used the undefined name number_fieldname, and csv_write was
        called with the undefined bare name "fieldnames".
        '''
        (register, street_spec) = self.csv_read(csv_register, csv_street_spec)
        # Create the (ward, street_name) -> street number specs lookup.
        ward_lookup = self.register_updater.get_ward_lookup(street_spec)
        # Append new wards to the register table.
        (register_updated, errors) = self.register_updater.register_update(
            register, ward_lookup, number_fieldname, street_fieldname)
        # Write the updated register to a new csv file.
        self.filehandler.csv_write(
            register_updated,
            csv_register.replace('.csv', 'WardUpdated.csv'),
            self.fieldnames_register + ('ward_new',))
        for (k, v) in sorted(errors.items()):
            print(k, v)

    def csv_read(self, csv_register, csv_street_spec):
        '''Read the register and street spec csv files into tables.'''
        skip_lines = 0
        # Read register csv file into a table (list of dict).
        (register, unused) = self.filehandler.csv_read(
            csv_register, self.fieldnames_register, skip_lines)
        # Read street spec csv file into a table (list of dict).
        (street_spec, unused) = self.filehandler.csv_read(
            csv_street_spec, self.fieldnames_street_spec)
        return (register, street_spec)
def __init__(self, csv_register, street_fieldname):
    """Build self.street_names_set ({ward: {street_name, ...}}) from the register.

    :param csv_register: path of the register csv file
    :param street_fieldname: name of the column holding the street address
    """
    fieldnames = ('PD', 'ENO', 'Status', 'Title', 'First Names', 'Initials',
                  'Surname', 'Suffix', 'Date of Attainment', 'Franchise Flag',
                  'Address 1', 'Address 2', 'Address 3', 'Address 4',
                  'Address 5', 'Address 6', 'Address 7', 'Address 8',
                  'Address 9', 'Postcode')
    skip_lines = 1
    self.filehandler = FileHandler()
    # Read csv data file into a table (list of dict).
    (register, unused) = self.filehandler.csv_read(csv_register, fieldnames,
                                                   skip_lines)
    self.street_names_set = {}
    for row in register:
        ward = pd2ward(row['PD'])
        street_name = row[street_fieldname].strip()
        # Strip a leading house number and any leftover separators.
        # FIX: raw strings avoid invalid-escape warnings in the regexes.
        street_name = re.sub(r'^\d+\w*\s+', '', street_name)
        street_name = re.sub(r'^[-/\s\d]+', '', street_name)
        if street_name:
            self.street_names_set.setdefault(ward, set()).add(street_name)
class StreetName(object):
    '''Create street names by ward.

    {ward: {street_name, street_name, ...}} dict of set of street names.
    '''

    def __init__(self, csv_register, street_fieldname):
        fieldnames = ('PD', 'ENO', 'Status', 'Title', 'First Names',
                      'Initials', 'Surname', 'Suffix', 'Date of Attainment',
                      'Franchise Flag', 'Address 1', 'Address 2', 'Address 3',
                      'Address 4', 'Address 5', 'Address 6', 'Address 7',
                      'Address 8', 'Address 9', 'Postcode')
        skip_lines = 1
        self.filehandler = FileHandler()
        # Read csv data file into a table (list of dict).
        (register, unused) = self.filehandler.csv_read(csv_register,
                                                       fieldnames, skip_lines)
        self.street_names_set = {}
        for row in register:
            ward = pd2ward(row['PD'])
            street_name = row[street_fieldname].strip()
            # Strip a leading house number and any leftover separators.
            # FIX: raw strings avoid invalid-escape warnings in the regexes.
            street_name = re.sub(r'^\d+\w*\s+', '', street_name)
            street_name = re.sub(r'^[-/\s\d]+', '', street_name)
            if street_name:
                self.street_names_set.setdefault(ward, set()).add(street_name)

    def write(self, csv_street_names):
        '''Write the streets as one csv column per ward, sorted.'''
        street_names_array2d = []
        for (ward, street_names) in sorted(self.street_names_set.items()):
            street_names_array2d.append([ward] + sorted(street_names))
        # Pad every row to the same length so the transpose is rectangular.
        nrows = max(len(row) for row in street_names_array2d)
        for ward_row in street_names_array2d:
            ward_row += [''] * (nrows - len(ward_row))
        # Transpose: wards become columns.
        street_names_table = zip(*street_names_array2d)
        # Write, closing the file deterministically via the context manager.
        with open(expanduser(csv_street_names), 'w', newline='') as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=',', quotechar='"',
                                    quoting=csv.QUOTE_MINIMAL)
            for row in street_names_table:
                csv_writer.writerow(row)
import time
import defines
from serial_ard import SerialArduino
from file_handler import FileHandler
from chart import Chart
from table import Table
from interface import ConsoleInterface

# Module-level setup: parse the 'newtags' tag-definition file at import time.
txt = FileHandler('newtags')
txt.parse()
def __init__(self, fieldnames_register, fieldnames_street_spec):
    """Store the csv fieldname tuples and create the collaborating helpers."""
    # Column names for the two csv inputs.
    self.fieldnames_register = fieldnames_register
    self.fieldnames_street_spec = fieldnames_street_spec
    # Collaborators for csv I/O and ward updating.
    self.filehandler = FileHandler()
    self.register_updater = RegisterUpdater()
#!/usr/bin/env python
"""Decrypt a Cloudberry client-side encrypted file in place."""
import sys

from file_handler import FileHandler

if len(sys.argv) < 5 or sys.argv[1] == '-h':
    print('Usage:')
    print('... <password> <base64-iv> <algorithm:[AES128|AES256]> <compressed:[y|n]> /path/to/file')
    print('For the base64-iv you need to find the value of the cb-encryptioninfo header. It will '
          'have a semi-colon seperated string like: 1;190728;AES;256;kgAAAAAAwlXSWZGeLJlaWg==;;;')
    print('The 5th item is the base64 encoded "IV" value which is kgAAAAAAwlXSWZGeLJlaWg== in '
          'this case. You will need this to decrypt the file (it is different for each file).')
    exit()

password = sys.argv[1]           # Cloudberry client-side encryption password
base64iv = sys.argv[2]           # Base64 IV from 'cb-encryptioninfo' metadata header
algorithm = sys.argv[3].upper()  # AES128 or AES256
compressed = sys.argv[4].lower() == 'y'
file_to_decrypt = sys.argv[5]

# NOTE: the old version also read the encrypted file into an unused local;
# get_contents reads from the path itself, so that dead read is dropped.
file_handler = FileHandler(algorithm=algorithm, password=password)
# BUG FIX: the parsed command-line "compressed" flag was ignored —
# get_contents was always called with compressed=True.
decrypted_data = file_handler.get_contents(path=file_to_decrypt,
                                           base64iv=base64iv,
                                           compressed=compressed,
                                           encrypted=True)

# Overwrite the encrypted file with its decrypted contents.
with open(file_to_decrypt, 'wb') as decrypted_file:
    decrypted_file.write(decrypted_data)
start_t = txt.get_start_time() m_time = int((int(m_time) - start_t) / 1000) # temp_values = [float(y) for y in m_temp] # fbar_values = [float(y) for y in m_fbar] # lint_values = [float(y) for y in m_lint] row = [str(m_time), m_temp, m_fbar, m_lint] # data are float values return row #### Code itself #### # Objects declaration ard = SerialArduino(port, baud_rate) txt = FileHandler(user_file) dat = Table("data") cli = ConsoleInterface(dat) # binds data table to interface object cli.init_interface() # prints header op = cli.start_menu() if op.upper() == "Y": # cli.setup_interface() user_file = raw_input("Type TXT file name: ") txt.set_name(user_file) cli.show_warning("Hit Ctrl-C to stop acquisition") cli.getch() ### !!! BIG INCONSISTENCY RIGHT HERE !!!