def compress_into_one_file(self) -> pd.DataFrame:
    """Stack the "Data" sheet of every xlsx file in the download folder
    into one DataFrame.

    Returns:
        pd.DataFrame: all rows from all files, in file order; an empty
        DataFrame when no xlsx file is found.
    """
    path_to_files = search_import_file(
        where=os.path.join(self.parent, self.master, self.download_folder),
        extension=".xlsx",
    )
    # Collect the frames first and concatenate once: calling pd.concat
    # inside the loop copies the accumulator on every iteration, which is
    # quadratic in the total number of rows.
    frames = [
        pd.read_excel(path_to_file, sheet_name="Data")
        for path_to_file in path_to_files
    ]
    if not frames:
        # pd.concat([]) raises ValueError; preserve the original
        # "empty DataFrame when nothing to read" behaviour.
        return pd.DataFrame()
    return pd.concat(frames, sort=False)
def run_delete_all_xlsx(self):
    """Delete every xlsx file in the download folder and log how many
    were removed (or that none were found).

    Does nothing when the download folder does not exist.
    """
    xlsx_dir = os.path.join(self.parent, self.master, self.download_folder)
    if not os.path.isdir(xlsx_dir):
        return
    logger = logging.getLogger(self.script_name)
    xlsx = search_import_file(".xlsx", where=xlsx_dir)
    # A plain loop instead of list(map(lambda ...)): map used for side
    # effects builds a throwaway list and obscures the intent.
    for path in xlsx:
        os.remove(path)
    if xlsx:
        logger.info(f"{len(xlsx)} xlsx files were deleted")
    else:
        logger.info(
            "tried to delete the xlsx files but they could not be found")
def rename(self, filename: str) -> None:
    """
    Visit the temp file, rename the file with the correct name and move it
    to the self.download_folder

    Args:
        filename: target file name inside the download folder.

    Raises:
        ValueError: when more than one temp file is present.
    """
    path_where_file_to_rename = os.path.join(
        self.parent, self.master, self.temp_folder)
    # NOTE(review): extension is "xls" without a leading dot here while
    # other call sites pass ".xls"/".xlsx" — confirm search_import_file
    # treats both forms identically.
    found = search_import_file(
        where=path_where_file_to_rename, extension="xls")
    if len(found) > 1:
        raise ValueError(
            f"Too many temp files in the {self.temp_folder} folder")
    if not found:
        # No temp file yet: back off and return silently, as the original
        # IndexError handler did (caller is expected to retry).
        time.sleep(10)
        return
    # Only found[0] could raise IndexError in the original try block; the
    # explicit guard above replaces that overly broad try/except.
    abs_path = os.path.join(
        self.parent, self.master, self.download_folder, filename)
    os.rename(found[0], abs_path)
def process_files(self) -> list:
    """Process every downloaded xls file: parse it, build a Jira payload,
    create the ticket, and generate the resulting excel, updating the
    progress bar along the way.

    Returns:
        list: whatever ``save_json`` last returned for the accumulated
        payloads; an empty list when no payload was ever built.
    """
    path_to_mapping = os.path.join(
        self.parent, self.master, self.mapping_file_name)
    files = search_import_file(
        where=os.path.join(self.parent, self.master, self.download_folder),
        extension=".xls",
    )
    keep_track_test_case_num = 1
    data = dict()
    # Fix: payloads was only assigned in the innermost branch, so the
    # final return raised UnboundLocalError whenever no payload was built
    # (empty file list, all files too small, no new tickets, ...).
    payloads = []
    self.init_progress_counter(path_to_mapping)
    for file in files:
        logging.getLogger(self.script_name).info(
            f"Processing started for: {file}")
        path_to_file = os.path.join(
            self.parent, self.master, self.download_folder, file
        )
        if self.check_file_size(path_to_file):
            # NOTE(review): the `encoding` kwarg was removed from
            # pd.read_excel in pandas 1.2 — confirm the pinned pandas
            # version still accepts it.
            process = pd.read_excel(
                path_to_file, encoding="iso-8859-1", header=None
            )
            final_json = self.mash_data(process)
            df = self.convert_to_df(final_json)
            if (
                self.ticket_does_not_exist(path_to_mapping, file)
                and not df.empty
            ):
                df = self.complement_missing_data(df)
                ordering = self.grab_first_item(df)
                df = self.apply_ordering(df, ordering)
                df = self.convert_to_xray_format(
                    df,
                    file,
                    keep_track_test_case_num,
                    path_to_mapping,
                )
                df = self.add_url(df, path_to_mapping, file)
                payload = self.create_jira_ticket_payload(df, file)
                if (
                    payload is not None
                    and "description" in payload
                ):
                    _id = payload["description"]
                    data[_id] = payload
                    payloads = self.save_json(data)
                    jira_key = self.create_jira_ticket(payload)
                    self.add_jira_key(jira_key, path_to_mapping, file)
                    if len(jira_key) > 0:
                        self.update_label_jira_ticket(df, jira_key)
                    self.generate_excel(df, file)
                    keep_track_test_case_num += 1
            else:
                logging.getLogger(self.script_name).info(
                    f"Webservice not called as there is nothing to create. The process on ARIS does not justify the test case creation"
                )
        else:
            logging.getLogger(self.script_name).info(
                f"{path_leaf(file)} is too small, skipping"
            )
        if self.total_tickets != 0:
            progress = (keep_track_test_case_num - 1) * 100 / self.total_tickets
            self.update_progress_bar(progress, keep_track_test_case_num)
    return payloads