    def tearDown(self):
        self.rm_action = RmAction(os.getcwd())
        self.rm_action.rm_action(self.filename, remote=True, force=True)
        for fn in self.references:
            delete_file(fn)
        self.clean_action.clean_action(False, False, None)
        self.rm_action.close()
        self.clean_action.close()
        self.action.close()
    def setUp(self):
        self.clean_action = CleanAction(os.getcwd())
        self.add_action = AddAction(os.getcwd())
        self.rm_action = RmAction(os.getcwd())
        self.clean_action.clean_action(True, False, None)
        self.files = ['sample.txt', 'sample1.txt', 'sample2.txt']
        for fn in self.files:
            create_txt_file(fn)
        self.add_action.add_action(['sample*.txt'], overwrite=True)
        self.doc_ids = self.add_action.doc_manager.get_doc_ids()
        for doc_id in self.doc_ids:
            assert poll_doc(self.add_action, doc_id)
        # need to use a workflow that doesn't have machine translation so we have time to cancel
        # targets before they're completed. This is a system workflow template, so everyone will
        # have access to it no matter who runs these tests
        self.workflow_id = '2b5498e0-f3c7-4c49-9afa-cca4b3345af7'
    def setUp(self):
        self.action = CloneAction(os.getcwd())
        self.add_action = AddAction(os.getcwd())
        self.added_directories = []
        self.config_action = ConfigAction(os.getcwd())
        self.rm_action = RmAction(os.getcwd())
        self.dir_path1 = os.path.join(os.getcwd(), 'dir1')
        self.added_directories.append(self.dir_path1)
        create_directory(self.dir_path1)
        self.dir_path2 = os.path.join(os.getcwd(), 'dir2')
        self.added_directories.append(self.dir_path2)
        create_directory(self.dir_path2)
        self.dir_path3 = os.path.join(os.getcwd(), 'dir1', 'dir3')
        self.added_directories.append(self.dir_path3)
        create_directory(self.dir_path3)
        self.delete_directory(os.path.join(os.getcwd(), 'ja-JP'))
        self.delete_directory(os.path.join(os.getcwd(), 'es-MX'))
        self.delete_directory(os.path.join(os.getcwd(), 'downloads'))
class TestClean(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        create_config()

    @classmethod
    def tearDownClass(cls):
        cleanup()

    def setUp(self):
        self.action = CleanAction(os.getcwd())
        self.action.clean_action(False, False, None)
        self.files = ['sample.txt', 'sample1.txt', 'sample2.txt']
        self.forced = []
        for fn in self.files:
            create_txt_file(fn)
        subprocess.call(['ltk', 'add', '-o', 'sample*.txt'])
        self.entries = self.action.doc_manager.get_all_entries()
        for entry in self.entries:
            assert poll_doc(self.action, entry['id'])

    def tearDown(self):
        self.rm_action = RmAction(os.getcwd())
        for curr_file in self.files:
            if curr_file in self.forced:
                continue
            self.rm_action.rm_action(curr_file, remote=True, force=True)
        self.action.clean_action(True, False, None)
        self.action.close()

    def test_clean(self):
        delete_id = self.entries[0]['id']
        r = self.action.api.document_delete(delete_id)
        self.forced.append(self.entries[0]['file_name'])
        assert r.status_code == 204
        assert self.action.doc_manager.get_doc_by_prop('id', delete_id)
        assert poll_rm(self.action, delete_id), \
            "The file is in the process queue to be deleted but hasn't been deleted yet"
        self.action.clean_action(False, False, None)
        assert not self.action.doc_manager.get_doc_by_prop('id', delete_id), \
            "doc is " + str(self.action.doc_manager.get_doc_by_prop('id', delete_id))

    def test_clean_force(self):
        delete_id = self.entries[0]['id']
        doc_name = self.action.doc_manager.get_doc_by_prop('id', delete_id)['file_name']
        r = self.action.api.document_delete(delete_id)
        assert r.status_code == 204
        assert self.action.doc_manager.get_doc_by_prop('id', delete_id)
        assert poll_rm(self.action, delete_id), \
            "The file is in the process queue to be deleted but hasn't been deleted yet"
        self.action.clean_action(True, False, None)
        self.forced.append(self.entries[0]['file_name'])
        assert not self.action.doc_manager.get_doc_by_prop('id', delete_id)
        assert not os.path.isfile(os.path.join(self.action.path, doc_name))

    def test_disassociate(self):
        self.action.clean_action(False, True, None)
        for entry in self.entries:
            assert not self.action.doc_manager.get_doc_by_prop('id', entry['id'])
            assert poll_rm(self.action, entry['id'], cancelled=True)
            delete_file(entry['file_name'])
            self.action.api.document_delete(entry['id'])
            self.forced.append(entry['file_name'])

    # Test that a specified file is disassociated
    def test_clean_single(self):
        delete_id = self.entries[0]['id']
        doc_name = self.entries[0]['file_name']
        self.action.clean_action(False, False, doc_name)
        assert poll_rm(self.action, delete_id, cancelled=True)
        assert not self.action.doc_manager.get_doc_by_prop('id', delete_id)
        delete_file(doc_name)
        self.action.api.document_delete(delete_id)
        self.forced.append(doc_name)
class TestMv(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        create_config()

    @classmethod
    def tearDownClass(cls):
        cleanup()

    def setUp(self):
        self.action = MoveAction(os.getcwd())
        self.rm_action = RmAction(os.getcwd())
        self.add_action = AddAction(os.getcwd())
        self.list_action = ListAction(os.getcwd())
        # dictionary of directory names to directory paths
        self.added_dirs = {
            'full': 'full',
            'empty': 'empty',
            'hassub': 'hassub',
            'issub': 'hassub' + os.sep + 'issub'
        }
        create_directory(os.path.join(os.getcwd(), self.added_dirs['hassub']))
        create_directory(os.path.join(os.getcwd(), self.added_dirs['issub']))
        create_directory(os.path.join(os.getcwd(), self.added_dirs['full']))
        create_directory(os.path.join(os.getcwd(), self.added_dirs['empty']))
        # dictionary of file names to file paths
        self.added_files = {
            'fileA.txt': 'fileA.txt',
            'fileB.txt': os.path.join(self.added_dirs['full'], 'fileB.txt'),
            'fileC.txt': os.path.join(self.added_dirs['hassub'], 'fileC.txt'),  # untracked
            'fileD.txt': os.path.join(self.added_dirs['issub'], 'fileD.txt')
        }
        for filename in self.added_files:
            create_txt_file(os.path.join(os.getcwd(), self.added_files[filename]))
            if filename == 'fileC.txt':
                continue  # don't add so we can test with an untracked file
            self.add_action.add_action([self.added_files[filename]])

    def tearDown(self):
        self.rm_action.rm_action((), all=True, remote=True)
        for file in self.added_files.values():
            delete_file(file, os.getcwd())
        allCleared = False
        while not allCleared:
            allCleared = True
            for dirname in self.added_dirs:
                # stops infinite loop where delete_directory returns false if it doesn't exist
                if os.path.isdir(os.path.join(os.getcwd(), self.added_dirs[dirname])):
                    # directory was full; catch it on the next loop after its contents have been deleted
                    if not delete_directory(os.path.join(os.getcwd(), self.added_dirs[dirname])):
                        allCleared = False

    def test_mv_file(self):
        self.action.mv_action([self.added_files['fileA.txt']], self.added_dirs['full'])
        self.added_files['fileA.txt'] = os.path.join(self.added_dirs['full'], 'fileA.txt')
        try:
            out = StringIO()
            sys.stdout = out
            self.list_action.list_ids(False)
            info = out.getvalue()
            for filename in self.added_files:
                if filename == 'fileC.txt':
                    assert 'fileC.txt' not in info  # check just the name to cover any possible filepaths
                else:
                    assert self.added_files[filename] in info
        finally:
            sys.stdout = sys.__stdout__
        assert all(os.path.isfile(os.path.join(os.getcwd(), path)) for path in self.added_files.values())
        assert all(os.path.isdir(os.path.join(os.getcwd(), path)) for path in self.added_dirs.values())

    def test_mv_empty_dir(self):
        self.action.mv_action([self.added_dirs['empty']], self.added_dirs['full'])
        self.added_dirs['empty'] = os.path.join(self.added_dirs['full'], 'empty')
        try:
            out = StringIO()
            sys.stdout = out
            self.list_action.list_ids(False)
            info = out.getvalue()
            for filename in self.added_files:
                if filename == 'fileC.txt':
                    assert 'fileC.txt' not in info  # check just the name to cover any possible filepaths
                else:
                    assert self.added_files[filename] in info
        finally:
            sys.stdout = sys.__stdout__
        assert all(os.path.isfile(os.path.join(os.getcwd(), path)) for path in self.added_files.values())
        assert all(os.path.isdir(os.path.join(os.getcwd(), path)) for path in self.added_dirs.values())

    def test_mv_dir(self):
        self.action.mv_action([self.added_dirs['full']], self.added_dirs['hassub'])
        self.added_dirs['full'] = os.path.join(self.added_dirs['hassub'], 'full')
        self.added_files['fileB.txt'] = os.path.join(self.added_dirs['full'], 'fileB.txt')
        try:
            out = StringIO()
            sys.stdout = out
            self.list_action.list_ids(False)
            info = out.getvalue()
            for filename in self.added_files:
                if filename == 'fileC.txt':
                    assert 'fileC.txt' not in info  # check just the name to cover any possible filepaths
                else:
                    assert self.added_files[filename] in info
        finally:
            sys.stdout = sys.__stdout__
        assert all(os.path.isfile(os.path.join(os.getcwd(), path)) for path in self.added_files.values())
        assert all(os.path.isdir(os.path.join(os.getcwd(), path)) for path in self.added_dirs.values())

    def test_mv_subdir(self):
        self.action.mv_action([self.added_dirs['hassub']], self.added_dirs['full'])
        self.added_dirs['hassub'] = os.path.join(self.added_dirs['full'], 'hassub')
        self.added_dirs['issub'] = os.path.join(self.added_dirs['hassub'], 'issub')
        self.added_files['fileC.txt'] = os.path.join(self.added_dirs['hassub'], 'fileC.txt')
        self.added_files['fileD.txt'] = os.path.join(self.added_dirs['issub'], 'fileD.txt')
        try:
            out = StringIO()
            sys.stdout = out
            self.list_action.list_ids(False)
            info = out.getvalue()
            for filename in self.added_files:
                if filename == 'fileC.txt':
                    assert 'fileC.txt' not in info  # check just the name to cover any possible filepaths
                else:
                    assert self.added_files[filename] in info
        finally:
            sys.stdout = sys.__stdout__
        assert all(os.path.isfile(os.path.join(os.getcwd(), path)) for path in self.added_files.values())
        assert all(os.path.isdir(os.path.join(os.getcwd(), path)) for path in self.added_dirs.values())

    def test_mv_file_untracked(self):
        self.action.mv_action([self.added_files['fileC.txt']], self.added_dirs['full'])
        # fileC.txt should not change location
        try:
            out = StringIO()
            sys.stdout = out
            self.list_action.list_ids(False)
            info = out.getvalue()
            for filename in self.added_files:
                if filename == 'fileC.txt':
                    assert 'fileC.txt' not in info  # check just the name to cover any possible filepaths
                else:
                    assert self.added_files[filename] in info, filename + " not found in " + info
        finally:
            sys.stdout = sys.__stdout__
        assert all(os.path.isfile(os.path.join(os.getcwd(), path)) for path in self.added_files.values())
        assert all(os.path.isdir(os.path.join(os.getcwd(), path)) for path in self.added_dirs.values())

    def test_rename_file(self):
        self.action.mv_action([self.added_files['fileA.txt']], 'fileE.txt')
        self.added_files['fileA.txt'] = 'fileE.txt'
        try:
            out = StringIO()
            sys.stdout = out
            self.list_action.list_ids(False)
            info = out.getvalue()
            for filename in self.added_files:
                if filename == 'fileC.txt':
                    assert 'fileC.txt' not in info  # check just the name to cover any possible filepaths
                else:
                    assert self.added_files[filename] in info
        finally:
            sys.stdout = sys.__stdout__
        assert all(os.path.isfile(os.path.join(os.getcwd(), path)) for path in self.added_files.values())
        assert all(os.path.isdir(os.path.join(os.getcwd(), path)) for path in self.added_dirs.values())

    def test_rename_dir(self):
        self.action.mv_action([self.added_dirs['full']], 'filled')
        self.added_dirs['full'] = 'filled'
        self.added_files['fileB.txt'] = os.path.join(self.added_dirs['full'], 'fileB.txt')
        try:
            out = StringIO()
            sys.stdout = out
            self.list_action.list_ids(False)
            info = out.getvalue()
            for filename in self.added_files:
                if filename == 'fileC.txt':
                    assert 'fileC.txt' not in info  # check just the name to cover any possible filepaths
                else:
                    assert self.added_files[filename] in info
        finally:
            sys.stdout = sys.__stdout__
        assert all(os.path.isfile(os.path.join(os.getcwd(), path)) for path in self.added_files.values())
        assert all(os.path.isdir(os.path.join(os.getcwd(), path)) for path in self.added_dirs.values())
class TestStatusAction(unittest.TestCase):
    def setUp(self):
        create_config()
        self.action = StatusAction(os.getcwd())
        self.clean_action = CleanAction(os.getcwd())
        self.add_action = AddAction(os.getcwd())
        self.rm_action = RmAction(os.getcwd())
        self.clean_action.clean_action(True, False, None)
        self.file_name = 'sample.txt'
        self.file_path = create_txt_file(self.file_name)
        self.add_action.add_action([self.file_name], overwrite=True)
        self.doc_id = self.action.doc_manager.get_doc_ids()[0]
        assert poll_doc(self.action, self.doc_id)
        self.targets = ['ja-JP', 'de-DE']

    def tearDown(self):
        # remove the created file
        self.rm_action.rm_action(self.file_name, remote=True, force=True)
        self.clean_action.clean_action(True, False, None)
        self.action.close()
        cleanup()

    def test_status(self):
        # see that there is a status
        try:
            out = StringIO()
            sys.stdout = out
            self.action.get_status()
            status = out.getvalue()
            assert 'Status of {0}'.format(self.file_name) in status
        finally:
            sys.stdout = sys.__stdout__

    def test_status_detailed(self):
        # see that there are targets
        # request translations
        self.request_action = RequestAction(os.getcwd(), None, self.file_name, self.targets, False, False, None, None)
        self.request_action.target_action()
        try:
            out = StringIO()
            sys.stdout = out
            self.action.get_status(detailed=True)
            status = out.getvalue()
            assert 'Status of {0}'.format(self.file_name) in status
            for target in self.targets:
                assert 'Locale: {0}'.format(target) in status
        finally:
            sys.stdout = sys.__stdout__

    def test_status_no_target(self):
        # when no targets have been added
        try:
            out = StringIO()
            sys.stdout = out
            handler = logging.StreamHandler(sys.stdout)
            handler.setLevel(logging.INFO)
            formatter = logging.Formatter('%(message)s')
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            self.action.get_status(detailed=True)
            info = out.getvalue()
            assert 'No translations exist for document {0}'.format(self.file_name) in info
            logger.removeHandler(handler)
        finally:
            sys.stdout = sys.__stdout__

    def test_status_all(self):
        # add a second file
        self.file_name2 = 'sample2.txt'
        self.file_path2 = create_txt_file(self.file_name2)
        self.add_action.add_action([self.file_name2], overwrite=True)
        self.doc_id2 = self.action.doc_manager.get_doc_ids()[1]
        assert poll_doc(self.action, self.doc_id2)
        # remove second file from local tracking
        self.clean_action.clean_action(False, False, [self.file_name2])
        try:
            out = StringIO()
            sys.stdout = out
            # test that a normal status call doesn't see the new file
            self.action.get_status()
            status = out.getvalue()
            assert 'Status of {0}'.format(self.file_name) in status
            assert 'Status of {0}'.format(self.file_name2) not in status
            # test that a status all call sees both files
            # reset output capture
            out = StringIO()
            sys.stdout = out
            self.action.get_status(all=True)
            status = out.getvalue()
            assert 'Status of {0}'.format(self.file_name) in status
            assert 'Status of {0}'.format(self.file_name2) in status
        finally:
            sys.stdout = sys.__stdout__
        # remove second file
        self.rm_action.rm_action(self.doc_id2, id=True, remote=True)
        delete_file(self.file_path2)

    def test_status_name(self):
        # add a second file in a directory
        self.dir_name = 'folder'
        self.dir_path = os.path.join(os.getcwd(), self.dir_name)
        create_directory(self.dir_path)
        self.file_name2 = self.dir_name + os.sep + 'sample2.txt'
        self.file_name2_short = 'sample2.txt'
        self.file_path2 = create_txt_file(self.file_name2)
        self.add_action.add_action([self.file_name2], overwrite=True)
        self.doc_id2 = self.action.doc_manager.get_doc_ids()[1]
        assert poll_doc(self.action, self.doc_id2)
        try:
            out = StringIO()
            sys.stdout = out
            # test the case where the name is the same as the path
            self.action.get_status(doc_name=self.file_name)
            status = out.getvalue()
            assert 'Status of {0}'.format(self.file_name) in status
            assert 'Status of {0}'.format(self.file_name2_short) not in status
            # test the case where the name is different than the path
            # reset output capture
            out = StringIO()
            sys.stdout = out
            self.action.get_status(doc_name=self.file_name2_short)
            status = out.getvalue()
            assert 'Status of {0}'.format(self.file_name2_short) in status
            assert 'Status of {0}'.format(self.file_name) not in status
        finally:
            sys.stdout = sys.__stdout__
        # remove second file
        self.rm_action.rm_action(self.file_name2, remote=True, force=True)
        delete_directory(self.dir_path)
class TestPush(unittest.TestCase):
    def setUp(self):
        create_config()
        self.downloaded = []
        self.add_action = AddAction(os.getcwd())
        # All push_action calls below need the kwargs 'due_date' and 'due_reason'. They aren't
        # declared in the push action itself, but are passed by the click command that calls the
        # push action when a user invokes it, and are used in the logic of the push action.
        self.action = PushAction(self.add_action, os.getcwd(), False, False)
        self.clean_action = CleanAction(os.getcwd())
        self.rm_action = RmAction(os.getcwd())
        self.download_action = DownloadAction(os.getcwd())
        self.clean_action.clean_action(True, False, None)
        self.files = ['sample.txt', 'sample1.txt', 'sample2.txt']
        for fn in self.files:
            create_txt_file(fn)
        os.system('ltk add sample*.txt -o')  # Let the command line handle parsing the file pattern
        self.doc_ids = self.action.doc_manager.get_doc_ids()
        for doc_id in self.doc_ids:
            assert poll_doc(self.action, doc_id)

    def tearDown(self):
        # delete files added to lingotek cloud
        for curr_file in self.files:
            self.rm_action.rm_action(curr_file, remote=True, force=True)
        # delete downloaded translations
        for df in self.downloaded:
            os.remove(df)
        delete_directory("es-AR")
        self.downloaded = []
        self.clean_action.clean_action(True, False, None)
        self.action.close()
        cleanup()

    def test_push_1(self):
        append_file(self.files[0])
        locales = ['es-AR']
        test_doc_id = self.action.doc_manager.get_doc_by_prop('file_name', self.files[0])['id']
        self.request_action = RequestAction(os.getcwd(), self.files[0], None, locales, False, False, None, None, test_doc_id)
        self.request_action.target_action()
        # get the initial timestamp before modifying the document on the cloud
        orig_dates = get_orig_dates(self.action, [test_doc_id])
        assert orig_dates
        self.action.push_action(due_date='', due_reason='')
        # Poll and wait until the modification has taken effect in the cloud
        assert check_updated_ids(self.action, orig_dates)
        downloaded_path = self.download_action.download_action(test_doc_id, locales[0], False)
        #print("downloaded_path: " + str(downloaded_path))
        self.downloaded.append(downloaded_path)
        with open(downloaded_path, 'r') as f:
            downloaded_text = f.read()
        #print("downloaded_text: " + downloaded_text)
        assert "Texto agregado." in downloaded_text
        assert "Este es un ejemplo de archivo de texto." in downloaded_text

    def test_push_mult(self):
        append_file(self.files[0])
        append_file(self.files[1])
        locales = ['es-AR']
        test_doc_id_0 = self.action.doc_manager.get_doc_by_prop('file_name', self.files[0])['id']
        test_doc_id_1 = self.action.doc_manager.get_doc_by_prop('file_name', self.files[1])['id']
        self.request_action = RequestAction(os.getcwd(), self.files[0], None, locales, False, False, None, None, test_doc_id_0)
        target1 = self.request_action.target_action()
        self.request_action = RequestAction(os.getcwd(), self.files[1], None, locales, False, False, None, None, test_doc_id_1)
        target2 = self.request_action.target_action()
        # get the initial timestamps before modifying the documents on the cloud
        orig_dates = get_orig_dates(self.action, [test_doc_id_0, test_doc_id_1])
        assert orig_dates
        push = self.action.push_action(due_date='', due_reason='')
        # Poll and wait until the modification has taken effect on the cloud
        assert check_updated_ids(self.action, orig_dates)
        dl_path_0 = self.download_action.download_action(test_doc_id_0, locales[0], False)
        dl_path_1 = self.download_action.download_action(test_doc_id_1, locales[0], False)
        self.downloaded = [dl_path_0, dl_path_1]
        for path in self.downloaded:
            with open(path, 'r') as f:
                downloaded_text = f.read()
            #print("downloaded_text: " + downloaded_text)
            assert "Texto agregado." in downloaded_text
            assert "Este es un ejemplo de archivo de texto." in downloaded_text

    def test_push_none(self):
        try:
            # out = StringIO()
            # sys.stdout = out
            assert not self.action.push_action(due_date='', due_reason='')
            # info = out.getvalue()
            # assert 'All documents up-to-date with Lingotek Cloud.' in info
        finally:
            sys.stdout = sys.__stdout__

    def test_push_dry_run(self):
        append_file(self.files[0])
        append_file(self.files[1])
        locales = ['es-AR']
        test_doc_id_0 = self.action.doc_manager.get_doc_by_prop('file_name', self.files[0])['id']
        test_doc_id_1 = self.action.doc_manager.get_doc_by_prop('file_name', self.files[1])['id']
        self.request_action = RequestAction(os.getcwd(), self.files[0], None, locales, False, False, None, None, test_doc_id_0)
        target1 = self.request_action.target_action()
        self.request_action = RequestAction(os.getcwd(), self.files[1], None, locales, False, False, None, None, test_doc_id_1)
        target2 = self.request_action.target_action()
        # get the initial timestamps before modifying the documents on the cloud
        orig_dates = get_orig_dates(self.action, [test_doc_id_0, test_doc_id_1])
        assert orig_dates
        try:
            out = StringIO()
            sys.stdout = out
            self.action.test = True
            handler = logging.StreamHandler(sys.stdout)
            handler.setLevel(logging.INFO)
            formatter = logging.Formatter('%(message)s')
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            push = self.action.push_action(due_date='', due_reason='')
            info = out.getvalue()
            assert 'Update ' + self.files[0] in info
            assert 'Update ' + self.files[1] in info
            assert 'TEST RUN: Added 0, Updated 2 (Total 2)' in info
            logger.removeHandler(handler)
        finally:
            sys.stdout = sys.__stdout__
        print("polling to check that the files weren't modified. This will take 6 minutes if successful.")
        # Poll and wait to make sure the modification didn't occur on the cloud
        assert not check_updated_ids(self.action, orig_dates)
        dl_path_0 = self.download_action.download_action(test_doc_id_0, locales[0], False)
        dl_path_1 = self.download_action.download_action(test_doc_id_1, locales[0], False)
        self.downloaded = [dl_path_0, dl_path_1]
        for path in self.downloaded:
            with open(path, 'r') as f:
                downloaded_text = f.read()
            #print("downloaded_text: " + downloaded_text)
            assert "Texto agregado." not in downloaded_text
            assert "Este es un ejemplo de archivo de texto." in downloaded_text

    def test_push_title(self):
        dir_path = os.path.join(os.getcwd(), 'nested')
        nestedfile = 'nested' + os.sep + 'nestedfile.txt'
        create_directory(dir_path)
        create_txt_file(nestedfile)
        os.system('ltk add nested' + os.sep + 'nestedfile.txt -o')
        append_file(self.files[0])
        append_file(nestedfile)
        locales = ['es-AR']
        test_doc_id_0 = self.action.doc_manager.get_doc_by_prop('file_name', self.files[0])['id']
        test_doc_id_1 = self.action.doc_manager.get_doc_by_prop('file_name', nestedfile)['id']
        self.request_action = RequestAction(os.getcwd(), self.files[0], None, locales, False, False, None, None, test_doc_id_0)
        target1 = self.request_action.target_action()
        self.request_action = RequestAction(os.getcwd(), nestedfile, None, locales, False, False, None, None, test_doc_id_1)
        target2 = self.request_action.target_action()
        # get the initial timestamps before modifying the documents on the cloud
        orig_dates = get_orig_dates(self.action, [test_doc_id_0, test_doc_id_1])
        assert orig_dates
        try:
            out = StringIO()
            sys.stdout = out
            self.action.title = True
            handler = logging.StreamHandler(sys.stdout)
            handler.setLevel(logging.INFO)
            formatter = logging.Formatter('%(message)s')
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            push = self.action.push_action(due_date='', due_reason='')
            info = out.getvalue()
            assert 'Updated ' + self.files[0] in info
            assert 'Updated nestedfile.txt' in info  # should be just nestedfile.txt, not nested/nestedfile.txt
            logger.removeHandler(handler)
        finally:
            sys.stdout = sys.__stdout__
        # Poll and wait until the modification has taken effect on the cloud
        assert check_updated_ids(self.action, orig_dates)
        dl_path_0 = self.download_action.download_action(test_doc_id_0, locales[0], False)
        dl_path_1 = self.download_action.download_action(test_doc_id_1, locales[0], False)
        self.downloaded = [dl_path_0, dl_path_1]
        for path in self.downloaded:
            with open(path, 'r') as f:
                downloaded_text = f.read()
            #print("downloaded_text: " + downloaded_text)
            assert "Texto agregado." in downloaded_text
            assert "Este es un ejemplo de archivo de texto." in downloaded_text
        self.rm_action.rm_action(nestedfile, remote=True, force=True)
        delete_directory("nested")

    def test_push_metadata(self):
        from unittest.mock import patch
        # 'alpha' and 'beta' answer the first two metadata prompts; the rest are left blank
        with patch('builtins.input', side_effect=['alpha', 'beta'] + [''] * 43):
            self.action.push_action(set_metadata=True, due_date='', due_reason='')
        # check that the metadata is attached to the documents
        for doc_id in self.action.doc_manager.get_doc_ids():
            properties = self.action.api.get_document(doc_id).json()['properties']
            for field in METADATA_FIELDS:
                if field == METADATA_FIELDS[0]:
                    assert field in properties
                    assert properties[field] == 'alpha'
                elif field == METADATA_FIELDS[1]:
                    assert field in properties
                    assert properties[field] == 'beta'
                else:
                    # For some reason, the PATCH call adds some False or None values where metadata
                    # wasn't set. Values are kept if they are set, so everything works; we just need
                    # to account for returned fields that we didn't set.
                    if field == 'require_review':
                        assert not properties['require_review']  # should be False because it was set to an empty string
                        continue
                    if field == 'external_url':
                        assert not properties['external_url']  # should be None because it was set to an empty string
                        continue
                    assert field not in properties
        # modify the metadata: leave the first field alone, change the second and third
        with patch('builtins.input', side_effect=['', 'delta', 'gamma'] + [''] * 42):
            self.action.push_action(set_metadata=True, due_date='', due_reason='')
        # check that the metadata was updated
        for doc_id in self.action.doc_manager.get_doc_ids():
            properties = self.action.api.get_document(doc_id).json()['properties']
            for field in METADATA_FIELDS:
                if field == METADATA_FIELDS[0]:
                    assert field in properties
                    assert properties[field] == 'alpha'
                elif field == METADATA_FIELDS[1]:
                    assert field in properties
                    assert properties[field] == 'delta'
                elif field == METADATA_FIELDS[2]:
                    assert field in properties
                    assert properties[field] == 'gamma'
                else:
                    if field == 'require_review':
                        # For some reason, the PATCH call adds some False or None values where metadata
                        # wasn't set. Values are kept if they are set, so everything works; we just need
                        # to account for returned fields that we didn't set.
                        assert not properties['require_review']  # should be False because it was set to an empty string
                        continue
                    if field == 'external_url':
                        assert not properties['external_url']  # should be None because it was set to an empty string
                        continue
                    assert field not in properties

    def test_push_default_metadata(self):
        # set default metadata directly so the config action can be tested in its own unit test
        self.action.default_metadata[METADATA_FIELDS[2]] = "delta"
        self.action.default_metadata[METADATA_FIELDS[3]] = "gamma"
        self.action.push_action(due_date='', due_reason='')
        # check that the metadata is attached to the documents
        for doc_id in self.action.doc_manager.get_doc_ids():
            properties = self.action.api.get_document(doc_id).json()['properties']
            for field in METADATA_FIELDS:
                if field == METADATA_FIELDS[2]:
                    assert field in properties
                    assert properties[field] == 'delta'
                elif field == METADATA_FIELDS[3]:
                    assert field in properties
                    assert properties[field] == 'gamma'
                else:
                    if field == 'require_review':
                        # For some reason, the PATCH call adds some False or None values where metadata
                        # wasn't set. Values are kept if they are set, so everything works; we just need
                        # to account for returned fields that we didn't set.
                        assert not properties['require_review']  # should be False because it was set to an empty string
                        continue
                    if field == 'external_url':
                        assert not properties['external_url']  # should be None because it was set to an empty string
                        continue
                    assert field not in properties

    def test_push_named(self):
        append_file(self.files[0])
        append_file(self.files[1])
        locales = ['es-AR']
        test_doc_id_0 = self.action.doc_manager.get_doc_by_prop('file_name', self.files[0])['id']
        test_doc_id_1 = self.action.doc_manager.get_doc_by_prop('file_name', self.files[1])['id']
        self.request_action = RequestAction(os.getcwd(), self.files[0], None, locales, False, False, None, None, test_doc_id_0)
        target1 = self.request_action.target_action()
        self.request_action = RequestAction(os.getcwd(), self.files[1], None, locales, False, False, None, None, test_doc_id_1)
        target2 = self.request_action.target_action()
        # get the initial timestamps before modifying the documents on the cloud
        orig_dates0 = get_orig_dates(self.action, [test_doc_id_0])
        assert orig_dates0
        orig_dates1 = get_orig_dates(self.action, [test_doc_id_1])
        assert orig_dates1
        push = self.action.push_action(files=[self.files[0]], due_date='', due_reason='')
        # Poll and wait until the modification has taken effect on the cloud
        assert check_updated_ids(self.action, orig_dates0)
        print("polling to check that the file wasn't modified. This will take 3 minutes if successful.")
        # Poll and wait to make sure the modification didn't occur on the cloud
        assert not check_updated_ids(self.action, orig_dates1)
        dl_path_0 = self.download_action.download_action(test_doc_id_0, locales[0], False)
        dl_path_1 = self.download_action.download_action(test_doc_id_1, locales[0], False)
        self.downloaded = [dl_path_0, dl_path_1]
        with open(dl_path_0, 'r') as f:
            downloaded_text = f.read()
        #print("downloaded_text: " + downloaded_text)
        assert "Texto agregado." in downloaded_text, downloaded_text
        assert "Este es un ejemplo de archivo de texto." in downloaded_text, downloaded_text
        with open(dl_path_1, 'r') as f:
            downloaded_text = f.read()
        #print("downloaded_text: " + downloaded_text)
        assert "Texto agregado." not in downloaded_text, downloaded_text
        assert "Este es un ejemplo de archivo de texto." in downloaded_text, downloaded_text

    def test_push_metadata_only(self):
        append_file(self.files[0])
        locales = ['es-AR']
        test_doc_id = self.action.doc_manager.get_doc_by_prop('file_name', self.files[0])['id']
        self.request_action = RequestAction(os.getcwd(), self.files[0], None, locales, False, False, None, None, test_doc_id)
        self.request_action.target_action()
        # get the initial timestamp before modifying the document on the cloud
        orig_dates = get_orig_dates(self.action, [test_doc_id])
        assert orig_dates
        from unittest.mock import patch
        # 'alpha' and 'beta' answer the first two metadata prompts; the rest are left blank
        with patch('builtins.input', side_effect=['alpha', 'beta'] + [''] * 43):
            self.action.push_action(set_metadata=True, metadata_only=True, due_date='', due_reason='')
        # check that the file contents weren't updated
        print("polling to check that the file wasn't modified. This will take 3 minutes if successful.")
        # Poll and wait to make sure the modification didn't occur on the cloud
        assert not check_updated_ids(self.action, orig_dates)
        downloaded_path = self.download_action.download_action(test_doc_id, locales[0], False)
        #print("downloaded_path: " + str(downloaded_path))
        self.downloaded.append(downloaded_path)
        with open(downloaded_path, 'r') as f:
            downloaded_text = f.read()
        #print("downloaded_text: " + downloaded_text)
        assert "Texto agregado." not in downloaded_text
        assert "Este es un ejemplo de archivo de texto." in downloaded_text
        # check that the metadata was updated
        for doc_id in self.action.doc_manager.get_doc_ids():
            properties = self.action.api.get_document(doc_id).json()['properties']
            for field in METADATA_FIELDS:
                if field == METADATA_FIELDS[0]:
                    assert field in properties
                    assert properties[field] == 'alpha'
                elif field == METADATA_FIELDS[1]:
                    assert field in properties
                    assert properties[field] == 'beta'
                else:
                    if field == 'require_review':
                        # For some reason, the PATCH call adds some False or None values where metadata
                        # wasn't set. Values are kept if they are set, so everything works; we just need
                        # to account for returned fields that we didn't set.
                        assert not properties['require_review']  # should be False because it was set to an empty string
                        continue
                    if field == 'external_url':
                        assert not properties['external_url']  # should be None because it was set to an empty string
                        continue
                    assert field not in properties
    def setUp(self):
        self.action = ListAction(os.getcwd())
        self.clean_action = CleanAction(os.getcwd())
        self.add_action = AddAction(os.getcwd())
        self.clean_action.clean_action(True, False, None)
        self.rm_action = RmAction(os.getcwd())
class TestWatch(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        create_config()

    @classmethod
    def tearDownClass(cls):
        cleanup()

    def setUp(self):
        self.action = WatchAction(os.getcwd())
        self.clean_action = CleanAction(os.getcwd())
        self.add_action = AddAction(os.getcwd())
        self.rm_action = RmAction(os.getcwd())
        self.locales_to_test = ['de-DE', 'es-AR', 'ja-JP']
        # defaults in this test: clone on, download folder none, target locale folders none
        self.config_action = ConfigAction(os.getcwd())
        self.clean_action.clean_action(False, False, None)
        self.config_action.config_action(target_locales=self.locales_to_test)
        # self.action.open()
        self.downloaded = []
        self.files = []
        self.dir_name = "dir1"
        create_directory(self.dir_name)
        self.add_action.add_action([self.dir_name], overwrite=True)
        # todo current problem: watchdog does not seem to detect changes in the daemon,
        # but not daemonizing watch causes tests to hang.
        # This function is run again in each test, so starting it here seems superfluous:
        #watch_thread = Thread(target=self.action.watch_action, args=('.', (), None))
        #watch_thread.daemon = True
        #watch_thread.start()

    def tearDown(self):
        # delete files
        for fn in self.files:
            self.rm_action.rm_action(fn, remote=True, force=True)
        self.clean_action.clean_action(False, False, None)
        # delete downloads
        self.action.folder_manager.clear_all()
        for fn in self.downloaded:
            os.remove(fn)
        # delete directory
        # using rmtree so it deletes recursively when the directory is not empty
        shutil.rmtree(self.dir_name)
        for locale in self.locales_to_test:
            if os.path.exists(locale) and os.path.isdir(locale):
                shutil.rmtree(locale)
        #delete_directory(self.dir_name)

    def test_watch_new_file(self):
        file_name = "test_watch_sample_0.txt"
        self.files.append(self.dir_name + os.sep + file_name)
        if os.path.exists(self.dir_name + file_name):
            delete_file(file_name)
        # start the watch
        self.action.timeout = 5  # set poll to 5 seconds instead of a minute for testing
        watch_thread = Thread(target=self.action.watch_action, args=((), None, False, False))
        watch_thread.daemon = True
        watch_thread.start()
        time.sleep(10)  # Gives watch enough time to start up before creating the document
        create_txt_file(file_name, self.dir_name)
        # check if watch detected the file and added it to the db
        doc = None
        time_passed = 0
        while doc is None and time_passed < 10:
            doc = self.action.doc_manager.get_doc_by_prop('name', file_name)
            time.sleep(1)
            time_passed += 1
        assert doc
        assert poll_doc(self.action, doc['id'])

    def test_watch_update(self):
        file_name = "test_watch_sample_1.txt"
        self.files.append(self.dir_name + os.sep + file_name)
        if os.path.exists(self.dir_name + file_name):
            delete_file(file_name)
        create_txt_file(file_name, self.dir_name)
        # add the document so we're only testing updating, not adding
        self.add_action.add_action([self.dir_name + os.sep + file_name], overwrite=True)
        # start the watch
        self.action.timeout = 5  # set poll to 5 seconds instead of a minute for testing
        watch_thread = Thread(target=self.action.watch_action, args=((), None, False, False))
        watch_thread.daemon = True
        watch_thread.start()
        time.sleep(10)  # Gives watch enough time to start up before appending to the document
        append_file(file_name, self.dir_name)
        time.sleep(10)  # Gives watch enough time to pick up on the append
        doc = self.action.doc_manager.get_doc_by_prop('name', file_name)
        newcontent = str(self.action.api.document_content(doc['id'], None, False).content)
        assert 'Appended text.' in newcontent

    def watch_ignore_thread(self):
        os.system('ltk watch -t 5 --ignore .pdf --ignore .html')

    # test watch new file when a download directory is in the config file (currently broken)
    @unittest.skip("skipping until watch is fully functional")
    def test_watch_ignore(self):  # currently not working because watch is broken
        file_name1 = "test_watch_text.txt"
        self.files.append(self.dir_name + os.sep + file_name1)
        if os.path.exists(self.dir_name + file_name1):
            delete_file(file_name1)
        file_name2 = "test_watch_html.html"
        self.files.append(self.dir_name + os.sep + file_name2)
        if os.path.exists(self.dir_name + file_name2):
            delete_file(file_name2)
        file_name3 = "test_watch_pdf.pdf"
        self.files.append(self.dir_name + os.sep + file_name3)
        if os.path.exists(self.dir_name + file_name3):
            delete_file(file_name3)
        # create and add a file to ignore for append testing
        create_txt_file(file_name2, self.dir_name)
        self.add_action.add_action([self.dir_name + os.sep + file_name2], overwrite=True)
        # start the watch
        # if not done with an os.system call, something happens to the document database when new
        # documents are added and they somehow overwrite ignored documents
        watch_thread = Thread(target=self.watch_ignore_thread)
        watch_thread.daemon = True
        watch_thread.start()
        time.sleep(10)  # Gives watch enough time to start up before creating the documents
        create_txt_file(file_name1, self.dir_name)  # create control file that should be added/appended
        create_txt_file(file_name3, self.dir_name)  # create a file to ignore for add testing
        time.sleep(10)  # Gives watch enough time to pick up on the new files
        assert file_name1 in self.action.doc_manager.get_names()
        assert file_name2 in self.action.doc_manager.get_names()
        assert file_name3 not in self.action.doc_manager.get_names()
        doc1 = self.action.doc_manager.get_doc_by_prop('name', file_name1)
        doc2 = self.action.doc_manager.get_doc_by_prop('name', file_name2)
        assert poll_doc(self.action, doc1['id'])
        assert poll_doc(self.action, doc2['id'])
        append_file(file_name1, self.dir_name)
        append_file(file_name2, self.dir_name)
        time.sleep(10)  # Gives watch enough time to pick up on the changes
        doc1 = self.action.doc_manager.get_doc_by_prop('name', file_name1)
        newcontent1 = str(self.action.api.document_content(doc1['id'], None, False).content)
        assert 'Appended text.' in newcontent1
        doc2 = self.action.doc_manager.get_doc_by_prop('name', file_name2)
        newcontent2 = str(self.action.api.document_content(doc2['id'], None, False).content)
        assert 'Appended text.' not in newcontent2

    # test watch auto with new file (currently broken, can't test until expected behavior is better defined)
    @unittest.skip("skipping until watch is fully functional")
    def test_watch_auto(self):
        pass

    # test watch no_folders with update and new file (currently broken)
    @unittest.skip("skipping until watch is fully functional")
    def test_watch_no_folders(self):
        file_name1 = "test_watch_text_1.txt"
        self.files.append(self.dir_name + os.sep + file_name1)
        if os.path.exists(self.dir_name + file_name1):
            delete_file(file_name1)
        file_name2 = "test_watch_text_2.txt"
        self.files.append(self.dir_name + os.sep + file_name2)
        if os.path.exists(self.dir_name + file_name2):
            delete_file(file_name2)
        # create and add a file that won't be ignored
        create_txt_file(file_name1, self.dir_name)
        self.add_action.add_action([self.dir_name + os.sep + file_name1], overwrite=True)
        # start the watch
        self.action.timeout = 5  # set poll to 5 seconds instead of a minute for testing
        watch_thread = Thread(target=self.action.watch_action, args=((), None, True, False))
        watch_thread.daemon = True
        watch_thread.start()
        time.sleep(10)  # Gives watch enough time to start up before creating the document
        # create a file to add and be ignored -- why exactly should it be ignored if we pass in () as the ignore list?
        create_txt_file(file_name2, self.dir_name)
        time.sleep(10)  # Gives watch enough time to pick up on the new files
        assert file_name1 in self.action.doc_manager.get_names(), self.action.doc_manager.get_names()
        assert file_name2 not in self.action.doc_manager.get_names(), self.action.doc_manager.get_names()
        doc1 = self.action.doc_manager.get_doc_by_prop('name', file_name1)
        assert poll_doc(self.action, doc1['id'])
        append_file(file_name1, self.dir_name)
        append_file(file_name2, self.dir_name)
        time.sleep(10)  # Gives watch enough time to pick up on the changes
        doc1 = self.action.doc_manager.get_doc_by_prop('name', file_name1)
        newcontent1 = str(self.action.api.document_content(doc1['id'], None, False).content)
        assert 'Appended text.' in newcontent1
        assert file_name2 not in self.action.doc_manager.get_names()
        # check in progress - get a list of remote files and ensure that the one that was supposed to be ignored isn't there
        #print(str(self.action.api.list_documents(self.action.project_id)['entities']['properties']['title']))

    # the following three tests are for functionality that is currently broken in watch, so they are not yet implemented
    # test watch downloading
    @unittest.skip("skipping until watch is fully functional")
    def test_watch_translation(self):
        pass

    # test watch downloading translations with a download folder specified
    @unittest.skip("skipping until watch is fully functional")
    def test_watch_translation_download(self):
        pass

    # test watch downloading translations with clone specified
    @unittest.skip("skipping until watch is fully functional")
    def test_watch_translation_clone(self):
        pass

    # test creating a file in a subdirectory with the clone option on; make sure recursion does not occur
    def test_watch_subdir_clone_recursion(self):
        self.config_action.config_action(clone_option='on', download_folder='--none')
        # this changes the watch_locales options for the daemon instead of the current thread
        self.action.watch_locales = self.locales_to_test
        self.action.folder_manager.clear_all()  # don't add dir1 as a watch folder
        subdir_name = "subdir"
        working_directory = self.dir_name + os.sep + subdir_name
        create_directory(working_directory)
        file_name1 = "test_watch_clone.txt"
        self.files.append(working_directory + os.sep + file_name1)
        if os.path.exists(working_directory + os.sep + file_name1):
            delete_file(file_name1)
        self.action.timeout = 5  # set poll to 5 seconds instead of a minute for testing
        watch_thread = Thread(target=self.action.watch_action, args=((), None, False, False))
        watch_thread.daemon = True
        watch_thread.start()
        time.sleep(10)  # Gives watch enough time to start up before creating the document
        create_txt_file(file_name1, working_directory)
        # check that the document was added to local tracking
        doc = None
        time_passed = 0
        while doc is None and time_passed < 10:
            doc = self.action.doc_manager.get_doc_by_prop('name', file_name1)
            time.sleep(1)
            time_passed += 1
        assert doc
        # check that the document was added to Lingotek
        assert poll_doc(self.action, doc['id'])
        # check that the locale folders were created when the document was downloaded
        waittime = 0
        while not all(os.path.isdir(locale) for locale in self.locales_to_test):
            time.sleep(5)
            waittime += 5
            if waittime == 120:
                print("TEST FAIL: Timed out before locale folder was added")
                assert False
        # check that downloaded files exist locally
        waittime = 0
        while not all(os.path.exists(locale + os.sep + file_name1) for locale in self.locales_to_test):
            time.sleep(5)
            waittime += 5
            if waittime == 30 * len(self.locales_to_test):
                print("TEST FAIL: Timed out before translation was downloaded")
                assert False
        # wait for two minutes in case it tries to upload the downloaded translations (which we're testing to make sure it doesn't)
        time.sleep(120)
        # check that no new files were added
        assert len(self.action.doc_manager.get_file_names()) == 1
class TestDownload(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        create_config()

    @classmethod
    def tearDownClass(cls):
        cleanup()

    def setUp(self):
        self.config_action = ConfigAction(os.getcwd())
        self.config_action.config_action(clone_option='off')
        self.config_action.config_action(download_folder='--none')
        self.downloaded_files = []
        self.locales = ['ja-JP', 'zh-CN']
        self.action = DownloadAction(os.getcwd())
        self.clean_action = CleanAction(os.getcwd())
        self.request_action = RequestAction(os.getcwd(), None, None, self.locales, False, False, None, None)
        self.pull_action = PullAction(os.getcwd(), self.action)
        self.clean_action.clean_action(False, False, None)
        self.files = ['sample.txt', 'sample1.txt']
        self.first_doc = 'sample.txt'
        for fn in self.files:
            create_txt_file(fn)
        os.system('ltk add sample*.txt -o')  # Let the command line handle parsing the file pattern
        self.doc_ids = self.action.doc_manager.get_doc_ids()
        for doc_id in self.doc_ids:
            assert poll_doc(self.action, doc_id)
        self.request_action.target_action()

    def tearDown(self):
        self.rm_action = RmAction(os.getcwd())
        for curr_file in self.files:
            self.rm_action.rm_action([curr_file], remote=True, force=True)
        self.clean_action.clean_action(False, False, None)
        for dl_file in self.downloaded_files:
            if os.path.exists(dl_file):
                os.remove(dl_file)
        self.rm_action.close()
        self.clean_action.close()
        self.action.close()

    def get_dl_path(self, locale, document):
        name_parts = document.split('.')
        if len(name_parts) > 1:
            name_parts.insert(-1, locale)
            downloaded_name = '.'.join(name_parts)
        else:
            downloaded_name = name_parts[0] + '.' + locale
        dl_path = os.path.join(self.action.path, downloaded_name)
        return dl_path

    def test_download_name(self):
        self.action.download_by_path(self.first_doc, self.locales[0], False, False, False, False)
        dl_file = self.get_dl_path(self.locales[0], self.first_doc)
        assert self.locales[0] in dl_file
        assert os.path.isfile(dl_file)
        self.downloaded_files.append(dl_file)

    def test_pull_all(self):
        for document in self.files:
            for locale in self.locales:
                dl_file = self.get_dl_path(locale, document)
                self.downloaded_files.append(dl_file)
        self.pull_action.pull_translations(None, False, False, False)
        for path in self.downloaded_files:
            assert os.path.isfile(path)

    def test_pull_locale(self):
        for document in self.files:
            dl_file = self.get_dl_path(self.locales[0], document)
            self.downloaded_files.append(dl_file)
        self.pull_action.pull_translations(self.locales[0], False, False, False)
        for path in self.downloaded_files:
            assert os.path.isfile(path)
class TestImport(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        create_config()

    @classmethod
    def tearDownClass(cls):
        cleanup()

    def setUp(self):
        self.action = ImportAction(os.getcwd())
        self.clean_action = CleanAction(os.getcwd())
        self.clean_action.clean_action(False, False, None)
        self.files = ['sample.txt', 'sample1.txt', 'sample2.txt']
        for fn in self.files:
            create_txt_file(fn)
        self.doc_ids = []
        for fn in self.files:
            title = os.path.basename(os.path.normpath(fn))
            response = self.action.api.add_document(self.action.locale, fn, self.action.project_id, title)
            assert response.status_code == 202
            self.doc_ids.append(response.json()['properties']['id'])
        for doc_id in self.doc_ids:
            assert poll_doc(self.action, doc_id)
        for fn in self.files:
            delete_file(fn)
        self.action.doc_manager.clear_all()
        self.imported = []

    def tearDown(self):
        self.rm_action = RmAction(os.getcwd())
        for doc_id in self.doc_ids:
            self.rm_action.rm_action(doc_id, id=True, remote=True, force=True)
        for file_name in self.imported:
            if os.path.isfile(os.path.join(os.getcwd(), file_name)):
                delete_file(file_name)
        self.clean_action.clean_action(True, False, None)
        self.rm_action.close()
        self.action.close()

    def test_import_single_untracked(self):
        self.action.api.document_cancel(self.doc_ids[0])
        assert poll_rm(self.action, self.doc_ids[0], cancelled=True)
        self.action.import_action(False, False, False, False, False, self.doc_ids[0])
        assert os.path.isfile(os.path.join(os.getcwd(), self.files[0]))
        self.imported.append(self.files[0])
        self.action.import_action(False, False, False, False, False, self.doc_ids[1])
        assert os.path.isfile(os.path.join(os.getcwd(), self.files[1]))
        self.imported.append(self.files[1])

    def test_import_single_tracked(self):
        self.action.api.document_cancel(self.doc_ids[0])
        assert poll_rm(self.action, self.doc_ids[0], cancelled=True)
        self.action.import_action(False, False, False, True, False, self.doc_ids[0])
        assert os.path.isfile(os.path.join(os.getcwd(), self.files[0]))
        self.imported.append(self.files[0])
        self.action.import_action(False, False, False, True, False, self.doc_ids[1])
        assert os.path.isfile(os.path.join(os.getcwd(), self.files[1]))
        ids_to_check = self.action.doc_manager.get_doc_ids()
        assert self.doc_ids[1] in ids_to_check
        assert self.doc_ids[0] not in ids_to_check

    def test_import_all_untracked(self):
        self.action.api.document_cancel(self.doc_ids[0])
        assert poll_rm(self.action, self.doc_ids[0], cancelled=True)
        self.action.import_action(True, False, False, False, False)
        assert os.path.isfile(os.path.join(os.getcwd(), self.files[0]))
        self.imported.append(self.files[0])
        assert os.path.isfile(os.path.join(os.getcwd(), self.files[1]))
        self.imported.append(self.files[1])
        assert os.path.isfile(os.path.join(os.getcwd(), self.files[2]))
        self.imported.append(self.files[2])

    def test_import_all_tracked(self):
        self.action.api.document_cancel(self.doc_ids[0])
        assert poll_rm(self.action, self.doc_ids[0], cancelled=True)
        self.action.import_action(True, False, False, True, False)
        assert os.path.isfile(os.path.join(os.getcwd(), self.files[0]))
        self.imported.append(self.files[0])
        assert os.path.isfile(os.path.join(os.getcwd(), self.files[1]))
        self.imported.append(self.files[1])
        assert os.path.isfile(os.path.join(os.getcwd(), self.files[2]))
        self.imported.append(self.files[2])
        ids_to_check = self.action.doc_manager.get_doc_ids()
        assert self.doc_ids[1] in ids_to_check
        assert self.doc_ids[2] in ids_to_check
        assert self.doc_ids[0] not in ids_to_check

    def test_import_path(self):
        dirpath = os.path.join(os.getcwd(), "subdir")
        create_directory(dirpath)
        self.action.import_action(False, False, "subdir", False, False, self.doc_ids[0])
        assert os.path.isfile(os.path.join(dirpath, self.files[0]))
        delete_file(os.path.join("subdir", self.files[0]))
        delete_directory(dirpath)

    def test_import_no_cancel(self):
        self.action.api.document_cancel(self.doc_ids[0])
        assert poll_rm(self.action, self.doc_ids[0], cancelled=True)
        self.action.import_action(True, False, False, False, True)
        assert not os.path.isfile(os.path.join(os.getcwd(), self.files[0]))
        assert os.path.isfile(os.path.join(os.getcwd(), self.files[1]))
        self.imported.append(self.files[1])
        assert os.path.isfile(os.path.join(os.getcwd(), self.files[2]))
        self.imported.append(self.files[2])
class TestAdd(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        create_config()

    @classmethod
    def tearDownClass(cls):
        cleanup()

    def setUp(self):
        self.action = AddAction(os.getcwd())
        self.clean_action = CleanAction(os.getcwd())
        self.added_files = []
        self.added_directories = []

    def tearDown(self):
        self.rm_action = RmAction(os.getcwd())
        for fn in self.added_files:
            self.rm_action.rm_action(fn, remote=True, force=True)
        # remove tracked directories, retrying until nested directories are empty
        not_empty = True
        while not_empty:
            not_empty = False
            for d in self.added_directories:
                if os.path.exists(d) and os.path.isdir(d):
                    if len(os.listdir(d)):
                        self.rm_action.rm_action(d, remote=True, force=True)
                        not_empty = True
                    else:
                        self.rm_action.rm_action(d, remote=True, force=True)
                        delete_directory(d)
        self.clean_action.clean_action(False, False, None)
        self.rm_action.close()
        self.clean_action.close()
        self.action.close()

    def test_add_single(self):
        # check that document is added to db
        file_name = 'sample.txt'
        self.added_files.append(file_name)
        create_txt_file(file_name)
        self.action.add_action([file_name])
        doc_id = self.action.doc_manager.get_doc_ids()[0]
        assert self.action.doc_manager.get_doc_by_prop('name', file_name)
        # check that document is added to Lingotek
        assert poll_doc(self.action, doc_id)

    def test_add_multiple(self):
        # test that adding with a pattern gets all expected matches in local db
        files = ['sample.txt', 'sample1.txt', 'sample2.txt']
        self.added_files = files
        for fn in files:
            create_txt_file(fn)
        # self.action.add_action(['sample*.txt'])
        os.system('ltk add sample*.txt')  # let the command line handle parsing the file pattern
        for fn in files:
            doc = self.action.doc_manager.get_doc_by_prop('name', fn)
            assert doc
            # test that adding with a pattern gets all expected matches in Lingotek
            assert poll_doc(self.action, doc['id'])

    '''
    Test that a directory, and the documents inside it, are added to the db
    '''
    def test_add_directory(self):
        # test adding an empty directory
        directory = 'test_add_empty_directory'
        dir_path = os.path.join(os.getcwd(), directory)
        self.added_directories.append(dir_path)
        create_directory(dir_path)
        self.action.add_action([dir_path])
        assert self.action._is_folder_added(dir_path)
        delete_directory(dir_path)
        # test adding a directory with documents inside
        directory = 'test_add_full_directory'
        dir_path = os.path.join(os.getcwd(), directory)
        self.added_directories.append(dir_path)
        create_directory(dir_path)
        files = ['sample.txt', 'sample1.txt', 'sample2.txt']
        self.added_files = files
        for fn in files:
            create_txt_file(fn, dir_path)
        self.action.add_action([dir_path])
        assert self.action._is_folder_added(dir_path)
        for fn in files:
            doc = self.action.doc_manager.get_doc_by_prop('name', fn)
            assert doc
            # test that adding a directory gets all expected matches in Lingotek
            assert poll_doc(self.action, doc['id'])

    def test_add_target_folders(self):
        # create and add files
        directory1 = os.path.join(os.getcwd(), 'test_dir_1')
        self.added_directories.append(directory1)
        create_directory(directory1)
        directory2 = os.path.join(os.getcwd(), 'test_dir_2')
        self.added_directories.append(directory2)
        create_directory(directory2)
        directory3 = os.path.join(os.getcwd(), 'test_dir_1/test_dir_3')
        self.added_directories.append(directory3)
        create_directory(directory3)
        file1 = 'testfile1.txt'  # root to root
        file2 = 'testfile2.txt'  # root to sub (dir1)
        file3 = 'test_dir_1/test_dir_3/testfile3.txt'  # sub to root
        file4 = 'test_dir_2/testfile4.txt'  # sub to sub (dir2 to dir3)
        file5 = 'test_dir_2/testfile5.txt'  # sub to same
        file6 = 'testfile6.txt'  # root to none
        file7 = 'test_dir_1/testfile7.txt'  # sub to none
        self.added_files.append(file1)
        create_txt_file(file1)
        self.added_files.append(file2)
        create_txt_file(file2)
        self.added_files.append(file3)
        create_txt_file(file3)
        self.added_files.append(file4)
        create_txt_file(file4)
        self.added_files.append(file5)
        create_txt_file(file5)
        self.added_files.append(file6)
        create_txt_file(file6)
        self.added_files.append(file7)
        create_txt_file(file7)
        os.system('ltk add ' + file1 + ' -D .')
        os.system('ltk add ' + file2 + ' -D test_dir_1')
        os.system('ltk add ' + file3 + ' -D .')
        os.system('ltk add ' + file4 + ' -D test_dir_1/test_dir_3')
        os.system('ltk add ' + file5 + ' -D test_dir_2')
        os.system('ltk add ' + file6)
        os.system('ltk add ' + file7)
        # check that they were added correctly
        doc1 = self.action.doc_manager.get_doc_by_prop('file_name', file1)
        assert doc1
        assert doc1['download_folder'] == '.'
        doc2 = self.action.doc_manager.get_doc_by_prop('file_name', file2)
        assert doc2
        assert doc2['download_folder'] == 'test_dir_1'
        doc3 = self.action.doc_manager.get_doc_by_prop('file_name', file3)
        assert doc3
        assert doc3['download_folder'] == '.'
        doc4 = self.action.doc_manager.get_doc_by_prop('file_name', file4)
        assert doc4
        assert doc4['download_folder'] == 'test_dir_1/test_dir_3'
        doc5 = self.action.doc_manager.get_doc_by_prop('file_name', file5)
        assert doc5
        assert doc5['download_folder'] == 'test_dir_2'
        doc6 = self.action.doc_manager.get_doc_by_prop('file_name', file6)
        assert doc6
        assert doc6['download_folder'] == ''
        doc7 = self.action.doc_manager.get_doc_by_prop('file_name', file7)
        assert doc7
        assert doc7['download_folder'] == ''

    '''
    Test adding a directory with the -d flag so it only adds the directory and not the files
    '''
    def test_add_directory_only(self):
        # test adding an empty directory
        directory = 'test_add_empty_directory'
        dir_path = os.path.join(os.getcwd(), directory)
        self.added_directories.append(dir_path)
        create_directory(dir_path)
        self.action.add_action([dir_path])
        assert self.action._is_folder_added(dir_path)
        delete_directory(dir_path)
        # test adding a directory with documents inside
        directory = 'test_add_full_directory'
        dir_path = os.path.join(os.getcwd(), directory)
        self.added_directories.append(dir_path)
        create_directory(dir_path)
        files = ['sample.txt', 'sample1.txt', 'sample2.txt']
        self.added_files = files
        for fn in files:
            create_txt_file(fn, dir_path)
        self.action.add_action([dir_path], directory=True)
        assert self.action._is_folder_added(dir_path)
        for fn in files:
            assert not self.action.doc_manager.get_doc_by_prop('name', fn)
        # delete the files here because they are untracked and won't be picked up in teardown
        for fn in files:
            delete_file(fn, dir_path)
        self.added_files.clear()

    def test_add_metadata(self):
        from unittest.mock import patch
        file_name = 'sample.txt'
        self.added_files.append(file_name)
        create_txt_file(file_name)
        with patch('builtins.input', side_effect=[
                'alpha', 'beta',
                '', '', '', '', '', '', '', '', '', '',
                '', '', '', '', '', '', '', '', '', '',
                '', '', '', '', '', '', '', '', '', '',
                '', '', '', '', '', '', '', '', '', '',
                '', '', '', '', ''
        ]):
            self.action.add_action([file_name], metadata=True)
        doc_id = self.action.doc_manager.get_doc_ids()[0]
        assert self.action.doc_manager.get_doc_by_prop('name', file_name)
        # check that document is added to Lingotek
        assert poll_doc(self.action, doc_id)
        # check that the metadata is attached to the document
        properties = self.action.api.get_document(doc_id).json()['properties']
        for field in METADATA_FIELDS:
            if field == METADATA_FIELDS[0]:
                assert field in properties
                assert properties[field] == 'alpha'
            elif field == METADATA_FIELDS[1]:
                assert field in properties
                assert properties[field] == 'beta'
            else:
                assert field not in properties

    def test_add_default_metadata(self):
        file_name = 'sample.txt'
        self.added_files.append(file_name)
        create_txt_file(file_name)
        # set default metadata directly instead of using the config action so the
        # config action can be tested in its own unit test
        self.action.default_metadata[METADATA_FIELDS[2]] = 'delta'
        self.action.default_metadata[METADATA_FIELDS[3]] = 'gamma'
        self.action.add_action([file_name])
        doc_id = self.action.doc_manager.get_doc_ids()[0]
        # check that document is added to Lingotek
        assert poll_doc(self.action, doc_id)
        # check that the metadata is attached to the document
        properties = self.action.api.get_document(doc_id).json()['properties']
        for field in METADATA_FIELDS:
            if field == METADATA_FIELDS[2]:
                assert field in properties
                assert properties[field] == 'delta'
            elif field == METADATA_FIELDS[3]:
                assert field in properties
                assert properties[field] == 'gamma'
            else:
                assert field not in properties
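# A minimal sketch (not part of the suite) of an alternative to the while-loop
# teardown in TestAdd above: removing tracked directories deepest-first by
# sorting on path depth avoids re-scanning until everything is empty.
# rm_action and delete_directory are the same helpers the tests already use;
# the function name is hypothetical.
def remove_directories_deepest_first(rm_action, directories):
    for d in sorted(directories, key=lambda p: p.count(os.sep), reverse=True):
        if os.path.isdir(d):
            rm_action.rm_action(d, remote=True, force=True)  # disassociate from Lingotek
            delete_directory(d)  # then remove it locally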
class TestClone(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        create_config()

    @classmethod
    def tearDownClass(cls):
        cleanup()

    def setUp(self):
        self.action = CloneAction(os.getcwd())
        self.add_action = AddAction(os.getcwd())
        self.added_directories = []
        self.config_action = ConfigAction(os.getcwd())
        self.rm_action = RmAction(os.getcwd())
        self.dir_path1 = os.path.join(os.getcwd(), 'dir1')
        self.added_directories.append(self.dir_path1)
        create_directory(self.dir_path1)
        self.dir_path2 = os.path.join(os.getcwd(), 'dir2')
        self.added_directories.append(self.dir_path2)
        create_directory(self.dir_path2)
        self.dir_path3 = os.path.join(os.getcwd(), 'dir1', 'dir3')
        self.added_directories.append(self.dir_path3)
        create_directory(self.dir_path3)
        self.delete_directory(os.path.join(os.getcwd(), 'ja-JP'))
        self.delete_directory(os.path.join(os.getcwd(), 'es-MX'))
        self.delete_directory(os.path.join(os.getcwd(), 'downloads'))

    def tearDown(self):
        for d in self.added_directories:
            self.delete_directory(d)
        self.rm_action.close()
        self.action.close()

    def test_clone_single_locale(self):
        # currently doesn't work, clone needs to be fixed
        os.system('ltk config -t ja-JP')
        self.add_action.add_action([self.dir_path1])
        self.add_action.add_action([self.dir_path2])
        os.system('ltk clone')
        assert os.path.isdir(os.path.join(os.getcwd(), 'ja-JP'))
        self.added_directories.append(os.path.join(os.getcwd(), 'ja-JP'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'ja-JP', 'dir1'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'ja-JP', 'dir1', 'dir3'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'ja-JP', 'dir2'))

    def test_clone_multi_locale(self):
        # currently doesn't work, clone needs to be fixed
        os.system('ltk config -t ja-JP,es-MX')
        self.add_action.add_action([self.dir_path1])
        self.add_action.add_action([self.dir_path2])
        os.system('ltk clone')
        assert os.path.isdir(os.path.join(os.getcwd(), 'ja-JP'))
        self.added_directories.append(os.path.join(os.getcwd(), 'ja-JP'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'ja-JP', 'dir1'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'ja-JP', 'dir1', 'dir3'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'ja-JP', 'dir2'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'es-MX'))
        self.added_directories.append(os.path.join(os.getcwd(), 'es-MX'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'es-MX', 'dir1'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'es-MX', 'dir1', 'dir3'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'es-MX', 'dir2'))

    def test_clone_root(self):
        # clone needs to be fixed before a test can be written
        assert True

    def test_clone_root_only(self):
        # clone needs to be fixed before a test can be written
        assert True

    def test_clone_single_folder(self):
        # currently doesn't work, clone needs to be fixed
        os.system('ltk config -t ja-JP')
        self.add_action.add_action([self.dir_path2])
        os.system('ltk clone')
        assert os.path.isdir(os.path.join(os.getcwd(), 'ja-JP'))
        self.added_directories.append(os.path.join(os.getcwd(), 'ja-JP'))
        # because only one folder was added, the locale folder is used instead of dir2
        assert not os.path.isdir(os.path.join(os.getcwd(), 'ja-JP', 'dir1'))
        assert not os.path.isdir(os.path.join(os.getcwd(), 'ja-JP', 'dir1', 'dir3'))
        assert not os.path.isdir(os.path.join(os.getcwd(), 'ja-JP', 'dir2'))

    def test_clone_with_download_folder(self):
        # currently doesn't work, clone needs to be fixed
        self.download_path = os.path.join(os.getcwd(), 'downloads')
        self.added_directories.append(self.download_path)
        create_directory(self.download_path)
        os.system('ltk config -d downloads')
        os.system('ltk config -t ja-JP')
        self.add_action.add_action([self.dir_path1])
        self.add_action.add_action([self.dir_path2])
        os.system('ltk clone')
        assert os.path.isdir(os.path.join(os.getcwd(), 'downloads', 'ja-JP'))
        self.added_directories.append(os.path.join(os.getcwd(), 'downloads', 'ja-JP'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'downloads', 'ja-JP', 'dir1'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'downloads', 'ja-JP', 'dir1', 'dir3'))
        assert os.path.isdir(os.path.join(os.getcwd(), 'downloads', 'ja-JP', 'dir2'))

    def delete_directory(self, dir_path):
        if os.path.exists(dir_path) and os.path.isdir(dir_path):
            for subdir in os.listdir(dir_path):
                if dir_path.endswith(os.sep):
                    self.delete_directory(dir_path + subdir)
                else:
                    self.delete_directory(dir_path + os.sep + subdir)
            print("disassociating directory: " + dir_path.replace(os.getcwd() + os.sep, ''))
            self.rm_action.rm_action([dir_path.replace(os.getcwd() + os.sep, '')], remote=True, force=True)
            print("deleting directory: " + dir_path)
            os.rmdir(dir_path)

# See ticket LP-28910
# part of clone fix:
# 126    if os.path.isdir(path):
# 127!       matched_dirs.append('')
# 128        for root, subdirs, files in os.walk(path):
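# Sketch only (an assumption, not the project's implementation): the recursive
# delete_directory above could also be written with os.walk(topdown=False),
# which yields leaf directories first, mirroring the os.walk usage quoted in
# the LP-28910 note. The function name remove_tree is hypothetical.
def remove_tree(rm_action, path):
    for root, subdirs, files in os.walk(path, topdown=False):
        rel = root.replace(os.getcwd() + os.sep, '')
        rm_action.rm_action([rel], remote=True, force=True)  # disassociate the folder
        if not os.listdir(root):
            os.rmdir(root)  # only remove the directory once it is empty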
class TestRequest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        create_config()

    @classmethod
    def tearDownClass(cls):
        cleanup()

    def setUp(self):
        self.clean_action = CleanAction(os.getcwd())
        self.add_action = AddAction(os.getcwd())
        self.rm_action = RmAction(os.getcwd())
        self.clean_action.clean_action(True, False, None)
        self.files = ['sample.txt', 'sample1.txt', 'sample2.txt']
        for fn in self.files:
            create_txt_file(fn)
        self.add_action.add_action(['sample*.txt'], overwrite=True)
        self.doc_ids = self.add_action.doc_manager.get_doc_ids()
        for doc_id in self.doc_ids:
            assert poll_doc(self.add_action, doc_id)
        # need to use a workflow that doesn't have machine translation so we have time to
        # cancel targets before they're completed. This is a system workflow template, so
        # everyone will have access to it no matter who runs these tests
        self.workflow_id = '2b5498e0-f3c7-4c49-9afa-cca4b3345af7'

    def tearDown(self):
        for curr_file in self.files:
            self.rm_action.rm_action(curr_file, remote=True, force=True)
        self.clean_action.clean_action(True, False, None)
        if getattr(self, 'action', None):  # self.action is only set inside the test methods
            self.action.close()

    def _check_locales_set(self, document, locales):
        curr_doc = self.add_action.doc_manager.get_doc_by_prop('name', document)
        if 'locales' not in curr_doc:
            return False
        return all(locale in curr_doc['locales'] for locale in locales)

    def _check_locales_unset(self, document, locales, empty=False):
        curr_doc = self.add_action.doc_manager.get_doc_by_prop('name', document)
        if 'locales' not in curr_doc:
            return empty
        return all(locale not in curr_doc['locales'] for locale in locales)

    def test_request_one_locale_doc(self):
        locales = ['ja_JP']
        self.action = RequestAction(os.getcwd(), self.files[0], None, locales, False, False, None, self.workflow_id)
        self.action.target_action()
        entity = self.action.doc_manager.get_doc_by_prop('name', self.files[0])
        response = self.action.api.document_translation_locale_status(entity['id'], 'ja-JP')
        assert response.status_code == 200
        assert response.json()['properties']['status'].upper() != 'CANCELLED'
        entity = self.action.doc_manager.get_doc_by_prop('name', self.files[1])
        response = self.action.api.document_translation_locale_status(entity['id'], 'ja-JP')
        assert response.status_code == 404
        entity = self.action.doc_manager.get_doc_by_prop('name', self.files[2])
        response = self.action.api.document_translation_locale_status(entity['id'], 'ja-JP')
        assert response.status_code == 404
        assert self._check_locales_set(self.files[0], locales)
        assert self._check_locales_unset(self.files[1], locales, True)
        assert self._check_locales_unset(self.files[2], locales, True)

    def test_request_mult_locale_doc(self):
        locales = ['ja_JP', 'zh_CN', 'es_MX']
        self.action = RequestAction(os.getcwd(), self.files[0], None, locales, False, False, None, self.workflow_id)
        self.action.target_action()
        entity = self.action.doc_manager.get_doc_by_prop('name', self.files[0])
        for locale in locales:
            response = self.action.api.document_translation_locale_status(entity['id'], locale)
            assert response.status_code == 200
            assert response.json()['properties']['status'].upper() != 'CANCELLED'
        entity = self.action.doc_manager.get_doc_by_prop('name', self.files[1])
        for locale in locales:
            response = self.action.api.document_translation_locale_status(entity['id'], locale)
            assert response.status_code == 404
        entity = self.action.doc_manager.get_doc_by_prop('name', self.files[2])
        for locale in locales:
            response = self.action.api.document_translation_locale_status(entity['id'], locale)
            assert response.status_code == 404
        assert self._check_locales_set(self.files[0], locales)
        assert self._check_locales_unset(self.files[1], locales, True)
        assert self._check_locales_unset(self.files[2], locales, True)

    def test_request_one_locale_proj(self):
        locales = ['ja_JP']
        self.action = RequestAction(os.getcwd(), None, None, locales, False, False, None, self.workflow_id)
        self.action.target_action()
        for file_name in self.files:
            entity = self.action.doc_manager.get_doc_by_prop('name', file_name)
            response = self.action.api.document_translation_locale_status(entity['id'], 'ja-JP')
            assert response.status_code == 200
            assert response.json()['properties']['status'].upper() != 'CANCELLED'
        assert self._check_locales_set(self.files[0], locales)
        assert self._check_locales_set(self.files[1], locales)
        assert self._check_locales_set(self.files[2], locales)

    def test_request_mult_locale_proj(self):
        locales = ['ja_JP', 'zh_CN', 'es_MX']
        self.action = RequestAction(os.getcwd(), None, None, locales, False, False, None, self.workflow_id)
        self.action.target_action()
        for file_name in self.files:
            entity = self.action.doc_manager.get_doc_by_prop('name', file_name)
            for locale in locales:
                # check each requested locale, not just ja-JP
                response = self.action.api.document_translation_locale_status(entity['id'], locale)
                assert response.status_code == 200
                assert response.json()['properties']['status'].upper() != 'CANCELLED'
        assert self._check_locales_set(self.files[0], locales)
        assert self._check_locales_set(self.files[1], locales)
        assert self._check_locales_set(self.files[2], locales)

    def test_delete_locale_doc(self):
        locales = ['ja_JP', 'zh_CN', 'es_MX']
        self.action = RequestAction(os.getcwd(), None, None, locales, False, False, None, self.workflow_id)
        self.action.target_action()
        self.action = RequestAction(os.getcwd(), self.files[0], None, ['ja_JP'], False, True, None, self.workflow_id)
        self.action.target_action()
        for file_name in self.files:
            entity = self.action.doc_manager.get_doc_by_prop('name', file_name)
            for locale in locales:
                response = self.action.api.document_translation_locale_status(entity['id'], locale)
                if file_name == self.files[0] and locale == 'ja_JP':
                    assert response.status_code == 404
                else:
                    assert response.status_code == 200
                    assert response.json()['properties']['status'].upper() != 'CANCELLED'
        assert self._check_locales_unset(self.files[0], ['ja_JP'])
        assert self._check_locales_set(self.files[0], ['zh_CN', 'es_MX'])
        assert self._check_locales_set(self.files[1], locales)
        assert self._check_locales_set(self.files[2], locales)

    def test_delete_locale_proj(self):
        locales = ['ja_JP', 'zh_CN', 'es_MX']
        self.action = RequestAction(os.getcwd(), None, None, locales, False, False, None, self.workflow_id)
        self.action.target_action()
        self.action = RequestAction(os.getcwd(), None, None, ['ja_JP'], False, True, None, self.workflow_id)
        self.action.target_action()
        for file_name in self.files:
            entity = self.action.doc_manager.get_doc_by_prop('name', file_name)
            for locale in locales:
                response = self.action.api.document_translation_locale_status(entity['id'], locale)
                if locale == 'ja_JP':
                    assert response.status_code == 404
                else:
                    assert response.status_code == 200
                    assert response.json()['properties']['status'].upper() != 'CANCELLED'
        assert self._check_locales_unset(self.files[0], ['ja_JP'])
        assert self._check_locales_unset(self.files[1], ['ja_JP'])
        assert self._check_locales_unset(self.files[2], ['ja_JP'])
        locales.remove('ja_JP')
        assert self._check_locales_set(self.files[0], locales)
        assert self._check_locales_set(self.files[1], locales)
        assert self._check_locales_set(self.files[2], locales)

    def test_cancel_locale_doc(self):
        locales = ['ja_JP', 'zh_CN', 'es_MX']
        self.action = RequestAction(os.getcwd(), None, None, locales, False, False, None, self.workflow_id)
        self.action.target_action()
        self.action = RequestAction(os.getcwd(), self.files[0], None, ['ja_JP'], True, False, None, self.workflow_id)
        self.action.target_action()
        for file_name in self.files:
            entity = self.action.doc_manager.get_doc_by_prop('name', file_name)
            for locale in locales:
                response = self.action.api.document_translation_locale_status(entity['id'], locale)
                if file_name == self.files[0] and locale == 'ja_JP':
                    assert response.status_code == 200
                    assert response.json()['properties']['status'].upper() == 'CANCELLED'
                else:
                    assert response.status_code == 200
                    assert response.json()['properties']['status'].upper() != 'CANCELLED'
        assert self._check_locales_unset(self.files[0], ['ja_JP'])
        assert self._check_locales_set(self.files[0], ['zh_CN', 'es_MX'])
        assert self._check_locales_set(self.files[1], locales)
        assert self._check_locales_set(self.files[2], locales)

    def test_cancel_locale_proj(self):
        locales = ['ja_JP', 'zh_CN', 'es_MX']
        self.action = RequestAction(os.getcwd(), None, None, locales, False, False, None, self.workflow_id)
        self.action.target_action()
        self.action = RequestAction(os.getcwd(), None, None, ['ja_JP'], True, False, None, self.workflow_id)
        self.action.target_action()
        for file_name in self.files:
            entity = self.action.doc_manager.get_doc_by_prop('name', file_name)
            for locale in locales:
                response = self.action.api.document_translation_locale_status(entity['id'], locale)
                if locale == 'ja_JP':
                    assert response.status_code == 200
                    assert response.json()['properties']['status'].upper() == 'CANCELLED'
                else:
                    assert response.status_code == 200
                    assert response.json()['properties']['status'].upper() != 'CANCELLED'
        assert self._check_locales_unset(self.files[0], ['ja_JP'])
        assert self._check_locales_unset(self.files[1], ['ja_JP'])
        assert self._check_locales_unset(self.files[2], ['ja_JP'])
        locales.remove('ja_JP')
        assert self._check_locales_set(self.files[0], locales)
        assert self._check_locales_set(self.files[1], locales)
        assert self._check_locales_set(self.files[2], locales)
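# Hypothetical refactor (not in the suite): the request tests above repeat the
# same fetch-document / check-status pattern; on TestRequest it could be
# collapsed into one helper like this. expect_cancelled=None skips the
# CANCELLED check, mirroring the assertions written out long-hand above.
def _assert_locale_status(self, file_name, locale, expect_code, expect_cancelled=None):
    entity = self.action.doc_manager.get_doc_by_prop('name', file_name)
    response = self.action.api.document_translation_locale_status(entity['id'], locale)
    assert response.status_code == expect_code
    if expect_cancelled is not None:
        cancelled = response.json()['properties']['status'].upper() == 'CANCELLED'
        assert cancelled == expect_cancelled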
class TestReference(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        create_config()

    @classmethod
    def tearDownClass(cls):
        cleanup()

    def setUp(self):
        self.action = ReferenceAction(os.getcwd())
        self.add_action = AddAction(os.getcwd())
        self.clean_action = CleanAction(os.getcwd())
        self.filename = 'sample.txt'
        create_txt_file(self.filename)
        self.references = []
        self.add_action.add_action([self.filename])
        self.doc_id = self.add_action.doc_manager.get_doc_by_prop('name', self.filename)['id']
        poll_doc(self.action, self.doc_id)
        self.references.append('reference1.txt')
        self.references.append('reference2.txt')
        file_path = os.path.join(os.getcwd(), self.references[0])
        print("create_ref_file")
        with open(file_path, 'w') as ref_file:
            ref_file.write('This is a sample reference file number one.')
        file_path = os.path.join(os.getcwd(), self.references[1])
        print("create_ref_file")
        with open(file_path, 'w') as ref_file:
            ref_file.write('This is a sample reference file number two.')

    def tearDown(self):
        self.rm_action = RmAction(os.getcwd())
        self.rm_action.rm_action(self.filename, remote=True, force=True)
        for fn in self.references:
            delete_file(fn)
        self.clean_action.clean_action(False, False, None)
        self.rm_action.close()
        self.clean_action.close()
        self.action.close()

    def test_add_reference(self):
        with patch('builtins.input', side_effect=[
                self.references[0], '', '', 'Y',
                self.references[1], 'Ref 2', 'The second reference', 'N'
        ]):
            self.action.reference_add_action(self.filename, False)
        # use API instead of the list function so it can be tested separately
        response = self.action.api.document_list_reference(self.doc_id)
        assert response.json()['properties']['size'] == 2
        materials = response.json()['entities']
        # order of material in the API response is not guaranteed, so check both orderings
        if materials[0]['properties']['name'] == 'reference1.txt' and materials[1]['properties']['name'] == 'Ref 2':
            assert 'description' not in materials[0]['properties']
            assert 'description' in materials[1]['properties']
            assert materials[1]['properties']['description'] == 'The second reference'
        elif materials[1]['properties']['name'] == 'reference1.txt' and materials[0]['properties']['name'] == 'Ref 2':
            assert 'description' not in materials[1]['properties']
            assert 'description' in materials[0]['properties']
            assert materials[0]['properties']['description'] == 'The second reference'
        else:
            assert False

    def test_get_all_references(self):
        # use API instead of the add function so it can be tested separately
        referenceA = {'file': self.references[0]}
        referenceB = {
            'file': self.references[1],
            'name': 'Ref 2',
            'description': 'The second reference'
        }
        self.action.api.document_add_reference(self.doc_id, referenceA)
        self.action.api.document_add_reference(self.doc_id, referenceB)
        pathA = self.references[0]
        pathB = self.references[1]
        delete_file(pathA)
        self.references.remove(pathA)
        delete_file(pathB)
        self.references.remove(pathB)
        assert not os.path.isfile(pathA)
        assert not os.path.isfile(pathB)
        self.action.reference_download_action(self.filename, False, True, False)
        assert os.path.isfile(pathA)
        self.references.append(pathA)
        assert os.path.isfile('Ref 2')
        self.references.append('Ref 2')
        with open(pathA, 'r') as ref_file:
            ref_contents = ref_file.read()
            assert 'This is a sample reference file number one' in ref_contents
        with open('Ref 2', 'r') as ref_file:
            ref_contents = ref_file.read()
            assert 'This is a sample reference file number two' in ref_contents

    def test_list_references(self):
        # use API instead of the add function so it can be tested separately
        referenceA = {'file': self.references[0]}
        referenceB = {
            'file': self.references[1],
            'name': 'Ref 2',
            'description': 'The second reference'
        }
        self.action.api.document_add_reference(self.doc_id, referenceA)
        self.action.api.document_add_reference(self.doc_id, referenceB)
        try:
            from io import StringIO
            import re
            out = StringIO()
            sys.stdout = out
            self.action.reference_list_action(self.filename, False)
            info = out.getvalue()
            assert re.search(
                r"reference1.txt\s*[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\s*\n",
                info)
            assert re.search(
                r"Ref 2\s*[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\s*The second reference",
                info)
        finally:
            sys.stdout = sys.__stdout__

    def test_remove_reference(self):
        # use API instead of the add function so it can be tested separately
        referenceA = {'file': self.references[0]}
        referenceB = {
            'file': self.references[1],
            'name': 'Ref 2',
            'description': 'The second reference'
        }
        self.action.api.document_add_reference(self.doc_id, referenceA)
        self.action.api.document_add_reference(self.doc_id, referenceB)
        # use API instead of the list function so it can be tested separately;
        # verify that the references exist before deleting them
        response = self.action.api.document_list_reference(self.doc_id)
        assert response.json()['properties']['size'] == 2
        self.action.reference_remove_action(self.filename, False, True)
        response = self.action.api.document_list_reference(self.doc_id)
        assert response.json()['properties']['size'] == 0
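# Hypothetical convenience (not part of the suite): the try/finally stdout
# capture repeated in TestReference and TestList could be expressed with
# contextlib.redirect_stdout from the standard library. The function name
# captured_output is made up for illustration.
from contextlib import redirect_stdout
from io import StringIO as _StringIO

def captured_output(func, *args, **kwargs):
    out = _StringIO()
    with redirect_stdout(out):
        func(*args, **kwargs)
    return out.getvalue()

# e.g. inside a test method: info = captured_output(self.action.list_ids, False)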
class TestList(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        create_config()

    @classmethod
    def tearDownClass(cls):
        cleanup()

    def setUp(self):
        self.action = ListAction(os.getcwd())
        self.clean_action = CleanAction(os.getcwd())
        self.add_action = AddAction(os.getcwd())
        self.clean_action.clean_action(True, False, None)
        self.rm_action = RmAction(os.getcwd())

    def tearDown(self):
        self.clean_action.clean_action(True, False, None)
        self.action.close()

    def test_list_doc(self):
        files = ['sample.txt', 'sample1.txt', 'sample2.txt']
        file_paths = []
        for fn in files:
            file_paths.append(create_txt_file(fn))
        self.add_action.add_action(['sample*.txt'], overwrite=True)
        doc_ids = self.action.doc_manager.get_doc_ids()
        for doc_id in doc_ids:
            assert poll_doc(self.action, doc_id)
        try:
            out = StringIO()
            sys.stdout = out
            self.action.list_ids(False)
            info = out.getvalue()
            for doc_id in doc_ids:
                assert doc_id in info
        finally:
            sys.stdout = sys.__stdout__
        for fn in files:
            self.rm_action.rm_action(fn, remote=True, force=True)
        self.clean_action.clean_action(False, False, None)

    def test_list_docs_none(self):
        try:
            out = StringIO()
            sys.stdout = out
            self.action.list_ids(False)
            info = out.getvalue()
            assert 'No local documents' in info
        finally:
            sys.stdout = sys.__stdout__

    def test_list_workflow(self):
        try:
            out = StringIO()
            sys.stdout = out
            self.action.list_workflows()
            info = out.getvalue()
            assert all(header in info for header in ['Workflow Name', 'ID'])
            assert 'c675bd20-0688-11e2-892e-0800200c9a66' in info
            assert 'Machine Translation' in info
        finally:
            sys.stdout = sys.__stdout__

    def test_list_locale(self):
        try:
            out = StringIO()
            sys.stdout = out
            self.action.list_locales()
            info = out.getvalue()
            import re
            # use regex because the display uses tabulate, which pads the columns
            # with an indeterminate amount of whitespace
            assert re.search(r'ar-AE\s*\(Arabic, United Arab Emirates\)', info)
            assert re.search(r'zh-TW\s*\(Chinese, Taiwan\)', info)
        finally:
            sys.stdout = sys.__stdout__

    def test_list_format(self):
        try:
            out = StringIO()
            sys.stdout = out
            self.action.list_formats()
            info = out.getvalue()
            assert info.startswith('Lingotek Cloud accepts content')
            assert 'CSV' in info
            assert 'XML_OKAPI' in info
        finally:
            sys.stdout = sys.__stdout__

    def test_list_filters_default(self):
        try:
            out = StringIO()
            sys.stdout = out
            self.action.list_filters()
            info = out.getvalue()
            assert all(header in info for header in ['ID', 'Created', 'Title'])
            assert '*****@*****.**' in info
            assert '0e79f34d-f27b-4a0c-880e-cd9181a5d265' in info
        finally:
            sys.stdout = sys.__stdout__

    def test_list_filters_custom(self):
        # create custom filters
        # list
        # check
        pass

    def test_list_doc_remote(self):
        files = ['sample.txt', 'sample1.txt', 'sample2.txt']
        file_paths = []
        for fn in files:
            file_paths.append(create_txt_file(fn))
        self.add_action.add_action(['sample*.txt'], overwrite=True)
        doc_ids = self.action.doc_manager.get_doc_ids()
        for doc_id in doc_ids:
            assert poll_doc(self.action, doc_id)
        try:
            out = StringIO()
            sys.stdout = out
            self.action.list_remote()
            info = out.getvalue()
            for doc_id in doc_ids:
                assert doc_id in info
        finally:
            sys.stdout = sys.__stdout__
        for fn in files:
            self.rm_action.rm_action(fn, remote=True, force=True)
        self.clean_action.clean_action(False, False, None)

    # can't test a remote list of none because there is no guarantee that the project
    # used in the config file for the test is empty, and we don't want to clear the
    # project remotely in case it holds content unrelated to testing that needs to stay
    #def test_list_docs_remote_none(self):
    #    try:
    #        out = StringIO()
    #        sys.stdout = out
    #        self.action.list_remote()
    #        info = out.getvalue()
    #        assert 'No documents to report' in info
    #    finally:
    #        sys.stdout = sys.__stdout__

    def test_target_download_folder(self):
        files = ['sample.txt', 'sample1.txt', 'sample2.txt']
        file_paths = []
        for fn in files:
            file_paths.append(create_txt_file(fn))
        directory = os.path.join(os.getcwd(), 'test_dir')
        create_directory(directory)
        self.add_action.add_action(['sample.txt'], overwrite=True, download_folder='test_dir')
        self.add_action.add_action(['sample1.txt'], overwrite=True, download_folder='.')
        self.add_action.add_action(['sample2.txt'], overwrite=True, download_folder='')
        doc_ids = self.action.doc_manager.get_doc_ids()
        for doc_id in doc_ids:
            assert poll_doc(self.action, doc_id)
        try:
            out = StringIO()
            sys.stdout = out
            self.action.list_action(hide_docs=False, title=False, show_dests=True)
            info = out.getvalue()
            import re
            match1 = re.search(
                r'\nsample.txt\s*[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\s*test_dir\s*\n',
                info)
            assert match1
            match2 = re.search(
                r'\nsample1.txt\s*[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\s*\.\s*\n',
                info)
            assert match2
            match3 = re.search(
                r'\nsample2.txt\s*[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\s*\n',
                info)
            assert match3
        finally:
            sys.stdout = sys.__stdout__
        for fn in files:
            self.rm_action.rm_action(fn, remote=True, force=True)
        self.clean_action.clean_action(False, False, None)
        delete_directory(directory)

    def test_list_cancelled(self):
        files = ['sample.txt', 'sample1.txt', 'sample2.txt']
        file_paths = []
        for fn in files:
            file_paths.append(create_txt_file(fn))
        self.add_action.add_action(['sample*.txt'], overwrite=True)
        doc_ids = self.action.doc_manager.get_doc_ids()
        for doc_id in doc_ids:
            assert poll_doc(self.action, doc_id)
        self.action.api.document_cancel(doc_ids[0])
        assert poll_rm(self.action, doc_ids[0], cancelled=True)
        try:
            out = StringIO()
            sys.stdout = out
            self.action.list_ids(False)
            info = out.getvalue()
            for doc_id in doc_ids:
                assert doc_id in info
        finally:
            sys.stdout = sys.__stdout__
        for fn in files:
            self.rm_action.rm_action(fn, remote=True, force=True)
        self.clean_action.clean_action(False, False, None)
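# Assumption / sketch only: the UUID pattern repeated in the regex assertions in
# TestReference and TestList above could live in one module-level constant and
# be composed per check rather than spelled out each time.
import re

UUID_RE = r'[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}'

# example: re.search(r'\nsample\.txt\s*' + UUID_RE + r'\s*test_dir\s*\n', info)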