def publish_merged_file(self, filename):
    """Concatenate `filename` from every source directory into one file
    under the publish catapult directory.

    The relative published path ('catapult/<filename>') is recorded in
    self.headers.  Source directories that lack the file are skipped.
    """
    merged_path = os.path.join(self.publish_catapult_directory, filename)
    # 'with' guarantees the output handle is closed even if reading a
    # component fails -- the original leaked the open file object.
    with open(merged_path, 'w') as merged_file:
        self.headers.append(os.path.join('catapult', filename))
        for source_directory in self.source_directories:
            component_file = os.path.join(source_directory, filename)
            if not os.path.exists(component_file):
                continue
            # Close each component deterministically as well.
            with FileInput(component_file) as component_lines:
                for line in component_lines:
                    merged_file.write(line)
def test_readline_binary_mode(self):
    # Binary mode performs no newline translation: \r and \r\n pass
    # through untouched, and EOF yields b'' repeatedly.
    with open(TESTFN, 'wb') as f:
        f.write(b'A\nB\r\nC\rD')
    self.addCleanup(safe_unlink, TESTFN)

    with FileInput(files=TESTFN, mode='rb') as fi:
        for expected in (b'A\n', b'B\r\n', b'C\rD', b'', b''):
            self.assertEqual(fi.readline(), expected)
def test_context_manager(self):
    """FileInput works as a context manager and closes on exit."""
    # Initialize before the try so the finally clause cannot raise a
    # NameError if the first writeTmp() call fails.
    t1 = t2 = None
    try:
        t1 = writeTmp(1, ["A\nB\nC"])
        t2 = writeTmp(2, ["D\nE\nF"])
        with FileInput(files=(t1, t2)) as fi:
            lines = list(fi)
        self.assertEqual(lines, ["A\n", "B\n", "C", "D\n", "E\n", "F"])
        self.assertEqual(fi.filelineno(), 3)
        self.assertEqual(fi.lineno(), 6)
        # Exiting the with-block must have released the file list.
        self.assertEqual(fi._files, ())
    finally:
        remove_tempfiles(t1, t2)
def test_files_that_dont_end_with_newline(self):
    """A file whose last line lacks a newline still yields that line."""
    t1 = t2 = None
    try:
        t1 = writeTmp(1, ["A\nB\nC"])
        t2 = writeTmp(2, ["D\nE\nF"])
        # Use the context manager so the FileInput is closed instead of
        # being leaked when the test finishes.
        with FileInput(files=(t1, t2)) as fi:
            lines = list(fi)
            self.assertEqual(lines, ["A\n", "B\n", "C", "D\n", "E\n", "F"])
            self.assertEqual(fi.filelineno(), 3)
            self.assertEqual(fi.lineno(), 6)
    finally:
        remove_tempfiles(t1, t2)
def writeFirstLine(string: str, filePath: str):
    """Make `string` the first line of the file at `filePath`.

    The file is created if missing.  An empty file simply receives the
    line; otherwise the existing first line is replaced in place.
    """
    createFilesIfNotExist(filePath)

    if os.path.getsize(filePath) == 0:  # fileIsEmpty
        # Nothing to preserve -- write the single line and stop.
        with open(filePath, 'w') as f:  # writing empty file
            f.write(string + '\n')
        return

    with FileInput(filePath, inplace=True) as f:
        for line in f:
            if f.isfirstline():
                print(string, end='\n')
            else:
                print(line, end='')
def UpdateControlDataPathInPluginConfFile(workspaceID):
    """Rewrite the workspace's plugin conf file, replacing the
    %CONF_DIR_WS% placeholder with the agent conf directory path.

    Returns False when the conf file does not exist, True otherwise.
    """
    # Check for presence of the plugin conf file
    confFilePath = os.path.join(PREFIX_ETC, workspaceID,
                                RPATH_PLUGIN_CONF, FNAME_PLUGIN_CONF)
    if not os.path.exists(confFilePath):
        return False

    controlDataDirPath = os.path.join(PREFIX_ETC, workspaceID, RPATH_AGENT_CONF)

    # Update the plugin conf file control data path variable.  The
    # context manager guarantees the in-place edit is finalized (and
    # stdout restored) even if an exception interrupts the loop.
    with FileInput(confFilePath, inplace=True) as conf:
        for line in conf:
            if "%CONF_DIR_WS%" in line:
                line = line.replace("%CONF_DIR_WS%", controlDataDirPath)
            print(line, end='')
    return True
def synthesize(self, session: ISynthesisSession):
    """Run the default CDK synthesis, then post-process the emitted
    template files: substitute %PLACEHOLDER% context values in place and,
    when asset paths are supplied, apply solution-specific template
    customization.

    NOTE(review): the early `return` below exits with None rather than
    `result` -- confirm callers do not rely on the return value in that
    path.
    """
    # when called with `cdk deploy` this outputs to cdk.out
    # when called from python directly, this outputs to a temporary directory
    result = DefaultStackSynthesizer.synthesize(self, session)

    # Optional CDK context values; both must be present for the
    # customization phase at the bottom to run.
    asset_path_regional = self._stack.node.try_get_context(
        "SOLUTIONS_ASSETS_REGIONAL")
    asset_path_global = self._stack.node.try_get_context(
        "SOLUTIONS_ASSETS_GLOBAL")

    logger.info(
        f"solutions parameter substitution in {session.assembly.outdir} started"
    )
    templates = Path(session.assembly.outdir).glob("*.template.json")
    for template in templates:
        # inplace=True redirects print() back into the template file.
        with FileInput(template, inplace=True) as template_lines:
            for line in template_lines:
                # handle all template subsitutions in the line
                for match in SolutionStackSubstitions.substitution_re.findall(
                        line):
                    placeholder = match.replace("%", "")
                    replacement = self._stack.node.try_get_context(
                        placeholder)
                    if not replacement:
                        raise ValueError(
                            f"Please provide a parameter substitution for {placeholder} via environment variable or CDK context"
                        )
                    line = line.replace(match, replacement)
                # print the (now substituted) line in the context of template_lines
                print(line, end="")
    logger.info("solutions parameter substitution completed")

    # do not perform solution resource/ template cleanup if asset paths not passed
    if not asset_path_global or not asset_path_regional:
        return

    logger.info(
        f"solutions template customization in {session.assembly.outdir} started"
    )
    for template in self._templates(session):
        template.patch_lambda()
        template.delete_bootstrap_parameters()
        template.delete_cdk_helpers()
        template.save(
            asset_path_global=asset_path_global,
            asset_path_regional=asset_path_regional,
        )
    logger.info("solutions template customization completed")
    return result
def save_value(key, value, filepath):
    """writes the given value to the given key in the given file

    Every line containing `key` as a substring is rewritten in place as
    "<key> <value>".  Returns True on success, False on any error.
    """
    try:
        with FileInput(filepath, inplace=True) as f:
            for line in f:
                line = line.rstrip()
                if key in line:
                    # Replace the whole line.  The original used
                    # line.replace(line, ...), which is just an
                    # obfuscated assignment.
                    line = key + ' ' + value
                print(line)
        return True
    except Exception as e:
        # Deliberate best-effort behavior: report and signal failure
        # instead of propagating the exception.
        print(e)
        return False
def test_fileno(self):
    """fileno() is -1 whenever no underlying file is open."""
    # Defined up front so the finally clause cannot hit a NameError if
    # the first writeTmp() call raises.
    t1 = t2 = None
    try:
        t1 = writeTmp(1, ["A\nB"])
        t2 = writeTmp(2, ["C\nD"])
        fi = FileInput(files=(t1, t2))
        self.assertEqual(fi.fileno(), -1)
        # next(fi) works on Python 2.6+ and 3; fi.next() is 2.x-only.
        line = next(fi)
        self.assertNotEqual(fi.fileno(), -1)
        fi.nextfile()
        self.assertEqual(fi.fileno(), -1)
        line = list(fi)
        self.assertEqual(fi.fileno(), -1)
    finally:
        remove_tempfiles(t1, t2)
def _bump_version(self, release=False, zstream=False):
    """Bump the faf_version recorded in configure.ac and return it.

    The parent implementation supplies the new version string; any
    '-suffix' (e.g. '-release') is stripped before writing.
    """
    version = super()._bump_version().split('-', maxsplit=1)[0]
    pattern = re.compile(r'(?<=^m4_define\(\[faf_version\], \[)'
                         r'.*'
                         r'(?=\]\))')
    # Renamed from 'input' to avoid shadowing the builtin.
    with FileInput(files='configure.ac', inplace=True) as conf:
        for line in conf:
            if pattern.search(line):
                line = pattern.sub(version, line)
            print(line, end='')
    return version
def update_documentation(self) -> None:
    """Update the version number in the wiki version-overview page and
    stage the modified files."""
    log_info("Changing the " + self.__config.wiki_version_overview_page +
             " file, updating the version number...")
    version_decl = self.__config.cobigenwiki_title_name
    new_version_decl = version_decl + " v" + self.__config.release_version
    modified_file = os.path.join(self.__config.root_path, "documentation",
                                 self.__config.wiki_version_overview_page)
    with FileInput(modified_file, inplace=True) as file:
        for line in file:
            # re.escape guards against regex metacharacters in the title;
            # \d+ accepts multi-digit version components (e.g. v2.10.0),
            # which the old [0-9] pattern silently missed.
            line = re.sub(re.escape(version_decl) + r'\s+v\d+\.\d+\.\d+',
                          new_version_decl, line)
            sys.stdout.write(line)
    self.add_modified_files()
def copy_file_with_replacement(src, dest, random_suffix, another_elm_version):
    """Copy `src` to `dest`, suffixing 'Platform.Unstable.' module names
    with `random_suffix` and substituting the ANOTHER-ELM-VERSION
    placeholder with `another_elm_version`."""
    replacements = (
        ("Platform.Unstable.", f"Platform.Unstable{random_suffix}."),
        ('ANOTHER-ELM-VERSION', another_elm_version),
    )
    with dest.open(mode='w') as dest_file, FileInput(src, inplace=False) as reader:
        for line in reader:
            for old, new in replacements:
                line = line.replace(old, new)
            print(line, end='', file=dest_file)
def deleteDuplicate(filename='DBA.csv', key_column=2):
    """Remove rows whose `key_column` value was already seen, editing
    `filename` in place.

    Prints the surviving row count and the number of removed duplicates.
    The parameters default to the historical hard-coded values
    ('DBA.csv', column 2), so existing callers are unaffected.
    """
    seen = set()
    dupeCount = 0
    counter = 0
    with FileInput(filename, inplace=True) as f:
        # tee lets csv parse each line while the twin iterator re-emits
        # the original, untouched text for the rows we keep.
        f, f_orig = tee(f)
        for row, line in zip(csv.reader(f), f_orig):
            if row[key_column] in seen:
                dupeCount += 1
                continue
            seen.add(row[key_column])
            counter += 1
            print(line, end='')
    print(counter)
    print("Removed {} Duplicates".format(dupeCount))
def test_readline_buffering(self):
    # Each readline() should pull at most one line through the hook;
    # pairs below are (expected readline() result, lines the hook read).
    src = LineReader()
    with FileInput(files=['line1\nline2', 'line3\n'],
                   openhook=src.openhook) as fi:
        self.assertEqual(src.linesread, [])
        expectations = [
            ('line1\n', ['line1\n']),
            ('line2', ['line2']),
            ('line3\n', ['', 'line3\n']),
            ('', ['']),
            ('', []),
        ]
        for expected_line, expected_read in expectations:
            self.assertEqual(fi.readline(), expected_line)
            self.assertEqual(src.linesread, expected_read)
def test_iteration_buffering(self):
    # Iteration should buffer exactly like readline(): one line pulled
    # through the hook per next() call.
    src = LineReader()
    with FileInput(files=['line1\nline2', 'line3\n'],
                   openhook=src.openhook) as fi:
        self.assertEqual(src.linesread, [])
        for expected_line, expected_read in [
                ('line1\n', ['line1\n']),
                ('line2', ['line2']),
                ('line3\n', ['', 'line3\n'])]:
            self.assertEqual(next(fi), expected_line)
            self.assertEqual(src.linesread, expected_read)
        self.assertRaises(StopIteration, next, fi)
        self.assertEqual(src.linesread, [''])
        self.assertRaises(StopIteration, next, fi)
        self.assertEqual(src.linesread, [])
def test_readline_os_chmod_raises_OSError(self):
    """Tests invoking FileInput.readline() when os.chmod() raises OSError.
    This exception should be silently discarded."""

    os_chmod_orig = os.chmod
    os_chmod_replacement = UnconditionallyRaise(OSError)
    try:
        t = writeTmp(1, ['\n'])
        self.addCleanup(remove_tempfiles, t)
        with FileInput(files=[t], inplace=True) as fi:
            os.chmod = os_chmod_replacement
            fi.readline()
    finally:
        os.chmod = os_chmod_orig

    # Message fixed: this test patches os.chmod(), not os.fstat().
    self.assertTrue(os_chmod_replacement.invoked,
                    'os.chmod() was not invoked')
def update_submodule(self, submodule_path: str) -> None:
    """Update the wiki submodule's version-overview page to the new
    release version, then commit and push the change."""
    sm_repo = GitRepo(self.__config, submodule_path)
    sm_repo.checkout('master')
    sm_repo.pull()

    log_info("Changing the " + self.__config.wiki_version_overview_page +
             " file, updating the version number...")
    version_decl = self.__config.cobigenwiki_title_name
    new_version_decl = version_decl + " v" + self.__config.release_version
    page_path = os.path.join(self.__config.wiki_submodule_path,
                             self.__config.wiki_version_overview_page)
    with FileInput(page_path, inplace=True) as file:
        for line in file:
            # re.escape guards against regex metacharacters in the title;
            # \d+ accepts multi-digit version components (e.g. v2.10.0),
            # which the old [0-9] pattern silently missed.
            line = re.sub(re.escape(version_decl) + r'\s+v\d+\.\d+\.\d+',
                          new_version_decl, line)
            sys.stdout.write(line)

    sm_repo.add([self.__config.wiki_version_overview_page], False)
    sm_repo.commit("update wiki docs")
    sm_repo.push()
def test_nextfile_oserror_deleting_backup(self):
    """Tests invoking FileInput.nextfile() when the attempt to delete the
    backup file would raise OSError. This error is expected to be silently
    ignored"""

    os_unlink_orig = os.unlink
    os_unlink_replacement = UnconditionallyRaise(OSError)
    try:
        t = writeTmp(1, ['\n'])
        self.addCleanup(remove_tempfiles, t)
        with FileInput(files=[t], inplace=True) as fi:
            next(fi)  # force the file (and its backup) to be opened
            os.unlink = os_unlink_replacement
            fi.nextfile()
    finally:
        os.unlink = os_unlink_orig

    # The patched os.unlink must actually have been called.
    self.assertTrue(os_unlink_replacement.invoked,
                    'os.unlink() was not invoked')
def test_readline(self):
    with open(TESTFN, 'wb') as f:
        f.write(b'A\nB\r\nC\r')
        # Fill TextIOWrapper buffer.
        f.write(b'123456789\n' * 1000)
        # Issue #20501: readline() shouldn't read whole file.
        f.write(b'\x80')
    self.addCleanup(safe_unlink, TESTFN)

    with FileInput(files=TESTFN, openhook=hook_encoded('ascii'),
                   bufsize=8) as fi:
        # Universal newlines: \r and \r\n both come back as \n.
        for expected in ('A\n', 'B\n', 'C\n'):
            self.assertEqual(fi.readline(), expected)
        with self.assertRaises(UnicodeDecodeError):
            # Read to the end of file.
            list(fi)
def test_readline(self):
    with open(TESTFN, 'wb') as f:
        f.write(b'A\nB\r\nC\r')
        f.write(b'123456789\n' * 1000)
        f.write(b'\x80')  # not decodable as ascii
    self.addCleanup(safe_unlink, TESTFN)

    with FileInput(files=TESTFN, openhook=hook_encoded('ascii')) as fi:
        try:
            # The first three lines must decode without touching \x80.
            for expected in ('A\n', 'B\n', 'C\n'):
                self.assertEqual(fi.readline(), expected)
        except UnicodeDecodeError:
            self.fail('Read to end of file')
        with self.assertRaises(UnicodeDecodeError):
            list(fi)
        # After the decode error, further reads report EOF.
        self.assertEqual(fi.readline(), '')
        self.assertEqual(fi.readline(), '')
def add_named_folder(m):
    """Append the folder currently on the clipboard to the named-folders
    file, keyed by the first word of the folder's name."""
    x = copy_path_to_clipboard(None)
    path = clip.get()
    # Stop if it isn't a folder
    if path[-1] != '/':
        return
    filename = path.split('/')[-2]
    keyword = filename.split(' ')[0]
    with FileInput(files=[folders_filename], inplace=True) as f:
        for line in f:
            line = line.rstrip()
            # `line in '{}'` is a substring test: it matches '', '{',
            # '}' and '{}', and it short-circuits before line[-1] can
            # fail on an empty line.  Fixed: compare the last character
            # with == instead of 'is' (identity on string literals is an
            # implementation detail, not a guarantee).
            if line in '{}' or line[-1] == ',':
                print(line)
            else:
                print(line + ',')
        print(' "' + keyword + '": "' + path + '"')
def test_readline_os_chmod_raises_OSError(self):
    """Tests invoking FileInput.readline() when os.chmod() raises OSError.
    This exception should be silently discarded."""

    os_chmod_orig = os.chmod
    os_chmod_replacement = UnconditionallyRaise(OSError)
    try:
        t = self.writeTmp("\n")
        with FileInput(files=[t], inplace=True) as fi:
            os.chmod = os_chmod_replacement
            fi.readline()
    finally:
        os.chmod = os_chmod_orig

    # sanity check to make sure that our test scenario was actually hit
    # (message fixed: this test patches os.chmod(), not os.fstat())
    self.assertTrue(os_chmod_replacement.invoked,
                    "os.chmod() was not invoked")
def setup_apn(self, apn):
    """
    Setup the APN for internet connection by editing the
    '/etc/network/interfaces' file.

    :param apn: the internet APN for the WWAN interface
    """
    try:
        # Rewrite the interfaces file in place, swapping any wwan_apn
        # line for one carrying the requested APN.
        with FileInput('/etc/network/interfaces', inplace=True) as interfaces:
            for line in interfaces:
                if 'wwan_apn' in line:
                    line = ' wwan_apn "%s"\n' % (apn)
                print(line, end='')
    except FileNotFoundError as err:
        log.error('Could not setup APN %s for "%s": %s'
                  % (apn, self.iface, err))
        return 1

    self.apn = apn
    log.info('Setup APN %s for "%s"' % (apn, self.iface))
    return 0
def perturbate(self, obfuscation: Obfuscation):
    """Edit every smali file of the obfuscation target in place,
    appending a block of nop instructions after each valid opcode.

    The count of patched opcodes is logged at debug level.
    """
    pattern = re.compile(r"\s+(?P<op_code>\S+)")
    valid_opcodes = self.get_valid_nops()
    patched_opcodes = 0
    with FileInput(obfuscation.smali_files, inplace=True) as file:
        for line in file:
            # Emit the original line first; nops are appended after it.
            print(line, end='')
            match = pattern.match(line)
            if match:
                op_code = match.group('op_code').strip()
                if op_code in valid_opcodes:
                    patched_opcodes += 1
                    # Ten nops per patched opcode.
                    for _ in range(10):
                        print("\tnop")
    self.logger.debug(
        '{} op_codes patched by NopsBombing'.format(patched_opcodes))
def update(adder, adder_path, report_path):
    """Rewrite ./pscripts/auto_power.tcl in place, pointing the analyze,
    saif-glob and report-power commands at the given adder and paths.

    Lines are located by the KEY.* marker comments they carry.
    """
    adder = 'iadder_B16_{0}.v'.format(adder)
    # The context manager replaces the original's manual close() and
    # guarantees the in-place edit is finalized even on an exception.
    with FileInput('./pscripts/auto_power.tcl', inplace=True) as f:
        for line in f:
            if KEY.ANALYZE_IADDER in line:
                print('analyze -f verilog ../{0} ;# {1}\n'.format(
                    adder, KEY.ANALYZE_IADDER), end='')
            elif KEY.IADDER_FOLDER in line:
                print('set saiffiles [glob {0}/*.saif] ;# {1}\n'.format(
                    adder_path, KEY.IADDER_FOLDER), end='')
            elif KEY.REPORT_POWER in line:
                print("report_power > {0}/$fbasename.txt ; # {1}\n".format(
                    report_path, KEY.REPORT_POWER), end='')
            else:
                print(line, end='')
def test_zero_byte_files(self):
    # Empty files are skipped transparently; only t3 contributes a line.
    t1 = self.writeTmp("")
    t2 = self.writeTmp("")
    t3 = self.writeTmp("The only line there is.\n")
    t4 = self.writeTmp("")
    fi = FileInput(files=(t1, t2, t3, t4))

    first = fi.readline()
    self.assertEqual(first, 'The only line there is.\n')
    self.assertEqual(fi.lineno(), 1)
    self.assertEqual(fi.filelineno(), 1)
    self.assertEqual(fi.filename(), t3)

    # EOF: the reader has moved on to the (empty) final file.
    eof = fi.readline()
    self.assertFalse(eof)
    self.assertEqual(fi.lineno(), 1)
    self.assertEqual(fi.filelineno(), 0)
    self.assertEqual(fi.filename(), t4)
    fi.close()
def namd_conf_mod(inputdir, newNamd, anchor):
    """Patch a NAMD configuration file in place, overwriting the cell
    basis vectors and cell origin with the values read for `anchor`.

    NOTE(review): every line is stripped, re-split and re-joined with
    single spaces, and blank lines are dropped from the rewritten file;
    original whitespace formatting is not preserved.
    """
    vector, origin = namd_conf_read(inputdir, anchor)
    with FileInput(files=newNamd, inplace=True) as f:
        for line in f:
            info = line.strip().split()
            if not info:
                # Blank line: emit nothing (matches original behavior).
                continue
            keyword = info[0].lower()
            # vector[i] replaces only the i-th component of
            # cellBasisVector(i+1) -- presumably a diagonal
            # (orthorhombic) cell; TODO confirm against namd_conf_read.
            if keyword == "cellbasisvector1":
                info[1] = vector[0]
            if keyword == "cellbasisvector2":
                info[2] = vector[1]
            if keyword == "cellbasisvector3":
                info[3] = vector[2]
            if keyword == "cellorigin":
                info[1:4] = origin
            print(" ".join(str(x) for x in info))
def test_readline(self):
    # Python 2 variant: the ascii hook performs no newline translation,
    # so \r\n and \r survive in the decoded result.
    with open(TESTFN, 'wb') as f:
        f.write('A\nB\r\nC\r')
        # Fill TextIOWrapper buffer.
        f.write('123456789\n' * 1000)
        # Issue #20501: readline() shouldn't read whole file.
        f.write('\x80')
    self.addCleanup(safe_unlink, TESTFN)

    fi = FileInput(files=TESTFN, openhook=hook_encoded('ascii'), bufsize=8)
    try:
        # The most likely failure is a UnicodeDecodeError due to the
        # entire file being read when it shouldn't have been.
        self.assertEqual(fi.readline(), u'A\n')
        self.assertEqual(fi.readline(), u'B\r\n')
        self.assertEqual(fi.readline(), u'C\r')
        with self.assertRaises(UnicodeDecodeError):
            # Read to the end of file.
            list(fi)
    finally:
        fi.close()
def main(args):
    """Update FindTileDB_EP.cmake in place so it references the TileDB
    release `args.version` / asset hash `args.hash`, recomputing each
    download URL's SHA1.

    Two-line state machine: when a line contains a quoted https URL, its
    SHA1 is computed and stored in `sha1`; the NEXT line is then
    rewritten to carry that checksum.
    """
    old_version = None  # release tag currently in the file (first URL seen)
    old_hash = None     # short hash embedded in the current asset filename
    sha1 = None         # checksum computed for the URL on the previous line
    filepath = (
        f"{os.path.dirname(__file__)}/../libtiledbvcf/cmake/Modules/FindTileDB_EP.cmake"
    )
    filepath = os.path.realpath(filepath)
    print(f"Updating {filepath}")
    with FileInput(filepath, inplace=True) as fp:
        for line in fp:
            line = line.rstrip()
            if old_version is None:
                # First matching URL fixes the old version/hash pair that
                # all later lines are rewritten against.
                m = re.search(r"TileDB/releases/download/(.*?)/.*-(.*)\.zip", line)
                if m:
                    old_version = m.group(1)
                    old_hash = m.group(2)
            if old_version is not None:
                # modify url
                if "https://" in line:
                    line = line.replace(old_version, args.version)
                    line = line.replace(old_hash, args.hash)
                # update sha1 value computed on previous line
                if sha1 is not None:
                    if "URL_HASH" in line:
                        line = re.sub(r"SHA1=.*", f"SHA1={sha1}", line)
                    else:
                        line = re.sub(r'".*"', f'"{sha1}"', line)
                    sha1 = None
                else:
                    # NOTE(review): hash_url_file presumably downloads the
                    # URL and hashes its content -- network access required.
                    m = re.search(r'"(https://.*)"', line)
                    if m:
                        sha1 = hash_url_file(m.group(1))
            # print line to file
            print(line)
def test_nextfile_oserror_deleting_backup(self):
    """Tests invoking FileInput.nextfile() when the attempt to delete the
    backup file would raise OSError. This error is expected to be silently
    ignored"""

    os_unlink_orig = os.unlink
    os_unlink_replacement = UnconditionallyRaise(OSError)
    try:
        t = writeTmp(1, ["\n"])
        self.addCleanup(remove_tempfiles, t)
        with FileInput(files=[t], inplace=True) as fi:
            next(fi)  # make sure the file is opened
            os.unlink = os_unlink_replacement
            fi.nextfile()
    finally:
        os.unlink = os_unlink_orig

    # sanity check to make sure that our test scenario was actually hit
    self.assertTrue(os_unlink_replacement.invoked,
                    "os.unlink() was not invoked")