def choco_pack_wrapper(branch, filename, url, version_number, hash, releasenotes, filesize):
    """Build (and optionally push) a Chocolatey package for an OpenCV release.

    Copies the .nuspec and chocolateyinstall.ps1 templates for *branch*,
    substitutes the release placeholders, then runs ``choco pack`` and —
    unless DEBUG is set — ``choco push``.

    :param branch: sub-directory holding the template files
    :param filename: release file name (logged only)
    :param url: download URL substituted for [URL]
    :param version_number: version substituted for [VERSION_NUMBER]
    :param hash: checksum substituted for [CHECKSUM] (name kept for
        interface compatibility although it shadows the builtin)
    :param releasenotes: text substituted for [RELEASE_NOTES]
    :param filesize: size substituted for [FILESIZE]
    """
    nuspec = "./" + branch + "/opencv.nuspec"
    install_script = "./" + branch + "/tools/chocolateyinstall.ps1"

    # Create working copies of the template files.
    shutil.copyfile(nuspec + ".template", nuspec)
    shutil.copyfile(install_script + ".template", install_script)

    # FIX: the original rewrote each file once per placeholder (three full
    # in-place passes over the nuspec, two over the install script). All
    # replacements are independent, so a single pass per file suffices.
    with in_place.InPlace(nuspec, encoding="utf-8") as file:
        for line in file:
            line = line.replace("[VERSION_NUMBER]", version_number)
            line = line.replace("[FILESIZE]", str(filesize))
            line = line.replace("[RELEASE_NOTES]", releasenotes)
            file.write(line)

    with in_place.InPlace(install_script, encoding="utf-8") as file:
        for line in file:
            file.write(line.replace("[URL]", url).replace("[CHECKSUM]", hash))

    print(filename)
    print(url)
    print(version_number)
    print(hash)
    print(releasenotes)
    print(filesize)

    # Run 'choco pack' with the new files
    os.system("cd " + branch + " && choco pack")
    # Run 'choco push' but NOT if in debug :^)
    if DEBUG is False:
        os.system("cd " + branch + " && choco push OpenCV." + version_number + ".nupkg")
def increment_version(self):
    """Bump the clarity version stamp inside access_settings.php in place.

    Finds ``define('CURRENT_VERSION', 'clarity_2_0_X_Y')`` and increments Y;
    when Y reaches 50 it rolls over to 0 and X is incremented instead. The
    old and new version strings are recorded on ``self.old_version`` /
    ``self.new_version``.
    """
    FILE = "/var/www/access_settings.php"
    version_re = re.compile(r'define\(\'CURRENT_VERSION\', \'clarity_(2_0_\d+_\d+)\'\)')
    with in_place.InPlace(FILE) as file:
        for line in file:
            found = version_re.search(line)
            if found:
                current = found.group(1)
                parts = current.split("_")
                minor, patch = int(parts[-2]), int(parts[-1])
                # Roll the patch counter over into the minor field at 50.
                if patch >= 50:
                    minor, patch = minor + 1, 0
                else:
                    patch += 1
                parts[-2], parts[-1] = minor, patch
                bumped = "_".join(map(str, parts))
                line = line.replace(current, bumped)
                self.old_version = current
                self.new_version = bumped
            file.write(line)
def add_syms(self):
    """Rewrite ``self.filename`` (Graphviz dot output) in place, mapping numeric
    variable ids back to symbolic names via ``self.bi_direction_variable_dict``.

    Lines with no digits, or containing ``[style = invis]``, are copied through
    untouched. Until ``self.constants_found_flag`` is set, lines are handed to
    ``self.get_constants()`` instead of being written.
    """
    # NOTE(review): the loop variable is deliberately stored on ``self`` so
    # helpers such as get_constants() can see the current line — confirm
    # before refactoring to a local variable.
    with in_place.InPlace(self.filename) as file:
        for self.line in file:
            if "[style = invis]" in self.line:
                # a line in the dot format we would like to avoid.
                file.write(self.line)
                continue
            if not ( bool(re.search(r'\d', self.line)) ):
                # this line uses regex to check if there are any numbers in the line
                file.write(self.line)
                continue
            if not self.constants_found_flag:
                # process the file up to the integer list
                # NOTE(review): this branch does not write the line here —
                # presumably get_constants() handles (or intentionally drops)
                # it; verify against that helper.
                self.get_constants()
            else:
                if "rank = same;" in self.line:
                    # Replace only the first numeric id on the line with its
                    # symbolic name from the inverse mapping.
                    integer_variable = re.findall('[0-9]+', self.line)
                    self.line = self.line.replace(
                        str(integer_variable[0]),
                        self.bi_direction_variable_dict.inverse[str(
                            integer_variable[0])], 1)
                file.write(self.line)
def update_version_for_external_dependencies(dependency_dict):
    """Sync versions in the external-dependencies file from *dependency_dict*.

    Each non-comment line has the form ``key;version``. Keys listed in
    SKIP_IDS or absent from the dict are left alone; otherwise the stored
    version is replaced when the dict holds a strictly newer one, and a
    skip is logged when the file already has a newer version.
    """
    with in_place.InPlace(EXTERNAL_DEPENDENCIES_FILE) as file:
        for raw in file:
            entry = raw.strip()
            if entry and not entry.startswith('#'):
                fields = entry.split(';', 1)
                key, value = fields[0], fields[1]
                if key in SKIP_IDS or key not in dependency_dict:
                    file.write(entry)
                else:
                    candidate = dependency_dict[key]
                    if version_bigger_than(value, candidate):
                        # File already carries a newer version — keep it.
                        log.warn(
                            'Version update skipped. key = {}, value = {}, new_value = {}'
                            .format(key, value, candidate))
                        file.write(entry)
                    elif version_bigger_than(candidate, value):
                        log.info(
                            'Version updated. key = {}, value = {}, new_value = {}'
                            .format(key, value, candidate))
                        file.write('{};{}'.format(key, candidate))
                    else:
                        file.write(entry)
            else:
                # Comments and blank lines pass through (stripped).
                file.write(entry)
            file.write('\n')
def fix_up_expected(self, path):
    """Enlarge every SVG under *path* so it is easy to view on github.

    Each file's ``<svg`` line has its width/height scaled by powers of 10
    until both are at least 1000; the original line is preserved in an
    adjacent XML comment.
    """
    def _enlarged(match):
        # Scale both dimensions together so aspect ratio is preserved.
        w = float(match.group('width'))
        h = float(match.group('height'))
        while w < 1000 or h < 1000:
            w *= 10
            h *= 10
        return 'width="{:.12g}" height="{:.12g}" '.format(w, h)

    for directory, _, names in os.walk(path):
        for name in names:
            with in_place.InPlace(os.path.join(directory, name)) as handle:
                for line in handle:
                    if line.startswith("<svg"):
                        rewritten = re.sub(
                            'width="(?P<width>[^"]*)" height="(?P<height>[^"]*)" ',
                            _enlarged, line)
                        handle.write("<!-- original:\n" + line + "-->\n" + rewritten)
                    else:
                        handle.write(line)
def replace_artifact_id(module, pom):
    """
    Replace artifactId in dependency and plugin part.
    :param module: module name
    :param pom: pom file path
    """
    log.debug('Replacing artifact id in file: {}'.format(pom, module))
    pom_dict = config[module][pom]
    if ARTIFACT_ID_PAIRS not in pom_dict:
        log.warn('No config key {} in pom parameters.'.format(ARTIFACT_ID_PAIRS))
        return
    pairs = pom_dict[ARTIFACT_ID_PAIRS]
    log.debug('Module: {}, artifact ids: {}'.format(module, get_str(pairs)))
    with in_place.InPlace(pom) as file:
        for line_num, line in enumerate(file, start=1):
            # Apply every configured old -> new artifact-id rename to the line.
            for pair in pairs:
                if pair.old_artifact_id in line:
                    updated = line.replace(pair.old_artifact_id, pair.new_artifact_id)
                    log.debug('Updating artifact id in line {}'.format(line_num))
                    log.debug(' old_line = {}.'.format(line.strip('\n')))
                    log.debug(' new_line = {}.'.format(updated.strip('\n')))
                    line = updated
            file.write(line)
def updateLastRun(self, task, timestamp, user):
    """Record the current time as *task*'s last run in the user's data file.

    Rewrites ``data/<user>.dat`` in place, replacing every ``task:<old>``
    entry with ``task:<now>``.

    :param task: task identifier stored in the data file
    :param timestamp: unused — kept for interface compatibility
    :param user: user name; selects the ``data/<user>.dat`` file
    """
    lastRun = self.lastRun(task, user)
    # FIX: hoist time.time() out of the loop — the original called it per
    # line, so multiple matching lines received slightly different stamps.
    old_entry = '%s:%s' % (task, lastRun)
    new_entry = '%s:%s' % (task, time.time())
    with in_place.InPlace('data/%s.dat' % user) as f:
        for data in f:
            f.write(data.replace(old_entry, new_entry))
def replace_in_place(pairs, file_name):
    """Apply each (pattern, replacement) regex pair to every line of
    *file_name*, rewriting the file in place."""
    import in_place
    with in_place.InPlace(file_name) as handle:
        for text in handle:
            for pattern, replacement in pairs:
                text = re.sub(pattern, replacement, text)
            handle.write(text)
def modify_template(yaml_item, template_file, app=None):
    """Fill in the placeholders of *template_file* in place from *yaml_item*.

    Handles the generic VALID_REPLACEMENTS mapping plus the special
    __SOFTWARE_UPPER__, __BIND_PATHS_ARRAY__, __BIND_PATHS__ and __APP__
    tokens. ``app`` supplies the name for __APP__ (blank when None).
    """
    with in_place.InPlace(template_file) as handle:
        for text in handle:
            for field, placeholder in VALID_REPLACEMENTS.items():
                text = text.replace(str(placeholder), str(yaml_item[field]))
            # Manually add SOFTWARE_UPPER
            if "__SOFTWARE_UPPER__" in text:
                # NOTE(review): the trailing .replace("-", "_") applies to the
                # ENTIRE line, not just the software name — preserved as-is,
                # confirm this is intended.
                text = text.replace("__SOFTWARE_UPPER__",
                                    yaml_item["software"].upper()).replace("-", "_")
            if "__BIND_PATHS_ARRAY__" in text:
                text = text.replace(
                    "__BIND_PATHS_ARRAY__",
                    "(%s)" % ' '.join("%s" % array for array in yaml_item['bind_paths']))
            if "__BIND_PATHS__" in text:
                text = text.replace(
                    "__BIND_PATHS__",
                    "(%s)" % ' '.join("%s" % array for array in yaml_item['bind_paths']))
            if '__APP__' in text:
                text = text.replace("__APP__", "" if app is None else app['name'])
            handle.write(text)
def replace_tfvars(key, value, fileName, quoteVal=True):
    """Rewrite the ``key = ...`` assignment in a .tfvars file in place.

    :param key: variable name to match (matched literally)
    :param value: new value; wrapped in double quotes when *quoteVal* is True
    :param fileName: tfvars file to edit
    :param quoteVal: quote the value (default True)
    """
    # FIX: re.escape the key — keys containing regex metacharacters (e.g.
    # "aws.region") were previously interpreted as patterns. The pattern is
    # also compiled once instead of per line.
    pattern = re.compile(r'({0} = )(.*)'.format(re.escape(key)))
    rendered = '"{0}"'.format(value) if quoteVal else '{0}'.format(value)
    with in_place.InPlace(fileName) as fileContent:
        for line in fileContent:
            # FIX: a callable replacement avoids re-interpreting backslashes
            # or group references that may appear in the value.
            fileContent.write(pattern.sub(lambda m: m.group(1) + rendered, line))
def fix_up_expected(self, path):
    """Normalise SVG files under *path* for friendlier github rendering.

    Width/height on the ``<svg`` line are scaled by powers of 10 until both
    reach at least 1000 (original line kept in an XML comment). Path
    elements that start and end at the same point are additionally rotated
    via ``self.rotate_pathstring`` so no-op boolean-op reorderings produce
    smaller diffs.
    """
    def _scaled(match):
        w = float(match.group('width'))
        h = float(match.group('height'))
        while w < 1000 or h < 1000:
            w *= 10
            h *= 10
        return 'width="' + str(w) + '" height="' + str(h) + '" '

    for directory, subdirs, names in os.walk(path):
        for name in names:
            with in_place.InPlace(os.path.join(directory, name)) as handle:
                for line in handle:
                    if line.startswith("<svg"):
                        handle.write(
                            "<!-- original:\n" + line + "-->\n" +
                            re.sub('width="(?P<width>[^"]*)" height="(?P<height>[^"]*)" ',
                                   _scaled, line))
                    elif line.startswith('<g fill-rule="evenodd"><path d="M '):
                        element = xml.etree.ElementTree.fromstring(line)
                        d_attr = element[0].attrib['d']
                        handle.write(line.replace(d_attr, self.rotate_pathstring(d_attr)))
                    else:
                        handle.write(line)
def add_host(process, host_list, template_file):
    """Add or refresh host entries in the *process* config file.

    For each host: if an entry already exists, its ``host = ...`` line is
    replaced in place with a freshly generated template; otherwise a new
    host block is appended via write_new_host().

    :param process: path of the config file to edit
    :param host_list: iterable of host names
    :param template_file: template used by generate_temp()
    """
    print(host_list)
    with open(process) as f:
        file_read = f.read()
    # NOTE(review): file_read is captured once; if write_new_host/InPlace
    # change the file, later hosts are checked against the stale snapshot.
    for host in host_list:
        host_1, host_2 = generate_temp(host, template_file)
        # Check if Host is in the File
        if host in file_read:
            logging.debug(
                "[i] %s Hosts already exists: Update Host in Progeress\n" % host)
            # Replace Line with New Template
            with in_place.InPlace(process) as fp:
                for line in fp:
                    # Replace the host's assignment line; copy all others.
                    if re.match(r"{}\s+\=".format(host), line):
                        fp.write(host_1)
                    else:
                        fp.write(line)
        else:
            # BUG FIX: the original guard was `elif host is not file_read:`,
            # an identity comparison that is effectively always true; the
            # intent is simply the negation of the membership test above.
            logging.debug("[i] %s Hosts not exists: Writing new Host\n" % host)
            write_new_host(process, host_1, host_2)
def change(self, target_line, new_line):
    """Rewrite ``self.file_path`` in place (a ``.bak`` backup is kept),
    substituting *new_line* for every line that contains *target_line*."""
    with in_place.InPlace(self.file_path, backup_ext=".bak",
                          encoding="utf-8") as handle:
        for current in handle:
            # line.replace(line, new_line) in the original is just new_line.
            handle.write(new_line if target_line in current else current)
def edit_date():
    """Prompt for a DD/MM/YYYY kill date and patch it into
    ``./source/safety_measures.cpp`` at fixed line numbers.

    Leading zeros are stripped from day and month so the generated C
    comparisons use plain integers.
    """
    killdate = input(
        colored("[+] Enter the killdate of the implant: [DD/MM/YYYY] ", "blue"))
    # FIX: maxsplit 2 (the original passed 3; only two separators exist).
    day, month, year = killdate.split('/', 2)
    if day[0] == '0':
        day = day[1]
    if month[0] == '0':
        month = month[1]
    # Line numbers match safety_measures.cpp — if you edit that file, don't
    # forget to update these keys too.
    replacements = {
        92: ' if (year > ' + year + ') return 1;\n',
        93: ' if (year < ' + year + ') return 0;\n',
        94: ' if (month < ' + month + ') return 0;\n',
        95: ' if (month == ' + month + ' && day <= ' + day + ') return 0;\n',
    }
    with in_place.InPlace('./source/safety_measures.cpp') as fp:
        for linecount, line in enumerate(fp, start=1):
            # FIX: the original rebuilt unchanged lines char-by-char with
            # ''.join(c for c in line); writing the line directly is identical.
            fp.write(replacements.get(linecount, line))
    print("\n")
def create_files(self, cooc_window=5, min_tf=0, min_df=0, apply_zero_index=True):
    """Create co-occurrence and ppmi files for every dataset split.

    For each ``(split, vowpal)`` pair in ``self._splits``, builds the four
    output paths (cooc_tf/cooc_df/ppmi_tf/ppmi_df) and delegates to
    ``self.create_cooc_files`` (0 return value = success). When
    *apply_zero_index* is true, each produced file is then rewritten in
    place with the first two whitespace-separated integer token ids on
    every line decremented by one (1-based -> 0-based).

    :param cooc_window: co-occurrence window size passed through
    :param min_tf: minimum term frequency threshold
    :param min_df: minimum document frequency threshold
    :param apply_zero_index: shift token ids down by one after creation
    """
    _file = {}
    for s, vowpal in self._splits:
        _file.update({
            s: {
                'cooc_tf': self._path('cooc', min_tf, 'tf', s, extension='txt'),
                'cooc_df': self._path('cooc', min_df, 'df', s, extension='txt'),
                'ppmi_tf': self._path('ppmi', min_tf, 'tf', s, extension='txt'),
                'ppmi_df': self._path('ppmi', min_df, 'df', s, extension='txt')
            }
        })
        _ = self.create_cooc_files(vowpal, self._vocab,
                                   _file[s]['cooc_tf'], _file[s]['cooc_df'],
                                   _file[s]['ppmi_tf'], _file[s]['ppmi_df'],
                                   cooc_window=cooc_window, min_tf=min_tf, min_df=min_df)
        if _ == 0:
            # An empty split label is reported as 'all'.
            print("Created ppmi files for '{}' split.".format(
                (lambda x: 'all' if not x else x)(s)))
        else:
            print(
                "Something went wrong when creating ppmi files for '{}' split."
                .format((lambda x: 'all' if not x else x)(s)))
        if apply_zero_index:
            print("Applying zero indexing of tokens (ids)")
            for key, path in _file[s].items():
                with in_place.InPlace(path, backup_ext='.bak') as fp:
                    for line in fp:
                        # NOTE(review): raises AttributeError if a line does
                        # not match "<int> <int> <rest>" — presumably the
                        # generated files are guaranteed to; confirm.
                        match_obj = re.search(r'^(\d+) (\d+) (.+)$', line).groups()
                        fp.write('{} {} {}\n'.format(
                            int(match_obj[0]) - 1,
                            int(match_obj[1]) - 1,
                            match_obj[2]))
def replacement(target_file, old, new):
    """Substitute every occurrence of *old* with *new* throughout
    *target_file*, editing the file in place."""
    with in_place.InPlace(target_file) as handle:
        for text in handle:
            # str.replace is a no-op when old is absent, so the original's
            # membership guard is preserved implicitly.
            handle.write(text.replace(old, new))
def replace_version(module, pom):
    """
    Replace version in dependency and plugin part.

    Scans the pom line by line; for every configured update item whose id
    appears on the line, the version is rewritten in place:
    - dependency lines (marked with X_VERSION_UPDATE) have the text between
      <version> and </version> replaced;
    - plugin/include lines (marked with X_INCLUDE_UPDATE) have the text
      between '[' and ']' replaced.
    Lines already at the new version are logged and left unchanged.

    :param module: module name
    :param pom: pom file path
    """
    log.debug('Replacing version in file: {}'.format(pom))
    pom_dict = config[module][pom]
    if VERSION_UPDATE_ITEMS not in pom_dict:
        log.warn(
            'No config key {} in pom parameters.'.format(VERSION_UPDATE_ITEMS))
        return
    version_update_items = pom_dict[VERSION_UPDATE_ITEMS]
    log.debug('Module: {}, versions: {}'.format(module,
                                                get_str(version_update_items)))
    with in_place.InPlace(pom) as file:
        line_num = 0
        for line in file:
            line_num = line_num + 1
            for version_update_item in version_update_items:
                if version_update_item.id in line:
                    # update version in dependency part
                    if X_VERSION_UPDATE in line:
                        # 9 == len('<version>'): slice out the current value.
                        old_version = line[(line.index('<version>') +
                                            9):line.index('</version>')]
                        if old_version != version_update_item.new_version:
                            new_line = line.replace(
                                old_version,
                                version_update_item.new_version)
                            log.debug(
                                'Updating version of dependency in line {}'.
                                format(line_num))
                            log.debug(' old_line = {}.'.format(
                                line.strip('\n')))
                            log.debug(' new_line = {}.'.format(
                                new_line.strip('\n')))
                            line = new_line
                        else:
                            log.warn(
                                'The same with new version in dependency part.'
                            )
                    # update version in plugin part
                    elif X_INCLUDE_UPDATE in line:
                        old_version = line[(line.index('[') +
                                            1):line.index(']')]
                        if old_version != version_update_item.new_version:
                            new_line = line.replace(
                                old_version,
                                version_update_item.new_version)
                            log.debug('Updating line {}'.format(line_num))
                            log.debug(' old_line = {}.'.format(
                                line.strip('\n')))
                            log.debug(' new_line = {}.'.format(
                                new_line.strip('\n')))
                            line = new_line
                        else:
                            log.warn(
                                'The same with new version in plugin part.')
            file.write(line)
def next_ztp(filename, mac):
    """Advance dnsmasq ZTP config to the next firmware/config file.

    Given the config file just sent (*filename*, e.g. ``switch_2.cfg``) and
    the requesting device's *mac* object, comment out the option line for
    the sent file in ztp_opts_conf, write per-mac retry lines, and enable
    the line for the next numbered file. Retry host entries are appended to
    ztp_hosts_conf.

    NOTE(review): relies on module-level ``ip``, ``ztp_ok``,
    ``ztp_lease_time``, ``ztp_opts_conf``, ``ztp_hosts_conf`` and ``log`` —
    confirm they are set before this runs. Also note ``.rstrip('.cfg')``
    strips any trailing c/f/g/. characters, not just the suffix.
    """
    _from = os.path.basename(filename)
    # Next file in sequence: same prefix, trailing number + 1.
    _to = f"{_from.split('_')[0]}_{int(_from.rstrip('.cfg').split('_')[-1]) + 1}.cfg"
    host_lines = []
    if not os.path.isfile(ztp_opts_conf):
        log.warning(f"{ztp_opts_conf} not found. Noting to do.")
    else:
        if not os.path.isfile(f"{os.path.dirname(filename)}/{_to}"):
            log.info(f"No More Files for {_from.split('_')[0]}")
        with in_place.InPlace(ztp_opts_conf) as fp:
            line_num = 1
            for line in fp:
                if _from in line:
                    # TODO if not mac.ok don't write retry lines... print/log/warning
                    if mac.ok:
                        # Audit comment + retry bootfile/host entries keyed
                        # on the device mac.
                        fp.write(
                            f"# {mac.cols}|{ip} Sent {_from}"
                            f"{' Success' if ztp_ok else 'WARN file size != xfer total check switch and logs'}\n"
                        )
                        fp.write(
                            f"# -- Retry Lines for {_from.rstrip('.cfg')} Based On mac {mac.cols} --\n"
                        )
                        fp.write(
                            f'tag:{mac.tag},option:bootfile-name,"{_from}"\n')
                        host_lines.append(
                            f"{mac.cols},{mac.tag},,{ztp_lease_time},set:{mac.tag},set:sent\n"
                        )
                    else:
                        print(
                            f'Unable to write Retry Lines for previously updated device. Mac {mac.orig} appears invalid'
                        )
                    # Disable the just-sent option line by commenting it out.
                    fp.write(f"# SENT # {line}")
                    log.info(
                        f"Disabled {_from} on line {line_num} of {os.path.basename(ztp_opts_conf)}"
                    )
                    log.info(
                        f"Retry Entries Created for {_from.rstrip('.cfg')} | {mac.cols} | {ip}"
                    )
                elif _to in line:
                    if not line.startswith('#'):
                        log.warning(
                            f'Expected {_to} option line to be commented out @ this point. It was not.'
                        )
                    # Enable the next file's option line by un-commenting it.
                    fp.write(line.lstrip('#').lstrip())
                    log.info(
                        f"Enabled {_to} on line {line_num} of {os.path.basename(ztp_opts_conf)}"
                    )
                else:
                    fp.write(line)
                line_num += 1
    if host_lines:
        with open(ztp_hosts_conf, 'a') as fp:
            fp.writelines(host_lines)
        log.info(
            f"Retry Entries Written to file for {_from.rstrip('.cfg')} | {mac.cols} | {ip}"
        )
def on_modified(self, event):
    """Watchdog callback: regenerate outputs when the watched file changes.

    Regenerates the stream file via the module-level ``generator`` and
    rewrites the notebook file with all ``__st`` lines filtered out.

    :param event: filesystem event, or None to force regeneration
    """
    if event is None or event.src_path == abs_path:
        print(f"Regenerating from {abs_path}...")
        with in_place.InPlace(stream_file) as out:
            generator.generate(abs_path, out)
        # FIX: the original iterated `open(abs_path, "r")` without ever
        # closing it; both files are now managed by context managers.
        with open(notebook_file, "w") as out, open(abs_path, "r") as src:
            for line in src:
                if "__st" not in line:
                    out.write(line)
def run():
    """Strip the angle-bracket markers from the header row of every stooq
    daily .txt file under the hard-coded data directory, in place."""
    source = '<TICKER>,<PER>,<DATE>,<TIME>,<OPEN>,<HIGH>,<LOW>,<CLOSE>,<VOL>,<OPENINT>'
    target = 'TICKER,PER,DATE,TIME,OPEN,HIGH,LOW,CLOSE,VOL,OPENINT'
    base = pathlib.Path('/home/kris/repos/trend/stooq/daily/uk/')
    for candidate in base.rglob('*.txt'):
        print(candidate)
        with in_place.InPlace(candidate) as handle:
            for row in handle:
                handle.write(row.replace(source, target))
def write_new_version(
        new_version: str, lineno: int, cfg_path: str,
        write_func: Callable[[str, in_place.InPlace], None]) -> None:
    """Rewrite *cfg_path* in place, delegating line *lineno* (0-based) to
    *write_func* and copying every other line through unchanged.

    :param new_version: version string handed to write_func
    :param lineno: zero-based index of the line to replace
    :param cfg_path: file to edit
    :param write_func: callback that writes the replacement line
    """
    with in_place.InPlace(cfg_path) as fo:
        for index, line in enumerate(fo):
            if index == lineno:
                write_func(new_version, fo)
            else:
                fo.write(line)
def _sReplace(filename, search, replace, unixEol=False):
    """Replace every occurrence of *search* with *replace* in *filename*.

    :param filename: file to edit in place (UTF-8)
    :param search: literal text to find
    :param replace: replacement text
    :param unixEol: when True, rewrite the file with LF line endings
    """
    with in_place.InPlace(filename, encoding="utf-8") as file:
        for line in file:
            file.write(line.replace(search, replace))
    if unixEol:
        # FIX: the original read via `open(...).read()` and never closed the
        # handle; both passes now use context managers.
        with open(filename, "r", encoding="utf-8") as src:
            fileContents = src.read()
        with open(filename, "w", encoding="utf-8", newline="\n") as dst:
            dst.write(fileContents)
def on_modified(self, event):
    """Watchdog callback: regenerate outputs when the watched file changes.

    Regenerates ``self.stream_file`` via ``self.generator``, rewrites
    ``self.notebook_file`` with all ``__st`` lines filtered out, then runs
    the optional jupytext command.

    :param event: filesystem event, or None to force regeneration
    """
    if event is None or event.src_path == self.abs_path:
        if not self.quiet:
            print(f"Regenerating from {self.abs_path}...")
        with in_place.InPlace(self.stream_file) as out:
            self.generator.generate(self.abs_path, out)
        # FIX: the original iterated `open(self.abs_path, "r")` without
        # closing it; both files are now managed by context managers.
        with open(self.notebook_file, "w") as out, \
                open(self.abs_path, "r") as src:
            for line in src:
                if "__st" not in line:
                    out.write(line)
        if self.jupytext_command is not None:
            os.system(self.jupytext_command)
def update_ism(ism_file):
    """Stamp the module-level ``version`` into the ProductVersion row of an
    InstallShield .ism file, editing it in place.

    :param ism_file: path to the .ism file
    """
    print('Begin update_ism')
    try:
        with in_place.InPlace(ism_file) as file:
            for line in file:
                if "ProductVersion" in line:
                    # Replace the whole row (the original's
                    # line.replace(line, ...) amounted to exactly this).
                    file.write('\t\t<row><td>ProductVersion</td><td>' +
                               version + '</td><td/></row>\r')
                else:
                    file.write(line)
    except Exception as e:
        # FIX: the original did `"Update_ism failure. " + e`, which raises
        # TypeError (str + Exception) and masks the real error.
        error_message("Update_ism failure. " + str(e))
def to_lower(file_path):
    """Normalise a corpus file in place with two passes.

    Pass 1: every line longer than one character is lower-cased, its last
    two characters are dropped (presumably a trailing delimiter + newline —
    TODO confirm against the corpus format), and a newline is re-appended.
    Pass 2: only lines longer than 50 characters are kept.

    NOTE(review): the original docstring said "less than 50 tokens", but the
    code filters on character length (> 50), not token count — confirm
    which is intended.

    args:
        file_path - file path of the corpus
    """
    print('---------------------------------------------------------')
    print("converting to lower case and adding delimiters")
    with in_place.InPlace(file_path, encoding='utf-8') as file:
        for line in tqdm(file):
            if len(line) > 1:
                line = line.lower()
                file.write(line[:-2])
                file.write('\n')
    gc.collect()
    print('Done')
    print('---------------------------------------------------------')
    with in_place.InPlace(file_path, encoding='utf-8') as file:
        for line in tqdm(file):
            if len(line) > 50:
                file.write(line)
                #file.write('\n')
    gc.collect()
def replace(file_path, pattern, subst):
    """Replace *pattern* with *subst* in every .c/.h file directly inside
    *file_path* (top level only, matching the original's early break).

    :param file_path: directory to scan
    :param pattern: literal text to find
    :param subst: replacement text
    """
    top_level_files = []
    for _dirpath, _dirnames, filenames in os.walk(file_path):
        top_level_files = filenames
        break  # only the first directory level, as before
    for name in top_level_files:
        if name.endswith((".c", ".h")):
            # FIX: os.path.join replaces the hard-coded "\\" separator, which
            # only worked on Windows. (Local `file` also renamed — it
            # shadowed the py2 builtin.)
            with in_place.InPlace(os.path.join(file_path, name)) as handle:
                for line in handle:
                    handle.write(line.replace(pattern, subst))
def edit_url():
    """Prompt for the raw Github shellcode URL, XOR-encrypt it with KEY, and
    patch it into line 23 of ``./source/falcon_zero.cpp``."""
    download_url = input(colored("[+] Enter the Github shellcode URL(raw): ", "blue"))
    encrypted_command_url = xor(download_url, KEY)
    command = ' char downloadURL[] = ' + encrypted_command_url + '\n'
    with in_place.InPlace('./source/falcon_zero.cpp') as fp:
        for linecount, line in enumerate(fp, start=1):
            # If you edit falcon_zero.cpp, don't forget to change me to an
            # appropriate line number too.
            if linecount == 23:
                fp.write(command)
            else:
                # FIX: the original rebuilt each unchanged line char-by-char
                # with ''.join(c for c in line); writing it directly is identical.
                fp.write(line)
    print("\n")
def edit_host():
    """Prompt for the target hostname, store its XOR-encrypted MD5 hash in
    line 69 of ``./source/safety_measures.cpp``."""
    host = input(colored("[+] Enter the hostname of the target machine: ", "blue"))
    targethash = hashlib.md5(host.encode()).hexdigest()
    encrypted_targethash = xor(targethash, KEY)
    command = ' char hash[] = ' + encrypted_targethash + '\n'
    with in_place.InPlace('./source/safety_measures.cpp') as fp:
        for linecount, line in enumerate(fp, start=1):
            # If you edit safety_measures.cpp, don't forget to change me to
            # an appropriate line number too.
            if linecount == 69:
                fp.write(command)
            else:
                # FIX: the original rebuilt each unchanged line char-by-char
                # with ''.join(c for c in line); writing it directly is identical.
                fp.write(line)
    print("\n")
def replace_schema_base_url(temp_dir, schema_base_url):
    """Swap DEFAULT_BASE_URL for *schema_base_url* in every .json file under
    *temp_dir* — needed to validate schemas served from another base URL,
    e.g. for testing."""
    for directory, _dirs, names in os.walk(temp_dir):
        base = Path(directory)
        for name in names:
            target = base / name
            if target.suffix == ".json":
                with in_place.InPlace(target) as handle:
                    for text in handle:
                        handle.write(text.replace(DEFAULT_BASE_URL, schema_base_url))
def dump_data(cls, output_file, user):
    """Rewrite the row starting with ``user.username`` in *output_file*,
    replacing it with the user's current values joined by double tabs.

    :raises GeneralError: when the file is missing or any other error occurs
    """
    try:
        with in_place.InPlace(output_file) as out:
            for row in out:
                if row.startswith(user.username):
                    # Stringify values, keep the trailing '\t\t\n' the
                    # original join produced.
                    fields = [str(item) for item in user.values] + ['\n']
                    out.write('\t\t'.join(fields))
                else:
                    out.write(row)
    except FileNotFoundError:
        raise GeneralError(f"Unable to find file: {output_file}")
    except Exception as exc:
        raise GeneralError(f"Unknown exception occurred: {exc}")