def replace_in_file(path, replacements, logger=None):
    """
    This helper function performs a line replacement in the file located at 'path'.

    :param path: path to the file to be altered
    :param replacements: list of string pairs formatted as [old_line_pattern, new_line_replacement]
    :param logger: logger object, optional. If not None, used to report successful replacements
    """
    tmp = path + '.tmp'
    if isinstance(replacements[0], string_types):
        replacements = [replacements]
    regexs = []
    for replacement in replacements:
        try:
            regex = re.compile(replacement[0])
        except re.error:
            regex = None
        regexs.append(regex)
    replacements_found = []
    with open(tmp, 'w+') as nf:
        with open(path) as of:
            for line in of.readlines():
                for replacement, regex in zip(replacements, regexs):
                    # try a simple string match
                    if replacement[0] in line:
                        line = line.replace(
                            replacement[0],
                            "" if replacement[1] in (None, '') else replacement[1])
                        replacements_found.append(
                            (replacement[0], replacement[1]))
                    # then try a regex match
                    else:
                        if regex is not None:
                            match = regex.search(line)
                            if match is not None:
                                try:
                                    line = line.replace(
                                        match.groups(0)[0],
                                        "" if replacement[1] in (None, '') else replacement[1])
                                    replacements_found.append(
                                        (match.groups(0)[0], replacement[1]))
                                except IndexError:
                                    line = line.replace(
                                        match.group(), replacement[1])
                                    replacements_found.append(
                                        (match.group(), replacement[1]))
                                break
                # write changed line back
                nf.write(line)
    sh.rm(path)
    sh.mv(tmp, path)
    if logger:
        c = Counter(replacements_found)
        for k in c.keys():
            logger.debug(
                " > Found and replaced \"{}\" with \"{}\" {} times in path: {}"
                .format(k[0], k[1], c[k], path))
def run(self):
    global g, pid
    time.sleep(15)
    try:
        while True:
            sstv_debug_log('Encoder', 'Encoder reporting for duty')
            convert('/tmp/latest.jpg',
                    '-resize', '320x240!',
                    '-pointsize', '35',
                    '-fill', 'black', '-annotate', '+0+37', 'W8UPD',
                    '-fill', 'white', '-annotate', '+0+40', 'W8UPD',
                    '-fill', 'black', '-annotate', '+30+230',
                    '%f, %f' % (g.fix.latitude, g.fix.longitude),
                    '-fill', 'white', '-annotate', '+33+233',
                    '%f, %f' % (g.fix.latitude, g.fix.longitude),
                    '/tmp/latest.ppm')
            robot36_encode('/tmp/latest.ppm', '/tmp/inprogress.wav')
            mv('/tmp/inprogress.wav', '/tmp/latest.wav')
    except Exception as e:
        sstv_debug_log('Encoder', str(e), True)
        os._exit(1)
def convertSample(file_name):
    info = file_info(file_name).replace(file_name + ': ', '')
    if regexMatch('Audio file with', info):
        info = info.replace('Audio file with ', '')
    else:
        aac_match = regexMatch("ISO Media, Apple iTunes (.*)", info)
        if aac_match:
            encoding = aac_match.group(1)
            print("!! Error: Unsupported Apple audio encoding:", encoding)
            rm(file_name)
            return None
        print("!! Error: Downloaded 'audio' file", "'" + file_name + "'", "with attributes:")
        print(" ", info)
        return None
    mp3_match = regexMatch("ID3 version (.*) contains:(.*)", info)
    if mp3_match:
        id3_version, encoding = mp3_match.groups()
        new_file_name = file_name.replace('.unknown', '.mp3')
    else:
        print("!! Error: Unrecognized audio container:", info)
        return None
    mv(file_name, new_file_name)
    flac_file_name = file_name.replace('.unknown', '.flac')
    sox(new_file_name, flac_file_name)
    rm(new_file_name)
    return flac_file_name
def main():
    logging.debug('start')
    if YANDEX_SEARCH_ID == "":
        logging.warning(
            'to enable search on your site run\n python3 build3.py "http://website.url/" 123\n'
            ' where 123 is yandex search id obtainable on http://site.yandex.ru/searches/new/'
        )
    # create and clear output directory if necessary
    mkdir("-p", "_site/")
    rm("-Rf", glob("_site/*"))
    # copy static content
    cp("-a", glob("_web/*"), "_site/")
    mv("_site/dot_htaccess", "_site/.htaccess")
    # copy optimized css
    cssoptimizer(cat(glob("_css/*")), "-i", "_site/style.css")
    # copy optimized js
    uglifyjs(cat(glob("_js/*")), "-o", "_site/scripts.js")
    # generate content
    materialize_notes(SOURCE)
    materialize_template("Y_Search", "Y_Search", {"title": "Поиск"})
    logging.debug('end.')
    logging.info(
        'To start copy following url into your browser: \n%sindex.html' % BASE_URL)
def file_backup_restore(process, action):
    if action == 'backup':
        with sh.pushd('/opt/anaconda'):
            sh.tar(
                "--exclude=storage/pgdata",
                "--exclude=storage/object/anaconda-repository",
                "-czvf",
                process.storage_backup_name,
                "storage"
            )
        sh.mv(
            f'/opt/anaconda/{process.storage_backup_name}',
            f'{process.backup_directory}/'
        )
    elif action == 'restore':
        sh.cp(
            f'{process.backup_directory}/{process.storage_backup_name}',
            '/opt/anaconda'
        )
        with sh.pushd('/opt/anaconda'):
            sh.tar(
                '-xzvf',
                f'/opt/anaconda/{process.storage_backup_name}'
            )
            sh.rm(f'{process.storage_backup_name}')
def _runTest(self, shards, max_threads):
    for threads in range(1, max_threads + 1):
        for shard in range(0, shards):
            with sh.sudo:
                outfile = output_file_name(shards, shard, threads)
                zmap(p=80, T=threads, shards=shards, shard=shard,
                     _out="tempfile")
                parse("tempfile", _out=outfile)
                dup_lines = int(wc(uniq(cat(outfile), "-d"), "-l"))
                self.assertEqual(dup_lines, 0)
                shard_file = shard_file_name(shards, threads)
                if shard == 0:
                    cat(outfile, _out=shard_file)
                else:
                    cat(shard_file, outfile, _out="tempfile")
                    mv("tempfile", shard_file)
    for threads in range(1, max_threads + 1):
        shard_file = shard_file_name(shards, threads)
        num_lines = int(wc(cat(shard_file), "-l"))
        self.assertEqual(num_lines, TestSharding.NUM_IPS)
        dup_lines = int(
            wc(uniq(sh.sort(cat(shard_file), "-n"), "-d"), "-l"))
        self.assertEqual(dup_lines, 0)
def backup_repository_db(process):
    process.get_postgres_docker_container()
    backup_command = (
        "su - postgres -c 'pg_dump -U postgres -F t anaconda_repository > "
        f"{process.postgres_container_repo_backup_path}'"
    )

    # Check for an existing backup file and remove it if present
    if os.path.isfile(f'{process.postgres_system_repo_backup_path}'):
        os.remove(f'{process.postgres_system_repo_backup_path}')

    # Back up the repository
    process.run_command_on_container(process.docker_cont_id, backup_command)

    # Ensure the backup file was actually created
    if not os.path.isfile(f'{process.postgres_system_repo_backup_path}'):
        log.error('Could not find backup file for postgres')
        raise exceptions.NoPostgresBackup(
            'Could not find backup file for postgres'
        )

    # Move the backup to the backup directory
    sh.mv(
        f'{process.postgres_system_repo_backup_path}',
        f'{process.backup_directory}/'
    )
def auto_convert(input_fname):
    """ Automatically guess options and what to do with the input file. """
    if 'original' in input_fname:
        print(f'skip converting {input_fname}')
        return
    outdir = path.dirname(input_fname)
    if outdir == '':
        outdir = '.'
    converting_dir = path.join(outdir, 'converting')
    sh.mkdir('-p', converting_dir)
    out_basename = path.splitext(path.basename(input_fname))[0] + '.mov'
    out_fname = path.join(converting_dir, out_basename)
    if 'F-Log' in input_fname:
        print(f'convert {input_fname} with lut')
        lut = 'wdr'
    else:
        print(f'convert {input_fname} without lut')
        lut = None
    convert(input_fname, out_fname, lut=lut)
    original_dir = path.join(path.dirname(input_fname), 'original')
    sh.mkdir('-p', original_dir)
    sh.mv(input_fname, original_dir)
    sh.mv('-f', out_fname, outdir)
def put_file(self, path):
    temp_file = str(sh.mktemp("-p", self.tmp).stdout, 'utf8').strip()
    path = path.strip()
    # quote the path with whichever quote character it does not contain
    if "'" in path:
        returncode, stdout, stderr = launch_command(
            "dd if=\"{0}\" iflag=nofollow bs=4k | tee {1} | sha1sum".format(
                path, temp_file
            )
        )
    else:
        returncode, stdout, stderr = launch_command(
            "dd if='{0}' iflag=nofollow bs=4k | tee {1} | sha1sum".format(
                path, temp_file
            )
        )
    if returncode != 0:
        print(stdout)
        print(stderr)
        raise UnableToHashFile("File : {0}".format(path))
    hash_str = re.search("^[0-9a-f]*", str(stdout, 'utf8')).group(0)
    destination_folder = self.create_destination_folder(hash_str)
    destination_path = os.path.join(destination_folder, hash_str)
    if not self.is_stored(hash_str):
        sh.mv(temp_file, destination_path)
        sh.chmod("444", destination_path)
    else:
        sh.rm(temp_file)
    return destination_path
def efi():
    """ Write programs to the EFI partition """
    partitions = Partitions()
    paths.efi_mnt.mkdir(exist_ok=True)
    if not partitions.is_mounted(paths.efi_mnt):
        sh.mount(config.efi_dev, paths.efi_mnt)
    paths.efi.mkdir(exist_ok=True)

    # clean slate
    sh.rm('-rf', paths.efi_boot)
    # Make refind the default loader by naming convention so that we don't have
    # to worry about EFI variables being set correctly
    sh.cp('-r', '/usr/share/refind/refind', paths.efi_boot)
    sh.mv(paths.efi_boot / 'refind_x64.efi', paths.efi_boot / 'bootx64.efi')

    # Memtest
    paths.memtest_mnt.mkdir(exist_ok=True)
    if not partitions.is_mounted(paths.memtest_mnt):
        img_fpath, efi_start_bytes = memtest_extract()
        sh.mount('-o', f'loop,ro,offset={efi_start_bytes}',
                 img_fpath, paths.memtest_mnt)
    # clean slate and copy files to our EFI partition
    sh.rm('-rf', paths.efi_memtest)
    sh.cp('-r', paths.memtest_boot, paths.efi_memtest)
    sh.mv(paths.efi_memtest / 'BOOTX64.efi',
          paths.efi_memtest / 'memtestx64.efi')
@contextmanager
def restore_file(filename):
    '''
    Context manager that restores a file to its previous state.

    If the file exists on entry, it is backed up and restored.
    If the file does not exist on entry and does exist on exit, it is deleted.
    '''
    exists = os.path.exists(filename)
    if exists:
        # we just want the pathname, not the handle
        # tiny chance of race if someone else gets the temp filename
        handle, backup = tempfile.mkstemp()
        os.close(handle)
        sh.cp('--archive', filename, backup)
    try:
        yield
    finally:
        if os.path.exists(filename):
            sh.rm(filename)
        if exists:
            # restore to original state
            sh.mv(backup, filename)
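# A minimal usage sketch for restore_file (hypothetical path; assumes the
# @contextmanager decorator shown above): changes made inside the block are
# undone on exit.

with restore_file('/tmp/example.conf'):
    with open('/tmp/example.conf', 'w') as f:
        f.write('temporary contents\n')  # scratch edit
# here /tmp/example.conf is back to its original contents,
# or gone again if it did not exist on entry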
def make_bootable(self):
    """ Create a bootable disk """
    self.uuid = re.search(
        r'UUID="(\w*)"', str(sh.blkid(self.device + "1"))).group(1)
    print("Device UUID:", self.uuid)

    # In some Windows versions this directory is upper-case
    self.boot_folder = self.destination_mount + "/boot"
    try:
        sh.mv(self.destination_mount + "/BOOT", self.boot_folder)
    except sh.ErrorReturnCode:
        pass

    # Install the bootloader:
    # grub-install --target=i386-pc --boot-directory="/<USB_mount_folder>/boot" /dev/sdX
    installer = sh.Command("grub-install")
    installer(self.device, target="i386-pc",
              boot_directory=self.destination_mount + "/boot")

    # Write the GRUB configuration
    with open("{}/grub/grub.cfg".format(self.boot_folder), "wt") as config:
        config.write("""
set menu_color_normal=white/black
set menu_color_highlight=black/light-gray

menuentry 'Install Windows' {
    ntldr /bootmgr
}
""")
def process(self, *, skip_ocr=False):
    # Prepare directories
    self.prepare_directories()
    cd(self.workdir)

    # Scan pages
    self.scan_pages()

    # Combine tiffs into a single multi-page tiff
    self.combine_tiffs()

    # Convert tiff to pdf
    self.convert_tiff_to_pdf()

    # Do OCR
    if skip_ocr is False:
        self.do_ocr()
        filename = 'clean.pdf'
    else:
        filename = 'output.pdf'

    # Move file
    print(prefix() + 'Moving resulting file...')
    cd('..')
    mv('{}/{}'.format(self.workdir, filename), self.output_path)
    print('\nDone: %s' % self.output_path)
def move_services(filename, num_ext):
    # move to services folder
    file_path = filename.rsplit('.', num_ext)[0]
    src_path = path.join(app.config['UPLOAD_FOLDER'], file_path, file_path)
    dest_path = app.config['SERVICES_FOLDER']
    i = 0
    while i != -1:
        try:
            # suffixed names are used to avoid collisions in the services folder
            src_path_i = path.join(app.config['UPLOAD_FOLDER'], file_path,
                                   file_path + str(i))
            src_path_ii = path.join(app.config['UPLOAD_FOLDER'], file_path,
                                    file_path + str(i - 1))
            if i == 0:
                mv(src_path, dest_path)
            elif i == 1:
                mv(src_path, src_path_i)
                mv(src_path_i, dest_path)
            else:
                mv(src_path_ii, src_path_i)
                mv(src_path_i, dest_path)
            i = -1
        except Exception:
            i += 1
    # remove leftover files in tmp
    remove(path.join(app.config['UPLOAD_FOLDER'], filename))
    rmtree(path.join(app.config['UPLOAD_FOLDER'], file_path))
def fix_binfile(src, dest=None):
    _, outfile = mkstemp()
    logging.info("Updating {} (writing temporary file to {}).".format(src, outfile))
    with open(outfile, 'w') as outf:
        with open(src) as inf:
            for line in inf:
                if line.startswith('declare -r app_classpath='):
                    outf.write(LINE_REPLACEMENT)
                else:
                    outf.write(line)
    if not dest:
        infile_bak = '.'.join([src, 'orig'])
        logging.warning("Overwriting original file {} (backup copy kept in {})".format(
            src, infile_bak))
        try:
            cp(src, infile_bak)
            dest = src
        except ErrorReturnCode as error:
            logging.error("Failed to make backup copy of {}; did you have the necessary "
                          "permissions? (Error: {})".format(src, error.stderr))
            exit(1)
    mv(outfile, dest)
    chmod('ug+x', dest)
def move_services(filename, j, num_ext):
    # move to services folder
    file_path = filename.rsplit('.', num_ext)[0]
    src_path = path.join(app.config['UPLOAD_FOLDER'], file_path, file_path)
    dest_path = app.config['SERVICES_FOLDER']
    i = 0
    while i != -1:
        try:
            # suffixed names are used to avoid collisions in the services folder
            src_path_i = path.join(app.config['UPLOAD_FOLDER'], file_path,
                                   file_path + str(i))
            src_path_ii = path.join(app.config['UPLOAD_FOLDER'], file_path,
                                    file_path + str(i - 1))
            if i == 0:
                mv(src_path, dest_path)
            elif i == 1:
                mv(src_path, src_path_i)
                mv(src_path_i, dest_path)
            else:
                mv(src_path_ii, src_path_i)
                mv(src_path_i, dest_path)
            j = i
            i = -1
        except Exception:
            i += 1
    # remove leftover files in tmp
    remove(path.join(app.config['UPLOAD_FOLDER'], filename))
    rmtree(path.join(app.config['UPLOAD_FOLDER'], file_path))
    return j
def save_snippet(self, fname, start, end):
    if os.path.exists(fname):
        tqdm.write(f'Cached "{fname}"')
    else:
        dur = end - start
        # extract the relevant section from the video
        # (without downloading the whole video)
        cmd = sh.ffmpeg.bake(
            '-y',
            '-ss', start,
            '-i', self.stream_url,
            '-t', dur,
            # '-to', dur,
            '-c', 'copy',
            fname)
        # tqdm.write(str(cmd))
        cmd()
        # cut to the correct duration because the previous ffmpeg command
        # creates a video with still frames at the end
        # TODO: figure out why this is happening
        sh.ffmpeg('-y', '-i', fname, '-t', dur, 'tmp.mp4')
        sh.mv('tmp.mp4', fname)
    self.fname = fname
def backup_postgres_database(process):
    process.get_postgres_docker_container()
    backup_command = (
        "su - postgres -c 'pg_dumpall -U postgres --clean -f "
        f"{process.postgres_container_backup_path}'"
    )

    # Check for an existing sql file and remove it if present
    if os.path.isfile(process.postgres_system_backup_path):
        os.remove(process.postgres_system_backup_path)

    # Back up the repository
    process.run_command_on_container(process.docker_cont_id, backup_command)

    # Check for the file and ensure it is there
    if not os.path.isfile(process.postgres_system_backup_path):
        log.error('Could not find backup file for postgres')
        raise exceptions.NoPostgresBackup(
            'Could not find backup file for postgres'
        )

    # Move the backup to the backup directory
    sh.mv(
        process.postgres_system_backup_path,
        f'{process.backup_directory}/'
    )
def generate_template():
    template_file = ""
    if not isdir(build_dir):
        mkdir(build_dir)
    if isdir(build_dir):
        template_file = build_dir + "/dekko.dekkoproject.pot"
        print("TemplateFile: " + template_file)
        cd(build_dir)
        print("Running cmake to generate updated template")
        cmake('..')
        print("Running make")
        make("-j2")
    if isfile(template_file):
        if isdir(po_dir):
            print("Moving template to po dir: " + po_dir)
            mv(template_file, po_dir)
        else:
            print("Couldn't find po dir: " + po_dir)
            cleanup()
            return
    else:
        cleanup()
        print("No template found for: " + template_file)
        return
    print("Cleaning up")
    cleanup()
    print("YeeHaa!")
    print("All done, you need to commit & push this to bitbucket now :-)")
    print("NOTE: this would also be a good time to sync with launchpad, run")
    print("  $ python3 launchpad_sync.py")
def main(self):
    data = requests.head(self.targetURL)
    filesize = int(data.headers['Content-Length'])
    startPos = 0
    partSize = filesize // self.numWorkers
    threads = []
    for i in range(0, self.numWorkers):
        endPos = partSize * (i + 1)
        if (i + 1) == self.numWorkers:
            # last worker takes the remainder
            endPos = filesize
        t = threading.Thread(target=self.downloadSection,
                             args=(i, startPos, endPos))
        t.start()
        threads.append(t)
        startPos = endPos + 1
    for t in threads:
        t.join()
    print('combining all pieces...')
    sh.Command('./combine.sh')()
    fileName = self.targetURL.split('/')[-1]
    mv('movie.mkv', fileName)
    print('Moving file to video storage')
    mv(fileName, '/media/storage/videos/')
    return 0
def move_all_files(all_files, output_directory):
    sh.mkdir("-p", output_directory)
    for each_file in all_files:
        fname_splitter = each_file.split("/")
        gcmtid = fname_splitter[-2]
        output_path = join(output_directory, f"{gcmtid}")
        sh.mv(each_file, output_path)
def move(source, dest, owner=None, group=None, perms=None):
    ''' Move source to dest.

        move() tries to generally follow the behavior of the
        'mv --force' command.

        move() creates any missing parent dirs of dest.

        If dest is a dir, source is copied into dest. Otherwise,
        source will overwrite any existing dest.

        >>> import os.path, sh, tempfile
        >>> def temp_filename(dir=None):
        ...     if dir is None:
        ...         dir = test_dir
        ...     handle, path = tempfile.mkstemp(dir=dir)
        ...     # we don't need the handle
        ...     os.close(handle)
        ...     return path
        >>>
        >>> test_dir = tempfile.mkdtemp()
        >>>
        >>> # test move to dir
        >>> source = temp_filename()
        >>> dest_dir = tempfile.mkdtemp(dir=test_dir)
        >>> move(source, dest_dir)
        >>> assert not os.path.exists(source)
        >>> basename = os.path.basename(source)
        >>> assert os.path.exists(os.path.join(dest_dir, basename))
        >>>
        >>> # test move to new filename
        >>> source = temp_filename()
        >>> dest_filename = temp_filename(dir=test_dir)
        >>> move(source, dest_filename)
        >>>
        >>> # test move to existing filename
        >>> DATA = 'data'
        >>> source = temp_filename()
        >>> dest_filename = temp_filename()
        >>> with open(source, 'w') as sourcefile:
        ...     sourcefile.write(DATA)
        >>> move(source, dest_filename)
        >>> assert not os.path.exists(source)
        >>> assert os.path.exists(dest_filename)
        >>> with open(dest_filename) as destfile:
        ...     assert DATA == destfile.read()
        >>>
        >>> # clean up
        >>> sh_result = sh.rm('--force', '--recursive', test_dir)
        >>> assert sh_result.exit_code == 0
    '''
    parent_dir = os.path.dirname(dest)
    if not os.path.exists(parent_dir):
        makedir(parent_dir, owner=owner, group=group, perms=perms)
    sh.mv('--force', source, dest)
    set_attributes(dest, owner, group, perms, recursive=True)
def main():
    if not os.path.isfile(PATH + 'cowposts.db'):
        db_init()
    filename = sys.argv[1]
    with open(filename, 'r') as f:
        text = f.read()
    db_insert(filename, text)
    mv(filename, 'posts/')
def make_compliant_fastaa(self):
    print("making compliant fastas")
    script = sh.Command(self.orthoMCL_path + "orthomclAdjustFasta")
    if not os.path.exists(self.out_dir + "temp"):
        os.makedirs(self.out_dir + "temp")
    for f in tqdm(self.proteoms):
        script(f.split("/")[-1].split(".")[0], f, 1)
        sh.mv(f.split("/")[-1].split(".")[0] + ".fasta",
              self.out_dir + "temp/")
def __enter__(self):
    try:
        sh.test('-d', self.path)
        self.already_disabled = False
    except sh.ErrorReturnCode_1:
        self.already_disabled = True
    else:
        sh.mv(self.path, self.hidden_path)
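# The matching __exit__ is not part of this snippet. A plausible counterpart,
# assuming the manager is meant to put the directory back on exit, might look
# like the following sketch (not the original class):

def __exit__(self, exc_type, exc_value, traceback):
    # undo the move performed in __enter__, unless the
    # directory was already absent on entry
    if not self.already_disabled:
        sh.mv(self.hidden_path, self.path)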
def _commit_changes(self):
    self._ensure_subvolume()
    gc_dir = self.gc_dir.format(timestamp=datetime.now().strftime("%s"))
    out = sh.mv(self.service_dir, gc_dir)
    assert out.exit_code == 0 and out.stderr == b""
    out = sh.mv(self.working_dir, self.service_dir)
    assert out.exit_code == 0 and out.stderr == b""
def file_sample(language: str, checksum: str, sample: AudioSample) -> None:
    sample_name = '{language}-{checksum}.mp3'.format(language=language,
                                                     checksum=checksum)
    parent_dir = path.join(SAMPLE_DIR, language)
    if not path.isdir(parent_dir):
        os.mkdir(parent_dir)
    dest_file = path.join(parent_dir, sample_name)
    sh.mv(sample.filename, dest_file)
def stop_cli():
    p = subprocess.Popen(['ps', '-aux'], stdout=subprocess.PIPE)
    out, err = p.communicate()
    for line in out.decode().splitlines():
        if 'app.py' in line:
            pid = int(line.split()[1])
            os.kill(pid, signal.SIGKILL)
    sh.mv("scripts/iota_api/conf.bak", "scripts/iota_api/conf")
def interpret(filename, module, params, verbose=False):
    cur_dir = os.path.abspath(os.getcwd())
    res_name = filename + '.bin'
    # build the script
    params['backend'] = 'interpreter'
    script = _gen_sim_script(filename, module, params)
    execute_ode(cur_dir, script, verbose=verbose)
    # move res to cur dir
    sh.mv(os.path.join(ODE_DIR, res_name), cur_dir)
def group_files(files, output_dir):
    """Attempts to move files into output_dir; files that do not exist are skipped."""
    sh.mkdir('-p', output_dir)
    for output in files:
        if os.path.exists(output):
            sh.mv('-f', output, output_dir)
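# A short usage sketch with hypothetical filenames; missing files are
# skipped rather than raising:

group_files(['run.log', 'results.csv', 'missing.txt'], 'archive/run-01')
# run.log and results.csv now live under archive/run-01/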
def deploy(name, fn=None, bucket='lambda_methods'):
    print('Preparing lambda method:', name)
    orig_dir = sh.pwd().strip()
    dirname = '{}/{}'.format(orig_dir, name)
    zip_name = '{}/{}.zip'.format(dirname, name)
    if os.path.exists(dirname):
        sh.rm('-rf', dirname)
    # cp skeleton project data
    sh.cp('-r', os.path.join(os.path.dirname(__file__), 'project'), dirname)
    base_zip = '{}/dist.zip'.format(dirname)
    if not os.path.exists(base_zip):
        _docker('--rm', '-v', '{}:/app'.format(dirname),
                'quay.io/pypa/manylinux1_x86_64', '/app/scripts/build.sh')
        sh.zip('-9', zip_name, '-j', '{}/README.md'.format(dirname))
        sh.cd(os.path.join(dirname, 'build'))
        sh.zip('-r9', zip_name, sh.glob('*'))
        sh.cd(dirname)
    else:
        sh.mv(base_zip, zip_name)
    if fn is not None:
        with open(os.path.join(dirname, 'src', 'custom.py'), 'w') as fh:
            fh.write(fn)
    sh.cp(os.path.join(dirname, 'src', 'template.py'),
          os.path.join(dirname, 'src', '{}.py'.format(name)))
    sh.cd(os.path.join(dirname, 'src'))
    sh.zip('-r9', zip_name, sh.glob('*'))
    sh.cd(orig_dir)

    def percent_cb(complete, total):
        sys.stdout.write('.')
        sys.stdout.flush()

    print('Publishing zip file to S3', 's3://{}/{}.zip'.format(bucket, name))
    b = _s3conn.get_bucket(bucket)
    k = Key(b)
    k.key = '{}.zip'.format(name)
    k.set_contents_from_filename(zip_name, cb=percent_cb, num_cb=10)
    try:
        _lambda.delete_function(FunctionName=name)
    except Exception:
        pass
    b = _s3conn.get_bucket('idaho-lambda')
    for key in b.list(prefix=name):
        key.delete()
    print('Creating function')
    code = {'S3Bucket': bucket, 'S3Key': '{}.zip'.format(name)}
    handler = '{}.handler'.format(name)
    role = 'arn:aws:iam::523345300643:role/lambda_s3_exec_role'
    _lambda.create_function(FunctionName=name, Code=code, Role=role,
                            Handler=handler, Runtime='python2.7',
                            Timeout=60, MemorySize=1024)
def run(self, repos, nightly):
    self.log("Tarballing it...")
    cd(self._basedir)
    version = get_version(repos, nightly)
    import platform
    bits = platform.architecture()[0][:2]
    bundle_name = "Bitmask-linux%s-%s" % (bits, version)
    mv("Bitmask", bundle_name)
    tar("cjf", bundle_name + ".tar.bz2", bundle_name)
    self.log("Done")
def build_obj(filename, module, params, verbose=False):
    cur_dir = os.path.abspath(os.getcwd())
    exe_name = filename + '.o'
    # build the script
    params['backend'] = 'objectfile'
    params['disableExecute'] = True
    script = _gen_sim_script(filename, module, params)
    execute_ode(cur_dir, script, verbose=verbose)
    # move the object file to cur dir
    sh.mv(os.path.join(ODE_DIR, exe_name), cur_dir)
def rename(event_path_this_rank):
    root_path = str(sh.pwd())[:-1]
    for thedir in event_path_this_rank:
        sh.cd(thedir)
        for fname in glob.glob("*.SAC"):
            net, sta, loc, chn = fname.split('.')[6:10]
            # logger.info(
            #     f"[rank:{rank},dir:{thedir}] rename {fname} to {net}.{sta}.{loc}.{chn}.SAC")
            sh.mv(fname, f"{net}.{sta}.{loc}.{chn}.SAC")
        sh.cd(root_path)
def dump_pairs(self):
    if os.path.exists("pairs/"):
        sh.rm("-r", "pairs/")
    if os.path.exists(self.out_dir + "pairs/"):
        sh.rm("-r", self.out_dir + "pairs/")
    print("dumping pairs")
    script = sh.Command(self.orthoMCL_path + "orthomclDumpPairsFiles")
    script(self.db_cfg)
    sh.mv("mclInput", self.out_dir)
    sh.mv("pairs", self.out_dir)
def build(filename, module, params, log_prefix, verbose=False):
    cur_dir = os.path.abspath(os.getcwd())
    exe_name = filename + '.exe'
    # build the script
    params['backend'] = 'aotcompiler'
    params['disableExecute'] = True
    script = _gen_sim_script(filename, module, params)
    execute_ode(cur_dir, script, verbose=verbose, log_prefix=log_prefix)
    # move exe to cur dir
    sh.mv(os.path.join(ODE_DIR, exe_name), cur_dir)
def run(self):
    data = {"convert": "True"}
    with self.output().open('r') as f:
        existing = json.load(f)
    for folders in existing['folders_list']:
        sh.mv(folders, self.dropbox_folder)
    with self.output().open('w') as f:
        existing.update(data)
        json.dump(existing, f)
def retry_import_files(input):
    errors = os.path.join(input, '_ERRORS')
    if not os.path.exists(errors):
        logger.error("Task dir _ERRORS doesn't exist.")
    for dir_name, dir_names, file_names in os.walk(errors):
        for file_name in file_names:
            error_path = os.path.join(dir_name, file_name)
            logger.info('Moving %s to %s' % (error_path, input))
            sh.mv(error_path, input)
    import_files(input)
def build(cwd, site_dir):
    cfg = config.load_config()

    # sanity check - the version dirs exist as named
    for version in cfg['extra']['versions']:
        if 'separate' not in version or not version['separate']:
            d = os.path.join('versions', version['dir'])
            print('Verifying dir %s' % (d))
            if not os.path.isdir(d):
                print("The directory %s does not exist" % (d))
                return

    # sanity check - dependent_repos exist in '..'
    for repo in dependent_repos:
        d = os.path.join(cwd, '..', repo)
        print('Verifying repo dependency in %s' % (d))
        if not os.path.isdir(d):
            print("The directory %s does not exist" % (d))
            return

    # sanity check - only one latest
    latest = False
    for version in cfg['extra']['versions']:
        if not latest and 'latest' in version and version['latest']:
            print('Latest is %s' % (version['dir']))
            latest = True
        elif latest and 'latest' in version and version['latest']:
            print('ERROR: More than one version is latest.')
            print('Only one version can be latest: True.')
            print('Check mkdocs.yml.')
            return

    print("Building site pages")
    sh.rm('-rf', site_dir)
    sh.mkdocs('build', '--clean', '--site-dir', site_dir)

    for version in cfg['extra']['versions']:
        print("Building doc pages for: %s" % (version['dir']))
        if 'separate' not in version or not version['separate']:
            sh.mkdocs('build', '--site-dir',
                      os.path.join(site_dir, version['dir']),
                      _cwd=os.path.join("versions", version['dir']))
        else:
            repo_dir = os.path.join(cwd, '..', 'mynewt-documentation')
            if version['dir'] != 'master':
                repo_dir = os.path.join(repo_dir, 'versions',
                                        version['dir'],
                                        'mynewt-documentation')
            sh.make('clean', _cwd=repo_dir)
            sh.make('docs', _cwd=repo_dir)
            sh.mv(os.path.join(repo_dir, '_build', 'html'),
                  os.path.join(site_dir, version['dir']))
        if 'latest' in version and version['latest']:
            sh.ln('-s', version['dir'], 'latest', _cwd=site_dir)
def import_json(import_folder):
    dicts = map(f2dict, Path(import_folder).glob('*.json'))
    notes = []
    notebooks = []
    resources = []
    tags = []
    relations = []
    for adict in dicts:
        if adict['type_'] == 1:
            notes.append(adict)
        elif adict['type_'] == 2:
            notebooks.append(adict)
        elif adict['type_'] == 3:
            print('id:', adict['id'], 'type:', 3)
        elif adict['type_'] == 4:
            resources.append(adict)
        elif adict['type_'] == 5:
            tags.append(adict)
        elif adict['type_'] == 6:
            relations.append(adict)

    # attach tag titles to their notes
    for rel in relations:
        the_tag = [x for x in tags if x['id'] == rel['tag_id']][0]
        the_note = [x for x in notes if x['id'] == rel['note_id']][0]
        if 'tag' not in the_note:
            the_note['tag'] = []
        the_note['tag'].append(the_tag['title'])

    for nb in notebooks:
        nb['slug'] = get_slug(nb)

    jop_link = re.compile(r'!\[\w{32}\.(\w{1,5})\]\(:/(\w{32})\)', re.MULTILINE)
    for note in notes:
        nb = [x for x in notebooks if x['id'] == note['parent_id']][0]
        body = re.sub(jop_link, r'![image](resources/\2.\1)', note['body'])
        fn = f"{note['id']}.md"
        note_tags = note['tag'] if 'tag' in note else ''
        created = datetime.utcfromtimestamp(
            int(note['user_created_time']) / 1000).strftime('%Y-%m-%d %H:%M:%S')
        updated = datetime.utcfromtimestamp(
            int(note['user_updated_time']) / 1000).strftime('%Y-%m-%d %H:%M:%S')
        print(f'Write to {fn} ...')
        with open(fn, 'w') as f:
            md_note = (f"Title: {note['title']}\n"
                       f"Tags: {note_tags}\n"
                       f"Notebook: {nb['slug']}\n"
                       f"Created: {created}\n"
                       f"Updated: {updated}\n"
                       "\n------\n\n") + body
            f.write(md_note)
        sh.mv(fn, configs['repo'])
def setup_local_server(task_name, task_files_to_copy=None):
    global server_process
    print("Local Server: Collecting files...")

    server_source_directory_path = \
        os.path.join(parent_dir, legacy_server_source_directory_name)
    local_server_directory_path = os.path.join(parent_dir, '{}_{}'.format(
        local_server_directory_name, task_name
    ))

    # Delete old server files
    sh.rm(shlex.split('-rf ' + local_server_directory_path))

    # Copy over a clean copy into the server directory
    shutil.copytree(server_source_directory_path, local_server_directory_path)

    # Consolidate task files
    task_directory_path = \
        os.path.join(local_server_directory_path, task_directory_name)
    sh.mv(
        os.path.join(local_server_directory_path, 'html'),
        task_directory_path
    )

    hit_config_file_path = os.path.join(parent_dir, 'hit_config.json')
    sh.mv(hit_config_file_path, task_directory_path)

    for file_path in task_files_to_copy:
        try:
            shutil.copy2(file_path, task_directory_path)
        except IsADirectoryError:  # noqa: F821 we don't support python2
            dir_name = os.path.basename(os.path.normpath(file_path))
            shutil.copytree(
                file_path, os.path.join(task_directory_path, dir_name))
        except FileNotFoundError:  # noqa: F821 we don't support python2
            pass

    print("Local: Starting server...")

    os.chdir(local_server_directory_path)

    packages_installed = subprocess.call(['npm', 'install'])
    if packages_installed != 0:
        raise Exception('please make sure npm is installed, otherwise view '
                        'the above error for more info.')

    server_process = subprocess.Popen(['node', 'server.js'])

    time.sleep(1)
    print('Server running locally with pid {}.'.format(server_process.pid))
    host = input(
        'Please enter the public server address, like https://hostname.com: ')
    port = input('Please enter the port given above, likely 3000: ')
    return '{}:{}'.format(host, port)
def run(input_file, options, control_file=None, out_dir=None):
    out_files = (remove_suffix(input_file) + "_peaks.bed",
                 remove_suffix(input_file) + "_summits.bed")
    cmd = _build_command(input_file, options, control_file, out_dir)
    subprocess.check_call(cmd)
    if out_dir:
        for f in out_files:
            sh.mv(f, os.path.join(out_dir, os.path.basename(f)))
        out_files = [os.path.join(out_dir, os.path.basename(x))
                     for x in out_files]
    return out_files
def after_job(self, status=None, ctx={}, *args, **kwargs):
    log_file = ctx.get('log_file', None)
    log_link = ctx.get('log_link', None)
    if log_file == "/dev/null":
        return
    if status == "fail":
        log_file_save = log_file + ".fail"
        try:
            sh.mv(log_file, log_file_save)
        except Exception:
            pass
        self.create_link(log_link, log_file_save)
def tearDownClass(cls):
    cls.log.debug("\n" + "#" * 90)
    # Get back the original site.pp
    cls.log.debug("Restoring original site.pp ...")
    if os.geteuid() != 0:
        sh.sudo('/bin/mv', cls.bck_manifest_name,
                cls.manifest_path + "/site.pp")
    else:
        sh.mv(cls.bck_manifest_name, cls.manifest_path + "/site.pp")
    return
def disable_etc(chroot=None):
    '''Disable bluetooth etc. directories in /etc.'''
    for module in BadModules:
        etc_paths_text = sh.find(fullpath('/etc', chroot))
        etc_paths = etc_paths_text.strip().split('\n')
        for path in etc_paths:
            if path.endswith('/{}'.format(module)):
                if os.path.exists(path):
                    try:
                        sh.mv('--force', path, path + '-unused')
                    except Exception as e:
                        print(e)
def reduce_python(self):
    print("Reduce python")
    oldpwd = os.getcwd()
    try:
        print("Remove files unlikely to be used")
        os.chdir(join(self.ctx.dist_dir, "root", "python"))
        sh.rm("-rf", "share")
        sh.rm("-rf", "bin")
        os.chdir(join(self.ctx.dist_dir, "root", "python", "lib"))
        sh.rm("-rf", "pkgconfig")
        sh.rm("libpython2.7.a")
        os.chdir(join(self.ctx.dist_dir, "root", "python", "lib", "python2.7"))
        sh.find(".", "-iname", "*.pyc", "-exec", "rm", "{}", ";")
        sh.find(".", "-iname", "*.py", "-exec", "rm", "{}", ";")
        # sh.find(".", "-iname", "test*", "-exec", "rm", "-rf", "{}", ";")
        sh.rm("-rf", "wsgiref", "bsddb", "curses", "idlelib", "hotshot")
        sh.rm("-rf", sh.glob("lib*"))

        # now create the zip.
        print("Create a python27.zip")
        sh.rm("config/libpython2.7.a")
        sh.rm("config/python.o")
        sh.rm("config/config.c.in")
        sh.rm("config/makesetup")
        sh.rm("config/install-sh")
        sh.mv("config", "..")
        sh.mv("site-packages", "..")
        sh.zip("-r", "../python27.zip", sh.glob("*"))
        sh.rm("-rf", sh.glob("*"))
        sh.mv("../config", ".")
        sh.mv("../site-packages", ".")
    finally:
        os.chdir(oldpwd)
def replace_in_file(path, replacements, logger=None):
    """
    This helper function performs a line replacement in the file located at 'path'.

    :param path: path to the file to be altered
    :param replacements: list of string pairs formatted as [old_line_pattern, new_line_replacement]
    :param logger: logger object, optional. If not None, used to report successful replacements
    """
    tmp = path + '.tmp'
    if isinstance(replacements[0], string_types):
        replacements = [replacements]
    regexs = []
    for replacement in replacements:
        try:
            regex = re.compile(replacement[0])
        except re.error:
            regex = None
        regexs.append(regex)
    replacements_found = []
    with open(tmp, 'w+') as nf:
        with open(path) as of:
            for line in of.readlines():
                for replacement, regex in zip(replacements, regexs):
                    # try a simple string match
                    if replacement[0] in line:
                        line = line.replace(
                            replacement[0],
                            "" if replacement[1] in (None, '') else replacement[1])
                        replacements_found.append(replacement[0])
                    # then try a regex match
                    else:
                        if regex is not None:
                            match = regex.search(line)
                            if match is not None:
                                try:
                                    line = line.replace(
                                        match.groups(0)[0],
                                        "" if replacement[1] in (None, '') else replacement[1])
                                    replacements_found.append(match.groups(0)[0])
                                except IndexError:
                                    line = line.replace(match.group(), replacement[1])
                                    replacements_found.append(match.group())
                                break
                # write changed line back
                nf.write(line)
    sh.rm(path)
    sh.mv(tmp, path)
    if logger:
        c = Counter(replacements_found)
        for k in c.keys():
            logger.debug("Found and replaced {} occurrence{}: \t{}"
                         .format(c[k], ['', 's'][c[k] > 1], k))
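# A usage sketch against a hypothetical settings.py: the first pair is
# matched as a literal substring; the second misses the literal check and
# falls through to the regex branch, where the text captured by the first
# group is replaced.

replacements = [
    ['DEBUG = False', 'DEBUG = True'],  # literal substring match
    [r"VERSION = '(.*)'", '1.0.1'],     # regex: group-1 text is replaced
]
replace_in_file('settings.py', replacements, logger=None)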
def reduce_python(self):
    logger.info("Reduce python")
    oldpwd = os.getcwd()
    try:
        logger.info("Remove files unlikely to be used")
        os.chdir(join(self.ctx.dist_dir, "root", "python3"))
        # os.execve("/bin/bash", ["/bin/bash"], env=os.environ)
        sh.rm("-rf", "bin", "share")

        # platform binaries and configuration
        os.chdir(join(
            self.ctx.dist_dir, "root", "python3", "lib",
            "python3.7", "config-3.7m-darwin"))
        sh.rm("libpython3.7m.a")
        sh.rm("python.o")
        sh.rm("config.c.in")
        sh.rm("makesetup")
        sh.rm("install-sh")

        # cleanup pkgconfig and compiled lib
        os.chdir(join(self.ctx.dist_dir, "root", "python3", "lib"))
        sh.rm("-rf", "pkgconfig")
        sh.rm("-f", "libpython3.7m.a")

        # cleanup python libraries
        os.chdir(join(
            self.ctx.dist_dir, "root", "python3", "lib", "python3.7"))
        sh.rm("-rf", "wsgiref", "curses", "idlelib", "lib2to3",
              "ensurepip", "turtledemo", "lib-dynload", "venv",
              "pydoc_data")
        sh.find(".", "-path", "*/test*/*", "-delete")
        sh.find(".", "-name", "*.exe", "-type", "f", "-delete")
        sh.find(".", "-name", "test*", "-type", "d", "-delete")
        sh.find(".", "-iname", "*.pyc", "-delete")
        sh.find(".", "-path", "*/__pycache__/*", "-delete")
        sh.find(".", "-name", "__pycache__", "-type", "d", "-delete")

        # now precompile to Python bytecode
        hostpython = sh.Command(self.ctx.hostpython)
        shprint(hostpython, "-m", "compileall", "-f", "-b")
        # sh.find(".", "-iname", "*.py", "-delete")

        # some pycache are recreated after compileall
        sh.find(".", "-path", "*/__pycache__/*", "-delete")
        sh.find(".", "-name", "__pycache__", "-type", "d", "-delete")

        # create the lib zip
        logger.info("Create a python3.7.zip")
        sh.mv("config-3.7m-darwin", "..")
        sh.mv("site-packages", "..")
        sh.zip("-r", "../python37.zip", sh.glob("*"))
        sh.rm("-rf", sh.glob("*"))
        sh.mv("../config-3.7m-darwin", ".")
        sh.mv("../site-packages", ".")
    finally:
        os.chdir(oldpwd)
def move_folder(src_path, dst_path, new_folder_name=None):
    """
    This helper function moves a folder from a source path to a destination
    path, optionally renaming the folder being moved. If it fails, it does so
    silently.

    :param src_path: absolute or relative source path
    :param dst_path: absolute or relative destination root path
    :param new_folder_name: new name for the source path's folder
    """
    src_path, dst_path = __expand_folders(src_path, dst_path)
    if new_folder_name is not None:
        dst_path = join(dst_path, new_folder_name).rstrip("/")
    try:
        if src_path != dst_path:
            sh.mv(src_path, dst_path)
    except sh.ErrorReturnCode_1:
        pass
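# A short example with hypothetical paths, renaming the folder in transit:

move_folder('templates/base-project', 'deployments',
            new_folder_name='project-01')
# deployments/project-01 now holds the former templates/base-project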
def replace_in_file(path, replacements):
    """
    This helper function performs a line replacement in the file located at 'path'.

    :param path: path to the file to be altered
    :param replacements: list of string pairs formatted as [old_line_pattern, new_line_replacement]
    """
    tmp = path + '.tmp'
    if isinstance(replacements[0], string_types):
        replacements = [replacements]
    regexs = []
    for replacement in replacements:
        try:
            regex = re.compile(replacement[0])
        except re.error:
            regex = None
        regexs.append(regex)
    with open(tmp, 'w+') as nf:
        with open(path) as of:
            for line in of.readlines():
                skip = False
                for replacement, regex in zip(replacements, regexs):
                    # try a simple string match
                    if replacement[0] in line:
                        if replacement[1] in (None, ''):
                            skip = True
                        else:
                            line = line.replace(replacement[0], replacement[1])
                        break
                    # then try a regex match
                    else:
                        if regex is not None:
                            match = regex.search(line)
                            if match is not None:
                                if replacement[1] in (None, ''):
                                    skip = True
                                else:
                                    try:
                                        line = line.replace(
                                            match.groups(0)[0], replacement[1])
                                    except IndexError:
                                        line = line.replace(
                                            match.group(), replacement[1])
                                break
                if not skip:
                    nf.write(line)
    sh.rm(path)
    sh.mv(tmp, path)
def rename(newname):
    this_file_path = os.path.split(os.path.realpath(__file__))[0]
    old_project_path = os.path.split(this_file_path)[0]
    oldname = os.path.split(old_project_path)[-1]
    new_project_path = os.path.join(os.path.split(old_project_path)[0], newname)
    cp_cmd = cp('-r', old_project_path, new_project_path)
    click.echo(''.join(cp_cmd))

    django_settings_dir = os.path.join(new_project_path, oldname)
    if os.path.exists(django_settings_dir):
        mv(django_settings_dir, os.path.join(new_project_path, newname))

    for nginx_conf in NGINX_CONFS:
        file_path = os.path.join(new_project_path, nginx_conf)
        mv(file_path.format(oldname), file_path.format(newname))

    cmd = 'cd {} && grep {} -ril ./ | xargs sed -i "s/{}/{}/g"'.format(
        new_project_path, oldname, oldname, newname)
    os.system(cmd)
    click.echo('new files in {}'.format(new_project_path))
    click.echo("please run cmd: 'cd {} && grep {} -ril ./' to check if all "
               "strings have been replaced!".format(new_project_path, oldname))
    click.echo('done.')
def postClone(self, cloned_files, target_dir, version):
    """
    Extracts the compressed archives.

    .. versionadded:: 0.3.0
    """
    f = cloned_files[0]
    if self.newer("hg18", version):
        # Extract the gzipped TAR archive into the target directory
        sh.tar("-xzf", f, "-C", target_dir)
    else:
        # Rename to ".zip"
        sh.mv(f, f.replace("tar.gz", "zip"))
        # GunZIP the file (and remove the archive)
        sh.gunzip(f)
def move_private_key(dirname, private_key_name):
    ''' Move the private key to dirname.

        By default openssl puts private keys in a 'private' subdir.
        Some apps need them in a different dir, such as the dir
        where the pub keys are.
    '''
    private_dir = os.path.join(dirname, 'private')
    private_key_path = os.path.join(private_dir, private_key_name)
    new_private_key_path = os.path.join(dirname, private_key_name)
    with locked():
        sh.mv(private_key_path, new_private_key_path)
        log('moved {} to {}'.format(private_key_path, new_private_key_path))
        # remove openssl's 'private' dir when empty
        if not os.listdir(private_dir):
            sh.rmdir(private_dir)
            log('removed {}'.format(private_dir))
def move_files(src_path, dst_path, *files):
    """
    This helper function moves files from a source path to a destination path.

    :param src_path: absolute or relative source path
    :param dst_path: absolute or relative destination path
    :param files: plain filenames, or tuples with the following format
                  (source_filename, destination_filename)
    """
    src_path, dst_path = __expand_folders(src_path, dst_path)
    for file in files:
        if isinstance(file, tuple):
            src, dst = file
        elif isinstance(file, string_types):
            src, dst = 2 * [file]
        else:
            continue
        src, dst = join(src_path, src), join(dst_path, dst)
        if src != dst:
            sh.mv(src, dst)
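# A sketch with hypothetical filenames: a plain string keeps its name,
# a tuple renames while moving.

move_files('build', 'dist',
           'README.md',                 # -> dist/README.md
           ('app.bin', 'app-v2.bin'))   # -> dist/app-v2.bin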
def extract(path, archive_url, host):
    """Fetch a tgz archive and extract it to the desired path"""
    # new_env = resetEnv(host)
    # TODO: check file type
    types = {
        'application/zip': sh.unzip,
        'application/x-tar': lambda f: sh.tar('-xf', f)
    }
    import mimetypes
    filename = os.path.basename(archive_url)
    file_type = mimetypes.guess_type(archive_url)[0]
    logger.debug('Extracting {} - {} - {} to {}'
                 .format(archive_url, filename, file_type, path))
    old_path = os.getcwd()
    tmp_file = '/tmp/{}'.format(filename)
    download = requests.get(archive_url, stream=True)
    if file_type not in types:
        return
    try:
        os.makedirs(path)
        os.chdir(path)
        with open(tmp_file, 'wb') as f:
            for data in download.iter_content(5120000):
                f.write(data)
        types[file_type](tmp_file)
        # Ensure the Vagrantfile is in the destination path;
        # ignore the first folder if necessary
        has_one_folder = os.listdir(path)
        if len(has_one_folder) == 1:
            sh.mv(sh.glob(os.path.join(path, has_one_folder[0], '*')),
                  os.path.join(path, '.'))
        os.remove(tmp_file)
        logger.debug('{} {}'.format(archive_url, path))
    except Exception:
        logger.error('Failed to extract project at {}'.format(path),
                     exc_info=True)
    os.chdir(old_path)
def switch_to_custom_manifest(cls, manifest_body):
    """
    Helper that overwrites the original manifest with a custom manifest.

    :param manifest_body:
    :return: None
    """
    with open("/var/tmp/netapp_test_suite_tmp_site.pp", 'w') as temp_site_pp:
        temp_site_pp.write(manifest_body)
    if os.geteuid() != 0:
        sh.sudo('/bin/mv', '/var/tmp/netapp_test_suite_tmp_site.pp',
                cls.manifest_path + "/site.pp")
        sh.sudo('/bin/chmod', '664', cls.manifest_path + "/site.pp")
    else:
        sh.mv('/var/tmp/netapp_test_suite_tmp_site.pp',
              cls.manifest_path + "/site.pp")
        sh.chmod('664', cls.manifest_path + "/site.pp")

    # Show what site.pp looks like now
    cls.log.debug("Current site.pp (by 'cat {0}'):".format(
        cls.manifest_path + "/site.pp"))
    cls.log.debug(sh.cat(cls.manifest_path + "/site.pp"))
def make_dir_unused(standard_dir):
    ''' If the standard dir exists, rename it with an "-unused" suffix.

        It may be necessary to delete the dir, or at least move it to
        another dir tree with a different root.
    '''
    unused_dir = standard_dir + '-unused'
    if os.path.exists(standard_dir):
        if os.path.exists(unused_dir):
            shutil.rmtree(unused_dir)
        try:
            os.rename(standard_dir, unused_dir)
        except Exception:
            try:
                # if the rename fails, try mv
                sh.mv(standard_dir, unused_dir)
            except Exception:
                pass
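# For example (hypothetical path):

make_dir_unused('/etc/bluetooth')
# /etc/bluetooth is now /etc/bluetooth-unused; any stale
# /etc/bluetooth-unused left from an earlier run was removed first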