import logging
import shutil
from pathlib import Path

logger = logging.getLogger(__name__)


def safe_move(src: str, dest: str) -> None:
    """Move a file only after two safety checks:

    1. the source file exists, and
    2. no file with the same name already exists at the destination.
    """
    src = Path(src)
    dest = Path(dest)
    if not src.exists():
        logger.error(f"safe_move cannot be done because source file '{src}' was not found.")
        return
    if dest.exists():
        logger.warning(f"safe_move cannot be done because destination file '{dest}' already exists.")
        return
    try:
        shutil.move(src, dest)  # os has no move(); shutil.move also handles cross-device moves
        logger.debug(f"'{src}' has been moved to '{dest}'")
    except Exception as err:
        logger.error(f"Move operation failed, reason: {err}")
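# A minimal usage sketch for safe_move(); 'report.csv' and the 'archive/'
# directory are hypothetical names, not part of the original snippet.
safe_move("report.csv", "archive/report.csv")  # moves the file
safe_move("report.csv", "archive/report.csv")  # second call logs an error: the source is gone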
def run():
    print("Restoring test data...")
    clear_initial_dir()
    for folder in os.listdir(Path.invalid_dir):
        if folder.startswith('.'):
            continue
        folder_path = os.path.join(Path.invalid_dir, folder)
        for file in os.listdir(folder_path):
            src_path = os.path.join(folder_path, file)
            dest_path = os.path.join(Path.initial_sub_dir, file)
            shutil.move(src_path, dest_path)
    move_back_to_initial_dir(Path.valid_checked_dir)
    move_back_to_initial_dir(Path.valid_sub_dir)
    move_back_to_initial_dir(Path.valid_checked_graded_dir)
    test_zips = os.listdir(Path.initial_sub_dir)
    for i, test_zip in enumerate(test_zips, start=1):
        test_zip_folder = os.path.join(Path.initial_sub_dir, f"test_zip_{i}")
        os.mkdir(test_zip_folder)
        shutil.move(os.path.join(Path.initial_sub_dir, test_zip),
                    os.path.join(test_zip_folder, test_zip))
    for folder in os.listdir(Path.unzipped_dir):
        if folder.startswith('.'):
            continue
        folder_path = os.path.join(Path.unzipped_dir, folder)
        shutil.rmtree(folder_path)
    print("Default test data restored.")
def move_back_to_initial_dir(dir):
    for filename in os.listdir(dir):
        if filename.startswith('.'):
            continue
        src_path = os.path.join(dir, filename)
        dest_path = os.path.join(Path.initial_sub_dir, filename)
        shutil.move(src_path, dest_path)
import shutil
from functools import reduce  # built in on Python 2; needs this import on Python 3
from tempfile import NamedTemporaryFile


def recursive_gtltsum(infiles, outfile):
    """Run gtltsum recursively to sum all livetime cubes."""
    infiles = list(infiles)  # in case a generator was passed in
    if len(infiles) < 2:
        raise Exception('Must sum >= 2 livetime cubes')

    temp = NamedTemporaryFile(suffix='.fits', delete=False)
    tempfile = temp.name
    recursive_gtltsum.i = 1

    def sum_ltcube(infile1, infile2):
        print('Merging file %s and %s (%s/%s)' % (infile1, infile2, recursive_gtltsum.i, len(infiles)))
        recursive_gtltsum.i += 1
        if infile1 == outfile:
            gtltsum(infile1=infile1, infile2=infile2, outfile=tempfile)
            return tempfile
        else:
            gtltsum(infile1=infile1, infile2=infile2, outfile=outfile)
            return outfile

    accum = reduce(sum_ltcube, infiles)
    if accum == tempfile:
        shutil.move(tempfile, outfile)
    else:
        os.remove(tempfile)
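# A hypothetical call to recursive_gtltsum(); the .fits filenames below are
# placeholders, not files from the original project.
recursive_gtltsum(['ltcube_part1.fits', 'ltcube_part2.fits', 'ltcube_part3.fits'],
                  'ltcube_total.fits')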
def rotator(self):
    """Rotation process."""
    date = datetime.datetime.now().date()
    while not self.stopping.is_set():
        time.sleep(0.1)
        cur_date = datetime.datetime.now().date()
        if date != cur_date:
            logs = open(self.dirs_file).read().split('\n')
            self.debug_log('rotate:', logs)
            date_str = str(date)
            cur_date_str = str(cur_date)
            del_date_str = str(cur_date - datetime.timedelta(days=self.life_time))
            for log in logs:
                logfile_src = os.path.join(log, date_str)
                logfile_dst = os.path.join(log, cur_date_str)
                logfile_del = os.path.join(log, del_date_str)
                if os.path.exists(logfile_src):
                    shutil.move(logfile_src, logfile_dst)
                if os.path.exists(logfile_del):
                    os.remove(logfile_del)
            date = cur_date  # advance the local date so each day is rotated only once
            self.date = cur_date
def reformat_files():
    ext = input('Enter format to change to: ')
    os.chdir(path)
    for files in os.listdir('.'):
        shutil.move(files, files + ext)
    done = os.listdir(os.getcwd())
    for fil in done:
        print(fil)
def archive_run(run):
    rppath = os.path.join(run, 'runParameters.xml')
    try:
        rp = XTenRunParametersParser(os.path.join(run, 'runParameters.xml'))
    except OSError:
        logger.warn("Cannot find the runParameters.xml file at {}. This is quite unexpected. "
                    "Please archive the run {} manually.".format(rppath, run))
    else:
        try:
            # Works for recent control software
            runtype = rp.data["Setup"].get("Application Name")
        except KeyError:
            # Should work for older control software
            runtype = rp.data.get("Application Name")
        if "HiSeq X" in runtype:
            destination = CONFIG['storage']['archive_dir']['HiSeqX']
        elif "MiSeq" in runtype:
            destination = CONFIG['storage']['archive_dir']['MiSeq']
        elif "HiSeq" in runtype:
            destination = CONFIG['storage']['archive_dir']['HiSeq']
        else:
            logger.warn("Unrecognized runtype {}, cannot archive the run {}.".format(runtype, run))
            destination = None
        if destination:
            logger.info('archiving run {}'.format(run))
            shutil.move(os.path.abspath(run),
                        os.path.join(destination, os.path.basename(os.path.abspath(run))))
def signalp_runner(input_directory_path, faa_file, output_directory_path):
    # Build the input file path
    input_file = input_directory_path + faa_file
    # Create a "signalp" subdirectory in the output directory if it does not exist yet
    output_subdir = output_directory_path + "signalp/"
    if "signalp" not in os.listdir(output_directory_path):
        os.mkdir(output_subdir)
    # Execute SignalP
    try:
        print("SignalP 5.0 " + faa_file)
        signalp_output = subprocess.check_output(
            ["signalp", "-fasta", input_file, "-org", "gram-", "-format", "short", "-gff3"])
        # Collect the .gff output written to the working directory; a wildcard
        # cannot be passed to a move call directly.
        for gff_file in os.listdir('.'):
            if gff_file.endswith(".gff"):
                shutil.move(gff_file, output_subdir)
        # pilercr_output = subprocess.check_output(["pilercr", "-in", input_file, "-out", output_file, "-noinfo", "-quiet"])
    except subprocess.CalledProcessError as err:
        print("Error running SignalP. Check the input files")
        print("Error thrown:", err.output)
        return False
    print("Completed running SignalP")
    return True
def move_to_auto(dir, auto_dir):
    files = find("m4a", dir=dir)

    def filename(f):
        return f.split("/")[-1]

    for file in files:
        shutil.move(dir + "/" + filename(file), auto_dir + "/" + filename(file))
def move_files(fnames, subdir):
    olddir = path.dirname(fnames[0])
    newdir = path.join(olddir, subdir)
    basenames = [path.basename(fname) for fname in fnames]
    newfnames = [path.join(newdir, bname) for bname in basenames]
    for fname, newfname in zip(fnames, newfnames):
        shutil.move(fname, newfname)
    return newfnames
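# Minimal usage sketch for move_files(); the file names and the 'processed'
# subdirectory are made up for illustration and assume the subdirectory exists.
new_paths = move_files(["/data/a.csv", "/data/b.csv"], "processed")
# -> ["/data/processed/a.csv", "/data/processed/b.csv"]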
def link(self, other, mask='*', dryrun=1):
    for fn in self.targets(mask):
        if not islink(fn):
            arc = other.arc(fn)
            if not exists(arc):
                print("Moving: {} -> {}".format(fn, arc))
                if not dryrun:
                    shutil.move(fn, arc)
            print("Link: {} -> {}".format(fn, arc))
            if not dryrun:
                os.symlink(arc, fn)
def creatZip(self, p):
    # Create the env.zip archive from the directory at path p
    from shutil import rmtree, move
    rmtree('/tmp/env/', ignore_errors=True)
    self.zipname = str(p.resolve() / "env.zip")
    with cd.cd(str(p)):
        subprocess.run(['zip', '-qjr', 'env.zip', '.'])
    move(self.zipname, )  # NOTE: the destination path is not specified here
def creation_doss_rognages_ciels(self, dossier, fichier):
    self.dossier = dossier
    self.fichier = fichier
    os.chdir(self.dossier)
    liste = os.listdir(".")
    mettre_fichier = liste[-1]
    shutil.move(self.fichier, mettre_fichier)
def _MoveFiles(self):
    _allfiles = os.listdir(self._output_dir)
    _files = []
    for _file in _allfiles:
        if _file.endswith(".fil"):
            _files.append(_file)
        if _file.endswith("_v1.fil"):
            print("WARNING: something went wrong. There is a file that ends in '_v1.fil':", _file)
            print("This should not happen under normal circumstances. Please investigate.")
            # sys.exit(0)
    if len(_files) == 0:
        return
    if len(self._valuelist) == 0:
        print("valuelist is empty. Something went wrong. Exit.")
        sys.exit(0)
    _expected_filelist = []
    for i in self._valuelist:
        format_string = "%s_"
        format_string += self._par1_format
        self._sJobLabel = format_string % (self._sJobLabel_Prefix, i)
        _expected_filelist.append("%s_%s.fil" % (self._g4ds_output_prefix, self._sJobLabel))
    # print(_expected_filelist)
    if len(_files) < len(self._valuelist):
        print("Only a subset has been produced:")
        _fileset = set(_files).intersection(_expected_filelist)
        print(_fileset)
        print("Is production currently ongoing? Do nothing and wait.")
        print("Last modification times:")
        for _file in _files:
            print("%s: %s" % (_file, time.ctime(os.path.getmtime(self._output_dir + _file))))
        sys.exit(0)
    else:
        overlap = set(_files).intersection(_expected_filelist)
        if len(overlap) == len(_expected_filelist):
            # the file lists are identical: create the next free version directory v<i>
            i = 1
            while True:
                if os.path.isdir(self._output_dir + "/v%d" % i):
                    i += 1
                else:
                    os.mkdir(self._output_dir + "/v%d" % i)
                    break
            # os.listdir() does not expand wildcards, so move each regular file individually
            for _file in os.listdir(self._output_dir):
                src = os.path.join(self._output_dir, _file)
                if os.path.isfile(src):
                    shutil.move(src, self._output_dir + "/v%d" % i)
            print("Files have been moved to: %s" % (self._output_dir + "/v%d" % i))
def convert_unix(filename):
    # Read with universal newlines (any line ending becomes '\n')...
    with open(filename, "r") as f:
        lines = f.readlines()
    # ...then rewrite the file with Unix line endings via a named temp file.
    t = tempfile.NamedTemporaryFile(mode="w", newline="\n", delete=False)
    for l in lines:
        t.write(l)
    t.close()
    shutil.move(t.name, filename)
def shift():
    # Do the archiving here.
    # Always do it in a try statement, as the task could have been completed
    # already by someone else.
    try:
        now = datetime.datetime.now()
        archive_name = os.path.expanduser(
            "~/.config/openStatus/network/archive/" + now.strftime("%H-%M-%S-%d-%m-%Y"))
        shutil.move(active_timeline, archive_name)
        # Just in case, create a new empty timeline.
        open(active_timeline, "w").close()
        # If the DynamicShift succeeds, return the error code 0.
        return 0
    except Exception:
        # If the DynamicShift fails for some reason, return the error code 1.
        return 1
def replace_link(linkpath, linkdest):
    if os.path.lexists(linkpath):
        if not os.path.islink(linkpath):
            newname = linkpath + "." + str(int(time.time()))
            logger.warn("%s already exists and it is not a symlink "
                        "as expected, renaming it to %s" % (linkpath, newname))
            shutil.move(linkpath, newname)
        else:
            logger.debug("removing existing link %s" % (linkpath))
            os.unlink(linkpath)
    logger.debug("creating link %s pointing to %s" % (linkpath, linkdest))
    os.symlink(linkdest, linkpath)
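# Hypothetical usage of replace_link(): point /opt/app/current at a new release
# directory, replacing whatever link (or stray file) was there before. The
# paths are illustrative only.
replace_link("/opt/app/current", "/opt/app/releases/2.0")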
def rename_data(request, post_id):
    data = Post.objects.get(url_id=post_id)
    image = PostImage.objects.filter(post=data.id)
    title = str(data.title).replace(" ", "")
    unique = randomword(4)
    img_dir = settings.IMAGE_DIR
    thumb_dir = settings.MEDIA_ROOT + 'image/thumb'
    print(img_dir)
    count = 0
    print(image)
    for x in image:
        # print(x.images)
        ext = str(x.images).split('.')[-1]
        if count <= image.count():
            try:
                src = "{0}/{1}".format(settings.MEDIA_ROOT, x.images)
                dest = "{0}/{1}_{2}_img_{3}.{4}".format(img_dir, unique, title, count, ext)
                print("image path before rename: %s" % src)
                print("image path after rename: %s" % dest)
                shutil.move(src, dest)
                src_thumb = "{0}/{1}".format(settings.MEDIA_ROOT, x.thumbs)
                dest_thumb = "{0}/{1}_{2}_img_{3}-150x150thumb.{4}".format(
                    thumb_dir, unique, title, count, ext)
                print("thumb path before rename: %s" % src_thumb)
                print("thumb path after rename: %s" % dest_thumb)
                shutil.move(src_thumb, dest_thumb)
                image_name = "image/{0}_{1}_img_{2}.{3}".format(unique, title, count, ext)
                thumb_name = "image/thumb/{0}_{1}_img_{2}-150x150thumb.{3}".format(
                    unique, title, count, ext)
                print("image %s" % image_name)
                print("thumb %s" % thumb_name)
                new_image = PostImage(post=data, images=image_name, thumbs=thumb_name)
                new_image.save()
                # x.images = ("image/{0}_{1}_img_{2}.{3}".format(unique, title, count, ext))
                # x.save()
                count = count + 1
            except Exception:
                print("error: file does not exist")
    return HttpResponseRedirect("/")
def RunOneRun(n, settings):
    runStr = "%04d" % n
    print("----------------------------------------------------")
    print("----------------------------------------------------")
    print("   Now starting run %s" % runStr)
    print("----------------------------------------------------")
    print("----------------------------------------------------")
    os.chdir("Run%s" % runStr)
    os.system("GEB_HFC Global.dat")
    shutil.move("HFC.dat", "HFC_%s" % runStr)
    #os.system("GrROOT_changing -i HFC_%s.dat -o raw.root -s %s -rt" % (runStr,settings))
    #os.system(" Histos -i raw.root -o rawmode2_histos.root")
    #os.system("Calculate -i raw.root -o cal.root -s %s" % settings)
    #os.chdir("../kathrin")
    #os.system("ln -s ../Run%s/cal.root cal_run%s" % (runStr,runStr))
    os.chdir(starting_folder)
from datetime import date
from glob import glob
from os import makedirs
from os.path import dirname, exists
from shutil import move


def relocate(timestamp):
    dateStamp = date.today()
    year = str(dateStamp.year)
    month = str(dateStamp.month)
    day = str(dateStamp.day)
    newDir = (archivesDirectory + year + "/" + month + "/" + day + "/" + timestamp + "/")
    dir = dirname(newDir)
    if not exists(dir):
        makedirs(dir)  # create the nested year/month/day directories
    # move() does not expand wildcards, so move each entry individually
    for entry in glob(cwd + "*"):
        move(entry, newDir)
def write(self):
    if self.args['writemode'] == 'replace':
        if os.path.isfile(self.args['output']) and self.args['output'] != sys.stdout:
            shutil.move(self.args['output'], self.args['output'] + '.bak')
    for h in self.keys.keys():
        for i in self.keys[h]:
            _s = '# Automatically added via hostscan.py\n{0} {1} {2}\n'.format(
                i['host'], i['type'], i['key'])
            if self.args['output'] == sys.stdout:
                print(_s, end='')
            else:
                with open(self.args['output'], 'a') as f:
                    f.write(_s)
    os.chmod(self.args['output'], 0o644)
    os.chown(self.args['output'],
             pwd.getpwnam(self.args['chown_user'])[2],
             grp.getgrnam(self.args['chown_grp'])[2])
    return ()
from os import listdir
from os.path import join
from shutil import move


def move_file():
    file_liste = listdir()
    fichiers_liste = [elmt for elmt in file_liste if elmt.endswith('.md')]
    for i in range(len(fichiers_liste)):
        f = open(fichiers_liste[i], 'r')
        date_trouvee = f.readlines(1)
        f.close()
        date_trouvee = "".join(date_trouvee)
        date_trouvee = date_trouvee.split('/')
        jour, mois, annee = tuple(date_trouvee)
        mois_liste = [
            'Janvier', 'Fevrier', 'Mars', 'Avril', 'Mai', 'Juin', 'Juillet',
            'Août', 'Septembre', 'Octobre', 'Novembre', 'Decembre'
        ]
        mois = mois_liste[int(mois) - 1]
        for dossier in file_liste:
            if str(jour + ' ' + mois) in dossier:
                dossier_destination = dossier
                path_destination = join(dossier_destination, fichiers_liste[i])
                move(fichiers_liste[i], path_destination)
import ast
import os
import shutil

urls = []
with open('downloads.txt') as f:
    for line in f:
        items = ast.literal_eval(line)  # parse the literal tuple on each line safely
        urls.append(items[0])

for item in os.listdir('.'):
    for url in urls:
        if url in item:
            shutil.move(item, 'maxers/' + item)
# rem, move and t are assumed to be aliases for os.remove, shutil.move and
# time.sleep defined earlier in the script.
try:
    # if the file does not exist, it will bring up an error
    # these remove the current file because on windows,
    # moving a file to another file's directory even if they
    # are the same file, brings up an error.
    rem(path + "/release/Read.pyc")
    rem(path + "/release/Settings.pyc")
    rem(path + "/release/Socket.pyc")
    rem(path + "/release/Commands.pyc")
    rem(path + "/release/Run.pyc")
    rem(path + "/release/Threads.pyc")
except:
    t(.5)  # waits half a second (to let the HDD remove the .pyc files)
    # moves the .pyc in the building directory to the release directory
    move("Read.pyc", path + "/release/Read.pyc")
    move("Settings.pyc", path + "/release/Settings.pyc")
    move("Socket.pyc", path + "/release/Socket.pyc")
    move("Commands.pyc", path + "/release/Commands.pyc")
    move("Run.pyc", path + "/release/Run.pyc")
    move("Threads.pyc", path + "/release/Threads.pyc")
    quit()  # exits the program

# identical to the except block
t(.5)
move("Read.pyc", path + "/release/Read.pyc")
move("Settings.pyc", path + "/release/Settings.pyc")
move("Socket.pyc", path + "/release/Socket.pyc")
move("Commands.pyc", path + "/release/Commands.pyc")
move("Run.pyc", path + "/release/Run.pyc")
move("Threads.pyc", path + "/release/Threads.pyc")
def run(self):
    files = glob.glob(config.holding_dir + "/*EF*.xml")
    for xml in files:
        st = os.stat(xml)
        image = xml.replace('.xml', '.jpg')
        if not os.path.exists(image):
            print('Corresponding image not found for', xml, 'expected', image)
            continue
        if time.time() - st.st_mtime > 10:
            tree = ET.parse(xml)
            root = tree.getroot()
            # deal with xml namespace
            ns = root.tag.split('}')[0].strip('{')
            nss = {'oppf': ns}
            inspectionid = root.find('oppf:ImagingId', nss).text
            container = self._get_container(inspectionid)
            if container is None:
                continue
            new_path = '{root}/data/{year}/{prop}-{vn}'.format(
                root=config.upload_dir, year=container['year'],
                prop=container['prop'], vn=0)
            if not os.path.exists(new_path):
                print('Upload location for image doesnt exist', new_path)
                continue
            position = self.get_position(root.find('oppf:Drop', nss).text, container['platetype'])
            if position is None:
                continue
            sampleid = self._get_sampleid(position, container['containerid'])
            if sampleid is None:
                continue
            # the XML values are strings, so convert before dividing
            mppx = float(root.find('oppf:SizeInMicrons', nss).find('Width').text) / \
                float(root.find('oppf:SizeInPixels', nss).find('Width').text)
            mppy = float(root.find('oppf:SizeInMicrons', nss).find('Height').text) / \
                float(root.find('oppf:SizeInPixels', nss).find('Height').text)
            db.pq("""INSERT INTO BLSampleImage (blsampleid, micronsperpixelx, micronsperpixely, containerinspectionid)
                     VALUES (%s,%s,%s,%s)""", [sampleid, mppx, mppy, inspectionid])
            iid = db.id()
            # Use blsampleimageid as file name as we are sure this is unique
            new_file = '{path}/{iid}.jpg'.format(path=new_path, iid=iid)
            db.pq("""UPDATE BLSampleImage SET imagefullpath=%s WHERE blsampleimageid=%s""", [new_file, iid])
            # move image
            copyfile(image, new_file)
            # clear up
            shutil.move(image, image.replace(config.holding_dir, config.holding_dir + '/processed'))
            shutil.move(xml, xml.replace(config.holding_dir, config.holding_dir + '/processed'))
            #os.unlink(image)
            #os.unlink(f)
    time.sleep(10)
except IOError:
    pass  # no snp list

convert_impute2_to_beagle(
    open("%s.sample" % chro),
    open("%s.haps" % chro),
    gzip.open("T%s.gz" % chro, "w"),
    is_phased=True,
)
if snps is not None or inds is not None:
    project_beagle_phase(
        gzip.open("%s.gz" % chro, "w"),
        gzip.open("T%s.gz" % chro),
        ind_retain=inds,
        snp_retain=snps,
        want_phased=True,
        is_phased=True,
    )
else:
    shutil.move("T%s.gz" % chro, "%s.gz" % chro)
sys.exit(0)

if os.path.exists("inds"):
    print("projecting individuals")
if os.path.exists("snps"):
    print("projecting snps")
lexec = MEGA.executor
for i in range(maxChro):
    k = i + 1
    lexec.submit("python", MEGA.phasingScripts + "/toBeagle.py %d" % k)
lexec.wait(True)
import shutil


def move_file(src, dst):
    shutil.move(src, dst)  # os has no move(); shutil.move is the general-purpose choice
    return dst
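# A small illustration of the two standard-library move primitives; the paths
# are hypothetical. os.rename() only works within one filesystem, while
# shutil.move() falls back to copy-and-delete across devices.
import os
import shutil

shutil.move("/tmp/report.csv", "/mnt/archive/report.csv")  # works across filesystems
os.rename("notes.txt", "notes.bak")                        # fast rename on the same filesystem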
# split data
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# generating train.txt
for i, j in zip(X_train, y_train):
    with open(os.path.join(abs_path, 'trainlist01.txt'), 'a') as f:
        f.write(i + " " + j)
        f.write('\n')

# generating test.txt
for i, j in zip(X_test, y_test):
    with open(os.path.join(abs_path, 'testlist01.txt'), 'a') as f:
        f.write(i)
        f.write('\n')

# move the annotation files into the annotation directory
shutil.move("D:/TESIS/ULTIMO/data/classInd.txt", "D:/TESIS/ULTIMO/data/annotation/")
shutil.move("D:/TESIS/ULTIMO/data/testlist01.txt", "D:/TESIS/ULTIMO/data/annotation/")
shutil.move("D:/TESIS/ULTIMO/data/trainlist01.txt", "D:/TESIS/ULTIMO/data/annotation/")
os.remove("D:/TESIS/ULTIMO/data/trainval.txt")

# # generating train.txt
# for i,label in enumerate(labels):
#     video_names=os.listdir(os.path.join(abs_path,image_folder,'val',label))
#     for video_name in video_names:
#         with open(os.path.join(abs_path,'val.txt'),'a') as f:
#             f.write(os.path.join(abs_path,image_folder,'val',label,video_name)+ " " + dict_labels[label])
#             f.write('\n')
def save_cache(self, save_file):
    self.connection.close()
    shutil.move(self.cachename, save_file)
    self.connection = sqlite.connect(save_file)
    self.cursor = self.connection.cursor()
    self.cachename = save_file
result = cmd("sudo make html", fail=False) with open("build_html.log", "w", "utf-8") as log_file: log_file.write("Output:\n{0}\nError:\n{1}".format(result["stdout"], result["stderr"])) if result["returncode"] != 0: print_error("Unable to generate HTML version of the report, see build_html.log") if args.pdf: print(HELP_PDF) result = cmd("sudo make latexpdf", fail=False) with open("build_pdf.log", "w", "utf-8") as log_file: log_file.write("Output:\n{0}\nError:\n{1}".format(result["stdout"], result["stderr"])) if result["returncode"] != 0: print_error("Unable to generate PDF version of the report, see build_pdf.log") print("Move PDF into releases directory") for pdf_filename in glob.glob(join(DAVID_REPORT_BUILD_PATH, "*.pdf")): os.move(pdf_filename, DAVID_REPORT_RELEASE_PATH) # pecho u'Compress report' # cd "$DAVID_REPORT_RELEASE_PATH" || xecho "Unable to find path $DAVID_REPORT_RELEASE_PATH" # gs -sDEVICE=pdfwrite -dCompatibilityLevel=1.4 -dPDFSETTINGS=/ebook -dNOPAUSE -dQUIET -dBATCH \ # -sOutputFile=MA_DavidFischer_OSCIED_compressed.pdf MA_DavidFischer_OSCIED.pdf if args.wiki: print(HELP_WIKI) file_regex = re.compile(r":file:`([^`]*)`") include_regex = re.compile(r"(?P<space>\s*)\.\. literalinclude::\s+(?P<link>\S+)\s*") option_regex = re.compile(r"(?P<space>\s*):(?P<name>\S+):\s+(?P<value>\S+)\s*") c_data = "".join(filter(lambda l: ":orphan:" not in l, open(DAVID_REPORT_COMMON_FILE, "r", "utf-8"))) for rst_src_filename in glob.glob(join(WIKI_SOURCE_PATH, "*.rst")): rst_dst_filename = join(WIKI_BUILD_PATH, basename(rst_src_filename)) with open(rst_src_filename, "r", "utf-8") as rst_src_file:
def move(paths, rename_func, do_move=False, **kwargs):
    """Convenient interface for moving multiple paths.

    rename_func receives the entire path, not only the basename, but it may
    alter only the basename, not the containing directory: files cannot change
    containing directories through the rename.

    :param paths: paths to act on.
        Not an iterator, so that sorting can be done. Relative paths are
        converted to full paths before rename_func acts on them.
        Rationale: absolute paths are passed even though only the basename can
        be renamed, so that functions that use data inside the file can work.
        If you want a function that uses only basename information, use the
        act_basename_only decorator.
    :type paths: list of strings

    :param rename_func: rename function that returns the *full path* for a
        given *full path*.
    :type rename_func: function with signature (string, *args, **kwargs)

    :param do_move: if True, really renames; else, only outputs the changes
        that would be done.
    :type do_move: boolean

    :param mv_func: if given, uses this function to rename files from the old
        to the new name. Default: os.rename.
        This function may assume that:
        - the target parent directory exists
        - the target file does not exist
        It should raise an exception if the rename fails.
    :type mv_func: func(string, string). Side effect: move old_path to new_path.

    :param can_change_dirs: default: False.
        If True, the new path can be in a different dir from the old one;
        else, if this is attempted, a warning is logged and the move is skipped.
        Example:
            old path: /usr/file.py
            new path: /home/file.py
        If True, the move works.
    :type can_change_dirs: boolean

    :param make_missing_dirs: default: False.
        If True, automatically makes any nonexistent directories that would be
        necessary for the new path; else, logs a warning and skips the current
        move. This option implies can_change_dirs.
        If a file is going to be moved to a different parent dir, and the
        parent dir is an existing file, it is only moved if overwrite == True
        and make_missing_dirs == True.
        Example:
            existing dirs: /usr/
            old path:      /usr/file.py
            new path:      /usr/non/existant/dirs/file.py
        If True, creates:
            /usr/non/
            /usr/non/existant/
            /usr/non/existant/dir/
        and then moves the old path there: /usr/non/existant/dir/file.py
    :type make_missing_dirs: boolean

    :param overwrite: default: False.
        If True, overwrites existing files without asking; else, logs a warning
        and skips the move. If a file is going to be moved to a different
        parent dir, and the parent dir is an existing file, it is only moved if
        overwrite == True and make_missing_dirs == True.
    :type overwrite: boolean

    :param sort_func: default: sorted(reverse=True).
        Function used to sort paths, and therefore decide the order in which
        paths are renamed. Clearly, this function can have an impact on the
        rename results.
        Example:
            existing files: /a /b
            rename func: lambda p: 'c'
        In this case, either a or b will be renamed, depending on which goes
        first. If overwrite is True, loss of data would occur.
        Rationale behind the default: with sorted(reverse=True), when renaming
        basenames, which is the major use case, parent dirs are always renamed
        before the files inside them.
    :type sort_func: function([string, ...])

    TODO
    ====
    - THE MOVE DIRS OPTION IS BUGGED, DON'T USE IT!!!
    - rename do_move to do_mv
    - add act on abspath / act on relpath option
    """
    sort_func = kwargs.pop("sort_func", sorted)
    func_args = kwargs.pop("func_args", [])
    func_kwargs = kwargs.pop("func_kwargs", {})
    make_missing_dirs = kwargs.pop("make_missing_dirs", True)
    can_change_dirs = kwargs.pop("can_change_dirs", True) or make_missing_dirs
    mv_func = kwargs.pop("mv_func", os.rename)
    overwrite = kwargs.pop("overwrite", False)

    paths = list(map(os.path.abspath, paths))
    paths = sort_func(paths, reverse=True)

    warnings = []
    errors = []
    for path in paths:
        makedirs = False
        head, bname = os.path.split(path)
        new_bname = rename_func(path, *func_args, **func_kwargs)
        new_path = os.path.join(head, new_bname)
        if new_path != path:
            logging.info("%s\n%s\n" % (path, new_path))
            # make sure the new path is clear
            if os.path.exists(new_path):
                if overwrite and do_move:
                    try:
                        remove_recursive(new_path)
                    except Exception as e:
                        errors.append("os error: could not remove existing path"
                                      "\n%s\n%s\n\n%s" % (path, new_path, e))
                        continue
                else:
                    warnings.append("new path already exists. rename skipped\n"
                                    "old path: %s\nnew path: %s" % (path, new_path))
                    continue
            # make sure the new dir exists
            old_dir = os.path.split(path)[0]
            new_dir = os.path.split(new_path)[0]
            if old_dir != new_dir:
                continue  # TODO this guard skips directory changes until the tests are done!!!
                if can_change_dirs:
                    if os.path.exists(new_dir):
                        if not os.path.isdir(new_dir):  # it is a file
                            if make_missing_dirs:
                                makedirs = True
                                if os.path.commonprefix([path, new_dir]) != path:
                                    # path is not an ancestor of the new dir
                                    if overwrite:
                                        try:
                                            shutil.rmtree(new_path)
                                        except Exception as e:
                                            print(e, "TODO")
                                            continue
                                    else:
                                        print(overwrite_error)  # TODO
                                elif overwrite:
                                    # new_dir == path: must move the old path to a temp path!
                                    # TODO get temppath
                                    try:
                                        shutil.move(path, temppath)
                                    except Exception as e:
                                        errors.append("TODO %s %s %s" % (path, new_path, e))
                                        continue
                                    path = temppath
                            else:
                                print("missing dirs warn TODO")
                    elif make_missing_dirs:
                        makedirs = True
                    else:
                        print("missing dirs warn TODO")
            # now that the area is clear, make the missing dirs if needed
            if makedirs and do_move:
                try:
                    os.makedirs(new_dir)  # ensure dir exists
                except Exception as e:
                    errors.append("os error: could not create nonexistent dirs for new path."
                                  "\n%s\n%s\n\n%s" % (path, new_path, e))
                    continue
            else:
                warnings.append("new dir does not exist\n%s\n%s\n" % (path, new_path))
                continue
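# A hypothetical way this move() helper could be called: lower-case every
# basename in the current directory, doing a dry run first. rename_func gets
# the full path but may only change the basename; the helper below is made up
# for illustration.
import os


def lowercase_basename(full_path):
    head, bname = os.path.split(full_path)
    return os.path.join(head, bname.lower())


candidates = [os.path.join(".", name) for name in os.listdir(".")]
move(candidates, lowercase_basename)                 # dry run: only logs the planned renames
move(candidates, lowercase_basename, do_move=True)   # actually renames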
def splitall(path):
    allparts = []
    while 1:
        parts = os.path.split(path)
        if parts[0] == path:  # sentinel for absolute paths
            allparts.insert(0, parts[0])
            break
        elif parts[1] == path:  # sentinel for relative paths
            allparts.insert(0, parts[1])
            break
        else:
            path = parts[0]
            allparts.insert(0, parts[1])
    return allparts


root = '/media/katie/storage/7508/func'
for d in os.listdir(root):
    for letter in ['L', 'M']:
        if letter in d:
            old_path = os.path.join(root, d)
            dir_name = 'run_' + d[d.find(letter) - 1:d.find(letter) + 2]
            new_path = os.path.join(root, dir_name)
            print(old_path)
            print(new_path)
            shutil.move(old_path, new_path)
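# What splitall() returns for the root path used above:
print(splitall('/media/katie/storage/7508/func'))
# ['/', 'media', 'katie', 'storage', '7508', 'func']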
from os import path
from shutil import move

with open("Package.swift") as file:
    data = file.readlines()

for i in range(len(data)):
    if "//dev" in data[i]:
        data[i] = data[i][2:]
    if "//nodev" in data[i]:
        data[i] = "//" + data[i]

with open("Package.swift", "w") as file:
    file.writelines(data)

with open("*****@*****.**") as file:
    data = file.readlines()

for i in range(len(data)):
    if "//dev" in data[i]:
        data[i] = data[i][2:]
    if "//nodev" in data[i]:
        data[i] = "//" + data[i]

with open("*****@*****.**", "w") as file:
    file.writelines(data)

if path.exists("Package.resolved"):
    move("Package.resolved", "Package.resolved.nodanger")
if path.exists("Package.resolved.danger"):
    move("Package.resolved.danger", "Package.resolved")