def output(self, lastTime=False):
    """Run the original output step, then expose the log directory through a
    symlink at ~/public_html/log/<name>/<start time>.

    Existing links/files at the target are renamed to <target>.backup first.
    """
    # original output function
    super().output(lastTime)
    # link all the output to ~/public_html/log/<name>/<date>/
    # os.path.exists('~/...') is always False for a literal '~'; expand it
    home_log_dir = os.path.expanduser(os.path.join('~/public_html/log', self.name))
    if not os.path.exists(home_log_dir):
        os.makedirs(home_log_dir)
    sim_dir = os.path.join(home_log_dir, mp.strTime(self.start_time))
    logm('Create symbolic link at for log files at', sim_dir)
    abs_logdir = os.path.dirname(os.path.realpath(self.output_filename))
    # os.exists does not exist -> os.path.exists
    if os.path.exists(sim_dir) and not os.path.samefile(sim_dir, abs_logdir):
        if os.path.islink(sim_dir):
            os.rename(sim_dir, sim_dir + '.backup')
            logwarn('link exists. Backup link as {:s}.backup -> {:s}'
                    .format(sim_dir, os.path.realpath(os.path.expanduser(sim_dir))))
        else:
            os.rename(sim_dir, sim_dir + '.backup')
            logwarn('file or dir exists. Backup as {:s}.backup'.format(sim_dir))
    elif not os.path.exists(sim_dir):
        # the original passed dir_fd=home_log_dir (a str; dir_fd needs a file
        # descriptor) and only the bare date as the link name -- create the
        # link directly at its absolute path instead
        os.symlink(abs_logdir, sim_dir, target_is_directory=True)
def get_devtls_tool_path_list(self, devtls_path):
    """Return the absolute tool directories bundled under *devtls_path* for
    the current platform; an empty list when the path is missing or the
    platform is unsupported/not implemented."""
    if not os.path.exists(devtls_path):
        return []
    devtls_path = os.path.abspath(devtls_path)
    path_list = []
    system = platform.system()
    if system == 'Windows':
        if os.path.exists(os.path.join(devtls_path, '@Win32')):
            # pinned tool versions shipped with the devtls bundle
            for rel in ('@Win32/tool/7zip/18.05',
                        '@Win32/tool/cmake/3.12.2',
                        '@Win32/tool/llvm/7.0.1',
                        '@Win32/tool/python/3.7',
                        '@Win32/tool/rclone/1.45'):
                path_list.append(os.path.join(devtls_path, rel))
    elif system == 'Linux':
        # os.exists does not exist -> os.path.exists
        if os.path.exists(os.path.join(devtls_path, '@Linux')):
            print('not implement')
    elif system == 'Darwin':
        if os.path.exists(os.path.join(devtls_path, '@MacOS')):
            print('not implement')
    return path_list
def _setup_output_dir(output_root):
    """Create (if needed) ``output_root/<today's date>`` and return its
    absolute path."""
    # os.exists does not exist -> os.path.exists
    if not os.path.exists(output_root):
        os.mkdir(output_root)
    results_dir = os.path.join(output_root, str(datetime.date.today()))
    if not os.path.exists(results_dir):
        os.mkdir(results_dir)
    return os.path.abspath(results_dir)
def create_paths():
    """Collect per-style paths inside the A, output, and B directories.

    Returns (input_paths, output_paths, b_dir_paths), or None when the A or
    B directory is missing. Relies on a module-level ``a`` args namespace.
    """
    # os.exists does not exist -> os.path.exists
    if not os.path.exists(a.a_dir):
        print("No existe el directorio A")
        return
    if not os.path.exists(a.b_dir):
        print("No existe el directorio B")
        return
    input_dir = a.a_dir
    b_dir = a.b_dir
    if not os.path.exists(a.output_dir):
        os.mkdir(a.output_dir)
    output_dir = a.output_dir
    # list the input dir once; output paths mirror the input style names
    styles = os.listdir(input_dir)
    input_paths = [os.path.join(input_dir, style) for style in styles]
    output_paths = [os.path.join(output_dir, style) for style in styles]
    b_dir_paths = [os.path.join(b_dir, style) for style in os.listdir(b_dir)]
    return input_paths, output_paths, b_dir_paths
def copy_to(paths, dir):
    """Copy every special file found under each existing path into *dir*,
    creating *dir* first when needed."""
    # `if !...` is not Python syntax; os.exists does not exist -> os.path.exists
    if not os.path.exists(dir):
        os.mkdir(dir)
    for path in paths:
        if os.path.exists(path):
            for file in get_special_paths(path):
                shutil.copy(file, dir)
def run(self):
    """Start the configured webui backend, falling back to an alternative
    server when the configured one is unavailable."""
    # `global` must precede the assignment (the import) it governs
    global webinterface
    import pyload.webui as webinterface
    reset = False
    # os.exists does not exist -> os.path.exists
    if self.https and (not os.path.exists(self.cert) or not os.path.exists(self.key)):
        log.warning(_("SSL certificates not found."))
        self.https = False
    if self.server in ("lighttpd", "nginx"):
        log.warning(
            _("Sorry, we dropped support for starting %s directly within pyLoad"
              ) % self.server)
        log.warning(
            _("You can use the threaded server which offers good performance and ssl,"
              ))
        log.warning(
            _("of course you can still use your existing %s with pyLoads fastcgi server"
              ) % self.server)
        log.warning(
            _("sample configs are located in the pyload/web/servers directory"
              ))
        reset = True
    elif self.server == "fastcgi":
        try:
            import flup
        except Exception:
            log.warning(
                _("Can't use %(server)s, python-flup is not installed!")
                % {"server": self.server})
            reset = True
    if reset or self.server == "lightweight":
        if os.name != "nt":
            try:
                import bjoern
            except Exception as e:  # py3-compatible except syntax
                log.error(_("Error importing lightweight server: %s") % e)
                log.warning(
                    _("You need to download and compile bjoern, https://github.com/jonashaag/bjoern"
                      ))
                log.warning(
                    _("Copy the bjoern.so file to lib/Python/Lib or use setup.py install"
                      ))
                log.warning(
                    _("Of course you need to be familiar with linux and know how to compile software"
                      ))
                self.server = "auto"
        else:
            self.core.log.info(
                _("Server set to threaded, due to known performance problems on windows."
                  ))
            self.core.config.set("webui", "server", "threaded")
            self.server = "threaded"
def _save_sample(self, image, mask, out_path):
    """Write the image/mask arrays into *out_path* as .npy files, skipping
    files that already exist unless ``self.overwrite`` is set."""
    image_path = os.path.join(out_path, 'image.npy')
    mask_path = os.path.join(out_path, 'mask.npy')
    if self.overwrite:
        np.save(image_path, image.astype(np.float32))
        np.save(mask_path, mask.astype(bool))
    else:
        # os.exists does not exist -> os.path.exists; only fill in gaps
        if not os.path.exists(image_path):
            np.save(image_path, image.astype(np.float32))
        if not os.path.exists(mask_path):
            np.save(mask_path, mask.astype(bool))
def check(self):
    """ Checks parameters and paths """
    # raise with messages so failures are diagnosable; exception type is
    # unchanged for callers that catch Exception
    if 'DATA' not in PATH:
        raise Exception("PATH.DATA is not set")
    if not exists(PATH.DATA):
        raise Exception("PATH.DATA does not exist: %s" % PATH.DATA)
    if 'OUTPUT' in PATH:
        assert exists(PATH.OUTPUT)
def __afterchange__(self, input_file, output_file, ext, success):
    """Post-processing cleanup: drop the backup on success, otherwise roll
    back by deleting the produced output and the backup."""
    backup = input_file.replace('.nii.gz', '_pre_{}.nii.gz'.format(ext))
    if success:
        # shutil.copy(output_file, input_file)
        # os.remove(output_file) - We don't remove this because that's how we
        # check if we did the process already
        os.remove(backup)
    else:
        # shutil.copy(backup, input_file)
        # os.exists does not exist -> os.path.exists
        if os.path.exists(output_file):
            os.remove(output_file)
        if os.path.exists(backup):
            os.remove(backup)
def get_test_data(self, path):
    """Return the local file/dir for *path* inside the test-data directory,
    downloading it from *url* on first use."""
    fn = op.join(test_data, path)
    # os.exists does not exist -> os.path.exists
    if not os.path.exists(test_data):
        os.mkdir(test_data)
    if not os.path.exists(fn):
        # a trailing slash marks a directory download
        if path.endswith('/'):
            util.download_dir(op.join(url, path), fn)
        else:
            util.download_file(op.join(url, path), fn)
    return fn
def saveSettings(purgeAr, purgeNonAr, purgeProj, months1, months2, months3, recDir, projDir):
    """
    settings file format follows:
    purgeAr(1 = on, 0 = off),months1
    purgeNonAr(1 = on, 0 = off),months2
    purgeProj(1 = on, 0 = off),months3
    Record Directory
    Project Directory
    """
    global displayVar
    global displayVar6
    # validate the month fields that are actually in use
    try:
        if purgeAr == 1:
            int(months1.strip())
        if purgeNonAr == 1:
            int(months2.strip())
        if purgeProj == 1:
            int(months3.strip())
    except Exception:
        tkMessageBox.showerror("Invalid Input",
                               "The number of months is invalid, please go back or try again.")
        return
    # os.exists does not exist, so the old `try: os.exists(recDir)` always
    # raised and unconditionally ran the fallback; check the path explicitly
    if not os.path.exists(recDir):
        try:
            ensureRecProj(recDir, projDir)
        except Exception:
            tkMessageBox.showerror("Invalid Input",
                                   "The record directory is empty, please go back or try again.")
    writelines = []
    writelines.append(str(purgeAr) + ',' + str(months1).strip() + '\n')
    writelines.append(str(purgeNonAr) + ',' + str(months2).strip() + '\n')
    writelines.append(str(purgeProj) + ',' + str(months3).strip() + '\n')
    writelines.append(str(recDir) + '\n')
    writelines.append(str(projDir) + '\n')
    fw = open('settings', 'w')
    for line in writelines:
        fw.write(line)
    fw.close()
    displayVar = recDir
    displayVar6 = projDir
    tkMessageBox.showinfo("Saved", "Settings Saved.")
def directory(self, directory):
    """Set the IRAFSet directory for this object.

    An existing directory is used as-is; otherwise it is created when its
    parent exists. ``None`` clears the setting.
    """
    if directory is not None:
        dbase, fname = os.path.split(directory)
        # "/".join, not "".join: keep the separators in the parent path
        dparent = "/".join(dbase.split("/")[:-1])
        if fname is not None:
            # os.exists does not exist -> os.path.exists
            if os.path.exists(directory):
                self._directory = directory
            elif os.path.exists(dparent):
                os.mkdir(directory)
                self._directory = directory
            else:
                raise ValueError("Can't find directory to set: %s" % dbase)
    else:
        self._directory = None
def load_camera_settings(loc_dic):
    """Resolve camera settings, preferring *loc_dic* over baked-in defaults.

    NOTE(review): this snippet only resolves ``caps_path``; the other default
    values are defined but never returned -- the function may be truncated.
    """
    # defaults for when config file not found
    s_val = "20"
    c_val = "20"
    g_val = "20"
    b_val = "50"
    x_dim = 1600
    y_dim = 1200
    additonal_commands = "-d/dev/video0 -w"
    loc_settings = "/home/pi/Pigrow/config/camera_settings.txt"
    caps_path = "/home/pi/Pigrow/caps/"
    try:
        caps_path = loc_dic['caps_path']
    except Exception:
        # os.exists does not exist -> os.path.exists
        if os.path.exists(caps_path):
            print("Using default")
def __init__(self,directory=None): super(IRAFFileSet, self).__init__() self._directory = tempfile.mkdtemp() if directory is not None: dbase,fname = os.path.split(directory) dparent = "".join(dbase.split("/")[:-1]) if fname is not None: if os.exists(directory): self._directory = directory elif os.exists(dparent): os.mkdir(directory) self._directory = directory self._files = [] self._open = True LOG.log(2,"IRAF File Set created with directory %s" % self._directory)
def prePareDir(fullotapath):
    """Map an OTA zip path onto the local custom directory tree.

    Returns the destination file path, or None when the path layout does not
    match. Depends on module-level mtk_custom/spr_custom/project_map/currdir.
    """
    dirslist = fullotapath.split("/")
    # `filename[4]` referenced an undefined name; the zip check belongs to
    # the last path component, dirslist[4]
    if len(dirslist) != 5 or not dirslist[4].endswith("zip"):
        return None
    projectname = dirslist[2]
    projectinfos = projectname.split("_")
    localpath = ''
    if len(projectinfos) == 4:
        if isMtkProject(projectname):
            localpath = mtk_custom + projectinfos[0].upper() + "-" + projectinfos[3].upper()
        else:
            localpath = spr_custom + projectinfos[0].upper() + "-" + projectinfos[3].upper()
    else:
        for i in project_map.keys():
            if projectname.upper() == i:
                localpath = project_map[i]
                break
    # os.exists does not exist -> os.path.exists
    if localpath != '' and not os.path.exists(localpath):
        os.makedirs(localpath)
    customdirs = currdir.split("/")
    if len(customdirs) == 5:
        newloacalpath = localpath + "/" + customdirs[3] + "/" + customdirs[4]
        os.makedirs(newloacalpath)
        return newloacalpath + "/" + dirslist[4]
    return None
def get_hcp_data(data_dir, raw):
    """Return (mask, func_filenames) for the HCP dataset, either from the
    raw unmasked dump or via the fetch_hcp_rest loader."""
    # os.exists does not exist -> os.path.exists
    if not os.path.exists(join(data_dir, 'HCP_extra')):
        raise ValueError(
            'Please download HCP_extra folder using make download-hcp_extra'
            ' first.')
    if raw:
        mask = join(data_dir, 'HCP_extra/mask_img.nii.gz')
        try:
            mapping = json.load(
                open(join(data_dir, 'HCP_unmasked/mapping.json'), 'r'))
        except FileNotFoundError:
            raise ValueError(
                'Please unmask the data using hcp_prepare.py first.')
        func_filenames = sorted(list(mapping.values()))
    else:
        hcp_dataset = datasets.fetch_hcp_rest(data_dir=data_dir,
                                              n_subjects=2000)
        mask = hcp_dataset.mask
        # list of 4D nifti files for each subject
        func_filenames = hcp_dataset.func
        # Flatten it (the original built a list of generators, not a flat list)
        func_filenames = [record for subject in func_filenames
                          for record in subject]
        # print basic information on the dataset
        print('First functional nifti image (4D) is at: %s' %
              hcp_dataset.func[0])  # 4D data
    return mask, func_filenames
def main(input_file, attribution_file, label_map_file, num_classes, num_samples, overwrite):
    """Generate random demo inputs/attributions per class and write a class
    label map as JSON; optionally clear existing output files first."""
    label_map = [{
        'index': i,
        'word_net_id': f'{i:08d}',
        'name': f'Class {i:d}',
    } for i in range(num_classes)]
    with open(label_map_file, 'w') as fd:
        json.dump(label_map, fd)
    if overwrite:
        for fname in (input_file, attribution_file):
            # os.exists does not exist -> os.path.exists
            if os.path.exists(fname):
                os.remove(fname)
    for label_s in range(num_classes):
        data = np.random.uniform(0, 1, size=(num_samples, 3, 32, 32))
        label = np.array([label_s] * num_samples)
        attrib = np.random.uniform(-1, 1, size=(num_samples, 3, 32, 32))
        out = np.random.uniform(0, 1, size=(num_samples, num_classes))
        append_input(input_file, data, label)
        append_attribution(attribution_file, attrib, out, label)
def __init__(self, filename):
    """Open *filename* as a WAV source, extracting the audio to a temp .wav
    first when the input is not already a .wav file."""
    import os
    from moviepy.Clip import Clip
    if not filename.endswith('.wav'):
        name, ext = os.path.splitext(os.path.basename(filename))
        # NOTE(review): temp_wav / temp / fps / bitrate are not defined in
        # this scope -- confirm where they come from
        if temp_wav is None:
            filename = Clip._TEMP_FILES_PREFIX + filename + '.wav'
        # os.exists does not exist -> os.path.exists
        if not os.path.exists(filename):
            ffmpeg.extract_sound(filename, temp, fps, bitrate)
    self.filename = filename
    wavf = wave.open(filename)
    self.nchannels = wavf.getnchannels()
    self.fps = wavf.getframerate()
    self.nframes = wavf.getnframes()
    self.nbytes = wavf.getsampwidth()
    self._wavfile = wavf
    self.duration = (1.0 * self.nframes / self.fps)
    self.end = self.duration
    # sample width in bytes -> numpy dtype name
    self.np_dtype = {1: 'int8', 2: 'int16', 4: 'int32'}[self.nbytes]
    self.buffersize = buffersize
    self._buffer_around(0)
def submit(genotype_file, coverage_file='', username=None, password=None):
    """Fetch a genotype file (optionally with HTTP basic auth), store it
    under UPLOAD_DIR keyed by its SHA-1, submit it, and return the hash."""
    # get genotype file
    r = urllib2.Request(genotype_file)
    if username is not None:
        h = "Basic %s" % base64.encodestring('%s:%s' % (username, password)).strip()
        r.add_header("Authorization", h)
    handle = urllib2.urlopen(r)
    # write it to a temporary location while calculating its hash
    s = hashlib.sha1()
    output_handle, output_path = mkstemp()
    for line in handle:
        os.write(output_handle, line)
        s.update(line)
    os.close(output_handle)
    # now figure out where to store the file permanently
    permanent_dir = os.path.join(UPLOAD_DIR, s.hexdigest())
    permanent_file = os.path.join(permanent_dir, "genotype.gff")
    # os.exists does not exist -> os.path.exists
    if not os.path.exists(permanent_dir):
        os.makedirs(permanent_dir)
    shutil.move(output_path, permanent_file)
    # run the query
    submit_local(permanent_file)
    return s
def make_get_dff_movie(self):
    """Load the cached dF/F movie when present, otherwise compute it from
    self.movie and cache it; stores and returns self.dff_movie."""
    moviename = os.path.split(
        str(self.manifest_dict['decimated_jcam_movie_file_path']))[-1]
    dff_moviename = moviename.replace(
        '16_16_1.{}'.format(self.ext),
        '16-16-1_dff_rolling_gaussian.{}'.format(self.ext))
    # os.exists does not exist -> os.path.exists
    if os.path.exists(os.path.join(self.path, dff_moviename)):
        if self.ext == 'h5':
            f = h5py.File(os.path.join(self.path, dff_moviename), 'r')
            dff_movie = f['data']
        if self.ext == 'npy':
            dff_movie = BinarySlicer(
                str(os.path.join(self.path, dff_moviename)))
    else:
        print('Making DFF movie')
        dff_movie = normalize_movie(self.movie, mask_data=False,
                                    show_progress=True)
        filename = os.path.join(self.path, dff_moviename)
        if self.ext == 'h5':
            hf = h5py.File(filename, 'w')
            hf.create_dataset('data', data=dff_movie)
            hf.close()
        if self.ext == 'npy':
            np.save(filename, dff_movie)
    self.dff_movie = dff_movie
    return self.dff_movie
def mkpath0(split_path):
    """Create nested directories from an os.path.split-style (head, tail)
    pair, recursing on the tail."""
    ppart = split_path[0]
    dpart = split_path[1]
    # os.exists does not exist -> os.path.exists
    if os.path.exists(ppart) and opath.isdir(ppart):
        os.mkdir(ppart + opath.sep + dpart)
    # NOTE(review): opath.split(dpart) yields ('', dpart) for a bare name,
    # so this recursion has no visible base case -- confirm termination
    return mkpath0(opath.split(dpart))
# NOTE(review): this source line appears truncated ("ans = sys.std" is cut off
# mid-token), so the function body is left byte-identical pending the missing
# tail. Known issues to fix once the full source is available: os.isdir,
# os.join and os.exists should be os.path.isdir / os.path.join /
# os.path.exists; `for d, files in file_dict.keys()` cannot unpack -- likely
# file_dict.items(); `os.listdir(glob)` lists a literal '*' path; the code is
# Python 2 (print statement).
def del_em(file_dict, dest_dir): for d, files in file_dict.keys(): if os.isdir(d): glob = '*' dp_io.printf("arg %s is a dir, glob(%s)? ", d, glob) a = sys.stdin.readline() if a == "\n": a = glob files = os.listdir(glob) for f in files: # if dest_file exists and is the same, del in src.: dest_file = os.join(d, f) num = 0 while os.exists(dest_file): if filecmp.cmp(f, dest_file): os.unlink(f) dp_io.printf("dest_file(%s) exists copying with modified name\n", dest_file) name, ext = opath.splitext(dest_file) dest_file = name + "-" + str(num) + ext num += 1 print "os.rename(%s, %s)" % (f, dest_dir) remains = os.listdir(d) if remains: ans = "n" dp_io.printf("files remain in src dir(%s); Remove them(y/N)? ", d) ans = sys.std
def run():
    """Tail the power-level input file and forward each pair of levels to
    the pololu controller, pausing *delay* seconds between commands."""
    # power_level 25 doesn't go, 30 does.
    try:
        delay = Config.getfloat('power', 'delay')
        Logger.info("start: delay %.3f", delay)
        with control.pololu(timeout=1) as ctl:
            # os.exists does not exist -> os.path.exists
            if not os.path.exists(Input_filename):
                open(Input_filename, 'w').close()
            # `input` shadowed the builtin; renamed to infile
            with open(Input_filename) as infile:
                line = None
                # read to EOF (the original iterated the *characters* of a
                # single readline() result) so we only act on fresh lines
                for line in infile:
                    pass
                # wait for first line:
                while not line:
                    time.sleep(0.2)
                    line = infile.readline()
                while True:
                    if line:
                        power_level1, power_level2 = \
                            (int(x) for x in line.split())
                        Logger.info("power_level1 %d, power_level2 %d",
                                    power_level1, power_level2)
                        time.sleep(delay)
                        ctl.set_power(power_level1)
                        time.sleep(delay)
                        ctl.set_power(power_level2)
                    line = infile.readline()
    except Exception as e:
        sys.stderr.write("got exception\n")
        Logger.exception("%s: %s", e.__class__.__name__, e)
        raise
def load_font(fontname, fontsize):
    """
    load_font(fontname, fontsize) -> the appropriate pygame.Font()

    Searches for the font given by fontname and fontsize at the following
    places (in order):
    - the pygame system fonts
    - the standard MS fonts at /usr/share/fonts/truetype/msttcorefonts
    - /usr/share/fonts (recursive)
    - working dir

    If the font isn't found, the default pygame font is returned.
    """
    # system fonts
    if pygame.font.get_fonts().count(fontname) == 1:
        return pygame.font.SysFont(fontname, fontsize)
    # standard MS fonts
    ms_font = '/usr/share/fonts/truetype/msttcorefonts/' + fontname + '.ttf'
    if os.path.exists(ms_font):
        return pygame.font.Font(ms_font, fontsize)
    # search /usr/share/fonts/
    for root, dirs, files in os.walk('/usr/share/fonts'):
        if fontname + '.ttf' in files:
            return pygame.font.Font(os.path.join(root, fontname + '.ttf'),
                                    fontsize)
    # search in working dir (os.exists does not exist -> os.path.exists)
    if os.path.exists('./' + fontname + '.ttf'):
        return pygame.font.Font(fontname + '.ttf', fontsize)
    # last resort: return default font
    return pygame.font.Font(None, fontsize)
def __init__(self, filename):
    """Open *filename* as a WAV source, extracting the audio to a temp .wav
    first when the input is not already a .wav file."""
    import os
    from moviepy.Clip import Clip
    if not filename.endswith('.wav'):
        name, ext = os.path.splitext(os.path.basename(filename))
        # NOTE(review): temp_wav / temp / fps / bitrate are not defined in
        # this scope -- confirm where they come from
        if temp_wav is None:
            filename = Clip._TEMP_FILES_PREFIX + filename + '.wav'
        # os.exists does not exist -> os.path.exists
        if not os.path.exists(filename):
            ffmpeg.extract_sound(filename, temp, fps, bitrate)
    self.filename = filename
    wavf = wave.open(filename)
    self.nchannels = wavf.getnchannels()
    self.fps = wavf.getframerate()
    self.nframes = wavf.getnframes()
    self.nbytes = wavf.getsampwidth()
    self._wavfile = wavf
    self.duration = (1.0 * self.nframes / self.fps)
    self.end = self.duration
    # sample width in bytes -> numpy dtype name
    self.np_dtype = {1: 'int8', 2: 'int16', 4: 'int32'}[self.nbytes]
    self.buffersize = buffersize
    self._buffer_around(0)
def do_dirSelect(self):
    """Open a directory chooser seeded from the registryPath field and store
    the selected directory back into it."""
    start_path = str(self.registryPath.text())
    # os.exists does not exist -> os.path.exists; fall back to the cwd when
    # the saved path is invalid
    if not os.path.exists(start_path):
        start_path = '.'
    dirname = str(QtGui.QFileDialog.getExistingDirectory(
        self, "Select Directory", directory=start_path))
    if dirname:
        self.registryPath.setText(dirname)
def start(self):
    """Parse CLI arguments and open the log, raw-video, and optional
    gui-video files for replay."""
    # parse arguments
    parser = argparse.ArgumentParser(
        description="Replay video and text logs from a flight")
    # required arguments
    parser.add_argument('log_file', action="store", type=str,
                        help='Enter absolute location of log file. Ex: /home/odroid/Smart_Camera/logs/Smart_Camera-2015-01-22-20-35.log')
    parser.add_argument('raw_vid', action="store", type=str,
                        help='Enter absolute location of RAW video file. Ex: /home/odroid/Smart_Camera/logs/Smart_Camera-raw-2015-01-22-20-35.avi')
    # optional arguments
    parser.add_argument('-g', '--gui_vid', action="store", type=str,
                        help='Enter absolute location of GUI video file. Ex: /home/odroid/Smart_Camera/logs/Smart_Camera-gui-2015-01-22-20-35.avi')
    args, unknown = parser.parse_known_args()
    # check and open files
    # log
    if not os.path.exists(args.log_file):
        print('Unable to find log file')
        sys.exit(0)
    self.log = open(args.log_file)
    # raw video
    if not os.path.exists(args.raw_vid):
        print('Unable to find raw video file')
        sys.exit(0)
    self.raw = cv2.VideoCapture(args.raw_vid)
    # gui video
    if args.gui_vid is not None:
        # bug fix: this must check args.gui_vid, not args.log_file
        if not os.path.exists(args.gui_vid):
            print('Unable to find gui file')
            sys.exit(0)
        self.gui = cv2.VideoCapture(args.gui_vid)
def __init__(self, model_name, overwrite=False, options=None, description=None, grammar=None):
    """
    Creates an empty, untrained model. To load a previously stored model,
    use from_file(). Optionally stores some custom descriptive text. This
    will be included in the descriptive text that gets stored along with
    the model.
    """
    self.model_name = model_name
    # Remove the old file if we're asked to overwrite (os.exists ->
    # os.path.exists; the nested `if overwrite` was redundant)
    if overwrite and os.path.exists(self._filename):
        os.remove(self._filename)
    self._options = None
    # avoid the shared mutable default `options={}`
    self._options_dict = {} if options is None else options
    self._generate_description()
    self.model_description = description
    self.grammar = grammar
def make_sure_directory_exists(directory):
    """check for the existence of a directory, and make it if not there
    """
    # os.exists does not exist -> os.path.exists
    if os.path.exists(directory):
        return
    os.makedirs(directory)
def loaddb(): nonlocal database with open("animals.db") as f: if os.exists(f): database = json.load(f) else: database = examples
def __init__(self, fileName):
    """Parse a PDB file: read all lines, split the ATOM records into fields,
    and extract the residue sequence."""
    # the body previously referenced `filename` while the parameter was
    # `fileName`; unified on the parameter
    if exists(fileName):
        # dictionary of residual values for translation purposes
        self.resdict = {'ALA': 'A', 'CYS': 'C', 'ASP': 'D', 'GLU': 'E', 'PHE': 'F',
                        'GLY': 'G', 'HIS': 'H', 'ILE': 'I', 'LYS': 'K', 'LEU': 'L',
                        'MET': 'M', 'ASN': 'N', 'PRO': 'P', 'GLN': 'Q', 'ARG': 'R',
                        'SER': 'S', 'THR': 'T', 'VAL': 'V', 'TRP': 'W', 'TYR': 'Y'}
        # nab the file
        fp = open(fileName, 'r')
        self.fileDump = fp.readlines()
        fp.close()
        # get the file length
        size = len(self.fileDump)
        # split the atom vectors
        for i in range(0, size):
            # "ATOM" is four characters: slice [0:4], not [0:3]
            if self.fileDump[i][0:4] == "ATOM":
                # split this single line (the original passed the whole list)
                self.fileDump[i] = split(r'\s+', self.fileDump[i])
            else:
                # end of useful stuff
                break
        self.residue = self.parseResidue()
def move_to_classified(predictor, image_path, threshold, classified_path):
    """Copy *image_path* into a per-class folder under *classified_path*
    when the top prediction score clears *threshold*."""
    result = predict(predictor, image_path)
    # result[-1][0] is read as the top score
    if result[-1][0] > threshold:
        # NOTE(review): joining `result` itself into a path only works if
        # predict() returns a string label -- confirm its return type
        result_dir = os.path.join(classified_path, result)
        # os.exists does not exist -> os.path.exists
        if not os.path.exists(result_dir):
            os.mkdir(result_dir)
        shutil.copy2(image_path, result_dir)
def train_xgboost(data, variables, identifier, run_number, seed, prc_train=0.95, num_rounds=100):
    """Train and save an xgboost model for one (identifier, run, seed)
    combination; skips training when the model file already exists."""
    model_filename = os.path.join(
        "..", "DATA", "xgboost",
        "model_xgboost_%s_%d_%d.model" % (identifier, run_number, seed))
    # os.exists does not exist -> os.path.exists
    if not os.path.exists(model_filename):
        X = np.array(data.ix[data["dataset"] == "train", variables])
        np.random.seed(seed)
        np.random.shuffle(X)
        y = np.array(data.ix[data["dataset"] == "train", 'signal'])
        # reseed so labels are shuffled with the same permutation as X
        np.random.seed(seed)
        np.random.shuffle(y)
        n = int(X.shape[0] * prc_train)
        dtrain = xgb.DMatrix(X[:n, :], label=y[:n])
        dtest = xgb.DMatrix(X[n:, :], label=y[n:])
        param = {'max_depth': 10, 'eta': 0.05, 'silent': 1,
                 'objective': 'binary:logistic'}
        watchlist = [(dtrain, 'train'), (dtest, 'test')]

        def evalerror(preds, dtrain):
            labels = dtrain.get_label()
            # return a pair metric_name, result
            # since preds are margin(before logistic transformation, cutoff at 0)
            return 'truncated AUC', -evaluation.roc_auc_truncated(labels, preds)

        print("Evaluation %s for run %d with seed %d"
              % (identifier, run_number, seed))
        bst = xgb.train(param, dtrain, num_rounds, watchlist, feval=evalerror)
        print("Saving %s" % model_filename)
        bst.save_model(model_filename)
def confirm_schedule(testing):
    """Validate a scheduled-task dict; return an error string describing the
    first problem found, or None when everything checks out."""
    if testing.keys() != task_elements:
        return "task array was malformed"
    if testing['time'] < 0:
        # we can't handle negative times, that doesn't make sense
        return "Time is less than zero"
    if testing['pattern'] not in valid_patterns:
        # we don't support the given pattern
        return "Unsupported Pattern"
    if type(testing['function']) != type(main):
        # `string` is not a builtin; str() is
        return "Function error, wrong type: " + str(testing['function'])
    if testing['prefs'].keys() != prefs_needed:
        return "Preference array was malformed"
    if testing['prefs']['max_size'] < 100:
        return "Nonsense maximum file size"
    # os.exists does not exist -> os.path.exists
    if not os.path.exists(testing['prefs']['folder']):
        return "Path does not exist"
    # create a test ssh connection
    try:
        test = pysftp.Connection(testing['prefs']['server'],
                                 username=testing['prefs']['user'],
                                 password=testing['prefs']['pass'])
    except Exception as e:
        util.log(e)
        return "Connection with given arguments failed"
    return None
def train(
    iterations=None,
    cfg=None,
    evaluation=True,
    classes=None,
    resume=True,
    save=False,
):
    """Train a detectron model, optionally evaluate it and copy the output
    folder to Google Drive."""
    if cfg is None:
        cfg = get_cfg(find_outputn() + 1)
    if iterations is not None:
        cfg.SOLVER.MAX_ITER = iterations
    trainer = CustomTrainer(cfg)
    trainer.resume_or_load(resume=resume)
    trainer.train()
    if evaluation:
        evaluate(cfg, trainer, classes)
    # os.exists does not exist -> os.path.exists
    # NOTE(review): the checked path contains shell-escaped backslashes
    # ("My\\ Drive"), so the exists() test may never match the real folder --
    # confirm against the actual mount path
    if save and (resume or not os.path.exists(
        f"/content/gdrive/My\\ Drive/4YP\\ Output/detectron/{cfg.OUTPUT_DIR.split('/')[-1]}/"
    )):
        os.system(
            f"cp -rf /content/outputs/{cfg.OUTPUT_DIR.split('/')[-1]} /content/gdrive/My\\ Drive/4YP\\ Output/detectron"
        )
    elif save:
        print(
            "Warning: folder exists, did not save to Drive (did you forget to set resume=True?)"
        )
def is_network_cracked(self):
    """
    If the network has been cracked we'll save the key to a key file for
    that specific target. This function just asks if the network has been
    cracked. Possibly not going to be used as network_key returns False
    if not cracked.
    """
    # os.exists does not exist -> os.path.exists
    return os.path.exists(self.key_file)
async def init_cache_file(file):
    """Return True when a cache pickle already exists for *file*; any error
    (including a missing/empty file) yields False."""
    try:
        # os.exists does not exist -> os.path.exists; `await True` is
        # invalid (bool is not awaitable) -- return the values directly
        if os.path.exists("cache/" + file + ".p"):
            return True
        assert AIOFile.os.stat("cache/" + file + ".p").st_size > 0
        return False
    except Exception:
        # deliberately broad: any failure means "no usable cache"
        return False
def mkdir(self, path):
    """Create *path* (including parents); return True when it was created,
    False when it already existed."""
    path = path.strip()
    # os.exists does not exist -> os.path.exists
    if not os.path.exists(path):
        os.makedirs(path)
        return True
    return False
def load_file():
    """Return the contents of the cost file, or None when it doesn't exist."""
    # os.exists does not exist -> os.path.exists
    if not os.path.exists(COST_FILE_PATH):
        return None
    with open(COST_FILE_PATH, 'r') as file:
        data = file.read()
    return data
def _save_sample(self, image, mask, descriptors, out_path):
    """Save image/mask via the parent implementation, then the descriptors
    array, honouring self.overwrite the same way."""
    super()._save_sample(image, mask, out_path)
    descriptors_path = os.path.join(out_path, 'descriptors.npy')
    # os.exists does not exist -> os.path.exists; write when overwriting or
    # when the file is missing (equivalent to the original branch pair)
    if self.overwrite or not os.path.exists(descriptors_path):
        np.save(descriptors_path, descriptors)
def uninstall(self, delete=True):
    """Remove an installed package using its recorded filelist."""
    path = os.path.normpath(os.path.join(self.parent.config['path'],
                                         'packages',
                                         self.get_atom(True, False)))
    filelist_path = os.path.join(path, self['name'] + '.filelist')
    # os.exists does not exist -> os.path.exists
    if os.path.exists(filelist_path):
        filelist = FileList()
        filelist.load(filelist_path)
        # FIXME: remove files in filelist.files
    if delete:
        pass  # shutil.rmtree(path)
def __init__(self, filename: str = None) -> None:
    """Prepare a dump target, refusing to clobber an existing file.

    Raises Exception when the file already exists or its directory is not
    writable.
    """
    # `finename` was a typo for `filename`
    if not filename:
        filename = 'unnamed.dump'
    self.original_filename: str = filename
    self.filename: str = filename
    # os.exists does not exist -> os.path.exists
    if os.path.exists(self.filename):
        raise Exception("Dump file already exists")
    # os.access on a not-yet-created file is always False, which made this
    # constructor raise unconditionally; test the containing directory
    if not os.access(os.path.dirname(os.path.abspath(self.filename)), os.W_OK):
        raise Exception("Access denied to create file")
def __init__(self, source, name, volume):
    """Audio source descriptor; *source* must be an existing file."""
    # os.exists does not exist -> os.path.exists
    if not os.path.exists(source):
        raise ValueError("FILE " + source + " DOESN'T EXIST!")
    self.source = source
    self.name = "[" + name + "]"
    self.volume = volume
    self.cmd = ""
def download_anaconda(self):
    """Fetch the Anaconda Windows installers into self.installers, skipping
    files that were already downloaded."""
    archivedir = 'https://repo.continuum.io/archive/'
    files = ['Anaconda2-2.4.1-Windows-x86.exe',
             'Anaconda2-2.4.1-Windows-x86_64.exe']
    for f in files:
        d = os.path.join(self.installers, f)
        # os.exists does not exist -> os.path.exists
        if not os.path.exists(d):
            urllib.urlretrieve(archivedir + f, d)
    self.anacondafiles = files
def delete_files(files):
    """Best-effort removal: try each path as a file, then as a tree, and
    report anything that still exists afterwards."""
    for i in files:
        try:
            os.remove(i)
        except Exception:
            try:
                shutil.rmtree(i)
            except Exception:
                try:
                    # os.exists does not exist -> os.path.exists
                    if os.path.exists(i):
                        print('failed to delete %s' % i)
                except Exception:
                    pass
def put_output(dir_in, opts, Flowcell, Lane):
    """Uses shutil to move the output into galaxy directory"""
    seq1_name = '%(code)s_%(Flowcell)s_s_%(lane)s_fastq.txt' % \
        ({'code': 'R1samplecode123', 'Flowcell': Flowcell, 'lane': Lane})
    seq2_name = '%(code)s_%(Flowcell)s_s_%(lane)s_fastq.txt' % \
        ({'code': 'R2samplecode123', 'Flowcell': Flowcell, 'lane': Lane})
    # os.exists does not exist -> os.path.exists; fall back to gzipped names
    if not os.path.exists(os.path.join(dir_in, seq1_name)):
        seq1_name += ".gz"
        seq2_name += ".gz"
    shutil.move(os.path.join(dir_in, seq1_name), opts.match1)
    shutil.move(os.path.join(dir_in, seq2_name), opts.match2)
    return 0
def fetch_init(args):
    """Resolve the input source (stdin, an indexed set, or a plain file) and
    run the fetch query against it."""
    import os
    import sys
    if args.input is None:
        input = Set(Iterator(sys.stdin, args.format))
    # os.exists does not exist -> os.path.exists; a .lci sidecar marks a
    # pre-built index
    elif os.path.exists(args.input + '.lci'):
        input = IndexedSet(args.input)
    else:
        input = Set(Iterator(args.input, args.format))
    output = sys.stdout if args.output is None else args.output
    fetch(input, output, args.query, args.format)
def copy_image_to_volume(self, context, volume, image_service, image_id):
    """Download an image to a temp file and import it into the rbd pool as
    the volume's backing image."""
    # TODO(jdurgin): replace with librbd
    # this is a temporary hack, since rewriting this driver
    # to use librbd would take too long
    # os.exists does not exist -> os.path.exists
    if FLAGS.volume_tmp_dir and not os.path.exists(FLAGS.volume_tmp_dir):
        os.makedirs(FLAGS.volume_tmp_dir)
    with tempfile.NamedTemporaryFile(dir=FLAGS.volume_tmp_dir) as tmp:
        image_service.download(context, image_id, tmp)
        # import creates the image, so we must remove it first
        self._try_execute("rbd", "rm", "--pool", FLAGS.rbd_pool,
                          volume["name"])
        self._try_execute("rbd", "import", "--pool", FLAGS.rbd_pool,
                          tmp.name, volume["name"])
def _createSource(self, source):
    """Atomically create *source*, failing when it already exists (the
    os.link trick makes the existence check race-free)."""
    # os.exists does not exist -> os.path.exists
    if os.path.exists(source):
        raise SourceCreateError("%s already exists." % source)
    # NOTE(review): os.tempnam was removed in Python 3 -- confirm the
    # runtime, or switch to tempfile.mkstemp
    tempfile = os.tempnam(os.path.dirname(os.path.abspath(source)),
                          os.path.basename(source))
    open(tempfile, 'w')  # Create (and.. uhu.. possibly overwrite)
    try:
        try:
            os.link(tempfile, source)  # Will fail if exists
        except Exception as exception:
            raise SourceCreateError(exception)
    finally:
        os.unlink(tempfile)
def run(self):
    """Start the configured webui backend, falling back to the builtin
    server when the configured one is unavailable."""
    # `global` must precede the assignment (the import) it governs
    global webinterface
    import pyload.webui as webinterface
    reset = False
    # os.exists does not exist -> os.path.exists
    if self.https and (not os.path.exists(self.cert) or not os.path.exists(self.key)):
        log.warning(_("SSL certificates not found."))
        self.https = False
    if self.server in ("lighttpd", "nginx"):
        log.warning(_("Sorry, we dropped support for starting %s directly within pyLoad") % self.server)
        log.warning(_("You can use the threaded server which offers good performance and ssl,"))
        log.warning(_("of course you can still use your existing %s with pyLoads fastcgi server") % self.server)
        log.warning(_("sample configs are located in the pyload/web/servers directory"))
        reset = True
    elif self.server == "fastcgi":
        try:
            import flup
        except Exception:
            log.warning(_("Can't use %(server)s, python-flup is not installed!") % {
                "server": self.server})
            reset = True
    if reset or self.server == "lightweight":
        if os.name != "nt":
            try:
                import bjoern
            except Exception as e:  # py3-compatible except syntax
                log.error(_("Error importing lightweight server: %s") % e)
                log.warning(_("You need to download and compile bjoern, https://github.com/jonashaag/bjoern"))
                # typo fix in the user-facing message: boern.so -> bjoern.so
                log.warning(_("Copy the bjoern.so file to lib/Python/Lib or use setup.py install"))
                log.warning(_("Of course you need to be familiar with linux and know how to compile software"))
                self.server = "builtin"
        else:
            self.core.log.info(_("Server set to threaded, due to known performance problems on windows."))
            self.core.config.set("webui", "server", "threaded")
            self.server = "threaded"
def _load_creds(self):
    """Load vindalu credentials from ~/.vindalu/credentials; exits with
    status 2 when the file is missing."""
    credsfile = os.environ['HOME'] + "/.vindalu/credentials"
    # `!` is not a Python operator; os.exists does not exist -> os.path.exists
    if not os.path.exists(credsfile):
        print("Creds file not found: %s" % credsfile)
        exit(2)
    # `with` guarantees the handle is closed
    with open(credsfile, "r") as fh:
        jcreds = json.load(fh)
    creds = Creds()
    creds.username = jcreds["auth"]["username"]
    creds.password = jcreds["auth"]["password"]
    return creds
def batchrename(dir_name, prefix="IMG_", generator_func=number_generator()):
    '''batch rename files

    rename files in the directory specified by dir_name to
    [prefix][designator].[jpg, png, etc.]; the designator is generated by
    generator_func.

    NOTE(review): the default generator_func is evaluated once at def time
    and shared across calls -- confirm that is intended.
    '''
    # the body referenced an undefined `dir_path`; the parameter is dir_name
    for filename in os.listdir(dir_name):
        old_filename = os.path.join(dir_name, filename)
        # build the full target path up front so the existence check and the
        # rename agree (os.exists/os.join -> os.path.exists/os.path.join)
        new_filename = os.path.join(
            dir_name,
            prefix.upper() + generator_func() + '.' + getFileSuffix(filename))
        if os.path.isfile(old_filename) and not os.path.exists(new_filename):
            os.rename(old_filename, new_filename)
def download(link, names):
    """Download each URL in *link* into the ./meme folder under the matching
    name from *names*, printing progress after each file."""
    # os.exists does not exist -> os.path.exists
    if not os.path.exists('meme'):
        # If meme folder didn't exist
        # Create it
        os.mkdir('meme')
    if 'meme' not in os.path.abspath('.'):
        # If not inside meme folder
        # cd into it
        os.chdir('meme')
    # enumerate instead of link.index(i), which breaks on duplicate URLs
    for idx, i in enumerate(link):
        clear()
        urllib.urlretrieve(i, names[idx])
        print("[+] Downloaded %s" % names[idx])
        # items remaining after this one (the original `+1` overstated by 2)
        print("[+] {0} Left to Download".format(len(link) - idx - 1))