def _mount_drive(self):
    """If not already mounted, mount the USB flash drive.

    Tries each candidate block device in turn until the expected data
    directory appears under the mount point.

    Returns:
        True if the data directory is (or becomes) available, False if
        no candidate device could be mounted.
    """
    # If the /mnt/usbflash/data dir already exists, the drive is mounted.
    if utils.path_exists(usb_mgr_config.usb_flash_data_dir):
        return True
    mount_pt = usb_mgr_config.usb_flash_dir
    # The flash drive could be on any of these devices (sda1, sdb1, sdc1).
    devices = (usb_mgr_config.usb_flash_device_a,
               usb_mgr_config.usb_flash_device_b,
               usb_mgr_config.usb_flash_device_c)
    # Clear any stale mount on the mount point first.
    utils.call_subprocess('umount %s' % mount_pt)
    for dev in devices:
        utils.call_subprocess('mount %s %s' % (dev, mount_pt))
        if utils.path_exists(usb_mgr_config.usb_flash_data_dir):
            self._log.info('Mounted USB device %s' % dev)
            return True
    return False
def lock(self):
    """Acquire the single-instance lock for this monitor.

    On Linux an advisory `fcntl` lock is taken on the lock file; elsewhere
    the ability to delete and recreate the file proves no other instance
    holds it open.

    Returns:
        True when the lock was acquired; False when another instance is
        already running (the user is notified via console or show-file).
    """
    self._homedir = get_user_dir() + utils.path_sep + u"." + self._name.lower()
    if not utils.path_exists(self._homedir):
        utils.path_makedirs(self._homedir)
    self._lockfilename = self._homedir + utils.path_sep + "monitor.lock"
    try:
        if is_linux():
            # POSIX: advisory lock, so a stale file from a crash is harmless.
            import fcntl
            self._lockfile = utils.file_open(self._lockfilename, "w")
            fcntl.lockf(self._lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
        else:
            # Non-POSIX: deleting succeeds only if nobody keeps it open.
            if utils.path_exists(self._lockfilename):
                utils.path_remove(self._lockfilename)
            self._lockfile = utils.file_open(self._lockfilename, "w")
            self._lockfile.write("\x00")
    except Exception:
        # Lock is held by another instance (or the open itself failed,
        # in which case self._lockfile may not exist yet).
        try:
            self._lockfile.close()
        except Exception:
            pass
        if self._mode == "systray":
            print("An Instance is already running.")
        else:
            self.add_show_file()
        return False
    return True
def download_url_file(urlsrc, fdest, proxy_info=None, response_transfer_progress=None):
    # Download urlsrc to fdest over a raw socket connection, following a
    # 301 redirect (recursively). The payload is streamed into a
    # "<fdest>TMP" file first and moved into place only on success, so a
    # failed download never leaves a partial fdest behind.
    sredurl = None
    sp = _split_utl(urlsrc)
    # Send the request to the server
    sock = _connect_socket(sp["host"], sp["port"], proxy_info)
    try:
        req = Request("GET", sp["path"], {
            'Host': sp["host"] + ':' + str(sp["port"], ),
            'Connection': 'close'
        })
        sock.sendall(req.to_message())
        # Read the response; clear any previous destination/temp files first.
        if utils.path_exists(fdest):
            utils.path_remove(fdest)
        ftmp = fdest + "TMP"
        if utils.path_exists(ftmp):
            utils.path_remove(ftmp)
        # Response() streams the body into ftmp, reporting progress via
        # the optional callback.
        resp = Response(sock, ftmp, response_transfer_progress)
        if resp.get_code() == '301':
            # Moved permanently: remember the new location, retried below.
            sredurl = resp.get_headers()["Location"]
        elif resp.get_code() != '200':
            raise Exception("Download error " + str(resp.get_code()) + ".")
    finally:
        sock.shutdown(1)
        sock.close()
    if sredurl is not None:
        # Follow the redirect with the same destination and options.
        download_url_file(sredurl, fdest, proxy_info, response_transfer_progress)
    else:
        if utils.path_exists(ftmp):
            utils.path_move(ftmp, fdest)
def check_args(args):
    """Validate training CLI arguments.

    Exits with status -1 when the style image or the training data path
    does not exist; otherwise makes sure the directory for the weights
    output file exists.
    """
    if not utils.path_exists(args.style):
        print('Style image not found in', args.style)
        exit(-1)
    if not utils.path_exists(args.train):
        # BUG FIX: the message previously printed args.style here, which
        # reported the wrong path for a missing train directory.
        print('Train path', args.train, 'not found')
        exit(-1)
    # Create the parent directory of the weights file if needed.
    pathlib.Path(args.weights).parent.mkdir(parents=True, exist_ok=True)
def set_paths(self):
    """Resolve the project, dataset and checkpoint directories.

    Returns a 6-tuple: (project_path, project_prefix_path, dataset_path,
    train_path, test_path, ckptdir_path).
    """
    root = self.codebase_root_path
    project_path = utils.path_exists(root)
    # Prefix path currently unused:
    # utils.path_exists(os.path.join(root, self.project_name, self.folder_suffix))
    project_prefix_path = ""
    dataset_path = utils.path_exists(
        os.path.join(root, "../data", self.dataset_name))
    ckptdir_path = utils.path_exists(os.path.join(root, "checkpoint"))
    # CIFAR-style batch file prefixes inside the dataset directory.
    train_path = os.path.join(dataset_path, "data_batch_")
    test_path = os.path.join(dataset_path, "test_batch")
    return (project_path, project_prefix_path, dataset_path,
            train_path, test_path, ckptdir_path)
def set_paths(self):
    """Resolve the project, dataset and binary-checkpoint directories.

    Returns a 6-tuple: (project_path, project_prefix_path, dataset_path,
    train_path, test_path, ckptdir_path).
    """
    root = self.codebase_root_path
    project_path = utils.path_exists(root)
    # Prefix path currently unused:
    # utils.path_exists(os.path.join(root, self.project_name, self.folder_suffix))
    project_prefix_path = ""
    dataset_path = utils.path_exists(
        os.path.join(root, "../data", self.dataset_name))
    ckptdir_path = utils.path_exists(os.path.join(root, "bin"))
    # Split files are named "<dataset>-train" / "<dataset>-test".
    train_path = os.path.join(dataset_path, self.dataset_name + "-train")
    test_path = os.path.join(dataset_path, self.dataset_name + "-test")
    return (project_path, project_prefix_path, dataset_path,
            train_path, test_path, ckptdir_path)
def set_paths(self):
    """Resolve the project, dataset and checkpoint directories.

    Unlike the sibling variant, the data directory lives directly under
    the codebase root ("data", not "../data").

    Returns a 6-tuple: (project_path, project_prefix_path, dataset_path,
    train_path, test_path, ckptdir_path).
    """
    root = self.codebase_root_path
    project_path = utils.path_exists(root)
    # Prefix path currently unused:
    # utils.path_exists(os.path.join(root, self.project_name, self.folder_suffix))
    project_prefix_path = ""
    dataset_path = utils.path_exists(
        os.path.join(root, "data", self.dataset_name))
    ckptdir_path = utils.path_exists(os.path.join(root, "checkpoint"))
    # CIFAR-style batch file prefixes inside the dataset directory.
    train_path = os.path.join(dataset_path, "data_batch_")
    test_path = os.path.join(dataset_path, "test_batch")
    return (project_path, project_prefix_path, dataset_path,
            train_path, test_path, ckptdir_path)
def init_guilnc(self, ag):
    """Initialise the GUI launcher state under the property semaphore.

    Compatibility shim for previous versions: only active when the
    legacy "native/dwagguilnc" path is present.
    """
    self._propguilnc_semaphore.acquire()
    try:
        # COMPATIBILITY WITH PREVIOUS VERSIONS
        if utils.path_exists("native/dwagguilnc"):
            self._propguilnc = {}
            # Touch the marker file that keeps launcher loops alive.
            if not utils.path_exists("guilnc.run"):
                marker = utils.file_open("guilnc.run", "wb")
                marker.close()
    finally:
        self._propguilnc_semaphore.release()
def get_resource_path(self):
    """Return the list of browsable root paths on this system.

    "/" is always included; "/home" and "/root" only when present.
    Each entry is a dict with a "Name" key.
    """
    roots = [{"Name": u"/"}]
    for candidate in (u"/home", u"/root"):
        if utils.path_exists(candidate):
            roots.append({"Name": candidate})
    return roots
def load_library(self):
    """Lazily load the native agent library, best effort.

    Prefers "dwaglib.dylib"; falls back to "dwaglib.so" when the dylib
    is not present under native/ (and not in source mode). Failures are
    swallowed: the agent keeps running without the native library.
    """
    try:
        if self._dwaglib is None:
            lbn = "dwaglib.dylib"
            # COMPATIBILITY BEFORE 14/11/2019
            if not utils.path_exists(".srcmode"):
                if not utils.path_exists("native/" + lbn):
                    lbn = "dwaglib.so"  # COMPATIBILITY BEFORE 14/11/2019
            self._dwaglib = _load_lib_obj(lbn)
    except Exception:
        # Deliberate best-effort: missing/broken native lib is non-fatal.
        pass
def _write(self, path, prop):
    """Atomically write prop["text"] to path.

    The text is written to a uniquely named temporary file in the same
    directory (honouring the "encoding", "endline" and "bom" options in
    prop) and then moved over the destination, so readers never observe
    a partially written file.

    Raises:
        Exception: if the destination path is an existing directory.
    """
    if "encoding" in prop:
        enc = prop["encoding"]
    else:
        enc = sys.getfilesystemencoding()
    if "endline" in prop:
        endl = prop["endline"]
    else:
        endl = utils.line_sep
    bm = None
    # Create a uniquely named temporary file in the destination directory.
    pathtmp = None
    sprnpath = utils.path_dirname(path)
    while True:
        r = "".join([random.choice("0123456789") for x in xrange(6)])
        pathtmp = sprnpath + utils.path_sep + "temporary" + r + ".dwstext"
        if not utils.path_exists(pathtmp):
            # Create the file first so its permissions can be fixed up.
            utils.file_open(pathtmp, 'wb').close()
            self._agent_main.get_osmodule().fix_file_permissions(
                "CREATE_FILE", pathtmp)
            if (enc is not None):
                flgop = "w"
                if "bom" in prop and prop["bom"] == 'true':
                    bm = self._get_bom_byname(enc)
                    if bm is not None:
                        # Write the BOM in binary mode, then append text.
                        text_file = utils.file_open(pathtmp, 'wb')
                        text_file.write(bm["Data"])
                        text_file.close()
                        flgop = "a"
                text_file = utils.file_open(pathtmp, flgop, enc)
            else:
                text_file = utils.file_open(pathtmp, 'w')
            break
    try:
        s = prop["text"]
        # Normalise line endings to the requested endline sequence.
        s = endl.join(s.split("\n"))
        text_file.write(s)
    finally:
        text_file.close()
    if utils.path_exists(path):
        if utils.path_isdir(path):
            # BUG FIX: clean up the temporary file; the original removed
            # self._tmpname, an attribute that is never defined here.
            utils.path_remove(pathtmp)
            raise Exception("PATH is directory.")
        else:
            self._agent_main.get_osmodule().fix_file_permissions(
                "COPY_FILE", pathtmp, path)
            utils.path_remove(path)
    shutil.move(pathtmp, path)
def load(self, path):
    """Load a TokenGraph from the folder at path.

    Expects the folder to contain corrDict.sav and a tokenizer/ subfolder.
    Returns True on success.
    """
    # NOTE(review): return value ignored — presumably path_exists raises or
    # logs on a missing folder; verify against utils.
    utils.path_exists(path)
    # BUG FIX: the original implicit string concatenation produced
    # "TokenGraph filecan't ..." (missing space after "file").
    assert not (self.initialized or self.tokenizer), \
        "TokenGraph file can't be loaded into an initialized TokenGraph."
    # self.corrMatrix = np.load(f'{path}/corrMatrix.npy')
    self.corrDict = utils.load_obj(f'{path}/corrDict.sav')
    self.tokenizer = Tokenizer()
    self.tokenizer.load(f'{path}/tokenizer')
    self.initialized = True
    return True
def get_library_config(name):
    """Return the parsed JSON configuration for library `name`.

    In source mode the config lives in ../lib_<name>/config.json; in a
    deployed install it is native/lib_<name>.json. Returns None when the
    file does not exist.
    """
    if utils.path_exists(".srcmode"):
        fn = ".." + utils.path_sep + "lib_" + name + utils.path_sep + "config.json"
    else:
        fn = "native" + utils.path_sep + "lib_" + name + ".json"
    if not utils.path_exists(fn):
        return None
    f = utils.file_open(fn)
    try:
        # Close the handle even if parsing fails (the original leaked it
        # when json.loads raised).
        return json.loads(f.read())
    finally:
        f.close()
def _connected():
    """Return True if currently connected to the RUDICS server"""
    # Connection state is inferred from two marker files: we are connected
    # iff the connect marker's mtime is newer than the disconnect marker's.
    if not utils.path_exists(svr_proxy_config.connect_time_file):
        # Haven't connected yet
        print '_connected: connect_time_file does not exist'
        return False
    if not utils.path_exists(svr_proxy_config.disconnect_time_file):
        # Haven't disconnected yet
        print '_connected: disconnect_time_file does not exist'
        return True
    last_connect_time = utils.get_file_mod_time(svr_proxy_config.connect_time_file)
    last_disconnect_time = utils.get_file_mod_time(svr_proxy_config.disconnect_time_file)
    connected = last_connect_time > last_disconnect_time
    print '_connected: returning %s' % str(connected)
    return connected
def init_path():
    """Ensure SHAREDMEM_PATH exists and contains no stale stream files."""
    if not utils.path_exists(SHAREDMEM_PATH):
        utils.path_makedir(SHAREDMEM_PATH)
    else:
        # Delete every leftover stream file from a previous run.
        lst = utils.path_list(SHAREDMEM_PATH)
        for fname in lst:
            try:
                if fname.startswith("stream_"):
                    fpath = SHAREDMEM_PATH + utils.path_sep + fname
                    if utils.path_exists(fpath):
                        utils.path_remove(fpath)
            except Exception:
                # Best effort: a locked or vanished file is not fatal.
                pass
def __init__(self, opts, load=False):
    """Class to handle data management

    Args:
        opts: All the hyper-parameters of the network
        load: Load dataset if this is true
    """
    self.records = {}
    self.opts = opts
    self.check_records()
    if load:
        if opts.load_images:
            # Load the whole training set into memory as one numpy array.
            print ' - Loading numpy raw images...'
            begin_time = time.time()
            self.t_images_data = np.load(os.path.join(opts.dataset_dir, opts.dataset, 'train.npy'))
            # NOTE(review): "* 2" presumably because each record holds an
            # image pair — confirm against the dataset format.
            print ' - Loaded {} images in: {} seconds'.format(len(self.t_images_data) * 2, time.time() - begin_time)
        else:
            # Otherwise keep only file path lists; images are read lazily.
            self.t_image_paths = utils.read_file_lines(
                os.path.join(opts.dataset_dir, opts.dataset, 'train.txt'))
            self.v_image_paths = utils.read_file_lines(
                os.path.join(opts.dataset_dir, opts.dataset, 'val.txt'))
            # The test split is optional.
            test_path = os.path.join(opts.dataset_dir, opts.dataset, 'test.txt')
            if utils.path_exists(test_path):
                self.test_image_paths = utils.read_file_lines(
                    os.path.join(opts.dataset_dir, opts.dataset, 'test.txt'))
def check_and_replace_path(self, cinfo, path, operation, options=None):
    """Resolve `path` against the caller's permission table.

    Args:
        cinfo: caller/connection info forwarded to get_permission_path.
        path: the requested path (must not be None).
        operation: one of the OPERATION_* constants.
        options: optional dict; "check_exists" (default True) controls
            whether the resolved path must exist.

    Returns:
        The first permitted replacement path.

    Raises:
        Exception: when path is None, no mapping allows the operation,
            or the resolved path is missing while check_exists is set.
    """
    if path is None:
        raise Exception("Path is none")
    if options is None:
        # Fixed the mutable-default-argument pitfall; behaviour unchanged
        # since the dict is only read here.
        options = {}
    # Map each operation to the permission flag that authorises it.
    allow_keys = {
        self.OPERATION_VIEW: "allow_view",
        self.OPERATION_EDIT: "allow_edit",
        self.OPERATION_DOWNLOAD: "allow_download",
        self.OPERATION_UPLOAD: "allow_upload",
    }
    allow_key = allow_keys.get(operation)
    sret = None
    for itmpth in self.get_permission_path(cinfo, path, options):
        if allow_key is not None and itmpth[allow_key]:
            sret = itmpth["name"]
            break
    if sret is None:
        raise Exception("Permission denied.\nOperation: " + operation + "\nPath: " + path)
    # Optionally verify that the resolved path exists.
    check_exists = True
    if "check_exists" in options:
        check_exists = options["check_exists"]
    if check_exists and not utils.path_exists(sret):
        raise Exception("Permission denied or read error.")
    return sret
def create_transition_video(input_file_1, input_file_2, width, height, time_per_picture, transition_time, framerate, work_dir="work", output_file_suffix="_merged"):
    # Render a cross-fade transition from input_file_1 into input_file_2
    # using ffmpeg, returning the output file name. If input_file_2 is the
    # sentinel "!black", the clip fades to black instead. The result is
    # cached: when the output already exists it is returned immediately.
    input_path_1, output_file, output_path = utils.get_input_and_output_paths_with_output_file(
        input_file_1, work_dir, output_file_suffix, extension="mp4")
    if utils.path_exists(output_path):
        return output_file
    input_1_segment = f"-i {input_path_1}"
    # Synthetic black source matching the target geometry and framerate.
    black_input_string = f"-f lavfi -i color=black:r={framerate}:s={width}x{height}"
    if input_file_2 == "!black":
        input_2_segment = black_input_string
        input_2_mods = f"trim=duration={transition_time},"
    else:
        input_path_2 = utils.file_to_path(input_file_2, work_dir)
        input_2_segment = f"-i {input_path_2}"
        input_2_mods = ""
    # Filter graph: fade input 1 out and input 2 in (alpha fades), overlay
    # both onto a black base trimmed to the full clip duration.
    command = f"ffmpeg {input_1_segment} {input_2_segment} {black_input_string} -filter_complex \
\"[0:v]format=pix_fmts=yuva420p,fade=t=out:st={time_per_picture - transition_time}:d={transition_time}:alpha=1,setpts=PTS-STARTPTS[va0];\
[1:v]format=pix_fmts=yuva420p,{input_2_mods}fade=t=in:st=0:d={transition_time}:alpha=1,setpts=PTS-STARTPTS+{time_per_picture - transition_time}/TB[va1];\
[2:v]trim=duration={time_per_picture}[over];\
[over][va0]overlay[over1];\
[over1][va1]overlay=format=yuv420[outv]\" \
-vcodec libx264 -map [outv] {output_path}"
    # NOTE(review): shell=True with interpolated paths — paths containing
    # shell metacharacters would break or be unsafe; confirm inputs are
    # pre-escaped upstream.
    subprocess.run(command, shell=True, check=True)
    return output_file
def req_copy(self, cinfo ,params):
    # Handle a copy request: copy each name in the JSON `files` list from
    # pathsrc to pathdst (both permission-checked for EDIT). Returns a
    # JSON object {"items": [...]} where failed entries get an "E:" name
    # prefix.
    pathsrc = self.check_and_replace_path(cinfo, agent.get_prop(params,'pathsrc',None), self.OPERATION_EDIT)
    pathdst = self.check_and_replace_path(cinfo, agent.get_prop(params,'pathdst',None), self.OPERATION_EDIT)
    files = agent.get_prop(params,'files',None)
    replace = agent.str2bool(agent.get_prop(params, "replace", "false"))
    arfiles = json.loads(files)
    arret=[]
    for i in range(len(arfiles)):
        nm = arfiles[i]
        fs = pathsrc + nm
        fd = pathdst + nm
        cnt = 0
        # Copying onto itself: pick a free "copy N of <name>" target.
        if fs==fd:
            while utils.path_exists(fd):
                cnt+=1
                nm = "copy " + str(cnt) + " of " + arfiles[i];
                fd = pathdst + nm
        b = True
        # Refuse copying a directory into one of its own subdirectories.
        if not fs==fd and fd.startswith(fs + utils.path_sep):
            b = False
        else:
            b = self._cpmv("copy", fs, fd, replace);
        if b is True:
            self._append_to_list(arret, pathdst, nm)
        else:
            # Mark failed items so the client can report them.
            arret.append({'Name': "E:" + nm})
    return json.dumps({'items' : arret})
def _seperate_one(self, _file):
    # Copy a single file into the time-bucket folder derived from its
    # timestamp, creating the folder under self.mv_path if needed.
    time = self._get_time(_file)
    folder = self._get_time_folder(time)
    # Trailing '' makes path_join yield a path ending with the separator.
    fd_path = path_join([self.mv_path, folder, ''])
    if not path_exists(fd_path):
        create_folder(fd_path)
    # Progress/debug output (Python 2 print statements).
    print _file, fd_path
    print copy(_file, fd_path)
def get_resource_path(self):
    """Return the list of browsable root paths on macOS.

    "/" is always included; the standard macOS roots are added only when
    they exist. Each entry is a dict with a "Name" key.
    """
    roots = [{"Name": u"/"}]
    for candidate in (u"/Users", u"/Library", u"/System", u"/Volumes"):
        if utils.path_exists(candidate):
            roots.append({"Name": candidate})
    return roots
def copy_to_work_dir(unescaped_input_path, work_dir="work"):
    """Copy the given file into work_dir and return its bare file name.

    The copy is skipped when the destination already exists (cache hit).
    """
    input_file = unescaped_input_path.split(os.sep)[-1]
    output_path = utils.file_to_path(input_file, work_dir)
    if utils.path_exists(output_path):
        return input_file
    escaped = utils.escape_string(unescaped_input_path)
    subprocess.run(f"cp {escaped} {output_path}", shell=True, check=True)
    return input_file
def _cpmv(self, tp, fs, fd, replace):
    # Recursively copy ("copy") or move ("move") fs to fd.
    # Returns True only if every file in the tree succeeded; failures are
    # accumulated into bok instead of raising, so the operation is
    # best-effort across the whole tree.
    bok = True
    if utils.path_isdir(fs):
        if not utils.path_exists(fd):
            utils.path_makedirs(fd)
            if tp=="copy":
                self._agent_main.get_osmodule().fix_file_permissions("COPY_DIRECTORY",fd, fs)
            elif tp=="move":
                self._agent_main.get_osmodule().fix_file_permissions("MOVE_DIRECTORY",fd, fs)
        lst=None
        try:
            # Recurse into every child; keep going on partial failures.
            lst=utils.path_list(fs)
            for fname in lst:
                b = self._cpmv(tp, fs + utils.path_sep + fname, fd + utils.path_sep + fname, replace)
                if bok is True:
                    bok = b
        except Exception:
            bok=False
        if tp=="move":
            # Remove the now-emptied source directory.
            try:
                utils.path_remove(fs)
            except Exception:
                bok=False
    else:
        # Single file: honour the replace flag for existing destinations.
        b=True
        if utils.path_exists(fd):
            if replace is True:
                try:
                    utils.path_remove(fd)
                except Exception:
                    bok = False
                    b = False
            else:
                b = False
        if b is True:
            try:
                if tp=="copy":
                    utils.path_copy(fs, fd)
                    self._agent_main.get_osmodule().fix_file_permissions("COPY_FILE",fd, fs)
                elif tp=="move":
                    utils.path_move(fs, fd)
                    self._agent_main.get_osmodule().fix_file_permissions("MOVE_FILE",fd)
            except Exception:
                bok=False
    return bok
def check_args(args):
    """Validate generation CLI arguments.

    Requires either a content image (--content with --gen) or --webcam,
    plus an existing weights file. Exits with status -1 on any error;
    otherwise ensures the output directory for --gen exists.
    """
    if args.gen is not None:
        if args.webcam:
            print(
                'WARN: Ignoring webcam argument since you specified a content image'
            )
        if not utils.path_exists(args.content):
            print('Content image not found in', args.content)
            exit(-1)
    elif not args.webcam:
        print(
            'Please specify a content image with --content, or use --webcam to generate from your webcam'
        )
        # BUG FIX: previously fell through with neither a content source
        # nor a webcam selected; abort like every other argument error.
        exit(-1)
    if not utils.path_exists(args.weights):
        print('Weights not found in', args.weights)
        exit(-1)
    if args.gen is not None:
        pathlib.Path(args.gen).parent.mkdir(parents=True, exist_ok=True)
def __init__(self, pkgnm):
    """Initialise the language resource holder for the given package.

    In source mode (".srcmode" marker present) the package name is
    qualified, so only its second component is kept.
    """
    if utils.path_exists(".srcmode"):
        self._pkgnm = pkgnm.split(".")[1]
    else:
        self._pkgnm = pkgnm
    # Default language data is always loaded eagerly.
    self._data_default = self._get_data("default")
    self._lang_current = None
    self._data_current = None
    # Condition guards lazy switching of the current language.
    self._semaphore = threading.Condition()
def __init__(self, x=None, y_true=None, train=True, \
             load=True, name="trained_net", args={}):
    """Build (and optionally restore/train) the convolutional network.

    Args:
        x: training inputs; first dim is the sample count.
        y_true: training labels matching x's first dim.
        train: when True and data is given, train after building.
        load: when True, restore the latest checkpoint if one exists.
        name: checkpoint folder/file stem under nets/.
        args: overrides for entries already present in FLAGS.
            NOTE(review): mutable default dict — safe only because it is
            never mutated here.
    """
    # Hyper-parameter defaults; only keys already present can be overridden.
    self.FLAGS = {
        "filter_size1" : 5,
        "num_filters1" : 16,
        "image_length" : 128,
        "num_channels" : 1,
        "num_classes" : 2,
        "filter_size2" : 5,
        "num_filters2" : 36,
        "fc_size" : 128,
        "learning_rate": 1e-4,
        "train_batch_size" : 64,
        "eval_batch_size" : 64,
        "num_epochs" : 1
    }
    for key, val in args.items():
        if key in self.FLAGS:
            self.FLAGS[key] = val
    self.sess = tf.Session()
    self.build_graph()
    self.sess.run(tf.global_variables_initializer())
    self.saver = tf.train.Saver()
    checkpoint_folder = f"nets/{name}"
    checkpoint_file = f"nets/{name}/{name}"
    # Restore only when a checkpoint marker file actually exists.
    if load and utils.path_exists(f"{checkpoint_folder}/checkpoint"):
        self.load_net(checkpoint_file, checkpoint_folder)
    elif load:
        print("No Checkpoint available to load")
    if train and x is not None and y_true is not None:
        assert x.shape[0] == y_true.shape[0], \
            f"First dim of x {x.shape} must be \
same size as y_true {y_true.shape}"
        if not utils.path_exists(checkpoint_folder):
            os.makedirs(checkpoint_folder)
        self.train_net(checkpoint_file, checkpoint_folder, \
                       x, y_true, self.FLAGS["num_epochs"])
def load(self, path):
    """ Loads Tokenizer() from folder at path """
    utils.path_exists(path)
    assert not self.initialized, ("Tokenizer file can't be loaded into an "
                                  "initialized Tokenizer.")
    # Restore the pickled components.
    self.freqDict = utils.load_obj(f'{path}/freqDict')
    self.idx = utils.load_obj(f'{path}/idx')
    self.tokenizer = utils.load_obj(f'{path}/tokenizer')
    # Restore the lower-casing flag ('t' means True).
    with open(f'{path}/lower.sav', 'r') as lowerFile:
        self.lower = (lowerFile.read() == 't')
    # Derive the remaining state from what was loaded.
    self.build_reverse_idx()
    self.vocabSize = len(self.freqDict)
    self.initialized = True
    return True
def term_guilnc(self):
    """Tear down GUI launcher state under the property semaphore.

    Removes the "guilnc.run" marker (stopping launcher loops) and closes
    every open launcher channel.
    """
    self._propguilnc_semaphore.acquire()
    try:
        if utils.path_exists("guilnc.run"):
            utils.path_remove("guilnc.run")
        channels = self._propguilnc
        if channels is not None:
            for key in channels:
                channels[key].close()
            self._propguilnc = None
    finally:
        self._propguilnc_semaphore.release()
def load_net(self, checkpoint_file, checkpoint_folder):
    """Restore the TF session from checkpoint_file and reload saved FLAGS.

    When FLAGS.args is missing, the current defaults are kept and a
    warning is printed.
    """
    import ast  # local import: only needed when restoring flags
    self.saver.restore(self.sess, checkpoint_file)
    arg_file = f"{checkpoint_folder}/FLAGS.args"
    if utils.path_exists(arg_file):
        with open(arg_file) as f:
            # SECURITY FIX: the file holds a plain dict literal, so parse
            # it with literal_eval instead of eval (no code execution).
            self.FLAGS = ast.literal_eval(f.readline())
    else:
        print("\nUnable to restore flags\n")
    print("\nCheckpoint Restored\n")
def _connected(self):
    """Return True if currently connected to the RUDICS server"""
    # Connection state is inferred from two marker files: connected iff
    # the connect marker's mtime is newer than the disconnect marker's.
    if not utils.path_exists(svr_proxy_config.connect_time_file):
        # Never connected yet.
        return False
    if not utils.path_exists(svr_proxy_config.disconnect_time_file):
        # Connected at least once, never disconnected.
        return True
    connect_ts = utils.get_file_mod_time(
        svr_proxy_config.connect_time_file)
    disconnect_ts = utils.get_file_mod_time(
        svr_proxy_config.disconnect_time_file)
    return connect_ts > disconnect_ts
def start_guilnc(self):
    # GUI launcher main loop: attaches to the per-user shared-memory
    # channel "gui_launcher_<uid>" and spawns agent GUI processes when the
    # channel's "state" property is set to "LNC". Runs until the
    # "guilnc.run" marker disappears or a stop is requested.
    self._propguilnc_stop = False
    signal.signal(signal.SIGTERM, self._signal_handler)
    bload = False
    suid = str(os.getuid())
    spid = str(os.getpid())
    lnc = sharedmem.Property()
    prcs = []
    try:
        while not self._propguilnc_stop and utils.path_exists(
                "guilnc.run"):
            if not bload:
                # Not attached yet: try to open the channel and publish
                # our pid; back off one second on any failure.
                if lnc.exists("gui_launcher_" + suid):
                    try:
                        lnc.open("gui_launcher_" + suid)
                        lnc.set_property("pid", spid)
                        bload = True
                    except:
                        time.sleep(1)
                else:
                    time.sleep(1)
            if bload:
                if lnc.get_property("state") == "LNC":
                    # Launch request: assemble argv from the channel's
                    # "app" and "arg0..argN" properties.
                    popenar = []
                    popenar.append(sys.executable)
                    popenar.append(u'agent.pyc')
                    popenar.append(u'app=' + lnc.get_property("app"))
                    for i in range(GUILNC_ARG_MAX):
                        a = lnc.get_property("arg" + str(i))
                        if a == "":
                            break
                        popenar.append(a)
                    libenv = os.environ
                    libenv["LD_LIBRARY_PATH"] = utils.path_absname(
                        "runtime/lib")
                    #print "Popen: " + " , ".join(popenar)
                    try:
                        p = subprocess.Popen(popenar, env=libenv)
                        prcs.append(p)
                        #print "PID: " + str(p.pid)
                        if p.poll() is None:
                            # Report the child's pid back via "state".
                            lnc.set_property("state", str(p.pid))
                        else:
                            lnc.set_property("state", "ERR")
                    except:
                        lnc.set_property("state", "ERR")
                time.sleep(0.2)
                # Reap finished child processes
                prcs = [p for p in prcs if p.poll() is None]
    finally:
        if bload:
            lnc.close()
def connect(self, fname):
    # Attach to an existing shared-memory file created by the other
    # endpoint. Must be called before any other use; raises if this
    # object is already initialized or the file is missing.
    self._semaphore.acquire()
    try:
        if self._binit == True:
            raise Exception("Shared file already initialized.")
        # Side 2 = connecting (non-creating) endpoint.
        self._side = 2
        self._path = sharedmem_manager.getPath(fname)
        if not utils.path_exists(self._path):
            raise Exception("Shared file not found.")
        # Size is taken from the existing file; the creator fixed it.
        self._size = utils.path_size(self._path)
        self._initialize()
    finally:
        self._semaphore.release()