def test_log_artifacts():
    """Round-trip check: log_artifacts() must copy a local tree to SFTP storage.

    Exercises a flat file plus a nested directory for several artifact_path
    values, including None (which maps to the repository root).
    """
    for artifact_path in [None, "sub_dir", "very/nested/sub/dir"]:
        text_payload = 'A simple test artifact\nThe artifact is located in: ' + str(artifact_path)
        binary_payload = os.urandom(300)
        top_file = "meta.yaml"
        sub_dir_name = "saved_model"
        nested_file = "sk_model.pickle"
        with TempDir() as local, TempDir() as remote:
            with open(os.path.join(local.path(), top_file), "w") as fh:
                fh.write(text_payload)
            os.mkdir(os.path.join(local.path(), sub_dir_name))
            with open(os.path.join(local.path(), sub_dir_name, nested_file), "wb") as fh:
                fh.write(binary_payload)
            store = SFTPArtifactRepository("sftp://" + remote.path())
            store.log_artifacts(local.path(), artifact_path)
            remote_dir = posixpath.join(
                remote.path(), '.' if artifact_path is None else artifact_path)
            # Structure must be mirrored on the remote side...
            assert posixpath.isdir(remote_dir)
            assert posixpath.isdir(posixpath.join(remote_dir, sub_dir_name))
            assert posixpath.isfile(posixpath.join(remote_dir, top_file))
            assert posixpath.isfile(posixpath.join(remote_dir, sub_dir_name, nested_file))
            # ...and so must the file contents.
            with open(posixpath.join(remote_dir, top_file), 'r') as fh:
                assert fh.read() == text_payload
            with open(posixpath.join(remote_dir, sub_dir_name, nested_file), 'rb') as fh:
                assert fh.read() == binary_payload
def HandleEvent(self, event):
    """Unified FAM event handler for DirShadow."""
    action = event.code2str()
    # Events that carry an absolute path are not relative to the monitored
    # directory and are ignored.
    if event.filename[0] == '/':
        return
    epath = "".join([self.data, self.handles[event.requestID], event.filename])
    if posixpath.isdir(epath):
        ident = self.handles[event.requestID] + event.filename
    else:
        # Plain files are keyed by their containing directory
        # (the handle path minus its trailing slash).
        ident = self.handles[event.requestID][:-1]
    if action in ['exists', 'created']:
        if posixpath.isdir(epath):
            # New subdirectory: start monitoring it (path relative to data).
            self.AddDirectoryMonitor(epath[len(self.data):])
        if ident not in self.entries and posixpath.isfile(epath):
            dirpath = "".join([self.data, ident])
            self.entries[ident] = self.es_cls(self.filename_pattern,
                                              dirpath,
                                              self.es_child_cls,
                                              self.encoding)
            self.Entries['Path'][ident] = self.entries[ident].bind_entry
        if not posixpath.isdir(epath):
            # do not pass through directory events
            self.entries[ident].handle_event(event)
    if action == 'changed' and ident in self.entries:
        self.entries[ident].handle_event(event)
    elif action == 'deleted':
        fbase = self.handles[event.requestID] + event.filename
        if fbase in self.entries:
            # a directory was deleted
            del self.entries[fbase]
            del self.Entries['Path'][fbase]
        else:
            self.entries[ident].handle_event(event)
def HandleEvent(self, event=None):
    """
    Updates which files this plugin handles based upon filesystem events.
    Allows configuration items to be added/removed without server restarts.
    """
    action = event.code2str()
    # Absolute paths are not relative to the monitored directory; ignore.
    if event.filename[0] == "/":
        return
    epath = "".join([self.data, self.handles[event.requestID], event.filename])
    if posixpath.isdir(epath):
        ident = self.handles[event.requestID] + event.filename
    else:
        ident = self.handles[event.requestID][:-1]
    fname = "".join([ident, "/", event.filename])
    if event.filename.endswith(".xml"):
        # Spec files: (re)load key or certificate generation parameters.
        if action in ["exists", "created", "changed"]:
            if event.filename.endswith("key.xml"):
                key_spec = dict(list(lxml.etree.parse(epath).find("Key").items()))
                self.key_specs[ident] = {"bits": key_spec.get("bits", 2048),
                                         "type": key_spec.get("type", "rsa")}
                self.Entries["Path"][ident] = self.get_key
            elif event.filename.endswith("cert.xml"):
                cert_spec = dict(list(lxml.etree.parse(epath).find("Cert").items()))
                ca = cert_spec.get("ca", "default")
                self.cert_specs[ident] = {
                    "ca": ca,
                    "format": cert_spec.get("format", "pem"),
                    "key": cert_spec.get("key"),
                    "days": cert_spec.get("days", 365),
                    "C": cert_spec.get("c"),
                    "L": cert_spec.get("l"),
                    "ST": cert_spec.get("st"),
                    "OU": cert_spec.get("ou"),
                    "O": cert_spec.get("o"),
                    "emailAddress": cert_spec.get("emailaddress"),
                }
                # The CA connection details live in the server config file.
                cp = ConfigParser()
                cp.read(self.core.cfile)
                self.CAs[ca] = dict(cp.items("sslca_" + ca))
                self.Entries["Path"][ident] = self.get_cert
        if action == "deleted":
            if ident in self.Entries["Path"]:
                del self.Entries["Path"][ident]
    else:
        if action in ["exists", "created"]:
            if posixpath.isdir(epath):
                self.AddDirectoryMonitor(epath[len(self.data):])
            # NOTE(review): membership is tested with `ident` but entries are
            # stored under `fname` — confirm this asymmetry is intended.
            if ident not in self.entries and posixpath.isfile(epath):
                self.entries[fname] = self.__child__(epath)
                self.entries[fname].HandleEvent(event)
        if action == "changed":
            self.entries[fname].HandleEvent(event)
        elif action == "deleted":
            if fname in self.entries:
                del self.entries[fname]
            else:
                self.entries[fname].HandleEvent(event)
def add_entry(self, event):
    """Register a new entry set (or a directory monitor) for a FAM event."""
    epath = self.event_path(event)
    ident = self.event_id(event)
    if posixpath.isdir(epath):
        # New subdirectory: start monitoring it (path relative to self.data).
        self.AddDirectoryMonitor(epath[len(self.data):])
    if ident not in self.entries and posixpath.isfile(epath):
        dirpath = "".join([self.data, ident])
        entry_set = self.es_cls(self.filename_pattern, dirpath,
                                self.es_child_cls, self.encoding)
        self.entries[ident] = entry_set
        self.Entries['Path'][ident] = entry_set.bind_entry
    if not posixpath.isdir(epath):
        # do not pass through directory events
        self.entries[ident].handle_event(event)
def folders_equal(self, path1, path2):
    """Assert that two directory trees have the same structure and file contents."""
    names_a = sorted(os.listdir(path1))
    names_b = sorted(os.listdir(path2))
    self.assertEqual(names_a, names_b)
    for name in names_a:
        child_a = posixpath.join(path1, name)
        child_b = posixpath.join(path2, name)
        a_is_dir = posixpath.isdir(child_a)
        self.assertEqual(a_is_dir, posixpath.isdir(child_b))
        if a_is_dir:
            self.folders_equal(child_a, child_b)
        else:
            self.assertTrue(filecmp.cmp(child_a, child_b))
def process_dir(source_dir, dest_dir):
    """Recursively mirror source_dir into dest_dir, applying process_img to each file."""
    if not posixpath.isdir(dest_dir):
        os.mkdir(dest_dir)
    for entry in os.listdir(source_dir):
        src = posixpath.join(source_dir, entry)
        dst = posixpath.join(dest_dir, entry)
        if posixpath.isdir(src):
            process_dir(src, dst)
        else:
            process_img(src, dst)
def get_posix_root(valid_posix_path):
    """Return the first path component of a POSIX path.

    An existing directory is normalised to end with a separator; any other
    path is first reduced to its parent directory.  When no first component
    exists (e.g. the root), the reduced path itself is returned.
    """
    is_bare_dir = (posixpath.isdir(valid_posix_path)
                   and not valid_posix_path.endswith(posixpath.sep))
    if is_bare_dir:
        valid_posix_path += posixpath.sep
    else:
        valid_posix_path = posixpath.dirname(valid_posix_path)
    first_component = valid_posix_path.split(posixpath.sep)[1]
    return first_component or valid_posix_path
def _check_folders(self, path): # The goal is to find out if there was a modification if self.modified: return # Recursive check the last modified date for local_path in listdir(path): local_file = f'{path}/{local_path}' if isdir(local_file): # If there is no last backup if not self.last_date: self.modified = True return modified_date = datetime.utcfromtimestamp(getmtime(local_file)) if modified_date > self.last_date: self.modified = True return # print(local_file) self._check_folders(local_file) # If there is no last backup if not self.last_date: self.modified = True
def upload_dir(self, dirpath, cmk_id=None):
    """Upload the files directly inside *dirpath* (non-recursive).

    Subdirectories are skipped (per the original contract, links and
    directories are not uploaded to the platform).

    Args:
        dirpath (str): directory whose files are uploaded;
        cmk_id (str): Aliyun KMS customer master key ID, required only for
            encrypted uploads;

    Returns:
        list: OSS object names of the uploaded files.
    """
    token = self.get_upload_token()
    uploaded = []
    for entry in os.listdir(dirpath):
        entry_path = join(dirpath, entry)
        if isdir(entry_path):
            continue
        sys.stdout.write('开始上传:{}\n'.format(entry_path))
        with open(entry_path, 'rb') as stream:
            object_name = self._upload_file(token, entry, stream, cmk_id=cmk_id)
        sys.stdout.write('\n\n')
        sys.stdout.flush()
        uploaded.append(object_name)
    return uploaded
def swap_dir(rootpath, path):
    """Swap a symlink with its target directory.

    Args:
        rootpath: Rootpath for tag conversions.
        path: Path of target symlink.

    Raises:
        ValueError: if *path* is not a symlink pointing at a directory.
    """
    if not (posixpath.islink(path) and posixpath.isdir(path)):
        raise ValueError('{} is not a symlink to a directory'.format(path))
    link_path = path                       # the symlink
    dir_path = pathlib.readlink(path)      # the real directory
    # Move the tag from the link side to the directory side.
    dtags.remove_tag(link_path, tagnames.path2tag(rootpath, link_path))
    dtags.add_tag(link_path, tagnames.path2tag(rootpath, dir_path))
    # Physically exchange the two: drop the link, move the directory into
    # its place, then leave a symlink behind at the old directory location.
    os.unlink(link_path)
    os.rename(dir_path, link_path)
    os.symlink(link_path, dir_path)
def start(self, *args):
    """Launch the daemon script as a subprocess and return its Popen handle.

    Raises:
        RuntimeError: if the process was already started.
    """
    if self.process is not None:
        raise RuntimeError(
            'Process already started (PID {})'.format(self.process.pid))
    subenv = os.environ.copy()
    # Make the child's output unbuffered so the pipes see it immediately.
    subenv['PYTHONUNBUFFERED'] = 'x'
    if self.chroot_dir is not None:
        # The chroot messes up coverage
        cov_core_datafile = subenv.get('COV_CORE_DATAFILE')
        if cov_core_datafile:
            # Mirror self.dirname under the chroot so pytest-cov can still
            # write its data file from inside the chrooted process.
            cov_file_name = posixpath.basename(cov_core_datafile)
            cov_file_dir = posixpath.join(
                self.chroot_dir, self.dirname.lstrip('/'))
            if not posixpath.isdir(cov_file_dir):
                os.makedirs(cov_file_dir)
            subenv['COV_CORE_DATAFILE'] = posixpath.join(
                self.dirname, cov_file_name)
        if self.sudo and psutil.LINUX:
            self._setup_chroot_dir()
    base_command = [sys.executable, self.path]
    if self.sudo:
        # -E preserves the environment (incl. the coverage vars above).
        base_command = ['sudo', '-E'] + base_command
    self.process = subprocess.Popen(
        base_command + list(args), cwd=self.dirname, env=subenv,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return self.process
def makedirs(name, mode=0o777, exist_ok=False):
    """makedirs(name [, mode=0o777][, exist_ok=False])

    Super-mkdir: like mkdir, but any missing intermediate path segment is
    created as well (intermediates use the default mode, not *mode*).  If
    the target directory already exists, raise an OSError unless exist_ok
    is true.  Recursive.
    """
    head, tail = path.split(name)
    if not tail:
        # Trailing-slash input: split again to get the real leaf name.
        head, tail = path.split(head)
    if head and tail and not path.exists(head):
        try:
            makedirs(head, exist_ok=exist_ok)
        except FileExistsError:
            # Another thread/process won the race creating the parent.
            pass
        cdir = bytes(curdir, 'ASCII') if isinstance(tail, bytes) else curdir
        if tail == cdir:
            # "xxx/newdir/." exists iff "xxx/newdir" exists.
            return
    try:
        mkdir(name, mode)
    except OSError:
        # errno is unreliable here (EACCES/EROFS may take priority over
        # EEXIST), so re-check with isdir instead of inspecting errno.
        if not exist_ok or not path.isdir(name):
            raise
def do_GET(self):
    """Serve a GET request: a generated listing for directories, the file
    body for regular files."""
    # Normalise the request path to exactly one leading '/' and no
    # trailing '/'.
    self.path = self.path.lstrip('/').rstrip('/')
    path_chars = list(self.path)
    path_chars.insert(0, '/')
    self.path = ''.join(path_chars)
    if self.path == '/':
        self.path = '.'
    # NOTE(review): the re-added leading '/' makes isdir()/isfile() look at
    # the filesystem root rather than the serving directory — confirm the
    # server chdir()s appropriately before requests arrive.
    if posixpath.isdir(self.path):
        data = directory(self.path)
        self.send_response(200)
        self.send_header('content-type', 'text/html')
        self.end_headers()
        self.copyfile(data, self.wfile)
        data.close()
        return
    if posixpath.isfile(self.path):
        data = self.get_head()
        prinn(data)
        data = self.send_head()
        if data:
            try:
                self.copyfile(data, self.wfile)
            except Exception as error:
                print(error)
            finally:
                # Always release the source stream, even on a broken pipe.
                data.close()
def makedirs(name, mode=511, exist_ok=False):
    """makedirs(name [, mode=0o777][, exist_ok=False])

    Super-mkdir: create the leaf directory and any missing intermediate
    path segments.  If the target directory already exists, raise an
    OSError unless exist_ok is true.  Recursive.
    """
    head, tail = path.split(name)
    if not tail:
        # Trailing-slash input: split again to get the real leaf name.
        head, tail = path.split(head)
    if head:
        if not path.exists(head):
            try:
                makedirs(head, exist_ok=exist_ok)
            except FileExistsError:
                # Someone else created the parent first; that is fine.
                pass
        cdir = bytes(curdir, 'ASCII') if isinstance(tail, bytes) else curdir
        if tail == cdir:
            # "xxx/newdir/." exists iff "xxx/newdir" exists.
            return
    try:
        mkdir(name, mode)
    except OSError:
        if not (exist_ok and path.isdir(name)):
            raise
def delete(Home):
    """Recursively remove everything under *Home* (which must end with '/');
    *Home* itself is kept."""
    for entry in os.listdir(Home):
        child = Home + entry
        if posixpath.isdir(child + "/"):
            delete(child + "/")
            os.rmdir(child)
        else:
            os.remove(child)
def get_list_of_categories():
    '''Walk the current directory and get a list of all subdirectories at
    that level.  These are the "categories" in which there are recipes.'''
    return [entry for entry in os.listdir('.')
            if posixpath.isdir(entry) and '.git' not in entry]
def makedirs(name, mode=0o777, exist_ok=False):
    """makedirs(name [, mode=0o777][, exist_ok=False])

    Super-mkdir: create the leaf directory and all missing intermediate
    segments, propagating *mode* to every created directory.  If the target
    already exists, raise an OSError unless exist_ok is true.  Recursive.
    """
    head, tail = path.split(name)
    if not tail:
        # Trailing-slash input: split again to get the real leaf name.
        head, tail = path.split(head)
    if head and tail and not path.exists(head):
        try:
            makedirs(head, mode, exist_ok)
        except FileExistsError:
            # be happy if someone already created the path
            pass
        cdir = bytes(curdir, 'ASCII') if isinstance(tail, bytes) else curdir
        if tail == cdir:
            # "xxx/newdir/." exists iff "xxx/newdir" exists.
            return
    try:
        mkdir(name, mode)
    except OSError as e:
        if not exist_ok or e.errno != errno.EEXIST or not path.isdir(name):
            raise
def getAttributes(self, path):
    """Read the attribute files exposed under /sys<path>.

    Skips hidden names, "dev"/"uevent", subdirectories, symlinks, files
    that cannot be read, and values containing non-printable characters.

    Returns:
        dict: attribute name -> first line of the stripped file content
        (empty when the sysfs directory cannot be listed).
    """
    attributes = {}
    sys_path = "/sys%s" % path
    try:
        names = os.listdir(sys_path)
    except OSError:
        # Device path vanished (or never existed): report no attributes.
        return attributes
    for name in names:
        name_path = posixpath.join(sys_path, name)
        if name.startswith(".") \
                or name in ["dev", "uevent"] \
                or posixpath.isdir(name_path) \
                or posixpath.islink(name_path):
            continue
        try:
            # Bug fix: the original leaked the file handle (open() without
            # close); close it deterministically with a context manager.
            with open(name_path, "r") as attr_file:
                value = attr_file.read().strip()
        except IOError:
            continue
        # Only the first line of a multi-line attribute is meaningful here.
        value = value.split("\n")[0]
        if [c for c in value if not isprint(c)]:
            continue
        attributes[name] = value
    return attributes
def test_isdir(self):
    """posixpath.isdir() is False for files and missing paths, True for
    directories, and raises TypeError with no argument (Python 2 test)."""
    self.assertIs(posixpath.isdir(test_support.TESTFN), False)
    f = open(test_support.TESTFN, "wb")
    try:
        f.write("foo")
        f.close()
        self.assertIs(posixpath.isdir(test_support.TESTFN), False)
        os.remove(test_support.TESTFN)
        os.mkdir(test_support.TESTFN)
        self.assertIs(posixpath.isdir(test_support.TESTFN), True)
        os.rmdir(test_support.TESTFN)
    finally:
        # Bug fix: the original tested "if not f.close()" — close() returns
        # None, so the condition was always true and close() ran twice.
        # The intent was to close only if still open; test .closed instead.
        if not f.closed:
            f.close()
    self.assertRaises(TypeError, posixpath.isdir)
def __setitem__(self, key, value):
    """Store a configuration value (Python 2).

    The special "includes" key is expanded: each whitespace-separated path
    is interpolated, user-expanded and read into the underlying parser;
    directories are walked recursively.  For other keys, environment
    variables take precedence over CHECKBOX* configuration entries.
    """
    if key == "includes":
        if isinstance(value, list):
            # Interpolation below operates on a single string.
            value = value[0]
        for path in split(value):
            path = self._parser._interpolate("DEFAULT", None, path, self)
            path = posixpath.expanduser(path)
            if not posixpath.exists(path):
                raise Exception, "No such configuration file: %s" % path
            if posixpath.isdir(path):
                logging.info("Parsing config filenames from directory: %s",
                             path)
                def walk_func(arg, directory, names):
                    # Read every regular file found during the walk;
                    # subdirectories are visited by walk() itself.
                    for name in names:
                        path = posixpath.join(directory, name)
                        if not posixpath.isdir(path):
                            arg._parser.read(path)
                # posixpath.walk is Python-2 only (removed in Python 3).
                posixpath.walk(path, walk_func, self)
            else:
                logging.info("Parsing config filename: %s", path)
                self._parser.read(path)
    # Environment has precedence over configuration
    elif not key.startswith("CHECKBOX") or key.upper() not in os.environ:
        super(IncludeDict, self).__setitem__(key, value)
def makedirs(name, mode=511, exist_ok=False):
    """Recursively create *name* and any missing parent directories.

    NOTE: in this (decompiled) variant a failing final mkdir always
    re-raises — the mode comparison computed in the handler is unused, so
    exist_ok has no effect on the final step.
    """
    head, tail = path.split(name)
    if not tail:
        # Trailing-slash input: split again to get the real leaf name.
        head, tail = path.split(head)
    if head and tail and not path.exists(head):
        try:
            makedirs(head, mode, exist_ok)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        cdir = bytes(curdir, 'ASCII') if isinstance(tail, bytes) else curdir
        if tail == cdir:
            # "xxx/newdir/." exists iff "xxx/newdir" exists.
            return
    try:
        mkdir(name, mode)
    except OSError as e:
        # Decompilation artifact preserved: the comparison result is
        # ignored and the error is always re-raised.
        dir_exists = path.isdir(name)
        expected_mode = _get_masked_mode(mode)
        if dir_exists:
            actual_mode = st.S_IMODE(lstat(name).st_mode) & ~st.S_ISGID
        else:
            actual_mode = -1
        if dir_exists and actual_mode != expected_mode:
            pass
        raise
def Main(): localfile = open('/home/www/pals/html/dowser/html/cdf.txt','w') comp = localfile.read() files = posix.listdir('/home/www/pals/html/dowser/') os.chdir('/home/www/pals/html/dowser/') dirs = [] paths = [] for i in range(len(files)): file = files[i] if posixpath.isdir(file): os.chdir('/home/www/pals/html/dowser/'+file+'/') hello = posix.getcwd() refs = posix.listdir(hello) for it in range(len(refs)): ref = refs[it] paths.append(hello+"/"+ref) os.chdir('/home/www/pals/html/dowser/') print comp for i in range(len(paths)): path = paths[i] localfile.write(path+"\n") localfile.close() print "Files in dowser updated"
def UpdateListBoxes(self):
    """Refresh the file and directory listboxes from the current directory,
    applying the filter entry to the file list (Python 2 / Tkinter)."""
    from os import listdir
    from posixpath import isfile, isdir, join, basename
    from commands import getoutput
    from string import splitfields
    cwd = self.cwd
    self.fileLb.delete(0, self.fileLb.size())
    filter = self.filterEntry.get()
    # '*' will list recursively, we don't want that.
    if filter == '*':
        filter = ''
    # Shell out to ls so the glob in `filter` is expanded by the shell.
    cmd = "/bin/ls " + join(cwd, filter)
    cmdOutput = getoutput(cmd)
    files = splitfields(cmdOutput, "\n")
    files.sort()
    for i in range(len(files)):
        if isfile(join(cwd, files[i])):
            self.fileLb.insert('end', basename(files[i]))
    self.dirLb.delete(0, self.dirLb.size())
    # Directories come from listdir (no filter applies to them).
    files = listdir(cwd)
    files.sort()
    for i in range(len(files)):
        if isdir(join(cwd, files[i])):
            self.dirLb.insert('end', files[i])
    self.dirLabel['text'] = "Directory:" + self.cwd_print()
def add_source(self, name, package_name, directory, cache_max_age=None):
    """
    Add a static files source directory, optionally associated with a
    python package.

    :param name: The (unique) name used to identify this source.
    :param package_name: The name of the python package containing the files
    :param directory: Path to the directory containing the static files.
        Should be relative if package_name is specified, otherwise absolute.
    :param cache_max_age: Optional duration in seconds for the Cache-Control
        max-age header.  If omitted the default value is used

    :raises ValueError: if *name* is taken or the path is not a directory.
    """
    if name in self.sources:
        raise ValueError("StaticFiles source %r is already used" % (name, ))
    if package_name:
        # Resolve paths through the package's resource provider.
        map_path = get_provider(package_name).get_resource_filename
    else:
        def map_path(resource_manager, path):
            return path
    static_root = map_path(self.resource_manager, directory)
    if not isdir(static_root):
        raise ValueError("%r is not a directory" % (static_root, ))
    if cache_max_age is None:
        cache_max_age = self.cache_max_age
    self.sources[name] = (map_path, directory, cache_max_age)
def makedirs(name, mode=0o777, exist_ok=False):
    """makedirs(path [, mode=0o777][, exist_ok=False])

    Recursively create *name* and all missing intermediate directories.
    With exist_ok, an existing target is tolerated only when it is a
    directory whose (masked) mode matches the requested *mode*.
    """
    head, tail = path.split(name)
    if not tail:
        # Trailing-slash input: split again to get the real leaf name.
        head, tail = path.split(head)
    if head and tail and not path.exists(head):
        try:
            makedirs(head, mode, exist_ok)
        except OSError as e:
            # be happy if someone already created the path
            if e.errno != errno.EEXIST:
                raise
        if tail == curdir:
            # "xxx/newdir/." exists iff "xxx/newdir" exists.
            return
    try:
        mkdir(name, mode)
    except OSError as e:
        import stat as st
        if not (e.errno == errno.EEXIST and exist_ok and path.isdir(name)
                and st.S_IMODE(lstat(name).st_mode) == _get_masked_mode(mode)):
            raise
def impl_test_apply(self, data_name, test_name):
    """Build two collections from test data, apply collection 2's snapshot
    to collection 1, and verify folders and snapshots end up identical."""
    work_dir = posixpath.join(work_root, test_name)
    if posixpath.isdir(work_dir):
        # Start from a clean workspace for this test case.
        shutil.rmtree(work_dir)
    os.makedirs(work_dir)
    test_data = posixpath.join(test_data_root, data_name)
    music1_dir = posixpath.join(work_dir, 'music1')
    result1_dir = posixpath.join(work_dir, 'result1')
    music2_dir = posixpath.join(work_dir, 'music2')
    result2_dir = posixpath.join(work_dir, 'result2')
    create_collection(posixpath.join(test_data, 'image1.json'),
                      result1_dir, music1_dir, 'data.json')
    create_collection(posixpath.join(test_data, 'image2.json'),
                      result2_dir, music2_dir, 'data.json')
    snapshots1 = Snapshots(result1_dir)
    collection1 = Collection(snapshots1, music1_dir)
    snapshots2 = Snapshots(result2_dir)
    # NOTE(review): this constructs the second collection with snapshots1,
    # not snapshots2, and discards the result — confirm this is intended.
    Collection(snapshots1, music2_dir)
    collection1.apply_snapshot(snapshots2.load('data.json'))
    collection1.remove_unused_pictures()
    snapshots1.save(collection1.state, 'data.json')
    # After applying the snapshot, both trees must match.
    self.folders_equal(music1_dir, music2_dir)
    self.folders_equal(posixpath.join(result1_dir, 'pictures'),
                       posixpath.join(result2_dir, 'pictures'))
    self.snapshots_equal(posixpath.join(result1_dir, 'data.json'),
                         posixpath.join(result2_dir, 'data.json'))
def __init__(
        self, name=None, prefix='/opt', log_prefix='', **kwargs):
    """Initialise an FHS-compliant daemon.

    Chooses pid/stdout/stderr file locations according to the Filesystem
    Hierarchy Standard based on *prefix* ('/opt', '/usr/local', '/usr' or
    any custom prefix), then delegates to the base Daemon.

    Raises:
        DaemonError: if no name is given and the class defines none.
    """
    if name is not None:
        self.name = name
    elif not getattr(self, 'name', None):
        raise DaemonError('name must be defined for FHSDaemon')
    # FHS daemons always detach and never chroot.
    kwargs.update({
        'chrootdir': None,
        'detach': True,
    })
    prefix = posixpath.realpath(prefix)
    if prefix == '/opt':
        kwargs.update({
            'pid_file': '/var/opt/{name}/run/{name}.pid',
            'stdout_file': '/var/opt/{name}/log/{log_prefix}out.log',
            'stderr_file': '/var/opt/{name}/log/{log_prefix}err.log',
        })
    elif prefix == '/usr/local':
        kwargs.update({
            'pid_file': '/var/local/run/{name}/{name}.pid',
            'stdout_file': '/var/local/log/{name}/{log_prefix}out.log',
            'stderr_file': '/var/local/log/{name}/{log_prefix}err.log',
        })
    elif prefix == '/usr':
        kwargs.update({
            'pid_file': '/var/run/{name}/{name}.pid',
            'stdout_file': '/var/log/{name}/{log_prefix}out.log',
            'stderr_file': '/var/log/{name}/{log_prefix}err.log',
        })
    else:
        # Custom prefix: keep run/log directories under the prefix itself.
        kwargs.update({
            'pid_file': posixpath.join(prefix, 'run/{name}.pid'),
            'stdout_file': posixpath.join(
                prefix, 'log/{log_prefix}out.log'),
            'stderr_file': posixpath.join(
                prefix, 'log/{log_prefix}err.log'),
        })
    # Format paths
    for key in ('pid_file', 'stdout_file', 'stderr_file'):
        kwargs[key] = kwargs[key].format(
            name=self.name, log_prefix=log_prefix)
    if 'work_dir' in kwargs:
        work_dir = posixpath.realpath(kwargs['work_dir'])
        if work_dir == prefix and not posixpath.isdir(work_dir):
            # Normally, the work_dir is required to exist, but if the
            # work_dir is the same as the prefix, automatically create it
            # if it doesn't exist.
            umask = kwargs.get('umask', 0o22)
            uid = kwargs.get('uid', os.getuid())
            gid = kwargs.get('gid', os.getgid())
            os.makedirs(work_dir, 0o777 & ~umask)
            os.chown(work_dir, uid, gid)
    super(FHSDaemon, self).__init__(**kwargs)
def test_swap_dir(self):
    """swap_dir() must exchange the symlink with its directory and move the tag."""
    with patch('dantalian.dtags.add_tag', autospec=True) as mock_add, \
            patch('dantalian.dtags.remove_tag', autospec=True) as mock_rm:
        base.swap_dir(self.root, 'bag/apple')
        # The directory and the link swapped places...
        self.assertTrue(posixpath.islink('apple'))
        self.assertTrue(posixpath.isdir('bag/apple'))
        # ...and the tag was retargeted accordingly.
        mock_rm.assert_called_with('bag/apple', '//bag/apple')
        mock_add.assert_called_with('bag/apple', '//apple')
def getallmandirs(dirs):
    """Return the entries of *dirs* that are directories, skipping the
    metadata/version-control names RCS, sec, concepts and SCCS."""
    skipped = ('RCS', 'sec', 'concepts', 'SCCS')
    mandirs = []
    for filename in dirs:
        basename = posixpath.split(filename)[1]
        if basename in skipped:
            continue
        if posixpath.isdir(filename):
            mandirs.append(filename)
    return mandirs
def Separate(head, List, ext):
    """Partition the *List* entries under *head* into the global Folders
    and Files lists; only files whose extension equals *ext* are kept.

    Returns:
        tuple: the (Folders, Files) globals after updating them.
    """
    for entry in List:
        full = use.join(head, entry)
        if use.isdir(full):
            Folders.append(full)
        # Bug fix: splitext() returns a (root, ext) tuple, so the original
        # comparison `use.splitext(files) == ext` was always False (cf. the
        # sibling Separate() which correctly compares splitext(...)[1]).
        elif use.splitext(entry)[1] == ext:
            Files.append(full)
    return Folders, Files
def __init__(self, persist, directory, directory_size=1000):
    """Initialise the message store and create its on-disk directory if missing."""
    self._original_persist = persist
    self._persist = persist.root_at("message-store")
    self._directory = directory
    self._directory_size = directory_size
    target = self._message_dir()
    if not posixpath.isdir(target):
        os.makedirs(target)
def validate(self):
    """Return True when the chosen export path is an existing folder;
    otherwise show a warning dialog and return False."""
    if posixpath.isdir(self.form.values[0]):
        return True
    boring.dialog.MessageBox.warning(
        parent=self,
        title='Wrong path',
        message='The export path is not a valid folder')
    return False
def magic_list(args):
    """Print the tags of a directory (when tags were requested) or the
    links of any other path."""
    rootpath = _tag_convert(args, 'path')
    target = args.path
    if args.tags and posixpath.isdir(target):
        results = dtags.list_tags(target)
    else:
        results = base.list_links(rootpath, target)
    for item in results:
        print(item)
def cleanup_drive(self):
    """Remove every backup folder except 'laptop' and the most recent one."""
    root = self.backup
    keep = ('laptop', self.last_backup_folder)
    for entry in listdir(root):
        full = f'{root}/{entry}'
        if isdir(full) and entry not in keep:
            pipe(f'rm -r "{full}"')
def Separate(path, List):
    """Split *List* (names under *path*) into folder paths and .jar file paths."""
    Folders = [use.join(path, entry) for entry in List
               if use.isdir(use.join(path, entry))]
    Files = [use.join(path, entry) for entry in List
             if entry not in Folders and use.splitext(entry)[1] == '.jar']
    return Folders, Files
def listdir(path, fil=None, rec=False):
    """Yield the full paths under *path*, recursing into subdirectories.

    When *fil* is given, only paths matching the fnmatch pattern are
    yielded.  NOTE: the *rec* flag is currently ignored — recursion always
    happens (it is only passed through to nested calls).
    """
    for full in [posixpath.join(path, entry) for entry in posix.listdir(path)]:
        if posixpath.isdir(full):
            for nested in listdir(full, fil, rec):
                yield nested
        if fil is None or fnmatch(full, fil):
            yield full
def test_piddir(pyscript):
    """Daemon start must create the pid directory; stop must empty (not remove) it."""
    script = pyscript("""
import sys
import time
from daemonocle import Daemon

def worker():
    time.sleep(10)

daemon = Daemon(worker=worker, name='foo', pid_file='foo/foo.pid')
daemon.do_action(sys.argv[1])
""")
    pid_dir = posixpath.join(script.dirname, 'foo')
    script.run('start')
    assert posixpath.isdir(pid_dir)
    assert os.listdir(pid_dir) == ['foo.pid']
    script.run('stop')
    # The directory survives, but the pid file is gone.
    assert posixpath.isdir(pid_dir)
    assert os.listdir(pid_dir) == []
def go(self):
    """Run one full backup pass: base folders, always-backed-up folders,
    modified folders, and individual files."""
    print('Checking for new folders.')
    self._check_new()
    if self.checked:
        # Nothing to do for this run.
        return
    local_path = f'{self.backup}/{self.this_backup_folder}'
    if not isdir(local_path):
        # NOTE(review): this "command" is only a log message (note the stray
        # ')' at its end); in non-verbose mode the folder is created directly.
        command = f'Create current backup folder: {local_path})'
        if self.verbose:
            print(command)
        else:
            mkdir(local_path)
    # Backup files in base folders
    print('Backup files in base folders.')
    for local_path in self.base_folders:
        self._make_tarball(local_path)
    # Always backup
    print('Backup folders that should always be backed up.')
    self.modified = True
    for local_path in self.always:
        self._backup(local_path)
    # Normal folders, check if they're modified
    # If they're not modified, move to new backup folder
    # If they are modified, create new backup tarball
    print('Backup modified folders.')
    for local_path in self.normal:
        self.modified = False
        self._check_folders(local_path)
        self._backup(local_path)
    # Individual files
    print('Backup individual files.')
    for local_file in self.individual:
        # Append each file to one uncompressed tar archive...
        command = f'tar -rf "{self.backup}/{self.this_backup_folder}/individual.tar" "{local_file}"'
        if self.verbose:
            print(command)
        else:
            pipe(command)
    # ...then compress it once and drop the intermediate archive.
    command = f'gzip -c "{self.backup}/{self.this_backup_folder}/individual.tar" > ' \
              f'"{self.backup}/{self.this_backup_folder}/individual.tar.gz"'
    if self.verbose:
        print(command)
    else:
        pipe(command)
    command = f'rm "{self.backup}/{self.this_backup_folder}/individual.tar"'
    if self.verbose:
        print(command)
    else:
        pipe(command)
def init_scaffold(self, args):
    """Create a model project scaffold under the user's home directory.

    Lays out the .bge/ and lib/ directories, model.ini, the packaging
    ignore/minify rule files and an executable main.py, saves the model
    configuration, then optionally installs the bge-python-sdk.
    """
    scaffold_name = args.scaffold_name
    home = args.home
    if home is None:
        home = get_home()
    scaffold_dir = join(home, scaffold_name)
    if exists(scaffold_dir):
        output('[red]错误!{} 已存在[/red]'.format(scaffold_dir))
        sys.exit(1)
    if not exists(home):
        output('[red]错误!无法找到 home 目录 {}。[/red]'.format(home))
        sys.exit(1)
    # Interactively collect the model's runtime configuration first.
    model_id, runtime, memory_size, timeout = self._config_model()
    os.makedirs(scaffold_dir)
    bge_dir = join(scaffold_dir, '.bge')
    lib_dir = join(scaffold_dir, 'lib')
    for dir_ in [scaffold_dir, bge_dir, lib_dir]:
        output(CREATE_MESSAGE.format(dir_))
        if not exists(dir_):
            os.makedirs(dir_)
            output('[green]完成[/green]')
        elif not isdir(dir_):
            output('[red]失败!{} 存在但不是目录。[/red]'.format(dir_))
            sys.exit(1)
        else:
            output('[red]已存在[/red]')
    model_config_path = join(scaffold_dir, 'model.ini')
    output(CREATE_MESSAGE.format(model_config_path))
    if not exists(model_config_path):
        open(model_config_path, 'w').write(MODEL_CONFIG_TEMPLATE)
    # Ignore-rule file used when packaging the model source
    ignore_path = join(scaffold_dir, BGE_IGNORE_FILE)
    output(CREATE_MESSAGE.format(ignore_path))
    if not exists(ignore_path):
        open(ignore_path, 'w').write(BGEIGNORE_TEMPLATE)
    # Minify-rule file used when packaging the model source
    minify_path = join(scaffold_dir, BGE_MINIFY_FILE)
    output(CREATE_MESSAGE.format(minify_path))
    if not exists(minify_path):
        open(minify_path, 'w').write(BGEMINIFY_TEMPLATE)
    script_name = 'main.py'
    script_path = join(scaffold_dir, script_name)
    with open(script_path, 'wb') as file_out:
        file_out.write(MAIN_PY.encode())
    # Make main.py executable for its owner.
    st = os.stat(script_path)
    os.chmod(script_path, st.st_mode | stat.S_IEXEC)
    self._save_model_config(model_id, runtime, memory_size, timeout,
                            home=scaffold_dir)
    if confirm('是否安装 bge-python-sdk?'):
        os.chdir(scaffold_dir)
        self._install_sdk()
    output('[green]成功创建模型项目脚手架[/green]')
def AddDirectoryMonitor(self, relative):
    """Add new directory to FAM structures."""
    if not relative.endswith('/'):
        relative += '/'
    name = self.data + relative
    if relative in list(self.handles.values()):
        # Already monitored; nothing to do.
        return
    if not posixpath.isdir(name):
        print("Failed to open directory %s" % (name))
        return
    reqid = self.core.fam.AddMonitor(name, self)
    self.handles[reqid] = relative
def is_dir(s, evaluate=None):
    """ entry must be a directory """
    result = VALID if posixpath.isdir(s) else INVALID
    return (result, s) if evaluate else result
def parseComps(cview, viewDir, buildTests, bc, compsToBuild):
    """Collect the source and include files for every module referenced by
    the components to build.

    Returns:
        tuple: (librarySources, includes) — librarySources maps library
        name -> list of source file paths; includes lists header/.inl paths.
    """
    librarySources = dict()
    includes = list()
    modulesToBuild = list()
    compDict = dict()
    moduleDict = dict()
    # Index: library -> its modules, and module -> owning library.
    for comp in compsToBuild:
        for library, compModules in comp.libraries.items():
            compDict[library] = compModules
            for module in compModules:
                moduleDict[module] = library
    for moduleSearchDir in cview.getItemPaths('modules'):
        for module in os.listdir(moduleSearchDir):
            moduleDir = moduleSearchDir + os.sep + module
            if not posixpath.isdir(moduleDir):
                continue
            if module in moduleDict.keys():
                sourceDir = ctx_cmod.getSourceDir(moduleDir)
                sources, prebuiltObjFiles, subBCSrcDict = ctx_cmod.getSourcesFromDir(
                    sourceDir, bc.getArchPath(), bc.getSubBC())
                libraryName = moduleDict[module]
                if libraryName not in librarySources.keys():
                    librarySources[libraryName] = list()
                for baseSourceFile in sources:
                    librarySources[libraryName].append(sourceDir + os.sep + baseSourceFile)
                # .inl sources and public headers both count as includes.
                inlSources = ctx_cmod.getInlSourcesFromDir(sourceDir)
                for baseInlSource in inlSources:
                    includes.append(sourceDir + os.sep + baseInlSource)
                pubHeaderDir = ctx_cmod.getPubHeaderDir(moduleDir)
                pubHeaders = ctx_cmod.getPubHeadersFromDir(pubHeaderDir)
                for basePubHeader in pubHeaders:
                    includes.append(pubHeaderDir + os.sep + basePubHeader)
                if buildTests:
                    # Test sources are folded into the owning library too.
                    testSourceDir = ctx_cmod.getTestDir(moduleDir)
                    testSourceLists = ctx_cmod.getSourcesFromDir(
                        srcDir=testSourceDir, archPath=bc.getArchPath(),
                        subBCDict=bc.getSubBC())
                    if module in moduleDict.keys():
                        libraryName = moduleDict[module]
                        if libraryName not in librarySources.keys():
                            librarySources[libraryName] = list()
                        for baseTestSourceList in testSourceLists:
                            for baseTestSource in baseTestSourceList:
                                librarySources[libraryName].append(
                                    testSourceDir + os.sep + baseTestSource)
                                root, ext = posixpath.splitext(baseTestSource)
                                if ext in ['.hpp', '.h', '.inl']:
                                    includes.append(testSourceDir + os.sep + baseTestSource)
    return librarySources, includes
def SaiKiran(Home):
    """Recursively prune a deployment tree rooted at *Home* (must end with '/').

    Directories named in the well-known set are deleted wholesale via
    delete(); other directories are descended into.  Files from the
    well-known file list are removed.
    """
    List = ['config.properties', 'java(new).policy', 'log4j.properties', 'NewClass.class']
    List2 = ['bean', 'com', 'display', 'META-INF', 'model', 'org', 'plugins', 'sample', 'support']
    for entry in os.listdir(Home):
        child = Home + entry + "/"
        if posixpath.isdir(child):
            if entry in List2:
                delete(child)
                os.rmdir(child)
            else:
                SaiKiran(child)
        elif entry in List:
            os.remove(Home + entry)
def add_directory_monitor(self, relative):
    """Add a new directory to FAM structures for monitoring.

    :param relative: Path name to monitor.  This must be relative to the
                     plugin's directory.  An empty string value ("") will
                     cause the plugin directory itself to be monitored.
    """
    dirpathname = os.path.join(self.data, relative)
    if relative in self.handles.values():
        # Already monitored; nothing to do.
        return
    if not posixpath.isdir(dirpathname):
        logger.error("Failed to open directory %s" % (dirpathname))
        return
    reqid = self.fam.AddMonitor(dirpathname, self)
    self.handles[reqid] = relative
def path_expand_recursive(path):
    """Expand *path* via path_expand() and return every file it names,
    descending into directories recursively.

    Python 2 only: relies on posixpath.walk, which was removed in Python 3.
    """
    paths = []
    for path in path_expand(path):
        if posixpath.isdir(path):
            def walk_func(arg, directory, names):
                # Collect plain files; directories are visited by walk().
                for name in names:
                    path = posixpath.join(directory, name)
                    if not posixpath.isdir(path):
                        arg.append(path)
            posixpath.walk(path, walk_func, paths)
        else:
            paths.append(path)
    return paths
def SaiKiran(Home, Folders, Dict):
    """Recursively index the tree at *Home* (must end with '/').  Python 2.

    Dict maps file extension -> list of file paths without extension;
    Folders accumulates directory paths.  Returns (Dict, Folders).
    """
    try:
        for Files in os.listdir(Home):
            if posixpath.isdir(Home + Files + "/"):
                # NOTE: rebinding (not append), so the caller's Folders list
                # is not mutated by this level of recursion.
                Folders = Folders + [Home + Files + "/"]
                SaiKiran(Home + Files + "/", Folders, Dict)
            else:
                ext = posixpath.splitext(Home + Files)
                if ext[1] in Dict:
                    Dict[ext[1]] += [ext[0]]
                else:
                    Dict[ext[1]] = [ext[0]]
    except OSError:
        print "you are unable to access :", Home
    return Dict, Folders
def link(rootpath, src, dst):
    """Link src to dst.

    Args:
        rootpath: Path for tagname conversions.
        src: Source path.
        dst: Destination path.
    """
    if not posixpath.isdir(src):
        # Plain file: a hard link is enough; no tag bookkeeping needed.
        os.link(src, dst)
        return
    # Directory: resolve the symlink, symlink its absolute target at dst
    # and record the new tag on the real directory.
    real_src = pathlib.readlink(src)
    os.symlink(posixpath.abspath(real_src), dst)
    dtags.add_tag(real_src, tagnames.path2tag(rootpath, dst))
def CheckDeletedFiles(self):
    ''' The database is "walked" looking for missing files. '''
    with self._db as c:
        cur = self._db.execute('select * from files order by path collate wincase')
        for row in cur:
            if row['deleted']:
                # Already recorded as gone; skip.
                continue
            remotepath = posixpath.join(self._remotepath, row['path'])
            filepath = posixpath.join(self._localpath, row['path'], row['file'])
            if row['isfolder']:
                if not posixpath.isdir(filepath):
                    # Folder vanished locally — _rmdir presumably removes the
                    # remote copy and updates the db row; confirm semantics.
                    self._rmdir(row['path'], row['file'], remotepath, c)
            elif not posixpath.isfile(filepath):
                # File vanished locally — handled analogously by _rm.
                self._rm(row['path'], row['file'], remotepath, c)