def sync(fp_source, fp_target, create_copy):
    """Transfer files that are new to both the bookkeeper and the target.

    Files already recorded as synced, or already present at the target,
    are skipped (and logged).  Depending on *create_copy* the file is
    copied or moved; every transferred file is recorded via BookKeeper.
    """
    book_keeper = BookKeeper()
    synced = book_keeper.read()
    source_dir = Directory(fp_source)
    target_dir = Directory(fp_target)

    action = 'copied' if create_copy else 'moved'
    transferred = 0
    for name in source_dir:
        already_synced = name in synced
        at_target = name in target_dir
        if already_synced or at_target:
            if already_synced:
                logging.debug('{}: {} already synced'.format(
                    source_dir.path, name))
            if at_target:
                logging.debug('{}: {} at target location'.format(
                    source_dir.path, name))
            continue
        src = os.path.join(source_dir.path, name)
        dst = os.path.join(target_dir.path, name)
        if create_copy:
            shutil.copy(src, dst)
        else:
            shutil.move(src, dst)
        book_keeper.write(name)
        transferred += 1
        logging.debug('{}: {} {}'.format(source_dir.path, action, name))
    logging.info('{} {} of {} files from {} to {}.'.format(
        action, transferred, len(source_dir), source_dir.path,
        target_dir.path))
def __init__(self, input_=None, output_=None):
    """Initialise with wakachi-noun input and uniq-csv output directories.

    Either directory may be supplied by the caller; missing ones fall
    back to the project's default resource locations.
    """
    src = input_ if input_ is not None else Directory(
        path_="../resource/wakachi/",
        default_extension_=".meishi.wakachi")
    dst = output_ if output_ is not None else Directory(
        path_="../resource/imporwords/",
        default_extension_=".uniq.csv")
    super().__init__(src.path, dst.path,
                     src.default_extension, dst.default_extension)
def next():
    """Fetch the next free uid number from the club directory and
    advance the stored counter by one before returning it."""
    entry = Directory().reader("club").search()[0].entry_writable()
    uid = entry.neatNextUidNumber.value
    entry.neatNextUidNumber = uid + 1
    entry.entry_commit_changes()
    return uid
def getInfoDir(dirPath):
    """Build a Directory object for *dirPath* from `stat` output and
    register its immediate sub-directories found via `ls -lh`.

    Returns the populated Directory.

    NOTE(review): parsing `stat`'s textual output is locale/OS
    dependent — the field indices below assume the GNU/Linux default
    format; confirm on the deployment platform.
    """
    proc = subprocess.Popen(['stat', dirPath],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    # stderr is merged into stdout (STDOUT redirect), so the second
    # communicate() value is always None — discard it.
    stdout, _ = proc.communicate()
    res = stdout.decode("utf-8")
    attrs = res.split()
    permission = attrs[2]
    dirNode = int(attrs[3])
    dirOwner = attrs[4]
    groupOwner = attrs[5]
    dirSize = int(attrs[7])
    dirName = attrs[-1].split("/")[-1]
    date = formatDate(res)
    # renamed from `dir` to avoid shadowing the builtin
    info = Directory(dirPath, dirName, permission, dirOwner, groupOwner,
                     dirSize, dirNode, date)
    subproc = subprocess.Popen(['ls', '-lh', dirPath],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    substdout, _ = subproc.communicate()
    if len(substdout) == 0:
        return info
    for line in substdout.decode("utf-8").splitlines(True):
        fields = line.split()
        # lines whose mode string starts with 'd' are sub-directories
        if fields[0].startswith('d'):
            info.addDirSub(fields[-1])
    return info
def _list_inventory(self):
    """Walk the nodes directory and index every node by name.

    Returns a tuple ``(entities, applications, classes)`` where
    *entities* maps node name -> nodeinfo, and the other two map an
    application/class name -> list of node names that use it.
    """
    d = Directory(self.nodes_uri)
    entities = {}

    def register_fn(dirpath, filenames):
        # only files with the inventory extension are node definitions
        for f in filter(lambda f: f.endswith(FILE_EXTENSION), filenames):
            name = f[:-len(FILE_EXTENSION)]
            entities[name] = self.nodeinfo(name)

    d.walk(register_fn)

    applications = {}
    classes = {}
    # .iteritems() was Python 2 only; .items() iterates identically here
    for name, nodeinfo in entities.items():
        for a in nodeinfo['applications']:
            applications.setdefault(a, []).append(name)
        for c in nodeinfo['classes']:
            classes.setdefault(c, []).append(name)
    return entities, applications, classes
def _download(self, content):
    """Fetch every multihash in *content* from IPFS into the working
    directory, rename each download to its original (base64-decoded)
    name, then decrypt files/directories into the root directory and
    delete the encrypted originals.

    BUG FIX: the original did not restore the process cwd if any
    download, rename or decrypt raised — the chdir is now wrapped in
    try/finally so the caller's cwd is always restored.
    """
    cwd = os.getcwd()
    os.chdir(self._working_dir)
    try:
        for multihash in content.keys():
            self._ipfs_client.get(multihash)
        for multihash in os.listdir(self._working_dir):
            # content maps multihash -> base64-encoded original name
            name = base64.b64decode(content[multihash]).decode()
            os.rename(multihash, name)
            full_path = os.path.join(self._working_dir, name)
            self._content.add(full_path, multihash)
            if os.path.isfile(full_path):
                File(full_path).decrypt_content(cipher=self._cipher,
                                                dst_dir=self._root_dir)
                time.sleep(0.1)
                os.remove(full_path)
            elif os.path.isdir(full_path):
                Directory(full_path).decrypt_content(cipher=self._cipher,
                                                     dst_dir=self._root_dir)
                time.sleep(0.1)
                shutil.rmtree(full_path)
    finally:
        os.chdir(cwd)
def start_server(ip, port):
    """Expose a Directory instance over XML-RPC and serve until killed."""
    rpc_server = SimpleXMLRPCServer((ip, port))
    PYTHON_LOGGER.info("Server listen at adresse {} port {}".format(ip, port))
    rpc_server.register_function(Directory(), "directory_class")
    rpc_server.serve_forever()
    PYTHON_LOGGER.info("Server stop")
def main():
    """Interactively merge all tables from a user-chosen folder to CSV."""
    directory = Directory()
    chosen_folder = directory.get_folder_from_user()
    folder_contents = directory.get_folder_contents(chosen_folder)
    table = directory.get_table_name()
    merged = directory.merge_all_tables(folder_contents, table)
    merged.to_csv('../merged_tables.csv')
def __init__(self, name):
    """Initialise the cluster/node state holder identified by *name*.

    Most attributes start empty/None and are populated later by the
    simulation setup code (not visible here).
    """
    self.lock = threading.Lock()
    self.name = name
    self.config = None               # configuration object, set later
    self.cache_layer = {}
    self.c_nodes = 0
    self.links = {}
    self.env = None                  # simulation environment — TODO confirm (presumably simpy)
    self.compute_nodes = 0
    self.mapper_list = {}
    self.cpu = 0
    self.placement = None
    self.logger = None
    self.blk_dir = Directory('blk_dir')  # block directory
    self.jobStat = JobStat()
    self.hash_ring = None
    self.nic_count = 0
    self.scheduler = None
    self.repType = 'rep'             # redundancy mode: 'rep' (replication) by default
    self.repCount = 3                # number of replicas
    self.ec = []                     # erasure-coding parameters — TODO confirm semantics
    self.rep_size = 4
    self.chunk_size = 4
    self.osdMap = None
    self.dl_access = 0
    self.outstanding_req = {}
def create_directory(self, directory, name, attrib=0x10):
    """Create sub-directory *name* inside *directory* on the FAT volume.

    Allocates a free cluster, writes the new directory entry plus the
    mandatory "." and ".." entries, and marks the cluster end-of-chain
    in the FAT.  Returns None (after printing) if *name* already exists.
    """
    if name in [e.name() for e in directory.entries]:
        print(name, " already exists in directory")
        return None
    cluster = self.next_free_cluster()
    filename, extension = short_filename(name, directory)
    # NOTE(review): conditional-expression precedence makes this read as
    # `(name == filename.strip() + '.' + extension.strip()) if <has ext>
    #  else ''` — when the short 8.3 name reproduces the original name
    # exactly, the long-name field is blanked.  Confirm this is intended
    # for extension-less names (condition is then the falsy '').
    if name == filename.strip() + ('.' + extension.strip()) if len(
            extension.strip()) != 0 else '':
        name = ''
    today = datetime.datetime.today()
    # FAT creation time has 10 ms resolution stored in a 0-199 field
    milliseconds = today.microsecond // 10000 + (today.second % 2) * 100
    entry = Entry(filename, extension, Attrib(attrib), 0x00, milliseconds,
                  today.time(), today.date(), today.date(), today.time(),
                  today.date(), cluster, 0, name)
    directory.write(entry)
    self.current_directory.read_from_disk()
    # 0x0FFFFFFF is the FAT32 end-of-chain marker
    self.write_to_fat(cluster, 0x0FFFFFFF)
    new_directory = Directory(self, cluster)
    # "." entry points at the new directory's own cluster
    self_entry = Entry(". ", " ", Attrib(0x10), 0x00, milliseconds,
                       today.time(), today.date(), today.date(),
                       today.time(), today.date(), cluster, 0, '')
    new_directory.write(self_entry)
    new_directory.read_from_disk()
    # ".." entry points back at the parent's first cluster
    up_entry = Entry(".. ", " ", Attrib(0x10), 0x00, milliseconds,
                     today.time(), today.date(), today.date(),
                     today.time(), today.date(),
                     directory.first_cluster, 0, '')
    new_directory.write(up_entry)
def _parse_entry_table(self) -> (List[Firmware], List[Directory]):
    """Parse the 4-byte entries of the firmware entry table, appending
    discovered directories to self.directories and unparsable blobs to
    self.firmwares.

    NOTE(review): the annotation advertises a return value, but the
    function only mutates self.directories / self.firmwares and falls
    off the end (returns None) — confirm callers do not use the result.
    """
    # skip the 4-byte header, then walk 4-byte entries
    entries = chunker(self.firmware_entry_table[4:], 4)
    for index, entry in enumerate(entries):
        firmware_type = self._FIRMWARE_ENTRY_TYPES[index] if index < len(
            self._FIRMWARE_ENTRY_TYPES) else 'unknown'
        # low 24 bits of the little-endian word are the flash address
        address = struct.unpack('<I', entry)[0] & 0x00FFFFFF
        # assumption: offset == 0 is an invalid entry
        if address not in [0x0, 0xfffffe]:
            directory = self[address:address + 16 * 8]
            magic = directory[:4]
            # either this entry points to a PSP directory directly
            if magic in [b'$PSP', b'$BHD']:
                directory = Directory(self, address, firmware_type)
                self.directories.append(directory)
                # if this Directory points to a secondary directory: add it, too
                if directory.secondary_directory_address is not None:
                    secondary_directory = Directory(
                        self, directory.secondary_directory_address,
                        'secondary')
                    self.directories.append(secondary_directory)
            # or this entry points to a combo-directory (i.e. two directories)
            elif magic == b'2PSP':
                psp_dir_one_addr = struct.unpack(
                    '<I', directory[10 * 4:10 * 4 + 4])[0] & 0x00FFFFFF
                psp_dir_two_addr = struct.unpack(
                    '<I', directory[14 * 4:14 * 4 + 4])[0] & 0x00FFFFFF
                for address in [psp_dir_one_addr, psp_dir_two_addr]:
                    directory = Directory(self, address, firmware_type)
                    self.directories.append(directory)
                    # if this Directory points to a secondary directory: add it, too
                    if directory.secondary_directory_address is not None:
                        secondary_directory = Directory(
                            self, directory.secondary_directory_address,
                            'secondary')
                        self.directories.append(secondary_directory)
            # or this entry is unparsable and thus a firmware
            else:
                firmware = Firmware(self, address, firmware_type, magic)
                self.firmwares.append(firmware)
def test_load(self):
    """Saving then reloading the directory must preserve email lookup."""
    contacts = [
        ("pepe", "*****@*****.**", "27", "Mexico"),
        ("juan", "*****@*****.**", "27", "Mexico"),
        ("oscar", "*****@*****.**", "29", "Mexico"),
    ]
    for contact in contacts:
        self.dir.add_contact(*contact)
    self.dir.save()
    reloaded = Directory(self.file)
    self.assertEqual(self.dir.search_by_email("*****@*****.**"),
                     reloaded.search_by_email("*****@*****.**"))
def testcase1(self):
    """Adding two paths sharing a prefix only counts the new directories."""
    root = Directory()
    root.addDirectory('/home/gcj/finals'.split('/')[1:])
    self.assertEqual(3, root.size())
    root.addDirectory('/home/gcj/quals'.split('/')[1:])
    self.assertEqual(4, root.size())
def addRoot(my_user):
    """Create and persist the user's root directory, then link it to the
    user entity."""
    root = Directory(id=my_user.key.id() + '/')
    root.name = 'root'
    root.parent = None
    root.path = '/'
    root.put()
    my_user.root = root.key
    my_user.put()
def getPaths(self):
    """Load the configured directories from the JSON config file.

    Returns a list of Directory objects; empty if none are configured.
    """
    with open(self.__file_path, 'r') as json_file:
        data = json.load(json_file)
        configured = data['directories']
        if configured is not None and len(configured) > 0:
            return [Directory(dictionary=entry) for entry in configured]
        return []
def setResolvedUrl(handle, succeeded, listitem):
    """Record *listitem* as the single resolved item of the currently
    explored route and publish it as the current directory."""
    log.debug('setResolvedUrl: {}'.format(listitem._path))
    resolved_dir = Directory()
    resolved_dir.path = Route.current_explored_route.path
    resolved_item = Item()
    resolved_item.url = listitem._path
    resolved_item.listitem = listitem
    resolved_item.is_folder = False
    resolved_dir.items[1] = resolved_item
    Directory.current_directory = resolved_dir
def get_last_backup(self):
    """Return the most recent backup Directory on the mount point.

    Backup directories are recognised by a name parseable as a date.
    Returns None when no backup exists.

    Simplified: the original sorted the whole list descending and took
    the first element via next()/StopIteration; max() with a default
    does the same in one O(n) pass.
    """
    backups = (Directory(date=entry, path=self.mount_point)
               for entry in os.listdir(self.mount_point)
               if is_date(date=entry, time_format=TIME_FORMAT))
    return max(backups, default=None)
def getFolderSizeHelper(folder, dirList, currDir, nested):
    """Recursively accumulate file sizes under *folder* into
    currDir.size, appending every visited Directory to *dirList*.

    Returns currDir.size (total bytes under *folder*).
    """
    for item in os.scandir(folder):
        if item.is_file():
            currDir.size+=item.stat().st_size
        elif item.is_dir():
            # name keeps its leading '/'; second arg is one nesting level deeper
            subDir = Directory(item.path[(item.path).rfind('/'):], (nested + 1))
            # NOTE(review): the 4th argument passed here is subDir.size,
            # not a nesting level as the parameter name suggests —
            # confirm whether `nested + 1` was intended instead.
            currDir.size += getFolderSizeHelper(item.path, dirList, subDir, subDir.size)
    dirList.append(currDir)
    return currDir.size
def __init__(self, output_path="./resource/imporwords/", tfidf_=None,
             model_=None):
    """Initialise with default tfidf and vector-model directories.

    Callers may inject their own Directory instances; otherwise the
    project's default resource locations are used.
    """
    tfidf_source = tfidf_ if tfidf_ is not None else Directory(
        path_="./resource/tfidf/", default_extension_=".tfidf",
        is_import_=True)
    model_source = model_ if model_ is not None else Directory(
        path_="./resource/vector/", default_extension_=".vector",
        is_import_=True)
    super().__init__(None, output_path, None, ".imporword.csv")
    self.models = {
        "path": model_source.path,
        "file_list": model_source.get_file_list(is_add_test_=False),
    }
    self.tfidfs = tfidf_source.get_file_path_list(is_add_test_=False)
    self.important_words = None
def free_space(self, force_clean=False):
    """Delete old backups on the mount point until there is free space.

    With force_clean=True every policy-deletable backup is removed;
    otherwise deletion stops as soon as has_space() is satisfied.  If
    removing deletable backups is not enough, backups are then deleted
    oldest-first, and the process exits(1) when nothing is left but
    space is still insufficient.
    """
    if LOGLEVEL > 0:
        if force_clean is False and not self.has_space():
            print(
                "Disk at {} has no space left:{}deleting some old backups..."
                .format(self.mount_point, os.linesep))
        elif force_clean is True:
            print("Force clean on disk {}:{}deleting all old backups...".
                  format(self.mount_point, os.linesep))
    # backups sorted oldest-first; only date-named entries qualify
    dirs = (x for x in sorted(
        Directory(date=x, path=self.mount_point)
        for x in os.listdir(self.mount_point)
        if is_date(date=x, time_format=TIME_FORMAT)))
    try:
        previous_backup = next(dirs)
        for tmp_dir in dirs:
            if not force_clean and self.has_space():
                break
            else:
                # only remove backups the retention policy marks deletable,
                # relative to the previous kept backup
                if tmp_dir.is_deletable(previous_dir=previous_backup):
                    tmp_dir.remove()
                else:
                    previous_backup = tmp_dir
    except StopIteration:
        # no backups at all on the mount point
        pass
    if not self.has_space():
        if LOGLEVEL > 0:
            print("No more useless backup: deleting from the oldest")
        # second pass: re-list (some were removed) and delete oldest-first
        dirs = (x for x in sorted((Directory(date=x, path=self.mount_point)
                                   for x in os.listdir(self.mount_point)
                                   if is_date(date=x,
                                              time_format=TIME_FORMAT))))
        while not self.has_space():
            try:
                next(dirs).remove()
            except StopIteration:
                print(
                    "All backup removed but still not enough space. Aborting.")
                exit(1)
    if LOGLEVEL > 0:
        print("Cleanup finished.")
def _add_file(self, file_path, add_file=True):
    """A helper to insert *file_path* into the tree, creating any
    missing intermediate Directory nodes.

    When *add_file* is True the final path component becomes a File;
    otherwise every component (including the last) is a Directory.
    """
    paths = file_path.split('/')
    curr = self._root
    for i, part in enumerate(paths):
        child = curr._children.get(part)
        if child is None:
            # BUG FIX: the last-component check must compare against the
            # number of components (len(paths)), not the length of the
            # current component string (len(path)) as the original did —
            # that made files only be created when the component's name
            # length happened to equal the component count minus one.
            if i == len(paths) - 1 and add_file:
                child = curr._children[part] = File(part)
            else:
                child = curr._children[part] = Directory(part)
        curr = child
def sync(fp_target, remove_synced=False):
    """Pull not-yet-synced files from the connected Android device into
    *fp_target*, recording the newly synced files in the BookKeeper."""
    book_keeper = BookKeeper()
    already_synced = book_keeper.read()
    device = AndroidDevice(already_synced)
    device.connect()
    device.get_files_to_sync()
    newly_synced = device.sync(target=Directory(fp_target),
                               remove_synced=remove_synced)
    device.disconnect()
    book_keeper.write(newly_synced)
def addDirectory(my_user, name, parentKey):
    """Create a Directory named *name* under the parent entity, link it
    into the parent's children, and persist both entities."""
    parent = parentKey.get()
    path = getPath(name, parent)
    # entity id is the user id prefixed to the directory path
    directory_id = my_user.key.id() + path
    directory = Directory(id=directory_id)
    # NOTE(review): the child key is appended only when exists() is
    # truthy — verify exists() semantics; if it means "key already in
    # the list", this condition looks inverted and new directories would
    # never be linked to their parent.
    if exists(directory.key, parent.directories):
        parent.directories.append(directory.key)
        parent.put()
    directory.parent = parentKey
    directory.name = name
    directory.path = path
    directory.put()
def build_inmem_filesys():
    """Assemble a small in-memory FileSystem fixture:

        /Confix2.in
        /dir1/Confix2.in, /dir1/file1_{1,2}.{h,c}
        /dir1/dir2/file2_1.{h,c}
    """
    fs = FileSystem(path=['', 'tmp', 'test-the-filesystem'],
                    rootdirectory=Directory())
    root = fs.rootdirectory()
    root.add(name='Confix2.in',
             entry=File(lines=['PACKAGE_NAME("basic")',
                               'PACKAGE_VERSION("6.6.6")']))

    outer = Directory()
    outer.add(name='Confix2.in',
              entry=File(lines=['IGNORE_ENTRIES(["file1_1.h", "file1_1.c"])']))
    for leaf in ('file1_1.h', 'file1_1.c', 'file1_2.h', 'file1_2.c'):
        outer.add(name=leaf, entry=File(lines=[]))

    inner = Directory()
    for leaf in ('file2_1.h', 'file2_1.c'):
        inner.add(name=leaf, entry=File(lines=[]))
    outer.add(name='dir2', entry=inner)

    root.add(name='dir1', entry=outer)
    return fs
def add_root_dir(self, start_time):
    """Add every pre-existing entry of the root directory to IPFS and
    pin it on the cluster.

    Entries created at or after *start_time* are skipped entirely.
    """
    for entry in os.listdir(self._root_dir_path):
        full_path = os.path.join(self._root_dir_path,
                                 entry).replace(os.sep, '/')
        if os.path.getctime(full_path) >= start_time:
            continue
        if os.path.isfile(full_path):
            file_obj = File(full_path)
            self._content.add(file_obj.path,
                              self._ipfs_client.add_file(file_obj))
        elif os.path.isdir(full_path):
            self._content.add_list(
                self._ipfs_client.add_dir(Directory(full_path)))
        self._ipfs_cluster.pin(self._content[full_path])
def main():
    """Exercise the directory: populate, query, delete, then persist."""
    directory = Directory()
    add_persons(directory)
    show_directory(directory)
    for email, age in (("*****@*****.**", 62), ("*****@*****.**", 42)):
        find_user(directory, email, age)
    delete_user(directory, 2)
    show_directory(directory)
    save_directory(directory)
def delete_directory(self, directory, name):
    """Recursively delete the sub-directory *name* (and its contents)
    from *directory*, then refresh the parent from disk."""
    entry = [e for e in directory.entries if e.name() == name][0]
    victim = Directory(self, entry.cluster)
    self.delete_entry(directory, name)
    for child in victim.entries:
        child_name = child.name()
        # never recurse into the self/parent pseudo-entries
        if child_name in (".", ".."):
            continue
        if child.attrib.subdirectory:
            self.delete_directory(victim, child_name)
        else:
            self.delete_entry(victim, child_name)
    directory.read_from_disk()
def get_directory(self, path):
    """Resolve *path* (absolute or relative; escaped spaces honoured)
    to a Directory.

    Returns the current directory for '', or None (after printing a
    warning) when any component is not a directory.
    """
    if path == '':
        return self.current_directory
    if path.startswith('/'):
        directory = Directory(self, self.root_cluster)
        remainder = path[1:]
    else:
        directory = self.current_directory
        remainder = path
    for component in remainder.replace(r'\ ', ' ').split('/'):
        if not component:
            continue
        matches = [entry for entry in directory.entries
                   if entry.attrib.subdirectory and entry.name() == component]
        if len(matches) == 0:
            print(colors.WARNING + '"' + path + '" is not a directory'
                  + colors.END)
            return
        directory = Directory(self, matches[0].cluster)
    return directory
def main():
    """Run the demo application end to end: populate the directory,
    query it, delete a user, and save the result."""
    logging.info("Running Application...")
    phone_book = Directory()
    add_persons(phone_book)
    show_directory(phone_book)
    find_user(phone_book, "*****@*****.**", 62)
    find_user(phone_book, "*****@*****.**", 42)
    delete_user(phone_book, 2)
    show_directory(phone_book)
    save_directory(phone_book)
def scan_dir(path):
    """Recursively scan *path* (given as a list of path components)
    into a Directory tree; files are marked SYNC_CLEAR, directories
    SYNC.  Raises Error for entries that are neither."""
    result = Directory(state=DirectoryState.SYNC)
    for entry in os.listdir(os.sep.join(path)):
        if entry in ('.', '..'):
            continue
        child_components = path + [entry]
        child_str = os.sep.join(child_components)
        if os.path.isfile(child_str):
            result.add(name=entry, entry=File(state=FileState.SYNC_CLEAR))
        elif os.path.isdir(child_str):
            result.add(entry, scan_dir(child_components))
        else:
            raise Error(child_str + ' has unknown type')
    return result