def save_to_uri(self, bytes, uri, save_metadata=True):
    # Have to use a two-step process to write to the file: open the
    # filesystem, then open the file. Have to open the filesystem
    # as writeable in case this is a virtual filesystem (like ZipFS),
    # otherwise the write to the actual file will fail with a read-only
    # filesystem error.
    if uri.startswith("file://"):
        # FIXME: workaround to allow opening of file:// URLs with the
        # ! character
        uri = uri.replace("file://", "")
    fs, relpath = opener.parse(uri, writeable=True)
    fh = fs.open(relpath, 'wb')
    log.debug("saving to %s" % uri)
    fh.write(bytes)
    fh.close()
    if save_metadata:
        metadata_dict = dict()
        self.get_extra_metadata(metadata_dict)
        if metadata_dict:
            relpath += ".omnivore"
            log.debug("saving extra metadata to %s" % relpath)
            jsonpickle.set_encoder_options("json", sort_keys=True, indent=4)
            bytes = jsonpickle.dumps(metadata_dict)
            text = jsonutil.collapse_json(bytes)
            header = self.get_extra_metadata_header()
            fh = fs.open(relpath, 'wb')
            fh.write(header)
            fh.write(text)
            fh.close()
            self.metadata_dirty = False
    fs.close()

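# A minimal sketch of the two-step pattern described in the comment above,
# assuming a PyFilesystem 1.x-style opener registry; the import path, URL,
# and payload here are illustrative assumptions, not from the original
# project:
#
#   from fs.opener import opener
#
#   fs, relpath = opener.parse("zip://archive.zip!/data.bin", writeable=True)
#   fh = fs.open(relpath, 'wb')   # step 2: open the file inside that fs
#   fh.write(b"\x00\x01\x02")
#   fh.close()
#   fs.close()  # closing the filesystem lets virtual ones (e.g. ZipFS) flush
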
def export_neglected_file_list(monitoring_dir, ROOT_DIR, LOG_DIR, backup_file_list):
    today_obj = datetime.datetime.today()
    today_str = str(today_obj.year) + str(today_obj.month) + str(today_obj.day)
    export_name = today_str + "_neglected_files.log"
    export_path = fs.join([ROOT_DIR, LOG_DIR, export_name])
    if not fs.exists(fs.join([ROOT_DIR, LOG_DIR])):
        try:
            fs.mkdir(fs.join([ROOT_DIR, LOG_DIR]))
        except Exception:
            print("Can't create LOG_DIR in func: export_neglected_file_list")
    try:
        fs.touch(export_path)
        export_file = fs.open(export_path, 'w')
        for f in backup_file_list:
            try:
                export_file.write('================================================')
                export_file.write('\n')
                export_file.write(fs.filename(f))
                export_file.write('\n')
                export_file.write(fs.dirname(f))
                export_file.write('\n')
            except Exception:
                print("Can't write export file in func: export_neglected_file_list")
        export_file.close()
    except Exception:
        print("Can't export in func: export_neglected_file_list")

def iter_assets(fs, path):
    import hashlib  # for filesystems that don't provide a content hash
    # paths are joined with "/" because we return virtual unix-style paths
    for entry in fs.scandir("/".join(["assets"] + path)):
        if entry.is_dir:
            for asset in iter_assets(fs, path + [entry.name]):
                yield asset
        else:
            fn = "/".join(path + [entry.name])
            with fs.open("assets/" + fn, "rb") as f:
                m = hashlib.sha256()
                while True:
                    data = f.read(8192)
                    if not data:
                        break
                    m.update(data)
                content_hash = m.hexdigest()

            def make_content_loader(fn):
                def content_loader():
                    with fs.open("assets/" + fn, "rb") as f:
                        return f.read()
                return content_loader

            yield (fn, content_hash, make_content_loader(fn))

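# Note on the make_content_loader wrapper above: a closure defined directly
# inside the loop would capture the *variable* fn, not its current value, so
# every yielded loader would read the last file visited. Routing fn through a
# factory function binds the value for that iteration. A standalone,
# runnable illustration with hypothetical names (not from the original code):

def make_loaders_buggy(names):
    # every lambda closes over the same variable, so all see the final name
    return [lambda: name for name in names]

def make_loaders_fixed(names):
    def make_loader(name):
        return lambda: name  # binds this iteration's value of name
    return [make_loader(name) for name in names]

assert [f() for f in make_loaders_buggy(["a", "b"])] == ["b", "b"]
assert [f() for f in make_loaders_fixed(["a", "b"])] == ["a", "b"]
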
def main():
    fs.init('fs')
    fs.mkdir('a')
    fs.mkdir('b')
    fs.mkdir('a/c')
    fs.create('a/d.txt', 20)
    fs.create('a/c/e.txt', 20)
    fd1 = fs.open('a/d.txt', 'rw')
    fd2 = fs.open('a/c/e.txt', 'rw')
    fs.write(fd1, 'hello\nbye\n')
    fs.write(fd2, 'goodbye\n')
    print(fs.read(fd2, 4))
    print(fs.readlines(fd1))
    for f in fs.readlines(fd1):
        print(f, end='')
    fs.close(fd1)
    fs.close(fd2)
    fs.suspend()

def save_to_uri(self, uri, editor, saver=None, save_metadata=True):
    # Have to use a two-step process to write to the file: open the
    # filesystem, then open the file. Have to open the filesystem
    # as writeable in case this is a virtual filesystem (like ZipFS),
    # otherwise the write to the actual file will fail with a read-only
    # filesystem error.
    if saver is None:
        bytes = self.bytes.tostring()
    else:
        bytes = saver(self, editor)
    if uri.startswith("file://"):
        # FIXME: workaround to allow opening of file:// URLs with the
        # ! character
        uri = uri.replace("file://", "")
    fs, relpath = opener.parse(uri, writeable=True)
    fh = fs.open(relpath, 'wb')
    log.debug("saving to %s" % uri)
    fh.write(bytes)
    fh.close()
    if save_metadata:
        mdict = self.init_extra_metadata_dict(editor)
        task_metadata = dict()
        editor.to_metadata_dict(task_metadata, self)
        self.store_task_specific_metadata(editor, mdict, task_metadata)
        if mdict:
            relpath += ".omnivore"
            log.debug("saving extra metadata to %s" % relpath)
            jsonpickle.set_encoder_options("json", sort_keys=True, indent=4)
            bytes = jsonpickle.dumps(mdict)
            text = jsonutil.collapse_json(bytes, 8, self.json_expand_keywords)
            header = editor.get_extra_metadata_header()
            fh = fs.open(relpath, 'wb')
            fh.write(header)
            fh.write(text)
            fh.close()
    fs.close()

def load(self) -> bytes:
    """
    Returns:
        The contents of the file, read as binary (bytes).
    """
    if not self.exists():
        raise FileNotFoundError(self.path.abs)
    with self.filesystem.open() as fs:
        with fs.open(self.path.s, 'rb') as fin:
            return fin.read()

def read_metadata(fs: FS) -> Dict[str, Any]:
    import json

    xml_matches = fs.glob('**/*.xml')
    safe_matches = fs.glob('**/*.safe')
    matches = []
    matches.extend(safe_matches)
    matches.extend(xml_matches)
    metadata = {}
    for match in matches:
        with fs.open(match.path) as file:
            d = xmltodict.parse(file.read())
            metadata[simple_name(match.path)] = d
    with open("metadata.json", "w") as f:
        json.dump(metadata, f)
    return filter_null_attribute(
        flatten(metadata, reducer=make_reducer(delimiter='.')))

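# The flatten(..., reducer=make_reducer(delimiter='.')) call above appears to
# come from the flatten-dict package; a minimal sketch of the kind of output
# it produces (the import path is an assumption, not verified against this
# project):
#
#   from flatten_dict import flatten
#   from flatten_dict.reducers import make_reducer
#
#   nested = {"a": {"b": 1, "c": {"d": 2}}}
#   flatten(nested, reducer=make_reducer(delimiter='.'))
#   # -> {"a.b": 1, "a.c.d": 2}
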
def iter_inputs(fs, input_list):
    import hashlib  # for filesystems that don't provide this info
    for input in input_list:
        if input["path"] is not None:
            path = input["path"]
            with fs.open(path, "rb") as input_file:
                m = hashlib.sha256()
                while True:
                    data = input_file.read(8192)
                    if not data:
                        break
                    m.update(data)
                content_hash = m.hexdigest()

            def make_content_loader(input_file_path):
                def content_loader():
                    with fs.open(input_file_path, "rb") as input_file:
                        return input_file.read()
                return content_loader

            yield (path, input, content_hash, make_content_loader(path))

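# Hypothetical usage sketch for iter_inputs; fs is assumed to be a
# PyFilesystem-like object and input_list a list of {"path": ...} dicts
# (names are illustrative):
#
#   for path, input, content_hash, load in iter_inputs(fs, input_list):
#       print(path, content_hash)
#       data = load()  # re-opens and reads the file only when needed
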
def content_loader():
    with fs.open("assets/" + fn, "rb") as f:
        return f.read()

try:
    fs.create('a1/a2.txt', 5)
except Exception as e:
    print(e)
try:
    fs.deldir('b1')
except Exception as e:
    print(e)
fs.listdir()
fs.listdir('a1')
try:
    fs.deldir('c1')
except Exception as e:
    print(e)
fs.getcwd()
fd = fs.open('a1/a2.txt', 'r')
try:
    fd2 = fs.open('a1/b.txt', 'r')
except Exception as e:
    print(e)
try:
    fs.write(fd, 'hello\n')
except Exception as e:
    print(e)
try:
    fs.write(fd + 1, 'hello\n')
except Exception as e:
    print(e)
fd3 = fs.open('/a0/a1/a2.txt', 'w')
print(fd == fd3)
fs.write(fd, 'hello\n')

# on a directory
fs.chdir('a')
fs.mkdir('b2')
fs.mkdir('/a/b3')
# now on directory b3
fs.chdir('b3')
fs.mkdir('/a/b1/c1')
print(fs.listdir('/a/b1'))
fs.create('/a/b3/fc', 30)
fcd = fs.open('/a/b3/fc', 'w')
fs.write(fcd, '\nnow we needtousegitagain\n')
fs.close(fcd)
fcd1 = fs.open('/a/b3/fc', 'r')
print(fs.readlines(fcd1))
print(fs.read(fcd1, 5))
fs.seek(fcd1, 5)
print(fs.read(fcd1, 10))
fs.close(fcd1)
fs.suspend()
#fs.open('/fa','r')
fs.chdir('..')
# not sure resume works
fs.resume('abc.fssave')
fs.create('fb', 29)
fbd = fs.open('fb', 'w')

def save(self, data: 'str or bytes'):
    if isinstance(data, str):
        data = data.encode()
    with self.filesystem.open() as fs:
        with fs.open(self.path.s, 'wb') as fout:
            return fout.write(data)

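# Hedged round-trip sketch for this save method and the load method shown
# earlier in this section; the File constructor and path handling are
# assumptions about the surrounding class, illustrative only:
#
#   f = File('data/config.bin')
#   f.save('hello')              # str input is encoded to bytes before writing
#   assert f.load() == b'hello'
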
def content_loader():
    with fs.open(input_file_path, "rb") as input_file:
        return input_file.read()