def make_cbz(self, cbzpath=None):
    ''' Create a CBZ file from the images in the MOBI file.
        Return the path to the created CBZ file.

        Parameters:
        * `cbzpath`: optional output path; default: the MOBI basename
          with a `.cbz` extension.

        Raises `ValueError` if `cbzpath` already exists.
        Returns `1` if the CBZ appears during creation (race with
        another writer); otherwise returns `cbzpath`.
    '''
    if cbzpath is None:
        mobibase, mobiext = splitext(basename(self.path))
        cbzpath = mobibase + '.cbz'
    if existspath(cbzpath):
        raise ValueError("CBZ path %r already exists" % (cbzpath,))
    with self.extracted() as df:
        dirpath, rfilepath = df
        imagepaths = sorted(glob(joinpath(dirpath, 'mobi8/OEBPS/Images/*.*')))
        info("write %s", cbzpath)
        try:
            # mode 'x': fail if cbzpath appeared between the check above and here
            with pfx_call(ZipFile, cbzpath, 'x', compression=ZIP_STORED) as cbz:
                for imagepath in imagepaths:
                    pfx_call(cbz.write, imagepath, arcname=basename(imagepath))
        except FileExistsError as e:
            # BUGFIX: corrected typo "eixsts" in the error message
            error("CBZ already exists: %r: %s", cbzpath, e)
            return 1
        except Exception:
            # remove any partial CBZ before propagating the failure
            if existspath(cbzpath):
                pfx_call(os.unlink, cbzpath)
            raise
        return cbzpath
def on_spawn(self):
    ''' Actions to perform before commencing the ssh tunnel.
        Initially remove local socket paths.
    '''
    options = self.ssh_options()
    # remove stale UNIX domain socket paths for local forwards
    for localforward in options['localforward']:
        local, remote = localforward.split(None, 1)
        if '/' not in local:
            continue
        with Pfx("remove %r", local):
            try:
                os.remove(local)
            except OSError as e:
                # a missing socket is fine; anything else is real
                if e.errno != errno.ENOENT:
                    raise
            else:
                info("removed")
    # likewise remove the control socket when control mastering is on
    if (options['controlmaster'] == ['true']
        and options['controlpath'] != ['none']):
        controlpath, = options['controlpath']
        with Pfx("remove %r", controlpath):
            try:
                os.remove(controlpath)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
            else:
                info("removed")
def _embiggen_lmdb(self, new_map_size=None):
    ''' Enlarge the LMDB map size and reopen the database.
        The default new size is double the current `map_size`.
    '''
    if new_map_size is None:
        new_map_size = self.map_size * 2
    self.map_size = new_map_size
    info("change LMDB map_size to %d", self.map_size)
    # the new map size only takes effect on reopen
    return self._reopen_lmdb()
def importAddresses(self, fp):
    ''' Import addresses into groups from the file `fp`.
        Import lines are of the form:
          group[,...] email_address
    '''
    with Pfx(str(fp)):
        lineno = 0
        for line in fp:
            lineno += 1
            with Pfx(str(lineno)):
                if not line.endswith('\n'):
                    error("unexpected EOF")
                    break
                try:
                    groups, addr = line.strip().split(None, 1)
                except ValueError:
                    error("no addresses")
                    continue
                if not addr:
                    info("SKIP - no address")
                    # BUGFIX: previously fell through and tried to
                    # parse the empty address below
                    continue
                try:
                    A = self.getAddressNode(addr)
                except ValueError as e:
                    error("bad address: %s: %s", addr, e)
                    continue
                A.GROUPs.update(groups.split(','))
        # forget the old mapping
        self._address_groups = None
def wait(self):
    ''' Wait for the Later to be finished. '''
    finished = self._finished
    if finished.is_set():
        return
    info("Later.WAIT: %r", self)
    # bounded wait so a wedged Later is reported rather than hanging
    if not finished.wait(5.0):
        warning(" Later.WAIT TIMED OUT")
def test_func():
    ''' Run the test shell command, return True if it exits 0. '''
    with Pfx("main.test_func: shcmd=%r", test_shcmd):
        argv = ['sh', '-c', test_shcmd]
        if test_uid != uid:
            # run the command as the designated test user
            argv = ['su', test_username, 'exec ' + quotecmd(argv)]
        shcmd_ok = callproc(argv, stdin=DEVNULL) == 0
        # BUGFIX: only report the nonzero exit status on failure;
        # previously this logged unconditionally, even on success
        if not shcmd_ok and not quiet:
            info("exit status != 0")
        return shcmd_ok
def get_Archive(self, name, missing_ok=False):
    ''' Obtain the named Archive from a Store in the archives list.
        Raises `KeyError` if no archive path pattern matches `name`.
    '''
    with Pfx("%s.get_Archive(%r)", self, name):
        for store, pattern in self.archive_path:
            if not fnmatch(name, pattern):
                continue
            info(
                "%s.get_Archive(%r): matched %r, fetching from %r",
                self.name, name, pattern, store.name
            )
            return store.get_Archive(name, missing_ok=missing_ok)
        raise KeyError("no such Archive")
def test(self, trace=False):
    ''' Ping the target as a test; return True on a zero exit status. '''
    if trace:
        info("run %r", self.ping_argv)
    # discard all ping I/O: only the exit status matters
    return subprocess.call(
        self.ping_argv,
        stdin=DEVNULL,
        stdout=DEVNULL,
        stderr=DEVNULL,
    ) == 0
def delete(self, where, *where_argv):
    ''' Delete rows matching the `where` clause.
        NOTE: `where` is interpolated directly into the SQL, so it
        must come from trusted code; data values go in `where_argv`.
    '''
    sql = 'delete from %s where %s' % (self.table_name, where)
    cursor = self.conn.cursor()
    info("SQL: %s %r", sql, where_argv)
    with Pfx("SQL %r: %r", sql, where_argv):
        cursor.execute(sql, where_argv)
        self.conn.commit()
        cursor.close()
def startup(self):
    ''' Open the metadata Store (if any) and install the top directory
        from the most recent archive entry, creating an empty Dir when
        the archive has no entries.
    '''
    if self.meta_store is not None:
        self.meta_store.open()
    archive = self.archive
    topdirent = archive.last.dirent
    if topdirent is None:
        info("%r: no archive entries, create empty topdir Dir", archive)
        topdirent = Dir('.')
        # record the fresh empty Dir in the archive
        archive.update(topdirent)
    self.topdir = topdirent
    super().startup()
def initdir(self):
    ''' Init a directory and its "data" subdirectory.
        Create whichever of the top directory and its `data`
        subdirectory do not already exist.
    '''
    topdirpath = self.topdirpath
    # the parent must be created before its subdirectory, so order matters
    for dirpath in topdirpath, joinpath(topdirpath, 'data'):
        if not isdirpath(dirpath):
            info("mkdir %r", dirpath)
            with Pfx("mkdir(%r)", dirpath):
                os.mkdir(dirpath)
def __setitem__(self, key: bytes, binary_entry: bytes):
    # Store `binary_entry` against `key`, retrying on a full LMDB map.
    # pylint: disable=import-error,import-outside-toplevel
    import lmdb
    while True:
        try:
            with self._txn(write=True) as txn:
                txn.put(key, binary_entry, overwrite=True)
                txn.commit()
        except lmdb.MapFullError as e:
            # map full: flag a resize and retry the put
            # NOTE(review): assumes self._txn (or a peer) honours
            # _resize_needed and grows the map, otherwise this loops
            # forever — confirm against the _txn implementation
            info("%s", e)
            self._resize_needed = True
        else:
            return
def update(self, D2, path=None):
    ''' Update this Dir with changes from `D2` which is presumed to be new.

        Note: this literally attaches nodes from `D2` into this
        Dir's tree where possible.

        Parameters:
        * `D2`: the Dir whose entries are merged into this Dir
        * `path`: optional display path for log context; default
          `self.pathto()`

        TODO: replace with code from vt.merge.
    '''
    if path is None:
        path = self.pathto()
    with Pfx("update(%r)", path):
        for name in D2:
            with Pfx(name):
                E2 = D2[name]
                if name in self:
                    # conflict: an entry of this name already exists
                    # TODO: support S_IFWHT whiteout entries
                    E1 = self[name]
                    if E1.uuid == E2.uuid:
                        # same file: fold newer metadata/content into E1
                        assert E1.type == E2.type
                        if E2.meta.ctime > E1.meta.ctime:
                            info("update meta")
                            E1.meta.update(E2.meta.items())
                        if E1.block != E2.block:
                            if E2.mtime > E1.mtime:
                                # TODO: E1.flush _after_ backend update? or before?
                                info("update block => %s", E2.block)
                                E1.block = E2.block
                                E1.meta.mtime = E2.mtime
                    else:
                        # distinct objects with the same name
                        if E1.isdir and E2.isdir:
                            # merge subtrees recursively
                            E1.update(E2)
                        elif E1.isfile and E2.isfile and E1.block == E2.block:
                            # file with same content, fold
                            # TODO: use Block.compare_content if different blocks
                            if E2.meta.ctime > E1.meta.ctime:
                                info("update meta")
                                E1.meta.update(E2.meta.items())
                        else:
                            # different content:
                            # add other object under a different name
                            new_name = self.new_name(name)
                            info("add new entry as %r: %s", new_name, E2)
                            self[new_name] = E2
                else:
                    # new item, attach E2 directly
                    # NB: we don't recurse into new Dirs, not needed
                    info("add new entry: %s", E2)
                    self[name] = E2
def forward(self, target):
    ''' Obtain the named Portfwd, creating it if necessary. '''
    forwards = self._forwards
    if target not in forwards:
        info("instantiate new target %r", target)
        forwards[target] = Portfwd(
            target,
            ssh_config=self.ssh_config,
            trace=self.trace,
            flags=self.flags,
            conditions=self.target_conditions[target],
        )
    return forwards[target]
def alert(self, msg, *a):
    ''' Issue an alert message via the "alert" command.
        A no-op when `self.quiet`. Positional arguments `a`, if any,
        are `%`-interpolated into `msg`.
    '''
    if self.quiet:
        return
    if a:
        msg = msg % a
    alert_argv = [
        'alert', '-g', self.group_name,
        'SVCD %s: %s' % (self.name, msg)
    ]
    if self.trace:
        # CONSISTENCY: lazy %-style logging args, matching the rest of
        # the codebase, instead of eager string interpolation
        info("alert: %s: %s", self.name, msg)
    LockedPopen(alert_argv, stdin=DEVNULL)
def export_to_calibre(
    self,
    calibre,
    *,
    doit=True,
    make_cbz=False,
    replace_format=False,
    once=False,
):
    ''' Export this Kindle book to a Calibre instance,
        return the `CalibreBook`.

        Parameters:
        * `calibre`: the `CalibreTree`
        * `doit`: if false, just recite actions; default `True`
        * `make_cbz`: create a CBZ file after the initial import
        * `replace_format`: if true, export even if the `AZW3` format
          is already present
        * `once`: NOTE(review): accepted but never consulted in this
          body — confirm the intended semantics
    '''
    with Pfx(self.asin):
        azw_path = self.extpath('azw')
        dbid = self.tags.auto.calibre.dbid
        if dbid:
            # book already present in calibre
            cbook = calibre[dbid]
            with Pfx("calibre %d: %s", dbid, cbook.title):
                formats = cbook.formats_as_dict()
                if ((set(('AZW3', 'AZW', 'MOBI')) & set(formats.keys()))
                    and not replace_format):
                    info("format AZW3 already present, not adding")
                elif 'CBZ' in formats:
                    info("format CBZ format present, not adding AZW3")
                else:
                    if doit:
                        calibre.add_format(
                            azw_path,
                            dbid,
                            force=replace_format,
                        )
                    else:
                        info("add %s", azw_path)
        else:
            # book does not have a known dbid, presume not added
            if doit:
                dbid = calibre.add(azw_path)
                self.tags['calibre.dbid'] = dbid
                cbook = calibre[dbid]
                formats = cbook.formats_as_dict()
            else:
                info("calibre.add %s", azw_path)
        # AZW added, check if a CBZ is required
        if make_cbz:
            if doit:
                with Pfx("calibre %d: %s", dbid, cbook.title):
                    cbook.make_cbz(replace_format=replace_format)
            else:
                print(
                    "create CBZ from the imported AZW3, then remove the AZW3"
                )
    # NOTE(review): when dbid is falsy and doit is false, cbook is
    # never bound and this raises NameError — confirm and fix upstream
    return cbook
def __init__(self, libpath=None):
    ''' Open the Calibre library stored at `libpath`.
        If `libpath` is not supplied, use $CALIBRE_LIBRARY_PATH
        or DEFAULT_LIBRARY.
    '''
    if libpath is None:
        # fall back to the environment, then the built-in default
        libpath = os.environ.get(
            'CALIBRE_LIBRARY_PATH', envsub(DEFAULT_LIBRARY)
        )
    info("%s: libpath=%r", self.__class__.__name__, libpath)
    if not os.path.isdir(libpath):
        raise ValueError("not a directory: %r" % (libpath,))
    self.path = libpath
    self._lock = RLock()
    self.metadbpath = self.pathto(METADB_NAME)
    self.metadb = CalibreMetaDB(self, self.metadbpath)
    self.table = self.metadb.table
def setxattr(self, inum, xattr_name, xattr_value):
    ''' Set the extended attribute `xattr_name` to `xattr_value`
        on inode `inum`.

        Names outside the `XATTR_VT_PREFIX` namespace are stored as
        ordinary extended attributes; prefixed names are interpreted
        as filesystem operations: `block` (replace the Dirent's
        content Block) and `control` (control commands, currently
        only `cache`).
    '''
    if self.readonly:
        # refuse modification on a read only filesystem
        OS_EROFS("fs is read only")
    E = self.i2E(inum)
    xattr_name = Meta.xattrify(xattr_name)
    if not xattr_name.startswith(XATTR_VT_PREFIX):
        # ordinary attribute, set it and return
        E.meta.setxattr(xattr_name, xattr_value)
        return
    # process special attribute names
    with Pfx("%s.setxattr(%d,%r,%r)", self, inum, xattr_name, xattr_value):
        suffix = xattr_name[len(XATTR_VT_PREFIX):]
        with Pfx(suffix):
            if suffix == 'block':
                # update the Dirent's content directly
                if not E.isfile:
                    OS_EINVAL(
                        "tried to update the data content of a nonfile: %s",
                        E)
                # the value is a textual Block transcription
                block_s = Meta.xattrify(xattr_value)
                B, offset = parse(block_s)
                if offset < len(block_s):
                    OS_EINVAL(
                        "unparsed text after trancription: %r",
                        block_s[offset:])
                if not isBlock(B):
                    OS_EINVAL("not a Block transcription")
                info("%s: update .block directly to %r", E, str(B))
                E.block = B
                return
            if suffix == 'control':
                # the value is a shell-style control command
                argv = shlex.split(xattr_value.decode('utf-8'))
                if not argv:
                    OS_EINVAL("no control command")
                op = argv.pop(0)
                with Pfx(op):
                    if op == 'cache':
                        if argv:
                            OS_EINVAL("extra arguments: %r", argv)
                        B = E.block
                        if B.indirect:
                            # prime the blockmap cache for this Block
                            X("ADD BLOCK CACHE FOR %s", B)
                            bm = self.block_cache.get_blockmap(B)
                            X("==> BLOCKMAP: %s", bm)
                        else:
                            # direct Blocks have no blockmap to cache
                            X("IGNORE BLOCK CACHE for %s: not indirect", B)
                        return
                    OS_EINVAL("unrecognised control command")
            OS_EINVAL("invalid %r prefixed name", XATTR_VT_PREFIX)
def start(self):
    ''' Start all nonrunning targets, stop all running nonrequired targets. '''
    required = self.targets_required()
    running = self.targets_running
    # bring up anything required but not yet running
    for target in required:
        portfwd = self.forward(target)
        if target not in running:
            info("start target %r", target)
            portfwd.start()
            running[target] = portfwd
    # tear down anything running but no longer required
    for target in list(running.keys()):
        if target not in required:
            info("stop target %r", target)
            portfwd = running[target]
            portfwd.stop()
            del running[target]
def test_func(self):
    ''' Service test function: probe all the conditions. '''
    with Pfx("%s[%s].test_func", type(self).__name__, self.name):
        # every precondition must pass
        for condition in self.conditions:
            with Pfx("precondition %s", condition):
                if not condition.probe():
                    if self.verbose:
                        info('FAILED')
                    return False
        # then the optional shell test command must exit 0
        if self.test_shcmd:
            with Pfx("test_shcmd %r", self.test_shcmd):
                shcmd_ok = os.system(self.test_shcmd) == 0
                if not shcmd_ok:
                    info('FAILED')
                    return False
        return True
def make_maildir(path):
    ''' Create a new maildir at `path` with its `tmp`, `new` and `cur`
        subdirectories. The path must not already exist.
        On failure, directories created so far are removed again.
    '''
    info("make_maildir %s", path)
    os.mkdir(path)
    created = [path]
    for subname in ('tmp', 'new', 'cur'):
        subpath = os.path.join(path, subname)
        try:
            os.mkdir(subpath)
        except OSError:
            # roll back: remove everything made so far, deepest first
            for dirpath in reversed(created):
                os.rmdir(dirpath)
            raise
        created.append(subpath)
def update_columns(self, update_columns, update_argv, where, *where_argv):
    ''' Update specific row columns.
        `update_columns` names the columns, `update_argv` supplies
        their new values; `where` is the (trusted) where clause with
        its values in `where_argv`.
    '''
    assignments = ','.join(
        "%s=?" % (column_name,) for column_name in update_columns
    )
    sql = 'update %s set %s where %s' % (
        self.table_name, assignments, where
    )
    sqlargs = list(update_argv) + list(where_argv)
    cursor = self.conn.cursor()
    info("SQL: %s %r", sql, sqlargs)
    with Pfx("SQL %r: %r", sql, sqlargs):
        cursor.execute(sql, sqlargs)
        self.conn.commit()
        cursor.close()
def _update_frame(self, frameid, newtext):
    ''' Set frame identified by `frameid` to have text `newtext`.
        Set self.modified to True if `newtext` is different
        from any existing text.
    '''
    frame = self.get_frame(frameid)
    if frame is None:
        # no such frame yet: create it and append to the tag
        info("%s: NEW %r", frameid, newtext)
        frame = self._frame(frameid, newtext)
        self.tag.append(frame)
        self.modified = True
    else:
        oldtext = frame['text']
        if oldtext == newtext:
            # nothing to do, leave self.modified untouched
            debug("%s: UNCHANGED %r", frameid, oldtext)
            return
        info("%s: UPDATE %r => %r", frameid, oldtext, newtext)
        frame['text'] = newtext
        # NOTE(review): indexing self.tag by frameid (not by the frame
        # object) presumes the tag container resolves frame ids in
        # .index() — confirm against the tag class
        self.tag[self.tag.index(frameid)] = frame
        self.modified = True
def add_path(self, new_path: str, indexed_to=0) -> DataFileState:
    ''' Insert a new path into the map. Return its `DataFileState`.

        Parameters:
        * `new_path`: the filesystem path to add
        * `indexed_to`: initial in-memory indexed-to value; default `0`
    '''
    info("new path %r", shortpath(new_path))
    with self._lock:
        # NOTE(review): the INSERT stores a literal 0 for indexed_to,
        # not the parameter — confirm whether that is intentional
        c = self._modify(
            'INSERT INTO filemap(`path`, `indexed_to`) VALUES (?, ?)',
            (new_path, 0),
            return_cursor=True
        )
        if c:
            filenum = c.lastrowid
            self._map(new_path, filenum, indexed_to=indexed_to)
            c.close()
            DFstate = self.n_to_DFstate[filenum]
        else:
            # already mapped
            # BUGFIX: this was assigned to a misspelled "DFState",
            # leaving "DFstate" unbound and raising NameError below
            DFstate = self.path_to_DFstate[new_path]
    return DFstate
def edit_column(self, column_name, where=None):
    ''' Open an interactive editor on the values of a column.
        Each editable line is `id:value`; edited values are written
        back by id, and edits which change the id are discarded.
    '''
    with Pfx("edit_column(%s, %r)", column_name, where):
        id_column = self.id_column
        edit_lines = []
        for row in self.select(where=where):
            edit_line = "%d:%s" % (row[id_column], row[column_name])
            edit_lines.append(edit_line)
        # present lines sorted by value, not id
        changes = self.edit_strings(
            sorted(edit_lines, key=lambda _: _.split(':', 1)[1]),
            errors=lambda msg: warning(msg + ', discarded')
        )
        for old_string, new_string in changes:
            with Pfx("%s => %s", old_string, new_string):
                old_id, old_name = old_string.split(':', 1)
                new_id, new_name = new_string.split(':', 1)
                if old_id != new_id:
                    # BUGFIX: the format string had no arguments, so the
                    # message printed literal "%s != %s"
                    error(
                        "id mismatch (%s != %s), discarding change",
                        old_id, new_id)
                else:
                    self[int(new_id)] = new_name
                    info("updated")
def setDebug(self, flag, value):
    ''' Set or clear the named debug option. '''
    with Pfx("setDebug(%r, %r)", flag, value):
        if not flag.isalpha() or not hasattr(self.debug, flag):
            known = ",".join(
                sorted([F for F in dir(self.debug) if F.isalpha()])
            )
            raise AttributeError(
                "invalid debug flag, know: %s" % (known,)
            )
        if self.debug.flags:
            info("debug.%s = %s", flag, value)
        setattr(self.debug, flag, value)
        if flag == 'debug':
            # tweak global logging level also
            root_logger = logging.getLogger()
            level = root_logger.getEffectiveLevel()
            if value:
                if level > logging.DEBUG:
                    root_logger.setLevel(logging.DEBUG)
            else:
                if level < logging.INFO:
                    root_logger.setLevel(logging.INFO)
def cmd_autotag(argv, options):
    ''' Tag paths based on data from the iTunes library.
        `argv` lists the top level paths to walk; default `['.']`.
    '''
    fstags = options.fstags
    library = options.library
    if not argv:
        argv = ['.']
    # NOTE(review): 'rules' appears unused in this body — confirm
    rules = fstags.config.rules
    # index the library's TV show tracks by (series, season, episode)
    tracks_by_series_season_episode = library.tracks_indexed(
        ['series', 'season', 'episode_order'], tv_show=True)
    ##print(pformat(tracks_by_series_season_episode))
    with fstags:
        for top_path in argv:
            for path in rpaths(top_path):
                with Pfx(path):
                    tagged_path = TaggedPath(path, fstags)
                    all_tags = tagged_path.all_tags
                    # NOTE(review): the key uses all_tags.title where the
                    # index was built on 'series' — confirm these align
                    key = (all_tags.title, all_tags.get('season'),
                           all_tags.get('episode'))
                    tracks = tracks_by_series_season_episode.get(key, ())
                    if tracks:
                        if len(tracks) > 1:
                            # ambiguous match: tag nothing
                            warning("multiple tracks: %r", tracks)
                        else:
                            track, = tracks
                            for tag_name, track_attr in (
                                ('title', 'name'),
                                ('genre', 'genre'),
                                ('release_date', 'release_date'),
                            ):
                                # only fill in tags not already present
                                tag_value = all_tags.get(tag_name)
                                if not tag_value:
                                    tr_value = getattr(
                                        track, track_attr, None)
                                    if tr_value is not None:
                                        new_tag = Tag(tag_name, tr_value)
                                        info("+ %s", new_tag)
                                        tagged_path.add(new_tag)
def importMessage(self, msg):
    ''' Import the message `msg`.
        Returns the MessageNode.

        Raises `ValueError` if the Message-ID is malformed.
    '''
    # CONSISTENCY: lazy %-style logging args instead of eager interpolation
    info("import %s->%s: %s", msg['from'], msg['to'], msg['subject'])
    msgid = msg['message-id'].strip()
    if (not msgid.startswith('<') or not msgid.endswith('>')
        or msgid.find("@") < 0):
        raise ValueError("invalid Message-ID: %s" % (msgid,))
    M = self.getMessageNode(msgid)
    M.MESSAGE = msg
    M.SUBJECT = msg['subject']
    if 'date' in msg:
        M.DATE = msg['date']
    M.FROMs = self.getAddressNodes(*msg.get_all('from', []))
    # BUGFIX: flatten the per-header lists; chain(generator-of-lists)
    # yielded the list objects themselves, so getAddressNodes received
    # lists rather than individual address strings
    M.RECIPIENTS = self.getAddressNodes(
        *chain.from_iterable(
            msg.get_all(hdr, [])
            for hdr in ('to', 'cc', 'bcc', 'resent-to', 'resent-cc')
        )
    )
    # locate the parent message, if any, from the reference headers
    # NOTE(review): email.message.Message.__getitem__ returns None for a
    # missing header rather than raising KeyError, so the 'references'
    # fallback may never trigger — confirm the type of msg
    refhdr = None
    try:
        refhdr = msg['in-reply-to']
    except KeyError:
        try:
            refhdr = msg['references']
        except KeyError:
            pass
    if refhdr:
        refids = [refid for refid in refhdr.split() if len(refid)]
        if refids:
            M.PARENT = self.getMessageNode(refids[-1])
    return M
def sql(self, query):
    ''' Run `query` against the bug SQLite database, creating the
        database from the raw bug data or syncing it from the log
        file first as needed.
        Yield each result row as a list of column strings.

        SECURITY NOTE: the database path and field values are pasted
        into shell commands and SQL text; this is only safe for
        trusted local data.
    '''
    sqldb = self.subpath('db.sqlite')
    dblog = self.subpath('db.log.csv')
    if not os.path.isfile(sqldb):
        # no db? create it
        info("create SQLite database...")
        os.system(
            "set -x; sqlite '" + sqldb +
            "' 'create table bugfields (bugnum int, field varchar(64), value varchar(16384));'"
        )
        # populate db from raw data
        sqlpipe = cs.sh.vpopen(("sqlite", sqldb), "w")
        for bugnum in self.keys():
            bug = self[bugnum]
            for field in bug.keys():
                sqlpipe.write("insert into bugfields values(")
                sqlpipe.write(bugnum)
                sqlpipe.write(",'")
                sqlpipe.write(field)
                sqlpipe.write("','")
                sqlpipe.write(bug[field])
                # BUGFIX: close the VALUES(...) parenthesis; the
                # original emitted "...';" with no ")"
                sqlpipe.write("');\n")
        sqlpipe.close()
    else:
        # just update the db from the log file
        info("sync db from log...")
        if os.path.isfile(dblog):
            sqlpipe = cs.sh.vpopen(("sqlite", sqldb), "w")
            # BUGFIX: Python 3 open(), not the Python 2 file() builtin
            dblogfp = open(dblog)
            os.unlink(dblog)
            for csvline in dblogfp:
                # BUGFIX: str.split method, not Python 2 string.split
                bugnum, field, value = csvline.split(",", 2)
                sqlpipe.write("delete from bugfields where bugnum = ")
                sqlpipe.write(str(bugnum))
                sqlpipe.write(" and field = \"")
                sqlpipe.write(field)
                sqlpipe.write("\";\n")
                sqlpipe.write("insert into bugfields values (")
                sqlpipe.write(str(bugnum))
                sqlpipe.write(",\"")
                sqlpipe.write(field)
                sqlpipe.write("\",\"")
                sqlpipe.write(value)
                sqlpipe.write("\");\n")
            dblogfp.close()
            sqlpipe.close()
    info("QUERY = %s", query)
    sqlpipe = cs.sh.vpopen(("sqlite", "-list", sqldb, query))
    for row in sqlpipe:
        # BUGFIX: strip the newline and split with str methods, not the
        # Python 2 string module
        yield row.rstrip("\n").split('|')
def main_ssh_opts(argv):
    ''' Command line mode: compute ssh options for a host from
        configuration files and `-o` command line overrides, then
        print the requested (or all) option values.
        Return an exit status: 0 on success, 1 if a configuration
        file could not be read, 2 on a usage error.
    '''
    # BUGFIX below: "If not options are specified" -> "If no options..."
    USAGE = r'''Usage: %s [-F config-file]... [-o opt=value]... host [options...]
  -F config-file
    Specific configuration file to read. These accumulate.
    If no configuration files are specified use:
      ''' + " ".join(DEFAULT_CONFIGS) + r'''
    Configuration files are consulted in order and the earlier matching
    setting of each option is used.
  -o opt=value
    Specify an ssh configuration option. Later -o options override
    earlier ones. Options specified by -o override options from
    configuration files.
  host
    Host name used to match clauses in configuration files.
  options
    If specified, print the value of each option in order, each on its
    own line. If no options are specified, print the value of each
    option defined by -o or in a configuration file as:
      option-name option-value'''
    cmd = argv.pop(0)
    setup_logging(cmd)
    usage = USAGE % (cmd,)
    configs = []
    options = {}
    badopts = False
    # gather the -F and -o command line options
    while argv:
        opt = argv.pop(0)
        if opt == '--':
            break
        if not opt.startswith('-') or len(opt) < 2:
            # not an option: push back and stop option parsing
            argv.insert(0, opt)
            break
        with Pfx(opt):
            if opt == '-F':
                configs.append(argv.pop(0))
            elif opt == '-o':
                optarg = argv.pop(0)
                with Pfx(optarg):
                    try:
                        option, optvalue = parse_option(optarg)
                    except ValueError as e:
                        warning("invalid option: %s", e)
                        badopts = True
                        continue
                    info("cmdline: %s = %s", option, optvalue)
                    options[option] = optvalue
            else:
                warning("unrecognised option")
                badopts = True
                continue
    if not argv:
        warning("missing host")
        badopts = True
    else:
        host = argv.pop(0)
    if not configs:
        configs = [envsub(cfg) for cfg in DEFAULT_CONFIGS]
    if badopts:
        print(usage, file=sys.stderr)
        return 2
    xit = 0
    # apply each configuration file; earlier matches take precedence
    for config in configs:
        if not update_from_file(options, config, host):
            xit = 1
    if argv:
        # print only the requested option values
        for option in argv:
            print(option, options.get(option.lower(), ''))
    else:
        # print every defined option as "name value"
        for option in sorted(options.keys()):
            print(option, options[option])
    return xit