def cmd_make_cbz(self, argv):
    ''' Usage: {cmd} dbids... '''
    if not argv:
        raise GetoptError("missing dbids")
    options = self.options
    calibre = options.calibre
    exit_code = 0
    for dbid_str in argv:
        with Pfx(dbid_str):
            # each argument must be a decimal Calibre book id
            try:
                dbid = int(dbid_str)
            except ValueError as e:
                warning("invalid dbid: %s", e)
                exit_code = 1
                continue
            cbook = calibre[dbid]
            with Pfx("%s: make_cbz", cbook.title):
                cbook.make_cbz()
    return exit_code
def loadAddresses(addresses, catmap=None, addrmap=None):
    ''' Load an address list file.

        Return ok (True/False) and maps by category and address key.
        Existing category and address key maps may be supplied.
    '''
    if catmap is None:
        catmap = {}
    if addrmap is None:
        addrmap = {}
    ok = True
    with Pfx(addresses):
        with open(addresses) as addrfp:
            for lineno, line in enumerate(addrfp, 1):
                if not line.endswith('\n'):
                    error("line %d: missing newline (unexpected EOF)", lineno)
                    ok = False
                    break
                line = line.strip()
                # skip blank lines and comments
                if len(line) == 0 or line[0] == '#':
                    continue
                # each line: comma-separated categories, whitespace, address
                try:
                    cats, addr = line.split(None, 1)
                except ValueError:
                    warning("line %d: bad syntax: %s", lineno, line)
                    ok = False
                    continue
                if addr.startswith('mailto:'):
                    addr = addr[7:]
                cats = cats.split(',')
                addrkey = addressKey(addr)
                if addrkey is None:
                    warning("line %d: can't parse address \"%s\"", lineno, addr)
                    ok = False
                    continue
                if "@" not in addrkey:
                    warning("line %d: no \"@\" in \"%s\"", lineno, addrkey)
                info = addrmap.get(addrkey)
                if info is None:
                    info = addrmap[addrkey] = AddressInfo(addrkey, addr, set())
                info.categories.update(cats)
                for cat in cats:
                    catmap.setdefault(cat, {})[addrkey] = info
    return ok, catmap, addrmap
def _tcp_connect(self):
    ''' Connect the TCP socket, returning read and write OpenSockets. '''
    # TODO: IPv6 support
    sock = self.sock = socket(AF_INET)
    with Pfx("%s.sock.connect(%r)", self, self.sock_bind_addr):
        try:
            sock.connect(self.sock_bind_addr)
        except:
            # do not leave a half-made socket behind; bare except is
            # deliberate so even KeyboardInterrupt triggers cleanup
            sock.close()
            self.sock = None
            raise
    return OpenSocket(sock, False), OpenSocket(sock, True)
def startup(self):
    ''' Open index. '''
    index = self.index
    with Pfx("open %s", index):
        try:
            open_method = index.open
        except AttributeError:
            # index without an .open method: nothing to do
            warning("no .open method")
            return
        open_method()
def decode_request(rq_type, flags, payload):
    ''' Decode `(flags,payload)` into a request packet. '''
    with Pfx("decode_request(rq_type=%s, flags=0x%02x, payload=%d bytes)",
             rq_type, flags, len(payload)):
        request_class = RqType(rq_type).request_class
        rq, offset = request_class.parse_bytes(payload, parse_flags=flags)
        unparsed = len(payload) - offset
        if unparsed > 0:
            # trailing bytes usually indicate a protocol mismatch
            warning("%d unparsed bytes remaining in payload", unparsed)
        return rq
def delete(self, where, *where_argv):
    ''' Delete rows matching the `where` clause. '''
    sql = 'delete from %s where %s' % (self.table_name, where)
    sqlargs = where_argv
    cursor = self.conn.cursor()
    info("SQL: %s %r", sql, sqlargs)
    with Pfx("SQL %r: %r", sql, sqlargs):
        cursor.execute(sql, sqlargs)
        self.conn.commit()
        cursor.close()
def _unixsock_connect(self):
    ''' Connect the UNIX domain socket, returning read and write OpenSockets. '''
    sock = self.sock = socket(AF_UNIX)
    with Pfx("%s.sock.connect(%r)", self, self.socket_path):
        try:
            sock.connect(self.socket_path)
        except OSError as e:
            # report, discard the socket, then propagate
            error("connect fails: %s", e)
            sock.close()
            self.sock = None
            raise
    return OpenSocket(sock, False), OpenSocket(sock, True)
def apply_opts(self, opts):
    ''' Command line main switches. '''
    options = self.options
    for opt, val in opts:
        with Pfx(opt):
            if opt == '-f':
                # force: disable the downloader cache
                options.ydl_opts.update(cachedir=False)
            elif opt == '-j':
                with Pfx(val):
                    try:
                        options.parallel = int(val)
                    except ValueError as e:
                        # pylint: disable=raise-missing-from
                        raise GetoptError("invalid integer: %s" % (e,))
                    if options.parallel < 1:
                        raise GetoptError("must be >= 1")
            else:
                raise RuntimeError("unhandled option: %s=%s" % (opt, val))
def mkdir(self, name):
    ''' Create a subdirectory. '''
    # reject empty names and names which would escape this directory
    if not name or PATHSEP in name:
        raise ValueError(
            "name may not be empty or contain PATHSEP %r: %r" % (PATHSEP, name)
        )
    dirpath = joinpath(self.path, name)
    with Pfx("mkdir(%r)", dirpath):
        os.mkdir(dirpath)
    return OSDir(dirpath)
def infer(self, path, apply=False):
    ''' Compare the `{TAGGER_TAG_PREFIX_DEFAULT}.filename_inference` rules
        to `path`, producing a mapping of prefix=>[Tag] for each rule
        which infers tags.
        Return the mapping.

        If `apply` is true, also apply all the inferred tags to `path`
        with each `Tag` *name*=*value* applied as *prefix*.*name*=*value*.
    '''
    tagged = self.fstags[path]
    srcpath = tagged.filepath
    srcdirpath = dirname(srcpath)
    inference_mapping = self.inference_mapping(srcdirpath)
    inferences = defaultdict(list)
    for prefix, infer_funcs in inference_mapping.items():
        with Pfx(prefix):
            assert isinstance(prefix, str)
            for infer_func in infer_funcs:
                try:
                    values = list(infer_func(path))
                except Exception as e:  # pylint: disable=broad-except
                    warning("skip rule %s: %s", infer_func, e)
                    continue
                bare_values = []
                for value in values:
                    if isinstance(value, Tag):
                        # a Tag: record under prefix.name
                        tag = value
                        tag_name = prefix + '.' + tag.name if prefix else tag.name
                        inferences[tag_name] = tag.value
                    else:
                        bare_values.append(value)
                if bare_values:
                    # a single bare value is stored unwrapped
                    if len(bare_values) == 1:
                        bare_values = bare_values[0]
                    inferences[prefix] = bare_values
                # first rule which ran wins for this prefix
                break
    if apply:
        with Pfx("apply"):
            for tag_name, values in inferences.items():
                # BUGFIX: previously assigned `tag.value`, reusing the
                # stale loop variable `tag` from the inference loop above
                # and so applying the wrong value (or raising NameError
                # when no Tag was ever seen); use the inferred values.
                tagged[tag_name] = values
    return inferences
def addpath(self, path):
    ''' Add a new path to the data structures. '''
    with Pfx("addpath(%r)", path):
        with Pfx("lstat"):
            S = os.lstat(path)
        # only regular files of at least min_size are tracked
        if not S_ISREG(S.st_mode):
            return
        if S.st_size < self.min_size:
            return
        key = FileInfo.stat_key(S)
        FI = self.keymap.get(key)
        if FI:
            # already known: just record the extra path
            assert FI.key == key
            FI.paths.add(path)
            return
        # first sighting: index by key and by size
        FI = FileInfo(S.st_dev, S.st_ino, S.st_size, S.st_mtime, (path,))
        assert FI.key == key  # pylint: disable=comparison-with-callable
        self.keymap[key] = FI
        assert key not in self.sizemap[S.st_size]
        self.sizemap[S.st_size][key] = FI
def resolve_csv_row(row, lastrow):
    ''' Transmute a CSV row, resolving omitted TYPE, NAME or ATTR fields. '''
    with Pfx("row=%r, lastrow=%r", row, lastrow):
        t, name, attr, value = row
        # empty leading fields inherit from the previous row
        t = lastrow[0] if t == '' else t
        name = lastrow[1] if name == '' else name
        attr = lastrow[2] if attr == '' else attr
        return t, name, attr, value
def main(argv):
    ''' Main programme: obtain the dburl, then run an op or the command loop. '''
    argv = list(argv)
    cmd = os.path.basename(argv.pop(0))
    setup_logging(cmd)
    xit = 0
    badopts = False
    dburl = None
    if argv:
        dburl = argv.pop(0)
        if dburl.startswith('$'):
            # $VARNAME: indirect through the environment
            envvar = dburl[1:]
            varval = os.environ.get(envvar)
            if varval is None:
                error("dburl: no such envvar: %s", dburl)
                badopts = True
            else:
                dburl = varval
    else:
        error("missing dburl")
        badopts = True
    if not badopts:
        if argv:
            # an op name was supplied: dispatch it
            op = argv.pop(0)
            DB = SQLA(dburl=dburl)
            with Pfx(op):
                try:
                    try:
                        opfunc = getattr(DB, 'op_' + op)
                    except AttributeError:
                        # not an op method: maybe a bare table name
                        if op in DB.table_names:
                            xit = DB.op_table([op] + argv)
                        else:
                            error("unknown op (table_names = %s)", DB.table_names)
                            badopts = True
                    else:
                        xit = opfunc(argv)
                except GetoptError as e:
                    error(str(e))
                    badopts = True
        else:
            # no op: run the interactive command loop
            cmdloop = CmdLoop(dburl)
            cmdloop.prompt = cmd + '> '
            cmdloop.cmdloop()
    if badopts:
        print(usage % (cmd,), file=sys.stderr)
        return 2
    return xit
def _self_check(self):
    ''' Verify the key<->subkey mappings are mutually consistent; return True. '''
    X("SELF CHECK")
    mapped_keys = self._mapped_keys
    mapped_subkeys = self._mapped_subkeys
    # the two maps must be exact inverses of each other
    assert len(mapped_keys) == len(mapped_subkeys)
    assert set(mapped_keys.values()) == set(mapped_subkeys.keys())
    assert set(mapped_keys.keys()) == set(mapped_subkeys.values())
    for subk, k in mapped_subkeys.items():
        with Pfx("subkey %r vs key %r", subk, k):
            assert mapped_keys[k] == subk, (
                "subkey %r => %r: self._mapped_keys[key]:%r != subkey:%r"
                % (subk, k, mapped_keys[k], subk)
            )
    return True
def __iter__(self):
    ''' Generator yielding (unixtime, Dirent) from the archive file. '''
    path = self.path
    with Pfx(path):
        try:
            with open(path) as fp:
                yield from self.parse(fp)
        except OSError as e:
            # a missing archive file simply yields nothing
            if e.errno != errno.ENOENT:
                raise
def _multicall0(stores, method_name, args, kwargs=None):
    ''' Basic multicall of `*_bg` methods,
        yielding `(LF,S)` pairs in the order submitted.
    '''
    assert method_name.endswith('_bg')
    store_list = list(stores)
    kwargs = {} if kwargs is None else kwargs
    for store in store_list:
        with Pfx("%s.%s()", store, method_name):
            with store:
                lf = getattr(store, method_name)(*args, **kwargs)
        # yield outside Pfx because this is a generator
        yield lf, store
def __call__(self, context, namespaces, *param_values):
    ''' Evaluate this macro expression with the supplied parameter values. '''
    with Pfx("$(%s)", self.name):
        assert type(namespaces) is list, "namespaces = %s" % r(namespaces)
        params = self.params
        if len(param_values) != len(params):
            raise ValueError(
                "mismatched Macro parameters: self.params = %r (%d items) but got %d param_values: %r"
                % (params, len(params), len(param_values), param_values)
            )
        if params:
            # bind the parameters in a fresh leading namespace
            namespaces = [dict(zip(params, param_values))] + namespaces
        return self.mexpr(context, namespaces)
def read_services(fp, start_lineno=1):
    ''' Parse the services(5) format,
        yield (prelines, PortInfo(portnum, proto, name, aliases)).

        `prelines` is the list of non-service text lines preceding
        each entry; a final `(textlines, None)` pair is yielded for
        trailing text.
    '''
    textlines = []
    for lineno, line in enumerate(fp, start_lineno):
        with Pfx("%s:%d", fp, lineno):
            if not line.endswith('\n'):
                raise ValueError("missing terminating newline")
            line0 = line[:-1]
            line = line0.rstrip()
            comment_pos = line.find('#')
            if comment_pos >= 0:
                # BUGFIX: take the comment text BEFORE truncating `line`;
                # previously the comment was sliced from the already
                # truncated line, yielding the wrong (usually empty) text.
                comment = line[comment_pos + 1:].strip()
                line = line[:comment_pos].rstrip()
            else:
                comment = ''
            words = line.split()
            if not words:
                # blank/comment-only line: accumulate as leading text
                textlines.append(line0)
                continue
            if line[0].isspace():
                # continuation style line: no service name
                name = None
            else:
                name = words.pop(0)
            with Pfx(name):
                try:
                    portspec = words.pop(0)
                except IndexError:
                    # pylint: disable=raise-missing-from
                    raise ValueError("missing portnum/proto")
                with Pfx(portspec):
                    # portspec has the form portnum/proto
                    try:
                        portnum, proto = portspec.split('/')
                        portnum = int(portnum)
                        proto = proto.lower()
                    except ValueError as e:
                        raise ValueError("invalid portspec: %s" % (e,))
            # remaining words are aliases
            yield textlines, PortInfo(portnum, proto, name, words, comment)
            textlines = []
    if textlines:
        yield textlines, None
def _make_next(self):
    ''' The inner/recursive/deferred function from _make;
        only called if out of date.
        Perform the next unit of work in making this Target.
        If we complete without blocking, put True or False onto self.made.
        Otherwise queue a background function to block and resume.
    '''
    with Pfx("_make_next(%r)", self.name):
        # we should only be here when (re)making is actually required
        if not self.was_missing and not self.out_of_date:
            raise RuntimeError("not missing or out of date!")
        # evaluate the result of Actions or Targets we have just waited for
        for R in self.Rs:
            with Pfx("checking %s", R):
                if isinstance(R, Target):
                    # prerequisite Target: fold its outcome into our state
                    self._apply_prereq(R)
                elif R.result:
                    # successful Action result: nothing further to do
                    pass
                else:
                    # failed Action result: mark this Target as failed
                    self.fail()
                if self.ready:
                    # outcome already decided; stop checking the rest
                    break
        if self.failed:
            # failure, cease make
            return
        # start a fresh batch of asynchronous results to wait for
        Rs = self.Rs = []
        actions = self.pending_actions
        if actions:
            # dispatch the next pending action in the background
            A = actions.pop(0)
            self.mdebug("queue action: %s", A)
            Rs.append(A.act_later(self))
        else:
            self.mdebug("no actions remaining")
        if Rs:
            # outstanding work: resume in _make_next when it completes
            self.mdebug(
                "tasks still to do, requeuing: Rs=%s",
                ",".join(str(_) for _ in Rs))
            self.maker.after(Rs, self._make_next)
        else:
            # all done, record success
            self.succeed()
def _act(self, R, target):
    ''' Perform this Action on behalf of the Target `target`.
        Arrange to put the result onto `R`.
    '''
    with Pfx("%s.act(target=%s)", self, target):
        try:
            debug("start act...")
            M = target.maker
            mdebug = M.debug_make
            v = self.variant
            if v == 'shell':
                # shell variant: expand the macro expression and run it
                debug("shell command")
                shcmd = self.mexpr(self.context, target.namespaces)
                if M.no_action or not self.silent:
                    print(shcmd)
                if M.no_action:
                    # dry-run mode: report success without running anything
                    mdebug("OK (maker.no_action)")
                    R.put(True)
                    return
                R.put(self._shcmd(target, shcmd))
                return
            if v == 'make':
                # make variant: require the named subtargets,
                # succeeding only if they all succeed
                subtargets = self.mexpr(self.context, target.namespaces).split()
                mdebug("targets = %s", subtargets)
                subTs = [M[subtarget] for subtarget in subtargets]

                def _act_after_make():
                    # analyse success of targets, update R
                    ok = True
                    mdebug = M.debug_make
                    for T in subTs:
                        if T.result:
                            mdebug("submake \"%s\" OK", T)
                        else:
                            ok = False
                            mdebug("submake \"%s\" FAIL", T)
                    R.put(ok)

                for T in subTs:
                    mdebug("submake \"%s\"", T)
                    T.require()
                # defer the outcome analysis until all submakes complete
                M.after(subTs, _act_after_make)
                return
            raise NotImplementedError("unsupported variant: %s" % (self.variant, ))
        except Exception as e:
            # any failure in the action machinery counts as an action failure
            error("action failed: %s", e)
            R.put(False)
def notifier(R):
    ''' Wrapper for `submitter`: pass on a clean result, report an exception. '''
    exc_info = R.exc_info
    if exc_info is not None:
        # failed: report the exception (with prefix when supplied)
        if prefix:
            with Pfx(prefix):
                error("exception: %r", exc_info)
        else:
            error("exception: %r", exc_info)
        return None
    return submitter(R.result)
def Message(msgfile, headersonly=False):
    ''' Factory function to accept a file or filename and return an
        email.message.Message.
    '''
    if not isinstance(msgfile, StringTypes):
        # a file-like object: parse it directly
        return email.parser.Parser().parse(msgfile, headersonly=headersonly)
    # a filename: open it and recurse with the file object
    pathname = msgfile
    with Pfx(pathname):
        with open(pathname, errors='replace') as mfp:
            M = Message(mfp, headersonly=headersonly)
        M.pathname = pathname
        return M
def load_fs_inode_dirents(self, D):
    ''' Load entries from an `fs_inode_dirents` Dir into the Inode table. '''
    X("LOAD FS INODE DIRENTS:")
    dump_Dirent(D)
    for name, E in D.entries.items():
        X(" name=%r, E=%r", name, E)
        with Pfx(name):
            # get the refcount from the :uuid:refcount" name
            # NOTE(review): split(':')[:2] keeps the first two colon fields;
            # if the name really has a LEADING colon (":uuid:refcount") then
            # refcount_s would receive the uuid, not the refcount — confirm
            # the actual name format (probably "uuid:refcount")
            _, refcount_s = name.split(':')[:2]
            I = self.add(E)
            I.refcount = int(refcount_s)
            X(" I=%s", I)
def edit_column(self, column_name, where=None):
    ''' Open an interactive editor on the values of a column.

        Each row is presented as "id:value"; edited lines are parsed
        back and applied where the id is unchanged.
    '''
    with Pfx("edit_column(%s, %r)", column_name, where):
        id_column = self.id_column
        edit_lines = []
        for row in self.select(where=where):
            edit_line = "%d:%s" % (row[id_column], row[column_name])
            edit_lines.append(edit_line)
        # present the lines sorted by value (the text after the id)
        changes = self.edit_strings(
            sorted(edit_lines, key=lambda _: _.split(':', 1)[1]),
            errors=lambda msg: warning(msg + ', discarded')
        )
        for old_string, new_string in changes:
            with Pfx("%s => %s", old_string, new_string):
                old_id, old_name = old_string.split(':', 1)
                new_id, new_name = new_string.split(':', 1)
                if old_id != new_id:
                    # BUGFIX: the format string previously had no
                    # arguments, producing a malformed error message
                    error("id mismatch (%s != %s), discarding change",
                          old_id, new_id)
                else:
                    self[int(new_id)] = new_name
                    info("updated")
def editNodes(nodedb, nodes, attrs, editor=None, doCreate=False):
    ''' Edit multiple nodes interactively using the horizontal dump format.

        `nodedb`: the node database; inferred from nodes[0] when None.
        `editor`: editor command, default from $EDITOR falling back to "vi".
        NOTE(review): `doCreate` is accepted but not used in this body —
        confirm whether import_csv_wide should honour it.
    '''
    if nodedb is None:
        nodedb = nodes[0].nodedb
    if editor is None:
        editor = os.environ.get('EDITOR', 'vi')
    with tempfile.NamedTemporaryFile(suffix='.csv') as T:
        with Pfx(T.name):
            # NOTE(review): NamedTemporaryFile defaults to binary mode and
            # buffered output may not be flushed before the editor runs —
            # confirm dump_csv_wide handles/flushes the file appropriately
            nodedb.dump_csv_wide(T, nodes=nodes, attrs=attrs)
            qname = cs.sh.quotestr(T.name)
            os.system("%s %s" % (editor, qname))
            # reload the (possibly edited) CSV back into the node database
            import_csv_wide(nodedb, T.name, doAppend=False)
def file_frombuffer(self, name, bfr):
    ''' Create a new file from data from a `CornuCopyBuffer`. '''
    # reject empty names and names which would escape this directory
    if not name or PATHSEP in name:
        raise ValueError(
            "name may not be empty or contain PATHSEP %r: %r" % (PATHSEP, name)
        )
    filepath = joinpath(self.path, name)
    with Pfx('write %r', filepath):
        with open(filepath, 'wb') as outf:
            # copy the buffer contents chunk by chunk
            for chunk in bfr:
                outf.write(chunk)
    return OSFile(filepath)
def _recordings_from_entries(self, entries):
    ''' Return the recording `TagSet` instances from PlayOn data entries.

        Each entry is normalised in place: selected fields are converted
        to int, fields whose value is None are removed, then the entry is
        folded into the recording TagSet under the "playon" prefix.
    '''
    with self.sqltags:
        now = time.time()
        recordings = set()
        for entry in entries:
            entry_id = entry['ID']
            with Pfx(entry_id):
                # per-field type conversions; sorted for deterministic order
                for field, conv in sorted(dict(
                    Episode=int,
                    ReleaseYear=int,
                    Season=int,
                    ##Created=self.from_playon_date,
                    ##Expires=self.from_playon_date,
                    ##Updated=self.from_playon_date,
                ).items()):
                    try:
                        value = entry[field]
                    except KeyError:
                        # absent field: leave entry unchanged
                        pass
                    else:
                        with Pfx("%s=%r", field, value):
                            if value is None:
                                # drop null fields entirely
                                del entry[field]
                            else:
                                try:
                                    value2 = conv(value)
                                except ValueError as e:
                                    # keep the original value on conversion failure
                                    warning("%r: %s", value, e)
                                else:
                                    entry[field] = value2
                recording = self[entry_id]
                recording.update(entry, prefix='playon')
                recording.update(dict(last_updated=now))
                recordings.add(recording)
        return recordings
def update_from_file(options, config, host):
    ''' Read options from an ssh_config file and update `options`;
        return true on successful parse.
        `options`: a mapping of existing option values keyed on option.lower().
        `config`: configuration file to read.
        `host`: host used to select Host clauses to honour.
    '''
    host_lc = host.lower()
    ok = True
    with Pfx(config):
        with open(config) as fp:
            use_host = False
            for lineno, line in enumerate(fp, 1):
                with Pfx(lineno):
                    if not line.endswith('\n'):
                        warning("missing newline")
                    line = line.strip()
                    # skip blanks and comments
                    if not line or line.startswith('#'):
                        continue
                    words = line.split(None, 1)
                    if words[0].lower() == 'host':
                        # a Host clause: decide whether it matches `host`
                        use_host = False
                        if len(words) == 1:
                            warning("no host patterns")
                        else:
                            use_host = any(
                                fnmatch(host_lc, hostptn.lower())
                                for hostptn in words[1].split()
                            )
                    elif use_host:
                        # an option line inside a matching Host clause
                        try:
                            option, optvalue = parse_option(line)
                        except ValueError as e:
                            error("invalid option: %s", e)
                            ok = False
                        else:
                            # earlier settings take precedence
                            if option not in options:
                                options[option] = optvalue
    return ok
def modify(self, text, namespaces):
    ''' Read file contents: treat each word of `text` as a filename,
        read each file and return all their words joined by spaces.

        When `self.lax` is true an unreadable file is warned about and
        skipped; otherwise the IOError propagates.
    '''
    newwords = []
    for filename in self.words(text):
        with Pfx(filename):
            try:
                with open(filename) as fp:
                    newwords.extend(self.words(fp.read()))
            except IOError as e:
                # BUGFIX: previously the exception was re-raised even when
                # self.lax was set, making the lax flag ineffective; in lax
                # mode we now warn and skip the unreadable file
                if self.lax:
                    warning("%s", e)
                else:
                    raise
    return " ".join(newwords)
def scan_mp4(bfr):
    ''' Scan ISO14496 input and yield Box start offsets.

        This is more complex than the MP3 scanner because Boxes
        nest in the MP4 structure.
    '''
    from cs.iso14496 import Box
    # BUGFIX: the Pfx label previously read "parse_mp4", misreporting
    # this function's name in error message prefixes
    with Pfx("scan_mp4"):

        def run_parser(bfr):
            # consume the whole buffer via Box.scan, discarding the Boxes;
            # report_offsets observes the buffer offsets as a side effect
            for _ in Box.scan(bfr):
                pass

        return report_offsets(bfr, run_parser)