def _fh(self, fhndx):
    ''' Return the `FileHandle` stored at index `fhndx`.
        Log an error and re-raise `IndexError` on a bad index.
    '''
    handles = self._file_handles
    try:
        handle = handles[fhndx]
    except IndexError:
        error("cannot look up FileHandle index %r", fhndx)
        raise
    return handle
def make_cbz(self, cbzpath=None):
    ''' Create a CBZ file from the images in the MOBI file.
        Return the path to the created CBZ file.

        Parameters:
        * `cbzpath`: optional output path; default: the MOBI basename
          with a `.cbz` extension

        Raises `ValueError` if `cbzpath` already exists.
    '''
    if cbzpath is None:
        mobibase, _ = splitext(basename(self.path))
        cbzpath = mobibase + '.cbz'
    if existspath(cbzpath):
        raise ValueError("CBZ path %r already exists" % (cbzpath,))
    with self.extracted() as df:
        dirpath, _ = df
        imagepaths = sorted(glob(joinpath(dirpath, 'mobi8/OEBPS/Images/*.*')))
        info("write %s", cbzpath)
        try:
            with pfx_call(ZipFile, cbzpath, 'x', compression=ZIP_STORED) as cbz:
                for imagepath in imagepaths:
                    pfx_call(cbz.write, imagepath, arcname=basename(imagepath))
        except FileExistsError as e:
            # fix: corrected typo in the error message ("eixsts")
            error("CBZ already exists: %r: %s", cbzpath, e)
            return 1
        except Exception:
            # do not leave a partial CBZ file behind
            if existspath(cbzpath):
                pfx_call(os.unlink, cbzpath)
            raise
        return cbzpath
def info(self):
    ''' Read various megacli query command outputs and construct a
        data structure with the adapter information.
    '''
    cmd_append("megacli -CfgDsply -aAll")
    Mconfigured = self._parse(
        self.readcmd('-CfgDsply', '-aAll'), mode_CFGDSPLY
    )
    ##Mconfigured = self._parse(open('CfgDsply.txt'), mode_CFGDSPLY)
    # record physical drives by id (NB: _not_ enclosure/slot)
    for A in Mconfigured.adapters.values():
        for V in A.virtual_drives.values():
            for VDRVn, DRV in V.physical_disks.items():
                if DRV.id in A.physical_disks:
                    # fix: the format string has two placeholders but only
                    # one argument was supplied; pass the drive id as well
                    error(
                        "VD drive %d: %s already in A.physical_disks",
                        VDRVn, DRV.id
                    )
                else:
                    A.physical_disks[DRV.id] = DRV
    cmd_pop()
    cmd_append("megacli -PDlist -aAll")
    Mphysical = self._parse(self.readcmd('-PDlist', '-aAll'), mode_PDLIST)
    ##Mphysical = self._parse(open('PDList.txt'), mode_PDLIST)
    # merge the PDlist information into the configured adapter records
    for A in Mphysical.adapters.values():
        disks = Mconfigured.adapters[A.number].physical_disks
        for DRVid, DRV in A.physical_disks.items():
            cmd_append(str(DRVid))
            if DRVid in disks:
                merge_attrs(disks[DRVid], **DRV.__dict__)
            else:
                disks[DRVid] = DRV
            cmd_pop()
    cmd_pop()
    return Mconfigured
def _fetch(self):
    ''' Fetch the URL content.
        If there is an HTTPError, report the error, flush the content,
        set self._fetch_exception.
        This means that accessing the self.content property will always
        attempt a fetch, but return None on error.
    '''
    with Pfx("_fetch(%s)", self):
        try:
            with self._response('GET') as opened_url:
                # fix: removed a redundant second self._response('GET')
                # call which performed a duplicate fetch whose response
                # was never closed
                self.opened_url = opened_url
                # URL post redirection
                final_url = opened_url.geturl()
                if final_url == self:
                    final_url = self
                else:
                    final_url = URL(final_url, self)
                self.final_url = final_url
                self._content = opened_url.read()
                self._parsed = None
        except HTTPError as e:
            error("error with GET: %s", e)
            self.flush()
            self._fetch_exception = e
def cmd_rip(self, argv):
    ''' Usage: {cmd} [-n] [disc_id]
          Pull the audio into a subdirectory of the current directory.
          -n  No action; recite planned actions.
    '''
    options = self.options
    fstags = options.fstags
    dirpath = options.dirpath
    no_action = False
    disc_id = None
    if argv and argv[0] == '-n':
        argv.pop(0)
        no_action = True
    if argv:
        disc_id = argv.pop(0)
    if argv:
        raise GetoptError("extra arguments: %r" % (argv,))
    try:
        rip(
            options.device,
            options.mbdb,
            output_dirpath=dirpath,
            disc_id=disc_id,
            fstags=fstags,
            no_action=no_action,
        )
    except discid.disc.DiscError as e:
        error("disc error: %s", e)
        return 1
    return 0
def __init__(self, config_map=None, environ=None, default_config=None):
    ''' Initialise the configuration.

        Parameters:
        * `config_map`: either a mapping used to populate the config
          directly, or the path of a config file to read;
          default `DEFAULT_CONFIG_MAP`
        * `environ`: optional environment mapping, default `os.environ`
        * `default_config`: fallback configuration mapping used when the
          config file path cannot be read
    '''
    if config_map is None:
        config_map = DEFAULT_CONFIG_MAP
    if environ is None:
        environ = os.environ
    if default_config is None:
        default_config = DEFAULT_CONFIG_MAP
    self.environ = environ
    config = ConfigParser()
    if isinstance(config_map, str):
        # a string names a config file path
        path = config_map
        self.path = path
        with Pfx(path):
            loaded = False
            if not pathexists(path):
                warning("missing config file")
            else:
                try:
                    config.read(path)
                except OSError as e:
                    error("read error: %s", e)
                else:
                    loaded = True
            if not loaded:
                warning("falling back to default configuration")
                config.read_dict(default_config)
    else:
        # a mapping: load it directly
        self.path = None
        config.read_dict(config_map)
    self.map = config
    self._clause_stores = {}  # clause_name => Result->Store
    self._lock = Lock()
def open(self, E, flags):
    ''' Open a regular file `E`, allocate FileHandle, return FileHandle index.
        Increments the kernel reference count.
    '''
    # Decompose the open(2) flags into intent booleans.
    # NOTE(review): O_RDONLY is 0 on POSIX, so (flags & O_RDONLY) == O_RDONLY
    # is always true and for_read is therefore always True - confirm intended.
    for_read = (flags & O_RDONLY) == O_RDONLY or (flags & O_RDWR) == O_RDWR
    for_write = (flags & O_WRONLY) == O_WRONLY or (flags & O_RDWR) == O_RDWR
    for_append = (flags & O_APPEND) == O_APPEND
    for_trunc = (flags & O_TRUNC) == O_TRUNC
    debug(
        "for_read=%s, for_write=%s, for_append=%s", for_read, for_write,
        for_append
    )
    # reject flag combinations which are invalid per se
    if for_trunc and not for_write:
        OS_EINVAL("O_TRUNC requires O_WRONLY or O_RDWR")
    if for_append and not for_write:
        OS_EINVAL("O_APPEND requires O_WRONLY or O_RDWR")
    # enforce the filesystem's append-only and readonly modes
    if (for_write and not for_append) and self.append_only:
        OS_EINVAL("fs is append_only but no O_APPEND")
    if for_trunc and self.append_only:
        OS_EINVAL("fs is append_only but O_TRUNC")
    if (for_write or for_append) and self.readonly:
        error("fs is readonly")
        OS_EROFS("fs is readonly")
    # only regular files may be opened here: reject symlinks and nonfiles
    if E.issym:
        if flags & O_NOFOLLOW:
            OS_ELOOP("open symlink with O_NOFOLLOW")
        OS_EINVAL("open(%s)" % (E,))
    elif not E.isfile:
        OS_EINVAL("open of nonfile: %s" % (E,))
    FH = FileHandle(self, E, for_read, for_write, for_append, lock=self._lock)
    # honour O_TRUNC by truncating through the new handle
    if flags & O_TRUNC:
        FH.truncate(0)
    # allocate and return the index for this new FileHandle
    return self._new_file_handle_index(FH)
def cmd_resize(argv):
    ''' Usage: {cmd} vdipath new_size_mb
          Resize a .vdi file to new_size_mb, a size in megabytes.
    '''
    if not argv:
        raise GetoptError("missing vdi")
    vdipath = argv.pop(0)
    with Pfx("vdipath %r", vdipath):
        if not vdipath.endswith('.vdi'):
            raise GetoptError("does not end with .vdi")
        if not existspath(vdipath):
            raise GetoptError("does not exist")
    if not argv:
        raise GetoptError("missing new_size_mb")
    new_size_mb_s = argv.pop(0)
    with Pfx("new_size_mb %r", new_size_mb_s):
        try:
            new_size_mb = int(new_size_mb_s)
        except ValueError as e:
            raise GetoptError("not an integer: %s" % (e, ))
        if new_size_mb <= 0:
            raise GetoptError("must be >0")
    try:
        return pfx_call(resizevdi, vdipath, new_size_mb, trace=True)
    except ValueError as e:
        error("resize fails: %s", e)
        return 1
def _run(self, *calargv, subp_options=None):
    ''' Run a Calibre utility command, returning the completed process.

        Parameters:
        * `calargv`: an iterable of the calibre command to issue;
          if the command name is not an absolute path it is expected
          to come from `self.CALIBRE_BINDIR_DEFAULT`
        * `subp_options`: optional mapping of keyword arguments to
          pass to `subprocess.run`
    '''
    X("calargv=%r", calargv)
    if subp_options is None:
        subp_options = {}
    # fail on nonzero exit status unless the caller overrides this
    subp_options.setdefault('check', True)
    cmd, *cmd_argv = calargv
    if not isabspath(cmd):
        cmd = joinpath(self.CALIBRE_BINDIR_DEFAULT, cmd)
    print("RUN", cmd, *cmd_argv)
    try:
        return pfx_call(run, [cmd, *cmd_argv], **subp_options)
    except CalledProcessError as e:
        error(
            "run fails, exit code %s:\n %s",
            e.returncode,
            ' '.join(map(repr, e.cmd)),
        )
        if e.stderr:
            print(e.stderr.replace('\n', ' \n'), file=sys.stderr)
        raise
def cmd_toc(self, argv):
    ''' Usage: {cmd} [disc_id]
          Print a table of contents for the current disc.
    '''
    disc_id = argv.pop(0) if argv else None
    if argv:
        raise GetoptError("extra arguments: %r" % (argv,))
    options = self.options
    MB = options.mbdb
    if disc_id is None:
        # no id supplied: probe the physical device
        try:
            dev_info = discid.read(device=options.device)
        except discid.disc.DiscError as e:
            error("disc error: %s", e)
            return 1
        disc_id = dev_info.id
    with Pfx("discid %s", disc_id):
        disc = MB.discs[disc_id]
        print(disc.title)
        print(", ".join(disc.artist_names))
        for tracknum, recording in enumerate(disc.recordings(), 1):
            print(
                tracknum, recording.title, '--',
                ", ".join(recording.artist_names)
            )
    return 0
def update_from_text(self, metatext):
    ''' Update the Meta fields from the supplied metatext. '''
    if metatext.startswith('{'):
        # wordy JSON encoding of metadata
        kvs = json.loads(metatext).items()
    else:
        # old style compact metadata: semicolon separated k:v fields
        kvs = []
        for field_text in metatext.split(';'):
            field_text = field_text.strip()
            if not field_text:
                continue
            try:
                k, v = field_text.split(':', 1)
            except ValueError:
                error("ignoring bad metatext field (no colon): %r", field_text)
            else:
                kvs.append((k, v))
    for k, v in kvs:
        if k == 'x':
            # update the xattrs from `v`, which should be a dict
            for xk, xv in v.items():
                self.setxattr(xk, xv)
        else:
            self[k] = v
def data(self):
    ''' A generator that yields MPEG2 data from the stream. '''
    with Pfx("data(%s)", self.dirpath):
        fp = None
        lastFileNum = None
        filePos = 0
        for rec in self.trunc_records():
            wizOffset, fileNum, flags, offset, size = rec
            if lastFileNum is None or lastFileNum != fileNum:
                # switch to the next numbered data file
                if lastFileNum is not None:
                    fp.close()
                fp = open(os.path.join(self.dirpath, "%04d" % (fileNum, )), "rb")
                filePos = 0
                lastFileNum = fileNum
            if filePos != offset:
                fp.seek(offset)
                filePos = offset
            while size > 0:
                rsize = min(size, 8192)
                buf = fp.read(rsize)
                assert len(buf) <= rsize
                if not buf:
                    error("%s: unexpected EOF", fp)
                    break
                yield buf
                # fix: advance filePos as data is read; previously it was
                # never updated after the open, so a later record whose
                # offset happened to equal the stale value (e.g. 0) would
                # skip a required seek and yield data from the wrong place
                filePos += len(buf)
                size -= len(buf)
        if lastFileNum is not None:
            fp.close()
def find_all(self, *a, **kw):
    ''' Convenience routine to call BeautifulSoup's .find_all() method. '''
    soup = self.parsed
    if not soup:
        error("%s: parse fails", self)
        return ()
    return soup.find_all(*a, **kw)
def notify(LF):
    ''' Collect the deferred iterable from `LF`; on failure, report
        the exception and close the output queue.
    '''
    I, exc_info = LF.join()
    if not exc_info:
        self.defer_iterable(I, self.outQ)
    else:
        # report exception
        error("%s.put(%r): %r", self.name, item, exc_info)
        self.outQ.close()
def fsck(self, recurse=False):  # pylint: disable=unused-argument
    ''' Check this LiteralBlock: its recorded length must match the
        length of its data. Return True if consistent.
    '''
    data = self._data
    if len(self) == len(data):
        return True
    error("len(self)=%d, len(data)=%d", len(self), len(data))
    return False
def notify(LF):
    ''' Collect the deferred iterable from `LF`; on failure, report
        the exception and close the output queue. Then shut down
        this pipeline stage.
    '''
    I, exc_info = LF.join()
    if not exc_info:
        self.defer_iterable(I, self.outQ)
    else:
        # report exception
        error("%s.put(%r): %r", self.name, I, exc_info)
        self.outQ.close()
    _PipelineStage.shutdown(self)
def rewrite(self):
    ''' Force a complete rewrite of the CSV file.
        A no-op (with an error report) when the backend is readonly.
    '''
    if self.readonly:
        error("%s: readonly: rewrite not done", self)
        return
    with rewrite_cmgr(self.pathname, backup_ext='', do_rename=True) as out_fp:
        write_csv_file(out_fp, self.nodedb.nodedata())
def _dump(dburl, argv):
    ''' Dump the node database at `dburl` to standard output.
        Return an exit code: 0 on success, 2 for extra arguments.
    '''
    if len(argv) > 0:
        error("extra arguments: %s" % (argv,))
        return 2
    DB = NodeDBFromURL(dburl)
    DB.dump(sys.stdout)
    return 0
def _load(dburl, argv):
    ''' Load node data from standard input into the database at `dburl`.
        Return an exit code: 0 on success, 2 for extra arguments.
    '''
    if len(argv) > 0:
        error("extra arguments: %s" % (argv,))
        return 2
    DB = NodeDBFromURL(dburl)
    DB.load(sys.stdin)
    return 0
def main(argv):
    ''' Command line mode: run/start/stop/status a mysqld instance.
        Return an exit code.
    '''
    argv = list(argv)
    xit = 2
    cmd = os.path.basename(argv.pop(0))
    setup_logging(cmd=cmd)
    usage = r'''Usage:
  %s [-F mycnf] run [/path/to/mysqld] [mysqld-options...]
  %s [-F mycnf] start [/path/to/mysqld] [mysqld-options...]
  %s [-F mycnf] stop
  %s [-F mycnf] status
''' % (cmd, cmd, cmd, cmd)
    badopts = False
    mycnf = ETC_MYCNF
    pid_file = PID_FILE
    try:
        opts, argv = getopt(argv, 'F:')
    except GetoptError as e:
        error(e)
        badopts = True
    else:
        for opt, arg in opts:
            with Pfx(opt):
                if opt == '-F':
                    mycnf = arg
                else:
                    error("unimplemented option")
                    badopts = True
    if not argv:
        warning("missing op, expected {run,start,stop,status}")
        badopts = True
    else:
        op = argv.pop(0)
        with Pfx(op):
            # dispatch table for the operations; run and start are aliases
            dispatch = {
                'run': mysqld_start,
                'start': mysqld_start,
                'stop': mysqld_stop,
                'status': mysqld_status,
            }
            try:
                op_func = dispatch.get(op)
                if op_func is None:
                    warning("unrecognised operation")
                    badopts = True
                else:
                    xit = op_func(mycnf, pid_file, argv)
            except GetoptError as e:
                warning(e)
                badopts = True
    if badopts:
        sys.stderr.write(usage)
        xit = 2
    return xit
def notify(LF):
    ''' Collect the result from `LF`: queue it on success, otherwise
        report the exception. Close the output queue either way.
    '''
    # collect result: queue or report exception
    item2, exc_info = LF.join()
    if not exc_info:
        self.outQ.put(item2)
    else:
        error("%s.put(%r): %r", self.name, item, exc_info)
    self.outQ.close()
def __init__(
    self, type_, name, *, meta=None, uuid=None, parent=None,
    prevblock=None, block=None, **kw
):
    ''' Initialise a _Dirent.

        Parameters:
        * `type_`: the `DirentType` enum
        * `name`: the `Dirent`'s name
        * `meta`: optional metadata
        * `uuid`: optional identifying UUID;
          *note*: for `IndirectDirent`s this is a reference to another
          `Dirent`'s UUID.
        * `parent`: optional parent Dirent
        * `prevblock`: optional Block whose contents are the binary
          transcription of this Dirent's previous state - another Dirent
    '''
    with Pfx("_Dirent(type_=%s,name=%r,...)", type_, name):
        # sanity check the type and name before recording anything
        if not isinstance(type_, int):
            raise TypeError("type_ is not an int: <%s>%r" % (type(type_), type_))
        if name is not None and not isinstance(name, str):
            raise TypeError(
                "name is neither None nor str: <%s>%r" % (type(name), name)
            )
        # unexpected keyword arguments are reported but otherwise ignored;
        # `block` is accepted only so that it can be rejected explicitly
        if kw:
            error("unsupported keyword arguments: %r", kw)
        if block is not None:
            raise ValueError("block is not None: %r" % (block, ))
        self.type = type_
        self.name = name
        self.uuid = uuid
        assert prevblock is None or isinstance(prevblock, _Block), \
            "not _Block: prevblock=%r" % (prevblock,)
        self._prev_dirent_blockref = prevblock
        # normalise `meta` into a Meta instance carrying a default ACL;
        # note that self.type must already be set because self.isdir is
        # consulted to choose the ACL
        if not isinstance(meta, Meta):
            M = Meta(
                {'a': DEFAULT_DIR_ACL if self.isdir else DEFAULT_FILE_ACL})
            if meta is None:
                pass
            elif isinstance(meta, str):
                # textual metadata: parse into the Meta
                M.update_from_text(meta)
            else:
                raise ValueError("unsupported meta value: %r" % (meta, ))
            if 'm' not in M:
                # default the modification time to now
                M['m'] = time.time()
            meta = M
        # NOTE(review): indirect Dirents get no .meta attribute - confirm
        # downstream code never reads .meta on an IndirectDirent
        if type_ != DirentType.INDIRECT:
            self.meta = meta
        self.parent = parent
def _send_loop(self):
    ''' Send packets upstream.
        Write every packet directly to self._send.
        Flush whenever the queue is empty.

        Loops over the send queue until it is closed, then writes the
        EOF packet, closes the send file and clears self._send.
    '''
    XX = self.tick
    ##with Pfx("%s._send", self):
    with PrePfx("_SEND [%s]", self):
        # postcondition: the send stream is released on exit
        with post_condition(("_send is None", lambda: self._send is None)):
            fp = self._send
            Q = self._sendQ
            grace = self.packet_grace
            for P in Q:
                # guard against transmitting the same packet twice
                sig = (P.channel, P.tag, P.is_request)
                if sig in self.__sent:
                    raise RuntimeError("second send of %s" % (P, ))
                self.__sent.add(sig)
                try:
                    XX(b'>')
                    for bs in P.transcribe_flat():
                        fp.write(bs)
                    if Q.empty():
                        # no immediately ready further packets: flush the output buffer
                        if grace > 0:
                            # allow a little time for further Packets to queue
                            XX(b'Sg')
                            sleep(grace)
                            if Q.empty():
                                # still nothing
                                XX(b'F')
                                fp.flush()
                        else:
                            XX(b'F')
                            fp.flush()
                except OSError as e:
                    if e.errno == errno.EPIPE:
                        # remote hangup: stop sending
                        warning("remote end closed")
                        break
                    raise
            # queue drained or remote closed: send the EOF packet and
            # close the stream
            try:
                XX(b'>EOF')
                for bs in self.EOF_Packet.transcribe_flat():
                    fp.write(bs)
                fp.close()
            except (OSError, IOError) as e:
                # a closed pipe or file descriptor here is expected teardown
                if e.errno == errno.EPIPE:
                    debug("remote end closed: %s", e)
                elif e.errno == errno.EBADF:
                    warning("local end closed: %s", e)
                else:
                    raise
            except Exception as e:
                error("(_SEND) UNEXPECTED EXCEPTION: %s %s", e, e.__class__)
                raise
            self._send = None
def shutdown(self):
    ''' Shut down the cache: stop the worker, close the file cache. '''
    # stop accepting work and wait for the worker to finish
    self._workQ.close()
    self._worker.join()
    if self.cached:
        # blocks remaining in the memory cache never reached the files
        error("blocks still in memory cache: %r", self.cached)
    for cf in self.cachefiles:
        cf.close()
def _unixsock_connect(self):
    ''' Connect to the UNIX domain socket at `self.socket_path`.
        Return a pair of read and write `OpenSocket`s for the connection.
    '''
    sock = socket(AF_UNIX)
    self.sock = sock
    with Pfx("%s.sock.connect(%r)", self, self.socket_path):
        try:
            sock.connect(self.socket_path)
        except OSError as e:
            # tidy up the failed socket before propagating the error
            error("connect fails: %s", e)
            sock.close()
            self.sock = None
            raise
    return OpenSocket(sock, False), OpenSocket(sock, True)
def main(argv=None):
    ''' The main command line. '''
    if argv is None:
        argv = sys.argv
    cmd, args = argv[0], argv[1:]
    setup_logging(cmd)
    M = Maker(argv[0])
    try:
        args, badopts = M.getopt(args)
    except GetoptError as e:
        warning("bad options: %s", e)
        badopts = True
    if badopts:
        print(usage % (cmd, ), file=sys.stderr)
        return 2
    # gather any macro assignments and apply
    cmd_ns = {}
    while args:
        try:
            macro = Macro.from_assignment("command line", args[0])
        except ValueError:
            break
        cmd_ns[macro.name] = macro
        args.pop(0)
    # defer __enter__ until after option parsing
    ok = M.loadMakefiles(M.makefiles)
    ok = ok and M.loadMakefiles(M.appendfiles)
    if cmd_ns:
        M.insert_namespace(cmd_ns)
    if not ok:
        error("errors loading Mykefiles")
        return 1
    # choose targets: command line args, else the default target
    targets = args
    if not targets:
        if M.default_target is not None:
            targets = (M.default_target.name, )
    if not targets:
        error("no default target")
        return 1
    with M:
        return 0 if M.make(targets) else 1
def contains(self, h):
    ''' Test whether the hashcode `h` is in any of the read Stores. '''
    for stores in self.read, self.read2:
        for S, result, exc_info in self._multicall(stores, 'contains_bg',
                                                   (h, )):
            if exc_info:
                error("exception fetching from %s: %s", S, exc_info)
                continue
            if result:
                return True
    return False
def loadAddresses(addresses, catmap=None, addrmap=None):
    ''' Load an address list file.
        Return ok (True/False) and maps by category and address key.
        Existing category and address key maps may be supplied.
    '''
    catmap = {} if catmap is None else catmap
    addrmap = {} if addrmap is None else addrmap
    ok = True
    with Pfx(addresses):
        with open(addresses) as addrfp:
            for lineno, line in enumerate(addrfp, 1):
                if not line.endswith('\n'):
                    error("line %d: missing newline (unexpected EOF)", lineno)
                    ok = False
                    break
                line = line.strip()
                # skip blank lines and comments
                if not line or line.startswith('#'):
                    continue
                try:
                    cats, addr = line.split(None, 1)
                except ValueError:
                    warning("line %d: bad syntax: %s", lineno, line)
                    ok = False
                    continue
                if addr.startswith('mailto:'):
                    addr = addr[7:]
                cats = cats.split(',')
                addrkey = addressKey(addr)
                if addrkey is None:
                    warning("line %d: can't parse address \"%s\"", lineno, addr)
                    ok = False
                    continue
                if "@" not in addrkey:
                    warning("line %d: no \"@\" in \"%s\"", lineno, addrkey)
                # fetch or create the AddressInfo for this key
                try:
                    entry = addrmap[addrkey]
                except KeyError:
                    entry = addrmap[addrkey] = AddressInfo(addrkey, addr, set())
                entry.categories.update(cats)
                for cat in cats:
                    catmap.setdefault(cat, {})[addrkey] = entry
    return ok, catmap, addrmap
def get_content(self, onerror=None):
    ''' Probe URL for content to avoid exceptions later.
        Use, and save as .content, `onerror` in the case of HTTPError.
    '''
    try:
        result = self.content
    except (HTTPError, URLError, socket.error) as e:
        # fall back to the caller-supplied placeholder value
        error("%s.get_content: %s", self, e)
        result = onerror
    self._content = result
    return result
def __len__(self):
    ''' Return the remote Store's length via a `LengthRequest`. '''
    try:
        response = self.do(LengthRequest())
    except StoreError as e:
        # connection failure: no length available
        # NOTE(review): returning None makes a len() call raise
        # TypeError - confirm callers invoke __len__ directly
        error("connection: %s", e)
        return None
    flags, payload = response
    assert flags == 0
    length, offset = BSUInt.parse_value_from_bytes(payload)
    if offset < len(payload):
        warning("unparsed bytes after BSUInt(length): %r", payload[offset:])
    return length