Example 1
    def __init__(self,
                 strm=None,
                 upd_level=None,
                 ansi_mode=None,
                 over_handler=None):
        ''' Initialise the `UpdHandler`.

        Parameters:
        * `strm`: the output stream, default `sys.stderr`.
        * `upd_level`: the magic logging level which updates the status line
          via `Upd`. Default: `STATUS`.
        * `ansi_mode`: if `None`, set from `strm.isatty()`.
          A true value causes the handler to colour certain logging levels
          using ANSI terminal sequences.
        * `over_handler`: an optional logging handler to which
          non-status messages are passed when the `Upd` is disabled.
        '''
        if strm is None:
            strm = sys.stderr
        if upd_level is None:
            upd_level = STATUS
        if ansi_mode is None:
            ansi_mode = strm.isatty()
        StreamHandler.__init__(self, strm)
        self.upd = Upd(strm)
        self.upd_level = upd_level
        self.ansi_mode = ansi_mode
        self.over_handler = over_handler
        self.__lock = Lock()
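
A minimal construction sketch, assuming this class is importable as `cs.logutils.UpdHandler` (the module path is inferred from the surrounding examples, not confirmed here):

    import sys
    from cs.logutils import UpdHandler  # module path assumed

    # force plain (uncoloured) output even when stderr is a terminal
    handler = UpdHandler(strm=sys.stderr, ansi_mode=False)
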
Example 2
  def checksum(self):
    ''' Checksum the file contents, used as a proxy for comparing the actual content.
    '''
    csum = self._checksum
    if csum is None:
      path = self.path
      U = Upd()
      pathspace = U.columns - 64
      label = "scan " + (
          path if len(path) < pathspace else '...' + path[-(pathspace - 3):]  # pylint: disable=unsubscriptable-object
      )
      with Pfx("checksum %r", path):
        csum = hashfunc()  # hashfunc is a hash constructor from the enclosing scope
        with open(path, 'rb') as fp:
          length = os.fstat(fp.fileno()).st_size
          read_len = 0
          for data in progressbar(
              read_from(fp, rsize=1024*1024),
              label=label,
              total=length,
              units_scale=BINARY_BYTES_SCALE,
              itemlenfunc=len,
              update_frequency=128,
              upd=U,
          ):
            csum.update(data)
            read_len += len(data)
          assert read_len == self.size
      csum = csum.digest()
      self._checksum = csum
    return csum
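
The `progressbar()` call above suggests a reusable pattern: wrap a chunked iterable, give it a label and an expected total, and let `itemlenfunc=len` advance the bar by each chunk's size. A minimal sketch under those assumptions; the import locations for `progressbar` and `read_from` are guesses based on their use above:

    import os
    from cs.fileutils import read_from       # assumed import location
    from cs.progress import progressbar      # assumed import location
    from cs.units import BINARY_BYTES_SCALE

    def copy_with_progress(srcpath, dstfile):
        ''' Copy `srcpath` to the open file `dstfile` with a progress bar. '''
        total = os.path.getsize(srcpath)
        with open(srcpath, 'rb') as src:
            for data in progressbar(
                read_from(src, rsize=1024 * 1024),
                label="copy " + srcpath,
                total=total,
                units_scale=BINARY_BYTES_SCALE,
                itemlenfunc=len,
            ):
                dstfile.write(data)
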
Example 3
  def __init__(
      self,
      *,
      upd=None,
      fstags=None,
      all_progress=None,
      ydl_opts=None,
      parallel=DEFAULT_PARALLEL,
  ):
    if upd is None:
      upd = Upd()
    if all_progress is None:
      all_progress = OverProgress()
    self.upd = upd
    self.sem = Semaphore(parallel)
    self.proxy0 = upd.insert(0)
    self.fstags = fstags
    self.all_progress = all_progress
    self.ydl_opts = ydl_opts
    self.Rs = []
    self.nfetches = 0
    self._lock = RLock()

    @logexc
    def update0():
      nfetches = self.nfetches
      if nfetches == 0:
        self.proxy0("Idle.")
      else:
        self.proxy0(
            self.all_progress.status(
                "%d %s" % (nfetches, "fetch" if nfetches == 1 else "fetches"),
                upd.columns - 1
            )
        )

    self.update0 = update0
    update0()
Example 4
# the inner wrapper of a decorator: `func`, `label`, `funcname` and
# `report_print` come from the enclosing decorator's scope
def wrapper(*a,
            progress=None,
            progress_name=None,
            progress_total=None,
            progress_report_print=None,
            **kw):
    if progress_name is None:
        progress_name = label or funcname(func)
    if progress_report_print is None:
        progress_report_print = report_print
    if progress is None:
        upd = Upd()
        if not upd.disabled:
            progress = Progress(name=progress_name, total=progress_total)
            with progress.bar(upd=upd, report_print=progress_report_print):
                return func(*a, progress=progress, **kw)
    return func(*a, progress=progress, **kw)
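
Assuming the enclosing decorator is applied to an ordinary function, a call might look like the sketch below. The decorator name `auto_progressbar` and its `label=` parameter are assumptions; the keyword-only controls come from the wrapper signature above:

    all_paths = ["a.txt", "b.txt", "c.txt"]       # sample data

    @auto_progressbar(label="scan files")         # decorator name and label= are assumptions
    def scan(paths, *, progress=None):
        for path in paths:
            print("scanning", path)               # stand-in for the real per-item work
            if progress is not None:
                progress += 1                     # advance the bar by one item

    # the wrapper adds keyword-only controls to every call:
    scan(all_paths, progress_total=len(all_paths), progress_report_print=True)
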
Example 5
  def __init__(
      self,
      url,
      *,
      fstags,
      upd=None,
      tick=None,
      over_progress=None,
      sem,
      **kw_opts
  ):
    ''' Initialise the manager.

        Parameters:
        * `url`: the URL to download
        * `fstags`: mandatory keyword argument, a `cs.fstags.FSTags` instance
        * `upd`: optional `cs.upd.Upd` instance for progress reporting
        * `tick`: optional callback to indicate state change
        * `over_progress`: an `OverProgress` to which to add each new `Progress` instance
        * `sem`: a shared `Semaphore` governing download parallelism
        * `kw_opts`: other keyword arguments are used to initialise
          the options for the underlying `YoutubeDL` instance
    '''
    if upd is None:
      upd = Upd()
    if tick is None:
      tick = lambda: None
    self.sem = sem
    self.url = url
    self.fstags = fstags
    self.tick = tick
    self.upd = upd
    self.proxy = None
    self.kw_opts = kw_opts
    self.ydl = None
    self.filename = None
    self.over_progress = over_progress
    self.progresses = {}
    self.result = None
    self._warned = set()
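
A hedged construction sketch for the manager initialised above; the class name `DownloadManager` is hypothetical (the real class name is not shown) and the arguments follow the parameter documentation:

    from threading import Semaphore
    from cs.fstags import FSTags
    from cs.upd import Upd

    dl = DownloadManager(               # hypothetical class name
        "https://example.com/video",
        fstags=FSTags(),
        upd=Upd(),
        sem=Semaphore(4),               # share one Semaphore across managers to cap parallelism
        format="bestvideo+bestaudio",   # extra keywords become options for the YoutubeDL instance
    )
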
Example 6
    def iterbar(
        self,
        it,
        label=None,
        upd=None,
        proxy=None,
        itemlenfunc=None,
        statusfunc=None,
        incfirst=False,
        width=None,
        window=None,
        update_frequency=1,
        update_min_size=None,
        report_print=None,
        runstate=None,
    ):
        ''' An iterable progress bar: a generator yielding values
        from the iterable `it` while updating a progress bar.

        Parameters:
        * `it`: the iterable to consume and yield.
        * `itemlenfunc`: an optional function returning the "size" of each item
          from `it`, used to advance `self.position`.
          The default is to assume a size of `1`.
          A convenient alternative choice may be the builtin function `len`.
        * `incfirst`: whether to advance `self.position` before we
          `yield` an item from `it` or afterwards.
          This reflects whether progress is considered to be made
          as items are obtained, or only after they have been processed
          by whatever is consuming this generator.
          The default is `False`, advancing after processing.
        * `label`: a label for the progress bar,
          default from `self.name`.
        * `width`: an optional width expressing how wide the progress bar
          text may be.
          The default comes from the `proxy.width` property.
        * `window`: optional timeframe to define "recent" in seconds;
          if the default `statusfunc` (`Progress.status`) is used
          this is passed to it
        * `statusfunc`: an optional function to compute the progress bar text
          accepting `(self,label,width)`.
        * `proxy`: an optional proxy for displaying the progress bar,
          a callable accepting the result of `statusfunc`.
          The default is a `cs.upd.UpdProxy` created from `upd`,
          which inserts a progress bar above the main status line.
        * `upd`: an optional `cs.upd.Upd` instance,
          used only to produce the default `proxy` if that is not supplied.
          The default `upd` is `cs.upd.Upd()`
          which uses `sys.stderr` for display.
        * `update_frequency`: optional update frequency, default `1`;
          only update the progress bar after this many iterations,
          useful if the iteration rate is quite high
        * `update_min_size`: optional update step size;
          only update the progress bar after an advance of this many units,
          useful if the iteration size increment is quite small
        * `report_print`: optional `print` compatible function
          with which to write a report on completion;
          this may also be a `bool`, which if true will use `Upd.print`
          in order to interoperate with `Upd`.
        * `runstate`: optional `RunState` whose `.cancelled` property can be consulted

        Example use:

            from cs.units import DECIMAL_SCALE
            rows = [some list of data]
            P = Progress(total=len(rows), units_scale=DECIMAL_SCALE)
            for row in P.iterbar(rows, incfirst=True):
                ... do something with each row ...

            f = open(data_filename, 'rb')
            datalen = os.stat(data_filename).st_size
            def readfrom(f):
                while True:
                    bs = f.read(65536)
                    if not bs:
                        break
                    yield bs
            P = Progress(total=datalen)
            for bs in P.iterbar(readfrom(f), itemlenfunc=len):
                ... process the file data in bs ...
        '''
        if label is None:
            label = self.name
        delete_proxy = False
        if proxy is None:
            if upd is None:
                upd = Upd()
            proxy = upd.insert(1)
            delete_proxy = True
        if statusfunc is None:
            statusfunc = lambda P, label, width: P.status(
                label, width, window=window)
        iteration = 0
        last_update_iteration = 0
        last_update_pos = start_pos = self.position

        def update_status(force=False):
            nonlocal self, proxy, statusfunc, label, width
            nonlocal iteration, last_update_iteration, last_update_pos
            if (force or iteration - last_update_iteration >= update_frequency
                    or
                (update_min_size is not None
                 and self.position - last_update_pos >= update_min_size)):
                last_update_iteration = iteration
                last_update_pos = self.position
                proxy(statusfunc(self, label, width or proxy.width))

        update_status(True)
        for iteration, item in enumerate(it):
            length = itemlenfunc(item) if itemlenfunc else 1
            if incfirst:
                self += length
                update_status()
            yield item
            if not incfirst:
                self += length
                update_status()
            if runstate is not None and runstate.cancelled:
                break
        if delete_proxy:
            proxy.delete()
        else:
            update_status(True)
        if report_print:
            if isinstance(report_print, bool):
                report_print = print
            report_print(
                label + (': (cancelled)' if runstate is not None
                         and runstate.cancelled else ':'),
                self.format_counter(self.position - start_pos), 'in',
                transcribe(self.elapsed_time,
                           TIME_SCALE,
                           max_parts=2,
                           skip_zero=True))
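
A small sketch exercising knobs not shown in the docstring examples: `update_min_size` to throttle redraws and `report_print=True` for a completion line. `Progress` and `BINARY_BYTES_SCALE` are used as elsewhere in this listing:

    from cs.progress import Progress          # assumed import location
    from cs.units import BINARY_BYTES_SCALE

    def copy_chunks(chunks, dst, total_bytes):
        ''' Copy an iterable of byte chunks to the open file `dst`. '''
        P = Progress(name="copy", total=total_bytes, units_scale=BINARY_BYTES_SCALE)
        for bs in P.iterbar(
            chunks,
            itemlenfunc=len,
            update_min_size=256 * 1024,    # redraw only after 256KiB of progress
            report_print=True,             # print a summary line on completion
        ):
            dst.write(bs)
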
Example 7
    def bar(
        self,
        label=None,
        *,
        upd=None,
        proxy=None,
        statusfunc=None,
        width=None,
        window=None,
        report_print=None,
        insert_pos=1,
        deferred=False,
    ):
        ''' A context manager to create and withdraw a progress bar.
        It returns the `UpdProxy` which displays the progress bar.

        Parameters:
        * `label`: a label for the progress bar,
          default from `self.name`.
        * `proxy`: an optional `UpdProxy` to display the progress bar
        * `upd`: an optional `cs.upd.Upd` instance,
          used only to produce the default `proxy` if that is not supplied.
          The default `upd` is `cs.upd.Upd()`
          which uses `sys.stderr` for display.
        * `statusfunc`: an optional function to compute the progress bar text
          accepting `(self,label,width)`.
        * `width`: an optional width expressing how wide the progress bar
          text may be.
          The default comes from the `proxy.width` property.
        * `window`: optional timeframe to define "recent" in seconds;
          if the default `statusfunc` (`Progress.status`) is used
          this is passed to it
        * `report_print`: optional `print` compatible function
          with which to write a report on completion;
          this may also be a `bool`, which if true will use `Upd.print`
          in order to interoperate with `Upd`.
        * `insert_pos`: where to insert the progress bar, default `1`
        * `deferred`: optional flag; if true do not create the
          progress bar until the first update occurs.

        Example use:

            # display progress reporting during upload_filename()
            # which updates the supplied Progress instance
            # during its operation
            P = Progress(name=label)
            with P.bar(report_print=True):
                upload_filename(src, progress=P)

        '''
        if label is None:
            label = self.name
        if upd is None:
            upd = Upd()
        if statusfunc is None:
            statusfunc = lambda P, label, width: P.status(
                label, width, window=window)
        pproxy = [proxy]
        proxy_delete = proxy is None

        def update(P, _):
            proxy = pproxy[0]
            if proxy is None:
                proxy = pproxy[0] = upd.insert(insert_pos, 'LABEL=' + label)
            proxy(statusfunc(P, label, width or proxy.width))

        try:
            if not deferred:
                if proxy is None:
                    proxy = pproxy[0] = upd.insert(insert_pos)
                status = statusfunc(self, label, width or proxy.width)
                proxy(status)
            self.notify_update.add(update)
            start_pos = self.position
            yield pproxy[0]
        finally:
            self.notify_update.remove(update)
            if proxy and proxy_delete:
                proxy.delete()
        if report_print:
            if isinstance(report_print, bool):
                report_print = print
            report_print(
                label + ':', self.format_counter(self.position - start_pos),
                'in',
                transcribe(self.elapsed_time,
                           TIME_SCALE,
                           max_parts=2,
                           skip_zero=True))
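
The `deferred` and `insert_pos` parameters are not exercised by the docstring example; a minimal sketch, assuming the method is exposed as `Progress.bar` as that example suggests:

    from cs.progress import Progress   # assumed import location

    def fetch_chunks():
        # stand-in data source for the sketch
        yield from (b'x' * 1024 for _ in range(64))

    P = Progress(name="download")
    # with deferred=True the status line only appears once the first
    # position update arrives via Progress's notify_update callbacks
    with P.bar(deferred=True, insert_pos=2, report_print=True):
        for bs in fetch_chunks():
            P += len(bs)
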
Example 8
class UpdHandler(StreamHandler):
    ''' A `StreamHandler` subclass whose `.emit` method
      uses a `cs.upd.Upd` for transcription.
    '''
    def __init__(self,
                 strm=None,
                 upd_level=None,
                 ansi_mode=None,
                 over_handler=None):
        ''' Initialise the `UpdHandler`.

        Parameters:
        * `strm`: the output stream, default `sys.stderr`.
        * `upd_level`: the magic logging level which updates the status line
          via `Upd`. Default: `STATUS`.
        * `ansi_mode`: if `None`, set from `strm.isatty()`.
          A true value causes the handler to colour certain logging levels
          using ANSI terminal sequences.
        * `over_handler`: an optional logging handler to which
          non-status messages are passed when the `Upd` is disabled.
        '''
        if strm is None:
            strm = sys.stderr
        if upd_level is None:
            upd_level = STATUS
        if ansi_mode is None:
            ansi_mode = strm.isatty()
        StreamHandler.__init__(self, strm)
        self.upd = Upd(strm)
        self.upd_level = upd_level
        self.ansi_mode = ansi_mode
        self.over_handler = over_handler
        self.__lock = Lock()

    def emit(self, logrec):
        ''' Emit a `LogRecord` `logrec`.

        For the log level `self.upd_level` update the status line.
        For other levels write a distinct line
        to the output stream, possibly colourised.
        '''
        upd = self.upd
        if logrec.levelno == self.upd_level:
            line = self.format(logrec)
            with self.__lock:
                upd.out(line)
        else:
            if self.ansi_mode:
                if logrec.levelno >= logging.ERROR:
                    logrec.msg = colourise(logrec.msg, 'red')
                elif logrec.levelno >= logging.WARNING:
                    logrec.msg = colourise(logrec.msg, 'yellow')
            line = self.format(logrec)
            with self.__lock:
                if upd.disabled:
                    self.over_handler.emit(logrec)
                else:
                    upd.nl(line)

    def flush(self):
        ''' Flush the update status.
        '''
        return self.upd.flush()
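
A sketch wiring the handler into the standard `logging` module; the import path and the `STATUS` level constant are assumptions based on the surrounding code:

    import logging
    import sys
    from cs.logutils import STATUS, UpdHandler    # module path assumed

    fallback = logging.StreamHandler(sys.stderr)  # used via over_handler when the Upd is disabled
    handler = UpdHandler(sys.stderr, over_handler=fallback)
    root = logging.getLogger()
    root.addHandler(handler)
    root.setLevel(min(STATUS, logging.WARNING))   # let both record levels through

    logging.warning("emitted as its own line, coloured yellow on a terminal")
    root.log(STATUS, "rewrites the single status line")
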
Example 9
def setup_logging(
    cmd_name=None,
    main_log=None,
    format=None,
    level=None,
    flags=None,
    upd_mode=None,
    ansi_mode=None,
    trace_mode=None,
    module_names=None,
    function_names=None,
    verbose=None,
    supplant_root_logger=False,
):
    ''' Arrange basic logging setup for conventional UNIX command
      line error messaging; return an object with informative attributes.
      That object is also available as the global `cs.logutils.loginfo`.

      Amongst other things, the default logger now includes
      the `cs.pfx` prefix in the message.

      This function runs in two modes:
      - if logging has not been set up, it sets up a root logger
      - if the root logger already has handlers,
        it monkey patches the first handler's formatter to prefix the `cs.pfx` state

      Parameters:
      * `cmd_name`: program name, default from `basename(sys.argv[0])`.
        Side-effect: sets `cs.pfx.cmd` to this value.
      * `main_log`: default logging system.
        If `None`, the main log will go to `sys.stderr`;
        if `main_log` is a string, it is used as a filename to
        open in append mode;
        otherwise `main_log` should be a stream suitable
        for use with `logging.StreamHandler()`.
        The resulting log handler is added to the `logging` root logger.
      * `format`: the message format for `main_log`.
        If `None`, use `DEFAULT_PFX_FORMAT_TTY`
        when `main_log` is a tty or FIFO,
        otherwise `DEFAULT_PFX_FORMAT`.
      * `level`: `main_log` logging level.
        If `None`, infer a level from the environment
        using `infer_logging_level()`.
      * `flags`: a string containing debugging flags separated by commas.
        If `None`, infer the flags from the environment using
        `infer_logging_level()`.
        The following flags have meaning:
        `D`: set cs.logutils.D_mode to True;
        `TDUMP`: attach a signal handler to SIGHUP to do a thread stack dump;
        `TRACE`: enable various noisy tracing facilities;
        `UPD`, `NOUPD`: set the default for `upd_mode` to True or False respectively.
      * `upd_mode`: a Boolean to activate cs.upd as the `main_log` method;
        if `None`, set it to `True` if `flags` contains 'UPD',
        otherwise to `False` if `flags` contains 'NOUPD',
        otherwise set it from `main_log.isatty()`.
        A true value causes the root logger to use `cs.upd` for logging.
      * `ansi_mode`: if `None`,
        set it from `main_log.isatty() and not cs.colourise.env_no_color()`,
        which thus honours the `$NO_COLOR` environment variable
        (see https://no-color.org/ for the convention).
        A true value causes the root logger to colour certain logging levels
        using ANSI terminal sequences (currently only if `cs.upd` is used).
      * `trace_mode`: if `None`, set it according to the presence of
        'TRACE' in flags. Otherwise if `trace_mode` is true, set the
        global `loginfo.trace_level` to `loginfo.level`; otherwise it defaults
        to `logging.DEBUG`.
      * `verbose`: if `None`, then if stderr is a tty then the log
        level is `INFO` otherwise `WARNING`. Otherwise, if `verbose` is
        true then the log level is `INFO` otherwise `WARNING`.
    '''
    global D_mode, loginfo  # pylint: disable=global-statement

    # infer logging modes, these are the initial defaults
    inferred = infer_logging_level(verbose=verbose)
    if level is None:
        level = inferred.level
    if flags is None:
        flags = inferred.flags
    if module_names is None:
        module_names = inferred.module_names
    if function_names is None:
        function_names = inferred.function_names

    if cmd_name is None:
        cmd_name = os.path.basename(sys.argv[0])
    cs.pfx.cmd = cmd_name

    if main_log is None:
        main_log = sys.stderr
    elif isinstance(main_log, str):
        main_log = open(main_log, "a")

    # determine some attributes of main_log
    try:
        fd = main_log.fileno()
    except (AttributeError, IOError):
        is_fifo = False
        ##is_reg = False                        # unused
        is_tty = False
    else:
        st = os.fstat(fd)
        is_fifo = stat.S_ISFIFO(st.st_mode)
        ##is_reg = stat.S_ISREG(st.st_mode)     # unused
        is_tty = stat.S_ISCHR(st.st_mode)

    if getattr(main_log, 'encoding', None) is None:
        main_log = codecs.getwriter("utf-8")(main_log)

    if trace_mode is None:
        trace_mode = 'TRACE' in flags

    if 'D' in flags:
        D_mode = True

    if upd_mode is None:
        if 'UPD' in flags:
            upd_mode = True
        elif 'NOUPD' in flags:
            upd_mode = False
        else:
            upd_mode = is_tty

    if ansi_mode is None:
        ansi_mode = is_tty and not env_no_color()

    if format is None:
        if is_tty or is_fifo:
            format = DEFAULT_PFX_FORMAT_TTY
        else:
            format = DEFAULT_PFX_FORMAT

    if 'TDUMP' in flags:
        # do a thread dump to the main_log on SIGHUP
        # pylint: disable=import-outside-toplevel
        import signal
        import cs.debug as cs_debug

        # pylint: disable=unused-argument
        def handler(sig, frame):
            cs_debug.thread_dump(None, main_log)

        signal.signal(signal.SIGHUP, handler)

    upd_ = Upd()

    root_logger = logging.getLogger()
    if root_logger.handlers:
        # The logging system is already set up.
        # Just monkey patch the leading handler's formatter.
        PfxFormatter.patch_formatter(root_logger.handlers[0].formatter)
    else:
        # Set up a handler etc.
        main_handler = logging.StreamHandler(main_log)
        if upd_mode:
            main_handler = UpdHandler(main_log,
                                      ansi_mode=ansi_mode,
                                      over_handler=main_handler)
            upd_ = main_handler.upd
        root_logger.setLevel(level)
        if loginfo is None:
            # only do this the first time
            # TODO: fix this clumsy hack, some kind of stackable state?
            main_handler.setFormatter(PfxFormatter(format))
            if supplant_root_logger:
                root_logger.handlers.pop(0)
            root_logger.addHandler(main_handler)

    if trace_mode:
        # enable tracing in the thread that called setup_logging
        Pfx._state.trace = info
        trace_level = level
    else:
        trace_level = logging.DEBUG

    if module_names or function_names:
        if importlib is None:
            warning(
                "setup_logging: no importlib (python<2.7?),"
                " ignoring module_names=%r/function_names=%r", module_names,
                function_names)
        else:
            for module_name in module_names:
                try:
                    M = importlib.import_module(module_name)
                except ImportError:
                    warning("setup_logging: cannot import %r", module_name)
                else:
                    M.DEBUG = True
            for module_name, func_name in function_names:
                try:
                    M = importlib.import_module(module_name)
                except ImportError:
                    warning("setup_logging: cannot import %r", module_name)
                    continue
                F = M
                for funcpart in func_name.split('.'):
                    M = F
                    try:
                        F = getattr(M, funcpart)
                    except AttributeError:
                        F = None
                        break
                if F is None:
                    warning("no %s.%s() found", module_name, func_name)
                else:
                    setattr(M, funcpart, _ftrace(F))

    loginfo = NS(
        logger=root_logger,
        level=level,
        verbose=verbose,
        trace_level=trace_level,
        flags=flags,
        module_names=module_names,
        function_names=function_names,
        cmd=cmd_name,
        upd=upd_,
        upd_mode=upd_mode,
        ansi_mode=ansi_mode,
        format=format,
    )

    return loginfo
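
Conventional use in a command line program, based on the parameters documented above; `warning` comes from the same module (it is already used in the code above):

    import sys
    from cs.logutils import setup_logging, warning

    def main(argv=None):
        if argv is None:
            argv = sys.argv
        loginfo = setup_logging(cmd_name=argv[0])
        # messages now carry the cs.pfx prefix; on a terminal the root logger
        # writes via cs.upd, keeping the status line intact
        warning("logging level is %s", loginfo.level)
        return 0

    if __name__ == '__main__':
        sys.exit(main(sys.argv))
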