Example #1
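A hex dump helper: it sizes each output row to the terminal width (falling back to 80 columns), then prints the printable-text and hex forms of successive slices of the data. hexlify is presumably binascii.hexlify; ttysize is assumed to report the terminal dimensions.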
def dump_chunk(data, leadin, max_width=None, one_line=False):
  ''' Dump a data chunk.
  '''
  if max_width is None:
    _, columns = ttysize(1)
    if columns is None:
      columns = 80
    max_width = columns - 1
  leadin += ' %5d' % (len(data),)
  leadin2 = ' ' * len(leadin)
  data_width = max_width - len(leadin)
  slice_size = (data_width - 1) // 3
  assert slice_size > 0
  doff = 0
  while doff < len(data):
    doff2 = doff + slice_size
    chunk = data[doff:doff2]
    hex_text = hexlify(chunk).decode('utf-8')
    txt_text = ''.join(
        c if c.isprintable() else '.' for c in chunk.decode('iso8859-1')
    )
    print(leadin, txt_text, hex_text)
    if one_line:
      break
    leadin = leadin2
    doff = doff2
Example #2
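A minimal subcommand: it rejects extra arguments, then prints each key and value of the API's account state, formatting values with pformat (presumably pprint.pformat).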
 def cmd_account(self, argv):
     ''' Usage: {cmd}
       Report account state.
 '''
     if argv:
         raise GetoptError("extra arguments: %r" % (argv, ))
     api = self.options.api
     for k, v in sorted(api.account().items()):
         print(k, pformat(v))
Example #3
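Hard-links every path of other onto this object's primary path. Each existing path is first renamed to a temporary name and then relinked; on an OSError the original link is restored and the method reports failure. Pfx, UpdProxy, warning and error appear to come from the surrounding cs.* modules.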
 def assimilate(self, other, no_action=False):
   ''' Link our primary path to all the paths from `other`. Return success.
   '''
   ok = True
   path = self.path
   opaths = other.paths
   pathprefix = common_path_prefix(path, *opaths)
   vpathprefix = shortpath(pathprefix)
   pathsuffix = path[len(pathprefix):]  # pylint: disable=unsubscriptable-object
   with UpdProxy() as proxy:
     proxy(
         "%s%s <= %r", vpathprefix, pathsuffix,
         list(map(lambda opath: opath[len(pathprefix):], sorted(opaths)))
     )
     with Pfx(path):
       if self is other or self.same_file(other):
         # already assimilated
         return ok
       assert self.same_dev(other)
       for opath in sorted(opaths):
         with Pfx(opath):
           if opath in self.paths:
             warning("already assimilated")
             continue
           if vpathprefix:
             print(
                 "%s: %s => %s" %
                 (vpathprefix, opath[len(pathprefix):], pathsuffix)
             )
           else:
             print("%s => %s" % (opath[len(pathprefix):], pathsuffix))
           if no_action:
             continue
           odir = dirname(opath)
           with NamedTemporaryFile(dir=odir) as tfp:
             with Pfx("unlink(%s)", tfp.name):
               os.unlink(tfp.name)
             with Pfx("rename(%s, %s)", opath, tfp.name):
               os.rename(opath, tfp.name)
             with Pfx("link(%s, %s)", path, opath):
               try:
                 os.link(path, opath)
               except OSError as e:
                 error("%s", e)
                 ok = False
                 # try to restore the previous file
                 with Pfx("restore: link(%r, %r)", tfp.name, opath):
                   os.link(tfp.name, opath)
               else:
                 self.paths.add(opath)
                 opaths.remove(opath)
   return ok
Example #4
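For each supplied directory this calls tagger.per_tag_auto_file_map for the chosen tag names and pretty-prints the resulting mapping.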
 def cmd_derive(self, argv):
     ''' Usage: {cmd} dirpaths...
       Derive an autofile mapping of tags to directory paths
       from the directory paths supplied.
 '''
     if not argv:
         raise GetoptError("missing dirpaths")
     tagger = self.options.tagger
     mapping = defaultdict(list)
     tag_names = 'abn', 'invoice', 'vendor'
     for path in argv:
         print("scan", path)
         mapping = tagger.per_tag_auto_file_map(path, tag_names)
         pprint(mapping)
Example #5
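Reports an ontology type: after validating the arguments it prints each value returned by tagger.ont_values along with its type_name.type_value ontology entry.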
 def cmd_ont(self, argv):
     ''' Usage: {cmd} type_name
 '''
     tagger = self.options.tagger
     if not argv:
         raise GetoptError("missing type_name")
     type_name = argv.pop(0)
     with Pfx("type %r", type_name):
         if argv:
             raise GetoptError("extra arguments: %r" % (argv, ))
     print(type_name)
     for type_value in tagger.ont_values(type_name):
         ontkey = f"{type_name}.{type_value}"
         with Pfx("ontkey = %r", ontkey):
             print(" ", r(type_value), tagger.ont[ontkey])
Example #6
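Drives a single YoutubeDL download inside a status-line proxy: it builds the options dict, extracts the media info, downloads under a semaphore-limited run slot, then records the extracted metadata as fstags tags on the output file.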
  def run(self):
    ''' Run the download.
    '''
    url = self.url
    upd = self.upd
    proxy = self.proxy = upd.insert(1)
    proxy.prefix = url + ' '

    with proxy:
      try:
        ydl_opts = {
            'progress_hooks': [self.update_progress],
            'format': DEFAULT_OUTPUT_FORMAT,
            'logger': logging.getLogger(),
            'outtmpl': DEFAULT_OUTPUT_FILENAME_TEMPLATE,
            ##'skip_download': True,
            'writeinfojson': False,
            'updatetime': False,
            'process_info': [self.process_info]
        }
        if self.kw_opts:
          ydl_opts.update(self.kw_opts)
        ydl = self.ydl = YoutubeDL(ydl_opts)

        proxy('await run slot...')
        with self.sem:
          proxy('extract_info...')
          self.tick()
          ie_result = ydl.extract_info(url, download=False, process=True)
          output_path = ydl.prepare_filename(ie_result)
          proxy.prefix = (ie_result.get('title') or output_path) + ' '

          proxy('download...')
          self.tick()
          with LogTime("%s.download(%r)", type(ydl).__name__, url) as LT:
            with ydl:
              ydl.download([url])
        proxy("elapsed %ds, saving metadata ...", LT.elapsed)
        self.tick()

        tagged_path = self.fstags[output_path]
        for key, value in ie_result.items():
          tag_name = FSTAGS_PREFIX + '.' + key
          tagged_path.set(tag_name, value)
        self.fstags.sync()
        print(output_path)
      except DownloadError as e:
        error("download fails: %s", e)
Example #7
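Checks whether the cached recording records look stale and, if a refresh is needed, refreshes the queue and recordings in background threads, joining them before returning.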
 def _refresh_sqltags_data(api, sqltags, max_age=None):
     ''' Refresh the queue and recordings if any unexpired records are stale
     or if all records are expired.
 '''
     recordings = set(sqltags.recordings())
     need_refresh = (
         # any current recordings whose state is stale
         any(not recording.is_expired() and recording.is_stale(
             max_age=max_age) for recording in recordings) or
         # no recording is current
         not all(recording.is_expired() for recording in recordings))
     if need_refresh:
         print("refresh queue and recordings...")
         Ts = [bg_thread(api.queue), bg_thread(api.recordings)]
         for T in Ts:
             T.join()
Example #8
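Merges new paths into the tagger.file_by mapping stored on a directory's fstags entry, rewriting paths under $HOME into ~/ form and storing the sorted result back.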
 def cmd_fileby(self, argv):
     ''' Usage: {cmd} [-d dirpath] tag_name paths...
       Add paths to the tagger.file_by mapping for the current directory.
       -d dirpath    Adjust the mapping for a different directory.
 '''
     dirpath = '.'
     opts, argv = getopt(argv, 'd:')
     for opt, val in opts:
         with Pfx(opt):
             if opt == '-d':
                 dirpath = val
             else:
                 raise RuntimeError("unhandled option")
     if not argv:
         raise GetoptError("missing tag_name")
     tag_name = argv.pop(0)
     if not Tag.is_valid_name(tag_name):
         raise GetoptError("invalid tag_name: %r" % (tag_name, ))
     if not argv:
         raise GetoptError("missing paths")
     tagged = self.options.fstags[dirpath]
     file_by = tagged.get('tagger.file_by', {})
     paths = file_by.get(tag_name, ())
     if isinstance(paths, str):
         paths = [paths]
     paths = set(paths)
     paths.update(argv)
     homedir = os.environ.get('HOME')
     if homedir and isabspath(homedir):
         homedir_ = homedir + os.sep
         paths = set(
             ('~/' +
              path[len(homedir_):] if path.startswith(homedir_) else path)
             for path in paths)
     file_by[tag_name] = sorted(paths)
     tagged['tagger.file_by'] = file_by
     print("tagger.file_by =", repr(file_by))
Example #9
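An interactive loop: it shows the path's current tags, suggested tags, filing targets and inferred tags, then accepts +tag and -tag actions at a prompt until EOF.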
 def cmd_test(self, argv):
     ''' Usage: {cmd} path
       Run a test against path.
       Currently we try out the suggestions.
 '''
     tagger = self.options.tagger
     fstags = self.options.fstags
     if not argv:
         raise GetoptError("missing path")
     path = argv.pop(0)
     if argv:
         raise GetoptError("extra arguments: %r" % (argv, ))
     tagged = fstags[path]
     changed = True
     while True:
         print(path, *tagged)
         if changed:
             changed = False
             suggestions = tagger.suggested_tags(path)
             for tag_name, values in sorted(suggestions.items()):
                 print(" ", tag_name, values)
             for file_to in tagger.file_by_tags(path, no_link=True):
                 print("=>", shortpath(file_to))
             print("inferred:", repr(tagger.infer(path)))
         try:
             action = input("Action? ").strip()
         except EOFError:
             break
         if action:
             with Pfx(repr(action)):
                 try:
                     if action.startswith('-'):
                         tag = Tag.from_str(action[1:].lstrip())
                         tagged.discard(tag)
                         changed = True
                     elif action.startswith('+'):
                         tag = Tag.from_str(action[1:].lstrip())
                         tagged.add(tag)
                         changed = True
                     else:
                         raise ValueError("unrecognised action")
                 except ValueError as e:
                     warning("action fails: %s", e)
Example #10
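Prints the suggested tags for each supplied pathname, one block per path.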
 def cmd_suggest(self, argv):
     ''' Usage: {cmd} pathnames...
       Suggest tags for each pathname.
 '''
     if not argv:
         raise GetoptError("missing pathnames")
     tagger = self.options.tagger
     for path in argv:
         print()
         print(path)
         for tag_name, values in sorted(
                 tagger.suggested_tags(path).items()):
             print(" ", tag_name, *sorted(values))
Example #11
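Lists the PlayOn services sorted by playon.ID; when a service_id is supplied the other services are skipped and only the matching service is detailed, including all of its playon.* tags.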
 def cmd_service(self, argv, locale='en_US'):
     ''' Usage: {cmd} [service_id]
       List services.
 '''
     if argv:
         service_id = argv.pop(0)
     else:
         service_id = None
     if argv:
         raise GetoptError("extra arguments: %r" % (argv, ))
     api = self.options.api
     for service in sorted(api.services(),
                           key=lambda svc: svc['playon.ID']):
         playon = service.subtags('playon')
         if service_id is not None and playon.ID != service_id:
             print("skip", playon.ID)
             continue
         print(playon.ID, playon.Name, playon.LoginMetadata["URL"])
         if service_id is None:
             continue
         for tag in playon:
             print(" ", tag)
Example #12
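Spawns background threads to refresh the queue and/or recordings from the PlayOn API, then waits for them all to finish.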
 def cmd_refresh(self, argv):
     ''' Usage: {cmd} [queue] [recordings]
       Update the db state from the PlayOn service.
 '''
     api = self.options.api
     if not argv:
         argv = ['queue', 'recordings']
     xit = 0
     Ts = []
     for state in argv:
         with Pfx(state):
             if state == 'queue':
                 print("refresh queue...")
                 Ts.append(bg_thread(api.queue))
             elif state == 'recordings':
                 print("refresh recordings...")
                 Ts.append(bg_thread(api.recordings))
             else:
                 warning("unsupported update target")
                 xit = 1
     print("wait for API...")
     for T in Ts:
         T.join()
     return xit
Example #13
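Downloads the named recordings in parallel: a semaphore caps concurrency at -j jobs, each download runs in the background via bg_result, expired or not-yet-available recordings are skipped, and -n merely lists the recordings instead of downloading.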
    def cmd_dl(self, argv):
        ''' Usage: {cmd} [-j jobs] [-n] [recordings...]
          Download the specified recordings, default "pending".
          -j jobs   Run this many downloads in parallel.
                    The default is {DEFAULT_DL_PARALLELISM}.
          -n        No download. List the specified recordings.
    '''
        options = self.options
        sqltags = options.sqltags
        dl_jobs = DEFAULT_DL_PARALLELISM
        no_download = False
        opts, argv = getopt(argv, 'j:n')
        for opt, val in opts:
            with Pfx(opt):
                if opt == '-j':
                    dl_jobs = int(val)
                    if dl_jobs < 1:
                        raise GetoptError(
                            f"invalid jobs, should be >= 1, got: {dl_jobs}")
                elif opt == '-n':
                    no_download = True
                else:
                    raise RuntimeError("unhandled option")
        if not argv:
            argv = ['pending']
        api = options.api
        filename_format = options.filename_format
        sem = Semaphore(dl_jobs)

        @typechecked
        def _dl(dl_id: int, sem):
            try:
                with sqltags:
                    filename = api[dl_id].format_as(filename_format)
                    filename = (filename.lower().replace(' - ', '--').replace(
                        '_', ':').replace(' ', '-').replace(os.sep, ':') + '.')
                    try:
                        api.download(dl_id, filename=filename)
                    except ValueError as e:
                        warning("download fails: %s", e)
                        return None
                    return filename
            finally:
                sem.release()

        xit = 0
        Rs = []
        for arg in argv:
            with Pfx(arg):
                recording_ids = sqltags.recording_ids_from_str(arg)
                if not recording_ids:
                    warning("no recording ids")
                    xit = 1
                    continue
                for dl_id in recording_ids:
                    recording = sqltags[dl_id]
                    with Pfx(recording.name):
                        citation = recording.nice_name()
                        if recording.is_expired():
                            warning("expired, skipping %r", citation)
                            continue
                        if not recording.is_available():
                            warning("not yet available, skipping %r", citation)
                            continue
                        if recording.is_downloaded():
                            warning("already downloaded %r to %r", citation,
                                    recording.download_path)
                        if no_download:
                            recording.ls()
                        else:
                            sem.acquire()  # pylint: disable=consider-using-with
                            Rs.append(
                                bg_result(_dl,
                                          dl_id,
                                          sem,
                                          _extra=dict(dl_id=dl_id)))

        if Rs:
            for R in report_results(Rs):
                dl_id = R.extra['dl_id']
                recording = sqltags[dl_id]
                if not R():
                    print("FAILED", dl_id)
                    xit = 1

        return xit