def cmd_dl(self, argv):
    ''' Collect messages from a POP3 server and deliver to a Maildir.

        Usage: {cmd} [{{ssl,tcp}}:]{{netrc_account|[user@]host[!sni_name][:port]}} maildir
    '''
    # exactly two positional arguments: POP3 target, then maildir path
    pop_target = argv.pop(0)
    maildir_path = argv.pop(0)
    assert len(argv) == 0
    if not isdirpath(maildir_path):
        raise ValueError("maildir %s: not a directory" % (maildir_path,))
    mdir = Maildir(maildir_path)
    with POP3(pop_target) as pop3:
        # message-number -> UID for every message on the server
        uid_map = dict(pop3.client_uidl())
        n_msgs = len(uid_map)
        print(
            f'{n_msgs} message',
            ('' if n_msgs == 1 else 's'),
            ('.' if n_msgs == 0 else ':'),
            sep=''
        )
        with ResultSet() as deleRs:
            with ResultSet() as retrRs:
                # dispatch a background retrieval per message;
                # each retrieval queues its DELE into deleRs
                for msg_num in uid_map:
                    retrRs.add(pop3.dl_bg(msg_num, mdir, deleRs))
                pop3.flush()
                retrRs.wait()
            # all retrievals done, so all the DELEs are queued
            pop3.flush()
            if deleRs:
                print("wait for DELEs...")
                deleRs.wait()
def file_by_mapping(self, srcdirpath: str):
    ''' Examine the `{TAGGER_TAG_PREFIX_DEFAULT}.file_by` tag for `srcdirpath`.
        Return a mapping of specific tag values to filing locations
        derived via `per_tag_auto_file_map`.

        The file location specification in the tag may be a list
        or a string (for convenient single locations).

        For example, I might tag my downloads directory with:

            {TAGGER_TAG_PREFIX_DEFAULT}.file_by={{"abn":"~/them/me"}}

        indicating that files with an `abn` tag may be filed in the
        `~/them/me` directory. That directory is then walked looking
        for the tag `abn`, and wherever some tag `abn=`*value* is found
        on a subdirectory a mapping entry for `abn=`*value*=>*subdirectory*
        is added.

        This results in a direct mapping of specific tag values to filing
        locations, such as:

            {{ Tag('abn','***********')
               => ['/path/to/them/me/abn-**-***-***-***'] }}

        because the target subdirectory has been tagged with
        `abn="***********"`.
    '''
    # the caller must hand us an existing, already-expanded directory path
    assert isdirpath(srcdirpath)
    assert not srcdirpath.startswith('~')
    assert '~' not in srcdirpath
    fstags = self.fstags
    tagged = fstags[srcdirpath]
    # cache results keyed on the canonical fstags filepath
    key = tagged.filepath
    try:
        mapping = self._file_by_mappings[key]
    except KeyError:
        # cache miss: build the mapping from the file_by configuration tag
        mapping = defaultdict(set)
        file_by = self.conf_tag(fstags[srcdirpath].all_tags, 'file_by', {})
        # group the tags by file_by target path
        grouped = defaultdict(set)
        for tag_name, file_to in file_by.items():
            # a bare string is shorthand for a single-location list
            if isinstance(file_to, str):
                file_to = (file_to,)
            for file_to_path in file_to:
                if not isabspath(file_to_path):
                    if file_to_path.startswith('~'):
                        # ~-paths must expand to absolute paths
                        file_to_path = expanduser(file_to_path)
                        assert isabspath(file_to_path)
                    else:
                        # other relative paths are relative to srcdirpath
                        file_to_path = joinpath(srcdirpath, file_to_path)
                # canonicalise so distinct specs of one directory coalesce
                file_to_path = realpath(file_to_path)
                grouped[file_to_path].add(tag_name)
        # walk each path for its tag_names of interest
        for file_to_path, tag_names in sorted(grouped.items()):
            with Pfx("%r:%r", file_to_path, tag_names):
                # accrue destination paths by tag values
                for bare_key, dstpaths in self.per_tag_auto_file_map(
                    file_to_path, tag_names).items():
                    mapping[bare_key].update(dstpaths)
        self._file_by_mappings[key] = mapping
    return mapping
def tagsetses_from_path(cls, ont_path: str):
    ''' Return `(tagsets,ont_pfx_map)` instance from `ont_path`,
        being the default `TagSets`
        and a mapping of name->`TagSets` for various subontologies.

        If `ont_path` resolves to a file the mapping will be empty;
        return an `SQLTags` if `ont_path` ends with `'.sqlite'`
        otherwise a `TagFile`.

        If `ont_path` resolves to a directory, scan the entries.
        An entry named *prefix*`.sqlite`
        adds a *prefix*->`SQLTags` entry to the mapping.
        An entry named *prefix*`.tags`
        adds a *prefix*->`TagFile` entry to the mapping.
        After the scan, `tagsets` is set from the entry
        whose prefix was `'_'`, or `None`.

        Raises `ValueError` for an unsupported `ont_path`.
    '''
    ont_pfx_map = {}
    if isfilepath(ont_path):
        # a plain file: choose the store type from the extension
        if ont_path.endswith('.sqlite'):
            tagsets = SQLTags(ont_path)
        else:
            tagsets = TagFile(ont_path)
    elif isdirpath(ont_path):
        # a directory: scan entries for *prefix*.sqlite / *prefix*.tags
        with Pfx("listdir(%r)", ont_path):
            for subont_name in os.listdir(ont_path):
                if not subont_name or subont_name.startswith('.'):
                    continue
                subont_path = joinpath(ont_path, subont_name)
                with Pfx(subont_path):
                    if not isfilepath(subont_path):
                        warning("not a file")
                        # BUGFIX: previously fell through and opened the
                        # non-file as an ontology store; skip it instead
                        continue
                    # cutsuffix returns the same object when the suffix
                    # is absent, hence the identity comparison
                    prefix = cutsuffix(subont_name, '.sqlite')
                    if prefix is not subont_name:
                        ont_pfx_map[prefix] = SQLTags(subont_path)
                        continue
                    prefix = cutsuffix(subont_name, '.tags')
                    if prefix is not subont_name:
                        ont_pfx_map[prefix] = TagFile(subont_path)
                        continue
                    warning("unsupported name, does not end in .sqlite or .tags")
                    continue
        # the '_' entry, if any, is the default TagSets
        tagsets = ont_pfx_map.pop('_', None)
    else:
        # no such path: probe for an implied .sqlite file
        if not ont_path.endswith('.sqlite'):
            ont_path_sqlite = ont_path + '.sqlite'
            if isfilepath(ont_path_sqlite):
                return cls.tagsetses_from_path(ont_path_sqlite)
        raise ValueError(f"unsupported ont_path={ont_path!r}")
    return tagsets, ont_pfx_map
def apply_opts(self, opts):
    ''' Apply the command line options.
    '''
    options = self.options
    # value-bearing options: option string -> options attribute name
    value_opts = {
        '-d': 'dirpath',
        '-D': 'device',
        '-M': 'mbdb_path',
    }
    for opt, val in opts:
        with Pfx(opt):
            if opt == '-f':
                options.force = True
            elif opt in value_opts:
                setattr(options, value_opts[opt], val)
            else:
                raise GetoptError("unimplemented option")
    # the output directory must already exist
    if not isdirpath(options.dirpath):
        raise GetoptError(
            "output directory: not a directory: %r" % (options.dirpath,)
        )
def convert(
    self,
    dstpath,
    *,
    dstfmt=None,
    max_n=None,
    timespans=(),
    extra_opts=None,
    overwrite=False,
    use_data=False,
):
    ''' Transcode video to `dstpath` in FFMPEG compatible `dstfmt`.

        Parameters:
        * `dstpath`: the destination; `None` or a trailing-slash/directory
          path is completed with `self.filename(ext=dstfmt)`
        * `dstfmt`: optional output format, default `DEFAULT_MEDIAFILE_FORMAT`
        * `max_n`: if the destination exists, try numbered alternatives
          via `choose_free_path` up to `max_n`
        * `timespans`: optional `(start,end)` pairs to trim+concat
        * `extra_opts`: currently unused, accepted for interface compatibility
        * `overwrite`: allow ffmpeg to overwrite its output
        * `use_data`: no source path; incompatible with `timespans`

        Returns `True` on success, `False` if the destination already
        exists and no free path could be chosen.
    '''
    if dstfmt is None:
        dstfmt = DEFAULT_MEDIAFILE_FORMAT
    if use_data:
        srcpath = None
        if timespans:
            # BUGFIX: message previously named a nonexistent "do_copyto" flag
            raise ValueError(
                "%d timespans but use_data is true" % len(timespans))
    else:
        srcpath = self.path
        # stop path looking like a URL
        if not os.path.isabs(srcpath):
            srcpath = os.path.join('.', srcpath)
    if dstpath is None:
        dstpath = self.filename(ext=dstfmt)
    elif dstpath.endswith('/'):
        dstpath += self.filename(ext=dstfmt)
    elif isdirpath(dstpath):
        dstpath = joinpath(dstpath, self.filename(ext=dstfmt))
    # stop path looking like a URL
    if not os.path.isabs(dstpath):
        dstpath = os.path.join('.', dstpath)
    ok = True
    with Pfx(dstpath):
        if os.path.exists(dstpath):
            ok = False
            if max_n is not None:
                try:
                    dstpath = self.choose_free_path(dstpath, max_n)
                except ValueError as e:
                    error("file exists: %s", e)
                else:
                    ok = True
            else:
                error("file exists")
        if not ok:
            return ok
        if os.path.exists(dstpath):
            raise ValueError("dstpath exists")
        # NOTE: dstfmt was defaulted above, so this inference branch is
        # unreachable as written; kept for interface fidelity
        if dstfmt is None:
            _, ext = os.path.splitext(dstpath)
            if not ext:
                raise ValueError(
                    "can't infer output format from dstpath, no extension")
            dstfmt = ext[1:]
        fstags = self.fstags
        with fstags:
            # tag the output with the source metadata up front
            metatags = list(self.metadata.as_tags(prefix='beyonwiz'))
            fstags[dstpath].update(metatags)
            fstags.sync()
    # compute the metadata for the output format
    # which may be passed with the input arguments
    M = self.metadata
    with Pfx("metadata for dstformat %r", dstfmt):
        ffmeta_kw = dict(
            comment=f'Transcoded from {self.path!r} using ffmpeg.')
        for ffmeta, beymeta in FFMPEG_METADATA_MAPPINGS[dstfmt].items():
            with Pfx("%r->%r", beymeta, ffmeta):
                if beymeta is None:
                    continue
                elif isinstance(beymeta, str):
                    # direct metadata field lookup
                    ffmetavalue = M.get(beymeta, '')
                elif callable(beymeta):
                    # computed metadata value
                    ffmetavalue = beymeta(M)
                else:
                    raise RuntimeError(
                        "unsupported beymeta %s:%r" %
                        (type(beymeta).__name__, beymeta))
                assert isinstance(
                    ffmetavalue, str
                ), ("ffmetavalue should be a str, got %s:%r" %
                    (type(ffmetavalue).__name__, ffmetavalue))
                # BUGFIX: store the value computed above; the original
                # assigned beymeta(M), which raises TypeError whenever
                # beymeta is a str (str is not callable)
                ffmeta_kw[ffmeta] = ffmetavalue
    # set up the initial source path, options and metadata
    ffinopts = {
        'loglevel': 'repeat+error',
        ##'strict': None,
        ##'2': None,
    }
    ff = ffmpeg.input(srcpath, **ffinopts)
    if timespans:
        # trim each timespan from the input and concatenate them
        ffin = ff
        ff = ffmpeg.concat(
            *map(
                lambda timespan: ffin.trim(
                    start=timespan[0], end=timespan[1]),
                timespans))
    ff = ff.output(
        dstpath,
        format=dstfmt,
        metadata=list(map('='.join, ffmeta_kw.items())))
    if overwrite:
        ff = ff.overwrite_output()
    print('ffmpeg', *map(repr, ff.get_args()))
    ff.run()
    return ok
def file_by_tags(self, path: str, prune_inherited=False, no_link=False, do_remove=False):
    ''' Examine a file's tags.
        Where those tags imply a location, link the file to that location.
        Return the list of links made.

        Parameters:
        * `path`: the source path to file
        * `prune_inherited`: optional, default `False`:
          prune the inherited tags from the direct tags on the target
        * `no_link`: optional, default `False`;
          do not actually make the hard link, just report the target
        * `do_remove`: optional, default `False`;
          remove source files if successfully linked

        Note: if `path` is already linked to an implied location
        that location is also included in the returned list.

        The filing process is as follows:
        - for each target directory, initially `dirname(path)`,
          look for a filing map on tag `file_by_mapping`
        - for each directory in that mapping which matches a tag from `path`,
          queue it as an additional target directory
        - if there were no matching directories, file `path` at the current
          target directory under the filename returned by
          `{TAGGER_TAG_PREFIX_DEFAULT}.auto_name`
    '''
    # the two modes are mutually exclusive: removal requires linking
    if do_remove and no_link:
        raise ValueError("do_remove and no_link may not both be true")
    fstags = self.fstags
    # start the queue with the resolved `path`
    tagged = fstags[path]
    srcpath = tagged.filepath
    tags = tagged.all_tags
    # a queue of reference directories
    q = ListQueue((dirname(srcpath),))
    linked_to = []
    seen = set()
    # unrepeated() dedupes the queue by absolute path via `seen`
    for refdirpath in unrepeated(q, signature=abspath, seen=seen):
        with Pfx(refdirpath):
            # places to redirect this file
            mapping = self.file_by_mapping(refdirpath)
            interesting_tag_names = {tag.name for tag in mapping.keys()}
            # locate specific filing locations in the refdirpath
            refile_to = set()
            for tag_name in sorted(interesting_tag_names):
                with Pfx("tag_name %r", tag_name):
                    if tag_name not in tags:
                        continue
                    bare_tag = Tag(tag_name, tags[tag_name])
                    try:
                        target_dirs = mapping.get(bare_tag, ())
                    except TypeError as e:
                        # unhashable tag value cannot be a mapping key
                        warning(" %s not mapped (%s), skipping", bare_tag, e)
                        continue
                    if not target_dirs:
                        continue
                    # collect other filing locations
                    refile_to.update(target_dirs)
            # queue further locations if they are new
            if refile_to:
                new_refile_to = set(map(abspath, refile_to)) - seen
                if new_refile_to:
                    q.extend(new_refile_to)
                    continue
            # file locally (no new locations)
            dstbase = self.auto_name(srcpath, refdirpath, tags)
            with Pfx("%s => %s", refdirpath, dstbase):
                dstpath = dstbase if isabspath(dstbase) else joinpath(
                    refdirpath, dstbase)
                if existspath(dstpath):
                    if not samefile(srcpath, dstpath):
                        # a different file already occupies the target name
                        warning("already exists, skipping")
                        continue
                    # NOTE(review): when dstpath is already a hard link to
                    # srcpath we fall through; in the linking branch below
                    # os.link will then fail with EEXIST and only warn —
                    # confirm this matches the docstring's claim that an
                    # already-linked location is included in the result
                if no_link:
                    # report-only mode: record the would-be target
                    linked_to.append(dstpath)
                else:
                    linkto_dirpath = dirname(dstpath)
                    if not isdirpath(linkto_dirpath):
                        pfx_call(os.mkdir, linkto_dirpath)
                    try:
                        pfx_call(os.link, srcpath, dstpath)
                    except OSError as e:
                        warning("cannot link to %r: %s", dstpath, e)
                    else:
                        linked_to.append(dstpath)
                        # propagate the source tags onto the new link
                        fstags[dstpath].update(tags)
                        if prune_inherited:
                            fstags[dstpath].prune_inherited()
    if linked_to and do_remove:
        # only remove the source if a hard link actually exists elsewhere
        S = os.stat(srcpath)
        if S.st_nlink < 2:
            warning(
                "not removing %r, unsufficient hard links (%s)", srcpath,
                S.st_nlink)
        else:
            pfx_call(os.remove, srcpath)
    return linked_to
def rip(
    device,
    mbdb,
    *,
    output_dirpath,
    disc_id=None,
    fstags=None,
    no_action=False
):
    ''' Pull audio from `device` and save in `output_dirpath`.

        Parameters:
        * `device`: the CD device to read
        * `mbdb`: the MusicBrainz metadata database, supplying `mbdb.discs`
        * `output_dirpath`: top directory for the ripped files
        * `disc_id`: optional disc id; default read from `device` via discid
        * `fstags`: optional `FSTags` instance; a new one by default
        * `no_action`: optional, default `False`; print commands instead of
          running them
    '''
    if disc_id is None:
        dev_info = discid.read(device=device)
        disc_id = dev_info.id
    if fstags is None:
        fstags = FSTags()
    with Pfx("MB: discid %s", disc_id, print=True):
        disc = mbdb.discs[disc_id]
    # output layout: output_dirpath/artists/title[ (n of m)]/track files
    level1 = ", ".join(disc.artist_names).replace(os.sep, '_') or "NO_ARTISTS"
    level2 = disc.title or "UNTITLED"
    if disc.medium_count > 1:
        level2 += f" ({disc.medium_position} of {disc.medium_count})"
    subdir = joinpath(output_dirpath, level1, level2)
    if not isdirpath(subdir):
        with Pfx("makedirs(%r)", subdir, print=True):
            os.makedirs(subdir)
    # tag the album directory with the disc metadata
    fstags[subdir].update(
        TagSet(discid=disc.id, title=disc.title, artists=disc.artist_names)
    )
    for tracknum, recording_id in enumerate(disc.recordings, 1):
        recording = disc.ontology.metadata('recording', recording_id)
        track_fstags = TagSet(
            discid=disc.mbkey,
            artists=recording.artist_names,
            title=recording.title,
            track=tracknum
        )
        track_artists = ", ".join(recording.artist_names)
        # os.sep would break the filename, replace it
        track_base = f"{tracknum:02} - {recording.title} -- {track_artists}".replace(
            os.sep, '-'
        )
        wav_filename = joinpath(subdir, track_base + '.wav')
        mp3_filename = joinpath(subdir, track_base + '.mp3')
        if existspath(mp3_filename):
            warning("MP3 file already exists, skipping track: %r", mp3_filename)
        else:
            # rip into a temp file, then hard link into place so an
            # interrupted rip never leaves a partial .wav behind
            with NamedTemporaryFile(dir=subdir,
                                    prefix=f"cdparanoia--track{tracknum}--",
                                    suffix='.wav') as T:
                if existspath(wav_filename):
                    info("using existing WAV file: %r", wav_filename)
                else:
                    argv = ['cdparanoia', '-d', '1', '-w', str(tracknum), T.name]
                    if no_action:
                        print(*argv)
                    else:
                        with Pfx("+ %r", argv, print=True):
                            subprocess.run(argv, stdin=subprocess.DEVNULL, check=True)
                        with Pfx("%r => %r", T.name, wav_filename, print=True):
                            os.link(T.name, wav_filename)
            if no_action:
                print("fstags[%r].update(%s)" % (wav_filename, track_fstags))
            else:
                fstags[wav_filename].update(track_fstags)
                # NOTE(review): if the WAV already existed above, `argv`
                # was never assigned this iteration — this line then
                # raises NameError (first track) or records a stale
                # command from a previous track; confirm and guard
                fstags[wav_filename].rip_command = argv
            argv = [
                'lame', '-q', '7', '-V', '0',
                '--tt', recording.title or "UNTITLED",
                '--ta', track_artists or "NO ARTISTS",
                '--tl', level2,
                ## '--ty',recording year
                '--tn', str(tracknum),
                ## '--tg', recording genre
                ## '--ti', album cover filename
                wav_filename, mp3_filename
            ]
            if no_action:
                print(*argv)
            else:
                with Pfx("+ %r", argv, print=True):
                    subprocess.run(argv, stdin=subprocess.DEVNULL, check=True)
                fstags[mp3_filename].conversion_command = argv
        if no_action:
            print("fstags[%r].update(%s)" % (mp3_filename, track_fstags))
        else:
            fstags[mp3_filename].update(track_fstags)
    if not no_action:
        subprocess.run(['ls', '-la', subdir])
    # eject regardless of no_action
    os.system("eject")