Exemple #1
0
 def make_cbz(self, cbzpath=None):
   ''' Create a CBZ file from the images in the MOBI file.
       Return the path to the created CBZ file.

       Parameters:
       * `cbzpath`: optional output path; default is the basename of
         `self.path` with its extension replaced by `.cbz`

       Raises `ValueError` if `cbzpath` already exists.
       Returns `1` (an error indicator) if the `ZipFile` creation
       hits a `FileExistsError` despite the check above.
   '''
   if cbzpath is None:
     mobibase, mobiext = splitext(basename(self.path))
     cbzpath = mobibase + '.cbz'
   if existspath(cbzpath):
     raise ValueError("CBZ path %r already exists" % (cbzpath,))
   with self.extracted() as df:
     dirpath, rfilepath = df
     # the images land in this fixed subtree of the extraction
     imagepaths = sorted(glob(joinpath(dirpath, 'mobi8/OEBPS/Images/*.*')))
     info("write %s", cbzpath)
     try:
       # mode 'x': exclusive creation, fails if cbzpath appeared meanwhile
       with pfx_call(ZipFile, cbzpath, 'x', compression=ZIP_STORED) as cbz:
         for imagepath in imagepaths:
           pfx_call(cbz.write, imagepath, arcname=basename(imagepath))
     except FileExistsError as e:
       # fixed typo in the message: "eixsts" -> "exists"
       error("CBZ already exists: %r: %s", cbzpath, e)
       return 1
     except Exception:
       # remove any partial CBZ before propagating the failure
       if existspath(cbzpath):
         pfx_call(os.unlink, cbzpath)
       raise
   return cbzpath
Exemple #2
0
 def cmd_extract(self, argv):
   ''' Usage: {cmd} mobipath [outdir]
         Extract the contents of the MOBI file mobipath
         into the directory outdir, default based on the mobipath basename.
         Prints the outdir and the name of the top file.
   '''
   outdirpath = None
   if not argv:
     raise GetoptError("missing mobipath")
   mobipath = argv.pop(0)
   if argv:
     outdirpath = argv.pop(0)
   if argv:
     # fixed copy-paste error: this command's optional argument is
     # outdir, not cbzpath (message previously said "after cbzpath")
     raise GetoptError("extra arguments after outdir: %r" % (argv,))
   if not existspath(mobipath):
     raise GetoptError("mobipath does not exist: %r" % (mobipath,))
   if outdirpath is None:
     # default output directory: the mobipath basename sans extension
     outdirpath, mobiext = splitext(basename(mobipath))
   if existspath(outdirpath):
     raise GetoptError("outdir already exists: %s" % (outdirpath,))
   MB = Mobi(mobipath)
   extdirpath, rfilepath = MB.extract(outdirpath)
   assert extdirpath == outdirpath
   print(outdirpath)
   print(rfilepath)
Exemple #3
0
 def _autofile(path, *, tagger, no_link, do_remove):
     ''' Wrapper for `Tagger.file_by_tags` which reports actions.
         Return the list of paths the file was linked to.
     '''
     if no_link or existspath(path):
         fstags = tagger.fstags
         tagged = fstags[path]
         merged = tagged.merged_tags()
         # apply inferred tags which are not already present
         for name, value in tagger.infer(path).items():
             if name not in merged:
                 tagged[name] = value
         linked_to = tagger.file_by_tags(
             path, no_link=no_link, do_remove=do_remove
         )
         if not linked_to:
             pfxprint('not filed')
         else:
             # report each link target, abbreviating same-named files
             # to their containing directory
             for linkpath in linked_to:
                 shown = linkpath
                 if basename(path) == basename(shown):
                     shown = dirname(shown) + '/'
                 pfxprint('=>', shortpath(shown))
     else:
         warning("no such path, skipped")
         linked_to = []
     return linked_to
Exemple #4
0
 def cmd_resize(argv):
     ''' Usage: {cmd} vdipath new_size_mb
       Resize a .vdi file to new_size_mb, a size in megabytes.
 '''
     # first argument: the .vdi file to resize
     if not argv:
         raise GetoptError("missing vdi")
     vdipath = argv.pop(0)
     with Pfx("vdipath %r", vdipath):
         if not vdipath.endswith('.vdi'):
             raise GetoptError("does not end with .vdi")
         if not existspath(vdipath):
             raise GetoptError("does not exist")
     # second argument: the new size, a positive integer in megabytes
     if not argv:
         raise GetoptError("missing new_size_mb")
     new_size_mb_s = argv.pop(0)
     with Pfx("new_size_mb %r", new_size_mb_s):
         try:
             new_size_mb = int(new_size_mb_s)
         except ValueError as e:
             raise GetoptError("not an integer: %s" % (e,))
         if new_size_mb <= 0:
             raise GetoptError("must be >0")
     # perform the resize, reporting failure as an exit code
     try:
         return pfx_call(resizevdi, vdipath, new_size_mb, trace=True)
     except ValueError as e:
         error("resize fails: %s", e)
         return 1
Exemple #5
0
def pull(host, maildir, localmaildir, noop=False, verbose=False):
    ''' Pull messages from the remote `maildir` on `host` into the
        store under `localmaildir`, symlinking each pulled message
        into the matching cur/new folder.

        Parameters:
        * `host`: remote host for the ssh connection
        * `maildir`: remote maildir path
        * `localmaildir`: local maildir; messages land in its "store" subdir
        * `noop`: if true, report but do not write files
        * `verbose`: if true, also report found/existing messages

        Ported from Python-2-only syntax (`print` statements,
        `except OSError, e`) to match the Python 3 sibling `_pull`.
    '''
    localstore = expanduser(joinpath(localmaildir, "store"))

    # Get the list of mail we already have locally
    maildir_pattern = re.compile("^([0-9]+\\.[A-Za-z0-9]+)(\\.([.A-Za-z0-9-]+))*(:[2],([PRSTDF]*))*(.*)")
    localfiles = [
        maildir_pattern.match(f).group(1)
        for f in listdir(localstore) if maildir_pattern.match(f)
        ]

    # Make the ssh connection
    np = _SSH(host)

    # This command produces a list of all files in the maildir like:
    #   base-filename timestamp container-directory
    command = """echo {maildir}/{{cur,new}} | tr ' ' '\\n' | while read path ; do ls -1Ugo --time-style=+%s $path | sed -rne "s|[a-zA-Z-]+[ \t]+[0-9]+[ \t]+[0-9]+[ \t]+([0-9]+)[ \t]+([0-9]+\\.[A-Za-z0-9]+)(\\.([.A-Za-z0-9-]+))*(:[2],([PRSTDF]*))*|\\2 \\1 $path|p";done""".format(
        maildir=maildir
        )
    if verbose:
        print(command)
    stdout = np.cmd(command)
    lines = stdout.split("\n")
    maildir_ls = [line.split(" ") for line in lines if len(line.split(" ")) == 3]

    # If we get problems with not finding files in the local list it can
    # help to dump the local list, eg:
    #   with open("/tmp/mdlog", "w") as fd:
    #       fd.write("\n".join(localfiles))

    # Loop through the remote files checking the local copies
    for basefile, timestamp, container in maildir_ls:
        if basefile in localfiles:
            if verbose:
                print("found %s" % basefile)
        else:
            storefile = joinpath(localstore, basefile)
            if existspath(storefile):
                if verbose:
                    print("exists %s %s" % (basefile, storefile))
            else:
                print("pulling %s %s to %s" % (basefile, container, storefile))
                stdout = np.cmd("cat %s/%s*" % (container, basefile))
                if not noop and len(stdout) > 0:
                    with open(storefile, "w") as fd:
                        fd.write(stdout)
                    try:
                        # Now symlink the store file to the correct location
                        target = joinpath(
                            expanduser(localmaildir),
                            basename(container),
                            basefile
                            )
                        symlink(abspath(storefile), target)
                    except OSError as e:
                        if e.errno == 17:
                            # EEXIST: link already in place, fine
                            pass
                        else:
                            print("%s %s %s" % (e, storefile, target))
Exemple #6
0
def plex_linkpath(
    fstags, filepath, plex_topdirpath, do_hardlink=False, print=None
):
  ''' Link `filepath` into `plex_topdirpath`.

      Parameters:
      * `fstags`: the `FSTags` instance
      * `filepath`: filesystem pathname of file to link into Plex tree
      * `plex_topdirpath`: filesystem pathname of the Plex tree
      * `do_hardlink`: use a hard link if true, otherwise a softlink;
        default `False`
      * `print`: print function for the link action,
        default from `builtins.print`
  '''
  if print is None:
    print = builtins.print
  subpath = plex_subpath(fstags[filepath])
  plexpath = joinpath(plex_topdirpath, subpath)
  plexdirpath = dirname(plexpath)
  # decide whether the target is already correct; otherwise clear it
  if do_hardlink:
    if existspath(plexpath):
      if samefile(filepath, plexpath):
        # already hard linked to the same file
        return
      pfx_call(os.unlink, plexpath)
  else:
    # symlinks are made relative to the target's directory
    rfilepath = relpath(filepath, plexdirpath)
    if existspath(plexpath):
      try:
        sympath = os.readlink(plexpath)
      except OSError as e:
        warning("readlink(%r): %s", plexpath, e)
      else:
        if rfilepath == sympath:
          # existing symlink already points at the right place
          return
      pfx_call(os.unlink, plexpath)
  # report, ensure the directory, then make the link
  print(subpath, "<=", basename(filepath))
  if not isdirpath(plexdirpath):
    pfx_call(os.makedirs, plexdirpath)
  if do_hardlink:
    pfx_call(os.link, filepath, plexpath)
  else:
    pfx_call(os.symlink, rfilepath, plexpath)
Exemple #7
0
def _pull(store, localmaildir, noop=False, verbose=False, filterfile=None):
    ''' Pull remote messages from `store` into the local maildir store.

        Parameters:
        * `store`: remote store object supplying a `cmd` method
        * `localmaildir`: local maildir; messages land in its "store" subdir
        * `noop`: if true, report what would be pulled but write nothing
        * `verbose`: if true, also report found/existing messages
        * `filterfile`: optional filter specification file; when supplied,
          each pulled message is also run through the parsed filters
    '''
    localstore = expanduser(joinpath(localmaildir, "store"))

    # Get the list of mail we already have locally
    maildir_pattern = re.compile(
        "^([0-9]+\\.[A-Za-z0-9]+)(\\.([.A-Za-z0-9-]+))*(:[2],([PRSTDF]*))*(.*)"
        )
    localfiles = [
        maildir_pattern.match(f).group(1)
        for f in listdir(localstore) if maildir_pattern.match(f)
        ]

    # Read in the filters if we have them
    # (parser and folder are only needed when filters are active)
    mailfilters = parse_filter(filterfile) if filterfile else []
    mailparser = HeaderOnlyParser() if mailfilters else None
    mdfolder = MdFolder(
        basename(localmaildir),
        base=dirname(localmaildir)
        ) if mailfilters else None
    # Loop through the remote files checking the local copies
    for basefile, timestamp, container in _list_remote(store, localmaildir, verbose=verbose):
        if basefile in localfiles:
            if verbose:
                print("found %s" % basefile)
        else:
            storefile = joinpath(localstore, basefile)
            if existspath(storefile):
                if verbose:
                    print("exists %s %s" % (basefile, storefile))
            else:
                print("pulling %s %s to %s" % (basefile, container, storefile))
                stdout = store.cmd("cat %s/%s*" % (container, basefile), verbose=verbose)

                # an empty result means the remote cat produced nothing
                if verbose and len(stdout) < 1:
                    print("%s is an error" % storefile)

                if not noop and len(stdout) > 0:
                    with open(storefile, "w") as fd:
                        fd.write(stdout)
                    try:
                        # Now symlink the store file to the correct location
                        target = joinpath(
                            expanduser(localmaildir),
                            basename(container),
                            basefile
                            )
                        symlink(abspath(storefile), target)
                    except OSError as e:
                        if e.errno == 17:
                            # file exists
                            pass
                        else:
                            print("%s %s %s" % (e, storefile, target))

                    # If we have filters then we should pass the message object to them
                    list(_filter(stdout, mailparser, mailfilters, mdfolder))
Exemple #8
0
 def cmd_make_cbz(self, argv):
   ''' Usage: {cmd} mobipath [cbzpath]
         Unpack a MOBI file and construct a CBZ file.
         Prints the path of the CBZ file to the output.
         The default cbzpath is mobibase.cbz where mobibase is the
         basename of mobipath with its extension removed.
   '''
   if not argv:
     raise GetoptError("missing mobipath")
   mobipath = argv.pop(0)
   # default CBZ name: mobipath basename sans extension, plus .cbz
   mobibase, _ = splitext(basename(mobipath))
   cbzpath = argv.pop(0) if argv else mobibase + '.cbz'
   if argv:
     raise GetoptError("extra arguments after cbzpath: %r" % (argv,))
   if not existspath(mobipath):
     raise GetoptError("mobipath does not exist: %r" % (mobipath,))
   if existspath(cbzpath):
     raise GetoptError("CBZ already exists: %r" % (cbzpath,))
   MB = Mobi(mobipath)
   print(MB.make_cbz(cbzpath))
Exemple #9
0
 def extract(self, dirpath=None):
   ''' Extract the contents of the MOBI file into a directory.
       Return `(dirpath,rfilepath)` where `dirpath` is the extracted
       file tree and `rfilepath` is the relative pathname of the
       primary epub, html or pdf file depending on the mobi type.
   '''
   if dirpath is not None and existspath(dirpath):
     raise ValueError("dirpath %r already exists" % (dirpath,))
   # the mobi library sends some warnings etc to stdout: divert them
   with stackattrs(sys, stdout=sys.stderr):
     tmpdirpath, toppath = pfx_call(mobi.extract, self.path)
   rfilepath = relpath(toppath, tmpdirpath)
   if dirpath is None:
     # no target requested: hand back the extraction directory itself
     dirpath = tmpdirpath
   else:
     # move the extraction to the requested location
     pfx_call(os.rename, tmpdirpath, dirpath)
   return dirpath, rfilepath
Exemple #10
0
 def find_specfile(self):
     ''' Locate the most specific specification file matching our criteria.
         Return `None` if no file matches.
     '''
     # try candidates from most to least specific:
     # utility prefix, then terminal name, each with an empty fallback
     for utilpfx in (self.util_name, ''):
         for term in (self.term_name, ''):
             # NOTE(review): unlike utilpfx/term this is a one-element
             # tuple with no '' fallback -- confirm the asymmetry is
             # intended (the type suffix seems to be mandatory)
             for type_ in (self.type_name,):
                 base = utilpfx
                 if term:
                     base += '@' + term
                 if base or type_:
                     base += '.'
                 base += type_
                 if not base:
                     # nothing to look for with this combination
                     continue
                 candidate = joinpath(self.colors_dirpath, base)
                 if existspath(candidate):
                     return candidate
     return None
Exemple #11
0
 def new_datafile(self) -> DataFileState:
   ''' Create a new, empty datafile with a UUID based name.
       Return its `DataFileState`.
   '''
   # loop until a fresh filename is successfully created
   filename = None
   while filename is None:
     candidate = str(uuid4()) + self.DATA_DOT_EXT
     pathname = self.datapathto(candidate)
     if existspath(pathname):
       error("new datafile path already exists, retrying: %r", pathname)
       continue
     with Pfx(pathname):
       try:
         createpath(pathname)
       except OSError as e:
         if e.errno != errno.EEXIST:
           raise
         # lost a creation race: try another UUID
         error("new datafile path already exists")
         continue
     filename = candidate
   return self._filemap.add_path(filename)
Exemple #12
0
    def do_storecheck(self, subcmd, opts):
        """${cmd_name}: checks the store for files that may not be in the maildirs.
        """
        from os.path import basename
        from os.path import dirname
        from os.path import exists as existspath
        from os.path import islink
        from os.path import join as joinpath
        maildir = self.maildir
        storedir = joinpath(maildir, "store")

        found = []
        # every entry in cur/ and new/ must be a symlink resolving
        # to an existing file directly inside the store
        for scandir in (joinpath(maildir, "cur"), joinpath(maildir, "new")):
            for entry in os.listdir(scandir):
                filename = joinpath(scandir, entry)
                try:
                    assert islink(filename)
                    store_location = os.readlink(filename)
                    assert existspath(store_location) and dirname(store_location) == storedir
                except AssertionError:
                    # report as "folder/entry"
                    parts = filename.split("/")
                    print("%s was not a link into the store" % "/".join(parts[-2:]),
                          file=self.stdout)
                else:
                    found.append(basename(store_location))

        # anything in the store that no folder entry links to is orphaned
        for storefile in os.listdir(storedir):
            if storefile not in found:
                print(
                    "%s found in store but not folders" % joinpath("store", storefile),
                    file=self.stdout
                    )
Exemple #13
0
 def testfunc(testpath):
   # testpath is the top directory iff it contains the marker entry
   marker = joinpath(testpath, self.TOPDIR_MARKER_ENTRY)
   debug("probe %r", marker)
   return existspath(marker)
Exemple #14
0
                 designerex_path, {})
    renderToFile(joinpath(build_dir, "designer-extras.vcxproj.user"),
                 usertpl_path, {'configurations': ov_env})

    for folderName, folderId, path_sln in projects:
        pathprefix = relpath(dirname(path_sln),
                             normpath(dirname(abspath(outfile))))
        # Sets the correct value for OV_PATH_ROOT; It should be possible to set multiple values separated by newline
        # Unfortunately due to a bug, this is not always possible, see https://connect.microsoft.com/VisualStudio/feedback/details/727324/msvs10-c-deu-debugger-environment-variables-missing-linefeed
        # (This is about german VS 2010, but this is also happening on french VS2013)
        ov_env = {
            type: (None, 'OV_PATH_ROOT=' +
                   joinpath(dist_dir, folderName.lower(), type.upper()))
            for type in build_types
        }
        if not existspath(path_sln):
            print(path_sln, 'does not exist !')
            continue
        print('Parsing', path_sln)
        with open(path_sln, 'r') as f:
            for line in f:
                res = re.match(
                    'Project\("{([\dABCDEF-]+)}"\)\s+=\s+"([\w\-]+)",\s+"([\w\-\\\.]+)",\s+"{([\dABCDEF\-]+)}"',
                    line)
                if res:
                    slnId, projectName, projectPath, projectId = res.groups()
                    content = getContent(f, "EndProject\n")
                    newpath = joinpath(
                        pathprefix,
                        projectPath) if len(content) else projectPath
                    tab = ['Project("{%s}") = "%s", "%s", "{%s}"\n' % (slnId, projectName, newpath, projectId)] \
Exemple #15
0
    def file_by_tags(self,
                     path: str,
                     prune_inherited=False,
                     no_link=False,
                     do_remove=False):
        ''' Examine a file's tags.
        Where those tags imply a location, link the file to that location.
        Return the list of links made.

        Parameters:
        * `path`: the source path to file
        * `prune_inherited`: optional, default `False`:
          prune the inherited tags from the direct tags on the target
        * `no_link`: optional, default `False`;
          do not actually make the hard link, just report the target
        * `do_remove`: optional, default `False`;
          remove source files if successfully linked

        Note: if `path` is already linked to an implied location
        that location is also included in the returned list.

        The filing process is as follows:
        - for each target directory, initially `dirname(path)`,
          look for a filing map on tag `file_by_mapping`
        - for each directory in that mapping which matches a tag from `path`,
          queue it as an additional target directory
        - if there were no matching directories, file `path` at the current
          target directory under the filename
          returned by `{TAGGER_TAG_PREFIX_DEFAULT}.auto_name`
    '''
        if do_remove and no_link:
            raise ValueError("do_remove and no_link may not both be true")
        fstags = self.fstags
        # start the queue with the resolved `path`
        tagged = fstags[path]
        srcpath = tagged.filepath
        tags = tagged.all_tags
        # a queue of reference directories
        q = ListQueue((dirname(srcpath), ))
        linked_to = []
        seen = set()
        # visit each reference directory at most once;
        # `seen` accumulates the absolute paths already visited
        for refdirpath in unrepeated(q, signature=abspath, seen=seen):
            with Pfx(refdirpath):
                # places to redirect this file
                mapping = self.file_by_mapping(refdirpath)
                interesting_tag_names = {tag.name for tag in mapping.keys()}
                # locate specific filing locations in the refdirpath
                refile_to = set()
                for tag_name in sorted(interesting_tag_names):
                    with Pfx("tag_name %r", tag_name):
                        if tag_name not in tags:
                            continue
                        # compare on the bare (name,value) pair
                        bare_tag = Tag(tag_name, tags[tag_name])
                        try:
                            target_dirs = mapping.get(bare_tag, ())
                        except TypeError as e:
                            # unhashable tag value cannot be a mapping key
                            warning("  %s not mapped (%s), skipping", bare_tag,
                                    e)
                            continue
                        if not target_dirs:
                            continue
                        # collect other filing locations
                        refile_to.update(target_dirs)
                # queue further locations if they are new
                if refile_to:
                    new_refile_to = set(map(abspath, refile_to)) - seen
                    if new_refile_to:
                        q.extend(new_refile_to)
                        # defer filing here: the queued directories
                        # take precedence over this one
                        continue
                # file locally (no new locations)
                dstbase = self.auto_name(srcpath, refdirpath, tags)
                with Pfx("%s => %s", refdirpath, dstbase):
                    dstpath = dstbase if isabspath(dstbase) else joinpath(
                        refdirpath, dstbase)
                    if existspath(dstpath):
                        # NOTE(review): the docstring says an existing
                        # implied link is included in the result, but this
                        # branch does not append it -- confirm intended
                        if not samefile(srcpath, dstpath):
                            warning("already exists, skipping")
                        continue
                    if no_link:
                        # dry run: record the target without linking
                        linked_to.append(dstpath)
                    else:
                        linkto_dirpath = dirname(dstpath)
                        if not isdirpath(linkto_dirpath):
                            pfx_call(os.mkdir, linkto_dirpath)
                        try:
                            pfx_call(os.link, srcpath, dstpath)
                        except OSError as e:
                            warning("cannot link to %r: %s", dstpath, e)
                        else:
                            # tag the new link with the source's tags
                            linked_to.append(dstpath)
                            fstags[dstpath].update(tags)
                            if prune_inherited:
                                fstags[dstpath].prune_inherited()
        if linked_to and do_remove:
            S = os.stat(srcpath)
            if S.st_nlink < 2:
                # refuse to remove what appears to be the only hard link
                warning("not removing %r, unsufficient hard links (%s)",
                        srcpath, S.st_nlink)
            else:
                pfx_call(os.remove, srcpath)
        return linked_to
Exemple #16
0
def rip(
    device,
    mbdb,
    *,
    output_dirpath,
    disc_id=None,
    fstags=None,
    no_action=False
):
  ''' Pull audio from `device` and save in `output_dirpath`.

      Parameters:
      * `device`: the CD device to read with cdparanoia
      * `mbdb`: MusicBrainz database access object supplying `discs`
      * `output_dirpath`: top level directory for the ripped files
      * `disc_id`: optional disc id; default read from `device`
      * `fstags`: optional `FSTags` instance; default a fresh `FSTags()`
      * `no_action`: if true, print the commands instead of running them
  '''
  if disc_id is None:
    dev_info = discid.read(device=device)
    disc_id = dev_info.id
  if fstags is None:
    fstags = FSTags()
  with Pfx("MB: discid %s", disc_id, print=True):
    disc = mbdb.discs[disc_id]
  # output subdirectory: artist(s)/title, os.sep made safe
  level1 = ", ".join(disc.artist_names).replace(os.sep, '_') or "NO_ARTISTS"
  level2 = disc.title or "UNTITLED"
  if disc.medium_count > 1:
    level2 += f" ({disc.medium_position} of {disc.medium_count})"
  subdir = joinpath(output_dirpath, level1, level2)
  if not isdirpath(subdir):
    with Pfx("makedirs(%r)", subdir, print=True):
      os.makedirs(subdir)
  fstags[subdir].update(
      TagSet(discid=disc.id, title=disc.title, artists=disc.artist_names)
  )
  for tracknum, recording_id in enumerate(disc.recordings, 1):
    recording = disc.ontology.metadata('recording', recording_id)
    track_fstags = TagSet(
        discid=disc.mbkey,
        artists=recording.artist_names,
        title=recording.title,
        track=tracknum
    )
    track_artists = ", ".join(recording.artist_names)
    track_base = f"{tracknum:02} - {recording.title} -- {track_artists}".replace(
        os.sep, '-'
    )
    wav_filename = joinpath(subdir, track_base + '.wav')
    mp3_filename = joinpath(subdir, track_base + '.mp3')
    if existspath(mp3_filename):
      warning("MP3 file already exists, skipping track: %r", mp3_filename)
    else:
      # BUGFIX: the rip command was previously kept in `argv`, which was
      # unbound (NameError) when the first WAV already existed, or stale
      # from a previous track's lame command; track it explicitly.
      rip_argv = None
      with NamedTemporaryFile(dir=subdir,
                              prefix=f"cdparanoia--track{tracknum}--",
                              suffix='.wav') as T:
        if existspath(wav_filename):
          info("using existing WAV file: %r", wav_filename)
        else:
          rip_argv = ['cdparanoia', '-d', '1', '-w', str(tracknum), T.name]
          if no_action:
            print(*rip_argv)
          else:
            with Pfx("+ %r", rip_argv, print=True):
              subprocess.run(rip_argv, stdin=subprocess.DEVNULL, check=True)
            with Pfx("%r => %r", T.name, wav_filename, print=True):
              # hard link out of the tempfile before it is removed
              os.link(T.name, wav_filename)
      if no_action:
        print("fstags[%r].update(%s)" % (wav_filename, track_fstags))
      else:
        fstags[wav_filename].update(track_fstags)
        if rip_argv is not None:
          # record the rip command only when we actually ripped
          fstags[wav_filename].rip_command = rip_argv
      lame_argv = [
          'lame',
          '-q',
          '7',
          '-V',
          '0',
          '--tt',
          recording.title or "UNTITLED",
          '--ta',
          track_artists or "NO ARTISTS",
          '--tl',
          level2,
          ## '--ty',recording year
          '--tn',
          str(tracknum),
          ## '--tg', recording genre
          ## '--ti', album cover filename
          wav_filename,
          mp3_filename
      ]
      if no_action:
        print(*lame_argv)
      else:
        with Pfx("+ %r", lame_argv, print=True):
          subprocess.run(lame_argv, stdin=subprocess.DEVNULL, check=True)
      fstags[mp3_filename].conversion_command = lame_argv
    if no_action:
      print("fstags[%r].update(%s)" % (mp3_filename, track_fstags))
    else:
      fstags[mp3_filename].update(track_fstags)
  if not no_action:
    subprocess.run(['ls', '-la', subdir])
    os.system("eject")