Example #1
 def addLogEntry(self, entry, save=True):
     """This is called when a new log entry is created"""
     # create log directory if it doesn't exist
     # error will be thrown if this is not possible
     _busy = Purr.BusyIndicator()
     self._initIndexDir()
     # discard temporary watchers -- these are only used to keep track of
     # deleted files
     self.temp_watchers = {}
     # ignored entries are only there to carry info on ignored data products
     # All we do is save them, and update DP policies based on them
     if entry.ignore:
         entry.save(self.logdir)
     # proper entries are added to list
     else:
         self.entries.append(entry)
         Purr.progressMessage("Saving new log entry")
         # find previous entry -- skip over "ignore" entries
         for prev in self.entries[-2::-1]:
             if not prev.ignore:
                 break
         else:
             prev = None
         entry.setLogDirectory(self.logdir)
         entry.setPrevUpNextLinks(prev=prev, up=os.path.join("..", Purr.RenderIndex.INDEX))
         entry.save()
         self.timestamp = self.last_scan_timestamp
         # regenerate links of previous entry
         if prev:
             prev.generateIndex()
         # and our log may need to be regenerated
         if save:
             self.save()
     self.updatePoliciesFromEntry(entry, new=True)
Example #2
 def addLogEntry(self, entry, save=True):
     """This is called when a new log entry is created"""
     # create log directory if it doesn't exist
     # error will be thrown if this is not possible
     _busy = Purr.BusyIndicator()
     self._initIndexDir()
     # discard temporary watchers -- these are only used to keep track of
     # deleted files
     self.temp_watchers = {}
     # ignored entries are only there to carry info on ignored data products
     # All we do is save them, and update DP policies based on them
     if entry.ignore:
         entry.save(self.logdir)
     # proper entries are added to list
     else:
         self.entries.append(entry)
         Purr.progressMessage("Saving new log entry")
         # find previous entry -- skip over "ignore" entries
         for prev in self.entries[-2::-1]:
             if not prev.ignore:
                 break
         else:
             prev = None
         entry.setLogDirectory(self.logdir)
         entry.setPrevUpNextLinks(prev=prev,
                                  up=os.path.join("..",
                                                  Purr.RenderIndex.INDEX))
         entry.save()
         self.timestamp = self.last_scan_timestamp
         # regenerate links of previous entry
         if prev:
             prev.generateIndex()
         # and our log may need to be regenerated
         if save:
             self.save()
     self.updatePoliciesFromEntry(entry, new=True)
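The backwards scan above relies on Python's for/else clause: the else branch runs only when the loop finishes without hitting break. A minimal, self-contained sketch of the same idiom (the Entry class here is a stand-in, not the real Purr.LogEntry):

    class Entry:
        def __init__(self, name, ignore=False):
            self.name = name
            self.ignore = ignore

    entries = [Entry("a"), Entry("b", ignore=True), Entry("c")]
    # scan backwards from the second-to-last element, skipping "ignore" entries
    for prev in entries[-2::-1]:
        if not prev.ignore:
            break  # found a proper predecessor; the else clause is skipped
    else:
        prev = None  # loop exhausted without break: no predecessor exists
    print(prev and prev.name)  # -> a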
Example #3
 def makeDataProducts(self, files, unbanish=False, unignore=False):
     """makes a list of DPs from a list of (filename,quiet) pairs.
     If unbanish is False, DPs with a default "banish" policy will be skipped.
     Symlinks will be resolved, and non-unique filenames removed from list.
     """
     paths = set()
     dps = []
     for filename, quiet in files:
         filename = filename.rstrip('/')
         sourcepath = Purr.canonizePath(filename)
         if sourcepath not in paths:
             paths.add(sourcepath)
             filename = os.path.basename(filename)
             policy, filename, comment = self._default_dp_props.get(
                 filename, ("copy", filename, ""))
             dprintf(4, "%s: default policy is %s,%s,%s\n", sourcepath,
                     policy, filename, comment)
             if policy == "banish":
                 if unbanish:
                     policy = "copy"
                 else:
                     continue
             if unignore and policy == "ignore":
                 policy = "copy"
             dps.append(
                 Purr.DataProduct(filename=filename,
                                  sourcepath=sourcepath,
                                  policy=policy,
                                  comment=comment,
                                  quiet=quiet))
     return sorted(dps, key=lambda dp: dp.filename)
Example #4
 def addWatchedDirectory(self, dirname, watching=Purr.WATCHED, save_config=True):
     """Starts watching the specified directories for changes"""
     # see if we're alredy watching this exact set of directories -- do nothing if so
     dirname = Purr.canonizePath(dirname)
     # do nothing if already watching
     if dirname in self.watched_dirs:
         dprint(1, "addWatchDirectory(): already watching %s\n", dirname)
         # watching=None means do not change the watch-state
         if watching is None:
             return
     else:
         if watching is None:
             watching = Purr.WATCHED
         # make watcher object
         wdir = Purrer.WatchedDir(dirname, mtime=self.timestamp,
                                  watch_patterns=self._watch_patterns, ignore_patterns=self._ignore_patterns)
         # fileset=None indicates error reading directory, so ignore it
         if wdir.fileset is None:
             print("There was an error reading the directory %s, will stop watching it." % dirname)
             self.setWatchingState(dirname, Purr.REMOVED, save_config=True)
             return
         self.watchers[dirname] = wdir
         self.watched_dirs.append(dirname)
         dprintf(2, "watching directory %s, mtime %s, %d files\n",
                 dirname, time.strftime("%x %X", time.localtime(wdir.mtime)), len(wdir.fileset))
         # find files in this directory matching the watch_patterns, and watch them for changes
         watchset = set()
         for patt in self._watch_patterns:
             watchset.update(fnmatch.filter(wdir.fileset, patt))
         for fname in watchset:
             quiet = matches_patterns(fname, self._quiet_patterns)
             fullname = Purr.canonizePath(os.path.join(dirname, fname))
             if fullname not in self.watchers:
                 wfile = Purrer.WatchedFile(fullname, quiet=quiet, mtime=self.timestamp)
                 self.watchers[fullname] = wfile
                 dprintf(3, "watching file %s, timestamp %s, quiet %d\n",
                         fullname, time.strftime("%x %X", time.localtime(wfile.mtime)), quiet)
         # find subdirectories matching the subdir_patterns, and watch them for changes
         for fname in wdir.fileset:
             fullname = Purr.canonizePath(os.path.join(dirname, fname))
             if os.path.isdir(fullname):
                 for desc, dir_patts, canary_patts in self._subdir_patterns:
                     if matches_patterns(fname, dir_patts):
                         quiet = matches_patterns(fname, self._quiet_patterns)
                         wdir = Purrer.WatchedSubdir(fullname, canary_patterns=canary_patts, quiet=quiet,
                                                     mtime=self.timestamp)
                         self.watchers[fullname] = wdir
                         dprintf(3, "watching subdirectory %s/{%s}, timestamp %s, quiet %d\n",
                                 fullname, ",".join(canary_patts),
                                 time.strftime("%x %X", time.localtime(wdir.mtime)), quiet)
                         break
     # set state and save config
     self.setWatchingState(dirname, watching, save_config=save_config)
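A self-contained sketch of the pattern-matching step above: collect the files in a directory that match any of the watch patterns via fnmatch.filter. The directory and patterns are illustrative, not taken from a real Purr configuration:

    import fnmatch
    import os

    watch_patterns = ["*.fits", "*.log"]
    fileset = os.listdir(".")
    watchset = set()
    for patt in watch_patterns:
        watchset.update(fnmatch.filter(fileset, patt))
    print(sorted(watchset))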
Example #5
  def regenerate (self):
    Purr.progressMessage("reading %s"%self.dp.filename,sub=True);
    # open the FITS file and read the primary header
    fitsfile = pyfits.open(self.dp.fullpath);
    header = fitsfile[0].header;

    dprintf(3,"beginning render of",self.dp.fullpath); t0 = time.time();
    # write out FITS header
    self.headerfile,path,uptodate = self.subproduct("-fitsheader.html");
    if not uptodate:
      title = "FITS header for %s"%self.dp.filename;
      html = """<HTML><BODY><TITLE>%s</TITLE>
      <H2>%s</H2>
      <PRE>"""%(title,title);
      for line in header.ascard:
        line = str(line).replace("<","&lt;").replace(">","&gt;");
        html += line+"\n";
      html += """
      </PRE></BODY></HTML>\n""";
      try:
        file(path,"w").write(html);
      except:
        print "Error writing file %s"%path;
        traceback.print_exc();
        self.headerfile = None;

    # figure out number of images to include
    ndim = header['NAXIS'];
    fitsshape = [ header['NAXIS%d'%i] for i in range(1,ndim+1) ];
    self.cubesize = 'x'.join(map(str,fitsshape));
    if ndim < 2:
      raise TypeError,"can't render one-dimensional FITS files";
    elif ndim == 2:
      fitsdata_to_images = lambda fdata:[fdata];
      nplanes = 1;
    else:
      ax1 = ax2 = None;
      # find the X/Y axes, by looking at CTYPEx
      # note that the array axes are in reverse order. I.e. if X is FITS axis 1 and Y is axis 2,
      # the array will be of e.g. shape 1,1,NY,NX, while fitsshape is [NX,NY,1,1]
      for i in range(1,ndim+1):
        ctype = header['CTYPE%d'%i];
        if [ prefix for prefix in "RA","GLON","ELON","HLON","SLON" if ctype.startswith(prefix) ] \
            or ctype in ("L","X"):
          ax1 = ndim-i;
        elif [ prefix for prefix in "DEC","GLAT","ELAT","HLAT","SLAT" if ctype.startswith(prefix) ] \
            or ctype in ("M","Y"):
          ax2 = ndim-i;
Example #6
 def __init__(self, filename=None, sourcepath=None, fullpath=None,
              policy="copy", comment="",
              timestamp=None, render=None,
              quiet=False, archived=False):
     # This is the absolute pathname to the original data product
     self.sourcepath = Purr.canonizePath(sourcepath)
     # Base filename (w/o path) of data product within the log storage area.
     # Products may be renamed when they are moved or copied over to the log.
     self.filename = filename or (sourcepath and os.path.basename(sourcepath))
     # Full path to the DP within the log storage area.
     # This is None until a DP has been saved.
     self.fullpath = fullpath
     # Handling policy for DP: "copy","move","ignore", etc.
     self.policy = policy
     # Comment associated with DP
     self.comment = comment
     # Once a DP has been saved, this is the timestamp of data product at time of copy
     self.timestamp = timestamp
     # Name of renderer used to render this DP.
     self.render = render
     # if True, DP is watched quietly (i.e. Purr does not pop up windows on update)
     self.quiet = quiet
     # if True, DP has already been archived. This is False for new DPs until they're saved.
     self.archived = archived
     # if True, DP is ignored (policy is "ignore" or "banish")
     # note that policy should not be changed after a DP has been created
     self.ignored = policy in ("ignore", "banish")
Example #7
 def _handle_start_DP(self,
                      filename=None,
                      src=None,
                      policy=None,
                      quiet=False,
                      timestamp=0,
                      comment=None,
                      render=None,
                      **kw):
     # dispense with previous DP tag, if any
     self._add_data_product()
     # setup data for this tag
     comment = comment or ""
     try:
         timestamp = float(timestamp)
     except:
         timestamp = time.time()
     if not isinstance(quiet, bool):
         try:
             quiet = bool(int(quiet))
         except:
             quiet = bool(quiet)
     comment = comment.replace("&lt;", "<").replace("&gt;", ">")
     self._new_dp = Purr.DataProduct(filename=filename,
                                     sourcepath=src,
                                     timestamp=timestamp,
                                     comment=comment,
                                     fullpath=os.path.join(
                                         self._dirname, filename or ""),
                                     policy=policy,
                                     render=render,
                                     quiet=quiet,
                                     archived=True)
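The manual replace() above only reverses the "&lt;" and "&gt;" escapes. If broader entity handling were ever needed, the standard library covers it; a sketch of that alternative (not what the parser currently does):

    import html

    comment = "flux &lt; 1 Jy &amp; rising"
    print(html.unescape(comment))  # -> flux < 1 Jy & rising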
Example #8
 def save(self, refresh=False):
     """Saves the log.
     If refresh is true, all entries are re-saved and everything is regenerated from scratch.
     """
     # create directory if it doesn't exist
     # error will be thrown if this is not possible
     _busy = Purr.BusyIndicator()
     Purr.progressMessage("Generating index in %s" % self.logdir)
     self._initIndexDir()
     # if refresh is True, re-save all entries.
     if refresh:
         refresh = time.time()
         for i, entry in enumerate(self.entries):
             entry.save(refresh=refresh)
     Purr.RenderIndex.writeLogIndex(self.logdir, self.logtitle, self.timestamp, self.entries, refresh=refresh)
     Purr.progressMessage("Wrote %s" % self.logdir)
Example #9
 def makeDataProducts(self, files, unbanish=False, unignore=False):
     """makes a list of DPs from a list of (filename,quiet) pairs.
     If unbanish is False, DPs with a default "banish" policy will be skipped.
     Symlinks will be resolved, and non-unique filenames removed from list.
     """
     paths = set()
     dps = []
     for filename, quiet in files:
         filename = filename.rstrip('/')
         sourcepath = Purr.canonizePath(filename)
         if sourcepath not in paths:
             paths.add(sourcepath)
             filename = os.path.basename(filename)
             policy, filename, comment = self._default_dp_props.get(filename, ("copy", filename, ""))
             dprintf(4, "%s: default policy is %s,%s,%s\n", sourcepath, policy, filename, comment)
             if policy == "banish":
                 if unbanish:
                     policy = "copy"
                 else:
                     continue
             if unignore and policy == "ignore":
                 policy = "copy"
             dps.append(Purr.DataProduct(filename=filename, sourcepath=sourcepath,
                                         policy=policy, comment=comment, quiet=quiet))
     return sorted(dps, key=lambda dp: dp.filename)
Example #10
 def __init__(self,
              filename=None,
              sourcepath=None,
              fullpath=None,
              policy="copy",
              comment="",
              timestamp=None,
              render=None,
              quiet=False,
              archived=False):
     # This is the absolute pathname to the original data product
     self.sourcepath = Purr.canonizePath(sourcepath)
     # Base filename (w/o path) of data product within the log storage area.
     # Products may be renamed when they are moved or copied over to the log.
     self.filename = filename or (sourcepath
                                  and os.path.basename(sourcepath))
     # Full path to the DP within the log storage area.
     # This is None until a DP has been saved.
     self.fullpath = fullpath
     # Handling policy for DP: "copy","move","ignore", etc.
     self.policy = policy
     # Comment associated with DP
     self.comment = comment
     # Once a DP has been saved, this is the timestamp of data product at time of copy
     self.timestamp = timestamp
     # Name of renderer used to render this DP.
     self.render = render
     # if True, DP is watched quietly (i.e. Purr does not pop up windows on update)
     self.quiet = quiet
     # if True, DP has already been archived. This is False for new DPs until they're saved.
     self.archived = archived
     # if True, DP is ignored (policy is "ignore" or "banish")
     # note that policy should not be changed after a DP has been created
     self.ignored = policy in ("ignore", "banish")
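A hypothetical usage sketch for the constructor above, assuming the Purr package is importable; the path and comment are made up:

    import Purr  # assumes the Purr package is on the path

    dp = Purr.DataProduct(sourcepath="/data/run1/image.fits",
                          policy="copy",
                          comment="first calibration image")
    assert dp.filename == "image.fits"
    assert not dp.ignored and not dp.archived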
Example #11
 def save(self, refresh=False):
     """Saves the log.
     If refresh is true, all entries are re-saved and everything is regenerated from scratch.
     """
     # create directory if it doesn't exist
     # error will be thrown if this is not possible
     _busy = Purr.BusyIndicator()
     Purr.progressMessage("Generating index in %s" % self.logdir)
     self._initIndexDir()
     # if refresh is True, re-save all entries.
     if refresh:
         refresh = time.time()
         for i, entry in enumerate(self.entries):
             entry.save(refresh=refresh)
     Purr.RenderIndex.writeLogIndex(self.logdir,
                                    self.logtitle,
                                    self.timestamp,
                                    self.entries,
                                    refresh=refresh)
     Purr.progressMessage("Wrote %s" % self.logdir)
Example #12
 def attachPurrlog(self, purrlog, watchdirs=[]):
     """Attaches Purr to the given purrlog directory. Arguments are passed to Purrer object as is."""
     # check purrer stack for a Purrer already watching this directory
     dprint(1, "attaching to purrlog", purrlog)
     for i, purrer in enumerate(self.purrer_stack):
         if os.path.samefile(purrer.logdir, purrlog):
             dprint(1, "Purrer object found on stack (#%d),reusing\n", i)
             # found? move to front of stack
             self.purrer_stack.pop(i)
             self.purrer_stack.insert(0, purrer)
             # update purrer with watched directories, in case they have changed
             for dd in (watchdirs or []):
                 purrer.addWatchedDirectory(dd, watching=None)
             break
     # no purrer found, make a new one
     else:
         dprint(1, "creating new Purrer object")
         try:
             purrer = Purr.Purrer(purrlog, watchdirs)
         except Purr.Purrer.LockedError as err:
             # check that we could attach, display message if not
             QMessageBox.warning(
                 self, "Catfight!",
                 """<P><NOBR>It appears that another PURR process (%s)</NOBR>
       is already attached to <tt>%s</tt>, so we're not allowed to touch it. You should exit the other PURR
       process first.</P>""" % (err.args[0], os.path.abspath(purrlog)),
                 QMessageBox.Ok, 0)
             return False
         except Purr.Purrer.LockFailError as err:
             QMessageBox.warning(
                 self, "Failed to obtain lock",
                 """<P><NOBR>PURR was unable to obtain a lock</NOBR>
       on directory <tt>%s</tt> (error was "%s"). The most likely cause is insufficient permissions.</P>"""
                 % (os.path.abspath(purrlog), err.args[0]), QMessageBox.Ok,
                 0)
             return False
         self.purrer_stack.insert(0, purrer)
         # discard end of stack
         self.purrer_stack = self.purrer_stack[:3]
         # attach signals
         self.connect(purrer, SIGNAL("disappearedFile"),
                      self.new_entry_dialog.dropDataProducts)
         self.connect(purrer, SIGNAL("disappearedFile"),
                      self.view_entry_dialog.dropDataProducts)
     # reopen Purr pipes
     self.purrpipes = {}
     for dd, state in purrer.watchedDirectories():
         self.purrpipes[dd] = Purr.Pipe.open(dd)
     # if the current purrer has changed, update our state
     if purrer is not self.purrer:
         self.message("Attached to %s" % purrer.logdir, ms=10000)
         dprint(1, "current Purrer changed, updating state")
         # set window title
         path = Kittens.utils.collapseuser(os.path.join(purrer.logdir, ''))
         self.setWindowTitle("PURR - %s" % path)
         # other init
         self.purrer = purrer
         self.new_entry_dialog.hide()
         self.new_entry_dialog.reset()
         dirs = [path for path, state in purrer.watchedDirectories()]
         self.new_entry_dialog.setDefaultDirs(*dirs)
         self.view_entry_dialog.setDefaultDirs(*dirs)
         self.view_entry_dialog.hide()
         self.viewer_dialog.hide()
         self._viewing_ientry = None
         self._setEntries(self.purrer.getLogEntries())
         self._viewer_timestamp = None
         self._updateViewer()
         self._updateNames()
         # update directory widgets
         self.wdirlist.clear()
         for pathname, state in purrer.watchedDirectories():
             self.wdirlist.add(pathname, state)
         # reset _pounce to False -- this will cause _checkPounceStatus() to force a rescan
         self._pounce = False
         self._checkPounceStatus()
     return True
Example #13
 def save(self, dirname=None, refresh=0, refresh_index=True, emit_message=True):
     """Saves entry in the given directory. Data products will be copied over if not
     residing in that directory.
     'refresh' is a timestamp, passed to renderIndex(), causing all data products OLDER than the specified time to be regenerated.
     'refresh_index', if true, causes index files to be re-rendered unconditionally
     """
     if not refresh and not self.updated:
         return
     timestr = time.strftime("%Y%m%d-%H%M%S", time.localtime(self.timestamp))
     Purr.progressMessage("Rendering entry for %s" % timestr)
     if dirname:
         self.pathname = pathname = os.path.join(dirname, "%s-%s" %
                                                 (("ignore" if self.ignore else "entry"), timestr))
     elif not self.pathname:
         raise ValueError("Cannot save entry: pathname not specified")
     else:
         pathname = self.pathname
     # set timestamp
     if not self.timestamp:
         self.timestamp = int(time.time())
     # get canonized path to output directory
     pathname = Purr.canonizePath(pathname)
     if not os.path.exists(pathname):
         os.mkdir(pathname)
     # now save content
     # get device of pathname -- need to know whether we move or copy
     devnum = os.stat(pathname).st_dev
     # copy data products as needed
     dprintf(2, "saving entry %s, %d data products\n", pathname, len(self.dps))
     dps = []
     for dp in self.dps:
         # if archived, this indicates a previously saved data product, so ignore it
         # if ignored, no need to save the DP -- but keep it in list
         if dp.archived or dp.ignored:
             dprintf(3, "dp %s is archived or ignored, skipping\n", dp.sourcepath)
             dps.append(dp)
             continue
         # file missing for some reason (perhaps it was removed?) -- skip this data product entirely
         if not os.path.exists(dp.sourcepath):
             dprintf(2, "data product %s missing, ignoring\n", dp.sourcepath)
             continue
         Purr.progressMessage("archiving %s" % dp.filename, sub=True)
         # get normalized source and destination paths
         dprintf(2, "data product: %s, rename %s, policy %s\n", dp.sourcepath, dp.filename, dp.policy)
         sourcepath = Purr.canonizePath(dp.sourcepath)
         destname = dp.fullpath = os.path.join(pathname, dp.filename)
         dprintf(2, "data product: %s -> %s\n", sourcepath, destname)
         # does the destination product already exist? skip if same file, else remove
         if os.path.exists(destname):
             if os.path.samefile(destname, sourcepath):
                 dprintf(2, "same file, skipping\n")
                 dp.timestamp = os.path.getmtime(destname)
                 dps.append(dp)
                 continue
             if os.system("/bin/rm -fr '%s'" % destname):
                 print("Error removing %s, which is in the way of %s" % (destname, sourcepath))
                 print("This data product is not saved.")
                 continue
         # for directories, compress with tar
         if os.path.isdir(sourcepath):
             sourcepath = sourcepath.rstrip('/')
             if dp.policy == "copy" or dp.policy.startswith("move"):
                 dprintf(2, "archiving to tgz\n")
                 if os.system("tar zcf '%s' -C '%s' '%s'" % (destname,
                                                             os.path.dirname(sourcepath),
                                                             os.path.basename(sourcepath))):
                     print("Error archiving %s to %s" % (sourcepath, destname))
                     print("This data product is not saved.")
                     continue
                 if dp.policy.startswith("move"):
                     os.system("/bin/rm -fr '%s'" % sourcepath)
         # else just a file
         else:
             # now copy/move it over
             if dp.policy == "copy":
                 dprintf(2, "copying\n")
                 if _copy_update(sourcepath, destname):
                     print("Error copying %s to %s" % (sourcepath, destname))
                     print("This data product is not saved.")
                     continue
             elif dp.policy.startswith('move'):
                 if _move_update(sourcepath, destname):
                     print("Error moving %s to %s" % (sourcepath, destname))
                     print("This data product is not saved.")
                     continue
         # success, set timestamp and append
         dp.timestamp = os.path.getmtime(destname)
         dp.archived = True
         dps.append(dp)
     # reset list of data products
     self.dps = dps
     # now write out content
     self.cached_include = os.path.join(pathname, 'index.include.html')
     self.cached_include_valid = False
     self.index_file = os.path.join(pathname, "index.html")
     self.generateIndex(refresh=refresh, refresh_index=refresh_index and time.time())
     self.updated = False
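_copy_update() and _move_update() are called above but not shown. A plausible minimal sketch of the copy variant, assuming (as the call sites imply) that these helpers return a true value on error and false on success; the real implementations may differ:

    import os
    import shutil

    def _copy_update(sourcepath, destname):
        """Copy sourcepath to destname, unless destname already exists
        and is no older than the source. Returns 1 on error, 0 on success."""
        try:
            if (not os.path.exists(destname)
                    or os.path.getmtime(destname) < os.path.getmtime(sourcepath)):
                shutil.copy2(sourcepath, destname)
            return 0
        except OSError:
            return 1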
Example #14
 def addWatchedDirectory(self,
                         dirname,
                         watching=Purr.WATCHED,
                         save_config=True):
     """Starts watching the specified directories for changes"""
     # see if we're alredy watching this exact set of directories -- do nothing if so
     dirname = Purr.canonizePath(dirname)
     # do nothing if already watching
     if dirname in self.watched_dirs:
         dprint(1, "addWatchDirectory(): already watching %s\n", dirname)
         # watching=None means do not change the watch-state
         if watching is None:
             return
     else:
         if watching is None:
             watching = Purr.WATCHED
         # make watcher object
         wdir = Purrer.WatchedDir(dirname,
                                  mtime=self.timestamp,
                                  watch_patterns=self._watch_patterns,
                                  ignore_patterns=self._ignore_patterns)
         # fileset=None indicates error reading directory, so ignore it
         if wdir.fileset is None:
             print(
                 "There was an error reading the directory %s, will stop watching it."
                 % dirname)
             self.setWatchingState(dirname, Purr.REMOVED, save_config=True)
             return
         self.watchers[dirname] = wdir
         self.watched_dirs.append(dirname)
         dprintf(2, "watching directory %s, mtime %s, %d files\n", dirname,
                 time.strftime("%x %X", time.localtime(wdir.mtime)),
                 len(wdir.fileset))
         # find files in this directory matching the watch_patterns, and watch them for changes
         watchset = set()
         for patt in self._watch_patterns:
             watchset.update(fnmatch.filter(wdir.fileset, patt))
         for fname in watchset:
             quiet = matches_patterns(fname, self._quiet_patterns)
             fullname = Purr.canonizePath(os.path.join(dirname, fname))
             if fullname not in self.watchers:
                 wfile = Purrer.WatchedFile(fullname,
                                            quiet=quiet,
                                            mtime=self.timestamp)
                 self.watchers[fullname] = wfile
                 dprintf(
                     3, "watching file %s, timestamp %s, quiet %d\n",
                     fullname,
                     time.strftime("%x %X",
                                   time.localtime(wfile.mtime)), quiet)
         # find subdirectories matching the subdir_patterns, and watch them for changes
         for fname in wdir.fileset:
             fullname = Purr.canonizePath(os.path.join(dirname, fname))
             if os.path.isdir(fullname):
                 for desc, dir_patts, canary_patts in self._subdir_patterns:
                     if matches_patterns(fname, dir_patts):
                         quiet = matches_patterns(fname,
                                                  self._quiet_patterns)
                         wdir = Purrer.WatchedSubdir(
                             fullname,
                             canary_patterns=canary_patts,
                             quiet=quiet,
                             mtime=self.timestamp)
                         self.watchers[fullname] = wdir
                         dprintf(
                             3,
                             "watching subdirectory %s/{%s}, timestamp %s, quiet %d\n",
                             fullname, ",".join(canary_patts),
                             time.strftime("%x %X",
                                           time.localtime(wdir.mtime)),
                             quiet)
                         break
     # set state and save config
     self.setWatchingState(dirname, watching, save_config=save_config)
Example #15
 def _attach(self, purrlog, watchdirs=None):
     """Attaches Purr to a purrlog directory, and loads content.
     Returns False if nothing new has been loaded (because directory is the same),
     or True otherwise."""
     purrlog = os.path.abspath(purrlog)
     dprint(1, "attaching to purrlog", purrlog)
     self.logdir = purrlog
     self.indexfile = os.path.join(self.logdir, "index.html")
     self.logtitle = "Unnamed log"
     self.timestamp = self.last_scan_timestamp = time.time()
     self._initIndexDir()
     # reset internal state
     self.ignorelistfile = None
     self.autopounce = False
     self.watched_dirs = []
     self.entries = []
     self._default_dp_props = {}
     self.watchers = {}
     self.temp_watchers = {}
     self.attached = False
     self._watching_state = {}
     # check that we hold a lock on the directory
     self.lockfile = os.path.join(self.logdir, ".purrlock")
     # try to open lock file for r/w
     try:
         self.lockfile_fd = os.open(self.lockfile, os.O_RDWR | os.O_CREAT)
     except:
         raise Purrer.LockFailError(
             "failed to open lock file %s for writing" % self.lockfile)
     # try to acquire lock on the lock file
     try:
         fcntl.lockf(self.lockfile_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
     except:
         other_lock = os.fdopen(self.lockfile_fd, 'r').read()
         self.lockfile_fd = None
         raise Purrer.LockedError(other_lock)
     # got lock, write our ID to the lock file
     global _lockstring
     self.lockfile_fobj = os.fdopen(self.lockfile_fd, 'w')
     self.lockfile_fobj.write(_lockstring)
     self.lockfile_fobj.flush()
     os.fsync(self.lockfile_fd)
     # load log state if log directory already exists
     if os.path.exists(self.logdir):
         _busy = Purr.BusyIndicator()
         if os.path.exists(self.indexfile):
             try:
                 parser = Purr.Parsers.LogIndexParser()
                 for line in open(self.indexfile):
                     parser.feed(line)
                 self.logtitle = parser.title or self.logtitle
                 self.timestamp = parser.timestamp or self.timestamp
                 dprintf(
                     2, "attached log '%s', timestamp %s\n", self.logtitle,
                     time.strftime("%x %X", time.localtime(self.timestamp)))
             except:
                 traceback.print_exc()
                 print("Error parsing %s, reverting to defaults" %
                       self.indexfile)
         # load log entries
         entries = []
         for fname in os.listdir(self.logdir):
             pathname = os.path.join(self.logdir, fname)
             if Purr.LogEntry.isValidPathname(pathname):
                 try:
                     entry = Purr.LogEntry(load=pathname)
                     dprint(2, "loaded log entry", pathname)
                 except:
                     print("Error loading entry %s, skipping" % fname)
                     traceback.print_exc()
                     continue
                 entries.append(entry)
             else:
                 dprint(2, fname, "is not a valid Purr entry")
         # sort log entries by timestamp
         entries.sort(key=lambda entry: entry.timestamp)
         self.setLogEntries(entries, save=False)
         # update own timestamp
         if entries:
             self.timestamp = max(self.timestamp, entries[-1].timestamp)
     # else logfile doesn't exist, create it
     else:
         self._initIndexDir()
     # load configuration if it exists
     # init config file
     self.dirconfig = configparser.RawConfigParser()
     self.dirconfigfile = os.path.join(self.logdir, "dirconfig")
     if os.path.exists(self.dirconfigfile):
         try:
             self.dirconfig.read(self.dirconfigfile)
         except:
             print("Error loading config file %s" % self.dirconfigfile)
             traceback.print_exc()
         # load directory configuration
         for dirname in self.dirconfig.sections():
             try:
                 watching = self.dirconfig.getint(dirname, "watching")
             except:
                 watching = Purr.WATCHED
             dirname = os.path.expanduser(dirname)
             self.addWatchedDirectory(dirname, watching, save_config=False)
     # start watching the specified directories
     for name in (watchdirs or []):
         self.addWatchedDirectory(name, watching=None)
     # Finally, go through list of ignored files and mark their watchers accordingly.
     # The ignorelist is a list of lines of the form "timestamp filename", giving the timestamp when a
     # file was last "ignored" by the purrlog user.
     self.ignorelistfile = os.path.join(self.logdir, "ignorelist")
     if os.path.exists(self.ignorelistfile):
         # read lines from file, ignore exceptions
         ignores = {}
         try:
             for line in open(self.ignorelistfile).readlines():
                 timestamp, policy, filename = line.strip().split(" ", 2)
                 # update dictionary with latest timestamp
                 ignores[filename] = int(timestamp), policy
         except:
             print("Error reading %s" % self.ignorelistfile)
             traceback.print_exc()
         # now scan all listed files, and make sure their watchers' mtime is no older than the given
         # last-ignore-timestamp. This ensures that we don't pounce on these files after restarting purr.
         for filename, (timestamp, policy) in ignores.items():
             watcher = self.watchers.get(filename, None)
             if watcher:
                 watcher.mtime = max(watcher.mtime, timestamp)
     # init complete
     self.attached = True
     return True
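A self-contained sketch of the advisory-locking pattern used in _attach() above (POSIX-only; the lock path is illustrative):

    import fcntl
    import os

    lockfile = "/tmp/example.purrlock"
    fd = os.open(lockfile, os.O_RDWR | os.O_CREAT)
    try:
        fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except OSError:
        # someone else holds the lock; their ID is in the file
        print("already locked by:", os.fdopen(fd, 'r').read())
    else:
        # got the lock: record our ID, hold the fd for the process lifetime
        os.write(fd, ("pid %d" % os.getpid()).encode())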
Example #16
    def regenerate(self):
        Purr.progressMessage("reading %s" % self.dp.filename, sub=True)
        # open the FITS file and read the primary header
        fitsfile = fits.open(self.dp.fullpath)
        header = fitsfile[0].header

        dprintf(3, "beginning render of", self.dp.fullpath);
        t0 = time.time()
        # write out FITS header
        self.headerfile, path, uptodate = self.subproduct("-fitsheader.html")
        if not uptodate:
            title = "FITS header for %s" % self.dp.filename
            html = """<HTML><BODY><TITLE>%s</TITLE>
      <H2>%s</H2>
      <PRE>""" % (title, title)
            for line in header.cards:
                line = str(line).replace("<", "&lt;").replace(">", "&gt;")
                html += line + "\n"
            html += """
      </PRE></BODY></HTML>\n"""
            try:
                with open(path, "w") as f:
                    f.write(html)
            except:
                print("Error writing file %s" % path)
                traceback.print_exc()
                self.headerfile = None

        # figure out number of images to include
        ndim = header['NAXIS']
        fitsshape = [header['NAXIS%d' % i] for i in range(1, ndim + 1)]
        self.cubesize = 'x'.join(map(str, fitsshape))
        if ndim < 2:
            raise TypeError("can't render one-dimensional FITS files")
        elif ndim == 2:
            fitsdata_to_images = lambda fdata: [fdata]
            nplanes = 1
        else:
            ax1 = ax2 = None
            # find the X/Y axes, by looking at CTYPEx
            # note that the array axes are in reverse order. I.e. if X is FITS axis 1 and Y is axis 2,
            # the array will be of e.g. shape 1,1,NY,NX, while fitsshape is [NX,NY,1,1]
            for i in range(1, ndim + 1):
                ctype = header['CTYPE%d' % i]
                if [prefix for prefix in ("RA", "GLON", "ELON", "HLON", "SLON") if ctype.startswith(prefix)] \
                        or ctype in ("L", "X"):
                    ax1 = ndim - i
                elif [prefix for prefix in ("DEC", "GLAT", "ELAT", "HLAT", "SLAT") if ctype.startswith(prefix)] \
                        or ctype in ("M", "Y"):
                    ax2 = ndim - i
            if ax1 is None or ax2 is None:
                ax1, ax2 = 1, 0
            arrshape = fitsshape[-1::-1]
            # this is how many planes we render, at most
            nplanes = max(self.getOption('fits-nimage'), 1)
            slices = []
            baseslice = [0] * ndim
            baseslice[ax1] = baseslice[ax2] = None
            imgshape = (arrshape[min(ax1, ax2)], arrshape[max(ax1, ax2)])
            while len(slices) < nplanes:
                slices.append(tuple(baseslice))
                for idim in range(ndim):
                    if baseslice[idim] is not None:
                        baseslice[idim] += 1
                        if baseslice[idim] < arrshape[idim]:
                            break
                        else:
                            baseslice[idim] = 0
                else:
                    break
            nplanes = len(slices)

            # OK, slices contains how many slices to return
            def fitsdata_to_images(fdata, slices=slices, imgshape=imgshape):
                dprint(3, "fitsdata_to_images", slices, fdata.shape)
                t0 = time.time()
                # reshape to collapse into a 3D cube
                img = [fdata[i].reshape(imgshape) for i in slices]
                dprint(3, "collecting images took", time.time() - t0, "secs")
                return img

        # OK, now cycle over all images
        dprintf(3, "%s: rendering %d planes\n", self.dp.fullpath, nplanes);
        t0 = time.time()

        self.imgrec = [None] * nplanes
        # get number of bins (0 or None means no histogram)
        nbins = self.getOption("fits-hist-nbin")
        # see if histogram clipping is enabled, set hclip to None if not
        self.hclip = hclip = self.getOption("fits-hist-clip")
        if hclip == 1 or not nbins:
            hclip = None

        tsize_img = self.getOption("image-thumbnail-width"), self.getOption("image-thumbnail-height")
        tsize_hist = self.getOption("hist-thumbnail-width"), self.getOption("hist-thumbnail-height")
        self.hist_size = self.getOption("hist-width"), self.getOption("hist-height")

        # filled once we read the data
        images = None

        for num_image in range(nplanes):
            # do we have a cached status record for this image?
            recfile, recpath, uptodate = self.subproduct("-%d-stats" % num_image)
            if uptodate:
                dprintf(3, "%s(%d): stats file %s up-to-date, reading in\n", self.dp.fullpath, num_image, recfile)
                try:
                    self.imgrec[num_image] = pickle.load(open(recpath, 'rb'))
                    continue
                except:
                    print("Error reading stats file %s, regenerating everything" % recpath)
                    traceback.print_exc()
            # out of date, so we regenerate everything
            # build up record of stuff associated with this image
            rec = self.imgrec[num_image] = Kittens.utils.recdict()

            # generate paths for images
            rec.fullimage, img_path = self.subproductPath("-%d-full.png" % num_image)
            rec.thumbnail, img_thumb = self.subproductPath("-%d-thumb.png" % num_image)
            if pychart:
                rec.histogram_full, hf_path = self.subproductPath("-%d-hist-full.png" % num_image)
                rec.histogram_zoom, hz_path = self.subproductPath("-%d-hist-zoom.png" % num_image)
                rec.histogram_full_thumb, hf_thumb = self.subproductPath("-%d-hist-full-thumb.png" % num_image)
                rec.histogram_zoom_thumb, hz_thumb = self.subproductPath("-%d-hist-zoom-thumb.png" % num_image)

            # need to read in data at last
            if not images:
                dprint(3, "reading data");
                t0 = time.time()
                fitsdata = fitsfile[0].data
                dprint(3, "reading data took", time.time() - t0, "secs");
                t0 = time.time()
                fitsfile = None
                images = fitsdata_to_images(fitsdata)
                dprint(3, "converting to images took", time.time() - t0, "secs");
                t0 = time.time()
                fitsdata = None

            data = images[num_image]

            title = self.dp.filename
            if nplanes > 1:
                title += ", plane #%d" % num_image
            Purr.progressMessage("rendering %s" % title, sub=True)

            # min/max data values
            dprint(3, "rendering plane", num_image);
            t0 = time.time()
            datamask = ~numpy.isfinite(data)
            dprint(3, "making mask took", time.time() - t0, "secs");
            t0 = time.time()
            datamin, datamax = scipy.ndimage.measurements.extrema(data, datamask, False)[:2]
            dprint(3, "computing min/max took", time.time() - t0, "secs");
            t0 = time.time()
            rec.datamin, rec.datamax = datamin, datamax
            # mean and sigma
            rec.datamean = scipy.ndimage.measurements.mean(data, datamask, False)
            dprint(3, "computing mean took", time.time() - t0, "secs");
            t0 = time.time()
            rec.datastd = scipy.ndimage.measurements.standard_deviation(data, datamask, False)
            dprint(3, "computing std took", time.time() - t0, "secs");
            t0 = time.time()
            # thumbnail files will be "" if images are small enough to be inlined.
            # these will be None if no histogram clipping is applied
            rec.clipmin, rec.clipmax = None, None
            dprintf(3, "%s plane %d: datamin %g, datamax %g\n", self.dp.fullpath, num_image, rec.datamin, rec.datamax)
            # compute histogram of data only if this is enabled,
            # and either pychart is available (so we can produce plots), or histogram clipping is in effect
            if datamin != datamax and nbins and (pychart or hclip):
                dprintf(3, "%s plane %d: computing histogram\n", self.dp.fullpath, num_image)
                counts = scipy.ndimage.measurements.histogram(data, datamin, datamax, nbins, labels=datamask,
                                                              index=False)  # needed for 1.3+ to avoid warnings
                edges = datamin + (datamax - datamin) * (numpy.arange(nbins, dtype=float) + .5) / nbins
                dprint(3, "computing histogram took", time.time() - t0, "secs")
                t0 = time.time()
                # render histogram
                if pychart:
                    try:
                        self._make_histogram(hf_path, "Histogram of %s" % title, edges, counts)
                        dprint(3, "rendering histogram took", time.time() - t0, "secs");
                        t0 = time.time()
                    except:
                        print("Error rendering histogram %s" % hf_path)
                        traceback.print_exc()
                        rec.histogram_full = None
                    # if histogram was rendered, make a thumbnail
                    if rec.histogram_full:
                        self.makeThumb(hf_path, hf_thumb, tsize_hist)
                    else:
                        rec.histogram_full_thumb = None
                # now, compute clipped data if needed
                if hclip:
                    # find max point in histogram
                    ic = counts.argmax()
                    # compute number of points that need to be included, given the clip factor
                    target_count = int(data.size * hclip)
                    ih0 = ih1 = ic
                    totcount = counts[ic]
                    # find how many bins to include around ic, stopping when we hit the edge
                    while totcount < target_count:
                        if ih0 > 0:
                            ih0 -= 1
                            totcount += counts[ih0]
                        if ih1 < nbins - 1:
                            ih1 += 1
                            totcount += counts[ih1]
                        # just in case
                        if ih0 <= 0 and ih1 >= nbins - 1:
                            break
                    # and these are the clipping limits
                    datamin = float(edges[ih0])
                    if ih1 >= nbins - 1:
                        ih1 = nbins - 1  # and datamax is already the clipping limit
                    else:
                        ih1 += 1
                        datamax = float(edges[ih1])
                    rec.clipmin, rec.clipmax = datamin, datamax
                    dprintf(3, "%s plane %d: clipping to %g,%g\n", self.dp.fullpath, num_image, rec.clipmin,
                            rec.clipmax)
                    # render zoomed histogram
                    if pychart:
                        if rec.clipmax != rec.clipmin:
                            zcounts = scipy.ndimage.measurements.histogram(data, rec.clipmin, rec.clipmax, nbins,
                                                                           labels=datamask,
                                                                           index=False)  # needed for 1.3+ to avoid warnings
                            zedges = rec.clipmin + (rec.clipmax - rec.clipmin) * (
                                        numpy.arange(nbins, dtype=float) + .5) / nbins
                            try:
                                self._make_histogram(hz_path, "Histogram zoom of %s" % title, zedges, zcounts)
                                dprint(3, "rendering zoomed histogram took", time.time() - t0, "secs");
                                t0 = time.time()
                            except:
                                print("Error rendering histogram %s" % hz_path)
                                traceback.print_exc()
                                rec.histogram_zoom = None
                        else:  # no meaningful zoomed area to render
                            rec.histogram_zoom = None
                        # if histogram was rendered, make a thumbnail
                        if rec.histogram_zoom:
                            self.makeThumb(hz_path, hz_thumb, tsize_hist)
                        else:
                            rec.histogram_zoom_thumb = None
                    # clip data
                    data = numpy.clip(data, datamin, datamax)
                # end of clipping
            # else no histogram for whatever reason
            else:
                rec.histogram_full = rec.histogram_zoom = rec.histogram_full_thumb = rec.histogram_zoom_thumb = None
            # ok, data has been clipped if need be. Rescale it to 8-bit integers
            t0 = time.time()
            datarng = datamax - datamin
            if datarng:
                data = (data - datamin) * (255 / datarng)
                data = data.round().astype('uint8')
                data[datamask] = 255
            else:
                data = numpy.zeros(data.shape, dtype='uint8')
            dprintf(3, "%s plane %d: rescaled to %d:%d in %f seconds\n", self.dp.fullpath, num_image, data.min(),
                    data.max(), time.time() - t0);
            t0 = time.time()
            # generate PNG image
            img = None
            try:
                img = PIL.Image.frombuffer('L', data.shape[-1::-1], data.tobytes(), "raw", 'L', 0, -1)
                dprint(3, "image frombuffer took", time.time() - t0, "secs")
                t0 = time.time()
                # img = PIL.Image.new('L',data.shape)
                # dprint(3,"new image took",time.time()-t0,"secs"); t0 = time.time()
                # imgdata = data.reshape((data.size,))
                # dprint(3,"data.reshape took",time.time()-t0,"secs"); t0 = time.time()
                # img.putdata(imgdata)
                # dprint(3,"putdata took",time.time()-t0,"secs"); t0 = time.time()
                # img = img.transpose(PIL.Image.FLIP_TOP_BOTTOM)
                # dprint(3,"transpose took",time.time()-t0,"secs"); t0 = time.time()
                img.save(img_path, 'PNG')
                dprint(3, "saving took", time.time() - t0, "secs");
                t0 = time.time()
            except:
                print("Error rendering image %s" % path)
                traceback.print_exc()
                rec.fullimage = img = None
            # if image was rendered, make a thumbnail
            if rec.fullimage:
                thumb = self.makeThumb(img_path, img_thumb, tsize_img, img=img)
                dprint(3, "rendering thumbnail took", time.time() - t0, "secs");
                t0 = time.time()
                # None means thumbnail failed
                if thumb is None:
                    rec.thumbnail = None
                # else perhaps image is its own thumbnail
                elif thumb is img_path:
                    rec.thumbnail = rec.fullimage
            else:
                rec.thumbnail = None
            # write stats
            try:
                pickle.dump(rec, open(recpath, 'wb'))
            except:
                print("Error writing stats file %s" % recpath)
                traceback.print_exc()
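A standalone sketch of the final rescale-and-save step in regenerate(), assuming numpy and PIL (Pillow) are available; the synthetic data and file name are illustrative:

    import numpy
    import PIL.Image

    data = numpy.random.uniform(0.0, 100.0, (64, 64))
    datamin, datamax = data.min(), data.max()
    # rescale to 8-bit grayscale, as the renderer does before writing PNGs
    data8 = ((data - datamin) * (255 / (datamax - datamin))).round().astype('uint8')
    # flip vertically so row 0 ends up at the bottom, matching FITS convention
    PIL.Image.fromarray(data8[::-1]).save("example-plane.png", "PNG")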
Example #17
 def restore_from_archive(self, parent=None):
     """Function to restore a DP from archived copy
     Asks for confirmation along the way if parent is not None
     (in which case it will be the parent widget for confirmation dialogs)
     """
     from PyQt4.Qt import QMessageBox
     exists = os.path.exists(self.sourcepath)
     if parent:
         msg = """<P>Do you really want to restore <tt>%s</tt> from
         this entry's copy of <tt>%s</tt>?</P>""" % (self.sourcepath,
                                                     self.filename)
         if exists:
             msg += """<P>Current file exists, and will be overwritten.</P>"""
             if QMessageBox.warning(parent, "Restoring from archive", msg,
                                    QMessageBox.Yes,
                                    QMessageBox.No) != QMessageBox.Yes:
                 return False
         else:
             if QMessageBox.question(parent, "Restoring from archive", msg,
                                     QMessageBox.Yes,
                                     QMessageBox.No) != QMessageBox.Yes:
                 return False
     busy = Purr.BusyIndicator()
     # remove file if in the way
     if exists:
         if os.system("/bin/rm -fr '%s'" % self.sourcepath):
             busy = None
             if parent:
                 QMessageBox.warning(
                     parent, "Error removing file", """<P>
         There was an error removing %s. Archived copy was not restored.
         The text console may have more information.</P>""" %
                     self.sourcepath, QMessageBox.Ok, 0)
             return False
     # unpack archived file
     if self.fullpath.endswith('.tgz'):
         parent_dir = os.path.dirname(self.sourcepath.rstrip('/'))
         os.system("/bin/rm -fr %s" % self.sourcepath)
         if os.system("tar zxf '%s' -C '%s'" % (self.fullpath, parent_dir)):
             busy = None
             if parent:
                 QMessageBox.warning(
                     parent, "Error unpacking file", """<P>
         There was an error unpacking the archived version to %s. The text console may have more information.</P>"""
                     % self.sourcepath, QMessageBox.Ok, 0)
             return False
     # else simply copy over
     else:
         if os.system("/bin/cp -a '%s' '%s'" %
                      (self.fullpath, self.sourcepath)):
             busy = None
             if parent:
                 QMessageBox.warning(
                     parent, "Error copying file", """<P>
         There was an error copying the archived version to %s. The text console may have more information.</P>"""
                     % self.sourcepath, QMessageBox.Ok, 0)
             return False
     busy = None
     if parent:
         QMessageBox.information(
             parent, "Restored file", """<P>Restored %s from this entry's
       archived copy.</P>""" % self.sourcepath, QMessageBox.Ok, 0)
     return True
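The os.system() shell commands above are fragile when file names contain quotes or spaces. A sketch of the same restore steps using only the standard library (a possible alternative, not the current implementation):

    import os
    import shutil
    import tarfile

    def restore_copy(fullpath, sourcepath):
        """Restore an archived DP: unpack a .tgz next to its original
        location, or copy a plain file back over sourcepath."""
        if fullpath.endswith('.tgz'):
            parent_dir = os.path.dirname(sourcepath.rstrip('/'))
            with tarfile.open(fullpath, 'r:gz') as tar:
                tar.extractall(parent_dir)  # trusted archive assumed
        else:
            shutil.copy2(fullpath, sourcepath)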
Example #18
      # need to read in data at last
      if not images:
        dprint(3,"reading data"); t0 = time.time();
        fitsdata = fitsfile[0].data;
        dprint(3,"reading data took",time.time()-t0,"secs"); t0 = time.time();
        fitsfile = None;
        images = fitsdata_to_images(fitsdata);
        dprint(3,"converting to images took",time.time()-t0,"secs"); t0 = time.time();
        fitsdata = None;

      data = images[num_image];

      title = self.dp.filename;
      if nplanes > 1:
        title += ", plane #%d"%num_image;
      Purr.progressMessage("rendering %s"%title,sub=True);

      # min/max data values
      dprint(3,"rendering plane",num_image); t0 = time.time();
      datamask = ~numpy.isfinite(data);
      dprint(3,"making mask took",time.time()-t0,"secs"); t0 = time.time();
      datamin,datamax = scipy.ndimage.measurements.extrema(data,datamask,False)[:2];
      dprint(3,"computing min/max took",time.time()-t0,"secs"); t0 = time.time();
      rec.datamin,rec.datamax = datamin,datamax;
      # mean and sigma
      rec.datamean = scipy.ndimage.measurements.mean(data,datamask,False);
      dprint(3,"computing mean took",time.time()-t0,"secs"); t0 = time.time();
      rec.datastd = scipy.ndimage.measurements.standard_deviation(data,datamask,False);
      dprint(3,"computing std took",time.time()-t0,"secs"); t0 = time.time();
      # thumbnail files will be "" if images are small enough to be inlined.
      # these will be None if no histogram clipping is applied
Example #19
 def save(self,
          dirname=None,
          refresh=0,
          refresh_index=True,
          emit_message=True):
     """Saves entry in the given directory. Data products will be copied over if not
     residing in that directory.
     'refresh' is a timestamp, passed to renderIndex(), causing all data products OLDER than the specified time to be regenerated.
     'refresh_index', if true, causes index files to be re-rendered unconditionally
     """
     if not refresh and not self.updated:
         return
     timestr = time.strftime("%Y%m%d-%H%M%S",
                             time.localtime(self.timestamp))
     Purr.progressMessage("Rendering entry for %s" % timestr)
     if dirname:
         self.pathname = pathname = os.path.join(
             dirname,
             "%s-%s" % (("ignore" if self.ignore else "entry"), timestr))
     elif not self.pathname:
         raise ValueError("Cannot save entry: pathname not specified")
     else:
         pathname = self.pathname
     # set timestamp
     if not self.timestamp:
         self.timestamp = int(time.time())
     # get canonized path to output directory
     pathname = Purr.canonizePath(pathname)
     if not os.path.exists(pathname):
         os.mkdir(pathname)
     # now save content
     # get device of pathname -- need to know whether we move or copy
     devnum = os.stat(pathname).st_dev
     # copy data products as needed
     dprintf(2, "saving entry %s, %d data products\n", pathname,
             len(self.dps))
     dps = []
     for dp in self.dps:
         # if archived, this indicates a previously saved data product, so ignore it
         # if ignored, no need to save the DP -- but keep it in list
         if dp.archived or dp.ignored:
             dprintf(3, "dp %s is archived or ignored, skipping\n",
                     dp.sourcepath)
             dps.append(dp)
             continue
         # file missing for some reason (perhaps it was removed?) -- skip this data product entirely
         if not os.path.exists(dp.sourcepath):
             dprintf(2, "data product %s missing, ignoring\n",
                     dp.sourcepath)
             continue
         Purr.progressMessage("archiving %s" % dp.filename, sub=True)
         # get normalized source and destination paths
         dprintf(2, "data product: %s, rename %s, policy %s\n",
                 dp.sourcepath, dp.filename, dp.policy)
         sourcepath = Purr.canonizePath(dp.sourcepath)
         destname = dp.fullpath = os.path.join(pathname, dp.filename)
         dprintf(2, "data product: %s -> %s\n", sourcepath, destname)
         # does the destination product already exist? skip if same file, else remove
         if os.path.exists(destname):
             if os.path.samefile(destname, sourcepath):
                 dprintf(2, "same file, skipping\n")
                 dp.timestamp = os.path.getmtime(destname)
                 dps.append(dp)
                 continue
             if os.system("/bin/rm -fr '%s'" % destname):
                 print("Error removing %s, which is in the way of %s" %
                       (destname, sourcepath))
                 print("This data product is not saved.")
                 continue
         # for directories, compress with tar
         if os.path.isdir(sourcepath):
             sourcepath = sourcepath.rstrip('/')
             if dp.policy == "copy" or dp.policy.startswith("move"):
                 dprintf(2, "archiving to tgz\n")
                 if os.system("tar zcf '%s' -C '%s' '%s'" %
                              (destname, os.path.dirname(sourcepath),
                               os.path.basename(sourcepath))):
                     print("Error archiving %s to %s" %
                           (sourcepath, destname))
                     print("This data product is not saved.")
                     continue
                 if dp.policy.startswith("move"):
                     os.system("/bin/rm -fr '%s'" % sourcepath)
         # else just a file
         else:
             # now copy/move it over
             if dp.policy == "copy":
                 dprintf(2, "copying\n")
                 if _copy_update(sourcepath, destname):
                     print("Error copying %s to %s" %
                           (sourcepath, destname))
                     print("This data product is not saved.")
                     continue
             elif dp.policy.startswith('move'):
                 if _move_update(sourcepath, destname):
                     print("Error moving %s to %s" % (sourcepath, destname))
                     print("This data product is not saved.")
                     continue
         # success, set timestamp and append
         dp.timestamp = os.path.getmtime(destname)
         dp.archived = True
         dps.append(dp)
     # reset list of data products
     self.dps = dps
     # now write out content
     self.cached_include = os.path.join(pathname, 'index.include.html')
     self.cached_include_valid = False
     self.index_file = os.path.join(pathname, "index.html")
     self.generateIndex(refresh=refresh,
                        refresh_index=refresh_index and time.time())
     self.updated = False