class ToolEnv(object):
    """
    Global data

    Attributes:
        app        QApplication
        shows      list of Path objects to the shows
        current    the directory that the app was started in
        platform   one of "windows", "linux" or "mac"
        clipboard  Clipboard object
        iconPath   path to all the icon PNGs
        appPath    path to the application

    Methods:
        getShots()  get a list of Paths to all shots for the given show
        getIcon()   return a QIcon for the named icon
    """
    DATEFORMAT = "yyyyMMdd"

    def __init__(self, application=None):
        if application:
            self.app = application
        else:
            global app
            self.app = app
        self.current = Path().getcwd()  # directory where the program was started
        self.platform = "linux"
        if sys.platform.lower().startswith("win"):
            self.platform = "windows"
        elif sys.platform.lower().startswith("dar"):
            self.platform = "mac"
        #myIcons = dict([(path.namebase, QIcon(path)) for path in Path(r'C:\Documents and Settings\doug\My Documents\images\icons').files('*.png')])
        self.appPath = Path(__file__).dirname().abspath().dirname()
        if not self.appPath.isdir():
            self.appPath = Path().getcwd()
        self.iconPath = self.appPath / "images" / "icons"
        if not self.iconPath.isdir():
            raise Exception("no icon directory '%s'" % self.iconPath)

    def getIcon(self, name):
        """
        Return a QIcon given the name of a PNG icon in the app's
        resources, or a blank icon if not found.
        """
        p = self.iconPath / ("%s.png" % name)
        if p.exists():
            icon = QIcon(str(p))
            icon.isDummy = False
        else:
            icon = QIcon()
            icon.isDummy = True
        return icon
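# A minimal usage sketch for ToolEnv, not from the original source: it
# assumes a Qt binding whose QApplication/QIcon match the names this
# snippet already relies on (PyQt4 here is an assumption), and that an
# images/icons directory with PNG files exists beside the application.
# The icon name "save" is purely illustrative.
if __name__ == "__main__":
    import sys
    from PyQt4.QtGui import QApplication  # assumption: PyQt4-era binding
    app = QApplication(sys.argv)
    env = ToolEnv(app)
    # getIcon() returns a blank QIcon with icon.isDummy = True if "save.png" is absent
    icon = env.getIcon("save")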
def load_plugins(self, path, autocreate=False):
    path = Path(path)
    logger.debug("load_plugins: " + path)
    if path.isdir():
        logger.debug("Adding dir: " + path)
        for p in path.files("*.py"):
            self.load_plugins(p, autocreate)
    else:
        logger.debug("Examining file: " + path)
        m = imp.load_source(path.namebase, path)
        self.add_plugins(m, autocreate)
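# A self-contained sketch of the same discovery pattern as load_plugins
# above, not from the original source: it assumes path.py (imported the
# same way as in the other snippets here) and the stdlib imp module;
# the "plugins" directory name is a hypothetical example.
import imp
from path import path as Path

def discover_modules(path):
    """Import every *.py file in a directory (or a single file) as a module."""
    path = Path(path)
    if path.isdir():
        return [imp.load_source(p.namebase, p) for p in path.files("*.py")]
    return [imp.load_source(path.namebase, path)]

# modules = discover_modules("plugins")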
def __init__(self, parent, manager, shelf_path=None, *args, **kw):
    super(ShelveConfigAdapter, self).__init__(parent=parent, manager=manager, *args, **kw)
    if shelf_path is None:
        shelf_path = self.get_storage_dir() / self.__quote()
    else:
        from path import path as Path
        shelf_path = Path(shelf_path)
        if not shelf_path.isabs():
            shelf_path = self.get_storage_dir() / shelf_path
        if shelf_path.isdir():
            shelf_path = shelf_path / self.__quote()
    #logger.debug("shelf path: %s" % (shelf_path, ))
    self.__shelf_path = shelf_path
def __init__(self, str_or_folder=''):
    if isinstance(str_or_folder, set):
        self.jpegs = str_or_folder
        return
    if not isinstance(str_or_folder, Path):
        str_or_folder = Path(str_or_folder)
    if not str_or_folder.exists():
        pass  # a nonexistent path is not a directory, so it falls through to the error below
    if str_or_folder.isdir():
        self.store(str_or_folder)
    else:
        print "State initializer must be a directory, or a string representation of some data."
class PickleShareDB(UserDict.DictMixin):
    """ The main 'connection' object for PickleShare database """
    def __init__(self,root):
        """ Return a db object that will manage the specified directory"""
        self.root = Path(root).expanduser().abspath()
        if not self.root.isdir():
            self.root.makedirs()
        # cache has { 'key' : (obj, orig_mod_time) }
        self.cache = {}

    def get_path(self,key):
        return self.root / key + ".db"

    def __getitem__(self,key):
        """ db['key'] reading """
        fil = self.root / key
        try:
            mtime = (fil.stat()[stat.ST_MTIME])
        except OSError:
            raise KeyError(key)

        if fil in self.cache and mtime == self.cache[fil][1]:
            return self.cache[fil][0]

        try:
            # The cached item has expired, need to read
            obj = pickle.load(fil.open())
        except:
            raise KeyError(key)

        self.cache[fil] = (obj,mtime)
        return obj

    def __setitem__(self,key,value):
        """ db['key'] = 5 """
        fil = self.root / key
        parent = fil.parent
        if parent and not parent.isdir():
            parent.makedirs()
        pickle.dump(value, fil.open('w'))
        try:
            self.cache[fil] = (value,fil.mtime)
        except OSError,e:
            if e.errno != 2:  # ENOENT ("file not found") is tolerated
                raise
class PickleShareDB(UserDict.DictMixin):
    """ The main 'connection' object for PickleShare database """
    def __init__(self, root):
        """ Return a db object that will manage the specified directory"""
        self.root = Path(root).expanduser().abspath()
        if not self.root.isdir():
            self.root.makedirs()
        # cache has { 'key' : (obj, orig_mod_time) }
        self.cache = {}

    def __getitem__(self, key):
        """ db['key'] reading """
        fil = self.root / key
        try:
            mtime = (fil.stat()[stat.ST_MTIME])
        except OSError:
            raise KeyError(key)

        if fil in self.cache and mtime == self.cache[fil][1]:
            return self.cache[fil][0]

        try:
            # The cached item has expired, need to read
            obj = pickle.load(fil.open())
        except:
            raise KeyError(key)

        self.cache[fil] = (obj, mtime)
        return obj

    def __setitem__(self, key, value):
        """ db['key'] = 5 """
        fil = self.root / key
        parent = fil.parent
        if parent and not parent.isdir():
            parent.makedirs()
        pickle.dump(value, fil.open('w'))
        try:
            self.cache[fil] = (value, fil.mtime)
        except OSError, e:
            if e.errno != 2:  # ENOENT ("file not found") is tolerated
                raise
class PickleShareDB(collections.MutableMapping):
    """ The main 'connection' object for PickleShare database """
    def __init__(self, root):
        """ Return a db object that will manage the specified directory"""
        self.root = Path(root).expanduser().abspath()
        if not self.root.isdir():
            self.root.makedirs_p()
        # cache has { 'key' : (obj, orig_mod_time) }
        self.cache = {}

    def __getitem__(self, key):
        """ db['key'] reading """
        fil = self.root / key
        try:
            mtime = fil.stat()[stat.ST_MTIME]
        except OSError:
            raise KeyError(key)

        if fil in self.cache and mtime == self.cache[fil][1]:
            return self.cache[fil][0]

        try:
            # The cached item has expired, need to read
            with fil.open("rb") as f:
                obj = pickle.loads(f.read())
        except:
            raise KeyError(key)

        self.cache[fil] = (obj, mtime)
        return obj

    def __setitem__(self, key, value):
        """ db['key'] = 5 """
        fil = self.root / key
        parent = fil.parent
        if parent and not parent.isdir():
            parent.makedirs()
        # We specify protocol 2, so that we can mostly go between Python 2
        # and Python 3. We can upgrade to protocol 3 when Python 2 is obsolete.
        with fil.open("wb") as f:
            pickle.dump(value, f, protocol=2)
        try:
            self.cache[fil] = (value, fil.mtime)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

    def hset(self, hashroot, key, value):
        """ hashed set """
        hroot = self.root / hashroot
        if not hroot.isdir():
            hroot.makedirs()
        hfile = hroot / gethashfile(key)
        d = self.get(hfile, {})
        d.update({key: value})
        self[hfile] = d

    def hget(self, hashroot, key, default=_sentinel, fast_only=True):
        """ hashed get """
        hroot = self.root / hashroot
        hfile = hroot / gethashfile(key)

        d = self.get(hfile, _sentinel)
        # print "got dict",d,"from",hfile
        if d is _sentinel:
            if fast_only:
                if default is _sentinel:
                    raise KeyError(key)
                return default
            # slow mode ok, works even after hcompress()
            d = self.hdict(hashroot)

        return d.get(key, default)

    def hdict(self, hashroot):
        """ Get all data contained in hashed category 'hashroot' as dict """
        hfiles = self.keys(hashroot + "/*")
        hfiles.sort()
        last = len(hfiles) and hfiles[-1] or ""
        if last.endswith("xx"):
            # print "using xx"
            hfiles = [last] + hfiles[:-1]

        all = {}
        for f in hfiles:
            # print "using",f
            try:
                all.update(self[f])
            except KeyError:
                print("Corrupt", f, "deleted - hset is not threadsafe!")
                del self[f]
            self.uncache(f)

        return all

    def hcompress(self, hashroot):
        """ Compress category 'hashroot', so hset is fast again

        hget will fail if fast_only is True for compressed items (that were
        hset before hcompress).
""" hfiles = self.keys(hashroot + "/*") all = {} for f in hfiles: # print "using",f all.update(self[f]) self.uncache(f) self[hashroot + "/xx"] = all for f in hfiles: p = self.root / f if p.basename() == "xx": continue p.remove() def __delitem__(self, key): """ del db["key"] """ fil = self.root / key self.cache.pop(fil, None) try: fil.remove() except OSError: # notfound and permission denied are ok - we # lost, the other process wins the conflict pass def _normalized(self, p): """ Make a key suitable for user's eyes """ return str(self.root.relpathto(p)).replace("\\", "/") def keys(self, globpat=None): """ All keys in DB, or all keys matching a glob""" if globpat is None: files = self.root.walkfiles() else: files = [Path(p) for p in glob.glob(self.root / globpat)] return [self._normalized(p) for p in files if p.isfile()] def __iter__(self): return iter(self.keys()) def __len__(self): return len(self.keys()) def uncache(self, *items): """ Removes all, or specified items from cache Use this after reading a large amount of large objects to free up memory, when you won't be needing the objects for a while. """ if not items: self.cache = {} for it in items: self.cache.pop(it, None) def waitget(self, key, maxwaittime=60): """ Wait (poll) for a key to get a value Will wait for `maxwaittime` seconds before raising a KeyError. The call exits normally if the `key` field in db gets a value within the timeout period. Use this for synchronizing different processes or for ensuring that an unfortunately timed "db['key'] = newvalue" operation in another process (which causes all 'get' operation to cause a KeyError for the duration of pickling) won't screw up your program logic. """ wtimes = [0.2] * 3 + [0.5] * 2 + [1] tries = 0 waited = 0 while 1: try: val = self[key] return val except KeyError: pass if waited > maxwaittime: raise KeyError(key) time.sleep(wtimes[tries]) waited += wtimes[tries] if tries < len(wtimes) - 1: tries += 1 def getlink(self, folder): """ Get a convenient link for accessing items """ return PickleShareLink(self, folder) def __repr__(self): return "PickleShareDB('%s')" % self.root
class PickleShareDB(collections.MutableMapping):
    """ The main 'connection' object for PickleShare database """
    def __init__(self,root):
        """ Return a db object that will manage the specified directory"""
        self.root = Path(root).expanduser().abspath()
        if not self.root.isdir():
            self.root.makedirs_p()
        # cache has { 'key' : (obj, orig_mod_time) }
        self.cache = {}

    def __getitem__(self,key):
        """ db['key'] reading """
        fil = self.root / key
        try:
            mtime = (fil.stat()[stat.ST_MTIME])
        except OSError:
            raise KeyError(key)

        if fil in self.cache and mtime == self.cache[fil][1]:
            return self.cache[fil][0]

        try:
            # The cached item has expired, need to read
            with fil.open("rb") as f:
                obj = pickle.loads(f.read())
        except:
            raise KeyError(key)

        self.cache[fil] = (obj,mtime)
        return obj

    def __setitem__(self,key,value):
        """ db['key'] = 5 """
        fil = self.root / key
        parent = fil.parent
        if parent and not parent.isdir():
            parent.makedirs()
        # We specify protocol 2, so that we can mostly go between Python 2
        # and Python 3. We can upgrade to protocol 3 when Python 2 is obsolete.
        with fil.open('wb') as f:
            pickle.dump(value, f, protocol=2)
        try:
            self.cache[fil] = (value,fil.mtime)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

    def hset(self, hashroot, key, value):
        """ hashed set """
        hroot = self.root / hashroot
        if not hroot.isdir():
            hroot.makedirs()
        hfile = hroot / gethashfile(key)
        d = self.get(hfile, {})
        d.update( {key : value})
        self[hfile] = d

    def hget(self, hashroot, key, default = _sentinel, fast_only = True):
        """ hashed get """
        hroot = self.root / hashroot
        hfile = hroot / gethashfile(key)

        d = self.get(hfile, _sentinel )
        #print "got dict",d,"from",hfile
        if d is _sentinel:
            if fast_only:
                if default is _sentinel:
                    raise KeyError(key)
                return default
            # slow mode ok, works even after hcompress()
            d = self.hdict(hashroot)

        return d.get(key, default)

    def hdict(self, hashroot):
        """ Get all data contained in hashed category 'hashroot' as dict """
        hfiles = self.keys(hashroot + "/*")
        hfiles.sort()
        last = len(hfiles) and hfiles[-1] or ''
        if last.endswith('xx'):
            # print "using xx"
            hfiles = [last] + hfiles[:-1]

        all = {}
        for f in hfiles:
            # print "using",f
            try:
                all.update(self[f])
            except KeyError:
                print("Corrupt",f,"deleted - hset is not threadsafe!")
                del self[f]
            self.uncache(f)

        return all

    def hcompress(self, hashroot):
        """ Compress category 'hashroot', so hset is fast again

        hget will fail if fast_only is True for compressed items (that were
        hset before hcompress).
""" hfiles = self.keys(hashroot + "/*") all = {} for f in hfiles: # print "using",f all.update(self[f]) self.uncache(f) self[hashroot + '/xx'] = all for f in hfiles: p = self.root / f if p.basename() == 'xx': continue p.remove() def __delitem__(self,key): """ del db["key"] """ fil = self.root / key self.cache.pop(fil,None) try: fil.remove() except OSError: # notfound and permission denied are ok - we # lost, the other process wins the conflict pass def _normalized(self, p): """ Make a key suitable for user's eyes """ return str(self.root.relpathto(p)).replace('\\','/') def keys(self, globpat = None): """ All keys in DB, or all keys matching a glob""" if globpat is None: files = self.root.walkfiles() else: files = [Path(p) for p in glob.glob(self.root/globpat)] return [self._normalized(p) for p in files if p.isfile()] def __iter__(self): return iter(self.keys()) def __len__(self): return len(self.keys()) def uncache(self,*items): """ Removes all, or specified items from cache Use this after reading a large amount of large objects to free up memory, when you won't be needing the objects for a while. """ if not items: self.cache = {} for it in items: self.cache.pop(it,None) def waitget(self,key, maxwaittime = 60 ): """ Wait (poll) for a key to get a value Will wait for `maxwaittime` seconds before raising a KeyError. The call exits normally if the `key` field in db gets a value within the timeout period. Use this for synchronizing different processes or for ensuring that an unfortunately timed "db['key'] = newvalue" operation in another process (which causes all 'get' operation to cause a KeyError for the duration of pickling) won't screw up your program logic. """ wtimes = [0.2] * 3 + [0.5] * 2 + [1] tries = 0 waited = 0 while 1: try: val = self[key] return val except KeyError: pass if waited > maxwaittime: raise KeyError(key) time.sleep(wtimes[tries]) waited+=wtimes[tries] if tries < len(wtimes) -1: tries+=1 def getlink(self,folder): """ Get a convenient link for accessing items """ return PickleShareLink(self, folder) def __repr__(self): return "PickleShareDB('%s')" % self.root