コード例 #1
0
ファイル: Package.py プロジェクト: tubav/teagle
	def __init__(self, package, executable, fork = False, workingdir = None, makepidfile = False, daemonargs = None, ssd = "/sbin/start-stop-daemon", ldpath = None, outfile = "/dev/null", *args, **kw):
		"""Configure a start-stop-daemon based controller for *package*.

		Relative ``executable``/``workingdir`` values are resolved
		against the package's install directory.  ``ldpath`` may be a
		single entry or a collection; it is de-duplicated and frozen
		into a tuple.  NOTE(review): uses the Python 2 builtin
		``unicode`` -- this module is Python 2 only.
		"""
		super(StartStopDaemonController, self).__init__(package = package, *args, **kw)

		# Relative executables live under the package installation dir.
		executable =  Path(executable)
		if not executable.isabs():
			executable = self.package.installdir / executable
		self.__executable = unicode(executable)

		# Same resolution rule for the working directory; default to
		# the install directory itself when none was given.
		if workingdir is not None:
			workingdir = Path(workingdir)
			if not workingdir.isabs():
				workingdir = self.package.installdir / workingdir
			self.__workingdir = unicode(workingdir)
		else:
			self.__workingdir = self.package.installdir

		# Accept a single entry or any collection; drop duplicates.
		# Note: tuple(set(...)) does not preserve the caller's order.
		if ldpath is not None:
			if not isinstance(ldpath, (list, set, tuple, frozenset)):
				ldpath = [ ldpath ]
			ldpath = tuple(set(ldpath))
		self.__ldpath = ldpath

		self.__makepidfile = makepidfile
		self.__daemonargs = daemonargs
		self.__fork = fork
		self.__ssd = ssd          # path to the start-stop-daemon binary
		self.__outfile = outfile  # where daemon output is appended
コード例 #2
0
ファイル: __init__.py プロジェクト: userzimmermann/QtQuery
    def run(self):
        """The actual conda command action called by Command base.

        Writes ``.conda/meta.yaml`` and ``.conda/build.sh`` describing
        this distribution, then invokes ``conda build`` on that
        metadata directory.  Exits the process with conda's status
        code when the build fails.
        """
        from path import path as Path
        import yaml

        metadir = Path('.conda')
        metadir.mkdir_p()
        metafile = metadir / 'meta.yaml'
        buildfile = metadir / 'build.sh'

        def conda_req(req):
            """conda wants space between requirement name and version specs.
            """
            return re.sub(r'([=<>]+)', r' \1', str(req))

        requirements = list(map(conda_req, REQUIRES))
        # Also add all extra requirements
        #  (conda doesn't seem to have such an extra features management):
        for extra in EXTRAS.values():
            requirements.extend(map(conda_req, extra))

        meta = { # to be dumped to meta.yaml
          'package': {
            'name': NAME,
            'version': str(VERSION),
            },
          'source': {
            'fn': '%s-%s.tar.gz' % (NAME, VERSION),
            # The absolute path to the sdist in dist/
            'url': 'file://%s' % os.path.realpath(os.path.join(
              'dist', '%s-%s.tar.gz' % (NAME, VERSION)))
            },
          'requirements': {
            'build': [
              'python',
              'pyyaml',
              ] + requirements,
            'run': [
              'python',
              ] + requirements,
            },
          'about': {
            'home': URL,
            'summary': DESCRIPTION,
            },
          }
        with open(metafile, 'w') as f:
            yaml.dump(meta, f, default_flow_style=False)

        with open(buildfile, 'w') as f:
            f.write('#!/bin/bash'
                    '\n\n'
                    '$PYTHON setup.py install'
                    '\n')

        status = call(['conda', 'build', metadir])
        # BUG FIX: the original tested "if not status", which exited on
        # *success* (status 0) and silently continued after a failed
        # build.  Propagate a non-zero conda exit status instead.
        if status:
            sys.exit(status)
コード例 #3
0
ファイル: namespace.py プロジェクト: cgdougm/PyQtEditWidgets
 def read(self,file,localFuncDict={}):
     """Populate this namespace from a simple ``key = expression`` file.

     Blank lines and ``#`` comment lines are skipped; each remaining
     line must contain ``=``.  The right-hand side is evaluated with
     ``eval`` using *localFuncDict* as the local namespace.
     Python 2 only (``raise SyntaxError, line`` syntax).
     NOTE(review): mutable default argument, and ``eval`` of file
     contents executes arbitrary code -- trusted input only.
     """
     p = Path(file)
     for line in p.lines(retain=False):
         # Skip blanks and comments.
         if line.strip() == '' or line.strip().startswith('#'):  continue
         if '=' not in line:
             raise SyntaxError, line
         # Split only on the first '=' so values may contain '='.
         lhs, rhs = line.split('=',1)
         key = lhs.strip()
         value = eval(rhs,localFuncDict,globals())
         self.set(key,value)
コード例 #4
0
ファイル: PackageAdapter.py プロジェクト: tubav/teagle
	def load_plugins(self, path, autocreate = False):
		"""Load plugin modules from *path*.

		A directory is recursed for ``*.py`` files; a single file is
		imported via ``imp.load_source`` and handed to add_plugins().
		"""
		path = Path(path)
		logger.debug ("Loadplugins: " + path)
		if not path.isdir():
			logger.debug("Examining file: " + path)
			module = imp.load_source(path.namebase, path)
			self.add_plugins(module, autocreate)
			return
		logger.debug("Adding dir: " + path)
		for candidate in path.files("*.py"):
			self.load_plugins(candidate, autocreate)
コード例 #5
0
    def export(self, hashVal=None, hashPath=None, tags=None, galleries=None):
        """
        The export function needs to:
        - Move source image to asset folder
        - Rename to guid.ext
        - Save thumbnail, small, and image versions

        Also attaches the image to the given galleries and tags, and
        assigns a guid before saving.
        """
        hashVal = hashVal or self.hash
        hashPath = hashPath or self.parent / hashVal + self.ext

        # Store the path relative to ROOT, normalized to forward slashes.
        self.source = hashPath.replace('\\', '/').replace(ROOT, '')
        galleries = galleries or []
        tags = tags or []

        imagefile = Path(ROOT + self.source.name)

        workImage = pilImage.open(imagefile)

        # Formats browsers cannot display are converted to PNG first.
        # BUG FIX: 'psd' was missing its leading dot, so Photoshop
        # files were never converted (Path.ext always starts with '.').
        if imagefile.ext in ('.tif', '.tiff', '.psd'):
            png = imagefile.parent / imagefile.namebase + '.png'
            workImage.save(png)
            workImage = pilImage.open(png)
            imagefile.move(imagefile.replace(self.hash, self.title))
            self.source = png.replace(ROOT, '')

        # Write capped renditions; '_' * i gives each a distinct name.
        # workImage is shrunk in place by thumbnail(), so the entries
        # are presumably ordered largest-first -- verify the constants.
        formats = [
            ('image', FROG_IMAGE_SIZE_CAP),
            ('small', FROG_IMAGE_SMALL_SIZE),
        ]
        for i, n in enumerate(formats):
            if workImage.size[0] > n[1] or workImage.size[1] > n[1]:
                workImage.thumbnail((n[1], n[1]), pilImage.ANTIALIAS)
                setattr(self, n[0], self.source.name.replace(hashVal, '_' * i + hashVal))
                workImage.save(ROOT + getattr(self, n[0]).name)
            else:
                setattr(self, n[0], self.source)

        self.generateThumbnail()

        for gal in galleries:
            g = Gallery.objects.get(pk=int(gal))
            g.images.add(self)

        self.tagArtist()

        for tagName in tags:
            tag = Tag.objects.get_or_create(name=tagName)[0]
            self.tags.add(tag)

        if not self.guid:
            self.guid = self.getGuid().guid
        self.save()
コード例 #6
0
ファイル: subprocess.py プロジェクト: tubav/OpenTeagle_API
	def __check_cmd(self, cmd, env):
		"""Run *cmd* synchronously, appending its stdout+stderr to
		self.__outfile (when configured) and letting
		subprocess.CalledProcessError propagate on non-zero exit.
		"""
		self.logger.debug("ssd env: " + str(env))
		self.logger.debug("ssd command: " + ' '.join(cmd))

		# BUG FIX: a falsy-but-not-None outfile (e.g. "") was never
		# opened, yet the finally block called .close() on it.
		# Normalize falsy values to None and only close what we opened.
		outfile = self.__outfile or None
		if outfile is not None:
			outfile = Path(outfile).open("a")

		try:
			subprocess.check_call(cmd, stdout = outfile, stderr = subprocess.STDOUT, close_fds = True, cwd = self.__workingdir, env = env)
		finally:
			if outfile is not None:
				outfile.close()
コード例 #7
0
ファイル: namespace.py プロジェクト: cgdougm/PyQtEditWidgets
 def write(self,file,prefix='',append=False):
     """Serialize this namespace to *file* as ``key = repr(value)`` lines.

     Nested namespaces recurse with a dotted ``prefix``.  The file is
     truncated first unless *append* is true.  Python 2 only
     (``print`` statement).
     """
     p = Path(file)
     if not append:
         p.write_text('')
     for k,v in self._data.items():
         name = "%s.%s" % (prefix,k) if prefix else k
         if isinstance(v,self.__class__):
             # Nested namespace: recurse, always appending.
             v.write(file,name,append=True)
         else:
             try:
                 r = repr(v)
             except:
                 print '***',k
             # NOTE(review): ``r`` is unused and repr(v) is evaluated
             # again below, so a value whose repr raises will raise
             # here anyway despite the try/except above.
             p.write_lines(["%s = %s" % (name, repr(v))],append=True)
コード例 #8
0
    def export(self, hashVal, hashPath, tags=None, galleries=None):
        '''
        The export function needs to:
        - Move source image to asset folder
        - Rename to guid.ext
        - Save thumbnail, small, and image versions

        Also attaches the image to the given galleries and tags, and
        assigns a guid before saving.
        '''

        # Store the path relative to ROOT, normalized to forward slashes.
        self.source = hashPath.replace('\\', '/').replace(ROOT, '')
        galleries = galleries or []
        tags = tags or []

        imagefile = Path(ROOT + self.source.name)

        workImage = pilImage.open(imagefile)

        # TIFFs are converted to PNG so browsers can display them.
        if imagefile.ext in ('.tif', '.tiff'):
            png = imagefile.parent / imagefile.namebase + '.png'
            workImage.save(png)
            workImage = pilImage.open(png)
            imagefile.move(imagefile.replace(self.hash, self.title))
            self.source = png.replace(ROOT, '')

        # Write capped renditions; '_' * i gives each a distinct name.
        # workImage is shrunk in place by thumbnail(), so the entries
        # are presumably ordered largest-first -- verify the constants.
        formats = [
            ('image', FROG_IMAGE_SIZE_CAP),
            ('small', FROG_IMAGE_SMALL_SIZE),
            ('thumbnail', FROG_THUMB_SIZE),
        ]
        for i,n in enumerate(formats):
            if workImage.size[0] > n[1] or workImage.size[1] > n[1]:
                workImage.thumbnail((n[1], n[1]), pilImage.ANTIALIAS)
                setattr(self, n[0], self.source.name.replace(hashVal, '_' * i + hashVal))
                workImage.save(ROOT + getattr(self, n[0]).name)
            else:
                setattr(self, n[0], self.source)

        for gal in galleries:
            g = Gallery.objects.get(pk=int(gal))
            g.images.add(self)

        self.tagArtist()

        for tagName in tags:
            tag = Tag.objects.get_or_create(name=tagName)[0]
            self.tags.add(tag)

        if not self.guid:
            self.guid = self.getGuid().guid
        self.save()
コード例 #9
0
ファイル: ResourceAdapter.py プロジェクト: tubav/teagle
		def __init__(self, parent, manager, shelf_path = None, *args, **kw):
			"""Initialise the adapter and decide where its shelf file lives.

			With no *shelf_path*, a quoted default name inside the
			storage directory is used.  Otherwise the given path is
			resolved relative to the storage directory, and a
			directory gets the quoted file name appended.
			"""
			super(ShelveConfigAdapter, self).__init__(parent = parent, manager = manager, *args, **kw)

			if shelf_path is not None:
				from path import path as Path
				shelf_path = Path(shelf_path)
				if not shelf_path.isabs():
					shelf_path = self.get_storage_dir() / shelf_path
				if shelf_path.isdir():
					shelf_path = shelf_path / self.__quote()
			else:
				shelf_path = self.get_storage_dir() / self.__quote()

			self.__shelf_path = shelf_path
コード例 #10
0
ファイル: Package.py プロジェクト: tubav/teagle
	def deploy_shared(klass, adapter):
		"""Copy the class's "shared" tree from the repository into its
		shared directory, replacing any previous deployment.

		The copy is staged in a temporary directory and moved into
		place afterwards, so a half-finished copy never appears at
		the final location.
		"""
		source = adapter.repodir / klass.__name__.lower() / "shared"
		target = klass.get_shareddir(adapter)

		source.checkdir()
		if target.exists():
			logger.warning(target + " already exists. Removing it.")
			rmtree(target)

		staging = Path(tempfile.mkdtemp())
		try:
			staged_copy = staging / "shared"
			source.copytree(staged_copy)
			staged_copy.move(target)
		finally:
			staging.rmtree()
コード例 #11
0
ファイル: file.py プロジェクト: dsaran/packagehelper
    def __init__(self, path=None, basepath=None, parse=False):
        """ Constructor.
        @param path 'path' instance or a string representing the file.
        @param basepath 'path' instance or a string representing package path.
        @param parse (optional) if the filename must be parsed.
        @raise ValueError if parsing is requested but the path does not
               contain the expected .../DB/DBUSER/TYPE/name layout."""
        self._database = None
        self._type = None
        self._name = None
        self._basepath = None
        self._path = None
        fileDetails = None

        if basepath:
            self._basepath = basepath
            # Normalize so joining basepath + relative names works.
            if not self._basepath.endswith(sep):
                self._basepath += sep

        if path:
            self._path = path
            # Accept either a plain string or an existing path object.
            if not hasattr(self._path, 'splitall'):
                self._path = Path(self._path)

            fileDetails = self._path.splitall()
            self._name = fileDetails[-1]

        if parse:
            try:
                # BUG FIX: parse=True without a path used to crash with
                # a NameError on fileDetails; treat it like any other
                # unparseable path instead.
                if fileDetails is None:
                    raise IndexError(path)
                db = fileDetails[-4].upper()
                dbUser = fileDetails[-3].upper()
                self._database = Database(db, dbUser)
                self._type = fileDetails[-2].upper()
                log.debug("File Created [db: %s, dbUser: %s, type: %s, name: %s]"%\
                            (db, dbUser, self._type, self._name))
            except IndexError:
                log.warn("Unknown file path: " + str(path))
                raise ValueError
コード例 #12
0
 def __init__(self, root):
     """Create a db object managing the directory *root*.

     The path is user-expanded and made absolute; the directory tree
     is created on demand.
     """
     root_path = Path(root).expanduser().abspath()
     if not root_path.isdir():
         root_path.makedirs_p()
     self.root = root_path
     # Maps key -> (cached object, file mtime at load time).
     self.cache = {}
コード例 #13
0
class Source(list):
    """A list of all files under *rootpath* whose extension does not
    match any of the *ignore* regular expressions."""

    def __init__(self, rootpath, ignore=(r'\.pyc$',)):
        """Collect files recursively under *rootpath*.

        @param rootpath: directory to walk.
        @param ignore: iterable of regex patterns matched (searched)
            against each file's extension only, not its full path.
        """
        # FIX: the default used to be a mutable list shared between all
        # calls; a tuple default behaves identically and is safe.
        self.rootpath = Path(rootpath).abspath()
        self.ignore = [re.compile(pattern) for pattern in ignore]
        self.extend(
          path for path in self.rootpath.walkfiles()
          if not any(r.search(path.ext) for r in self.ignore))
コード例 #14
0
ファイル: monitor.py プロジェクト: antroy/Home
 def __init__(self, str_or_folder=''):
     """Build the state from an existing set of jpeg names or from a
     directory to scan.

     A ``set`` argument is adopted directly; anything else is coerced
     to a Path and, when it is a directory, scanned via self.store().
     Python 2 only (``print`` statement).
     """
     
     if isinstance(str_or_folder, set):
         self.jpegs = str_or_folder
         return
     
     if not isinstance(str_or_folder, Path):
         str_or_folder = Path(str_or_folder)
     
     # NOTE(review): this existence check is a no-op ('pass'); a
     # missing path simply falls through to the message below.
     if not str_or_folder.exists():
         pass
     
     if str_or_folder.isdir():
         self.store(str_or_folder)
     else:
         print "State initializer must be a directory, or a string representation of some data."
コード例 #15
0
ファイル: pickleshare.py プロジェクト: lpp1985/lpp_Script
 def __init__(self,root):
     """Return a db object that manages the directory *root*,
     expanding '~' and creating the tree when it is missing."""
     self.root = Path(root).expanduser().abspath()
     root_missing = not self.root.isdir()
     if root_missing:
         self.root.makedirs_p()
     # cache maps key -> (object, modification time when read)
     self.cache = {}
コード例 #16
0
ファイル: Merger.py プロジェクト: tubav/teagle
	def merge(self, package, dir):
		"""Move the "image" tree found under *dir* into the package's
		install directory.

		The package base directory is created when missing; merging
		is refused when the install directory already exists.
		Python 2 only (octal literal 077).
		"""
		source = Path(dir) / "image"

		# Raises if the expected image directory is absent.
		source.checkdir()

		target = package.basedir
		if target.exists() and not target.isdir():
			raise Exception("'%s' is not a directory" % (target, ))
		if not target.exists():
			# NOTE(review): mode 077 (Python 2 octal = 0o77) grants no
			# owner permissions -- looks like a typo for 0777; confirm.
			target.mkdir(077)

		target = package.installdir
		if target.exists():
			raise Exception("'%s' already exists" % (target, ))

		source.move(target)
コード例 #17
0
ファイル: monitor.py プロジェクト: antroy/Home
 def load(datafile):
     """Load a saved State from *datafile* (one jpeg name per line).

     Returns an empty State when the file does not exist.  Python 2
     only (``print`` statement, ``file()`` builtin).
     NOTE(review): takes no self -- presumably decorated as a
     @staticmethod by the surrounding class; confirm.
     """
     print "Loading from file %s" % datafile
     if not isinstance(datafile, Path):
         datafile = Path(datafile)
     
     out = set()
     
     if datafile.exists():
         fh = file(datafile)
         for line in fh:
             out.add(line.strip())
         fh.close()
                     
         return State(out)
     else:
         # NOTE(review): the '%s' below is never filled in (missing
         # '% datafile'), so the literal '%s' is printed.
         print "Path %s does not exist - creating empty state..."
         return State()
コード例 #18
0
 def compare_actual_folder_with_tree(self, root: path, tree: Tree):
     """Assert that the on-disk hierarchy under *root* mirrors *tree*.

     Verifies the tree's root exists below *root*, then recurses into
     every child subtree.
     """
     root_name = tree.root
     root_path = root.joinpath(root_name)
     print(root_path)
     self.assertTrue(root_path.exists(), "The path {} should exist, but doesn't".format(root_path))
     for child in tree.children(root_name):
         subtree = tree.subtree(child.identifier)
         self.compare_actual_folder_with_tree(root_path, subtree)
コード例 #19
0
def create_interpret_folder_if_necessary(interpret: str, target_music_folder: path, ask_before_copy: bool) -> path:
    """Return the folder for *interpret* inside *target_music_folder*,
    creating it first (after confirmation when *ask_before_copy*) if
    it does not exist yet."""
    target_interpret_folder = target_music_folder.joinpath(interpret)
    if target_interpret_folder.exists():
        print("The following Interpret folder will be used: {}".format(target_interpret_folder))
        return target_interpret_folder
    print("The following Interpret folder will be created: {}".format(target_interpret_folder))
    if not ask_before_copy or click.confirm('Are you okay with this?'):
        target_interpret_folder.mkdir()
    return target_interpret_folder
コード例 #20
0
ファイル: toolenv.py プロジェクト: cgdougm/PyQtMorph
class ToolEnv(object):
    """Application-wide environment: paths, platform name, icon lookup.

    Attributes:
        app        the QApplication instance (module-global fallback)
        current    directory the program was started in
        platform   one of "windows", "linux" or "mac"
        appPath    path to the application root
        iconPath   directory holding the icon PNGs
    Methods:
        getIcon()  return QIcon for the named icon
    """

    DATEFORMAT = "yyyyMMdd"

    def __init__(self, application=None):
        # Use the supplied QApplication, else the module-global 'app'.
        if application:
            self.app = application
        else:
            global app
            self.app = app

        self.current = Path().getcwd()  # directory where the program was started

        # Coarse platform detection from sys.platform.
        if sys.platform.lower().startswith("win"):
            self.platform = "windows"
        elif sys.platform.lower().startswith("dar"):
            self.platform = "mac"
        else:
            self.platform = "linux"

        # Application root: parent of the directory containing this
        # file; fall back to the cwd when that is not a directory.
        self.appPath = Path(__file__).dirname().abspath().dirname()
        if not self.appPath.isdir():
            self.appPath = Path().getcwd()
        self.iconPath = self.appPath / "images" / "icons"
        if not self.iconPath.isdir():
            raise Exception( "no icon directory '%s'" % self.iconPath)

    def getIcon(self, name):
        """Return a QIcon for *name*.png from the icon directory, or a
        blank icon flagged with isDummy=True when it is missing."""
        icon_file = self.iconPath / ("%s.png" % name)
        if icon_file.exists():
            icon = QIcon(str(icon_file))
            icon.isDummy = False
        else:
            icon = QIcon()
            icon.isDummy = True
        return icon
コード例 #21
0
ファイル: pickleshare.py プロジェクト: mickg10/DARLAB
class PickleShareDB(UserDict.DictMixin):
    """ The main 'connection' object for PickleShare database """
    def __init__(self,root):
        """ Return a db object that will manage the specied directory"""
        self.root = Path(root).expanduser().abspath()
        if not self.root.isdir():
            self.root.makedirs()
        # cache has { 'key' : (obj, orig_mod_time) }
        self.cache = {}

    def get_path (self,key):
        self.root / key + ".db"
    def __getitem__(self,key):
        """ db['key'] reading """
        fil = self.root / key 
        try:
            mtime = (fil.stat()[stat.ST_MTIME])
        except OSError:
            raise KeyError(key)

        if fil in self.cache and mtime == self.cache[fil][1]:
            return self.cache[fil][0]
        try:
            # The cached item has expired, need to read
            obj = pickle.load(fil.open())
        except:
            raise KeyError(key)
            
        self.cache[fil] = (obj,mtime)
        return obj
    
    def __setitem__(self,key,value):
        """ db['key'] = 5 """
        fil = self.root / key
        parent = fil.parent
        if parent and not parent.isdir():
            parent.makedirs()
        pickled = pickle.dump(value,fil.open('w'))
        try:
            self.cache[fil] = (value,fil.mtime)
        except OSError,e:
            if e.errno != 2:
                raise
コード例 #22
0
class PickleShareDB(UserDict.DictMixin):
    """ The main 'connection' object for PickleShare database.

    Each key is pickled to its own file under a root directory, with
    an mtime-validated in-memory cache.  Python 2 only
    (``except OSError, e`` syntax, UserDict.DictMixin).
    """
    def __init__(self, root):
        """ Return a db object that will manage the specied directory"""
        self.root = Path(root).expanduser().abspath()
        if not self.root.isdir():
            self.root.makedirs()
        # cache has { 'key' : (obj, orig_mod_time) }
        self.cache = {}

    def __getitem__(self, key):
        """ db['key'] reading """
        fil = self.root / key
        try:
            mtime = (fil.stat()[stat.ST_MTIME])
        except OSError:
            # Missing (or unreadable) file means the key does not exist.
            raise KeyError(key)

        # Serve from cache while the file's mtime is unchanged.
        if fil in self.cache and mtime == self.cache[fil][1]:
            return self.cache[fil][0]
        try:
            # The cached item has expired, need to read
            # NOTE(review): the bare except also maps unpickling /
            # corruption errors to KeyError.
            obj = pickle.load(fil.open())
        except:
            raise KeyError(key)

        self.cache[fil] = (obj, mtime)
        return obj

    def __setitem__(self, key, value):
        """ db['key'] = 5 """
        fil = self.root / key
        parent = fil.parent
        if parent and not parent.isdir():
            parent.makedirs()
        pickled = pickle.dump(value, fil.open('w'))
        try:
            self.cache[fil] = (value, fil.mtime)
        except OSError, e:
            # errno 2 (ENOENT): the file vanished before stat; anything
            # else is re-raised.
            if e.errno != 2:
                raise
コード例 #23
0
ファイル: monitor.py プロジェクト: antroy/Home
def message(changes, monitorConf):
    """Build a notification message listing, per gallery folder, how
    many photos were added.

    *changes* is an iterable of changed file paths; *monitorConf*
    supplies base_url and the overall message template.  Python 2
    only (``has_key``, ``iteritems``).
    """
    # Count changed files per parent folder.
    changed_folders = {}
    
    for change in changes:
        change = Path(change)
        parent, name = change.splitpath()
        if not changed_folders.has_key(parent):
            changed_folders[parent] = 1
        else:
            changed_folders[parent] = changed_folders[parent] + 1

    gallery_template = "%s (%d photo%s added)"
    
    def plural(v):
        # NOTE(review): 'v is 1' relies on CPython small-int identity
        # caching; '== 1' is the correct comparison.  Also note the
        # plural suffix produced is "'s" (with apostrophe).
        if v is 1: return ''
        else: return "'s"
    
    galleries = [gallery_template % (format_path(k, monitorConf.base_url), v, plural(v)) for k, v in changed_folders.iteritems()]
    
    return monitorConf.template % "\n".join(galleries)
コード例 #24
0
def integrate(source_download_folder: path, target_music_folder: path, ask_before_copy: bool):
    """Move the most recent download's album folder into the music library.

    Finds the newest folder under *source_download_folder*, parses its
    first child as '<interpret> - <album>', copies it under the
    matching interpret folder in *target_music_folder* (optionally
    asking first), removes the download folder, and returns the new
    album path (or None when the user declines the copy).

    Raises IntegrationError when the target library is missing, the
    latest download folder is empty, or the album folder already exists.
    """
    if not target_music_folder.exists():
        raise IntegrationError("The target folder '{}' doesn't exist.".format(target_music_folder))
    latest_folder = source_download_folder.joinpath(get_latest_folder(source_download_folder))
    print("Analyzing latest file {}".format(latest_folder))
    # IMPROVED: list the directory once instead of twice (the original
    # called listdir() for the emptiness check and again for [0]).
    children = latest_folder.listdir()
    if len(children) == 0:
        raise IntegrationError("The latest folder '{}' has no child folder.".format(latest_folder))
    downloads_album_folder = children[0]
    album = album_parser.parse(downloads_album_folder.basename(), "-")
    print("Parsed album folder name {}. Result: {}".format(downloads_album_folder.basename(), album))
    target_interpret_folder = create_interpret_folder_if_necessary(album.interpret, target_music_folder, ask_before_copy)

    wanted_target_album_folder = target_interpret_folder.joinpath(album.name)
    if wanted_target_album_folder.exists():
        raise IntegrationError("The target album folder {} already exists.".format(wanted_target_album_folder))
    print("I'm going to copy \n\t{} \n\tto \n\t{}".format(downloads_album_folder, wanted_target_album_folder))
    if (ask_before_copy and click.confirm('Are you okay with this?')) or not ask_before_copy:
        downloads_album_folder.copytree(wanted_target_album_folder)
        click.echo("Copied successfully")
        print("Cleanup: Removing old folder in downloads folder: {}".format(latest_folder))
        latest_folder.rmtree()
        return wanted_target_album_folder
コード例 #25
0
ファイル: toolenv.py プロジェクト: cgdougm/PyQtMorph
 def __init__(self,application=None):
     """Set up the tool environment: application handle, start
     directory, platform name, and application/icon paths.

     Raises Exception when the expected icon directory is missing.
     """
     # Use the supplied QApplication, else the module-global 'app'.
     if application:
         self.app = application
     else:
         global app
         self.app = app
     
     self.current = Path().getcwd() # directory where the program was started
     
     # Coarse platform detection from sys.platform.
     self.platform = "linux"
     if sys.platform.lower().startswith("win"):
         self.platform = "windows"
     elif sys.platform.lower().startswith("dar"):
         self.platform = "mac"
     
     
     #myIcons = dict( [(path.namebase,QIcon(path)) for path in Path(r'C:\Documents and Settings\doug\My Documents\images\icons').files('*.png')])
     # Application root is the parent of this file's directory; fall
     # back to the current working directory when that is not a dir.
     self.appPath = Path(__file__).dirname().abspath().dirname()
     if not self.appPath.isdir():
         self.appPath = Path().getcwd()
     self.iconPath = self.appPath / "images" / "icons"
     if not self.iconPath.isdir():
         raise Exception( "no icon directory '%s'" % self.iconPath)
コード例 #26
0
def create_dummy_download_folder(root: path, tree: Tree) -> path:
    """Materialise *tree* on disk below *root* for testing.

    Node names ending in '.mp3' become empty files, everything else
    becomes a directory; recurses into every child subtree and returns
    the path created for the tree's root node.
    """
    root_name = tree.root
    root_path = root.joinpath(root_name)

    if not root_path.exists():
        print("Creating {}".format(root_path))
        if root_name.endswith(".mp3"):
            root_path.touch()
        else:
            root_path.mkdir()
        # Brief sleep so consecutively created folders get distinct ctimes.
        time.sleep(0.01)

    for child in tree.children(root_name):
        create_dummy_download_folder(root_path, tree.subtree(child.identifier))
    return root_path
コード例 #27
0
ファイル: processor.py プロジェクト: dsaran/packagehelper
    def _process_other(self):
        """Collect XML and shell-script files into XML/ and SH/
        subfolders of the package directory and return the files that
        were handled."""
        xmls = self._get_files("*.xml")
        shellscripts = self._get_files("*.sh")
        basedir = self.package.full_path
        os.chdir(basedir)
        otherfiles = []

        def gather(entries, subdir):
            # Move each entry into basedir/<subdir>, creating it once.
            folder = basedir.joinpath(subdir)
            if not folder.exists():
                folder.mkdir()
            for entry in entries:
                entry_path = Path(entry.path)
                if not entry_path.dirname() == folder:
                    entry_path.move(folder)
                otherfiles.append(entry)

        if len(xmls) > 0:
            self.package.has_xml = True
            gather(xmls, "XML")

        if len(shellscripts) > 0:
            self.package.has_shellscript = True
            gather(shellscripts, "SH")

        log.info("Cleaning up empty directories...")
        self._clean_directory(basedir)

        return otherfiles
コード例 #28
0
ファイル: tc_domain.py プロジェクト: dsaran/packagehelper
    def setUp(self):
        """Build two mock script files, the InstallScript under test,
        the expected spooled output, and a scratch output directory."""
        self.base_path = Mock()
        self.base_path.abspath.return_value = '/tmp/'

        file1, file2 = Mock(), Mock()
        file1.getInitScript.return_value = ['file 1 init']
        file1.getScript.return_value = ['file 1 script']
        file1.getFinalScript.return_value = ['file 1 end']
        file2.getInitScript.return_value = ['file 2 init']
        file2.getScript.return_value = ['file 2 script']
        file2.getFinalScript.return_value = ['file 2 end']

        self.install_script = InstallScript('filename.sql', content=[file1, file2])

        # Spool header, each section grouped across files, then footer.
        self.expected_data = ['SPOOL filename.log',
                              'file 1 init', 'file 2 init',
                              'file 1 script', 'file 2 script',
                              'file 1 end', 'file 2 end',
                              'SPOOL OFF']

        self.test_dir = Path('TEST_OUTPUT')
        self.test_dir.mkdir()

        self.expected_script = self.test_dir / Path(self.install_script.name)
コード例 #29
0
class PickleShareDB(collections.MutableMapping):
    """ The main 'connection' object for PickleShare database.

    Every key is pickled to its own file under a root directory, with
    an mtime-validated in-memory cache on top.  The h* methods bucket
    many small values into shared files keyed by gethashfile().
    """
    # NOTE(review): collections.MutableMapping is the pre-3.3 location;
    # the alias was removed in Python 3.10 (collections.abc is the new
    # home) -- confirm the supported Python range.

    def __init__(self, root):
        """ Return a db object that will manage the specied directory"""
        self.root = Path(root).expanduser().abspath()
        if not self.root.isdir():
            self.root.makedirs_p()
        # cache has { 'key' : (obj, orig_mod_time) }
        self.cache = {}

    def __getitem__(self, key):
        """ db['key'] reading """
        fil = self.root / key
        try:
            mtime = fil.stat()[stat.ST_MTIME]
        except OSError:
            # Missing (or unreadable) file means the key does not exist.
            raise KeyError(key)

        # Serve from cache while the file's mtime is unchanged.
        if fil in self.cache and mtime == self.cache[fil][1]:
            return self.cache[fil][0]
        try:
            # The cached item has expired, need to read
            # NOTE(review): the bare except also maps unpickling /
            # corruption errors to KeyError.
            with fil.open("rb") as f:
                obj = pickle.loads(f.read())
        except:
            raise KeyError(key)

        self.cache[fil] = (obj, mtime)
        return obj

    def __setitem__(self, key, value):
        """ db['key'] = 5 """
        fil = self.root / key
        parent = fil.parent
        if parent and not parent.isdir():
            parent.makedirs()
        # We specify protocol 2, so that we can mostly go between Python 2
        # and Python 3. We can upgrade to protocol 3 when Python 2 is obsolete.
        with fil.open("wb") as f:
            pickle.dump(value, f, protocol=2)
        try:
            self.cache[fil] = (value, fil.mtime)
        except OSError as e:
            # The file may already have been removed by another process;
            # only a missing file (ENOENT) is tolerated here.
            if e.errno != errno.ENOENT:
                raise

    def hset(self, hashroot, key, value):
        """ hashed set """
        # Bucket the key into one shared file under hashroot and update it.
        hroot = self.root / hashroot
        if not hroot.isdir():
            hroot.makedirs()
        hfile = hroot / gethashfile(key)
        d = self.get(hfile, {})
        d.update({key: value})
        self[hfile] = d

    def hget(self, hashroot, key, default=_sentinel, fast_only=True):
        """ hashed get """
        hroot = self.root / hashroot
        hfile = hroot / gethashfile(key)

        d = self.get(hfile, _sentinel)
        # print "got dict",d,"from",hfile
        if d is _sentinel:
            if fast_only:
                if default is _sentinel:
                    raise KeyError(key)

                return default

            # slow mode ok, works even after hcompress()
            d = self.hdict(hashroot)

        return d.get(key, default)

    def hdict(self, hashroot):
        """ Get all data contained in hashed category 'hashroot' as dict """
        hfiles = self.keys(hashroot + "/*")
        hfiles.sort()
        # The compressed "xx" bucket (see hcompress) is applied first so
        # per-key bucket files can override its entries.
        last = len(hfiles) and hfiles[-1] or ""
        if last.endswith("xx"):
            # print "using xx"
            hfiles = [last] + hfiles[:-1]

        all = {}

        for f in hfiles:
            # print "using",f
            try:
                all.update(self[f])
            except KeyError:
                print("Corrupt", f, "deleted - hset is not threadsafe!")
                del self[f]

            self.uncache(f)

        return all

    def hcompress(self, hashroot):
        """ Compress category 'hashroot', so hset is fast again

        hget will fail if fast_only is True for compressed items (that were
        hset before hcompress).

        """
        hfiles = self.keys(hashroot + "/*")
        all = {}
        for f in hfiles:
            # print "using",f
            all.update(self[f])
            self.uncache(f)

        # Merge everything into the single "xx" bucket, then remove the
        # per-key bucket files.
        self[hashroot + "/xx"] = all
        for f in hfiles:
            p = self.root / f
            if p.basename() == "xx":
                continue
            p.remove()

    def __delitem__(self, key):
        """ del db["key"] """
        fil = self.root / key
        self.cache.pop(fil, None)
        try:
            fil.remove()
        except OSError:
            # notfound and permission denied are ok - we
            # lost, the other process wins the conflict
            pass

    def _normalized(self, p):
        """ Make a key suitable for user's eyes """
        # Keys are root-relative paths with forward slashes.
        return str(self.root.relpathto(p)).replace("\\", "/")

    def keys(self, globpat=None):
        """ All keys in DB, or all keys matching a glob"""

        if globpat is None:
            files = self.root.walkfiles()
        else:
            files = [Path(p) for p in glob.glob(self.root / globpat)]
        return [self._normalized(p) for p in files if p.isfile()]

    def __iter__(self):
        return iter(self.keys())

    def __len__(self):
        return len(self.keys())

    def uncache(self, *items):
        """ Removes all, or specified items from cache

        Use this after reading a large amount of large objects
        to free up memory, when you won't be needing the objects
        for a while.

        """
        if not items:
            self.cache = {}
        for it in items:
            self.cache.pop(it, None)

    def waitget(self, key, maxwaittime=60):
        """ Wait (poll) for a key to get a value

        Will wait for `maxwaittime` seconds before raising a KeyError.
        The call exits normally if the `key` field in db gets a value
        within the timeout period.

        Use this for synchronizing different processes or for ensuring
        that an unfortunately timed "db['key'] = newvalue" operation
        in another process (which causes all 'get' operation to cause a
        KeyError for the duration of pickling) won't screw up your program
        logic.
        """

        # Poll with gradual backoff: 0.2s x3, 0.5s x2, then 1s per try.
        wtimes = [0.2] * 3 + [0.5] * 2 + [1]
        tries = 0
        waited = 0
        while 1:
            try:
                val = self[key]
                return val
            except KeyError:
                pass

            if waited > maxwaittime:
                raise KeyError(key)

            time.sleep(wtimes[tries])
            waited += wtimes[tries]
            if tries < len(wtimes) - 1:
                tries += 1

    def getlink(self, folder):
        """ Get a convenient link for accessing items  """
        return PickleShareLink(self, folder)

    def __repr__(self):
        return "PickleShareDB('%s')" % self.root
コード例 #30
0
def clear(folder: path):
    """Reset *folder* to an empty directory.

    Any existing directory tree at that location is deleted first,
    then the directory is recreated empty.
    """
    needs_removal = folder.exists()
    if needs_removal:
        folder.rmtree()
    folder.makedirs()
コード例 #31
0
    then open up the default SQLiteManager with the default configuration.
    """
    cmdlib.unregisterServerCommand("convert_sqlite_to_mysql")
    sourcerpg.players.clearList()
    sourcerpg.DATABASE_STORAGE_METHOD = sourcerpg.SQLiteManager
    sourcerpg.database.save()
    sourcerpg.database.close()
    sourcerpg.database = sourcerpg.DATABASE_STORAGE_METHOD(
        sourcerpg.databasePath)
    sourcerpg.es_map_start({})
    for player in es.getUseridList():
        sourcerpg.players.addPlayer(player)
    es.server.queuecmd("mp_restartgame 1")


# Migrate the legacy MySQL config from the addon directory into
# cfg/sourcerpg/ so it lives with the other server configs.
oldCFGPath = Path(es.getAddonPath("sourcerpg")).joinpath(
    "addons", "mysql_connection", "config.cfg")
newCFGPath = Path(str(es.ServerVar("eventscripts_gamedir"))).joinpath(
    "cfg", "sourcerpg", "mysql_config.cfg")
if oldCFGPath.exists():
    oldCFGPath.copy(str(newCFGPath))
    oldCFGPath.remove()

# Declare the MySQL connection cvars in the generated config file.
config = cfglib.AddonCFG(str(newCFGPath))
config.text("MySQL Connection Version %s" % sourcerpg.info.version)
config.text("This is the configuration for your mysql server")
mysqlHostName = config.cvar("sourcerpg_mysql_host", "localhost",
                            "The IP / HostName of the MySQL server")
mysqlUser = config.cvar("sourcerpg_mysql_user", "Root",
                        "The username for the MySQL connection")
mysqlPassword = config.cvar(
    "sourcerpg_mysql_password", "Password",
    # Fixed typo in the user-facing description ("passworfd").
    "The password for the user of the MySQL connection")
コード例 #32
0
ファイル: tc_domain.py プロジェクト: dsaran/packagehelper
class InstallScriptTests(TestCase):
    """Tests for InstallScript: script data generation and filesystem writes."""

    def setUp(self):
        # Two fake content files; their fragments must appear in order
        # between the SPOOL header and the SPOOL OFF footer.
        self.base_path = Mock()
        file1 = Mock()
        file2 = Mock()
        self.base_path.abspath.return_value = '/tmp/'
        file1.getInitScript.return_value = ['file 1 init']
        file2.getInitScript.return_value = ['file 2 init']
        file1.getScript.return_value = ['file 1 script']
        file2.getScript.return_value = ['file 2 script']
        file1.getFinalScript.return_value = ['file 1 end']
        file2.getFinalScript.return_value = ['file 2 end']
        self.install_script = InstallScript('filename.sql', content=[file1, file2])

        self.expected_data = ['SPOOL filename.log',
                         'file 1 init', 'file 2 init',
                         'file 1 script', 'file 2 script',
                         'file 1 end', 'file 2 end',
                         'SPOOL OFF']

        # Real temporary directory for the tests that touch the filesystem.
        self.test_dir = Path('TEST_OUTPUT')
        self.test_dir.mkdir()

        self.expected_script = self.test_dir / Path(self.install_script.name)

    def tearDown(self):
        self.test_dir.rmtree()

    def testScriptCreation(self):
        """ Given a Script with files data should be generated correctly"""
        self.install_script._write_script = Mock()

        self.install_script.create(self.base_path)

        write_script_method = self.install_script._write_script

        write_script_method.assert_called_with(self.base_path, self.expected_data)

    def testWriteScript(self):
        """ When a script is written it should be created on filesystem correctly"""
        self.when_install_script_is_written()

        self.then_install_script_should_exists_on_filesystem()

        self.then_install_script_data_should_be_correct()

    def testExistingScriptMovedBeforeCreation(self):
        """ Given a script with same name exists it should be moved to scriptname.bak"""
        self.given_exists_file_with_same_name()

        self.when_install_script_is_written()

        self.then_existing_file_should_be_backed_up()

        self.then_install_script_data_should_be_correct()

    def testScriptNameDoesNotEndWithSQL(self):
        """ Given script name doesn't end with SQL spool should be to scriptname.log"""
        self.given_script_name_doesnt_end_with_sql()

        self.when_install_script_is_created()

        self.then_spool_should_be_to_scriptname_log()

    ############
    # Behavior #
    ############
    def given_script_name_doesnt_end_with_sql(self):
        self.install_script.name = 'filename'
        self.expected_script = self.test_dir / Path(self.install_script.name)

    def given_exists_file_with_same_name(self):
        self.existing_file = self.test_dir / self.install_script.name
        self.existing_file.touch()

        self.assertTrue(self.existing_file.exists())

    def when_install_script_is_written(self):
        self.install_script._write_script(self.test_dir, self.expected_data)

    def when_install_script_is_created(self):
        self.install_script._write_script = Mock()

        self.install_script.create(self.test_dir)

    def then_install_script_should_exists_on_filesystem(self):
        self.assertTrue(self.expected_script.exists(), "File not created")

    def then_install_script_data_should_be_correct(self):
        written_lines = self.expected_script.lines(retain=False)

        # assertEqual: assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(self.expected_data, written_lines, "Written data should match expected.")

    def then_existing_file_should_be_backed_up(self):
        moved_file = self.existing_file + '.bak'
        self.assertTrue(moved_file.exists(), "Backup file not created")

    def then_spool_should_be_to_scriptname_log(self):
        self.install_script._write_script.assert_called_with(self.test_dir, self.expected_data)
コード例 #33
0
ファイル: pickleshare.py プロジェクト: lpp1985/lpp_Script
class PickleShareDB(collections.MutableMapping):
    """ The main 'connection' object for PickleShare database

    Each value is stored as a pickle file under `root`; the key is the
    file's path relative to `root`.  Reads go through an in-process
    cache validated against the file's mtime.

    NOTE(review): `collections.MutableMapping` is the legacy alias of
    `collections.abc.MutableMapping`; the alias was removed in Python
    3.10, so this base-class reference needs updating once legacy
    interpreters are dropped.
    """
    def __init__(self, root):
        """ Return a db object that will manage the specified directory """
        self.root = Path(root).expanduser().abspath()
        if not self.root.isdir():
            self.root.makedirs_p()
        # cache has { 'key' : (obj, orig_mod_time) }
        self.cache = {}


    def __getitem__(self, key):
        """ db['key'] reading

        Raises KeyError when the backing file is missing or unreadable.
        """
        fil = self.root / key
        try:
            mtime = (fil.stat()[stat.ST_MTIME])
        except OSError:
            raise KeyError(key)

        if fil in self.cache and mtime == self.cache[fil][1]:
            # Cache hit: file unchanged since it was cached.
            return self.cache[fil][0]
        try:
            # The cached item has expired, need to read
            with fil.open("rb") as f:
                obj = pickle.loads(f.read())
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed; any IO/unpickling
            # failure is still reported as a missing key.
            raise KeyError(key)

        self.cache[fil] = (obj, mtime)
        return obj

    def __setitem__(self, key, value):
        """ db['key'] = 5 """
        fil = self.root / key
        parent = fil.parent
        if parent and not parent.isdir():
            parent.makedirs()
        # We specify protocol 2, so that we can mostly go between Python 2
        # and Python 3. We can upgrade to protocol 3 when Python 2 is obsolete.
        with fil.open('wb') as f:
            pickle.dump(value, f, protocol=2)
        try:
            self.cache[fil] = (value, fil.mtime)
        except OSError as e:
            # The file may have vanished before the mtime read; only
            # "no such file" is tolerated here.
            if e.errno != errno.ENOENT:
                raise

    def hset(self, hashroot, key, value):
        """ hashed set: store key/value in a bucket file under `hashroot` """
        hroot = self.root / hashroot
        if not hroot.isdir():
            hroot.makedirs()
        hfile = hroot / gethashfile(key)
        d = self.get(hfile, {})
        d.update({key: value})
        self[hfile] = d



    def hget(self, hashroot, key, default=_sentinel, fast_only=True):
        """ hashed get

        With `fast_only` True, only the bucket file for `key` is
        consulted; pass False to also search buckets merged by
        hcompress() (slower).
        """
        hroot = self.root / hashroot
        hfile = hroot / gethashfile(key)

        d = self.get(hfile, _sentinel)
        #print "got dict",d,"from",hfile
        if d is _sentinel:
            if fast_only:
                if default is _sentinel:
                    raise KeyError(key)

                return default

            # slow mode ok, works even after hcompress()
            d = self.hdict(hashroot)

        return d.get(key, default)

    def hdict(self, hashroot):
        """ Get all data contained in hashed category 'hashroot' as dict """
        hfiles = self.keys(hashroot + "/*")
        hfiles.sort()
        last = len(hfiles) and hfiles[-1] or ''
        if last.endswith('xx'):
            # The 'xx' bucket written by hcompress() must be read first
            # so later buckets override its (possibly stale) entries.
            hfiles = [last] + hfiles[:-1]

        all = {}

        for f in hfiles:
            # print "using",f
            try:
                all.update(self[f])
            except KeyError:
                print("Corrupt", f, "deleted - hset is not threadsafe!")
                del self[f]

            self.uncache(f)

        return all

    def hcompress(self, hashroot):
        """ Compress category 'hashroot', so hset is fast again

        hget will fail if fast_only is True for compressed items (that were
        hset before hcompress).

        """
        hfiles = self.keys(hashroot + "/*")
        all = {}
        for f in hfiles:
            # Merge every bucket into a single dict...
            all.update(self[f])
            self.uncache(f)

        # ...stored under the reserved 'xx' bucket name.
        self[hashroot + '/xx'] = all
        for f in hfiles:
            p = self.root / f
            if p.basename() == 'xx':
                continue
            p.remove()



    def __delitem__(self, key):
        """ del db["key"] """
        fil = self.root / key
        self.cache.pop(fil, None)
        try:
            fil.remove()
        except OSError:
            # notfound and permission denied are ok - we
            # lost, the other process wins the conflict
            pass

    def _normalized(self, p):
        """ Make a key suitable for user's eyes """
        return str(self.root.relpathto(p)).replace('\\', '/')

    def keys(self, globpat=None):
        """ All keys in DB, or all keys matching a glob"""

        if globpat is None:
            files = self.root.walkfiles()
        else:
            files = [Path(p) for p in glob.glob(self.root / globpat)]
        return [self._normalized(p) for p in files if p.isfile()]

    def __iter__(self):
        return iter(self.keys())

    def __len__(self):
        return len(self.keys())

    def uncache(self, *items):
        """ Removes all, or specified items from cache

        Use this after reading a large amount of large objects
        to free up memory, when you won't be needing the objects
        for a while.

        """
        if not items:
            self.cache = {}
        for it in items:
            self.cache.pop(it, None)

    def waitget(self, key, maxwaittime=60):
        """ Wait (poll) for a key to get a value

        Will wait for `maxwaittime` seconds before raising a KeyError.
        The call exits normally if the `key` field in db gets a value
        within the timeout period.

        Use this for synchronizing different processes or for ensuring
        that an unfortunately timed "db['key'] = newvalue" operation
        in another process (which causes all 'get' operation to cause a
        KeyError for the duration of pickling) won't screw up your program
        logic.
        """

        wtimes = [0.2] * 3 + [0.5] * 2 + [1]
        tries = 0
        waited = 0
        while 1:
            try:
                val = self[key]
                return val
            except KeyError:
                pass

            if waited > maxwaittime:
                raise KeyError(key)

            time.sleep(wtimes[tries])
            waited += wtimes[tries]
            # Back off through the schedule, then stay at the longest delay.
            if tries < len(wtimes) - 1:
                tries += 1

    def getlink(self, folder):
        """ Get a convenient link for accessing items  """
        return PickleShareLink(self, folder)

    def __repr__(self):
        return "PickleShareDB('%s')" % self.root
コード例 #34
0
    This executes when this addon is unloaded. We want to save this database,
    then open up the default SQLiteManager with the default configuration.
    """
    cmdlib.unregisterServerCommand("convert_sqlite_to_mysql")
    sourcerpg.players.clearList()
    sourcerpg.DATABASE_STORAGE_METHOD = sourcerpg.SQLiteManager
    sourcerpg.database.save()
    sourcerpg.database.close()
    sourcerpg.database = sourcerpg.DATABASE_STORAGE_METHOD(sourcerpg.databasePath)
    sourcerpg.es_map_start({})
    for player in es.getUseridList():
        sourcerpg.players.addPlayer(player)
    es.server.queuecmd("mp_restartgame 1")


# Migrate the legacy MySQL config from the addon directory into
# cfg/sourcerpg/ so it lives with the other server configs.
oldCFGPath = Path(es.getAddonPath("sourcerpg")).joinpath("addons", "mysql_connection", "config.cfg")
newCFGPath = Path(str(es.ServerVar("eventscripts_gamedir"))).joinpath("cfg", "sourcerpg", "mysql_config.cfg")
if oldCFGPath.exists():
    oldCFGPath.copy(str(newCFGPath))
    oldCFGPath.remove()

# Declare the MySQL connection cvars in the generated config file.
config = cfglib.AddonCFG(str(newCFGPath))
config.text("MySQL Connection Version %s" % sourcerpg.info.version)
config.text("This is the configuration for your mysql server")
mysqlHostName = config.cvar("sourcerpg_mysql_host", "localhost", "The IP / HostName of the MySQL server")
mysqlUser = config.cvar("sourcerpg_mysql_user", "Root", "The username for the MySQL connection")
# Fixed typo in the user-facing description ("passworfd").
mysqlPassword = config.cvar("sourcerpg_mysql_password", "Password", "The password for the user of the MySQL connection")
mysqlDatabase = config.cvar("sourcerpg_mysql_database", "sourcerpg_players", "The database used to store the information, leave as default unless you know what you're doing\n// Note: This will create the database if it doesn't already exist and you have the permissions")
mysqlUnixSocket = config.cvar("sourcerpg_mysql_unix_socket", "", """If you are connecting via an unix socket, please input the path to the mysql.sock.
// NOTE: This is a VERY advanced technique and should be left alone by almost everyone.
// This will only work on linux systems on a localhost, so remote servers won't work
// with this. Leave blank for no socket (default)""")
コード例 #35
0
#
# carefree-objects is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with carefree-objects.  If not, see <http://www.gnu.org/licenses/>.

"""libcarefree_objects

.. moduleauthor:: Stefan Zimmermann <*****@*****.**>
"""
# Public names exported by this package.
__all__ = 'PREFIX', 'INCLUDE_PATH', 'LIB_PATH',

from path import path as Path

from .libcfo import __version__, __requires__


# Determine the location prefix of libcarefree_object's data_files
# Default prefix: the package directory itself.
PREFIX = Path(__path__[0])
# NOTE(review): `with PREFIX:` relies on path.py's path being a context
# manager — presumably it chdirs into the directory for the duration of
# the block, so the relative 'PREFIX' lookups below resolve inside the
# package dir — TODO confirm against the installed path.py version.
with PREFIX:
    if Path('PREFIX').exists():
        # An installed 'PREFIX' marker file overrides the default: its
        # contents name the actual installation prefix.
        with Path('PREFIX').open() as f:
            PREFIX = Path(f.read().strip())

# Locations of the shipped C/C++ headers and libraries under PREFIX.
INCLUDE_PATH = PREFIX.joinpath('include')

LIB_PATH = PREFIX.joinpath('lib')