def daily_scan(self, cache_size=20, max_mirror_ping_avg=1.0):
        """Pings the cache file and handles post-ping operations.

        This function loads the cached mirrors from the cache file
        and do the following operations:

                Calls run_daily() function to ping and handle post-ping operations.

                Saves all the mirrors to file.

        In case there is no cache file or the cache file contains less mirrors than
        the half of the provided cache size, it calls a full run again.

        :param parser: the parser that will be used. (Parser-like class)
        :param cache_size: the size of the mirrors to be saved in cache. (int)
        """

        logger.debug('daily scan')

        if not path.exists(CACHED_MIRRORS_FILE_NAME) or self._file_len(
                CACHED_MIRRORS_FILE_NAME) < int(cache_size) / 2:
            logger.debug(
                'No cached mirrors file was found, or the number of cached'
                ' mirror entries was less than half of the given cache_size')
            self.full_scan()
        else:
            cache = CacheManager(fastest_mirrors=FastestMirrors(),
                                 cache_size=cache_size)
            cache.load(max_mirror_ping_time=max_mirror_ping_avg)
            self._run_daily(cache.cache_mirrors.keys(), cache_size=cache_size)
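For reference, here is a minimal standalone sketch of the fallback decision described in the docstring above: run a full scan when the cache file is missing or too small, otherwise reuse the cache. It assumes a plain line-per-mirror cache file; needs_full_scan, _count_lines and the 'mirrors.cache' path are illustrative names, not part of the original module.

from pathlib import Path

def _count_lines(path):
    # Stand-in for the _file_len() helper used above: count lines in a text file.
    with open(path) as fh:
        return sum(1 for _ in fh)

def needs_full_scan(cache_file, cache_size=20):
    # True when the cache file is missing or holds fewer entries than half
    # of the requested cache size -- the same condition daily_scan() checks.
    if not Path(cache_file).exists():
        return True
    return _count_lines(cache_file) < int(cache_size) / 2

# Decide which scan to run without touching the network.
if needs_full_scan('mirrors.cache', cache_size=20):
    print('full scan required')
else:
    print('daily scan is enough')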
Example #2
    def __init__(self, cache_manager: cache.CacheManager,
                 discord_manager: discord.DiscordManager):
        super().__init__()

        self.discord = discord_manager

        auth = tweepy.OAuthHandler(os.environ['TWITTER_API_KEY'],
                                   os.environ['TWITTER_API_SECRET'])
        auth.set_access_token(os.environ['TWITTER_USER_KEY'],
                              os.environ['TWITTER_USER_SECRET'])

        api = tweepy.API(auth)

        twitter_cache = cache_manager.get_data('twitter')

        twitter_accounts = os.environ['TWITTER_ACCOUNTS'].split(',')

        follow = []

        for screen_name in twitter_accounts:
            if screen_name in twitter_cache:
                follow.append(twitter_cache[screen_name])
            else:
                user_id = str(api.get_user(screen_name).id)
                follow.append(user_id)
                twitter_cache[screen_name] = user_id

        cache_manager.set_data('twitter', twitter_cache)

        self.myStream = tweepy.Stream(auth=auth, listener=self)
        self.myStream.filter(follow=follow, is_async=True)
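The listener above only calls the Twitter API for screen names missing from the cache, then writes the resolved IDs back. Below is a minimal sketch of a cache manager with the same get_data()/set_data() shape, assuming a simple JSON-file backend; JSONCacheManager and its file path are illustrative stand-ins, not the project's actual class.

import json
from pathlib import Path

class JSONCacheManager:
    # Assumed interface only: a file-backed cache exposing the same
    # get_data()/set_data() calls the listener above relies on.

    def __init__(self, path='twitter_cache.json'):
        self._path = Path(path)
        self._data = json.loads(self._path.read_text()) if self._path.exists() else {}

    def get_data(self, section):
        # Missing sections start out as empty dicts.
        return self._data.setdefault(section, {})

    def set_data(self, section, value):
        self._data[section] = value
        self._path.write_text(json.dumps(self._data))

# Resolve screen names to IDs once; later runs are served from the cache file.
cache_manager = JSONCacheManager()
twitter_cache = cache_manager.get_data('twitter')
for screen_name in ('alice', 'bob'):
    if screen_name not in twitter_cache:
        twitter_cache[screen_name] = 'looked-up-id'  # api.get_user(...) in the real code
cache_manager.set_data('twitter', twitter_cache)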
Example #3
 def __init__(self):
     from db import ConnectionManager
     self.connection_manager = ConnectionManager(XConfig.get('db'))
     self.cache_manager = CacheManager(XConfig.get('cache'))
     self.entity_list = {}
     self.use_cache = False
     self.use_preload = True
     self.use_validator = False
     self.bad_entitys = []
Example #4
    def __init__(self):
        assert not Project._singleton
        Project._singleton = self

        self.path = None
        self.cacheManager = CacheManager()
        self.assetLibrary = AssetLibrary()

        self.info = {'name': 'Name', 'author': 'author', 'version': '1.0.0'}

        self.config = {}
Example #5
    def __init__(self):
        assert not Project._singleton
        Project._singleton = self

        self.cacheManager = CacheManager()
        self.assetLibrary = AssetLibrary()

        self.path = None
        self.editorPath = None
        self.editorLuaPath = None
        self.gamePath = None

        self.info = None
        self.config = None
Example #6
File: asset.py Project: tommo/gii
	def getCacheFile( self, name, **option ):
		cacheFile = self.cacheFiles.get( name, None )
		if cacheFile: return cacheFile
		if option.get( 'affirm', True ):
			cacheFile = CacheManager.get().getCacheFile( self.getPath(), name, **option )
			self.cacheFiles[ name ] = cacheFile
			return cacheFile
		else:
			return None
Example #8
 def getCacheFile(self, name, **option):
     cacheFile = self.cacheFiles.get(name, None)
     if cacheFile: return cacheFile
     if option.get('affirm', True):
         cacheFile = CacheManager.get().getCacheFile(
             self.getPath(), name, **option)
         self.cacheFiles[name] = cacheFile
         return cacheFile
     else:
         return None
    def _run_daily(self, list_of_mirrors=None, cache_size=20):
        """Pings the provided mirrors and handles post-ping operations.

        This function pings the provided mirrors and
        handles the following post-ping operations:
            Saves to cache: the cache will be used on daily scans.

            Adds to blacklist: mirrors that will be added to blacklist
            will not be pinged on daily scans.

            Switches to fastest mirror: sets the fastest mirror as the
            default upstream mirror.

        :param list_of_mirrors: the mirrors that should be pinged. (list)
        :param cache_size: the size of the mirrors to be saved in cache. (int)
        :returns sorted_mirrors: the result of the ping operation. (dict)
        """

        if not list_of_mirrors:
            list_of_mirrors = self._list_of_mirrors

        pinger = FastestMirrors()
        pinger.sort_mirrors_by_ping_avg(mirrors=list_of_mirrors)
        sorted_mirrors = pinger.sorted_mirrors

        cache = CacheManager(fastest_mirrors=pinger, cache_size=cache_size)
        cache.set_cached_mirrors_from_list()
        cache.save()

        self._parser.switch_to_fastest_mirror(
            mirror=next(iter(cache.cache_mirrors.keys())))
        return sorted_mirrors
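The "switch to fastest mirror" step above amounts to sorting mirrors by their average ping and taking the first key, as next(iter(cache.cache_mirrors.keys())) does. Here is a small self-contained sketch of that ordering with made-up mirror URLs and timings; pick_fastest is an illustrative helper, not part of the module.

def pick_fastest(ping_averages):
    # Sort mirrors by their average ping and return the fastest one plus the
    # full ordering -- a stand-in for sort_mirrors_by_ping_avg() followed by
    # next(iter(cache.cache_mirrors.keys())) above.
    ordered = dict(sorted(ping_averages.items(), key=lambda kv: kv[1]))
    return next(iter(ordered)), ordered

fastest, ordered = pick_fastest({
    'http://mirror-a.example': 0.41,
    'http://mirror-b.example': 0.12,
    'http://mirror-c.example': 0.87,
})
print(fastest)        # http://mirror-b.example
print(list(ordered))  # fastest to slowest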
Example #10
File: asset.py Project: tommo/gii
	def saveAssetTable( self, **option ):
		outputPath = option.get( 'path', self.assetIndexPath )		
		deployContext  = option.get( 'deploy_context',  False )
		mapping = deployContext and deployContext.fileMapping
		table={}
		for path, node in self.assetTable.items():
			item={}
			table[ path ]=item
			#common
			item['type']        = node.getType()
			item['groupType']   = node.getGroupType()
			item['filePath']    = node.getFilePath() or False
			item['tags']        = node.getTags()
			#object files
			if mapping:
				mapped = {}
				for key, path in node.objectFiles.items():
					mapped[ key ] = mapping.get( path, path )
				item['objectFiles'] = mapped
			else:
				item['objectFiles'] = node.objectFiles

			item['dependency']  = node.dependency
			item['fileTime']    = node.getFileTime()
			#non deploy information
			if not deployContext:
				item['manager']     = node.managerName
				item['deploy']      = node.deployState
				item['cacheFiles']  = node.cacheFiles
				item['properties']  = node.properties
				item['modifyState'] = node.modifyState
				#mark cache files
				for name, cacheFile in node.cacheFiles.items():
					CacheManager.get().touchCacheFile( cacheFile )
				node.saveMetaDataTable()	

		if not jsonHelper.trySaveJSON( table, outputPath, 'asset index' ):
			return False
		logging.info( 'asset table saved' )
		return True	
Example #11
    def saveAssetTable(self, **option):
        outputPath = option.get('path', self.assetIndexPath)
        deployContext = option.get('deploy_context', False)
        mapping = deployContext and deployContext.fileMapping
        table = {}
        for path, node in self.assetTable.items():
            item = {}
            table[path] = item
            #common
            item['type'] = node.getType()
            item['groupType'] = node.getGroupType()
            item['filePath'] = node.getFilePath() or False
            item['tags'] = node.getTags()
            #object files
            if mapping:
                mapped = {}
                for key, path in node.objectFiles.items():
                    mapped[key] = mapping.get(path, path)
                item['objectFiles'] = mapped
            else:
                item['objectFiles'] = node.objectFiles

            item['dependency'] = node.dependency
            item['fileTime'] = node.getFileTime()
            #non deploy information
            if not deployContext:
                item['manager'] = node.managerName
                item['deploy'] = node.deployState
                item['cacheFiles'] = node.cacheFiles
                item['properties'] = node.properties
                item['modifyState'] = node.modifyState
                #mark cache files
                for name, cacheFile in node.cacheFiles.items():
                    CacheManager.get().touchCacheFile(cacheFile)
                node.saveMetaDataTable()

        if not jsonHelper.trySaveJSON(table, outputPath, 'asset index'):
            return False
        logging.info('asset table saved')
        return True
Example #12
	def __init__(self):
		assert not Project._singleton
		Project._singleton = self

		self.cacheManager = CacheManager() 
		self.assetLibrary = AssetLibrary()

		self.path      	= None
		self.editorPath = None
		self.editorLuaPath = None
		self.gamePath 	= None

		self.info 		= None
		self.config 	= None
Example #13
	def __init__(self):
		assert not Project._singleton
		Project._singleton = self

		self.path      = None
		self.cacheManager = CacheManager() 
		self.assetLibrary = AssetLibrary()

		self.info = {
			'name'    : 'Name',
			'author'  : 'author',
			'version' : '1.0.0'
		}

		self.config = {}
Example #14
 def __init__(self,
              port,
              pluginUUID,
              registerEvent,
              info,
              loop,
              process_manager,
              os_manager,
              cache_manager=CacheManager()):
     self.port = port
     self.pluginUUID = pluginUUID
     self.registerEvent = registerEvent
     self.info = info
     self.loop = loop
     self.process_manager = process_manager
     self.cache_manager = cache_manager
     self.os_manager = os_manager
Example #15
File: asset.py Project: tommo/gii
	def scanProject(self): #scan 
		self.projectScanScheduled = False
		logging.info('scan project in:' + self.rootAbsPath )
		#scan meta files first ( will be used in asset importing )
		#TODO
		#check missing asset
		for assetPath, node in self.assetTable.copy().items():
			if not self.assetTable.has_key( assetPath ): #already removed(as child of removed node)
				continue
			#check parentnode
			if not node.getParent():
				self.unregisterAssetNode( node )
				continue

			if node.isVirtual(): #don't check virtual node's file
				continue

			filePath = node.getAbsFilePath()
			#file deleted
			if not os.path.exists( filePath ):
				node.modifyState = 'removed'
				self.unregisterAssetNode( node )
				continue
			#file become ignored
			if self.checkFileIgnorable( filePath ):
				node.modifyState = 'ignored'
				self.unregisterAssetNode( node )
				continue

		#check new asset
		for currentDir, dirs, files in os.walk( unicode(self.rootAbsPath) ):
			relDir = os.path.relpath( currentDir, self.rootAbsPath )

			for filename in files:
				if self.checkFileIgnorable(filename):
					continue
				
				nodePath = self.fixPath( relDir + '/' + filename )
				absPath  = self.getAbsPath( nodePath )
				mtime    = os.path.getmtime( absPath )
				bundle   = self._getParentBundle( nodePath )

				if bundle:
					if mtime > bundle.getFileTime():
						bundle.markModified()
					if not bundle.checkObjectFiles():
						bundle.markModified()
				else:
					if not self.getAssetNode( nodePath ): #new
						self.initAssetNode( nodePath )
					else:
						node = self.getAssetNode( nodePath ) #modified
						if mtime > node.getFileTime():
							node.markModified()
						if not node.checkObjectFiles():
							node.markModified()

			dirs2 = dirs[:]
			for dirname in dirs2:
				if self.checkFileIgnorable(dirname):
					dirs.pop(dirs.index(dirname)) #skip walk this
					continue
				nodePath = self.fixPath( relDir + '/' + dirname )
				if not self.getAssetNode( nodePath ):
					self.initAssetNode( nodePath )	

		self.importModifiedAssets()
		self.saveAssetTable()
		CacheManager.get().save()
Example #16
class UnitOfWork(object):
    '''
    Unit of work.

    @note: Due to the thread-safety limitations of database connections, the
    unit of work only provides thread-local instances, not a process-level one.
    '''
    def __init__(self):
        from db import ConnectionManager
        self.connection_manager = ConnectionManager(XConfig.get('db'))
        self.cache_manager = CacheManager(XConfig.get('cache'))
        self.entity_list = {}
        self.use_cache = False
        self.use_preload = True
        self.use_validator = False
        self.bad_entitys = []

    def idgenerator(self):

        if not hasattr(self, '_idgenerator') or not self._idgenerator:
            connection = self.connection_manager.get(
                XConfig.get('idgenerator.db'))
            self._idgenerator = IdGenerator(
                connection,
                XConfig.get('idgenerator.count') or 5)

        return self._idgenerator

    def register(self, entity):
        '''Register an entity with the unit of work.
        '''

        cls_name = entity.__class__.__name__

        if self.entity_list.get(cls_name) is None:
            self.entity_list[cls_name] = {}

        self.entity_list[cls_name][str(entity.getId())] = entity
        entity._unitofwork = self

    def commit(self):
        '''
        '''

        deletes = []
        updates = []
        news = []
        db_names = set()

        for entity_class_name in self.entity_list.keys():
            entity_dict = self.entity_list.get(entity_class_name)
            for entity_id in entity_dict.keys():
                entity = entity_dict.get(entity_id)
                if entity.isDelete():
                    entity.onDelete()
                elif entity.isNew():
                    entity.onNew()
                elif entity.isDirty():
                    entity.onUpdate()
                else:
                    continue

        self.bad_entitys = []
        for entity_class_name in self.entity_list.keys():
            entity_dict = self.entity_list.get(entity_class_name)
            for entity_id in entity_dict.keys():
                entity = entity_dict.get(entity_id)
                if entity.isDelete():
                    deletes.append(entity)
                elif entity.isNew():
                    news.append(entity)
                elif entity.isDirty():
                    updates.append(entity)
                else:
                    continue

                if entity.isLoadedFromCache():
                    raise ModifyBasedCacheError(
                        "%s(%s) is loaded from cache, so can't be modified!!" %
                        (entity.__class__.__name__, entity.id))

                if self.use_validator and not entity.doValidate():
                    self.bad_entitys.append(entity)

                db_names.add(entity._db)

        if self.use_validator and self.bad_entitys:
            return False

        for name in db_names:
            connection = self.connection_manager.get(name)
            if connection and name == connection.name:
                connection.begin()

        try:
            for entitys in [deletes, updates, news]:
                for entity in entitys:
                    self.sync(entity)

            for name in db_names:
                connection = self.connection_manager.get(name)
                if name == connection.name:
                    connection.commit()

            for entity in deletes:
                try:
                    cache = self.cache_manager.get(entity._cache)
                    if not cache:
                        continue

                    cache_key = self.makeKey(entity.__class__, entity.id)
                    cache.delete(cache_key)
                except:
                    logging.exception("delete cache fail")

            for entitys in [updates, news]:
                for entity in entitys:
                    try:
                        cache = self.cache_manager.get(entity._cache)
                        if not cache:
                            continue
                        cache_key = self.makeKey(entity.__class__, entity.id)
                        cache.set(cache_key, entity.getCacheDict())
                    except:
                        logging.exception("set cache fail")

            return True
        except:
            logging.exception("[XWEB] COMMIT FAILED, ROLLBACK")
            for name in db_names:
                connection = self.connection_manager.get(name)
                if name == connection.name:
                    connection.rollback()
            return False
        finally:
            self.entity_list.clear()

    def getEntityInMemory(self, cls, entity_id):
        cls_name = cls.__name__
        if self.entity_list.get(cls_name) is None:
            return None

        return self.entity_list.get(cls_name).get(str(entity_id))

    def getList(self, cls, entity_ids, **kwargs):

        db_conn = cls.dbName(**kwargs)
        connection = self.connection_manager.get(db_conn)
        query_db_ids = []
        query_cache_ids = []
        for entity_id in entity_ids:
            entity = self.getEntityInMemory(cls, entity_id)
            if not entity:
                query_cache_ids.append(entity_id)

        if query_cache_ids:
            if self.use_cache:

                keys = [
                    self.makeKey(cls, entity_id)
                    for entity_id in query_cache_ids
                ]
                cache_name = cls._cache_name
                cache = self.cache_manager.get(cache_name)
                if not cache:
                    raise ValueError(
                        'CACHE DOES NOT EXSITS WHEN USE_CACHE IS TRUE')

                entitys = cache.getList(keys)
                for entity_id, key in zip(query_cache_ids, keys):
                    cache_dict = entitys.get(key)
                    if cache_dict:
                        entity = cls(**cache_dict)
                        entity._is_new = False
                        entity._is_delete = False
                        entity._is_dirty = False
                        entity._load_from_cache = True
                        entity._db = 'default'
                        entity._cache = cache_name
                        self.register(entity)
                        logging.debug("[XWEB] LOAD ENTITY %s FROM CACHE: %s" %
                                      (entity, cache_name))
                    else:
                        query_db_ids.append(entity_id)
            else:
                query_db_ids = query_cache_ids

            entitys = connection.getEntityList(cls, query_db_ids)

            if not entitys:
                return []

            first_entity = entitys[0]
            first_entity.setProps('entity_ids_in_query', entity_ids)
            for entity in entitys:
                self.register(entity)
                entity.setProps('first_entity_in_query', first_entity.id)
                logging.debug("[XWEB] LOAD ENTITY %s FROM DB: %s" %
                              (entity, db_conn))

        return [
            self.getEntityInMemory(cls, entity_id) for entity_id in entity_ids
            if self.getEntityInMemory(cls, entity_id)
        ]

    def getListByCond(self, criteria, **kwargs):

        if not isinstance(criteria, QueryCriteria):
            return []

        cls = criteria.entity_cls

        db_conn = cls.dbName(**kwargs)
        connection = self.connection_manager.get(db_conn)
        entity_ids = connection.fetchEntityIds(criteria)

        return self.getList(cls, entity_ids, **kwargs)

    def fetchRowsByCond(self, cr, **kwargs):

        cls = cr.entity_cls
        db_conn = cls.dbName(**kwargs)
        connection = self.connection_manager.get(db_conn)
        return connection.fetchRowsByCond(cr)

    def fetchRowByCond(self, cr, **kwargs):

        cls = cr.entity_cls
        db_conn = cls.dbName(**kwargs)
        connection = self.connection_manager.get(db_conn)
        return connection.fetchRowByCond(cr)

    def getListByCond2(self, cls, condition=None, args=[], **kwargs):

        db_conn = cls.dbName(**kwargs)
        connection = self.connection_manager.get(db_conn)
        rows = connection.queryRowsByCond(cls, condition, args)

        results = []

        for row in rows:

            data = {}

            for k, v in zip(cls.allKeys(), row):
                data[k] = v

            entity_id = tuple([data.get(k) for k in cls.primaryKey()])

            entity = self.getEntityInMemory(cls, entity_id)

            if not entity:
                entity = connection.createEntity(cls, row)
                self.register(entity)
                key = self.makeKey(cls, entity_id)
                cache_name = cls._cache_name
                cache = self.cache_manager.get(cache_name)
                if cache:
                    cache.set(key, entity.getCacheDict())

            results.append(entity)

        return results

    def get(self, cls, entity_id, **kwargs):  #@ReservedAssignment

        entity = self.getEntityInMemory(cls, entity_id)
        if entity:
            return entity

        key = self.makeKey(cls, entity_id)
        cache_name = cls._cache_name
        cache = self.cache_manager.get(cache_name)
        if self.use_cache and cache:

            cache_dict = cache.get(key)
            if cache_dict:
                entity = cls(**cache_dict)
                entity._is_new = False
                entity._is_delete = False
                entity._is_dirty = False
                entity._load_from_cache = True
                entity._db = 'default'
                entity._cache = cache_name
                self.register(entity)
                logging.debug("[XWEB] LOAD ENTITY %s FROM CACHE: %s" %
                              (entity, cache_name))
                return entity

        db_conn = cls.dbName(entity_id=entity_id, **kwargs)
        connection = self.connection_manager.get(db_conn)
        entity = connection.getEntity(cls, entity_id)

        if entity is None:
            return None

        self.register(entity)

        if cache:
            cache.set(key, entity.getCacheDict())
            logging.debug("[XWEB] LOAD ENTITY %s FROM DB: %s" %
                          (entity, db_conn))

        return entity

    def sync(self, entity):
        connection = self.connection_manager.get(entity._db)

        if entity.isNew():
            if connection.insert(entity):
                entity._is_dirty = False
                entity._is_new = False
                entity.is_delete = False
                return True
        elif entity.isDelete():
            if connection.delete(entity):
                entity._is_dirty = False
                entity._is_new = False
                entity.is_delete = True
                return True
        elif entity.isDirty():
            if connection.update(entity):
                entity._is_dirty = False
                entity._is_new = False
                entity.is_delete = False
                return True
        else:
            raise EntityStatusError()

        return False

    def makeKey(self, cls, entity_id):
        return "%s:%s:%s:%s" % (XConfig.get('app_name'), cls.__name__,
                                entity_id, cls._version)

    def close(self):
        self.connection_manager.close()

    # static method
    @classmethod
    def inst(cls):
        thread = threading.currentThread()

        if not hasattr(thread, 'unitofwork') or not thread.unitofwork:
            thread.unitofwork = UnitOfWork()

        return thread.unitofwork

    @classmethod
    def Commit(cls):
        cls.inst().commit()

    @classmethod
    def Close(cls):
        cls.inst().close()

    @classmethod
    def Reset(cls, force=False):
        thread = threading.currentThread()

        if hasattr(thread, 'unitofwork') and thread.unitofwork:
            if not force:
                unitofwork = thread.unitofwork
                unitofwork.entity_list = {}
                unitofwork.bad_entitys = {}
                unitofwork.use_cache = True
                unitofwork.use_preload = False
            else:
                del thread.unitofwork

    @classmethod
    def reset(cls, force=False):
        cls.Reset(force)
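As the class docstring notes, UnitOfWork.inst() hands out one instance per thread instead of a process-wide singleton. Below is a minimal, self-contained sketch of that thread-local accessor pattern; ThreadLocalRegistry is an illustrative stand-in, not the project's API.

import threading

class ThreadLocalRegistry:
    # Illustrative stand-in for UnitOfWork.inst(): at most one instance is
    # created per thread and reused on later calls from that thread.

    @classmethod
    def inst(cls):
        thread = threading.current_thread()
        if getattr(thread, '_registry', None) is None:
            thread._registry = cls()
        return thread._registry

def worker(results, idx):
    results[idx] = id(ThreadLocalRegistry.inst())  # one instance per thread

results = {}
threads = [threading.Thread(target=worker, args=(results, i)) for i in range(2)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(len(set(results.values())))  # 2 -- each thread got its own instance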
Example #17
class Project(object):
	_singleton=None
	@staticmethod
	def get():		
		return Project._singleton

	@staticmethod
	def findProject( path = None ):
		#TODO: return project info dict instead of path?
		path = os.path.abspath( path or '' )
		opath = None
		while path and not ( path in ( '', '/','\\' ) ):
			if   os.path.exists( path + '/' + _GII_ENV_CONFIG_DIR ) \
			and  os.path.exists( path + '/' + _GII_INFO_FILE ) :
				#get info
				info = jsonHelper.tryLoadJSON( path + '/' + _GII_INFO_FILE )
				info['path'] = path
				return info
			#go up level
			opath = path
			path = os.path.dirname( path )
			if path == opath: break
		return None

	def __init__(self):
		assert not Project._singleton
		Project._singleton = self

		self.path      = None
		self.cacheManager = CacheManager() 
		self.assetLibrary = AssetLibrary()

		self.info = {
			'name'    : 'Name',
			'author'  : 'author',
			'version' : '1.0.0'
		}

		self.config = {}

	def isLoaded( self ):
		return self.path != None

	def _initPath( self, path ):
		self.path = path

		self.binaryPath        = path + '/' + _GII_BINARY_DIR
		self.gamePath          = path + '/' + _GII_GAME_DIR

		self.envPath           = path + '/' + _GII_ENV_DIR
		self.envPackagePath    = path + '/' + _GII_ENV_PACKAGE_DIR
		self.envDataPath       = path + '/' + _GII_ENV_DATA_DIR
		self.envConfigPath     = path + '/' + _GII_ENV_CONFIG_DIR
		self.envLibPath        = path + '/' + _GII_ENV_LIB_DIR

		self.assetPath         = path + '/' + _GII_ASSET_DIR

		self.scriptLibPath     = path + '/' + _GII_SCRIPT_LIB_DIR

		self.hostPath          = path + '/' + _GII_HOST_DIR
		self.hostExtensionPath = path + '/' + _GII_HOST_EXTENSION_DIR

	def _affirmDirectories( self ):
		#mkdir - lv1
		_affirmPath( self.binaryPath )

		_affirmPath( self.envPath )
		_affirmPath( self.envPackagePath )
		_affirmPath( self.envDataPath )
		_affirmPath( self.envLibPath )
		_affirmPath( self.envConfigPath )

		_affirmPath( self.gamePath )
		_affirmPath( self.assetPath )
		_affirmPath( self.scriptLibPath )
		
		_affirmPath( self.hostPath )
		_affirmPath( self.hostExtensionPath )
		
	def init( self, path, name ):
		info = Project.findProject( path )
		if info:
			raise ProjectException( 'Gii project already initialized:' + info['path'] )
		#
		path = os.path.realpath(path)
		if not os.path.isdir(path):
			raise ProjectException('%s is not a valid path' % path)
		self._initPath( path )
		#
		logging.info( 'copy template contents' )
		from MainModulePath import getMainModulePath
		def ignore( src, names ):
			return ['.DS_Store']
		shutil.copytree( getMainModulePath('template/host'), self.getPath('host'), ignore )
		shutil.copytree( getMainModulePath('template/game'), self.getPath('game'), ignore )
		shutil.copy( getMainModulePath('template/.gitignore'), self.getPath() )
		#
		self._affirmDirectories()

		try:
			self.cacheManager.init( _GII_ENV_CONFIG_DIR, self.envConfigPath )
		except OSError,e:
			raise ProjectException('error creating cache folder:%s' % e)

		self.assetLibrary.load( _GII_ASSET_DIR, self.assetPath, self.path, self.envConfigPath )

		signals.emitNow('project.init', self)
		logging.info( 'project initialized: %s' % path )
		self.info['name'] = name
		self.saveConfig()
		self.save()
		return True	
Example #18
class Project(object):
    _singleton = None

    @staticmethod
    def get():
        return Project._singleton

    def __init__(self):
        assert not Project._singleton
        Project._singleton = self

        self.cacheManager = CacheManager()
        self.assetLibrary = AssetLibrary()

        self.path = None
        self.editorPath = None
        self.editorLuaPath = None
        self.gamePath = None

        self.info = None
        self.config = None

    def _initPath(self, path):
        self.path = path
        self.editorPath = path + '/' + _PROJECT_EDITOR_DIR
        self.editorLuaPath = self.editorPath + '/' + _PROJECT_LUA_DIR
        self.gamePath = path + '/' + _PROJECT_LUA_DIR

    def _affirmDirectories(self):
        _affirmPath(self.gamePath)

    def isLoaded(self):
        return self.path != None

    def init(self, path):
        signals.emitNow('project.init', self)

        # self.assetLibrary.load( _GII_ASSET_DIR, self.assetPath, self.path, self.envConfigPath )

    def load(self, path):
        if not path:
            path = self.path
            if not self.path: return False

        if not os.path.exists(path + '/' + _PROJECT_INFO_FILE): return False

        self._initPath(path)
        self._affirmDirectories()
        self.info = jsonHelper.tryLoadJSON(
            self.getBasePath(_PROJECT_INFO_FILE))

        if not os.path.exists(self.editorPath):
            os.makedirs(self.editorPath)
        self.config = jsonHelper.tryLoadJSON(
            self.getBasePath(_PROJECT_EDITOR_DIR + '/' + _PROJECT_CONFIG_FILE))
        if not self.config:
            self.config = {}
            self.saveConfig()

        # self.cacheManager.load( _GII_ENV_CONFIG_DIR, self.envConfigPath )
        # self.assetLibrary.load( _GII_ASSET_DIR, self.assetPath, self.path, self.envConfigPath )

        self.loaded = True
        signals.emitNow('project.preload', self)
        signals.emitNow('project.load', self)

        return True

    def save(self):
        signals.emitNow('project.presave', self)

        if self.getBasePath():
            jsonHelper.trySaveJSON(self.info,
                                   self.getBasePath(_PROJECT_INFO_FILE))

        #save asset & cache
        self.assetLibrary.save()
        self.cacheManager.clearFreeCacheFiles()
        self.cacheManager.save()

        signals.emitNow('project.save', self)
        return True

    def saveConfig(self):
        if self.getBasePath():
            jsonHelper.trySaveJSON(
                self.config,
                self.getBasePath(_PROJECT_EDITOR_DIR + '/' +
                                 _PROJECT_CONFIG_FILE))

    def getPath(self, path=None):
        return self.getBasePath(path)

    def getBasePath(self, path=None):
        if path is None:
            return self.path
        return os.path.join(self.path, path)

##----------------------------------------------------------------##

    def getConfigDict(self):
        return self.config

    def getConfig(self, key, default=None):
        if self.config != None:
            return self.config.get(key, default)
        return default

    def setConfig(self, key, value):
        if self.config != None:
            self.config[key] = value

##----------------------------------------------------------------##

    def getEditorLuaPath(self):
        if self.editorLuaPath:
            if os.path.exists(self.editorLuaPath):
                return self.editorLuaPath
        return None

    def getEditorAssetsPath(self):
        return self.editorPath + '/' + _PROJECT_ASSETS_DIR

##----------------------------------------------------------------##

    def getAssetLibrary(self):
        return self.assetLibrary

    def loadAssetLibrary(self):
        #load cache & assetlib
        self.assetLibrary.loadAssetTable()
Example #19
class Project(object):
	_singleton=None
	@staticmethod
	def get():		
		return Project._singleton

	def __init__(self):
		assert not Project._singleton
		Project._singleton = self

		self.cacheManager = CacheManager() 
		self.assetLibrary = AssetLibrary()

		self.path      	= None
		self.editorPath = None
		self.editorLuaPath = None
		self.gamePath 	= None

		self.info 		= None
		self.config 	= None

	def _initPath( self, path ):
		self.path = path
		self.editorPath = path + '/' + _PROJECT_EDITOR_DIR
		self.editorLuaPath = self.editorPath + '/' + _PROJECT_LUA_DIR
		self.gamePath   = path + '/' + _PROJECT_LUA_DIR

	def _affirmDirectories( self ):
		_affirmPath( self.gamePath )

	def isLoaded( self ):
		return self.path != None

	def init(self, path):
		signals.emitNow('project.init', self)

		# self.assetLibrary.load( _GII_ASSET_DIR, self.assetPath, self.path, self.envConfigPath )

	def load(self, path):
		if not path:
			path = self.path
			if not self.path: return False

		if not os.path.exists( path + '/' + _PROJECT_INFO_FILE ): return False

		self._initPath( path )
		self._affirmDirectories()
		self.info = jsonHelper.tryLoadJSON( self.getBasePath( _PROJECT_INFO_FILE ) )

		if not os.path.exists( self.editorPath ):
			os.makedirs( self.editorPath )
		self.config = jsonHelper.tryLoadJSON( self.getBasePath( _PROJECT_EDITOR_DIR + '/' + _PROJECT_CONFIG_FILE ) )
		if not self.config:
			self.config = {}
			self.saveConfig()

		# self.cacheManager.load( _GII_ENV_CONFIG_DIR, self.envConfigPath )
		# self.assetLibrary.load( _GII_ASSET_DIR, self.assetPath, self.path, self.envConfigPath )

		self.loaded = True
		signals.emitNow( 'project.preload', self )
		signals.emitNow( 'project.load', self )

		return True

	def save(self):
		signals.emitNow('project.presave', self)

		if self.getBasePath():
			jsonHelper.trySaveJSON( self.info, self.getBasePath( _PROJECT_INFO_FILE ) )

		#save asset & cache
		self.assetLibrary.save()
		self.cacheManager.clearFreeCacheFiles()
		self.cacheManager.save()

		signals.emitNow( 'project.save', self )
		return True

	def saveConfig( self ):
		if self.getBasePath():
			jsonHelper.trySaveJSON( self.config, self.getBasePath( _PROJECT_EDITOR_DIR + '/' + _PROJECT_CONFIG_FILE ))

	def getPath( self, path = None ):
		return self.getBasePath( path )
		
	def getBasePath( self, path = None ):
		if path is None:
			return self.path
		return os.path.join( self.path, path )

##----------------------------------------------------------------##
	def getConfigDict( self ):
		return self.config

	def getConfig( self, key, default = None ):
		if self.config != None:
			return self.config.get( key, default )
		return default

	def setConfig( self, key, value ):
		if self.config != None:
			self.config[ key ] = value

##----------------------------------------------------------------##
	def getEditorLuaPath( self ):
		if self.editorLuaPath:
			if os.path.exists( self.editorLuaPath ):
				return self.editorLuaPath
		return None

	def getEditorAssetsPath( self ):
		return self.editorPath + '/' + _PROJECT_ASSETS_DIR

##----------------------------------------------------------------##
	def getAssetLibrary( self ):
		return self.assetLibrary

	def loadAssetLibrary( self ):
		#load cache & assetlib
		self.assetLibrary.loadAssetTable()
Example #20
class Project(object):
    _singleton = None

    @staticmethod
    def get():
        return Project._singleton

    @staticmethod
    def findProject(path=None):
        #TODO: return project info dict instead of path?
        path = os.path.abspath(path or '')
        opath = None
        while path and not (path in ('', '/', '\\')):
            if   os.path.exists( path + '/' + _GII_ENV_CONFIG_DIR ) \
            and  os.path.exists( path + '/' + _GII_INFO_FILE ) :
                #get info
                info = jsonHelper.tryLoadJSON(path + '/' + _GII_INFO_FILE)
                info['path'] = path
                return info
            #go up level
            opath = path
            path = os.path.dirname(path)
            if path == opath: break
        return None

    def __init__(self):
        assert not Project._singleton
        Project._singleton = self

        self.path = None
        self.cacheManager = CacheManager()
        self.assetLibrary = AssetLibrary()

        self.info = {'name': 'Name', 'author': 'author', 'version': '1.0.0'}

        self.config = {}

    def isLoaded(self):
        return self.path != None

    def _initPath(self, path):
        self.path = path

        self.binaryPath = path + '/' + _GII_BINARY_DIR
        self.gamePath = path + '/' + _GII_GAME_DIR

        self.envPath = path + '/' + _GII_ENV_DIR
        self.envPackagePath = path + '/' + _GII_ENV_PACKAGE_DIR
        self.envDataPath = path + '/' + _GII_ENV_DATA_DIR
        self.envConfigPath = path + '/' + _GII_ENV_CONFIG_DIR
        self.envLibPath = path + '/' + _GII_ENV_LIB_DIR

        self.assetPath = path + '/' + _GII_ASSET_DIR

        self.scriptLibPath = path + '/' + _GII_SCRIPT_LIB_DIR

        self.hostPath = path + '/' + _GII_HOST_DIR
        self.hostExtensionPath = path + '/' + _GII_HOST_EXTENSION_DIR

    def _affirmDirectories(self):
        #mkdir - lv1
        _affirmPath(self.binaryPath)

        _affirmPath(self.envPath)
        _affirmPath(self.envPackagePath)
        _affirmPath(self.envDataPath)
        _affirmPath(self.envLibPath)
        _affirmPath(self.envConfigPath)

        _affirmPath(self.gamePath)
        _affirmPath(self.assetPath)
        _affirmPath(self.scriptLibPath)

        _affirmPath(self.hostPath)
        _affirmPath(self.hostExtensionPath)

    def init(self, path, name):
        info = Project.findProject(path)
        if info:
            raise ProjectException('Gii project already initialized:' +
                                   info['path'])
        #
        path = os.path.realpath(path)
        if not os.path.isdir(path):
            raise ProjectException('%s is not a valid path' % path)
        self._initPath(path)
        #
        logging.info('copy template contents')
        from MainModulePath import getMainModulePath

        def ignore(src, names):
            return ['.DS_Store']

        shutil.copytree(getMainModulePath('template/host'),
                        self.getPath('host'), ignore)
        shutil.copytree(getMainModulePath('template/game'),
                        self.getPath('game'), ignore)
        shutil.copy(getMainModulePath('template/.gitignore'), self.getPath())
        #
        self._affirmDirectories()

        try:
            self.cacheManager.init(_GII_ENV_CONFIG_DIR, self.envConfigPath)
        except OSError, e:
            raise ProjectException('error creating cache folder:%s' % e)

        self.assetLibrary.load(_GII_ASSET_DIR, self.assetPath, self.path,
                               self.envConfigPath)

        signals.emitNow('project.init', self)
        logging.info('project initialized: %s' % path)
        self.info['name'] = name
        self.saveConfig()
        self.save()
        return True
Example #21
class CacheHandler:

    cache = CacheManager()
Example #22
    def scanProject(self):  #scan
        self.projectScanScheduled = False
        logging.info('scan project in:' + self.rootAbsPath)
        #scan meta files first ( will be used in asset importing )
        #TODO
        #check missing asset
        for assetPath, node in self.assetTable.copy().items():
            if not self.assetTable.has_key(
                    assetPath):  #already removed(as child of removed node)
                continue
            #check parentnode
            if not node.getParent():
                self.unregisterAssetNode(node)
                continue

            if node.isVirtual():  #don't check virtual node's file
                continue

            filePath = node.getAbsFilePath()
            #file deleted
            if not os.path.exists(filePath):
                node.modifyState = 'removed'
                self.unregisterAssetNode(node)
                continue
            #file become ignored
            if self.checkFileIgnorable(filePath):
                node.modifyState = 'ignored'
                self.unregisterAssetNode(node)
                continue

        #check new asset
        for currentDir, dirs, files in os.walk(unicode(self.rootAbsPath)):
            relDir = os.path.relpath(currentDir, self.rootAbsPath)

            for filename in files:
                if self.checkFileIgnorable(filename):
                    continue

                nodePath = self.fixPath(relDir + '/' + filename)
                absPath = self.getAbsPath(nodePath)
                mtime = os.path.getmtime(absPath)
                bundle = self._getParentBundle(nodePath)

                if bundle:
                    if mtime > bundle.getFileTime():
                        bundle.markModified()
                    if not bundle.checkObjectFiles():
                        bundle.markModified()
                else:
                    if not self.getAssetNode(nodePath):  #new
                        self.initAssetNode(nodePath)
                    else:
                        node = self.getAssetNode(nodePath)  #modified
                        if mtime > node.getFileTime():
                            node.markModified()
                        if not node.checkObjectFiles():
                            node.markModified()

            dirs2 = dirs[:]
            for dirname in dirs2:
                if self.checkFileIgnorable(dirname):
                    dirs.pop(dirs.index(dirname))  #skip walk this
                    continue
                nodePath = self.fixPath(relDir + '/' + dirname)
                if not self.getAssetNode(nodePath):
                    self.initAssetNode(nodePath)

        self.importModifiedAssets()
        self.saveAssetTable()
        CacheManager.get().save()
Example #23
class UnitOfWork(object):
    '''
    Unit of work.

    @note: Due to the thread-safety limitations of database connections, the
    unit of work only provides thread-local instances, not a process-level one.
    '''
    
    def __init__(self):
        from db import ConnectionManager
        self.connection_manager = ConnectionManager(XConfig.get('db'))
        self.cache_manager = CacheManager(XConfig.get('cache'))
        self.entity_list = {}
        self.use_cache = False
        self.use_preload = True
        self.use_validator = False
        self.bad_entitys = []
        
    def idgenerator(self):
        
        if not hasattr(self, '_idgenerator') or not self._idgenerator:
            connection = self.connection_manager.get(XConfig.get('idgenerator.db'))
            self._idgenerator = IdGenerator(connection, XConfig.get('idgenerator.count') or 5)
            
        return self._idgenerator
        
    
    def register(self, entity):
        '''Register an entity with the unit of work.
        '''
        
        cls_name = entity.__class__.__name__
        
        if self.entity_list.get(cls_name) is None:
            self.entity_list[cls_name] = {}
            
        self.entity_list[cls_name][str(entity.getId())] = entity
        entity._unitofwork = self
        
    def commit(self):
        '''
        '''
        
        deletes = []
        updates = []
        news = []
        db_names = set()
        
        for entity_class_name in self.entity_list.keys():
            entity_dict = self.entity_list.get(entity_class_name)
            for entity_id in entity_dict.keys():
                entity = entity_dict.get(entity_id)
                if entity.isDelete():
                    entity.onDelete()
                elif entity.isNew():
                    entity.onNew()
                elif entity.isDirty():
                    entity.onUpdate()
                else:
                    continue
                
        self.bad_entitys = []
        for entity_class_name in self.entity_list.keys():
            entity_dict = self.entity_list.get(entity_class_name)
            for entity_id in entity_dict.keys():
                entity = entity_dict.get(entity_id)
                if entity.isDelete():
                    deletes.append(entity)
                elif entity.isNew():
                    news.append(entity)
                elif entity.isDirty():
                    updates.append(entity)
                else:
                    continue
                
                if entity.isLoadedFromCache():
                    raise ModifyBasedCacheError("%s(%s) is loaded from cache, so can't be modified!!"%(
                        entity.__class__.__name__, entity.id))
                
                if self.use_validator and not entity.doValidate():
                    self.bad_entitys.append(entity)
                    
                db_names.add(entity._db)
                
        if self.use_validator and self.bad_entitys:
            return False
                
        for name in db_names:
            connection = self.connection_manager.get(name)
            if connection and name == connection.name:
                connection.begin()
                
        try:
            for entitys in [deletes, updates, news]:
                for entity in entitys:
                    self.sync(entity)
                    
            for name in db_names:
                connection = self.connection_manager.get(name)
                if name == connection.name:
                    connection.commit()
            
            for entity in deletes:
                try:
                    cache = self.cache_manager.get(entity._cache)
                    if not cache:
                        continue
                    
                    cache_key = self.makeKey(entity.__class__, entity.id)
                    cache.delete(cache_key)
                except:
                    logging.exception("delete cache fail")
                
            for entitys in [updates, news]:
                for entity in entitys:
                    try:
                        cache = self.cache_manager.get(entity._cache)
                        if not cache:
                            continue
                        cache_key = self.makeKey(entity.__class__, entity.id)
                        cache.set(cache_key, entity.getCacheDict())
                    except:
                        logging.exception("set cache fail")
                    
            return True
        except:
            logging.exception("[XWEB] COMMIT FAILED, ROLLBACK")
            for name in db_names:
                connection = self.connection_manager.get(name)
                if name == connection.name:
                    connection.rollback()
            return False
        finally:
            self.entity_list.clear()
                    
        
    def getEntityInMemory(self, cls, entity_id):
        cls_name = cls.__name__
        if self.entity_list.get(cls_name) is None:
            return None
        
        return self.entity_list.get(cls_name).get(str(entity_id))
    
    def getList(self, cls, entity_ids, **kwargs):
        
        db_conn = cls.dbName(**kwargs)
        connection = self.connection_manager.get(db_conn)
        query_db_ids = []
        query_cache_ids = []
        for entity_id in entity_ids:
            entity = self.getEntityInMemory(cls, entity_id)
            if not entity:
                query_cache_ids.append(entity_id)
                
        if query_cache_ids:
            if self.use_cache:
                    
                keys = [self.makeKey(cls, entity_id) for entity_id in query_cache_ids]
                cache_name = cls._cache_name
                cache = self.cache_manager.get(cache_name)
                if not cache:
                    raise ValueError('CACHE DOES NOT EXSITS WHEN USE_CACHE IS TRUE')
                
                entitys = cache.getList(keys)
                for entity_id, key in zip(query_cache_ids, keys):
                    cache_dict = entitys.get(key)
                    if cache_dict:
                        entity = cls(**cache_dict)
                        entity._is_new = False
                        entity._is_delete = False
                        entity._is_dirty = False
                        entity._load_from_cache = True
                        entity._db = 'default'
                        entity._cache = cache_name
                        self.register(entity)
                        logging.debug("[XWEB] LOAD ENTITY %s FROM CACHE: %s"%(entity, cache_name))
                    else:
                        query_db_ids.append(entity_id)
            else:
                query_db_ids = query_cache_ids
        
            entitys = connection.getEntityList(cls, query_db_ids)
    
            if not entitys:
                return []
            
            first_entity = entitys[0]
            first_entity.setProps('entity_ids_in_query', entity_ids)
            for entity in entitys:
                self.register(entity)
                entity.setProps('first_entity_in_query', first_entity.id)
                logging.debug("[XWEB] LOAD ENTITY %s FROM DB: %s"%(entity, db_conn))
            
        return [self.getEntityInMemory(cls, entity_id) for entity_id in entity_ids if self.getEntityInMemory(cls, entity_id)]
    
    def getListByCond(self, criteria, **kwargs):
        
        if not isinstance(criteria, QueryCriteria):
            return []
        
        cls = criteria.entity_cls
        
        db_conn = cls.dbName(**kwargs)
        connection = self.connection_manager.get(db_conn)
        entity_ids = connection.fetchEntityIds(criteria)
        
        return self.getList(cls, entity_ids, **kwargs)
    
    
    def fetchRowsByCond(self, cr, **kwargs):
        
        cls = cr.entity_cls
        db_conn = cls.dbName(**kwargs)
        connection = self.connection_manager.get(db_conn)
        return connection.fetchRowsByCond(cr)
    
    
    def fetchRowByCond(self, cr, **kwargs):
        
        cls = cr.entity_cls
        db_conn = cls.dbName(**kwargs)
        connection = self.connection_manager.get(db_conn)
        return connection.fetchRowByCond(cr)
        
    def getListByCond2(self, cls, condition=None, args=[], **kwargs):
        
        db_conn = cls.dbName(**kwargs)
        connection = self.connection_manager.get(db_conn)
        rows = connection.queryRowsByCond(cls, condition, args)
        
        results = []
        
        for row in rows:
            
            data = {}
            
            for k,v in zip(cls.allKeys(), row):
                data[k] = v
                
            entity_id = tuple([data.get(k) for k in cls.primaryKey()])
            
            entity = self.getEntityInMemory(cls, entity_id)
            
            if not entity:
                entity = connection.createEntity(cls, row)
                self.register(entity)
                key = self.makeKey(cls, entity_id)
                cache_name = cls._cache_name
                cache = self.cache_manager.get(cache_name)
                if cache:
                    cache.set(key, entity.getCacheDict())
                
            results.append(entity)
                
        return results
    
    
    def get(self, cls, entity_id, **kwargs): #@ReservedAssignment
        
        entity = self.getEntityInMemory(cls, entity_id)
        if entity:
            return entity
        
        key = self.makeKey(cls, entity_id)
        cache_name = cls._cache_name
        cache = self.cache_manager.get(cache_name)
        if self.use_cache and cache:
            
            cache_dict = cache.get(key)
            if cache_dict:
                entity = cls(**cache_dict)
                entity._is_new = False
                entity._is_delete = False
                entity._is_dirty = False
                entity._load_from_cache = True
                entity._db = 'default'
                entity._cache = cache_name
                self.register(entity)
                logging.debug("[XWEB] LOAD ENTITY %s FROM CACHE: %s"%(entity, cache_name))
                return entity
        
        db_conn = cls.dbName(entity_id=entity_id, **kwargs)
        connection = self.connection_manager.get(db_conn)
        entity = connection.getEntity(cls, entity_id)
        
        if entity is None:
            return None
        
        self.register(entity)
        
        if cache:
            cache.set(key, entity.getCacheDict())
            logging.debug("[XWEB] LOAD ENTITY %s FROM DB: %s"%(entity, db_conn))
        
        return entity
        
    
    def sync(self, entity):
        connection = self.connection_manager.get(entity._db)
        
        if entity.isNew():
            if connection.insert(entity):
                entity._is_dirty = False
                entity._is_new = False
                entity.is_delete = False
                return  True
        elif entity.isDelete():
            if connection.delete(entity):
                entity._is_dirty = False
                entity._is_new = False
                entity.is_delete = True
                return  True
        elif entity.isDirty():
            if connection.update(entity):
                entity._is_dirty = False
                entity._is_new = False
                entity.is_delete = False
                return True
        else:
            raise EntityStatusError()
        
        return False
        
    def makeKey(self, cls, entity_id):
        return "%s:%s:%s:%s"%(XConfig.get('app_name'),
                              cls.__name__, entity_id, cls._version)
        
    def close(self):
        self.connection_manager.close()
    
    
    # static method
    @classmethod
    def inst(cls):
        thread = threading.currentThread()
        
        if not hasattr(thread, 'unitofwork') or not thread.unitofwork:
            thread.unitofwork = UnitOfWork()
            
        return thread.unitofwork
    
    @classmethod
    def Commit(cls):
        cls.inst().commit()
    
    @classmethod
    def Close(cls):
        cls.inst().close()
        
    @classmethod
    def Reset(cls, force=False):
        thread = threading.currentThread()
        
        if hasattr(thread, 'unitofwork') and thread.unitofwork:
            if not force:
                unitofwork = thread.unitofwork
                unitofwork.entity_list = {}
                unitofwork.bad_entitys = {}
                unitofwork.use_cache = True
                unitofwork.use_preload = False
            else:
                del thread.unitofwork
        
    @classmethod
    def reset(cls, force=False):
        cls.Reset(force)
Example #24
def InvalidateJobListCache(vcName):
    CacheManager.Invalidate("GetAllPendingJobs", vcName)
    DataManager.GetAllPendingJobs(vcName)
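Example #24 follows an invalidate-then-reprime pattern: drop the cached entry, then immediately re-run the query so the cache is warm for the next reader. Here is a tiny dict-backed sketch of the same idea; MemoCache and its keys are illustrative, not the CacheManager/DataManager API.

class MemoCache:
    # Tiny dict-backed sketch; not the project's CacheManager/DataManager API.
    def __init__(self):
        self._store = {}

    def get(self, key, compute):
        # Fill the entry on a miss, then serve it from memory.
        if key not in self._store:
            self._store[key] = compute()
        return self._store[key]

    def invalidate(self, key):
        self._store.pop(key, None)

cache = MemoCache()
cache.get('pending-jobs:vc1', lambda: ['job-1'])    # primes the cache
cache.invalidate('pending-jobs:vc1')                # drop the stale entry
cache.get('pending-jobs:vc1', lambda: ['job-2'])    # next read re-primes it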
Example #25
# -*- coding: utf-8 -*-
import sys

from subprocess import CalledProcessError
from cache import CacheManager
from processor import SSHAuthFileProcessor
from handler import SSHAuthEventHandler
from watcher import SSHAuthWatcher
from env import AUTH_LOG_FILE_PATH

if __name__ == "__main__":
    try:
        cache = CacheManager()
        processor = SSHAuthFileProcessor(AUTH_LOG_FILE_PATH, cache=cache)
        handler = SSHAuthEventHandler(processor)
        SSHAuthWatcher(AUTH_LOG_FILE_PATH, handler).run()
    except CalledProcessError:
        print('Cannot fetch the last line')
        sys.exit(1)