def __init__(self, config_store_directory, loop=None, notify_callback=None):
    """Initialize the backend and make sure the storage directory exists.

    :param config_store_directory: root directory for stored configs;
        ``~`` is expanded and the path is made absolute.
    :param loop: event loop, forwarded to the base backend.
    :param notify_callback: change-notification hook, forwarded to the base backend.
    """
    super().__init__(loop, notify_callback)
    # Normalize the user-supplied path before touching the filesystem.
    store_dir = os.path.expanduser(config_store_directory)
    self.config_store_directory = os.path.abspath(store_dir)
    self.os_util = OSUtils()
    # Create the storage directory on first use.
    if not self.os_util.directory_exists(self.config_store_directory):
        self.os_util.makedirs(self.config_store_directory)
def __init__(self, directory, config_name, logger=None, merge=True):
    """Bind this backend to ``<directory>/<config_name><_extension>``.

    :param directory: existing directory that holds the json files.
    :param config_name: base name (without extension) of the config file.
    :param logger: optional logger; defaults to this module's logger.
    :param merge: when True, writes merge into any existing file content.
    :raises Exception: if ``directory`` does not exist.
    """
    self.os_util = OSUtils()
    self.logger = logger or logging.getLogger(__name__)
    self.directory = directory
    self.config_name = config_name
    self.merge_file = merge
    # Fail fast: the store directory must already be present.
    if not self.os_util.directory_exists(directory):
        raise Exception('Json file data store directory not exist: %s' % directory)
    self.file_name = config_name + self._extension
    self.file_path = os.path.join(directory, self.file_name)
def iter_backend(self):
    """Yield the config name of every stored json file in the store directory."""
    walker = OSUtils().walk(self.config_store_directory)
    for _dirpath, _dirnames, file_names in walker:
        for file_name in file_names:
            stem, ext = os.path.splitext(file_name)
            # Only files carrying this backend's extension count as configs.
            if ext == self._extension:
                yield stem
class JsonFileBackend(BaseBackend):
    """Synchronous json-file data store bound to a single config file."""

    __charset__ = "utf-8"
    _extension = '.json'

    def __init__(self, directory, config_name, logger=None, merge=True):
        """Bind this backend to ``<directory>/<config_name>.json``.

        :param directory: existing directory that holds the json files.
        :param config_name: base name (without extension) of the config file.
        :param logger: optional logger; defaults to this module's logger.
        :param merge: when True, ``write`` merges into any existing file.
        :raises Exception: if ``directory`` does not exist.
        """
        self.directory = directory
        self.config_name = config_name
        self.os_util = OSUtils()
        self.merge_file = merge
        self.logger = logger or logging.getLogger(__name__)
        if not self.os_util.directory_exists(self.directory):
            raise Exception('Json file data store directory not exist: %s' % self.directory)
        self.file_name = config_name + self._extension
        self.file_path = os.path.join(self.directory, self.file_name)

    def exists(self):
        """Return True if the backing json file exists.

        Bug fix: the original used ``directory_exists`` on a *file* path,
        which always reported False and silently disabled the merge path
        in ``write``; ``file_exists`` matches the check the async backend
        performs on the same kind of path.
        """
        return self.os_util.file_exists(self.file_path)

    def read(self):
        """Load and return the stored mapping; ``{}`` when the file is missing."""
        try:
            with io.open(self.file_path, encoding=self.__charset__) as open_file:
                source_data = json.load(open_file)
            self.logger.debug("Backend read json: {}".format(self.file_path))
        except IOError:
            # A missing file is an expected state, not an error.
            self.logger.debug(
                "Backend read json: {} (Ignored, file not Found)".format(
                    self.file_path))
            source_data = {}
        return source_data

    def write(self, source_data):
        """Persist ``source_data``, merging over existing content when enabled."""
        if self.exists() and self.merge_file:
            with io.open(self.file_path, encoding=self.__charset__) as open_file:
                # object_merge folds the on-disk data into source_data before
                # it is written back -- presumably existing keys survive unless
                # overwritten; confirm against object_merge's contract.
                object_merge(json.load(open_file), source_data)
        with io.open(self.file_path, "w", encoding=self.__charset__) as open_file:
            json.dump(source_data, open_file)
def __init__(self, app, os_utils=None, logger=None, log_file_name=None, store_type=None):
    """Wire the server around *app*, reading runtime options from its config.

    :param app: application object exposing a ``config`` mapping.
    :param os_utils: optional filesystem helper; a fresh OSUtils otherwise.
    :param logger: optional logger; defaults to this module's logger.
    :param log_file_name: target file for log output, may be None.
    :param store_type: backend type; falls back to the STORE_TYPE config key.
    """
    conf = app.config
    self.app = app
    self.debug = conf.get('DEBUG', True)
    self.max_connection = conf.get('MAX_CONNECTION', 1024)
    self.os_utils = os_utils if os_utils else OSUtils()
    self.logger = logger if logger else logging.getLogger(__name__)
    self.log_file_name = log_file_name
    self.store_type = store_type if store_type else conf.get(
        'STORE_TYPE', self._default_store_type)
    self.store_backend = None
    self.rt_log = conf.get("RT_SERVER_LOG", True)
    # Logging is configured before the store backend is instantiated,
    # so backend construction can already log.
    config_logging(self.log_file_name, self.logger)
    self.init_store_backend_instance()
def init_config_store():
    """Scan STORE_DIRECTORY and (re)build the global config store state.

    Creates the store directory when missing, then registers one
    ConfigManager per file whose extension is known to
    ``extension_backend``, pre-seeding an empty connection set for each.
    """
    global config_store_state
    config_store_state = {}
    os_utils = OSUtils()
    store_path = os_utils.abspath(STORE_DIRECTORY)
    if not os_utils.directory_exists(store_path):
        os_utils.makedirs(store_path)
    # Reuse the OSUtils instance created above (the original constructed a
    # second, throwaway one just for the walk).
    for _, _, file_names in os_utils.walk(store_path):
        for file_name in file_names:
            config_name, extension = os.path.splitext(file_name)
            # Skip files no registered backend can handle.
            if extension not in extension_backend:
                continue
            config_store_state[config_name] = ConfigManager(config_name)
            connected.setdefault(config_name, set())
            logger.info('Load config store object: %s', config_name)
class JsonFileBackend(BaseBackend):
    """Async data-store backend that keeps each config as a json file on disk."""

    _extension = '.json'
    __visit_name__ = 'json_file'

    @classmethod
    def configuration_schema(cls):
        """Describe the configuration options this backend accepts."""
        return {
            'config_store_directory': {
                'required': False,
                'type': 'string',
                'desc': '数据存储目录',
                'default': '~/rtconfig/data'
            }
        }

    def __init__(self, config_store_directory, loop=None, notify_callback=None):
        """Normalize the store directory path and create it when absent."""
        super().__init__(loop, notify_callback)
        store_dir = os.path.expanduser(config_store_directory)
        self.config_store_directory = os.path.abspath(store_dir)
        self.os_util = OSUtils()
        if not self.os_util.directory_exists(self.config_store_directory):
            self.os_util.makedirs(self.config_store_directory)

    def get_file_path(self, config_name):
        """Absolute path of the json file backing *config_name*."""
        return os.path.join(
            self.config_store_directory, config_name + self._extension)

    def read(self, config_name, default=None, check_exist=False):
        """Load *config_name*'s stored data.

        A missing file yields ``default or {}``, unless *check_exist* is
        set, in which case ProjectNoFoundException is raised instead.
        """
        file_path = self.get_file_path(config_name)
        try:
            with io.open(file_path, encoding=self.__charset__) as open_file:
                data = json.load(open_file)
            logger.debug("Backend read json: {}".format(file_path))
            return data
        except IOError:
            logger.debug("Backend read json: {} (Ignored, file not Found)".format(file_path))
            if check_exist:
                raise ProjectNoFoundException(config_name=config_name)
            return default or {}

    async def write(self, config_name, source_data, merge=False):
        """Persist *source_data* (optionally merged over stored data), then notify."""
        file_path = self.get_file_path(config_name)
        if self.os_util.file_exists(file_path) and merge:
            with io.open(file_path, encoding=self.__charset__) as open_file:
                object_merge(json.load(open_file), source_data)
        with io.open(file_path, "w", encoding=self.__charset__) as open_file:
            json.dump(source_data, open_file)
        await self.publish('callback_config_changed', config_name)

    def iter_backend(self):
        """Yield every config name that has a json file in the store directory."""
        for _dirpath, _dirnames, names in OSUtils().walk(self.config_store_directory):
            for name in names:
                stem, ext = os.path.splitext(name)
                if ext == self._extension:
                    yield stem

    async def delete(self, config_name):
        """Remove *config_name*'s file if present and notify subscribers."""
        file_path = self.get_file_path(config_name)
        if self.os_util.file_exists(file_path):
            self.os_util.remove_file(file_path)
            # NOTE(review): flattened source is ambiguous about whether the
            # publish sits inside this if; kept inside so subscribers are
            # notified only when a file was actually removed -- confirm.
            await self.publish('callback_config_changed', config_name)
def __init__(self, app):
    """Keep a reference to the application and a filesystem helper."""
    self.os_util = OSUtils()
    self.app = app