def _extract_archive(src_file_path: str, dst_dir_path):
    """Extract a theme ZIP archive into *dst_dir_path*.

    If the archive wraps its content in a single top-level directory, that
    directory's files (including hidden ones) are moved up into
    *dst_dir_path* and the emptied directory is removed.

    :param src_file_path: path to the ZIP file to extract.
    :param dst_dir_path: existing directory to extract into.
    """
    # Extract all files
    with ZipFile(src_file_path) as z_file:
        z_file.extractall(dst_dir_path)

    # Check if the archive contains only a single directory; move its files up.
    # Absolute paths are used instead of chdir()/getcwd(): the previous
    # implementation did not restore the working directory when an exception
    # occurred mid-way, and broke when dst_dir_path was a relative path.
    entries = glob(path.join(dst_dir_path, '*'))
    if len(entries) == 1 and path.isdir(entries[0]):
        top_directory = entries[0]

        # The extra '.*' pattern picks up hidden files, which '*' does not match
        f_paths = glob(path.join(top_directory, '*')) + glob(path.join(top_directory, '.*'))
        for f_path in f_paths:
            if path.basename(f_path) not in ('.', '..'):
                move(f_path, dst_dir_path)

        rmdir(top_directory)

    logger.debug(
        "Theme files successfully extracted from file '{}' to directory '{}'".
        format(src_file_path, dst_dir_path))
def on_cleanup():
    """Purge obsolete session files and log the outcome of each removal."""
    removed, errors = _util.cleanup_files(
        _reg.get('paths.session'), _reg.get('router.session_ttl', 86400))

    for fpath in removed:
        _logger.debug('Obsolete session file removed: {}'.format(fpath))

    for fpath, exc in errors:
        _logger.error('Error while removing obsolete session file {}: {}'.format(fpath, exc))
def handle_command(self, name: str, msg: Union[types.Message, types.CallbackQuery]):
    """Hook invoked for an incoming bot command.

    The default implementation logs the message and rejects the command as
    unknown; subclasses override this to implement commands.
    """
    logger.debug('{}: Command received: {}'.format(self.__class__, msg))

    err_msg = lang.t('telegram@unknown_command', {'command': name})
    raise error.CommandExecutionError(err_msg)
def _cleanup_tmp_files():
    """Delete temporary files older than 24 hours and log each outcome."""
    ttl = 86400  # 24h
    removed, errors = _util.cleanup_files(_reg.get('paths.tmp'), ttl)

    for fpath in removed:
        _logger.debug('Obsolete tmp file removed: {}'.format(fpath))

    for fpath, exc in errors:
        _logger.error('Error while removing obsolete tmp file {}: {}'.format(fpath, exc))
def cleanup():
    """Delete expired log files (default TTL 30 days) and log each outcome."""
    ttl = _reg.get('logger.file_ttl', 2592000)  # 30d
    removed, errors = _util.cleanup_files(_reg.get('paths.log'), ttl)

    for fpath in removed:
        _logger.debug('Obsolete log file removed: {}'.format(fpath))

    for fpath, exc in errors:
        _logger.error('Error while removing obsolete log file {}: {}'.format(fpath, exc))
def install(archive_path: str, delete_zip_file: bool = True):
    """Install a theme from a zip-file

    The archive is extracted into a temporary directory and validated by
    instantiating the theme; required pip packages and plugins are installed,
    and only then is the theme moved into the themes package.

    :param archive_path: path to the theme's ZIP archive.
    :param delete_zip_file: remove the archive when finished (even on failure).
    """
    logger.debug(
        'Requested theme installation from zip-file {}'.format(archive_path))

    # Create temporary directory
    tmp_dir_path = util.mk_tmp_dir(subdir='theme')

    try:
        # Extract archive to the temporary directory
        _extract_archive(archive_path, tmp_dir_path)

        # Try to initialize the theme to ensure everything is okay
        theme = _theme.Theme('tmp.theme.{}'.format(
            path.basename(tmp_dir_path)))

        # Install required pip packages
        for pkg_name, pkg_version in theme.requires['packages'].items():
            # Fixed: the version placeholder previously received pkg_name a
            # second time, so the log message never showed the actual version
            logger.info(
                "Theme '{}' requires pip package '{} {}', going to install it".
                format(theme.name, pkg_name, pkg_version))
            pip.install(pkg_name, pkg_version, True, reg.get('debug'))

        # Install required plugins
        for p_name, p_version in theme.requires['plugins'].items():
            if not plugman.is_installed(p_name, VersionRange(p_version)):
                # Fixed: dropped the extra, unused p_version format argument
                logger.info(
                    "Theme '{}' requires plugin '{}', installing...".format(
                        theme.name, p_name))
                plugman.install(p_name, VersionRange(p_version))

        # Theme has been successfully initialized, so now it can be moved to the 'themes' package
        dst_path = path.join(_themes_path, theme.name)
        if path.exists(dst_path):
            # logger.warn() is deprecated in favour of logger.warning()
            logger.warning(
                "Existing theme installation at '{}' will be replaced with new one"
                .format(dst_path))
            rmtree(dst_path)

        # Move directory to the final location
        move(tmp_dir_path, dst_path)
        logger.debug("'{}' has been successfully moved to '{}'".format(
            tmp_dir_path, dst_path))

        reload.reload()

    finally:
        # Remove temporary directory (a no-op when move() succeeded)
        if path.exists(tmp_dir_path):
            rmtree(tmp_dir_path)

        # Remove ZIP file
        if delete_zip_file:
            unlink(archive_path)
def get_pool(uid: str) -> _Pool:
    """Get a pool

    :raises _error.PoolNotExist: if no pool is registered under *uid*.
    """
    if _dbg:
        _logger.debug("POOL GET: '{}'.".format(uid))

    try:
        return _pools[uid]
    except KeyError:
        raise _error.PoolNotExist(uid)
def pytsite_cleanup():
    """Remove expired resized-image static files and log each outcome."""
    root = _path.join(_reg.get('paths.static'), 'image', 'resize')
    ttl = _reg.get('file_storage_odm.static_ttl', 2592000)  # 1 month

    removed, errors = _util.cleanup_files(root, ttl)

    for fpath in removed:
        _logger.debug('Obsolete static file removed: {}'.format(fpath))

    for fpath, exc in errors:
        _logger.error('Error while removing obsolete static file {}: {}'.format(fpath, exc))
def create_pool(uid: str) -> _Pool:
    """Create a new pool

    :raises _error.PoolExists: if a pool with the same UID already exists.
    """
    if uid in _pools:
        raise _error.PoolExists(uid)

    pool = _Pool(uid, get_driver)
    _pools[uid] = pool

    if _dbg:
        _logger.debug("POOL CREATED: {}".format(uid))

    return pool
def _on_pre_delete(self, **kwargs):
    """Pre-delete ODM hook: forbid deletion for users lacking DELETE permission.

    :raises errors.ForbidDeletion: if the current user is not an admin and
        lacks permission to delete this entity.
    """
    super()._on_pre_delete(**kwargs)

    c_user = auth.get_current_user()

    # Admins have unrestricted permissions
    if c_user.is_admin:
        return

    # Check current user's permissions to DELETE entities
    if not self.odm_auth_check_entity_permissions(PERM_DELETE):
        # Reuse the already-fetched user instead of calling
        # auth.get_current_user() a second time
        logger.debug(f'Current user login: {c_user.login}')
        raise errors.ForbidDeletion(
            f"Insufficient permissions to delete entity '{self.ref}'")
def build_translations(pkg_name: str):
    """Compile the package's translations into the shared translations.json file.

    Dependencies are compiled first (recursively); _building_translations
    guards against processing the same package twice.
    """
    # Guard against recursive/repeated calls
    if pkg_name in _building_translations:
        return
    _building_translations.append(pkg_name)

    # Compile dependencies first
    for dep in package_info.requires_plugins(pkg_name):
        dep = 'plugins.' + dep
        if lang.is_package_registered(dep):
            build_translations(dep)

    output_file = path.join(assets_dst('assetman'), 'translations.json')

    # Load the existing data structure or start a fresh one
    if path.exists(output_file):
        data = util.load_json(output_file)
    else:
        data = {'langs': {}, 'translations': {}}

    # Refresh languages information
    data['langs'] = lang.langs()

    # Merge this package's translations for every configured language
    for code in lang.langs():
        data['translations'].setdefault(code, {})
        logger.info('Compiling translations for {} ({})'.format(
            pkg_name, code))
        data['translations'][code][pkg_name] = lang.get_package_translations(pkg_name, code)

    # Make sure the output directory exists
    output_dir = path.dirname(output_file)
    if not path.exists(output_dir):
        makedirs(output_dir, 0o755, True)

    # Write translations to the file
    with open(output_file, 'wt', encoding='utf-8') as fp:
        logger.debug("Writing translations into '{}'".format(output_file))
        fp.write(json.dumps(data))
def handle_private_message(self, msg: Union[types.Message, types.CallbackQuery]):
    """Hook invoked on an incoming private message; default implementation only logs it."""
    tpl = '{}: Private message received: {}'
    logger.debug(tpl.format(self.__class__, msg))
def handle_pre_checkout_query(self, query: types.PreCheckoutQuery):
    """Hook invoked on an incoming pre-checkout query; default implementation only logs it."""
    tpl = '{}: Pre checkout query received: {}'
    logger.debug(tpl.format(self.__class__, query))
def handle_inline_query(self, query: types.InlineQuery):
    """Hook invoked on an incoming inline query; default implementation only logs it."""
    tpl = '{}: Inline query received: {}'
    logger.debug(tpl.format(self.__class__, query))
def handle_channel_post(self, msg: types.Message):
    """Hook invoked on an incoming channel post; default implementation only logs it."""
    tpl = '{}: Channel post received: {}'
    logger.debug(tpl.format(self.__class__, msg))
def handle_chosen_inline_result(self, result: types.ChosenInlineResult):
    """Hook invoked on a chosen inline result; default implementation only logs it."""
    tpl = '{}: Chosen inline result received: {}'
    logger.debug(tpl.format(self.__class__, result))
def load(self):
    """Load the theme.

    Performs, in order: requirement checks, creation/registration of the
    theme's lang/tpl/assetman resources, loading of required plugins,
    import of the theme's Python package (running its theme_load* hooks),
    and a one-time asset compilation. Returns self for chaining.

    :raises RuntimeError: if the theme's requirements are not satisfied.
    :raises _error.ThemeLoadError: if importing the theme package or
        running its hooks fails.
    """
    # Imported here, not at module level — presumably to avoid a circular
    # import with the assetman plugin; TODO confirm
    from plugins import assetman

    # Check for requirements
    try:
        package_info.check_requirements(self._package_name)
    except package_info.error.Error as e:
        raise RuntimeError('Error while loading theme {}: {}'.format(self._package_name, e))

    # Create translations directory
    lang_dir = path.join(self._path, 'res', 'lang')
    if not path.exists(lang_dir):
        makedirs(lang_dir, 0o755, True)

    # Create empty translation stub files for every configured language,
    # so lang.register_package() below has a file per language to read
    for lng in lang.langs():
        lng_f_path = path.join(lang_dir, '{}.yml'.format(lng))
        if not path.exists(lng_f_path):
            with open(lng_f_path, 'wt'):
                pass

    # Register translation resources
    lang.register_package(self._package_name)

    # Register template resources (directory is created first if missing)
    tpl_path = path.join(self._path, 'res', 'tpl')
    if not path.exists(tpl_path):
        makedirs(tpl_path, 0o755, True)
    tpl.register_package(self._package_name)

    # Register assetman resources (directory is created first if missing)
    assets_path = path.join(self._path, 'res', 'assets')
    if not path.exists(assets_path):
        makedirs(assets_path, 0o755, True)
    assetman.register_package(self._package_name)

    # Load required plugins before importing the theme's module
    for pn, pv in self._requires['plugins'].items():
        plugman.load(pn, VersionRange(pv))

    # Load theme's module and run its optional lifecycle hooks
    try:
        self._module = import_module(self._package_name)
        if hasattr(self._module, 'theme_load') and callable(self._module.theme_load):
            self._module.theme_load()

        # theme_load_{env.type}() hook; under 'wsgi' the legacy
        # 'theme_load_uwsgi' hook name is also tried
        env_type = reg.get('env.type')
        hook_names = ['theme_load_{}'.format(env_type)]
        if env_type == 'wsgi':
            hook_names.append('theme_load_uwsgi')
        for hook_name in hook_names:
            if hasattr(self._module, hook_name):
                getattr(self._module, hook_name)()

        logger.debug("Theme '{}' successfully loaded".format(self._package_name))
    except Exception as e:
        # Any failure while importing or running hooks is wrapped
        raise _error.ThemeLoadError("Error while loading theme package '{}': {}".format(self._package_name, e))

    # Compile assets once per process; the 'theme.compiled' registry flag
    # prevents rebuilding on subsequent loads
    if not reg.get('theme.compiled'):
        assetman.setup()
        assetman.build(self._package_name)
        reg.put('theme.compiled', True)

    self._is_loaded = True

    return self
def handle_callback_query(self, query: types.CallbackQuery):
    """Hook invoked on an incoming callback query; default implementation only logs it."""
    tpl = '{}: Callback query received: {}'
    logger.debug(tpl.format(self.__class__, query))
def handle_shipping_query(self, query: types.ShippingQuery):
    """Hook invoked on an incoming shipping query; default implementation only logs it."""
    tpl = '{}: Shipping query received: {}'
    logger.debug(tpl.format(self.__class__, query))
def build(pkg_name: str, debug: bool = _DEBUG, mode: str = None, watch: bool = False):
    """Compile the package's assets with webpack.

    Compiles translations first, then (only if the package ships a
    webpack.config.js) clears the destination directory, runs webpack-cli
    and records the build timestamp in timestamps.json.

    :param pkg_name: package whose assets to build.
    :param debug: selects 'development' webpack mode when *mode* is not given.
    :param mode: explicit webpack mode ('development'/'production').
    :param watch: run webpack in watch mode.
    """
    pkg_name = resolve_package(pkg_name)
    src = assets_src(pkg_name)
    dst = assets_dst(pkg_name)
    public_path = assets_public_path(pkg_name)
    timestamps_path = path.join(assets_dst('assetman'), 'timestamps.json')
    mode = mode or ('development' if debug else 'production')

    # Build translations
    if lang.is_package_registered(pkg_name):
        build_translations(pkg_name)

    # Building is possible only if 'webpack.config.js' exists
    webpack_config = path.join(src, 'webpack.config.js')
    if not path.exists(webpack_config):
        return

    # Clear destination directory
    if path.exists(dst):
        rmtree(dst)

    # Create output directory for timestamps file
    timestamps_dir_path = path.dirname(timestamps_path)
    if not path.exists(timestamps_dir_path):
        makedirs(timestamps_dir_path, 0o755, True)

    # The timestamps file must exist (even if empty) before the build starts
    if not path.isfile(timestamps_path):
        with open(timestamps_path, 'wt', encoding='utf-8') as f:
            f.write(json.dumps({}))

    # Collect webpack's config parts from all the packages; paths are made
    # relative to the project root before being passed to webpack
    webpack_parts = []
    root_dir = reg.get('paths.root') + '/'
    for p in _packages.values():
        if path.exists(path.join(p[0], 'webpack.part.js')):
            webpack_parts.append(p[0].replace(root_dir, ''))

    # Run webpack
    console.print_info(
        lang.t('assetman@compiling_assets_for_package', {'package': pkg_name}))
    # NOTE(review): '--env.*' arguments follow webpack-cli v3 syntax; verify
    # against the webpack-cli version actually installed
    args = [
        '--mode', mode,
        '--config', webpack_config,
        '--context', assets_src(pkg_name),
        '--output-path', dst,
        '--output-public-path', public_path,
        '--env.NODE_ENV', mode,
        '--env.root_dir', root_dir,
        '--env.config_parts', ','.join(webpack_parts),
        '--watch', str(watch).lower(),
    ]
    _run_node_bin('webpack-cli', args, watch or debug)

    # Record this package's build time and rewrite the whole timestamps file
    _BUILD_TS.put(pkg_name, int(time.time()))
    with open(timestamps_path, 'wt', encoding='utf-8') as f:
        logger.debug(f"Writing timestamps into '{timestamps_path}'")
        f.write(json.dumps({k: _BUILD_TS.get(k) for k in _BUILD_TS.keys()}))