def handle_inspect(**kwargs):
    """Handle inspect actions.

    Args:
        **kwargs (dict): Arguments; recognised key: 'input'.
    """
    merged = {'input': None}
    merged.update(kwargs)
    args = dict_to_namedtuple(merged)

    # Separate the input path into parent directory and final segment.
    parent, leaf = os.path.split(unicode(args.input))

    try:
        in_fs = OSFS(parent)
        if in_fs.isfile(leaf):
            # Single file: inspect it via its system path.
            inspect_file(in_fs.getsyspath(leaf))
        elif in_fs.isdir(leaf):
            # Directory: inspect the whole sub-filesystem.
            inspect_dir(in_fs.opendir(leaf))
    except CreateFailed:
        console.error('Input {} does not exist.'.format(args.input))
        return
def build_folders(source, destination_temp, standard, root): """Transform the repos' folder structure to that of the register and build HTML pages for each standard. """ source_fs = OSFS(source) print "Processing %s ... " % standard['id'] standard_fs = source_fs.opendir(standard['id']) # list all artifacts of a standard artifacts = standard_fs.listdir(dirs_only=True) if '.git' in artifacts: artifacts.remove(".git") for artifact in artifacts: # check whether artifact folder exists in destination_temp if root.exists('%s/%s' % (destination_temp, artifact)) == False: root.makedir('%s/%s' % (destination_temp, artifact)) # copy standard folders from source to destination_temp in desired structure root.copydir('%s/%s/%s' % (source, standard['id'], artifact), '%s/%s/%s' % (destination_temp, artifact, standard['id'])) html = create_standard_webpage(standard, artifacts) # check whether register/standard exists if root.exists('%s/%s' % (destination_temp, standard['id'])) == False: root.makedir('%s/%s' % (destination_temp, standard['id'])) # write standard HTML page to register/standard/index.html with codecs.open('%s/%s/index.html' % (destination_temp, standard['id']), 'w', encoding='utf8') as f: f.write(html) # copy web assets root.copydir('web/assets', '%s/r' % destination_temp, overwrite=True)
def cmd_scan(self, *params, **options):
    """Synchronise FavIcon records with the icon files found on disk.

    Walks MEDIA_ROOT/favicons, and for every directory containing a
    'scan.pik' pickle, recomputes the available icon sizes from the
    icon*.png files, updates the FavIcon object and re-exports the pickle.
    """
    # Create MEDIA_ROOT/favicons if needed and open it as a filesystem.
    favicons_fs = OSFS(settings.MEDIA_ROOT).makeopendir('favicons')
    # Only descend into three-character directories (presumably hash
    # buckets -- TODO confirm the bucket naming scheme).
    for path in favicons_fs.walkdirs(wildcard="???"):
        icon_fs = favicons_fs.opendir(path)
        if icon_fs.isfile('scan.pik'):
            # Extract the pixel size from names like 'icon<size>.png'
            # ([4:] skips the 'icon' prefix, split drops '.png'), sort
            # numerically and join as a comma-separated string.
            icon_sizes = ','.join(str(s) for s in sorted(int(p.split('.')[0][4:]) for p in icon_fs.listdir(wildcard='icon*.png')))
            if not icon_sizes:
                # No icon files present -- nothing to sync.
                continue
            favicon, created = FavIcon.import_(icon_fs.open('scan.pik'))
            if favicon is None:
                continue
            # Remember the previous sizes so a change can be reported.
            old_sizes = favicon.sizes
            favicon.sizes = icon_sizes
            favicon.update()
            favicon.save()
            if created:
                print "New object:\t", path
            else:
                print path
            if old_sizes != favicon.sizes:
                print "Icon sizes changed!\t", path
            # Persist the updated object back into the directory pickle.
            favicon.export(icon_fs.open('scan.pik', 'w'))
def assertFS(path, remainder=None):
    """Assert existence of fs.

    Walks up one path segment at a time until an existing directory is
    found, then creates the missing tail inside it and returns the
    resulting filesystem.

    Args:
        path (basestring)
        remainder (basestring)

    Returns:
        fs
    """
    if os.path.isdir(path):
        base = OSFS(path)
        if remainder is None:
            return base
        # Recreate the accumulated missing segments below the existing root.
        base.makedirs(unicode(remainder))
        return base.opendir(unicode(remainder))
    # Directory missing: strip one segment and push it onto the remainder.
    parent, segment = os.path.split(path)
    pending = "/".join(filter(bool, [segment, remainder]))
    return assertFS(parent, pending)
def build_folders(source, destination_temp, standards, root): print "Building register..." source_fs = OSFS(source) # iterate over all standards in source directory for standard in standards: print "Processing %s ... " % standard['id'] standard_fs = source_fs.opendir(standard['id']) # list all sub standards of a standard artifacts = standard_fs.listdir(dirs_only=True) if '.git' in artifacts: artifacts.remove(".git") for artifact in artifacts: # check whether artifact folder exists in destination_temp if root.exists('%s/%s' % (destination_temp, artifact)) == False: root.makedir('%s/%s' % (destination_temp, artifact)) # copy standard folders from source to destination_temp in desired structure root.copydir('%s/%s/%s' % (source, standard['id'], artifact), '%s/%s/%s' % (destination_temp, artifact, standard['id'])) # create standard HTML page html = create_standard_webpage(standard, artifacts) # check whether standard folder exists in register root if root.exists('%s/%s' % (destination_temp, standard['id'])) == False: root.makedir('%s/%s' % (destination_temp, standard['id'])) # write standard HTML page to register/standard/index.html with codecs.open('%s/%s/index.html' % (destination_temp, standard['id']), 'w', encoding='utf8') as f: f.write(html)
def get_artifacts(root, build_path, sources_path, standard):
    """Return the artifact sub-directories of *standard*, minus '.git'."""
    sources_root = ospath.join(root.getsyspath('.'), build_path, sources_path)
    standard_fs = OSFS(sources_root).opendir(standard['id'])
    entries = standard_fs.listdir(dirs_only=True)
    # listdir yields unique names, so filtering equals removing '.git' once.
    return [name for name in entries if name != '.git']
def get_artifacts(root, build_path, sources_path, standard):
    """List artifact directories for *standard*, excluding the '.git' folder."""
    source_fs = OSFS(
        ospath.join(root.getsyspath('.'), build_path, sources_path))
    artifacts = source_fs.opendir(standard['id']).listdir(dirs_only=True)
    # EAFP: drop the '.git' entry if present.
    try:
        artifacts.remove('.git')
    except ValueError:
        pass
    return artifacts
def eggifySingle(srcFS, src, destFS, dest, config=None): """ Eggify single source to single destination. Args: src (basestring) dest (basestring) Raises: MissingDestinationException """ if dest is None: raise MissingDestinationException() if config is None: config = {} if src.startswith("/") or src[1] == ":": head, tail = os.path.split(src) srcFS = OSFS(head) src = tail if srcFS.isfile(unicode(src)): assertFS(destFS.getsyspath(unicode(dest))) workingDir = srcFS.getsyspath(unicode("/")) devnull = open(os.devnull, 'w') cmd = ["python", src, "bdist_egg"] if "purge" in config.keys() and config["purge"]: cmd.append("--exclude-source-files") subprocess.check_call(cmd, cwd=workingDir, stdout=devnull, stderr=devnull) if srcFS.isdir(unicode("dist")): distFS = srcFS.opendir(unicode("dist")) for name in reversed(sorted(distFS.listdir("/"))): if name.endswith(".egg"): destEggFS = destFS.opendir(unicode(dest)) # remove existing eggs removeOldEggs(destEggFS, name) eggSrcPath = distFS.getsyspath(unicode(name)) eggDestPath = destEggFS.getsyspath(unicode(name)) copy_file(distFS, unicode(name), destEggFS, unicode(name)) print "copied {} to {}".format(eggSrcPath, eggDestPath) break
def handle_preview(**kwargs):
    """Handle preview actions.

    Renders a JPEG preview for a single input file, or for every file in
    an input directory, into the output filesystem.

    Args:
        **kwargs (dict): Arguments; recognised keys: 'input', 'output',
            'prefix', 'layer', 'num_threads', 'multithreading'.
    """
    default_args = {
        'input': None,
        'output': None,
        'prefix': None,
        'layer': None,
        'num_threads': None,
        'multithreading': 1
    }
    default_args.update(kwargs)
    args = dict_to_namedtuple(default_args)

    # open (and, via assure_fs, presumably create) the output filesystem
    out_fs = assure_fs(args.output)

    # join the layer name parts into a single space-separated string
    layer = None
    if args.layer:
        layer = ' '.join(args.layer)

    # split input path into parent directory and final segment
    dirname, basename = os.path.split(unicode(args.input))

    # open input filesystem; OSFS raises CreateFailed for missing parents
    try:
        in_fs = OSFS(dirname)
        if in_fs.isfile(basename):
            filename, extension = os.path.splitext(basename)
            # prepend prefix to the *output* filename only
            if args.prefix:
                filename = args.prefix + filename
            out_name = unicode(filename + '.jpg')
            preview_file(in_fs.getsyspath(basename),
                         out_fs.getsyspath(out_name),
                         layer)
        elif in_fs.isdir(basename):
            # directory input: preview every file, optionally in parallel
            preview_dir(in_fs.opendir(basename), out_fs,
                        args.num_threads, bool(args.multithreading),
                        prefix=args.prefix, layer=layer)
    except CreateFailed:
        console.error('Input {} does not exist.'.format(args.input))
        return
def build(fs, settings_path="settings.ini", rebuild=False, archive=None,
          master_settings=None):
    """Build a project.

    Returns a ``(archive, context, doc)`` tuple, or raises
    StartupFailedError if the startup document cannot be parsed.
    """
    # Accept a path or URL in place of a filesystem object.
    if isinstance(fs, string_types):
        if '://' in fs:
            fs = fsopendir(fs)
        else:
            fs = OSFS(fs)
    # Normalise a single settings path to a list of paths.
    if isinstance(settings_path, string_types):
        settings_path = [settings_path]
    if archive is None:
        archive = Archive(fs)
    context = Context()
    # Temporarily chdir into the project if it maps to a real directory;
    # restored in the finally block below.
    syspath = fs.getsyspath('/', allow_none=True)
    cwd = os.getcwd()
    if syspath is not None:
        os.chdir(syspath)
    try:
        # Expose core objects on the context root for expressions.
        root = context.root
        root['libs'] = archive.libs
        root['apps'] = archive.apps
        root['fs'] = FSWrapper(fs)
        log.debug("reading settings from {}".format(textual_list(settings_path)))
        archive.cfg = SettingsContainer.read(fs, settings_path,
                                             master=master_settings)
        root['settings'] = SettingsContainer.from_dict(archive.cfg['settings'])
        startup_path = archive.cfg.get('project', 'startup')
        docs_location = archive.cfg.get('project', 'location')
        archive.init_settings()
        root['console'] = archive.console
        root['debug'] = archive.debug
        root['_rebuild'] = rebuild
        # Parse the startup document and build the archive from it.
        parser = Parser(archive, fs.opendir(docs_location), startup_path)
        doc = parser.parse()
        if doc is None:
            raise errors.StartupFailedError('unable to parse "{}"'.format(startup_path))
        archive.build(doc, fs=fs)
        return archive, context, doc
    finally:
        # Always restore the original working directory.
        os.chdir(cwd)
def cmd_import(self, *params, **options):
    """ Imports an icon set.

    Expects the icon-set name (or a wildcard matching several sets) as
    the first positional parameter; prints usage and returns if missing.
    """
    media_fs = OSFS(settings.MEDIA_ROOT)
    iconsets_fs = media_fs.opendir('iconsets')
    try:
        iconset_name = params[0]
        # Match icon-set directories against the given name/wildcard.
        iconsets = iconsets_fs.listdir(wildcard = iconset_name, dirs_only=True)
    except IndexError, e:
        # No positional parameter supplied -- show expected usage.
        print "<catalog name, or wildcard>"
        return
def build(fs, settings_path="settings.ini", rebuild=False, archive=None,
          master_settings=None):
    """Build a project and return ``(archive, context, doc)``."""
    # Accept a path or URL in place of a filesystem object.
    if isinstance(fs, string_types):
        if '://' in fs:
            fs = fsopendir(fs)
        else:
            fs = OSFS(fs)
    if archive is None:
        archive = Archive(fs)
    context = Context()
    # Temporarily chdir into the project if it maps to a real directory;
    # restored in the finally block below.
    syspath = fs.getsyspath('/', allow_none=True)
    cwd = os.getcwd()
    if syspath is not None:
        os.chdir(syspath)
    try:
        # Expose core objects on the context root for expressions.
        root = context.root
        root['libs'] = archive.libs
        root['apps'] = archive.apps
        root['fs'] = FSWrapper(fs)
        archive.cfg = SettingsContainer.read(fs, settings_path,
                                             master=master_settings)
        root['settings'] = SettingsContainer.from_dict(archive.cfg['settings'])
        startup_path = archive.cfg.get('project', 'startup')
        docs_location = archive.cfg.get('project', 'location')
        archive.init_settings()
        root['console'] = archive.console
        root['debug'] = archive.debug
        root['_rebuild'] = rebuild
        # Parse the startup document and build the archive from it.
        parser = Parser(archive, fs.opendir(docs_location), startup_path)
        doc = parser.parse()
        archive.build(doc, fs=fs)
        return archive, context, doc
    finally:
        # Always restore the original working directory.
        os.chdir(cwd)
def cmd_makepreviews(self, *params, **options):
    """Render a contact-sheet JPEG preview for every catalog category.

    Lays the category's 32px icons out on a white grid, 11 icons wide,
    and saves the sheet to MEDIA_ROOT/iconsetpreviews/<catalog>.<category>.jpg.
    """
    PREVIEW_ICON_SIZE = 32       # icon edge length in pixels
    WIDTH_COUNT = 11             # icons per row
    BORDER = 5                   # padding around each icon
    ICON_DIMENSIONS = (BORDER*2 + PREVIEW_ICON_SIZE)  # cell size
    preview_width = ICON_DIMENSIONS * WIDTH_COUNT
    media_fs = OSFS(settings.MEDIA_ROOT)
    media_fs.makedir('iconsetpreviews', allow_recreate=True)
    previews_fs = media_fs.opendir('iconsetpreviews')
    for catalog in IconCatalog.objects.all():
        for category in catalog.get_categories():
            filename = "%s.%s.jpg" % (catalog.name, category)
            icons = catalog.icon_set.filter(category=category).order_by('name')
            num_icons = icons.count()
            # Number of grid rows, rounding up.
            icons_height_count = (num_icons + WIDTH_COUNT-1) // WIDTH_COUNT
            preview_height = icons_height_count * ICON_DIMENSIONS
            preview_img = Image.new('RGB', (preview_width, preview_height), (255, 255, 255))
            print preview_width, preview_height
            for i, icon in enumerate(icons):
                # Grid coordinates: row = i // WIDTH_COUNT, col = i % WIDTH_COUNT.
                y, x = divmod(i, WIDTH_COUNT)
                # icon.path contains a '[SIZE]' placeholder for the pixel size.
                pth = icon.path.replace('[SIZE]', str(PREVIEW_ICON_SIZE))
                icon_pth = media_fs.getsyspath(pth)
                img = Image.open(icon_pth)
                if img.size[0] != img.size[1]:
                    # Non-square icon: crop to the expected square size.
                    img = img.crop((0, 0, PREVIEW_ICON_SIZE, PREVIEW_ICON_SIZE))
                try:
                    # Paste with the image itself as alpha mask...
                    preview_img.paste(img, (x*ICON_DIMENSIONS+BORDER, y*ICON_DIMENSIONS+BORDER), img)
                except ValueError:
                    # ...falling back to an opaque paste for images
                    # without an alpha channel.
                    preview_img.paste(img, (x*ICON_DIMENSIONS+BORDER, y*ICON_DIMENSIONS+BORDER))
            sys_filename = previews_fs.getsyspath(filename)
            print sys_filename
            preview_img.save(previews_fs.getsyspath(filename), quality=75)
def cmd_optimize(self, *params, **options): """ Optimizes icon images. """ media_fs = OSFS(settings.MEDIA_ROOT) iconsets_fs = media_fs.opendir('iconsets') count = 0 for filename in list(iconsets_fs.walkfiles(wildcard="*.png")): sys_filename = iconsets_fs.getsyspath(filename) try: os.system('optipng -o3 -q "%s"' % sys_filename) except Exception, e: print "Error:", str(e) else: print "Optimized", filename #sys_filename = iconsets_fs.getsyspath(filename) #img = Image.open(sys_filename) #img.save(sys_filename, optimize=True) #print '.', count += 1
def cmd_makeadminpreviews(self, *params, **options): try: iconset = params[0] except IndexError: iconset = '' icon_fs = OSFS(settings.MEDIA_ROOT).opendir('iconsets') if params: icon_fs = icon_fs.opendir(params[0]) done_dirs = set() for path in icon_fs.walkfiles(wildcard='*.png'): dirpath = dirname(path) png_path = icon_fs.getsyspath(path) img = Image.open(png_path).convert('RGBA') background_img = Image.new('RGB', img.size, (255, 255, 255)) background_img.paste(img, None, img) new_path = os.path.splitext(png_path)[0] + '.jpg' background_img.save(new_path) if dirpath not in done_dirs: print "Generating admin previews in %s/*" % dirpath done_dirs.add(dirpath)
def build(
    fs,
    settings_path="settings.ini",
    rebuild=False,
    archive=None,
    strict=False,
    master_settings=None,
    test_build=False,
    develop=False,
):
    """Build a project.

    Returns ``(fs, archive, context, doc)``; raises StartupFailedError
    if the startup document cannot be parsed.
    """
    # Accept a path or URL in place of a filesystem object.
    if isinstance(fs, string_types):
        if "://" in fs:
            fs = open_fs(fs)
        else:
            fs = OSFS(fs)
    # Normalise a single settings path to a list of paths.
    if isinstance(settings_path, string_types):
        settings_path = [settings_path]
    # Resolve a real directory for the project, if any.
    try:
        syspath = fs.getsyspath("/")
    except NoSysPath:
        syspath = None
    cwd = os.getcwd()
    if syspath is not None:
        os.chdir(syspath)
    try:
        log.debug("reading settings from {}".format(
            textual_list(settings_path)))
        cfg = SettingsContainer.read(fs, settings_path, master=master_settings)
        # Optional [customize] section: overlay a second project on top
        # of this one and re-read settings from it.
        if "customize" in cfg:
            customize_location = cfg.get("customize", "location")
            if customize_location:
                settings_path = cfg.get("customize", "settings", "settings.ini")
                startup_log.info("customizing '%s'", customize_location)
                customize_fs = open_fs(cfg.get("customize", "location"))
                cfg = SettingsContainer.read(customize_fs, settings_path,
                                             master=cfg)
                # Writes go to the custom layer; reads fall back to the
                # base project.
                overlay_fs = MultiFS()
                overlay_fs.add_fs("project", fs)
                overlay_fs.add_fs("custom", customize_fs, write=True)
                fs = overlay_fs
                # chdir again if the overlay maps to a real directory.
                try:
                    syspath = fs.getsyspath("/")
                except NoSysPath:
                    pass
                else:
                    os.chdir(syspath)
        if archive is None:
            archive = Archive(fs, strict=strict, test_build=test_build,
                              develop=develop)
        context = Context()
        archive.cfg = cfg
        # Expose core objects on the context root for expressions.
        root = context.root
        root["libs"] = archive.libs
        root["apps"] = archive.apps
        root["fs"] = FSWrapper(fs)
        root["settings"] = SettingsContainer.from_dict(archive.cfg["settings"])
        startup_path = archive.cfg.get("project", "startup")
        docs_location = archive.cfg.get("project", "location")
        archive.init_settings()
        root["console"] = archive.console
        root["debug"] = archive.debug
        root["_rebuild"] = rebuild
        # Parse the startup document and build the archive from it.
        parser = Parser(archive, fs.opendir(docs_location), startup_path)
        doc = parser.parse()
        if doc is None:
            raise errors.StartupFailedError(
                'unable to parse "{}"'.format(startup_path))
        archive.build(doc, fs=fs)
        return fs, archive, context, doc
    finally:
        # Always restore the working directory and release build garbage.
        os.chdir(cwd)
        gc.collect()
def handle_rechannel(**kwargs):
    """Handle rechannel actions.

    Loads a JSON layer map and rechannels a single input file, or every
    file in an input directory, into the output filesystem.

    Args:
        **kwargs (dict): Arguments; recognised keys: 'input', 'output',
            'prefix', 'map', 'num_threads', 'multithreading'.
    """
    default_args = {
        'input': None,
        'output': None,
        'prefix': None,
        'map': None,
        'num_threads': None,
        'multithreading': 1
    }
    default_args.update(kwargs)
    args = dict_to_namedtuple(default_args)

    # open output filesystem
    out_fs = assure_fs(args.output)

    # split map path
    dirname, basename = os.path.split(unicode(args.map))
    try:
        map_fs = OSFS(dirname)
        if map_fs.isfile(basename):
            with map_fs.open(basename) as file_handle:
                try:
                    layer_map = json.loads(file_handle.read())
                except Exception as error:
                    console.error(error)
                    return
        else:
            console.error('Map {} does not exist.'.format(args.map))
            return
    except CreateFailed:
        console.error('Map parent directory {} does not exist.'.format(args.map))
        return

    # split input path
    dirname, basename = os.path.split(unicode(args.input))

    # open input filesystem
    try:
        in_fs = OSFS(dirname)
        if in_fs.isfile(basename):
            # Fix: apply the prefix to the *output* name only.  The
            # original prefixed `basename` before computing the input
            # syspath, pointing the read at a non-existent file (the
            # sibling handle_preview applies the prefix to output only).
            out_name = basename
            if args.prefix:
                out_name = args.prefix + basename
            rechannel_file(in_fs.getsyspath(basename),
                           out_fs.getsyspath(out_name),
                           layer_map)
        elif in_fs.isdir(basename):
            rechannel_dir(in_fs.opendir(basename), out_fs, layer_map,
                          args.num_threads, bool(args.multithreading),
                          prefix=args.prefix)
    except CreateFailed:
        console.error('Input {} does not exist.'.format(args.input))
        return
class QualityStandard:
    """Stores information about a quality standard."""

    def __init__(self, resource_root_dir: types_path_like):
        """Create a ``QualityStandard`` instance.

        Parameters
        ----------
        resource_root_dir :
            The path to the resource root directory of the standard

        """
        from fs.osfs import OSFS

        self._name = None
        self._max_version = None
        self._versions = {}

        if isinstance(resource_root_dir, Path):
            resource_root_dir = resource_root_dir.as_posix()

        # Accept either a path (opened as an OSFS) or an already-open
        # filesystem object.
        if isinstance(resource_root_dir, str):
            self._filesystem = OSFS(resource_root_dir)
        else:
            self._filesystem = resource_root_dir

        manifest_dir = self._filesystem.opendir("manifests")
        manifest_files = [
            info.name
            for info in self._filesystem.filterdir(
                "manifests", ["*.yml", "*.yaml"])
        ]

        for filename in manifest_files:
            # stem of pyfilesystem cuts after the first '.', so strip the
            # extension manually at the last dot instead.
            qs_name, version = split_tag_version(
                filename[:filename.rindex(".")])

            if self._name is None:
                self._name = qs_name
                self._max_version = version
            else:
                # All manifests must belong to the same standard.
                if qs_name != self._name:
                    raise ValueError("Inconsistent naming of manifest files")
                if self._max_version < version:
                    self._max_version = version

            with manifest_dir.open(filename, "r") as stream:
                content = yaml.load(stream, Loader=yaml.SafeLoader)

            self._versions[version] = {
                "manifest_file_mapping": {content["id"]: filename},
                "schema_file_mapping": {
                    mapping["uri"]: (f"{mapping['file']}.yaml")
                    for mapping in content["tags"]
                },
            }

    def _map_file_content(self, file_mapping: dict, directory: str,
                          version: AsdfVersion) -> ResourceMappingProxy:
        """Get a mapping between an URI and a file content.

        Parameters
        ----------
        file_mapping : Dict
            A dictionary containing the mapping between URI and the file path
        directory:
            Directory that contains the files. This is either 'schemas' or
            'mappings'
        version : AsdfVersion
            The version of the standard.

        Returns
        -------
        ResourceMappingProxy :
            Mapping between an URI and a file content

        """
        # Fix: read each mapped file by its own name.  The path was a
        # garbled literal ("(unknown)"), leaving the loop variable
        # `filename` unused and every URI reading the same broken path.
        content_mapping = {
            uri: self._filesystem.open(f"{directory}/{filename}").read()
            for uri, filename in file_mapping.items()
        }

        return ResourceMappingProxy(
            content_mapping, package_name=self._name, package_version=version)

    @property
    def name(self) -> str:
        """Get the quality standards name."""
        return self._name

    def get_mappings(self, version: Union[AsdfVersion, str] = None):
        """Get the manifest and schema mapping for the specified version.

        Parameters
        ----------
        version : Union[AsdfVersion, str]
            Requested standard version. If `None` is provided, the latest
            will be used.

        Returns
        -------
        ResourceMappingProxy :
            Manifest mapping
        ResourceMappingProxy :
            Schema mapping

        """
        if version is None:
            version = self._max_version
        elif not isinstance(version, AsdfVersion):
            version = AsdfVersion(version)

        file_mappings = self._versions[version]
        manifest_mapping = self._map_file_content(
            file_mappings["manifest_file_mapping"], "manifests", version)
        schema_mapping = self._map_file_content(
            file_mappings["schema_file_mapping"], "schemas", version)

        return manifest_mapping, schema_mapping
# Migrate favicon directories from a flat layout into hashed buckets.
# Source and destination roots come from argv, with hard-coded defaults.
try:
    src = sys.argv[1]
    dst = sys.argv[2]
except IndexError:
    src = '~/projects/linkstop/webapp/linkstop/media/faviconsx'
    dst = '~/projects/linkstop/webapp/linkstop/media/favicons'

src_fs = OSFS(src)
dst_fs = OSFS(dst)

# NOTE(review): count/max_count appear unused below -- possibly a
# leftover throttle for testing.
count = 0
max_count = 2

for path in src_fs.listdir(dirs_only=True):
    icon_fs = src_fs.opendir(path)
    if icon_fs.isfile('scan.pik'):
        # Load the scan metadata; skip (and report) unreadable pickles.
        try:
            icon = pickle.load(icon_fs.open('scan.pik'))
        except Exception, e:
            print "%s (%s)" % (str(e), path)
            continue
        normalized_url = icon['normalized_url']
        # New location: <hash bucket>/<filename derived from the URL>.
        out_dir = hash_path(normalized_url) + '/' + url_to_filename(normalized_url)
        print out_dir
        dest_dir_fs = dst_fs.makeopendir(out_dir, recursive=True)
        # Move the whole icon directory, overwriting existing files.
        movedir(icon_fs, dest_dir_fs, overwrite=True, ignore_errors=True)
def build_server(fs, settings_path, server_element="main",
                 no_console=False, rebuild=False, validate_db=False,
                 breakpoint=False, master_settings=None):
    """Build a server.

    Builds the project, locates the requested <server> element and runs
    its startup.  Returns a ServerBuildResult, or None when a parse
    error was already reported to the console.
    """
    start = time()
    archive = Archive()
    console = archive.console
    try:
        archive, context, doc = build(fs, settings_path,
                                      rebuild=rebuild,
                                      master_settings=master_settings)
        # Use the console of the freshly built archive from here on.
        console = archive.console
    except errors.ParseError as e:
        if not no_console:
            line, col = e.position
            console.document_error(text_type(e), e.path, e.code, line, col)
        return None
    except errors.ElementError as element_error:
        if not no_console:
            line = element_error.source_line
            col = 0
            console.document_error(text_type(element_error),
                                   element_error.element._location,
                                   element_error.element._code, line, col)
        raise errors.StartupFailedError('Failed to build project')
    # Accept a path or URL in place of a filesystem object.
    if isinstance(fs, string_types):
        if '://' in fs:
            fs = fsopendir(fs)
        else:
            fs = OSFS(fs)
    archive.project_fs = fs
    try:
        app, server = doc.get_element(server_element)
    except errors.ElementNotFoundError:
        raise errors.StartupFailedError(
            "no <server> element called '{}' found in the project (check setting [project]/startup)"
            .format(server_element))
    error_msg = None
    docs_location = archive.cfg.get('project', 'location')
    try:
        server.startup(archive, context, fs.opendir(docs_location),
                       breakpoint=breakpoint)
    except errors.StartupFailedError as error:
        # Deliberately not re-raised here: execution falls through so
        # failed documents can be rendered first.
        error_msg = text_type(error)
        #raise
    except errors.ElementError as e:
        raise
    except Exception as e:
        failed = render_failed_documents(archive, console, no_console=no_console)
        if failed:
            raise errors.StartupFailedError(
                "{} document(s) failed to build".format(failed))
        if hasattr(e, '__moyaconsole__'):
            e.__moyaconsole__(console)
        error_msg = text_type(e)
        raise errors.StartupFailedError(error_msg or 'Failed to build project')
    failed = render_failed_documents(archive, console, no_console=no_console)
    if failed:
        raise errors.StartupFailedError(error_msg or 'Failed to build project')
    # archive.finalize()
    archive.init_media()
    archive.init_data()
    if validate_db:
        from . import db
        if db.validate_all(archive, console) == 0:
            startup_log.debug('models validated successfully')
        else:
            msg = "Models failed to validate, see 'moya db validate' for more information"
            raise errors.StartupFailedError(msg)
    startup_log.info("%s built %.1fms", server, (time() - start) * 1000.0)
    return ServerBuildResult(archive=archive, context=context, server=server)
def build_server(fs, settings_path, server_element="main",
                 no_console=False, rebuild=False, validate_db=False,
                 breakpoint=False, master_settings=None):
    """Build a server: build the project, find the named <server>
    element, run its startup and return a ServerBuildResult (or None
    after a reported parse error)."""
    start = time()
    archive = Archive()
    console = archive.console
    try:
        archive, context, doc = build(fs, settings_path, rebuild=rebuild,
                                      master_settings=master_settings)
        # Switch to the built archive's console for further reporting.
        console = archive.console
    except errors.ParseError as e:
        if not no_console:
            line, col = e.position
            console.document_error(text_type(e), e.path, e.code, line, col)
        return None
    except errors.ElementError as element_error:
        if not no_console:
            line = element_error.source_line
            col = 0
            console.document_error(text_type(element_error),
                                   element_error.element._location,
                                   element_error.element._code, line, col)
        raise errors.StartupFailedError('Failed to build project')
    # Accept a path or URL in place of a filesystem object.
    if isinstance(fs, string_types):
        if '://' in fs:
            fs = fsopendir(fs)
        else:
            fs = OSFS(fs)
    archive.project_fs = fs
    try:
        app, server = doc.get_element(server_element)
    except errors.ElementNotFoundError:
        raise errors.StartupFailedError("no <server> element called '{}' found in the project (check setting [project]/startup)".format(server_element))
    error_msg = None
    docs_location = archive.cfg.get('project', 'location')
    try:
        server.startup(archive, context, fs.opendir(docs_location),
                       breakpoint=breakpoint)
    except errors.StartupFailedError as error:
        # Intentionally swallowed here so failed documents get rendered
        # below before anything is raised.
        error_msg = text_type(error)
        #raise
    except errors.ElementError as e:
        raise
    except Exception as e:
        failed = render_failed_documents(archive, console, no_console=no_console)
        if failed:
            raise errors.StartupFailedError("{} document(s) failed to build".format(failed))
        if hasattr(e, '__moyaconsole__'):
            e.__moyaconsole__(console)
        error_msg = text_type(e)
        raise errors.StartupFailedError(error_msg or 'Failed to build project')
    failed = render_failed_documents(archive, console, no_console=no_console)
    if failed:
        raise errors.StartupFailedError(error_msg or 'Failed to build project')
    # archive.finalize()
    archive.init_media()
    archive.init_data()
    if validate_db:
        from . import db
        if db.validate_all(archive, console) == 0:
            startup_log.debug('models validated successfully')
        else:
            msg = "Models failed to validate, see 'moya db validate' for more information"
            raise errors.StartupFailedError(msg)
    startup_log.info("%s built %.1fms", server, (time() - start) * 1000.0)
    return ServerBuildResult(archive=archive, context=context, server=server)
# Turn on captcha for registration if int(myconf.take('recaptcha.use')): auth.settings.captcha = Recaptcha2(request, myconf.take('recaptcha.site_key'), myconf.take('recaptcha.secret_key')) # ----------------------------------------------------------------------------- # IMPORT the CKEDITOR PLUGIN TO GIVE A WYSIWYG EDITOR FOR BLOGS AND NEWS # -- OK, so this editor is neat but one issue is that it dumps files into the # root of uploads, which is messy # -- Ordinarily, this would be controlled by the upload_folder setting but # this is hardcoded in the module. Could edit it there but you can also use # a fs object to provide a folder # -- You'd think it might be possible to have multiple upload folders but # it turns out to be quite hard to switch the settings # ----------------------------------------------------------------------------- ckeditor = CKEditor(db) app_root = request.folder app_root_fs = OSFS(app_root) if not app_root_fs.exists('uploads/news_and_blogs/'): blog_fs = app_root_fs.makeopendir('uploads/news_and_blogs/') else: blog_fs = app_root_fs.opendir('uploads/news_and_blogs/') ckeditor.settings.uploadfs = blog_fs ckeditor.settings.table_upload_name = 'ckeditor_uploads' ckeditor.define_tables(fake_migrate=True)