def new_category(blog_id):
    """Show the new-category form for a blog; create the category on POST.

    Requires a logged-in user with editor rights on the blog. On a
    successful POST, redirects to the edit page for the new category.
    """
    from core.models import db
    with db.atomic() as txn:
        user = auth.is_logged_in(request)
        blog = Blog.load(blog_id)
        # Raises on failure; the returned value itself is unused.
        permission = auth.is_blog_editor(user, blog)
        category_list = [n for n in blog.categories]
        # Placeholder object so the GET template has a category to render.
        category = Category(id=0, title='', blog=blog)
        top_level_category = Category(
            id=None,
            title='[Top-level category]',
            parent=None)
        category_list.insert(0, top_level_category)
        tags = template_tags(blog=blog, user=user)
        if request.method == "POST":
            with db.atomic() as txn:
                category_title = request.forms.getunicode('category_title')
                try:
                    parent_category = int(
                        request.forms.getunicode('category_parent'))
                # BUG FIX: getunicode() returns None for a missing field,
                # and int(None) raises TypeError, not ValueError. Catch
                # both so a missing parent falls back to top-level.
                except (TypeError, ValueError):
                    parent_category = None
                with db.atomic() as txn:
                    category = Category(
                        blog=blog,
                        title=category_title,
                        parent_category=parent_category)
                    category.save()
                    redirect('{}/blog/{}/category/{}'.format(
                        BASE_URL, blog.id, category.id))
        tpl = template(
            'edit/category',
            category=category,
            category_list=category_list,
            menu=generate_menu('blog_new_category', category),
            search_context=(search_contexts['sites'], None),
            **tags.__dict__)
        return tpl
def new_category(blog_id):
    """Render the category-creation form and handle its POST submission.

    Checks that the requesting user is an editor on the blog; on POST,
    creates the category and redirects to its edit page.
    """
    from core.models import db
    with db.atomic() as txn:
        user = auth.is_logged_in(request)
        blog = Blog.load(blog_id)
        # Permission check raises if the user is not an editor.
        permission = auth.is_blog_editor(user, blog)
        category_list = [n for n in blog.categories]
        # Dummy category so the form template has an object to bind to.
        category = Category(id=0, title='', blog=blog)
        top_level_category = Category(id=None,
                                      title='[Top-level category]',
                                      parent=None)
        category_list.insert(0, top_level_category)
        tags = template_tags(blog=blog, user=user)
        if request.method == "POST":
            with db.atomic() as txn:
                category_title = request.forms.getunicode('category_title')
                try:
                    parent_category = int(
                        request.forms.getunicode('category_parent'))
                # BUG FIX: a missing form field yields None, and int(None)
                # raises TypeError rather than ValueError; catch both so
                # the category defaults to top-level.
                except (TypeError, ValueError):
                    parent_category = None
                with db.atomic() as txn:
                    category = Category(blog=blog,
                                        title=category_title,
                                        parent_category=parent_category)
                    category.save()
                    redirect('{}/blog/{}/category/{}'.format(
                        BASE_URL, blog.id, category.id))
        tpl = template('edit/category',
                       category=category,
                       category_list=category_list,
                       menu=generate_menu('blog_new_category', category),
                       search_context=(search_contexts['sites'], None),
                       **tags.__dict__)
        return tpl
def recreate_database():
    """Drop and recreate every application table, then rebuild indexes.

    DESTRUCTIVE: all rows in the listed tables are lost. Used for a
    fresh install or a full reset.
    """
    from core.models import (db, User, Site, Blog, Page, PageCategory,
                             KeyValue, Tag, TagAssociation, Category, Theme,
                             Template, TemplateRevision, TemplateMapping,
                             Media, FileInfo, Queue, Permission,
                             MediaAssociation, PageRevision, FileInfoContext,
                             Plugin, Log, PluginData, ThemeData)
    # Single table list shared by drop and create, so the two can
    # never drift apart (the original repeated the 24-model tuple twice).
    tables = (User, Site, Blog, Page, PageCategory, KeyValue, Tag,
              TagAssociation, Category, Theme, Template, TemplateRevision,
              TemplateMapping, Media, FileInfo, Queue, Permission,
              MediaAssociation, PageRevision, FileInfoContext, Plugin, Log,
              PluginData, ThemeData)
    db.connect()
    with db.atomic():
        # safe=True on drop: ignore tables that don't exist yet.
        db.drop_tables(tables, safe=True)
        db.create_tables(tables, safe=False)
    # Engine-specific extras (index table, post-create SQL).
    settings.DB.create_index_table()
    db.execute_sql(settings.DB.post_recreate())
    db.close()
def disable_plugin(plugin_id):
    """Disable the plugin with the given id, if it exists and is enabled."""
    with db.atomic():
        try:
            plugin_to_disable = Plugin.select().where(
                Plugin.id == plugin_id).get()
        # .get() on an empty result raises Plugin.DoesNotExist; catching
        # that narrowly instead of BaseException avoids swallowing
        # unrelated errors.
        except Plugin.DoesNotExist:
            # BUG FIX: the original `raise ("Plugin not found")` tried to
            # raise a plain string, which itself raises TypeError.
            raise Exception("Plugin not found")
        else:
            if plugin_to_disable.enabled:
                plugin_to_disable.enabled = False
                plugin_to_disable.save()
def process_queue(blog):
    '''
    Processes the jobs currently in the queue for the selected blog.

    Acquires the blog's publishing lock, runs up to MAX_BATCH_OPS queued
    jobs in priority order, decrements the control job's remaining-item
    counter, and returns the number of items still pending (0 when done).
    '''
    with db.atomic():
        # Control row doubles as the publishing lock; None means another
        # run already holds it.
        queue_control = publishing_lock(blog, True)
        if queue_control is None:
            return 0
        queue_control.data_string = 'Running'
        queue_control.save()
        # Non-control jobs for this blog, highest priority first, capped
        # at one batch per invocation.
        queue = Queue.select().order_by(
            Queue.priority.desc(),
            Queue.date_touched.desc()).where(
                Queue.blog == blog,
                Queue.is_control == False).limit(MAX_BATCH_OPS)
        queue_length = queue.count()
        if queue_length > 0:
            logger.info(
                "Queue job #{} @ {} (blog #{}, {} items) started.".format(
                    queue_control.id,
                    date_format(queue_control.date_touched),
                    queue_control.blog.id, queue_length))
        for q in queue:
            # Dispatch on job type; a job is only removed from the queue
            # after its action completes without raising.
            try:
                job_type.action[q.job_type](q)
            except BaseException:
                raise
            else:
                remove_from_queue(q.id)
        # Re-fetch the control row and account for the processed batch.
        queue_control = Queue.get(Queue.blog == blog,
                                  Queue.is_control == True)
        queue_control.data_integer -= queue_length
        if queue_control.data_integer <= 0:
            # Nothing left: release the lock by deleting the control job.
            queue_control.delete_instance()
            logger.info("Queue job #{} @ {} (blog #{}) finished.".format(
                queue_control.id,
                date_format(queue_control.date_touched),
                queue_control.blog.id))
        else:
            # More batches remain; mark paused so a later run resumes.
            queue_control.data_string = 'Paused'
            queue_control.save()
            logger.info(
                "Queue job #{} @ {} (blog #{}) continuing with {} items left.".
                format(queue_control.id,
                       date_format(queue_control.date_touched),
                       queue_control.blog.id, queue_length))
    return queue_control.data_integer
def wrapper(*a, **ka):
    """Run the wrapped function on a fresh connection inside a transaction.

    LoggedException is re-raised under its own type with its message;
    database errors are wrapped in LoggedException with the request URL.
    """
    db.connect()
    try:
        with db.atomic():
            fn = func(*a, **ka)
    except LoggedException as e:
        raise exc_info()[0](e.msg)
    except DBError as e:
        raise LoggedException(error_text.format(e, request.url))
    finally:
        # BUG FIX: previously db.close() only ran on the success path, so
        # any raised exception leaked the connection.
        db.close()
    return fn
def disable_plugin(plugin_id):
    """Mark the given plugin as disabled if it is currently enabled."""
    with db.atomic():
        try:
            plugin_to_disable = Plugin.select().where(
                Plugin.id == plugin_id).get()
        # Narrowed from BaseException: .get() signals a missing row with
        # Plugin.DoesNotExist.
        except Plugin.DoesNotExist:
            # BUG FIX: `raise ("Plugin not found")` raised a bare string,
            # which is a TypeError; raise a real exception instead.
            raise Exception("Plugin not found")
        else:
            if plugin_to_disable.enabled:
                plugin_to_disable.enabled = False
                plugin_to_disable.save()
def refresh_theme(theme_id):
    '''
    Re-reads the bundled template JSON from disk and stores it on the
    selected theme.
    '''
    json_path = os.path.join(APPLICATION_PATH, 'install', 'templates.json')
    with open(json_path, "r", encoding='utf-8') as input_file:
        theme_string = input_file.read()
    with db.atomic():
        theme = Theme.load(theme_id)
        theme.json = theme_string
        theme.save()
def blog_apply_theme(blog_id, theme_id):
    """Confirm-and-apply a theme to a blog.

    On first visit shows a warning form; when the posted 'confirm' nonce
    matches the user's logout nonce, applies the theme inside a
    transaction (overwriting the blog's templates).
    """
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    # Raises if the user may not publish this blog; value unused.
    permission = auth.is_blog_publisher(user, blog)
    # Non-None reason means templates are locked and overrides the status.
    reason = auth.check_template_lock(blog)
    theme = Theme.load(theme_id)
    tags = template_tags(blog=blog, user=user)
    from core.utils import Status
    # The logout nonce doubles as a CSRF-style confirmation token.
    if request.forms.getunicode('confirm') == user.logout_nonce:
        from core.models import db
        with db.atomic() as txn:
            blog.apply_theme(theme, user)
        status = Status(
            type='success',
            close=False,
            message=''' Theme <b>{}</b> was successfully applied to blog <b>{}</b>.</p> It is recommended that you <a href="{}">republish this blog.</a> '''.format(
                theme.for_display, blog.for_display,
                '{}/blog/{}/republish'.format(
                    BASE_URL, blog.id))
            )
    else:
        status = Status(
            type='warning',
            close=False,
            message=''' You are about to apply theme <b>{}</b> to blog <b>{}</b>.</p> <p>This will OVERWRITE AND REMOVE ALL EXISTING TEMPLATES on this blog!</p> '''.format(theme.for_display, blog.for_display),
            url='{}/blog/{}/themes'.format(
                BASE_URL, blog.id),
            yes={'id':'delete',
                 'name':'confirm',
                 'label':'Yes, I want to apply this theme',
                 'value':user.logout_nonce},
            no={'label':'No, don\'t apply this theme',
                'url':'{}/blog/{}/themes'.format(
                    BASE_URL, blog.id)}
            )
    # A template-lock reason takes precedence over the computed status.
    tags.status = status if reason is None else reason
    return report(tags, 'blog_apply_theme', [blog, theme])
def refresh_theme(theme_id):
    '''
    Loads the install-time templates.json and refreshes the selected
    theme's stored JSON with its contents.
    '''
    source_path = os.path.join(APPLICATION_PATH, 'install', 'templates.json')
    with open(source_path, "r", encoding='utf-8') as source_file:
        raw_json = source_file.read()
    with db.atomic():
        target = Theme.load(theme_id)
        target.json = raw_json
        target.save()
def disable_plugin(plugin_id):
    """Disable a plugin and reboot the application.

    If the plugin is already disabled, redirects back to the plugin list
    instead.
    """
    with db.atomic():
        try:
            plugin_to_disable = Plugin.select().where(
                Plugin.id == plugin_id).get()
        # Narrowed from BaseException: a missing row raises DoesNotExist.
        except Plugin.DoesNotExist:
            # BUG FIX: the original raised a bare string ("Plugin not
            # found"), which itself raises TypeError at runtime.
            raise Exception("Plugin not found")
        else:
            if plugin_to_disable.enabled is False:
                # Nothing to do; bounce back to the plugin listing.
                bottle.redirect(BASE_PATH + "/system/plugins")
            else:
                plugin_to_disable.enabled = False
                plugin_to_disable.save()
    # Reboot after the transaction commits so the disabled state is
    # persisted before the restart.
    from core.boot import reboot
    reboot()
def blog_apply_theme(blog_id, theme_id):
    """Two-step theme application: warn, then apply on confirmed POST.

    The posted 'confirm' field must equal the user's logout nonce; the
    apply itself runs in a transaction and replaces the blog's templates.
    """
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    # Publisher permission is required; raises on failure, value unused.
    permission = auth.is_blog_publisher(user, blog)
    # Non-None when templates are locked; shown instead of the status.
    reason = auth.check_template_lock(blog)
    theme = Theme.load(theme_id)
    tags = template_tags(blog=blog, user=user)
    from core.utils import Status
    if request.forms.getunicode('confirm') == user.logout_nonce:
        from core.models import db
        with db.atomic() as txn:
            blog.apply_theme(theme, user)
        status = Status(type='success',
                        close=False,
                        message=''' Theme <b>{}</b> was successfully applied to blog <b>{}</b>.</p> It is recommended that you <a href="{}">republish this blog.</a> '''.format(
                            theme.for_display, blog.for_display,
                            '{}/blog/{}/republish'.format(BASE_URL, blog.id)))
    else:
        status = Status(type='warning',
                        close=False,
                        message=''' You are about to apply theme <b>{}</b> to blog <b>{}</b>.</p> <p>This will OVERWRITE AND REMOVE ALL EXISTING TEMPLATES on this blog!</p> '''.format(theme.for_display, blog.for_display),
                        url='{}/blog/{}/themes'.format(BASE_URL, blog.id),
                        yes={
                            'id': 'delete',
                            'name': 'confirm',
                            'label': 'Yes, I want to apply this theme',
                            'value': user.logout_nonce
                        },
                        no={
                            'label': 'No, don\'t apply this theme',
                            'url': '{}/blog/{}/themes'.format(
                                BASE_URL, blog.id)
                        })
    # Lock reason, when present, overrides the success/warning status.
    tags.status = status if reason is None else reason
    return report(tags, 'blog_apply_theme', [blog, theme])
def emit(self, record):
    """Persist a formatted log record to the Log table.

    As a side effect, prunes Log rows older than ``date_diff`` days
    whenever ``date_diff`` is positive.
    """
    with db.atomic() as nested_txn:
        entry = Log(message=self.format(record), level=self.level)
        entry.save()
        if date_diff > 0:
            cutoff = datetime.datetime.utcnow() - datetime.timedelta(
                days=date_diff)
            Log.delete().where(Log.date < cutoff).execute()
def emit(self, record):
    """Write one formatted log record to the database.

    Also deletes Log rows older than ``date_diff`` days when
    ``date_diff`` is positive, so the table is pruned on every emit.
    """
    with db.atomic() as nested_txn:
        msg = self.format(record)
        level = self.level
        log_record = Log(
            message=msg,
            level=level)
        log_record.save()
        # date_diff is defined outside this handler — presumably a
        # module-level retention setting; confirm at the definition site.
        if date_diff > 0:
            delete_date = datetime.datetime.utcnow() - datetime.timedelta(
                days=date_diff)
            delete_older_logs = Log.delete().where(
                Log.date < delete_date)
            delete_older_logs.execute()
def reparent_page(page_id, blog_id):
    """Move a page to another blog, forcing a save via a text tweak."""
    from core.error import PageNotChanged
    with db.atomic():
        target_blog = Blog.load(blog_id)
        page = Page.load(page_id)
        page.blog = target_blog.id
        page.text += "\n"  # stupid hack, we should have a force-save option
        # also, have .save options kw, not args
        # Reparent any existing media
        # Delete any existing categories
        # Migrate/re-add any existing tags
        # Remove and regenerate basename, permalink, etc.
        # Create new fileinfo
        try:
            page.save(page.user)
        except PageNotChanged:
            pass
    return "OK"
def new_template(blog_id, tpl_type):
    """Create an empty template of the given type and open its editor.

    Requires designer rights on the blog and an unlocked template set.
    Non-media templates also get a default TemplateMapping.
    """
    with db.atomic() as txn:
        user = auth.is_logged_in(request)
        blog = Blog.load(blog_id)
        # Raises if the user is not a designer; value unused.
        permission = auth.is_blog_designer(user, blog)
        auth.check_template_lock(blog)
        mappings_index = template_mapping_index.get(tpl_type, None)
        if mappings_index is None:
            raise Exception('Mapping type not found')
        template = Template(
            blog=blog,
            theme=blog.theme,
            template_type=tpl_type,
            publishing_mode=publishing_mode.do_not_publish,
            body='',
        )
        # Saved once to obtain an id, then titled with that id and
        # saved a second time.
        template.save(user)
        template.title = 'Untitled Template #{}'.format(template.id)
        template.save(user)
        if tpl_type != template_type.media:
            # Default mapping whose path is the quoted basename literal.
            new_template_mapping = TemplateMapping(
                template=template,
                is_default=True,
                path_string="'" + utils.create_basename(template.title, blog)
                + "'")
            new_template_mapping.save()
            from core.cms import fileinfo
            fileinfo.build_mapping_xrefs((new_template_mapping, ))
    from settings import BASE_URL
    redirect(BASE_URL + '/template/{}/edit'.format(template.id))
def tag_delete(blog_id, tag_id):
    """Confirm-and-delete a tag from a blog.

    On a confirmed POST (nonce match), queues republish actions for any
    pages carrying the tag, then deletes the tag; otherwise shows a
    warning form with the affected-page count.
    """
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    # Publisher permission required; raises on failure, value unused.
    permission = auth.is_blog_publisher(user, blog)
    auth.check_tag_editing_lock(blog)
    try:
        tag = Tag.get(Tag.id == tag_id, Tag.blog == blog_id)
    except Tag.DoesNotExist:
        raise Tag.DoesNotExist("No such tag #{} in blog {}.".format(
            tag_id, blog.for_log))
    from settings import BASE_URL
    tag_page_count = tag.pages.count()
    # The logout nonce doubles as the deletion confirmation token.
    if request.forms.getunicode('confirm') == user.logout_nonce:
        from core.models import db
        if tag_page_count > 0:
            p_count = tag.pages.published.count()
            from core.cms import queue
            # Queue republishes before the tag rows disappear.
            with db.atomic() as txn:
                queue.queue_page_actions(tag.pages.published)
                queue.queue_ssi_actions(blog)
                queue.queue_index_actions(blog, True)
            recommendation = ''' <p><b>{}</b> pages affected by this change have been pushed to the queue.</p> '''.format(p_count)
        else:
            recommendation = ''' <p>No pages were associated with this tag.</p> '''
        with db.atomic() as txn:
            tag.delete_instance(recursive=True)
        status = Status(
            type='success',
            close=False,
            message=''' Tag <b>{}</b> was successfully deleted from blog <b>{}</b>.</p>{} '''.format(tag.for_log, blog.for_display, recommendation)
            )
    else:
        if tag_page_count > 0:
            recommendation = ''' <p><b>There are still <a target="_blank" href="{}/blog/{}/tag/{}/pages">{} pages</a> associated with this tag.</b></p> '''.format(BASE_URL, blog.id, tag.id, tag_page_count)
            tag_modified = tag_recently_modified(tag)
            if tag_modified:
                recommendation += "<p><b>" + tag_modified + "</b></p>"
        else:
            recommendation = ''
        status = Status(
            type='warning',
            close=False,
            message=''' You are about to delete tag <b>{}</b> in blog <b>{}</b>.</p>{} '''.format(tag.for_listing, blog.for_display, recommendation),
            url='{}/blog/{}/tag/{}/delete'.format(
                BASE_URL, blog.id, tag.id),
            yes={'id':'delete',
                 'name':'confirm',
                 'label':'Yes, I want to delete this tag',
                 'value':user.logout_nonce},
            no={'label':'No, don\'t delete this tag',
                'url':'{}/blog/{}/tag/{}'.format(
                    BASE_URL, blog.id, tag.id)}
            )
    tags = template_tags(user=user)
    tags.status = status
    return report(tags, 'blog_delete_tag', tag)
def blog_import(blog_id):
    """Import pages (with categories, tags, KVs and media) from a JSON dump.

    GET renders the import form; POST reads the JSON file named in the
    form, then creates or updates one Page per record, matching existing
    pages via the 'legacy_id' key-value. Returns an HTML progress log.
    """
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    # Publisher permission required; raises on failure, value unused.
    permission = auth.is_blog_publisher(user, blog)
    reason = auth.check_template_lock(blog, True)
    tags = template_tags(blog=blog, user=user)
    import os, settings
    import_path = os.path.join(settings.APPLICATION_PATH, "data",
                               "import.json")
    tags.status = reason
    if request.method == "POST":
        from core.models import db
        tpl = ''
        # The whole import runs in one transaction.
        with db.atomic() as txn:
            import json
            from core.utils import string_to_date
            import_path = request.forms.getunicode('import_path')
            with open(import_path, 'r', encoding='utf8') as f:
                json_data = json.load(f)
            from core.models import page_status, MediaAssociation, Category
            from core.error import PageNotChanged
            from core.libs.peewee import InterfaceError
            from core.cms import media_filetypes
            format_str = "<b>{}</b> / (<i>{}</i>)"
            # TODO: go in chunks of 50 or something?
            # allow graceful disconnection?
            for n in json_data:
                # q accumulates the per-record progress messages.
                q = []
                n_id = n['id']
                q.append("Checking {}".format(n_id))
                changed = False
                found = False
                # Match an already-imported page by its legacy id KV.
                match = Page.kv_get('legacy_id', n_id)
                if match.count() > 0:
                    if match[0].object_ref.blog == blog:
                        found = True
                        q.append(match[0].key + "/" + match[0].value +
                                 " / Exists: " +
                                 format_str.format(n['title'], n_id))
                        existing_entry = Page.load(match[0].objectid)
                        # An 'update' KV forces a refresh even if the
                        # incoming record is not newer.
                        update = existing_entry.kv_get('update').count()
                        # raise Exception(update)
                        q.append('{} / {}'.format(
                            string_to_date(
                                n['modified_date']).replace(tzinfo=None),
                            existing_entry.modified_date))
                        if string_to_date(n['modified_date']).replace(
                                tzinfo=None
                        ) <= existing_entry.modified_date and update == 0:
                            q.append('Existing page {} not changed.'.format(
                                existing_entry.id))
                        else:
                            changed = True
                            q.append(
                                'Updating data for existing page {}.'.format(
                                    existing_entry.id))
                            existing_entry.title = n['title']
                            existing_entry.text = n['text']
                            existing_entry.basename = n['basename']
                            existing_entry.excerpt = n['excerpt']
                            existing_entry.created_date = string_to_date(
                                n['created_date']).replace(tzinfo=None)
                            existing_entry.modified_date = string_to_date(
                                n['modified_date']).replace(tzinfo=None)
                            existing_entry.publication_date = string_to_date(
                                n['publication_date']).replace(tzinfo=None)
                            try:
                                existing_entry.save(
                                    user, False, False,
                                    'New revision from import')
                            except PageNotChanged:
                                pass
                            except InterfaceError:
                                raise Exception(
                                    "Error saving {}. Check the JSON to make sure it's valid."
                                    .format(n_id))
                            # Wipe dependent data; it is re-imported below.
                            for media in existing_entry.media:
                                media.kv_del()
                            existing_entry.clear_categories()
                            existing_entry.clear_kvs()
                            existing_entry.clear_tags()
                            existing_entry.clear_media()
                            entry = existing_entry
                if found is False:
                    q.append("Creating: " +
                             format_str.format(n['title'], n_id))
                    changed = True
                    new_entry = Page(
                        title=n['title'],
                        text=n['text'],
                        basename=n['basename'],
                        excerpt=n['excerpt'],
                        user=user,
                        blog=blog,
                        created_date=string_to_date(n['created_date']),
                        publication_date=string_to_date(
                            n['publication_date']),
                        modified_date=string_to_date(n['modified_date']),
                    )
                    new_entry.modified_date = new_entry.publication_date
                    if n['status'] in ('Publish', 'Published', 'Live'):
                        new_entry.status = page_status.published
                    new_entry.save(user)
                    entry = new_entry
                    q.append("New ID: {}".format(entry.id))
                # Everything from here on out is
                if changed:
                    # Register a legacy ID for the page
                    entry.kv_set("legacy_id", n["id"])
                    entry.kv_set("legacy_user", n["user_id"])
                    # Category assignments
                    categories = n['categories']
                    if categories == []:
                        saved_page_category = PageCategory.create(
                            page=entry,
                            category=blog.default_category,
                            primary=True).save()
                    else:
                        # First listed category becomes the primary one.
                        primary = True
                        for category in categories:
                            cat_exists = False
                            category_id = category['id']
                            existing_category = Category.kv_get(
                                'legacy_id', category_id)
                            if existing_category.count() > 0:
                                if existing_category[
                                        0].object_ref.blog == blog:
                                    cat_exists = True
                            if cat_exists is False:
                                q.append('Created new category {}/{}'.format(
                                    category_id, category['name']))
                                # NOTE(review): getattr on a dict always
                                # returns the default — presumably
                                # category.get('parent') was intended;
                                # left unchanged.
                                new_category = Category.create(
                                    blog=blog,
                                    title=category['name'],
                                    parent_category=getattr(
                                        category, 'parent', None))
                                new_category.save()
                                new_category.kv_set('legacy_id', category_id)
                            else:
                                new_category = Category.load(
                                    existing_category[0].objectid)
                                q.append(
                                    'Added to existing category {}/{}'.format(
                                        new_category.id, category['name']))
                            saved_page_category = PageCategory.create(
                                page=entry,
                                category=new_category,
                                primary=primary).save()
                            primary = False
                    # Check to make sure a default category exists for the whole blog.
                    # If not, assign one based on the lowest ID.
                    # This can always be reassigned later.
                    # Register tags
                    tags_added, tags_existing, _ = Tag.add_or_create(
                        n['tags'], page=entry)
                    q.append('Tags added: {}'.format(','.join(
                        n.tag for n in tags_added)))
                    q.append('Tags existing: {}'.format(','.join(
                        n.tag for n in tags_existing)))
                    # Register KVs
                    kvs = n['kvs']
                    for key in kvs:
                        if key != "":
                            value = kvs[key]
                            entry.kv_set(key, value)
                            q.append('KV: {}:{}'.format(key, value))
                    # Register media
                    media = n['media']
                    for m in media:
                        if 'path' not in m:
                            continue
                        path = os.path.split(m['path'])
                        # Reuse an existing media row with the same URL.
                        try:
                            new_media = Media.get(Media.url == m['url'])
                        except:
                            new_media = Media(
                                filename=path[1],
                                path=m['path'],
                                url=m['url'],
                                type=media_filetypes.image,
                                created_date=string_to_date(
                                    m['created_date']),
                                modified_date=string_to_date(
                                    m['modified_date']),
                                friendly_name=m['friendly_name'],
                                user=user,
                                blog=blog,
                                site=blog.site)
                        # TODO: RBF
                        try:
                            new_media.save()
                        except Exception:
                            continue
                        media_association = MediaAssociation(media=new_media,
                                                             page=entry)
                        media_association.save()
                        # Save legacy ID to KV on media
                        if 'id' in m:
                            new_media.kv_set('legacy_id', m['id'])
                        q.append('IMG: {}'.format(new_media.url))
                        # add tags for media
                        q.append('Tags: {}'.format(m['tags']))
                        new_tags = Tag.add_or_create(m['tags'],
                                                     media=new_media)
                        kvs = m['kvs']
                        for key in kvs:
                            value = kvs[key]
                            new_media.kv_set(key, value)
                            q.append('KV: {}:{}'.format(key, value))
                    # Rebuild the page's and its archives' fileinfos.
                    fileinfo.build_pages_fileinfos((entry, ))
                    fileinfo.build_archives_fileinfos((entry, ))
                tpl += ('<p>'.join(q)) + '<hr/>'
        return tpl
        # TODO:
        # Import or create categories as needed
        # Categories in export will need to have parent-child data
        # categories should have legacy identifiers where possible too
        # Import image files, assign those legacy KV identifiers
        # Modify URLs for imported images in posts
        # Make importing of image assets optional
    else:
        tpl = template(
            'ui/ui_blog_import',
            menu=generate_menu('blog_import', blog),
            # search_context=(search_context['blog'], blog),
            import_path=import_path,
            **tags.__dict__)
        return tpl
# NOTE(review): fragment of a scheduler loop — the enclosing function and
# `for` loop are outside this view. `n`, `b`, `skip`, `scheduled_pages`,
# `scheduled_page_report` and `blogs_to_check` are bound by that outer scope.
if nowait is False and Queue.is_insert_active(n):
    skip = 'Insert in progress for blog {}. Skipping this run.'.format(
        n.id)
elif Queue.control_jobs(n).count() > 0:
    skip = 'Job already running for blog {}. Skipping this run.'.format(
        n.id)
if skip:
    print(skip)
    scheduled_page_report.append(skip)
    # `continue` targets the enclosing (not visible) per-blog loop.
    continue
for p in scheduled_pages.where(Page.blog == b).distinct():
    scheduled_page_report.append('Scheduled pages:')
    try:
        # Each page is published in its own transaction so one failure
        # does not roll back the others.
        with db.atomic() as txn:
            scheduled_page_report.append('{} -- on {}'.format(
                p.title, p.publication_date))
            p.status = page_status.published
            p.save(p.user, no_revision=True)
            queue_page_actions((p, ))
            blogs_to_check[p.blog.id] = p.blog
    except Exception as e:
        # NOTE(review): reports n.title, but the failing page is p —
        # p.title looks intended; left unchanged.
        problem = 'Problem with page {}: {}'.format(n.title, e)
        print(problem)
        scheduled_page_report.append(problem)
queue_index_actions(n)
queue_ssi_actions(n)
def step_4_pre():
    """Final installation step: build the database and seed initial data.

    Recreates all tables, creates the first site, admin user, blog and
    theme, copies bundled plugins/themes into the data directory, writes
    the final INI file, and returns a progress report plus a completion
    message that triggers a client-side reboot.
    """
    # First pass flags a DB check and reboots before doing any real work.
    if get_ini('main', 'DO_DB_CHECK') is None:
        store_ini('main', 'DO_DB_CHECK', 'Y')
        from core.utils import reboot
        reboot()
    report = []
    from core.models import db, Template
    # Connectivity probe only; the connection is reopened below.
    try:
        db.connect()
    except:
        raise
    db.close()
    report.append("Database connection successful.")
    from settings import DB
    DB.recreate_database()
    report.append("Database tables created successfully.")
    username = "******"
    email = get_ini("user", "email")
    password = get_ini("user", "password")
    blog_path = get_ini("install", "blog_path")
    from core.utils import encrypt_password
    p_key = get_ini('key', 'PASSWORD_KEY')
    password = encrypt_password(password, p_key)
    db.connect()
    # All seed data is created inside one transaction.
    with db.atomic():
        from core.models import Site
        new_site = Site.create(
            name="Your first site",
            description="The description for your first site.",
            url=get_ini('main', 'base_url_root'),
            path=blog_path)
        report.append("Initial site created successfully.")
        from core.models import User
        new_user = User(
            name='Administrator',
            email=email,
            encrypted_password=password)
        new_user.save_pwd()
        from core.auth import role
        new_user_permissions = new_user.add_permission(
            permission=role.SYS_ADMIN,
            site=new_site
            )
        new_user_permissions.save()
        report.append("Initial admin user created successfully.")
        plugindir = _join((_s.APPLICATION_PATH, 'data', 'plugins'))
        import shutil
        # TODO: warn on doing this?
        # this should only happen with a totally fresh install, not an upgrade
        install_directory = _join((_s.APPLICATION_PATH, _s.INSTALL_SRC_PATH))
        if (os.path.isdir(plugindir)):
            shutil.rmtree(plugindir)
        shutil.copytree(_join((install_directory, 'plugins')), plugindir)
        report.append("Default plugins copied successfully to data directory.")
        themedir = _join((_s.APPLICATION_PATH, 'data', 'themes'))
        if (os.path.isdir(themedir)):
            shutil.rmtree(themedir)
        shutil.copytree(_join((install_directory, 'themes')), themedir)
        report.append("Default themes copied successfully to data directory.")
        from core import plugins
        # Register every plugin directory except bytecode caches.
        for x in os.listdir(plugindir):
            if (os.path.isdir(_join((plugindir, x))) is True
                    and x != '__pycache__'):
                new_plugin = plugins.register_plugin(x, enable=True)
                report.append("New plugin '{}' installed successfully.".format(
                    new_plugin.name))
        from settings.defaults import DEFAULT_THEME
        from core.models import Theme
        new_theme = Theme.install_to_system(DEFAULT_THEME)
        report.append(
            "Default theme created and installed successfully to system.")
        from core.models import Blog
        new_blog = Blog(
            site=new_site,
            name="Your first blog",
            description="The description for your first blog.",
            url=new_site.url,
            path=new_site.path,
            local_path=new_site.path,
            theme=new_theme
            )
        # TODO: add blog-level permission for new user as well
        new_blog.setup(new_user, new_theme)
        # TODO: Add default post
        report.append("Initial blog created successfully with default theme.")
    db.close()
    # Copy the working install settings into the permanent INI file.
    output_file_name = _join((_s.APPLICATION_PATH + _s.DATA_FILE_PATH,
                              _s.INI_FILE_NAME))
    config_parser = ConfigParser()
    sections = ('db', 'path', 'key')
    for s in sections:
        for name, value in parser.items(s):
            try:
                config_parser.add_section(s)
            except DuplicateSectionError:
                pass
            config_parser.set(s, name, value)
    # if request.environ['HTTP_HOST'] == _s.DEFAULT_LOCAL_ADDRESS + _s.DEFAULT_LOCAL_PORT:
    #     config_parser.add_section('server')
    #     config_parser.set('server', 'DESKTOP_MODE', 'True')
    try:
        with open(output_file_name, "w",
                  encoding='utf-8') as output_file:
            config_parser.write(output_file)
    except BaseException as e:
        raise SetupError(str(e.__class__.__name__) + ": " + str(e))
    # Remove the temporary install config; a missing file is fine.
    try:
        os.remove(config_file_name)
    except OSError as e:
        from core.error import not_found
        if not_found(e) is False:
            raise e
    except Exception as e:
        raise e
    finished = ''' <p>Installation is complete. <a href="{}">Return to the main page to begin using the application.</a> <script> $.get('/reboot',function(data){{}}); </script> '''.format(_s.BASE_URL)
    return {'report':report, 'finished':finished}
def db_recreate(table_name='FileInfo'):
    '''
    Utility function for recreating a database table after changes.

    Exports the table to a JSON dump, drops and recreates the table,
    then re-imports the dump. Returns an HTML progress report.
    '''
    from core import models
    table_model = getattr(models, table_name)
    table_ref = table_model._meta.db_table
    from settings import DB, EXPORT_FILE_PATH
    from core.libs.playhouse.dataset import DataSet
    n = []
    n.append("Beginning export process... Writing files to {}.".format(
        APPLICATION_PATH + EXPORT_FILE_PATH))
    xdb = DataSet(DB.dataset_connection())
    if os.path.isdir(APPLICATION_PATH + EXPORT_FILE_PATH) is False:
        os.makedirs(APPLICATION_PATH + EXPORT_FILE_PATH)
    with xdb.transaction():
        table = xdb[table_ref]
        n.append("Exporting table: " + table_name)
        filename = (APPLICATION_PATH + EXPORT_FILE_PATH + '/dump-' +
                    table_ref + '.json')
        table.freeze(format='json', filename=filename)
    xdb.close()
    n.append("Export process ended.")
    with db.atomic():
        db.drop_tables((table_model, ), safe=True)
        db.create_tables((table_model, ), safe=False)
    try:
        with xdb.transaction():
            n.append("Loading table " + table_name)
            try:
                table = xdb[table_ref]
            except:
                # BUG FIX: list.append takes exactly one argument; the
                # original passed two and raised TypeError here.
                n.append("<p>Sorry, couldn't create table " + table_name)
            else:
                filename = (APPLICATION_PATH + EXPORT_FILE_PATH + '/dump-' +
                            table_ref + '.json')
                if os.path.exists(filename):
                    try:
                        table.thaw(format='json',
                                   filename=filename,
                                   strict=True)
                    except Exception as e:
                        n.append("<p>Sorry, error:{}".format(e))
                else:
                    n.append("No data for table " + table_name)
    except Exception as e:
        # BUG FIX: `'Ooops: {}'.e` was an AttributeError; format the
        # exception into the message instead.
        n.append('Ooops: {}'.format(e))
    else:
        xdb.query(DB.post_import())
    xdb.close()
    # DB.recreate_indexes()
    n.append("Import process ended.")
    return ('<p>'.join(n))
def delete_blog(blog_id):
    """Delete a blog and, recursively, everything that belongs to it."""
    with db.atomic():
        doomed_blog = Blog.load(blog_id)
        doomed_blog.delete_instance(recursive=True)
    return "Blog {} deleted".format(blog_id)
def tag_delete(blog_id, tag_id):
    """Delete a tag after a nonce-confirmed POST, or show a warning form.

    When pages carry the tag, republish actions for them are queued
    before the tag (and its associations) are removed.
    """
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    # Raises unless the user can publish this blog; value unused.
    permission = auth.is_blog_publisher(user, blog)
    auth.check_tag_editing_lock(blog)
    try:
        tag = Tag.get(Tag.id == tag_id, Tag.blog == blog_id)
    except Tag.DoesNotExist:
        raise Tag.DoesNotExist("No such tag #{} in blog {}.".format(
            tag_id, blog.for_log))
    from settings import BASE_URL
    tag_page_count = tag.pages.count()
    # Confirmation is the user's logout nonce posted back in 'confirm'.
    if request.forms.getunicode('confirm') == user.logout_nonce:
        from core.models import db
        if tag_page_count > 0:
            p_count = tag.pages.published.count()
            from core.cms import queue
            # Push republish work before the tag rows are deleted.
            with db.atomic() as txn:
                queue.queue_page_actions(tag.pages.published)
                queue.queue_ssi_actions(blog)
                queue.queue_index_actions(blog, True)
            recommendation = ''' <p><b>{}</b> pages affected by this change have been pushed to the queue.</p> '''.format(p_count)
        else:
            recommendation = ''' <p>No pages were associated with this tag.</p> '''
        with db.atomic() as txn:
            tag.delete_instance(recursive=True)
        status = Status(type='success',
                        close=False,
                        message=''' Tag <b>{}</b> was successfully deleted from blog <b>{}</b>.</p>{} '''.format(
                            tag.for_log, blog.for_display, recommendation))
    else:
        if tag_page_count > 0:
            recommendation = ''' <p><b>There are still <a target="_blank" href="{}/blog/{}/tag/{}/pages">{} pages</a> associated with this tag.</b></p> '''.format(BASE_URL, blog.id, tag.id, tag_page_count)
            tag_modified = tag_recently_modified(tag)
            if tag_modified:
                recommendation += "<p><b>" + tag_modified + "</b></p>"
        else:
            recommendation = ''
        status = Status(type='warning',
                        close=False,
                        message=''' You are about to delete tag <b>{}</b> in blog <b>{}</b>.</p>{} '''.format(tag.for_listing, blog.for_display,
                                                                                                              recommendation),
                        url='{}/blog/{}/tag/{}/delete'.format(
                            BASE_URL, blog.id, tag.id),
                        yes={
                            'id': 'delete',
                            'name': 'confirm',
                            'label': 'Yes, I want to delete this tag',
                            'value': user.logout_nonce
                        },
                        no={
                            'label': 'No, don\'t delete this tag',
                            'url': '{}/blog/{}/tag/{}'.format(
                                BASE_URL, blog.id, tag.id)
                        })
    tags = template_tags(user=user)
    tags.status = status
    return report(tags, 'blog_delete_tag', tag)
def tag_edit(blog_id, tag_id):
    """Rename a tag via its edit form.

    On POST with a changed name, renames the tag if no tag with the new
    name exists, and queues republishing for all pages carrying it.
    """
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    # Editor permission required; raises on failure, value unused.
    permission = auth.is_blog_editor(user, blog)
    auth.check_tag_editing_lock(blog)
    try:
        tag = Tag.get(Tag.id == tag_id, Tag.blog == blog_id)
    except Tag.DoesNotExist:
        raise Tag.DoesNotExist("No such tag #{} in blog {}.".format(
            tag_id, blog.for_log))
    tags = template_tags(user=user)
    from core.utils import html_escape
    if request.method == "POST":
        new_tag_name = request.forms.getunicode('tag_name')
        if new_tag_name != tag.tag:
            try:
                # NOTE(review): this uniqueness probe is not filtered by
                # blog — confirm whether tag names are meant to be
                # globally unique across blogs.
                Tag.get(Tag.tag == new_tag_name)
            except Tag.DoesNotExist:
                # New name is free: rename and queue republishing.
                tag_count = tag.pages.count()
                msg = "Tag changed from {} to <b>{}</b>. {} pages (and their archives) have been queued for republishing.".format(
                    tag.for_log, html_escape(new_tag_name), tag_count)
                tag.tag = new_tag_name
                tag.save()
                if tag_count > 0:
                    from core.cms import queue
                    from core.models import db
                    with db.atomic() as txn:
                        queue.queue_page_actions(tag.pages.published)
                        queue.queue_ssi_actions(blog)
                        queue.queue_index_actions(blog, True)
                tags.status = Status(type='info', message=msg)
            else:
                # try/except/else: the name lookup succeeded, so a tag
                # with that name already exists and the rename is refused.
                msg = "Tag not renamed. A tag with the name '{}' already exists.".format(
                    html_escape(new_tag_name))
                tags.status = Status(type='danger', message=msg, no_sure=True)
    else:
        tag_modified = tag_recently_modified(tag)
        if tag_modified:
            tags.status = Status(type='danger',
                                 message=tag_modified,
                                 no_sure=True)
    tpl = template('edit/tag',
                   menu=generate_menu('blog_edit_tag', tag),
                   search_context=(search_contexts['sites'], None),
                   tag=tag,
                   **tags.__dict__)
    return tpl
def db_recreate(table_name='FileInfo'):
    '''
    Utility function for recreating a database table after changes.

    Dumps the table to JSON, drops/recreates it, then thaws the dump
    back in. Returns an HTML-formatted progress report.
    '''
    from core import models
    table_model = getattr(models, table_name)
    table_ref = table_model._meta.db_table
    from settings import DB, EXPORT_FILE_PATH
    from core.libs.playhouse.dataset import DataSet
    n = []
    n.append("Beginning export process... Writing files to {}.".format(
        APPLICATION_PATH + EXPORT_FILE_PATH))
    xdb = DataSet(DB.dataset_connection())
    if os.path.isdir(APPLICATION_PATH + EXPORT_FILE_PATH) is False:
        os.makedirs(APPLICATION_PATH + EXPORT_FILE_PATH)
    with xdb.transaction():
        table = xdb[table_ref]
        n.append("Exporting table: " + table_name)
        filename = (APPLICATION_PATH + EXPORT_FILE_PATH + '/dump-' +
                    table_ref + '.json')
        table.freeze(format='json', filename=filename)
    xdb.close()
    n.append("Export process ended.")
    with db.atomic():
        db.drop_tables((table_model, ), safe=True)
        db.create_tables((table_model, ), safe=False)
    try:
        with xdb.transaction():
            n.append("Loading table " + table_name)
            try:
                table = xdb[table_ref]
            except:
                # BUG FIX: the original called append() with two
                # arguments, which raises TypeError.
                n.append("<p>Sorry, couldn't create table " + table_name)
            else:
                filename = (APPLICATION_PATH + EXPORT_FILE_PATH + '/dump-' +
                            table_ref + '.json')
                if os.path.exists(filename):
                    try:
                        table.thaw(format='json',
                                   filename=filename,
                                   strict=True)
                    except Exception as e:
                        n.append("<p>Sorry, error:{}".format(e))
                else:
                    n.append("No data for table " + table_name)
    except Exception as e:
        # BUG FIX: the original `'Ooops: {}'.e` raised AttributeError.
        n.append('Ooops: {}'.format(e))
    else:
        xdb.query(DB.post_import())
    xdb.close()
    # DB.recreate_indexes()
    n.append("Import process ended.")
    return ('<p>'.join(n))
# NOTE(review): fragment of a scheduler loop — the enclosing function and
# per-blog `for` loop are not visible here. `n`, `b`, `skip`, `clear_job`,
# `nowait`, `scheduled_pages`, `scheduled_page_report` and `blogs_to_check`
# come from that outer scope.
if clear_job:
    Queue.stop(n)
if nowait is False and Queue.is_insert_active(n):
    skip = 'Insert in progress for blog {}. Skipping this run.'.format(n.id)
elif Queue.control_jobs(n).count() > 0:
    skip = 'Job already running for blog {}. Skipping this run.'.format(n.id)
if skip:
    print (skip)
    scheduled_page_report.append(skip)
    # `continue` targets the enclosing (not visible) loop over blogs.
    continue
for p in scheduled_pages.where(Page.blog == b).distinct():
    scheduled_page_report.append('Scheduled pages:')
    try:
        # Publish each page in its own transaction so one failure does
        # not undo the others.
        with db.atomic() as txn:
            scheduled_page_report.append('{} -- on {}'.format(
                p.title, p.publication_date))
            p.status = page_status.published
            p.save(p.user, no_revision=True)
            queue_page_actions((p,))
            blogs_to_check[p.blog.id] = p.blog
    except Exception as e:
        # NOTE(review): reports n.title though the failing page is p —
        # p.title looks intended; left unchanged.
        problem = 'Problem with page {}: {}'.format(n.title, e)
        print (problem)
        scheduled_page_report.append(problem)
queue_index_actions(n)
queue_ssi_actions(n)
waiting = Queue.job_counts(blog=n)
def apply_theme_test(blog_id, theme_id):
    '''
    Apply the theme identified by ``theme_id`` to the blog identified by
    ``blog_id`` inside a single transaction, returning whatever report
    ``mgmt.theme_apply_to_blog`` produces.
    '''
    target_blog = get_blog(blog_id)
    chosen_theme = get_theme(theme_id)
    with db.atomic():
        result = mgmt.theme_apply_to_blog(chosen_theme, target_blog)
    return result
def tag_edit(blog_id, tag_id):
    '''
    Editor view for a single tag in a blog.

    GET renders the edit form (with a warning if the tag was recently
    modified); POST renames the tag and, if the tag is attached to any
    pages, queues those pages plus the blog's SSIs and indexes for
    republishing.
    '''
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    # raises if the user lacks editor rights on this blog
    permission = auth.is_blog_editor(user, blog)
    auth.check_tag_editing_lock(blog)

    try:
        tag = Tag.get(Tag.id == tag_id, Tag.blog == blog_id)
    except Tag.DoesNotExist:
        # re-raise with a human-readable message for the error page
        raise Tag.DoesNotExist("No such tag #{} in blog {}.".format(
            tag_id, blog.for_log))

    tags = template_tags(user=user)

    from core.utils import html_escape

    if request.method == "POST":
        new_tag_name = request.forms.getunicode('tag_name')
        if new_tag_name != tag.tag:
            try:
                # collision check: rename only when no tag with the new name
                # exists.  NOTE(review): this lookup is not scoped to this
                # blog -- confirm whether tag names are global or per-blog.
                Tag.get(Tag.tag == new_tag_name)
            except Tag.DoesNotExist:
                # no collision -- perform the rename
                tag_count = tag.pages.count()
                msg = "Tag changed from {} to <b>{}</b>. {} pages (and their archives) have been queued for republishing.".format(
                    tag.for_log, html_escape(new_tag_name), tag_count)
                tag.tag = new_tag_name
                tag.save()
                if tag_count > 0:
                    # republish everything that referenced the old tag name
                    from core.cms import queue
                    from core.models import db
                    with db.atomic() as txn:
                        queue.queue_page_actions(tag.pages.published)
                        queue.queue_ssi_actions(blog)
                        queue.queue_index_actions(blog, True)
                tags.status = Status(
                    type='info',
                    message=msg
                )
            else:
                # a tag with the requested name already exists -- refuse
                msg = "Tag not renamed. A tag with the name '{}' already exists.".format(
                    html_escape(new_tag_name)
                )
                tags.status = Status(
                    type='danger',
                    message=msg,
                    no_sure=True)
    else:
        # GET: warn if this tag was changed recently (possible edit conflict)
        tag_modified = tag_recently_modified(tag)
        if tag_modified:
            tags.status = Status(
                type='danger',
                message=tag_modified,
                no_sure=True)

    tpl = template('edit/tag',
        menu=generate_menu('blog_edit_tag', tag),
        search_context=(search_contexts['sites'], None),
        tag=tag,
        **tags.__dict__)

    return tpl
def step_4_pre():
    '''
    Final installer step: verifies DB connectivity, (re)creates the database
    schema, creates the initial site / admin user / blog, copies the bundled
    plugins and themes into the data directory, and writes the permanent INI
    configuration file.

    Returns a dict with an HTML ``report`` list and a ``finished`` message.
    NOTE(review): relies on module-level names defined elsewhere in this
    module (``parser``, ``config_file_name``, ``get_ini``, ``store_ini``,
    ``_join``, ``_s``, ``ConfigParser``, ``DuplicateSectionError``,
    ``SetupError``) -- confirm against the full file.
    '''
    # First pass: flag that a DB check is needed, then reboot the app so the
    # check runs in a fresh process.
    if get_ini('main', 'DO_DB_CHECK') is None:
        store_ini('main', 'DO_DB_CHECK', 'Y')
        from core.utils import reboot
        reboot()

    report = []

    from core.models import db, Template  # NOTE(review): Template is unused here

    try:
        db.connect()
    except:
        # NOTE(review): bare re-raise makes this try/except a no-op
        raise
    db.close()
    report.append("Database connection successful.")

    from settings import DB
    DB.recreate_database()
    report.append("Database tables created successfully.")

    # NOTE(review): ``username`` is assigned but never used (value looks redacted)
    username = "******"
    email = get_ini("user", "email")
    password = get_ini("user", "password")
    blog_path = get_ini("install", "blog_path")

    from core.utils import encrypt_password
    p_key = get_ini('key', 'PASSWORD_KEY')
    password = encrypt_password(password, p_key)

    db.connect()

    # All initial objects are created inside one transaction.
    # NOTE(review): indentation reconstructed from a collapsed dump --
    # confirm the plugin/theme file copies below were originally inside
    # this atomic block.
    with db.atomic():

        from core.models import Site
        new_site = Site.create(
            name="Your first site",
            description="The description for your first site.",
            url=get_ini('main', 'base_url_root'),
            path=blog_path)
        report.append("Initial site created successfully.")

        from core.models import User
        new_user = User(name='Administrator',
            email=email,
            encrypted_password=password)
        new_user.save_pwd()

        from core.auth import role
        # grant the first user system-admin rights on the new site
        new_user_permissions = new_user.add_permission(
            permission=role.SYS_ADMIN,
            site=new_site)
        new_user_permissions.save()
        report.append("Initial admin user created successfully.")

        plugindir = _join((_s.APPLICATION_PATH, 'data', 'plugins'))

        import shutil
        # TODO: warn on doing this?
        # this should only happen with a totally fresh install, not an upgrade
        install_directory = _join((_s.APPLICATION_PATH, _s.INSTALL_SRC_PATH))

        # replace any existing plugin directory with the bundled defaults
        if (os.path.isdir(plugindir)):
            shutil.rmtree(plugindir)
        shutil.copytree(_join((install_directory, 'plugins')), plugindir)
        report.append("Default plugins copied successfully to data directory.")

        # same for themes
        themedir = _join((_s.APPLICATION_PATH, 'data', 'themes'))
        if (os.path.isdir(themedir)):
            shutil.rmtree(themedir)
        shutil.copytree(_join((install_directory, 'themes')), themedir)
        report.append("Default themes copied successfully to data directory.")

        from core import plugins
        # register every plugin directory shipped with the install
        for x in os.listdir(plugindir):
            if (os.path.isdir(_join((plugindir, x))) is True
                    and x != '__pycache__'):
                new_plugin = plugins.register_plugin(x, enable=True)
                report.append("New plugin '{}' installed successfully.".format(
                    new_plugin.name))

        from settings.defaults import DEFAULT_THEME
        from core.models import Theme
        new_theme = Theme.install_to_system(DEFAULT_THEME)
        report.append(
            "Default theme created and installed successfully to system.")

        from core.models import Blog
        new_blog = Blog(site=new_site,
            name="Your first blog",
            description="The description for your first blog.",
            url=new_site.url,
            path=new_site.path,
            local_path=new_site.path,
            theme=new_theme)
        # TODO: add blog-level permission for new user as well
        new_blog.setup(new_user, new_theme)
        # TODO: Add default post
        report.append("Initial blog created successfully with default theme.")

    db.close()

    # Copy the collected db/path/key settings into the permanent INI file.
    output_file_name = _join(
        (_s.APPLICATION_PATH + _s.DATA_FILE_PATH, _s.INI_FILE_NAME))

    config_parser = ConfigParser()
    sections = ('db', 'path', 'key')
    for s in sections:
        # ``parser`` is presumably the module-level install-time parser
        # holding the values collected in earlier steps -- TODO confirm
        for name, value in parser.items(s):
            try:
                config_parser.add_section(s)
            except DuplicateSectionError:
                # section already added on a previous key -- ignore
                pass
            config_parser.set(s, name, value)

    # if request.environ['HTTP_HOST'] == _s.DEFAULT_LOCAL_ADDRESS + _s.DEFAULT_LOCAL_PORT:
        # config_parser.add_section('server')
        # config_parser.set('server', 'DESKTOP_MODE', 'True')

    try:
        with open(output_file_name, "w",
            encoding='utf-8') as output_file:
            config_parser.write(output_file)
    except BaseException as e:
        # surface any write failure as an installer error
        raise SetupError(str(e.__class__.__name__) + ": " + str(e))

    # Remove the temporary install-time config; a missing file is fine.
    try:
        os.remove(config_file_name)
    except OSError as e:
        from core.error import not_found
        if not_found(e) is False:
            raise e
    except Exception as e:
        raise e

    finished = '''
<p>Installation is complete.
<a href="{}">Return to the main page to begin using the application.</a>
<script>
$.get('/reboot',function(data){{}});
</script>
'''.format(_s.BASE_URL)

    return {'report': report, 'finished': finished}
def blog_import(blog_id):
    '''
    Import pages (with categories, tags, KVs and media) into a blog from a
    JSON dump produced by a legacy export.

    GET renders the import form; POST runs the whole import inside a single
    transaction and returns an HTML progress report.
    '''
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    permission = auth.is_blog_publisher(user, blog)
    reason = auth.check_template_lock(blog, True)

    tags = template_tags(blog=blog, user=user)

    import os
    import settings
    # default location offered in the form
    import_path = os.path.join(
        settings.APPLICATION_PATH,
        "data",
        "import.json")

    tags.status = reason

    if request.method == "POST":
        from core.models import db
        tpl = ''
        with db.atomic() as txn:
            import json
            from core.utils import string_to_date

            import_path = request.forms.getunicode('import_path')
            with open(import_path, 'r', encoding='utf8') as f:
                json_data = json.load(f)

            from core.models import page_status, MediaAssociation, Category
            from core.error import PageNotChanged
            from core.libs.peewee import InterfaceError
            from core.cms import media_filetypes

            format_str = "<b>{}</b> / (<i>{}</i>)"

            # TODO: go in chunks of 50 or something?
            # allow graceful disconnection?
            for n in json_data:
                q = []
                n_id = n['id']
                q.append("Checking {}".format(n_id))

                changed = False
                found = False

                # match existing entries by the legacy ID stored in the KV store
                match = Page.kv_get('legacy_id', n_id)
                if match.count() > 0:
                    if match[0].object_ref.blog == blog:
                        found = True
                        q.append(match[0].key + "/" + match[0].value +
                                 " / Exists: " +
                                 format_str.format(n['title'], n_id))
                        existing_entry = Page.load(match[0].objectid)
                        # a nonzero 'update' KV forces a refresh regardless of dates
                        update = existing_entry.kv_get('update').count()
                        # raise Exception(update)
                        q.append('{} / {}'.format(
                            string_to_date(n['modified_date']).replace(tzinfo=None),
                            existing_entry.modified_date))
                        if (string_to_date(n['modified_date']).replace(tzinfo=None) <=
                                existing_entry.modified_date and update == 0):
                            q.append('Existing page {} not changed.'.format(
                                existing_entry.id))
                        else:
                            changed = True
                            q.append('Updating data for existing page {}.'.format(
                                existing_entry.id))
                            existing_entry.title = n['title']
                            existing_entry.text = n['text']
                            existing_entry.basename = n['basename']
                            existing_entry.excerpt = n['excerpt']
                            existing_entry.created_date = string_to_date(
                                n['created_date']).replace(tzinfo=None)
                            existing_entry.modified_date = string_to_date(
                                n['modified_date']).replace(tzinfo=None)
                            existing_entry.publication_date = string_to_date(
                                n['publication_date']).replace(tzinfo=None)
                            try:
                                existing_entry.save(user, False, False,
                                    'New revision from import')
                            except PageNotChanged:
                                # identical content -- nothing to revise
                                pass
                            except InterfaceError:
                                raise Exception(
                                    "Error saving {}. Check the JSON to make sure it's valid.".format(n_id))
                            # strip all existing metadata before re-importing it
                            for media in existing_entry.media:
                                media.kv_del()
                            existing_entry.clear_categories()
                            existing_entry.clear_kvs()
                            existing_entry.clear_tags()
                            existing_entry.clear_media()
                            entry = existing_entry

                if found is False:
                    q.append("Creating: " + format_str.format(n['title'], n_id))
                    changed = True
                    new_entry = Page(
                        title=n['title'],
                        text=n['text'],
                        basename=n['basename'],
                        excerpt=n['excerpt'],
                        user=user,
                        blog=blog,
                        created_date=string_to_date(n['created_date']),
                        publication_date=string_to_date(n['publication_date']),
                        modified_date=string_to_date(n['modified_date']),
                    )
                    new_entry.modified_date = new_entry.publication_date
                    if n['status'] in ('Publish', 'Published', 'Live'):
                        new_entry.status = page_status.published
                    new_entry.save(user)
                    entry = new_entry
                    q.append("New ID: {}".format(entry.id))

                # Everything from here on out runs only when the entry is
                # new or was actually updated.
                if changed:
                    # Register legacy identifiers for the page
                    entry.kv_set("legacy_id", n["id"])
                    entry.kv_set("legacy_user", n["user_id"])

                    # Category assignments
                    categories = n['categories']
                    if categories == []:
                        # no categories in the dump: fall back to the blog default
                        saved_page_category = PageCategory.create(
                            page=entry,
                            category=blog.default_category,
                            primary=True).save()
                    else:
                        primary = True
                        for category in categories:
                            cat_exists = False
                            category_id = category['id']
                            existing_category = Category.kv_get(
                                'legacy_id', category_id)
                            if existing_category.count() > 0:
                                if existing_category[0].object_ref.blog == blog:
                                    cat_exists = True
                            if cat_exists is False:
                                q.append('Created new category {}/{}'.format(
                                    category_id, category['name']))
                                # FIX: the original used
                                # getattr(category, 'parent', None); since
                                # ``category`` is a dict from json.load, that
                                # always returned None and parent links were
                                # silently dropped.  dict.get preserves them.
                                new_category = Category.create(
                                    blog=blog,
                                    title=category['name'],
                                    parent_category=category.get('parent')
                                )
                                new_category.save()
                                new_category.kv_set('legacy_id',
                                    category_id
                                    )
                            else:
                                new_category = Category.load(
                                    existing_category[0].objectid)
                                q.append('Added to existing category {}/{}'.format(
                                    new_category.id, category['name']))
                            saved_page_category = PageCategory.create(
                                page=entry,
                                category=new_category,
                                primary=primary
                            ).save()
                            # only the first listed category is primary
                            primary = False

                    # Check to make sure a default category exists for the whole blog.
                    # If not, assign one based on the lowest ID.
                    # This can always be reassigned later.

                    # Register tags
                    tags_added, tags_existing, _ = Tag.add_or_create(
                        n['tags'], page=entry)
                    # (generator variable renamed from ``n`` to ``t`` -- the
                    # original shadowed the page dict, which was confusing)
                    q.append('Tags added: {}'.format(
                        ','.join(t.tag for t in tags_added)))
                    q.append('Tags existing: {}'.format(
                        ','.join(t.tag for t in tags_existing)))

                    # Register KVs
                    kvs = n['kvs']
                    for key in kvs:
                        if key != "":
                            value = kvs[key]
                            entry.kv_set(key, value)
                            q.append('KV: {}:{}'.format(key, value))

                    # Register media
                    media = n['media']
                    for m in media:
                        if 'path' not in m:
                            continue
                        path = os.path.split(m['path'])
                        # FIX: narrowed the original bare ``except:`` so that
                        # only a missing row creates a new Media object.
                        try:
                            new_media = Media.get(Media.url == m['url'])
                        except Media.DoesNotExist:
                            new_media = Media(
                                filename=path[1],
                                path=m['path'],
                                url=m['url'],
                                type=media_filetypes.image,
                                created_date=string_to_date(m['created_date']),
                                modified_date=string_to_date(m['modified_date']),
                                friendly_name=m['friendly_name'],
                                user=user,
                                blog=blog,
                                site=blog.site
                            )
                        # TODO: RBF
                        try:
                            new_media.save()
                        except Exception:
                            # best-effort: skip media that fail to save
                            continue

                        media_association = MediaAssociation(
                            media=new_media,
                            page=entry)
                        media_association.save()

                        # Save legacy ID to KV on media
                        if 'id' in m:
                            new_media.kv_set('legacy_id', m['id'])
                        q.append('IMG: {}'.format(new_media.url))

                        # add tags for media
                        q.append('Tags: {}'.format(m['tags']))
                        new_tags = Tag.add_or_create(m['tags'], media=new_media)

                        kvs = m['kvs']
                        for key in kvs:
                            value = kvs[key]
                            new_media.kv_set(key, value)
                            q.append('KV: {}:{}'.format(key, value))

                    # queue rebuilds of the page's files and archives
                    fileinfo.build_pages_fileinfos((entry,))
                    fileinfo.build_archives_fileinfos((entry,))

                tpl += ('<p>'.join(q)) + '<hr/>'

        return tpl

        # TODO:
        # Import or create categories as needed
        # Categories in export will need to have parent-child data
        # categories should have legacy identifiers where possible too
        # Import image files, assign those legacy KV identifiers
        # Modify URLs for imported images in posts
        # Make importing of image assets optional

    else:
        tpl = template('ui/ui_blog_import',
            menu=generate_menu('blog_import', blog),
            # search_context=(search_context['blog'], blog),
            import_path=import_path,
            **tags.__dict__)

    return tpl