def edit(page, content, editor='sysop'):
    """Append a new revision of *page* to its JSON history and index it.

    Args:
        page: page name, appended to WIKI_PATH to locate the page directory.
        content: new revision body to store and index.
        editor: name recorded for this revision (defaults to 'sysop').

    Returns:
        True on success.  (The original also had an unreachable
        ``return False`` after ``return True``; that dead code is removed.)
    """
    obj = {
        'content': content,
        'editor': editor,
        'date': time.strftime('%Y-%m-%d %H:%M:%S'),
    }
    filename = check_or_create_dir(WIKI_PATH + page) + '/' + 'data.json'
    content_list = []
    try:
        with open(filename) as fp:
            content_list = json.loads(fp.read())
    except (OSError, ValueError):
        # First revision (file does not exist yet) or unreadable/corrupt
        # JSON: deliberately start with an empty history.  The original
        # used a bare `except:` which would also swallow KeyboardInterrupt.
        pass
    content_list.append(obj)
    with open(filename, 'w') as fp:
        fp.write(json.dumps(content_list))
    write_to_recent({'page': page, 'editor': obj['editor'], 'date': obj['date']})
    search.add_to_index(obj['content'], page)
    return True
def create_article_from_template(rawText, title, filename, author):
    """Render a markdown article to its HTML file and rebuild site indexes.

    Args:
        rawText: the article body as markdown source.
        title: article title passed to the template.
        filename: name that embeds the layout — chars [:10] look like a
            YYYY-MM-DD date (turned into a Y/M/D directory path) and the
            slug starts at char 17 — TODO confirm against the caller.
        author: author value passed to the template and homepage rebuild.

    On render/IO failure a flash message is emitted and the search index /
    archive / homepage rebuild steps are skipped (the `else` branch).
    """
    articlePath = os.path.join(blueprint.config['SITE_ROOT_DIR'],
                               filename[:10].replace('-', '/'))
    if not os.path.exists(articlePath):
        try:
            os.makedirs(articlePath)
        # Narrowed from a bare `except:` — directory-creation failures are
        # OSError; anything else should propagate, not be hidden.
        except OSError:
            flash("There was a problem creating directories: %s" % articlePath,
                  category='error')
    article_file_path = os.path.join(articlePath, "%s.html" % filename[17:])
    content = {
        'title': title,
        'html': markdown(rawText, output_format='html5'),
        'url': "%s%s%s/%s.html" % (blueprint.config['SITE_URL'],
                                   blueprint.config['SITE_ROOT_URL'],
                                   filename[:10].replace('-', '/'),
                                   filename[17:]),
    }
    template_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(blueprint.config['SITE_TEMPLATES_DIR']))
    template = template_env.get_template('article.html')
    try:
        with codecs.open(article_file_path, encoding='utf-8', mode='w') as articleFile:
            articleFile.write(template.render(content=content,
                                              author=author,
                                              pubDate=get_pub_date(filename)[1]))
    except jinja2.TemplateError:
        flash("There was a problem generating the template for: %s" % article_file_path,
              category='error')
    # Narrowed from a bare `except:` — file access problems are OSError.
    except OSError:
        flash("There was a problem accessing file: %s" % article_file_path,
              category='error')
    else:
        # Only index/rebuild when the article was actually written.
        search.add_to_index(rawText, content['url'])
        rebuild_archive_indexes()
        rebuild_homepage(author)
def reindex(cls):
    """Re-add every row of this model to the search index.

    A failure on one row is printed and skipped so a single bad record
    does not abort the whole rebuild.
    """
    for obj in cls.query.all():
        try:
            print(obj.id)  # progress marker — NOTE(review): consider logging instead
            add_to_index(cls.__tablename__, obj)
        # Narrowed from a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit.  The trailing `continue` was
        # redundant (last statement of the loop body) and is removed.
        except Exception:
            traceback.print_exc()
def after_commit(cls, session):
    """Push the changes recorded on *session* into the search index.

    Added and updated objects are (re)indexed; deleted objects are
    removed.  The recorded change set is cleared afterwards.
    """
    recorded = session._changes
    # 'add' and 'update' receive the same treatment: (re)index the object.
    for bucket in ('add', 'update'):
        for obj in recorded[bucket]:
            add_to_index(cls.__tablename__, obj)
    for obj in recorded['delete']:
        remove_from_index(cls.__tablename__, obj)
    session._changes = None
def after_commit(cls, session):
    """Mirror committed changes of searchable models into the search index.

    Only objects that are instances of SearchableMixin are touched;
    adds/updates are indexed, deletes are removed.  Clears the recorded
    change set when done.
    """
    pending = session._changes
    # Dispatch table: which index operation applies to each change bucket.
    handlers = (
        ('add', add_to_index),
        ('update', add_to_index),
        ('delete', remove_from_index),
    )
    for bucket, handler in handlers:
        for obj in pending[bucket]:
            if isinstance(obj, SearchableMixin):
                handler(obj.__tablename__, obj)
    session._changes = None
def reindex_by_leginfo_ids(cls, leginfo_ids):
    """Re-index all rows whose ``leginfo_id`` appears in *leginfo_ids*.

    SQLite rejects very long queries, so instead of one giant
    ``IN (...)`` clause the ids are processed in fixed-size chunks.
    """
    chunk_size = 20  # number of ids per IN(...) clause
    for start in range(0, len(leginfo_ids), chunk_size):
        chunk = leginfo_ids[start:start + chunk_size]
        for obj in cls.query.filter(cls.leginfo_id.in_(chunk)).all():
            add_to_index(cls.__tablename__, obj)
def edit(page, content, editor='sysop'):
    """Store a new revision of *page* in its JSON history and index it.

    Args:
        page: page name, appended to WIKI_PATH to locate the page directory.
        content: new revision body to store and index.
        editor: name recorded for this revision (defaults to 'sysop').

    Returns:
        True on success.  (The original also had an unreachable
        ``return False`` after ``return True``; that dead code is removed.)
    """
    obj = {
        'content': content,
        'editor': editor,
        'date': time.strftime('%Y-%m-%d %H:%M:%S'),
    }
    filename = check_or_create_dir(WIKI_PATH + page) + '/' + 'data.json'
    content_list = []
    try:
        with open(filename) as fp:
            content_list = json.loads(fp.read())
    except (OSError, ValueError):
        # First revision (file missing) or corrupt JSON: start a fresh
        # history.  The original bare `except:` hid every error class.
        pass
    content_list.append(obj)
    with open(filename, 'w') as fp:
        fp.write(json.dumps(content_list))
    write_to_recent({'page': page, 'editor': obj['editor'], 'date': obj['date']})
    search.add_to_index(obj['content'], page)
    return True
def reindex(cls):
    """Add every row of this model to the search index."""
    table = cls.__tablename__
    for record in cls.query:
        add_to_index(table, record)