def create_pages(page_ids=None, count=0):
    # Recursively seed a small random tree of pages, stopping after three levels.
    if not page_ids:
        page_ids = []
    count += 1
    new_page_ids = []
    if count >= 4:
        return False
    if not page_ids:
        for i in range(randint(1, 2)):
            page = Page(name='Page %s' % i, content='Page content %s' % i)
            page.save()
            new_page_ids.append(page.id)
    for page_id in page_ids:
        for i in range(randint(1, 3)):
            page = Page(name='Page %s %s' % (page_id, i),
                        content='Page content %s %s' % (page_id, i),
                        parent_id=page_id)
            page.save()
            new_page_ids.append(page.id)
    for i in range(3):
        create_pages(page_ids=new_page_ids, count=count)
    return True
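# A hedged usage sketch for the seeder above, e.g. from a test fixture or a
# Django shell. The module paths `myapp.seed` and `myapp.models` are
# hypothetical placeholders; only `create_pages` and `Page` come from the
# snippet itself (whose module also needs `from random import randint`).
from myapp.models import Page        # hypothetical import path
from myapp.seed import create_pages  # hypothetical import path

def seed_demo_tree():
    """Build a small random page hierarchy and report how many pages exist."""
    create_pages()  # returns False once the depth cap (three levels) is reached
    return Page.objects.count()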
def make_page(self):
    root = User.objects.filter(username="******").first()
    if not self.page:
        club_root = Page.objects.filter(name=settings.SITH_CLUB_ROOT_PAGE).first()
        if root and club_root:
            public = Group.objects.filter(id=settings.SITH_GROUP_PUBLIC_ID).first()
            p = Page(name=self.unix_name)
            p.parent = club_root
            p.save(force_lock=True)
            if public:
                p.view_groups.add(public)
            p.save(force_lock=True)
            if self.parent and self.parent.page:
                p.parent = self.parent.page
            self.page = p
            self.save()
    elif self.page and self.page.name != self.unix_name:
        self.page.unset_lock()
        self.page.name = self.unix_name
        self.page.save(force_lock=True)
    elif (self.page and self.parent and self.parent.page
          and self.page.parent != self.parent.page):
        self.page.unset_lock()
        self.page.parent = self.parent.page
        self.page.save(force_lock=True)
def test_access_child_page_ok(self):
    """Should display a page correctly."""
    parent = Page(name="guy", owner_group=Group.objects.filter(id=1).first())
    parent.save(force_lock=True)
    page = Page(
        name="bibou", owner_group=Group.objects.filter(id=1).first(), parent=parent
    )
    page.save(force_lock=True)
    response = self.client.get(
        reverse("core:page", kwargs={"page_name": "guy/bibou"})
    )
    self.assertTrue(response.status_code == 200)
    self.assertTrue(
        '<a href="/page/guy/bibou/edit">\\xc3\\x89diter</a>' in str(response.content)
    )
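# A hedged companion sketch, not part of the original test class: it reuses the
# same URL name ("core:page") and the Page/Group setup shown above to check that
# the parent page alone also resolves. The test name and the assumption that a
# bare page name maps to "guy" are illustrative only.
def test_access_parent_page_ok(self):
    parent = Page(name="guy", owner_group=Group.objects.filter(id=1).first())
    parent.save(force_lock=True)
    response = self.client.get(reverse("core:page", kwargs={"page_name": "guy"}))
    self.assertEqual(response.status_code, 200)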
def save_page(page, user, blog=None):
    '''
    Saves edits to a page in the CMS.
    Note that this function does _not_ perform permission checking. In other words,
    it doesn't verify if the user described in the `user` parameter does in fact
    have permissions to edit the page in question.

    :param page:
        Page object whose data is to be saved. If this is None, then it is assumed
        that we are creating a new page.
    :param user:
        The user object associated with the save action for this page. If this is
        a newly-created page, the page's user will be set to this.
    :param blog:
        The blog object under which the page will be created, if this is a
        newly-created page.
    '''
    getunicode = request.forms.getunicode

    # invalidate_cache()

    save_action = int(request.forms.get('save'))

    original_page_status = page_status.unpublished
    new_basename = getunicode('basename')

    if page is None:
        # CREATE NEW PAGE ENTRY
        page = Page()
        page.user = user.id
        page.blog = blog.id
        page.basename = create_basename(getunicode('page_title'), page.blog)
        original_page_basename = page.basename

        time_now = datetime.datetime.utcnow()
        page.publication_date = time_now
        page.created_date = time_now

    else:
        # UPDATE EXISTING ENTRY

        # Queue neighbor actions for page BEFORE modification
        if page.status == page_status.published:
            if not (save_action & save_action_list.UNPUBLISH_PAGE):
                queue_page_actions((page.next_page, page.previous_page),
                                   no_neighbors=True, no_archive=True)
                queue_page_archive_actions(page)

        original_page_status = page.status
        original_page_basename = page.basename

        page.modified_date = datetime.datetime.utcnow()

    change_basename = False
    if new_basename is not None:
        if new_basename == "":
            change_basename = True
            new_basename = create_basename(getunicode('page_title'), page.blog)
        if new_basename != original_page_basename:
            change_basename = True

    new_publication_date = datetime.datetime.strptime(
        request.forms.get('publication_date'), DATE_FORMAT)

    if change_basename:
        page.basename = create_basename(new_basename, page.blog)

    page.publication_date = page._date_to_utc(
        page.blog.timezone, new_publication_date).replace(tzinfo=None)
    page.title = getunicode('page_title')
    page.text = getunicode('page_text')
    page.status = page_status.modes[int(request.forms.get('publication_status'))]
    page.tag_text = getunicode('page_tag_text')
    page.excerpt = getunicode('page_excerpt')

    change_note = getunicode('change_note')

    msg = []

    # UNPUBLISH
    if (
        (save_action & save_action_list.UNPUBLISH_PAGE
         and page.status == page_status.published)  # unpublished a published page
        or
        (original_page_status == page_status.published
         and page.status == page_status.unpublished)  # set a published page to draft
    ):
        unpublish_page(page)
        msg.append("Page <b>{}</b> unpublished successfully.")

    # SET UNPUBLISHED TO PUBLISHED
    elif original_page_status == page_status.unpublished and (
            save_action & save_action_list.UPDATE_LIVE_PAGE):
        page.status = page_status.published
        msg.append("Set to publish.")

    # SAVE DRAFT
    if (save_action & save_action_list.SAVE_TO_DRAFT):
        try:
            save_result = page.save(user, False, False, change_note)
        except PageNotChanged:
            save_result = (None, None)
        msg.append("Page <b>{}</b> saved successfully.")

    # Assign categories for page
    categories = []
    for n in request.forms.allitems():
        if n[0][:8] == 'cat-sel-':
            try:
                category_id = int(n[0][8:])
            except ValueError:
                category_id = None
            else:
                categories.append(category_id)

    if not categories:
        categories.append(blog.default_category.id)
        msg.append(" Default category auto-assigned for page.")

    page_categories = []
    primary = None

    for n in page.categories:
        if n.category.id not in categories:
            delete_category = PageCategory.delete().where(PageCategory.id == n.id)
            delete_category.execute()
        else:
            page_categories.append(n.category.id)
            if n.primary is True:
                primary = n

    for n in categories:
        if n not in page_categories:
            new_page_category = PageCategory.create(
                page=page,
                category=Category.load(n, blog_id=page.blog.id),
                primary=False)

    if primary is None:
        n = page.categories[0]
        n.primary = True
        n.save()

    delete_page_fileinfo(page)
    build_archives_fileinfos((page,))
    build_pages_fileinfos((page,))

    # UPDATE TAGS
    if getunicode('tag_text') is not None:
        import json
        tag_text = json.loads(getunicode('tag_text'))
        add_tags_to_page(tag_text, page)

    delete_orphaned_tags(page.blog)

    # QUEUE CHANGES FOR PUBLICATION (if any)
    if ((save_action & save_action_list.UPDATE_LIVE_PAGE)
            and (page.status == page_status.published)):
        queue_ssi_actions(page.blog)
        queue_page_actions((page,))
        queue_index_actions(page.blog)
        msg.append(" Live page updated.")

    # DETECT ANY PAGE CHANGES
    if ((save_action & (save_action_list.SAVE_TO_DRAFT
                        + save_action_list.UPDATE_LIVE_PAGE))
            and (save_result[1]) is None):
        msg.append(" (Page unchanged.)")

    # RETURN REPORT
    tags = template_tags(page=page, user=user)

    status = Status(
        type='success',
        message=' / '.join(msg),
        vals=(page.for_log,)
    )

    tags.status = status
    tags._save_action = save_action
    tags._save_action_list = save_action_list

    return tags
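# The branching in save_page() is driven by bit flags OR-ed into `save_action`.
# The CMS's real `save_action_list` values are not shown in this snippet; the
# following is a minimal, self-contained sketch assuming power-of-two flags,
# just to illustrate how the masks compose and are tested.
import enum

class SaveAction(enum.IntFlag):
    SAVE_TO_DRAFT = 1       # hypothetical values, for illustration only
    UPDATE_LIVE_PAGE = 2
    UNPUBLISH_PAGE = 4
    DELETE_PAGE = 8

# A form submission that saves a draft and pushes it live in one request:
save_action = SaveAction.SAVE_TO_DRAFT | SaveAction.UPDATE_LIVE_PAGE

assert save_action & SaveAction.SAVE_TO_DRAFT          # "SAVE DRAFT" branch runs
assert save_action & SaveAction.UPDATE_LIVE_PAGE       # publication queue branch runs
assert not (save_action & SaveAction.UNPUBLISH_PAGE)   # unpublish branch is skipped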
def _load_page(self, doc, div, issue):
    dmdid = div.attrib['DMDID']
    mods = dmd_mods(doc, dmdid)
    page = Page()

    seq_string = mods.xpath('string(.//mods:extent/mods:start)', namespaces=ns)
    try:
        page.sequence = int(seq_string)
    except ValueError as e:
        raise BatchLoaderException(
            "could not determine sequence number for page from '%s'" % seq_string)
    page.number = mods.xpath('string(.//mods:detail[@type="page number"])',
                             namespaces=ns).strip()

    reel_number = mods.xpath('string(.//mods:identifier[@type="reel number"])',
                             namespaces=ns).strip()
    try:
        reel = models.Reel.objects.get(number=reel_number, batch=self.current_batch)
        page.reel = reel
    except models.Reel.DoesNotExist as e:
        if reel_number:
            reel = models.Reel(number=reel_number, batch=self.current_batch,
                               implicit=True)
            reel.save()
            page.reel = reel
        else:
            _logger.warn("unable to find reel number in page metadata")

    _logger.info("Assigned page sequence: %s" % page.sequence)

    _section_dmdid = div.xpath(
        'string(ancestor::mets:div[@TYPE="np:section"]/@DMDID)', namespaces=ns)
    if _section_dmdid:
        section_mods = dmd_mods(doc, _section_dmdid)
        section_label = section_mods.xpath(
            'string(.//mods:detail[@type="section label"]/mods:number[1])',
            namespaces=ns).strip()
        if section_label:
            page.section_label = section_label

    page.issue = issue

    _logger.info("Saving page. issue date: %s, page sequence: %s" %
                 (issue.date_issued, page.sequence))

    # TODO - consider the possibility of executing the file name
    # assignments (below) before this page.save().
    page.save()

    notes = []
    for mods_note in mods.xpath('.//mods:note', namespaces=ns):
        type = mods_note.xpath('string(./@type)')
        label = mods_note.xpath('string(./@displayLabel)')
        text = mods_note.xpath('string(.)').strip()
        note = models.PageNote(type=type, label=label, text=text)
        notes.append(note)
    page.notes.set(notes, bulk=False)

    # there's a level of indirection between the METS structmap and the
    # details about specific files in this package ...
    # so we have to first get the FILEID from the issue div in the
    # structmap and then use it to look up the file details in the
    # larger document.
    for fptr in div.xpath('./mets:fptr', namespaces=ns):
        file_id = fptr.attrib['FILEID']
        file_el = doc.xpath('.//mets:file[@ID="%s"]' % file_id, namespaces=ns)[0]
        file_type = file_el.attrib['USE']

        # get the filename relative to the storage location
        file_name = file_el.xpath('string(./mets:FLocat/@xlink:href)', namespaces=ns)
        file_name = urllib.parse.urljoin(doc.docinfo.URL, file_name)
        file_name = self.storage_relative_path(file_name)

        if file_type == 'master':
            page.tiff_filename = file_name
        elif file_type == 'service':
            page.jp2_filename = file_name
            try:
                # extract image dimensions from technical metadata for jp2
                for admid in file_el.attrib['ADMID'].split(' '):
                    length, width = get_dimensions(doc, admid)
                    if length and width:
                        page.jp2_width = width
                        page.jp2_length = length
                        break
            except KeyError as e:
                _logger.info(
                    "Could not determine dimensions of jp2 for issue: %s page: %s... trying harder..."
                    % (page.issue, page))
                im = Image.open(page.jp2_abs_filename)
                page.jp2_width, page.jp2_length = im.size

            if not page.jp2_width:
                raise BatchLoaderException(
                    "No jp2 width for issue: %s page: %s" % (page.issue, page))
            if not page.jp2_length:
                raise BatchLoaderException(
                    "No jp2 length for issue: %s page: %s" % (page.issue, page))
        elif file_type == 'derivative':
            page.pdf_filename = file_name
        elif file_type == 'ocr':
            page.ocr_filename = file_name

    if page.ocr_filename:
        # don't incur the overhead of extracting ocr text, word coordinates
        # and indexing unless the batch loader has been set up to do it
        if self.PROCESS_OCR:
            self.process_ocr(page)
    else:
        _logger.info("No ocr filename for issue: %s page: %s" % (page.issue, page))

    _logger.debug("saving page: %s" % page.url)
    page.save()
    return page
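# The helper dmd_mods() and the namespace map `ns` are referenced above but not
# shown in this snippet. Below is a hedged sketch of how such a lookup could be
# written with lxml, under the assumption that each DMDID points at a
# mets:dmdSec wrapping a mods:mods record (the usual METS/MODS layout). This is
# an illustration, not the project's actual implementation.
NS = {
    'mets': 'http://www.loc.gov/METS/',
    'mods': 'http://www.loc.gov/mods/v3',
}

def dmd_mods_sketch(doc, dmdid):
    """Return the mods:mods element wrapped by the mets:dmdSec with this ID, if any."""
    matches = doc.xpath('.//mets:dmdSec[@ID="%s"]//mods:mods' % dmdid, namespaces=NS)
    return matches[0] if matches else None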
def save_page(page, user, blog=None):
    '''
    Saves edits to a page in the CMS.
    Note that this function does _not_ perform permission checking. In other words,
    it doesn't verify if the user described in the `user` parameter does in fact
    have permissions to edit the page in question.
    '''
    save_action = int(request.forms.get('save'))

    blog_new_page = False
    original_page_status = page_status.unpublished

    if page is None:
        blog_new_page = True

        page = Page()
        page.user = user.id
        page.blog = blog.id

        page.basename = create_basename(request.forms.getunicode('page_title'),
                                        page.blog)
        original_page_basename = page.basename

        page.publication_date = datetime.datetime.now()
        page.created_date = datetime.datetime.now()

    else:
        original_page_status = page.status
        original_page_basename = page.basename

        page.modified_date = datetime.datetime.now()

    if request.forms.getunicode('basename') is not None:
        if request.forms.getunicode('basename') != "":
            if original_page_basename != request.forms.getunicode('basename'):
                page.basename = create_basename(
                    request.forms.getunicode('basename'), page.blog)
        else:
            page.basename = create_basename(
                request.forms.getunicode('page_title'), page.blog)

    if original_page_basename != page.basename:
        delete_page_fileinfo(page)

    if page.basename == "":
        page.basename = create_basename(request.forms.getunicode('page_title'),
                                        page.blog)
        original_page_basename = page.basename

    page.title = request.forms.getunicode('page_title')
    page.text = request.forms.getunicode('page_text')
    page.status = page_status.modes[int(request.forms.get('publication_status'))]
    page.publication_date = datetime.datetime.strptime(
        request.forms.get('publication_date'), '%Y-%m-%d %H:%M:%S')
    page.tag_text = request.forms.getunicode('page_tag_text')
    page.excerpt = request.forms.getunicode('page_excerpt')

    change_note = request.forms.getunicode('change_note')

    # Save to draft only
    # Save and publish
    # Save and exit
    # Republish and exit
    # Unpublish (and exit)
    # Delete (and unpublish) (and exit)

    msg = ""

    # UNPUBLISH
    if ((save_action & save_action_list.UNPUBLISH_PAGE
         and page.status == page_status.published)  # unpublished a published page
            or (original_page_status == page_status.published
                and page.status == page_status.unpublished)  # set a published page to draft
            or (save_action & save_action_list.DELETE_PAGE)  # delete a page, regardless of status
            ):
        pass

    # DELETE; IMPLIES UNPUBLISH
    if (save_action & save_action_list.DELETE_PAGE):
        pass

    # UNPUBLISHED TO PUBLISHED
    if original_page_status == page_status.unpublished and (
            save_action & save_action_list.UPDATE_LIVE_PAGE):
        page.status = page_status.published

    # SAVE DRAFT
    if (save_action & save_action_list.SAVE_TO_DRAFT):
        backup_only = True if request.forms.getunicode('draft') == "Y" else False
        try:
            save_result = page.save(user, False, backup_only, change_note)
        except PageNotChanged:
            save_result = (None, None)

        if blog_new_page:
            default_blog_category = Category.get(
                Category.blog == blog.id,
                Category.default == True)
            saved_page_category = PageCategory.create(
                page=page,
                category=default_blog_category,
                primary=True)

        msg += ("Page <b>{}</b> saved.")

    # SET TAGS
    # when to do this?
    # what happens when we delete a page?
    # all tags for a page have to be deassigned.
    if request.forms.getunicode('tag_text') is not None:
        tag_text = json.loads(request.forms.getunicode('tag_text'))
        add_tags_to_page(tag_text, page)

    delete_orphaned_tags()

    # BUILD FILEINFO IF NO DELETE ACTION
    if not (save_action & save_action_list.DELETE_PAGE):
        build_pages_fileinfos((page,))
        build_archives_fileinfos((page,))

    # PUBLISH CHANGES
    if (save_action & save_action_list.UPDATE_LIVE_PAGE) and (
            page.status == page_status.published):
        queue_page_actions(page)
        queue_index_actions(page.blog)
        msg += (" Live page updated.")

    if (save_action & (save_action_list.SAVE_TO_DRAFT
                       + save_action_list.UPDATE_LIVE_PAGE)) and (save_result[1]) is None:
        msg += (" (Page unchanged.)")

    tags = template_tags(page_id=page.id, user=user)

    status = Status(type='success', message=msg, vals=(page.title,))

    tags.status = status
    return tags
def handle(self, *args, **options):
    os.environ["DJANGO_COLORS"] = "nocolor"
    Site(id=4000, domain=settings.SITH_URL, name=settings.SITH_NAME).save()
    root_path = os.path.dirname(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))))

    Group(name="Root").save()
    Group(name="Public").save()
    Group(name="Subscribers").save()
    Group(name="Old subscribers").save()
    Group(name="Accounting admin").save()
    Group(name="Communication admin").save()
    Group(name="Counter admin").save()
    Group(name="Banned from buying alcohol").save()
    Group(name="Banned from counters").save()
    Group(name="Banned to subscribe").save()
    Group(name="SAS admin").save()
    Group(name="Forum admin").save()
    Group(name="Pedagogy admin").save()
    self.reset_index("core", "auth")

    root = User(
        id=0,
        username="******",
        last_name="",
        first_name="Bibou",
        email="*****@*****.**",
        date_of_birth="1942-06-12",
        is_superuser=True,
        is_staff=True,
    )
    root.set_password("plop")
    root.save()

    profiles_root = SithFile(parent=None, name="profiles", is_folder=True, owner=root)
    profiles_root.save()
    home_root = SithFile(parent=None, name="users", is_folder=True, owner=root)
    home_root.save()

    # Page needed for club creation
    p = Page(name=settings.SITH_CLUB_ROOT_PAGE)
    p.set_lock(root)
    p.save()

    club_root = SithFile(parent=None, name="clubs", is_folder=True, owner=root)
    club_root.save()
    SithFile(parent=None, name="SAS", is_folder=True, owner=root).save()

    main_club = Club(
        id=1,
        name=settings.SITH_MAIN_CLUB["name"],
        unix_name=settings.SITH_MAIN_CLUB["unix_name"],
        address=settings.SITH_MAIN_CLUB["address"],
    )
    main_club.save()
    bar_club = Club(
        id=2,
        name=settings.SITH_BAR_MANAGER["name"],
        unix_name=settings.SITH_BAR_MANAGER["unix_name"],
        address=settings.SITH_BAR_MANAGER["address"],
    )
    bar_club.save()
    launderette_club = Club(
        id=84,
        name=settings.SITH_LAUNDERETTE_MANAGER["name"],
        unix_name=settings.SITH_LAUNDERETTE_MANAGER["unix_name"],
        address=settings.SITH_LAUNDERETTE_MANAGER["address"],
    )
    launderette_club.save()
    self.reset_index("club")

    for b in settings.SITH_COUNTER_BARS:
        g = Group(name=b[1] + " admin")
        g.save()
        c = Counter(id=b[0], name=b[1], club=bar_club, type="BAR")
        c.save()
        g.editable_counters.add(c)
        g.save()
    self.reset_index("counter")

    Counter(name="Eboutic", club=main_club, type="EBOUTIC").save()
    Counter(name="AE", club=main_club, type="OFFICE").save()

    home_root.view_groups.set(
        [Group.objects.filter(name=settings.SITH_MAIN_MEMBERS_GROUP).first()])
    club_root.view_groups.set(
        [Group.objects.filter(name=settings.SITH_MAIN_MEMBERS_GROUP).first()])
    home_root.save()
    club_root.save()

    Sith(weekmail_destinations="[email protected] [email protected]").save()
    Weekmail().save()

    p = Page(name="Index")
    p.set_lock(root)
    p.save()
    p.view_groups.set([settings.SITH_GROUP_PUBLIC_ID])
    p.set_lock(root)
    p.save()
    PageRev(
        page=p,
        title="Wiki index",
        author=root,
        content="""
Welcome to the wiki page!
""",
    ).save()

    p = Page(name="services")
    p.set_lock(root)
    p.save()
    p.view_groups.set([settings.SITH_GROUP_PUBLIC_ID])
    p.set_lock(root)
    PageRev(
        page=p,
        title="Services",
        author=root,
        content="""
|   |   |   |   |
| :---: | :---: | :---: | :---: |
| [Eboutic](/eboutic) | [Laverie](/launderette) | Matmat | [Fichiers](/file) |
| SAS | Weekmail | Forum | |
""",
    ).save()

    p = Page(name="launderette")
    p.set_lock(root)
    p.save()
    p.set_lock(root)
    PageRev(page=p, title="Laverie", author=root,
            content="Fonctionnement de la laverie").save()

    # Here we add a lot of test data that is not necessary for the Sith,
    # but that provides a basic development environment
    if not options["prod"]:
        # Adding user Skia
        skia = User(username="******", last_name="Kia", first_name="S'",
                    email="*****@*****.**", date_of_birth="1942-06-12")
        skia.set_password("plop")
        skia.save()
        skia.view_groups = [
            Group.objects.filter(name=settings.SITH_MAIN_MEMBERS_GROUP).first().id
        ]
        skia.save()
        skia_profile_path = os.path.join(root_path, "core/fixtures/images/3.jpg")
        with open(skia_profile_path, "rb") as f:
            name = str(skia.id) + "_profile.jpg"
            skia_profile = SithFile(
                parent=profiles_root,
                name=name,
                file=resize_image(Image.open(BytesIO(f.read())), 400, "JPEG"),
                owner=skia,
                is_folder=False,
                mime_type="image/jpeg",
                size=os.path.getsize(skia_profile_path),
            )
            skia_profile.file.name = name
            skia_profile.save()
        skia.profile_pict = skia_profile
        skia.save()

        # Adding user public
        public = User(username="******", last_name="Not subscribed", first_name="Public",
                      email="*****@*****.**", date_of_birth="1942-06-12",
                      is_superuser=False, is_staff=False)
        public.set_password("plop")
        public.save()
        public.view_groups = [
            Group.objects.filter(name=settings.SITH_MAIN_MEMBERS_GROUP).first().id
        ]
        public.save()

        # Adding user Subscriber
        subscriber = User(username="******", last_name="User", first_name="Subscribed",
                          email="*****@*****.**", date_of_birth="1942-06-12",
                          is_superuser=False, is_staff=False)
        subscriber.set_password("plop")
        subscriber.save()
        subscriber.view_groups = [
            Group.objects.filter(name=settings.SITH_MAIN_MEMBERS_GROUP).first().id
        ]
        subscriber.save()

        # Adding user old Subscriber
        old_subscriber = User(username="******", last_name="Subscriber", first_name="Old",
                              email="*****@*****.**", date_of_birth="1942-06-12",
                              is_superuser=False, is_staff=False)
        old_subscriber.set_password("plop")
        old_subscriber.save()
        old_subscriber.view_groups = [
            Group.objects.filter(name=settings.SITH_MAIN_MEMBERS_GROUP).first().id
        ]
        old_subscriber.save()

        # Adding user Counter admin
        counter = User(username="******", last_name="Ter", first_name="Coun",
                       email="*****@*****.**", date_of_birth="1942-06-12",
                       is_superuser=False, is_staff=False)
        counter.set_password("plop")
        counter.save()
        counter.view_groups = [
            Group.objects.filter(name=settings.SITH_MAIN_MEMBERS_GROUP).first().id
        ]
        counter.groups.set([
            Group.objects.filter(id=settings.SITH_GROUP_COUNTER_ADMIN_ID).first().id
        ])
        counter.save()

        # Adding user Comptable
        comptable = User(username="******", last_name="Able", first_name="Compte",
                         email="*****@*****.**", date_of_birth="1942-06-12",
                         is_superuser=False, is_staff=False)
        comptable.set_password("plop")
        comptable.save()
        comptable.view_groups = [
            Group.objects.filter(name=settings.SITH_MAIN_MEMBERS_GROUP).first().id
        ]
        comptable.groups.set([
            Group.objects.filter(id=settings.SITH_GROUP_ACCOUNTING_ADMIN_ID).first().id
        ])
        comptable.save()

        # Adding user Guy
        u = User(username="******", last_name="Carlier", first_name="Guy",
                 email="*****@*****.**", date_of_birth="1942-06-12",
                 is_superuser=False, is_staff=False)
        u.set_password("plop")
        u.save()
        u.view_groups = [
            Group.objects.filter(name=settings.SITH_MAIN_MEMBERS_GROUP).first().id
        ]
        u.save()

        # Adding user Richard Batsbak
        r = User(username="******", last_name="Batsbak", first_name="Richard",
                 email="*****@*****.**", date_of_birth="1982-06-12")
        r.set_password("plop")
        r.save()
        r.view_groups = [
            Group.objects.filter(name=settings.SITH_MAIN_MEMBERS_GROUP).first().id
        ]
        r.save()

        # Adding syntax help page
        p = Page(name="Aide_sur_la_syntaxe")
        p.save(force_lock=True)
        with open(os.path.join(root_path) + "/doc/SYNTAX.md", "r") as rm:
            PageRev(page=p, title="Aide sur la syntaxe", author=skia,
                    content=rm.read()).save()
        p.view_groups.set([settings.SITH_GROUP_PUBLIC_ID])
        p.save(force_lock=True)

        p = Page(name="Services")
        p.save(force_lock=True)
        p.view_groups.set([settings.SITH_GROUP_PUBLIC_ID])
        p.save(force_lock=True)
        PageRev(
            page=p,
            title="Services",
            author=skia,
            content="""
|   |   |   |
| :---: | :---: | :---: |
| [Eboutic](/eboutic) | [Laverie](/launderette) | Matmat |
| SAS | Weekmail | Forum|
""",
        ).save()

        # Subscription
        default_subscription = "un-semestre"
        # Root
        s = Subscription(
            member=User.objects.filter(pk=root.pk).first(),
            subscription_type=default_subscription,
            payment_method=settings.SITH_SUBSCRIPTION_PAYMENT_METHOD[0][0],
        )
        s.subscription_start = s.compute_start()
        s.subscription_end = s.compute_end(
            duration=settings.SITH_SUBSCRIPTIONS[s.subscription_type]["duration"],
            start=s.subscription_start,
        )
        s.save()
        # Skia
        s = Subscription(
            member=User.objects.filter(pk=skia.pk).first(),
            subscription_type=default_subscription,
            payment_method=settings.SITH_SUBSCRIPTION_PAYMENT_METHOD[0][0],
        )
        s.subscription_start = s.compute_start()
        s.subscription_end = s.compute_end(
            duration=settings.SITH_SUBSCRIPTIONS[s.subscription_type]["duration"],
            start=s.subscription_start,
        )
        s.save()
        # Counter admin
        s = Subscription(
            member=User.objects.filter(pk=counter.pk).first(),
            subscription_type=default_subscription,
            payment_method=settings.SITH_SUBSCRIPTION_PAYMENT_METHOD[0][0],
        )
        s.subscription_start = s.compute_start()
        s.subscription_end = s.compute_end(
            duration=settings.SITH_SUBSCRIPTIONS[s.subscription_type]["duration"],
            start=s.subscription_start,
        )
        s.save()
        # Comptable
        s = Subscription(
            member=User.objects.filter(pk=comptable.pk).first(),
            subscription_type=default_subscription,
            payment_method=settings.SITH_SUBSCRIPTION_PAYMENT_METHOD[0][0],
        )
        s.subscription_start = s.compute_start()
        s.subscription_end = s.compute_end(
            duration=settings.SITH_SUBSCRIPTIONS[s.subscription_type]["duration"],
            start=s.subscription_start,
        )
        s.save()
        # Richard
        s = Subscription(
            member=User.objects.filter(pk=r.pk).first(),
            subscription_type=default_subscription,
            payment_method=settings.SITH_SUBSCRIPTION_PAYMENT_METHOD[0][0],
        )
        s.subscription_start = s.compute_start()
        s.subscription_end = s.compute_end(
            duration=settings.SITH_SUBSCRIPTIONS[s.subscription_type]["duration"],
            start=s.subscription_start,
        )
        s.save()
        # User
        s = Subscription(
            member=User.objects.filter(pk=subscriber.pk).first(),
            subscription_type=default_subscription,
            payment_method=settings.SITH_SUBSCRIPTION_PAYMENT_METHOD[0][0],
        )
        s.subscription_start = s.compute_start()
        s.subscription_end = s.compute_end(
            duration=settings.SITH_SUBSCRIPTIONS[s.subscription_type]["duration"],
            start=s.subscription_start,
        )
        s.save()
        # Old subscriber
        s = Subscription(
            member=User.objects.filter(pk=old_subscriber.pk).first(),
            subscription_type=default_subscription,
            payment_method=settings.SITH_SUBSCRIPTION_PAYMENT_METHOD[0][0],
        )
        s.subscription_start = s.compute_start(datetime(year=2012, month=9, day=4))
        s.subscription_end = s.compute_end(
            duration=settings.SITH_SUBSCRIPTIONS[s.subscription_type]["duration"],
            start=s.subscription_start,
        )
        s.save()

        # Clubs
        Club(name="Bibo'UT", unix_name="bibout",
             address="46 de la Boustifaille", parent=main_club).save()
        guyut = Club(name="Guy'UT", unix_name="guyut",
                     address="42 de la Boustifaille", parent=main_club)
        guyut.save()
        Club(name="Woenzel'UT", unix_name="woenzel", address="Woenzel",
             parent=guyut).save()
        Membership(user=skia, club=main_club, role=3, description="").save()
        troll = Club(name="Troll Penché", unix_name="troll",
                     address="Terre Du Milieu", parent=main_club)
        troll.save()
        refound = Club(name="Carte AE", unix_name="carte_ae",
                       address="Jamais imprimée", parent=main_club)
        refound.save()

        # Counters
        Customer(user=skia, account_id="6568j", amount=0).save()
        Customer(user=r, account_id="4000k", amount=0).save()
        p = ProductType(name="Bières bouteilles")
        p.save()
        c = ProductType(name="Cotisations")
        c.save()
        r = ProductType(name="Rechargements")
        r.save()
        verre = ProductType(name="Verre")
        verre.save()
        cotis = Product(name="Cotis 1 semestre", code="1SCOTIZ", product_type=c,
                        purchase_price="15", selling_price="15",
                        special_selling_price="15", club=main_club)
        cotis.save()
        cotis2 = Product(name="Cotis 2 semestres", code="2SCOTIZ", product_type=c,
                         purchase_price="28", selling_price="28",
                         special_selling_price="28", club=main_club)
        cotis2.save()
        refill = Product(name="Rechargement 15 €", code="15REFILL", product_type=r,
                         purchase_price="15", selling_price="15",
                         special_selling_price="15", club=main_club)
        refill.save()
        barb = Product(name="Barbar", code="BARB", product_type=p,
                       purchase_price="1.50", selling_price="1.7",
                       special_selling_price="1.6", club=main_club)
        barb.save()
        cble = Product(name="Chimay Bleue", code="CBLE", product_type=p,
                       purchase_price="1.50", selling_price="1.7",
                       special_selling_price="1.6", club=main_club)
        cble.save()
        cons = Product(name="Consigne Eco-cup", code="CONS", product_type=verre,
                       purchase_price="1", selling_price="1",
                       special_selling_price="1", club=main_club)
        cons.id = 1152
        cons.save()
        dcons = Product(name="Déconsigne Eco-cup", code="DECO", product_type=verre,
                        purchase_price="-1", selling_price="-1",
                        special_selling_price="-1", club=main_club)
        dcons.id = 1151
        dcons.save()
        Product(name="Corsendonk", code="CORS", product_type=p,
                purchase_price="1.50", selling_price="1.7",
                special_selling_price="1.6", club=main_club).save()
        Product(name="Carolus", code="CARO", product_type=p,
                purchase_price="1.50", selling_price="1.7",
                special_selling_price="1.6", club=main_club).save()
        mde = Counter.objects.filter(name="MDE").first()
        mde.products.add(barb)
        mde.products.add(cble)
        mde.products.add(cons)
        mde.products.add(dcons)
        mde.sellers.add(skia)
        mde.save()
        eboutic = Counter.objects.filter(name="Eboutic").first()
        eboutic.products.add(barb)
        eboutic.products.add(cotis)
        eboutic.products.add(cotis2)
        eboutic.products.add(refill)
        eboutic.save()
        refound_counter = Counter(name="Carte AE", club=refound, type="OFFICE")
        refound_counter.save()
        refound_product = Product(name="remboursement", code="REMBOURS",
                                  purchase_price="0", selling_price="0",
                                  special_selling_price="0", club=refound)
        refound_product.save()

        # Accounting test values:
        BankAccount(name="AE TG", club=main_club).save()
        BankAccount(name="Carte AE", club=main_club).save()
        ba = BankAccount(name="AE TI", club=main_club)
        ba.save()
        ca = ClubAccount(name="Troll Penché", bank_account=ba, club=troll)
        ca.save()
        gj = GeneralJournal(name="A16", start_date=date.today(), club_account=ca)
        gj.save()
        credit = AccountingType(code="74", label="Subventions d'exploitation",
                                movement_type="CREDIT")
        credit.save()
        debit = AccountingType(
            code="606",
            label="Achats non stockés de matières et fournitures(*1)",
            movement_type="DEBIT",
        )
        debit.save()
        debit2 = AccountingType(
            code="604",
            label="Achats d'études et prestations de services(*2)",
            movement_type="DEBIT",
        )
        debit2.save()
        buying = AccountingType(code="60", label="Achats (sauf 603)",
                                movement_type="DEBIT")
        buying.save()
        comptes = AccountingType(code="6", label="Comptes de charge",
                                 movement_type="DEBIT")
        comptes.save()
        simple = SimplifiedAccountingType(label="Je fais du simple 6",
                                          accounting_type=comptes)
        simple.save()
        woenzco = Company(name="Woenzel & co")
        woenzco.save()

        operation_list = [
            (27, "J'avais trop de bière", "CASH", None, buying,
             "USER", skia.id, "", None),
            (4000, "Ceci n'est pas une opération... en fait si mais non", "CHECK",
             None, debit, "COMPANY", woenzco.id, "", 23),
            (22, "C'est de l'argent ?", "CARD", None, credit,
             "CLUB", troll.id, "", None),
            (37, "Je paye CASH", "CASH", None, debit2,
             "OTHER", None, "tous les étudiants <3", None),
            (300, "Paiement Guy", "CASH", None, buying, "USER", skia.id, "", None),
            (32.3, "Essence", "CASH", None, buying, "OTHER", None, "station", None),
            (46.42, "Allumette", "CHECK", None, credit,
             "CLUB", main_club.id, "", 57),
            (666.42, "Subvention de far far away", "CASH", None, comptes,
             "CLUB", main_club.id, "", None),
            (496, "Ça, c'est un 6", "CARD", simple, None,
             "USER", skia.id, "", None),
            (17, "La Gargotte du Korrigan", "CASH", None, debit2,
             "CLUB", bar_club.id, "", None),
        ]
        for op in operation_list:
            operation = Operation(
                journal=gj,
                date=date.today(),
                amount=op[0],
                remark=op[1],
                mode=op[2],
                done=True,
                simpleaccounting_type=op[3],
                accounting_type=op[4],
                target_type=op[5],
                target_id=op[6],
                target_label=op[7],
                cheque_number=op[8],
            )
            operation.clean()
            operation.save()

        # Adding user sli
        sli = User(username="******", last_name="Li", first_name="S",
                   email="*****@*****.**", date_of_birth="1942-06-12")
        sli.set_password("plop")
        sli.save()
        sli.view_groups = [
            Group.objects.filter(name=settings.SITH_MAIN_MEMBERS_GROUP).first().id
        ]
        sli.save()
        sli_profile_path = os.path.join(root_path, "core/fixtures/images/5.jpg")
        with open(sli_profile_path, "rb") as f:
            name = str(sli.id) + "_profile.jpg"
            sli_profile = SithFile(
                parent=profiles_root,
                name=name,
                file=resize_image(Image.open(BytesIO(f.read())), 400, "JPEG"),
                owner=sli,
                is_folder=False,
                mime_type="image/jpeg",
                size=os.path.getsize(sli_profile_path),
            )
            sli_profile.file.name = name
            sli_profile.save()
        sli.profile_pict = sli_profile
        sli.save()

        # Adding user Krophil
        krophil = User(username="******", last_name="Phil'", first_name="Kro",
                       email="*****@*****.**", date_of_birth="1942-06-12")
        krophil.set_password("plop")
        krophil.save()
        krophil_profile_path = os.path.join(root_path, "core/fixtures/images/6.jpg")
        with open(krophil_profile_path, "rb") as f:
            name = str(krophil.id) + "_profile.jpg"
            krophil_profile = SithFile(
                parent=profiles_root,
                name=name,
                file=resize_image(Image.open(BytesIO(f.read())), 400, "JPEG"),
                owner=krophil,
                is_folder=False,
                mime_type="image/jpeg",
                size=os.path.getsize(krophil_profile_path),
            )
            krophil_profile.file.name = name
            krophil_profile.save()
        krophil.profile_pict = krophil_profile
        krophil.save()

        # Adding user Com Unity
        comunity = User(username="******", last_name="Unity", first_name="Com",
                        email="*****@*****.**", date_of_birth="1942-06-12")
        comunity.set_password("plop")
        comunity.save()
        comunity.groups.set(
            [Group.objects.filter(name="Communication admin").first().id])
        comunity.save()
        Membership(
            user=comunity,
            club=bar_club,
            start_date=timezone.now(),
            role=settings.SITH_CLUB_ROLES_ID["Board member"],
        ).save()

        # Adding user tutu
        tutu = User(username="******", last_name="Tu", first_name="Tu",
                    email="*****@*****.**", date_of_birth="1942-06-12")
        tutu.set_password("plop")
        tutu.save()
        tutu.groups.set([settings.SITH_GROUP_PEDAGOGY_ADMIN_ID])
        tutu.save()

        # Adding subscription for sli
        s = Subscription(
            member=User.objects.filter(pk=sli.pk).first(),
            subscription_type=list(settings.SITH_SUBSCRIPTIONS.keys())[0],
            payment_method=settings.SITH_SUBSCRIPTION_PAYMENT_METHOD[0][0],
        )
        s.subscription_start = s.compute_start()
        s.subscription_end = s.compute_end(
            duration=settings.SITH_SUBSCRIPTIONS[s.subscription_type]["duration"],
            start=s.subscription_start,
        )
        s.save()
        StudentCard(uid="9A89B82018B0A0", customer=sli.customer).save()
        # Adding subscription for Krophil
        s = Subscription(
            member=User.objects.filter(pk=krophil.pk).first(),
            subscription_type=list(settings.SITH_SUBSCRIPTIONS.keys())[0],
            payment_method=settings.SITH_SUBSCRIPTION_PAYMENT_METHOD[0][0],
        )
        s.subscription_start = s.compute_start()
        s.subscription_end = s.compute_end(
            duration=settings.SITH_SUBSCRIPTIONS[s.subscription_type]["duration"],
            start=s.subscription_start,
        )
        s.save()
        # Com Unity
        s = Subscription(
            member=comunity,
            subscription_type=default_subscription,
            payment_method=settings.SITH_SUBSCRIPTION_PAYMENT_METHOD[0][0],
        )
        s.subscription_start = s.compute_start()
        s.subscription_end = s.compute_end(
            duration=settings.SITH_SUBSCRIPTIONS[s.subscription_type]["duration"],
            start=s.subscription_start,
        )
        s.save()
        # Tutu
        s = Subscription(
            member=tutu,
            subscription_type=default_subscription,
            payment_method=settings.SITH_SUBSCRIPTION_PAYMENT_METHOD[0][0],
        )
        s.subscription_start = s.compute_start()
        s.subscription_end = s.compute_end(
            duration=settings.SITH_SUBSCRIPTIONS[s.subscription_type]["duration"],
            start=s.subscription_start,
        )
        s.save()

        Selling(
            label=dcons.name,
            product=dcons,
            counter=mde,
            unit_price=dcons.selling_price,
            club=main_club,
            quantity=settings.SITH_ECOCUP_LIMIT + 3,
            seller=skia,
            customer=krophil.customer,
        ).save()

        # Add barman to counter
        c = Counter.objects.get(id=2)
        c.sellers.add(User.objects.get(pk=krophil.pk))
        c.save()

        # Create an election
        public_group = Group.objects.get(id=settings.SITH_GROUP_PUBLIC_ID)
        subscriber_group = Group.objects.get(name=settings.SITH_MAIN_MEMBERS_GROUP)
        ae_board_group = Group.objects.get(name=settings.SITH_MAIN_BOARD_GROUP)
        el = Election(
            title="Élection 2017",
            description="La roue tourne",
            start_candidature="1942-06-12 10:28:45+01",
            end_candidature="2042-06-12 10:28:45+01",
            start_date="1942-06-12 10:28:45+01",
            end_date="7942-06-12 10:28:45+01",
        )
        el.save()
        el.view_groups.add(public_group)
        el.edit_groups.add(ae_board_group)
        el.candidature_groups.add(subscriber_group)
        el.vote_groups.add(subscriber_group)
        el.save()
        liste = ElectionList(title="Candidature Libre", election=el)
        liste.save()
        listeT = ElectionList(title="Troll", election=el)
        listeT.save()
        pres = Role(election=el, title="Président AE", description="Roi de l'AE")
        pres.save()
        resp = Role(election=el, title="Co Respo Info", max_choice=2,
                    description="Ghetto++")
        resp.save()
        cand = Candidature(role=resp, user=skia, election_list=liste,
                           program="Refesons le site AE")
        cand.save()
        cand = Candidature(
            role=resp,
            user=sli,
            election_list=liste,
            program="Vasy je deviens mon propre adjoint",
        )
        cand.save()
        cand = Candidature(role=resp, user=krophil, election_list=listeT,
                           program="Le Pôle Troll !")
        cand.save()
        cand = Candidature(
            role=pres,
            user=sli,
            election_list=listeT,
            program="En fait j'aime pas l'info, je voulais faire GMC",
        )
        cand.save()

        # Forum
        room = Forum(name="Salon de discussions",
                     description="Pour causer de tout", is_category=True)
        room.save()
        Forum(name="AE", description="Réservé au bureau AE", parent=room).save()
        Forum(name="BdF", description="Réservé au bureau BdF", parent=room).save()
        hall = Forum(name="Hall de discussions",
                     description="Pour toutes les discussions", parent=room)
        hall.save()
        various = Forum(name="Divers", description="Pour causer de rien",
                        is_category=True)
        various.save()
        Forum(name="Promos", description="Réservé aux Promos", parent=various).save()
        ForumTopic(forum=hall)

        # News
        friday = timezone.now()
        while friday.weekday() != 4:
            friday += timedelta(hours=6)
        friday.replace(hour=20, minute=0, second=0)

        # Event
        n = News(
            title="Apero barman",
            summary="Viens boire un coup avec les barmans",
            content="Glou glou glou glou glou glou glou",
            type="EVENT",
            club=bar_club,
            author=subscriber,
            is_moderated=True,
            moderator=skia,
        )
        n.save()
        NewsDate(
            news=n,
            start_date=timezone.now() + timedelta(hours=70),
            end_date=timezone.now() + timedelta(hours=72),
        ).save()
        n = News(
            title="Repas barman",
            summary="Enjoy la fin du semestre!",
            content="Viens donc t'enjailler avec les autres barmans aux "
                    "frais du BdF! \o/",
            type="EVENT",
            club=bar_club,
            author=subscriber,
            is_moderated=True,
            moderator=skia,
        )
        n.save()
        NewsDate(
            news=n,
            start_date=timezone.now() + timedelta(hours=72),
            end_date=timezone.now() + timedelta(hours=84),
        ).save()
        n = News(
            title="Repas fromager",
            summary="Wien manger du l'bon fromeug'",
            content="Fô viendre mangey d'la bonne fondue!",
            type="EVENT",
            club=bar_club,
            author=subscriber,
            is_moderated=True,
            moderator=skia,
        )
        n.save()
        NewsDate(
            news=n,
            start_date=timezone.now() + timedelta(hours=96),
            end_date=timezone.now() + timedelta(hours=100),
        ).save()
        n = News(
            title="SdF",
            summary="Enjoy la fin des finaux!",
            content="Viens faire la fête avec tout plein de gens!",
            type="EVENT",
            club=bar_club,
            author=subscriber,
            is_moderated=True,
            moderator=skia,
        )
        n.save()
        NewsDate(
            news=n,
            start_date=friday + timedelta(hours=24 * 7 + 1),
            end_date=timezone.now() + timedelta(hours=24 * 7 + 9),
        ).save()

        # Weekly
        n = News(
            title="Jeux sans faim",
            summary="Viens jouer!",
            content="Rejoins la fine équipe du Troll Penché et viens "
                    "d'amuser le Vendredi soir!",
            type="WEEKLY",
            club=troll,
            author=subscriber,
            is_moderated=True,
            moderator=skia,
        )
        n.save()
        for i in range(10):
            NewsDate(
                news=n,
                start_date=friday + timedelta(hours=24 * 7 * i),
                end_date=friday + timedelta(hours=24 * 7 * i + 8),
            ).save()

        # Create some data for pedagogy
        UV(
            code="PA00",
            author=User.objects.get(id=0),
            credit_type=settings.SITH_PEDAGOGY_UV_TYPE[3][0],
            manager="Laurent HEYBERGER",
            semester=settings.SITH_PEDAGOGY_UV_SEMESTER[3][0],
            language=settings.SITH_PEDAGOGY_UV_LANGUAGE[0][0],
            department=settings.SITH_PROFILE_DEPARTMENTS[-2][0],
            credits=5,
            title="Participation dans une association étudiante",
            objectives="* Permettre aux étudiants de réaliser, pendant un semestre, un projet culturel ou associatif et de le valoriser.",
            program="""* Semestre précédent proposition d'un projet et d'un cahier des charges
* Evaluation par un jury de six membres
* Si accord réalisation dans le cadre de l'UV
* Compte-rendu de l'expérience
* Présentation""",
            skills="""* Gérer un projet associatif ou une action éducative en autonomie:
* en produisant un cahier des charges qui -définit clairement le contexte du projet personnel -pose les jalons de ce projet -estime de manière réaliste les moyens et objectifs du projet -définit exactement les livrables attendus
* en étant capable de respecter ce cahier des charges ou, le cas échéant, de réviser le cahier des charges de manière argumentée.
* Relater son expérience dans un rapport:
* qui permettra à d'autres étudiants de poursuivre les actions engagées
* qui montre la capacité à s'auto-évaluer et à adopter une distance critique sur son action.""",
            key_concepts="""* Autonomie
* Responsabilité
* Cahier des charges
* Gestion de projet""",
            hours_THE=121,
            hours_TE=4,
        ).save()
def blog_import(blog_id):
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    permission = auth.is_blog_publisher(user, blog)

    reason = auth.check_template_lock(blog, True)

    tags = template_tags(blog=blog, user=user)

    import os, settings
    import_path = os.path.join(settings.APPLICATION_PATH, "data", "import.json")

    tags.status = reason

    if request.method == "POST":
        from core.models import db
        tpl = ''
        with db.atomic() as txn:
            import json
            from core.utils import string_to_date

            import_path = request.forms.getunicode('import_path')
            with open(import_path, 'r', encoding='utf8') as f:
                json_data = json.load(f)

            from core.models import page_status, MediaAssociation, Category
            from core.error import PageNotChanged
            from core.libs.peewee import InterfaceError
            from core.cms import media_filetypes

            format_str = "<b>{}</b> / (<i>{}</i>)"

            # TODO: go in chunks of 50 or something?
            # allow graceful disconnection?
            for n in json_data:
                q = []
                n_id = n['id']
                q.append("Checking {}".format(n_id))
                changed = False
                found = False

                match = Page.kv_get('legacy_id', n_id)

                if match.count() > 0:
                    if match[0].object_ref.blog == blog:
                        found = True
                        q.append(match[0].key + "/" + match[0].value +
                                 " / Exists: " + format_str.format(n['title'], n_id))
                        existing_entry = Page.load(match[0].objectid)
                        update = existing_entry.kv_get('update').count()
                        # raise Exception(update)
                        q.append('{} / {}'.format(
                            string_to_date(n['modified_date']).replace(tzinfo=None),
                            existing_entry.modified_date))

                        if (string_to_date(n['modified_date']).replace(tzinfo=None)
                                <= existing_entry.modified_date and update == 0):
                            q.append('Existing page {} not changed.'.format(
                                existing_entry.id))
                        else:
                            changed = True
                            q.append('Updating data for existing page {}.'.format(
                                existing_entry.id))

                            existing_entry.title = n['title']
                            existing_entry.text = n['text']
                            existing_entry.basename = n['basename']
                            existing_entry.excerpt = n['excerpt']
                            existing_entry.created_date = string_to_date(
                                n['created_date']).replace(tzinfo=None)
                            existing_entry.modified_date = string_to_date(
                                n['modified_date']).replace(tzinfo=None)
                            existing_entry.publication_date = string_to_date(
                                n['publication_date']).replace(tzinfo=None)

                            try:
                                existing_entry.save(user, False, False,
                                                    'New revision from import')
                            except PageNotChanged:
                                pass
                            except InterfaceError:
                                raise Exception(
                                    "Error saving {}. Check the JSON to make sure it's valid.".format(n_id))

                            for media in existing_entry.media:
                                media.kv_del()

                            existing_entry.clear_categories()
                            existing_entry.clear_kvs()
                            existing_entry.clear_tags()
                            existing_entry.clear_media()

                        entry = existing_entry

                if found is False:
                    q.append("Creating: " + format_str.format(n['title'], n_id))
                    changed = True
                    new_entry = Page(
                        title=n['title'],
                        text=n['text'],
                        basename=n['basename'],
                        excerpt=n['excerpt'],
                        user=user,
                        blog=blog,
                        created_date=string_to_date(n['created_date']),
                        publication_date=string_to_date(n['publication_date']),
                        modified_date=string_to_date(n['modified_date']),
                    )
                    new_entry.modified_date = new_entry.publication_date

                    if n['status'] in ('Publish', 'Published', 'Live'):
                        new_entry.status = page_status.published

                    new_entry.save(user)
                    entry = new_entry
                    q.append("New ID: {}".format(entry.id))

                # Everything from here on out is done only if the page changed
                if changed:
                    # Register a legacy ID for the page
                    entry.kv_set("legacy_id", n["id"])
                    entry.kv_set("legacy_user", n["user_id"])

                    # Category assignments
                    categories = n['categories']
                    if categories == []:
                        saved_page_category = PageCategory.create(
                            page=entry,
                            category=blog.default_category,
                            primary=True).save()
                    else:
                        primary = True
                        for category in categories:
                            cat_exists = False
                            category_id = category['id']
                            existing_category = Category.kv_get('legacy_id',
                                                                category_id)
                            if existing_category.count() > 0:
                                if existing_category[0].object_ref.blog == blog:
                                    cat_exists = True

                            if cat_exists is False:
                                q.append('Created new category {}/{}'.format(
                                    category_id, category['name']))
                                new_category = Category.create(
                                    blog=blog,
                                    title=category['name'],
                                    parent_category=getattr(category, 'parent', None))
                                new_category.save()
                                new_category.kv_set('legacy_id', category_id)
                            else:
                                new_category = Category.load(
                                    existing_category[0].objectid)
                                q.append('Added to existing category {}/{}'.format(
                                    new_category.id, category['name']))

                            saved_page_category = PageCategory.create(
                                page=entry,
                                category=new_category,
                                primary=primary).save()

                            primary = False

                    # Check to make sure a default category exists for the whole blog.
                    # If not, assign one based on the lowest ID.
                    # This can always be reassigned later.

                    # Register tags
                    tags_added, tags_existing, _ = Tag.add_or_create(n['tags'],
                                                                     page=entry)
                    q.append('Tags added: {}'.format(
                        ','.join(n.tag for n in tags_added)))
                    q.append('Tags existing: {}'.format(
                        ','.join(n.tag for n in tags_existing)))

                    # Register KVs
                    kvs = n['kvs']
                    for key in kvs:
                        if key != "":
                            value = kvs[key]
                            entry.kv_set(key, value)
                            q.append('KV: {}:{}'.format(key, value))

                    # Register media
                    media = n['media']
                    for m in media:
                        if 'path' not in m:
                            continue
                        path = os.path.split(m['path'])
                        try:
                            new_media = Media.get(Media.url == m['url'])
                        except:
                            new_media = Media(
                                filename=path[1],
                                path=m['path'],
                                url=m['url'],
                                type=media_filetypes.image,
                                created_date=string_to_date(m['created_date']),
                                modified_date=string_to_date(m['modified_date']),
                                friendly_name=m['friendly_name'],
                                user=user,
                                blog=blog,
                                site=blog.site)

                        # TODO: RBF
                        try:
                            new_media.save()
                        except Exception:
                            continue

                        media_association = MediaAssociation(media=new_media,
                                                             page=entry)
                        media_association.save()

                        # Save legacy ID to KV on media
                        if 'id' in m:
                            new_media.kv_set('legacy_id', m['id'])

                        q.append('IMG: {}'.format(new_media.url))

                        # add tags for media
                        q.append('Tags: {}'.format(m['tags']))
                        new_tags = Tag.add_or_create(m['tags'], media=new_media)

                        kvs = m['kvs']
                        for key in kvs:
                            value = kvs[key]
                            new_media.kv_set(key, value)
                            q.append('KV: {}:{}'.format(key, value))

                    fileinfo.build_pages_fileinfos((entry,))
                    fileinfo.build_archives_fileinfos((entry,))

                tpl += ('<p>'.join(q)) + '<hr/>'

        return tpl

        # TODO:
        # Import or create categories as needed
        # Categories in export will need to have parent-child data
        # categories should have legacy identifiers where possible too
        # Import image files, assign those legacy KV identifiers
        # Modify URLs for imported images in posts
        # Make importing of image assets optional

    else:
        tpl = template('ui/ui_blog_import',
                       menu=generate_menu('blog_import', blog),
                       # search_context=(search_context['blog'], blog),
                       import_path=import_path,
                       **tags.__dict__)
        return tpl
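# The import above keys every page on a `legacy_id` key/value pair so that
# re-running it updates pages instead of duplicating them. A minimal sketch of
# that match-or-create pattern in isolation; the in-memory FakePage below is a
# hypothetical stand-in for illustration, not the CMS's real Page/kv API.
class FakePage:
    _kv = {}  # maps (key, value) -> page instance

    def __init__(self, title):
        self.title = title

    @classmethod
    def kv_get(cls, key, value):
        # Returns a list, loosely mirroring the queryset-like object used above.
        hit = cls._kv.get((key, str(value)))
        return [hit] if hit else []

    def kv_set(self, key, value):
        FakePage._kv[(key, str(value))] = self

def import_one(record):
    match = FakePage.kv_get('legacy_id', record['id'])
    if match:
        return match[0]                   # already imported: update in place
    page = FakePage(record['title'])      # first import: create and register
    page.kv_set('legacy_id', record['id'])
    return page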