def register_media(filename, path, user, **ka):
    '''
    Create and persist a Media record for an uploaded file.

    The media type is derived from the file extension. An optional
    'friendly_name' keyword overrides the display name (defaults to the
    filename). If a 'page' keyword is supplied, the media is additionally
    associated with that page and inherits the page's blog, site, and a
    URL built from the blog's media path.

    Returns the saved Media instance.
    '''
    extension = os.path.splitext(filename)[1][1:]
    media = Media(
        filename=filename,
        path=path,
        type=media_filetypes.types[extension],
        user=user,
        friendly_name=ka.get('friendly_name', filename))
    media.save()

    if 'page' in ka:
        page = ka['page']
        # Link the media to the page, and through it to the blog and site.
        association = MediaAssociation(
            media=media,
            page=page,
            blog=page.blog,
            site=page.blog.site)
        association.save()
        media.blog = page.blog
        media.site = page.blog.site
        media.url = page.blog.url + page.blog.media_path + "/" + media.filename
        media.save()

    return media
def item_pre_registration(request):
    """Register a PreRegisteredItem with one photo from a POST request.

    Expects POST fields 'uniqueid', 'category', 'description', 'tags' and a
    base64-encoded image in 'media1'. The item title is copied from the tags
    field. Returns a JSON HttpResponse: {'result': 'OK', 'image': <url>} on
    success, {'result': 'ERROR'} on failure.

    Fix: the original returned None for non-POST requests, which Django
    rejects at runtime; every path now returns an HttpResponse.
    """
    if request.method == 'POST':
        try:
            uid = request.POST.get('uniqueid')
            category = request.POST.get('category')
            description = request.POST.get('description')
            tags = request.POST.get('tags')
            # NOTE(review): title is deliberately a copy of tags -- confirm intended.
            title = tags
            media = request.POST.get('media1')

            new_item = PreRegisteredItem()
            new_item.unique_id = uid
            new_item.title = title
            new_item.tags = tags
            new_item.description = description
            new_item.category = category
            new_item.owner = request.user
            new_item.save()

            # Decode the base64 payload into a Media row tied to the item.
            photo = Media()
            photo.of_item = new_item
            photo.media_type = "PHOTO"
            save_base64image_to_media(photo, media)
            photo.save()

            return HttpResponse(
                json.dumps({'result': 'OK', 'image': photo.data.url}),
                content_type="application/json")
        except Exception:
            # Best-effort endpoint: log the traceback, report a generic error.
            traceback.print_exc()
            return HttpResponse(json.dumps({'result': 'ERROR'}),
                                content_type="application/json")
    # Non-POST requests get an explicit error instead of an implicit None.
    return HttpResponse(json.dumps({'result': 'ERROR'}),
                        content_type="application/json")
def item_registration(request):
    """Register a found Item (with photos) from a POST, or render the form.

    POST fields: 'uniqueid', 'title', 'category', 'description', 'tags',
    'location' and a JSON list of base64-encoded images in 'media'. On
    success returns {'result': 'OK', 'pk': <id>} as JSON; on any failure
    returns {'result': 'ERROR'}. Non-POST requests render the registration
    template.

    Fixes: the 'location' value is read from POST once and reused (it was
    read twice, leaving an unused local); the timestamp is taken once so
    the stored date and time cannot straddle midnight.
    """
    if request.method == 'POST':
        try:
            uid = request.POST.get('uniqueid')
            title = request.POST.get('title')
            category = request.POST.get('category')
            description = request.POST.get('description')
            tags = request.POST.get('tags')
            location = request.POST.get('location')
            photos = json.loads(request.POST['media'])

            new_item = Item()
            new_item.title = title
            new_item.unique_id = uid
            new_item.tags = tags
            new_item.description = description
            new_item.location = location
            new_item.category = category
            # Single snapshot of "now" for both date and time fields.
            now = datetime.datetime.now()
            new_item.date_field = now.strftime("%Y-%m-%d")
            new_item.time_field = now.strftime("%H:%M:%S")
            new_item.found_by_user = request.user
            new_item.save()

            # One Media row per uploaded base64 image.
            for media in photos:
                photo = Media()
                photo.of_item = new_item
                photo.media_type = "PHOTO"
                save_base64image_to_media(photo, media)
                photo.save()

            # Refresh the search index so the new item is findable immediately.
            call_command('update_index')
            return HttpResponse(
                json.dumps({'result': 'OK', 'pk': new_item.pk}),
                content_type="application/json")
        except Exception:
            traceback.print_exc()
            return HttpResponse(json.dumps({'result': 'ERROR'}),
                                content_type="application/json")

    context = RequestContext(request, {'request': request, 'user': request.user})
    return render_to_response('public/registerfounditem.html',
                              context_instance=context)
def item_registration(request):
    """Register a found Item with its photos, or render the registration form.

    On POST, reads 'uniqueid', 'title', 'category', 'description', 'tags',
    'location' and a JSON array of base64 images under 'media', saves the
    item stamped with the current date/time, stores each photo, rebuilds
    the search index, and answers with JSON ({'result': 'OK', 'pk': ...}
    or {'result': 'ERROR'}). Any other method renders the form template.

    Fixes: reuse the 'location' already read from POST (it was fetched
    twice, leaving a dead local); call datetime.now() once so date_field
    and time_field come from the same instant.
    """
    if request.method == 'POST':
        try:
            uid = request.POST.get('uniqueid')
            title = request.POST.get('title')
            category = request.POST.get('category')
            description = request.POST.get('description')
            tags = request.POST.get('tags')
            location = request.POST.get('location')
            photos = json.loads(request.POST['media'])

            new_item = Item()
            new_item.title = title
            new_item.unique_id = uid
            new_item.tags = tags
            new_item.description = description
            new_item.location = location
            new_item.category = category
            # One timestamp for both fields -- avoids midnight skew.
            now = datetime.datetime.now()
            new_item.date_field = now.strftime("%Y-%m-%d")
            new_item.time_field = now.strftime("%H:%M:%S")
            new_item.found_by_user = request.user
            new_item.save()

            for media in photos:
                photo = Media()
                photo.of_item = new_item
                photo.media_type = "PHOTO"
                save_base64image_to_media(photo, media)
                photo.save()

            call_command('update_index')
            return HttpResponse(
                json.dumps({'result': 'OK', 'pk': new_item.pk}),
                content_type="application/json")
        except Exception:
            traceback.print_exc()
            return HttpResponse(json.dumps({'result': 'ERROR'}),
                                content_type="application/json")

    context = RequestContext(request, {'request': request, 'user': request.user})
    return render_to_response('public/registerfounditem.html',
                              context_instance=context)
def item_pre_registration(request):
    """Create a PreRegisteredItem plus a single photo from a POST request.

    Reads 'uniqueid', 'category', 'description', 'tags' and the base64
    image 'media1' from POST; the title is the tags string. Responds with
    JSON: {'result': 'OK', 'image': <photo url>} or {'result': 'ERROR'}.

    Fix: non-POST requests previously fell off the end and returned None,
    which Django raises on; they now get an explicit error response.
    """
    if request.method == 'POST':
        try:
            uid = request.POST.get('uniqueid')
            category = request.POST.get('category')
            description = request.POST.get('description')
            tags = request.POST.get('tags')
            # NOTE(review): title intentionally mirrors tags -- confirm.
            title = tags
            media = request.POST.get('media1')

            new_item = PreRegisteredItem()
            new_item.unique_id = uid
            new_item.title = title
            new_item.tags = tags
            new_item.description = description
            new_item.category = category
            new_item.owner = request.user
            new_item.save()

            photo = Media()
            photo.of_item = new_item
            photo.media_type = "PHOTO"
            save_base64image_to_media(photo, media)
            photo.save()

            return HttpResponse(
                json.dumps({'result': 'OK', 'image': photo.data.url}),
                content_type="application/json")
        except Exception:
            traceback.print_exc()
            return HttpResponse(json.dumps({'result': 'ERROR'}),
                                content_type="application/json")
    # Explicit response for non-POST methods (was: implicit None).
    return HttpResponse(json.dumps({'result': 'ERROR'}),
                        content_type="application/json")
def _create_items(self):
    """Reset the database and repopulate it with demo users and sample items.

    Runs the migrations, wipes all Item and CustomUser rows, creates a
    superuser and two demo users, then loads one pre-registered (lost)
    item and a catalogue of found items -- most with a photo fetched from
    a remote URL -- and finally rebuilds the search index.

    Fix: several sample time strings used a doubled colon (e.g.
    "14:33::22"), which is not a valid HH:MM:SS value; they are
    normalised here to match the correctly formatted entries.
    """

    def attach_photo(item, url):
        # Download the remote image into a new Media row tied to `item`.
        photo = Media()
        photo.of_item = item
        photo.media_type = "PHOTO"
        save_url_to_image(photo, url)
        photo.save()

    def add_item(found_by, title, tags, description, location, category,
                 date_field, time_field, unique_id=None, photo_url=None):
        # Create and save one found Item; optionally set its unique id
        # and attach a photo. unique_id is only assigned when provided,
        # matching the original code which left it untouched on some items.
        item = Item()
        if unique_id is not None:
            item.unique_id = unique_id
        item.title = title
        item.tags = tags
        item.description = description
        item.location = location
        item.category = category
        item.date_field = date_field
        item.time_field = time_field
        item.found_by_user = found_by
        item.save()
        if photo_url is not None:
            attach_photo(item, photo_url)
        return item

    call_command('migrate', 'auth')
    sys.stdout.write("\n==========Auth App Migrated===========\n")
    call_command('migrate')
    sys.stdout.write("\n==========Other Apps Migrated===========\n")
    call_command('syncdb', interactive=True)

    # Start from a clean slate.
    Item.objects.all().delete()
    CustomUser.objects.all().delete()
    CustomUser.objects.create_superuser(username='******', password='******',
                                        email='*****@*****.**')

    user1 = CustomUser()
    user1.username = "******"
    user1.first_name = "Nikolaus"
    user1.last_name = "Mickaelson"
    user1.email = "*****@*****.**"
    user1.prefered_way_of_contact = "IBF"
    user1.phone_number = "12345673"
    user1.set_password('nick')
    user1.save()

    user2 = CustomUser()
    user2.username = "******"
    user2.first_name = "Mark"
    user2.last_name = "Johnson"
    user2.email = "*****@*****.**"
    user2.prefered_way_of_contact = "PHONE"
    user2.phone_number = "122456141"
    user2.set_password("mark")
    user2.save()

    # One pre-registered (lost) item owned by the first matching user.
    pitem = PreRegisteredItem()
    pitem.unique_id = ''
    pitem.title = "Green Adidas Bag "
    pitem.tags = "Bag"
    pitem.description = 'Green Bag lost near Southampton'
    pitem.location = "Southampton"
    pitem.category = "Bag"
    pitem.owner = CustomUser.objects.filter(username='******')[0]
    pitem.lost = True
    pitem.save()
    attach_photo(pitem, 'http://www.fashionvortex.com/image/cache/data/Medici/MF-2475-Gr-a-600x600.jpg')

    add_item(user1, "Black Samsung Galaxy S6 34GB", "Black Samsung Galaxy S6 34GB",
             'Black Samsung Galaxy S6 found in Stile', "Southampton", "Electronics",
             "2015-09-15", "14:33:22", unique_id='123456789')
    add_item(user1, "Bag Green", "Bag Green",
             'Green bag found on the poll edge at "Summer Time"', "london", "Bag",
             "2016-09-09", "10:33:22",
             photo_url='http://www.suggestcamera.com/wp-content/uploads/2015/08/81K-jtyW82L._SL1500_.jpg')
    add_item(user1, "Big Bag", "Bag",
             'Green bag found on the poll edge at "Summer Time"', "london", "Bag",
             "2016-09-09", "10:33:22",
             photo_url='http://i.dailymail.co.uk/i/pix/2013/11/13/article-2505060-0B22502B000005DC-342_634x422.jpg')
    add_item(user1, "Dell Leptop Inspiron", "Leptop Dell Black",
             '15 inch Dell found in Winchester"', "london", "Electronics",
             "2015-09-18", "10:33:22", unique_id='098765432')
    add_item(user1, "Apple MacBook", "Apple MacBook",
             'Apple MacBook 15" found at Hartley Library', "Southampton", "Electronics",
             "2015-11-16", "22:35:22", unique_id='123456788',
             photo_url='http://static.trustedreviews.com/94/9736c1/5242/15168-crw398s.jpg')
    add_item(user1, "UK EU e-passport", "Passport UK EU e-passport",
             'Passport found outside Sprinkles', "Southampton", "ID/Cards",
             "2015-07-23", "12:07:22", unique_id='123459876',
             photo_url='http://i.telegraph.co.uk/multimedia/archive/01595/mp-passport-pa_1595880b.jpg')
    add_item(user2, "The Lost Symbol Paperback", "Dan Brown The Lost Symbol Paperback ",
             'Dan Brown The Lost Symbol paperback edition', "Bournemouth", "Books",
             "2015-09-30", "17:53:28", unique_id='121212123',
             photo_url='http://thumbs1.ebaystatic.com/d/l225/m/mIuB9Oannj3xR0YhYCIiEZg.jpg')
    add_item(user2, "Scarf", "Scarf Grey Dark Grey Stripes ",
             'Grey Scarf with Dark Grey Stripes', "Surrey", "Clothes",
             "2015-10-28", "13:53:28", unique_id='666777888',
             photo_url='http://assets3.howtospendit.ft-static.com/images/52/46/d7/5246d742-1619-46b4-83c8-9e9d726203da_three_eighty.png')
    add_item(user2, 'Black Leather necklace', 'Black Leather necklace',
             "leather necklace black men unisex", "Glasgow", "Accessories",
             "2015-11-28", "13:27:28", unique_id='898998989',
             photo_url='http://cdn.notonthehighstreet.com/system/product_images/images/001/615/301/original_mens-leather-necklace.jpg')
    add_item(user2, 'J R R Tolkien -', 'J R R Tolkien - The Hobbit Hard Cover',
             "tolkien hobbit the hobbit hardcover", "Eastleigh", "Books",
             "2015-10-30", "10:41:28", unique_id='454647489',
             photo_url='https://i.ytimg.com/vi/X75pnPtqhvE/maxresdefault.jpg')
    add_item(user2, 'Sony Walkman MP4 Player Black', 'Sony Walkman MP4 Player Black',
             "sony walkman mp4 player mp3 black ", "London", "Electronics",
             "2015-10-30", "10:41:28", unique_id='145897123',
             photo_url='https://i.ytimg.com/vi/PI_nQ3MSSHI/maxresdefault.jpg')
    add_item(user2, 'Chihuahua', 'Lost Chihuahua found on Portswood Road',
             "chihuahua dog portswood southampton lost ", "Southampton", "Animal",
             "2015-11-17", "22:41:28", unique_id='321654987',
             photo_url='https://canophilia.files.wordpress.com/2014/04/chihuahua_4.jpg')
    add_item(user2, 'Adele - Rolling in the Deep', 'Adele - Rolling in the Deep CD Album',
             "adele rolling in the deep cd album", "Manchester", "Other",
             "2015-09-27", "13:44:28", unique_id='125678991',
             photo_url='http://thumbs2.ebaystatic.com/d/l225/m/mQTzqU9kSL8uIcBHIkfwOqA.jpg')
    add_item(user2, "Mug", "mug white solent southampton",
             'Found this mug at the Solent Library, 2nd Level', "Southampton", "Other",
             "2015-10-06", "09:13:28", unique_id='123654897',
             photo_url='https://s-media-cache-ak0.pinimg.com/736x/7c/01/a9/7c01a9440c8e8afde4b11ab4acbfcd3d.jpg')

    sys.stdout.write("\n==========Database Re-populated===========\n")
    call_command('rebuild_index')
def post(self, request):
    """Persist an uploaded file as a Media record and return it serialized."""
    uploaded_file = request.data['file']
    new_media = Media(file=uploaded_file)
    new_media.save()
    payload = Serializer(new_media, model=Media).data
    return Response(payload, status=status.HTTP_200_OK)
def blog_import(blog_id):
    '''
    Import pages from a JSON export file into the given blog.

    GET renders the import form with a default path; POST reads the JSON
    file named in the form and, inside a single transaction, creates or
    updates each page together with its categories, tags, key/value pairs
    and media, returning an HTML progress report.

    Fixes: the bare `except:` around Media.get is narrowed to
    Media.DoesNotExist (peewee's missing-row signal); the generator
    expressions no longer shadow the loop variable `n`; dead local
    assignments and a commented-out debug raise are removed.
    '''
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    # Permission/lock checks -- presumably these raise on failure; TODO confirm.
    auth.is_blog_publisher(user, blog)
    reason = auth.check_template_lock(blog, True)
    tags = template_tags(blog=blog, user=user)
    import os, settings
    # Default path shown in the form; POST may override it.
    import_path = os.path.join(settings.APPLICATION_PATH, "data", "import.json")
    tags.status = reason

    if request.method == "POST":
        from core.models import db
        tpl = ''
        # Run the whole import atomically so a failure rolls everything back.
        with db.atomic():
            import json
            from core.utils import string_to_date
            import_path = request.forms.getunicode('import_path')
            with open(import_path, 'r', encoding='utf8') as f:
                json_data = json.load(f)
            from core.models import page_status, MediaAssociation, Category
            from core.error import PageNotChanged
            from core.libs.peewee import InterfaceError
            from core.cms import media_filetypes

            format_str = "<b>{}</b> / (<i>{}</i>)"
            # TODO: go in chunks of 50 or something?
            # allow graceful disconnection?
            for n in json_data:
                q = []
                n_id = n['id']
                q.append("Checking {}".format(n_id))
                changed = False
                found = False
                # Pages imported earlier carry a 'legacy_id' KV; match on it.
                match = Page.kv_get('legacy_id', n_id)
                if match.count() > 0:
                    if match[0].object_ref.blog == blog:
                        found = True
                        q.append(match[0].key + "/" + match[0].value +
                                 " / Exists: " + format_str.format(n['title'], n_id))
                        existing_entry = Page.load(match[0].objectid)
                        # An 'update' KV forces a refresh even when timestamps match.
                        update = existing_entry.kv_get('update').count()
                        q.append('{} / {}'.format(
                            string_to_date(n['modified_date']).replace(tzinfo=None),
                            existing_entry.modified_date))
                        if (string_to_date(n['modified_date']).replace(tzinfo=None)
                                <= existing_entry.modified_date and update == 0):
                            q.append('Existing page {} not changed.'.format(existing_entry.id))
                        else:
                            changed = True
                            q.append('Updating data for existing page {}.'.format(existing_entry.id))
                            existing_entry.title = n['title']
                            existing_entry.text = n['text']
                            existing_entry.basename = n['basename']
                            existing_entry.excerpt = n['excerpt']
                            existing_entry.created_date = string_to_date(n['created_date']).replace(tzinfo=None)
                            existing_entry.modified_date = string_to_date(n['modified_date']).replace(tzinfo=None)
                            existing_entry.publication_date = string_to_date(n['publication_date']).replace(tzinfo=None)
                            try:
                                existing_entry.save(user, False, False, 'New revision from import')
                            except PageNotChanged:
                                pass
                            except InterfaceError:
                                raise Exception(
                                    "Error saving {}. Check the JSON to make sure it's valid.".format(n_id))
                            # Strip old associations; they are rebuilt below.
                            for media in existing_entry.media:
                                media.kv_del()
                            existing_entry.clear_categories()
                            existing_entry.clear_kvs()
                            existing_entry.clear_tags()
                            existing_entry.clear_media()
                        entry = existing_entry
                if found is False:
                    q.append("Creating: " + format_str.format(n['title'], n_id))
                    changed = True
                    new_entry = Page(
                        title=n['title'],
                        text=n['text'],
                        basename=n['basename'],
                        excerpt=n['excerpt'],
                        user=user,
                        blog=blog,
                        created_date=string_to_date(n['created_date']),
                        publication_date=string_to_date(n['publication_date']),
                        modified_date=string_to_date(n['modified_date']),
                    )
                    new_entry.modified_date = new_entry.publication_date
                    if n['status'] in ('Publish', 'Published', 'Live'):
                        new_entry.status = page_status.published
                    new_entry.save(user)
                    entry = new_entry
                    q.append("New ID: {}".format(entry.id))

                if changed:
                    # Register a legacy ID for the page
                    entry.kv_set("legacy_id", n["id"])
                    entry.kv_set("legacy_user", n["user_id"])

                    # Category assignments
                    categories = n['categories']
                    if categories == []:
                        PageCategory.create(
                            page=entry,
                            category=blog.default_category,
                            primary=True).save()
                    else:
                        primary = True
                        for category in categories:
                            cat_exists = False
                            category_id = category['id']
                            existing_category = Category.kv_get('legacy_id', category_id)
                            if existing_category.count() > 0:
                                if existing_category[0].object_ref.blog == blog:
                                    cat_exists = True
                            if cat_exists is False:
                                q.append('Created new category {}/{}'.format(category_id, category['name']))
                                new_category = Category.create(
                                    blog=blog,
                                    title=category['name'],
                                    parent_category=getattr(category, 'parent', None))
                                new_category.save()
                                new_category.kv_set('legacy_id', category_id)
                            else:
                                new_category = Category.load(existing_category[0].objectid)
                                q.append('Added to existing category {}/{}'.format(new_category.id, category['name']))
                            # First category in the export is the primary one.
                            PageCategory.create(
                                page=entry,
                                category=new_category,
                                primary=primary).save()
                            primary = False

                    # Check to make sure a default category exists for the whole blog.
                    # If not, assign one based on the lowest ID.
                    # This can always be reassigned later.

                    # Register tags
                    tags_added, tags_existing, _ = Tag.add_or_create(n['tags'], page=entry)
                    # Generator variable renamed: the original shadowed the loop's 'n'.
                    q.append('Tags added: {}'.format(','.join(t.tag for t in tags_added)))
                    q.append('Tags existing: {}'.format(','.join(t.tag for t in tags_existing)))

                    # Register KVs
                    kvs = n['kvs']
                    for key in kvs:
                        if key != "":
                            value = kvs[key]
                            entry.kv_set(key, value)
                            q.append('KV: {}:{}'.format(key, value))

                    # Register media
                    media = n['media']
                    for m in media:
                        if 'path' not in m:
                            continue
                        path = os.path.split(m['path'])
                        try:
                            new_media = Media.get(Media.url == m['url'])
                        except Media.DoesNotExist:
                            # Narrowed from a bare except: peewee's .get()
                            # signals a missing row with DoesNotExist.
                            new_media = Media(
                                filename=path[1],
                                path=m['path'],
                                url=m['url'],
                                type=media_filetypes.image,
                                created_date=string_to_date(m['created_date']),
                                modified_date=string_to_date(m['modified_date']),
                                friendly_name=m['friendly_name'],
                                user=user,
                                blog=blog,
                                site=blog.site)
                        # TODO: RBF
                        try:
                            new_media.save()
                        except Exception:
                            continue
                        media_association = MediaAssociation(media=new_media, page=entry)
                        media_association.save()
                        # Save legacy ID to KV on media
                        if 'id' in m:
                            new_media.kv_set('legacy_id', m['id'])
                        q.append('IMG: {}'.format(new_media.url))
                        # add tags for media
                        q.append('Tags: {}'.format(m['tags']))
                        Tag.add_or_create(m['tags'], media=new_media)
                        kvs = m['kvs']
                        for key in kvs:
                            value = kvs[key]
                            new_media.kv_set(key, value)
                            q.append('KV: {}:{}'.format(key, value))

                fileinfo.build_pages_fileinfos((entry,))
                fileinfo.build_archives_fileinfos((entry,))
                tpl += ('<p>'.join(q)) + '<hr/>'
        return tpl
        # TODO:
        # Import or create categories as needed
        # Categories in export will need to have parent-child data
        # categories should have legacy identifiers where possible too
        # Import image files, assign those legacy KV identifiers
        # Modify URLs for imported images in posts
        # Make importing of image assets optional
    else:
        tpl = template(
            'ui/ui_blog_import',
            menu=generate_menu('blog_import', blog),
            import_path=import_path,
            **tags.__dict__)
    return tpl
def blog_import(blog_id):
    '''
    Import blog pages from a JSON export.

    On GET, renders the import form with a default import path. On POST,
    loads the JSON file named in the form and, inside one transaction,
    creates or refreshes each page together with its categories, tags,
    key/value pairs and media, returning an HTML progress report.
    '''
    user = auth.is_logged_in(request)
    blog = Blog.load(blog_id)
    permission = auth.is_blog_publisher(user, blog)
    reason = auth.check_template_lock(blog, True)
    tags = template_tags(blog=blog, user=user)
    import os, settings
    import_path = os.path.join(settings.APPLICATION_PATH, "data", "import.json")
    tags.status = reason

    if request.method == "POST":
        from core.models import db
        rendered = ''
        with db.atomic() as txn:
            import json
            from core.utils import string_to_date
            import_path = request.forms.getunicode('import_path')
            with open(import_path, 'r', encoding='utf8') as f:
                json_data = json.load(f)
            from core.models import page_status, MediaAssociation, Category
            from core.error import PageNotChanged
            from core.libs.peewee import InterfaceError
            from core.cms import media_filetypes

            title_fmt = "<b>{}</b> / (<i>{}</i>)"
            # TODO: go in chunks of 50 or something?
            # allow graceful disconnection?
            for record in json_data:
                progress = []
                record_id = record['id']
                progress.append("Checking {}".format(record_id))
                dirty = False
                already_present = False
                legacy = Page.kv_get('legacy_id', record_id)
                if legacy.count() > 0:
                    if legacy[0].object_ref.blog == blog:
                        already_present = True
                        progress.append(legacy[0].key + "/" + legacy[0].value +
                                        " / Exists: " + title_fmt.format(record['title'], record_id))
                        page_obj = Page.load(legacy[0].objectid)
                        force_update = page_obj.kv_get('update').count()
                        progress.append('{} / {}'.format(
                            string_to_date(record['modified_date']).replace(tzinfo=None),
                            page_obj.modified_date))
                        if (string_to_date(record['modified_date']).replace(tzinfo=None)
                                <= page_obj.modified_date and force_update == 0):
                            progress.append('Existing page {} not changed.'.format(page_obj.id))
                        else:
                            dirty = True
                            progress.append('Updating data for existing page {}.'.format(page_obj.id))
                            page_obj.title = record['title']
                            page_obj.text = record['text']
                            page_obj.basename = record['basename']
                            page_obj.excerpt = record['excerpt']
                            page_obj.created_date = string_to_date(record['created_date']).replace(tzinfo=None)
                            page_obj.modified_date = string_to_date(record['modified_date']).replace(tzinfo=None)
                            page_obj.publication_date = string_to_date(record['publication_date']).replace(tzinfo=None)
                            try:
                                page_obj.save(user, False, False, 'New revision from import')
                            except PageNotChanged:
                                pass
                            except InterfaceError:
                                raise Exception(
                                    "Error saving {}. Check the JSON to make sure it's valid.".format(record_id))
                            for media in page_obj.media:
                                media.kv_del()
                            page_obj.clear_categories()
                            page_obj.clear_kvs()
                            page_obj.clear_tags()
                            page_obj.clear_media()
                        entry = page_obj
                if already_present is False:
                    progress.append("Creating: " + title_fmt.format(record['title'], record_id))
                    dirty = True
                    entry = Page(
                        title=record['title'],
                        text=record['text'],
                        basename=record['basename'],
                        excerpt=record['excerpt'],
                        user=user,
                        blog=blog,
                        created_date=string_to_date(record['created_date']),
                        publication_date=string_to_date(record['publication_date']),
                        modified_date=string_to_date(record['modified_date']),
                    )
                    entry.modified_date = entry.publication_date
                    if record['status'] in ('Publish', 'Published', 'Live'):
                        entry.status = page_status.published
                    entry.save(user)
                    progress.append("New ID: {}".format(entry.id))

                if dirty:
                    # Remember where this page came from.
                    entry.kv_set("legacy_id", record["id"])
                    entry.kv_set("legacy_user", record["user_id"])
                    # Category assignments.
                    categories = record['categories']
                    if categories == []:
                        saved_page_category = PageCategory.create(
                            page=entry,
                            category=blog.default_category,
                            primary=True).save()
                    else:
                        primary = True
                        for cat_data in categories:
                            cat_exists = False
                            cat_legacy_id = cat_data['id']
                            cat_match = Category.kv_get('legacy_id', cat_legacy_id)
                            if cat_match.count() > 0:
                                if cat_match[0].object_ref.blog == blog:
                                    cat_exists = True
                            if cat_exists is False:
                                progress.append('Created new category {}/{}'.format(cat_legacy_id, cat_data['name']))
                                cat_obj = Category.create(
                                    blog=blog,
                                    title=cat_data['name'],
                                    parent_category=getattr(cat_data, 'parent', None))
                                cat_obj.save()
                                cat_obj.kv_set('legacy_id', cat_legacy_id)
                            else:
                                cat_obj = Category.load(cat_match[0].objectid)
                                progress.append('Added to existing category {}/{}'.format(cat_obj.id, cat_data['name']))
                            saved_page_category = PageCategory.create(
                                page=entry,
                                category=cat_obj,
                                primary=primary).save()
                            primary = False
                    # Check to make sure a default category exists for the whole blog.
                    # If not, assign one based on the lowest ID.
                    # This can always be reassigned later.
                    # Register tags.
                    tags_added, tags_existing, _ = Tag.add_or_create(record['tags'], page=entry)
                    progress.append('Tags added: {}'.format(','.join(n.tag for n in tags_added)))
                    progress.append('Tags existing: {}'.format(','.join(n.tag for n in tags_existing)))
                    # Register key/value pairs.
                    kv_map = record['kvs']
                    for key in kv_map:
                        if key != "":
                            value = kv_map[key]
                            entry.kv_set(key, value)
                            progress.append('KV: {}:{}'.format(key, value))
                    # Register media.
                    for media_item in record['media']:
                        if 'path' not in media_item:
                            continue
                        path_parts = os.path.split(media_item['path'])
                        try:
                            media_row = Media.get(Media.url == media_item['url'])
                        except:
                            # NOTE(review): bare except kept as-is; presumably meant
                            # to catch Media.DoesNotExist -- confirm before narrowing.
                            media_row = Media(
                                filename=path_parts[1],
                                path=media_item['path'],
                                url=media_item['url'],
                                type=media_filetypes.image,
                                created_date=string_to_date(media_item['created_date']),
                                modified_date=string_to_date(media_item['modified_date']),
                                friendly_name=media_item['friendly_name'],
                                user=user,
                                blog=blog,
                                site=blog.site)
                        # TODO: RBF
                        try:
                            media_row.save()
                        except Exception:
                            continue
                        MediaAssociation(media=media_row, page=entry).save()
                        # Save the legacy ID to a KV on the media row.
                        if 'id' in media_item:
                            media_row.kv_set('legacy_id', media_item['id'])
                        progress.append('IMG: {}'.format(media_row.url))
                        # Add tags for the media.
                        progress.append('Tags: {}'.format(media_item['tags']))
                        new_tags = Tag.add_or_create(media_item['tags'], media=media_row)
                        kv_map = media_item['kvs']
                        for key in kv_map:
                            value = kv_map[key]
                            media_row.kv_set(key, value)
                            progress.append('KV: {}:{}'.format(key, value))

                fileinfo.build_pages_fileinfos((entry,))
                fileinfo.build_archives_fileinfos((entry,))
                rendered += ('<p>'.join(progress)) + '<hr/>'
        return rendered
        # TODO:
        # Import or create categories as needed
        # Categories in export will need to have parent-child data
        # categories should have legacy identifiers where possible too
        # Import image files, assign those legacy KV identifiers
        # Modify URLs for imported images in posts
        # Make importing of image assets optional
    else:
        rendered = template(
            'ui/ui_blog_import',
            menu=generate_menu('blog_import', blog),
            import_path=import_path,
            **tags.__dict__)
    return rendered