def setObject(d, o):
    """Create a Gbobject titled ``d['title']`` under the node with id ``o``.

    ``o`` must identify a node whose referenced model is an ``objecttype``
    or ``metatype``.  Returns the new object's id on success, a message
    tuple when an object with that title already exists, or an error
    string when the node is missing or of the wrong type.
    """
    try:
        node = NID.objects.get(id=o)
        module_name = node.ref._meta.module_name
        if module_name not in ('objecttype', 'metatype'):
            return "Not of type Objecttype or metatype"
        parent = Objecttype.objects.get(id=o)
        members = parent.get_members  # presumably a property yielding member objects — TODO confirm
        for member in members:
            if str(member.title) == d['title']:
                return "Object", d['title'], "already exists for", parent.title
        # BUG FIX: the original only created the object when a counter
        # reached len(members) *inside* the loop, so an objecttype with
        # no members silently returned None and nothing was created.
        gbobject = Gbobject(title=d['title'], slug=d['slug'])
        gbobject.save()
        gbobject.objecttypes.add(parent)
        return gbobject.id
    except NID.DoesNotExist:
        return "Node does not exist"
def set_object(d, o):
    """ Given a objecttypeid and a dictionary of title,slug,it creates an Object for that objecttypeid => metaWeblog.setAttributetype(d['title' = '',slug = ''],objecttypeid)

    Returns the new Gbobject's id on success, a message tuple when an
    object with that title already exists under the objecttype, or an
    error string when the node is missing or of the wrong type.
    """
    try:
        node = NID.objects.get(id=o)
        module_name = node.ref._meta.module_name
        if module_name not in ('objecttype', 'metatype'):
            return "Not of type Objecttype or metatype"
        parent = Objecttype.objects.get(id=o)
        members = parent.get_members  # presumably a property yielding member objects — TODO confirm
        for member in members:
            if str(member.title) == d['title']:
                return "Object", d['title'], "already exists for", parent.title
        # BUG FIX: the original only created the object when a counter
        # reached len(members) *inside* the loop, so an objecttype with
        # no members silently returned None and nothing was created.
        gbobject = Gbobject(title=d['title'], slug=d['slug'])
        gbobject.save()
        gbobject.objecttypes.add(parent)
        return gbobject.id
    except NID.DoesNotExist:
        return "Node does not exist"
def set_object(d, o):
    """ Given a objecttypeid and a dictionary of title,slug,it creates an Object for that objecttypeid => metaWeblog.setAttributetype(d['title' = '',slug = ''],objecttypeid)

    Returns the new Gbobject's id on success, a message tuple when an
    object with that title already exists under the objecttype, or an
    error string when the node is missing or of the wrong type.
    """
    try:
        node = NID.objects.get(id=o)
        module_name = node.ref._meta.module_name
        if module_name not in ('objecttype', 'metatype'):
            return "Not of type Objecttype or metatype"
        parent = Objecttype.objects.get(id=o)
        members = parent.get_members  # presumably a property yielding member objects — TODO confirm
        for member in members:
            if str(member.title) == d['title']:
                return "Object", d['title'], "already exists for", parent.title
        # BUG FIX: the original only created the object when a counter
        # reached len(members) *inside* the loop, so an objecttype with
        # no members silently returned None and nothing was created.
        gbobject = Gbobject(title=d['title'], slug=d['slug'])
        gbobject.save()
        gbobject.objecttypes.add(parent)
        return gbobject.id
    except NID.DoesNotExist:
        return "Node does not exist"
def import_gbobjects(self, feed_gbobjects):
    """Import each feed entry as a Gbobject, skipping entries whose
    creation day and slug already exist in the database."""
    for entry in feed_gbobjects:
        self.write_out('> %s... ' % entry.title)
        created = datetime(*entry.date_parsed[:6])
        slug = slugify(entry.title)[:255]
        duplicates = Gbobject.objects.filter(
            creation_date__year=created.year,
            creation_date__month=created.month,
            creation_date__day=created.day,
            slug=slug)
        if duplicates:
            self.write_out(
                self.style.NOTICE('SKIPPED (already imported)\n'))
            continue
        objecttypes = self.import_objecttypes(entry)
        fields = {
            'title': entry.title[:255],
            'content': entry.description,
            'excerpt': entry.get('summary'),
            'status': PUBLISHED,
            'creation_date': created,
            'start_publication': created,
            'last_update': datetime.now(),
            'slug': slug,
        }
        if not fields['excerpt'] and self.auto_excerpt:
            # Fall back to a truncated, tag-stripped description.
            fields['excerpt'] = truncate_words(
                strip_tags(entry.description), 50)
        if self.Objecttype_tag:
            fields['tags'] = self.import_tags(objecttypes)
        gbobject = Gbobject(**fields)
        gbobject.save()
        gbobject.objecttypes.add(*objecttypes)
        gbobject.sites.add(self.SITE)
        if self.default_author:
            gbobject.authors.add(self.default_author)
        elif entry.get('author_detail'):
            username = slugify(entry.author_detail.get('name'))
            try:
                user = User.objects.create_user(
                    username, entry.author_detail.get('email', ''))
            except IntegrityError:
                # The user already exists; reuse the existing account.
                user = User.objects.get(username=username)
            gbobject.authors.add(user)
        self.write_out(self.style.ITEM('OK\n'))
def import_gbobjects(self, feed_gbobjects):
    """Create a Gbobject for every feed entry not yet imported
    (matched by creation day plus slug)."""
    for feed_entry in feed_gbobjects:
        self.write_out('> %s... ' % feed_entry.title)
        creation_date = datetime(*feed_entry.date_parsed[:6])
        slug = slugify(feed_entry.title)[:255]
        if Gbobject.objects.filter(
                creation_date__year=creation_date.year,
                creation_date__month=creation_date.month,
                creation_date__day=creation_date.day,
                slug=slug):
            self.write_out(
                self.style.NOTICE('SKIPPED (already imported)\n'))
            continue
        objecttypes = self.import_objecttypes(feed_entry)
        gbobject_dict = {
            'title': feed_entry.title[:255],
            'content': feed_entry.description,
            'excerpt': feed_entry.get('summary'),
            'status': PUBLISHED,
            'creation_date': creation_date,
            'start_publication': creation_date,
            'last_update': datetime.now(),
            'slug': slug,
        }
        if not gbobject_dict['excerpt'] and self.auto_excerpt:
            # No summary in the feed: derive one from the description.
            gbobject_dict['excerpt'] = truncate_words(
                strip_tags(feed_entry.description), 50)
        if self.Objecttype_tag:
            gbobject_dict['tags'] = self.import_tags(objecttypes)
        gbobject = Gbobject(**gbobject_dict)
        gbobject.save()
        gbobject.objecttypes.add(*objecttypes)
        gbobject.sites.add(self.SITE)
        if self.default_author:
            gbobject.authors.add(self.default_author)
        elif feed_entry.get('author_detail'):
            author_name = slugify(feed_entry.author_detail.get('name'))
            try:
                user = User.objects.create_user(
                    author_name,
                    feed_entry.author_detail.get('email', ''))
            except IntegrityError:
                # Account already present — look it up instead.
                user = User.objects.get(username=author_name)
            gbobject.authors.add(user)
        self.write_out(self.style.ITEM('OK\n'))
def inner_parse(id):
    """Gets a dict, parses and saves it.

    Fetches the object dict over XML-RPC, converts its four xmlrpc
    DateTime fields into ``datetime`` objects, and saves a Gbobject
    built from the dict.

    NOTE: the parameter name ``id`` shadows the builtin; kept as-is so
    keyword callers are not broken.
    """
    data = srv.metaWeblog.dict_id(id)  # renamed from ``dict`` (shadowed the builtin)
    # Raw string: '\d' is an invalid escape sequence in a plain literal.
    pattern = r"^(\d{4})(\d{2})(\d{2}).(\d{2}).(\d{2}).(\d{2})$"

    def _to_datetime(value):
        """Convert one xmlrpc DateTime value to a datetime instance."""
        # make_comparable returns a pair; index 1 is the string form.
        text = DateTime().make_comparable(value)[1]
        parts = re.search(pattern, text).group(1, 2, 3, 4, 5, 6)
        return datetime(*(int(part) for part in parts))

    # The four date fields all need the identical conversion.
    for key in ('creation_date', 'last_update',
                'end_publication', 'start_publication'):
        data[key] = _to_datetime(data[key])
    Gbobject(**data).save()
def setUp(self):
    """Build an unsaved test Gbobject, snapshot the settings this
    suite mutates, and force DEBUG off for the test run."""
    self.gbobject = Gbobject(title='My gbobject',
                             content='My content',
                             slug='my-gbobject')
    self.original_debug = settings.DEBUG
    self.original_rendering = models_settings.MARKUP_LANGUAGE
    settings.DEBUG = False
def import_posts(self):
    """Migrate every post of the configured Blogger blog into a
    Gbobject, skipping posts already migrated (same creation date
    and slug) and importing their comments when available."""
    objecttype = self.get_Objecttype()
    self.write_out(self.style.STEP("- Importing gbobjects\n"))
    for post in self.blogger_manager.get_posts(self.blogger_blog_id):
        creation_date = convert_blogger_timestamp(post.published.text)
        status = DRAFT if is_draft(post) else PUBLISHED
        title = post.title.text or ""
        content = post.content.text or ""
        slug = slugify(post.title.text or get_post_id(post))[:255]
        try:
            existing = Gbobject.objects.get(creation_date=creation_date,
                                            slug=slug)
        except Gbobject.DoesNotExist:
            gbobject = Gbobject(status=status,
                                title=title,
                                content=content,
                                creation_date=creation_date,
                                slug=slug)
            if self.default_author:
                gbobject.author = self.default_author
            gbobject.tags = ",".join(slugify(cat.term)
                                     for cat in post.Objecttype)
            gbobject.last_update = convert_blogger_timestamp(
                post.updated.text)
            gbobject.save()
            gbobject.sites.add(self.SITE)
            gbobject.objecttypes.add(objecttype)
            # NOTE(review): added unconditionally, unlike the guarded
            # assignment above — presumably default_author is always
            # set for this command; confirm.
            gbobject.authors.add(self.default_author)
            try:
                self.import_comments(gbobject, post)
            except gdata_service.RequestError:
                # comments not available for this post
                pass
            output = self.style.ITEM(
                "> Migrated %s + %s comments\n"
                % (gbobject.title,
                   len(Comment.objects.for_model(gbobject))))
        else:
            output = self.style.NOTICE(
                "> Skipped %s (already migrated)\n" % existing)
        self.write_out(output)
def import_posts(self):
    """Import the configured Blogger blog's posts as Gbobjects,
    leaving already-migrated posts untouched and pulling in their
    comments where the API allows it."""
    parent_type = self.get_Objecttype()
    self.write_out(self.style.STEP('- Importing gbobjects\n'))
    for post in self.blogger_manager.get_posts(self.blogger_blog_id):
        creation_date = convert_blogger_timestamp(post.published.text)
        status = DRAFT if is_draft(post) else PUBLISHED
        title = post.title.text or ''
        content = post.content.text or ''
        slug = slugify(post.title.text or get_post_id(post))[:255]
        try:
            duplicate = Gbobject.objects.get(
                creation_date=creation_date, slug=slug)
            output = self.style.NOTICE(
                '> Skipped %s (already migrated)\n' % duplicate)
        except Gbobject.DoesNotExist:
            gbobject = Gbobject(status=status, title=title,
                                content=content,
                                creation_date=creation_date, slug=slug)
            if self.default_author:
                gbobject.author = self.default_author
            gbobject.tags = ','.join(slugify(cat.term)
                                     for cat in post.Objecttype)
            gbobject.last_update = convert_blogger_timestamp(
                post.updated.text)
            gbobject.save()
            gbobject.sites.add(self.SITE)
            gbobject.objecttypes.add(parent_type)
            # NOTE(review): unguarded, unlike the assignment above —
            # presumably default_author is always set here; confirm.
            gbobject.authors.add(self.default_author)
            try:
                self.import_comments(gbobject, post)
            except gdata_service.RequestError:
                # comments not available for this post
                pass
            output = self.style.ITEM(
                '> Migrated %s + %s comments\n'
                % (gbobject.title,
                   len(Comment.objects.for_model(gbobject))))
        self.write_out(output)