def load_press_releases():
    """Migrate published press releases from the old database API.

    For each ``(post, program_id)`` pair yielded by
    ``NAClient().get_press_releases()``, creates a ``PressRelease`` under the
    owning program's 'Press Releases' homepage, or updates the existing one
    when the source post has been modified since the last run
    (``need_to_update_post``). Integrity errors are reported and skipped so a
    single bad record does not abort the whole migration.
    """
    for post, program_id in NAClient().get_press_releases():
        if post['status'] != "published":
            continue
        try:
            post_parent_program = get_program(program_id)
            parent_program_press_releases_homepage = get_content_homepage(
                post_parent_program,
                ProgramPressReleasesPage,
                'Press Releases',
            )
            press_release_slug = slugify(post['title'])
            new_press_release = PressRelease.objects.filter(
                slug=press_release_slug).first()
            if not new_press_release and press_release_slug:
                # No existing page with this slug: create a fresh one.
                new_press_release = PressRelease(
                    search_description='',
                    seo_title='',
                    depth=5,
                    show_in_menus=False,
                    slug=press_release_slug,
                    title=post['title'],
                    date=get_post_date(post['publish_at']),
                    subheading=post['sub_headline'],
                    body=json.dumps([
                        {
                            'type': 'paragraph',
                            'value': post['content']
                        }
                    ]),
                    attachment=json.dumps(
                        get_attachments(
                            post['attachments'],
                            press_release_slug
                        )
                    ),
                    story_excerpt=get_summary(post['summary']),
                    story_image=download_image(
                        post['cover_image_url'],
                        press_release_slug + "_image.jpeg"
                    ),
                )
                # add_child places the page in the Wagtail tree; save persists it.
                parent_program_press_releases_homepage.add_child(
                    instance=new_press_release)
                new_press_release.save()
                get_post_authors(new_press_release, post['authors'])
                connect_programs_to_post(new_press_release, post['programs'])
            elif (new_press_release and press_release_slug
                    and need_to_update_post(post['modified'])):
                # Existing page and the source changed: refresh its fields.
                new_press_release.search_description = ''
                new_press_release.seo_title = ''
                new_press_release.depth = 5
                new_press_release.date = get_post_date(post['publish_at'])
                new_press_release.show_in_menus = False
                new_press_release.slug = press_release_slug
                new_press_release.title = post['title']
                new_press_release.body = json.dumps([
                    {
                        'type': 'paragraph',
                        'value': post['content']
                    }
                ])
                new_press_release.attachment = json.dumps(
                    get_attachments(
                        post['attachments'],
                        press_release_slug
                    )
                )
                new_press_release.story_image = download_image(
                    post['cover_image_url'],
                    press_release_slug + "_image.jpeg"
                )
                new_press_release.subheading = post['sub_headline']
                new_press_release.save()
                get_post_authors(new_press_release, post['authors'])
                connect_programs_to_post(new_press_release, post['programs'])
        except django.db.utils.IntegrityError as e:
            # Previously swallowed silently; report so failed records are
            # visible in the migration log, then continue with the next post.
            print("IntegrityError loading press release '%s': %s"
                  % (post['title'], e))
def transform_itn_to_articles():
    """Convert selected 'in the news' posts into Article pages.

    Reads a CSV mapping (``inthenews_to_article_mapping``) of posts that
    should become articles. Content is pulled from the new production
    database (``Quoted`` pages) rather than the old API so that manual
    edits/cleanup are preserved. Posts whose slug cannot be found are
    appended to ``missing_itn_pieces.txt`` for follow-up.
    """
    article_mapping = inthenews_to_article_mapping()
    for item in article_mapping:
        # Fixed: was a Python-2 print statement (`print x`), a syntax error
        # under Python 3 and inconsistent with the print() calls below.
        print(article_mapping[item]['id'])
        slug = slugify(article_mapping[item]['title'])
        print(slug)
        old_post = Quoted.objects.filter(slug=slug).first()
        if old_post:
            try:
                print("found existing post")
                # Ancestor index 2 is the parent program page in the tree.
                parent_program = old_post.get_ancestors()[2]
                parent_program_articles_homepage = get_content_homepage(
                    parent_program,
                    ProgramArticlesPage,
                    'Articles',
                )
                new_post = Article(
                    title=old_post.title,
                    date=old_post.date,
                    slug=old_post.slug,
                    body=old_post.body,
                    depth=old_post.depth,
                    programs=old_post.programs.all(),
                    source=old_post.source,
                    source_url=old_post.source_url,
                    authors=old_post.authors.all(),
                    story_excerpt=old_post.story_excerpt,
                    story_image=old_post.story_image,
                )
                parent_program_articles_homepage.add_child(instance=new_post)
                new_post.save()
                print("--------saved new post--------")
            except django.core.exceptions.ValidationError:
                # Slug collision: an Article already exists; update it instead.
                existing_article = Article.objects.filter(slug=slug).first()
                if existing_article:
                    print(
                        "found in the news that already was turned into an article - going to update it now"
                    )
                    existing_article.title = old_post.title
                    existing_article.date = old_post.date
                    existing_article.slug = old_post.slug
                    existing_article.body = old_post.body
                    existing_article.depth = old_post.depth
                    existing_article.programs = old_post.programs.all()
                    existing_article.source = old_post.source
                    existing_article.source_url = old_post.source_url
                    existing_article.authors = old_post.authors.all()
                    existing_article.story_excerpt = old_post.story_excerpt
                    existing_article.story_image = old_post.story_image
                    existing_article.save()
                    print('existing article has been updated!')
        else:
            # Record unmatched posts so they can be investigated later.
            with open("missing_itn_pieces.txt", "a") as myfile:
                myfile.write(article_mapping[item]['id'] + " " + slug + "\n")
            print('did not find the old post here')
def load_events():
    """Migrate published events from the old database API.

    For each ``(post, program_id)`` pair yielded by
    ``NAClient().get_events()``, creates an ``Event`` under the owning
    program's 'Events' homepage, or updates the existing one when the source
    post has been modified (``need_to_update_post``). Field values come from
    the ``event_data`` dict built by ``get_event_data``. Integrity errors are
    reported and skipped so one bad record does not abort the run.
    """
    for post, program_id in NAClient().get_events():
        if post['status'] != "published":
            continue
        try:
            post_parent_program = get_program(program_id)
            parent_program_events_homepage = get_content_homepage(
                post_parent_program,
                ProgramEventsPage,
                'Events',
            )
            event_slug = slugify(post['title'])
            new_event = Event.objects.filter(slug=event_slug).first()
            event_data = get_event_data(post)
            if not new_event and event_slug:
                # No existing event with this slug: create a fresh page.
                new_event = Event(
                    search_description='',
                    seo_title='',
                    depth=5,
                    show_in_menus=False,
                    slug=event_slug,
                    title=post['title'],
                    subheading=post['sub_headline'],
                    date=event_data['date'],
                    end_date=event_data['end_date'],
                    start_time=event_data['start_time'],
                    end_time=event_data['end_time'],
                    host_organization=event_data['host_organization'],
                    street_address=event_data['street_address'],
                    city=event_data['city'],
                    state=event_data['state'],
                    zipcode=event_data['zipcode'],
                    rsvp_link=event_data['rsvp_link'],
                    body=json.dumps([{
                        'type': 'paragraph',
                        'value': post['content']
                    }]),
                    soundcloud_url=post['soundcloud_url'],
                    story_image=download_image(
                        post['cover_image_url'],
                        event_slug + "_image.jpeg"),
                    story_excerpt=get_summary(post['summary']),
                )
                parent_program_events_homepage.add_child(instance=new_event)
                new_event.save()
                connect_programs_to_post(new_event, post['programs'])
            elif (new_event and event_slug
                    and need_to_update_post(post['modified'])):
                # Existing event and the source changed: refresh its fields.
                new_event.search_description = ''
                new_event.seo_title = ''
                new_event.depth = 5
                new_event.date = event_data['date']
                new_event.end_date = event_data['end_date']
                new_event.start_time = event_data['start_time']
                new_event.end_time = event_data['end_time']
                new_event.host_organization = event_data['host_organization']
                new_event.street_address = event_data['street_address']
                new_event.city = event_data['city']
                new_event.state = event_data['state']
                new_event.zipcode = event_data['zipcode']
                new_event.rsvp_link = event_data['rsvp_link']
                new_event.show_in_menus = False
                new_event.slug = event_slug
                new_event.title = post['title']
                new_event.subheading = post['sub_headline']
                new_event.body = json.dumps([{
                    'type': 'paragraph',
                    'value': post['content']
                }])
                new_event.story_image = download_image(
                    post['cover_image_url'],
                    event_slug + "_image.jpeg")
                new_event.story_excerpt = get_summary(post['summary'])
                new_event.soundcloud_url = post['soundcloud_url']
                new_event.save()
                connect_programs_to_post(new_event, post['programs'])
        except django.db.utils.IntegrityError as e:
            # Previously swallowed silently; report so failed records are
            # visible in the migration log, then continue with the next post.
            print("IntegrityError loading event '%s': %s"
                  % (post['title'], e))
def load_education_blog_posts():
    """Import blog posts from the EdCentral CSV into the Education Policy
    Program's 'EdCentral' blog homepage, creating new ``BlogPost`` pages or
    updating existing ones matched by slug.
    """
    for entry in edcentral_blog_mapping():
        # The CSV header row carries the literal value 'title'; skip it.
        if entry['title'] == 'title':
            continue
        print(entry['title'])
        post_parent = get_program('5')
        parent_blog_homepage = get_content_homepage(
            post_parent,
            ProgramBlogPostsPage,
            'EdCentral',
        )
        slug = entry['slug']
        print(slug)
        existing = BlogPost.objects.filter(slug=slug).first()
        if not existing and slug:
            # Fresh post: build, attach to the tree, then wire up authors
            # and subprograms.
            created = BlogPost(
                search_description='',
                seo_title='',
                depth=5,
                show_in_menus=False,
                slug=slug,
                title=entry['title'],
                date=entry['real_date'],
                body=json.dumps([{
                    'type': 'paragraph',
                    'value': entry['content']
                }]),
                story_excerpt=get_summary(entry['excerpt']),
            )
            parent_blog_homepage.add_child(instance=created)
            created.save()
            print(
                "-------------------ADDED NEW EDCENTRAL POST----------------------"
            )
            get_education_authors(created, entry['author'])
            connect_subprograms_to_post(
                created, clean_subprograms_for_ed(entry['categories']))
        elif existing and slug:
            # Post already present: overwrite its fields from the CSV row.
            existing.search_description = ''
            existing.seo_title = ''
            existing.depth = 5
            existing.show_in_menus = False
            existing.slug = slug
            existing.title = entry['title']
            existing.date = entry['real_date']
            existing.body = json.dumps([{
                'type': 'paragraph',
                'value': entry['content']
            }])
            existing.story_excerpt = get_summary(entry['excerpt'])
            get_education_authors(existing, entry['author'])
            connect_subprograms_to_post(
                existing, clean_subprograms_for_ed(entry['categories']))
            print(
                "-------------------UPDATED EXISTING EDCENTRAL POST----------------------"
            )
            existing.save()
def load_podcasts():
    """Migrate published podcasts from the old database API.

    For each ``(post, program_id)`` pair yielded by
    ``NAClient().get_podcasts()``, creates a ``Podcast`` under the owning
    program's 'Podcasts' homepage, or updates the existing one when the
    source post has been modified (``need_to_update_post``). Integrity
    errors are reported and skipped so one bad record does not abort the run.
    """
    for post, program_id in NAClient().get_podcasts():
        if post['status'] != "published":
            continue
        try:
            post_parent_program = get_program(program_id)
            parent_program_podcasts_homepage = get_content_homepage(
                post_parent_program,
                ProgramPodcastsPage,
                'Podcasts',
            )
            podcast_slug = slugify(post['title'])
            new_podcast = Podcast.objects.filter(slug=podcast_slug).first()
            if not new_podcast and podcast_slug:
                # No existing podcast with this slug: create a fresh page.
                new_podcast = Podcast(
                    search_description='',
                    seo_title='',
                    depth=5,
                    show_in_menus=False,
                    slug=podcast_slug,
                    title=post['title'],
                    date=get_post_date(post['publish_at']),
                    subheading=post['sub_headline'],
                    body=json.dumps([
                        {
                            'type': 'paragraph',
                            'value': post['content']
                        }
                    ]),
                    soundcloud=json.dumps([
                        {
                            'type': 'soundcloud_embed',
                            'value': post['soundcloud_url']
                        }
                    ]),
                    story_excerpt=get_summary(post['summary']),
                )
                parent_program_podcasts_homepage.add_child(
                    instance=new_podcast
                )
                print("new podcast")
                print(post['id'])
                new_podcast.save()
                get_post_authors(new_podcast, post['authors'])
                connect_programs_to_post(new_podcast, post['programs'])
            elif (new_podcast and podcast_slug
                    and need_to_update_post(post['modified'])):
                # Existing podcast and the source changed: refresh its fields.
                new_podcast.search_description = ''
                new_podcast.seo_title = ''
                new_podcast.depth = 5
                new_podcast.date = get_post_date(post['publish_at'])
                new_podcast.show_in_menus = False
                new_podcast.slug = podcast_slug
                new_podcast.title = post['title']
                new_podcast.body = json.dumps([
                    {
                        'type': 'paragraph',
                        'value': post['content']
                    }
                ])
                new_podcast.soundcloud = json.dumps([
                    {
                        'type': 'soundcloud_embed',
                        'value': post['soundcloud_url']
                    }
                ])
                new_podcast.subheading = post['sub_headline']
                print("updating podcast")
                print(post['id'])
                new_podcast.save()
                get_post_authors(new_podcast, post['authors'])
                connect_programs_to_post(new_podcast, post['programs'])
        except django.db.utils.IntegrityError as e:
            # Previously swallowed silently; report so failed records are
            # visible in the migration log, then continue with the next post.
            print("IntegrityError loading podcast '%s': %s"
                  % (post['title'], e))
def load_general_blogs():
    """Turn selected old-API articles into ``BlogPost`` pages.

    Walks ``NAClient().get_general_blogs()`` and, for posts that appear in
    the cleaned CSV mapping (``load_general_blog_mapping``) under a matching
    program id, creates a ``BlogPost`` beneath that program's 'Our Blog'
    homepage. Existing slugs are left untouched.
    """
    general_blog_mapping = load_general_blog_mapping()
    for post, program_id in NAClient().get_general_blogs():
        # Guard clauses replace the original nested-if pyramid; the
        # sequence of prints and side effects is unchanged.
        if post['status'] != "published":
            continue
        post_id = str(post['id'])
        print(post_id)
        mapped_blog_post = general_blog_mapping.get(post_id, None)
        if not mapped_blog_post:
            continue
        print(post['id'])
        print("found this id above in the csv - adding blog")
        mapped_programs = mapped_blog_post['program'].split(',')
        program_id = str(program_id)
        print('these are the mapped programs')
        print(mapped_programs)
        if program_id not in mapped_programs:
            continue
        print(program_id)
        print("found program id above in the mapped programs")
        post_parent = get_program(program_id)
        parent_blog_homepage = get_content_homepage(
            post_parent,
            ProgramBlogPostsPage,
            'Our Blog',
        )
        general_blog_post_slug = post['slug']
        general_blog_post = BlogPost.objects.filter(
            slug=general_blog_post_slug).first()
        if general_blog_post or not general_blog_post_slug:
            continue
        general_blog_post = BlogPost(
            search_description='',
            seo_title='',
            depth=5,
            show_in_menus=False,
            slug=general_blog_post_slug,
            title=post['title'],
            date=get_post_date(post['publish_at']),
            subheading=post['sub_headline'],
            body=json.dumps([{
                'type': 'paragraph',
                'value': post['content']
            }]),
            story_excerpt=get_summary(post['summary']),
            story_image=download_image(
                post['cover_image_url'],
                general_blog_post_slug + "_image.jpeg"),
        )
        parent_blog_homepage.add_child(instance=general_blog_post)
        general_blog_post.save()
        get_post_authors(general_blog_post, post['authors'])
        connect_programs_to_post(general_blog_post, post['programs'])
        print("----------------------ADDED NEW BLOG POST------")
        print(post_id)
def _create_asset_blog_post(post, parent_blog_homepage, depth):
    """Create a BlogPost for *post* under *parent_blog_homepage* at the given
    tree *depth*, unless a post with the same slug already exists."""
    asset_blog_post_slug = post['slug']
    new_blog_post = BlogPost.objects.filter(
        slug=asset_blog_post_slug).first()
    if not new_blog_post and asset_blog_post_slug:
        new_blog_post = BlogPost(
            search_description='',
            seo_title='',
            depth=depth,
            show_in_menus=False,
            slug=asset_blog_post_slug,
            title=post['title'],
            date=get_post_date(post['publish_at']),
            subheading=post['sub_headline'],
            body=json.dumps([{
                'type': 'paragraph',
                'value': post['content']
            }]),
            story_excerpt=get_summary(post['summary']),
            story_image=download_image(
                post['cover_image_url'],
                asset_blog_post_slug + "_image.jpeg"),
        )
        parent_blog_homepage.add_child(instance=new_blog_post)
        new_blog_post.save()
        get_post_authors(new_blog_post, post['authors'])


def load_asset_blogs():
    """Turn selected Asset Building old-API articles into ``BlogPost`` pages.

    Posts listed in the cleaned CSV mapping (``load_asset_blog_mapping``)
    are created either under a specific initiative's blog (tree depth 6)
    when the mapping names one, or under the Asset Building program's
    'Our Blog' homepage (tree depth 5) otherwise. The duplicated creation
    logic of the original two branches is factored into
    ``_create_asset_blog_post``.
    """
    asset_blog_mapping = load_asset_blog_mapping()
    for post in NAClient().get_asset_blog_posts():
        if post['status'] != "published":
            continue
        post_id = str(post['id'])
        print(post_id)
        mapped_asset_blog_post = asset_blog_mapping.get(post_id, None)
        if not mapped_asset_blog_post:
            continue
        if mapped_asset_blog_post['initiative']:
            print("adding asset initiative blog")
            print(mapped_asset_blog_post['initiative'])
            post_parent = get_subprogram(
                'Asset Building', mapped_asset_blog_post['initiative'])
            parent_blog_homepage = get_content_homepage(
                post_parent,
                ProgramBlogPostsPage,
                mapped_asset_blog_post['blog'],
            )
            # Initiative blogs sit one level deeper in the page tree.
            _create_asset_blog_post(post, parent_blog_homepage, 6)
        else:
            print("adding asset blog")
            print(post['id'])
            # '15' is the Asset Building program id in the old database.
            post_parent = get_program('15')
            parent_blog_homepage = get_content_homepage(
                post_parent,
                ProgramBlogPostsPage,
                'Our Blog',
            )
            _create_asset_blog_post(post, parent_blog_homepage, 5)