def add_children(parent, num, slug):
    """Create ``num`` published BlogPage children beneath ``parent``.

    Each child is titled and slugged from ``slug`` plus its index, given a
    per-child tag and a tag shared by all siblings, then published.
    """
    for index in range(num):
        page = BlogPage(
            title="{} child {}".format(slug, index),
            slug="{}-child-{}".format(slug, index),
        )
        new_child = parent.add_child(instance=page)
        new_child.tags.add(u'tag{}'.format(index))
        new_child.tags.add(u'shared-tag')
        publish_changes(new_child)
def check_page_content(self, page_cls, field):
    """Publish a ``page_cls`` page carrying an email signup atom in
    ``field`` and assert the rendered response shows the signup module.
    """
    new_page = page_cls(slug='slug', title='title')
    publish_page(child=new_page)
    set_stream_data(new_page, field, [atomic.email_signup])
    publish_changes(child=new_page)
    rendered = self.client.get('/slug/')
    self.assertContains(rendered, 'Email Sign Up')
def add_children(parent, num, slug):
    """Add ``num`` published, tagged BlogPage children under ``parent``."""
    for i in range(num):
        # Derive both title and slug from the parent slug and index.
        child_title = "{} child {}".format(slug, i)
        child_slug = "{}-child-{}".format(slug, i)
        child = parent.add_child(
            instance=BlogPage(title=child_title, slug=child_slug)
        )
        for tag in (u'tag{}'.format(i), u'shared-tag'):
            child.tags.add(tag)
        publish_changes(child)
def update_chart_blocks(self, date_published, last_updated):
    """Update date_published on all chart blocks.

    Sets ``date_published`` and ``last_updated_projected_data`` on every
    chart_block of every BrowsePage, republishing only pages that contain
    at least one chart block.
    """
    for page in BrowsePage.objects.all():
        # Use a list comprehension rather than filter(): on Python 3,
        # filter() returns a lazy iterator that is always truthy, so the
        # emptiness check below would never skip a page and every page
        # would be republished. A list behaves the same on Python 2 and 3.
        chart_blocks = [
            item for item in page.specific.content.stream_data
            if item['type'] == 'chart_block'
        ]
        if not chart_blocks:
            continue
        for chart in chart_blocks:
            chart['value']['date_published'] = date_published
            chart['value']['last_updated_projected_data'] = last_updated
        publish_changes(page.specific)
def update_chart_blocks(self, date_published, last_updated):
    """Update date_published on all chart blocks.

    Sets ``date_published`` and ``last_updated_projected_data`` on every
    chart_block of every BrowsePage, republishing only pages that contain
    at least one chart block.
    """
    for page in BrowsePage.objects.all():
        # List comprehension instead of filter(): Python 3's filter()
        # yields a lazy, always-truthy iterator, which would defeat the
        # emptiness check below and republish every page. A concrete list
        # behaves identically on Python 2 and 3.
        chart_blocks = [
            item for item in page.specific.content.stream_data
            if item['type'] == 'chart_block'
        ]
        if not chart_blocks:
            continue
        for chart in chart_blocks:
            chart['value']['date_published'] = date_published
            chart['value']['last_updated_projected_data'] = last_updated
        publish_changes(page.specific)
def run():
    """Rewrite anchor links inside every table cell on every page.

    Parses each non-empty cell as HTML, converts its links via
    ``convert_links``, and republishes any page whose markup changed.
    """
    for page in CFGOVPage.objects.all():
        modified = False
        for block in get_tableblocks(page):
            for row in block['value']['data']:
                for column, cell_html in enumerate(row):
                    if not cell_html:
                        continue
                    soup = BeautifulSoup(cell_html, 'html.parser')
                    anchors = soup.findAll('a')
                    if anchors and convert_links(anchors):
                        # Write the converted markup back into the cell.
                        row[column] = str(soup)
                        modified = True
        if modified:
            publish_changes(page.specific)
def handle(self, *args, **options):
    """Read markets from file into update dicts.

    Loads the JSON file named by the ``snapshot_file`` option, matches
    each market to its data snapshot stream block, copies the market's
    origination/inquiry/tightness figures into the snapshot, and
    republishes the browse page the snapshot lives on.
    """
    with open(self.expand_path(options['snapshot_file'])) as json_data:
        data = json.load(json_data)
    markets = data['markets']
    snapshots = self.get_data_snapshots()
    for market in markets:
        key = market['market_key']
        snapshot_stream_data = self.find_data_snapshot(key, snapshots)
        if not snapshot_stream_data:
            # Market may not have been added to Wagtail yet  # noqa
            logger.warning('Market key {} not found'.format(key))
            continue
        # Update snapshot fields with the provided values.
        snapshot = snapshot_stream_data[0]['value']
        snapshot['last_updated_projected_data'] = market['data_month']
        snapshot['num_originations'] = market['num_originations']
        snapshot['value_originations'] = market['value_originations']
        snapshot['year_over_year_change'] = market['year_over_year_change']
        # Update inquiry index info if it exists for this market;
        # otherwise blank the fields out.
        if "inquiry_yoy_change" in market:
            snapshot['inquiry_month'] = market['inquiry_month']
            snapshot['inquiry_year_over_year_change'] = \
                market['inquiry_yoy_change']
        else:
            snapshot['inquiry_month'] = ""
            snapshot['inquiry_year_over_year_change'] = ""
        # Update tightness index info if it exists for this market;
        # otherwise blank the fields out.
        if "tightness_yoy_change" in market:
            snapshot['tightness_month'] = market['tightness_month']
            snapshot['tightness_year_over_year_change'] = \
                market['tightness_yoy_change']
        else:
            snapshot['tightness_month'] = ""
            snapshot['tightness_year_over_year_change'] = ""
        # Publish changes to the browse page the data snapshot lives on.
        # NOTE: 'page' must be removed from the snapshot dict before the
        # stream data is written back — order matters here.
        page = BrowsePage.objects.get(pk=snapshot['page'])
        del snapshot['page']
        set_stream_data(page, 'content', snapshot_stream_data)
        publish_changes(page)
def run():
    """Convert the links in every table cell across all pages.

    Each non-empty cell is parsed as HTML; if it contains anchors and
    ``convert_links`` reports a change, the cell is replaced with the
    converted markup and the page is republished afterwards.
    """
    for current_page in CFGOVPage.objects.all():
        page_dirty = False
        table_blocks = get_tableblocks(current_page)
        for table_block in table_blocks:
            for table_row in table_block['value']['data']:
                for position, contents in enumerate(table_row):
                    if not contents:
                        continue
                    parsed = BeautifulSoup(contents, 'html.parser')
                    found_links = parsed.findAll('a')
                    if found_links and convert_links(found_links):
                        # Set the item to the modified HTML.
                        table_row[position] = str(parsed)
                        page_dirty = True
        if page_dirty:
            publish_changes(current_page.specific)
def update_chart_blocks(self, date_published, last_updated, markets):
    """Update date_published on all chart blocks.

    ``line-index`` charts receive a per-market inquiry month looked up
    via ``get_inquiry_month``; every other chart type receives
    ``last_updated``. Only pages with chart blocks are republished.
    """
    for page in BrowsePage.objects.all():
        # List comprehension, not filter(): on Python 3 filter() is a
        # lazy, always-truthy iterator, so the emptiness check below
        # would never skip a page and every page would be republished.
        chart_blocks = [
            item for item in page.specific.content.stream_data
            if item['type'] == 'chart_block'
        ]
        if not chart_blocks:
            continue
        for chart in chart_blocks:
            chart_options = chart['value']
            chart['value']['date_published'] = date_published
            if chart_options['chart_type'] == 'line-index':
                last_updated_inquiry = get_inquiry_month(
                    markets, chart_options['data_source'])
                chart['value']['last_updated_projected_data'] = \
                    last_updated_inquiry
            else:
                chart['value']['last_updated_projected_data'] = \
                    last_updated
        publish_changes(page.specific)
def handle(self, *args, **options):
    """Read markets from file into update dicts and publish the changes.

    Loads the JSON snapshot file named by the ``snapshot_file`` option,
    matches each market to its data snapshot, copies the market's
    origination/inquiry/tightness figures into it, and republishes the
    browse page each snapshot lives on.
    """
    with open(self.expand_path(options['snapshot_file'])) as json_data:
        data = json.load(json_data)
    markets = data['markets']
    snapshots = self.get_data_snapshots()
    for market in markets:
        key = market['market_key']
        snapshot = self.find_data_snapshot(key, snapshots)
        if not snapshot:
            # Market may not have been added to Wagtail yet.
            # logger.warn() is a deprecated alias of logger.warning().
            logger.warning('Market key {} not found'.format(key))
            continue
        # Update snapshot fields with the provided values.
        snapshot['last_updated_projected_data'] = market['data_month']
        snapshot['num_originations'] = market['num_originations']
        snapshot['value_originations'] = market['value_originations']
        snapshot['year_over_year_change'] = market['year_over_year_change']
        # Update inquiry index info if it exists for this market;
        # otherwise blank the fields out.
        if "inquiry_yoy_change" in market:
            snapshot['inquiry_month'] = market['inquiry_month']
            snapshot['inquiry_year_over_year_change'] = \
                market['inquiry_yoy_change']
        else:
            snapshot['inquiry_month'] = ""
            snapshot['inquiry_year_over_year_change'] = ""
        # Update tightness index info if it exists for this market;
        # otherwise blank the fields out.
        if "tightness_yoy_change" in market:
            snapshot['tightness_month'] = market['tightness_month']
            snapshot['tightness_year_over_year_change'] = \
                market['tightness_yoy_change']
        else:
            snapshot['tightness_month'] = ""
            snapshot['tightness_year_over_year_change'] = ""
        # Publish changes to the browse page the data snapshot lives on.
        page = snapshot['page']
        publish_changes(page.specific)
def create_tableblocks_for_every_table(apps, schema_editor):
    """Migrate legacy tables to tableblocks on every relevant page type.

    First updates tables held directly in the content field, then tables
    embedded in full width text organisms, republishing each page after
    its update.
    """
    content_querysets = (
        BrowsePage.objects.all(),
        SublandingPage.objects.all(),
        LearnPage.objects.all(),
        DocumentDetailPage.objects.all(),
    )
    for queryset in content_querysets:
        for page in queryset:
            update_content_tables(wagtail_page=page)
            publish_changes(child=page)

    full_width_text_querysets = (
        BlogPage.objects.all(),
        BrowseFilterablePage.objects.all(),
        BrowsePage.objects.all(),
        DemoPage.objects.all(),
        LearnPage.objects.all(),
        SublandingFilterablePage.objects.all(),
        SublandingPage.objects.all(),
    )
    for queryset in full_width_text_querysets:
        for page in queryset:
            update_full_width_text_tables(wagtail_page=page)
            publish_changes(child=page)
def create_tableblocks_for_every_table(apps, schema_editor):
    """Migrate legacy tables to tableblocks across historical page models.

    Resolves each page model through ``apps.get_model`` (migration-safe),
    runs the matching table updater, and republishes only pages the
    updater reports as changed.
    """
    # (log message, page type names, updater) — run in this order so the
    # content-field pass happens before the full-width-text pass.
    passes = (
        (
            'Updating tables in content field',
            (
                'BrowsePage',
                'SublandingPage',
                'LearnPage',
                'DocumentDetailPage',
            ),
            update_tables_in_content_field,
        ),
        (
            'Updating tables in full width text organisms',
            (
                'BlogPage',
                'BrowseFilterablePage',
                'BrowsePage',
                'DemoPage',
                'LearnPage',
                'SublandingFilterablePage',
                'SublandingPage',
            ),
            update_tables_in_full_width_text_organisms,
        ),
    )
    for message, type_names, updater in passes:
        logger.info(message)
        for type_name in type_names:
            model = apps.get_model('v1', type_name)
            for page in model.objects.all():
                if updater(wagtail_page=page):
                    publish_changes(child=page)
def handle(self, *args, **options):
    """Read markets from file into update dicts and publish the changes.

    Loads the JSON snapshot file named by the ``snapshot_file`` option,
    matches each market to its data snapshot, copies the market's
    origination figures into it, and republishes the browse page each
    snapshot lives on.
    """
    with open(self.expand_path(options['snapshot_file'])) as json_data:
        data = json.load(json_data)
    markets = data['markets']
    snapshots = self.get_data_snapshots()
    for market in markets:
        key = market['market_key']
        snapshot = self.find_data_snapshot(key, snapshots)
        if not snapshot:
            # Market may not have been added to Wagtail yet.
            # logger.warn() is a deprecated alias of logger.warning().
            logger.warning('Market key {} not found'.format(key))
            continue
        # Update snapshot fields with the provided values.
        snapshot['last_updated_projected_data'] = market['data_month']
        snapshot['num_originations'] = market['num_originations']
        snapshot['value_originations'] = market['value_originations']
        snapshot['year_over_year_change'] = market['year_over_year_change']
        # Publish changes to the browse page the data snapshot lives on.
        page = snapshot['page']
        publish_changes(page.specific)
def create_tableblocks_for_every_table(apps, schema_editor):
    """Migrate legacy tables to tableblocks on every relevant page type.

    Runs the content-field pass first, then the full-width-text pass,
    republishing only those pages the updater reports as changed.
    """
    logger.info("Updating tables in content field")
    content_pages = chain(
        BrowsePage.objects.all(),
        SublandingPage.objects.all(),
        LearnPage.objects.all(),
        DocumentDetailPage.objects.all(),
    )
    for page in content_pages:
        changed = update_tables_in_content_field(wagtail_page=page)
        if changed:
            publish_changes(child=page)

    logger.info("Updating tables in full width text organisms")
    full_width_text_pages = chain(
        BlogPage.objects.all(),
        BrowseFilterablePage.objects.all(),
        BrowsePage.objects.all(),
        DemoPage.objects.all(),
        LearnPage.objects.all(),
        SublandingFilterablePage.objects.all(),
        SublandingPage.objects.all(),
    )
    for page in full_width_text_pages:
        changed = update_tables_in_full_width_text_organisms(wagtail_page=page)
        if changed:
            publish_changes(child=page)
def update_chart_blocks(self, date_published, last_updated, markets):
    """Update date_published on all chart blocks.

    ``line-index`` charts receive a per-market inquiry month looked up
    via ``get_inquiry_month``; every other chart type receives
    ``last_updated``. Only pages with chart blocks are republished.
    """
    for page in BrowsePage.objects.all():
        # List comprehension instead of filter(): Python 3's filter()
        # yields a lazy, always-truthy iterator, which would defeat the
        # emptiness check below and republish every page. A concrete
        # list behaves identically on Python 2 and 3.
        chart_blocks = [
            item for item in page.specific.content.stream_data
            if item['type'] == 'chart_block'
        ]
        if not chart_blocks:
            continue
        for chart in chart_blocks:
            chart_options = chart['value']
            chart['value']['date_published'] = date_published
            if chart_options['chart_type'] == 'line-index':
                last_updated_inquiry = get_inquiry_month(
                    markets, chart_options['data_source']
                )
                chart['value']['last_updated_projected_data'] = \
                    last_updated_inquiry
            else:
                chart['value']['last_updated_projected_data'] = \
                    last_updated
        publish_changes(page.specific)