def archive_news():
    """Cron entry point: archive news items in the Plone site.

    Run through the Zope client script machinery:

        bin/zeo_client run bin/archive_news
    """
    _archive_news(get_plone_site())
def get_broken_links():
    """Cron entry point: gather data about broken links.

    Run through the Zope client script machinery:

        bin/zeo_client run bin/get_broken_links
    """
    portal = get_plone_site()
    compute_broken_links(portal)
def sync_adaptecca_casestudies():
    """Cron entry point: import AdapteCCA case studies.

    Run through the Zope client script machinery:

        bin/zeo_client run bin/get_case_studies
    """
    importer = AdapteCCACaseStudyImporter(get_plone_site())
    importer()
def send_reminders():
    """Cron entry point: send reminders.

    Run through the Zope client script machinery, like so:

        bin/www1 run bin/send_mayoradapt_reminders
    """
    _send_reminders(get_plone_site())
def import_drmkc():
    """Cron entry point: import DRMKC projects.

    Run through the Zope client script machinery:

        bin/zeo_client run bin/import_drmkc
    """
    site = get_plone_site()
    # Dropped a leftover commented-out pdb.set_trace() debug breadcrumb.
    drmkc = DRMKCImporter(site)
    drmkc()
def main():
    """Run the sync import process.

    Run through the Zope client script machinery, like:

        GISPASS="******" bin/www1 run bin/sync_to_arcgis

    Consumes all messages found in the queue, then exits.
    """
    from eea.climateadapt.rabbitmq import consume_messages
    from eea.climateadapt.scripts import get_plone_site

    handler = partial(_consume_msg, context=get_plone_site())
    consume_messages(handler, queue='eea.climateadapt.casestudies')
def main():
    """Run the sync import process.

    Run through the Zope client script machinery, like so:

        GISPASS="******" bin/www1 run bin/sync_to_arcgis

    Consumes all messages found in the queue, then exits.
    """
    from eea.climateadapt.rabbitmq import consume_messages
    from eea.climateadapt.scripts import get_plone_site

    consumer = partial(_consume_msg, context=get_plone_site())
    consume_messages(consumer, queue='eea.climateadapt.casestudies')
def main():
    """Fetch C3S indicator source data and store it in the Plone site.

    Persists the raw payload (with a fetch timestamp) as an annotation on
    the ECDE base folder, saves each indicator found in the payload, then
    commits the transaction.
    """
    site = get_plone_site()
    data = get_source_data()

    base_folder = site["knowledge"]["european-climate-data-explorer"]

    # Keep the raw JSON payload around for inspection/reuse; setting
    # _p_changed forces ZODB to persist the mutated annotation mapping.
    annot = IAnnotations(base_folder)
    annot._p_changed = True
    annot["c3s_json_data"] = {"data": data, "fetched": datetime.datetime.now()}

    # Removed a stale commented-out block that updated theme descriptions.
    indicators = data["indicators"]
    for indicator_identifier in indicators:
        save_indicator(indicators[indicator_identifier], site, data)

    transaction.commit()
    print("Total items:" + str(len(indicators)))
def refresh_analytics_data(site=None):
    """Refresh analytics data for *site* (defaults to the Plone site root)."""
    _refresh_analytics_data(get_plone_site() if site is None else site)
def trigger_content_rule(message):
    """Stash *message* in thread-local storage, then fire an
    IndicatorMessageEvent on the site so subscribers can pick it up.
    """
    setattr(threadlocals, MESSAGE_KEY, message)
    notify(IndicatorMessageEvent(get_plone_site()))