def dump_schemas_zopectl_handler(app, args):
    """Handler for the 'bin/instance dump_schemas' zopectl command."""
    # Enable logging for both the schema and the field dumper before
    # doing any work.
    for logger_name in ('opengever.base.schemadump.schema',
                        'opengever.base.schemadump.field'):
        setup_logging(logger_name)

    setup_plone(get_first_plone_site(app))

    dump_schemas()
    dump_oggbundle_schemas()
def import_oggbundle(app, args):
    """Handler for the 'bin/instance import' zopectl command."""
    setup_logging()

    path = sys.argv[3]
    log.info("Importing OGGBundle %s" % path)

    site = setup_plone(get_first_plone_site(app))

    # mark request with GEVER layer
    alsoProvides(site.REQUEST, IOpengeverBaseLayer)
    # Issue sequence numbers over the regular ZODB connection (instead of
    # a separate one) to avoid conflict errors during the OGGBundle import.
    alsoProvides(site.REQUEST, INoSeparateConnectionForSequenceNumbers)

    transmogrifier = Transmogrifier(site)
    IAnnotations(transmogrifier)[BUNDLE_PATH_KEY] = path

    with DisabledLDAP(site):
        transmogrifier(u'opengever.bundle.oggbundle')

    timings = IAnnotations(transmogrifier)[BUNDLE_KEY].stats['timings']
    elapsed = timings['done_post_processing'] - timings['start_loading']
    log.info("Duration: %.2fs" % elapsed.total_seconds())

    log.info("Committing transaction...")
    transaction.get().note("Finished import of OGGBundle %r" % path)
    transaction.commit()
    log.info("Done.")
def import_oggbundle(app, args):
    """Handler for the 'bin/instance import' zopectl command."""
    setup_logging()

    # Discard the first three arguments, because they're not "actual"
    # arguments but cruft that we get because of the way
    # bin/instance [zopectl_cmd] scripts work.
    args = parse_args(sys.argv[3:])

    log.info("Importing OGGBundle %s" % args.bundle_path)

    site = setup_plone(get_first_plone_site(app))

    # mark request with GEVER layer
    alsoProvides(site.REQUEST, IOpengeverBaseLayer)
    # Issue sequence numbers over the regular ZODB connection (instead of
    # a separate one) to avoid conflict errors during the OGGBundle import.
    alsoProvides(site.REQUEST, INoSeparateConnectionForSequenceNumbers)

    # Make sure the index used to track imported GUIDs exists.
    add_guid_index()

    transmogrifier = Transmogrifier(site)
    annotations = IAnnotations(transmogrifier)
    annotations[BUNDLE_PATH_KEY] = args.bundle_path
    annotations[INTERMEDIATE_COMMITS_KEY] = not args.no_intermediate_commits

    solr_enabled = api.portal.get_registry_record(
        'opengever.base.interfaces.ISearchSettings.use_solr', default=False)
    if solr_enabled:
        # Fail early if Solr reindexing is enabled but Solr isn't reachable.
        conn = getUtility(ISolrConnectionManager).connection
        if conn.get('/schema').status == -1:
            raise Exception(
                "Solr isn't running, but solr reindexing is enabled. "
                "Skipping solr reindexing via `--skip-solr`.")
    else:
        # Disable collective indexing as it can lead to too many
        # subtransactions
        unpatch_collective_indexing()

    with DisabledLDAP(site):
        transmogrifier(u'opengever.bundle.oggbundle')

    bundle = annotations[BUNDLE_KEY]
    timings = bundle.stats['timings']
    if 'migration_finished' in timings:
        elapsed = timings['migration_finished'] - timings['start_loading']
        log.info("Duration: %.2fs" % elapsed.total_seconds())

    log.info("Committing transaction...")
    transaction.get().note(
        "Finished import of OGGBundle %r" % args.bundle_path)
    transaction.commit()
    log.info("Done.")
def import_oggbundle(app, args):
    """Handler for the 'bin/instance import' zopectl command.

    Imports an OGGBundle into the first Plone site of the Zope app,
    optionally skipping Solr reindexing and intermediate commits.
    """
    setup_logging()

    # Discard the first three arguments, because they're not "actual" arguments
    # but cruft that we get because of the way bin/instance [zopectl_cmd]
    # scripts work.
    args = parse_args(sys.argv[3:])

    log.info("Importing OGGBundle %s" % args.bundle_path)

    plone = setup_plone(get_first_plone_site(app))

    # mark request with GEVER layer
    alsoProvides(plone.REQUEST, IOpengeverBaseLayer)

    # Don't use a separate ZODB connection to issue sequence numbers in
    # order to avoid conflict errors during OGGBundle import
    alsoProvides(plone.REQUEST, INoSeparateConnectionForSequenceNumbers)

    # Add index to track imported GUIDs (if it doesn't exist yet)
    add_guid_index()

    transmogrifier = Transmogrifier(plone)
    ann = IAnnotations(transmogrifier)
    ann[BUNDLE_PATH_KEY] = args.bundle_path
    ann[INTERMEDIATE_COMMITS_KEY] = not args.no_intermediate_commits
    # BUG FIX: store the flag as given instead of its negation. The key
    # means "skip solr", so it must be True exactly when --skip-solr was
    # passed. The previous double negation (`not args.skip_solr` here and
    # `not ann[SKIP_SOLR_KEY]` below) made the availability check run
    # precisely when the user asked to *skip* Solr, and never otherwise.
    ann[SKIP_SOLR_KEY] = args.skip_solr

    solr_enabled = api.portal.get_registry_record(
        'opengever.base.interfaces.ISearchSettings.use_solr', default=False)
    if solr_enabled and not ann[SKIP_SOLR_KEY]:
        # Solr reindexing is enabled and not skipped - fail early if the
        # Solr server isn't reachable.
        conn = getUtility(ISolrConnectionManager).connection
        if conn.get('/schema').status == -1:
            raise Exception(
                "Solr isn't running, but solr reindexing is enabled. "
                "Skipping solr reindexing via `--skip-solr`.")

    with DisabledLDAP(plone):
        transmogrifier(u'opengever.bundle.oggbundle')

    bundle = IAnnotations(transmogrifier)[BUNDLE_KEY]
    timings = bundle.stats['timings']
    # 'migration_finished' is only present when the pipeline ran to
    # completion; guard so a partial run doesn't KeyError here.
    if 'migration_finished' in timings:
        duration = timings['migration_finished'] - timings['start_loading']
        log.info("Duration: %.2fs" % duration.total_seconds())

    log.info("Committing transaction...")
    transaction.get().note(
        "Finished import of OGGBundle %r" % args.bundle_path)
    transaction.commit()
    log.info("Done.")
def sync_ogds_zopectl_handler(app, args):
    # Zope's default StreamHandler sits at WARNING; drop it to INFO so
    # that sync_ogds()'s output is visible on the console.
    logger.root.handlers[0].setLevel(logging.INFO)

    site = setup_plone(get_first_plone_site(app))
    sync_ogds(site)
    transaction.commit()
def run_nightly_jobs_handler(app, args):
    global logger

    # Make sure unhandled exceptions get logged to Sentry
    register_sentry_except_hook()
    logger = setup_logger()

    # Discard the first three arguments, because they're not "actual"
    # arguments but cruft that we get because of the way
    # bin/instance [zopectl_cmd] scripts work.
    args = parse_args(sys.argv[3:])

    # Run the nightly jobs on every Plone site of this Zope instance.
    for site in all_plone_sites(app):
        invoke_nightly_job_runner(setup_plone(site), args.force, logger)
def send_digest_zopectl_handler(app, args):
    # Zope's default StreamHandler sits at WARNING; drop it to INFO so
    # that send_digests()'s output is visible on the console.
    logger.root.handlers[0].setLevel(logging.INFO)

    site = setup_plone(get_first_plone_site(app))

    # Set up the language based on site wide preferred language. We do
    # this so all the i18n and l10n machinery down the line uses the
    # right language.
    preferred = api.portal.get_tool('portal_languages').getPreferredLanguage()
    site.REQUEST.environ['HTTP_ACCEPT_LANGUAGE'] = preferred
    site.REQUEST.setupLocale()

    DigestMailer().send_digests()
    transaction.commit()
def import_oggbundle(app, args):
    """Handler for the 'bin/instance import' zopectl command."""
    setup_logging()

    # sys.argv starts with three entries of bin/instance [zopectl_cmd]
    # plumbing; only what follows are the real arguments.
    args = parse_args(sys.argv[3:])
    log.info("Importing OGGBundle %s" % args.bundle_path)

    site = setup_plone(get_first_plone_site(app))

    # mark request with GEVER layer
    alsoProvides(site.REQUEST, IOpengeverBaseLayer)
    # Issue sequence numbers over the regular ZODB connection (instead of
    # a separate one) to avoid conflict errors during the OGGBundle import.
    alsoProvides(site.REQUEST, INoSeparateConnectionForSequenceNumbers)

    # Make sure the index used to track imported GUIDs exists.
    add_guid_index()

    transmogrifier = Transmogrifier(site)
    annotations = IAnnotations(transmogrifier)
    annotations[BUNDLE_PATH_KEY] = args.bundle_path
    annotations[INTERMEDIATE_COMMITS_KEY] = not args.no_intermediate_commits

    with DisabledLDAP(site):
        transmogrifier(u'opengever.bundle.oggbundle')

    timings = annotations[BUNDLE_KEY].stats['timings']
    if 'migration_finished' in timings:
        elapsed = timings['migration_finished'] - timings['start_loading']
        log.info("Duration: %.2fs" % elapsed.total_seconds())

    log.info("Committing transaction...")
    transaction.get().note(
        "Finished import of OGGBundle %r" % args.bundle_path)
    transaction.commit()
    log.info("Done.")
def import_oggbundle(app, args):
    """Handler for the 'bin/instance import' zopectl command."""
    setup_logging()

    # Discard the first three arguments, because they're not "actual"
    # arguments but cruft that we get because of the way
    # bin/instance [zopectl_cmd] scripts work.
    args = parse_args(sys.argv[3:])
    bundle_path = args.bundle_path
    log.info("Importing OGGBundle %s" % bundle_path)

    plone = setup_plone(get_first_plone_site(app))

    # mark request with GEVER layer
    alsoProvides(plone.REQUEST, IOpengeverBaseLayer)
    # Don't use a separate ZODB connection to issue sequence numbers in
    # order to avoid conflict errors during OGGBundle import
    alsoProvides(plone.REQUEST, INoSeparateConnectionForSequenceNumbers)

    # Add index to track imported GUIDs (if it doesn't exist yet)
    add_guid_index()

    transmogrifier = Transmogrifier(plone)
    ann = IAnnotations(transmogrifier)
    ann[BUNDLE_PATH_KEY] = bundle_path
    ann[INTERMEDIATE_COMMITS_KEY] = not args.no_intermediate_commits

    with DisabledLDAP(plone):
        transmogrifier(u'opengever.bundle.oggbundle')

    stats = IAnnotations(transmogrifier)[BUNDLE_KEY].stats
    timings = stats['timings']
    if 'migration_finished' in timings:
        duration = timings['migration_finished'] - timings['start_loading']
        log.info("Duration: %.2fs" % duration.total_seconds())

    log.info("Committing transaction...")
    transaction.get().note("Finished import of OGGBundle %r" % bundle_path)
    transaction.commit()
    log.info("Done.")
def import_oggbundle(app, args):
    """Handler for the 'bin/instance import' zopectl command."""
    # Zope's default StreamHandler sits at WARNING; drop it to INFO so
    # the import output is visible on the console.
    logging.root.handlers[0].setLevel(logging.INFO)

    path = sys.argv[3]
    log.info("Importing OGGBundle %s" % path)

    site = setup_plone(get_first_plone_site(app))

    transmogrifier = Transmogrifier(site)
    IAnnotations(transmogrifier)[BUNDLE_PATH_KEY] = path
    transmogrifier(u'opengever.setup.oggbundle')

    log.info("Committing transaction...")
    transaction.commit()
    log.info("Done.")
def generate_remind_notifications_zopectl_handler(app, args):
    # Raise both the root stream handler and this logger to INFO so
    # progress output shows up on the console.
    logger.root.handlers[0].setLevel(logging.INFO)
    logger.setLevel(logging.INFO)

    plone = setup_plone(get_first_plone_site(app))

    # XXX This should not be necessary, but it seems that language
    # negotiation fails somewhere down the line.
    # Set up the language based on site wide preferred language. We do
    # this so all the i18n and l10n machinery down the line uses the
    # right language.
    lang = api.portal.get_tool('portal_languages').getPreferredLanguage()
    plone.REQUEST.environ['HTTP_ACCEPT_LANGUAGE'] = lang
    plone.REQUEST.setupLocale()

    logger.info('Start generate remind notifications...')
    created = create_reminder_notifications()
    transaction.commit()
    logger.info('Successfully created {} notifications'.format(created))
def generate_overdue_notifications_zopectl_handler(app, args):
    # Zope's default StreamHandler sits at WARNING; raise it and this
    # logger to INFO so the generator's output reaches the console.
    logger.root.handlers[0].setLevel(logging.INFO)
    logger.setLevel(logging.INFO)

    # Generate overdue notifications on every Plone site of this instance.
    for site in all_plone_sites(app):
        plone = setup_plone(site)

        # XXX This should not be necessary, but it seems that language
        # negotiation fails somewhere down the line.
        # Set up the language based on site wide preferred language. We do
        # this so all the i18n and l10n machinery down the line uses the
        # right language.
        lang = api.portal.get_tool('portal_languages').getPreferredLanguage()
        plone.REQUEST.environ['HTTP_ACCEPT_LANGUAGE'] = lang
        plone.REQUEST.setupLocale()

        created = DossierOverdueActivityGenerator()()
        logger.info('Successfully created {} notifications'.format(created))
        transaction.commit()
def test_setup_plone_sets_security_context_to_system_processes(self):
    # Before setup_plone() runs, the regular test user is active;
    # afterwards the security context must belong to 'System Processes'.
    self.assert_current_user('test-user')
    setup_plone(self.portal)
    self.assert_current_user('System Processes')
def main(app, argv=sys.argv[1:]):
    """Entry point of the bumblebee maintenance script.

    Dispatches on the -m/--mode option: 'reindex', 'history', 'store'
    or 'activate'.
    """
    # BUG FIX: forward argv to the parser. It was previously ignored
    # (parser.parse_args() falls back to sys.argv[1:] itself), so an
    # explicitly passed argv had no effect. Default behavior unchanged.
    options, args = parser.parse_args(argv)
    mode = options.mode.lower() if options.mode else None

    if not options.mode:
        parser.print_help()
        parser.error(
            'Please specify the "mode" with "bin/instance run <yourscript> -m '
            'reindex | history | store | activate"\n'
        )

    # Either operate on an explicitly given Plone site, or on the first
    # one found on the Zope app.
    if options.plone_path:
        plone = app.unrestrictedTraverse(options.plone_path)
    else:
        plone = get_first_plone_site(app)
    setup_plone(plone)

    converter = getUtility(IBumblebeeConverter)

    if mode == 'reindex':
        LOG.info("Start indexing objects...")
        converter.reindex()
        return transaction.commit()

    elif mode == 'history':
        LOG.info("Start creating checksums for portal repository ...")
        repository = api.portal.get_tool('portal_repository')
        catalog = api.portal.get_tool('portal_catalog')
        brains = catalog.unrestrictedSearchResults(
            {'object_provides': 'ftw.bumblebee.interfaces.IBumblebeeable'})
        for brain in ProgressLogger(
                'Create checksums for objects in portal repository',
                brains, logger=LOG):
            obj = brain.getObject()
            versions = repository.getHistory(obj)
            if IOGMailMarker.providedBy(obj):
                # Mails are not expected to be versioned; log any that are.
                if len(versions) > 0:
                    LOG.warning('Found mail with versions: {}'.format(
                        '/'.join(obj.getPhysicalPath())))
                continue
            for version in versions:
                # we have to calculate the checksum on the "restored" object
                # returned by `portal_repository`. The archived object does not
                # contain an accessible file without `portal_repository` magic.
                version_checksum = IBumblebeeDocument(
                    version.object).calculate_checksum()
                archived_obj = Archeologist(obj, version).excavate()
                annotations = IAnnotations(archived_obj)
                annotations[DOCUMENT_CHECKSUM_ANNOTATION_KEY] = version_checksum
                # Force ZODB to persist the annotation change.
                archived_obj._p_changed = True
        return transaction.commit()

    elif mode == 'store':
        LOG.info("Start storing objects...")
        if not options.reset:
            LOG.warning(
                "You started storing without reseting the timestamp. "
                "Already converted objects will be skipped.")
        return converter.store(deferred=True, reset_timestamp=options.reset)

    elif mode == 'activate':
        api.portal.set_registry_record(
            'is_feature_enabled', True, interface=IGeverBumblebeeSettings)
        LOG.info("activating bumblebee feature in registry.")
        return transaction.commit()

    else:
        parser.print_help()
        parser.error('You entered an invalid mode: {}\n'.format(mode))