def main(url=None, latest=None, generation=None):
    """Evolve the repozitory schema or list pending generations.

    When *url* is not supplied, command-line arguments are parsed and the
    database URL is read from the instance configuration.
    """
    if not url:
        parser = create_karl_argparser(description=__doc__)
        parser.add_argument('-g', '--generation', type=int,
                            help="Schema generation to evolve to")
        parser.add_argument('-l', '--latest', action='store_true',
                            help="Evolve to the latest generation")
        args = parser.parse_args(sys.argv[1:])
        env = args.bootstrap(args.config_uri)
        root, registry = env['root'], env['registry']
        url = registry.settings['repozitory_db_string']
        latest = args.latest
        generation = args.generation
        # The evolver opens its own connection; release the bootstrap one.
        root._p_jar.close()
    evolver = KarlEvolver(url)
    if latest:
        generation = 999999999  # sentinel: evolve as far as possible
    if generation is None:
        evolver.list()
    else:
        evolver.evolve(generation)
    evolver.close()
def main():
    """Send reminder emails for passwords expiring in the coming window."""
    parser = create_karl_argparser(
        description="Send expiring password reminder emails")
    parser.add_argument('-H', '--hours', type=int, dest='hours_ahead',
                        default=24,
                        help="Hours ahead to remind user about password expiration")
    args = parser.parse_args(sys.argv[1:])
    env = args.bootstrap(args.config_uri)
    # Remind about passwords expiring in a 24h window starting hours_ahead
    # from now.
    start = datetime.datetime.now() + datetime.timedelta(hours=args.hours_ahead)
    end = start + datetime.timedelta(hours=24)
    try:
        send_reminders(env, start, end)
    except:
        transaction.abort()
        raise
    else:
        transaction.commit()
def main(argv=sys.argv):
    """Send digest emails, optionally looping forever as a daemon."""
    default_interval = 6 * 3600  # 6 hours
    parser = create_karl_argparser(description='Send digest emails.')
    parser.add_argument('-d', '--daemon', action='store_true',
                        help="Run in daemon mode.")
    parser.add_argument('-i', '--interval', type=int, default=default_interval,
                        help="Interval in seconds between executions in "
                             "daemon mode. Default is %d." % default_interval)
    parser.add_argument('-f', '--frequency', dest='frequency', default='daily',
                        help='Digest frequency: daily/weekly/biweekly.')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer, registry = env['root'], env['closer'], env['registry']
    # Wrap in the daemonizer when requested; either way guard with the
    # 'digest' lock so two instances never run concurrently.
    worker = daemonize_function(digest, args.interval) if args.daemon else digest
    only_one(worker, registry, 'digest')(root, closer, registry, args.frequency)
    closer()
    import gc
    gc.collect()  # Work around relstorage cache bug.
def main(argv=sys.argv):
    """Populate the database with sample users and communities."""
    parser = create_karl_argparser(
        description='Generate sample content in the database.'
    )
    parser.add_argument('-c', '--communities', dest='communities',
                        help='Number of communities to add (default 10)',
                        default=10)
    parser.add_argument('--dry-run', dest='dryrun', action='store_true',
                        help="Don't actually commit the transaction")
    parser.add_argument('-m', '--more-files', dest='more_files',
                        action='store_true',
                        help="Create many files in the first community (default false)")
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer = env['root'], env['closer']
    try:
        add_sample_users(root)
        for index in range(int(args.communities)):
            # Only the first community gets the extra files.
            many_files = args.more_files and index == 0
            try:
                add_sample_community(root, more_files=many_files)
            except TypeError:
                # fall back for old versions that do not support more_files
                add_sample_community(root)
    except:
        transaction.abort()
        raise
    else:
        if args.dryrun:
            transaction.abort()
        else:
            transaction.commit()
def main(argv=sys.argv):
    """Process incoming mail, optionally running as a daemon.

    Exits with status 2 when the site is in maintenance mode.
    """
    default_interval = 300
    parser = create_karl_argparser(description='Process incoming mail.')
    parser.add_argument('-d', '--daemon', action='store_true',
                        help="Run in daemon mode.")
    parser.add_argument('-i', '--interval', type=int, default=default_interval,
                        help="Interval in seconds between executions in "
                             "daemon mode. Default is %d." % default_interval)
    # Bug fix: parse the argv parameter instead of always reading sys.argv,
    # so callers (and tests) can pass an explicit argument list.
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    if not is_normal_mode(env['registry']):
        log.info("Cannot run mailin: Running in maintenance mode.")
        sys.exit(2)
    try:
        if args.daemon:
            daemonize_function(mailin, args.interval)(args, env, parser)
        else:
            mailin(args, env, parser)
    finally:
        env['closer']()
def main(argv=sys.argv):
    """Send queued outgoing mail via SMTP, optionally as a daemon."""
    default_interval = 300
    parser = create_karl_argparser(description='Send outgoing mail.')
    parser.add_argument('--server', '-s', default="localhost", metavar='HOST',
                        help='SMTP server host name. Default is localhost.',
                        )
    parser.add_argument('--port', '-P', type=int, default=25, metavar='PORT',
                        help='Port of SMTP server. Default is 25.',
                        )
    parser.add_argument('--username', '-u',
                        help='Username, if authentication is required')
    parser.add_argument('--password', '-p',
                        help='Password, if authentication is required')
    parser.add_argument('--force-tls', '-f', action='store_true',
                        help='Require that TLS be used.')
    parser.add_argument('--no-tls', '-n', action='store_true',
                        help='Require that TLS not be used.')
    parser.add_argument('-d', '--daemon', action='store_true',
                        help="Run in daemon mode.")
    parser.add_argument('-i', '--interval', type=int, default=default_interval,
                        help="Interval in seconds between executions in "
                             "daemon mode. Default is %d." % default_interval)
    # Bug fix: parse the argv parameter instead of always reading sys.argv.
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    # Guard with the 'mailout' lock so two senders never run concurrently.
    if args.daemon:
        f = daemonize_function(mailout, args.interval)
        only_one(f, env['registry'], 'mailout')(args, env)
    else:
        only_one(mailout, env['registry'], 'mailout')(args, env)
def dump(argv=sys.argv):
    """Print the people directory configuration to ``args.out``."""
    parser = create_karl_argparser(
        description='Dump people directory configuration.')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    site_root = env['root']
    print >> args.out, dump_peopledir(site_root['people'])
def main(argv=sys.argv):
    """Sync users from an external data source over BASIC Auth.

    Bug fix: usage errors are reported via ``parser.error`` -- the parsed
    namespace has no ``parser`` attribute, so the previous
    ``args.parser.error`` raised AttributeError instead of the intended
    usage message.
    """
    parser = create_karl_argparser(
        description='Sync users to external data source.'
    )
    parser.add_argument('--username', '-U', default=None,
                        help='Username for BASIC Auth')
    parser.add_argument('--password', '-P', default=None,
                        help='Password for BASIC Auth')
    parser.add_argument('--password-file', '-F', default=None,
                        help='Read password for BASIC Auth from file')
    parser.add_argument('url', help="URL of data source.")
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer, registry = env['root'], env['closer'], env['registry']
    if args.password and args.password_file:
        parser.error('cannot set both --password and --password-file')
    if args.password_file:
        with open(args.password_file) as f:
            password = f.read().strip('\n')
    else:
        password = args.password
    log.info("Syncing users at %s" % args.url)
    sync = UserSync(root)
    # 'usersync' lock prevents two concurrent syncs.
    only_one(sync, registry, 'usersync')(args.url, args.username, password)
    transaction.commit()
    closer()
def main(argv=sys.argv):
    """Sync staff profiles and logins with the OSI GSA datasource.

    Fixes: parse the ``argv`` parameter (was hard-coded to ``sys.argv``);
    repair the -x help text ("w/in agiven" missing a space, "if os" for
    "if so"); update the -t help text, which claimed a default of 15 sec
    while the actual default is 90.
    """
    default_interval = 120
    parser = create_karl_argparser(
        description='Sync staff profiles and login to OSI GSA.'
    )
    parser.add_argument('-d', '--daemon', action='store_true',
                        help="Run in daemon mode.")
    parser.add_argument('-i', '--interval', type=int, default=default_interval,
                        help="Interval in seconds between executions in "
                             "daemon mode. Default is %d." % default_interval)
    parser.add_argument('-u', '--user', help='Login username for GSA.')
    parser.add_argument('-p', '--password', help='Password for GSA.')
    parser.add_argument('-x', '--check-last-sync', default=None,
                        help='Check that last sync w/ GSA happened w/in a '
                             'given interval (in minutes). If not, exit with '
                             'a non-zero status code; if so, exit normally.')
    parser.add_argument('url', help='URL of GSA datasource.')
    parser.add_argument('-t', '--timeout', default=90,
                        help='Timeout for GSA request (default 90 sec).')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    if not is_normal_mode(env['registry']):
        # NOTE(review): message says "mailin" -- looks copy-pasted from the
        # mailin script; confirm intended wording before changing.
        log.info("Cannot run mailin: Running in maintenance mode.")
        sys.exit(2)
    f = only_one(_sync, env['registry'], 'gsa_sync')
    if args.daemon:
        daemonize_function(f, args.interval)(args, env)
    else:
        f(args, env)
def main(argv=sys.argv):
    """Install a fake blog tool in a community for mailin trace emails.

    Bug fixes: removed the stale ``root, closer = args.get_root(args.inst)``
    line -- the argparse namespace has neither ``get_root`` nor ``inst``, so
    it raised AttributeError (bootstrap already provides root/closer); and
    errors are reported via ``parser.error`` since ``args`` has no
    ``parser`` attribute.
    """
    parser = create_karl_argparser(
        description='Add a fake blog tool to community for receiving mailin '
                    'trace emails.')
    parser.add_argument('community', help='Community name.')
    parser.add_argument('file', help='Path to file to touch when a tracer '
                                     'email is received.')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer = env['root'], env['closer']
    community = find_communities(root).get(args.community)
    if community is None:
        parser.error('Could not find community: %s' % args.community)
    blog = community.get('blog')
    if blog is not None:
        if len(blog) > 0:
            # Refuse to destroy real content.
            parser.error('Cannot replace blog with blog entries.')
        else:
            del community['blog']
    community['blog'] = blog = MailinTraceBlog()
    out = args.out
    print >> out, 'Added mailin trace tool at: %s' % model_path(blog)
    settings = root._p_jar.root.instance_config
    settings['mailin_trace_file'] = args.file
    print >> out, 'The mailin trace file is: %s' % args.file
    transaction.commit()
    print >> out, ('You must restart the mailin daemon in order for the new '
                   'settings to take effect.')
def edit_feed(argv=sys.argv):
    """Edit the title, URL, or entry cap of an existing feed."""
    parser = create_karl_argparser(description='Edit a feed.')
    parser.add_argument('name', help='Identifier of feed in database.')
    parser.add_argument('-t', '--title', help='Override title of feed.')
    parser.add_argument('--use-feed-title', action='store_true',
                        help='Use feed title. Undoes previous override.')
    parser.add_argument('-m', '--max', type=int,
                        help='Maximum number of entries to keep at a time.')
    parser.add_argument('-u', '--url', help='URL of feed.')
    parser.set_defaults(func=edit_feed, parser=parser)
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer = env['root'], env['closer']
    feed = get_feed(root, args.name)
    if feed is None:
        args.parser.error("No such feed: %s" % args.name)
    if args.max is not None:
        feed.max_entries = args.max
    if args.url is not None:
        feed.url = args.url
    if args.use_feed_title:
        # Drop any manual override and fall back to the feed's own title.
        feed.title = None
        feed.override_title = False
    elif args.title is not None:
        feed.title = args.title
        feed.override_title = True
    transaction.commit()
def main(argv=sys.argv):
    """Install a fake blog tool in a community for mailin trace emails.

    Bug fixes: removed the stale ``root, closer = args.get_root(args.inst)``
    line -- the argparse namespace has neither ``get_root`` nor ``inst``, so
    it raised AttributeError (bootstrap already provides root/closer); and
    errors are reported via ``parser.error`` since ``args`` has no
    ``parser`` attribute.
    """
    parser = create_karl_argparser(
        description='Add a fake blog tool to community for receiving mailin '
                    'trace emails.'
    )
    parser.add_argument('community', help='Community name.')
    parser.add_argument('file', help='Path to file to touch when a tracer '
                                     'email is received.')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer = env['root'], env['closer']
    community = find_communities(root).get(args.community)
    if community is None:
        parser.error('Could not find community: %s' % args.community)
    blog = community.get('blog')
    if blog is not None:
        if len(blog) > 0:
            # Refuse to destroy real content.
            parser.error('Cannot replace blog with blog entries.')
        else:
            del community['blog']
    community['blog'] = blog = MailinTraceBlog()
    out = args.out
    print >> out, 'Added mailin trace tool at: %s' % model_path(blog)
    settings = root._p_jar.root.instance_config
    settings['mailin_trace_file'] = args.file
    print >> out, 'The mailin trace file is: %s' % args.file
    transaction.commit()
    print >> out, ('You must restart the mailin daemon in order for the new '
                   'settings to take effect.')
def add_feed(argv=sys.argv):
    """Create a new feed in the 'feeds' container.

    Bug fix: duplicate-name errors go through ``parser.error`` -- ``args``
    has no ``parser`` attribute, so ``args.parser.error`` raised
    AttributeError instead of printing a usage message.
    """
    parser = create_karl_argparser(description='Add a new feed.')
    parser.add_argument('-t', '--title', help='Override title of feed.')
    parser.add_argument('-m', '--max', type=int, default=0,
                        help='Maximum number of entries to keep at a time.')
    parser.add_argument('name', help='Identifier of feed in database.')
    parser.add_argument('url', help='URL of feed.')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer = env['root'], env['closer']
    feed = get_feed(root, args.name)
    if feed is not None:
        parser.error("Feed already exists with name: %s" % args.name)
    name = args.name
    override_title = args.title
    max_entries = args.max
    url = args.url
    container = root.get('feeds')
    if container is None:
        # First feed ever: create the container lazily.
        container = create_content(IFeedsContainer)
        root['feeds'] = container
    assert name not in container, "Feed already exists: %s" % name
    feed = create_content(IFeed, override_title)
    feed.url = url
    feed.max_entries = max_entries
    container[name] = feed
    feed.override_title = bool(override_title)
    transaction.commit()
def load(argv=sys.argv, peopleconf=peopleconf, root=None):
    """Load the people directory configuration from an XML file.

    The ``peopleconf`` and ``root`` parameters are overridable for unit
    testing only.
    """
    parser = create_karl_argparser(
        description='Load people directory configuration.'
    )
    parser.add_argument('-f', '--force-reindex', action='store_true',
                        help='Reindex the people directory unconditionally.')
    parser.add_argument('filename', help='Name of XML to load.')
    args = parser.parse_args(argv[1:])
    if root is None:  # only untrue during unit testing
        env = args.bootstrap(args.config_uri)
        root = env['root']
    force_reindex = args.force_reindex
    tree = etree.parse(args.filename)
    if 'people' in root and not isinstance(root['people'], PeopleDirectory):
        # remove the old people directory
        del root['people']
    if 'people' not in root:
        root['people'] = PeopleDirectory()
        force_reindex = True
    peopleconf(root['people'], tree, force_reindex=force_reindex)
    transaction.commit()
def main(argv=sys.argv):
    """Switch an instance's text index between zope.index and pgtextindex."""
    parser = create_karl_argparser(
        description='Switches the text index of an instance to use '
                    'either zope.index or pgtextindex.'
    )
    parser.add_argument('--pg', dest='convert_to', action='store_const',
                        const='pg',
                        help='Convert the database to pgtextindex.')
    parser.add_argument('--zope', dest='convert_to', action='store_const',
                        const='zope',
                        help='Convert the database to zope.index.')
    parser.add_argument('--show', action='store_true', default=False,
                        help='Show which index type in currently in use. '
                             'Performs no action.')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    site, closer = env['root'], env['closer']
    if args.show:
        print >> args.out, (
            'Current text index type: %s' % get_index_type(args, site))
        return
    # If an earlier reindex was interrupted, resume it instead of
    # switching index types again.
    if getattr(site, '_reindex_text_status', None) == 'reindexing':
        reindex_text(args, site)
    else:
        switch_text_index(args, site)
        reindex_text(args, site)
def load(argv=sys.argv, peopleconf=peopleconf, root=None):
    """Load people directory configuration from XML.

    ``peopleconf`` and ``root`` exist only as unit-testing hooks.
    """
    parser = create_karl_argparser(
        description='Load people directory configuration.')
    parser.add_argument('-f', '--force-reindex', action='store_true',
                        help='Reindex the people directory unconditionally.')
    parser.add_argument('filename', help='Name of XML to load.')
    args = parser.parse_args(argv[1:])
    if root is None:
        # only untrue during unit testing
        root = args.bootstrap(args.config_uri)['root']
    force_reindex = args.force_reindex
    tree = etree.parse(args.filename)
    if 'people' in root and not isinstance(root['people'], PeopleDirectory):
        del root['people']  # remove the old people directory
    if 'people' not in root:
        root['people'] = PeopleDirectory()
        force_reindex = True
    peopleconf(root['people'], tree, force_reindex=force_reindex)
    transaction.commit()
def dump(argv=sys.argv):
    """Write the people directory configuration to ``args.out``."""
    parser = create_karl_argparser(
        description='Dump people directory configuration.'
    )
    args = parser.parse_args(argv[1:])
    root = args.bootstrap(args.config_uri)['root']
    print >> args.out, dump_peopledir(root['people'])
def remove_feed(argv=sys.argv):
    """Delete a feed by name.

    Bug fix: report a missing feed via ``parser.error`` -- ``args`` has no
    ``parser`` attribute, so ``args.parser.error`` raised AttributeError.
    """
    parser = create_karl_argparser(description='Remove a feed.')
    parser.add_argument('name', help='Name of feed.')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer = env['root'], env['closer']
    feeds = root.get('feeds')
    if not feeds or args.name not in feeds:
        parser.error("No such feed: %s" % args.name)
    del feeds[args.name]
    transaction.commit()
def main(argv=sys.argv):
    """Initialize the repozitory for this instance in batches.

    Bug fix: report a missing repository via ``parser.error`` -- ``args``
    has no ``parser`` attribute, so ``args.parser.error`` raised
    AttributeError.
    """
    # NOTE(review): the description string looks copy-pasted from the
    # change-creator script -- confirm intended wording before changing it.
    parser = create_karl_argparser(description="Change creator of content.")
    parser.add_argument('--batch-size', type=int, default=500,
                        help='Number of objects to initialize per transaction.')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer = env['root'], env['closer']
    repo = find_repo(root)
    if repo is None:
        parser.error("No repository is configured.")
    init_repozitory(repo, root, args.batch_size)
    closer()
def list_feeds(argv=sys.argv):
    """Print every configured feed with its url, title, and entry cap."""
    parser = create_karl_argparser(description='List feeds.')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer = env['root'], env['closer']
    feeds = root.get('feeds')
    if feeds is None or len(feeds) == 0:
        print >> args.out, 'No feeds configured.'
        return
    out = args.out
    for feed_name in sorted(feeds.keys()):
        feed = feeds.get(feed_name)
        print >> out, "%s:" % feed_name
        print >> out, "\turl: %s" % feed.url
        print >> out, "\ttitle: %s" % feed.title
        print >> out, "\tmax entries: %d" % feed.max_entries
def main(argv=sys.argv):
    """Backdate a content object's created/modified timestamps and reindex."""
    parser = create_karl_argparser(
        "Backdate created and modified timestamps of content."
    )
    parser.add_argument('date', type=parse_date)
    parser.add_argument('path')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer = env['root'], env['closer']
    content = find_resource(root, args.path)
    content.created = content.modified = args.date
    # Reindex so catalog date indexes pick up the new timestamps.
    reindex_content(content, None)
    transaction.commit()
    closer()
def main(argv=sys.argv):
    """Change the creator/modified_by of a content object and reindex it.

    Bug fix: validate the user via ``parser.error`` -- ``args`` has no
    ``parser`` attribute, so ``args.parser.error`` raised AttributeError.
    Also uses the idiomatic ``not in`` membership test.
    """
    parser = create_karl_argparser("Change creator of content.")
    parser.add_argument('user')
    parser.add_argument('path')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer = env['root'], env['closer']
    content = find_resource(root, args.path)
    profiles = find_profiles(root)
    if args.user not in profiles:
        parser.error("No such user: %s" % args.user)
    content.creator = content.modified_by = args.user
    reindex_content(content, None)
    transaction.commit()
    closer()
def main(argv=sys.argv): parser = create_karl_argparser( description="Remove local Deny ACLs from forums." ) parser.add_argument( '-l', '--limit', dest='limit', help='Number of forums to process' ) parser.add_argument( '--dry-run', dest='dryrun', action='store_true', help="Don't actually commit the transaction" ) parser.add_argument('path') parser.set_defaults(limit=0, dryrun=False) args = parser.parse_args(argv[1:]) env = args.bootstrap(args.config_uri) root, closer = env['root'], env['closer'] searcher = ICatalogSearch(root) kw = {'interfaces': [IForumTopic], 'path': args.path} numdocs, docids, resolver = searcher(**kw) limit = int(args.limit) print "Removing Deny acl from forum topics under %s" % args.path if limit > 0: print "Limiting to %d topics" % limit if limit == 0: limit = numdocs # make sure it's a list and not a btree docids = list(docids) for docid in docids[:limit]: topic = resolver(docid) if DENY_ACL in topic.__acl__: acl = topic.__acl__ acl.remove(DENY_ACL) topic.__acl__ = acl print "Removed Deny ACL from %s" % topic.title if args.dryrun: print '*** aborting ***' transaction.abort() else: print '*** committing ***' transaction.commit() closer()
def main(argv=sys.argv):
    """Generate statistics reports into the configured (or given) folder."""
    parser = create_karl_argparser(
        description=__doc__
    )
    parser.add_argument('-O', '--output', dest='output', default='.',
                        help="Path to the directory where reports should be written.",
                        metavar='DIR')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer, registry = env['root'], env['closer'], env['registry']
    # Prefer the folder from instance config; fall back to the -O option.
    folder = registry.settings.get('statistics_folder')
    if folder is None:
        folder = os.path.abspath(args.output)
    if not os.path.exists(folder):
        os.makedirs(folder)
    generate_reports(root, folder)
def main(argv=sys.argv):
    """Move content to another folder, optionally forcing a workflow state."""
    logging.basicConfig()
    log.setLevel(logging.INFO)
    parser = create_karl_argparser(
        description="Move content to another folder",
    )
    parser.add_argument('-d', '--dry-run', dest='dry_run', action="store_true",
                        default=False, help="Don't commit the transaction")
    parser.add_argument('-S', '--security-state', dest='security_state',
                        default=None,
                        help="Force workflow transition to given state. By "
                             "default no transition is performed.")
    parser.add_argument('source')
    parser.add_argument('dest')
    args = parser.parse_args(argv[1:])
    if not (args.source and args.dest):
        parser.error("Source content and destination folder are required")
    env = args.bootstrap(args.config_uri)
    root = env['root']
    try:
        move_content(root, args.source, args.dest, args.security_state)
    except:
        transaction.abort()
        raise
    else:
        if args.dry_run:
            log.info("Aborting transaction.")
            transaction.abort()
        else:
            log.info("Committing transaction.")
            transaction.commit()
def update_feeds(argv=sys.argv):
    """Fetch new entries for all feeds, optionally looping as a daemon."""
    default_interval = 1800  # 30 minutes
    parser = create_karl_argparser(
        description='Get new entries from feeds.'
    )
    parser.add_argument('-d', '--daemon', action='store_true',
                        help="Run in daemon mode.")
    parser.add_argument('-i', '--interval', type=int, default=default_interval,
                        help="Interval in seconds between executions in "
                             "daemon mode. Default is %d." % default_interval)
    parser.add_argument('-f', '--force', action='store_true',
                        help='Force reload of feed entries.')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    if not args.daemon:
        _update_feeds(args, env)
    else:
        daemonize_function(_update_feeds, args.interval)(args, env)
def main(argv=sys.argv):
    """Report, and with --latest run, pending database evolutions."""
    parser = create_karl_argparser(
        description='Bring database up to date with code.')
    parser.add_argument('--latest', action='store_true',
                        help='Update to latest versions.')
    parser.add_argument('--force-version', dest='force_version',
                        help='Force to run again.', type=int)
    args = parser.parse_args(argv[1:])
    out = args.out
    env = args.bootstrap(args.config_uri)
    root = env['root']
    print >> out, "=" * 78
    for pkg_name, factory in list(getUtilitiesFor(IEvolutionManager)):
        __import__(pkg_name)
        code_version = sys.modules[pkg_name].VERSION
        print >> out, 'Package %s' % pkg_name
        manager = factory(root, pkg_name, code_version, 0)
        if args.force_version:
            # Rewind (or advance) the recorded version so evolution reruns.
            manager.set_db_version(args.force_version)
        db_version = manager.get_db_version()
        print >> out, 'Code at software version %s' % code_version
        print >> out, 'Database at version %s' % db_version
        if code_version <= db_version:
            print >> out, 'Nothing to do'
        elif args.latest:
            evolve_to_latest(manager)
            print >> out, 'Evolved %s to %s' % (pkg_name,
                                                manager.get_db_version())
        else:
            print >> out, 'Not evolving (use --latest to do actual evolution)'
        print >> out, ''
    transaction.commit()
def main(argv=sys.argv): parser = create_karl_argparser( description='Open a debug session with a Karl instance.' ) parser.add_argument('-S', '--script', default=None, help='Script to run. If not specified will start ' 'an interactive session.') args = parser.parse_args(argv[1:]) env = args.bootstrap(args.config_uri) root, closer = env['root'], env['closer'] cprt = ('Type "help" for more information. "app" is the karl Pyramid ' 'application.') script = args.script if script is None: banner = "Python %s on %s\n%s" % (sys.version, sys.platform, cprt) interact(banner, local=env) else: code = compile(open(script).read(), script, 'exec') exec code in env
def main():
    """Email users whose passwords expire within the coming window."""
    parser = create_karl_argparser(
        description="Send expiring password reminder emails")
    parser.add_argument('-H', '--hours', type=int, dest='hours_ahead',
                        default=24,
                        help="Hours ahead to remind user about password expiration")
    args = parser.parse_args(sys.argv[1:])
    env = args.bootstrap(args.config_uri)
    # 24-hour reminder window beginning hours_ahead from now.
    window_start = datetime.datetime.now() + datetime.timedelta(
        hours=args.hours_ahead)
    window_end = window_start + datetime.timedelta(hours=24)
    try:
        send_reminders(env, window_start, window_end)
    except:
        transaction.abort()
        raise
    else:
        transaction.commit()
def main(argv=sys.argv):
    """Set a user's password expiration date from a YYYY-MM-DD string."""
    parser = create_karl_argparser(
        description='Set password expiration date for user.')
    parser.add_argument('username', help="Username.")
    parser.add_argument('date', help="Reset date (YYYY-MM-DD).")
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer, registry = env['root'], env['closer'], env['registry']
    profiles = root['profiles']
    if args.username not in profiles:
        log.info("User %s not found." % args.username)
    else:
        log.info("Setting password expiration date for user %s" % args.username)
        expiration_date = datetime.strptime(args.date, '%Y-%m-%d')
        profiles[args.username].password_expiration_date = expiration_date
        log.info("Date set to: %s" % expiration_date)
    transaction.commit()
    closer()
def main(argv=sys.argv): parser = create_karl_argparser( description='Open a debug session with a Karl instance.') parser.add_argument('-S', '--script', default=None, help='Script to run. If not specified will start ' 'an interactive session.') args = parser.parse_args(argv[1:]) env = args.bootstrap(args.config_uri) root, closer = env['root'], env['closer'] cprt = ('Type "help" for more information. "app" is the karl Pyramid ' 'application.') script = args.script if script is None: banner = "Python %s on %s\n%s" % (sys.version, sys.platform, cprt) interact(banner, local=env) else: code = compile(open(script).read(), script, 'exec') exec code in env
def main(argv=sys.argv):
    """Write statistics reports into the configured (or -O) directory."""
    parser = create_karl_argparser(description=__doc__)
    parser.add_argument(
        '-O', '--output', dest='output', default='.',
        help="Path to the directory where reports should be written.",
        metavar='DIR')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer, registry = env['root'], env['closer'], env['registry']
    # Config setting wins over the command-line option.
    folder = registry.settings.get('statistics_folder')
    if folder is None:
        folder = os.path.abspath(args.output)
    if not os.path.exists(folder):
        os.makedirs(folder)
    generate_reports(root, folder)
def main(argv=sys.argv):
    """Show pending schema evolutions and, with --latest, run them."""
    parser = create_karl_argparser(
        description='Bring database up to date with code.'
    )
    parser.add_argument('--latest', action='store_true',
                        help='Update to latest versions.')
    parser.add_argument('--force-version', dest='force_version',
                        help='Force to run again.', type=int)
    args = parser.parse_args(argv[1:])
    out = args.out
    env = args.bootstrap(args.config_uri)
    root = env['root']
    print >> out, "=" * 78
    managers = list(getUtilitiesFor(IEvolutionManager))
    for pkg_name, factory in managers:
        __import__(pkg_name)
        VERSION = sys.modules[pkg_name].VERSION
        print >> out, 'Package %s' % pkg_name
        manager = factory(root, pkg_name, VERSION, 0)
        if args.force_version:
            # Rewind (or advance) the recorded version so evolution reruns.
            manager.set_db_version(args.force_version)
        db_version = manager.get_db_version()
        print >> out, 'Code at software version %s' % VERSION
        print >> out, 'Database at version %s' % db_version
        if VERSION <= db_version:
            print >> out, 'Nothing to do'
        elif args.latest:
            evolve_to_latest(manager)
            ver = manager.get_db_version()
            print >> out, 'Evolved %s to %s' % (pkg_name, ver)
        else:
            print >> out, 'Not evolving (use --latest to do actual evolution)'
        print >> out, ''
    transaction.commit()
def main(argv=sys.argv):
    """Export data for one or more communities given by slug."""
    parser = create_karl_argparser(description="Export a community's data.")
    parser.add_argument('--community', dest='community', action='store',
                        help=('Community to export. This should be the slug. '
                              'Can separate with , to specify multiple '
                              'communities as in "c1,c2,c3".'))
    # Bug fix: parse the argv parameter instead of always reading sys.argv,
    # so callers (and tests) can pass an explicit argument list.
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    site, registry, closer = env['root'], env['registry'], env['closer']
    slugs = args.community.split(',')
    for community_slug in slugs:
        community = lookup_community(site, community_slug)
        if community is None:
            # Best-effort: skip unknown slugs and continue with the rest.
            log.error('%s community not found' % community_slug)
            continue
        log.info('Found community: %s. Exporting ...' % community_slug)
        export(community)
def main(argv=sys.argv):
    """Send digest emails at a chosen frequency, optionally as a daemon."""
    default_interval = 6 * 3600  # 6 hours
    parser = create_karl_argparser(
        description='Send digest emails.'
    )
    parser.add_argument('-d', '--daemon', action='store_true',
                        help="Run in daemon mode.")
    parser.add_argument('-i', '--interval', type=int, default=default_interval,
                        help="Interval in seconds between executions in "
                             "daemon mode. Default is %d." % default_interval)
    parser.add_argument('-f', '--frequency', dest='frequency', default='daily',
                        help='Digest frequency: daily/weekly/biweekly.')
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer, registry = env['root'], env['closer'], env['registry']
    if args.daemon:
        target = daemonize_function(digest, args.interval)
    else:
        target = digest
    # The 'digest' lock prevents concurrent digest runs.
    only_one(target, registry, 'digest')(root, closer, registry,
                                         args.frequency)
    closer()
def main(argv=sys.argv):
    """Create sample users and communities, optionally as a dry run."""
    parser = create_karl_argparser(
        description='Generate sample content in the database.')
    parser.add_argument('-c', '--communities', dest='communities',
                        help='Number of communities to add (default 10)',
                        default=10)
    parser.add_argument('--dry-run', dest='dryrun', action='store_true',
                        help="Don't actually commit the transaction")
    parser.add_argument(
        '-m', '--more-files', dest='more_files', action='store_true',
        help="Create many files in the first community (default false)")
    args = parser.parse_args(argv[1:])
    env = args.bootstrap(args.config_uri)
    root, closer = env['root'], env['closer']
    try:
        add_sample_users(root)
        for i in range(int(args.communities)):
            try:
                # Only the first community gets the extra files.
                add_sample_community(root,
                                     more_files=(args.more_files and i == 0))
            except TypeError:
                # fall back for old versions that do not support more_files
                add_sample_community(root)
    except:
        transaction.abort()
        raise
    else:
        if args.dryrun:
            transaction.abort()
        else:
            transaction.commit()
def main(argv=sys.argv): parser = create_karl_argparser(description="Reassign tags owned by users who no longer exist.") parser.add_argument("-a", "--assign_to", dest="assign_to", help="User to reassign tags to") parser.add_argument("--dry-run", dest="dryrun", action="store_true", help="Don't actually commit the transaction") parser.set_defaults(assign_to="admin", dryrun=False) args = parser.parse_args(argv[1:]) env = args.bootstrap(args.config_uri) root, closer = env["root"], env["closer"] profiles = root["profiles"] engine = root.tags assign_to = args.assign_to print "searching for tags with non-existing user" for user in list(engine.getUsers()): if not user in profiles: print "Reassigning tags by missing user '%s' to '%s'" % (user, assign_to) engine.reassign(user, assign_to) if args.dryrun: print "*** aborting ***" transaction.abort() else: print "*** committing ***" transaction.commit() closer()
def main(argv=sys.argv): parser = create_karl_argparser(description="Save metrics to the ZODB") parser.add_argument('--year', dest='year', action='store', help="4 digit year") parser.add_argument('--month', dest='month', action='store', help="Integer month (January is 1)") parser.add_argument('--monthly', dest='monthly', action='store', help='Generate metrics for the previous month.' ' Useful for cron.') parser.add_argument('--range-year-begin', dest='range_year_begin', action='store', help='Beginning year for metrics generation.') parser.add_argument('--range-year-end', dest='range_year_end', action='store', help='End year for metrics generation (inclusive).') parser.add_argument('--range-month-begin', dest='range_month_begin', action='store', help='Beginning month for metrics generation.') parser.add_argument('--range-month-end', dest='range_month_end', action='store', help='End month for metrics generation (inclusive).') args = parser.parse_args(sys.argv[1:]) env = args.bootstrap(args.config_uri) site, registry, closer = env['root'], env['registry'], env['closer'] if args.monthly is not None: # in cron jobs, generate metrics for the previous month # this is much easier to specify than on the last possible # moment of the current month now = datetime.now() cur_year, cur_month = now.year, now.month prev_year, prev_month = prior_month(cur_year, cur_month) generate_metrics(site, prev_year, prev_month) elif args.range_year_begin is not None: # when mass generating metrics, provide year/month start/end ranges year_begin, year_end = (int(args.range_year_begin), int(args.range_year_end)) assert year_begin > 2000, 'Invalid begin year' assert year_end > 2000, 'Invalid end year' assert year_end >= year_begin, 'Begin year > End year' month_begin, month_end = (int(args.range_month_begin), int(args.range_month_end)) assert month_begin >= 1 and month_begin <= 12, 'Bad begin month' assert month_end >= 1 and month_end <= 12, 'Bad end month' for year in range(year_begin, year_end 
+ 1): if year == year_begin: monthstart = month_begin else: monthstart = 1 if year == year_end: monthend = month_end else: monthend = 12 for month in range(monthstart, monthend + 1): print 'generating metrics for %s/%s' % (year, month) generate_metrics(site, year, month) transaction.commit() elif args.year: # generate metrics for a particular year/month year, month = int(args.year), int(args.month) assert year > 2000, "Invalid year" assert month >= 1 and month <= 12, "Invalid month" generate_metrics(site, year, month) transaction.commit()