def main():
    parser = argparse.ArgumentParser(description='Transform private data to public data')
    parser.add_argument('-i', dest='ini', help="The path to the ini file",
                        default="/opt/edware/conf/smarter.ini")
    parser.add_argument('-t', dest='tenant', help="target tenant", required=True)
    parser.add_argument('-d', dest='daemon', action='store_true', default=False,
                        help="daemon mode")
    parser.add_argument('-p', dest='pidfile', help="pid file for daemon")
    parser.add_argument('-c', dest='cache_only', action='store_true', default=False,
                        help="recache only, only works without -d option")
    parser.add_argument('-n', dest='no_recache', action='store_true', default=False,
                        help="do not recache")
    args = parser.parse_args()

    config = configparser.ConfigParser()
    config.read(args.ini)

    global private_tenant
    private_tenant = args.tenant
    global public_tenant
    public_tenant = args.tenant
    global no_recache
    no_recache = args.no_recache

    daemon = args.daemon
    pid_file = (args.pidfile if args.pidfile is not None else PID_FILE).format(args.tenant)
    if daemon:
        create_daemon(pid_file)
        run_cron_job(config['app:main'], 'migrate_public.', process)
        while True:
            time.sleep(1000)
    else:
        global cache_only
        cache_only = args.cache_only
        process(config['app:main'])
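# A minimal sketch of the ini keys the daemon branch above is assumed to read:
# run_cron_job() is handed config['app:main'] with the 'migrate_public.' prefix,
# so, following the '<prefix>enable' / '<prefix>schedule.cron.*' convention that
# the 'report.' settings near the end of this module use, the [app:main] section
# would look roughly like this (key names and values are illustrative only, not
# taken from a real deployment):
#
#   [app:main]
#   migrate_public.enable = True
#   migrate_public.schedule.cron.hour = 1
#   migrate_public.schedule.cron.minute = 0
#   migrate_public.schedule.cron.second = 0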
def run_cron_recache(settings):
    '''
    Configure and run cron job to flush and re-cache reports

    :param dict settings: configuration for the application
    '''
    run_cron_job(settings, 'trigger.recache.', precached_task)
def run_cron_prepdf(settings):
    '''
    Configure and run cron job to regenerate pdfs for students with new assessment data

    :param dict settings: configuration for the application
    '''
    run_cron_job(settings, 'trigger.pdf.', prepdf_task)
def main(global_config, **settings):
    """
    This function returns a Pyramid WSGI application.
    """
    initialize_db(settings)
    configure(settings)

    # set beaker cache region
    set_cache_regions_from_settings(settings)

    config = Configurator(settings=settings, root_factory=RootFactory)

    # include edauth. Calls includeme
    config.include(edauth)
    # Pass edauth the roles/permission mapping that is defined in hpz
    edauth.set_roles(RootFactory.__acl__)

    # include and add routes from frs. Calls includeme
    config.include(frs)
    config.include(swi)
    config.include(services)

    config.scan()

    # Clean up old files from HPZ
    run_cron_job(settings, HPZ_EXPIRATION + '.', cleanup)

    logger.info("HPZ Started")
    return config.make_wsgi_app()
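# set_cache_regions_from_settings() above reads standard beaker cache settings
# from the same ini section. An illustrative sketch, where the region names and
# expirations are assumptions and only the cache.regions / cache.<region>.expire
# key format comes from beaker / pyramid_beaker:
#
#   cache.regions = public_data, public_shortlived
#   cache.type = memory
#   cache.public_data.expire = 31556940
#   cache.public_shortlived.expire = 7776000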
def main():
    parser = argparse.ArgumentParser(description='Process udl trigger args')
    parser.add_argument('-p', dest='pidfile', default='/opt/edware/run/edudl2-filegrabber.pid',
                        help="pid file for edudl2 trigger daemon")
    parser.add_argument('-d', dest='daemon', action='store_true', default=False,
                        help="daemon mode for udl trigger")
    parser.add_argument('-i', dest='ini_file', default='/opt/edware/conf/smarter.ini',
                        help="smarter ini file for logging configs")
    args = parser.parse_args()

    file = args.ini_file
    logging.config.fileConfig(file)
    ini_file = read_ini(file)
    config = get_config_from_ini(ini_file, '')

    daemon_mode = args.daemon
    pid_file = args.pidfile
    if daemon_mode:
        create_daemon(pid_file)

    # get file-grabber and reassembly config file
    config_for_grabber = copy.deepcopy(config)
    file_grabber_configs = {}
    for key in config_for_grabber.keys():
        key_values = key.split('.')
        if key_values[0] == FILE_GRABBER:
            key_values.pop(0)
            name = key_values.pop(0)
            file_grabber_config = file_grabber_configs.get(name, {})
            file_grabber_config[FILE_GRABBER + '.' + '.'.join(key_values)] = \
                config['.'.join([FILE_GRABBER, name]) + '.' + '.'.join(key_values)]
            file_grabber_configs[name] = file_grabber_config

    if file_grabber_configs:
        # setup cron
        for file_grabber_config in file_grabber_configs:
            run_cron_job(file_grabber_configs[file_grabber_config], FILE_GRABBER + '.', rsync)
        while True:
            time.sleep(1)
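# The loop above regroups flat keys of the form 'file_grabber.<name>.<option>'
# into one settings dict per <name>, re-prefixed with 'file_grabber.', and then
# schedules one rsync cron job per group. An illustrative sketch of the input
# and output shapes, assuming FILE_GRABBER is the literal 'file_grabber' and
# using hypothetical option names:
#
#   config = {'file_grabber.tenant_a.source_dir': '/landing/a',
#             'file_grabber.tenant_a.schedule.cron.minute': '*/5',
#             'file_grabber.tenant_b.source_dir': '/landing/b'}
#
#   file_grabber_configs == {
#       'tenant_a': {'file_grabber.source_dir': '/landing/a',
#                    'file_grabber.schedule.cron.minute': '*/5'},
#       'tenant_b': {'file_grabber.source_dir': '/landing/b'}}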
def run_cron_sync_file(settings):
    '''
    Configure and run cron job to copy csv files to the UDL landing zone.

    :param dict settings: configuration for the application
    '''
    run_cron_job(settings, 'trigger.assessment.', move_to_staging)
def run_cron_migrate(settings):
    '''
    Configure and run cron job for the periodic data migration task

    :param dict settings: configuration for the application
    '''
    run_cron_job(settings, 'migrate.conductor.', migrate_task)
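# Every helper in this module delegates to run_cron_job(settings, prefix, task),
# whose implementation is not shown here. The sketch below is an assumption about
# how such a helper could be built on APScheduler, reading '<prefix>enable' and
# '<prefix>schedule.cron.*' keys in the way the 'report.' settings at the bottom
# of this file suggest. It is illustrative only, not the project's actual code.
from apscheduler.schedulers.background import BackgroundScheduler


def run_cron_job_sketch(settings, prefix, task):
    '''
    Schedule task as a cron job when '<prefix>enable' is true (sketch only).

    :param dict settings: flat configuration for the application
    :param str prefix:    settings prefix, e.g. 'migrate.conductor.'
    :param task:          callable invoked on the cron schedule with settings
    '''
    if str(settings.get(prefix + 'enable', 'False')).lower() != 'true':
        return None
    cron_prefix = prefix + 'schedule.cron.'
    # collect cron fields such as hour/minute/second into kwargs for APScheduler
    cron_fields = {key[len(cron_prefix):]: value
                   for key, value in settings.items()
                   if key.startswith(cron_prefix)}
    scheduler = BackgroundScheduler()
    scheduler.add_job(task, 'cron', args=[settings], **cron_fields)
    scheduler.start()
    return scheduler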
config_path_file = args.ini_file
hour = args.hour
report_hour = args.report_hour
daemon_mode = args.daemon
pid_file = args.pidfile
if daemon_mode:
    create_daemon(pid_file)

# get udl2 configuration as nested and flat dictionary
udl2_conf, udl2_flat_conf = read_ini_file(config_path_file)
initialize_all_db(udl2_conf, udl2_flat_conf)

email_to = udl2_flat_conf.get(UDL_REPORT_MAIL_TO)
subject = udl2_flat_conf.get(UDL_REPORT_SUBJECT)
email_from = udl2_flat_conf.get(UDL_REPORT_MAIL_FROM)
enabled = udl2_flat_conf.get(UDL_REPORT_ENABLED)
start_date = today = datetime.datetime.today().strftime('%Y-%m-%d %H:00:00')

if enabled is not None and enabled.lower() == 'true':
    generate_report_settings = {'report.enable': 'True',
                                'report.schedule.cron.hour': hour,
                                'report.schedule.cron.minute': '0',
                                'report.schedule.cron.second': '0',
                                'hour': report_hour,
                                'mail_to': email_to,
                                'subject': subject,
                                'mail_from': email_from}
    run_cron_job(copy.deepcopy(generate_report_settings), 'report.', generate_report_for_cron)
    while True:
        time.sleep(1)