def run_each_job(conf_dict, uploadurl):
    if not os.path.exists(CYCLIC_PATH):
        os.makedirs(CYCLIC_PATH, 0o2775)

    # split each cyclic job into unit job files
    cyclic_jobs_path = get_cyclic_jobs(conf_dict)
    split_job_params = ' '.join(cyclic_jobs_path)
    split_cmd = "%s split -j %s -o %s" % (SRC + '/bin/pst',
                                          split_job_params, CYCLIC_PATH)
    split_output = subprocess.check_output(split_cmd, shell=True)
    split_jobs_path = [split_file.split(' => ')[1]
                       for split_file in split_output.split('\n') if split_file]

    # run each split job
    sync_dir = conf_dict.get('sync', {}).get('dir')
    sync_list = conf_dict.get('sync', {}).get('jobs')
    wait_timeout = conf_dict.get('sync', {}).get('timeout')
    if sync_dir and sync_list and 'all' in sync_list:
        # hold one lock around the whole run; create it before the try block
        # so the KeyboardInterrupt handler can always reference it
        lock = LockFile(sync_dir + os.sep + 'all')
        try:
            lock.acquire()
            for unit_jobfile in split_jobs_path:
                run_cmd = "%s run -j %s -u %s" % (SRC + '/bin/pst',
                                                  unit_jobfile, uploadurl)
                run_shell_cmd(run_cmd)
                print("Remove: %s" % unit_jobfile)
                os.remove(unit_jobfile)
            lock.release()
        except KeyboardInterrupt:
            if lock.is_locked():
                lock.release()
            raise
    else:
        for unit_jobfile in split_jobs_path:
            run_cmd = "%s run -j %s -u %s" % (SRC + '/bin/pst',
                                              unit_jobfile, uploadurl)
            # take a per-testcase lock to synchronize runners, e.g. when
            # docker and host tests run at the same time
            testcase_name = common.load_conf(unit_jobfile).get('testcase')
            if sync_dir and sync_list and testcase_name in sync_list:
                lock = LockFile(sync_dir + os.sep + testcase_name)
                try:
                    lock.acquire(timeout=wait_timeout)
                    run_shell_cmd(run_cmd)
                    lock.release()
                except LockTimeout as e:
                    print(e)
                except KeyboardInterrupt:
                    if lock.is_locked():
                        lock.release()
                    raise
            else:
                run_shell_cmd(run_cmd)
            print("Remove: %s" % unit_jobfile)
            os.remove(unit_jobfile)
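# A minimal, self-contained sketch of the cross-process locking pattern used
# above, assuming the third-party `lockfile` package; the lock path and the
# run_exclusively() helper are illustrative, not part of this repo. LockFile
# also works as a context manager, which releases the lock even if the body
# raises.
from lockfile import LockFile, LockTimeout

def run_exclusively(lock_path, fn, timeout=60):
    lock = LockFile(lock_path)
    try:
        # block until the lock is free, or give up after `timeout` seconds
        lock.acquire(timeout=timeout)
    except LockTimeout as e:
        print(e)
        return
    try:
        fn()
    finally:
        lock.release()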
def main():
    # get parameters from the config file
    conf = common.load_conf(os.path.join(
        os.path.dirname(__file__), 'conf/db_script.conf'))

    # create the argument parser
    arg_parser = argparse.ArgumentParser(
        prog='dbscript',
        description='a set of db scripts to rebase/update db schema')

    # create sub-commands
    sub_parsers = arg_parser.add_subparsers(
        dest='command', help='sub command help')

    # sub-command: base
    sub_parser = sub_parsers.add_parser(
        'base', help='create db baseline based on the target database')
    sub_parser.add_argument('-o', '--output', help='the output directory')

    # sub-command: init
    sub_parser = sub_parsers.add_parser(
        'init', help='initialize database according to the db baseline')
    sub_parser.add_argument('-i', '--input', help='the input directory')

    # sub-command: update
    sub_parser = sub_parsers.add_parser(
        'update', help='update database schema')
    sub_parser.add_argument('-v', '--version',
                            help='the db script version to be run',
                            default=None)

    # common args
    arg_parser.add_argument('-H', '--host', help='database host',
                            default=conf.get('db', 'host'))
    arg_parser.add_argument('-P', '--port', help='database port',
                            default=conf.get('db', 'port'))
    arg_parser.add_argument('-u', '--user', help='database user',
                            default=conf.get('db', 'user'))
    arg_parser.add_argument('-d', '--database', help='database schema',
                            default=conf.get('db', 'database'))
    arg_parser.add_argument('-s', '--script_home', help='db script home',
                            default=conf.get('db', 'script_home'))

    args = arg_parser.parse_args()
    logger.debug(vars(args))

    if args.command not in ('base', 'init', 'update'):
        logger.error("unknown command: {0}".format(args.command))
        arg_parser.print_help()
        return

    db_password = getpass.getpass("Please input db password:")
    db_util = common.DBUtil(args.host, args.port, args.user,
                            db_password, args.database)
    db_util.connect()
    script_home = args.script_home

    if args.command == 'base':
        db_create_baseline.execute(db_util, script_home)
    elif args.command == 'init':
        db_init.execute(db_util, script_home)
    elif args.command == 'update':
        db_update.execute(db_util, script_home, args.version)
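# Hypothetical invocations accepted by the parser built in main(); note that
# the common options live on the top-level parser, so they must precede the
# subcommand (host/user names below are illustrative, not from the source):
#   dbscript base -o ./baseline
#   dbscript init -i ./baseline
#   dbscript -H db.example.com -u admin update -v 20240101
#
# A minimal, runnable sketch of the same argparse subcommand pattern:
import argparse

_parser = argparse.ArgumentParser(prog='demo')
_subs = _parser.add_subparsers(dest='command')
_subs.add_parser('base').add_argument('-o', '--output')
_args = _parser.parse_args(['base', '-o', '/tmp/out'])
assert _args.command == 'base' and _args.output == '/tmp/out'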
def __init__(self, *args, **kwargs):
    """
    Build a new Clitter instance.

    Options passed in will override a ~/.clitterrc
    """
    self._conf = common.load_conf(kwargs.get('options'))
    self._api = common.load_api(self._conf)
    self._history = common.load_history(self._conf)
    for channel in ('public', 'friends'):
        self._fetch(channel)
    self.colors = self._color_gen()
    self.people_colors = {}
def aozan_main():
    """Aozan main method."""

    # Define command line parser
    parser = OptionParser(usage='usage: ' + Globals.APP_NAME_LOWER_CASE +
                          '.sh [options] conf_file')
    parser.add_option('-q', '--quiet', action='store_true', dest='quiet',
                      default=False, help='quiet')
    parser.add_option('-v', '--version', action='store_true', dest='version',
                      help='Aozan version')
    parser.add_option('-e', '--exit-code', action='store_true', dest='exit_code',
                      help='Return a non-zero exit code if a step fails')
    parser.add_option('-c', '--conf', action='store_true', dest='conf',
                      help='Print the default Aozan configuration, loaded before the configuration file')

    # Parse command line arguments
    (options, args) = parser.parse_args()

    # Print the current Aozan version
    if options.version:
        print Globals.WELCOME_MSG
        sys.exit(0)

    # Print the default configuration
    if options.conf:
        print common.print_default_configuration()
        sys.exit(0)

    # If no argument, print usage
    if len(args) < 1:
        parser.print_help()
        sys.exit(1)

    # Create the configuration object
    conf = LinkedHashMap()

    # Set the default values in the configuration object
    common.set_default_conf(conf)

    # Use the default (US) locale
    Locale.setDefault(Globals.DEFAULT_LOCALE)

    # Check that the OS is Linux
    if not SystemUtils.isLinux():
        sys.stderr.write('ERROR: Aozan cannot be executed. Operating system is not Linux\n')
        sys.exit(1)

    # Check that the configuration file exists
    conf_file = args[0]
    if not os.path.isfile(conf_file):
        sys.stderr.write('ERROR: Aozan cannot be executed. Configuration file is missing: ' +
                         conf_file + '\n')
        sys.exit(1)

    # Load the Aozan conf file
    common.load_conf(conf, conf_file)

    # Stop here if Aozan is not enabled
    if common.is_conf_value_defined(AOZAN_ENABLE_KEY, 'false', conf):
        sys.exit(0)

    # Init logger
    try:
        Common.initLogger(conf[AOZAN_LOG_PATH_KEY], conf[AOZAN_LOG_LEVEL_KEY])
    except AozanException as exp:
        common.exception_msg(exp, conf)
#!/usr/bin/env python
import os
import sys
import subprocess

SRC = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                   os.pardir))
LIB_PATH = SRC + '/lib'
sys.path.insert(0, LIB_PATH)
import common

conf_file = SRC + '/etc/pst_server.yaml'
conf_dict = common.load_conf(conf_file)

# let environment variables override the YAML defaults; note that values
# coming from the environment are strings, while the fallbacks keep their
# Python types
conf_dict['pst_server']['port'] = os.getenv('PST_SERVER_PORT', 8080)
conf_dict['influxdb']['ip'] = os.getenv('INFLUXDB_HOST', '127.0.0.1')
conf_dict['influxdb']['port'] = os.getenv('INFLUXDB_PORT', 8086)
conf_dict['influxdb']['user'] = os.getenv('INFLUXDB_USER', 'root')
conf_dict['influxdb']['pass'] = os.getenv('INFLUXDB_PASS', 'root')
conf_dict['influxdb']['dbname'] = os.getenv('INFLUXDB_DBNAME', 'pst_results')

common.save_yaml(conf_file, conf_dict)
common.unify_localtime()
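# `common` is this repo's own helper module and is not shown here; a plausible
# minimal implementation of the two YAML helpers used above, assuming PyYAML,
# might look like the following (an illustrative sketch, not the actual code):
import yaml

def load_conf(path):
    # parse the YAML config file into a plain dict
    with open(path) as f:
        return yaml.safe_load(f)

def save_yaml(path, data):
    # write the (possibly modified) dict back out as YAML
    with open(path, 'w') as f:
        yaml.safe_dump(data, f, default_flow_style=False)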
common.unify_localtime()
conf_dict = common.load_conf(SRC + '/etc/autorun_conf.yaml')
uploadurl = get_upload_url(conf_dict)

# re-run the cyclic jobs every conf_dict["runtime"] seconds
schedule.every(conf_dict["runtime"]).seconds.do(run_each_job, conf_dict, uploadurl)
while True:
    schedule.run_pending()
    time.sleep(1)
import os
import time
import logging

import common

logging.basicConfig(level=logging.DEBUG, filename='daemon.log', filemode='a')

CONF = common.load_conf(None)
HISTORIES = common.load_history(CONF)
API = common.load_api(CONF)


def running():
    # the daemon is considered alive while the run file exists
    try:
        os.stat(common.RUN_FILE)
        return True
    except OSError:
        return False


def _fetch(channel):
    try:
        if channel == "public":
            return API.GetPublicTimeline()
        elif channel == "friends":
            return API.GetFriendsTimeline(CONF.username)
    except Exception:
        return None


def update():
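# `common.RUN_FILE` is defined elsewhere in this repo; running() treats the
# file's mere existence as an "alive" flag. A minimal sketch of the matching
# start/stop side, under that assumption (the path below is illustrative and
# the helpers are hypothetical, not part of the repo):
import os

def mark_running(run_file='/tmp/clitter.run'):
    # create the flag file; a real daemon would typically write its pid here
    open(run_file, 'w').close()

def mark_stopped(run_file='/tmp/clitter.run'):
    try:
        os.remove(run_file)
    except OSError:
        pass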