def main():
    """Scan a workspace of '*.params' experiment files, derive each job's
    status from its log file, and print a per-status summary table.

    Side effects: reads the filesystem under args.workspace, renders a
    progress bar, and prints the tally to stdout.
    """
    import collections  # moved here from mid-function; kept local as before

    # Read arguments and configuration.
    args = get_args()
    if not args.verbose:
        pass  # (verbose flag only affects the per-job dump below)
    if not os.path.isdir(args.workspace):
        # Explicit check instead of `assert`: asserts vanish under `python -O`.
        raise SystemExit('not a directory: ' + args.workspace)
    config = get_conf(args)

    # Collect the parameter files once instead of scanning the directory twice.
    param_files = [f for f in os.listdir(args.workspace)
                   if fnmatch.fnmatch(f, '*.params')]

    # Maps job number -> status.
    job_status = {}

    bar = Bar('Processing', max=len(param_files))
    for f in param_files:
        expt_params = os.path.join(args.workspace, f)
        # Result was never used (`prm_str`); call kept in case parsing has
        # side effects -- TODO confirm get_param_str is pure and drop it.
        get_param_str(expt_params)
        stem, _ = os.path.splitext(f)
        job_num = get_job_num(stem)
        expt_dir = os.path.join(args.workspace, stem)
        # The first '*.txt' file in the experiment directory is the job log.
        for f2 in os.listdir(expt_dir):
            if fnmatch.fnmatch(f2, '*.txt'):
                log = os.path.join(expt_dir, f2)
                job_status[job_num] = get_job_status(log, config)
                break
        bar.next()
    # End the progress bar.
    bar.finish()

    if args.verbose:
        for n in job_status:
            # f-string also works when job numbers are not strings
            # (the old 'n='+n concatenation raised TypeError for non-str).
            print(f'n={n} status={job_status[n]}')

    # Summary: tally jobs per status.  (Was wrapped in a dead `if True:`.)
    print('JOB STATUS')
    print('--------------------------')
    stats = collections.Counter(job_status.values())
    for k in stats:
        print(f'{k} : {stats[k]}')
    print('--------------------------')
    print(f'TOTAL: {sum(stats.values())}')
def file(file):
    """Build an empty response that makes nginx serve *file* internally.

    nginx intercepts the X-Accel-Redirect header and streams the file
    from its internal /sendfile location; the Flask response itself
    carries no body.
    """
    # NOTE(review): `file` is joined into the redirect path unchecked --
    # a value containing '..' could escape /sendfile.  Confirm the nginx
    # internal-location config blocks traversal, or validate/normalize here.
    base = os.path.basename(file)
    # (Removed: downloadsDir/absPath were computed from the rtorrent config
    # but never used -- a dead config read.)
    sendfile_path = os.path.join('/sendfile', file)
    response = make_response()
    # Suggest a download filename; empty Content-Type lets nginx choose.
    response.headers['Content-Disposition'] = 'filename="' + base + '"'
    response.headers['Content-Type'] = ''
    response.headers['X-Accel-Redirect'] = sendfile_path
    return response
def __init__(self, queue_name, aws_access_key_id=None, aws_secret_access_key=None, region=None):
    """Set up SQS access for *queue_name*.

    Credentials and region fall back to the values in the shared
    configuration whenever the caller passes a falsy value (None, '').
    """
    confdata = util.get_conf()
    # Falsy arguments are replaced by configured defaults, mirroring the
    # original `value or confdata[...]` behaviour exactly.
    if not aws_access_key_id:
        aws_access_key_id = confdata['access_key_id']
    if not aws_secret_access_key:
        aws_secret_access_key = confdata['secret_access_key']
    if not region:
        region = confdata['region']
    # Both the low-level client and the high-level resource are built
    # from the same credential set.
    credentials = dict(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        region_name=region,
    )
    self.sqs_client = boto3.client('sqs', **credentials)
    self.sqs_resource = boto3.resource('sqs', **credentials)
    self.queue_name = queue_name
    self.region = region
def main():
    """Entry point for the brute grid search: parse arguments, confirm with
    the user, then hand the parameter grid to doit.

    Side effects: prints to stdout, may prompt interactively, and exits
    the process with doit's return code.
    """
    # Get command line arguments; unrecognised ones land in `leftovers`.
    args, leftovers = get_brute_args()

    # Pop the leading script arguments from leftovers.
    args.brute_script_arg = []
    while leftovers and is_script_arg(leftovers[0]):
        args.brute_script_arg.append(leftovers.pop(0))

    # Read the configuration file, if any.
    config = get_conf(args)

    # Normalise workspace and script to absolute paths.
    args.brute_dir = os.path.abspath(args.brute_dir)
    print(f'workspace = {args.brute_dir}')
    args.brute_script = os.path.abspath(args.brute_script)
    print(f'script = {args.brute_script}')

    # Explicit check instead of `assert`: asserts vanish under `python -O`.
    if not os.path.isfile(args.brute_script):
        raise SystemExit('script argument is not a file')

    # The remaining leftovers define the product of job parameters.
    params = get_job_params(leftovers)
    print(f'{len(params)} tasks')

    if not args.brute_no_prompt:
        if not yesno('Submit?'):
            sys.exit(0)

    # Run the grid search.  Inputs are set on the MyLoader *class* (as in
    # the original) -- presumably the loader reads them at class level;
    # TODO confirm before moving them onto the instance.
    loader = MyLoader()
    MyLoader.args = args
    MyLoader.params = params
    MyLoader.config = config
    sys.exit(DoitMain(loader).run(['--backend', 'json']))
def get_UTP_path(self):
    """Return the UTP path looked up from the common configuration."""
    # "utp" is the configuration group that holds the UTP settings.
    return util.get_conf(self.conf_path, "utp", self.utp_name)
if not os.path.exists(dir_path): os.makedirs(dir_path) # 实例化一个 rotate file 的处理器,让日志文件旋转生成 fh = logging.handlers.RotatingFileHandler(filename=logfile, mode='a', maxBytes=max_size, backupCount=backup_count, encoding='utf-8') fh.setLevel(level[file_level]) fh.setFormatter(formatter) log.addHandler(fh) if console: # 实例化一个流式处理器,将日志输出到终端 ch = logging.StreamHandler() ch.setLevel(level[console_level]) ch.setFormatter(formatter) log.addHandler(ch) return log if __name__ == '__main__': from Doctopus.utils.util import get_conf conf = get_conf('../conf/conf.toml')['log_configuration'] log = setup_logging(conf) log.info("测试脚本") log.error("错误信息")
# -*- coding: utf-8 -*- import time import subprocess import psutil import traceback from util import get_conf from logging_init import setup_logging conf = get_conf('./conf/conf.toml') log = setup_logging(conf['log']) class Monitor(object): def __init__(self, conf): #log.debug(conf) self.process_name = conf['process'] self.memory_limit = conf['memory_limit'] self.restart_cmd = conf['restart_cmd'] self.process = self.get_process(conf['process']) def get_process(self, name='influxdb'): """ get specified process """ self.process_id = self.get_process_pid(name) return psutil.Process(self.process_id) def get_process_pid(self, name='influxdb'): """ get specified process id if it is alived
import os.path
import time
import rtorrent
import util
import uwsgi
import gevent

# NOTE(review): Flask, g, timedelta, sqlite3 and Bcrypt are referenced below
# but their imports are outside this chunk.

app = Flask(__name__)
# Cap request bodies at 10 MiB.
app.config['MAX_CONTENT_LENGTH'] = 10 * 1024 * 1024
# Sessions stay valid for four weeks.
app.permanent_session_lifetime = timedelta(weeks=4)
bcrypt = Bcrypt(app)
# Session-signing key comes from the shared configuration.
app.secret_key = util.get_conf()['carson']['secret_key']


def get_db():
    """Return the per-app-context SQLite connection, opening it on first use."""
    db = getattr(g, '_database', None)
    if db is None:
        db = g._database = sqlite3.connect('carson.db')
        # Rows support column access by name.
        db.row_factory = sqlite3.Row
    return db


@app.teardown_appcontext
def close_connection(exception):
    """Close the app-context database connection, if one was opened."""
    db = getattr(g, '_database', None)
    if db is not None:
        db.close()


@app.before_request