def configure(conf):
    """Configure the build for GLUT.

    On darwin, requires the GLUT framework and its header. Elsewhere,
    requires the GL/glut.h header plus either the platform glut library
    or the static freeglut library.

    Raises Exception when GLUT cannot be found; returns True otherwise.
    """
    env = conf.env
    config.check_home(conf, 'GLUT')
    if env['PLATFORM'] == 'darwin':
        config.configure('osx', conf)
        if not (conf.CheckOSXFramework('GLUT') and
                conf.CheckCHeader('GLUT/glut.h')):
            # `raise Exception, '...'` is Python-2-only syntax; the call
            # form below is equivalent and valid on both Python 2 and 3.
            raise Exception('Need GLUT')
    else:
        if env['PLATFORM'] == 'win32':
            freeglutlib = 'freeglut_static'
            glutlib = 'glut32'
            env.AppendUnique(CPPDEFINES=['FREEGLUT_STATIC'])
        else:
            freeglutlib = glutlib = 'glut'
        config.require_header(conf, 'GL/glut.h')
        if not (config.check_lib(conf, freeglutlib) or
                config.check_lib(conf, glutlib)):
            raise Exception('Need glut')
    return True
def run(app_name, worker):
    """Parse command-line arguments and drive the *worker* daemon for *app_name*.

    Supports the actions start/stop/restart/run; with `run`, an optional
    -m/--method names a daemon method to invoke instead of the default loop.
    """
    parser = argparse.ArgumentParser(description="Admin Server Help")
    parser.add_argument(
        "-c", "--config", type=str, nargs="*",
        help="List of configuration files to import (python modules)")
    parser.add_argument("-m", "--method", help="all, invoices or the like")
    parser.add_argument("action", choices=["start", "stop", "restart", "run"])
    cmd_args = parser.parse_args()

    config.configure(cmd_args.config or [])
    import logging_config
    logging_config.configure(app_name)

    pid_path = os.path.join(config.pid_dir, "{}.pid".format(app_name))
    daemon = worker(pid_path)

    action = cmd_args.action
    if action == "start":
        daemon.start()
    elif action == "stop":
        daemon.stop()
    elif action == "restart":
        daemon.restart()
    elif action == "run":
        if not cmd_args.method:
            daemon.run()
        else:
            target = getattr(daemon, cmd_args.method, None)
            if target is None:
                logger.critical("method unknown")
            else:
                target()
        # foreground `run` cleans up its daemon before returning
        daemon.close()
def main():
    '''monica helps you order food from the timeline'''
    arguments = docopt(__doc__, version=__version__)
    if arguments['configure'] and flag:
        configure()
    if arguments['cuisine']:
        if arguments['list']:
            cuisine('list')
        else:
            cuisine(arguments['<cuisine-id>'])
    elif arguments['surprise']:
        surprise()
    elif arguments['reviews']:
        reviews(arguments['<restaurant-id>'])
    elif arguments['search']:
        search(arguments['QUERY'])
    elif arguments['budget']:
        try:
            budget(float(arguments['<budget>']))
        except ValueError:
            # narrowed from a bare `except:` so only a non-numeric budget
            # is reported; real errors (incl. KeyboardInterrupt) propagate
            print('Budget should be a number!')
    elif arguments['restaurant']:
        restaurant(arguments['<restaurant-id>'])
    else:
        print(__doc__)
def processData(year, domain, embedding):
    """Entry point of the preprocessing pipeline: parse, clean, vocab-build
    and export the train and test splits, then derive the final embedding
    dictionary, sampled splits and aspect-relative word positions."""
    # only the 2014 format has a parser registered; other years KeyError here
    PARSER = {'2014': parse2014}
    parser = PARSER[year]
    # process train data
    aim = 'train'
    cf.configure(year, domain, embedding, aim)  # cf globals (FOLDER, DATA_FILE) depend on this call
    trainDataPath = cf.FOLDER + cf.DATA_FILE
    trainData = parser(trainDataPath, year, domain, aim, True)
    # use tokenize instead of clean up for baselines
    trainData['text'] = cleanup(trainData['text'])
    trainData['aspect'] = cleanup(trainData['aspect'])
    trainVocabulary = createTempVocabulary(trainData['text'], aim)
    writeCSV(trainData, cf.FOLDER + 'rest_train_2014_processed.csv')
    # # process test data
    aim = 'test'
    cf.configure(year, domain, embedding, aim)  # reconfigure cf for the test split
    testDataPath = cf.FOLDER + cf.DATA_FILE
    testData = parser(testDataPath, year, domain, aim, True)
    testData['text'] = cleanup(testData['text'])
    testData['aspect'] = cleanup(testData['aspect'])
    testVocabulary = createTempVocabulary(testData['text'], aim)
    writeCSV(testData, cf.FOLDER + 'rest_test_2014_processed.csv')
    # export the final embedding dictionary by combining the dict from train and test data
    createVocabulary(
        cf.FOLDER + '%s_filtered_train.txt' % cf.WORD2VEC_FILE[0:-4],
        cf.FOLDER + '%s_filtered_test.txt' % cf.WORD2VEC_FILE[0:-4],
        cf.EMBEDDING_PATH)  # [0:-4] strips the file extension (e.g. '.txt') — TODO confirm
    # sampling from the processed train and test data
    splitData()
    # calculate word positions respect to aspect term(s)
    getPositions()
def main():
    '''monica helps you order food from the timeline'''
    arguments = docopt(__doc__, version=__version__)
    if arguments['configure'] and flag:
        configure()
    if arguments['cuisine']:
        if arguments['list']:
            cuisine('list')
        else:
            cuisine(arguments['<cuisine-id>'])
    elif arguments['surprise']:
        surprise()
    elif arguments['reviews']:
        reviews(arguments['<restaurant-id>'])
    elif arguments['search']:
        search(arguments['QUERY'])
    elif arguments['budget']:
        try:
            budget(float(arguments['<budget>']))
        except ValueError:
            # narrowed from a bare `except:`; py2 `print` statement replaced
            # by the call form, valid on both Python 2 and 3
            print('Budget should be a number!')
    elif arguments['restaurant']:
        restaurant(arguments['<restaurant-id>'])
    else:
        print(__doc__)
def configure(conf):
    """Configure the build for OpenGL/GLU.

    darwin uses the OpenGL framework; other platforms link the platform
    GL and GLU libraries. Raises Exception when OpenGL is missing.
    """
    env = conf.env
    config.check_home(conf, 'GL')
    if env['PLATFORM'] == 'darwin':
        config.configure('osx', conf)
        if not (conf.CheckOSXFramework('OpenGL') and
                conf.CheckCHeader('OpenGL/gl.h')):
            # py2-only `raise Exception, '...'` replaced by the call form
            raise Exception('Need OpenGL')
    else:
        if env['PLATFORM'] == 'win32':
            glulib = 'glu32'
            gllib = 'opengl32'
        else:
            glulib = 'GLU'
            gllib = 'GL'
        config.require_lib(conf, gllib)
        config.require_lib(conf, glulib)
    return True
def configure(conf):
    """Configure the build for FreeType2 (and its zlib dependency).

    Include/lib paths come from FREETYPE2_INCLUDE / FREETYPE2_LIBPATH
    env vars when set, otherwise from `freetype-config --cflags`.
    """
    env = conf.env
    # dict.has_key() was removed in Python 3; `in` works on both
    if 'FREETYPE2_INCLUDE' in os.environ:
        env.AppendUnique(CPPPATH=os.environ['FREETYPE2_INCLUDE'].split(os.pathsep))
    else:
        try:
            env.ParseConfig('freetype-config --cflags')
        except OSError:
            # freetype-config not installed; fall back to default paths
            pass
    if 'FREETYPE2_LIBPATH' in os.environ:
        env.AppendUnique(LIBPATH=[os.environ['FREETYPE2_LIBPATH']])
    if env['PLATFORM'] == 'darwin':
        config.configure('osx', conf)
        if not conf.CheckOSXFramework('CoreServices'):
            raise Exception('Need CoreServices framework')
        if not conf.CheckOSXFramework('ApplicationServices'):
            raise Exception('Need ApplicationServices framework')
    config.require_header(conf, 'ft2build.h')
    config.require_lib(conf, 'freetype')
    config.check_home(conf, 'zlib', lib_suffix='')  # TODO Hack!
    config.require_lib(conf, 'z')
    return True
def _main():
    """Configure the application, load every model and run generation on them."""
    import loader
    config.configure()
    return generate(loader.load_models())
def configure(conf):
    """Configure the build for GLUT (duplicate of the sibling GLUT check).

    Raises Exception when GLUT cannot be found; returns True otherwise.
    """
    env = conf.env
    config.check_home(conf, 'GLUT')
    if env['PLATFORM'] == 'darwin':
        config.configure('osx', conf)
        if not (conf.CheckOSXFramework('GLUT') and
                conf.CheckCHeader('GLUT/glut.h')):
            # py2-only `raise Exception, '...'` replaced by the call form
            raise Exception('Need GLUT')
    else:
        if env['PLATFORM'] == 'win32':
            freeglutlib = 'freeglut_static'
            glutlib = 'glut32'
            env.AppendUnique(CPPDEFINES=['FREEGLUT_STATIC'])
        else:
            freeglutlib = glutlib = 'glut'
        config.require_header(conf, 'GL/glut.h')
        if not (config.check_lib(conf, freeglutlib) or
                config.check_lib(conf, glutlib)):
            raise Exception('Need glut')
    return True
def configure(self, args):
    """Dispatch to the appropriate configure entry point.

    A package argument wins; otherwise a save path selects configure(path=...);
    with neither, the default configure() runs.
    """
    if args.package:
        configure_package(args.package)
    elif args.save:
        configure(path=args.save)
    else:
        configure()
# NOTE(review): this function is corrupted — runs of `'******` appear where a
# secret-scrubbing tool redacted the raw_input()/query calls, so the code no
# longer parses. Original tokens preserved verbatim below; reconstruct the
# prompts (username/password reads and the plugin-registered check) from
# upstream version history before touching this.
def register(): print 'registering what.cd plugin' sb_dir = qc.query('scatterbrainz_dir') what_dir =os.path.join(sb_dir,'external/dbs/what') if not os.path.isdir(what_dir): os.mkdir(what_dir) print "Register what.cd for which SB user?" sb_user = raw_input('username: '******'what',sb_user): print "Plugin already registered for " + sb_user print "... exiting" exit(1) print "\nWhat.cd username?" what_user = raw_input('username: '******'password: '******'...configuring' wc.configure(what_user,what_pass,what_dir,sb_user) print '...initializing' wc.init(sb_user) import dbs.config.prefs as prefs w_user = prefs.readPref('what_user',sb_user) print 'Success! set up what.cd pluging for ' +sb_user+' with what.cd account: ' + w_user pc.register('what',sb_user) exit(0)
def test_ctmrg_AKLT_4SITE(self):
    """Run CTMRG on a 4-site AKLT S=2 state and check the energy and all
    magnetization/spin observables vanish (within eps) as expected for AKLT."""
    cfg.configure(args)
    torch.set_num_threads(args.omp_cores)
    model = akltS2.AKLTS2()

    def lattice_to_site(coord):
        # map an arbitrary lattice coordinate onto the 2x2 unit cell;
        # the +abs(coord)*2 shift keeps the modulo non-negative
        vx = (coord[0] + abs(coord[0]) * 2) % 2
        vy = (coord[1] + abs(coord[1]) * 2) % 2
        return (vx, vy)

    state = read_ipeps(args.instate, vertexToSite=lattice_to_site)

    def ctmrg_conv_f(state, env, history, ctm_args=cfg.ctm_args):
        # convergence check: total 2-norm distance between successive sets
        # of 2x1 and 1x2 reduced density matrices over all sites
        with torch.no_grad():
            if not history:
                history = dict({"log": []})
            dist = float('inf')
            list_rdm = []
            for coord, site in state.sites.items():
                rdm2x1 = rdm.rdm2x1(coord, state, env)
                rdm1x2 = rdm.rdm1x2(coord, state, env)
                list_rdm.extend([rdm2x1, rdm1x2])
            if len(history["log"]) > 1:
                dist = 0.
                for i in range(len(list_rdm)):
                    dist += torch.dist(list_rdm[i], history["rdm"][i], p=2).item()
            history["rdm"] = list_rdm
            history["log"].append(dist)
            if dist < ctm_args.ctm_conv_tol:
                log.info({
                    "history_length": len(history['log']),
                    "history": history['log']
                })
                return True, history
            return False, history

    ctm_env_init = ENV(args.chi, state)
    init_env(state, ctm_env_init)
    ctm_env_init, *ctm_log = ctmrg.run(state, ctm_env_init, conv_check=ctmrg_conv_f)
    e_curr0 = model.energy_2x1_1x2(state, ctm_env_init)
    obs_values0, obs_labels = model.eval_obs(state, ctm_env_init)
    obs_dict = dict(zip(obs_labels, obs_values0))
    # AKLT is the exact ground state: energy and on-site observables ~ 0
    eps = 1.0e-12
    self.assertTrue(e_curr0 < eps)
    for coord, site in state.sites.items():
        self.assertTrue(obs_dict[f"m{coord}"] < eps, msg=f"m{coord}")
        for l in ["sz", "sp", "sm"]:
            self.assertTrue(abs(obs_dict[f"{l}{coord}"]) < eps, msg=f"{l}{coord}")
def main(args): # Create PulsON440 object radar = PulsON440() #hand radar object of to configurator config.RADAR_OBJ = radar #configure config.configure(args) """
def config_controller(vmlist):
    """Ensure every VM in *vmlist* is configured.

    VMs listed in ./roots ("hacked") get a timestamp check before being
    (re)configured; others are configured unless already recorded in
    ./config/config_list. Newly configured VMs are appended to that list.

    Returns 1 on full success, 0 when a configure() call fails, or a
    "change sysmap for <vm>" message when a timestamp check fails.
    """
    # `with` blocks fix the original's leaked ./roots handle
    with open("./roots", "r") as roots_file:
        hacked_vm = roots_file.readlines()

    config_final = []
    # BUG FIX: "a+" positions the file at EOF, so the original readlines()
    # always returned [] and the already-configured list was never seen.
    with open("./config/config_list", "a+") as configured:
        configured.seek(0)
        config_list = [entry.split()[0] for entry in configured.readlines() if entry.split()]

    for vm in vmlist:
        if vm in hacked_vm:
            # extract timestamp recorded next to the VM name
            _time = None
            for v in hacked_vm:
                temp = v.split()
                if vm in v:
                    _time = temp[1]
            result = check_time_stamp(vm, _time)
            if result:
                if configure(vm):
                    config_final.append(vm)
                    continue
                else:
                    return 0
            else:
                return "change sysmap for %s" % vm
        else:
            # not hacked: skip if already configured, otherwise configure now
            if vm in config_list:
                config_final.append(vm)
                continue
            if configure(vm):
                config_final.append(vm)
                continue
            return 0

    if len(config_final) == len(vmlist):
        with open("./config/config_list", "a+") as configured:
            for vm in config_final:
                if vm in config_list:
                    continue
                configured.write("\n" + vm + " " + strftime("%Y-%m-%d %H:%M:%S", gmtime()))
        return 1
def connect():
    """Bootstrap a session: create config if missing, collect data, then
    launch the external command and server before exiting."""
    # NOTE(review): grouping reconstructed from a collapsed one-liner —
    # presumably only the print/configure pair is guarded by the missing
    # authkey check; confirm against the original formatting.
    if not path.exists("authkey.txt"):
        print("\n")
        configure()
    collect()
    os.system("code")  # shells out; command name "code" — TODO confirm intent
    time.sleep(3)      # give the launched process time to come up
    open_serv()
    sys.exit()
def main():
    """Run prediction for every configured exchange over the prepared
    date window, skipping exchanges that already have a prediction."""
    config.configure()
    start_date, end_date = util.prepareDate()
    print("Started program from = %s - To = %s" % (start_date, end_date))
    for exchange in config.get('exchanges'):
        if is_predicted(exchange, end_date):
            print('Allready Predicted for ', exchange)
            continue
        #crowler.load(exchange, start_date, end_date)
        result, date = rigration.start(exchange, start_date, end_date)
        util.write_result(exchange, result, date)
def main():
    """Log in (creating the config on first run) and exercise example API calls."""
    # first run: no config file yet, so interactively create one
    if not os.path.isfile(config.config_name):
        config.configure()
    username, password = config.get_login()
    api.login(username, password)
    # examples
    # -----------------------------------------------------------------
    api.get_userinfo(16251, "suomynona", 150863)  # supports usernames and userids
    api.get_items(50000, 50001)  # returns inventid 50000,50001
    api.get_inventory("hampa", offset=1)  # returns 2nd page of hampa's inv
    api.send_tc("example", 999999999)
    api.send_items("example", 50000, 50001)
def cli(ctx):
    """Click group entry point: show help with no subcommand, otherwise
    (except for `config`) verify Spotify has an active, playing device."""
    configure()
    if not ctx.invoked_subcommand:
        # behave as if --help
        click.echo(ctx.command.get_help(ctx))
    elif ctx.invoked_subcommand != "config":
        # `config` must work without a client; everything else needs one
        sp = get_spotify_client()
        if not sp.devices().get("devices"):
            ctx.fail(
                "No device detected! Try opening spotify on your phone or computer."
            )
        elif not sp.current_playback():
            ctx.fail(
                "Your spotify app is currently inactive. Try issuing a command with it first."
            )
def node_mapping(self, vnr, algorithm, arg):
    """Solve the node-mapping problem for virtual network request *vnr*.

    Non-RLNL algorithms delegate to a lazily configured agent; RLNL walks
    the NodeEnv with a greedy node policy. Returns {virtual_id: substrate_id}.
    """
    print("node mapping...")
    node_map = {}
    # on first use, the selected algorithm has to be configured
    if algorithm != 'RLNL':
        if self.agent is None:
            self.agent = configure(self, algorithm, arg)
        node_map = self.agent.run(self, vnr)
    else:
        nodeenv = NodeEnv(self.net)
        nodeenv.set_vnr(vnr)
        nodep = nodepolicy(nodeenv.action_space.n, nodeenv.observation_space.shape)
        nodeobservation = nodeenv.reset()
        for vn_id in range(vnr.number_of_nodes()):
            sn_id = nodep.choose_max_action(nodeobservation, nodeenv.sub,
                                            vnr.nodes[vn_id]['cpu'],
                                            vnr.number_of_nodes())
            if sn_id == -1:
                # no feasible substrate node: abandon the mapping early
                break
            else:
                # take one environment step and unpack the four returned values
                nodeobservation, _, done, info = nodeenv.step(sn_id)
                node_map.update({vn_id: sn_id})
    # return the node-mapping produced by the selected algorithm
    return node_map
def run_process(terminados, idCurso, directorio, usuario, password, color=salidas.colores[0]):
    """Drive one scraping run: log in, locate course *idCurso* and extract
    its evidence files into *directorio*; errors are reported as JSON lines."""
    driver = config.configure()
    # expose the driver so the signal handler can shut it down cleanly
    global global_driver
    global_driver = driver
    signal.signal(signal.SIGINT, handler)
    signal.signal(signal.SIGTERM, handler)
    salidas.color_default = color
    login.login(driver, usuario, password)
    if terminados:
        # navigate into finished courses before listing
        cr.ciclar_cursos_hasta_terminados(driver)
    cursos = cr.regresar_cursos(driver)
    try:
        cr.extraer_evidencias_lista_cursos(driver, cursos, [idCurso], directorio, terminados)
    except KeyError as e:
        # unknown course id (often confused with the nrc)
        print(
            'El id dado %s no existe, aseguráte de no estar usando el nrc, lista opciones de ids con -l o --listar, si es un curso terminado aseguráte de activar la opción -t'
            % e)
        return
    except Exception as e:
        # any other failure is surfaced as a JSON error object
        print('{"Error": "%s"}' % e.__str__())
        return
    finally:
        driver.close()
    salidas.imprimir_salida('Fin de extracción')
    print('{"OK": "OK"}')
def main():
    """Load a C4v iPEPS, converge its CTMRG environment, then optimize the
    Ta/Tb tensor coefficients with SGD."""
    # 0) Parse command line arguments and configure simulation parameters
    cfg.configure(args)
    torch.set_num_threads(args.omp_cores)
    torch.manual_seed(args.seed)
    # 1) Read IPEPS from .json file
    state_ini = read_ipeps_c4v(path + "tensors/input-states/tensor_Ta4.json")
    # 2) Initialize environment and convergence criterion
    ctm_env_init = ENV_C4V(args.chi, state_ini)
    init_env(state_ini, ctm_env_init)

    def ctmrg_conv_rdm2x1(state_ini, env, history, ctm_args=cfg.ctm_args):
        # track the 2-norm distance between successive 2x1 reduced density
        # matrices; NOTE(review): always returns False, so CTMRG runs for
        # the full iteration budget rather than stopping at a tolerance
        with torch.no_grad():
            if not history:
                history = dict({"log": []})
            rdm2x1 = rdm2x1_sl(state_ini, env, force_cpu=ctm_args.conv_check_cpu)
            dist = float('inf')
            if len(history["log"]) > 1:
                dist = torch.dist(rdm2x1, history["rdm"], p=2).item()
            # update history
            history["rdm"] = rdm2x1
            history["log"].append(dist)
            return False, history

    # 3) Execute CTM algorithm
    ctm_env_init, *ctm_log = ctmrg_c4v.run(state_ini, ctm_env_init, conv_check=ctmrg_conv_rdm2x1)
    # 4) Initialize parameters
    gate = build_gate()
    Ta_coef = [0, 0, 0, 0, 1, 0, 0, 0]
    Tb_coef = [*[1] * 13]
    # 5) Run optimization
    run_optimization(Ta_coef=Ta_coef, Tb_coef=Tb_coef, gate=gate, env=ctm_env_init,
                     optimizer_class=torch.optim.SGD, n_iter=args.n, lr=args.lr)
def listar_cursos(terminados=False):
    """Log in with the stored credentials and print the course listing;
    when *terminados* is set, list finished courses instead."""
    driver = config.configure()
    usuario, password = credenciales.recuperar_credenciales()
    login.login(driver, usuario, password)
    if terminados:
        cr.ir_a_cursos_terminados(driver)
    listado = cr.regresar_cursos(driver)
    print(cr.ver_cursos(listado))
def reload_configuration():
    """Re-read the rule configuration and swap it into the module-level
    _rules mapping, logging how many simple+regex rules were loaded."""
    simple, regex, admin = configure()
    _rules.update(simple=simple, regex=regex, admin=admin)
    logger.info('Reloaded %d rules' % (len(simple) + len(regex)))
def extraer_cookies(usuario, password):
    """Log in with the given credentials and return the session cookies
    as a {name: value} dict."""
    driver = config.configure()
    try:
        login.login(driver, usuario, password)
        # name -> value for every cookie in the authenticated session
        return {c["name"]: c["value"] for c in driver.get_cookies()}
    finally:
        # original leaked the browser when login.login() raised
        driver.close()
def test_ctmrg_Ladders_VBS1x2(self):
    """Converge CTMRG on the D=2 bipartite coupled-ladders VBS state and
    verify the exact VBS values: energy -0.375, vanishing magnetization
    and rung correlator SS1x2 = -0.75."""
    cfg.configure(args)
    cfg.print_config()
    torch.set_num_threads(args.omp_cores)
    model = coupledLadders.COUPLEDLADDERS_D2_BIPARTITE(alpha=args.alpha)
    state = read_ipeps(args.instate)

    def ctmrg_conv_energy(state, env, history, ctm_args=cfg.ctm_args):
        # converge on the change in energy between successive CTM steps
        with torch.no_grad():
            if not history:
                history = []
            e_curr = model.energy_2x1_1x2(state, env)
            history.append([e_curr.item()])
            if len(history) > 1 and abs(history[-1][0] - history[-2][0]
                                        ) < ctm_args.ctm_conv_tol:
                return True, history
            return False, history

    ctm_env_init = ENV(args.chi, state)
    init_env(state, ctm_env_init)
    ctm_env_init, *ctm_log = ctmrg.run(state, ctm_env_init, conv_check=ctmrg_conv_energy)
    e_curr0 = model.energy_2x1_1x2(state, ctm_env_init)
    obs_values0, obs_labels = model.eval_obs(state, ctm_env_init)
    obs_dict = dict(zip(obs_labels, obs_values0))
    eps = 1.0e-12
    # exact VBS energy per site is -3/8
    self.assertTrue(abs(e_curr0 - (-0.375)) < eps)
    for coord, site in state.sites.items():
        self.assertTrue(obs_dict[f"m{coord}"] < eps, msg=f"m{coord}")
        self.assertTrue(obs_dict[f"SS2x1{coord}"] < eps, msg=f"SS2x1{coord}")
        for l in ["sz", "sp", "sm"]:
            self.assertTrue(abs(obs_dict[f"{l}{coord}"]) < eps, msg=f"{l}{coord}")
    for coord in [(0, 0)]:
        # singlet on the vertical bond: <S.S> = -3/4
        self.assertTrue(abs(obs_dict[f"SS1x2{coord}"] - (-0.75)) < eps,
                        msg=f"SS1x2{coord}")
def configure(conf): env = conf.env # pthread if env['PLATFORM'] != 'win32': config.configure('pthreads', conf) # wsock32 if env['PLATFORM'] == 'win32': config.require_lib(conf, 'wsock32') config.check_home(conf, 'libsqlite', '', '') if config.check_lib(conf, 'sqlite3') and \ config.check_header(conf, 'sqlite3.h'): env.AppendUnique(CPPDEFINES = ['HAVE_LIBSQLITE']) return True return False
def configure(conf):
    """Detect libsqlite3 and, when present, define HAVE_LIBSQLITE.

    Returns True when both library and header are available, else False.
    """
    env = conf.env
    # win32 links winsock; every other platform needs pthreads
    if env['PLATFORM'] == 'win32':
        config.require_lib(conf, 'wsock32')
    else:
        config.configure('pthreads', conf)
    config.check_home(conf, 'libsqlite', '', '')
    found = config.check_lib(conf, 'sqlite3') and config.check_header(conf, 'sqlite3.h')
    if found:
        env.AppendUnique(CPPDEFINES=['HAVE_LIBSQLITE'])
        return True
    return False
def test_ctmrg_RVB(self):
    """Converge CTMRG on the C4v RVB J1-J2 state and check the reference
    energy (-0.47684229) and vanishing magnetization observables."""
    cfg.configure(args)
    torch.set_num_threads(args.omp_cores)
    model = j1j2.J1J2_C4V_BIPARTITE(j1=args.j1, j2=args.j2)
    # low-memory 1x1 energy evaluator
    energy_f = model.energy_1x1_lowmem
    state = read_ipeps_c4v(args.instate)

    def ctmrg_conv_energy(state, env, history, ctm_args=cfg.ctm_args):
        # converge on the change in energy between successive CTM steps
        with torch.no_grad():
            if not history:
                history = []
            e_curr = energy_f(state, env, force_cpu=ctm_args.conv_check_cpu)
            history.append([e_curr.item()])
            if len(history) > 1 and abs(history[-1][0] - history[-2][0]
                                        ) < ctm_args.ctm_conv_tol:
                return True, history
            return False, history

    ctm_env_init = ENV_C4V(args.chi, state)
    init_env(state, ctm_env_init)
    ctm_env_init, *ctm_log = ctmrg_c4v.run(state, ctm_env_init, conv_check=ctmrg_conv_energy)
    e_curr0 = energy_f(state, ctm_env_init)
    obs_values0, obs_labels = model.eval_obs(state, ctm_env_init)
    obs_dict = dict(zip(obs_labels, obs_values0))
    # looser tolerance for the energy than for the symmetry-protected zeros
    eps_e = 1.0e-8
    eps_m = 1.0e-14
    self.assertTrue(abs(e_curr0 - (-0.47684229)) < eps_e)
    self.assertTrue(obs_dict["m"] < eps_m)
    for l in ["sz", "sp", "sm"]:
        self.assertTrue(abs(obs_dict[l]) < eps_m)
def test_ctmrg_AKLT(self):
    """Converge CTMRG on the C4v AKLT S=2 state and verify that the energy
    and all spin observables vanish, as expected for the exact AKLT state."""
    cfg.configure(args)
    torch.set_num_threads(args.omp_cores)
    model = akltS2.AKLTS2_C4V_BIPARTITE()
    state = read_ipeps_c4v(args.instate)

    def ctmrg_conv_f(state, env, history, ctm_args=cfg.ctm_args):
        # converge on the 2-norm distance between successive 2x1 reduced
        # density matrices
        with torch.no_grad():
            if not history:
                history=dict({"log": []})
            rdm2x1= rdm2x1_sl(state, env, force_cpu=ctm_args.conv_check_cpu)
            dist= float('inf')
            if len(history["log"]) > 1:
                dist= torch.dist(rdm2x1, history["rdm"], p=2).item()
            history["rdm"]=rdm2x1
            history["log"].append(dist)
            if dist<ctm_args.ctm_conv_tol:
                log.info({"history_length": len(history['log']), "history": history['log']})
                return True, history
            return False, history

    ctm_env_init = ENV_C4V(args.chi, state)
    init_env(state, ctm_env_init)
    ctm_env_init, *ctm_log = ctmrg_c4v.run(state, ctm_env_init, conv_check=ctmrg_conv_f)
    e_curr0 = model.energy_1x1(state, ctm_env_init)
    obs_values0, obs_labels = model.eval_obs(state,ctm_env_init)
    obs_dict=dict(zip(obs_labels,obs_values0))
    # AKLT is the exact ground state: everything should be numerically zero
    eps=1.0e-14
    self.assertTrue(e_curr0 < eps)
    self.assertTrue(obs_dict["m"] < eps)
    for l in ["sz","sp","sm"]:
        self.assertTrue(abs(obs_dict[l]) < eps)
def runConfig(self):
    """Read the I/O count spinners, run the PCA configurator, and render
    the resulting PCA allocation into the result label when solvable."""
    io_counts = (self.DI24.value(), self.DO24.value(),
                 self.DI72.value(), self.DO72.value(),
                 self.DI110.value(), self.DO110.value(),
                 self.AI.value(), self.AO.value())
    SolFound, PCA_output, numPCAs = config.configure(*io_counts)
    if SolFound is True:
        pieces = ["<html><head/><body>"]
        for key in PCA_output:
            pieces.append("<p>" + key + ": " + str(PCA_output[key]) + "</p>")
        pieces.append("<p> Total number of PCAs: " + str(numPCAs) + "</p>")
        pieces.append("</body></html>")
        self.label_10.setText("".join(pieces))
def main():
    """Fan the configured load test out across N loadtester processes.

    Rebuilds the diagnostic-data directory, composes one loadtester
    command per process (with optional flags from *options*), writes them
    into a _ltstart.sh launcher and executes it.
    """
    template, regex, batch, options = config.configure()
    BASE_CMD = 'loadtester -b %s -o diagnostic-data/process%s.csv'
    # make directory structure
    cwd = os.getcwd()
    try:
        shutil.rmtree(join(cwd, 'diagnostic-data'))
    except OSError:
        # directory did not exist yet — nothing to clean
        # (narrowed from a bare `except:` that hid every error)
        pass
    os.mkdir(join(cwd, 'diagnostic-data'))
    # form commands — one per worker process, each backgrounded with '&'
    cmds = [BASE_CMD % (options.batch_file, p) for p in range(options.processes)]
    if options.number:
        cmds = [c + ' -n %s' % options.number for c in cmds]
    if options.timeout:
        cmds = [c + ' -t %s' % options.timeout for c in cmds]
    if options.grepargs:
        cmds = [c + ' -g %s' % options.grepargs for c in cmds]
    if options.follow_redirects:
        cmds = [c + ' -r %s' % options.follow_redirects for c in cmds]
    if options.host:
        cmds = [c + ' -H %s' % options.host for c in cmds]
    if options.cfg_file:
        cmds = [c + ' -c %s' % options.cfg_file for c in cmds]
    cmds = [c + ' &' for c in cmds]
    # generate shell script (with-block replaces the manual try/finally)
    with open('_ltstart.sh', 'w') as script:
        script.write('\n'.join(['#!/bin/sh'] + cmds))
    # finish up and run
    path_to_executable = join(cwd, '_ltstart.sh')
    # BUG FIX: chmod takes an octal mode; the original decimal 755 set a
    # nonsense permission mask instead of rwxr-xr-x
    os.chmod(path_to_executable, 0o755)
    p = subprocess.call([path_to_executable])
    print('return code is: ', p)
def configure(conf):
    """Configure an XML parser backend: prefer libexpat, fall back to glib2.

    Raises Exception when neither is available; returns True otherwise.
    """
    env = conf.env
    have_xml_parser = False
    # libexpat
    if config.configure('expat', conf, False):
        have_xml_parser = True
    else:
        # Glib 2.0
        env.ParseConfig('pkg-config --cflags --libs glib-2.0')
        env.ParseConfig('pkg-config --cflags --libs gthread-2.0')
        if conf.CheckCHeader('glib.h'):
            env.AppendUnique(CPPDEFINES=['HAVE_GLIB'])
            have_xml_parser = True
    if not have_xml_parser:
        # py2-only `raise Exception, '...'` replaced by the call form
        raise Exception('Need either libexpat or glib2.0 XML parser')
    return have_xml_parser
def configure(conf):
    """Configure an XML parser backend (duplicate of the sibling check):
    prefer libexpat, fall back to glib2. Raises Exception when neither
    parser is available; returns True otherwise."""
    env = conf.env
    have_xml_parser = False
    # libexpat
    if config.configure("expat", conf, False):
        have_xml_parser = True
    else:
        # Glib 2.0
        env.ParseConfig("pkg-config --cflags --libs glib-2.0")
        env.ParseConfig("pkg-config --cflags --libs gthread-2.0")
        if conf.CheckCHeader("glib.h"):
            env.AppendUnique(CPPDEFINES=["HAVE_GLIB"])
            have_xml_parser = True
    if not have_xml_parser:
        # py2-only `raise Exception, "..."` replaced by the call form
        raise Exception("Need either libexpat or glib2.0 XML parser")
    return have_xml_parser
def main(args): """Evaluation corresponding to given argparse arguments.""" # device device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') agent, env, eval_env = configure(args) agent = agent.to(device) # load checkpoints if directory is not empty agent_file = join(args.logdir, 'best_agent.pt') R = - np.inf if exists(agent_file): state_dict = torch.load(agent_file) R = state_dict["return"] info(f"eval> Loading agent with return {R}...") agent.load_state_dict(state_dict) else: raise ValueError(f"{agent_file} does not exists, no agent available...") evaluate(args.dt, 0, eval_env, agent, args.time_limit, eval_return=True, video=True, progress_bar=True, no_log=True)
def init_app(environment=None):
    """Create and configure the Flask application.

    Applies environment-specific configuration, registers blueprints,
    initializes the DB and webpack extensions, and wires the index and
    static-asset routes. Returns the configured app.
    """
    app = Flask(__name__)
    # parenthesized print works on both Python 2 and 3 (was a py2 statement)
    print("Configuring app")
    app = configure(app, environment)
    register_blueprints(app)
    # DB Initilization
    db.init_app(app)
    # webpack intialization
    webpack.init_app(app)

    @app.route('/')
    def index():
        return render_template('index.html')

    @app.route('/assets/<path:path>')
    def assets(path):
        # serve compiled frontend assets from the build output directory
        return send_from_directory('build/public', path)

    return app
def test_admin_settings(self):
    """Write a temporary by-admin.rules file and verify configure() parses
    the admin section (path, username, md5 password) correctly."""
    try:
        with open('by-admin.rules', 'w') as admin_file:
            admin_file.write("""
admin:
    path: /admin/ui
    username: usr
    password:
        md5: abcd1234
""")
        _, _, admin = configure()
        self.assertIsNotNone(admin)
        self.assertEqual(admin.path, '/admin/ui')
        self.assertEqual(admin.username, 'usr')
        self.assertEqual(admin.password, 'abcd1234')
        # the md5 key selects the hash algorithm used for auth
        self.assertEqual(admin.algorithm, hashlib.md5)
    finally:
        # always clean up the fixture file, even when assertions fail
        if os.path.exists('by-admin.rules'):
            os.remove('by-admin.rules')
def run_process(terminados, idCurso, directorio, usuario, password, color=salidas.colores[0]):
    """Variant of the extraction runner without signal handling: log in,
    extract evidence for course *idCurso* into *directorio*, and exit(1)
    on an unknown course id."""
    driver = config.configure()
    salidas.color_default = color
    login.login(driver, usuario, password)
    if terminados:
        # navigate into finished courses before listing
        cr.ciclar_cursos_hasta_terminados(driver)
    cursos = cr.regresar_cursos(driver)
    try:
        cr.extraer_evidencias_lista_cursos(driver, cursos, [idCurso], directorio, terminados)
    except KeyError as e:
        # unknown course id (often confused with the nrc)
        print(
            'El id dado %s no existe, aseguráte de no estar usando el nrc, lista opciones de ids con -l o --listar, si es un curso terminado aseguráte de activar la opción -t'
            % e)
        exit(1)
    finally:
        driver.close()
    salidas.imprimir_salida('Fin de extracción')
from StringIO import StringIO from ConfigParser import SafeConfigParser from twisted.test import proto_helpers from twisted.trial import unittest from twisted.internet import task, error import stomper import inject import logging logging.basicConfig(level=logging.DEBUG, format='%(message)s', ) import config config.configure() class TestStompProtocolFactory(unittest.TestCase): class _MyFakeConnector( proto_helpers._FakeConnector ): connectionAttempts = 0 def connect(self): self.connectionAttempts += 1 #God bless Python! proto_helpers._FakeConnector = _MyFakeConnector def setUp(self): self.factory = StompProtocolFactory() def _fakeConnect(self_): self_.connectionAttempts += 1
def main():
    """Load application configuration, then hand control to the CLI."""
    config.configure()
    cli.main()
def _configure(self, **kwargs):
    """Build the configuration object from keyword overrides and store it
    on the instance."""
    self.config = configure(**kwargs)
from flask import Flask from flask import render_template from flask.ext.socketio import SocketIO, emit from hashlib import sha256 import sys app = Flask(__name__) app.config['SECRET_KEY'] = 'replaceme' app.config['ADMIN_URL'] = '/admin' app.config['DEBUG'] = True # Replace the above secrets and specify other overrides here, or alternatively, # create a config.py file that has a configure(app) function that adds these. try: import config config.configure(app) except ImportError: pass socketio = SocketIO(app) admin_secret = app.config['SECRET_KEY'] + "ADMIN_SECRET" app.config['ADMIN_SECRET'] = sha256(admin_secret.encode('utf-8')).hexdigest() # eetvoorkeur relies completely on a run-time state. This means that the state # is reset whenever the app is restarted. Future versions might rely on a # database of some kind, but for now, this was the easiest prototype. state = {"step": 1, "options": [{'name': 'Albert Heijn', 'votes': 0}, {'name': 'De Fest', 'votes': 0}, {'name': 'Lotus', 'votes': 0}, ],
def generate(env): # We break unitsync if the filename of the shared object has a 'lib' prefix. # It is also nicer for the AI shared objects. env['LIBPREFIX'] = '' # I don't see any reason to make this configurable --tvo. # Note that commenting out / setting this to `None' will break the buildsystem. env['builddir'] = 'build' # SCons chokes in env.SConsignFile() if path doesn't exist. if not os.path.exists(env['builddir']): os.makedirs(env['builddir']) # Avoid spreading .sconsign files everywhere - keep this line # Use os.path.abspath() here because somehow the argument to SConsignFile is relative to the # directory of the toplevel trunk/SConstruct and not the current directory, trunk/rts/SConstruct. env.SConsignFile(os.path.abspath(os.path.join(env['builddir'], 'scons_signatures'))) usrcachefile = os.path.join(env['builddir'], 'usropts.py') intcachefile = os.path.join(env['builddir'], 'intopts.py') usropts = Options(usrcachefile) intopts = Options(intcachefile) #user visible options usropts.AddOptions( #permanent options ('platform', 'Set to linux, freebsd or windows', None), ('debug', 'Set to yes to produce a binary with debug information', 0), ('syncdebug', 'Set to yes to enable the sync debugger', False), ('optimize', 'Enable processor optimizations during compilation', 1), ('profile', 'Set to yes to produce a binary with profiling information', False), ('fpmath', 'Set to 387 or SSE on i386 and AMD64 architectures', '387'), ('prefix', 'Install prefix', '/usr/local'), ('datadir', 'Data directory', '$prefix/games/taspring'), ('strip', 'Discard symbols from the executable (only when neither debugging nor profiling)', True), #porting options - optional in a first phase ('disable_avi', 'Set to no to turn on avi support', True), ('disable_clipboard', 'Set to no to turn on clipboard code', True), #other ported parts ('use_tcmalloc', 'Use tcmalloc from goog-perftools for memory allocation', False), ('use_mmgr', 'Use memory manager', False), ('cachedir', 'Cache 
directory (see scons manual)', None)) #internal options intopts.AddOptions( ('LINKFLAGS', 'linker flags'), ('LIBPATH', 'library path'), ('LIBS', 'libraries'), ('CCFLAGS', 'c compiler flags'), ('CXXFLAGS', 'c++ compiler flags'), ('CPPDEFINES', 'c preprocessor defines'), ('CPPPATH', 'c preprocessor include path'), ('CC', 'c compiler'), ('CXX', 'c++ compiler'), ('spring_defines','extra c preprocessor defines for spring'), ('is_configured', 'configuration version stamp')) usropts.Update(env) intopts.Update(env) env.Help(usropts.GenerateHelpText(env)) # Use this to avoid an error message 'how to make target configure ?' env.Alias('configure', None) if not 'configure' in sys.argv and not ((env.has_key('is_configured') and env['is_configured'] == 3) or env.GetOption('clean')): print "Not configured or configure script updated. Run `scons configure' first." print "Use `scons --help' to show available configure options to `scons configure'." env.Exit(1) if 'configure' in sys.argv: # be paranoid, unset existing variables for key in ['platform', 'debug', 'optimize', 'profile', 'prefix', 'datadir', 'cachedir', 'strip', 'disable_avi', 'use_tcmalloc', 'use_mmgr', 'LINKFLAGS', 'LIBPATH', 'LIBS', 'CCFLAGS', 'CXXFLAGS', 'CPPDEFINES', 'CPPPATH', 'is_configured', 'spring_defines']: if env.has_key(key): env.__delitem__(key) print "\nNow configuring. 
If something fails, consult `config.log' for details.\n" #parse cmdline def makeHashTable(args): table = { } for arg in args: if len(arg) > 1: lst = arg.split('=') if len(lst) < 2: continue key = lst[0] value = lst[1] if len(key) > 0 and len(value) > 0: table[key] = value return table args = makeHashTable(sys.argv) env['is_configured'] = 3 if args.has_key('platform'): env['platform'] = args['platform'] else: env['platform'] = detect.platform() fix_windows_spawn(env) if os.environ.has_key('CC'): env['CC'] = os.environ['CC'] if os.environ.has_key('CXX'): env['CXX'] = os.environ['CXX'] gcc_version = config.check_gcc_version(env) # Declare some helper functions for boolean and string options. def bool_opt(key, default): if args.has_key(key): if args[key] == 'no' or args[key] == 'false' or args[key] == '0': env[key] = False elif args[key] == 'yes' or args[key] == 'true' or args[key] == '1': env[key] = True else: print "\ninvalid", key, "option, must be one of: yes, true, no, false, 0, 1." env.Exit(1) else: env[key] = default def string_opt(key, default): if args.has_key(key): env[key] = args[key] else: env[key] = default # Use single precision constants only. # This should be redundant with the modifications done by tools/double_to_single_precision.sed. # Other options copied from streflop makefiles. env['CCFLAGS'] = ['-fsingle-precision-constant', '-frounding-math', '-fsignaling-nans', '-mieee-fp'] # profile? bool_opt('profile', False) if env['profile']: print "profiling enabled,", env.AppendUnique(CCFLAGS=['-pg'], LINKFLAGS=['-pg']) else: print "profiling NOT enabled,", # debug? 
if args.has_key('debug'): level = args['debug'] if level == 'no' or level == 'false': level = '0' elif level == 'yes' or level == 'true': level = '3' else: level = '0' if int(level) == 0: print "debugging NOT enabled,", env['debug'] = 0 elif int(level) >= 1 and int(level) <= 3: print "level", level, "debugging enabled,", env['debug'] = level # MinGW gdb chokes on the dwarf debugging format produced by '-ggdb', # so use the more generic '-g' instead. if env['platform'] == 'windows' or env['syncdebug']: env.AppendUnique(CCFLAGS=['-g'], CPPDEFINES=['DEBUG', '_DEBUG']) else: env.AppendUnique(CCFLAGS=['-ggdb'+level], CPPDEFINES=['DEBUG', '_DEBUG']) else: print "\ninvalid debug option, must be one of: yes, true, no, false, 0, 1, 2, 3." env.Exit(1) # optimize? if args.has_key('optimize'): level = args['optimize'] if level == 'no' or level == 'false': level = '0' elif level == 'yes' or level == 'true': level = '2' else: if env['debug']: level = '0' else: level = '2' if level == 's' or level == 'size' or (int(level) >= 1 and int(level) <= 3): print "level", level, "optimizing enabled" if level != '2': print "WARNING: for sync it is recommended to compile with level 2 optimization" env['optimize'] = level #archflags = detect.processor(gcc_version >= ['3','4','0']) # -fstrict-aliasing causes constructs like: # float f = 10.0f; int x = *(int*)&f; # to break. # Since those constructs are used in the netcode and MathTest code, we disable the optimization. env.AppendUnique(CCFLAGS=['-O'+level, '-pipe', '-fno-strict-aliasing', '-frename-registers']) elif int(level) == 0: print "optimizing NOT enabled", env['optimize'] = 0 else: print "\ninvalid optimize option, must be one of: yes, true, no, false, 0, 1, 2, 3, s, size." env.Exit(1) # Must come before the '-fvisibility=hidden' code. bool_opt('syncdebug', False) string_opt('fpmath', '387') # If sync debugger is on, disable inlining, as it makes it much harder to follow backtraces. 
if env['syncdebug']: # Disable all possible inlining, just to be sure. env['CCFLAGS'] += ['-fno-default-inline', '-fno-inline', '-fno-inline-functions', '-fno-inline-functions-called-once'] # It seems only gcc 4.0 and higher supports this. if gcc_version >= ['4','0','0']: env['CCFLAGS'] += ['-fvisibility=hidden'] # Allow easy switching between 387 and SSE fpmath. if env['fpmath']: env['CCFLAGS'] += ['-mfpmath='+env['fpmath']] if env['fpmath'] == 'sse': print "WARNING: SSE math vs X87 math is unsynced!" print "WARNING: Do not go online with the binary you are currently building!" env['CCFLAGS'] += ['-msse', '-msse2'] env['CXXFLAGS'] = env['CCFLAGS'] # Do not do this anymore because it may severely mess up our carefully selected options. # Print a warning and ignore them instead. # fall back to environment variables if neither debug nor optimize options are present if not args.has_key('debug') and not args.has_key('optimize'): if os.environ.has_key('CFLAGS'): #print "using CFLAGS:", os.environ['CFLAGS'] #env['CCFLAGS'] = SCons.Util.CLVar(os.environ['CFLAGS']) print "WARNING: attempt to use environment CFLAGS has been ignored." if os.environ.has_key('CXXFLAGS'): #print "using CXXFLAGS:", os.environ['CXXFLAGS'] #env['CXXFLAGS'] = SCons.Util.CLVar(os.environ['CXXFLAGS']) print "WARNING: attempt to use environment CXXFLAGS has been ignored." #else: # env['CXXFLAGS'] = env['CCFLAGS'] bool_opt('strip', True) bool_opt('disable_avi', True) bool_opt('disable_clipboard', True) bool_opt('use_tcmalloc', False) bool_opt('use_mmgr', False) string_opt('prefix', '/usr/local') string_opt('datadir', '$prefix/games/taspring') string_opt('cachedir', None) # Make a list of preprocessor defines. env.AppendUnique(CPPDEFINES = ['_REENTRANT', '_SZ_ONE_DIRECTORY']) spring_defines = ['DIRECT_CONTROL_ALLOWED'] # Add define specifying type of floating point math to use. 
if env['fpmath']: if env['fpmath'] == 'sse': spring_defines += ['STREFLOP_SSE'] if env['fpmath'] == '387': spring_defines += ['STREFLOP_X87'] # Add/remove SYNCDEBUG to enable/disable sync debugging. if env['syncdebug']: spring_defines += ['SYNCDEBUG'] # Don't define this: it causes a full recompile when you change it, even though it is only used in Main.cpp, # and some AIs maybe. Just make exceptions in SConstruct. #defines += ['SPRING_DATADIR="\\"'+env['datadir']+'\\""'] if env['disable_clipboard']: spring_defines += ['NO_CLIPBOARD'] if env['disable_avi'] : spring_defines += ['NO_AVI'] if env['use_mmgr'] : spring_defines += ['USE_MMGR'] env['spring_defines'] = spring_defines include_path = ['rts', 'rts/System'] include_path += ["lua/luabind", "lua/lua/include"] lib_path = ['rts/lib/streflop'] if env['platform'] == 'freebsd': include_path += ['/usr/local/include', '/usr/X11R6/include', '/usr/X11R6/include/GL'] lib_path += ['/usr/local/lib', '/usr/X11R6/lib'] env.AppendUnique(CCFLAGS = ['-pthread'], CXXFLAGS = ['-pthread']) elif env['platform'] == 'linux': include_path += ['/usr/include', '/usr/include/GL'] env.AppendUnique(CCFLAGS = ['-pthread'], CXXFLAGS = ['-pthread'], LINKFLAGS = ['-Wl,-E']) elif env['platform'] == 'darwin': include_path += ['/usr/include', '/usr/local/include', '/opt/local/include', '/usr/X11R6/include'] lib_path += ['/opt/local/lib', '/usr/local/lib'] env['SHLINKFLAGS'] = '$LINKFLAGS -dynamic' env['SHLIBSUFFIX'] = '.dylib' elif env['platform'] == 'windows': include_path += ['mingwlibs\\include'] lib_path += ['mingwlibs\\lib'] if os.environ.has_key('MINGDIR'): include_path += [os.path.join(os.environ['MINGDIR'], 'include')] lib_path += [os.path.join(os.environ['MINGDIR'], 'lib')] else: print 'ERROR: MINGDIR environment variable not set and MSVC build unsupported.' print 'Set it to your Dev-Cpp or MinGW install directory (e.g. C:\\Dev-Cpp) and try again.' 
env.Exit(1) env.AppendUnique(CCFLAGS = ['-mthreads'], CXXFLAGS = ['-mthreads'], LINKFLAGS = ['-mwindows']) # use '-pthreads' for Solaris, according to /usr/include/boost/config/requires_threads.hpp env.AppendUnique(CPPPATH=include_path, LIBPATH=lib_path) config.configure(env, conf_dir=os.path.join(env['builddir'], 'sconf_temp')) env.AppendUnique(LIBS=['streflop']) usropts.Save(usrcachefile, env) intopts.Save(intcachefile, env) #Should we strip the exe? if env.has_key('strip') and env['strip'] and not env['debug'] and not env['profile'] and not env.GetOption('clean'): env['strip'] = True else: env['strip'] = False #BuildDir support code if env['builddir']: for d in filelist.list_directories(env, 'rts'): env.BuildDir(os.path.join(env['builddir'], d), d, duplicate = False) for d in filelist.list_directories(env, 'lua'): env.BuildDir(os.path.join(env['builddir'], d), d, duplicate = False) env.BuildDir(os.path.join(env['builddir'], 'tools/unitsync'), 'tools/unitsync', duplicate = False) for d in filelist.list_directories(env, 'AI'): env.BuildDir(os.path.join(env['builddir'], d), d, duplicate = False) #CacheDir support code if env.has_key('cachedir') and env['cachedir']: if not os.path.exists(env['cachedir']): os.makedirs(env['cachedir']) env.CacheDir(env['cachedir']) fix_windows_spawn(env)
def main():
    """Entry point: load configuration, then run the worker chain once."""
    from config import configure

    cfg, cli_args = configure()
    WorkerChain(cli_args, cfg).run()
from bottle import Bottle, static_file, jinja2_template as template, request, run from config import configure, registermainuser from admin.controllers import admincontroller from controllers import contactcontroller from models.cmsmodels import Posts, Themes import math configure() registermainuser() application = Bottle() app = application app.mount('/admin', admincontroller.app) app.mount('/contact', contactcontroller.app) @app.get('/<page:int>') @app.get('/') def index(page=1): searchcriteria = request.query.getunicode('search') try: returned_posts = Posts.objects.order_by('-date').skip((int(page) - 1) * 10).limit( 10) if not searchcriteria else Posts.objects(title__icontains=searchcriteria).order_by('-date').skip( (int(page) - 1) * 10).limit(10) postcount = Posts.objects().count() if not searchcriteria else Posts.objects(title__icontains=searchcriteria).count() theme = Themes.objects.get(isactive=True) if theme: data = { "posts": returned_posts, "count": postcount, "ceil": math.ceil(postcount / 10),
usage() sys.exit() elif o in ('-b', '--batch'): batchFile = a elif o in ('-s', '--search'): searchString = a.strip() elif o in ('-l', '--lookup'): lookupString = a.strip() elif o in ('-d', '--debug'): set_debug(True) else: assert False, "unhandled option" if (searchString == None or len(searchString) == 0) and (lookupString == None or len(lookupString) == 0) \ and (batchFile == None or len(batchFile) == 0): usage() sys.exit(1) debug('Initializing...') vocabulary, grammar, generator = config.configure() if lookupString != None and len(lookupString) > 0: sys.exit(lookup(lookupString, vocabulary)) elif batchFile != None and len(batchFile) > 0: batch(batchFile, vocabulary, grammar, generator) else: sys.exit(search(searchString, vocabulary, grammar, generator)) if __name__ == '__main__': main()
def main(): app = QtGui.QApplication(sys.argv) ## Startup message/server info print "\n*******************************************************************************" print "** " + APPNAME + " " + VERSION + " - BarcodeOverIP-server for Python 2.7.2 " print "** Website: https://code.google.com/p/barcodeoverip/" print "** Written By: Tyler H. Jones, February 2012" print "** Licensed Under Apache-2.0 License. (C) 2012 - Tyler H. Jones (tylerjones.me) " print "*******************************************************************************" #print "** Options: --verbose || -v : Be verbose with terminal/log messages" #print "** --config || -c : Specify config file (Defualt: ./settings.conf)" #print "*******************************************************************************\n" ## Setup logging verbose = True log = logger.logger() log.setup(0, verbose) ## Validate config file config_response = config.configure() if config_response != "Error": log.info("", "Config file found... Loaded OK!") conf = shelve.open(".config") for i,v in config_response.iteritems(): conf[i] = v conf.close() else: sys.exit() ## Make the config dictionary config = shelve.open(".config") ############################################################################################### ## Variable Declarations ############################################################################################### host = config["BindIP"] port = int(config["BindPort"]) m = hashlib.new('sha1') if(config["Password"].strip() != "" and config["Password"].upper().strip() != "NONE"): m.update(config["Password"]) server_hash = m.hexdigest() else: server_hash = "none" print "\nPassword (SHA1 Hex Digest): " + server_hash + "\n" error_codes = {'ERR1':'Invalid data format and/or syntax!', 'ERR2':'No data was sent!', 'ERR3':'Invalid Command Sent!', 'ERR4':'Missing/Empty Command Argument(s) Recvd.', 'ERR5':'Invalid command syntax!', 'ERR6':'Invalid Auth Syntax!', 'ERR7':'Access Denied!', 'ERR8':'Server Timeout, Too Busy 
to Handle Request!', 'ERR9':'Unknown Data Transmission Error', 'ERR10':'Auth required.', 'ERR11':'Invalid Password.', 'ERR12':'Not logged in.', 'ERR13':'Incorrect Username/Password!', 'ERR14':'Invalid Login Command Syntax.', 'ERR19':'Unknown Auth Error' } w = QtGui.QWidget() w.resize(250, 150) w.move(300, 300) w.setWindowTitle('Simple') w.show() ############################################################################################### ## Start the server ############################################################################################### #threadListener = Listener() # Define the listener thread conf = shelve.open(".config") s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind((host, port)) s.listen(5) log.info("Server Socket Created successfully!", "Listening Host/IP: " + host + " (Port: " + str(port) + ")") if host == "0.0.0.0": print "\n(0.0.0.0) = Listening on all interfaces...\nAll available network interfaces on system:" for ifaceName in interfaces(): addresses = [i['addr'] for i in ifaddresses(ifaceName).setdefault(AF_INET, [{'addr':'No IP addr'}] )] print " -- %s: %s" % (ifaceName, ', '.join(addresses)) print "\nSince the server is listening on all interfaces, you can connect to it using any of the above addresses, provided that you are in address range of the one you choose.\n" try: while 1: (clientsock, clientaddr) = s.accept() log.info("Incoming Connection!", "From: " + str(clientsock.getpeername())) #TODO: Use a fork/thread instead of a function call handleConnection(clientsock) clientsock.close() except (KeyboardInterrupt, SystemExit): # Wait for a keyboard interupt log.info("Keyboard Interupt", "Received keyboard interrupt, quitting threads") #threadListener.stop() # Stop the thread sys.exit(0) sys.exit(app.exec_())
def generate(env):
    """Set up the SCons build environment for the engine build.

    Loads cached user/internal options into `env`.  When `configure` is on
    the scons command line, re-parses key=value command-line arguments,
    probes the toolchain, applies platform-specific flags/paths and saves
    the results back into the option cache files.  Always (re)wires the
    strip decision, BuildDir and CacheDir support.

    NOTE(review): relies on module-level helpers imported elsewhere in this
    file (Options, detect, config, filelist, fix_windows_spawn, SCons).
    """
    # We break unitsync if the filename of the shared object has a 'lib' prefix.
    # It is also nicer for the AI shared objects.
    env["LIBPREFIX"] = ""

    # I don't see any reason to make this configurable --tvo.
    # Note that commenting out / setting this to `None' will break the buildsystem.
    env["builddir"] = "build"

    # SCons chokes in env.SConsignFile() if path doesn't exist.
    if not os.path.exists(env["builddir"]):
        os.makedirs(env["builddir"])

    # Avoid spreading .sconsign files everywhere - keep this line
    # Use os.path.abspath() here because somehow the argument to SConsignFile is relative to the
    # directory of the toplevel trunk/SConstruct and not the current directory, trunk/rts/SConstruct.
    env.SConsignFile(os.path.abspath(os.path.join(env["builddir"], "scons_signatures")))

    # Option caches live inside the build directory.
    usrcachefile = os.path.join(env["builddir"], "usropts.py")
    intcachefile = os.path.join(env["builddir"], "intopts.py")
    usropts = Options(usrcachefile)
    intopts = Options(intcachefile)

    # user visible options
    usropts.AddOptions(
        # permanent options
        ("platform", "Set to linux, freebsd or windows", None),
        ("debug", "Set to yes to produce a binary with debug information", 0),
        ("optimize", "Enable processor optimizations during compilation", 1),
        ("profile", "Set to yes to produce a binary with profiling information", False),
        ("prefix", "Install prefix", "/usr/local"),
        ("datadir", "Data directory", "$prefix/games/taspring"),
        ("strip", "Discard symbols from the executable (only when neither debugging nor profiling)", True),
        # porting options - optional in a first phase
        ("disable_avi", "Set to no to turn on avi support", True),
        ("disable_clipboard", "Set to no to turn on clipboard code", True),
        # other ported parts
        ("disable_lua", "Set to no to turn on Lua support", True),
        ("use_tcmalloc", "Use tcmalloc from goog-perftools for memory allocation", False),
        ("use_mmgr", "Use memory manager", False),
        ("cachedir", "Cache directory (see scons manual)", None),
    )

    # internal options
    intopts.AddOptions(
        ("LINKFLAGS", "linker flags"),
        ("LIBPATH", "library path"),
        ("LIBS", "libraries"),
        ("CCFLAGS", "c compiler flags"),
        ("CXXFLAGS", "c++ compiler flags"),
        ("CPPDEFINES", "c preprocessor defines"),
        ("CPPPATH", "c preprocessor include path"),
        ("CC", "c compiler"),
        ("CXX", "c++ compiler"),
        ("is_configured", "configuration version stamp"),
    )

    usropts.Update(env)
    intopts.Update(env)

    env.Help(usropts.GenerateHelpText(env))

    # Use this to avoid an error message 'how to make target configure ?'
    env.Alias("configure", None)

    # Refuse to build when no valid cached configuration exists and we are
    # neither configuring nor cleaning.
    if not "configure" in sys.argv and not (
        (env.has_key("is_configured") and env["is_configured"] == 1) or env.GetOption("clean")
    ):
        print "Not configured or configure script updated. Run `scons configure' first."
        print "Use `scons --help' to show available configure options to `scons configure'."
        env.Exit(1)

    if "configure" in sys.argv:

        # be paranoid, unset existing variables
        for key in [
            "platform", "debug", "optimize", "profile", "prefix", "datadir", "cachedir", "strip",
            "disable_avi", "disable_lua", "disable_aio", "use_tcmalloc", "use_mmgr",
            "LINKFLAGS", "LIBPATH", "LIBS", "CCFLAGS", "CXXFLAGS", "CPPDEFINES", "CPPPATH",
            "is_configured",
        ]:
            if env.has_key(key):
                env.__delitem__(key)

        print "\nNow configuring. If something fails, consult `config.log' for details.\n"

        # parse cmdline
        def makeHashTable(args):
            # Turn ['key=value', ...] argv entries into a dict, skipping
            # anything that is not of the key=value form.
            table = {}
            for arg in args:
                if len(arg) > 1:
                    lst = arg.split("=")
                    if len(lst) < 2:
                        continue
                    key = lst[0]
                    value = lst[1]
                    if len(key) > 0 and len(value) > 0:
                        table[key] = value
            return table

        args = makeHashTable(sys.argv)

        env["is_configured"] = 1

        if args.has_key("platform"):
            env["platform"] = args["platform"]
        else:
            env["platform"] = detect.platform()
        fix_windows_spawn(env)

        # Honour CC/CXX compiler overrides from the process environment.
        if os.environ.has_key("CC"):
            env["CC"] = os.environ["CC"]
        if os.environ.has_key("CXX"):
            env["CXX"] = os.environ["CXX"]

        gcc_version = config.check_gcc_version(env)

        # Declare some helper functions for boolean and string options.
        def bool_opt(key, default):
            # Parse args[key] as a boolean into env[key]; exits on bad input.
            if args.has_key(key):
                if args[key] == "no" or args[key] == "false" or args[key] == "0":
                    env[key] = False
                elif args[key] == "yes" or args[key] == "true" or args[key] == "1":
                    env[key] = True
                else:
                    print "\ninvalid", key, "option, must be one of: yes, true, no, false, 0, 1."
                    env.Exit(1)
            else:
                env[key] = default

        def string_opt(key, default):
            # Copy args[key] into env[key], falling back to default.
            if args.has_key(key):
                env[key] = args[key]
            else:
                env[key] = default

        # start with empty FLAGS, in case everything is disabled.
        env["CCFLAGS"] = []

        # profile?
        bool_opt("profile", False)
        if env["profile"]:
            print "profiling enabled,",
            env.AppendUnique(CCFLAGS=["-pg"], LINKFLAGS=["-pg"])
        else:
            print "profiling NOT enabled,",

        # debug?  Normalize yes/no words to a numeric level string first.
        if args.has_key("debug"):
            level = args["debug"]
            if level == "no" or level == "false":
                level = "0"
            elif level == "yes" or level == "true":
                level = "3"
        else:
            level = "0"
        if int(level) == 0:
            print "debugging NOT enabled,",
            env["debug"] = 0
        elif int(level) >= 1 and int(level) <= 3:
            print "level", level, "debugging enabled,",
            env["debug"] = level
            # MinGW gdb chokes on the dwarf debugging format produced by '-ggdb',
            # so use the more generic '-g' instead.
            if env["platform"] == "windows":
                env.AppendUnique(CCFLAGS=["-g"], CPPDEFINES=["DEBUG", "_DEBUG"])
            else:
                env.AppendUnique(CCFLAGS=["-ggdb" + level], CPPDEFINES=["DEBUG", "_DEBUG"])
        else:
            print "\ninvalid debug option, must be one of: yes, true, no, false, 0, 1, 2, 3."
            env.Exit(1)

        # optimize?
        if args.has_key("optimize"):
            level = args["optimize"]
            if level == "no" or level == "false":
                level = "0"
            elif level == "yes" or level == "true":
                level = "2"
        else:
            # Default: no optimization for debug builds, -O1 otherwise.
            if env["debug"]:
                level = "0"
            else:
                level = "1"
        if level == "s" or level == "size" or (int(level) >= 1 and int(level) <= 3):
            print "level", level, "optimizing enabled"
            env["optimize"] = level
            archflags = detect.processor(gcc_version >= ["3", "4", "0"])
            env.AppendUnique(CCFLAGS=["-O" + level, "-pipe"] + archflags)
        elif int(level) == 0:
            print "optimizing NOT enabled,",
            env["optimize"] = 0
        else:
            print "\ninvalid optimize option, must be one of: yes, true, no, false, 0, 1, 2, 3, s, size."
            env.Exit(1)

        # it seems only gcc 4.0 and higher supports this
        if gcc_version >= ["4", "0", "0"]:
            env["CCFLAGS"] += ["-fvisibility=hidden"]

        env["CXXFLAGS"] = env["CCFLAGS"]

        # This is broken, first, scons passes it to .c files compilation too (which is an error),
        # second, linking of a shared object fails with:
        # /usr/bin/ld: build/tools/unitsync/unitsync.os: relocation R_X86_64_PC32 against `std::basic_string<char, std::char_traits<char>, std::allocator<char> >::~basic_string()@@GLIBCXX_3.4' can not be used when making a shared object; recompile with -fPIC
        # Even though -fPIC was passed on compilation of each object.
        # env['CXXFLAGS'] += ['-fvisibility-inlines-hidden']

        # fall back to environment variables if neither debug nor optimize options are present
        if not args.has_key("debug") and not args.has_key("optimize"):
            if os.environ.has_key("CFLAGS"):
                print "using CFLAGS:", os.environ["CFLAGS"]
                env["CCFLAGS"] = SCons.Util.CLVar(os.environ["CFLAGS"])
            if os.environ.has_key("CXXFLAGS"):
                print "using CXXFLAGS:", os.environ["CXXFLAGS"]
                env["CXXFLAGS"] = SCons.Util.CLVar(os.environ["CXXFLAGS"])
            else:
                env["CXXFLAGS"] = env["CCFLAGS"]

        bool_opt("strip", True)
        bool_opt("disable_avi", True)
        bool_opt("disable_clipboard", True)
        bool_opt("disable_lua", True)
        bool_opt("use_tcmalloc", False)
        bool_opt("use_mmgr", False)
        string_opt("prefix", "/usr/local")
        string_opt("datadir", "$prefix/games/taspring")
        string_opt("cachedir", None)

        defines = ["_REENTRANT", "DIRECT_CONTROL_ALLOWED", "_SZ_ONE_DIRECTORY"]
        # Don't define this: it causes a full recompile when you change it, even though it is only used in Main.cpp,
        # and some AIs maybe. Just make exceptions in SConstruct.
        # defines += ['SPRING_DATADIR="\\"'+env['datadir']+'\\""']
        if env["disable_clipboard"]:
            defines += ["NO_CLIPBOARD"]
        if env["disable_avi"]:
            defines += ["NO_AVI"]
        if env["disable_lua"]:
            defines += ["NO_LUA"]
        if env["use_mmgr"]:
            defines += ["USE_MMGR"]
        env.AppendUnique(CPPDEFINES=defines)

        # Platform specific include/library search paths and flags.
        include_path = ["rts", "rts/System"]
        if not env["disable_lua"]:
            include_path += ["lua/luabind", "lua/lua/include"]
        lib_path = []
        if env["platform"] == "freebsd":
            include_path += ["/usr/local/include", "/usr/X11R6/include", "/usr/X11R6/include/GL"]
            lib_path += ["/usr/local/lib", "/usr/X11R6/lib"]
            env.AppendUnique(CCFLAGS=["-pthread"], CXXFLAGS=["-pthread"])
        elif env["platform"] == "linux":
            include_path += ["/usr/include", "/usr/include/GL"]
            env.AppendUnique(CCFLAGS=["-pthread"], CXXFLAGS=["-pthread"], LINKFLAGS=["-Wl,-E"])
        elif env["platform"] == "darwin":
            include_path += ["/usr/include", "/usr/local/include", "/opt/local/include", "/usr/X11R6/include"]
            lib_path += ["/opt/local/lib", "/usr/local/lib"]
            env["SHLINKFLAGS"] = "$LINKFLAGS -dynamic"
            env["SHLIBSUFFIX"] = ".dylib"
        elif env["platform"] == "windows":
            include_path += ["mingwlibs\\include"]
            lib_path += ["mingwlibs\\lib"]
            if os.environ.has_key("MINGDIR"):
                include_path += [os.path.join(os.environ["MINGDIR"], "include")]
                lib_path += [os.path.join(os.environ["MINGDIR"], "lib")]
            else:
                print "ERROR: MINGDIR environment variable not set and MSVC build unsupported."
                print "Set it to your Dev-Cpp or MinGW install directory (e.g. C:\\Dev-Cpp) and try again."
                env.Exit(1)
            env.AppendUnique(CCFLAGS=["-mthreads"], CXXFLAGS=["-mthreads"], LINKFLAGS=["-mwindows"])
        # use '-pthreads' for Solaris, according to /usr/include/boost/config/requires_threads.hpp

        env.AppendUnique(CPPPATH=include_path, LIBPATH=lib_path)

        # Run the actual toolchain/library checks.
        config.configure(env, conf_dir=os.path.join(env["builddir"], "sconf_temp"))

        # Persist the configuration results for subsequent scons runs.
        usropts.Save(usrcachefile, env)
        intopts.Save(intcachefile, env)

    # Should we strip the exe?
    if env.has_key("strip") and env["strip"] and not env["debug"] and not env["profile"] and not env.GetOption("clean"):
        env["strip"] = True
    else:
        env["strip"] = False

    # BuildDir support code
    if env["builddir"]:
        for d in filelist.list_directories(env, "rts"):
            env.BuildDir(os.path.join(env["builddir"], d), d, duplicate=False)
        if not env["disable_lua"]:
            for d in filelist.list_directories(env, "lua"):
                env.BuildDir(os.path.join(env["builddir"], d), d, duplicate=False)
        env.BuildDir(os.path.join(env["builddir"], "tools/unitsync"), "tools/unitsync", duplicate=False)
        for d in filelist.list_directories(env, "AI"):
            env.BuildDir(os.path.join(env["builddir"], d), d, duplicate=False)

    # CacheDir support code
    if env.has_key("cachedir") and env["cachedir"]:
        if not os.path.exists(env["cachedir"]):
            os.makedirs(env["cachedir"])
        env.CacheDir(env["cachedir"])

    fix_windows_spawn(env)
def gen_cmake_command(options, arguments):
    """
    Generate CMake command based on options and arguments.
    """
    # Fixed parts of the command line, in invocation order.
    parts = [
        'CXX=%s' % arguments['--cxx'],
        'cmake',
        '-DEXTRA_CXXFLAGS="%s"' % arguments['--extra-cxx-flags'],
        '-DENABLE_CODE_COVERAGE=%s' % arguments['--coverage'],
        '-DCMAKE_BUILD_TYPE=%s' % arguments['--type'],
        '-G "%s"' % arguments['--generator'],
    ]
    # Extra user-supplied CMake options go last, only when given.
    if arguments['--cmake-options']:
        parts.append('%s' % arguments['--cmake-options'])
    return ' '.join(parts)


# Parse the command line; bail out with usage text on bad input.
try:
    arguments = docopt.docopt(options, argv=None)
except docopt.DocoptExit:
    sys.stderr.write('ERROR: bad input to %s\n' % sys.argv[0])
    sys.stderr.write(options)
    sys.exit(-1)

# The source tree is wherever this script lives; build where the user asked.
root_directory = os.path.dirname(os.path.realpath(__file__))
build_path = arguments['<builddir>']
cmake_command = '%s %s' % (gen_cmake_command(options, arguments), root_directory)
configure(root_directory, build_path, cmake_command, arguments['--show'])
def generate(env):
    """Set up the SCons build environment (older variant of the build script).

    Loads cached user/internal options into `env`.  When `configure` is on
    the scons command line, re-parses key=value command-line arguments,
    probes the toolchain, applies platform-specific flags/paths and saves
    the results back into the option cache files.  Always (re)wires the
    strip decision, BuildDir and CacheDir support.

    NOTE(review): relies on module-level helpers imported elsewhere in this
    file (Options, detect, config, filelist, fix_windows_spawn, SCons).
    """
    # We break unitsync if the filename of the shared object has a 'lib' prefix.
    # It is also nicer for the AI shared objects.
    env['LIBPREFIX'] = ''

    # I don't see any reason to make this configurable --tvo.
    # Note that commenting out / setting this to `None' will break the buildsystem.
    env['builddir'] = 'build'

    # SCons chokes in env.SConsignFile() if path doesn't exist.
    if not os.path.exists(env['builddir']):
        os.makedirs(env['builddir'])

    # Avoid spreading .sconsign files everywhere - keep this line
    # Use os.path.abspath() here because somehow the argument to SConsignFile is relative to the
    # directory of the toplevel trunk/SConstruct and not the current directory, trunk/rts/SConstruct.
    env.SConsignFile(os.path.abspath(os.path.join(env['builddir'], 'scons_signatures')))

    # Option caches live inside the build directory.
    usrcachefile = os.path.join(env['builddir'], 'usropts.py')
    intcachefile = os.path.join(env['builddir'], 'intopts.py')
    usropts = Options(usrcachefile)
    intopts = Options(intcachefile)

    #user visible options
    usropts.AddOptions(
        #permanent options
        ('platform', 'Set to linux, freebsd or windows', None),
        ('debug', 'Set to yes to produce a binary with debug information', 0),
        ('optimize', 'Enable processor optimizations during compilation', 1),
        ('profile', 'Set to yes to produce a binary with profiling information', False),
        ('prefix', 'Install prefix', '/usr/local'),
        ('datadir', 'Data directory', '$prefix/games/taspring'),
        ('strip', 'Discard symbols from the executable (only when neither debugging nor profiling)', True),
        #porting options - optional in a first phase
        ('disable_avi', 'Set to no to turn on avi support', True),
        ('disable_clipboard', 'Set to no to turn on clipboard code', True),
        #other ported parts
        ('disable_lua', 'Set to no to turn on Lua support', True),
        ('use_tcmalloc', 'Use tcmalloc from goog-perftools for memory allocation', False),
        ('use_mmgr', 'Use memory manager', False),
        ('cachedir', 'Cache directory (see scons manual)', None))

    #internal options
    intopts.AddOptions(
        ('LINKFLAGS', 'linker flags'),
        ('LIBPATH', 'library path'),
        ('LIBS', 'libraries'),
        ('CCFLAGS', 'c compiler flags'),
        ('CXXFLAGS', 'c++ compiler flags'),
        ('CPPDEFINES', 'c preprocessor defines'),
        ('CPPPATH', 'c preprocessor include path'),
        ('is_configured', 'configuration version stamp'))

    usropts.Update(env)
    intopts.Update(env)

    env.Help(usropts.GenerateHelpText(env))

    # Use this to avoid an error message 'how to make target configure ?'
    env.Alias('configure', None)

    # Refuse to build when no valid cached configuration exists and we are
    # neither configuring nor cleaning.
    if not 'configure' in sys.argv and not ((env.has_key('is_configured') and env['is_configured'] == 1) or env.GetOption('clean')):
        print "Not configured or configure script updated. Run `scons configure' first."
        print "Use `scons --help' to show available configure options to `scons configure'."
        env.Exit(1)

    if 'configure' in sys.argv:

        # be paranoid, unset existing variables
        for key in ['platform', 'debug', 'optimize', 'profile', 'prefix', 'datadir', 'cachedir',
                    'strip', 'disable_avi', 'disable_lua', 'disable_aio', 'use_tcmalloc', 'use_mmgr',
                    'LINKFLAGS', 'LIBPATH', 'LIBS', 'CCFLAGS', 'CXXFLAGS', 'CPPDEFINES', 'CPPPATH',
                    'is_configured']:
            if env.has_key(key):
                env.__delitem__(key)

        print "\nNow configuring. If something fails, consult `config.log' for details.\n"

        #parse cmdline
        def makeHashTable(args):
            # Turn ['key=value', ...] argv entries into a dict, skipping
            # anything that is not of the key=value form.
            table = { }
            for arg in args:
                if len(arg) > 1:
                    lst = arg.split('=')
                    if len(lst) < 2: continue
                    key = lst[0]
                    value = lst[1]
                    if len(key) > 0 and len(value) > 0:
                        table[key] = value
            return table

        args = makeHashTable(sys.argv)

        env['is_configured'] = 1

        if args.has_key('platform'): env['platform'] = args['platform']
        else: env['platform'] = detect.platform()
        fix_windows_spawn(env)

        gcc_version = config.check_gcc_version(env)

        # Declare some helper functions for boolean and string options.
        def bool_opt(key, default):
            # Parse args[key] as a boolean into env[key]; exits on bad input.
            if args.has_key(key):
                if args[key] == 'no' or args[key] == 'false' or args[key] == '0':
                    env[key] = False
                elif args[key] == 'yes' or args[key] == 'true' or args[key] == '1':
                    env[key] = True
                else:
                    print "\ninvalid", key, "option, must be one of: yes, true, no, false, 0, 1."
                    env.Exit(1)
            else:
                env[key] = default

        def string_opt(key, default):
            # Copy args[key] into env[key], falling back to default.
            if args.has_key(key):
                env[key] = args[key]
            else:
                env[key] = default

        # start with empty FLAGS, in case everything is disabled.
        env['CCFLAGS'] = []

        # profile?
        bool_opt('profile', False)
        if env['profile']:
            print "profiling enabled,",
            env.AppendUnique(CCFLAGS=['-pg'], LINKFLAGS=['-pg'])
        else:
            print "profiling NOT enabled,",

        # debug?  Normalize yes/no words to a numeric level string first.
        if args.has_key('debug'):
            level = args['debug']
            if level == 'no' or level == 'false': level = '0'
            elif level == 'yes' or level == 'true': level = '3'
        else:
            level = '0'
        if int(level) == 0:
            print "debugging NOT enabled,",
            env['debug'] = 0
        elif int(level) >= 1 and int(level) <= 3:
            print "level", level, "debugging enabled,",
            env['debug'] = level
            # MinGW gdb chokes on the dwarf debugging format produced by '-ggdb',
            # so use the more generic '-g' instead.
            if env['platform'] == 'windows':
                env.AppendUnique(CCFLAGS=['-g'], CPPDEFINES=['DEBUG', '_DEBUG'])
            else:
                env.AppendUnique(CCFLAGS=['-ggdb'+level], CPPDEFINES=['DEBUG', '_DEBUG'])
        else:
            print "\ninvalid debug option, must be one of: yes, true, no, false, 0, 1, 2, 3."
            env.Exit(1)

        # optimize?
        if args.has_key('optimize'):
            level = args['optimize']
            if level == 'no' or level == 'false': level = '0'
            elif level == 'yes' or level == 'true': level = '2'
        else:
            # Default: no optimization for debug builds, -O1 otherwise.
            if env['debug']: level = '0'
            else: level = '1'
        if level == 's' or level == 'size' or (int(level) >= 1 and int(level) <= 3):
            print "level", level, "optimizing enabled"
            env['optimize'] = level
            archflags = detect.processor(gcc_version >= ['3','4','0'])
            env.AppendUnique(CCFLAGS=['-O'+level, '-pipe']+archflags)
        elif int(level) == 0:
            print "optimizing NOT enabled,",
            env['optimize'] = 0
        else:
            print "\ninvalid optimize option, must be one of: yes, true, no, false, 0, 1, 2, 3, s, size."
            env.Exit(1)

        # it seems only gcc 4.0 and higher supports this
        if gcc_version >= ['4','0','0']:
            env['CCFLAGS'] += ['-fvisibility=hidden']

        env['CXXFLAGS'] = env['CCFLAGS']

        # This is broken, first, scons passes it to .c files compilation too (which is an error),
        # second, linking of a shared object fails with:
        # /usr/bin/ld: build/tools/unitsync/unitsync.os: relocation R_X86_64_PC32 against `std::basic_string<char, std::char_traits<char>, std::allocator<char> >::~basic_string()@@GLIBCXX_3.4' can not be used when making a shared object; recompile with -fPIC
        # Even though -fPIC was passed on compilation of each object.
        #env['CXXFLAGS'] += ['-fvisibility-inlines-hidden']

        # fall back to environment variables if neither debug nor optimize options are present
        if not args.has_key('debug') and not args.has_key('optimize'):
            if os.environ.has_key('CFLAGS'):
                print "using CFLAGS:", os.environ['CFLAGS']
                env['CCFLAGS'] = SCons.Util.CLVar(os.environ['CFLAGS'])
            if os.environ.has_key('CXXFLAGS'):
                print "using CXXFLAGS:", os.environ['CXXFLAGS']
                env['CXXFLAGS'] = SCons.Util.CLVar(os.environ['CXXFLAGS'])
            else:
                env['CXXFLAGS'] = env['CCFLAGS']

        bool_opt('strip', True)
        bool_opt('disable_avi', True)
        bool_opt('disable_clipboard', True)
        bool_opt('disable_lua', True)
        bool_opt('use_tcmalloc', False)
        bool_opt('use_mmgr', False)
        string_opt('prefix', '/usr/local')
        string_opt('datadir', '$prefix/games/taspring')
        string_opt('cachedir', None)

        defines = ['_REENTRANT', 'DIRECT_CONTROL_ALLOWED', '_SZ_ONE_DIRECTORY']
        #Don't define this: it causes a full recompile when you change it, even though it is only used in Main.cpp,
        #and some AIs maybe. Just make exceptions in SConstruct.
        #defines += ['SPRING_DATADIR="\\"'+env['datadir']+'\\""']
        if env['disable_clipboard']: defines += ['NO_CLIPBOARD']
        if env['disable_avi'] : defines += ['NO_AVI']
        if env['disable_lua'] : defines += ['NO_LUA']
        if env['use_mmgr'] : defines += ['USE_MMGR']
        env.AppendUnique(CPPDEFINES = defines)

        # Platform specific include/library search paths and flags.
        include_path = ['rts', 'rts/System']
        lib_path = []
        if env['platform'] == 'freebsd':
            include_path += ['/usr/local/include', '/usr/X11R6/include', '/usr/X11R6/include/GL']
            lib_path += ['/usr/local/lib', '/usr/X11R6/lib']
            env.AppendUnique(CCFLAGS = ['-pthread'], CXXFLAGS = ['-pthread'])
        elif env['platform'] == 'linux':
            include_path += ['/usr/include', '/usr/include/GL']
            env.AppendUnique(CCFLAGS = ['-pthread'], CXXFLAGS = ['-pthread'])
        elif env['platform'] == 'darwin':
            include_path += ['/usr/include', '/usr/local/include', '/opt/local/include', '/usr/X11R6/include']
            lib_path += ['/opt/local/lib', '/usr/local/lib']
            env['SHLINKFLAGS'] = '$LINKFLAGS -dynamic'
            env['SHLIBSUFFIX'] = '.dylib'
        elif env['platform'] == 'windows':
            include_path += ['mingwlibs\\include']
            lib_path += ['mingwlibs\\lib']
            if os.environ.has_key('MINGDIR'):
                include_path += [os.path.join(os.environ['MINGDIR'], 'include')]
                lib_path += [os.path.join(os.environ['MINGDIR'], 'lib')]
            else:
                print 'ERROR: MINGDIR environment variable not set and MSVC build unsupported.'
                print 'Set it to your Dev-Cpp or MinGW install directory (e.g. C:\\Dev-Cpp) and try again.'
                env.Exit(1)
            env.AppendUnique(CCFLAGS = ['-mthreads'], CXXFLAGS = ['-mthreads'], LINKFLAGS = ['-mwindows'])
        # use '-pthreads' for Solaris, according to /usr/include/boost/config/requires_threads.hpp

        env.AppendUnique(CPPPATH=include_path, LIBPATH=lib_path)

        # Run the actual toolchain/library checks.
        config.configure(env, conf_dir=os.path.join(env['builddir'], 'sconf_temp'))

        # Persist the configuration results for subsequent scons runs.
        usropts.Save(usrcachefile, env)
        intopts.Save(intcachefile, env)

    #Should we strip the exe?
    if env.has_key('strip') and env['strip'] and not env['debug'] and not env['profile'] and not env.GetOption('clean'):
        env['strip'] = True
    else:
        env['strip'] = False

    #BuildDir support code
    if env['builddir']:
        for d in filelist.list_directories(env, 'rts'):
            env.BuildDir(os.path.join(env['builddir'], d), d, duplicate = False)
        env.BuildDir(os.path.join(env['builddir'], 'tools/unitsync'), 'tools/unitsync', duplicate = False)
        for d in filelist.list_directories(env, 'AI'):
            env.BuildDir(os.path.join(env['builddir'], d), d, duplicate = False)

    #CacheDir support code
    if env.has_key('cachedir') and env['cachedir']:
        if not os.path.exists(env['cachedir']):
            os.makedirs(env['cachedir'])
        env.CacheDir(env['cachedir'])

    fix_windows_spawn(env)
#!/bin/python import socket, MySQLdb, shelve, threading, socket, shutil, time, datetime import config RecvBuffer = 80 ## Validate config file config_response = config.configure() if config_response != "Error": print "*** Config read OK ***" conf = shelve.open(".config") for i,v in config_response.iteritems(): conf[i] = v conf.close() else: sys.exit() ## Make the config dictionary config = shelve.open(".config") dbTable = config["MySQL_table"] ListenerHost = config["ServerIP"] ListenerPort = int(config["ServerPort"]) ############################################################################################### ############################################################################################### ## Classes ## Listener Thread class Listener(threading.Thread): def __init__(self):
def generate(env):
    """Populate the SCons Environment for building Spring.

    Loads cached user/internal options from the build dir, and when
    'configure' is on the command line performs full (re)configuration:
    toolchain selection, arch/debug/optimize flags, feature toggles and
    platform include/library paths.  Exits the process on invalid options
    or when the tree is not yet configured.
    """
    # Fix scons & gcc borkedness (scons not looking in PATH for gcc
    # and mingw gcc 4.1 linker crashing if TMP or TEMP isn't set).
    env['ENV']['PATH'] = os.environ['PATH']
    if os.environ.has_key('MINGDIR'): env['ENV']['MINGDIR'] = os.environ['MINGDIR']
    if os.environ.has_key('TMP'): env['ENV']['TMP'] = os.environ['TMP']
    if os.environ.has_key('TEMP'): env['ENV']['TEMP'] = os.environ['TEMP']

    #parse cmdline
    def makeHashTable(args):
        # Turn ['key=value', ...] command-line words into a dict; words
        # without '=' or with empty key/value are silently skipped.
        table = { }
        for arg in args:
            if len(arg) > 1:
                lst = arg.split('=')
                if len(lst) < 2: continue
                key = lst[0]
                value = lst[1]
                if len(key) > 0 and len(value) > 0: table[key] = value
        return table

    args = makeHashTable(sys.argv)

    # We break unitsync if the filename of the shared object has a 'lib' prefix.
    # It is also nicer for the AI shared objects.
    env['LIBPREFIX'] = ''

    # I don't see any reason to make this configurable --tvo.
    # Note that commenting out / setting this to `None' will break the buildsystem.
    env['builddir'] = '#build'
    if args.has_key('builddir'): env['builddir'] = args['builddir']
    bd = filelist.getAbsDir(env, env['builddir'])
    # SCons chokes in env.SConsignFile() if path doesn't exist.
    if not os.path.exists(bd): os.makedirs(bd)
    # Avoid spreading .sconsign files everywhere - keep this line
    env.SConsignFile(os.path.join(bd, 'scons_signatures'))

    # Option caches persist the configuration between scons invocations.
    usrcachefile = os.path.join(bd, 'usropts.py')
    intcachefile = os.path.join(bd, 'intopts.py')
    usropts = Options(usrcachefile)
    intopts = Options(intcachefile)

    #user visible options
    usropts.AddOptions(
        #permanent options
        ('platform',          'Set to linux, freebsd or windows', None),
        ('gml',               'Set to yes to enable the OpenGL Multithreading Library', False),
        ('gmlsim',            'Set to yes to enable parallel threads for Sim/Draw', False),
        ('gmldebug',          'Set to yes to enable GML call debugging', False),
        ('debug',             'Set to yes to produce a binary with debug information', 0),
        ('debugdefines',      'Set to no to suppress DEBUG and _DEBUG preprocessor #defines (use to add symbols to release build)', True),
        ('syncdebug',         'Set to yes to enable the sync debugger', False),
        ('synccheck',         'Set to yes to enable sync checker & resyncer', True),
        ('synctrace',         'Enable sync tracing', False),
        ('optimize',          'Enable processor optimizations during compilation', 1),
        ('arch',              'CPU architecture to use', 'auto'),
        ('profile',           'Set to yes to produce a binary with profiling information', False),
        ('profile_generate',  'Set to yes to compile with -fprofile-generate to generate profiling information', False),
        ('profile_use',       'Set to yes to compile with -fprofile-use to use profiling information', False),
        ('ai_interfaces',     'Which AI Interfaces (and AIs using them) to build [all|native|java|none]', 'all'),
        ('cpppath',           'Set path to extra header files', []),
        ('libpath',           'Set path to extra libraries', []),
        ('fpmath',            'Set to 387 or SSE on i386 and AMD64 architectures', 'sse'),
        ('prefix',            'Install prefix used at runtime', '/usr/local'),
        ('installprefix',     'Install prefix used for installion', '$prefix'),
        ('builddir',          'Build directory, used at build-time', '#build'),
        ('mingwlibsdir',      'MinGW libraries dir', '#mingwlibs'),
        ('datadir',           'Data directory (relative to prefix)', 'share/games/spring'),
        ('bindir',            'Directory for executables (rel. to prefix)', 'games'),
        ('libdir',            'Directory for AI plugin modules (rel. to prefix)', 'lib/spring'),
        ('strip',             'Discard symbols from the executable (only when neither debugging nor profiling)', False),
        #porting options - optional in a first phase
        ('disable_avi',       'Set to no to turn on avi support', 'False on windows, True otherwise'),
        #other ported parts
        ('use_tcmalloc',      'Use tcmalloc from goog-perftools for memory allocation', False),
        ('use_nedmalloc',     'Use nedmalloc for memory allocation', False),
        ('use_mmgr',          'Use memory manager', False),
        ('use_gch',           'Use gcc precompiled header', True),
        ('dc_allowed',        'Specifies whether FPS mode (Direct Control) is allowed in game', True),
        ('cachedir',          'Cache directory (see scons manual)', None))

    #internal options
    intopts.AddOptions(
        ('LINKFLAGS',       'linker flags'),
        ('LIBPATH',         'library path'),
        ('LIBS',            'libraries'),
        ('CCFLAGS',         'c compiler flags'),
        ('CXXFLAGS',        'c++ compiler flags'),
        ('CPPDEFINES',      'c preprocessor defines'),
        ('CPPPATH',         'c preprocessor include path'),
        ('CC',              'c compiler'),
        ('CXX',             'c++ compiler'),
        ('RANLIB',          'ranlib'),
        ('AR',              'ar'),
        ('spring_defines',  'extra c preprocessor defines for spring'),
        ('streflop_extra',  'extra options for streflop Makefile'),
        ('is_configured',   'configuration version stamp'))

    usropts.Update(env)
    intopts.Update(env)
    env.Help(usropts.GenerateHelpText(env))

    # make the build dir globally absolute
    env['builddir'] = bd

    # Use this to avoid an error message 'how to make target configure ?'
    env.Alias('configure', None)

    # Refuse to build when not configured (stamp value 8 is this script's
    # configuration version; bumping it forces a reconfigure).
    if not 'configure' in sys.argv and not ((env.has_key('is_configured') and env['is_configured'] == 8) or env.GetOption('clean')):
        print "Not configured or configure script updated. Run `scons configure' first."
        print "Use `scons --help' to show available configure options to `scons configure'."
        env.Exit(1)

    # Dont throw an exception if scons -c is run before scons configure (this is done by debian build system for example)
    if not env.has_key('is_configured') and env.GetOption('clean'):
        print "Not configured: nothing to clean"
        env.Exit(0)

    if 'configure' in sys.argv:
        # be paranoid, unset existing variables
        for key in ['platform', 'gml', 'gmlsim', 'gmldebug', 'debug', 'optimize', 'profile', 'profile_use', 'profile_generate', 'cpppath', 'libpath', 'prefix', 'installprefix', 'builddir', 'mingwlibsdir', 'datadir', 'bindir', 'libdir', 'cachedir', 'strip', 'disable_avi', 'use_tcmalloc', 'use_nedmalloc', 'use_mmgr', 'use_gch', 'ai_interfaces', 'LINKFLAGS', 'LIBPATH', 'LIBS', 'CCFLAGS', 'CXXFLAGS', 'CPPDEFINES', 'CPPPATH', 'CC', 'CXX', 'is_configured', 'spring_defines', 'arch']:
            if env.has_key(key): env.__delitem__(key)

        print "\nNow configuring. If something fails, consult `config.log' for details.\n"
        env['is_configured'] = 8

        if args.has_key('platform'): env['platform'] = args['platform']
        else: env['platform'] = detect.platform()
        fix_windows_spawn(env)

        # Toolchain: honor CC/CXX from the environment, else gcc/g++.
        if os.environ.has_key('CC'): env['CC'] = os.environ['CC']
        else: env['CC'] = 'gcc'
        if os.environ.has_key('CXX'): env['CXX'] = os.environ['CXX']
        else: env['CXX'] = 'g++'

        # select proper tools for win crosscompilation by default
        is_crosscompiling = env['platform'] == 'windows' and os.name != 'nt'
        if os.environ.has_key('AR'): env['AR'] = os.environ['AR']
        elif is_crosscompiling: env['AR'] = 'i586-mingw32msvc-ar'
        if os.environ.has_key('RANLIB'): env['RANLIB'] = os.environ['RANLIB']
        elif is_crosscompiling: env['RANLIB'] = 'i586-mingw32msvc-ranlib'

        gcc_version = config.check_gcc_version(env)

        print 'Toolchain options:'
        print 'CC=%s' % env['CC']
        print 'CXX=%s' % env['CXX']
        print 'AR=%s' % env['AR']
        print 'RANLIB=%s' % env['RANLIB']
        print

        # Declare some helper functions for boolean and string options.
        def bool_opt(key, default):
            # cmdline yes/true/1 -> True, no/false/0 -> False, else exit.
            if args.has_key(key):
                if args[key] == 'no' or args[key] == 'false' or args[key] == '0': env[key] = False
                elif args[key] == 'yes' or args[key] == 'true' or args[key] == '1': env[key] = True
                else:
                    print "\ninvalid", key, "option, must be one of: yes, true, no, false, 0, 1."
                    env.Exit(1)
            else: env[key] = default

        def string_opt(key, default):
            if args.has_key(key): env[key] = args[key]
            else: env[key] = default

        def stringarray_opt(key, default):
            # ';'-separated cmdline value -> list of strings.
            if args.has_key(key): env[key] = args[key].split(';')
            else: env[key] = default

        # Use single precision constants only.
        # This should be redundant with the modifications done by tools/double_to_single_precision.sed.
        # Other options copied from streflop makefiles.
        env['CCFLAGS'] = ['-fsingle-precision-constant', '-frounding-math', '-fsignaling-nans', '-mieee-fp']

        # set architecture
        if 'arch' in args and args['arch'] != 'auto':
            arch = args['arch']
            if not arch or arch == 'none':
                print 'Configuring for default architecture'
                marchFlag = ''
            else:
                print 'Configuring for', arch
                marchFlag = '-march=' + arch
        else:
            bits, archname = platform.architecture()
            if bits == '32bit' or env['platform'] == 'windows':
                print 'Configuring for i686'
                marchFlag = '-march=i686'
            else:
                print 'Configuring for default architecture'
                marchFlag = ''
        env['CCFLAGS'] += [marchFlag]
        env['streflop_extra'] = [marchFlag]

        # profile?
        bool_opt('profile', False)
        if env['profile']:
            print "profiling enabled,",
            env.AppendUnique(CCFLAGS=['-pg'], LINKFLAGS=['-pg'])
        else: print "profiling NOT enabled,",

        # debug?
        gcc_warnings = [
            '-Wchar-subscripts', '-Wformat=2', '-Winit-self', '-Wimplicit',
            '-Wmissing-braces', '-Wparentheses', '-Wsequence-point',
            '-Wreturn-type', '-Wswitch', '-Wtrigraphs', '-Wunused',
            '-Wuninitialized', '-Wunknown-pragmas'
        ]
        if args.has_key('debug'):
            level = args['debug']
            if level == 'no' or level == 'false': level = '0'
            elif level == 'yes' or level == 'true': level = '3'
        else:
            level = '0'
        if int(level) == 0:
            print "debugging NOT enabled,",
            env['debug'] = 0
        elif int(level) >= 1 and int(level) <= 3:
            print "level", level, "debugging enabled,",
            env['debug'] = level
            # MinGW gdb chokes on the dwarf debugging format produced by '-ggdb',
            # so use the more generic '-g' instead.
            # MinGW 4.2.1 gdb does not like the DWARF2 debug format generated by default,
            # so produce STABS instead
            if env['platform'] == 'windows' or env['syncdebug']:
                env.AppendUnique(CCFLAGS=['-gstabs'])
            else:
                env.AppendUnique(CCFLAGS=['-ggdb'+level])
            # We can't enable -Wall because that silently enables an order of
            # initialization warning for initializers in constructors that
            # can not be disabled. (and it takes days to fix them all in code)
            env.AppendUnique(CFLAGS=gcc_warnings, CCFLAGS=gcc_warnings)
            if not args.has_key('debugdefines') or not args['debugdefines']:
                env.AppendUnique(CPPDEFINES=['DEBUG', '_DEBUG', 'NO_CATCH_EXCEPTIONS'])
            else:
                env.AppendUnique(CPPDEFINES=['NDEBUG'])
        else:
            print "\ninvalid debug option, must be one of: yes, true, no, false, 0, 1, 2, 3."
            env.Exit(1)
        # debugdefines alone adds debug defines/warnings to a release build.
        if args.has_key('debugdefines') and args['debugdefines']:
            env.AppendUnique(CPPDEFINES= ['DEBUG', '_DEBUG', 'NO_CATCH_EXCEPTIONS'], CFLAGS=gcc_warnings, CCFLAGS=gcc_warnings)

        # optimize?
        if args.has_key('optimize'):
            level = args['optimize']
            if level == 'no' or level == 'false': level = '0'
            elif level == 'yes' or level == 'true': level = '2'
        else:
            if env['debug']: level = '0'
            else: level = '2'
        if level == 's' or level == 'size' or (int(level) >= 1 and int(level) <= 3):
            print "level", level, "optimizing enabled"
            env['optimize'] = level
            #archflags = detect.processor(gcc_version >= ['3','4','0'])
            # -fstrict-aliasing causes constructs like:
            #    float f = 10.0f; int x = *(int*)&f;
            # to break.
            # Since those constructs are used in the netcode and MathTest code, we disable the optimization.
            env.AppendUnique(CCFLAGS=['-O'+level, '-pipe', '-fno-strict-aliasing'])
            # MinGW 4.2 compiled binaries insta crash with this on...
            #if int(level) <= 2:
            #	env.AppendUnique(CCFLAGS=['-finline-functions','-funroll-loops'])
        elif int(level) == 0:
            print "optimizing NOT enabled",
            env['optimize'] = 0
        else:
            print "\ninvalid optimize option, must be one of: yes, true, no, false, 0, 1, 2, 3, s, size."
            env.Exit(1)

        # Generate profiling information? (for profile directed optimization)
        bool_opt('profile_generate', False)
        if env['profile_generate']:
            print "build will generate profiling information"
            env.AppendUnique(CCFLAGS=['-fprofile-generate'], LINKFLAGS=['-fprofile-generate'])

        # Use profiling information? (for profile directed optimization)
        bool_opt('profile_use', False)
        if env['profile_use']:
            print "build will use profiling information"
            env.AppendUnique(CCFLAGS=['-fprofile-use'], LINKFLAGS=['-fprofile-use'])

        # Must come before the '-fvisibility=hidden' code.
        bool_opt('syncdebug', False)
        bool_opt('synccheck', True)
        bool_opt('synctrace', False)
        string_opt('fpmath', 'sse')

        # If sync debugger is on, disable inlining, as it makes it much harder to follow backtraces.
        if env['syncdebug']:
            # Disable all possible inlining, just to be sure.
            env['CCFLAGS'] += ['-fno-default-inline', '-fno-inline', '-fno-inline-functions', '-fno-inline-functions-called-once']

        # It seems only gcc 4.0 and higher supports this.
        if gcc_version >= ['4','0','0'] and env['platform'] != 'windows':
            env['CCFLAGS'] += ['-fvisibility=hidden']

        # Allow easy switching between 387 and SSE fpmath.
        if env['fpmath']:
            env['CCFLAGS'] += ['-mfpmath='+env['fpmath']]
            env['streflop_extra'] += ['-mfpmath='+env['fpmath']]
            if env['fpmath'] == '387':
                print "WARNING: SSE math vs X87 math is unsynced!"
                print "WARNING: Do not go online with the binary you are currently building!"
            else:
                env['CCFLAGS'] += ['-msse']
                env['streflop_extra'] += ['-msse']

        env['CXXFLAGS'] = env['CCFLAGS']

        # Do not do this anymore because it may severely mess up our carefully selected options.
        # Print a warning and ignore them instead.
        # fall back to environment variables if neither debug nor optimize options are present
        if not args.has_key('debug') and not args.has_key('optimize'):
            if os.environ.has_key('CFLAGS'):
                #print "using CFLAGS:", os.environ['CFLAGS']
                #env['CCFLAGS'] = SCons.Util.CLVar(os.environ['CFLAGS'])
                print "WARNING: attempt to use environment CFLAGS has been ignored."
            if os.environ.has_key('CXXFLAGS'):
                #print "using CXXFLAGS:", os.environ['CXXFLAGS']
                #env['CXXFLAGS'] = SCons.Util.CLVar(os.environ['CXXFLAGS'])
                print "WARNING: attempt to use environment CXXFLAGS has been ignored."
        #else:
        #	env['CXXFLAGS'] = env['CCFLAGS']

        # nedmalloc crashes horribly when crosscompiled
        # on mingw gcc 4.2.1
        nedmalloc_default = False

        bool_opt('gml', False)
        bool_opt('gmlsim', False)
        bool_opt('gmldebug', False)
        bool_opt('strip', False)
        bool_opt('disable_avi', env['platform'] != 'windows')
        bool_opt('use_tcmalloc', False)
        bool_opt('use_nedmalloc', nedmalloc_default)
        bool_opt('use_mmgr', False)
        bool_opt('use_gch', True)
        bool_opt('dc_allowed', True)
        string_opt('prefix', '/usr/local')
        string_opt('installprefix', '$prefix')
        # NOTE(review): the trailing commas on the next two lines turn the
        # statements into 1-tuples; harmless at runtime but likely unintended.
        string_opt('builddir', '#build'),
        string_opt('mingwlibsdir', '#mingwlibs'),
        string_opt('datadir', 'share/games/spring')
        string_opt('bindir', 'games')
        string_opt('libdir', 'lib/spring')
        string_opt('cachedir', None)
        string_opt('ai_interfaces', 'all')

        # Make a list of preprocessor defines.
        env.AppendUnique(CPPDEFINES = ['_REENTRANT', '_SZ_ONE_DIRECTORY'])
        spring_defines = []

        if env['use_gch']:
            env.AppendUnique(CXXFLAGS = ['-DUSE_PRECOMPILED_HEADER'])
            print 'Precompiled header enabled'
        else:
            print 'Precompiled header disabled'

        # gml library
        if env['gml']:
            env.AppendUnique(CCFLAGS = ['-mno-tls-direct-seg-refs'], CXXFLAGS = ['-mno-tls-direct-seg-refs'], LINKFLAGS = ['-mno-tls-direct-seg-refs'])
            spring_defines += ['USE_GML']
            print 'OpenGL Multithreading Library is enabled'
            if env['gmlsim']:
                spring_defines += ['USE_GML_SIM']
                print 'Parallel threads for Sim/Draw is enabled'
                if env['gmldebug']:
                    spring_defines += ['USE_GML_DEBUG']
                    print 'GML call debugging is enabled'
                else:
                    print 'GML call debugging is NOT enabled'
            else:
                print 'Parallel threads for Sim/Draw is NOT enabled'
        else:
            print 'OpenGL Multithreading Library and parallel threads for Sim/Draw are NOT enabled'

        # Add define specifying type of floating point math to use.
        if env['fpmath']:
            if env['fpmath'] == 'sse':
                spring_defines += ['STREFLOP_SSE']
            if env['fpmath'] == '387':
                spring_defines += ['STREFLOP_X87']

        # Add/remove SYNCDEBUG to enable/disable sync debugging.
        if env['syncdebug']:
            spring_defines += ['SYNCDEBUG']
        if env['synccheck']:
            spring_defines += ['SYNCCHECK']
        if env['synctrace']:
            spring_defines += ['TRACE_SYNC']

        # Don't define this: it causes a full recompile when you change it, even though it is only used in Main.cpp,
        # and some AIs maybe. Just make exceptions in SConstruct.
        #defines += ['SPRING_DATADIR="\\"'+env['datadir']+'\\""']
        if env['disable_avi']   : spring_defines += ['NO_AVI']
        if env['use_mmgr']      : spring_defines += ['USE_MMGR']
        if env['dc_allowed']    : spring_defines += ['DIRECT_CONTROL_ALLOWED']

        env['spring_defines'] = spring_defines

        stringarray_opt('cpppath', [])
        stringarray_opt('libpath', [])

        # Platform specific configuration.
        include_path = env['cpppath'] + ['#rts', '#rts/System']
        include_path += ['#rts/lib/lua/include', '#rts/lib/streflop']
        lib_path = env['libpath']

        if env['platform'] == 'freebsd':
            include_path += ['/usr/local/include', '/usr/X11R6/include', '/usr/X11R6/include/GL']
            lib_path += ['/usr/local/lib', '/usr/X11R6/lib']
            env.AppendUnique(CCFLAGS = ['-pthread'], CXXFLAGS = ['-pthread'])
        elif env['platform'] == 'linux':
            include_path += ['/usr/include', '/usr/include/GL']
            # -Wl,-E exports symbols from the executable (used by dlopen'ed AIs).
            env.AppendUnique(CCFLAGS = ['-pthread'], CXXFLAGS = ['-pthread'], LINKFLAGS = ['-Wl,-E'])
        elif env['platform'] == 'darwin':
            include_path += ['/usr/include', '/usr/local/include', '/opt/local/include', '/usr/X11R6/include']
            lib_path += ['/opt/local/lib', '/usr/local/lib']
            env['SHLINKFLAGS'] = '$LINKFLAGS -dynamic'
            env['SHLIBSUFFIX'] = '.dylib'
        elif env['platform'] == 'windows':
            # NOTE(review): both the relative and absolute variants of the
            # mingwlibs paths are appended — presumably deliberate so both
            # spellings resolve; verify before removing the duplicates.
            include_path += [os.path.join(env['mingwlibsdir'], 'include')]
            lib_path += [os.path.join(env['mingwlibsdir'], 'lib')]
            lib_path += [os.path.join(env['mingwlibsdir'], 'dll')]
            include_path += [os.path.abspath(os.path.join(env['mingwlibsdir'], 'include'))]
            lib_path += [os.path.abspath(os.path.join(env['mingwlibsdir'], 'lib'))]
            lib_path += [os.path.abspath(os.path.join(env['mingwlibsdir'], 'dll'))]
            if os.environ.has_key('MINGDIR'):
                include_path += [os.path.join(os.environ['MINGDIR'], 'include')]
                lib_path += [os.path.join(os.environ['MINGDIR'], 'lib')]
                lib_path += [os.path.join(os.environ['MINGDIR'], 'dll')]
            else:
                print 'ERROR: MINGDIR environment variable not set and MSVC build unsupported.'
                print 'Set it to your Dev-Cpp or MinGW install directory (e.g. C:\\Dev-Cpp) and try again.'
                env.Exit(1)
            env.AppendUnique(CCFLAGS = ['-mthreads'], CXXFLAGS = ['-mthreads'], LINKFLAGS = ['-mwindows', '-mthreads'])
        # use '-pthreads' for Solaris, according to /usr/include/boost/config/requires_threads.hpp

        env.AppendUnique(CPPPATH=include_path, LIBPATH=lib_path)

        config.configure(env, conf_dir=os.path.join(env['builddir'], 'sconf_temp'))

        usropts.Save(usrcachefile, env)
        intopts.Save(intcachefile, env)

    # make the prefix absolute
    env['prefix'] = filelist.getAbsDir(env, env['prefix'])

    # Substitute prefix in installprefix, and make installprefix absolute
    env['installprefix'] = filelist.getAbsDir(env, env.subst(env['installprefix']))

    # Fix up some suffices for mingw crosscompile.
    if env['platform'] == 'windows':
        env['SHLIBSUFFIX'] = '.dll'
        env['PROGSUFFIX'] = '.exe'

    #Should we strip the exe?
    # Only strip when explicitly requested and neither debugging nor
    # profiling nor cleaning.
    if env.has_key('strip') and env['strip'] and not env['debug'] and not env['profile'] and not env.GetOption('clean'):
        env['strip'] = True
    else:
        env['strip'] = False

    #BuildDir support code
    if env['builddir']:
        for d in filelist.list_directories(env, 'rts'):
            env.BuildDir(os.path.join(env['builddir'], d), d, duplicate = False)
        for d in filelist.list_directories(env, 'AI'):
            env.BuildDir(os.path.join(env['builddir'], d), d, duplicate = False)

    #CacheDir support code
    if env.has_key('cachedir') and env['cachedir']:
        if not os.path.exists(env['cachedir']):
            os.makedirs(env['cachedir'])
        env.CacheDir(env['cachedir'])

    fix_windows_spawn(env)
def configure_deps(conf): env = conf.env # libfah if env.get('fah', 0): config.configure('libfah', conf) # DIAG Options if env.get('qrdiag', 0): env.AppendUnique(CPPDEFINES = ['HAVE_QRDIAG']) # GUI Options if env.get('gui',0): if env['PLATFORM'] == 'win32': config.require_lib(conf, 'wsock32') else: config.require_lib(conf, 'pthread') env.AppendUnique(CPPDEFINES = ['HAVE_GUI']) # LAPACK have_lapack = False lapack = env.get('lapack', 'any') if lapack == '1' or lapack is True: lapack = 'any' elif lapack == '0' or lapack is False: lapack = 'none' if lapack != 'none': # Intel MKL LAPACK if not have_lapack and lapack in ['any', 'mkl']: have_lapack = config.configure('mkl', conf) if have_lapack: env.AppendUnique(CPPDEFINES = ['HAVE_MKL_LAPACK']) elif lapack == 'mkl': raise Exception, "Missing MKL LAPACK" # System LAPACK if not have_lapack and lapack in ['any', 'system']: have_lapack = config.configure('lapack', conf) if not have_lapack and lapack == 'lapack': raise Exception, "Missing LAPACK" # SimTK LAPACK if not have_lapack and lapack in ['any', 'simtk']: config.check_home(conf, 'simtk_lapack') if (config.check_lib(conf, 'SimTKlapack') and config.check_cxx_header(conf, 'SimTKlapack.h')): env.AppendUnique(CPPDEFINES = ['HAVE_SIMTK_LAPACK']) have_lapack = True elif lapack == 'simtk_lapack': raise Exception, "Missing SimTK LAPACK" if not have_lapack: raise Exception, "Missing LAPACK" # OpenMM openmm = env.get('openmm', 0) if openmm: home = config.check_env('OPENMM_HOME', True) conf.env.AppendUnique(CPPPATH = [home + 'olla/include']) conf.env.AppendUnique(CPPPATH = [home + 'openmmapi/include']) config.require_cxx_header(conf, 'OpenMM.h') config.require_cxx_header(conf, 'openmm/Kernel.h') conf.env.Prepend(LIBPATH = [home]) config.require_lib(conf, 'OpenMM') env.AppendUnique(CPPDEFINES = ['HAVE_OPENMM']) # LTMD OpenMM ltmd = env.get('ltmdopenmm', 0) if ltmd and openmm: config.check_home(conf, 'ltmdopenmm') config.require_lib(conf, 'OpenMMLTMD') env.AppendUnique(CPPDEFINES = 
['HAVE_OPENMM_LTMD']) # Gromacs gromacs = env.get('gromacs', 0) if gromacs: config.configure('gromacs', conf) # Gromacs Standard gromacs_standard = env.get('gromacs_standard', 0) if gromacs_standard: config.check_home(conf, 'gromacs') config.require_lib(conf, 'md') config.require_lib(conf, 'gmx') env.AppendUnique(CPPDEFINES = ['HAVE_GROMACS'])
def main():
    """Generate Python model code for the reflected database schema.

    Uses config.options (parsed elsewhere by config.configure()) to decide
    between declarative output (ModelFactory) and classic table/mapper
    emission via SQLAlchemy reflection (Inspector API).
    """
    config.configure()
    options = config.options
    if options.declarative:
        # Declarative path: hand everything to ModelFactory and emit its repr.
        config.interactive = None
        if options.interactive:
            config.interactive = True
        config.schema = None
        if options.schema:
            config.schema = options.schema
        config.example = False
        if options.example:
            config.example = True
        factory = ModelFactory(config)
        emit(repr(factory))
        config.out.close()
        config.out = sys.stdout
        print >>config.err, "Output written to %s" % options.output
        return

    import formatter
    formatter.monkey_patch_sa()
    import sqlalchemy
    from sqlalchemy.engine.reflection import Inspector
    db, options = config.engine, config.options
    metadata = sqlalchemy.MetaData(db)

    print >>config.err, 'Starting...'
    conn = db.connect()
    inspector = Inspector.from_engine(conn)

    # Pick the schema to reflect: explicit option, else dialect default.
    if options.schema != None:
        reflection_schema = options.schema
    else:
        try:
            reflection_schema = inspector.default_schema_name
        except NotImplementedError:
            reflection_schema = None

    tablenames = inspector.get_table_names(reflection_schema)

    # fixme: don't set up output until we're sure there's work to do!
    if options.tables:
        subset, missing, unglobbed = util.glob_intersection(tablenames, options.tables)
        for identifier in missing:
            print >>config.err, 'Table "%s" not found.' % identifier
        for glob in unglobbed:
            print >>config.err, '"%s" matched no tables.' % glob
        if not subset:
            print >>config.err, "No tables matched!"
            sys.exit(1)
        tablenames = subset

    # some header with imports
    if options.generictypes:
        dialect = ''
    else:
        # Determine which import style this SQLAlchemy version supports.
        d1 = 'from sqlalchemy.databases.%s import *\n' % db.name
        d2 = 'from sqlalchemy.dialects.%s import *\n' % db.name
        dialect = util.select_imports([d1, d2])

    header = options.z3c and constants.HEADER_Z3C or constants.HEADER
    emit(header % {'dialect': dialect, 'encoding': options.encoding})

    for tname in tablenames:
        print >>config.err, "Generating python model for table %s" % (
            util.as_sys_str(tname))

        table = sqlalchemy.Table(tname, metadata, schema=reflection_schema, autoload=True)
        if options.schema is None:
            # we're going to remove the schema from the table so that it
            # isn't rendered in the output.  If we don't put back the
            # correct value, it may cause errors when other tables reference
            # this one.
            original_schema = table.schema
            table.schema = None
        else:
            original_schema = options.schema

        INC = '\n\n'
        emit(INC)
        if options.z3c:
            emit_z3c_objects(constants.TAB, db, options, table)
        else:
            emit_table('', db, options, table)
        table.schema = original_schema

    if options.z3c:
        emit(constants.FOOTER_Z3C)

    # print some example
    if options.example:
        emit('\n' + constants.FOOTER_EXAMPLE % {
            'url': db.url, 'tablename': tablenames[0]})

    if options.output:
        emit('\n')
        config.out.close()
        config.out = sys.stdout
        # Fixed: this line used the Python 3 print function with file=,
        # which is a SyntaxError under the Python 2 interpreter the rest of
        # this function requires (print >>... statements above).
        print >>config.err, "Output written to %s" % options.output
def main():
    """Run the HTTP load test described by the config, write a CSV report,
    and print a summary (slowest URLs, avg timedelta, req/s) to stdout."""
    # remember, batch is a dictionary of dictionaries ie
    # {'http://geography.unc.edu':{filename:'...', 'grepargs':'...'}, ...}
    template, regex, batch, options = config.configure()

    # tuples with the following form:
    # [(dest1, fileobj1), (dest2, fileobj2), ...]
    args = [(k, open(batch[k]['input'], 'r')) for k in batch]

    # prep for generating log wrappers
    # [(destination1, data1), (destination2, data2), ...]
    data = [(a[0], pygrep(a[1], options.grepargs)) for a in args]

    # make list of wrapped logs
    logs = [MultiLogWrapper(d[0], xLog(d[1], template, regex)) for d in data]

    # instantiate tester and start testing
    tester = HttpLoadTester(logs, options)

    # prepare output csv file
    csv_file = open(options.output, 'w')
    # [destination,resource,status_recorded,status_recieved,timedelta,timestamp,content-length]
    writer = csv.writer(csv_file)

    # start testing
    print '\nStarting tests at %s' % time.ctime()
    print 'Warning this may take awhile.'
    start = time.time()
    # Seeded with 0.0 so sum() works on an empty run; note this extra
    # element is not counted in number_tests when averaging below.
    timedeltas = [float(0)]
    long_urls = []
    try:
        for destination, resp, entry, delta in tester.doTest(count=options.number, timeout=options.timeout):
            long_urls.append((delta, entry['resource'], resp.status))
            if resp.status == 200:
                timedeltas.append(delta)
                print '** test passed -- timedelta: %s ms **' % delta
            else:
                timedeltas.append(delta)
                print '** test failed with status: %s -- timedelta %s ms' % (resp.status, delta)
            # write data to csv file
            try:
                # NOTE(review): resp is indexed like a mapping here but has
                # .status above — presumably a response object supporting
                # both; the AttributeError fallback drops the timestamp
                # column when the 'date' header is absent.
                d = datetime.strptime(resp['date'], DATETIMEFORMAT)
                datetimestr = '%s:%s:%s' % (d.hour, d.minute, d.second)
                writer.writerow([datetimestr, destination, entry['resource'], entry['status'], resp.status, delta, resp['content-length']])
            except AttributeError:
                writer.writerow([destination, entry['resource'], entry['status'], resp.status, delta, resp['content-length']])
    finally:
        # Always release input files and the CSV handle, even on Ctrl-C.
        [f[1].close() for f in args]
        csv_file.close()
    stop = time.time()
    tm = round(stop - start, 2)
    number_tests = tester.number_tests
    number_malformed = tester.number_malformed
    # Guard against division by zero on an instantaneous run.
    if tm != 0:
        requests_per_second = round(float(number_tests) / float(tm), 2)
    else:
        requests_per_second = 0
    # calculate avg of timedeltas
    if number_tests != 0:
        avg_timedelta = round(float(sum(timedeltas))/float(number_tests), 2)
    else:
        avg_timedelta = 'N/A'
    print '\n'
    print '######################################################'
    print 'top 5 longest urls:'
    # Sort ascending by delta; the slowest five are at the tail.
    long_urls.sort(key=lambda x: x[0])
    for s in long_urls[-5:]:
        print '** timedelta: %s ms, status: %s' % (s[0], s[2])
        print '==> resource: %s' % s[1]
    print '\n'
    print 'avg timedelta is %s ms' % avg_timedelta
    print 'requests per second is %s requests/seconds' % requests_per_second
    print 'number of malformed entries is: ', number_malformed
    print 'number of tests is: ', number_tests
    print 'finished testing in %s seconds' % tm
    print '######################################################'
    print '\n'
def exec_configure(args):
    """Echo the configure command line, run config.configure(args) and
    exit the process with whatever it returns."""
    # Show the exact invocation being forwarded.
    banner = 'config.py ' + ' '.join(args)
    print(banner)
    # Propagate the configure result as the process exit status.
    status = config.configure(args)
    sys.exit(status)
def generate(env):
    """Populate the SCons Environment for building TA Spring (older build).

    Loads cached user/internal options from 'build/', and when 'configure'
    is on the command line performs (re)configuration: debug/optimize
    flags, feature toggles, preprocessor defines and per-platform
    include/library paths.  Exits the process on invalid options or when
    the tree is not yet configured.
    """
    # I don't see any reason to make this configurable --tvo.
    # Note that commenting out / setting this to `None' will break the buildsystem.
    env['builddir'] = 'build'

    # SCons chokes in env.SConsignFile() if path doesn't exist.
    if not os.path.exists(env['builddir']): os.makedirs(env['builddir'])

    # Avoid spreading .sconsign files everywhere - keep this line
    # Use os.path.abspath() here because somehow the argument to SConsignFile is relative to the
    # directory of the toplevel trunk/SConstruct and not the current directory, trunk/rts/SConstruct.
    env.SConsignFile(os.path.abspath(os.path.join(env['builddir'], 'scons_signatures')))

    # Option caches persist the configuration between scons invocations.
    usrcachefile = os.path.join(env['builddir'], 'usropts.py')
    intcachefile = os.path.join(env['builddir'], 'intopts.py')
    usropts = Options(usrcachefile)
    intopts = Options(intcachefile)

    #user visible options
    usropts.AddOptions(
        #permanent options
        ('platform',          'Set to linux, freebsd or windows', None),
        ('debug',             'Set to yes to produce a binary with debug information', 0),
        ('optimize',          'Enable processor optimizations during compilation', 1),
        ('profile',           'Set to yes to produce a binary with profiling information', False),
        ('prefix',            'Install prefix', '/usr/local'),
        ('datadir',           'Data directory', '$prefix/games/taspring'),
        ('strip',             'Discard symbols from the executable (only when neither debugging nor profiling)', True),
        #porting options - optional in a first phase
        ('disable_avi',       'Set to no to turn on avi support', True),
        ('disable_clipboard', 'Set to no to turn on clipboard code', True),
        #other ported parts
        ('disable_hpi',       'Set to no to turn on hpi support', False),
        ('disable_lua',       'Set to no to turn on Lua support', True),
        ('use_tcmalloc',      'Use tcmalloc from goog-perftools for memory allocation', False),
        ('use_mmgr',          'Use memory manager', False),
        ('cachedir',          'Cache directory (see scons manual)', None))

    #internal options
    intopts.AddOptions(
        ('LINKFLAGS',     'linker flags'),
        ('LIBPATH',       'library path'),
        ('LIBS',          'libraries'),
        ('CCFLAGS',       'c compiler flags'),
        ('CXXFLAGS',      'c++ compiler flags'),
        ('CPPDEFINES',    'c preprocessor defines'),
        ('CPPPATH',       'c preprocessor include path'),
        ('is_configured', 'configuration version stamp'))

    usropts.Update(env)
    intopts.Update(env)
    env.Help(usropts.GenerateHelpText(env))

    # Use this to avoid an error message 'how to make target configure ?'
    env.Alias('configure', None)

    # Refuse to build when not configured (stamp value 1 is this script's
    # configuration version).
    if not 'configure' in sys.argv and not ((env.has_key('is_configured') and env['is_configured'] == 1) or env.GetOption('clean')):
        print "Not configured or configure script updated. Run `scons configure' first."
        print "Use `scons --help' to show available configure options to `scons configure'."
        env.Exit(1)

    if 'configure' in sys.argv:
        # be paranoid, unset existing variables
        for key in ['platform', 'debug', 'optimize', 'profile', 'prefix', 'datadir', 'cachedir', 'strip', 'disable_avi', 'disable_hpi', 'disable_lua', 'disable_aio', 'use_tcmalloc', 'use_mmgr', 'LINKFLAGS', 'LIBPATH', 'LIBS', 'CCFLAGS', 'CXXFLAGS', 'CPPDEFINES', 'CPPPATH', 'is_configured']:
            if env.has_key(key): env.__delitem__(key)

        print "\nNow configuring. If something fails, consult `config.log' for details.\n"

        #parse cmdline
        def makeHashTable(args):
            # Turn ['key=value', ...] command-line words into a dict; words
            # without '=' or with empty key/value are silently skipped.
            table = { }
            for arg in args:
                if len(arg) > 1:
                    lst = arg.split('=')
                    if len(lst) < 2: continue
                    key = lst[0]
                    value = lst[1]
                    if len(key) > 0 and len(value) > 0: table[key] = value
            return table

        args = makeHashTable(sys.argv)

        env['is_configured'] = 1

        if args.has_key('platform'): env['platform'] = args['platform']
        else: env['platform'] = detect.platform()

        # Declare some helper functions for boolean and string options.
        def bool_opt(key, default):
            # cmdline yes/true/1 -> True, no/false/0 -> False, else exit.
            if args.has_key(key):
                if args[key] == 'no' or args[key] == 'false' or args[key] == '0': env[key] = False
                elif args[key] == 'yes' or args[key] == 'true' or args[key] == '1': env[key] = True
                else:
                    print "\ninvalid", key, "option, must be one of: yes, true, no, false, 0, 1."
                    env.Exit(1)
            else: env[key] = default

        def string_opt(key, default):
            if args.has_key(key): env[key] = args[key]
            else: env[key] = default

        # profile?
        bool_opt('profile', False)
        if env['profile']:
            print "profiling enabled,",
            env.AppendUnique(CCFLAGS=['-pg'], LINKFLAGS=['-pg'])
        else: print "profiling NOT enabled,",

        # debug?
        if args.has_key('debug'):
            level = args['debug']
            if level == 'no' or level == 'false': level = '0'
            elif level == 'yes' or level == 'true': level = '3'
        else:
            level = '0'
        if int(level) == 0:
            print "debugging NOT enabled,",
            env['debug'] = 0
        elif int(level) >= 1 and int(level) <= 3:
            print "level", level, "debugging enabled,",
            env['debug'] = level
            env.AppendUnique(CCFLAGS=['-ggdb'+level], CPPDEFINES=['DEBUG', '_DEBUG'])
        else:
            print "\ninvalid debug option, must be one of: yes, true, no, false, 0, 1, 2, 3."
            env.Exit(1)

        # optimize?
        if args.has_key('optimize'):
            level = args['optimize']
            if level == 'no' or level == 'false': level = '0'
            elif level == 'yes' or level == 'true': level = '2'
        else:
            # default: no optimization in debug builds, -O1 otherwise
            if env['debug']: level = '0'
            else: level = '1'
        if int(level) == 0:
            print "optimizing NOT enabled,",
            env['optimize'] = 0
        elif (int(level) >= 1 and int(level) <= 3) or level == 's' or level == 'size':
            print "level", level, "optimizing enabled"
            env['optimize'] = level
            archflags = detect.processor(config.check_gcc_version(env) >= ['3','4','0'])
            env.AppendUnique(CCFLAGS=['-O'+level, '-pipe']+archflags)
        else:
            print "\ninvalid optimize option, must be one of: yes, true, no, false, 0, 1, 2, 3, s, size."
            env.Exit(1)

        env['CXXFLAGS'] = env['CCFLAGS']

        # fall back to environment variables if neither debug nor optimize options are present
        if not args.has_key('debug') and not args.has_key('optimize'):
            if os.environ.has_key('CFLAGS'):
                print "using CFLAGS:", os.environ['CFLAGS']
                env['CCFLAGS'] = SCons.Util.CLVar(os.environ['CFLAGS'])
            if os.environ.has_key('CXXFLAGS'):
                print "using CXXFLAGS:", os.environ['CXXFLAGS']
                env['CXXFLAGS'] = SCons.Util.CLVar(os.environ['CXXFLAGS'])
            else:
                env['CXXFLAGS'] = env['CCFLAGS']

        bool_opt('strip', True)
        bool_opt('disable_avi', True)
        bool_opt('disable_clipboard', True)
        bool_opt('disable_hpi', False)
        bool_opt('disable_lua', True)
        bool_opt('use_tcmalloc', False)
        bool_opt('use_mmgr', False)
        string_opt('prefix', '/usr/local')
        string_opt('datadir', '$prefix/games/taspring')
        string_opt('cachedir', None)

        # Make a list of preprocessor defines.
        defines = ['_REENTRANT', 'DIRECT_CONTROL_ALLOWED', '_SZ_ONE_DIRECTORY']
        defines += ['SPRING_DATADIR="\\"'+env['datadir']+'\\""']
        if env['disable_hpi']      : defines += ['NO_HPI']
        if env['disable_clipboard']: defines += ['NO_CLIPBOARD']
        if env['disable_avi']      : defines += ['NO_AVI']
        if env['disable_lua']      : defines += ['NO_LUA']
        if env['use_mmgr']         : defines += ['USE_MMGR']
        env.AppendUnique(CPPDEFINES = defines)

        # Platform specific include/library paths.
        include_path = ['rts', 'rts/System']
        lib_path = []
        if env['platform'] == 'freebsd':
            include_path += ['/usr/local/include', '/usr/X11R6/include', '/usr/X11R6/include/GL']
            lib_path += ['/usr/local/lib', '/usr/X11R6/lib']
            env.AppendUnique(CCFLAGS = ['-pthread'], CXXFLAGS = ['-pthread'])
        elif env['platform'] == 'linux':
            include_path += ['/usr/include', '/usr/include/GL', '/usr/include/CEGUI']
            env.AppendUnique(CCFLAGS = ['-pthread'], CXXFLAGS = ['-pthread'])
        elif env['platform'] == 'darwin':
            include_path += ['/usr/include', '/usr/local/include', '/opt/local/include', '/usr/X11R6/include']
            lib_path += ['/opt/local/lib', '/usr/local/lib']
            env['SHLINKFLAGS'] = '$LINKFLAGS -dynamic'
            env['SHLIBSUFFIX'] = '.dylib'
        elif env['platform'] == 'windows':
            include_path += ['crashrpt/include']
            lib_path += ['crashrpt/lib']
            env.AppendUnique(CCFLAGS = ['-mthreads'], CXXFLAGS = ['-mthreads'])
        # use '-pthreads' for Solaris, according to /usr/include/boost/config/requires_threads.hpp

        env.AppendUnique(CPPPATH=include_path, LIBPATH=lib_path)

        config.configure(env, conf_dir=os.path.join(env['builddir'], 'sconf_temp'))

        usropts.Save(usrcachefile, env)
        intopts.Save(intcachefile, env)

    #Should we strip the exe?
    # Only strip when requested and neither debugging nor profiling nor cleaning.
    if env.has_key('strip') and env['strip'] and not env['debug'] and not env['profile'] and not env.GetOption('clean'):
        env['strip'] = True
    else:
        env['strip'] = False

    #BuildDir support code
    if env['builddir']:
        for d in filelist.list_directories(env, 'rts'):
            env.BuildDir(os.path.join(env['builddir'], d), d, duplicate = False)
        for d in filelist.list_directories(env, 'AI'):
            env.BuildDir(os.path.join(env['builddir'], d), d, duplicate = False)

    #CacheDir support code
    if env.has_key('cachedir') and env['cachedir']:
        if not os.path.exists(env['cachedir']):
            os.makedirs(env['cachedir'])
        env.CacheDir(env['cachedir'])
def main():
    """Entry point: generate SQLAlchemy model source for the configured DB.

    Reads all settings from the module-level ``config`` (populated by
    ``config.configure()``) and writes generated code through ``emit()``
    to ``config.out``.

    NOTE(review): this function returns unconditionally after the
    ModelFactory block below; everything past that ``return`` is dead
    legacy code from the pre-ModelFactory, reflection-based generator.
    """
    config.configure()
    options = config.options
    # Declarative output disables interactive mode; interactive mode in
    # turn clears any previously-set schema.
    if options.declarative:
        config.interactive = None
    if options.interactive:
        config.interactive = True
        config.schema = None
    if options.schema:
        config.schema = options.schema
    config.example=False
    if options.example:
        config.example=True
    # --- current code path: whole model is produced by ModelFactory ---
    factory = ModelFactory(config)
    emit(repr(factory))
    config.out.close()
    config.out = sys.stdout
    # NOTE(review): this message is printed even when no output file was
    # requested -- options.output may be None here; confirm against the
    # option parser.
    print >>config.err, "Output written to %s" % options.output
    return
    # ------------------------------------------------------------------
    # UNREACHABLE from here on (see `return` above): legacy reflection-
    # based generator, apparently kept for reference only.
    # ------------------------------------------------------------------
    import formatter
    formatter.monkey_patch_sa()
    import sqlalchemy
    db, options = config.engine, config.options
    metadata = sqlalchemy.MetaData(db)
    print >>config.err, 'Starting...'
    conn = db.connect()
    # Pick the schema to reflect: explicit option wins, otherwise ask the
    # dialect for its default (not every dialect implements this).
    if options.schema != None:
        reflection_schema=options.schema
    else:
        try:
            reflection_schema = db.dialect.get_default_schema_name(conn)
        except NotImplementedError:
            reflection_schema = None
    tablenames = db.dialect.table_names(conn, reflection_schema)
    # fixme: don't set up output until we're sure there's work to do!
    if options.tables:
        # Restrict to the user-requested subset; warn about globs/names
        # that matched nothing, and bail out if nothing is left.
        subset, missing, unglobbed = util.glob_intersection(tablenames, options.tables)
        for identifier in missing:
            print >>config.err, 'Table "%s" not found.' % identifier
        for glob in unglobbed:
            print >>config.err, '"%s" matched no tables.' % glob
        if not subset:
            print >>config.err, "No tables matched!"
            sys.exit(1)
        tablenames = subset
    # NOTE(review): argument order looks suspect -- re.search(pattern,
    # string) is invoked as re.search(table, options.exclude), i.e. the
    # table NAME is used as the regex pattern. Verify intent.
    if options.exclude:
        tablenames = [table for table in tablenames if not re.search(table, options.exclude)]
    # some header with imports
    if options.generictypes:
        dialect = ''
    else:
        dialect = 'from sqlalchemy.databases.%s import *\n' % db.name
    header = options.z3c and constants.HEADER_Z3C or constants.HEADER
    emit(header % {'dialect': dialect, 'encoding': options.encoding})
    for tname in tablenames:
        print >>config.err, "Generating python model for table %s" % (
            util.as_sys_str(tname))
        # Reflect the table definition from the live database.
        table = sqlalchemy.Table(tname, metadata, schema=reflection_schema, autoload=True)
        if options.schema is None:
            # we're going to remove the schema from the table so that it
            # isn't rendered in the output.  If we don't put back the
            # correct value, it may cause errors when other tables reference
            # this one.
            original_schema = table.schema
            table.schema = None
        else:
            original_schema = options.schema
        # INC is the separator emitted before each table repr; z3c output
        # is indented one extra level (4 spaces).
        INC = '\n\n'
        if options.z3c:
            INC = INC + 4*' '
        emit('%s%s%s%s = %r' % (INC, options.table_prefix, tname, options.table_suffix, table))
        if options.z3c:
            emit(INC + ('class %(tn)sObject(MappedClassBase): pass\n'
                        'mapper(%(tn)sObject, %(tn)s)') % {'tn':tname})
        # Restore the schema we cleared above (see comment in the branch).
        table.schema = original_schema
        # directly print indices after table def
        if not options.noindex:
            indexes = []
            if not table.indexes:
                # for certain dialects we need to include index support
                if hasattr(db.dialect, 'indexloader'):
                    indexes = db.dialect.indexloader(db).indexes(table)
                else:
                    print >>config.err, 'It seems that this dialect does not support indexes!'
            else:
                indexes = list(table.indexes)
            util.emit(*[repr(index) for index in indexes])
    if options.z3c:
        emit(constants.FOOTER_Z3C)
    # print some example
    if options.example:
        emit('\n' + constants.FOOTER_EXAMPLE % {'url': unicode(db.url), 'tablename': tablenames[0]})
    # When writing to a file, add a trailing newline, close it, and point
    # config.out back at stdout.
    if options.output:
        emit('\n')
        config.out.close()
        config.out = sys.stdout
        print >>config.err, "Output written to %s" % options.output
def configure(conf, ignores=None):
    """Configure the packaging tool chain and extend Environment.

    Pulls in the platform-specific sub-packagers (NSIS, pkg, distpkg,
    app, deb, rpm) and attaches the packaging helper functions defined
    in this module as methods on SCons' Environment class.

    :param conf: SCons configure context; its ``env`` receives the
        default ``package_ignores`` setting.
    :param ignores: optional list of file/directory patterns to skip
        when packaging. Defaults to VCS/build residue
        (``.svn``, ``.sconsign.dblite``, ``.sconf_temp``, ``*~``,
        ``*.o``, ``*.obj``).
    :returns: True (configuration always succeeds if no sub-step raises).
    """
    # Build a fresh default list per call: the original mutable-default
    # argument was shared across calls (and stored into the environment),
    # so an in-place mutation by one caller leaked into all later calls.
    if ignores is None:
        ignores = ['.svn', '.sconsign.dblite', '.sconf_temp',
                   '*~', '*.o', '*.obj']
    env = conf.env

    # Configure each packaging back-end this module builds on.
    for tool in ('nsi', 'pkg', 'distpkg', 'app', 'deb', 'rpm'):
        config.configure(tool, conf)

    # Attach helper functions as Environment methods. Explicit
    # (function, name) pairs keep the published method names stable even
    # if a function object is ever renamed or wrapped.
    for func, name in ((FindFiles, 'FindFiles'),
                       (WriteVariable, 'WriteVariable'),
                       (GetPackageName, 'GetPackageName'),
                       (GetPackageType, 'GetPackageType'),
                       (GetPackageArch, 'GetPackageArch'),
                       (CopyToPackage, 'CopyToPackage'),
                       (InstallFiles, 'InstallFiles'),
                       (RunCommand, 'RunCommand'),
                       (ResolvePackageFileMap, 'ResolvePackageFileMap'),
                       (WriteStringToFile, 'WriteStringToFile'),
                       (ZipDir, 'ZipDir'),
                       (Packager, 'Packager')):
        AddMethod(Environment, func, name)

    env.SetDefault(package_ignores=ignores)
    return True