def build_cone_eggs(source_paths, python_executable):
    log.info("Cleaning temporary ConE egg dir...")
    utils.recreate_dir(TEMP_CONE_EGG_DIR)

    log.info("Building ConE eggs...")
    for source_path in source_paths:
        ok = utils.build_egg(source_path, TEMP_CONE_EGG_DIR, python_executable)
        if not ok:
            raise BuildFailedError()
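# NOTE: utils.recreate_dir is not defined in this section, yet every snippet
# here relies on it. A minimal sketch of the delete-then-create pattern its
# call sites imply (some call sites also pass a display_warnings flag, which
# is omitted here); hypothetical, not the project's actual helper:
import os
import shutil

def recreate_dir_sketch(path):
    """Remove `path` if it exists, then create it empty (hypothetical helper)."""
    if os.path.isdir(path):
        shutil.rmtree(path)
    os.makedirs(path)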
def gen_cpp():
    # cpp
    cwd = getcwd()
    try:
        src = join(REPO, 'src', 'cpp')
        build_ = join(REPO, '_build', 'cpp')
        recreate_dir(build_)
        chdir(build_)
        call(['cmake', '-G', GENERATOR, src])
    finally:
        chdir(cwd)
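# NOTE: main() further below also calls gen_python(), which is not shown in
# this section. A hypothetical counterpart sketch under the same conventions
# (the actual generation step is project-specific and assumed here):
def gen_python_sketch():
    build_ = join(REPO, '_build', 'python')
    recreate_dir(build_)
    # Generate or stage the Python sources into the build directory here;
    # gen_cpp above uses cmake, so a Python analogue might invoke setup.py
    # or simply copy generated protobuf modules into place.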
def get_model_args():
    parser = get_global_parser()
    # NOTE: 'SURREAL' is a placeholder; deg is set from global_config.env below.
    parser.add_argument('--deg', type=env2deg, default='SURREAL')
    parser.add_argument('--seed', '-s', type=int, default=0)
    parser.add_argument('--is_train', type=utils.str2bool, default=False)
    parser.add_argument('--resume', type=utils.str2bool, default=False)
    parser.add_argument('--num_workers', type=int, default=1)
    parser.add_argument('--save_graphs', type=utils.str2bool, default=False)
    parser.add_argument('--tensorboard_path', type=str,
                        default=os.path.join(DATA_DIR, 'runs/tensorboard/'))
    parser.add_argument('--models_save_path', type=str,
                        default=os.path.join(DATA_DIR, 'runs/models/'))
    parser.add_argument('--save_interval_epoch', type=int, default=10)
    parser.add_argument('--max_epochs', type=int, default=4)  # 50
    parser.add_argument('--batch_size', type=int, default=2)
    parser.add_argument('--learning_rate', type=float, default=2e-4)
    parser.add_argument('--n_test_evals', type=int, default=10)
    parser.add_argument('--max_test_timestep', type=int, default=40)
    parser.add_argument('--beta', type=float, default=0.01)
    parser.add_argument('--visual_state_dim', type=int, default=64)
    parser.add_argument('--combined_state_dim', type=int, default=94)
    parser.add_argument('--goal_dim', type=int, default=32)
    parser.add_argument('--latent_dim', type=int, default=256)

    config = parser.parse_args()
    config.env_args = env2args(config.env)
    config.deg = env2deg(config.env)
    config.models_save_path = os.path.join(
        config.models_save_path,
        'model_{}_{}_{}/'.format(config.env, config.env_type, config.exp_name))
    config.tensorboard_path = os.path.join(
        config.tensorboard_path,
        'model_{}_{}_{}/'.format(config.env, config.env_type, config.exp_name))
    config.data_path = os.path.join(
        config.data_path, '{}_{}/'.format(config.env, config.env_type))

    if config.is_train and not config.resume:
        utils.recreate_dir(config.models_save_path, config.display_warnings)
        utils.recreate_dir(config.tensorboard_path, config.display_warnings)
    else:
        utils.check_n_create_dir(config.models_save_path, config.display_warnings)
        utils.check_n_create_dir(config.tensorboard_path, config.display_warnings)
    utils.check_n_create_dir(config.data_path, config.display_warnings)

    return config
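# NOTE: utils.str2bool is not shown here. argparse's type=bool treats any
# non-empty string as True, so a converter like this is the common fix; a
# minimal sketch, assuming the project's flags accept the usual spellings:
import argparse

def str2bool_sketch(value):
    """Parse 'true'/'false'-style CLI strings into a real bool (hypothetical)."""
    if value.lower() in ('yes', 'true', 't', '1'):
        return True
    if value.lower() in ('no', 'false', 'f', '0'):
        return False
    raise argparse.ArgumentTypeError("boolean value expected, got %r" % value)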
def generate_target(self):
    recreate_dir(self.target_dir)
    for cls_id in range(self.clustering_cnt):
        cls_dir = os.path.join(self.target_dir, str(cls_id))
        recreate_dir(cls_dir)
        # res_arr rows are [img_path, cluster_id, distance]; select this cluster.
        cls_arr = self.res_arr[self.res_arr[:, 1] == str(cls_id)]
        # Sort by distance in ascending order.
        asc_idx = cls_arr[:, 2].argsort()
        # Sanity check: the order really is ascending.
        if len(cls_arr[asc_idx][:, 2]) > 5:
            assert cls_arr[asc_idx][:, 2][0] < cls_arr[asc_idx][:, 2][5]
        sort_num = 0
        for img_path in cls_arr[asc_idx][:, 0]:
            sort_num += 1
            shutil.copy2(
                img_path,
                os.path.join(cls_dir,
                             "{}_{}".format(sort_num, img_path.split('/')[-1])))
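# NOTE: generate_target assumes self.res_arr is an (N, 3) numpy array of
# strings with columns [img_path, cluster_id, distance]; a hypothetical
# example for illustration. Beware that argsort on string distances compares
# lexicographically, which matches numeric order only for fixed-width values.
import numpy as np

res_arr_example = np.array([
    ["imgs/a.jpg", "0", "0.12"],  # path, assigned cluster, distance to centroid
    ["imgs/b.jpg", "0", "0.45"],
    ["imgs/c.jpg", "1", "0.08"],
])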
def retrieve_dep_eggs(plugin_package):
    log.info("Cleaning temporary lib egg dir...")
    utils.recreate_dir(TEMP_LIB_EGG_DIR)

    log.info("Retrieving dependency eggs...")

    def copy_eggs(source_dir):
        log.debug("Copying eggs from '%s'..." % source_dir)
        for name in os.listdir(source_dir):
            if name.endswith('.egg'):
                # Skip this particular setuptools egg on Linux.
                if PLATFORM_SUBDIR == 'linux' and name.startswith('setuptools-0.6c11'):
                    continue
                utils.copy_file(source_path=os.path.join(source_dir, name),
                                target_path=TEMP_LIB_EGG_DIR)

    dep_dirs_by_package = [(None, os.path.join(ROOT_PATH, '../dep-eggs'))]
    dep_dirs_by_package.extend(
        plugin_utils.find_plugin_package_subpaths(PLUGIN_SOURCE_ROOT,
                                                  'dep-eggs', plugin_package))
    for package_name, dep_dir in dep_dirs_by_package:
        copy_eggs(dep_dir)
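# NOTE: utils.copy_file is not defined in this section. Call sites pass either
# a directory (above) or a full file path (in main(argv) below) as target_path,
# so a plausible minimal sketch (hypothetical, not the project's actual helper):
import os
import shutil

def copy_file_sketch(source_path, target_path):
    """Copy a file; target_path may be an existing directory or a file path."""
    parent = target_path if os.path.isdir(target_path) else os.path.dirname(target_path)
    if parent and not os.path.isdir(parent):
        os.makedirs(parent)
    shutil.copy2(source_path, target_path)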
def init_target_dir(target_dir, python_version):
    BASE_DIR = os.path.normpath(
        os.path.join(target_dir, INSTALL_SUBDIR, PLATFORM_SUBDIR, python_version))
    LIB_DIR = os.path.join(BASE_DIR, 'lib')
    SCRIPT_DIR = os.path.join(BASE_DIR, 'scripts')

    utils.recreate_dir(BASE_DIR)
    utils.recreate_dir(LIB_DIR)
    utils.recreate_dir(SCRIPT_DIR)

    return LIB_DIR, SCRIPT_DIR
def main():
    ''' Preparation '''
    linux = Deal_with_linux()

    # Check if patch exists on Sunny.
    if not os.path.isdir(sunny_patch):
        print Bcolors.FAIL + "ERROR: No such patch on Sunny!" + Bcolors.ENDC
        print "\tNo such directory " + sunny_patch
        sys.exit()

    # Clear temporary directory. May fail if somebody is "sitting" in it.
    try:
        recreate_dir(stage_dir)
    except:
        print Bcolors.FAIL + "ERROR: Unable to recreate patch staging directory." + Bcolors.ENDC
        sys.exit()

    ''' Database patching '''
    # Get list of already applied patches.
    # postgres_exec returns (list of tuples, row count); only the tuples are needed, hence [0].
    patches_curr = postgres_exec(
        db_host, db_name,
        'select name from parameter.fdc_patches_log order by id desc;')[0]

    # Get list of patches from Sunny.
    if not os.path.isdir(sunny_patch + '\\patches'):
        print "NOTICE: No database patch found in build. Assume database patching not required."
    else:
        patches_targ = [name for name in os.listdir(sunny_patch + '\\patches')]
        # Compare installed patches with patches from Sunny.
        # If the latest installed patch version is lower than on Sunny, install the missing patches.
        print "\nChecking database patch level:"
        # To handle file name suffixes for directories like "db_0190_20171113_v2.19",
        # an additional variable holds max(patches_targ).
        last_patch_targ = max(patches_targ)
        last_patch_targ_strip = re.findall(
            r'db_.*_\d{8}', last_patch_targ)[0]  # findall returns a list
        if last_patch_targ_strip == max(patches_curr):
            print "\tDatabase patch level: " + max(patches_curr)
            print "\tLatest patch on Sunny: " + last_patch_targ_strip
            print "\tNo database patch required.\n"
        elif last_patch_targ_strip > max(patches_curr):
            print "\tDatabase patch level: " + max(patches_curr)
            print "\tLatest patch on Sunny: " + last_patch_targ_strip
            print "\tDatabase needs patching.\n"
            patches_miss = []
            for i in (set(patches_targ) - set(patches_curr)):
                if i > max(patches_curr):
                    patches_miss.append(i)
            print "Following database patches will be applied: " + ', '.join(patches_miss) + "\n"
            for i in patches_miss:
                # Copy needed patches from Sunny.
                subprocess.call([
                    'xcopy', '/e', '/i', '/q',
                    sunny_patch + '\\patches\\' + i,
                    stage_dir + '\\patches\\' + i
                ], stdout=dnull, shell=True)
                # Place the patch installer into the patch subdirectories.
                subprocess.call([
                    'copy', '/y', db_patch_file,
                    stage_dir + '\\patches\\' + i
                ], stdout=dnull, shell=True)

            # Stop tomcat.
            for i in application_host:
                print "Stopping application server " + i + "...\n"
                linux.linux_exec(i, 'sudo systemctl stop tomcat')

            # Apply database patches.
            # Sorting ensures patches execute in the right order.
            for i in sorted(patches_miss):
                print "Applying database patch " + i + "..."
                # Output goes to null; nothing useful there anyway. The result is
                # analyzed by reading the log.
                subprocess.call([
                    stage_dir + '\\patches\\' + i + '\\' + db_patch_file,
                    db_host, db_name
                ], stdout=dnull, stderr=dnull, shell=False,
                    cwd=stage_dir + '\\patches\\' + i)
                # Search the logfile for the success marker.
                # NOTE: the misspelled marker below is kept verbatim; it must match
                # whatever the installer actually writes to its log.
                try:
                    logfile = open(stage_dir + '\\patches\\' + i + '\\install_db_log.log')
                except:
                    print Bcolors.FAIL + "\tUnable to read logfile " + stage_dir + "\\patches\\" + i + "\\install_db_log.log. Something wrong with installation.\n" + Bcolors.ENDC
                    sys.exit()
                loglines = logfile.read()
                success_marker = loglines.find('finsih install patch ods objects')
                if success_marker != -1:
                    print Bcolors.OKGREEN + "\tDone.\n" + Bcolors.ENDC
                else:
                    print Bcolors.FAIL + "\tError installing database patch. Examine logfile " + stage_dir + "\\patches\\" + i + "\\install_db_log.log for details\n" + Bcolors.ENDC
                    sys.exit()
                logfile.close()
                # Add an additional check from database fdc_patches_log?
                # is_db_patch_applied = postgres_exec(db_host, db_name,
                #     "select name from parameter.fdc_patches_log where name = '" + i + "'")[0]
                # if is_db_patch_applied != []:
                #     pass
                # else:
                #     print "ERROR: Unable to confirm patch installation!"
                #     exit()

            # Purge panels.
            purge_panels()
        else:
            print "\tDatabase patch level: " + max(patches_curr)
            print "\tLatest patch on Sunny: " + last_patch_targ_strip
            print Bcolors.FAIL + "ERROR: Something wrong with database patching!\n" + Bcolors.ENDC
            sys.exit()

    '''
    Application update
    TODO:
    1. Copy war to gudhskpdi-mon, with md5 check.
    2. Copy from gudhskpdi-mon to app server with md5 check.
       Use the ansible user (it already has keys and root privileges).
    '''
    print "Checking java application version:"
    # glob returns a list; its first ([0]) element is used in md5_check.
    # Search for an ods*.war file in Sunny's patch directory.
    # TODO: what if there is more than one? Like on PTS.
    if glob(sunny_patch + '\\ods*.war') == []:
        print Bcolors.FAIL + "ERROR: Unable to locate war file on Sunny!" + Bcolors.ENDC
        sys.exit()
    war_path = glob(sunny_patch + '\\ods*.war')[0]

    # Get application md5 from Sunny.
    source_md5 = md5_check(war_path)

    # Get application md5 from each target server and compare with the source,
    # one by one. The result is the hosts_to_update list.
    hosts_to_update = []
    for i in application_host:
        target_md5 = linux.linux_exec(i, 'sudo md5sum ' + app_path + '/' + war_name)
        if source_md5 != target_md5.split(" ")[0]:
            print "\tJava application on " + i + " will be updated."
            hosts_to_update.append(i)

    # Finish if hosts_to_update is empty.
    if hosts_to_update == []:
        print Bcolors.OKBLUE + "\tAll application hosts already up to date." + Bcolors.ENDC
        sys.exit()
    print "\n"

    for i in hosts_to_update:
        # Delete and recreate the temporary directory for the war file.
        linux.linux_exec(i, 'rm -rf /tmp/webapps && mkdir /tmp/webapps')
        # Copy the war to the target server.
        print "Copying " + war_path + " to " + i + ":/tmp/webapps/" + war_name + "\n"
        linux.linux_put(i, war_path, '/tmp/webapps/' + war_name)
        linux.linux_exec(i, 'sudo chown tomcat.tomcat /tmp/webapps/' + war_name)
        # Stop the tomcat server.
        print "Stopping application server " + i + "..."
        linux.linux_exec(i, 'sudo systemctl stop tomcat')

        print "Applying application patch on " + i + "..."
        # Delete the old application: both the war file and the exploded directory.
        linux.linux_exec(i, 'sudo rm ' + app_path + '/' + war_name)
        linux.linux_exec(i, 'sudo rm -rf ' + app_path + '/' + war_fldr)
        # Copy the war into the webapps folder.
        linux.linux_exec(
            i, 'sudo cp /tmp/webapps/' + war_name + ' ' + app_path + '/' + war_name)

        print "Starting application server " + i + "..."
        linux.linux_exec(i, 'sudo systemctl start tomcat')
        # Check whether the server really started.
        tcat_sctl = linux.linux_exec(i, 'sudo systemctl status tomcat')
        tcat_status = tcat_sctl.find('Active: active ')
        if tcat_status != -1:
            print Bcolors.OKGREEN + "\tDone!\n" + Bcolors.ENDC
        else:
            print Bcolors.FAIL + "\tFailed!\n" + Bcolors.ENDC
        print "Waiting 60 seconds for application to (re)deploy..."
        sleep(60)
        check_webpage(patch_num, i, target)

    # Double-check md5.
    for i in hosts_to_update:
        target_md5 = linux.linux_exec(i, 'sudo md5sum ' + app_path + '/' + war_name)
        if source_md5 == target_md5.split(" ")[0]:
            print Bcolors.OKBLUE + "DONE: Application version on " + i + " now matches " + patch_num + "." + Bcolors.ENDC
        else:
            print Bcolors.FAIL + "ERROR: Application version on " + i + " still does not match " + patch_num + "!" + Bcolors.ENDC
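# NOTE: md5_check is not defined in this section. A minimal sketch of a local
# md5 helper, assuming it returns the hex digest of the file's contents
# (hypothetical; the real helper may differ):
import hashlib

def md5_check_sketch(path):
    """Return the md5 hex digest of the file at `path`."""
    digest = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            digest.update(chunk)
    return digest.hexdigest()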
m.build_inputs_and_outputs(tf.squeeze(gif), tf.squeeze(fdb), tf.squeeze(cmd))
m.build_train_op()
print('---------After build graph, get_trainable_dic()------------')
get_trainable_dic()

# Limit GPU memory (uncomment to enable):
# config = tf.ConfigProto()
# config.gpu_options.allow_growth = True  # allocate dynamically
# config.gpu_options.per_process_gpu_memory_fraction = 0.8  # cap at 80% of GPU memory

with tf.Session() as sess:
    start_ep = 0

    # ------- restore -------
    recreate_dir(FLAGS.log_dir)
    model_file = tf.train.latest_checkpoint(FLAGS.model_dir)
    saver = tf.train.Saver(max_to_keep=5)
    if model_file is not None:
        print('Use model_file = ' + str(model_file) + '!')
        saver.restore(sess, model_file)
        print('---------After build graph, get_trainable_dic()------------')
        get_trainable_dic()
        start_ep = int(model_file.rpartition('-')[-1]) + 1
    else:
        print('[I] Initialize all variables')
        sess.run(tf.global_variables_initializer())

    # If running out of memory, enable OOM allocation reporting:
    # op = tf.global_variables_initializer()
    # run_options = tf.RunOptions(report_tensor_allocations_upon_oom=True)
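# NOTE: get_trainable_dic is not defined in this section. A plausible TF1-style
# sketch (hypothetical) that prints and returns the trainable variables:
def get_trainable_dic_sketch():
    """Print name -> shape for every trainable variable in the default graph."""
    trainable = {v.name: v for v in tf.trainable_variables()}
    for name, var in trainable.items():
        print(name, var.shape)
    return trainable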
def get_demons_args():
    parser = get_global_parser()
    # NOTE: 'SURREAL' is a placeholder; deg is set from global_config.env below.
    parser.add_argument('--deg', type=env2deg, default='SURREAL')
    parser.add_argument("--collect_by", type=str, default='teleop',
                        choices=['teleop', 'imitation', 'expert', 'policy',
                                 'exploration', 'random'])
    parser.add_argument("--device", type=str, default="keyboard",
                        choices=["keyboard", "spacemouse"])
    parser.add_argument("--collect_freq", type=int, default=1)
    parser.add_argument("--flush_freq", type=int, default=25)  # NOTE: RAM issues; adjust here (e.g. 75)
    parser.add_argument("--break_traj_success", type=utils.str2bool, default=True)
    parser.add_argument("--n_runs", type=int, default=10,  # 10
                        help="no. of runs of traj collection; effective when "
                             "break_traj_success = False")

    # Imitation model
    parser.add_argument('--resume', type=utils.str2bool, default=False)
    parser.add_argument('--train_imitation', type=utils.str2bool, default=False)
    parser.add_argument('--models_save_path', type=str,
                        default=os.path.join(DATA_DIR, 'runs/imitation-models/'))
    parser.add_argument('--tensorboard_path', type=str,
                        default=os.path.join(DATA_DIR, 'runs/imitation-tensorboard/'))
    parser.add_argument('--load_models', type=utils.str2bool, default=True)
    parser.add_argument('--use_model_perception', type=utils.str2bool, default=True)
    parser.add_argument('--n_gen_traj', type=int, default=200,
                        help="Number of trajectories to generate by imitation")

    config = parser.parse_args()
    config.env_args = env2args(config.env)
    config.deg = env2deg(config.env)
    config.data_path = os.path.join(
        config.data_path, '{}_{}/'.format(config.env, config.env_type))
    config.models_save_path = os.path.join(
        config.models_save_path, '{}_{}/'.format(config.env, config.env_type))
    config.tensorboard_path = os.path.join(
        config.tensorboard_path,
        '{}_{}_{}/'.format(config.env, config.env_type, config.exp_name))

    if config.train_imitation and not config.resume:
        utils.recreate_dir(config.models_save_path, config.display_warnings)
        utils.recreate_dir(config.tensorboard_path, config.display_warnings)
    else:
        utils.check_n_create_dir(config.models_save_path, config.display_warnings)
        utils.check_n_create_dir(config.tensorboard_path, config.display_warnings)
    utils.check_n_create_dir(config.data_path, config.display_warnings)

    return config
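# NOTE: utils.check_n_create_dir is not shown here. Judging by its use as the
# non-destructive counterpart of recreate_dir, a minimal sketch (hypothetical),
# assuming display_warnings toggles a notice when the directory already exists:
import os

def check_n_create_dir_sketch(path, display_warnings=True):
    """Create `path` if missing; unlike recreate_dir, never deletes it."""
    if os.path.isdir(path):
        if display_warnings:
            print('Directory already exists: {}'.format(path))
    else:
        os.makedirs(path)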
def main():
    generate_proto.main()
    recreate_dir(BUILD)
    for g in [gen_python, gen_cpp]:
        g()
def main(argv):
    # -----------
    # Parse args
    # -----------
    parser = OptionParser()
    parser.add_option("-t", "--target-dir",
                      help="The directory where the tests are to be exported.")
    parser.add_option("-p", "--plugin-package",
                      help="The plug-in package for exporting plug-in integration tests.",
                      default=None)
    (options, args) = parser.parse_args()
    if options.target_dir is None:
        parser.error("Target directory must be given")

    PYTHON_VERSION = utils.get_python_version()
    TARGET_PATH = options.target_dir
    PLUGIN_PACKAGE = options.plugin_package
    log.info("Target directory: %s" % TARGET_PATH)
    log.info("Plug-in package: %r" % PLUGIN_PACKAGE)
    log.info("Python version: %s" % PYTHON_VERSION)

    log.info("Cleaning target directory...")
    utils.recreate_dir(TARGET_PATH)

    # -------------------------
    # Export script test files
    # -------------------------
    log.info("Copying script test files...")
    SCRIPT_TESTS_DIR = os.path.join(SCRIPTS_SOURCE_ROOT, 'tests')
    assert os.path.exists(SCRIPT_TESTS_DIR)
    copy_dir(source_dir=SCRIPT_TESTS_DIR,
             target_dir=os.path.join(TARGET_PATH, 'tests'),
             dir_ignore_functions=[
                 lambda d: d in ('.svn', 'temp', 'export_standalone')
             ],
             file_ignore_functions=[
                 lambda f: f == 'cone.log' or f.endswith('.pyc')
             ])

    log.info("Copying script test overlay files...")
    copy_dir(source_dir=os.path.join(ROOT_PATH, "export-bat/scripts-tests-overlay"),
             target_dir=TARGET_PATH,
             dir_ignore_functions=[lambda d: d == '.svn'])

    # --------------------------------------
    # Export plug-in integration test files
    # --------------------------------------
    log.info("Exporting plug-in integration test files...")
    subpaths_by_package = plugin_utils.find_plugin_package_subpaths(
        PLUGIN_SOURCE_ROOT, 'integration-test', PLUGIN_PACKAGE)
    for package_name, tests_path in subpaths_by_package:
        log.debug("  Package: %s" % package_name)
        log.debug("  Path:    %s" % tests_path)

        log.debug("  Copying test files...")
        target_path = os.path.join(TARGET_PATH, 'plugin-tests',
                                   package_name + '_tests')
        copy_dir(source_dir=tests_path,
                 target_dir=target_path,
                 dir_ignore_functions=[lambda d: d in ('.svn', 'temp')],
                 file_ignore_functions=[
                     lambda f: f in ('cone.log', 'export_standalone.py')
                     or f.endswith('.pyc')
                 ])

        log.debug("  Copying overlay files...")
        overlay_path = os.path.join('export-bat/plugin-integration-test-overlay')
        copy_dir(source_dir=overlay_path,
                 target_dir=target_path,
                 dir_ignore_functions=[lambda d: d == '.svn'])

        log.debug("  Exporting extra data...")
        func = read_export_function_from_file(
            os.path.join(tests_path, 'export_standalone.py'))
        if func:
            log.debug("  Executing export function...")
            func(target_path)

    TARGET_EGGS_DIR = os.path.join(TARGET_PATH, 'eggs')

    # ---------------------------
    # Copy needed dependency eggs
    # ---------------------------
    log.info("Copying library eggs...")
    DEP_EGGS_DIR = os.path.normpath(os.path.join(ROOT_PATH, '../dep-eggs'))
    assert os.path.isdir(DEP_EGGS_DIR)
    DEPENDENCIES = ['simplejson']
    for dep in DEPENDENCIES:
        egg_file_name = find_egg_file(DEP_EGGS_DIR, dep, PYTHON_VERSION)
        if egg_file_name is None:
            log.critical("Could not find egg file for dependency '%s' from '%s'"
                         % (dep, DEP_EGGS_DIR))
            return 1
        source_path = os.path.join(DEP_EGGS_DIR, egg_file_name)
        target_path = os.path.join(TARGET_EGGS_DIR, egg_file_name)
        utils.copy_file(source_path, target_path)

    # ------------------
    # Build needed eggs
    # ------------------
    log.info("Building eggs...")
    utils.build_egg(os.path.join(SOURCE_ROOT), TARGET_EGGS_DIR)
    utils.build_egg(os.path.join(SOURCE_ROOT, 'testautomation'), TARGET_EGGS_DIR)

    return 0
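# NOTE: find_egg_file is not defined in this section. Based on how it is called
# (directory, dependency name, python version -> file name or None), a minimal
# sketch, assuming eggs follow the usual '<name>-<version>-py<X.Y>.egg' naming:
import os

def find_egg_file_sketch(directory, dependency, python_version):
    """Return the first egg file name matching the dependency and Python version."""
    suffix = '-py%s.egg' % python_version
    for name in sorted(os.listdir(directory)):
        if name.startswith(dependency + '-') and name.endswith(suffix):
            return name
    return None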