def copy_project_files(targetdir, jamaicaoutputdir, fpgapartname, filestobuild,
                       reachable_functions, syscalls, interfaceResolver, classrefs):
    """
    Prepare an HLS project. Copies all required files from the local 'projectfiles' dir
    into targetdir along with any extra required files.
    Args:
        targetdir: directory to output to
        jamaicaoutputdir: absolute path that contains the output of Jamaica builder
        fpgapartname: string of the FPGA part name
        filestobuild: array of absolute file paths that will be added to the HLS tcl script as source files
        reachable_functions: array of FuncDecl nodes that are reachable and require translation
        syscalls: map{string->int} names of function calls that should be translated to PCIe system calls -> ID of call
    """
    mkdir(targetdir)
    copy_files(project_path("projectfiles", "include"), join(targetdir, "include"), [".h"])
    copy_files(project_path("projectfiles", "src"), join(targetdir, "src"), [".h", ".c"])
    shutil.copy(join(jamaicaoutputdir, "Main__.h"), join(targetdir, "include"))

    for f in filestobuild:
        if not os.path.basename(f) == "fpgaporting.c":
            # We needed fpgaporting to perform reachability analysis, but don't rewrite it
            log().info("Adding source file: " + f)
            if f.endswith(".c"):  # We only parse C files
                targetfile = os.path.join(targetdir, "src", os.path.basename(f))
                rewrite_source_file(f, targetfile, reachable_functions, syscalls,
                                    interfaceResolver, classrefs)
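# A minimal sketch of the (srcdir, dstdir, extensions) copy_files helper assumed by
# copy_project_files above: copy every file whose extension is in the whitelist.
# This is an illustration, not the actual caicos utility.
import os
import shutil

def copy_files(srcdir, dstdir, extensions=None):
    if not os.path.isdir(dstdir):
        os.makedirs(dstdir)
    for name in os.listdir(srcdir):
        src = os.path.join(srcdir, name)
        if os.path.isfile(src) and (extensions is None or os.path.splitext(name)[1] in extensions):
            shutil.copy(src, os.path.join(dstdir, name))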
def parse_dir(self, src_folder_path, out_folder_path, relative_path=""):
    # get all files
    print("===>parse dir %s" % src_folder_path)
    files = os.listdir(src_folder_path)
    for filename in files:
        file_path = os.path.join(src_folder_path, filename)
        rel_path = os.path.join(relative_path, filename)
        if os.path.isdir(file_path):
            if os.path.normpath(rel_path) in self.ignore_folders:
                # copy to dist
                utils.copy_files(file_path, os.path.join(out_folder_path, rel_path))
            else:
                if os.path.normpath(rel_path) in self.sub_folders:
                    self.parse_dir(file_path, os.path.join(out_folder_path, rel_path), "")
                else:
                    self.parse_dir(file_path, out_folder_path, rel_path)
            if self.remove_source:
                os.rmdir(file_path)
        elif os.path.isfile(file_path):
            self.parse_file(file_path, out_folder_path, rel_path)
def ren_csv_files(path, csv_files_path, raw_bkp_path):
    csv_path = path + csv_files_path
    utils.mkdir(csv_path + RENAMED)
    utils.mkdir(csv_path + NODATA)

    csv_files = [f for f in os.listdir(csv_path) if f.endswith('.csv')]
    utils.copy_files(csv_path, raw_bkp_path, csv_files)
    print('Initiating renaming of {} files'.format(len(csv_files)))

    success, nodata, error = 0, 0, 0
    for file in csv_files:
        try:
            df = pd.read_csv(csv_path + file)
            if len(df.index) > 0:
                new_name = '{}{}.csv'.format(
                    csv_path + RENAMED,
                    dates.ddmmyyyy_to_yyyy_mm_dd(file[-12:][:8]))
                os.rename(csv_path + file, new_name)
                print(new_name, 'file renamed')
                success += 1
            else:
                new_name = '{}{}'.format(csv_path + NODATA, file)
                os.rename(csv_path + file, new_name)
                print(file, 'has no data')
                nodata += 1
        except Exception:
            # new_name may be unbound or stale here, so report the source file instead
            print(file, 'file rename failed')
            error += 1
    print('{} files renamed, {} files with no data, {} errors'.format(
        success, nodata, error))
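# Hypothetical sketch of the dates.ddmmyyyy_to_yyyy_mm_dd helper used above:
# it reorders an 8-digit 'ddmmyyyy' stamp (taken from the filename) into 'yyyy_mm_dd'.
def ddmmyyyy_to_yyyy_mm_dd(stamp):
    # e.g. '31012020' -> '2020_01_31'
    return '{}_{}_{}'.format(stamp[4:8], stamp[2:4], stamp[0:2])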
def _on_install(self, _):
    self.unit.status = MaintenanceStatus("Installing apt packages")
    install_apt(packages=APT_REQUIREMENTS, update=True)
    self.unit.status = MaintenanceStatus("Preparing the environment")
    self._reset_environment()
    self.unit.status = MaintenanceStatus("Downloading srsLTE from Github")
    git_clone(GIT_REPO, output_folder=SRC_PATH, branch=GIT_REPO_TAG, depth=1)
    self.unit.status = MaintenanceStatus("Building srsLTE")
    shell(SRS_ENB_UE_BUILD_COMMAND)
    self.unit.status = MaintenanceStatus("Generating configuration files")
    copy_files(origin=CONFIG_ORIGIN_PATHS, destination=CONFIG_PATHS)
    self.unit.status = MaintenanceStatus("Generating systemd files")
    self._configure_srsenb_service()
    self._configure_srsue_service()
    service_enable(SRS_ENB_SERVICE)
    self._stored.installed = True
def safe_execute(self, files: list = None, new_files: list = None,
                 argv: list = None, stdin: str = None, timeout: int = 60):
    """
    :param files: List of files to be copied to the working directory.
    :param new_files: List of tuples (name, bytes) of files to be created.
    :param argv: List of args to be passed to the runnable. Usually a name of a file
                 from 'files' or 'new_files'.
    :param stdin: Input (str) to be passed to the created process.
    :param timeout: Timeout in seconds after which the process will be terminated.
    :return: (return_code, stdout, stderr)
    """
    files = files or []
    new_files = new_files or []
    argv = argv or []

    copy_files(files, self._temp_dir)
    for file_name, content in new_files:
        with open(os.path.join(self._temp_dir, file_name), "wb") as f:
            f.write(content)

    command_line = [self._executable]
    command_line.extend(argv)
    if stdin:
        stdin = stdin.encode('utf-8')

    process = subprocess.Popen(
        command_line,
        cwd=self._temp_dir,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    HungProcessKiller(process, timeout).start()
    stdout, stderr = process.communicate(stdin)
    return process.returncode, stdout, stderr
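# HungProcessKiller is not shown in this snippet; a plausible sketch is a daemon
# thread that kills the child if it is still running after `timeout` seconds.
# The class name comes from the snippet; the body below is an assumption.
import subprocess
import threading

class HungProcessKiller(threading.Thread):
    def __init__(self, process, timeout):
        super().__init__(daemon=True)
        self.process = process
        self.timeout = timeout

    def run(self):
        try:
            # Returns as soon as the process exits on its own
            self.process.wait(self.timeout)
        except subprocess.TimeoutExpired:
            # Still alive after the deadline: kill it so communicate() returns
            self.process.kill()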
def restore():
    print 'Restoring dotfiles...'
    source = get_dotfiles_backup_dir()
    dest = get_home_dir()
    files = get_dot_files(source)
    copy_files(files, dest)
    files = get_dot_files(dest)
    ensure_files_owned_by_user(get_user(), files)
def backup():
    print 'Backing up dotfiles...'
    # build file list
    home_dir = get_home_dir()
    excludes = get_dotfile_excludes()
    files = get_dot_files(home_dir, excludes)
    dest = get_dotfiles_backup_dir()
    copy_files(files, dest)
def test_copy_files(execute_shell_mock, log_mock):
    log_mock.return_value = log.DEBUG
    files = ['asdf']
    dest = "asdf2"
    utils.copy_files(files, dest)
    print(log_mock.mock_calls)
    execute_shell_mock.assert_called_with(['rsync', '-a', '-vv'] + files + [dest])
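# Taken together with the non-debug test later in this section, the copy_files
# under test appears to shell out to rsync, adding -vv when the log level is DEBUG.
# A minimal sketch; execute_shell and log.get_level are assumed names, not confirmed APIs.
def copy_files(files, dest):
    cmd = ['rsync', '-a']
    if log.get_level() == log.DEBUG:  # verbose transfer logging in debug mode
        cmd.append('-vv')
    execute_shell(cmd + files + [dest])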
def ask_for_save_folder(conversion_type: AstroConvType) -> str:
    """
    Obtains the save folder

    Lets the user pick between automatic save retrieving/copying or a custom save folder

    Arguments:
        conversion_type: Type of save conversion (for automatic folder retrieval purposes)

    Returns:
        The save folder path
    """
    while 1:
        try:
            Logger.logPrint("Which folder would you like to work with ?")
            Logger.logPrint("\t1) Automatically detect and copy my save folder (Please close Astroneer first)")
            Logger.logPrint("\t2) Choose a custom folder")

            work_choice = input()
            while work_choice not in ('1', '2'):
                Logger.logPrint('\nPlease choose 1 or 2')
                work_choice = input()
            Logger.logPrint(f'folder_type {work_choice}', 'debug')

            if work_choice == '1':
                if conversion_type == AstroConvType.WIN2STEAM:
                    astroneer_save_folder = AstroMicrosoftSaveFolder.get_microsoft_save_folder()
                    Logger.logPrint(f'Microsoft folder path: {astroneer_save_folder}', 'debug')
                else:
                    astroneer_save_folder = AstroSteamSaveFolder.get_steam_save_folder()
                    Logger.logPrint(f'Steam folder path: {astroneer_save_folder}', 'debug')

                save_path = ask_copy_target('AstroSaveFolder')
                utils.copy_files(astroneer_save_folder, save_path)
                Logger.logPrint(f'Save files copied to: {save_path}')
            elif work_choice == '2':
                save_path = ask_custom_folder_path()

            return save_path
        except MultipleFolderFoundError:
            Logger.logPrint('\nToo many save folders found! Please use custom folder mode.')
        except FileNotFoundError as e:
            Logger.logPrint('\nNo container found in path: ' + save_path)
            Logger.logPrint(e, 'exception')
def copy_things():
    # Tubulin_001.jpg
    prefix = "Tubulin_"
    ext = ".jpg"
    src = "/Users/tannialau/Desktop/161126_EpD12N_CP/181113_EpD12N_Cyto/Tubulin"
    dest = "/Users/tannialau/Desktop/tannia/output"
    start = 5
    end = 365
    multiple = 24
    copy_files(prefix, ext, src, dest, start, end, multiple)
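# Hypothetical sketch of the copy_files variant called above: judging by the
# 'Tubulin_001.jpg' comment and the numeric arguments, it plausibly copies every
# `multiple`-th zero-padded frame between start and end. Purely an assumption.
import os
import shutil

def copy_files(prefix, ext, src, dest, start, end, multiple):
    for n in range(start, end + 1, multiple):
        name = '%s%03d%s' % (prefix, n, ext)  # e.g. Tubulin_005.jpg
        shutil.copy(os.path.join(src, name), os.path.join(dest, name))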
def record(self, fold):
    # save plots
    save_plot(self.val_record, 'loss', self.args.n_eval, 'tmp/val_loss.png')
    save_plot(self.val_record, 'f1', self.args.n_eval, 'tmp/val_f1.png')
    save_plot(self.norm_record, 'grad_norm', self.args.n_eval, 'tmp/grad_norm.png')
    if self.args.test:
        save_plots([self.val_record, self.test_record], ['loss', 'f1'], ['val', 'test'], self.args.n_eval)

    # create subdir for this experiment
    os.makedirs(self.record_dir, exist_ok=True)
    subdir = os.path.join(self.models_dir, str_date_time())
    if self.args.mode == 'test':
        subdir += '_test'
    os.mkdir(subdir)

    # write model params and results to csv
    csvlog = os.path.join(subdir, 'info.csv')
    param_dict = {}
    for arg in vars(self.args):
        param_dict[arg] = str(getattr(self.args, arg))
    info = torch.load(self.best_info_path)
    hash = get_hash() if self.args.machine == 'dt' else 'no_hash'
    passed_args = ' '.join(sys.argv[1:])
    param_dict = {'hash': hash, 'subdir': subdir, **param_dict, **info, 'args': passed_args}
    dict_to_csv(param_dict, csvlog, 'w', 'index', reverse=False)
    header = fold == 0
    dict_to_csv(param_dict, self.record_path, 'a', 'columns', reverse=True, header=header)

    # copy all records to subdir
    png_files = ['val_loss.png', 'val_f1.png'] if not self.args.test else ['loss.png', 'f1.png']
    csv_files = ['val_probs*.csv', 'train_steps.csv', 'submission.csv', 'test_probs.csv']
    copy_files([*png_files, 'models/*.info', *csv_files], 'tmp', subdir)
    return subdir
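# Hypothetical sketch of the glob-aware copy_files used in record() above: each
# pattern (e.g. 'val_probs*.csv', 'models/*.info') is resolved relative to
# src_root and every match is copied flat into dst. Assumed, not confirmed.
import glob
import os
import shutil

def copy_files(patterns, src_root, dst):
    for pattern in patterns:
        for path in glob.glob(os.path.join(src_root, pattern)):
            shutil.copy(path, os.path.join(dst, os.path.basename(path)))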
def check_cost_and_migrate():
    """ "migrate" webserver based on dynamically changing cost """

    def get_min_cost_region():
        # get the cost map
        with open("cost.txt") as cost:
            cost = map(lambda line: line.strip().split(","), cost.readlines())
            cost_map = {c[0]: int(c[1]) for c in cost}
        min_region, min_cost = cost_map.popitem()
        for region, cost in cost_map.items():
            if cost < min_cost:
                min_cost = cost
                min_region = region
        return min_region

    started = False
    while True:
        # The service has not been started; should only happen once
        if not started:
            region = get_min_cost_region()
            print "starting WordPress on {}".format(region)
            deploy_webserver(region=region)
            print "WordPress running on {}:8080".format(NODE_MAP[region])
            started = True
        else:
            new_region = get_min_cost_region()
            if new_region != region:
                print "migrating WordPress from {} to {}".format(region, new_region)
                # first migrate the volume
                ut.copy_files("/home/ubuntu/", "mysql_volume", NODE_MAP[region], NODE_MAP[new_region])
                # stop/remove the current deployment
                remove_webserver()
                # start the new deployment
                deploy_webserver(region=new_region)
                print "WordPress running on {}:8080".format(NODE_MAP[new_region])
                region = new_region
        time.sleep(10)
def change_and_output_classes(dataset_path, output_path, species):
    scientific_names_to_class_names = {}
    for species_dict in species:
        if species_dict['excludeNoisyWebImages'] != True:
            scientific_names_to_class_names[species_dict['scientificName']] = species_dict['className']
    print("Number of names: " + str(len(scientific_names_to_class_names)))

    for root, dirs, files in os.walk(dataset_path):
        scientific_name = os.path.basename(root)
        if scientific_name in scientific_names_to_class_names:
            new_class_name = scientific_names_to_class_names[scientific_name]
            scientific_names_to_class_names.pop(scientific_name, None)
            print('Changing %s to %s' % (scientific_name, new_class_name))
            dest_dir = join(output_path, new_class_name)
            copy_files([join(root, x) for x in files], dest_dir)

    print("Scientific names not changed: " + str(scientific_names_to_class_names.keys()))
def _create_deployment(repo_url, branch):
    # create temp folder
    prefix = '{0}_'.format(time.strftime("%Y%m%d"))
    temp_build_folder = tempfile.mkdtemp(prefix=prefix)
    try:
        # clone git repo
        logger.info('Cloning repo..')
        clone_folder = os.path.join(temp_build_folder, 'repo')
        repo_path = utils.clone_repo(repo_url, destination=clone_folder, branch=branch)
        faaspot_folder = os.path.join(repo_path, FAASPOT_FOLDER)
        faaspot_config = os.path.join(faaspot_folder, 'faaspot.yml')
        logger.debug('Repo cloned to: {0}'.format(clone_folder))

        # prepare deployment folder
        logger.debug('Creating deployment folder..')
        deployment_folder = os.path.join(temp_build_folder, 'deploy')
        utils.makedir(deployment_folder)
        logger.debug('Deployment folder created: {0}'.format(deployment_folder))

        # copy modules from faaspot folder to the deployment folder
        logger.info('Copying config files into deployment folder..')
        utils.copy_files(faaspot_folder, deployment_folder)

        # build package into the deployment folder
        logger.info('Installing dependencies..')
        utils.install_libraries(repo_path, deployment_folder)

        # create a zip from the deployment folder
        logger.info('Packaging it..')
        deployment_zip = os.path.join(temp_build_folder, 'deploy.zip')
        utils.zip_dir(deployment_folder, deployment_zip)
        logger.info('Zip file created: {0}'.format(deployment_zip))

        yield Deployment(repo_path, faaspot_config, deployment_zip)
    finally:
        utils.remove_dir(temp_build_folder)
def build_src_project(bindings, jamaicaoutput, targetdir, syscalls, interfaceResolver, debug, classrefs):
    """
    Construct the software portion of the project. Copy the C source code for the Jamaica project,
    refactoring the functions that are implemented on the FPGA.
    Also copies the FPGA interface and build scripts.

    bindings: A map {id -> java method signature} that gives the ID of each hardware method.
        Generated from prepare_hls_project.build_from_functions
    jamaicaoutput: Absolute path of the Jamaica builder output directory which contains the source C files
    targetdir: Absolute path to place output files
    """
    if not os.path.isfile(join(jamaicaoutput, "Main__nc.o")):
        raise CaicosError("Cannot find file " + str(join(jamaicaoutput, "Main__nc.o")) +
                          ". Ensure that the application has first been built by Jamaica Builder.")
    mkdir(targetdir)
    copy_files(project_path("projectfiles", "juniper_fpga_interface"), join(targetdir, "juniper_fpga_interface"))
    copy_files(project_path("projectfiles", "malloc_preload"), join(targetdir, "malloc_preload"))
    refactor_src(bindings, jamaicaoutput, join(targetdir, "src"), debug)
    if debug:
        copy_files(project_path("debug_software"), join(targetdir, "src"))
    generate_interrupt_handler(join(targetdir, "src", "caicos_interrupts.c"), syscalls, interfaceResolver, classrefs)
    shutil.copy(join(jamaicaoutput, "Main__nc.o"), join(targetdir, "src"))
    shutil.copy(project_path("projectfiles", "include", "juniperoperations.h"), join(targetdir, "src"))
    shutil.copy(project_path("projectfiles", "scripts", "run.sh"), targetdir)
    make_executable([join(targetdir, "run.sh")])
def split_files(source_dir, training_dir, validation_dir, testing_dir, omit_small_classes):
    class_names_to_training_file_paths = {}
    for class_dir in sorted(os.listdir(source_dir)):
        print("Splitting %s" % class_dir)
        class_dir_path = join(source_dir, class_dir)
        class_files = os.listdir(class_dir_path)
        validation_percentage = 10
        testing_percentage = 10
        n_validation_files = max(1, math.ceil(len(class_files) * validation_percentage / 100))
        n_testing_files = max(1, math.ceil(len(class_files) * testing_percentage / 100))
        n_training_files = len(class_files) - n_validation_files - n_testing_files
        #print(n_training_files, n_validation_files, n_testing_files)

        if n_training_files <= 0:
            if omit_small_classes:
                print("Skipping", class_dir)
                continue
            else:
                n_validation_files = 0
                n_training_files = 1

        class_file_paths = set([join(source_dir, class_dir, x) for x in class_files])

        # Seed the RNG with a constant value so that images are split the same way every time
        # this script is executed. Unfortunately, the training split will still change the
        # moment the set of images for a particular fish changes.
        random_state = np.random.RandomState(42)

        # Sort these files so that they are in a consistent order, which is important.
        validation_file_paths = set(random_state.choice(sorted(class_file_paths), n_validation_files))
        class_file_paths = class_file_paths - validation_file_paths
        testing_file_paths = set(random_state.choice(sorted(class_file_paths), n_testing_files))
        class_file_paths = class_file_paths - testing_file_paths
        #print(len(class_file_paths), n_training_files)
        #assert(len(class_file_paths) == n_training_files)
        training_file_paths = class_file_paths

        # Refresh the folders in case the list of files has changed.
        # We can't have one file added to more than one set.
        copy_files(validation_file_paths, join(validation_dir, class_dir), refresh=True)
        copy_files(testing_file_paths, join(testing_dir, class_dir), refresh=True)
        copy_files(training_file_paths, join(training_dir, class_dir), refresh=True)
        class_names_to_training_file_paths[class_dir] = training_file_paths

    #rmtree(source_dir)
    return class_names_to_training_file_paths
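# A minimal sketch of the copy_files(paths, dest_dir, refresh=...) helper assumed
# by split_files above: with refresh=True the destination is wiped first, so a file
# can never linger in more than one of the train/validation/test splits.
import os
import shutil

def copy_files(file_paths, dest_dir, refresh=False):
    if refresh and os.path.isdir(dest_dir):
        shutil.rmtree(dest_dir)  # drop stale files from a previous run
    os.makedirs(dest_dir, exist_ok=True)
    for path in file_paths:
        shutil.copy(path, os.path.join(dest_dir, os.path.basename(path)))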
def copy_project_files(args):
    path = args.path
    if path is None:
        raise Exception('copy_project_files: args.path must not be None. Please give a path to copy files to')
    ensure_path(args.path)
    # Fall back to the default params dir when none was given
    param_path = args.param_path if args.param_path is not None else './params/'
    print(path)
    if not path.endswith('/'):
        path += '/'
    file_list = [
        #'cmd.py',
        'Models',
        'Optimizers',
        'Trainers.py',
        'DataLoaders.py',
        #'Analyzer.py',
        'utils.py',
        'utils_anal.py',
        'utils_model.py',
        'config_sys.py',
    ]
    copy_files(file_list, path_from='./src/', path_to=path + 'src/')
    file_list = ['main.py', 'params/__init__.py']
    copy_files(file_list, path_from='./', path_to=path)

    param_files = get_param_files(args)
    #param_files = list(map(lambda file: param_path + file, param_files))
    model_file = param_files['model_file']
    optimizer_file = param_files['optimizer_file']
    trainer_file = param_files['trainer_file']
    data_loader_file = param_files['data_loader_file']
    component_files = [model_file, optimizer_file, trainer_file, data_loader_file]
    if param_files.get('config_file') is not None:
        component_files.append(param_files['config_file'])
    #print(component_files)
    copy_files(component_files, path_from=param_path, path_to=path + param_path)
def __init__(self, args, input_path):
    self.output_path = Path(args.split_path)
    input_files = WavFilesDataset.filter_paths(input_path.glob('**/*'), ['wav'])
    n_train = int(len(input_files) // args.stems * args.train_ratio)
    n_val = int(len(input_files) // args.stems * args.val_ratio)
    if n_val == 0:
        n_val = 1
    n_test = len(input_files) - n_train - n_val
    assert n_test > 0
    stems = []
    for s in range(args.stems):
        dst = Path(self.output_path / f'{input_path.name}_{s}')
        dst.mkdir(exist_ok=True, parents=True)
        stem_files = WavFilesDataset.filter_stems(input_files, s)
        random.shuffle(stem_files)
        copy_files(stem_files[:n_train], input_path, dst / 'train')
        copy_files(stem_files[n_train:n_train + n_val], input_path, dst / 'val')
        copy_files(stem_files[n_train + n_val:], input_path, dst / 'test')
        logger.info('Split stem %s of path %s as follows: Train - %s, Validation - %s, Test - %s',
                    s, input_path.name, n_train, n_val, n_test)
def test_copy_files(execute_shell_mock):
    files = ['asdf']
    dest = "asdf2"
    utils.copy_files(files, dest)
    execute_shell_mock.assert_called_with(['rsync', '-a'] + files + [dest])
num_support = int(num_support)
model_files = ["emissions.txt", "transitions.txt"]
model_directory = "models/%s_support_%s/" % (data_file_base, num_support)
inference_files = ["hmm_%s_support_%s_%s.csv" % (data_file_base, num_support, train_test)
                   for train_test in ["test", "crossval"]]
inference_directory = "results/"

os.chdir(os.path.join(dcap_bnet_dir, "bnet"))  # run from bnet directory
sys.path.append(os.getcwd())
import run_hmm
import utils

print 'Running HMM for %s support %s. parameters (data) dir is %s. resultsDirectory is %s' % (
    data_file_base, num_support, args.parametersDirectory, results_directory)
run_hmm.run_hmm(data_file_base, num_support, num_pools=12, num_iterations=5)
print 'Client done running HMM! Moving files %s and %s to resultsDirectory %s' % (
    model_files, inference_files, results_directory)
utils.copy_files(model_files, model_directory, results_directory)
utils.copy_files(inference_files, inference_directory, results_directory)
os.chdir(start_dir)  # switch back
print 'Client done moving files! Job finished'
def build_all(config):
    """
    Build a JUNIPER project. Format of the config dictionary is described in the
    docstring for config_specification.
    """
    utils.log().setLevel(logging.INFO)
    utils.remove_slots_from_classes(pycparser.c_ast)
    utils.remove_slots_from_classes(pycparser)
    try:
        if config.get("cleanoutput", "false").lower() == "true":
            if os.path.isdir(config["outputdir"]):
                for f in os.listdir(config["outputdir"]):
                    shutil.rmtree(join(config["outputdir"], f), ignore_errors=True)
        mkdir(config["outputdir"])

        # Determine output paths
        swdir = config.get("swoutputdir", join(config["outputdir"], "software"))
        boarddir = config.get("hwoutputdir", join(config["outputdir"], "hardware"))
        scriptsdir = config.get("scriptsoutputdir", join(config["outputdir"], "scripts"))
        hwdir = join(boarddir, "reconfig", config.get("hlsprojectname", "caicos"))

        if "astcache" in config:
            astcache.activate_cache(config["astcache"])

        # Create board design
        log().info("Building board design for " + config["targetboard"] + " in " + str(boarddir) + "...")
        utils.copy_files(project_path("dynamic_board_designs", "common"), boarddir)
        utils.copy_files(project_path("dynamic_board_designs", config["targetboard"]), boarddir)

        # Build hardware project
        log().info("Building hardware project in " + str(hwdir) + "...")
        if config.get("dev_softwareonly", "false").lower() == "true":
            log().warning("dev_softwareonly is set. Generating software only.")
            bindings = __getfakebindings(config["signatures"])
        else:
            bindings, syscalls, interfaceResolver, classrefs = prepare_hls_project.build_from_functions(
                config["signatures"],
                config.get("jamaicaoutputdir_hw", config["jamaicaoutputdir"]),
                hwdir,
                config.get("additionalhardwarefiles"),
                config["fpgapart"],
                config.get("notranslates", []),
            )

        target = join(config["outputdir"], "push.sh")
        shutil.copyfile(project_path("projectfiles", "scripts", "push.sh"), target)
        make_executable([
            target,
            join(boarddir, "build_base.sh"),
            join(boarddir, "make_reconfig.sh"),
            join(boarddir, "base", "build_hls.sh"),
        ])

        # Build software project
        log().info("Building software project in " + str(swdir) + "...")
        prepare_src_project.build_src_project(
            bindings,
            config["jamaicaoutputdir"],
            swdir,
            syscalls,
            interfaceResolver,
            config.get("debug", False),
            classrefs,
        )

        # Output templated Makefile.inc
        contents = open(project_path("projectfiles", "templates", "Makefile.inc")).read()
        subs = make_options[config["targetboard"]]
        subs["SUB_JAMAICATARGET"] = config["jamaicatarget"]
        template = Template(contents)
        fout = open(join(swdir, "Makefile.inc"), "w")
        fout.write(template.safe_substitute(subs))
        fout.close()

        # Output main makefile
        shutil.copyfile(project_path("projectfiles", "templates", "Makefile"), join(swdir, "Makefile"))

        # Output scripts folder
        mkdir(scriptsdir)
        for fn in ["cmd_template.bat", "program.sh", "rescan.sh", "getoffsets.py"]:
            shutil.copyfile(project_path("projectfiles", "scripts", fn), join(scriptsdir, fn))

        # Output kernel module
        copy_files(project_path("system_software", "host_kernel_module"), join(swdir, "host_kernel_module"))

        log().info("caicos done.")
    except CaicosError, e:
        log().error("A critical error was encountered:\n\t" + str(e))
if __name__ == '__main__':
    ### Change items here ##############################################
    # get structurals
    datadir = '/home/jagust/graph/data/mri1.5/tr220'
    anatstr = 'B*/raw/B*_anat.nii.gz'
    anatomicals = get_files_old_only(datadir, anatstr)
    ####################################################################

    # run dartel on cohort
    gms = utils.get_files(datadir, 'B*/despike_ants_realign/coreg_masks/aparcaseg.nii.gz')
    wms = utils.get_files(datadir, 'B*/despike_ants_realign/coreg_masks/B*_WM_mask.nii.gz')
    gms.sort()
    wms.sort()
    files = []
    pth, nme, ext = filemanip.split_filename(gms[0])
    datestr = utils.make_datestr()
    tmplt_nme = 'dartel_%s' % (datestr)
    templatedir = '/home/jagust/graph/data/mri1.5/tr220/template'
    dout = spm_dartel_make(gms, wms, templatedir, tmplt_nme)
    # These two lookups were commented out but their results are used below,
    # so they are restored here
    template = get_files_old_only(datadir, 'B*/anat/vbm8/%s*' % (tmplt_nme))
    templatedir, exists = utils.make_dir(datadir, 'template')
    newtemplate = utils.copy_files(template, templatedir)
    utils.remove_files(template)
    flowfieldstmp = utils.get_files(datadir, '*/anat/vbm8/*%s*' % (tmplt_nme))
    flowfields = move_flowfields(flowfieldstmp)
    dartellog = write_dartel_log(newtemplate, flowfields)
with open(config_file) as f:
    args1 = f.read().split("\n")
data_file_base, num_support = args1[:2]
num_support = int(num_support)
model_files = ["emissions.txt", "transitions.txt"]
model_directory = "models/%s_support_%s_logreg/" % (data_file_base, num_support)
inference_files = ["logistic_reg_hmm_%s_support_%s_%s.csv" % (data_file_base, num_support, train_test)
                   for train_test in ["train", "test", "crossval"]]
inference_directory = "results/"

os.chdir(os.path.join(dcap_bnet_dir, "bnet"))  # run from bnet directory
sys.path.append(os.getcwd())
from run_log_reg_hmm_experiments import run_experiments
import utils

print 'Running HMM for %s support %s. parameters (data) dir is %s. resultsDirectory is %s' % (
    data_file_base, num_support, args.parametersDirectory, results_directory)
run_experiments(data_file_base, num_support, num_pools=12, num_iterations=5)
print 'Client done running HMM_logreg! Moving files %s and %s to resultsDirectory %s' % (
    model_files, inference_files, results_directory)
utils.copy_files(model_files, model_directory, results_directory)
utils.copy_files(inference_files, inference_directory, results_directory)
os.chdir(start_dir)  # switch back
print 'Client done moving files! Job finished'
def backup():
    log.info('Backing up app store preferences (.plist)...')
    files = build_file_list()
    dest = get_app_store_preferences_backup_dir()
    copy_files(files, dest)
        'lr': current_lr
    }
    for tag, value in info.items():
        logger.log_scalar(tag, value, epoch + 1)

    # Log values and gradients of the model parameters
    for tag, value in model.named_parameters():
        if value.grad is not None:
            tag = tag.replace('.', '/')
            if torch.cuda.is_available():
                logger.log_histogram(tag, value.data.cpu().numpy(), epoch + 1)
                logger.log_histogram(tag + '/grad', value.grad.data.cpu().numpy(), epoch + 1)
            else:
                logger.log_histogram(tag, value.data.numpy(), epoch + 1)
                logger.log_histogram(tag + '/grad', value.grad.data.numpy(), epoch + 1)


if __name__ == '__main__':
    U.copy_files(backup_file_list, output_dir_path)
    with open(output_dir_path + 'description.txt', 'w') as f:
        f.write(description)
    training_loop()
def run( common_args, cmd_argv ):
    args = docopt(__doc__, argv=cmd_argv)

    # Verbose option for subcommand
    vopt = ' -v ' if common_args['-v'] else ''

    # Default Package name
    pkg = args['<repo>']
    if ( args['-p'] ):
        pkg = args['-p']

    # CLEAN-UP (for a failed overlay adoption)
    if ( args['clean'] ):
        tmpdst = os.path.join( PACKAGE_ROOT(), PACKAGE_INFO_DIR(), TEMP_DIR_NAME() )
        utils.remove_tree( tmpdst, "error", "warn" )
        sys.exit(0)

    # default branch option
    branch_opt = ""
    if ( args['-b'] ):
        branch_opt = '-b ' + args['-b']

    # Get the current time
    dt_string = time.asctime(time.gmtime())

    # check for already adopted
    json_dict = utils.load_package_file()
    pkgobj, deptype, pkgidx = utils.json_find_dependency( json_dict, pkg )
    if ( pkgobj != None ):
        sys.exit( f'Package {pkg} has already been adopted as a {deptype} dependency' )

    # double check if the package has already been adopted (i.e. there were manual edits to the package.json file)
    if ( not args['overlay'] ):
        dstpkg = os.path.join( args['<dst>'], pkg )
        if ( os.path.exists( dstpkg ) ):
            sys.exit( f"ERROR: The destination - {dstpkg} - already exists" )

    #
    # Adopt: Foreign
    #
    if ( args['foreign'] ):
        # Copy the FO package
        cmd = f"evie.py {vopt} --scm {common_args['--scm']} copy -p {pkg} {branch_opt} {args['<dst>']} {args['<repo>']} {args['<origin>']} {args['<id>']}"
        t = utils.run_shell( cmd, common_args['-v'] )
        utils.check_results( t, f"ERROR: Failed to make a copy of the repo: {args['<repo>']}", 'copy', 'get-error-msg', common_args['--scm'] )

        # update the package.json file
        dst_pkg_info = os.path.join( args['<dst>'], pkg, PACKAGE_INFO_DIR() )
        incoming_semver = utils.get_adopted_semver( dst_pkg_info, args['--semver'], pkg, not args['--nowarn'] )
        d = utils.json_create_dep_entry( pkg, "foreign", args['<dst>'], dt_string, incoming_semver,
                                         args['-b'], args['<id>'], args['<repo>'], common_args['--scm'], args['<origin>'] )

        # Verify there is a package file for the package being adopted
        if ( check_for_package_file( d, args ) ):
            # Check for cyclical deps
            if ( check_cyclical_deps( json_dict, d, args ) == False ):
                # Remove the package
                cmd = f"evie.py {vopt} --scm {d['repo']['type']} rm -p {d['pkgname']} {branch_opt} {d['parentDir']} {d['repo']['name']} {d['repo']['origin']} {d['version']['tag']}"
                t = utils.run_shell( cmd, common_args['-v'] )
                utils.check_results( t, f"ERROR: Failed to remove the package: {d['repo']['name']}", 'rm', 'get-error-msg', common_args['--scm'] )

                # Display parting message (if there is one)
                print("Adoption was 'reverted'")
                utils.display_scm_message( 'rm', 'get-success-msg', common_args['--scm'] )
                sys.exit(1)

        # Save changes
        utils.json_update_package_file_with_new_dep_entry( json_dict, d, args['--weak'] )

        # Display parting message (if there is one)
        utils.display_scm_message( 'copy', 'get-success-msg', common_args['--scm'] )

    #
    # Adopt: ReadOnly
    #
    elif ( args['readonly'] ):
        # Mount the RO package
        cmd = f"evie.py {vopt} --scm {common_args['--scm']} mount -p {pkg} {branch_opt} {args['<dst>']} {args['<repo>']} {args['<origin>']} {args['<id>']}"
        t = utils.run_shell( cmd, common_args['-v'] )
        utils.check_results( t, f"ERROR: Failed to mount the repo: {args['<repo>']}", 'mount', 'get-error-msg', common_args['--scm'] )

        # update the package.json file
        dst_pkg_info = os.path.join( args['<dst>'], pkg, PACKAGE_INFO_DIR() )
        incoming_semver = utils.get_adopted_semver( dst_pkg_info, args['--semver'], pkg, not args['--nowarn'] )
        d = utils.json_create_dep_entry( pkg, "readonly", args['<dst>'], dt_string, incoming_semver,
                                         args['-b'], args['<id>'], args['<repo>'], common_args['--scm'], args['<origin>'] )

        # Verify there is a package file for the package being adopted
        if ( check_for_package_file( d, args ) ):
            # Check for cyclical deps
            if ( check_cyclical_deps( json_dict, d, args ) == False ):
                # Remove the package
                cmd = f"evie.py {vopt} --scm {d['repo']['type']} umount -p {d['pkgname']} {branch_opt} {d['parentDir']} {d['repo']['name']} {d['repo']['origin']} {d['version']['tag']}"
                t = utils.run_shell( cmd, common_args['-v'] )
                utils.check_results( t, f"ERROR: Failed to umount the repo: {d['repo']['name']}", 'umount', 'get-error-msg', common_args['--scm'] )

                # Display parting message (if there is one)
                print("Adoption was 'reverted'")
                utils.display_scm_message( 'umount', 'get-success-msg', common_args['--scm'] )
                sys.exit(1)

        # Save changes
        utils.json_update_package_file_with_new_dep_entry( json_dict, d, args['--weak'] )

        # Mark files as readonly
        utils.set_tree_readonly( dstpkg )

        # Display parting message (if there is one)
        utils.display_scm_message( 'mount', 'get-success-msg', common_args['--scm'] )

    #
    # Adopt: Overlay
    #
    else:
        # Get a temporary copy of the OV package
        tmpdst = os.path.join( PACKAGE_INFO_DIR(), TEMP_DIR_NAME() )
        cmd = f"evie.py {vopt} --scm {common_args['--scm']} copy --force -p {pkg} {branch_opt} {tmpdst} {args['<repo>']} {args['<origin>']} {args['<id>']}"
        t = utils.run_shell( cmd, common_args['-v'] )
        utils.check_results( t, f"ERROR: Failed to make a copy of the repo: {args['<repo>']}", 'copy', 'get-error-msg', common_args['--scm'] )

        # Fail if missing outcast info
        src_pkg = os.path.join( tmpdst, pkg )
        dst_pkg = os.path.join( OVERLAY_PKGS_DIR(), pkg )
        dst_pkg_info = os.path.join( dst_pkg, PACKAGE_INFO_DIR() )
        src_pkg_info = os.path.join( src_pkg, PACKAGE_INFO_DIR() )
        if ( not os.path.isfile( os.path.join( src_pkg_info, PACKAGE_FILE() ) ) ):
            utils.remove_tree( tmpdst )
            sys.exit( f"ERROR: Package - {pkg} - does NOT have {PACKAGE_FILE()} file" )
        if ( not os.path.isfile( os.path.join( src_pkg_info, PKG_DIRS_FILE() ) ) ):
            utils.remove_tree( tmpdst )
            sys.exit( f"ERROR: Package - {pkg} - does NOT have {PKG_DIRS_FILE()} file" )
        if ( not os.path.isfile( os.path.join( src_pkg_info, IGNORE_DIRS_FILE() ) ) ):
            utils.remove_tree( tmpdst )
            sys.exit( f"ERROR: Package - {pkg} - does NOT have {IGNORE_DIRS_FILE()} file" )

        # Copy the adoptee's package info directory
        utils.copy_pkg_info_dir( dst_pkg_info, src_pkg_info )

        # Create the dependency entry for the adopted package
        incoming_semver = utils.get_adopted_semver( dst_pkg_info, args['--semver'], pkg, not args['--nowarn'] )
        d = utils.json_create_dep_entry( pkg, "overlay", args['<dst>'], dt_string, incoming_semver,
                                         args['-b'], args['<id>'], args['<repo>'], common_args['--scm'], args['<origin>'] )

        # Check for cyclical deps
        if ( check_cyclical_deps( json_dict, d, args ) == False ):
            # Remove the package from the overlaid directory
            utils.remove_tree( dst_pkg )
            sys.exit("Adoption was 'reverted'")

        # Copy the adoptee's extra info directories
        utils.copy_extra_dirs( dst_pkg, src_pkg )

        # Get list of directories to copy/overlay
        dirs = utils.get_adoptee_owned_dirs( os.path.join( tmpdst, pkg, PACKAGE_INFO_DIR() ), tmpdst )
        if ( dirs != None ):
            for dir in dirs:
                src = os.path.join( src_pkg, dir )
                dst = os.path.join( PACKAGE_ROOT(), dir )
                utils.copy_files( src, dst )

        # Clean-up
        utils.remove_tree( tmpdst )

        # Save changes
        utils.json_update_package_file_with_new_dep_entry( json_dict, d, args['--weak'] )
        print( f"Package - {pkg} - adopted as an OVERLAY package. Remember to add the new files to your SCM" )
def backup_microsoft_save_folder(to_path: str) -> str:
    astroneer_save_folder = get_microsoft_save_folder()
    utils.copy_files(astroneer_save_folder, to_path)
    return astroneer_save_folder
print('Argument List:', str(sys.argv))
if len(sys.argv) <= 2:
    print("Arg #1: input folder")
    print("Arg #2: output folder")
    sys.exit("Looks like you did not specify the location of your data")

data_src = sys.argv[1]
data_output = sys.argv[2]

only_folders = utils.get_folder_list(data_src)
print(only_folders)

mkdir(data_output)
mkdir(join(data_output, TRAIN_DIR_NAME))
mkdir(join(data_output, TEST_DIR_NAME))

for folder in only_folders:
    current_files = utils.get_file_list(join(data_src, folder))
    class_size = len(current_files)
    test_size = int(round((class_size / 100) * SPLIT_RATIO))
    train_size = class_size - test_size
    print(join(data_src, folder), ", size = ", class_size, ", test = ", test_size, ", train = ", train_size)
    print(len(current_files[:test_size]))
    print(len(current_files[test_size:]))
    current_train_folder = join(data_output, TRAIN_DIR_NAME, folder)
    current_test_folder = join(data_output, TEST_DIR_NAME, folder)
    mkdir(current_train_folder)
    mkdir(current_test_folder)
    utils.copy_files(current_files[:test_size], join(data_src, folder), current_test_folder)
    utils.copy_files(current_files[test_size:], join(data_src, folder), current_train_folder)
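# A minimal sketch of the three-argument utils.copy_files(filenames, src_dir, dst_dir)
# shape used above (and in ren_csv_files earlier): copy each named file from the
# source directory into the destination. An illustration, not the actual utility.
import os
import shutil

def copy_files(filenames, src_dir, dst_dir):
    for name in filenames:
        shutil.copy(os.path.join(src_dir, name), os.path.join(dst_dir, name))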
from config import directory_config
from utils import clear_contents, copy_files

final_object_for_scoring = {
    'features_list_object_name': "features_top200",
    'transformer_object_name': "transformer_count_top200",
    'model_object_name': "classifier_rf_v1"
}

clear_contents(folder=directory_config['root_dir'] + directory_config['final_objects_dir'])
copy_files(directory_config['root_dir'], directory_config['features_list_dir'],
           directory_config['features_list_object_name'] + ".pkl",
           directory_config['final_objects_dir'], "features")
copy_files(directory_config['root_dir'], directory_config['transformer_dir'],
           directory_config['transformer_object_name'] + ".pkl",
           directory_config['final_objects_dir'], "transformer")
copy_files(directory_config['root_dir'], directory_config['model_object_dir'],
           directory_config['model_object_name'] + ".pkl",
           directory_config['final_objects_dir'], "model_object")

clear_contents(folder=directory_config['root_dir'] + directory_config['final_predictions_dir'])
copy_files(directory_config['root_dir'], directory_config['train_predictions_dir'],
           directory_config['model_object_name'] + "_train_predictions.csv",
           directory_config['final_predictions_dir'], "train")
copy_files(directory_config['root_dir'], directory_config['test_predictions_dir'],
           directory_config['model_object_name'] + "_test_predictions.csv",
           directory_config['final_predictions_dir'], "test")
    document.addEventListener("DOMContentLoaded", function(event) {
        attention_viz_matrix(data);
    });
</script>
</head>
<body>
<div id="sentences"></div>
</body>
</html>
""" % json.dumps(sentences, ensure_ascii=True).replace("\\n", "")

output_files = [
    'attention_viz/attention.js',
    'attention_viz/style.css',
    'attention_viz/d3.v3.min.js',
]
copy_files(output_files, ARGS.output_dir, logger=logging)

output_path_overlay = os.path.join(ARGS.output_dir, "index.html")
if output_path_overlay is not None:
    with open(output_path_overlay, "w") as file:
        logging.info("write index file to %s" % output_path_overlay)
        file.write(html_string_overlay)

output_path_matrix = os.path.join(ARGS.output_dir, "index_matrix.html")
if output_path_matrix is not None:
    with open(output_path_matrix, "w") as file:
        logging.info("write index file to %s" % output_path_matrix)
        file.write(html_string_matrix)