def _copy(self, dest, **kwargs):
    """
    Copies the artifacts from a solutions file to a destination directory.
    """
    dest = dest.rstrip('/')
    # create directory if not present
    if not os.path.exists(dest):
        os.makedirs(dest)
    for project_name in self.__get_projects():
        # create the project directory if not present
        dir_name = '%s/%s' % (dest, project_name)
        if not os.path.exists(dir_name):
            os.makedirs(dir_name)
        artifacts = self.__get_artifacts(project_name)
        for artifact in artifacts:
            filename = artifact.split('/')[-1]
            self.logger.debug('Copying %s to %s/' % (filename, dir_name))
            # copyfile or copytree depending on whether it's a file or directory.
            copy(artifact, '%s/%s' % (dir_name, filename))
        # copy in envrc.sh if found and create upstart
        envrc = '%s/envrc.sh' % project_name
        if os.path.exists(envrc):
            self.logger.debug('Found envrc.sh for project %s' % project_name)
            copy(envrc, '%s/%s' % (dir_name, 'envrc.sh'))
        self.__write_templates(project_name, dir_name)
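
# The `copy` helper used above is not shown; per the inline comment it
# dispatches to shutil.copyfile or shutil.copytree depending on whether the
# source is a file or a directory. A minimal sketch, assuming that behavior:
import os
import shutil

def copy(src, dest):
    """Copy a file with copyfile, or a directory tree with copytree."""
    if os.path.isdir(src):
        # copytree requires that dest does not already exist
        shutil.copytree(src, dest)
    else:
        shutil.copyfile(src, dest)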
def make_desktop_file(self, nw_path, export_dest):
    icon_set = self.get_setting('icon')
    icon_path = utils.path_join(self.project_dir(), icon_set.value)
    if os.path.exists(icon_path) and icon_set.value:
        utils.copy(icon_path, export_dest)
        icon_path = utils.path_join(export_dest, os.path.basename(icon_path))
    else:
        icon_path = ''
    name = self.project_name()
    pdir = self.project_dir()
    version = self.get_setting('version')
    desc = self.get_setting('description')
    dfile_path = utils.path_join(export_dest, u'{}.desktop'.format(name))
    file_str = (
        u'[Desktop Entry]\n'
        u'Version={}\n'
        u'Name={}\n'
        u'Comment={}\n'
        u'Exec={}\n'
        u'Icon={}\n'
        u'Terminal=false\n'
        u'Type=Application\n'
        u'Categories=Utility;Application;\n'
    )
    file_str = file_str.format(version.value, name, desc.value,
                               nw_path, icon_path)
    with codecs.open(dfile_path, 'w+', encoding='utf-8') as f:
        f.write(file_str)
    os.chmod(dfile_path, 0755)
def _archive(self, dest, **kwargs):
    """
    Creates a tarball from output_paths by using the _copy function
    and stores the logfile in the tarball.
    """
    dest = dest.rstrip('/')
    # make a temp directory
    tempath = tempfile.mkdtemp()
    # use monotool._copy to get all our artifacts
    # into the temp directory (tempath).
    self._copy(tempath)
    # copy the logfile into tempath so we have it for later.
    lf = logfile()
    filename = lf.split('/')[-1]
    copy(lf, '%s/%s' % (tempath, filename))
    # use the git hash of the current directory as part of the tarball name.
    ghash, _, _ = run('git rev-parse HEAD')
    ghash = ghash.rstrip()  # remove newline character
    app = self.__get_app_name()
    # create tarball
    self.logger.info('Saving tarball %s/%s.%s.tar.gz' % (dest, app, ghash))
    mktar(tempath, '%s/%s.%s.tar.gz' % (dest, app, ghash))
    # clean up by deleting the temp directory.
    delete(tempath)
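
# `mktar` is another helper that is not shown. A minimal sketch, assuming it
# packs the contents of a directory into a gzipped tarball at the given path:
import os
import tarfile

def mktar(src_dir, tar_path):
    """Pack the contents of src_dir into a gzipped tarball at tar_path."""
    with tarfile.open(tar_path, 'w:gz') as tar:
        for name in os.listdir(src_dir):
            # arcname keeps paths inside the tarball relative to src_dir
            tar.add(os.path.join(src_dir, name), arcname=name)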
def _restore_credentials(tempdir, config):
    ctx.logger.info('Restoring credentials')
    dump_cred_dir = os.path.join(tempdir, _CRED_DIR)
    if not os.path.isdir(dump_cred_dir):
        ctx.logger.info('Missing credentials dir: {0}'.format(dump_cred_dir))
        return
    restored_cred_dir = _create_restored_cred_dir()
    for dep_node_id in os.listdir(dump_cred_dir):
        restored_agent_key_path = \
            _restore_agent_key_from_dump(dump_cred_dir,
                                         restored_cred_dir,
                                         dep_node_id)
        deployment_id, node_id = dep_node_id.split('_')
        agent_key_path_in_db = _agent_key_path_in_db(config,
                                                     node_id,
                                                     deployment_id)
        agent_key_path_in_db = os.path.expanduser(agent_key_path_in_db)
        if os.path.isfile(agent_key_path_in_db):
            with open(agent_key_path_in_db) as key_file:
                content_1 = key_file.read()
            with open(restored_agent_key_path) as key_file:
                content_2 = key_file.read()
            if content_1 != content_2:
                raise NonRecoverableError(
                    'Agent key path already taken: {0}'.format(
                        agent_key_path_in_db))
            ctx.logger.debug('Agent key path already exists: {0}'.format(
                agent_key_path_in_db))
        else:
            utils.copy(restored_agent_key_path, agent_key_path_in_db)
def obs_operator_ensemble(istage):
    # assumes that the prior ensemble is already linked to advance_temp<i>/wrfout_d01
    print('running obs operator on ensemble forecast')
    os.chdir(cluster.dartrundir)

    if sat_channel:
        list_ensemble_truths = []

        for iens in range(1, exp.n_ens+1):
            print('observation operator for ens #'+str(iens))
            # ens members are already linked to advance_temp<i>/wrfout_d01
            copy(cluster.dartrundir+'/advance_temp'+str(iens)+'/wrfout_d01',
                 cluster.dartrundir+'/wrfout_d01')
            # DART may need a wrfinput file as well, which serves as a
            # template for dimension sizes
            symlink(cluster.dartrundir+'/wrfout_d01',
                    cluster.dartrundir+'/wrfinput_d01')

            # add geodata; if istage > 0, wrfout is DART output (has coords)
            if istage == 0:
                wrfout_add_geo.run(cluster.dartrundir+'/geo_em.d01.nc',
                                   cluster.dartrundir+'/wrfout_d01')

            # run perfect_model_obs (forward operator)
            os.system('mpirun -np 12 ./perfect_model_obs > /dev/null')

            # truth values in obs_seq.out are H(x) values
            true, _ = read_truth_obs_obsseq(cluster.dartrundir+'/obs_seq.out')
            list_ensemble_truths.append(true)

        n_obs = len(list_ensemble_truths[0])
        np_array = np.full((exp.n_ens, n_obs), np.nan)
        for i in range(exp.n_ens):
            np_array[i, :] = list_ensemble_truths[i]
        return np_array
    else:
        raise NotImplementedError()
def save_poster(self):
    thumbnail_dest = utils.join(self.nfo_path, 'folder.jpg')
    try:
        utils.copy(xbmc.translatePath(self.thumbnail), thumbnail_dest)
        log("save_poster: wrote: %s" % xbmc.translatePath(thumbnail_dest))
    except:
        log("save_poster: failed to write: %s from: %s" % \
            (xbmc.translatePath(thumbnail_dest),
             xbmc.translatePath(self.thumbnail)))
def make_move(self, move):
    piece = copy(move.piece)
    f = lambda p: p.loc not in [piece.loc, move.loc]
    self.pieces = list(filter(f, self.pieces))
    piece.loc = copy(move.loc)
    self.pieces.append(piece)
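
# `copy` here appears to be the standard library's copy.copy. A minimal
# sketch with hypothetical Piece/Move stand-ins (not from the original),
# showing why both the piece and its location are copied: mutating the new
# piece must not alias the move's objects.
from copy import copy

class Piece(object):
    def __init__(self, loc):
        self.loc = loc

class Move(object):
    def __init__(self, piece, loc):
        self.piece = piece
        self.loc = loc

move = Move(Piece([0, 0]), [3, 4])
piece = copy(move.piece)    # new Piece object; attributes still shared
piece.loc = copy(move.loc)  # new list, so move.loc is no longer aliased
piece.loc[0] = 9
print(move.loc)             # [3, 4] -- unchanged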
def restore_agent_resources(agents_dir):
    ctx.logger.info('Restoring agents in {0}'.format(
        utils.AGENTS_ROLLBACK_PATH))
    if os.path.isdir(agents_dir):
        utils.remove(agents_dir)
    utils.mkdir(agents_dir)
    utils.copy(os.path.join(utils.AGENTS_ROLLBACK_PATH, 'agents', '.'),
               agents_dir)
def _seperate_one(self, _file):
    time = self._get_time(_file)
    folder = self._get_time_folder(time)
    fd_path = path_join([self.mv_path, folder, ''])
    if not path_exists(fd_path):
        create_folder(fd_path)
    print _file, fd_path
    print copy(_file, fd_path)
def copy_resources(symbol, resource_path):
    symbolizers = Symbolizer.objects.filter(rule=symbol)
    for symbolizer in symbolizers:
        if hasattr(symbolizer, 'externalgraphicsymbolizer'):
            local_path = symbolizer.externalgraphicsymbolizer.online_resource.replace(
                gvsigol.settings.MEDIA_URL, '')
            file_name = local_path.split('/')[-1]
            absolute_path = gvsigol.settings.MEDIA_ROOT + local_path
            utils.copy(absolute_path, resource_path + file_name)
def batch_processing(sc):
    try:
        utils.copy("HDFS/new/", "HDFS/master/")
        texts = sc.textFile("HDFS/master/*/*")
        if texts:
            word_count = get_word_count(texts)
            utils.save_to_mongo(word_count, database="kschool",
                                collection="batch_view")
    except OSError:
        print "New directory is empty..."
def addScript(phoneDir, updateRoot, script):
    print("If you want to run a script before recursively copying the /"+phoneDir)
    print("folder to your phone, drag it to this window. Otherwise press ENTER")
    response = raw_input("Script: ")
    if response != '':
        copy(response, updateRoot)
        scriptName = response.split(de())[-1]
        print "Setting up "+scriptName+" to run before copying /"+phoneDir+"...",
        script.write("run_program PACKAGE:"+scriptName+"\n")
        print " Done!"
def archive_diagnostics(archive_dir, time):
    print('archive obs space diagnostics')
    mkdir(archive_dir)
    fout = archive_dir + time.strftime('/%Y-%m-%d_%H:%M_obs_seq.final')
    copy(cluster.dartrundir+'/obs_seq.final', fout)
    print(fout, 'saved.')

    try:
        copy(cluster.dartrundir+'/obs_coords.pkl',
             archive_dir+'/obs_coords.pkl')
    except Exception as e:
        warnings.warn(str(e))
def create_solution_src_file(problem, solution):
    for dirpath, dirnames, filenames in os.walk(utils.Templates.Source.dir()):
        for file_name in filenames:
            file_ext = file_name.split('.')[-1]
            if file_ext in solution:
                src_file = problem.name + '.' + file_ext
                orig = os.path.join(utils.Templates.Source.dir(), file_name)
                dest = os.path.join(problem.full_dir, src_file)
                utils.copy(orig, dest)
    utils.warning("Don't forget to generate the problem's solutions.")
def copy_libs(self, destination):
    g_log.debug("Copying libs to %s" % destination)
    if self._platform == utils.Platform.Linux or self._platform == utils.Platform.OSX:
        wildcard = '*.a'
    elif self._platform == utils.Platform.Windows:
        wildcard = '*.lib'
    else:
        g_log.error("Unsupported platform %s" % sys.platform)
        sys.exit(-1)
    libs_location = os.path.join(self._boost_path, 'stage/lib')
    utils.copy(libs_location, destination, wildcard)
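
# Here `utils.copy` takes a third wildcard argument, unlike the two-argument
# uses elsewhere in this collection. A minimal sketch of such a wildcard-aware
# variant, assuming it copies every match from the source directory:
import glob
import os
import shutil

def copy(src_dir, dest_dir, wildcard='*'):
    """Copy every file in src_dir matching wildcard into dest_dir."""
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    for path in glob.glob(os.path.join(src_dir, wildcard)):
        shutil.copy(path, dest_dir)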
def save_fanart(self):
    cached_fanart = xbmc.getCacheThumbName(self.fanart).replace('.tbn', '')
    cached_fanart = "special://profile/Thumbnails/%s/%s.jpg" % (
        cached_fanart[0], cached_fanart)
    fanart_dest = utils.join(self.nfo_path, 'fanart.jpg')
    try:
        utils.copy(xbmc.translatePath(cached_fanart),
                   xbmc.translatePath(fanart_dest))
        log("save_fanart: wrote: %s from: %s" % \
            (xbmc.translatePath(fanart_dest), xbmc.translatePath(cached_fanart)))
    except:
        log("save_fanart: failed to write: %s from: %s" % \
            (xbmc.translatePath(fanart_dest), xbmc.translatePath(cached_fanart)))
def splitData(name, inputDir, outputDir, trainOutputDir, testOutputDir):
    """Split the various outputs created from the image files in inputDir,
    which reside in outputDir, between trainOutputDir and testOutputDir,
    reorganizing them in the process.

    With an input file named *.mhd, the following outputs are moved into the
    following subdirectories in the destination folder:
    - *.mha: images
    - *_zslab.mha: images (also split into PNG slices)
    - *_zslab_points.mha: points
    - *_expert.mha: expert
    - *_zslab_expert.mha: expert (also split into PNG slices)

    """
    # Process files
    printSectionHeader('Splitting %s data into training and testing' % name)
    mhdFiles = glob.glob(os.path.join(inputDir, "*", "*.mhd"))
    for i, mhdFile in enumerate(mhdFiles):
        print("\n%s file %d/%d : %s" % (name, i + 1, len(mhdFiles), mhdFile))
        filePrefix = os.path.basename(os.path.splitext(mhdFile)[0])

        # Split equally for training and testing
        if i % 2 == 0:
            curOutputDir = trainOutputDir
        else:
            curOutputDir = testOutputDir

        # "suffix" sits between fileName and '.mha'
        # "dir" is a subdirectory of curOutputDir
        suffixesAndDirsForCopying = [
            ('', 'images'),               # input volume
            ('_zslab', 'images'),         # z-mip slab volume
            ('_zslab_points', 'points'),  # z-mip slab point map
            ('_expert', 'expert'),        # expert volume
            ('_zslab_expert', 'expert'),  # expert z-mip slab volume
        ]
        for suffix, dir in suffixesAndDirsForCopying:
            fileName = filePrefix + suffix + '.mha'
            utils.copy(os.path.join(outputDir, fileName),
                       os.path.join(curOutputDir, dir, fileName))

        # save slabs as pngs
        saveSlabs(
            os.path.join(curOutputDir, "images", filePrefix + "_zslab.mha"))
        saveSlabs(
            os.path.join(curOutputDir, "expert",
                         filePrefix + "_zslab_expert.mha"))
def prepare(obserr_iszero='.true.'):
    copy(cluster.scriptsdir + '/../templates/input.eval.nml',
         rundir_program + '/input.nml')
    sed_inplace(rundir_program + '/input.nml', '<n_ens>', str(int(exp.n_ens)))
    sed_inplace(rundir_program + '/input.nml', '<zero_error_obs>', obserr_iszero)
    sed_inplace(rundir_program + '/input.nml', '<horiz_dist_only>', '.false.')  # dummy
    sed_inplace(rundir_program + '/input.nml', '<vert_norm_hgt>', '5000.0')  # dummy
    append_file(rundir_program + '/input.nml',
                cluster.scriptsdir + '/../templates/obs_def_rttov.VIS.nml')
def create_icns_for_app(self, icns_path):
    icon_setting = self.get_setting('icon')
    mac_app_icon_setting = self.get_setting('mac_icon')
    icon_path = (mac_app_icon_setting.value
                 if mac_app_icon_setting.value
                 else icon_setting.value)
    if icon_path:
        icon_path = utils.path_join(self.project_dir(), icon_path)
        if not icon_path.endswith('.icns'):
            save_icns(icon_path, icns_path)
        else:
            utils.copy(icon_path, icns_path)
def datasourceToDataset(name=None, valid_split=0.2, test_split=0.2,
                        reindex_only=False, force=False):
    if name is None:
        name = 'dataset_' + utils.standardDatetime()
    basepath = datasets_path + name + '/'
    basepath_img = basepath + 'img/'

    if os.path.exists(basepath + trainfile) and not force and not reindex_only:
        return basepath

    vehicles_search_path = datasources_path + 'vehicles/*/*.png'
    nonvehicles_search_path = datasources_path + 'non-vehicles/*/*.png'
    print(vehicles_search_path, nonvehicles_search_path)
    vehicles_paths = glob.glob(vehicles_search_path)
    nonvehicles_paths = glob.glob(nonvehicles_search_path)
    print('vehicles_paths', len(vehicles_paths))
    print('nonvehicles_paths', len(nonvehicles_paths))

    paths = []
    labels = []
    for img_path in vehicles_paths:
        parts = img_path.split('/')
        filename = "_".join(parts[-2:])
        fullpath = basepath_img + filename
        if not reindex_only:
            utils.copy(img_path, fullpath)
        paths.append(fullpath)
        labels.append(1)
    for img_path in nonvehicles_paths:
        parts = img_path.split('/')
        filename = "_".join(parts[-2:])
        fullpath = basepath_img + filename
        if not reindex_only:
            utils.copy(img_path, fullpath)
        paths.append(fullpath)
        labels.append(0)

    train_paths, test_paths, train_labels, test_labels = \
        train_test_split(paths, labels, test_size=test_split)
    train_paths, valid_paths, train_labels, valid_labels = \
        train_test_split(train_paths, train_labels, test_size=valid_split)

    train_dataset = {'path': train_paths, 'y': train_labels}
    valid_dataset = {'path': valid_paths, 'y': valid_labels}
    test_dataset = {'path': test_paths, 'y': test_labels}
    saveDataset(basepath + trainfile, train_dataset)
    saveDataset(basepath + validfile, valid_dataset)
    saveDataset(basepath + testfile, test_dataset)
    return basepath
def symlink(base_dir, target_dir, fn):
    base_path = os.path.join(base_dir, fn)
    target_path = os.path.join(target_dir, fn)
    if not os.path.exists(base_path):
        raise FreelineException('file missing: {}'.format(base_path),
                                'Maybe you should sync freeline repo')
    if os.path.exists(target_path):
        os.remove(target_path)
    if is_windows_system():
        copy(base_path, target_path)
    else:
        os.symlink(base_path, target_path)
def get_ready(self, lot, pilot, name, clobber=False):
    super(Task, self).get_ready(lot, pilot, name, clobber)

    # Ready the inputs
    self.inputs = copy(self._inputs())
    for input in self.get_all_inputs():
        input.get_ready(self.get_lot(), self.get_pilot(), 'poop',
                        self.get_clobber())

    # Ready the outputs
    self.outputs = copy(self._outputs())
    for output in self.get_all_outputs():
        output.get_ready(self.get_lot(), self.get_pilot(), 'poop',
                         self.get_clobber())
def copy_files_to_project_folder(self):
    old_dir = CWD
    os.chdir(self.project_dir())
    self.logger.info(u'Copying files to {}'.format(self.project_dir()))
    for sgroup in self.settings['setting_groups']:
        for setting in sgroup.values():
            if setting.copy and setting.type == 'file' and setting.value:
                f_path = setting.value.replace(self.project_dir(), '')
                if os.path.isabs(f_path):
                    try:
                        utils.copy(setting.value, self.project_dir())
                        self.logger.info(u'Copying file {} to {}'.format(
                            setting.value, self.project_dir()))
                    except shutil.Error as e:  # same file warning
                        self.logger.warning(u'Warning: {}'.format(e))
                    finally:
                        setting.value = os.path.basename(setting.value)
    os.chdir(old_dir)
def is_freeway(self, start, end):
    locations = []
    loc = copy(start)

    def advance():
        loc.x += sign(end.x - loc.x)
        loc.y += sign(end.y - loc.y)

    advance()
    while loc.x != end.x or loc.y != end.y:
        locations.append(copy(loc))
        advance()
    pieces = set([tuple(p.loc) for p in self.pieces])
    locations = set([tuple(l) for l in locations])
    return pieces.isdisjoint(locations)
def send_to_destination(destination, groups, move=False, dry_run=False,
                        verbose=False):
    if not dry_run:
        utils.mkdir_p(destination)
    for day, pics in groups.iteritems():
        day_dir = os.path.join(destination, day)
        if not dry_run:
            utils.mkdir_p(day_dir)
        for pic in pics:
            dst_file = os.path.join(day_dir, os.path.basename(pic))
            if move:
                if verbose or dry_run:
                    print 'Moving {} to {}'.format(pic, dst_file)
                if not dry_run:
                    utils.move(pic, dst_file)
            else:
                if verbose or dry_run:
                    print 'Copying {} to {}'.format(pic, dst_file)
                if not dry_run:
                    utils.copy(pic, dst_file)
def solve_surface_tension(self):
    for k in range(self.dim):
        self.calc_n(k, self.level_set.phi)
        self.calc_kappa(k, self.level_set.phi)
        self.strategy.d = k
        start1 = time.perf_counter()
        self.poisson_solver.full_reinitialize(self.strategy)
        end1 = time.perf_counter()
        start2 = time.perf_counter()
        self.poisson_solver.solve(self.poisson_solve_iterations,
                                  self.simulator.verbose)
        end2 = time.perf_counter()
        print(
            f'\033[33msolve surface tension ({k}), init cost {end1 - start1}s, '
            f'solve cost {end2 - start2}s\033[0m'
        )
        utils.copy(self.poisson_solver.x, self.simulator.velocity[k])
def draw(field, piece, piecePos):
    s = '.OX0'
    fieldCopy = copy(field)
    for row in range(len(piece)):
        for col in range(len(piece[row])):
            fieldCopy[row+piecePos[1]][col+piecePos[0]] = 1
    for row in fieldCopy:
        for cell in row:
            sys.stdout.write('%s' % s[cell])
        sys.stdout.write('\n')
    sys.stdout.flush()
def take_action(
    old_state: types.GameState,
    action: types.Action,
) -> t.Optional[types.GameState]:
    """
    Returns the new game state, or None if the action was invalid.

    This method is expensive because it makes a copy of the game state
    so it doesn't have to mutate it. It should not be called too often
    in simulations.
    """
    new_state = utils.copy(old_state)
    return take_action_mut(new_state, action)
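
# In this snippet `utils.copy` duplicates an in-memory object rather than a
# file, and the docstring's cost warning matches deep-copy semantics. A
# minimal sketch, assuming it wraps the standard library:
import copy as _copy
import typing as t

T = t.TypeVar('T')

def copy(obj: T) -> T:
    """Return a deep copy so callers can mutate the result freely."""
    return _copy.deepcopy(obj)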
def recycle_output():
    update_vars = ['U', 'V', 'T', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE',
                   'QRAIN', 'U10', 'V10', 'T2', 'Q2', 'TSK', 'PSFC', 'CLDFRA']
    updates = ','.join(update_vars)

    print('recycle DART output to be used as input')
    for iens in range(1, exp.n_ens+1):
        dart_output = cluster.dartrundir+'/filter_restart_d01.'+str(iens).zfill(4)
        dart_input = cluster.dartrundir+'/advance_temp'+str(iens)+'/wrfout_d01'

        #print('check for non-monotonic vertical pressure')

        # convert link to file in order to be able to update the content
        if os.path.islink(dart_input):
            l = os.readlink(dart_input)
            os.remove(dart_input)
            copy(l, dart_input)

        # print('move DART output to input: '+dart_output+' -> '+dart_input)
        # os.rename(dart_output, dart_input)  # probably doesn't work

        print('updating', updates, 'in', dart_input, 'from', dart_output)
        os.system(cluster.ncks+' -A -v '+updates+' '+dart_output+' '+dart_input)
def run(iens, begin, end, hist_interval=5, radt=5, archive=True):
    """
    Args:
        archive (bool): if True, write to the archivedir of the experiment;
            if False, write to the WRF run directory
    """
    rundir = cluster.wrf_rundir(iens)
    print(rundir)
    copy(cluster.namelist, rundir + '/namelist.input')

    sed_inplace(rundir + '/namelist.input', '<dx>', str(int(exp.model_dx)))
    #sed_inplace(rundir+'/namelist.input', '<timestep>', str(int(exp.timestep)))
    sed_inplace(rundir + '/namelist.input', '<hist_interval>',
                str(int(hist_interval)))
    sed_inplace(rundir + '/namelist.input', '<radt>', str(int(radt)))

    if archive:
        archdir = cluster.archivedir() + begin.strftime('/%Y-%m-%d_%H:%M/'
                                                        + str(iens) + '/')
        os.makedirs(archdir, exist_ok=True)
    else:
        archdir = './'
    print('namelist for run from', begin, end, 'output to', archdir)
    sed_inplace(rundir + '/namelist.input', '<archivedir>', archdir)

    # set times
    for k, v in {'<y1>': '%Y', '<m1>': '%m', '<d1>': '%d',
                 '<HH1>': '%H', '<MM1>': '%M'}.items():
        sed_inplace(rundir + '/namelist.input', k, begin.strftime(v))
    for k, v in {'<y2>': '%Y', '<m2>': '%m', '<d2>': '%d',
                 '<HH2>': '%H', '<MM2>': '%M'}.items():
        sed_inplace(rundir + '/namelist.input', k, end.strftime(v))

    #########################
    if archive:
        init_dir = cluster.archivedir() \
                   + begin.strftime('/%Y-%m-%d_%H:%M/') + str(iens)
        os.makedirs(init_dir, exist_ok=True)
        try:
            print('copy wrfinput of this run to archive')
            wrfin_old = rundir + '/wrfinput_d01'
            wrfin_arch = init_dir + '/wrfinput_d01'
            copy(wrfin_old, wrfin_arch)
            print('copy namelist to archive')
            copy(rundir + '/namelist.input', init_dir + '/namelist.input')
        except Exception as e:
            warnings.warn(str(e))
def set_DART_nml(sat_channel=False, cov_loc_radius_km=32, cov_loc_vert_km=False,
                 just_prior_values=False):
    """Write input.nml for DART from a template, setting ensemble size and
    covariance localization, and append the matching RTTOV namelist."""
    cov_loc_radian = cov_loc_radius_km/earth_radius_km

    if just_prior_values:
        template = cluster.scriptsdir+'/../templates/input.eval.nml'
    else:
        template = cluster.scriptsdir+'/../templates/input.nml'
    copy(template, cluster.dartrundir+'/input.nml')

    # options are overwritten with settings
    options = {'<n_ens>': str(int(exp.n_ens)),
               '<cov_loc_radian>': str(cov_loc_radian)}

    if cov_loc_vert_km:
        vert_norm_rad = earth_radius_km*cov_loc_vert_km/cov_loc_radius_km*1000
        options['<horiz_dist_only>'] = '.false.'
        options['<vert_norm_hgt>'] = str(vert_norm_rad)
    else:
        options['<horiz_dist_only>'] = '.true.'
        options['<vert_norm_hgt>'] = '50000.0'  # dummy value

    for key, value in options.items():
        sed_inplace(cluster.dartrundir+'/input.nml', key, value)

    # input.nml for RTTOV
    if sat_channel > 0:
        if sat_channel in [1, 2, 3, 12]:  # VIS channels
            rttov_nml = cluster.scriptsdir+'/../templates/obs_def_rttov.VIS.nml'
        else:  # IR channels
            rttov_nml = cluster.scriptsdir+'/../templates/obs_def_rttov.IR.nml'
        append_file(cluster.dartrundir+'/input.nml', rttov_nml)
    else:
        # append an RTTOV segment anyway; it needs to exist regardless
        rttov_nml = cluster.scriptsdir+'/../templates/obs_def_rttov.IR.nml'
        append_file(cluster.dartrundir+'/input.nml', rttov_nml)
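
# `sed_inplace` is used throughout these DART/WRF helpers to substitute
# <placeholder> tokens in namelist templates. A minimal sketch, assuming a
# plain literal string replacement over the whole file:
def sed_inplace(filename, pattern, repl):
    """Replace every literal occurrence of pattern in filename, in place."""
    with open(filename) as f:
        content = f.read()
    with open(filename, 'w') as f:
        f.write(content.replace(pattern, repl))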
def get_tree(self):
    self.children = []
    for piece in self.pieces:
        for prey in self.pieces:
            loc = prey.loc
            move = Move(piece, loc)
            if move.is_valid(ctx=self):
                new_board = copy(self)
                new_board.make_move(move)
                self.children.append(new_board)
    for child in self.children:
        child.get_tree()
def local_setup(repo_owner, scoreboard_name, problems):
    print '[*] Start local setup'

    # Create root directory for CTF env.
    prompt_rmdir_warning(repo_owner)
    rmdir(repo_owner)
    mkdir(repo_owner)

    # Setup local scoreboard repo
    scoreboard_dir_path = os.path.join(repo_owner, scoreboard_name)
    if create_local_repo(scoreboard_dir_path):
        open(os.path.join(scoreboard_dir_path, 'score.csv'), 'w').close()

    # Setup local problems repo
    for problem in problems:
        problem_info = problems[problem]
        repo_dir_path = os.path.join(repo_owner, problem_info['repo_name'])
        if create_local_repo(repo_dir_path):
            print '[*] Copy binary'
            copy(problem_info['bin_src_path'], repo_dir_path)
            print '[*] Create flag file'
            create_flag(repo_dir_path)
            print '[*] Make Dockerfile'
            create_dockerfile(problem_info, repo_dir_path)
def archive_output(archive_stage):
    print('archiving output')
    mkdir(archive_stage)
    copy(cluster.dartrundir+'/input.nml', archive_stage+'/input.nml')

    # single members
    for iens in range(1, exp.n_ens+1):
        #savedir = archive_stage+'/'+str(iens)
        #mkdir(savedir)
        # filter_in = cluster.dartrundir+'/preassim_member_'+str(iens).zfill(4)+'.nc'
        filter_out = cluster.dartrundir+'/filter_restart_d01.'+str(iens).zfill(4)
        copy(filter_out, archive_stage+'/filter_restart_d01.'+str(iens).zfill(4))

    # copy mean and sd to archive
    for f in ['output_mean.nc', 'output_sd.nc']:
        copy(cluster.dartrundir+'/'+f, archive_stage+'/'+f)
#!/usr/bin/env python
# @author Connor Lange
#
# Copies all of the current Python source files to the sandbox
# and optionally publishes them to dist
from utils import copy  # user module
from os import path

copy("*.py", path.normpath("../test/testingsandbox"))
response = raw_input("Publish? (y/n): ")
if response == 'y':
    copy("*.py", path.normpath("../../dist/"))
def backup_agent_resources(agents_dir):
    ctx.logger.info('Backing up agents in {0}...'.format(agents_dir))
    if not os.path.isdir(utils.AGENTS_ROLLBACK_PATH):
        utils.mkdir(utils.AGENTS_ROLLBACK_PATH)
    utils.copy(agents_dir, utils.AGENTS_ROLLBACK_PATH)
targetDir = 'EffekseerForDXLib_143_320a/'
dxlibDir = 'DXLib_VC/'
effekseerDir = '../Effekseer/'
effekseerVSDir = effekseerDir + 'EffekseerRuntime143/Compiled/'

utils.cdToScript()
utils.rmdir(targetDir)
utils.mkdir(targetDir)
utils.copytree('docs/', targetDir+'Help/')
utils.copytree(dxlibDir+'プロジェクトに追加すべきファイル_VC用/',
               targetDir+'プロジェクトに追加すべきファイル_VC用/')
utils.copy(effekseerVSDir+'include/Effekseer.h',
           targetDir+'プロジェクトに追加すべきファイル_VC用/')
utils.copy(effekseerVSDir+'include/EffekseerRendererDX9.h',
           targetDir+'プロジェクトに追加すべきファイル_VC用/')
utils.copy(effekseerVSDir+'include/EffekseerRendererDX11.h',
           targetDir+'プロジェクトに追加すべきファイル_VC用/')
utils.copy(effekseerVSDir+'lib/VS2015/Debug/Effekseer.lib',
           targetDir+'プロジェクトに追加すべきファイル_VC用/Effekseer_vs2015_x86_d.lib')
utils.copy(effekseerVSDir+'lib/VS2015/Debug/EffekseerRendererDX9.lib',
           targetDir+'プロジェクトに追加すべきファイル_VC用/EffekseerRendererDX9_vs2015_x86_d.lib')
utils.copy(effekseerVSDir+'lib/VS2015/Debug/EffekseerRendererDX11.lib',
           targetDir+'プロジェクトに追加すべきファイル_VC用/EffekseerRendererDX11_vs2015_x86_d.lib')
utils.copy(effekseerVSDir+'lib/VS2015/Release/Effekseer.lib',
           targetDir+'プロジェクトに追加すべきファイル_VC用/Effekseer_vs2015_x86.lib')
utils.copy(effekseerVSDir+'lib/VS2015/Release/EffekseerRendererDX9.lib',
           targetDir+'プロジェクトに追加すべきファイル_VC用/EffekseerRendererDX9_vs2015_x86.lib')
utils.copy(effekseerVSDir+'lib/VS2015/Release/EffekseerRendererDX11.lib',
           targetDir+'プロジェクトに追加すべきファイル_VC用/EffekseerRendererDX11_vs2015_x86.lib')
utils.copy(effekseerVSDir+'lib/VS2015WIN64/Debug/Effekseer.lib',
           targetDir+'プロジェクトに追加すべきファイル_VC用/Effekseer_vs2015_x64_d.lib')
utils.copy(effekseerVSDir+'lib/VS2015WIN64/Debug/EffekseerRendererDX9.lib',
           targetDir+'プロジェクトに追加すべきファイル_VC用/EffekseerRendererDX9_vs2015_x64_d.lib')
utils.copy(effekseerVSDir+'lib/VS2015WIN64/Debug/EffekseerRendererDX11.lib',
           targetDir+'プロジェクトに追加すべきファイル_VC用/EffekseerRendererDX11_vs2015_x64_d.lib')
def make_output_dirs(self):
    self.output_err = ''
    try:
        self.progress_text = 'Removing old output directory...\n'
        output_dir = utils.path_join(self.output_dir(), self.project_name())
        if os.path.exists(output_dir):
            utils.rmtree(output_dir, ignore_errors=True)

        temp_dir = utils.path_join(TEMP_DIR, 'webexectemp')
        if os.path.exists(temp_dir):
            utils.rmtree(temp_dir, ignore_errors=True)

        self.progress_text = 'Making new directories...\n'
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        os.makedirs(temp_dir)

        self.copy_files_to_project_folder()

        json_file = utils.path_join(self.project_dir(), 'package.json')
        global_json = utils.get_data_file_path('files/global.json')

        if self.output_package_json:
            with codecs.open(json_file, 'w+', encoding='utf-8') as f:
                f.write(self.generate_json())

        with codecs.open(global_json, 'w+', encoding='utf-8') as f:
            f.write(self.generate_json(global_json=True))

        zip_file = utils.path_join(temp_dir, self.project_name()+'.nw')
        app_nw_folder = utils.path_join(temp_dir, self.project_name()+'.nwf')

        utils.copytree(self.project_dir(), app_nw_folder,
                       ignore=shutil.ignore_patterns(output_dir))
        zip_files(zip_file, self.project_dir(), exclude_paths=[output_dir])

        for ex_setting in self.settings['export_settings'].values():
            if ex_setting.value:
                self.progress_text = '\n'
                name = ex_setting.display_name
                self.progress_text = u'Making files for {}...'.format(name)
                export_dest = utils.path_join(output_dir, ex_setting.name)
                versions = re.findall(r'(\d+)\.(\d+)\.(\d+)',
                                      self.selected_version())[0]
                minor = int(versions[1])
                if minor >= 12:
                    export_dest = export_dest.replace('node-webkit', 'nwjs')

                if os.path.exists(export_dest):
                    utils.rmtree(export_dest, ignore_errors=True)

                # shutil will make the directory for us
                utils.copytree(get_data_path('files/'+ex_setting.name),
                               export_dest,
                               ignore=shutil.ignore_patterns('place_holder.txt'))
                utils.rmtree(get_data_path('files/'+ex_setting.name),
                             ignore_errors=True)
                self.progress_text += '.'

                if 'mac' in ex_setting.name:
                    uncomp_setting = self.get_setting('uncompressed_folder')
                    uncompressed = uncomp_setting.value
                    app_path = utils.path_join(export_dest,
                                               self.project_name()+'.app')
                    try:
                        utils.move(utils.path_join(export_dest, 'nwjs.app'),
                                   app_path)
                    except IOError:
                        utils.move(utils.path_join(export_dest,
                                                   'node-webkit.app'),
                                   app_path)

                    plist_path = utils.path_join(app_path, 'Contents',
                                                 'Info.plist')
                    plist_dict = plistlib.readPlist(plist_path)
                    plist_dict['CFBundleDisplayName'] = self.project_name()
                    plist_dict['CFBundleName'] = self.project_name()
                    version_setting = self.get_setting('version')
                    plist_dict['CFBundleShortVersionString'] = version_setting.value
                    plist_dict['CFBundleVersion'] = version_setting.value
                    plistlib.writePlist(plist_dict, plist_path)
                    self.progress_text += '.'

                    app_nw_res = utils.path_join(app_path, 'Contents',
                                                 'Resources', 'app.nw')
                    if uncompressed:
                        utils.copytree(app_nw_folder, app_nw_res)
                    else:
                        utils.copy(zip_file, app_nw_res)
                    self.create_icns_for_app(utils.path_join(app_path,
                                                             'Contents',
                                                             'Resources',
                                                             'nw.icns'))
                    self.progress_text += '.'
                else:
                    ext = ''
                    windows = False
                    if 'windows' in ex_setting.name:
                        ext = '.exe'
                        windows = True

                    nw_path = utils.path_join(export_dest,
                                              ex_setting.dest_files[0])

                    if windows:
                        self.replace_icon_in_exe(nw_path)

                    self.compress_nw(nw_path)

                    dest_binary_path = utils.path_join(export_dest,
                                                       self.project_name() + ext)
                    if 'linux' in ex_setting.name:
                        self.make_desktop_file(dest_binary_path, export_dest)

                    join_files(dest_binary_path, nw_path, zip_file)

                    sevenfivefive = (stat.S_IRWXU |
                                     stat.S_IRGRP |
                                     stat.S_IXGRP |
                                     stat.S_IROTH |
                                     stat.S_IXOTH)
                    os.chmod(dest_binary_path, sevenfivefive)

                    self.progress_text += '.'

                    if os.path.exists(nw_path):
                        os.remove(nw_path)
    except Exception:
        error = u''.join([unicode(x) for x in
                          traceback.format_exception(sys.exc_info()[0],
                                                     sys.exc_info()[1],
                                                     sys.exc_info()[2])])
        self.logger.error(error)
        self.output_err += error
    finally:
        utils.rmtree(temp_dir, ignore_errors=True)
def splitControlTumorData():
    # Input data directories
    controlInputDir = os.path.join(caffe_proj_root, "Controls")
    tumorInputDir = os.path.join(caffe_proj_root, "LargeTumor")

    # Output data directories
    controlOutputDir = os.path.join(hardDrive_proj_root, "controls")
    tumorOutputDir = os.path.join(hardDrive_proj_root, "tumors")
    trainOutputDir = os.path.join(hardDrive_proj_root, "training")
    testOutputDir = os.path.join(hardDrive_proj_root, "testing")

    # Sanity checks
    if not os.path.exists(trainOutputDir):
        os.makedirs(trainOutputDir)
    if not os.path.exists(testOutputDir):
        os.makedirs(testOutputDir)

    # Process control files
    printSectionHeader('Splitting control data into training and testing')
    controlMhdFiles = glob.glob(os.path.join(controlInputDir, "*", "*.mhd"))
    i = 0
    for mhdFile in controlMhdFiles:
        print("\ncontrol file %d/%d : %s"
              % (i + 1, len(controlMhdFiles), mhdFile))
        filePrefix = os.path.basename(os.path.splitext(mhdFile)[0])

        # Split equally for training and testing
        if i % 2 == 0:
            curOutputDir = trainOutputDir
        else:
            curOutputDir = testOutputDir

        # copy input volume
        utils.copy(os.path.join(controlOutputDir, filePrefix + ".mha"),
                   os.path.join(curOutputDir, "images", filePrefix + ".mha"))
        # copy z-mip slab volume
        utils.copy(os.path.join(controlOutputDir, filePrefix + "_zslab.mha"),
                   os.path.join(curOutputDir, "images",
                                filePrefix + "_zslab.mha"))
        # copy z-mip slab point map
        utils.copy(os.path.join(controlOutputDir,
                                filePrefix + "_zslab_points.mha"),
                   os.path.join(curOutputDir, "points",
                                filePrefix + "_zslab_points.mha"))
        # copy expert volume
        utils.copy(os.path.join(controlOutputDir, filePrefix + "_expert.mha"),
                   os.path.join(curOutputDir, "expert",
                                filePrefix + "_expert.mha"))
        # copy expert z-mip slab volume
        utils.copy(os.path.join(controlOutputDir,
                                filePrefix + "_zslab_expert.mha"),
                   os.path.join(curOutputDir, "expert",
                                filePrefix + "_zslab_expert.mha"))

        # save slabs as pngs
        saveSlabs([
            os.path.join(curOutputDir, "images", filePrefix + "_zslab.mha"),
            os.path.join(curOutputDir, "expert",
                         filePrefix + "_zslab_expert.mha")
        ])
        i += 1

    # Process tumor files
    printSectionHeader('Splitting tumor data into training and testing')
    tumorMhdFiles = glob.glob(os.path.join(tumorInputDir, "*", "*.mhd"))
    i = 0
    for mhdFile in tumorMhdFiles:
        print("\ntumor file %d/%d : %s" % (i + 1, len(tumorMhdFiles), mhdFile))
        filePrefix = os.path.basename(os.path.splitext(mhdFile)[0])

        # Split equally for training and testing
        if i % 2 == 0:
            curOutputDir = trainOutputDir
        else:
            curOutputDir = testOutputDir

        # copy input volume
        utils.copy(os.path.join(tumorOutputDir, filePrefix + ".mha"),
                   os.path.join(curOutputDir, "images", filePrefix + ".mha"))
        # copy z-mip slab volume
        utils.copy(os.path.join(tumorOutputDir, filePrefix + "_zslab.mha"),
                   os.path.join(curOutputDir, "images",
                                filePrefix + "_zslab.mha"))
        # copy z-mip slab point map
        utils.copy(os.path.join(tumorOutputDir,
                                filePrefix + "_zslab_points.mha"),
                   os.path.join(curOutputDir, "points",
                                filePrefix + "_zslab_points.mha"))
        # copy expert volume
        utils.copy(os.path.join(tumorOutputDir, filePrefix + "_expert.mha"),
                   os.path.join(curOutputDir, "expert",
                                filePrefix + "_expert.mha"))
        # copy expert z-mip slab volume
        utils.copy(os.path.join(tumorOutputDir,
                                filePrefix + "_zslab_expert.mha"),
                   os.path.join(curOutputDir, "expert",
                                filePrefix + "_zslab_expert.mha"))

        # save slabs as pngs
        saveSlabs([
            os.path.join(curOutputDir, "images", filePrefix + "_zslab.mha"),
            os.path.join(curOutputDir, "expert",
                         filePrefix + "_zslab_expert.mha")
        ])
        i += 1
""" LOG.info('Connecting to the database') connection = pymysql.connect(host=hostname, port=port, user='******', passwd='skynet', db='skynet') cursor = connection.cursor() try: cursor.execute("SELECT max(parameterNumber) FROM " + parameters) data = cursor.fetchone() return data finally: cursor.close() connection.close() try: utils.copy(PARAMETERS_FROM, PARAMETERS_TO) json_object = utils.read_json(PARAMETERS_FROM) dictionary = json_object[0] # Get the hostname HOSTNAME = dictionary['db-hostname'] # Build the constants we need TABLE_STEM = dictionary['table-stem'] PARAMETERS = TABLE_STEM + '_parameters' PORT_INT = int(dictionary['db-port']) PORT_STR = dictionary['db-port'] max_parameter_number = get_parameter_numbers(PARAMETERS, HOSTNAME, PORT_INT)[0] LOG.info('Max parameter number = %(max_parameter_number)s' % {'max_parameter_number': str(max_parameter_number)})
def copy_headers(self, destination):
    g_log.debug("Copying headers to %s" % destination)
    headers_location = os.path.join(self._boost_path, 'boost')
    utils.copy(headers_location, os.path.join(destination, 'boost'))
print 'moving artists'
shutil.move(os.path.join(music_dir, 'artists'),
            os.path.join(output_music_dir, 'artists'))
print 'moving compilations'
shutil.move(os.path.join(music_dir, 'compilations'),
            os.path.join(output_music_dir, 'compilations'))
print 'moving corrupt'
shutil.move(os.path.join(music_dir, 'corrupt'),
            os.path.join(output_music_dir, 'corrupt'))
print 'moving other'
shutil.move(os.path.join(music_dir, 'other'),
            os.path.join(output_music_dir, 'other'))

if copy_others:
    for other_path in other_paths:
        extension = utils.extension(other_path)
        (basename, filename) = os.path.split(other_path)
        music_file_output = os.path.join(output_music_dir, 'other', extension,
                                         basename[len(music_dir)+1:], filename)
        music_file_output = music_file_output.replace(
            '/other/%s/other/%s' % (extension, extension),
            '/other/%s' % (extension,))
        utils.copy(other_path, music_file_output)
        print 'output: %s' % (music_file_output,)

corrupt_music_paths = []
missing_tags_paths = []
guessed_missing_tags_paths = []

#music_paths = list(reversed(music_paths))
#music_paths = music_paths[:30]

for (i, music_file) in enumerate(music_paths):
    retry = 5
    while True:
        print ''