def submit_single(self, group_name):
    """Build a SLURM batch script for one group and submit it."""
    cmd = """#!/bin/bash
#SBATCH --job-name={job_name}
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=1
#SBATCH --mem-per-cpu=5000
#SBATCH --output={log}/{group_name}.log
#SBATCH --error={err}/{group_name}.log
#SBATCH --account=pi-lgrandi
#SBATCH --partition={partition}
#SBATCH --qos={qos}

export PATH=/project/lgrandi/anaconda3/bin:$PATH
export PROCESSING_DIR={tmp}

cd {cwd}
source activate {conda_env}
python {script} {indir} {outdir} {log}/{group_name}_id_list.txt
rm -rf ${{PROCESSING_DIR}}
"""
    y = cmd.format(
        job_name='submit{:02}'.format(self.id),
        group_name=group_name,
        log=self.log,  # stdout and stderr logs share the same directory
        err=self.log,
        tmp=os.path.join(os.getcwd(), 'tmp'),
        cwd=os.getcwd(),
        partition=self.config['MIDWAYSUBMIT']['partition'],
        qos=self.config['MIDWAYSUBMIT']['qos'],
        conda_env=self.config['BASICS']['conda_env'],
        indir=self.config['PROCESSING']['input'],
        outdir=os.path.join(self.head_directory, self.config['BASICS']['name']),
        script=self.config['MIDWAYSUBMIT']['script'],
    )
    submit_job(y)
    self.y = y     # keep the last submitted script for inspection
    self.id += 1   # bump the counter so the next job gets a unique name
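
# submit_job() is called above but not defined in this section. The sketch
# below is a hypothetical minimal implementation, assuming the rendered batch
# script is written to a temporary file and handed to sbatch; the repo's real
# helper may instead pipe the script to sbatch on stdin or clean up the file.
import subprocess
import tempfile

def submit_job(script):
    """Write a SLURM batch script to a temp file and submit it via sbatch.

    Hypothetical sketch only; see the note above.
    """
    with tempfile.NamedTemporaryFile('w', suffix='.sbatch', delete=False) as f:
        f.write(script)
        path = f.name
    subprocess.run(['sbatch', path], check=True)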
        'commissioning', 'pmttrip', 'trip', '_pmttrip', 'source_opening',
    ],
)
datasets = hax.cuts.selection(datasets, datasets['source__type'] == 'none',
                              'Source in place')
datasets = hax.cuts.selection(datasets, datasets['location'] != '',
                              'Processed data available')
run_numbers = datasets['number'].values
dataset_names = datasets['name']
print('Total of %d datasets' % len(run_numbers))


def check_queue():
    """Return the line count squeue reports for this user on the partition."""
    command = 'squeue -u jpienaar --partition=%s | wc -l' % queue_name
    var = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
    (var, err) = var.communicate()
    var = int(str(var, 'utf-8'))
    return var


# For every run, make and submit the script
for dataset in dataset_names[8000:]:
    # Throttle submission: wait while more than 30 jobs sit in the queue
    while check_queue() > 30:
        print("Jobs in queue: ", check_queue())
        time.sleep(60)
    y = x.format(run=dataset, queue=queue_name)
    submit_job(y)

# Check your jobs with: 'squeue -u <username>'
# Check the number of submitted jobs with 'squeue -u <username> | wc -l'
# (note: the count is off by +2)
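
# A possible tightening of check_queue(), assuming the standard squeue
# -h/--noheader flag: suppressing the header line avoids the off-by count
# noted above, and subprocess.check_output is simpler than a manual
# Popen/communicate pair. Sketch only; the function name and parameters
# (user, partition) are illustrative, not from the original script.
def check_queue_noheader(user, partition):
    """Count this user's jobs on a partition, one squeue line per job."""
    out = subprocess.check_output(
        ['squeue', '-h', '-u', user, '--partition', partition])
    # With -h there is no header line to correct for.
    return len(out.splitlines())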