Example #1
0
def upload(sure=False,part=None,bulk=False):

	"""
	Upload the most recent CPT and TPR file to a cluster for continuation.
	Need to re-write the step/part-specific uploader.
	"""

	serial_number()
	default_fns,default_dirs = ['makefile'],['amx']
	default_fns += [os.path.join(root,fn) for root,dirnames,fns 
		in os.walk('./amx') for fn in fns for dn in default_dirs
		if not re.match('.+\.pyc$',fn)!=None]
	default_fns = [i for i in default_fns if not re.match('.+\/amx\/docs',i)]
	last_step,part_num = detect_last()
	if part: 
		part_num = int(part)
		last_step, = [i for i in glob.glob('s%02d-*'%part_num)]
	if not last_step and not bulk: raise Exception('\n[ERROR] no steps to upload (try "bulk" instead)')
	elif last_step and not bulk:
		if not part_num: raise Exception('\n[ERROR] cannot find a part number (did you mean "bulk"?)')
		restart_fns = [last_step+'/md.part%04d.%s'%(part_num,suf) for suf in ['cpt','tpr']]
		restart_fns += [last_step+'/script-continue.sh']
		if not all([os.path.isfile(fn) for fn in restart_fns]):
			error = '[STATUS] could not find necessary upload files (part number %04d)'%part_num
			error += '\n[ERROR] upload only works if there is a TPR for the last CPT part'
			error += "\n[ERROR] missing: %s"%str([fn for fn in restart_fns if not os.path.isfile(fn)])
			raise Exception(error)
		with open('uploads.txt','w') as fp: 
			for fn in restart_fns+default_fns: fp.write(fn+'\n')
	sshname = raw_input('[QUESTION] enter ssh alias for destination machine: ')
	subfolder = raw_input('[QUESTION] enter subfolder on remote machine (default is ~/): ')
	cwd = os.path.basename(os.path.abspath(os.getcwd()))
	if not sure:
		cmd = 'rsync -%s%s ../%s %s:~/%s/%s'%(
			'avin',' --files-from=uploads.txt' if not bulk else ' --exclude=.git',cwd,
			sshname,subfolder,cwd if not bulk else '')
		p = subprocess.Popen(cmd,shell=True,cwd=os.path.abspath(os.getcwd()),executable='/bin/bash')
		log = p.communicate()
	if sure or raw_input('\n[QUESTION] continue [y/N]? ')[:1] not in 'nN':
		cmd = 'rsync -%s%s ../%s %s:~/%s/%s'%(
			'avi',' --files-from=uploads.txt' if not bulk else ' --exclude=.git',cwd,
			sshname,subfolder,cwd if not bulk else '')
		p = subprocess.Popen(cmd,shell=True,cwd=os.path.abspath(os.getcwd()),executable='/bin/bash')
		log = p.communicate()
		if not bulk: os.remove('uploads.txt')
	if p.returncode == 0 and last_step:
		with open('script-%s.log'%last_step.rstrip('/'),'a') as fp:
			destination = '%s:~/%s/%s'%(sshname,subfolder,cwd)
			ts = datetime.datetime.fromtimestamp(time.time()).strftime('%Y.%m.%d.%H%M')
			fp.write("[FUNCTION] upload () {'destination': '%s', 'time': '%s', 'sure': %s}\n"%(
				destination,ts,str(sure)))
	elif p.returncode != 0: 
		print "[STATUS] upload failure (not logged)"
		sys.exit(1)
Example #2
0
def download():

	"""
	Synchronize uploaded files according to log-uploads.
	"""

	regex_upload = '^\[FUNCTION]\s+upload\s+\(\)\s+(\{[^\}]+\})'
	last_step,part_num = detect_last()
	#----infer the log from the number of the last step
	last_step_code = re.search('^([a-z][0-9]+)-',last_step).group(1)
	last_log = [f for f in glob.glob('script-%s-*'%last_step_code)][0]
	with open(last_log) as fp: loglines = fp.readlines()
	upload_records = [i for i in loglines if re.match('^\[FUNCTION]\s+upload',i)]
	if upload_records == []: raise Exception("\n[ERROR] cannot download that which has not been uploaded")
	last_upload = upload_records[-1]
	upload_dict = eval(re.findall(regex_upload,last_upload)[0])
	destination = upload_dict['destination']
	print "[STATUS] log at %s says that this simulation is located at %s"%(last_log,destination)
	try:
		cmd = 'rsync -avin --progress %s/* ./'%destination
		print '[STATUS] running: "%s"'%cmd
		p = subprocess.Popen(cmd,shell=True,cwd=os.path.abspath(os.getcwd()))
		log = p.communicate()
		if p.returncode != 0: raise
		if raw_input('\n[QUESTION] continue [y/N]? ')[:1] not in 'nN':
			cmd = 'rsync -avi --progress %s/* ./'%destination
			print '[STATUS] running "%s"'%cmd
			p = subprocess.Popen(cmd,shell=True,cwd=os.path.abspath(os.getcwd()))
			log = p.communicate()
	except Exception as e:
		import traceback
		#---from omnicalc
		s = traceback.format_exc()
		print "[TRACE] > "+"\n[TRACE] > ".join(s.split('\n'))
		print "[ERROR] failed to find simulation"
		print "[NOTE] find the data on the remote machine via \"find ./ -name serial-%s\""%serial_number()
		sys.exit(1)
Example #3
0
def cluster(**kwargs):

	"""
	Write a cluster header according to the machine configuration.
	Note that we do not log this operation because it only changes the BASH scripts
	"""

	#---without cluster details in the machine configuration there is nothing to do
	if not 'cluster_header' in machine_configuration: 
		print '[STATUS] no cluster information'
		return
	head = machine_configuration['cluster_header']
	#---substitute each configuration value into the header wherever its
	#---uppercased key appears (the key is used as a regex pattern)
	for key,val in machine_configuration.items(): head = re.sub(key.upper(),str(val),head)
	with open('cluster-header.sh','w') as fp: fp.write(head)
	print '[STATUS] wrote cluster-header.sh'
	#---get the most recent step (possibly duplicate code from base)
	last_step,part_num = detect_last()
	if last_step:
		#---code from base.functions.write_continue_script to rewrite the continue script
		with open('amx/procedures/scripts/script-continue.sh','r') as fp: lines = fp.readlines()
		#---parse walltime of the form "HH", "HH:MM", or "HH:MM:SS" into fractional hours
		tl = [float(j) if j else 0.0 for j in re.match('^([0-9]+)\:?([0-9]+)?\:?([0-9]+)?',
			machine_configuration['walltime']).groups()]
		maxhours = tl[0]+float(tl[1])/60+float(tl[2])/60/60
		#---settings injected into the continue script's override section below
		settings = {
			'maxhours':maxhours,
			'nprocs':machine_configuration['nprocs'],
			'tpbconv':gmxpaths['tpbconv'],
			'mdrun':gmxpaths['mdrun'],
			}
		#---! how should we parse multiple modules from the machine_configuration?
		if 'modules' in machine_configuration:
			need_modules = machine_configuration['modules']
			#---accept a single module name or a list of them
			need_modules = [need_modules] if type(need_modules)==str else need_modules
			#---NOTE(review): modules are appended to head AFTER cluster-header.sh was
			#---written, so they appear only in cluster-continue.sh — confirm intended
			for m in need_modules: head += "module load %s\n"%m
		for key in ['extend','until']: 
			if key in machine_configuration: settings[key] = machine_configuration[key]
		#---! must intervene above to come up with the correct executables
		#---render settings as KEY=value lines (string values double-quoted)
		setting_text = '\n'.join([
			str(key.upper())+'='+('"' if type(val)==str else '')+str(val)+('"' if type(val)==str else '') 
			for key,val in settings.items()])
		#---replace the override marker line with the rendered settings
		lines = map(lambda x: re.sub('#---SETTINGS OVERRIDES HERE$',setting_text,x),lines)
		script_fn = 'script-continue.sh'
		#---assumes last_step carries a trailing slash (cf. the explicit '/' used
		#---for cluster_continue below) — TODO confirm against detect_last
		cont_fn = last_step+script_fn
		print '[STATUS] %swriting %s'%('over' if os.path.isfile(last_step+script_fn) else '',cont_fn)
		with open(last_step+script_fn,'w') as fp:
			for line in lines: fp.write(line)
		#---make the continue script executable (0744 = rwxr--r--)
		os.chmod(last_step+script_fn,0744)
		#---code above from base.functions.write_continue_script		
		#---prepend the cluster header (minus its shebang) to the continue script
		with open(cont_fn,'r') as fp: continue_script = fp.read()
		continue_script = re.sub('#!/bin/bash\n','',continue_script)
		cluster_continue = last_step+'/cluster-continue.sh'
		print '[STATUS] writing %s'%cluster_continue
		with open(cluster_continue,'w') as fp: fp.write(head+continue_script)
	#---for each python script in the root directory we write an equivalent cluster script
	pyscripts = glob.glob('script-*.py')
	if len(pyscripts)>0: 
		with open('cluster-header.sh','r') as fp: header = fp.read()
	for script in pyscripts:
		name = re.findall('^script-([\w-]+)\.py$',script)[0]
		with open('cluster-%s.sh'%name,'w') as fp:
			fp.write(header+'\n')
			fp.write('python script-%s.py &> log-%s\n'%(name,name))
		print '[STATUS] wrote cluster-%s.sh'%name