Example #1
def main(argv=None):
	if argv is None:
		argv = sys.argv
	options = options_desc.parse_args(argv)[0]

	# refine_all and extend_all are mutually exclusive
	assert not (options.refine_all and options.extend_all)

	pool = Pool()
	# lock all sampled partition nodes so that no other process modifies them
	needy_nodes = pool.where("isa_partition and is_sampled").multilock()

	# 1. Try to detect fake convergence
	for n in pool.where("state == 'converged'"):
		means = kmeans(n.trajectory, k=2)
		d = (means[0] - means[1]).norm2()
		# a large distance between the two cluster means hints at a bimodal distribution
		if d > 2.0 and (options.refine_all or userinput("%s has converged but appears to have a bimodal distribution.\nDo you want to refine?" % n.name, "bool")):  # TODO decide upon threshold (per coordinate?)
			refine(n, options)

	# 2. Deal with not-converged nodes
	for n in pool.where("state == 'not-converged'"):
		choice = None  # only ask the user if neither refine_all nor extend_all was given
		if not (options.refine_all or options.extend_all):
			choice = userchoice("%s has not converged. What do you want to do?" % n.name, ['_refine', '_extend', '_ignore'])
		if options.refine_all or choice == "r":
			refine(n, options)
		elif options.extend_all or choice == "e":
			extend(n)
		elif choice == "i":
			continue

	# persist all changes and release the locks taken above
	for n in needy_nodes:
		n.save()
		n.unlock()

	zgf_setup_nodes.main()
	zgf_grompp.main()
	zgf_cleanup.main()
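
The bimodality test above goes through the project's own kmeans helper and the .norm2() method of its vector type. As a rough, self-contained illustration of the same heuristic, here is a minimal sketch using plain NumPy on made-up data (the helper looks_bimodal, the toy blobs and the fixed threshold of 2.0 are all hypothetical, not part of the code shown here):

import numpy as np

def looks_bimodal(samples, threshold=2.0, n_iter=50):
	"""Crude 2-means check: True if the two cluster means end up far apart."""
	samples = np.asarray(samples, dtype=float)
	# initialize the two means at the extremes of the first coordinate
	order = np.argsort(samples[:, 0])
	means = np.array([samples[order[0]], samples[order[-1]]])
	for _ in range(n_iter):
		# assign every sample to its nearest mean
		dist = np.linalg.norm(samples[:, None, :] - means[None, :, :], axis=2)
		labels = np.argmin(dist, axis=1)
		# update both means (keep the old one if a cluster went empty)
		for k in (0, 1):
			if np.any(labels == k):
				means[k] = samples[labels == k].mean(axis=0)
	# same criterion as above: distance between the two means vs. a threshold
	return np.linalg.norm(means[0] - means[1]) > threshold

# toy usage: two well-separated Gaussian blobs in 2-D
rng = np.random.RandomState(0)
blob_a = rng.normal(0.0, 0.3, size=(200, 2))
blob_b = rng.normal(3.0, 0.3, size=(200, 2))
print(looks_bimodal(np.vstack([blob_a, blob_b])))  # expected: True
print(looks_bimodal(blob_a))                       # expected: False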
Example #2
def main():
	options = options_desc.parse_args(sys.argv)[0]

	pool = Pool()
	active_nodes = pool.where("isa_partition")

	if options.transition_level == "clusters":
		npz_file = np.load(pool.chi_mat_fn)
		chi_matrix = npz_file['matrix']
		n_clusters = npz_file['n_clusters']

		default_cluster_threshold = options.coreset_power

		# determine clusters
		# TODO this part is too cryptic
		# amount_phi[j] = index in the sorted chi column from which the entries of
		# cluster j exceed 0.5; hence (amount_phi_total - amount_phi[j]) is the
		# number of basis functions assigned to cluster j
		amount_phi = np.ones(n_clusters, dtype=np.uint64)
		amount_phi = amount_phi*len(chi_matrix)
		amount_phi_total = len(chi_matrix)

		# sort each column of chi and return the sorting indices
		arg_sort_cluster = np.argsort(chi_matrix, axis=0)
		# sort each column of chi and return the sorted chi
		# note that the last row has to be [1 ... 1]
		sort_cluster = np.sort(chi_matrix, axis=0)
		# show_cluster contains pairs [a b] where a is the row and b the column
		# of an entry of the sorted chi matrix with chi_sorted(a,b) > 0.5
		show_cluster = np.argwhere(sort_cluster > 0.5)

		# amount_phi[i] is the smallest row index x such that [x i] is in
		# show_cluster, i.e. the first position in the sorted column i whose
		# chi value exceeds 0.5
		for element in show_cluster:
			index = element[0]
			cluster = element[1]
			if amount_phi[cluster] > index:
				amount_phi[cluster] = index

		# create the cluster list; each entry is the set of node indices whose
		# phi functions belong to that cluster
		cluster = []
		for i in range(0, n_clusters):
			cluster_set = []
			for j in range(amount_phi[i], amount_phi_total):
				# (a disabled cap "if j < amount_phi[i] + 3:" once limited the set size here)
				cluster_set.append(arg_sort_cluster[j][i])
			cluster.append(cluster_set)

		for i in range(len(cluster)):
			counter = 0
			for node_index in cluster[i]:
				counter += 1
				# skip nodes whose chi value exceeds default_cluster_threshold,
				# but always keep the first options.min_nodes nodes of the cluster
				if chi_matrix[node_index][i] > default_cluster_threshold and counter > options.min_nodes:
					continue
				
				node = active_nodes[node_index]
				trajectory = node.trajectory
			
				print "-----"
				print "Generating transition nodes for node %s..."%node.name
			
				neighbour_frames = get_indices_equidist(node, options.num_tnodes)
		
				# create transition node for node_index
				for frame_number in neighbour_frames:
					print "Using frame %d as starting configuration."%frame_number
					n = Node()
					n.parent_frame_num = frame_number
					n.parent = node
					n.state = "created"
					n.extensions_counter = 0
					n.extensions_max = options.num_runs-1
					n.extensions_length = options.sampling_length
					n.sampling_length = options.sampling_length
					n.internals = trajectory.getframe(frame_number)
					n.save_mode = options.save_mode
					pool.append(n)
					n.save()
				print "%d transition nodes generated."%options.num_tnodes
				print "-----"

		zgf_setup_nodes.main()
		zgf_grompp.main()
	
		cluster_dict = {}
		for (ic,c) in enumerate(cluster):
			cluster_dict['cluster_%d'%ic] = c

		# save cluster
		np.savez(pool.analysis_dir+"core_set_cluster.npz", **cluster_dict)

	elif options.transition_level == "nodes":
		for node in active_nodes:
			trajectory = node.trajectory
			
			# TODO duplicate code... use the one above
			print "-----"
			print "Generating transition nodes for node %s..."%node.name

			neighbour_frames = get_indices_equidist(node, options.num_tnodes)

			# create transition nodes for this node
			for frame_number in neighbour_frames:
				print "Using frame %d as starting configuration."%frame_number
				n = Node()
				n.parent_frame_num = frame_number
				n.parent = node
				n.state = "created"
				n.extensions_counter = 0
				n.extensions_max = options.num_runs-1
				n.extensions_length = options.sampling_length
				n.sampling_length = options.sampling_length
				n.internals = trajectory.getframe(frame_number)
				n.save_mode = options.save_mode
				pool.append(n)
				n.save()
			print "%d transition nodes generated."%options.num_tnodes
			print "-----"

		zgf_setup_nodes.main()
		zgf_grompp.main()


	instruction_file = pool.analysis_dir+"instruction.txt"

	with open(instruction_file, "w") as f:
		f.write("{'power': %f, 'tnodes': %d, 'level': '%s', 'min_nodes': %d}" % (options.coreset_power, options.num_tnodes, options.transition_level, options.min_nodes))
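
The cluster-determination block flagged as cryptic above boils down to: for every cluster column of the chi matrix, keep the nodes whose membership value exceeds 0.5, ordered by increasing chi. Below is a minimal, self-contained NumPy sketch of that selection on a made-up chi matrix (hypothetical values; no Pool or Node objects involved, and it assumes every column has at least one entry above 0.5, which holds in the real data because the last row of the sorted chi matrix is all ones):

import numpy as np

# hypothetical chi matrix: rows = nodes (basis functions), columns = clusters;
# every row sums to 1, large entries mean strong membership
chi_matrix = np.array([
	[0.95, 0.05],
	[0.80, 0.20],
	[0.10, 0.90],
	[0.55, 0.45],
	[0.30, 0.70],
])
n_clusters = chi_matrix.shape[1]

arg_sort_cluster = np.argsort(chi_matrix, axis=0)  # node indices per column, by increasing chi
sort_cluster = np.sort(chi_matrix, axis=0)         # chi values per column, sorted ascending

cluster = []
for i in range(n_clusters):
	# first position in the sorted column whose chi value exceeds 0.5
	first = np.argmax(sort_cluster[:, i] > 0.5)
	# node indices belonging to cluster i, ordered by increasing chi
	cluster.append(list(arg_sort_cluster[first:, i]))

print(cluster)  # expected: [[3, 1, 0], [4, 2]]

The get_indices_equidist(node, options.num_tnodes) helper is likewise project code not shown here; presumably it picks num_tnodes equidistant frame indices along the node's trajectory, something in the spirit of np.linspace(0, n_frames - 1, num_tnodes).astype(int).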