Example #1
def trivial_partitioning(in_task, sdac=False):
    """
    Implements the trival PDB construction: Every variable that
    is mentioned in a primary goal, or in the trigger of a switched
    constraint if the problem has secondary goals, becomes a singleton
    pattern.
    Returns the canonical PDB heuristic over the resulting set.
    """
    import logging

    from heuristics.projections import project_hybrid_over_vars
    from heuristics.pdb.pattern import Table
    from heuristics.pdb import Canonical_Heuristic_Function

    logging.info('PDB construction: trivial partitioning')
    goal_vars = set()
    for x, v in in_task.Gp:
        goal_vars.add(x)
    if len(in_task.Gs) > 0:
        for t, c in in_task.lp.constraints:
            for x, v in t:
                goal_vars.add(x)

    pattern_collection = []
    for k, var_num in enumerate(goal_vars):
        sig = [var_num]
        logging.info('Pattern #{0}: {1}, {2}'.format(
            k, sig, in_task.task.state_vars[var_num].name))
        p_k = Table(sig)
        p_k.build_relevant_action_set(in_task.actions)
        projected_task, vars_maps, actions_maps = project_hybrid_over_vars(
            in_task, sig)
        projected_task.sdac_abstraction = sdac
        p_k.populate(projected_task, vars_maps, actions_maps)
        if p_k.max_value == 0:
            logging.info('max value is 0, pattern rejected')
            continue
        pattern_collection.append(p_k)

    h = Canonical_Heuristic_Function(in_task, pattern_collection)
    return h
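
A minimal usage sketch, assuming a HybridTask instance `task` constructed
elsewhere (model/generic/hybrid/task.py); the `evaluate` call mirrors how
iPDB below queries the heuristic on the primal initial state:

def example_trivial_pdb(task):
    import logging
    # Build the trivial pattern collection and wrap it in the canonical
    # heuristic (see trivial_partitioning above).
    h = trivial_partitioning(task)
    # Canonical_Heuristic_Function.evaluate returns a (value, extra) pair,
    # as used in iPDB below.
    h_s0, _ = h.evaluate(task.prim_s0)
    logging.info('h(s0) = {0}'.format(h_s0))
    return h_s0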

def iPDB( in_task, max_num_entries, sdac = False ) :
	from heuristics.projections	import project_hybrid_over_vars
	from heuristics.pdb.pattern	import Table
	from heuristics.pdb		import Canonical_Heuristic_Function
	from search.a_star		import astar_state_sampling
	from copy			import copy

	import logging
	import math
	import sys

	# TIMER_FUN, select_initial_patterns and evaluate_over_witnesses are
	# assumed to be defined in the enclosing module.
	t0 = TIMER_FUN()
	pattern_signatures, pattern_num_entries = select_initial_patterns(in_task)
	
	total_num_entries = sum(pattern_num_entries)

	if total_num_entries > max_num_entries :
		logging.info( 'Num entries ({0}) after initial pattern selection exceeds the limit ({1})!'.format( total_num_entries, max_num_entries ) )
		sys.exit(1)

	logging.info( 'PDB construction: iPDB (Haslum, 2007)' )

	pattern_collection = []
	new_pattern_signature_list = []
	for k, pattern_signature in enumerate( pattern_signatures ) :
		logging.info( 'Pattern #{0}, signature size: {1}, # entries: {2}'.format( k, len(pattern_signature), pattern_num_entries[k] ) )
		p_var_names = [ in_task.task.state_vars[i].name for i in pattern_signature ]
		logging.info( 'Pattern #{0}: {1}, {2}'.format( k, pattern_signature, p_var_names ) )
		p_k = Table( pattern_signature )
		p_k.build_relevant_action_set( in_task.actions )
		projected_task, vars_maps, actions_maps = project_hybrid_over_vars( in_task, pattern_signature )
		
		projected_task.sdac_abstraction = sdac
		
		p_k.populate( projected_task, vars_maps, actions_maps )
		
		if p_k.max_value == 0 :
			logging.info( 'Pattern with signature {0} rejected, max h^P() is 0'.format( pattern_signature ) )
			#pattern_collection.append(None)
			continue
		pattern_collection.append( p_k )
		new_pattern_signature_list.append( pattern_signature )
	pattern_signatures = new_pattern_signature_list

	logging.info( 'Number of initial patterns: {0}'.format( len(pattern_collection) ) )

	h = Canonical_Heuristic_Function( in_task, [p for p in pattern_collection if p is not None] )

	logging.info( '# of additive subsets in heuristic: {0}'.format( len(h.additive_subsets) ) )		

	h_s0, _ = h.evaluate( in_task.prim_s0 )
	logging.info( 'h(s0) = {0}'.format( h_s0 ) )
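	# Sample 'witness' states with A* up to cost bound 2*h(s0); candidate
	# pattern extensions are scored against these states below.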
	witnesses = astar_state_sampling( in_task, h, math.floor(h_s0) * 2, 100 )
	logging.info( 'Witnesses collected: {0}'.format( len(witnesses) ) )
	
	cutoff_value = h_s0 * 1.1 # minimum required improvement is 10%

	task_variables = set( range(0, len(in_task.task.state_vars)) )

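	# Hill-climbing over pattern collections (Haslum et al., 2007):
	# repeatedly try to extend each pattern with one more causally relevant
	# variable, keep the best-scoring extension, and stop when no extension
	# yields a positive score over the witness states.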
	while True :
		candidates = []
		# for each pattern
		for k, pattern_signature in enumerate(pattern_signatures) :
			if pattern_collection[k] is None : continue
			pattern_signature = set(pattern_signature)
			# initial candidates for pattern extension
			ext_0 = task_variables - pattern_signature
			if len(ext_0) == 0 : continue
			# filter causally disconnected variables
			ext_1 = ext_0 & pattern_collection[k].relevant_vars
			if len(ext_1) == 0 : continue
			logging.info( 'Candidates for pattern extension: {0}'.format( ext_1 ) )
			for x in ext_1 :
				# check space limits
				new_pattern_size = pattern_num_entries[k] * in_task.task.state_vars[x].domain_size()
				new_database_size = total_num_entries + new_pattern_size - pattern_num_entries[k]
				if new_database_size > max_num_entries : # pattern becomes too big
					logging.info( 'Space limit exceeded: extending pattern {0} with variable {1} would require {2} total entries'.format( k, x, new_database_size ) )
					continue
				new_pattern_signature = copy(pattern_signature)
				new_pattern_signature.add(x)
				new_pattern = Table( new_pattern_signature )
				new_pattern.build_relevant_action_set( in_task.actions )
				projected_task, vars_maps, actions_maps = project_hybrid_over_vars( in_task, new_pattern_signature )
				
				projected_task.sdac_abstraction = sdac
				
				new_pattern.populate_informed( projected_task, vars_maps, actions_maps, Canonical_Heuristic_Function( in_task, [ pattern_collection[k] ] ) )
				
				if new_pattern.max_value == 0 :
					logging.info( 'new pattern does not contribute any information to the heuristic' )
					# useless pattern
					continue
				new_pattern_collection = [ pattern for l, pattern in enumerate( pattern_collection ) if l != k ]
				new_pattern_collection.append( new_pattern )
				
				new_h = Canonical_Heuristic_Function( in_task, new_pattern_collection )
				new_h_s0, _ = new_h.evaluate( in_task.prim_s0 )
				score = evaluate_over_witnesses( new_h, witnesses )
				logging.info( 'Score: {0}'.format( score ) )
				# keep only candidates that improve the heuristic over the
				# sampled witness states
				if score > 0 :
					candidates.append( ( score, new_h_s0, k, new_pattern_signature, new_pattern_size, new_pattern, new_pattern_collection, new_h ) )
		if len(candidates) == 0 :
			logging.info( 'PDB construction finished!')
			break
		logging.info( 'New pattern added, prev. value of h(s0) = {0}'.format( h_s0 ) )
		score, h_s0, index, new_pattern_signature, new_pattern_size, new_pattern, new_pattern_collection, new_h = max(candidates)
		cutoff_value = h_s0 * 1.1
		logging.info( 'Pattern score: {0}, new value for h(s0) = {1}, cutoff = {2}'.format( score, h_s0, cutoff_value ) )
		pattern_signatures[index] = new_pattern_signature
		pattern_num_entries[index] = new_pattern_size
		pattern_collection = new_pattern_collection
		h = new_h
		witnesses = astar_state_sampling( in_task, h, h_s0 * 2, 100 )
		logging.info( 'Witnesses collected: {0}'.format( len(witnesses) ) )

	t1 = TIMER_FUN()
	logging.info( 'iPDB construction time: {0}'.format( t1 - t0 ) )
	return h
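
For reference, the canonical heuristic over a pattern collection (Haslum et
al., 2007) takes the maximum, over the maximal additive subsets of the
collection, of the sum of the individual pattern database values. A minimal
sketch of that combination, assuming each pattern table exposes a
hypothetical lookup(state) accessor (the actual Table API may differ):

def canonical_value(additive_subsets, state):
    # additive_subsets: lists of pattern tables whose values may be summed
    # admissibly, i.e. no action affects two patterns in the same subset.
    return max(sum(p.lookup(state) for p in subset)
               for subset in additive_subsets)
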
Example #3
def naive_partitioning_sdac( in_task, max_num_patterns, max_num_entries ) :
	"""
		Partitions in_task variables into as many patterns
		as possible given max_num_entries. Variables are
		assigned to patterns on a random basis

		in_task - Input hybrid task see HybridTask class in model/generic/hybrid/task.py
		max_num_patterns - Maximum number of patterns
		max_num_entries - Maximum number of entries per pattern

		returns a list of pattern databases, already populated
	"""
	logging.info( 'PDB construction: Naive partitioning' )
	variables = {}
	for index, x in enumerate(in_task.task.state_vars) :
		variables[index] = x.domain_size()	

	# a PDB with at least as many patterns as variables makes no sense
	assert max_num_patterns < len(in_task.task.state_vars)

	pattern_signatures = [ [] for i in range(max_num_patterns) ]
	pattern_num_entries = [ 0 for i in range(max_num_patterns) ]

	# add one variable to each pattern
	for k, pattern_signature in enumerate( pattern_signatures ) :
		# Choose one variable randomly from the set of variables
		x = random.choice( list(variables.keys()) )
		domain_size = variables[x]
		variables.pop( x ) # remove x
		pattern_signature.append( x )
		pattern_num_entries[ k ] = domain_size

	logging.info( '# Variables after initial allocation to patterns: {0}'.format( len( variables ) ) )
	#logging.info( 'pattern_signatures: {0}'.format(pattern_signatures) ) 
	
	# keep doing this until we don't have any variables left
	while len(variables) > 0 :
		logging.info( '# Remaining variables {0}'.format( len(variables) ) )
		x = random.choice( list(variables.keys()) )
		domain_size = variables[ x ]
		variables.pop( x ) # remove x
		
		# Find a pattern that can accommodate this variable
		allocated = None
		for k, pattern_signature in enumerate( pattern_signatures ) :
			if pattern_num_entries[ k ] * domain_size > max_num_entries :
				logging.info( 'Variable {0} does not fit in pattern {1}, current # entries is {2}, after adding {0} # entries would be {3}'.format( x, k, pattern_num_entries[k], pattern_num_entries[ k ] * domain_size ) )
				continue
			allocated = k
			pattern_signature.append( x )
			pattern_num_entries[k] *= domain_size
			break
 		
		# there was no room to allocate this variable without violating the
		# constraint on the maximum number of entries
		if allocated is None :
			logging.info( 'Variable {0} did not fit in any existing pattern, creating new pattern'.format( x ) )
			pattern_signatures.append( [ x ] )
			pattern_num_entries.append( domain_size )

	logging.info( 'All variables allocated to patterns' )
	for k, pattern_signature in enumerate( pattern_signatures ) :
		logging.info( 'Pattern #{0}, signature size: {1}, # entries: {2}'.format( k, len(pattern_signature), pattern_num_entries[k] ) )
	if len( pattern_signatures) > max_num_patterns :
		logging.info( '# of patterns {0}, requested was {1}'.format( len(pattern_signatures), max_num_patterns ) )

	logging.info( 'Projecting task over pattern variables' )
	projected_tasks = []
	for k, signature in enumerate( pattern_signatures ) :
		logging.info( 'Pattern {0}, size {1}:'.format( k, len(signature) ) )
		projected_tasks.append( project_hybrid_over_vars( in_task, signature ) )
	logging.info( 'Projection complete' )

	logging.info( 'Populating pattern databases' )
	database = []
	for k, projection_data in enumerate( projected_tasks ) :
		logging.info( 'Computing values for Pattern {0}, size {1}:'.format( k, len(pattern_signatures[k]) ) )
		p_k = Table( pattern_signatures[k] )
		p_k.build_relevant_action_set( in_task.actions )
		projected_task, vars_maps, actions_maps = projection_data
		p_k.populate_sdac( projected_task, vars_maps, actions_maps ) 
		database.append( p_k )
	logging.info( 'Pattern databases have been populated' )
	
	return Canonical_Heuristic_Function( in_task, database )
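
A minimal usage sketch, assuming a HybridTask instance `task` built
elsewhere; the parameter values are illustrative only:

def example_naive_sdac_pdb(task):
    # Hypothetical driver: allow up to 4 initial patterns with at most 10**6
    # entries each, then query the resulting canonical heuristic on the
    # primal initial state.
    h = naive_partitioning_sdac(task, max_num_patterns=4, max_num_entries=10**6)
    h_s0, _ = h.evaluate(task.prim_s0)
    return h_s0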