Example #1
def STATEMENT(current, G):
	t = tree("STATEMENT")
	s = {}
	if current.name == "ID":
		current, child, s1 = ASSIGNMENT(current, G) #make sure ASSIGNMENT is returning next(G)
		t.children.append(child)
		s.update(s1)
		return current, t, s #current should be a ;

	elif current.name == "READ":
		t.children.append(tree("READ"))
		current = next(G)
		if not current.name == "LPAREN":
			raise ParserError("READ token is not followed by a (" + getTokenLineInfo(current))
		current, child, s1 = ID_LIST(next(G), G) #should be returning a )
		t.children.append(child) #child should be the ID_LIST tree
		s.update(s1)
		if not current.name == "RPAREN":
			raise ParserError("Missing closing ) in READ statement" + getTokenLineInfo(current))
		return next(G), t, s  #next(G) should be a ;

	elif current.name == "WRITE":
		t.children.append(tree("WRITE"))
		current = next(G)
		if not current.name == "LPAREN":
			raise ParserError("WRITE token is not followed by a (" + getTokenLineInfo(current))
		current, child, s1 = EXPR_LIST(next(G), G) #should be returning a )
		t.children.append(child)
		s.update(s1)
		if not current.name == "RPAREN":
			raise ParserError("Missing closing ) in WRITE statement" + getTokenLineInfo(current))
		return next(G), t, s  #next(G) should be a ;

	else:
		raise ParserError("Inappproriate token to start a statement" + getTokenLineInfo(current))
Example #2
def main():
    MUSIC_FORMATS = ['mp3', 'flac', 'wav', 'm4a']
    yes = set(['yes','y', 'ye', ''])
    no = set(['no','n'])

    args = unparse_arguments(MUSIC_FORMATS)

    empty_dirs = find_folders_not_containing(args.path, MUSIC_FORMATS)
    for folder in empty_dirs:
        if args.tree:
            tree.tree(folder, '  ', True)
        else:
            print(folder)

    if empty_dirs:
        print("Do you want to delete listed directories? [yes/no]")
        choice = input().lower()

        if (choice in yes):
            print("Deleting dirs")
            delete_dirs(empty_dirs)
        else:
            print("You have canceled this action")
    else:
        print("Your music library is already clean")
Example #3
def init_plat_tree():

    # Create /api end point as root node
    r_api = tree("api", data = get_node_api())

    # Add /api/spb to represent side plane board
    r_spb = tree("spb", data = get_node_spb())
    r_api.addChild(r_spb)

    # Add servers /api/server[1-max]
    num = pal_get_num_slots()
    for i in range(1, num+1):
        r_server = populate_server_node(i)
        if r_server:
            r_api.addChild(r_server)

    # TODO: Need to add /api/nic to represent NIC Mezz Card

    # Add /api/spb/fruid end point
    r_temp = tree("fruid", data = get_node_fruid("spb"))
    r_spb.addChild(r_temp)

    # /api/spb/bmc end point
    r_temp = tree("bmc", data = get_node_bmc())
    r_spb.addChild(r_temp)

    # /api/spb/sensors end point
    r_temp = tree("sensors", data = get_node_sensors("spb"))
    r_spb.addChild(r_temp)

    return r_api
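The endpoint-building examples in this collection assume a small tree node type with a name, an optional data payload, and addChild/addChildren methods. A minimal sketch of that interface is below; the project's actual class presumably does more (for instance the setup() call seen in Example #33).

class tree:
    # Minimal node sketch: a name, an optional data payload, and child management.
    def __init__(self, name, data=None):
        self.name = name
        self.data = data
        self.children = []

    def addChild(self, child):
        self.children.append(child)

    def addChildren(self, children):
        for child in children:
            self.addChild(child)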
Example #4
def IDENT(current, G):
    global varName1
    t = tree('IDENT')
    if current.name == "REFERENCE":
        current = next(G)
        current.pattern = "ref-" + current.pattern
    if current.name != 'ID':
        raise ParserError("Syntax Error: Error when parsing IDENT: " \
                          + current.line)
    tmp = tree('ID')
    tmp.val = current.pattern
    t.append(tmp)
    g = ""
    try:
        g = dict_[current.pattern][0]
    except KeyError:
        pass
    gt = typeOfVar
    try:
        gt = dict_[current.pattern][1]
    except KeyError:
        pass
    dict_[current.pattern] = (g, gt) #add symbol to symbol table, will use different values later.
    varScopeDict[scope_variate] = {current.pattern:(g, gt)}
    return t, next(G)
Example #5
def FUNCTIONLST(current, G):
    global scope_variate
    t1 = tree("FUNCLIST")
    while(True):
        t = tree("FUNCTION")
        if current.name != "FUNCTION":
            break
        type = next(G)
        funcReturn = tree(type.name)
        funcReturn.val = type.pattern
        t.append(funcReturn)
        name = next(G)
        funcName = tree(name.name)
        funcName.val = name.pattern
        t.append(funcName)
        funcDict[name.pattern] = type.name
        scope_variate = name.pattern
        current = next(G)
        if current.name == "LPAREN":
            holder = False
            current = next(G)
            while(True):
                if current.name == "RPAREN":
                    break
                if current.name == "COMMA":
                    current = next(G)
                t1, current, holder = STATEMENT(current, G)
                t.append(t1)
        current = next(G)
        tt, current = PROGRAM(current, G)
        t.append(tt)
        t1.append(t)
    return t1, current
Example #6
def TERM2(current, G):
    t = tree("TERM2")
    s = {}
    # assumes sign returns useful current
    current, child, s1 = SIGN(current, G)
    t.children.append(child)
    s.update(s1)
    current, child, s1 = FACT2(current, G)
    t.children.append(child)
    s.update(s1)
    while current.name == "TIMES" or current.name == "DIVIDE" or current.name == "MODULO":
        if current.name == "TIMES":
            t.children.append(tree("TIMES"))
        elif current.name == "DIVIDE":
            t.children.append(tree("DIVIDE"))
        else:  # current.name == "MODULO"
            t.children.append(tree("MODULO"))
        current, child, s1 = SIGN(next(G), G)
        t.children.append(child)
        s.update(s1)
        # Assumes FACT2 returns useful current
        current, child, s1 = FACT2(current, G)
        t.children.append(child)
        s.update(s1)
    return current, t, s
Example #7
File: nni.py Project: JCM333/JPP
def rearrange_tree(t):
	'''Take a tree, and rearrange either a tree or a child. Return the rearranged tree.'''
	if t.size() == 3:
		return do_rearrange(t)
	lsize = t.left.size()
	rsize = t.right.size()
	if lsize < 3 and rsize < 3:
		return do_rearrange(t)
	elif lsize < 3:
		rand = random.randrange(rsize - 1)
		if rand == 0:
			return do_rearrange(t)
		else:
			return tree.tree(children=(t.left, rearrange_tree(t.right)))
	elif rsize < 3:
		rand = random.randrange(lsize - 1)
		if rand == 0:
			return do_rearrange(t)
		else:
			return tree.tree(children=(rearrange_tree(t.left), t.right))
	else:
		rand = random.randrange(lsize + rsize - 3)
		if rand == 0:
			return do_rearrange(t)
		elif rand < lsize - 1:
			return tree.tree(children=(rearrange_tree(t.left), t.right))
		else:
			return tree.tree(children=(t.left, rearrange_tree(t.right)))
Example #8
def branchnbound_search(cm):
	'''Do an exhaustive search on all possible trees in the char matrix using
	branch-and-bound methods.'''
	global best_length
	taxa = cm.taxon_set()
	outgroup = cm.get_outgroup()
	taxa.remove(outgroup)
	outgroup_tree = tree.tree(id=outgroup)

	trees = all_trees_rec_bb(taxa, cm, outgroup_tree)

	best_tree = None
	ntrees = 0
	for a_tree in trees:
		ntrees += 1
		real_tree = tree.tree(children=(outgroup_tree, a_tree))
		tree_len = real_tree.length(cm)
		if best_length is None or tree_len < best_length:
			best_tree = [real_tree]
			best_length = tree_len
		elif tree_len == best_length:
			best_tree.append(real_tree)

	print "Total trees examined: %d" % ntrees
	print "Length of best tree(s): %d" % best_length	
	print "Number of best tree(s): %d" % len(best_tree)

	return best_tree
Example #9
def IDENT(current, G):
	t = tree("IDENT")
	s = {current.pattern: None}
	if not current.name == "ID":
		raise ParserError("Invalid identifier" + getTokenLineInfo(current))
	t.children.append(tree("ID"))
	return next(G), t, s
Example #10
def ARITH_OP(current, G):
	# process the ARITHOP here when building tree before returning the next (G)
	if current.name == "PLUS":
		return next(G), tree("PLUS")
	if current.name == "MINUS":
		return next(G), tree("MINUS")
	else: #this should never happen because only way to get to this function is if current is an arith op
		raise ParserError("Invalid ARITH_OP" + getTokenLineInfo(current))
Example #11
def IDENT(current, G):
    t = tree('IDENT')
    if current.name != 'ID':
        raise ParserError("Syntax Error: Error when parsing IDENT: " + current.line)
    tmp = tree('ID')
    tmp.val = current.pattern
    t.append(tmp)
    dict[current.pattern] = ""  # add symbol to symbol table, will use different values later.
    return t, next(G)
Example #12
File: nni.py Project: JCM333/JPP
def do_rearrange(t):
	'''Perform a rearrangement on a tree'''
	if t.left.is_terminal:
		child1, child2 = t.right, t.left
	elif t.right.is_terminal:
		child1, child2 = t.left, t.right
	else:
		child1, child2 = t.children()

	gchild1, gchild2 = child1.children()
	return tree.tree(children=(gchild1, tree.tree(children=(gchild2, child2))))
Example #13
def IDENT(current, G, vartype="notype"):
    t = tree("IDENT")
    s = {}
    if not vartype == "notype":
        s = {current.pattern: [vartype, 0, 0]} #vartype, decl flag, init flag
    if not current.name == "ID":
        raise ParserError("Syntax Error: Invalid identifier" +
                          getTokenLineInfo(current))
    t.val = current.pattern
    t.children.append(tree("ID", val=current.pattern))
    return next(G), t, s
Example #14
def getData(f,words):
    with open(f, 'r') as data:
        lines = data.readlines()
    examples = []
    for i in lines:
        i=i.strip()
        if(len(i) > 0):
            i=i.split('\t')
            if len(i) >= 2:
                e = (tree(i[0]), tree(i[1]))
                examples.append(e)
    return examples
Example #15
def R(current, G):
    t = tree("R")
    if current.name in ('GREATEREQUAL', 'LESSEQUAL', 'EQUAL',
                        'LESSTHAN', 'GREATERTHAN', 'NOTEQUAL'):
        t.append(tree(current.name))
        current = next(G)
        t2, current = EXP2(current, G)
        t.append(t2)
        return t, current
    else:
        return t, current
Example #16
def PROGRAM(current, G):
    t = tree("PROGRAM")
    if current.name == "BEGIN":
        t1 = tree("BEGIN")
        t.append(t1)
        t2, current = STATEMENT_LIST(next(G), G)
        t.append(t2)
        if current.name == "END":
            t.append(tree("END"))
            return t, next(G)
        raise ParserError("Syntax Error: No 'end' at line: " + current.line)
    raise ParserError("Syntax Error: No 'begin' at line: " + current.line)
Example #17
def TYPE(current, G):
    t = tree("TYPE")
    s = {}
    if current.name == "INT":
        t.children.append(tree("INT"))
        return next(G), t, s, "INT"
    elif current.name == "BOOL":
        t.children.append(tree("BOOL"))
        return next(G), t, s, "BOOL"
    elif current.name == "STRING":
        t.children.append(tree("STRING"))
        return next(G), t, s, "STRING"
Example #18
def trees_adding_bb(t, taxon, cm, og):
	'''Yield all the trees that can be obtained from adding taxon to the tree t'''
	new_tree = tree.tree(children=(t, tree.tree(id=taxon)))
	if best_length is None or tree.tree(children=(og, new_tree)).length(cm) < best_length:
		yield new_tree

	if not t.is_terminal:
		l, r = t.left, t.right
		for a_tree in r.all_trees_left(trees_adding_bb(l, taxon, cm, og)):
			yield a_tree
		for a_tree in l.all_trees_right(trees_adding_bb(r, taxon, cm, og)):
			yield a_tree
Example #19
def PRIMARY(current, G):
    global counter
    t = tree('PRIMARY')
    if current.pattern in funcDict.keys():
        tmp = tree("FUNCCALL")
        tmp.val = current.pattern
        paren = next(G)  # the LPAREN that opens the call
        current = next(G)
        if current.name != "RPAREN":
            t1, current = ID_LIST(current, G)
            if current.name != "RPAREN":
                raise ParserError("not matching parens")
            tmp.append(t1)
        t.append(tmp)  # append the call node even when the argument list is empty
        return t, next(G)
    if current.name == 'INTLIT':
        tmp = tree('INTLIT')
        tmp.val = current.pattern
        tuple1 = ("False", typeOfVar)
        dict_[varName1] = tuple1 #do we need this?
        t.append(tmp)
        return t, next(G)
    if current.name == 'BOOLLIT':
        tmp = tree('BOOLLIT')
        tmp.val = current.pattern
        tuple1 = ("False", typeOfVar)
        dict_[varName1] = tuple1 #do we need this?
        t.append(tmp)
        return t, next(G)
    if current.name == 'LPAREN':
        t1, current = EXPRESSION(next(G), G)
        if current.name != 'RPAREN':
            raise ParserError("Syntax Error: Expected rparen is missing: " \
                              + current.line)
        t.append(t1)
        return t, next(G)
    if current.name == "STRING": #unsure
        tstrlit = tree("STRING")
        tstrlit.val = current.pattern
        t.append(tstrlit)
        # valOfVar = current.pattern
        tuple1 = (current.pattern, "STRING")
        tname = "lit%i" % counter
        strDict[tname] = tuple1
        counter = counter + 1
        ct = next(G)
        return t, ct
    t2, current = IDENT(current, G)
    t.append(t2)
    return t, current
Example #20
def SIGN(current, G):
    t = tree("SIGN")
    s = {}
    if current.t_class == "ARITHOP":
        if current.name == "MINUS":
            t.children.append(tree("MINUS"))
            return next(G), t, s
        else:
            raise ParserError(
                "Syntax Error: Invalid sign for number." + getTokenLineInfo(current))
    else:
        t.children.append(tree("LAMBDA"))
        return current, t, s
Example #21
def EXPRESSION(current, G):
    t = tree("EXPRESSION")
    t1, current = PRIMARY(current, G)
    t.append(t1)

    while True:
        if current.name == 'PLUS' or current.name == 'MINUS': #May have to add ARITHOP in future
            t.append(tree(current.name))
            current = next(G)
            t2, current = PRIMARY(current, G)
            t.append(t2)
        if current.name != 'PLUS' and current.name != 'MINUS':
            return t, current
Example #22
def TERM3(current, G):
    t = tree("TERM3")
    t1, current = FACT2(current, G)
    t.append(t1)

    while True:
        if (current.name == 'MULTIPLICATION'): #May have to add ARITHOP in future
            t.append(tree(current.name))
            current = next(G)
            t2, current = FACT2(current, G)
            t.append(t2)
        if (current.name != 'MULTIPLICATION'):
            return t, current
Example #23
def FACT3(current, G):
    t = tree("FACT3")
    t1, current = PRIMARY(current, G)
    t.append(t1)

    while True:
        if (current.name == 'REMAINDER'): #May have to add ARITHOP in future
            t.append(tree(current.name))
            current = next(G)
            t2, current = PRIMARY(current, G)
            t.append(t2)
        if (current.name != 'REMAINDER'):
            return t, current
Example #24
def EXPRESSION(current, G):
    t = tree("EXPRESSION")
    t1, current = TERM1(current, G)
    t.append(t1)

    while True:
        if (current.name == 'OR'): #May have to add ARITHOP in future
            t.append(tree(current.name))
            current = next(G)
            t2, current = TERM1(current, G)
            t.append(t2)
        if (current.name != 'OR'):
            return t, current
Example #25
def FACT2(current, G):
    t = tree("FACT2")
    t1, current = FACT3(current, G)
    t.append(t1)

    while True:
        if (current.name == 'DIVISION'): #May have to add ARITHOP in future
            t.append(tree(current.name))
            current = next(G)
            t2, current = FACT3(current, G)
            t.append(t2)
        if (current.name != 'DIVISION'):
            return t, current
Example #26
def TERM1(current, G):
    t = tree("TERM1")
    t1, current = FACT1(current, G)
    t.append(t1)

    while True:
        if (current.name == 'AND'): #May have to add ARITHOP in future
            t.append(tree(current.name))
            current = next(G)
            t2, current = FACT1(current, G)
            t.append(t2)
        if (current.name != 'AND'):
            return t, current
Example #27
def TERM1(current, G):
    t = tree("TERM1")
    s = {}
    current, child, s1 = FACT1(current, G)
    t.children.append(child)
    s.update(s1)
    while current.name == "AND":
        t.children.append(tree("AND"))
        current = next(G)  # move to token after "and"
        current, child, s1 = FACT1(current, G)
        t.children.append(child)
        s.update(s1)
    return current, t, s  # current should be in {),;}
Example #28
def TERM2(current, G):
    t = tree("TERM2")
    t1, current = TERM3(current, G)
    t.append(t1)

    while True:
        if (current.name == 'MINUS'): #May have to add ARITHOP in future
            t.append(tree(current.name))
            current = next(G)
            t2, current = TERM3(current, G)
            t.append(t2)
        if (current.name != 'MINUS'):
            return t, current
Example #29
def getPPDBData(f,words):
    with open(f, 'r') as data:
        lines = data.readlines()
    examples = []
    for i in lines:
        i=i.strip()
        if(len(i) > 0):
            i=i.split('\t')
            if len(i) == 2:
                e = (tree(i[0], words), tree(i[1], words))
                examples.append(e)
            else:
                print(i)
    return examples
Example #30
def RELATION(current, G):
    t = tree("RELATION")
    s = {}
    if current.t_class == "RELATIONOP":
        t.val = current.pattern
        t.children.append(tree("RELATIONOP", val=current.pattern))
        # assume exp2 returns a useful current
        current, child, s1 = EXP2(next(G), G)
        t.children.append(child)
        s.update(s1)
        return current, t, s
    else:
        t.children.append(tree("LAMBDA"))
        return current, t, s
Example #31
def backtracking_multiple_with_DTK2(table,
                                    cell,
                                    k_best,
                                    distributed_vector,
                                    dtk_generator=None,
                                    level=1):
    """
    table is a table of list of tuples (rules, coordinates)
    coord is a pair of triple of numbers (i,j,k)
    recursively find the k best parse trees (is this a beam search?)
    """
    if dtk_generator is None:
        #print ("none")
        dtk_generator = dtk.DT(dimension=4096, operation=operation.random_op)

    def sorting_func(t):
        if t in cache:
            return numpy.dot(cache[t], distributed_vector)
        else:
            cache[t] = dtk_generator.dt(t)
            return numpy.dot(cache[t], distributed_vector)

    def sorting_func_distance_from_1(t):
        if t in cache:
            return 1 / numpy.abs(1 - numpy.dot(cache[t], distributed_vector))
        else:
            tt = dtk_generator.dt(t)
            cache[t] = tt
            return 1 / numpy.abs(1 - numpy.dot(cache[t], distributed_vector))

    #sorting_func = lambda t: numpy.dot(dtk_generator.dt(t) , distributed_vector)

    first_k = sorted(table[cell], key=sorting_func,
                     reverse=True)[:min(level, k_best)]

    l = []
    for regola, coord in first_k:

        if coord is None:
            word = regola.right[0]
            rule_ = rule(word, ("__STOP__", ))
            child = tree(rule_, None)
            t = tree(regola, [child])
            #t = tree(regola, None)
            l.append(t)
            #return l
        else:
            c1, c2 = coord

            for child1 in backtracking_multiple_with_DTK2(
                    table,
                    c1,
                    k_best,
                    distributed_vector,
                    dtk_generator=dtk_generator,
                    level=level + 1):
                for child2 in backtracking_multiple_with_DTK2(
                        table,
                        c2,
                        k_best,
                        distributed_vector,
                        dtk_generator=dtk_generator,
                        level=level + 1):
                    t = tree(regola, [child1, child2])
                    #print (t, sorting_func_distance_from_1(t))
                    l.append(t)

    #print (l[0])
    l = sorted(l, key=sorting_func, reverse=True)[:k_best]
    # for i in l:
    #     print(i, sorting_func(i))
    # print("-")
    return l
Example #32
def top_down_reconstruction2(table,
                             cell=None,
                             k_best=5,
                             distributed_vector=None,
                             dtk_generator=None,
                             lista=None):
    #global listone
    """non so se non funziona per bug o il codice è giusto ma concettualmente non puo funzionare"""
    def scorer(t):
        return numpy.dot(dtk_generator.dt(t), distributed_vector)

    if lista is None:
        # if no starting list is given, build one from the list of starting nodes
        lista_ = []
        first = table[cell]
        r, c = first[0]
        rule_ = tree(r.left, None)
        score = scorer(rule_)
        rule_.coord = cell
        #print (t, t.coord)
        lista_.append((rule_, score))

    else:
        lista_ = lista
    #print ("prima: ", [str(x[0]) for x in lista])
    lista_ = sorted(lista_, key=lambda x: x[1], reverse=True)
    #print ([(str(x[0]), x[1]) for x in lista_])
    #random.shuffle(lista)

    lista_copy = lista_[:]

    lista_totale = []
    #listone.extend(lista_totale)
    for (t, score) in lista_:
        lista_albero = []  # list of the possible expansions of THAT tree
        #numero_terminali = len(list(t.allTerminalNodes()))

        for index, node in enumerate(t.allTerminalNodes()):
            #print (index, node)

            c1 = node.coord

            if c1 is None:
                continue

            cell = table[c1]
            for elem in cell:
                #print (elem)

                if elem[1] is None:
                    children = [tree(root=elem[0].right[0], children=None)]
                    children[0].coord = None
                    tt = tree(root=node.root, children=children)
                    tt.coord = None
                    ttt = t.add(tt, index)

                    score = scorer(ttt)

                    lista_albero.append((ttt, score))
                    continue
                cell1 = elem[1][0]
                cell2 = elem[1][1]

                #xx = tree(table[cell1][0][0].left)
                xx = tree(elem[0].right[0])
                xx.coord = cell1
                #yy = tree(table[cell2][0][0].left)
                yy = tree(elem[0].right[1])
                yy.coord = cell2
                children = [xx, yy]
                tt = tree(root=node.root, children=children)
                ttt = t.add(tt, index)
                #print (ttt)
                score = scorer(ttt)

                lista_albero.append((ttt, score))
                #lista_albero = list(set(lista_albero))

            #lista_albero = sorted(lista_albero, key=lambda x: x[1])

        #print ([str(x[0]) for x in lista_albero])
        lista_totale.extend(lista_albero)
    lista_totale = list(set(lista_totale))
    #listone.extend(lista_totale)

    lista_totale = sorted(lista_totale, key=lambda x: x[1],
                          reverse=True)[:k_best]
    #print ([str(x[0]) for x in lista_totale])
    #random.shuffle(sorted(lista_totale, key=lambda x: x[1]))
    #lista_totale = lista_totale[:k_best]
    if lista_totale:
        #for x in lista_totale:
        #    print (x[0])
        return top_down_reconstruction2(table, None, k_best,
                                        distributed_vector, dtk_generator,
                                        lista_totale)

    else:
        return lista_copy
Example #33
def setup_board_routes(app: Application, write_enabled: bool):

    # Create /api end point as root node
    r_api = tree("api", data=get_node_api())

    # Add /api/spb to represent side plane board
    r_spb = tree("spb", data=get_node_spb())
    r_api.addChild(r_spb)

    # Add /api/mezz to represent Network Mezzanine card
    r_mezz = tree("mezz", data=get_node_mezz())
    r_api.addChild(r_mezz)

    # Add servers /api/server[1-max]
    num = pal_get_num_slots()
    for i in range(1, num + 1):
        populate_server_node(r_api, i)

    # Add /api/spb/fruid end point
    r_temp = tree("fruid", data=get_node_fruid("bmc"))
    r_spb.addChild(r_temp)
    # /api/spb/bmc end point
    r_temp = tree("bmc", data=get_node_bmc())
    r_spb.addChild(r_temp)
    # /api/spb/sensors end point
    r_temp = tree("sensors", data=get_node_sensors("bmc"))
    r_spb.addChild(r_temp)
    # Add /api/spb/fans end point
    r_temp = tree("fans", data=get_node_fans())
    r_spb.addChild(r_temp)
    # /api/spb/logs end point
    r_temp = tree("logs", data=get_node_logs("bmc"))
    r_spb.addChild(r_temp)

    # Add /api/mezz/fruid end point
    r_temp = tree("fruid", data=get_node_fruid("nic"))
    r_mezz.addChild(r_temp)
    # /api/mezz/sensors end point
    r_temp = tree("sensors", data=get_node_sensors("nic"))
    r_mezz.addChild(r_temp)
    # /api/mezz/logs end point
    r_temp = tree("logs", data=get_node_logs("nic"))
    r_mezz.addChild(r_temp)

    r_api.setup(app, write_enabled)
Example #34
def KruskalAlgorithm(g):
    '''
    Kruskal algorithm
        Kruskal's algorithm is a greedy way of finding the minimum spanning
        tree of a connected network. The essence of the algorithm is to keep
        adding edges to the MST until all vertices have been added.
    '''
    if not isinstance(g, network):
        print('ERROR: please input a connected network!')
        return
    if g.mark in ['dg','ug']:
        print('ERROR: please input a connected network!')
        return
    mst = tree()
    vertexGroup = list(range(len(g.vertexes)))
    usedVertexes = []# used for showing the tree 
    vertexesAdd = []# used for showing the tree
    for i in range(len(g.vertexes)):
        usedVertexes.append(0)
        vertexesAdd.append(-1)
    cost = 0
    while len(set(vertexGroup)) > 1:
        minDist = g._network__INFINITY
        s = 0
        e = 0
        for i in range(len(g.vertexes)):
            for j in range(len(g.vertexes)):
                if g.adjMat[i][j] < minDist and vertexGroup[i] != vertexGroup[j]:
                    minDist = g.adjMat[i][j]
                    s = i
                    e = j
        if minDist == g._network__INFINITY:
            print('ERROR: the network is not a connected network!')
            return
        # be careful here!
        # when a vertex is assigned to a new group, every vertex that
        # currently shares a group with it must be moved as well
        target = vertexGroup[s]
        origin = vertexGroup[e]
        for i in range(len(vertexGroup)):
            if vertexGroup[i] == origin:
                vertexGroup[i] = target
        if mst.root == -1:
            mst.root = mst.makeNode()
            mst.nodes[mst.root].value = g.vertexes[s]
            usedVertexes[mst.root] = 1
            vertexesAdd[mst.root] = mst.root
        if usedVertexes[s] == 0:
            parentAdd = mst.makeNode()
            mst.nodes[parentAdd].value = g.vertexes[s]
            vertexesAdd[s] = parentAdd
            usedVertexes[s] = 1
        if usedVertexes[e] == 0:
            childAdd = mst.makeNode()
            mst.nodes[childAdd].value = g.vertexes[e]
            vertexesAdd[e] = childAdd
            usedVertexes[e] = 1
        childAdd = vertexesAdd[e]
        parentAdd = vertexesAdd[s]
        newChild = mst.nodes[parentAdd].lchild
        if newChild == -1:
            mst.nodes[parentAdd].lchild = childAdd
            mst.nodes[childAdd].parent = parentAdd    
        else:
            while mst.nodes[newChild].rchild != -1:
                newChild = mst.nodes[newChild].rchild
            mst.nodes[newChild].rchild = childAdd
            mst.nodes[childAdd].parent = newChild
        cost += minDist
    print('the minimum cost is :', cost)
    print('minimum spanning tree is as following:')
    mst.Show()
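KruskalAlgorithm only reads a few attributes off its argument; a hedged stub of the interface it expects (names inferred from the code above, not the project's real network class) is:

class network:
    # Stub of what KruskalAlgorithm uses: a vertex list, an adjacency matrix of
    # edge weights, an infinity sentinel for missing edges, and a mark that
    # distinguishes plain graphs ('dg'/'ug') from weighted networks.
    __INFINITY = float('inf')   # accessed above via the mangled name _network__INFINITY

    def __init__(self, vertexes, weighted_edges):
        self.vertexes = list(vertexes)
        self.mark = 'un'        # assumed marker for an undirected weighted network
        n = len(self.vertexes)
        self.adjMat = [[self.__INFINITY] * n for _ in range(n)]
        for i, j, w in weighted_edges:
            self.adjMat[i][j] = w
            self.adjMat[j][i] = w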
Example #35
def populate_server_node(num):
    prsnt = pal_is_fru_prsnt(num)
    if prsnt is None or prsnt == 0:
        return None

    r_server = tree("server", data=get_node_server(num))
    r_fruid = tree("fruid", data=get_node_fruid("server"))
    r_sensors = tree("sensors", data=get_node_sensors("server"))
    r_logs = tree("logs", data=get_node_logs("server"))
    r_config = tree("config", data=get_node_config("server"))
    r_bios = tree("bios", data=get_node_bios("server"))
    r_server.addChildren([r_fruid, r_sensors, r_logs, r_config, r_bios])

    r_boot_order_trunk = tree("boot-order",
                              data=get_node_bios_boot_order_trunk("server"))
    r_postcode_trunk = tree("postcode",
                            data=get_node_bios_postcode_trunk("server"))
    r_plat_info_trunk = tree("plat-info",
                             data=get_node_bios_plat_info_trunk("server"))
    r_pcie_port_config_trunk = tree(
        "pcie-port-config",
        data=get_node_bios_pcie_port_config_trunk("server"))
    r_bios.addChildren([
        r_boot_order_trunk,
        r_postcode_trunk,
        r_plat_info_trunk,
        r_pcie_port_config_trunk,
    ])

    r_boot_mode = tree("boot_mode", data=get_node_bios_boot_mode("server"))
    r_clear_cmos = tree("clear_cmos", data=get_node_bios_clear_cmos("server"))
    r_force_boot_bios_setup = tree(
        "force_boot_bios_setup", data=get_node_bios_force_boot_setup("server"))
    r_boot_order = tree("boot_order", data=get_node_bios_boot_order("server"))
    r_boot_order_trunk.addChildren(
        [r_boot_mode, r_clear_cmos, r_force_boot_bios_setup, r_boot_order])

    return r_server
Example #36
from lex import tokenize
import tree
from code_gen import compile_all
file = "source.txt"
code = open(file).read()
tokens = tokenize(code)
ast = tree.tree(tokens)
# ast.display()
c_code = compile_all(ast)
with open('output.c', 'w') as f:
    f.write(c_code)
# print(tree.name_types)
Example #37

def labeled_fscore(t1, t2):
    p = labeled_precision(t1, t2)
    r = labeled_recall(t1, t2)
    return 2 * (p * r / (p + r))


def crossbracketing():
    pass


if __name__ == '__main__':
    s1 = "(S (NP (NNS futures) (NNS traders)) (VP (VP say) (SBAR (SBAR (NP (DT the) (NNP s&p)) (VP (VBD was) (VP (VBG signaling) (SBAR (IN that) (S (NP (DT the) (NNP dow)) (VP (MD could) (VP (VB fall) (NP (RB as) (JJ much))))))))) (SBAR (RB as) (FRAG (NP (CD 200) (NNS points)) (. .))))))"
    s2 = "(S (@S (NP (NNS futures) (NNS traders)) (VP (VBP say) (SBAR (NP (DT the) (NNP s&p)) (VP (VBD was) (VP (VBG signaling) (SBAR (IN that) (S (NP (DT the) (NNP dow)) (VP (MD could) (VP (VB fall) (NP (NP (RB as) (JJ much)) (PP (IN as) (NP (CD 200) (NNS points))))))))))))) (. .))"

    s1 = "(W (X a) (Y b) (Z (c) (d)))"
    s2 = "(W (X a) (Y (Z b) (V (c) (d))))"

    t1 = tree(string=s1)
    t2 = tree(string=s2)

    p = precision(t1, t2)
    r = recall(t1, t2)
    f = fscore(t1, t2)

    print("precision: ", p, "recall: ", r, "fscore: ", f)

    print("lp: ", labeled_precision(t1, t2))
    print("lr: ", labeled_recall(t1, t2))
    print("lf: ", labeled_fscore(t1, t2))
Example #38
build = "g++"
options = "-std=c++11"
Name = "EM-MAGIC"
from tree import tree
import os
#call([build,"main.cpp",options,"-o "+name])
cwd = os.getcwd()
files = tree(cwd + "/src")
include = tree(cwd + "/headers", "d")
buildScript = ""
linking = ""
for i in range(len(include)):
    buildScript += "mkdir -p " + "\"" + include[i].replace(
        "/headers", "/build") + "\"" + '\n'
    include[i] = "-I" + "\"" + include[i] + "\""
for item in files:
    name = item
    name = name.replace("/src", "/build")
    name = name.replace(".cpp", ".o")
    if not (os.path.exists(name)
            and os.path.getmtime(item) < os.path.getmtime(name)):
        buildScript += (" ".join([build, "-c", "\"" + item + "\"", options] +
                                 include +
                                 ["-o " + "\"" + name + "\""])) + '\n'

    linking += "\"" + name + "\" "
buildFile = open(cwd + "/scripts/build", 'w')
buildFile.write(buildScript)
buildFile.write(build + " " + linking + options + " -o " + Name + '\n')
buildFile.close()
Example #39
def display(cls):
    for line in render_lines(tree(cls)):
        print(line)
Example #40
def parser_with_reconstruction3(sentence, grammar, k_best, distributed_vector=None, dtk_generator=None, referenceTable=None, rule_filter=2):
    # use the new grammar (grammar_2)
    words = sentence.split()
    n = len(words)

    #initialize TABLE
    P = numpy.zeros((n, n), dtype=object)
    for i, _ in numpy.ndenumerate(P):
        P[i] = []

    #unit production
    for i, word in enumerate(words):
        # to prevent uncovered words we create rule of the form X -> w
        # for each symbol X in the grammar and for each word w in the sentence
        for symbol in grammar.symbols:
            rule = gramm.Rule(symbol,[word])    # create a new rule
            rt = rule.toTree()                  # and transform into tree

            score = numpy.dot(dtk_generator.sn(rt), distributed_vector)
            ## NORMALIZATION
            score = score/numpy.sqrt(numpy.dot(dtk_generator.sn(rt), dtk_generator.sn(rt)))
            rt.score = score

            #P[i][0].append(((rule, None),(rt, score)))
            P[i][0].append(rt)


        #P[i][0] = sorted(P[i][0], key=lambda x: x[1][1], reverse=True)[:2]
        P[i][0] = sorted(P[i][0], key = lambda x: x.score, reverse=True)[:2]

    #non terminal rules
    numero_dtk = 0 #count iterations for debugging purpose
    for i in range(2, n + 1):
        #TODO:
        #add a check if numero_dtk is too high and break returning "not parsed"
        # total_size = len(dtk_generator.dt_cache) + len(dtk_generator.sn_cache) + len(dtk_generator.dtf_cache)
        # total_size_mbytes = (total_size*8*dtk_generator.dimension)/1048576
        # print (i, total_size_mbytes)
        if psutil.virtual_memory().percent > 95:
            return False, None, P

        for j in range(1, n - i + 2):
            for k in range(1, i):
                # look for combination of a tree in leftCell with a tree in rightCell
                leftCell = P[j - 1][k - 1]
                rightCell = P[j + k - 1][i - k - 1]

                for (subtree1, subtree2) in itertools.product(leftCell, rightCell):
                    stringa = subtree1.root + " " + subtree2.root
                    for rule in grammar.nonterminalrules[stringa]:
                        #FILTER on rules with too low score
                        passed, ruleScore = filterRule(rule, dtk_generator, distributed_vector, rule_filter)
                        if passed:
                            rtt = tree(root=rule.left, children=[subtree1, subtree2])
                            score = numpy.dot(dtk_generator.sn(rtt), distributed_vector)
                            ## NORMALIZATION
                            score = score/ruleScore
                            rtt.score = score

                            P[j-1][i-1].append(rtt)

                            numero_dtk = numero_dtk + 1

            #sort rules
            #P[j-1][i-1] = sorted(P[j-1][i-1], key=lambda x: x[1][1], reverse=True)
            P[j-1][i-1] = sorted(P[j-1][i-1], key=lambda x: x.score, reverse=True)
            #another k_best rules where the root is different than the first rule selected before
            #lista_diversi = [x for x in P[j-1][i-1] if x[0][0].left != P[j-1][i-1][0][0][0].left][:k_best]

            lista_diversi = [x for x in P[j-1][i-1] if x.root != P[j-1][i-1][0].root][:k_best]

            P[j-1][i-1] = P[j-1][i-1][:k_best]
            #if the new rules weren't already selected, add them
            if lista_diversi:
                for a in lista_diversi:
                    if a not in P[j-1][i-1]:
                        P[j-1][i-1].append(a)


            # DEBUG SECTION
            # if a reference is given, print the list of rules kept in the cell after trimming, together with the corresponding reference cell
            # on the first mismatch, return Pp (printed nicely so it can be compared with referenceTable)

            if referenceTable is not None:
                if P[j-1][i-1] and referenceTable[i-1][j-1]:
                    lista_alberi = [x[0][0] for x in P[j-1][i-1]]
                    if referenceTable[i-1][j-1] not in lista_alberi:
                        #rule = P[j-1][i-1][0][0][0]

                        print ("cella: ", (i-1, j-1))

                        print ([x[0][0] for x in P[j-1][i-1]], referenceTable[i-1][j-1]) # <- this case is a FAIL

                        #albero_sbagliato = P[j-1][i-1][0][1][0]
                        #score1 = P[j-1][i-1][0][1][1]
                        alberi_sbagliati = [x[1][0] for x in P[j-1][i-1]]



                        dtk_generator.dt_cache = {}
                        print ("SN: ")

                        for albero_sbagliato in alberi_sbagliati:

                            rtt = tree(root = referenceTable[i-1][j-1].left, children=alberi_sbagliati[0].children)

                            score1 = numpy.dot(dtk_generator.sn(albero_sbagliato), distributed_vector)
                            print (score1, albero_sbagliato)
                        score2 = numpy.dot(dtk_generator.sn(rtt), distributed_vector)
                        print (score2, rtt)

                        dtk_generator.dtf_cache = {}
                        print ("DTF: ")
                        for albero_sbagliato in alberi_sbagliati:
                            score1 = numpy.dot(dtk_generator.dtf(albero_sbagliato), distributed_vector)
                            regola = tree(root=albero_sbagliato.root, children=[tree(albero_sbagliato.children[0].root, None),tree(albero_sbagliato.children[1].root, None)])
                            print ("punteggio regola: ", numpy.dot(dtk_generator.dtf(regola), distributed_vector), regola)
                            print (score1, albero_sbagliato)
                        score2 = numpy.dot(dtk_generator.dtf(rtt), distributed_vector)
                        print (score2, rtt)
                        #return False, None, P
                else:
                    if referenceTable[i-1][j-1]: # and P[][] is empty
                        pass
                        #print (P[j-1][i-1],referenceTable[i-1][j-1] ) # <- this case is a FAIL
                        #return False, None, P
                    if P[j-1][i-1]: # and referenceTable is 0
                        pass
                        #print ("ok?", P[j-1][i-1],referenceTable[i-1][j-1] ) # <- this case may be fine

            # END OF DEBUG SECTION

    #print (numero_dtk) #number of iteration

    #list of tree in the final cell of the table
    finalList = P[0][-1]
    if finalList:

        #final sort (by DTK)
        finalList = sorted(finalList, key=lambda x: numpy.dot(dtk_generator.dt(x),distributed_vector), reverse=True)
        return True, finalList , P
    else:
        #treeToCYKMatrix.printCYKMatrix(simpleTable(P))
        return False, None, P
Example #41
fit_group = train_stats['fit_group']

# mutate fit group into a new generation
forest = []
for t in fit_group[:, 0]:
    # make two copies of each model and mutate them
    #new_tree_1 = tree(rand_gen=True, size=np.random.randint(tree_size_range[0], tree_size_range[-1]))
    new_tree_1 = copy.copy(t)
    new_tree_1.mutate()
    forest.append(new_tree_1)
    # scheme 1: second tree will be another mutant of the fit group
    #new_tree_2 = copy.copy(t)
    #new_tree_2.mutate()
    # scheme 2: second tree will be randomly generated
    new_tree_2 = tree(rand_gen=True, size=np.random.randint(tree_size_range[0], tree_size_range[-1]))
    forest.append(new_tree_2)

# simulate new generation and get scores
scores = []
for df in data:
    scores.append([t.basic_simulation(df, freq_weighted_scoring=True) for t in forest])
scores = np.mean(np.array(scores), axis=0)
nans = np.isnan(scores)
scores[nans] = 0

eval_forest = []
for t, s in zip(forest, scores):
    eval_forest.append([t, s])
eval_forest = np.array(eval_forest)
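The snippet stops after scoring; a plausible next step (my assumption, mirroring how fit_group is consumed at the top of the snippet) is to keep the best-scoring rows of eval_forest as the next generation's fit group:

import numpy as np

# Hypothetical selection step: sort by score (column 1) and keep the top half.
order = np.argsort(eval_forest[:, 1].astype(float))[::-1]
fit_group = eval_forest[order][: max(1, len(order) // 2)]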
Example #42
def parser_with_reconstruction(sentence,
                               grammar,
                               k_best,
                               distributed_vector=None,
                               dtk_generator=None,
                               albero=None):
    # use the new grammar and the proper tree objects
    # also, use a second matrix B[n, n, r] of backpointers

    tk = TK(LAMBDA=0.4)

    words = sentence.split()
    n = len(words)
    r = len(grammar.symbols)

    #initialization of a chart with empty lists
    P = numpy.zeros((n, n, r), dtype=object)
    #B = numpy.zeros((n, n, r), dtype=object)
    for i, _ in numpy.ndenumerate(P):
        P[i] = []
        #B[i] = []

    #unit production
    for i, word in enumerate(words):
        try:
            rules = grammar.terminalrules[word]
        except KeyError:
            print("the word", word, "does not appear in the rules")
            rules = []  # avoid falling through with rules unset (or stale) when the lookup fails

        for rule in rules:
            #print (rule, rule.toTree())
            rt = rule.toTree()
            index = grammar.symbols[rule.left]
            P[i][0][index].append(((rule, None), (rt, 1)))
            #B[i][0][index].append(None)

        for k in range(r):
            P[i][0][k] = P[i][0][k][:k_best]
            #if len(P[i][0][k]) > 0:
            #    print (len(P[i][0][k]))

    #non terminal rules

    numero_dtk = 0

    for i in range(2, n + 1):
        for j in range(1, n - i + 2):
            for k in range(1, i):
                for rule in grammar.nonterminalrules:
                    a = grammar.symbols[rule.left]
                    b = grammar.symbols[rule.right[0]]
                    c = grammar.symbols[rule.right[1]]

                    if P[j - 1][k - 1][b] and P[j + k - 1][i - k - 1][c]:
                        #rt = rule.toTree()
                        #print (rt)
                        #print (len(P[j - 1][k - 1][b]), " * ", len(P[j + k - 1][i - k - 1][c]))
                        for x, y in itertools.product(
                                P[j - 1][k - 1][b],
                                P[j + k - 1][i - k - 1][c]):

                            subtree1 = x[1][0]
                            subtree2 = y[1][0]
                            rtt = tree(root=rule.left,
                                       children=[subtree1, subtree2])
                            #print (rtt)
                            score = numpy.dot(dtk_generator.dt(rtt),
                                              distributed_vector)
                            #score = tk.evaluate(rtt, albero)
                            numero_dtk = numero_dtk + 1
                            #P[j - 1][i - 1][a].append(((rule, ((j - 1, k - 1, b), (j + k - 1, i - k - 1, c))), (rtt, score)))
                            P[j - 1][i - 1][a].append(
                                ((rule, ((j - 1, k - 1, b),
                                         (j + k - 1, i - k - 1, c))), (rtt,
                                                                       score)))

                            #P[j - 1][i - 1].append(((rule, ((j - 1, k - 1, b), (j + k - 1, i - k - 1, c))), (rtt, score)))

                            #print (P[j - 1][k - 1][a])
                        #B[j - 1][i - 1][a].append(((j - 1, k - 1, b), (j + k - 1, i - k - 1, c)))
                        P[j - 1][i - 1][a] = sorted(P[j - 1][i - 1][a],
                                                    key=lambda x: x[1][1],
                                                    reverse=True)[:k_best]

    print(numero_dtk)

    #find the coordinate of the cell with the final "S"
    fc = None
    if P[0][-1][grammar.start_symbol_index]:
        fc = grammar.start_symbol_index

    if fc is not None:
        return True, P[0][-1][fc]
    else:
        return False, None
Example #43
def init_plat_tree():

    # Create /api end point as root node
    r_api = tree("api", data = get_node_api())

    # Add /api/fcb to represent fan control board
    r_fcb = tree("fcb", data = get_node_fcb())
    r_api.addChild(r_fcb)
    # Add /api/pdpb to represent fan control board
    r_pdpb = tree("pdpb", data = get_node_pdpb())
    r_api.addChild(r_pdpb)
    # Add /api/peb to represent fan control board
    r_peb = tree("peb", data = get_node_peb())
    r_api.addChild(r_peb)

    #Add /api/fcb/fans end point
    r_temp = tree("fans", data = get_node_fans())
    r_fcb.addChild(r_temp)
    #Add /api/fcb/fruid end point
    r_temp = tree("fruid", data = get_node_fruid("fcb"))
    r_fcb.addChild(r_temp)
    #Add /api/fcb/sensors end point
    r_temp = tree("sensors", data = get_node_sensors("fcb"))
    r_fcb.addChild(r_temp)
    #Add /api/fcb/logs end point
    r_temp = tree("logs", data = get_node_logs("fcb"))
    r_fcb.addChild(r_temp)

    #Add /api/pdpb/sensors end point
    r_temp = tree("sensors", data = get_node_sensors("pdpb"))
    r_pdpb.addChild(r_temp)
    #Add /api/pdpb/flash end point
    r_temp = tree("flash", data = get_node_flash())
    r_pdpb.addChild(r_temp)
    #Add /api/pdpb/fruid end point
    r_temp = tree("fruid", data = get_node_fruid("pdpb"))
    r_pdpb.addChild(r_temp)
    #Add /api/pdpb/logs end point
    r_temp = tree("logs", data = get_node_logs("pdpb"))
    r_pdpb.addChild(r_temp)

    #Add /api/peb/fruid end point
    r_temp = tree("fruid", data = get_node_fruid("peb"))
    r_peb.addChild(r_temp)
    #Add /api/peb/sensors end point
    r_temp = tree("sensors", data = get_node_sensors("peb"))
    r_peb.addChild(r_temp)
    #Add /api/peb/bmc end point
    r_temp = tree("bmc", data = get_node_bmc())
    r_peb.addChild(r_temp)
    #Add /api/peb/health end point
    r_temp = tree("health", data = get_node_health())
    r_peb.addChild(r_temp)
    #Add /api/peb/logs end point
    r_temp = tree("logs", data = get_node_logs("peb"))
    r_peb.addChild(r_temp)

    return r_api
Example #44
##########Parsing program inputs##########
inps = parse()

indata = gamma_reader.gamma_reader(inps.dem_par_name, inps.I_name,
                                   inps.theta_name, inps.phi_name,
                                   inps.cor_name)
indata.read_igram(scale=inps.scale, flip=inps.flipvert, mult=inps.mult)
#indata.read_geom(az = inps.useaz, defgeom = inps.usedefgeom, flip = inps.flipvert) #FIXME az not found
indata.read_geom(flip=inps.flipvert)  #FIXME az not found
indata.read_coherence(flip=inps.flipvert)
indata.mask_igram_by_threshold(inps.corthresh)

sampler = tree.tree(inps.minsize,
                    inps.maxsize,
                    inps.thresh,
                    inps.minres,
                    method=inps.usevar)
sampler.resample(indata)
sampler.write(inps.out_name, rsp=inps.rsp)
npts = len(sampler.xi)

########Plot data
if inps.plot:
    plt.figure('Decomposition')
    plt.jet()
    orgax = plt.subplot(221)
    plt.imshow(indata.phs)
    tks = np.linspace(np.nanmin(indata.phs), np.nanmax(indata.phs), num=5)
    orgax.set_ylabel('Pixels')
    orgax.set_xticklabels([])
Example #45
from tree import tree as tree
from initialization import make_galaxy
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.animation as animation

G = 0.0044995611
dt = 1  # Myrs
universe_size = 50
theta = 1.
N = 50
chi = 10.  #softening

bodies = make_galaxy(N, chi, G)

sys = tree(bodies, G, universe_size, dt, theta, chi)

fig, (ax) = plt.subplots(1, 1)
ax.set_xlim(-universe_size / 2., universe_size / 2.)
ax.set_ylim(-universe_size / 2., universe_size / 2.)

ax.set_title('N =' + str(N))

points = ax.plot(*([[], []] * N), marker="o", color='black',
                 markersize=1)  #change the size


def init():
    for j in range(N):
        points[j].set_data([], [])
    return (points)
Example #46
# coding: utf-8
import binary_tree
import tree

x=tree.tree(1000)
y=tree.tree(2000)
z=tree.tree(3000)
w=tree.tree(6)
v=tree.tree(7)
r = tree.tree(8)
u = tree.tree(2)
x.AddSuccessor(y)
x.AddSuccessor(z)
c=tree.tree(5)
z.AddSuccessor(c)
y.AddSuccessor(w)
y.AddSuccessor(v)
w.AddSuccessor(r)
y.AddSuccessor(u)            
     
      
        
test = binary_tree.binary_tree(100)
test2 = binary_tree.binary_tree(200)
test3 = binary_tree.binary_tree(300)
test4 = binary_tree.binary_tree(400)
test5 = binary_tree.binary_tree(500)
test6 = binary_tree.binary_tree(600)
test7 = binary_tree.binary_tree(7)

test.AddLeft(test2)
Example #47
def calculExecution():
    '''
    Construction of the technology and intervention matrices with uncertainty information.
    Construction of the characterisation factors matrix.
    '''

    runButton.config(state="disabled")

    global pb_hD

    system_filename = os.path.join(path, "programme", "..", "databases",
                                   database)  #export from Simapro

    infoFrame1 = Frame(informationsFrame)
    infoFrame1.pack()
    Label(infoFrame1,
          text="Reading the database and constructing the matrices...").pack(
              side=LEFT)

    (system_meta_info, UP_meta_info, UP_list, EF_list, all_flow,
     technology_matrix, intervention_matrix, CF_matrices, CF_categories,
     CF_units, EF_unit, unit_converter, infrastructure_rescale,
     uncertainty_info) = SimaPro_reader(system_filename, impact_method)

    Label(infoFrame1, text="Done").pack()

    CF_matrix = CF_matrices[impact_method]
    CF_categories = CF_categories[impact_method]
    CF_units = CF_units[impact_method]
    CF_categories_name = [re.sub(r"\W", "_", cat) for cat in CF_categories]

    EF_by_number = {}
    for (compartment, substance, subcompartment) in EF_list:
        EF = [compartment, substance, subcompartment]
        EF_number = EF_list.index(EF)
        EF = (compartment, substance, subcompartment)
        EF_by_number[EF_number] = EF

    ###Transformation of the technology and intervention matrices for testing the functionality of the algorithm.
    infoFrame2 = Frame(informationsFrame)
    infoFrame2.pack()
    Label(infoFrame2,
          text="transformating matrices for tests ...").pack(side=LEFT)
    (technology_matrix, intervention_matrix, uncertainty_info, UP_list,
     CF_transformed) = transformation_matrices(technology_matrix,
                                               intervention_matrix,
                                               uncertainty_info, UP_list,
                                               CF_matrix)
    Label(infoFrame2, text="Done").pack()

    print_results(path + project_name, project_name, UP_list, EF_list,
                  database, impact_method, CF_categories, CF_units, iterations,
                  disaggregation_criterion, uncertainty_info, CF_matrix)

    ###Calculation of the deterministic scores
    infoFrame3 = Frame(informationsFrame)
    infoFrame3.pack()
    Label(infoFrame3,
          text="Calculating deterministic scores ...").pack(side=LEFT)
    t0 = time.time()
    #inverse_technology_matrix=spsolve(technology_matrix, identity(technology_matrix.shape[0]))
    inverse_technology_matrix = inv(technology_matrix.todense())
    tinv = time.time() - t0
    #print "temps d'inversion : "+str(tinv)
    Z = (identity(len(technology_matrix.todense())) - technology_matrix)
    inverse_technology_matrix = fix_inverse(Z, inverse_technology_matrix)
    intensity_matrix = matrix(
        intervention_matrix.dot(inverse_technology_matrix))
    all_system_scores, all_unit_scores = calculate_all_scores(
        identity(len(technology_matrix.todense())), intensity_matrix,
        intervention_matrix, CF_matrix)
    Label(infoFrame3, text="Done").pack()

    if correlatedMC.get():

        ###Monte-Carlo in the correlated case and storage of the matrices (distribution parameters)
        Label(
            informationsFrame,
            text="Uncertainty analysis under a fully-correlated assumption..."
        ).pack()

        pb_hD.pack()

        essai = 0
        while 1:
            try:
                os.mkdir(
                    os.path.join(path, project_name,
                                 "correlated_impacts" + str(essai)))
                break
            except:
                essai += 1

        (variables_technologique,
         variables_intervention) = MC_correlated_preparation(
             technology_matrix, intervention_matrix,
             uncertainty_info['technology'], uncertainty_info['intervention'])
        MC(variables_technologique, variables_intervention, CF_matrix,
           CF_categories_name, iterations, UP_list, "all",
           os.path.join(path, project_name,
                        "correlated_impacts" + str(essai)), [], progress)

        infoFrame4 = Frame(informationsFrame)
        infoFrame4.pack()
        Label(infoFrame4,
              text="Printing parameters and covariances ...").pack(side=LEFT)

        sigma_correlated, mu_correlated, sign_correlated = calcul_parameters(
            UP_list, os.path.join(path, project_name, "correlated_impacts"),
            len(CF_categories))

        results_cor = csv.writer(
            open(
                os.path.join(path, project_name,
                             "Monte-Carlo_results_correle.csv"), "wb"))
        results_cor.writerow(
            ["index", "processus"] +
            ["impact " + category for category in CF_categories] +
            ["sign " + category for category in CF_categories] +
            ["mu " + category for category in CF_categories] +
            ["sigma " + category for category in CF_categories])
        for up in range(len(UP_list) - 4):
            results_cor.writerow(
                [up, UP_list[up]] +
                [impact[0] for impact in all_system_scores[:, up].tolist()] +
                [ssign[0] for ssign in sign_correlated[:, up].tolist()] +
                [mmu[0] for mmu in mu_correlated[:, up].tolist()] +
                [sigma[0] for sigma in sigma_correlated[:, up].tolist()])

        ###Calculation of the variance-covariance matrices

        calcul_vcv(UP_list, impact_method, CF_categories_name,
                   os.path.join(path, project_name))

        Label(infoFrame4, text="Done").pack()

    if nocorrelatedMC.get():

        sigma_correlated, mu_correlated, sign_correlated = calcul_parameters(
            UP_list, os.path.join(path, project_name, "correlated_impacts"),
            len(CF_categories))

        essai = 0
        while 1:
            try:
                os.mkdir(
                    os.path.join(path, project_name,
                                 "nocorrelated_impacts" + str(essai)))
                break
            except:
                essai += 1

        ###Monte-Carlo in the uncorrelated case and storage of the matrices (distribution parameters)
        Label(
            informationsFrame,
            text="Uncertainty analysis under a fully-uncorrelated assumption..."
        ).pack()

        full_results_UP = {}
        full_results_EF = {}
        level_reached = {}
        system_scores = {}
        child_list = {}
        score_list_EF = {}
        coefficient_list = {}
        link_UP_EF_full_result = {}
        systems = []
        for proc in UP_list:
            systems.append({proc: 1})

        processRunned = Label(informationsFrame, text="")
        processRunned.pack()

        for system_number in range(nocorrBegin.get(),
                                   max(nocorrEnd.get(),
                                       len(UP_list) -
                                       4)):  #disaggregation for every system

            processRunned.config(text="Process " + str(system_number))
            pb_hD.pack()

            full_results_UP = {}
            full_results_EF = {}
            level_reached = {}
            system_scores = {}
            child_list = {}
            score_list_EF = {}
            coefficient_list = {}
            link_UP_EF_full_result = {}
            final_demand_vector = build_final_demand_vector(
                systems[system_number], UP_list)
            start_time = time.time()
            full_results_UP = {}
            level_reached = {}

            #Disaggregation of the system

            full_results_UP, level_reached, system_scores = systematic_disaggregation_UP(
                disaggregation_criterion, full_results_UP, level_reached,
                system_scores, UP_meta_info, UP_list, EF_list,
                technology_matrix, intervention_matrix, CF_matrix,
                CF_categories, EF_unit, uncertainty_info, intensity_matrix, Z,
                all_system_scores, all_unit_scores, impact_method,
                final_demand_vector, system_number, systems)

            UP_list_desag = construct_UP_list_desag(full_results_UP, UP_list)
            tree(UP_list_desag, UP_meta_info, impact_method, CF_categories,
                 all_system_scores, all_unit_scores, CF_units,
                 os.path.join(path, project_name, "trees"))

            (variables_technologique,
             variables_intervention) = MC_nocorrelated_preparation(
                 technology_matrix, intervention_matrix,
                 uncertainty_info['technology'],
                 uncertainty_info['intervention'], UP_list, UP_list_desag,
                 mu_correlated, sign_correlated, sigma_correlated)
            MC(
                variables_technologique, variables_intervention,
                CF_transformed, CF_categories_name, iterations, UP_list_desag,
                system_number,
                os.path.join(path, project_name,
                             "nocorrelated_impacts" + str(essai)), systems,
                progress)

        infoFrame5 = Frame(informationsFrame)
        infoFrame5.pack()
        Label(infoFrame5, text="Printing parameters ...").pack(side=LEFT)

        # Parameters are estimated from the uncorrelated Monte-Carlo outputs
        # written just above, not from the correlated ones.
        sigma_nocorrelated, mu_nocorrelated, sign_nocorrelated = calcul_parameters(
            UP_list[:-4],
            os.path.join(path, project_name, "nocorrelated_impacts" + str(essai)),
            len(CF_categories))

        results_nocor = csv.writer(
            open(
                os.path.join(path, project_name,
                             "Monte-Carlo_results_nocorrele.csv"), "wb"))
        results_nocor.writerow(
            ["index", "processus"] +
            ["impact " + category for category in CF_categories] +
            ["sign " + category for category in CF_categories] +
            ["mu " + category for category in CF_categories] +
            ["sigma " + category for category in CF_categories])
        for up in range(len(UP_list) - 4):
            results_nocor.writerow(
                [up, UP_list[up]] +
                [impact[0] for impact in all_system_scores[:, up].tolist()] +
                [ssign[0] for ssign in sign_nocorrelated[:, up].tolist()] +
                [mmu[0] for mmu in mu_nocorrelated[:, up].tolist()] +
                [sigma[0] for sigma in sigma_nocorrelated[:, up].tolist()])

        Label(infoFrame5, text="Done").pack(side=LEFT)
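
The two result files written above ("Monte-Carlo_results_correle.csv" and "Monte-Carlo_results_nocorrele.csv") share the same layout: an index, the process name, then one impact/sign/mu/sigma column per impact category. Below is a minimal, purely illustrative sketch of reading one of them back; the path and project_name variables are the ones used above, everything else is an assumption.

import csv
import os

# Hypothetical post-processing: read the correlated results back in.
with open(os.path.join(path, project_name, "Monte-Carlo_results_correle.csv")) as f:
    reader = csv.reader(f)
    header = next(reader)
    n_cat = (len(header) - 2) // 4          # four column blocks per category
    for row in reader:
        index, process = row[0], row[1]
        impacts = row[2:2 + n_cat]          # deterministic scores
        # the sign, mu and sigma blocks follow in the same category order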
Exemplo n.º 48
0
def setup_board_routes(app: Application, write_enabled: bool):

    # Create /api end point as root node
    r_api = tree("api", data=get_node_api())

    # Add /api/mezz to represent Mezzanine Card
    r_mezz = tree("nic", data=get_node_mezz())
    r_api.addChild(r_mezz)
    # Add /api/iom to represent IO Module
    r_iom = tree("iom", data=get_node_iom())
    r_api.addChild(r_iom)
    # Add /api/dpb to represent Drive Plane Board
    r_dpb = tree("dpb", data=get_node_dpb())
    r_api.addChild(r_dpb)
    # Add /api/scc to represent Storage Controller Card
    r_scc = tree("scc", data=get_node_scc())
    r_api.addChild(r_scc)

    # Add servers /api/slot[1-max]
    num = pal_get_num_slots()
    for i in range(1, num + 1):
        r_server = populate_server_node(i)
        if r_server:
            r_api.addChild(r_server)

    # Add /api/mezz/sensors end point
    r_temp = tree("sensors", data=get_node_sensors("nic"))
    r_mezz.addChild(r_temp)
    # Add /api/mezz/logs end point
    r_temp = tree("logs", data=get_node_logs("nic"))
    r_mezz.addChild(r_temp)

    # Add /api/iom/fruid end point
    r_temp = tree("fruid", data=get_node_fruid("iom"))
    r_iom.addChild(r_temp)
    # Add /api/iom/sensors end point
    r_temp = tree("sensors", data=get_node_sensors("iom"))
    r_iom.addChild(r_temp)
    # Add /api/iom/logs end point
    r_temp = tree("logs", data=get_node_logs("iom"))
    r_iom.addChild(r_temp)
    # Add /api/iom/bmc end point
    r_temp = tree("bmc", data=get_node_bmc())
    r_iom.addChild(r_temp)
    # Add /api/iom/health end point
    r_temp = tree("health", data=get_node_health())
    r_iom.addChild(r_temp)
    # Add /api/iom/identify end point
    r_temp = tree("identify", data=get_node_identify("iom"))
    r_iom.addChild(r_temp)

    # Add /api/dpb/fruid end point
    r_temp = tree("fruid", data=get_node_fruid("dpb"))
    r_dpb.addChild(r_temp)
    # Add /api/dpb/sensors end point
    r_temp = tree("sensors", data=get_node_sensors("dpb"))
    r_dpb.addChild(r_temp)
    # Add /api/dpb/logs end point
    r_temp = tree("logs", data=get_node_logs("dpb"))
    r_dpb.addChild(r_temp)
    # Add /api/dpb/fans end point
    r_temp = tree("fans", data=get_node_fans())
    r_dpb.addChild(r_temp)
    # Add /api/dpb/hdd-status end point
    r_tmp = tree("hdd-status", data=get_node_enclosure_hdd_status())
    r_dpb.addChild(r_tmp)
    # Add /api/dpb/error end point
    r_tmp = tree("error", data=get_node_enclosure_error())
    r_dpb.addChild(r_tmp)
    # Add /api/dpb/flash-health end point
    r_tmp = tree("flash-health", data=get_node_enclosure_flash_health())
    r_dpb.addChild(r_tmp)
    # Add /api/dpb/flash-status end point
    r_tmp = tree("flash-status", data=get_node_enclosure_flash_status())
    r_dpb.addChild(r_tmp)

    # Add /api/scc/fruid end point
    r_temp = tree("fruid", data=get_node_fruid("scc"))
    r_scc.addChild(r_temp)
    # Add /api/scc/sensors end point
    r_temp = tree("sensors", data=get_node_sensors("scc"))
    r_scc.addChild(r_temp)
    # Add /api/scc/logs end point
    r_temp = tree("logs", data=get_node_logs("scc"))
    r_scc.addChild(r_temp)

    r_api.setup(app, write_enabled)
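
Examples n.º 48, 55 and 59 all build their REST hierarchy from the same small tree node type. The sketch below is an assumption about that interface, covering only what these examples exercise (a name, a data/handler object, addChild); the setup() call that registers routes with the web framework is stubbed out because its details are not shown here.

class tree:
    """Minimal sketch of the REST node type assumed by these examples."""

    def __init__(self, name, data=None):
        self.name = name          # path segment, e.g. "api", "iom", "sensors"
        self.data = data          # handler object returned by a get_node_*() helper
        self.children = []

    def addChild(self, child):
        self.children.append(child)

    def setup(self, app, write_enabled):
        # Route registration is framework-specific and omitted; this stub only
        # mirrors the call shape used in setup_board_routes().
        pass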
Exemplo n.º 49
0
    ROOT.gROOT.SetBatch()

    ROOT.TMVA.Tools.Instance()
    ROOT.TMVA.PyMethodBase.PyInitialize()

    outFile = ROOT.TFile.Open(options.output, "RECREATE")

    files = []
    xmlTree = ET.parse(options.xml)
    for s in xmlTree.findall('sample'):
        if s.get('id') == options.sample and s.get('tag') == options.tag:
            for child in s:
                files.append(child.text)

    elecPrompt = tr.tree('elecPrompt')
    elecNonPrompt = tr.tree('elecNonPrompt')
    muonPrompt = tr.tree('muonPrompt')
    muonNonPrompt = tr.tree('muonNonPrompt')

    gtr = ROOT.TChain(c.treeName)

    for f in files:
        print(f)
        gtr.Add(f)

    nEntries = gtr.GetEntries()
    print('Number of events:', nEntries)

    #    dl = mva.mva(gtr, options.model, 'xgb')
    #    tmva = mva.mva(gtr, options.model, 'tmva')
Exemplo n.º 50
0
def run(level_cnt, grid_step, N, eps):
    grid_dim = 2**(level_cnt-1) # Should remain power of two for easy life
    src_list = []
    for i in range(N):
        src_list.append(source.source(grid_dim * np.random.random(),grid_dim *\
                                  np.random.random(), np.random.random()))
        # Map src to nearest lower left grid pnt
        src_list[i].grid = (int(np.floor(src_list[i].x/grid_step)), \
                            int(np.floor(src_list[i].y/grid_step)))
    
    print("Building Tree...")    
    my_tree = tree.tree(src_list, level_cnt)
    my_tree.build()
    
    print("Filling Interaction Lists...")
    interactions = interaction.interaction(level_cnt, my_tree)
    interactions.fill_list()
    
    leaf_start = 2**(2*(level_cnt-1))
    leaf_end = 2*leaf_start
    
    for obs_idx in range(leaf_start, leaf_end):
        for src_idx in range(leaf_start, leaf_end):
            G = interactions.build_G(my_tree.tree[obs_idx], \
                                     my_tree.tree[src_idx])
            if (my_tree.tree[src_idx] == []) or (my_tree.tree[obs_idx] == []):
                U, V = np.array([]), np.array([])
            else:
                U,V = utils.uv_decompose(G, eps)
            
            srcs = np.array([src_list[i] for i in my_tree.tree[src_idx]])
            obs_ids = my_tree.tree[obs_idx]
            src_vec = np.array([src.weight for src in srcs])
            
            interactions.src_vecs[obs_idx][src_idx] = src_vec
            interactions.obs_vecs[obs_idx] = obs_ids
            interactions.uv_list[obs_idx][src_idx] = (U,V)
    
    print('Computing UV Decompositions...')
    for lvl in range(level_cnt-2, 1, -1):
        lb = 2**(2*lvl)
        ub = 2*lb
        for obs_idx in range(lb,ub):
            for src_idx in interactions.list[obs_idx]:
    #        for src_idx in range(lb,ub):
                n = my_tree.get_children(obs_idx,lvl) #rows of merging
                m = my_tree.get_children(src_idx,lvl) #cols of merging
                uv = [[0,0],[0,0]] # index as [row][col]
                for i in range(2):
                    for j in range(2):
                        U1, V1 = interactions.uv_list[n[2*i]][m[2*j]]
                        U2, V2 = interactions.uv_list[n[2*i+1]][m[2*j]]
                        U3, V3 = interactions.uv_list[n[2*i]][m[2*j+1]]
                        U4, V4 = interactions.uv_list[n[2*i+1]][m[2*j+1]]
                        
                        U12,V12 = utils.merge(U1, V1, U2, V2, eps)
                        U34,V34 = utils.merge(U3, V3, U4, V4, eps)
                        # Horizontal merge
                        uv[i][j] = utils.merge(U12, V12, U34, V34, eps, 1)
                
                Um1,Vm1 = utils.merge(uv[0][0][0], uv[0][0][1],\
                                      uv[1][0][0], uv[1][0][1], eps)
                Um2,Vm2 = utils.merge(uv[0][1][0], uv[0][1][1], \
                                      uv[1][1][0], uv[1][1][1], eps)
                src_vec = np.array([])
                obs_ids = []
                for box_idx in m:
                    srcs = np.array([src_list[i] for i in my_tree.tree[box_idx]])
                    src_vec = np.hstack((src_vec, np.array([src.weight \
                                            for src in srcs])))
                for box_idx in n:
                    obss = np.array([src_list[i] for i in my_tree.tree[box_idx]])
                    obs_ids = obs_ids + my_tree.tree[box_idx]
                
                U,V = utils.merge(Um1, Vm1, Um2, Vm2, eps, 1)
                
                interactions.src_vecs[obs_idx][src_idx] = src_vec
                interactions.obs_vecs[obs_idx] = obs_ids
                interactions.uv_list[obs_idx][src_idx] = (U, V)  
    
    fast_time = 0    
    print("Computing Fast Interactions...")
    for obs_box_idx in range(len(interactions.list)):
        obs_srcs_near = my_tree.tree[obs_box_idx]
        obs_srcs_far = interactions.obs_vecs[obs_box_idx]
        obs_pot_near = np.zeros(len(obs_srcs_near))
        obs_pot_far = np.zeros(len(obs_srcs_far))
        for src_box_idx in interactions.list[obs_box_idx]:
    #        src_srcs = my_tree.tree[src_box_idx]
            src_vec = interactions.src_vecs[obs_box_idx][src_box_idx]
    #        src_vec = np.array([src_list[idx].weight for idx in src_srcs])
            U, V = interactions.uv_list[obs_box_idx][src_box_idx]
            if np.size(U) != 0:
                s = time.perf_counter()
                obs_pot_far += np.dot(U, np.dot(V, src_vec))
                fast_time += time.perf_counter() - s
        # near-field interactions
        obs_pot_near += interactions.compute_near(obs_box_idx)
        for i, obs in enumerate(obs_srcs_near):
            s = time.perf_counter()
            interactions.potentials[obs] += obs_pot_near[i]
            fast_time += time.perf_counter() - s
        for i, obs in enumerate(obs_srcs_far):
            s = time.perf_counter()
            interactions.potentials[obs] += obs_pot_far[i]
            fast_time += time.perf_counter() - s
    
    #Direct Computation
    print("Computing Direct Interactions...")
    idxs = [i for i in range(N)]
    G = interactions.build_G(idxs, idxs)
    src_vec = np.array([src.weight for src in src_list])
    s = time.perf_counter()
    direct_potentials = np.dot(G, src_vec)
    slow_time = time.perf_counter() - s
    #
    error = (lg.norm(interactions.potentials) - lg.norm(direct_potentials))\
            / lg.norm(direct_potentials)
            
    print('Error: ', error)
    print('Fast Time: ', fast_time)
    print('Slow Time: ', slow_time)
        
    return(fast_time, slow_time, error)
    
## old testing code, saved just in case ###

#lvl = 2
#obs_idx = 16
#src_idx = 25
#n = my_tree.get_children(obs_idx,lvl) #rows of merging
#m = my_tree.get_children(src_idx,lvl) #cols of merging
#rank = 1
#uv = [[0,0],[0,0]] # index as [row][col]
#for i in range(2):
#    for j in range(2):
#        print(i,j)
#        U1, V1 = interactions.uv_list[n[2*i]][m[2*j]]
#        U2, V2 = interactions.uv_list[n[2*i+1]][m[2*j]]
#        U3, V3 = interactions.uv_list[n[2*i]][m[2*j+1]]
#        U4, V4 = interactions.uv_list[n[2*i+1]][m[2*j+1]]
#        
#        U12,V12 = utils.merge(U1, V1, U2, V2, eps)
#        U34,V34 = utils.merge(U3, V3, U4, V4, eps)
#        # Horizontal merge
#        uv[i][j] = utils.merge(U12, V12, U34, V34, eps, 1)
#
#Um1,Vm1 = utils.merge(uv[0][0][0], uv[0][0][1],\
#                      uv[1][0][0], uv[1][0][1], eps)
#Um2,Vm2 = utils.merge(uv[0][1][0], uv[0][1][1], \
#                      uv[1][1][0], uv[1][1][1], eps)
#
#U,V = utils.merge(Um1, Vm1, Um2, Vm2, eps, 1)
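
The low-rank machinery in example n.º 50 lives in utils.uv_decompose and utils.merge, neither of which is shown. As a rough sketch of what uv_decompose might do, here is a truncated SVD with relative tolerance eps, so that G is approximated by U @ V; this is an assumption about the missing module, not its actual code.

import numpy as np

def uv_decompose(G, eps):
    # Keep the singular values above eps times the largest one and fold them
    # into U, so that G is approximated by U @ V with a small rank.
    if G.size == 0:
        return np.array([]), np.array([])
    U, s, Vt = np.linalg.svd(G, full_matrices=False)
    rank = max(1, int(np.sum(s > eps * s[0])))
    return U[:, :rank] * s[:rank], Vt[:rank, :]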
Exemplo n.º 51
0
def pseudotime(data,
               cells,
               var_explained=0.99,
               m=None,
               k=None,
               resid_factor=1.0,
               max_iter=1,
               eps=1e-5,
               sig=0.1,
               gam=1.0,
               verbose=False):

    x = data.copy().astype(np.float64)
    if m is None:
        pca_evrcs = PCA().fit(x.T).explained_variance_ratio_.cumsum()
        m = (pca_evrcs < var_explained).sum()
        m = max(2, m)
    pca = PCA(n_components=m)
    x = pca.fit_transform(x.T).T
    ev = pca.explained_variance_ratio_.cumsum()[m - 1]
    if verbose:
        print('pca, n_components =', m, 'var explained', round(ev, 3))

    shift = x.mean(1)[:, np.newaxis]
    x -= shift
    scale = np.abs(x).max(1)[:, np.newaxis]
    x /= scale

    # k means
    n = max(x.shape)
    start = time.time()
    if k is None:
        k = n

    c = KMeans(n_clusters=k).fit(x.T).cluster_centers_.T

    if verbose:
        print('k-means, k =', k, round(time.time() - start, 2), 's')

    for it in range(max_iter):

        last_x = x.copy()
        last_c = c.copy()

        c, stree = rge(x, c, eps=eps, sig=sig, gam=gam, verbose=verbose)

        e_idx = np.stack(np.where(stree))
        e = c[:, e_idx]
        x2 = (x * x).sum(0)[:, np.newaxis]
        d = x2 - 2.0 * x.T.dot(e[:, 0]) + (e[:, 0]**2).sum(0)
        d += x2 - 2.0 * x.T.dot(e[:, 1]) + (e[:, 1]**2).sum(0)
        idx = d.argmin(1)

        a = np.diff(e[..., idx], axis=1).squeeze()
        b = x - e[:, 0, idx]

        proj = []
        for i, (ai, bi) in enumerate(zip(a.T, b.T)):
            proj.append(ai.dot(bi) / ai.dot(ai))
            r = bi - proj[-1] * ai
            bi -= resid_factor * r
            x[:, i] = bi + e[:, 0, idx[i]]

        proj = np.array(proj)
        e = e_idx[:, idx].T

        c_err = ((c - last_c)**2).sum(0) / (c**2).sum(0)
        x_err = ((x - last_x)**2).sum(0) / (x**2).sum(0)

        if verbose:
            print(it, c_err.mean(), x_err.mean())

        if c_err.mean() < eps and x_err.mean() < eps:
            break

    stree = np.array(stree)

    x, c = scale * x, scale * c
    x, c = shift + x, shift + c

    x = pca.inverse_transform(x.T).T
    c = pca.inverse_transform(c.T).T

    if verbose:
        fig = plt.figure(figsize=(8, 8))
        ax = plt.gca()
        pca = PCA(n_components=2).fit(np.hstack((x, c)).T)
        x_pca = pca.transform(x.T)
        c_pca = pca.transform(c.T)
        ax.scatter(*x_pca.T, color='k')
        idxs = np.stack(np.where(stree), 1)
        for idx1, idx2 in idxs:
            x1, y1 = c_pca[idx1]
            x2, y2 = c_pca[idx2]
            ax.plot([x1, x2], [y1, y2], 'k-', lw=0.5)
        ax.scatter(*c_pca.T, color='k', s=2)
        ax.axis('off')
        plt.show()

    tr = tree.tree(c, stree)

    stem_cell_data = data[:, cells == 'stem']
    tr.set_root(stem_cell_data)

    tr.get_pseudotime()

    t = []
    for ie, ei in enumerate(e):
        v1, v2 = ei
        t1, t2 = tr.t[v1], tr.t[v2]
        t.append(t1 + proj[ie] * (t2 - t1))

    t = np.array(t)
    t -= t.min()
    t /= t.max()

    return t, tr
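
A hypothetical call of pseudotime(), only to illustrate the expected shapes: data is a (features x cells) matrix and cells is an array of labels in which 'stem' marks the root population used by tr.set_root(). The numbers below are synthetic.

import numpy as np

data = np.random.rand(50, 200)                      # 50 features x 200 cells
cells = np.array(['stem'] * 20 + ['other'] * 180)   # 'stem' labels the root cells
t, tr = pseudotime(data, cells, var_explained=0.95, k=40, verbose=False)
# t holds one pseudotime value in [0, 1] per cell; tr is the fitted tree.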
Exemplo n.º 52
0
 def __init__(self, model, sentence, ids):
     self.sentence = sentence
     self.tree = tree(model, ids, sentence)
Exemplo n.º 53
0

if __name__ == "__main__":

    #PARAMETER DEFINITION:
    #-grammar:

    #g = grammar(rules)
    #G = grammar.Grammar_(rules)

    distributed = dtk.DT(dimension=1024,
                         LAMBDA=0.4,
                         operation=operation.shuffled_convolution)

    ss = "(S (@S (NP (@NP (@NP (NP (NNP Pierre)(NNP Vinken))(, ,))(ADJP (NP (CD 61)(NNS years))(JJ old)))(, ,))(VP (MD will)(VP (@VP (@VP (VB join)(NP (DT the)(NN board)))(PP (IN as)(NP (@NP (DT a)(JJ nonexecutive))(NN director))))(NP (NNP Nov.)(CD 29)))))(. .))"
    l = tree(string=ss)
    l = tree.binarize(l)
    l = tree.normalize(l)

    sent = tree.sentence(l)
    print(l)

    rules = [grammar.fromTreetoRule(x) for x in l.allRules()]
    g = grammar.Grammar(rules)

    _, p, P = parser_with_reconstruction3(sent, g, 1, distributed.dt(l),
                                          distributed)

    T = treeToCYKMatrix.treeToCYKMatrix(l)
    M = simpleTable(P)
Exemplo n.º 54
0
def parser_with_reconstruction2(sentence,
                                grammar,
                                k_best,
                                distributed_vector=None,
                                dtk_generator=None):
    # use the new grammar and the correct trees
    # also, use a second matrix B[n,n,r] of backpointers

    words = sentence.split()
    n = len(words)
    r = len(grammar.symbols)

    #P = n*[n*[r*[[]]]]

    #initialization of a chart with empty lists
    P = numpy.zeros((n, n), dtype=object)
    for i, _ in numpy.ndenumerate(P):
        P[i] = []
        #B[i] = []

    #unit production
    for i, word in enumerate(words):
        try:
            rules = grammar.terminalrules[word]
        except KeyError:
            print("the word", word, "does not appear in the rules")
            rules = []  # fall back to an empty rule list for unknown words

        for rule in rules:
            #print (rule, rule.toTree())
            rt = rule.toTree()
            #score = numpy.dot(dtk_generator.dt(rt), distributed_vector)
            P[i][0].append(((rule, None), (rt, 1)))

        P[i][0] = sorted(P[i][0], key=lambda x: x[1][1], reverse=True)

    #non terminal rules

    numero_dtk = 0

    for i in range(2, n + 1):
        for j in range(1, n - i + 2):
            for k in range(1, i):
                for rule in grammar.nonterminalrules:
                    a = grammar.symbols[rule.left]
                    b = grammar.symbols[rule.right[0]]
                    c = grammar.symbols[rule.right[1]]

                    lista_b = [
                        x for x in P[j - 1][k - 1]
                        if x[0][0].left == rule.right[0]
                    ]
                    lista_c = [
                        x for x in P[j + k - 1][i - k - 1]
                        if x[0][0].left == rule.right[1]
                    ]

                    if lista_b and lista_c:
                        #print (lista_b)
                        #print (lista_c)
                        #rt = rule.toTree()
                        #print (rt)
                        #print (len(P[j - 1][k - 1][b]), " * ", len(P[j + k - 1][i - k - 1][c]))
                        for x, y in itertools.product(lista_b, lista_c):

                            subtree1 = x[1][0]
                            subtree2 = y[1][0]
                            rtt = tree(root=rule.left,
                                       children=[subtree1, subtree2])
                            #score = numpy.dot(dtk_generator.sn(rtt), distributed_vector)
                            score = numpy.dot(dtk_generator.dt(rtt),
                                              distributed_vector)
                            numero_dtk = numero_dtk + 1
                            #P[j - 1][i - 1][a].append(((rule, ((j - 1, k - 1, b), (j + k - 1, i - k - 1, c))), (rtt, score)))
                            #P[j - 1][i - 1][a].append(((rule, ((j - 1, k - 1, b), (j + k - 1, i - k - 1, c))), (rtt, score)))

                            P[j - 1][i - 1].append(
                                ((rule, ((j - 1, k - 1, b),
                                         (j + k - 1, i - k - 1, c))), (rtt,
                                                                       score)))
                            #P[j - 1][i - 1] = P[j - 1][i - 1][:k_best]
                            #print (P[j - 1][k - 1][a])
                        #B[j - 1][i - 1][a].append(((j - 1, k - 1, b), (j + k - 1, i - k - 1, c)))

            P[j - 1][i - 1] = sorted(P[j - 1][i - 1],
                                     key=lambda x: x[1][1],
                                     reverse=True)[:k_best]

    print(numero_dtk)

    #print (P[0][-1])

    #for i, l in enumerate(P):
    #    print (i, l)

    #find the coordinate of the cell with the final "S"
    fc = None

    lista_s = [x for x in P[0][-1] if x[0][0].left == "S"]
    if lista_s:
        return True, [t[1][0] for t in lista_s]
    else:
        return False, None
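
A hypothetical call of parser_with_reconstruction2, mirroring the driver shown in example n.º 53 (sent, g, l and distributed are assumed to be built the same way as there); it returns a success flag and the k-best reconstructed trees.

ok, best_trees = parser_with_reconstruction2(sent, g, 5, distributed.dt(l), distributed)
if ok:
    print(best_trees[0])   # highest-scoring reconstruction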
Exemplo n.º 55
0
def setup_board_routes(app: Application, write_enabled: bool):

    # Create /api end point as root node
    r_api = tree("api", data=get_node_api())

    # Add /api/sled to represent entire SLED
    r_sled = tree("sled", data=get_node_sled())
    r_api.addChild(r_sled)

    # Add mb /api/sled/mb
    r_mb = tree("mb", data=get_node_server_2s(1, "mb"))
    r_sled.addChild(r_mb)

    # Add /api/sled/mb/fruid end point
    r_temp = tree("fruid", data=get_node_fruid("mb"))
    r_mb.addChild(r_temp)

    # /api/sled/mb/bmc end point
    r_temp = tree("bmc", data=get_node_bmc())
    r_mb.addChild(r_temp)

    # /api/sled/mb/sensors end point
    r_temp = tree("sensors", data=get_node_sensors("mb"))
    r_mb.addChild(r_temp)

    # /api/sled/mb/logs end point
    r_temp = tree("logs", data=get_node_logs("mb"))
    r_mb.addChild(r_temp)

    # Add /api/sled/mezz0 to represent Network Mezzanine card
    r_mezz = tree("mezz0", data=get_node_mezz())
    r_sled.addChild(r_mezz)

    # Add /api/mezz/fruid end point
    r_temp = tree("fruid", data=get_node_fruid("nic0"))
    r_mezz.addChild(r_temp)

    # /api/mezz/sensors end point
    r_temp = tree("sensors", data=get_node_sensors("nic0"))
    r_mezz.addChild(r_temp)

    # /api/mezz/logs end point
    r_temp = tree("logs", data=get_node_logs("nic0"))
    r_mezz.addChild(r_temp)

    # Add /api/sled/mezz1 to represent Network Mezzanine card
    r_mezz1 = tree("mezz1", data=get_node_mezz())
    r_sled.addChild(r_mezz1)

    # Add /api/mezz1/fruid end point
    r_temp = tree("fruid", data=get_node_fruid("nic1"))
    r_mezz1.addChild(r_temp)

    # /api/mezz1/sensors end point
    r_temp = tree("sensors", data=get_node_sensors("nic1"))
    r_mezz1.addChild(r_temp)

    # /api/mezz1/logs end point
    r_temp = tree("logs", data=get_node_logs("nic1"))
    r_mezz1.addChild(r_temp)

    r_api.setup(app, write_enabled)
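
A hypothetical wiring of this route builder. The Application type is assumed to be aiohttp's web application, as the annotation suggests; a read-only setup would pass write_enabled=False.

from aiohttp.web import Application

app = Application()
setup_board_routes(app, write_enabled=False)   # register the /api/... endpoints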
Exemplo n.º 56
0
def parser_with_reconstruction3(sentence,
                                grammar,
                                k_best,
                                distributed_vector=None,
                                dtk_generator=None):
    # use the new grammar (grammar_2)

    words = sentence.split()
    n = len(words)
    r = len(grammar.symbols)

    P = numpy.zeros((n, n), dtype=object)
    for i, _ in numpy.ndenumerate(P):
        P[i] = []

    #unit production
    for i, word in enumerate(words):
        try:
            rules = grammar.terminalrules[word]
        except KeyError:
            print("the word", word, "does not appear in the rules")
            rules = []  # unknown word: fall through to the per-symbol fallback below

        if rules == []:
            for symbol in grammar.symbols:
                #print (rule, rule.toTree())
                rule = grammar.rule(symbol, [word])
                rt = rule.toTree()
                score = numpy.dot(dtk_generator.dt(rt), distributed_vector)
                P[i][0].append(((rule, None), (rt, score)))
        else:
            for rule in rules:
                #print (rule, rule.toTree())
                rt = rule.toTree()
                score = numpy.dot(dtk_generator.dt(rt), distributed_vector)
                P[i][0].append(((rule, None), (rt, score)))

        P[i][0] = sorted(P[i][0], key=lambda x: x[1][1], reverse=True)[:k_best]

    #non terminal rules
    numero_dtk = 0

    for i in range(2, n + 1):
        for j in range(1, n - i + 2):
            for k in range(1, i):

                # from here on this still needs changing
                # a = grammar.symbols[rule.left]
                # b = grammar.symbols[rule.right[0]]
                # c = grammar.symbols[rule.right[1]]

                # cells to analyze; each holds a list of rules, e.g. [ 'VP': [@S -> NP VP, VP -> MD VP], ... ]
                # build rule combinations whose first symbol comes from the left cell
                # and whose second symbol comes from the right cell
                cella_sinistra = P[j - 1][k - 1]
                cella_destra = P[j + k - 1][i - k - 1]

                stringhe = []
                if cella_sinistra and cella_destra:
                    for x, y in itertools.product(cella_sinistra,
                                                  cella_destra):
                        #print (x, y)
                        b = x[0][0].left
                        c = y[0][0].left
                        stringhe.append(b + " " + c)
                stringhe = list(set(stringhe))

                for stringa in stringhe:
                    #print ("rules: ", len(grammar.nonterminalrules[stringa]))
                    for rule in grammar.nonterminalrules[stringa]:

                        subtree1 = cella_sinistra[0][1][0]
                        subtree2 = cella_destra[0][1][0]

                        rtt = tree(root=rule.left,
                                   children=[subtree1, subtree2])
                        #print (rtt)
                        #score = numpy.dot(dtk_generator.sn(rtt), distributed_vector)
                        score = numpy.dot(dtk_generator.dt(rtt),
                                          distributed_vector)
                        numero_dtk = numero_dtk + 1
                        #P[j - 1][i - 1][a].append(((rule, ((j - 1, k - 1, b), (j + k - 1, i - k - 1, c))), (rtt, score)))
                        #P[j - 1][i - 1][a].append(((rule, ((j - 1, k - 1, b), (j + k - 1, i - k - 1, c))), (rtt, score)))

                        P[j - 1][i - 1].append(
                            ((rule, ((j - 1, k - 1, None),
                                     (j + k - 1, i - k - 1, None))), (rtt,
                                                                      score)))
                        #P[j - 1][i - 1] = P[j - 1][i - 1][:k_best]
                        #print (P[j - 1][k - 1][a])
                        #B[j - 1][i - 1][a].append(((j - 1, k - 1, b), (j + k - 1, i - k - 1, c)))

            P[j - 1][i - 1] = sorted(P[j - 1][i - 1],
                                     key=lambda x: x[1][1],
                                     reverse=True)[:k_best]

    #print (numero_dtk) #number of iteration

    #print (P[0][-1])

    #for i, l in enumerate(P):
    #    print (i, l)

    #find the coordinate of the cell with the final "S"
    fc = None

    lista_s = [x for x in P[0][-1] if x[0][0].left == "S"]

    if lista_s:
        return True, [t[1][0] for t in lista_s], P
    else:
        # keep the return arity consistent with the success branch
        return False, None, P
Exemplo n.º 57
0
__author__ = 'Roland'
from tree import tree, parseTree, ExpressionError
from tokenizer import token, Tokenizer

pT = parseTree()
tok = Tokenizer()
assert (pT.buildParseTree(tok.tokenize("1+2")) == tree('+', '1', '2'))
assert (pT.buildParseTree(tok.tokenize("(x+(y*z+2))-3*((5+x)/2-4)")) == tree(
    '-', tree('+', 'x', tree('+', tree('*', 'y', 'z'), '2')),
    tree('*', '3', tree('-', tree('/', tree('+', '5', 'x'), '2'), '4'))))
assert (pT.buildParseTree(tok.tokenize("sin(x)+ln(y)*3")) == tree(
    '+', tree('sin', 'x'), tree('*', tree('ln', 'y'), '3')))
assert (pT.buildParseTree(tok.tokenize('x^y*2-3')) == tree(
    '-', tree('*', tree('^', 'x', 'y'), '2'), '3'))
assert (pT.buildParseTree(tok.tokenize('x=y=5*3-20*sin(x+y)')) == tree(
    '=', 'x',
    tree(
        '=', 'y',
        tree('-', tree('*', '5', '3'),
             tree('*', '20', tree('sin', tree('+', 'x', 'y')))))))
try:  # error tests
    Tree = pT.buildParseTree(tok.tokenize('x***y'))
    assert (False)
except ExpressionError:
    assert (True)
try:
    Tree = pT.buildParseTree(tok.tokenize('x===y'))
    assert (False)
except ExpressionError:
    assert (True)
try:
Exemplo n.º 58
0
def coeffs(params, output=None, order=1):
    '''
    Returns the results of the turbulence code. Automatically selects the
    integration dimension based on the number of supplied parameters.

    Accepts an optional argument 'output'. If it is None, all quantities
    are computed and returned. Otherwise it must be an array of integers
    of shape (6, 6) with 1's in the positions where outputs are desired
    and 0's otherwise.
    '''

    if output is None:
        if len(params) == 9:
            params = [params[4]] + list(params)
            r = coeffs2spherical(*params, order=order)
        elif len(params) == 10:
            params = list(params)
            r = coeffs2spherical(*params, order=order)
        elif len(params) == 12:
            params = [params[7]] + list(params)
            r = coeffs3spherical(*params, order=order)
        elif len(params) == 13:
            params = list(params)
            r = coeffs3spherical(*params, order=order)
        else:
            raise NotImplementedError(
                'Number of parameters does not match any known specification.')
    else:
        output2 = [0 for _ in range(36)]
        for i in range(6):
            for j in range(6):
                if output[i, j]:
                    output2[6 * i + j] = 1

        if len(params) == 10:
            params2 = [output2, params[4]] + list(params)
            mins = [0., 0.]
            maxs = [np.pi, 2 * np.pi]
            co = lambda x: coeffs2sphericalSpecificBox(*x, order=order)
        elif len(params) == 11:
            params2 = [output2] + list(params)
            mins = [0., 0.]
            maxs = [np.pi, 2 * np.pi]
            co = lambda x: coeffs2sphericalSpecificBox(*x, order=order)
        elif len(params) == 13:
            params2 = [output2, params[7]] + list(params)
            mins = [0., 0., 0.]
            maxs = [1., np.pi, 2 * np.pi]
            co = lambda x: coeffs3sphericalSpecificBox(*x, order=order)
        elif len(params) == 14:
            params2 = [output2] + list(params)
            mins = [0., 0., 0.]
            maxs = [1., np.pi, 2 * np.pi]
            co = lambda x: coeffs3sphericalSpecificBox(*x, order=order)
        else:
            raise NotImplementedError(
                'Number of parameters does not match any known specification.')

        def f(x):
            c = None
            if len(params) == 10 or len(params) == 11:
                c = correlator(1.,
                               x[0],
                               x[1],
                               0,
                               0,
                               0,
                               params[0],
                               params[1],
                               params[2],
                               params[3],
                               params[4],
                               params[5],
                               params[6],
                               order=order)
            elif len(params) == 13 or len(params) == 14:
                c = correlator(x[0],
                               x[1],
                               x[2],
                               params[0],
                               params[1],
                               params[2],
                               params[3],
                               params[4],
                               params[5],
                               params[6],
                               params[7],
                               params[8],
                               params[9],
                               order=order)
            else:
                raise NotImplementedError(
                    'Number of parameters does not match any known specification.'
                )

            if np.sum(np.abs(c[2:, 2:])) > 1e-13:
                return np.sum(np.abs(c[2:, 2:]))
            else:
                return 0

        t = tree(mins, maxs, f)
        t.allSplit(1000)

        r = np.zeros((6, 6, 2))

        # For determining the number of evals
        est = sum([c.mean * c.volume for c in t.nonzero])
        vol = sum([c.volume for c in t.nonzero])

        print(vol / (2 * np.pi * np.pi))

        if vol / (2 * np.pi * np.pi) > 0.1:
            params3 = [mins, maxs] + params2
            r = co(params3)
        else:
            for c in t.nonzero:
                params3 = [c.mins, c.maxs] + params2
                params3[-2] = 10 + int(params3[-2] * c.mean * c.volume / est)
                res = co(params3)
                r += res

    return r
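
A hypothetical call of coeffs() that requests only part of the 6x6 output; the 13-entry parameter list below is illustrative and is only chosen to hit the 3D spherical branch.

import numpy as np

mask = np.zeros((6, 6), dtype=int)
mask[0, 0] = mask[1, 1] = 1        # only these two coefficients are wanted
params = [0.1] * 13                # illustrative values; 13 entries -> 3D case
r = coeffs(params, output=mask, order=1)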
Exemplo n.º 59
0
def init_plat_tree():

    # Create /api end point as root node
    r_api = tree("api", data=get_node_api())

    # Add /api/sled to represent entire SLED
    r_sled = tree("sled", data=get_node_sled())
    r_api.addChild(r_sled)

    # Add mb /api/sled/mb
    r_mb = tree("mb", data=get_node_server_2s(1, "mb"))
    r_sled.addChild(r_mb)

    # Add /api/sled/mb/fruid end point
    r_temp = tree("fruid", data=get_node_fruid("mb"))
    r_mb.addChild(r_temp)

    # /api/sled/mb/bmc end point
    r_temp = tree("bmc", data=get_node_bmc())
    r_mb.addChild(r_temp)

    # /api/sled/mb/sensors end point
    r_temp = tree("sensors", data=get_node_sensors("mb"))
    r_mb.addChild(r_temp)

    # /api/sled/mb/logs end point
    r_temp = tree("logs", data=get_node_logs("mb"))
    r_mb.addChild(r_temp)

    # Add /api/sled/mezz to represent Network Mezzanine card
    r_mezz = tree("mezz", data=get_node_mezz())
    r_sled.addChild(r_mezz)

    # Add /api/mezz/fruid end point
    r_temp = tree("fruid", data=get_node_fruid("nic"))
    r_mezz.addChild(r_temp)

    # /api/mezz/sensors end point
    r_temp = tree("sensors", data=get_node_sensors("nic"))
    r_mezz.addChild(r_temp)

    # /api/mezz/logs end point
    r_temp = tree("logs", data=get_node_logs("nic"))
    r_mezz.addChild(r_temp)

    return r_api
    def toUnrootedTree(self):
        ''' Return the tree object of the unrooted 
        version of this topology. '''

        return tree.tree(self.toUnrootedNewick())