def p_expressao_logica(p):
    '''expressao_logica : expressao_simples | expressao_logica operador_logico expressao_simples'''
    # NOTE: the docstring above is the PLY grammar rule -- it must stay untouched.
    # Build the AST node for a logical expression: one child for a bare simple
    # expression, three children for the left-recursive binary form.
    node = Node('expressao_logica')
    if len(p) == 4:
        node.children = [p[1], p[2], p[3]]
    elif len(p) == 2:
        node.children = [p[1]]
    p[0] = node
def p_expressao_aditiva(p):
    '''expressao_aditiva : expressao_multiplicativa | expressao_aditiva operador_soma expressao_multiplicativa'''
    # NOTE: the docstring above is the PLY grammar rule -- it must stay untouched.
    # Additive expression: a multiplicative expression, or a left-recursive
    # <additive> <sum-operator> <multiplicative> chain.
    node = Node('expressao_aditiva')
    if len(p) == 4:
        node.children = [p[1], p[2], p[3]]
    elif len(p) == 2:
        node.children = [p[1]]
    p[0] = node
def p_declaracao_funcao(p):
    '''declaracao_funcao : tipo cabecalho | cabecalho'''
    # NOTE: the docstring above is the PLY grammar rule -- it must stay untouched.
    # A function declaration is a header optionally preceded by a type.
    node = Node('declaracao_funcao')
    if len(p) == 3:
        node.children = [p[1], p[2]]
    elif len(p) == 2:
        node.children = [p[1]]
    p[0] = node
def p_expressao_simples(p):
    '''expressao_simples : expressao_aditiva | expressao_simples operador_relacional expressao_aditiva'''
    # NOTE: the docstring above is the PLY grammar rule -- it must stay untouched.
    # Simple expression: an additive expression, or a left-recursive
    # <simple> <relational-operator> <additive> comparison.
    node = Node('expressao_simples')
    if len(p) == 4:
        node.children = [p[1], p[2], p[3]]
    elif len(p) == 2:
        node.children = [p[1]]
    p[0] = node
def p_expressao_unaria(p):
    '''expressao_unaria : fator | operador_soma fator | operador_negacao fator'''
    # NOTE: the docstring above is the PLY grammar rule -- it must stay untouched.
    # Unary expression: a bare factor, or a sign/negation operator plus a factor.
    node = Node('expressao_unaria')
    if len(p) == 3:
        node.children = [p[1], p[2]]  # operator + factor
    elif len(p) == 2:
        node.children = [p[1]]  # bare factor
    p[0] = node
def p_lista_argumentos(p):
    '''lista_argumentos : lista_argumentos VIRGULA expressao | expressao | vazio'''
    # NOTE: the docstring above is the PLY grammar rule -- it must stay untouched.
    # Argument list: comma-separated recursion, a single expression, or empty.
    node = Node('lista_argumentos')
    if len(p) == 4:
        comma = Node('VIRGULA', children=[Node(p[2])])
        node.children = [p[1], comma, p[3]]
    elif p[1]:  # single expressao; an empty 'vazio' production adds no child
        node.children = [p[1]]
    p[0] = node
def buildTree(node, tree, split_info):
    # this is from the SplitterTry.ipynb file
    """Recursively grow a decision tree from *node*'s data.

    If ``node.datafile`` is a leaf according to ``checkLeaf``, a terminal
    Node is created and appended to ``tree``.  Otherwise the optimal split
    is computed, its metadata recorded in ``split_info``, and both halves
    are processed recursively.  Note that only leaf nodes are appended to
    ``tree`` (the append for internal nodes is commented out below).

    NOTE(review): ``Node`` here is a project class taking five positional
    arguments, not anytree's -- argument meanings are assumed to be
    (left, right, label, datafile, split_value); confirm against its definition.

    :param node: Node instance whose ``datafile`` attribute holds the data to split.
    :param tree: list collecting the leaf nodes created so far (mutated in place).
    :param split_info: list collecting each split's metadata dict (mutated in place).
    :return: None -- results are accumulated in ``tree`` and ``split_info``.
    """
    #leafNodes = [] #will turn this again null in recursive call
    #split_info = []
    #nodeCount+=1
    if checkLeaf(node.datafile):
        #leafNodes.append(datafile)
        #nodeCount+=1
        # Terminal case: wrap the data in a leaf Node and record it.
        node = Node(None, None, 'leaf', node.datafile, None)
        tree.append(node)
        #pass
    else:
        # Internal case: split the data and recurse into both partitions.
        metadata, left, right = computeOptimalSplit(node.datafile)
        split_info.append(metadata)
        #nodeCount+=1
        node = Node(None, None, metadata['splitting_criteria'], node.datafile, metadata['splitting_value'])
        #tree.append(node)
        node.children = [left, right]
        buildTree(left, tree, split_info)
        buildTree(right, tree, split_info)
def p_lista_parametros(p):
    '''lista_parametros : lista_parametros VIRGULA parametro | parametro | vazio'''
    # NOTE: the docstring above is the PLY grammar rule -- it must stay untouched.
    # Parameter list: comma-separated recursion, a single parameter, or empty.
    node = Node('lista_parametros')
    if len(p) == 4:
        comma = Node('VIRGULA', children=[Node(p[2])])
        node.children = [p[1], comma, p[3]]
    elif p[1]:  # single parametro; an empty 'vazio' production adds no child
        node.children = [p[1]]
    p[0] = node
def term():
    # term -> factor { '*' factor }
    # Builds a left-leaning tree of '*' nodes over successive factors.
    left = factor()
    while token == "*":
        op_node = Node(token)
        match(token)
        op_node.children = [left, factor()]
        left = op_node
    return left
def repeat_stmt():
    # repeat-stmt -> 'repeat' stmt-sequence 'until' exp
    node = Node("repeat")
    match("repeat")
    body = stmt_sequence()
    match("until")
    condition = exp()
    node.children = [body, condition]
    return node
def stmt_sequence():
    # stmt-sequence -> statement { ';' statement }
    # Builds a left-leaning tree of ';' nodes over successive statements.
    left = statement()
    while token == ";":
        seq_node = Node(token)
        match(token)
        seq_node.children = [left, statement()]
        left = seq_node
    return left
def simple_exp():
    # simple-exp -> term { ('+'|'-') term }
    # Builds a left-leaning tree of additive-operator nodes over terms.
    left = term()
    while token in ("+", "-"):
        op_node = Node(token)
        match(token)
        op_node.children = [left, term()]
        left = op_node
    return left
def test_node_children_type():
    """A non-node child assignment must raise TreeError."""
    tree_root = Node("root")
    expected = "Cannot add non-node object 'string'. It is not a subclass of 'NodeMixin'."
    with assert_raises(TreeError, expected):
        tree_root.children = ["string"]
def test_node_children_multiple():
    """Assigning the same node twice as a child must raise TreeError."""
    tree_root = Node("root")
    child = Node("sub")
    expected = "Cannot add node Node('/sub') multiple times as child."
    with assert_raises(TreeError, expected):
        tree_root.children = [child, child]
def exp():
    # exp -> simple-exp [ ('<'|'=') simple-exp ]
    # At most one comparison operator is consumed at this level.
    left = simple_exp()
    if token in ("<", "="):
        op_node = Node(token)
        match(token)
        op_node.children = [left, simple_exp()]
        left = op_node
    return left
def test_tuple_as_children():
    """Assigning a tuple as a child must be rejected with TreeError."""
    parent = Node('foo')
    expected = "Cannot add non-node object (0, 1, 2). It is not a subclass of 'NodeMixin'."
    with assert_raises(TreeError, expected):
        parent.children = [(0, 1, 2)]
def test_children_setter():
    """Children assignment: loops are rejected and leave the tree untouched."""
    tree_root = Node("root")
    first = Node("sub0")
    second = Node("sub0A")
    grandchild = Node("sub0B")
    tree_root.children = [first, second]
    first.children = [grandchild]
    eq_(tree_root.descendants, (first, grandchild, second))
    with assert_raises(
            LoopError,
            "Cannot set parent. Node('/root/sub0') cannot be parent of itself."):
        first.children = [first]
    # the tree must be unchanged after the LoopError
    eq_(tree_root.descendants, (first, grandchild, second))
    with assert_raises(
            LoopError,
            "Cannot set parent. Node('/root/sub0') is parent of Node('/root/sub0/sub0B')."):
        grandchild.children = [first]
    # the tree must be unchanged after the LoopError
    eq_(tree_root.descendants, (first, grandchild, second))
    # moving 'second' below 'grandchild' keeps the same preorder tuple
    tree_root.children = [first, second]
    first.children = [grandchild]
    grandchild.children = [second]
    eq_(tree_root.descendants, (first, grandchild, second))
def p_corpo(p):
    '''corpo : corpo acao | vazio'''
    # NOTE: the docstring above is the PLY grammar rule -- it must stay untouched.
    # Fix: the original attached p[1] at construction time and then, for the
    # two-symbol production, immediately reassigned .children -- detaching and
    # re-attaching p[1] for nothing.  Build the child list exactly once, which
    # also matches the structure of the sibling p_* rules.
    corpo = Node('corpo')
    if len(p) == 3:
        corpo.children = [p[1], p[2]]
    else:
        corpo.children = [p[1]]
    p[0] = corpo
def p_fator(p):
    '''fator : ABRE_PARENTES expressao FECHA_PARENTES | chamada_funcao | var | numero'''
    # NOTE: the docstring above is the PLY grammar rule -- it must stay untouched.
    # Factor: a parenthesised expression, or a call / variable / number.
    node = Node('fator')
    if len(p) == 4:
        open_paren = Node('ABRE_PARENTES', children=[Node(p[1])])
        close_paren = Node('FECHA_PARENTES', children=[Node(p[3])])
        node.children = [open_paren, p[2], close_paren]
    elif len(p) == 2:
        node.children = [p[1]]
    p[0] = node
def _replace_white_products(node: Node) -> None:
    """Collapse products containing `white` kernels down to a single `white`.

    Inside a product, one `White` kernel absorbs every stationary factor, so
    only the first `white` child plus the non-stationary children
    (`Linear`/`Polynomial`) are kept::

        replace_white_products('white * white * rbf * linear') -> 'white * linear'

    Works inplace on provided node.

    Parameters
    ----------
    node: Node
        Node of the AST of a kernel that could contain `white` products.
    """
    if node.is_leaf:
        return
    if node.name is gpflow.kernels.Product:
        whites = [kid for kid in node.children if kid.name is gpflow.kernels.White]
        if whites:
            non_stationary = [
                kid for kid in node.children
                if kid.name in [gpflow.kernels.Linear, gpflow.kernels.Polynomial]
            ]
            survivors = [whites[0]] + non_stationary
            if len(survivors) == 1:
                only = survivors[0]
                if node.is_root:
                    # Whole tree reduces to a single kernel: rename the root.
                    node.name = only.name
                    node.full_name = only.full_name
                else:
                    # Splice the surviving kernel in place of this product.
                    only.parent = node.parent
                    node.parent = None
                node.children = []
            else:
                node.children = survivors
    for kid in node.children:
        _replace_white_products(kid)
def p_indice(p):
    '''indice : indice ABRE_COLCHETES expressao FECHA_COLCHETES | ABRE_COLCHETES expressao FECHA_COLCHETES'''
    # NOTE: the docstring above is the PLY grammar rule -- it must stay untouched.
    # Index: one or more bracketed expressions (left-recursive).
    node = Node('indice')
    if len(p) == 5:
        open_bracket = Node('ABRE_COLCHETES', children=[Node(p[2])])
        close_bracket = Node('FECHA_COLCHETES', children=[Node(p[4])])
        node.children = [p[1], open_bracket, p[3], close_bracket]
    elif len(p) == 4:
        open_bracket = Node('ABRE_COLCHETES', children=[Node(p[1])])
        close_bracket = Node('FECHA_COLCHETES', children=[Node(p[3])])
        node.children = [open_bracket, p[2], close_bracket]
    p[0] = node
def attach(node: Node, ratings: List[int]):
    """Attach as children of *node* every rating reachable within a step of 3.

    *ratings* is assumed sorted ascending.  Each leading rating whose
    difference from ``node.name`` is at most 3 becomes a child, and the
    remaining (strictly later) ratings are attached below it recursively.

    Bug fix: the original recursed with ``ratings[i:]``, which keeps the
    current rating in the slice; the recursive call then saw a difference of
    0 <= 3 and recursed forever (RecursionError).  Recurse with
    ``ratings[i + 1:]`` so each level only considers later ratings.

    :param node: parent Node; ``node.name`` holds its rating value.
    :param ratings: ascending list of candidate ratings.
    :return: None -- the tree is built in place via ``node.children``.
    """
    children = []
    for i, rating in enumerate(ratings):
        if rating - node.name > 3:
            break  # sorted input: everything after is also out of reach
        child = Node(rating)
        children.append(child)
        attach(child, ratings[i + 1:])
    node.children = children
def _merge_rbfs(node: Node) -> None:
    """Merge RBFs that are part of one product.

    In a product, all `RBF` factors are collapsed into a single one: the
    non-RBF children are kept and at most one `RBF` child is appended.
    Works inplace on provided node.

    Parameters
    ----------
    node: Node
        Node of the AST of a kernel that potentially contains non-merged RBFs.
    """
    if node.is_leaf:
        return
    if node.name is gpflow.kernels.Product:
        rbfs = [kid for kid in node.children if kid.name is gpflow.kernels.RBF]
        others = [kid for kid in node.children if kid.name is not gpflow.kernels.RBF]
        survivors = others + rbfs[:1]  # keep at most one RBF factor
        if len(survivors) == 1:
            only = survivors[0]
            if node.is_root:
                # Whole tree reduces to a single kernel: rename the root.
                node.name = only.name
                try:
                    node.full_name = only.full_name
                except AttributeError:
                    pass  # surviving kernel may not carry a full_name
            else:
                # Splice the surviving kernel in place of this product.
                only.parent = node.parent
                node.parent = None
            node.children = []
        else:
            node.children = survivors
    for kid in node.children:
        _merge_rbfs(kid)
def test_children_setter_large():
    """Children assignment on a larger tree; non-iterable child is rejected."""
    tree_root = Node("root")
    child_a = Node("sub0")
    child_a2 = Node("sub0B")
    child_a1 = Node("sub0A")
    child_b = Node("sub1")
    child_b1 = Node("sub1A")
    child_b2 = Node("sub1B")
    child_b3 = Node("sub1C")
    grandchild = Node("sub1Ca")
    tree_root.children = [child_a, child_b]
    eq_(tree_root.descendants, (child_a, child_b))
    child_a.children = [child_a1, child_a2]
    eq_(tree_root.descendants, (child_a, child_a1, child_a2, child_b))
    child_b.children = [child_b1, child_b2, child_b3]
    eq_(tree_root.descendants,
        (child_a, child_a1, child_a2, child_b, child_b1, child_b2, child_b3))
    with assert_raises(TypeError, "'Node' object is not iterable"):
        child_b.children = grandchild  # children must be an iterable of nodes
    # the tree must be unchanged after the TypeError
    eq_(tree_root.descendants,
        (child_a, child_a1, child_a2, child_b, child_b1, child_b2, child_b3))
def _distribute(node: Node) -> None:
    """Distribute sums and products until no further distribution possible.

    Works inplace on provided node.  When a `Product` node has a `Sum`
    child, this node is rewritten as a `Sum` whose children are new
    `Product` nodes: one per summand, each multiplying a deep copy of the
    remaining factors with that summand.

    This method will create a structure that might contain a `product_1` of
    a `product_2` and a `kernel`.  This is the same as a product of the
    kernels contained in `product_2` and the `kernel`.  `distribute` merges
    these in the end.

    Parameters
    ----------
    node: Node
        Node of the AST of a kernel that potentially contains distributable
        products or sums.
    """
    if node.is_leaf:
        return
    if node.name is gpflow.kernels.Product:
        # Search on own level (only `node`) and on children, first result will be distributed.
        sum_to_distribute = [
            child for child in node.children if child.name is gpflow.kernels.Sum
        ]
        if sum_to_distribute:
            sum_to_distr = sum_to_distribute[0]
            # All siblings of the chosen Sum become the factors to distribute.
            children_to_distribute_to = [
                child for child in node.children if child is not sum_to_distr
            ]
            # Rewrite this Product node into a Sum; its old children are dropped.
            node.name = gpflow.kernels.Sum
            node.full_name = 'Sum'
            node.children = []
            for child in sum_to_distr.children:
                # One new Product per summand, hung directly under the new Sum.
                new_prod = Node(gpflow.kernels.Product,
                                full_name='Product',
                                parent=node)
                # Deep-copy the factors so each summand gets its own subtree.
                new_kids = [
                    deepcopy(child) for child in children_to_distribute_to
                ]
                if child.name is gpflow.kernels.Product:
                    # Child to distribute to is a `Product`, doing nothing would lead to two nested products.
                    new_kids.extend(
                        [deepcopy(child) for child in child.children])
                else:
                    new_kids += [child]
                for kid in new_kids:
                    kid.parent = new_prod
    # Recurse into (possibly freshly rebuilt) children.
    for child in node.children:
        _distribute(child)
def p_se(p):
    '''se : SE expressao ENTAO corpo FIM | SE expressao ENTAO corpo SENAO corpo FIM'''
    # NOTE: the docstring above is the PLY grammar rule -- it must stay untouched.
    # If-statement: SE/ENTAO/FIM, with an optional SENAO (else) branch.
    node = Node('se')
    kw_se = Node('SE', children=[Node(p[1])])
    kw_entao = Node('ENTAO', children=[Node(p[3])])
    if len(p) == 8:
        kw_senao = Node('SENAO', children=[Node(p[5])])
        kw_fim = Node('FIM', children=[Node(p[7])])
        node.children = [kw_se, p[2], kw_entao, p[4], kw_senao, p[6], kw_fim]
    elif len(p) == 6:
        kw_fim = Node('FIM', children=[Node(p[5])])
        node.children = [kw_se, p[2], kw_entao, p[4], kw_fim]
    p[0] = node
def if_stmt():
    # if-stmt -> 'if' exp 'then' stmt-sequence [ 'else' stmt-sequence ] 'end'
    match("if")
    node = Node("if")
    condition = exp()
    match("then")
    then_part = stmt_sequence()
    node.children = [condition, then_part]
    if token == "else":
        match("else")
        else_part = stmt_sequence()
        else_part.parent = node  # appended as third child of the 'if' node
    match("end")
    return node
def p_parametro(p):
    '''parametro : tipo DOIS_PONTOS ID | parametro ABRE_COLCHETES FECHA_COLCHETES'''
    # NOTE: the docstring above is the PLY grammar rule -- it must stay untouched.
    # Fix: the original set `p[1].parent = parametro` and then reassigned
    # `parametro.children` including p[1], attaching the same node twice.
    # Assigning the children list once is enough and matches the sibling
    # p_* rules.  The two productions are told apart by the second token:
    # ':' means `tipo DOIS_PONTOS ID`, otherwise it is the bracket form.
    parametro = Node('parametro')
    if p[2] == ':':
        dois_pontos = Node('DOIS_PONTOS', children=[Node(p[2])])
        id_node = Node('ID', children=[Node(p[3])])
        parametro.children = [p[1], dois_pontos, id_node]
    else:
        abre = Node('ABRE_COLCHETES', children=[Node(p[2])])
        fecha = Node('FECHA_COLCHETES', children=[Node(p[3])])
        parametro.children = [p[1], abre, fecha]
    p[0] = parametro
def ra_to_irra(tree):
    """Rewrite a relational-algebra AST into IR form.

    Each ``Selection`` node's predicate is classified into join conditions
    (field-to-field equalities), WHERE predicates, and HAVING predicates;
    the Selection's children are then rebuilt as
    ``[Having?] + WHERE-predicates + Join_on``.  The input tree is deep-copied
    first, so the argument is left untouched.

    NOTE(review): semantics of ``flatten_cnf``, ``node_util.is_field``,
    ``wrap_and`` and ``promote`` are assumed from their names -- confirm
    against their definitions.

    :param tree: root Node of the relational-algebra AST.
    :return: root of the rewritten tree (unwrapped from a temporary Subquery).
    """
    flat_tree = flatten_cnf(copy.deepcopy(tree))
    for node in findall(flat_tree, filter_=lambda x: x.name == "Selection"):
        table_node = node.children[1]
        join_list = []
        where_list = []
        having_list = []
        if node.children[0].name == "And":
            # Conjunction: classify each conjunct independently.
            for predicate in node.children[0].children:
                if (all(
                        node_util.is_field(child)
                        for child in predicate.children)
                        and predicate.name == "eq"):
                    # field = field  ->  join condition
                    join_list.append(predicate)
                else:
                    if predicate.name == "Or" or all(
                            child.name in ["literal", "Subquery", "Value", "Or"]
                            for child in predicate.children):
                        where_list.append(predicate)
                    else:
                        # involves a non-literal child (e.g. aggregate) -> HAVING
                        having_list.append(predicate)
                # Detach the classified predicate from the old And node.
                predicate.parent = None
        else:
            # Single predicate: same classification, without iterating.
            if node.children[0].name == "eq" and all(
                    node_util.is_field(child)
                    for child in node.children[0].children):
                join_list = [node.children[0]]
            elif node.children[0].name == "Or":
                where_list = [node.children[0]]
            else:
                if all(child.name in ["literal", "Subquery", "Value", "Or"]
                       for child in node.children[0].children):
                    where_list = [node.children[0]]
                else:
                    having_list = [node.children[0]]
            node.children[0].parent = None
        # Rebuild the Selection's children: optional Having, WHERE list, Join_on.
        having_node = ([Node("Having", children=wrap_and(having_list))]
                       if having_list else [])
        join_on = Node("Join_on", children=join_list)
        if len(join_on.children) == 0:
            # No join predicates: Join_on just carries the table node.
            join_on.children = [table_node]
        node.children = having_node + wrap_and(where_list) + [join_on]
    # Temporarily wrap in a Subquery so `promote` can lift nodes, then unwrap.
    flat_tree = Node("Subquery", children=[flat_tree])
    promote(flat_tree)
    return flat_tree.children[0]
def download_blobs(blobs=None, dir=None, prefix=None, recursive=False):
    """Download an array of GCP blob objects.

    Keyword arguments:
    blobs -- GCP blob objects to download (default: none)
    dir -- string directory to download into (shadows the builtin `dir`;
           kept for backward compatibility with keyword callers)
    prefix -- URL prefix whose last path component marks the tree root
    recursive -- when True, always treat blobs as a tree even for one blob

    Fix: the original used a mutable default argument (`blobs=[]`), a classic
    Python pitfall, and an empty call crashed later on `re.sub(..., prefix)` /
    `blobs[0]`.  The default is now None and empty input returns early.
    """
    if not blobs:
        return None  # nothing to download
    if not recursive and len(blobs) == 1:
        return download_blob(blobs[0], dir)
    if len(blobs) > 1 and not os.path.isdir(dir):
        raise myutil.exceptions.CommandException(
            'Destination URL must name a directory, bucket, or bucket '
            'subdirectory for the multiple source form of the cp command.'
        )  # noqa: E128
    # First job is to shorten the tree so that the root node is where the
    # prefix ends on the URL. Ex: gs://foo/a/b should have root node 'b'.
    _fixed_prefix = re.sub('/$', '', prefix)
    bucket = blobs[0].bucket
    blob_prefix = None
    root_node = Node(dir.replace('/', ''))
    for pre, fill, node in RenderTree(tree_from_list(blobs)):
        if node.name == _fixed_prefix.rsplit('/', 1)[-1]:
            # Found the first node matching the last part of our URL.
            # Set it as the root child.
            blob_prefix = os.sep.join(
                [_node.name for _node in node.ancestors])  # preserve to use it on name rebuild
            root_node.children = [node]
            break
    # Walk the tree and rebuild filenames based on node path, cleaning up
    # data along the way.
    for pre, fill, node in RenderTree(root_node):
        if node.is_leaf:
            blob_name = re.sub(
                '^[/]*', '',
                blob_prefix + os.sep +
                os.sep.join([_node.name for _node in node.ancestors][1:]) +
                os.sep + node.name).replace('//', '/')  # noqa
            filename = os.sep.join(
                [_node.name for _node in node.ancestors]) + os.sep + node.name
            mkdir_p(os.path.dirname(filename))
            download_from_bucket(name=blob_name, bucket=bucket, filename=filename)