def get_structure_stats(node):
    """Return a human-readable multi-line summary of an SPN's structure.

    Reports node counts per type, parameter count (sum-node weights plus
    leaf parameters), edge count, and depth of the graph rooted at *node*.
    """
    all_nodes = get_nodes_by_type(node, Node)
    sums = get_nodes_by_type(node, Sum)
    leaves = get_nodes_by_type(node, Leaf)
    prod_count = len(get_nodes_by_type(node, Product))
    edge_count = get_number_of_edges(node)
    depth = get_depth(node)

    # One parameter per sum-node child (its weight) plus each leaf's own
    # parameter vector.
    param_count = sum(len(s.children) for s in sums)
    param_count += sum(len(leaf.parameters) for leaf in leaves)

    return """---Structure Statistics---
# nodes %s
# sum nodes %s
# prod nodes %s
# leaf nodes %s
# params %s
# edges %s
# layers %s""" % (
        len(all_nodes),
        len(sums),
        prod_count,
        len(leaves),
        param_count,
        edge_count,
        depth,
    )
def get_structure_stats_dict(node):
    """Return structure statistics of the SPN rooted at *node* as a dict.

    The dict contains ``'nodes'`` (total node count), ``'edges'``,
    ``'layers'``, plus one entry per concrete node class mapping the class
    to its occurrence count.
    """
    node_types = dict(Counter(type(n) for n in get_nodes_by_type(node)))
    num_nodes = len(get_nodes_by_type(node, Node))
    edges = get_number_of_edges(node)
    layers = get_number_of_layers(node)
    # BUG FIX: the previous version returned `{...}.update(node_types)`;
    # dict.update() returns None, so the function always returned None.
    stats = {'nodes': num_nodes, 'edges': edges, 'layers': layers}
    stats.update(node_types)
    return stats
def run_experiment(exp, spn, test_data, test_type, exp_lambda):
    """Run one timing/likelihood experiment and append its results to results.csv.

    Args:
        exp: experiment name; used to locate the per-experiment output folder.
        spn: the SPN whose structure statistics are recorded.
        test_data: 2-D array of test instances (rows = instances).
        test_type: label describing the evaluation backend/variant.
        exp_lambda: zero-argument callable returning ``(ll, test_time)`` where
            ``ll`` is the per-instance log-likelihood array and ``test_time``
            the wall-clock time for the whole task.

    Side effects: writes the log-likelihoods to a per-experiment text file
    (skipping the run entirely if that file already exists) and appends a
    semicolon-separated row to ``results.csv``, writing a header first when
    the CSV does not exist yet.
    """
    outprefix = path + "/spns/%s/" % (exp)
    ll_file = "%stime_test_%s_ll_%s.txt" % (outprefix, test_type, OS_name)
    # Idempotence guard: a previous run already produced this output.
    if os.path.isfile(ll_file):
        return
    print(exp, test_data.shape, test_type)

    ll, test_time = exp_lambda()
    np.savetxt(ll_file, ll, delimiter=";")

    # Third-party dependency imported lazily so the module loads without it.
    import cpuinfo

    machine = cpuinfo.get_cpu_info()["brand"]
    adds, muls = fpga_count_ops(spn)
    test_n = test_data.shape[0]

    results = OrderedDict()
    results["Experiment"] = exp
    results["OS"] = OS_name
    results["machine"] = machine
    results["test type"] = test_type
    results["expected adds"] = adds
    results["expected muls"] = muls
    results["input rows"] = test_n
    results["input cols"] = test_data.shape[1]
    results["spn nodes"] = len(get_nodes_by_type(spn, Node))
    results["spn sum nodes"] = len(get_nodes_by_type(spn, Sum))
    results["spn prod nodes"] = len(get_nodes_by_type(spn, Product))
    results["spn leaves"] = len(get_nodes_by_type(spn, Leaf))
    results["spn edges"] = get_number_of_edges(spn)
    results["spn layers"] = get_number_of_layers(spn)
    results["time per task"] = test_time
    results["time per instance"] = test_time / test_n
    # NOTE(review): np.float128 is unavailable on some platforms (e.g.
    # Windows) — confirm this only runs where it exists.
    results["avg ll"] = np.mean(ll, dtype=np.float128)

    # FIX: the original shadowed the results-path string with the open file
    # handle and never closed it on error; use a context manager and a
    # distinct name instead.
    results_file_name = "results.csv"
    write_header = not os.path.isfile(results_file_name)
    with open(results_file_name, "a") as csv_out:
        if write_header:
            csv_out.write(";".join(results.keys()))
            csv_out.write("\n")
        csv_out.write(";".join(map(str, results.values())))
        csv_out.write("\n")
def test_binary_serialization_roundtrip(tmpdir):
    """Round-trip a small SPN through binary (de)serialization and verify
    that the query metadata and the graph structure survive unchanged."""
    # Build a tiny two-product mixture over histogram leaves.
    h1 = Histogram([0., 1., 2.], [0.25, 0.75], [1, 1], scope=1)
    h2 = Histogram([0., 1., 2.], [0.45, 0.55], [1, 1], scope=2)
    h3 = Histogram([0., 1., 2.], [0.33, 0.67], [1, 1], scope=1)
    h4 = Histogram([0., 1., 2.], [0.875, 0.125], [1, 1], scope=2)
    p0 = Product(children=[h1, h2])
    p1 = Product(children=[h3, h4])
    spn = Sum([0.3, 0.7], [p0, p1])

    model = SPNModel(spn, featureValueType="uint32")
    query = JointProbability(model)

    binary_file = os.path.join(tmpdir, "test.bin")
    print(f"Test binary file: {binary_file}")

    BinarySerializer(binary_file).serialize_to_file(query)
    restored = BinaryDeserializer(binary_file).deserialize_from_file()

    # Query-level metadata must round-trip exactly.
    assert isinstance(restored, JointProbability)
    assert restored.batchSize == query.batchSize
    assert restored.errorModel.error == query.errorModel.error
    assert restored.errorModel.kind == query.errorModel.kind
    assert restored.graph.featureType == model.featureType
    assert restored.graph.name == model.name

    # Structural comparison between the original and the restored graph.
    restored = restored.graph.root
    for node_kind in (None, Sum, Product, Histogram):
        if node_kind is None:
            assert get_number_of_nodes(spn) == get_number_of_nodes(restored)
        else:
            assert get_number_of_nodes(spn, node_kind) == \
                get_number_of_nodes(restored, node_kind)
    assert get_number_of_edges(spn) == get_number_of_edges(restored)
def get_structure_stats_dict(node):
    """Summarize the structure of the SPN rooted at *node*.

    Returns a dict with the total node count, edge count, depth, and a
    per-class occurrence count under ``"count_per_type"``.
    """
    per_type = dict(Counter(type(n) for n in get_nodes_by_type(node)))
    return {
        "nodes": len(get_nodes_by_type(node, Node)),
        "edges": get_number_of_edges(node),
        "layers": get_depth(node),
        "count_per_type": per_type,
    }
def get_structure_stats(node):
    """Return a human-readable multi-line report of the SPN's structure:
    node counts per type, number of edges, and number of layers."""
    counts = tuple(
        len(get_nodes_by_type(node, kind))
        for kind in (Node, Sum, Product, Leaf)
    )
    template = """---Structure Statistics---
# nodes %s
# sum nodes %s
# prod nodes %s
# leaf nodes %s
# edges %s
# layers %s"""
    return template % (counts + (get_number_of_edges(node),
                                 get_number_of_layers(node)))
def get_structure_stats_dict(node):
    """Return per-type node counts plus edge/layer counts for the SPN
    rooted at *node*.

    ``'sum'`` counts only exact ``Sum`` instances (subclasses excluded on
    purpose), while the other counts include subclasses.
    """
    # Exact-type filter: keep plain Sum nodes, drop Sum subclasses.
    strict_sums = [s for s in get_nodes_by_type(node, Sum) if type(s) == Sum]
    return {
        'nodes': len(get_nodes_by_type(node, Node)),
        'edges': get_number_of_edges(node),
        'layers': get_number_of_layers(node),
        'sum': len(strict_sums),
        'prod': len(get_nodes_by_type(node, Product)),
        'leaf': len(get_nodes_by_type(node, Leaf)),
        'param-leaf': len(get_nodes_by_type(node, Parametric)),
        'type-leaf': len(get_nodes_by_type(node, TypeLeaf)),
    }
def get_structure_stats_dict(node):
    """Summarize the SPN rooted at *node* as a dict: node/edge/layer counts,
    total parameter count, and a per-class occurrence map."""
    members = get_nodes_by_type(node, Node)

    # Parameters: one weight per sum-node child plus each leaf's own
    # parameter vector.
    n_params = 0
    for member in members:
        if isinstance(member, Sum):
            n_params += len(member.children)
        if isinstance(member, Leaf):
            n_params += len(member.parameters)

    return {
        "nodes": len(members),
        "params": n_params,
        "edges": get_number_of_edges(node),
        "layers": get_depth(node),
        "count_per_type": dict(Counter(type(m) for m in members)),
    }
def get_num_edges(spn):
    """Convenience alias for :func:`get_number_of_edges`."""
    return get_number_of_edges(spn)
IdentityNumeric : identity_expectation} evidence = np.array([[None, None, None], [NominalRange([0]), None, None], [NominalRange([0]), NominalRange([1]), None]]) expect = Expectation(root_node, feature_scope=set([2]), evidence_scope=set([0,1]), evidence=evidence, node_expectation=node_expectation_support, node_likelihood=inference_support_ranges) print("Expectations:") print(expect) print() #Marginalize from spn.algorithms import Marginalization marg_spn = Marginalization.marginalize(root_node, keep=set([2])) plot_spn(marg_spn, "marg_spn.pdf") #Statistics from spn.structure.Base import get_number_of_edges, Node num_nodes = len(get_nodes_by_type(root_node, Node)) num_leafs = len(get_nodes_by_type(root_node, Leaf)) num_sums = len(get_nodes_by_type(root_node, Sum)) num_products = len(get_nodes_by_type(root_node, Product)) edges = get_number_of_edges(root_node) print(num_nodes) print(num_leafs) print(num_sums) print(num_products) print(edges)