def test_graphframe_from_caliper(lulesh_caliper_json):
    """Sanity test a GraphFrame object with known data.

    Renamed from ``test_graphframe``: a later test in this file defines the
    same name, which shadowed this one so it was never collected or run
    (flake8 F811).
    """
    # Use the classmethod constructor, consistent with every other test in
    # this file; the original two-step GraphFrame()/gf.from_caliper(...)
    # instance-style call does not match the API the rest of the suite uses.
    gf = GraphFrame.from_caliper(str(lulesh_caliper_json))

    # The lulesh dataset contains exactly 24 distinct region names.
    assert len(gf.dataframe.groupby("name")) == 24
def test_filter_squash_unify_caliper_data(lulesh_caliper_json):
    """Sanity test a GraphFrame object with known data."""
    path = str(lulesh_caliper_json)
    gf1 = GraphFrame.from_caliper(path)
    gf2 = GraphFrame.from_caliper(path)

    # Two independent reads must produce distinct graph objects.
    assert gf1.graph is not gf2.graph

    idx_names1 = gf1.dataframe.index.names
    idx_names2 = gf2.dataframe.index.names
    gf1.dataframe.reset_index(inplace=True)
    gf2.dataframe.reset_index(inplace=True)

    # indexes are the same since we are reading in the same dataset
    assert (gf1.dataframe["node"] == gf2.dataframe["node"]).all()

    gf1.dataframe.set_index(idx_names1, inplace=True)
    gf2.dataframe.set_index(idx_names2, inplace=True)

    filtered1 = gf1.filter(lambda row: row["name"].startswith("Calc"))
    filtered2 = gf2.filter(lambda row: row["name"].startswith("Calc"))
    filtered1.unify(filtered2)

    # After unify() the two frames share a single graph object.
    assert filtered1.graph is filtered2.graph

    filtered1.dataframe.reset_index(inplace=True)
    filtered2.dataframe.reset_index(inplace=True)

    # Indexes should still be the same after unify. Sort indexes before comparing.
    assert (filtered1.dataframe["node"] == filtered2.dataframe["node"]).all()

    filtered1.dataframe.set_index(idx_names1, inplace=True)
    filtered2.dataframe.set_index(idx_names2, inplace=True)
def test_graphframe_to_literal(lulesh_caliper_json):
    """Sanity test a GraphFrame object with known data."""
    original = GraphFrame.from_caliper(str(lulesh_caliper_json))

    # Round-trip through the literal (nested dict/list) representation.
    literal = original.to_literal()
    rebuilt = GraphFrame.from_literal(literal)

    # The round trip must preserve the number of graph nodes.
    assert len(original.graph) == len(rebuilt.graph)
def test_lulesh_cali(lulesh_caliper_cali):
    """Sanity check the Caliper reader ingesting a .cali file."""
    group_by = "function"
    metric = "sum(sum#time.duration),inclusive_sum(sum#time.duration)"

    # Build the cali-query expression that the Caliper reader will run.
    query = (
        "select function," + metric + " group by " + group_by + " format json-split"
    )

    gf = GraphFrame.from_caliper(str(lulesh_caliper_cali), query)

    # The queried lulesh profile exposes exactly 18 distinct function names.
    assert len(gf.dataframe.groupby("name")) == 18
def test_graphframe(lulesh_caliper_json):
    """Sanity test a GraphFrame object with known data."""
    gf = GraphFrame.from_caliper(str(lulesh_caliper_json))

    # The lulesh dataset contains exactly 24 distinct region names.
    assert len(gf.dataframe.groupby("name")) == 24

    # Verify the expected dtype of each known column. NOTE: `np.object` was
    # deprecated in NumPy 1.20 and removed in 1.24; the builtin `object` is
    # the documented replacement and compares equally against object dtypes.
    for col in gf.dataframe.columns:
        if col in ("time (inc)", "time"):
            assert gf.dataframe[col].dtype == np.float64
        elif col in ("nid", "rank"):
            assert gf.dataframe[col].dtype == np.int64
        elif col in ("name", "node"):
            assert gf.dataframe[col].dtype == object
def test_inclusive_time_calculation(lulesh_caliper_json):
    """Validate update_inclusive_columns() on known dataset containing per-rank data."""
    gf = GraphFrame.from_caliper(str(lulesh_caliper_json))

    # Stash the reader-provided inclusive times, then drop the column so
    # update_inclusive_columns() must regenerate it from scratch.
    gf.dataframe["orig_inc_time"] = gf.dataframe["time (inc)"]
    del gf.dataframe["time (inc)"]

    gf.update_inclusive_columns()

    # The recomputed inclusive times must match what the reader produced.
    assert (
        gf.dataframe["time (inc)"].values == gf.dataframe["orig_inc_time"].values
    ).all()
def test_lulesh_json_stream(lulesh_caliper_cali):
    """Sanity check the Caliper reader ingesting a JSON string literal."""
    cali_query = which("cali-query")
    grouping_attribute = "function"
    default_metric = "sum(sum#time.duration),inclusive_sum(sum#time.duration)"
    query = "select function,%s group by %s format json-split" % (
        default_metric,
        grouping_attribute,
    )

    # Use Popen as a context manager so the stdout pipe is closed and the
    # child process is waited on (no zombie / leaked file descriptor), even
    # if the reader raises. The original never called wait() or closed the
    # pipe.
    with subprocess.Popen(
        [cali_query, "-q", query, lulesh_caliper_cali], stdout=subprocess.PIPE
    ) as cali_json:
        gf = GraphFrame.from_caliper(cali_json.stdout)

    assert len(gf.dataframe.groupby("name")) == 18
def test_tree(lulesh_caliper_json):
    """Sanity test a GraphFrame object with known data."""
    gf = GraphFrame.from_caliper(str(lulesh_caliper_json))

    def render(metric):
        # Render the full tree for one metric with fixed formatting options;
        # only the metric column differs between the two checks below.
        return ConsoleRenderer(unicode=True, color=False).render(
            gf.graph.roots,
            gf.dataframe,
            metric_column=metric,
            precision=3,
            name_column="name",
            expand_name=False,
            context_column="file",
            rank=0,
            thread=0,
            depth=10000,
            highlight_name=False,
            colormap="RdYlGn",
            invert_colormap=False,
        )

    # Exclusive metric: spot-check a few known node values.
    exclusive_output = render("time")
    assert "121489.000 main" in exclusive_output
    assert "663.000 LagrangeElements" in exclusive_output
    assert "21493.000 CalcTimeConstraintsForElems" in exclusive_output

    # Inclusive metric: spot-check a few known node values.
    inclusive_output = render("time (inc)")
    assert "662712.000 EvalEOSForElems" in inclusive_output
    assert "2895319.000 LagrangeNodal" in inclusive_output
def test_calc_pi_json(calc_pi_caliper_json):
    """Sanity test a GraphFrame object with known data."""
    gf = GraphFrame.from_caliper(str(calc_pi_caliper_json))

    # The calc-pi dataset exposes exactly 100 distinct region names.
    name_groups = gf.dataframe.groupby("name")
    assert len(name_groups) == 100