print("Configuration Data:")
configs_df = configs_as_dataframe(configs)
print(tabulate(configs_df, headers='keys', tablefmt='psql'))
print("Tensor Field:")
print(tabulate(tensor_fields[0], headers='keys', tablefmt='psql'))
print("Output:")

# RDD:
print()
print("RDD:")
result: list = raw_result.take(5)
pprint(result[:2])
# to get all results, execute the following:
# result: list = raw_result.collect()
print()

print("Spark DataFrame:")
sdf: DataFrame = to_spark_df(raw_result, spark, policy_aggregation.genesis_states)
# sdf: DataFrame = to_spark_df(raw_result, spark)
sdf.show(5)
print()

# Pandas:
print()
print("Pandas DataFrame:")
pdf: pd.DataFrame = to_pandas_df(raw_result, policy_aggregation.genesis_states)
# pdf: pd.DataFrame = to_pandas_df(raw_result)
print(tabulate(pdf.head(), headers='keys', tablefmt='psql'))
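
This snippet (and the examples that follow) relies on a few imports and a live Spark session that are not shown. Below is a minimal setup sketch, assuming PySpark is installed; the cadCAD-specific helpers (configs_as_dataframe, to_spark_df, to_pandas_df) and objects (configs, raw_result, tensor_fields, policy_aggregation, sweep_config) come from the surrounding example project and are not reproduced here.

from pprint import pprint

import pandas as pd
from tabulate import tabulate
from pyspark.sql import DataFrame, SparkSession

# A local Spark session is one way to obtain the `spark` handle used in these snippets
spark = SparkSession.builder.master("local[*]").appName("cadCAD-example").getOrCreate()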

Example #2
# print()

print("Configuration Data:")
configs_df = configs_as_dataframe(configs)
print(tabulate(configs_df, headers='keys', tablefmt='psql'))
print("Tensor Field:")
print(tabulate(tensor_fields[0], headers='keys', tablefmt='psql'))
print("Output:")

# RDD:
print()
print("RDD:")
result: list = raw_result.take(5)
pprint(result[:2])
# to get all results, execute the following:
# result: list = raw_result.collect()
print()

print("Spark DataFrame:")
sdf: DataFrame = to_spark_df(raw_result)
# sdf: DataFrame = to_spark_df(raw_result, spark)
sdf.show(5)
print()

# Pandas:
print()
print("Pandas DataFrame:")
# pdf: pd.DataFrame = to_pandas_df(raw_result, config1.genesis_states)
pdf: pd.DataFrame = to_pandas_df(raw_result)
print(tabulate(pdf.head(), headers='keys', tablefmt='psql'))
Example #3
pprint(sessions)

print("Configuration Data:")
configs_df = configs_as_dataframe(configs)
print(tabulate(configs_df, headers='keys', tablefmt='psql'))
print("Tensor Field:")
print(tabulate(tensor_fields[0], headers='keys', tablefmt='psql'))
print("Output:")

# RDD:
print()
print("RDD:")
result: list = raw_result.take(5)
pprint(result[:2])
# to get all results, execute the following:
# result: list = raw_result.collect()
print()

print("Spark DataFrame:")
sdf: DataFrame = to_spark_df(raw_result, spark, sweep_config.genesis_states)
# sdf: DataFrame = to_spark_df(raw_result, spark)
sdf.show(5)
print()

# Pandas:
print()
print("Pandas DataFrame:")
pdf: pd.DataFrame = to_pandas_df(raw_result, sweep_config.genesis_states)
# pdf: pd.DataFrame = to_pandas_df(raw_result)
print(tabulate(pdf.head(), headers='keys', tablefmt='psql'))
Example #4
def create_test_params(feature, fields):
    # Run the simulation and collect the raw results
    raw_result, tensor_fields, sessions = run.execute()
    df: pd.DataFrame = to_pandas_df(raw_result)
    # Expected output derived from the sweep parameters (run, sweep_params and row
    # come from the enclosing scope)
    expected = generate_expected(sweep_params)
    # One parameter set: feature name, actual vs. expected data, fields and row under test
    return [[feature, df, expected, fields, [row]]]
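
For context, one way the parameter sets returned above could be consumed is a simple loop that compares the actual and expected frames. This is only a sketch: it assumes generate_expected returns a pandas DataFrame, and the names run_checks and params are hypothetical, not part of the original example.

import pandas as pd
from pandas.testing import assert_frame_equal

def run_checks(params):
    # `params` is a list of [feature, df, expected, fields, [row]] entries,
    # matching the structure returned by create_test_params above.
    for feature, df, expected, fields, rows in params:
        # Assumes `expected` is a pandas DataFrame whose columns are a subset of df's
        assert_frame_equal(df[list(expected.columns)].reset_index(drop=True),
                           expected.reset_index(drop=True))
        print(f"{feature}: OK for fields {fields} and rows {rows}")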