# NOTE(review): this line is a whitespace-mangled paste of a script's
# `__main__` section and is truncated mid-`train(...)` call (it ends at
# `batch_size=2,`); the remaining keyword arguments presumably continue in a
# part of the file not shown here — confirm before reflowing. Kept byte-identical.
#
# What the visible code does: reads MaxCompute credentials/endpoint/project
# from SQLFLOW_TEST_DB_MAXCOMPUTE_* environment variables into an OdpsConf;
# declares four SparseColumn features (deep_id, user_space_stat,
# user_behavior_stat, space_stat) and a single int DenseColumn label "l"
# with "," separator; creates a "scratch" working directory (ignoring
# FileExistsError so reruns succeed); then launches train() with a
# SQLFlowEstimatorBuilder against the
# gomaxcompute_driver_w7u.sparse_column_test table for both training and
# evaluation, with empty feature-map table/partition, 1 epoch, batch size 2.
# NOTE(review): OdpsConf, SparseColumn, DenseColumn, train and
# SQLFlowEstimatorBuilder are not defined in this chunk — presumably imported
# earlier in the file; verify against the full source.
if __name__ == "__main__": odps_project = os.getenv("SQLFLOW_TEST_DB_MAXCOMPUTE_PROJECT") odps_conf = OdpsConf( accessid=os.getenv("SQLFLOW_TEST_DB_MAXCOMPUTE_AK"), accesskey=os.getenv("SQLFLOW_TEST_DB_MAXCOMPUTE_SK"), # endpoint should looks like: "https://service.cn.maxcompute.aliyun.com/api" endpoint=os.getenv("SQLFLOW_TEST_DB_MAXCOMPUTE_ENDPOINT"), project=odps_project) features = [ SparseColumn(name="deep_id", shape=[15033], dtype="int"), SparseColumn(name="user_space_stat", shape=[310], dtype="int"), SparseColumn(name="user_behavior_stat", shape=[511], dtype="int"), SparseColumn(name="space_stat", shape=[418], dtype="int") ] labels = DenseColumn(name="l", shape=[1], dtype="int", separator=",") try: os.mkdir("scratch") except FileExistsError: pass train(SQLFlowEstimatorBuilder(), odps_conf=odps_conf, project=odps_project, train_table="gomaxcompute_driver_w7u.sparse_column_test", eval_table="gomaxcompute_driver_w7u.sparse_column_test", features=features, labels=labels, feature_map_table="", feature_map_partition="", epochs=1, batch_size=2,
# NOTE(review): like the section above, this line is a whitespace-mangled
# paste of a second script's top-level body (an iris-dataset variant of the
# same test driver) and is truncated mid-`train(...)` call (it ends at
# `feature_map_table="",`); the call's remaining arguments presumably
# continue outside this chunk — confirm before reflowing. Kept byte-identical.
#
# What the visible code does: builds the same env-var-driven OdpsConf, then
# constructs one float32 DenseColumn feature per iris column (sepal_length,
# sepal_width, petal_length, petal_width) in a loop — the inline NOTE shows
# how a SparseColumn would be added instead; the label is the int "class"
# column with "," separator. It creates the "scratch" directory (ignoring
# FileExistsError) and starts train() against
# "<project>.sqlflow_test_iris_train" / "<project>.sqlflow_test_iris_test"
# tables derived from the SQLFLOW_TEST_DB_MAXCOMPUTE_PROJECT env var.
# NOTE(review): no `if __name__ == "__main__":` guard is visible at the start
# of this line — it may be on an earlier line outside this chunk; verify.
odps_project = os.getenv("SQLFLOW_TEST_DB_MAXCOMPUTE_PROJECT") odps_conf = OdpsConf( accessid=os.getenv("SQLFLOW_TEST_DB_MAXCOMPUTE_AK"), accesskey=os.getenv("SQLFLOW_TEST_DB_MAXCOMPUTE_SK"), # endpoint should looks like: # "https://service.cn.maxcompute.aliyun.com/api" endpoint=os.getenv("SQLFLOW_TEST_DB_MAXCOMPUTE_ENDPOINT"), project=odps_project) features = [] for col_name in [ "sepal_length", "sepal_width", "petal_length", "petal_width" ]: # NOTE: add sparse columns like: # SparseColumn(name="deep_id", shape=[15033], dtype="int") features.append(DenseColumn(name=col_name, shape=[1], dtype="float32")) labels = DenseColumn(name="class", shape=[1], dtype="int", separator=",") try: os.mkdir("scratch") except FileExistsError: pass train(SQLFlowEstimatorBuilder(), odps_conf=odps_conf, project=odps_project, train_table="%s.sqlflow_test_iris_train" % odps_project, eval_table="%s.sqlflow_test_iris_test" % odps_project, features=features, labels=labels, feature_map_table="",