def main():
    """Run a feature-selection search for the 'is_trade' target.

    Configures an FS.Select searcher (random perturbation only: sequential
    sweep and cross-term generation disabled), loads the prepared DataFrame,
    and launches the search with the project's validation routine.
    Results are appended to 'record.log'.
    """
    # Select the way you want to process searching:
    # Sequence/Random/Cross toggle the three search strategies.
    sf = FS.Select(Sequence=False, Random=True, Cross=False)
    sf.ImportDF(prepareData(), label='is_trade')
    # direction='descend' is the search direction for the modelscore metric
    # (presumably lower-is-better here — confirm against modelscore's definition).
    sf.ImportLossFunction(modelscore, direction='descend')
    sf.ImportCrossMethod(CrossMethod)
    # Columns that must never be fed to the model (ids, raw fields, the label).
    sf.NonTrainableFeatures = [
        'used', 'instance_id', 'item_property_list', 'context_id',
        'context_timestamp', 'predict_category_property', 'is_trade',
    ]
    # Seed feature set the search starts from.
    sf.InitialFeatures([
        'item_category_list', 'item_price_level', 'item_sales_level',
        'item_collected_level', 'item_pv_level', 'day',
    ])
    sf.clf = lgbm.LGBMClassifier(
        random_state=1, num_leaves=6, n_estimators=5000,
        max_depth=3, learning_rate=0.05, n_jobs=8,
    )
    sf.logfile = 'record.log'
    sf.run(validation)
def main():
    """Run a feature-selection search for the 'Survived' target (Titanic-style data).

    Enables all three search strategies (sequential sweep, random
    perturbation, cross-term generation), starts from an empty feature set
    with 'Pclass' as a candidate to add, and scores with a plain
    LogisticRegression. Results are appended to 'record.log'.
    """
    sf = FS.Select(Sequence=True, Random=True, Cross=True)
    sf.ImportDF(prepareData(), label='Survived')
    # direction='ascend' is the search direction for the modelscore metric
    # (presumably higher-is-better here — confirm against modelscore's definition).
    sf.ImportLossFunction(modelscore, direction='ascend')
    sf.ImportCrossMethod(CrossMethod)
    # Only the label itself is excluded from training.
    sf.NonTrainableFeatures = ['Survived']
    # Start from an empty feature set; the search builds it up.
    sf.InitialFeatures([])
    sf.PotentialAdd = ['Pclass']
    # Alternative model kept for reference:
    # sf.clf = lgbm.LGBMClassifier(random_state=1, num_leaves=6, n_estimators=5000,
    #                              max_depth=3, learning_rate=0.05, n_jobs=1)
    sf.clf = LogisticRegression()
    sf.logfile = 'record.log'
    sf.run(validation)
def main():
    """Run a feature-selection search for the 'is_trade' target with a large
    pool of candidate features.

    Uses sequential sweep plus random perturbation (no cross-term
    generation), starting from a hand-picked initial feature set and a
    pre-selected list of candidates to try adding. Results are appended
    to 'record.log'.
    """
    # Candidate features the search may try adding on top of the initial set.
    PotentialAdd = [
        'min_query_time_gap_after', 'hour', 'shop_score_delivery',
        'min_query_time_gap_before_user_item', 'shop_id_smooth_query_rate',
        'min_query_time_gap_before', 'shop_score_description',
        'item_sales_level', 'shop_query_count', 'user_star_level',
        'user_age_level', 'item_sales_query_rate', 'item_query_count',
        'shop_score_service', 'shop_review_positive_rate',
        'item_price_level', 'min_query_time_gap_after_user_item',
    ]
    # Select the way you want to process searching.
    sf = FS.Select(Sequence=True, Random=True, Cross=False,
                   PotentialAdd=PotentialAdd)
    sf.ImportDF(prepareData(), label='is_trade')
    # direction='descend' is the search direction for the modelscore metric
    # (presumably lower-is-better here — confirm against modelscore's definition).
    sf.ImportLossFunction(modelscore, direction='descend')
    sf.ImportCrossMethod(CrossMethod)
    # Identifier/raw columns and the label: excluded from training.
    sf.NonTrainableFeatures = [
        'instance_id', 'item_id', 'item_brand_id', 'item_city_id',
        'user_id', 'context_id', 'shop_id', 'item_category_0', 'time',
        'context_timestamp', 'item_property_list',
        'predict_category_property', 'item_category_list', 'is_trade',
        'day',
    ]
    # Hand-picked seed feature set the search starts from.
    sf.InitialFeatures([
        'item_price_level', 'item_sales_level', 'item_collected_level',
        'min_query_time_gap_after', 'min_query_time_gap_before_user_item',
        'min_query_time_gap_after_user_item', 'hour', 'item_category_1',
        'shop_score_service', 'user_age_level', 'user_star_level',
        'context_page_id', 'min_query_time_gap_before', 'shop_query_count',
        'item_sales_count',
    ])
    sf.clf = lgbm.LGBMClassifier(
        random_state=1, num_leaves=6, n_estimators=5000,
        max_depth=3, learning_rate=0.05, n_jobs=8,
    )
    sf.logfile = 'record.log'
    sf.run(validation)