import random

from sklearn.metrics import log_loss
from sklearn.utils import shuffle

# XGBoostClassifier is assumed to be the scikit-learn-style wrapper around
# xgboost defined elsewhere in this repository.


def trainxgb(model_id, train_x, train_y, valid_x, valid_y, test_x):
    # NOTE: test_x is not used inside this function.
    # Shuffle the training data and draw a fresh random seed for this run.
    train_x, train_y = shuffle(train_x, train_y)

    random_state = random.randint(0, 1000000)
    print('random state: {state}'.format(state=random_state))

    # Hyperparameters are sampled at random on each call, so repeated runs
    # amount to a simple randomized search.
    xgboost = XGBoostClassifier(silent=0,
                                objective='multi:softprob',
                                eval_metric='mlogloss',
                                num_class=9,
                                nthread=4,
                                seed=random_state,
                                eta=random.uniform(0.01, 0.1),
                                max_depth=random.randint(10, 20),
                                max_delta_step=random.randint(1, 10),
                                min_child_weight=random.randint(1, 10),
                                subsample=random.uniform(0.0, 1.0),
                                gamma=random.uniform(0.01, 0.1),
                                colsample_bytree=random.uniform(0.0, 1.0),
                                early_stopping_rounds=30,
                                num_round=1000)
      
    xgboost.fit(train_x, train_y)

    # Score the fitted model on the validation set and write a short report.
    valid_predictions = xgboost.predict_proba(valid_x)
    score = log_loss(valid_y, valid_predictions)
    print(score)

    with open("report_xgb_" + str(model_id) + ".txt", 'w') as report:
        print("score:", file=report)
        print(score, file=report)
        print("model_id:", file=report)
        print(model_id, file=report)
        print(xgboost, file=report)
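
# A minimal usage sketch (not part of the original script): assuming train_x,
# train_y, valid_x, valid_y, and test_x are already loaded and split, repeated
# calls run a small randomized hyperparameter search, one report file per
# model_id:
#
#     for model_id in range(10):
#         trainxgb(model_id, train_x, train_y, valid_x, valid_y, test_x)
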
### building the classifiers
clfs = []

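# Fixed hyperparameters, presumably taken from a randomized-search run such as
# trainxgb() above; `l` and `lambda_bias` appear to be the wrapper's aliases
# for xgboost's L2 regularization terms (lambda is a reserved word in Python).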
xgb1 = XGBoostClassifier(
    alpha=0,
    booster="gbtree",
    colsample_bytree=0.391995799463,
    early_stopping_rounds=30,
    eta=0.0238249854939,
    eval_metric="mlogloss",
    gamma=0.0339065215885,
    l=0,
    lambda_bias=0,
    max_delta_step=3,
    max_depth=19,
    min_child_weight=8,
    nthread=4,
    ntree_limit=0,
    num_class=9,
    num_round=2000,
    objective="multi:softprob",
    seed=463324,
    silent=1,
    subsample=0.732463140484,
    use_buffer=True,
)

xgb1.fit(train_x, train_y)
print("xgb1 LogLoss {score}".format(score=log_loss(test_y, xgb1.predict_proba(test_x))))
clfs.append(xgb1)
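
# Sketch (an assumption, not part of the original script): the classifiers
# collected in `clfs` could later be blended by averaging their predicted
# class probabilities, e.g.:
#
#     import numpy as np
#     blend = np.mean([clf.predict_proba(test_x) for clf in clfs], axis=0)
#     print("blend LogLoss {score}".format(score=log_loss(test_y, blend)))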