def inference(x, T, A):
    '''
        Given an adaboost ensemble of decision trees and one data instance, infer the label of the instance.
        Input:
            x: the attribute vector of a data instance, a numpy vector of shape p.
               Each attribute value can be int/float.
            T: the root nodes of decision stumps, a list of length n_tree.
            A: the weights of the decision stumps, a numpy float vector of length n_tree.
        Output:
            y: the class label, a scalar of int/float/string.
    '''
    #########################################
    ## INSERT YOUR CODE HERE
    # Collect every stump's prediction, then return the label with the
    # largest total stump weight (weighted majority vote).
    y_list = []
    b = 0
    for t in T:
        y_list.append(DT.inference(t, x))
    for label in set(y_list):
        a = 0
        for j in range(len(A)):
            if y_list[j] == label:
                a += A[j]
        if a > b:
            b = a
            y = label
    #########################################
    return y
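# The same weighted vote can be written more compactly with collections.Counter.
# This is only an illustrative stand-alone sketch; the helper name weighted_vote
# is an assumption and is not part of the assignment's API.
from collections import Counter

def weighted_vote(labels, weights):
    # Sum the stump weights for each predicted label and return the label
    # with the largest accumulated weight.
    totals = Counter()
    for label, weight in zip(labels, weights):
        totals[label] += weight
    return totals.most_common(1)[0][0]

# Example: stumps vote 'a', 'b', 'a' with weights 0.2, 0.9, 0.3.
# 'a' accumulates 0.5 and 'b' accumulates 0.9, so the result is 'b'.
# weighted_vote(['a', 'b', 'a'], [0.2, 0.9, 0.3])  # -> 'b'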
def inference(T, x):
    '''
        Given a bagging ensemble of decision trees and one data instance, infer the label of the instance.
        Input:
            T: a list of decision trees.
            x: the attribute vector, a numpy vector of shape p. Each attribute value can be int/float.
        Output:
            y: the class label, a scalar of int/float/string.
    '''
    #########################################
    ## INSERT YOUR CODE HERE
    # Collect each tree's prediction, count the votes, and return the
    # label with the most votes (majority vote).
    tmp = []
    for t in T:
        tmp.append(DT.inference(t, x))
    stat = dict()
    for label in tmp:
        if label not in stat:
            stat[label] = 1
        else:
            stat[label] += 1
    num = 0
    for label in stat:
        if stat[label] > num:
            num = stat[label]
            y = label
    #########################################
    return y
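# The dictionary counting above is the classic majority vote; collections.Counter
# expresses the same idea in one line. Illustrative sketch only, the helper name
# majority_vote is an assumption:
from collections import Counter

def majority_vote(labels):
    # Return the label that appears most often among the trees' predictions.
    return Counter(labels).most_common(1)[0][0]

# Example: majority_vote(['spam', 'ham', 'spam'])  # -> 'spam'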
def inference(x, T, A):
    '''
        Given an adaboost ensemble of decision trees and one data instance, infer the label of the instance.
        Input:
            x: the attribute vector of a data instance, a numpy vector of shape p.
               Each attribute value can be int/float.
            T: the root nodes of decision stumps, a list of length n_tree.
            A: the weights of the decision stumps, a numpy float vector of length n_tree.
        Output:
            y: the class label, a scalar of int/float/string.
    '''
    #########################################
    ## INSERT YOUR CODE HERE
    # Predict with every stump, accumulate each label's total stump weight,
    # then pick the label with the largest accumulated weight. A plain list
    # is used instead of np.append so mixed label types are not coerced.
    lab = []
    for t in T:
        lab.append(DT.inference(t, x))
    d = dict()
    for i in range(len(lab)):
        if lab[i] not in d:
            d[lab[i]] = A[i]
        else:
            d[lab[i]] += A[i]
    # DT.most_common is assumed to return the key with the largest value
    # when given a dict of label -> accumulated weight.
    y = DT.most_common(d)
    #########################################
    return y
def inference(T, x):
    '''
        Given a bagging ensemble of decision trees and one data instance, infer the label of the instance.
        Input:
            T: a list of decision trees.
            x: the attribute vector, a numpy vector of shape p. Each attribute value can be int/float.
        Output:
            y: the class label, a scalar of int/float/string.
    '''
    #########################################
    ## INSERT YOUR CODE HERE
    # Count each tree's vote, then return the label with the highest count.
    ys = []
    for tree in T:
        ys.append(DT.inference(tree, x))
    d = {}
    for item in ys:
        if item in d:
            d[item] += 1
        else:
            d[item] = 1
    # Sort labels by vote count and take the last (most frequent) one; the
    # lambda argument is renamed so it does not shadow the parameter x.
    y = sorted(d, key=lambda k: d[k])[-1]
    #########################################
    return y
def inference(T, x):
    '''
        Given a bagging ensemble of decision trees and one data instance, infer the label of the instance.
        Input:
            T: a list of decision trees.
            x: the attribute vector, a numpy vector of shape p. Each attribute value can be int/float.
        Output:
            y: the class label, a scalar of int/float/string.
    '''
    #########################################
    ## INSERT YOUR CODE HERE
    # Collect every tree's prediction, count the votes, and return the
    # label with the largest count.
    preds = []
    for t in T:
        preds.append(DT.inference(t, x))
    count = {}
    for p in preds:
        try:
            count[p] += 1
        except KeyError:
            count[p] = 1
    num = 0
    for k, v in count.items():  # .iteritems() is Python 2 only
        if v > num:
            num = v
            y = k
    #########################################
    return y
def inference(x, T, A):
    '''
        Given an adaboost ensemble of decision trees and one data instance, infer the label of the instance.
        Input:
            x: the attribute vector of a data instance, a numpy vector of shape p.
               Each attribute value can be int/float.
            T: the root nodes of decision stumps, a list of length n_tree.
            A: the weights of the decision stumps, a numpy float vector of length n_tree.
        Output:
            y: the class label, a scalar of int/float/string.
    '''
    #########################################
    ## INSERT YOUR CODE HERE
    # Predict with every stump, then let DS.most_common pick the label with
    # the largest total stump weight.
    Y = [DT.inference(t, x) for t in T]
    y = DS.most_common(Y, A)
    #########################################
    return y
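# DS.most_common above comes from the assignment's decision-stump module and is
# assumed here to pick the label with the largest total stump weight. A minimal
# sketch of such a helper, under that assumption (name and behaviour are not
# confirmed by the assignment code):
def most_common(Y, A):
    # Y: list of predicted labels; A: matching list/array of stump weights.
    totals = {}
    for label, weight in zip(Y, A):
        totals[label] = totals.get(label, 0.0) + weight
    # Return the label whose accumulated weight is largest.
    return max(totals, key=totals.get)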
def inference(T, x):
    '''
        Given a bagging ensemble of decision trees and one data instance, infer the label of the instance.
        Input:
            T: a list of decision trees.
            x: the attribute vector, a numpy vector of shape p. Each attribute value can be int/float.
        Output:
            y: the class label, a scalar of int/float/string.
    '''
    #########################################
    ## INSERT YOUR CODE HERE
    # Collect each tree's prediction and return the most frequent label.
    list_y = []
    for t in T:
        list_y.append(DT.inference(t, x))
    y = max(list_y, key=list_y.count)
    #########################################
    return y
def inference(T, x):
    '''
        Given a bagging ensemble of decision trees and one data instance, infer the label of the instance.
        Input:
            T: a list of decision trees.
            x: the attribute vector, a numpy vector of shape p. Each attribute value can be int/float.
        Output:
            y: the class label, a scalar of int/float/string.
    '''
    # Collect each tree's prediction and return the most frequent label.
    Y = []
    for t in T:
        pred = DT.inference(t, x)  # renamed so it does not shadow this function
        Y.append(pred)
    y = max(set(Y), key=Y.count)
    return y
def inference(T, x):
    '''
        Given a bagging ensemble of decision trees and one data instance, infer the label of the instance.
        Input:
            T: a list of decision trees.
            x: the attribute vector, a numpy vector of shape p. Each attribute value can be int/float.
        Output:
            y: the class label, a scalar of int/float/string.
    '''
    #########################################
    ## INSERT YOUR CODE HERE
    # Collect each tree's vote, then use np.unique to count the votes once
    # and return the label with the largest count.
    votes = []
    for t in T:
        votes.append(DT.inference(t, x))
    labels, counts = np.unique(votes, return_counts=True)
    y = labels[np.argmax(counts)]
    #########################################
    return y
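# The np.unique counting idiom used above, shown on a toy list of votes; this is
# only an illustration of that step (numpy is assumed to be imported as np at
# module level in the solution file):
import numpy as np

votes = np.array(['a', 'b', 'a', 'c', 'a'])
labels, counts = np.unique(votes, return_counts=True)  # labels sorted, counts aligned
majority = labels[np.argmax(counts)]                   # -> 'a' (3 votes)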