import datetime
import logging

import extract
import load
import transform


def main():
    try:
        # Scrape today's edition, then run the transform and load steps.
        fecha = datetime.date.today().strftime("%Y%m%d")
        extract.parse('https://www.boletinoficial.gob.ar', fecha)
        response = transform.process(fecha)
        if response is not None:
            load.process(response)
    except Exception as e:
        logging.error('Error at %s', 'main', exc_info=e)
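# A minimal sketch of how main() might be wired up as a script entry point.
# The logging configuration and the __main__ guard below are assumptions for
# illustration; they are not part of the original module.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO,
                        format="%(asctime)s %(levelname)s %(message)s")
    main()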
    # (tail of the dropout() helper: rescale the units that were kept)
    X /= retain_prob
    return X


def model(X, w_h, w_h2, w_o, p_drop_input, p_drop_hidden):
    # Two ReLU hidden layers with dropout, softmax output.
    X = dropout(X, p_drop_input)
    h = rectify(T.dot(X, w_h))
    h = dropout(h, p_drop_hidden)
    h2 = rectify(T.dot(h, w_h2))
    h2 = dropout(h2, p_drop_hidden)
    py_x = softmax(T.dot(h2, w_o))
    return h, h2, py_x


col, trX, trY = load.geotutor()
col, trX, trY = load.process(col, trX, trY)

X = T.fmatrix()
Y = T.fmatrix()

# 364 input features -> 59 -> 59 -> 2 output classes.
w_h = init_weights((364, 59))
w_h2 = init_weights((59, 59))
w_o = init_weights((59, 2))

# Noisy graph (with dropout) for training; clean graph for prediction.
noise_h, noise_h2, noise_py_x = model(X, w_h, w_h2, w_o, 0.2, 0.5)
h, h2, py_x = model(X, w_h, w_h2, w_o, 0., 0.)
y_x = T.argmax(py_x, axis=1)

params = [w_h, w_h2, w_o]
# cost = T.mean(T.nnet.categorical_crossentropy(noise_py_x, Y)) + 0.01 * ((w_h ** 2).sum() + (w_h2 ** 2).sum() + (w_o ** 2).sum())
cost = T.mean(T.nnet.categorical_crossentropy(noise_py_x, Y))
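# The snippet stops at the cost definition. A typical continuation compiles the
# training and prediction functions; the sketch below assumes an RMSprop-style
# update over params, and the learning rate, epoch count, and batch size are
# illustrative assumptions rather than values from the original script.
import numpy as np
import theano
import theano.tensor as T


def RMSprop(cost, params, lr=0.001, rho=0.9, epsilon=1e-6):
    grads = T.grad(cost=cost, wrt=params)
    updates = []
    for p, g in zip(params, grads):
        acc = theano.shared(p.get_value() * 0.)  # running average of squared gradients
        acc_new = rho * acc + (1 - rho) * g ** 2
        updates.append((acc, acc_new))
        updates.append((p, p - lr * g / T.sqrt(acc_new + epsilon)))
    return updates


updates = RMSprop(cost, params, lr=0.001)
train = theano.function(inputs=[X, Y], outputs=cost, updates=updates,
                        allow_input_downcast=True)
predict = theano.function(inputs=[X], outputs=y_x, allow_input_downcast=True)

for epoch in range(100):
    for start in range(0, len(trX), 128):  # mini-batches of 128 examples
        train(trX[start:start + 128], trY[start:start + 128])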
    # (tail of the accuracy helper: compare predictions with the one-hot targets)
    predY = predict(trX[:num])
    train_acc = np.mean(np.argmax(trY[:num], axis=1) == predict(trX[:num]))
    # print num, precision_recall_fscore_support(list(y[1] for y in trueY.tolist()), predY.tolist(), average='micro')
    print num, train_acc, test_acc
    return train_acc, test_acc


def data_split(data, targets):
    # Fixed split: first 4908 rows for training, the rest for testing.
    trainX = data[:4908]
    trainY = targets[:4908]
    testX = data[4908:]
    testY = targets[4908:]
    return trainX, testX, trainY, testY


col, data, targets = load.geotutor()
col, data, targets = load.process(col, data, targets)
# trX, teX, trY, teY = cross_validation.train_test_split(data, targets, test_size=0.3, random_state=0)
trX, teX, trY, teY = data_split(data, targets)

X = T.fmatrix()
Y = T.fmatrix()

# Single softmax layer: 407 input features -> 2 output classes.
w = init_weights((407, 2))
py_x = model(X, w)
y_pred = T.argmax(py_x, axis=1)

# Cross-entropy with L2 weight decay, trained by plain gradient descent.
cost = T.mean(T.nnet.categorical_crossentropy(py_x, Y)) + 0.001 * T.sum(w ** 2)
gradient = T.grad(cost=cost, wrt=w)
update = [[w, w - gradient * 0.005]]
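# The script stops after defining the SGD update. A typical continuation
# compiles the Theano functions and iterates over mini-batches; the epoch
# count and batch size below are illustrative assumptions, not values taken
# from the original script.
import numpy as np
import theano

train = theano.function(inputs=[X, Y], outputs=cost, updates=update,
                        allow_input_downcast=True)
predict = theano.function(inputs=[X], outputs=y_pred, allow_input_downcast=True)

for epoch in range(50):
    for start in range(0, len(trX), 128):  # mini-batches of 128 examples
        train(trX[start:start + 128], trY[start:start + 128])
    test_acc = np.mean(np.argmax(teY, axis=1) == predict(teX))
    print epoch, test_acc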