def get(self):
    """Render the submit_key page for the signed-in Gitkit user.

    Redirects to the sign-in widget when no 'gtoken' cookie is present.
    """
    gtoken = self.request.cookies.get('gtoken')
    if gtoken:
        gitkit_user = gitkit_instance.VerifyGitkitToken(gtoken)
    else:
        return webapp2.redirect_to('widget', mode='select')
    # get_or_insert is a classmethod; the original built a throwaway
    # Identity() instance first, which was immediately discarded.
    i = Identity.get_or_insert(gitkit_user.user_id,
                               name=gitkit_user.name,
                               email=gitkit_user.email,
                               provider_id=gitkit_user.provider_id)
    template_values = {'i': i}
    template = jinja_environment.get_template('submit_key.htm')
    self.response.out.write(template.render(template_values))
def wx_UserRegist():
    """Register/login a WeChat mini-program user.

    Expects a POST JSON body containing the 'code' from wx.login();
    exchanges it for an openid/session_key pair via the jscode2session
    API, upserts the user record, and returns the openid plus a
    new-user flag as JSON. The session_key is stored server-side only.
    """
    appId = 'wxde4ed04d17675e14'
    if request.method == 'POST':
        req_data = json.loads(request.get_data())
        # SECURITY NOTE(review): the app secret is hard-coded here; it
        # should live in configuration outside version control.
        url = ('https://api.weixin.qq.com/sns/jscode2session'
               '?appid=' + appId +
               '&secret=c12283859e80a742ef233b73eafaff33'
               '&js_code=' + req_data[u'code'] +
               '&grant_type=authorization_code')
        f = urllib2.urlopen(url)
        try:
            res = json.loads(f.read())
        finally:
            # Always release the HTTP response, even if read/parse fails.
            f.close()
        # WeChat signals failure with an 'errmsg' payload instead of
        # credentials; the original only caught this with a bare except
        # around jsonify, after the KeyError had already escaped.
        if u'openid' not in res:
            return res.get(u'errmsg', 'wx login failed')
        # session_key must stay server-side; never send it to the client.
        openid = res[u'openid']
        session_key = res[u'session_key']
        # fetch the record plus the "newly created" flag
        (record_anew, flag_anew) = Identity.update_id_unique_record(
            openid=openid, session_key=session_key)
        app.logger.debug("in UserRegist before commit")
        app.logger.debug(record_anew.created_time)
        db.session.merge(record_anew)
        db.session.commit()
        app.logger.debug("in UserRegist after commit")
        app.logger.debug(record_anew.created_time)
        return jsonify({'openid': openid, 'flag_anew': flag_anew})
    else:
        return '<h1> you are not allowed to get anything unless post</h1>'
def get_identity_for_processing(identity_id):
    """Fetch the identity with the given id and mark it as in-processing.

    Returns an Identity on success, or None (after logging the raw
    response) when the API reply carries no 'result'.
    """
    response = api.verifier_take_pending_request(long(identity_id))
    if 'result' not in response:
        log.error(response)
        return None
    return Identity(response['result'])
def get_identity(identity_id):
    """Fetch the identity by id without changing its status.

    Returns an Identity on success, or None (after logging the raw
    response) when the API reply carries no 'result'.
    """
    response = api.verifier_peek_request(long(identity_id))
    if 'result' not in response:
        log.error(response)
        return None
    return Identity(response['result'])
def get_identities(self, status=Identity.STATUS_AWAITING_PROCESSING):
    """Return all identity requests with the given status (default: pending)."""
    response = api.verifier_list_requests(status)
    if 'result' not in response:
        return []
    return [Identity(entry) for entry in response['result']]
def test_failure_identity_already_exists(self):
    """POSTing /v1/identities for an already-registered address yields 403."""
    auth = generate_test_authorization()
    db.session.add(Identity(auth['public_address']))
    response = self.client.post('/v1/identities', headers=auth['headers'])
    self.assertEqual(403, response.status_code)
    self.assertEqual({"error": 'identity already exists'}, response.json)
def save_identity(caller_identity):
    """Persist a new Identity row for the caller; 403 if one already exists."""
    if Identity.query.get(caller_identity) is not None:
        return jsonify(error='identity already exists'), 403
    db.session.add(Identity(caller_identity))
    db.session.commit()
    return jsonify({})
def wx_getInfo():
    """Return the selected audio info (as JSON) for the posted WeChat openid.

    Normalizes an invalid group value, persists the record, then re-fetches
    it before looking up the audio plan for the user's session.
    """
    req_data = json.loads(request.get_data())
    openid = req_data[u'openid']
    # update pzn_sessn happens inside update_id_unique_record
    (rcd, flag_anew) = Identity.update_id_unique_record(openid=openid)
    app.logger.debug('Info_Get Approaching for Openid = ' + openid)
    if flag_anew:
        # In debugging, audio info could be requested before registration.
        app.logger.debug('No record for present openid!')
    if (rcd.group != 0) and (rcd.group != 1):
        # Group should be 0 or 1; force a default and flag the anomaly in
        # the feedback field so it can be traced later.
        rcd.group = 1
        app.logger.debug('Subject without gender Info Enter Here!')
        rcd.fdbck += '<_BugNOGender_>'
    # Whatever happened, rcd should be committed before looking up, since
    # sometimes the update in Login was skipped for an unclarified reason.
    db.session.merge(rcd)
    db.session.commit()
    # Deliberate second call: re-read the freshly committed record so the
    # lookup below uses up-to-date pzn_sessn/group values.
    (rcd, flag_anew) = Identity.update_id_unique_record(openid=openid)
    # selected audio record for this session/group
    audioInfo = AudioInfo.get_Info(sessn_No=rcd.pzn_sessn, group=rcd.group)
    app.logger.debug(json.dumps(audioInfo))
    return json.dumps(audioInfo)
def get(self):
    """Render the search page, showing pre_keys for the queried email."""
    email = self.request.get('email')
    pre_keys = None
    if email:
        match = Identity.query(Identity.email == email).get()
        if match:
            pre_keys = match.pre_keys
    context = {'pre_keys': pre_keys, 'email': email}
    page = jinja_environment.get_template('search.htm')
    self.response.out.write(page.render(context))
def delete(self):
    """Delete one pre-key (by urlsafe id) owned by the signed-in user.

    Responds 401 when no Gitkit token cookie is present and 403 when the
    key does not belong to the caller.
    """
    gtoken = self.request.cookies.get('gtoken')
    if not gtoken:
        # Original fell through here and hit a NameError on gitkit_user;
        # reject the unauthenticated request explicitly instead.
        self.abort(401)
    gitkit_user = gitkit_instance.VerifyGitkitToken(gtoken)
    key_id = self.request.get("id")
    obj = ndb.Key(urlsafe=key_id)
    i = Identity.query(Identity.email == gitkit_user.email).get()
    # NOTE: the original also fetched all key entities via ndb.get_multi
    # into an unused local -- a wasted RPC, removed here.
    if obj in i.pre_keys:
        # Only keys the caller actually owns may be deleted.
        obj.delete()
        i.pre_keys.remove(obj)
        i.put()
    else:
        self.abort(403)
    self.response.out.write("it works!")
def wx_UserLog():
    """Update a WeChat user's training record and return its session info.

    Reads optional fields (train_state, gender, uploaded_data, merge_seq)
    from the POSTed JSON, upserts the identity record, and returns the
    personal session number plus the new-user flag as JSON.
    """
    req_data = json.loads(request.get_data())
    openid = req_data[u'openid']
    # dict.has_key() is Python-2-only; the 'in' operator is equivalent
    # and also works on Python 3.
    if u'train_state' in req_data:
        train_state = 1
        app.logger.debug('Training state: one')
    else:
        train_state = 0
        app.logger.debug('Training state: zero')
    if u'gender' in req_data:
        gender = req_data[u'gender']
        app.logger.debug('Gender Loaded')
    else:
        gender = 0
    if u'uploaded_data' in req_data:
        app.logger.debug('data_uploaded')
        uploaded_data = req_data[u'uploaded_data']
    else:
        uploaded_data = ''
    if u'merge_seq' in req_data:
        app.logger.debug('merge_seq detected')
        merge_seq = req_data[u'merge_seq']
    else:
        merge_seq = -2
    # fetch the record plus the "newly created" flag
    (record_anew, flag_anew) = Identity.update_id_unique_record(
        openid=openid, train_state=train_state,
        uploaded_data=uploaded_data, gender=gender, merge_seq=merge_seq)
    # BUG FIX: the original used hasattr(req_data, u'gender'), which is
    # always False for a dict (so the branch was dead), and the dead code
    # referenced an undefined name `cls` that would have raised NameError
    # had it ever run. Assign the gender directly when present.
    if u'gender' in req_data:
        record_anew.gender = req_data[u'gender']
    db.session.merge(record_anew)
    db.session.commit()
    # max sessn could also be returned later to extend training plans.
    return jsonify({'pzn_sessn': record_anew.pzn_sessn,
                    'flag_anew': flag_anew})
def __init__(self, sparse_coding, nb_negative, embed_dims=128,
             context_dims=128, init_embeddings=None, negprob_table=None,
             optimizer='adam'):
    """Build the NCE language model graph (variant V4).

    :param sparse_coding: sparse matrix mapping each vocabulary word to
        base codes; row count is the vocab size (the extra row appears to
        be reserved for OOV -- TODO confirm), column count is nb_base + 1.
    :param nb_negative: number of negative samples per position.
    :param embed_dims: word-embedding dimensionality.
    :param context_dims: LSTM context (hidden) dimensionality.
    :param init_embeddings: optional initial embedding weights.
    :param negprob_table: optional negative-sampling distribution over the
        vocabulary; defaults to uniform when None.
    :param optimizer: optimizer name resolved via optimizers.get().
    """
    super(NCELangModelV4, self).__init__(weighted_inputs=False)
    vocab_size = sparse_coding.shape[0]  # the extra word is for OOV
    self.nb_base = sparse_coding.shape[1] - 1
    self.vocab_size = vocab_size
    self.embed_dim = embed_dims
    self.optimizer = optimizers.get(optimizer)
    self.nb_negative = nb_negative
    self.loss = categorical_crossentropy
    self.loss_fnc = objective_fnc(self.loss)
    self.sparse_coding = sparse_coding

    if negprob_table is None:
        # Default to a uniform negative-sampling distribution.
        negprob_table_ = np.ones(shape=(vocab_size,),
                                 dtype=theano.config.floatX) / vocab_size
        negprob_table = theano.shared(negprob_table_)
        self.neg_prob_table = negprob_table_
    else:
        self.neg_prob_table = negprob_table.astype(theano.config.floatX)
        negprob_table = theano.shared(
            negprob_table.astype(theano.config.floatX))

    self.sampler = TableSampler(self.neg_prob_table)

    # Input: int32 index tensor, ndim=3 -- presumably
    # (1 + nb_negative, batch, sequence); TODO confirm against callers.
    self.add_input(name='idxes', ndim=3, dtype='int32')
    idxes = self.inputs['idxes'].get_output(True)
    shape = idxes.shape[1:]
    # Symbolic CSR matrix of sparse codes fed at runtime.
    codes = tsp.csr_matrix('sp-codes', dtype=floatX)
    nb_pos_words = shape[0] * shape[1]
    # The leading rows of `codes` hold the positive (true) words.
    pos_codes = codes[:nb_pos_words]

    # Identity nodes expose raw symbolic values to the graph for both
    # train (True) and test (False) phases.
    self.add_node(Identity(inputs={True: pos_codes, False: pos_codes}),
                  name='codes_flat')
    self.add_node(Identity(inputs={True: shape, False: shape}),
                  name='sents_shape')
    self.add_node(Identity(inputs={True: codes, False: codes}),
                  name='sparse_codes')
    self.add_node(SparseEmbedding(self.nb_base + 1, embed_dims,
                                  weights=init_embeddings),
                  name='embedding', inputs=('codes_flat', 'sents_shape'))
    self.add_node(LangLSTMLayer(embed_dims, output_dim=context_dims),
                  name='encoder', inputs='embedding')
    # seq.add(Dropout(0.5))
    self.add_node(PartialSoftmaxV4(input_dim=context_dims,
                                   base_size=self.nb_base + 1),
                  name='part_prob',
                  inputs=('idxes', 'sparse_codes', 'encoder'))
    # Learned normalizer for the unnormalized NCE probabilities.
    self.add_node(Dense(input_dim=context_dims, output_dim=1,
                        activation='exponential'),
                  name='normalizer', inputs='encoder')
    # Noise probability lookup for the sampled indices.
    self.add_node(LookupProb(negprob_table),
                  name='lookup_prob', inputs='idxes')
    # Full-vocabulary output sharing the partial-softmax weights.
    self.add_node(SharedWeightsDense(self.nodes['part_prob'].W,
                                     self.nodes['part_prob'].b,
                                     self.sparse_coding,
                                     activation='exponential'),
                  name='true_unnorm_prob', inputs='encoder')
    self.add_node(ActivationLayer(name='normalization'),
                  name='true_prob', inputs='true_unnorm_prob')

    self.add_output('pos_prob', node='part_prob')
    self.add_output('neg_prob', node='lookup_prob')
    self.add_output('pred_prob', node='true_prob')
    self.add_output('normalizer', node='normalizer')
    self.add_output('unrm_prob', node='true_unnorm_prob')
def get_next_identity():
    """Get the next identity awaiting verification.

    Consistent with get_identity/get_identity_for_processing: when the
    API reply carries no 'result' key, log the response and return None
    instead of letting a KeyError escape.
    """
    response = api.take_next_request()
    if 'result' in response:
        return Identity(response['result'])
    log.error(response)
    return None