def get_identity_for_processing(identity_id):
    """Fetch the identity with the given id and mark it as in-processing.

    Returns the wrapped Identity on success; on an API reply without a
    'result' key, logs the raw response and returns None.
    """
    response = api.verifier_take_pending_request(long(identity_id))
    if 'result' not in response:
        log.error(response)
        return None
    return Identity(response['result'])
def get_identity(identity_id):
    """Look up the identity with the given id without changing its status.

    Returns the wrapped Identity, or None (after logging the raw reply)
    when the API response carries no 'result' key.
    """
    reply = api.verifier_peek_request(long(identity_id))
    if 'result' in reply:
        return Identity(reply['result'])
    log.error(reply)
    return None
def get_identities(self, status=Identity.STATUS_AWAITING_PROCESSING):
    """Return all identity requests with the given status.

    Defaults to requests still awaiting processing; yields an empty
    list when the API reply has no 'result' key.
    """
    response = api.verifier_list_requests(status)
    if 'result' not in response:
        return []
    return [Identity(entry) for entry in response['result']]
def test_failure_identity_already_exists(self):
    """POSTing /v1/identities for an already-registered address must 403."""
    auth = generate_test_authorization()
    # Pre-populate the session so the caller's address is already taken.
    db.session.add(Identity(auth['public_address']))
    response = self.client.post('/v1/identities', headers=auth['headers'])
    self.assertEqual(403, response.status_code)
    self.assertEqual({"error": 'identity already exists'}, response.json)
def save_identity(caller_identity):
    """Persist a new identity record for the caller.

    Rejects with a 403 JSON error when a record for this identity
    already exists; otherwise commits the new record and returns
    an empty JSON body.
    """
    existing = Identity.query.get(caller_identity)
    if existing:
        return jsonify(error='identity already exists'), 403
    db.session.add(Identity(caller_identity))
    db.session.commit()
    return jsonify({})
def __init__(self, sparse_coding, nb_negative, embed_dims=128, context_dims=128,
             init_embeddings=None, negprob_table=None, optimizer='adam'):
    """Build the NCE language-model graph (V4 variant).

    :param sparse_coding: sparse code matrix; row 0-axis length is taken as the
        vocabulary size, column count minus one as the number of base codes.
    :param nb_negative: number of negative samples per position (NCE).
    :param embed_dims: word embedding dimensionality.
    :param context_dims: LSTM context/state dimensionality.
    :param init_embeddings: optional initial weights for the embedding layer.
    :param negprob_table: optional noise (negative-sampling) distribution over
        the vocabulary; a uniform distribution is used when None.
    :param optimizer: optimizer name resolved via optimizers.get().
    """
    super(NCELangModelV4, self).__init__(weighted_inputs=False)
    vocab_size = sparse_coding.shape[0]  # the extra word is for OOV
    self.nb_base = sparse_coding.shape[1] - 1
    self.vocab_size = vocab_size
    self.embed_dim = embed_dims
    self.optimizer = optimizers.get(optimizer)
    self.nb_negative = nb_negative
    self.loss = categorical_crossentropy
    self.loss_fnc = objective_fnc(self.loss)
    self.sparse_coding = sparse_coding

    if negprob_table is None:
        # No noise distribution supplied: fall back to uniform over the vocab.
        negprob_table_ = np.ones(shape=(vocab_size, ), dtype=theano.config.floatX) / vocab_size
        negprob_table = theano.shared(negprob_table_)
        self.neg_prob_table = negprob_table_
    else:
        # Keep a numpy copy for the sampler and a shared variable for the graph.
        self.neg_prob_table = negprob_table.astype(theano.config.floatX)
        negprob_table = theano.shared(
            negprob_table.astype(theano.config.floatX))
    self.sampler = TableSampler(self.neg_prob_table)

    # 3-D int input: presumably (1 + nb_negative, batch, sequence) word
    # indices — TODO confirm against the training-data pipeline.
    self.add_input(name='idxes', ndim=3, dtype='int32')
    idxes = self.inputs['idxes'].get_output(True)
    shape = idxes.shape[1:]
    codes = tsp.csr_matrix('sp-codes', dtype=floatX)
    # Positions holding true (positive) words: one per (batch, time) cell.
    nb_pos_words = shape[0] * shape[1]
    pos_codes = codes[:nb_pos_words]

    # Identity nodes expose raw tensors to the graph API; the {True: ..,
    # False: ..} dict supplies the same value for train and test phases.
    self.add_node(Identity(inputs={
        True: pos_codes,
        False: pos_codes
    }), name='codes_flat')
    self.add_node(Identity(inputs={
        True: shape,
        False: shape
    }), name='sents_shape')
    self.add_node(Identity(inputs={
        True: codes,
        False: codes
    }), name='sparse_codes')

    # Embedding over the base codes (+1, presumably for OOV — see vocab_size
    # comment above), reshaped back to sentence layout via 'sents_shape'.
    self.add_node(SparseEmbedding(self.nb_base + 1, embed_dims, weights=init_embeddings),
                  name='embedding', inputs=('codes_flat', 'sents_shape'))
    self.add_node(LangLSTMLayer(embed_dims, output_dim=context_dims), name='encoder', inputs='embedding')
    # seq.add(Dropout(0.5))

    # Unnormalized softmax over only the sampled words (NCE positive score).
    self.add_node(PartialSoftmaxV4(input_dim=context_dims, base_size=self.nb_base + 1),
                  name='part_prob', inputs=('idxes', 'sparse_codes', 'encoder'))
    # Learned per-context normalizer (exponential keeps it positive).
    self.add_node(Dense(input_dim=context_dims, output_dim=1, activation='exponential'),
                  name='normalizer', inputs='encoder')
    # Noise probability q(w) looked up for every index in the input.
    self.add_node(LookupProb(negprob_table), name='lookup_prob', inputs='idxes')
    # Full-vocabulary unnormalized distribution sharing part_prob's weights,
    # used for evaluation/prediction rather than NCE training.
    self.add_node(SharedWeightsDense(self.nodes['part_prob'].W,
                                     self.nodes['part_prob'].b,
                                     self.sparse_coding, activation='exponential'),
                  name='true_unnorm_prob', inputs='encoder')
    self.add_node(ActivationLayer(name='normalization'), name='true_prob', inputs='true_unnorm_prob')

    self.add_output('pos_prob', node='part_prob')
    self.add_output('neg_prob', node='lookup_prob')
    self.add_output('pred_prob', node='true_prob')
    self.add_output('normalizer', node='normalizer')
    self.add_output('unrm_prob', node='true_unnorm_prob')
def get_next_identity():
    """Take the next pending identity request for verification.

    Returns the wrapped Identity, or None (after logging the raw
    response) when the API reply carries no 'result' key — the same
    error handling as get_identity() and get_identity_for_processing(),
    instead of the unguarded KeyError the bare lookup would raise.
    """
    response = api.take_next_request()
    if 'result' in response:
        return Identity(response['result'])
    log.error(response)
    return None