def get_group_info(self):
    """Fetch the WebQQ group list plus per-group member info.

    Populates ``self.groups`` as {gid: group_dict}; each group dict gains
    a ``'minfo'`` mapping of member uin -> {'nick', 'qq'}.
    """
    referer = 'http://s.web2.qq.com/proxy.html?v=20110412001&callback=1&id=1'
    url = 'http://s.web2.qq.com/api/get_group_name_list_mask2'
    payload = {'vfwebqq': self.params['vfwebqq']}
    self.s.headers['referer'] = referer
    r = self.s.post(url, data=payload)
    ret = json.loads(r.text)
    self.groups = {g['gid']: g for g in ret['result']['gnamelist']}
    url = 'http://s.web2.qq.com/api/get_group_info_ext2'
    for group in self.groups.values():
        payload = {
            'gcode': group['code'],
            'vfwebqq': self.params['vfwebqq'],
            't': utils.ctime()
        }
        self.s.headers['referer'] = referer
        r = self.s.get(url, params=payload)
        ret = json.loads(r.text)
        # member info: map each member's uin to a nick/qq record
        group['minfo'] = {}
        for m in ret['result']['minfo']:
            group['minfo'][m['uin']] = {
                'nick': m['nick'],
                'qq': self.get_qq_from_uin(m['uin'])
            }
    # Fix: the original used the Python 2-only `print self.groups`
    # statement while other code in this file uses print(...); the
    # function form prints identically for a single argument on both
    # Python 2 and Python 3.
    print(self.groups)
def add_answer(question_id, answer):
    """Persist *answer* for *question_id* and return the updated ratio."""
    session = Session()
    record = Answer(
        question_id=question_id,
        answer=answer,
        created_at=utils.ctime(),
    )
    session.add(record)
    session.commit()
    return _get_current_ratio(question_id, answer)
def get_qq_from_uin(self, uin):
    """Resolve a WebQQ *uin* to the account's real QQ number."""
    self.s.headers['referer'] = (
        'http://s.web2.qq.com/proxy.html?v=20110412001&callback=1&id=1')
    query = {
        'tuin': uin,
        'type': 1,
        'vfwebqq': self.params['vfwebqq'],
        't': utils.ctime(),
        'verifysession': '',
        'code': '',
    }
    resp = self.s.get('http://s.web2.qq.com/api/get_friend_uin2',
                      params=query)
    return json.loads(resp.text)['result']['account']
def get_qq_from_uin(self, uin):
    """Look up the real QQ account number behind a WebQQ *uin*."""
    api = 'http://s.web2.qq.com/api/get_friend_uin2'
    referer = 'http://s.web2.qq.com/proxy.html?v=20110412001&callback=1&id=1'
    params = {'tuin': uin,
              'type': 1,
              'vfwebqq': self.params['vfwebqq'],
              't': utils.ctime(),
              'verifysession': '',
              'code': ''}
    self.s.headers['referer'] = referer
    reply = self.s.get(api, params=params)
    data = json.loads(reply.text)
    return data['result']['account']
def log_question(question_id):
    """Record that *question_id* was served, stamped with the current time."""
    session = Session()
    session.add(QuestionLog(question_id=question_id,
                            timestamp=utils.ctime()))
    session.commit()
def keep_alive(self):
    """Ping the WebQQ msg-tip endpoint to keep the session alive."""
    base = ('http://web2.qq.com/web2/get_msg_tip'
            '?uin=&tp=1&id=0&retype=1&rc=150&lv=3&t=')
    # Fresh timestamp in the query string on every ping.
    self.s.get(base + utils.ctime())
def getFactory(request):
    """Build a KeyDerivationFactory for the 'guid' field posted in *request*."""
    requested_guid = request.form['guid']
    # Trace each factory request with a timestamp.
    print(ctime(), requested_guid)
    return KeyDerivationFactory(requested_guid)
def keep_alive(self):
    """Hit the msg-tip URL with a current timestamp so the server keeps the session open."""
    endpoint = ('http://web2.qq.com/web2/get_msg_tip?uin=&tp=1&id=0'
                '&retype=1&rc=150&lv=3&t=' + utils.ctime())
    response = self.s.get(endpoint)
def get_group_info(self): url = 'http://s.web2.qq.com/api/get_group_name_list_mask2' payload = {'vfwebqq': self.params['vfwebqq']} self.s.headers['referer'] = 'http://s.web2.qq.com/proxy.html?v=20110412001&callback=1&id=1' r = self.s.post(url, data = payload) ret = json.loads(r.text) self.groups = { g['gid']: g for g in ret['result']['gnamelist'] } url = 'http://s.web2.qq.com/api/get_group_info_ext2' for group in self.groups.values(): payload = {'gcode': group['code'], 'vfwebqq': self.params['vfwebqq'], 't': utils.ctime()} self.s.headers['referer'] = 'http://s.web2.qq.com/proxy.html?v=20110412001&callback=1&id=1' r = self.s.get(url, params = payload) ret = json.loads(r.text) # member info group['minfo'] = {} for m in ret['result']['minfo']: group['minfo'][m['uin']] = {'nick': m['nick'], 'qq': self.get_qq_from_uin(m['uin'])} print self.groups
def run(config, num_batches, batch_size, model_name, class_model_name,
        ofile, threshold, num_workers, epochs, multi_gans, gan_weights,
        trunc_norm, fixed_dset, transform, filter_samples):
    """Train a ResNet-20 classifier on GAN-generated images and save it.

    Generates training batches with a (possibly multi-)GAN generator,
    optionally filters samples through a pretrained classifier, trains a
    fresh ResNet-20 for *epochs* epochs while tracking the best test-set
    accuracy, and saves the final weights to ``./output/<ofile>.pth``.
    """
    # Instantiating generator
    config['G_batch_size'] = batch_size
    generator = GeneratorWrapper(config, model_name, trunc_norm,
                                 multi_gans, gan_weights)
    generator_fn = generator.gen_batch
    if gan_weights:
        print('Using GAN weights (multi-GAN setting): ', str(gan_weights))

    # Instantiating filtering classifier (optional)
    if filter_samples:
        print('Using ResNet20 weights: %s.pth' % class_model_name)
        filter_net = Classifier('resnet20', config['n_classes'])
        filter_net.load(class_model_name)
        filter_fn = filter_net.filter
    else:
        filter_fn = None

    # Creating a filtered loader using the classifier
    num_classes = config['n_classes']
    loader = FilteredLoader(generator_fn, filter_fn, num_classes,
                            num_batches, batch_size, threshold,
                            num_workers, fixed_dset, transform)
    print('Training using %d generated images per epoch'
          % loader.train_length())

    # Creating a blank ResNet
    net = resnet20(config['n_classes'], width=64).to('cuda')

    # Loss function, optimizer, learning-rate schedule
    cross_entropy = nn.CrossEntropyLoss()
    optimizer = optim.SGD(net.parameters(), lr=0.1,
                          momentum=0.9, weight_decay=0.0001)
    scheduler = optim.lr_scheduler.MultiStepLR(optimizer,
                                               milestones=[100, 150])

    # Test-set loader for per-epoch evaluation
    test_loader = utils.make_test_loader(
        config['dataset'], batch_size,
        transforms.Normalize(*utils.norm_vals))

    # Training loop
    t1 = utils.ctime()
    best_acc = 0.0
    for epoch in range(epochs):
        print('Epoch: %3d' % (epoch + 1), end=" ")
        train(net, loader, batch_size, optimizer, cross_entropy)
        scheduler.step()
        acc = evaluate(net, test_loader)
        best_acc = max(acc, best_acc)
        loader.reset()
        # Fix: reuse `acc` instead of running evaluate() a second time
        # per epoch — the original evaluated the test set twice, doubling
        # evaluation cost for the same value.
        print('Val acc: %4.2f %% ' % acc,
              ' | Best acc: %4.2f %%\n' % best_acc)

    tt = utils.ctime() - t1
    print('Finished training, total time: %4.2fs' % tt)
    print('Best accuracy achieved: %4.5f %%' % best_acc)

    # Saving output model
    output = './output/%s.pth' % ofile
    print('Saving trained classifier in %s' % output)
    torch.save(net.state_dict(), output)