def download_subtitles(download_link, dest_path, **params):
    """Download and extract a subtitle from a zip served by the site.

    Polls the download endpoint (up to 15 times, 1s apart) until it stops
    returning an 'ERROR:' payload, then fetches the zip and extracts the
    first entry whose extension is in SUB_EXT into *dest_path*.

    Returns the extracted subtitle's path, or None when nothing was
    downloaded or no entry matched.
    """
    session = get_session()
    download_url = None
    # Retry: the server answers 'ERROR:...' until the wait period elapses.
    for _attempt in xrange(15):
        download_url = session.open(
            urljoin(download_link, '/ajax/sub/downloadun.asp'),
            data=params['data_encoded'],
        ).read()
        if 'ERROR:' not in download_url:
            break
        time.sleep(1)
    if download_url:
        res = session.open(urljoin(download_link, download_url))
        if res:
            zf = zipfile.ZipFile(StringIO(res.read()))
            try:
                for zif in zf.filelist:
                    if path.splitext(zif.filename)[1].lower().lstrip('.') in SUB_EXT:
                        subtitle_path = path.join(dest_path, zif.filename)
                        with open(subtitle_path, 'wb') as f:
                            f.write(zf.read(zif))
                            f.flush()
                        return subtitle_path
            finally:
                # BUG FIX: the archive was previously leaked when read()
                # or the file write raised.
                zf.close()
    return None
def manage_feeder(code=None):
    """Create (no *code*) or edit (*code* given) a Feeder.

    Raises HTTPError 404 when editing an unknown code. On POST, success
    and validation failure both redirect back to the feeder list with a
    flash message; a duplicate-key error re-renders the form.
    """
    feeder = _() if not code else db.Feeder.get_by_code(code)
    if code and not feeder:
        # BUG FIX: the message previously interpolated the builtin `id`
        # instead of the requested feeder `code`.
        raise HTTPError(404, "Feeder not found: %s" % code)
    session = get_session()
    if request.method == 'POST':
        form = forms.FeederForm(request)
        try:
            if form.is_valid():
                form.save()
                action = ('created' if not code else 'updated')
                session['messages']['pass'].append('Feeder %s' % action)
            else:
                session['messages']['fail'].extend(form.errors)
            return redirect('/admin/feeders/')
        except pymongo.errors.DuplicateKeyError:
            session['messages']['fail'].append(
                "Provided Feeder Code and/or Name already exists.")
            feeder = form._instance
    # NOTE(review): title 'Projects' looks copy-pasted from manage_project;
    # left unchanged since templates may key on it — confirm before fixing.
    return {
        'title': 'Projects',
        'feeder': feeder,
    }
def manage_project(id=None):
    """Create (no *id*) or edit (*id* given) a Project and render its form."""
    project = _(xforms=[], uforms=[]) if not id else db.Project.get_by_id(id)
    if not project:
        raise HTTPError(404, "Project not found: %s" % id)
    xforms = db.XForm.get_unassigned_xforms(False, False)
    uforms = db.XForm.get_unassigned_uforms(False, False)
    sess = get_session()
    if request.method == 'POST':
        form = forms.ProjectForm(request)
        try:
            if not form.is_valid():
                sess['messages']['fail'].extend(form.errors)
            else:
                form.save()
                sess['messages']['pass'].append(
                    'Project %s' % ('created' if not id else 'updated'))
            return redirect('/admin/projects/')
        except pymongo.errors.DuplicateKeyError:
            # Duplicate Id/Name: flash the error and re-render the form.
            sess['messages']['fail'].append(
                "Provided Project Id and/or Name already exists.")
            project = form._instance
    return {
        'title': 'Projects',
        'project': project,
        'xforms': [_(f) for f in xforms],
        'uforms': [_(f) for f in uforms],
    }
def manage_station(feeder_code, code=None):
    """Create (no *code*) or edit a Station under the given feeder."""
    feeder = db.Feeder.get_by_code(feeder_code)
    if not feeder:
        raise HTTPError(404, "Feeder not found: %s" % feeder_code)
    station = _() if not code else db.Station.get_by_code(code)
    if code and not station:
        raise HTTPError(404, "Station not found: %s" % code)
    msgs = get_session()['messages']
    if request.method == 'POST':
        form = forms.StationForm(request)
        try:
            if form.is_valid():
                form.save()
                msgs['pass'].append(
                    'Station %s' % ('created' if not code else 'updated'))
            else:
                msgs['fail'].extend(form.errors)
            return redirect('/admin/feeders/%s/' % feeder_code)
        except pymongo.errors.DuplicateKeyError:
            # Duplicate code: flash the error and re-render the form.
            msgs['fail'].append("Provided Station Code already exists.")
            station = form._instance
    return {
        'title': 'Station',
        'station': station,
        'feeder': feeder,
        'vratio_choices': db.Volt.CHOICES,
    }
def xforms_sync():
    """Pull remote XForms into the local db, flashing a result summary.

    New forms are inserted inactive. Individual insert failures are
    collected; the flash message distinguishes full, partial, and zero
    success. A ConnectionError aborts with a flash message.
    """
    failed, reports = [], []
    session = get_session()
    messages = session['messages']
    total = 0
    try:
        for forms in api.get_xforms():
            # BUG FIX: keep a running total across batches; the original
            # read len(forms) after the loop, which saw only the last batch
            # (and raised NameError when the API yielded nothing).
            total += len(forms)
            for f in forms:
                exist = db.XForm.get_by_id(f['id_string'])
                if exist:
                    continue
                try:
                    f['active'] = False
                    db.XForm.insert_one(f)
                except Exception as ex:
                    failed.append(f)
                    reports.append(str(ex))
        if not failed:
            messages['pass'].append('Sync was successful.')
        else:
            if (total - len(failed)) == 0:
                messages['fail'].append('Sync was unsuccessful.')
            else:
                msg = 'Sync was partially successful. %s entries failed.'
                messages['warn'].append(msg % len(failed))
            write_log(reports)
    except ConnectionError:
        messages['fail'].append('Connection Error. Unable to establish '
                                'Internet connection.')
    return redirect('/admin/xforms/')
def xforms_update():
    """Apply activation changes submitted from the XForms admin page.

    Compares the posted 'activate' set against the page-load snapshots
    ('startup-all' / 'startup-active') and flips only forms whose status
    actually changed.
    """
    active = request.forms.getall('activate')
    # Defaults guard against a missing field (was an AttributeError).
    startup_all = request.forms.get('startup-all', '').split(',')
    startup_active = request.forms.get('startup-active', '').split(',')
    updated = []
    # handle recently activated forms
    for xform_id in [x for x in active if x not in startup_active]:
        db.XForm.set_active(xform_id, True)
        updated.append(xform_id)
    # handle recently deactivated forms
    inactives = [x for x in startup_all if x not in active]
    startup_inactives = [x for x in startup_all if x not in startup_active]
    for xform_id in [x for x in inactives if x not in startup_inactives]:
        # BUG FIX: removed a stray debug print() wrapped around this call.
        db.XForm.set_active(xform_id, False)
        updated.append(xform_id)
    if updated:
        session = get_session()
        session['messages']['pass'].append('%s XForm(s) Updated.' % len(updated))
        session.save()
    return redirect('/admin/xforms/')
def profile():
    """Show the current user's profile; handle password change on POST.

    On a successful change the user is logged out (authnz.logout raises
    an HTTPResponse redirect, which is re-raised untouched).
    """
    user = authnz.current_user
    user_info = _(username=user.username, email_addr=user.email_addr,
                  role=user.role)
    session = get_session()['messages']
    if request.method == 'POST':
        form = PasswordChangeForm(request, user)
        try:
            if form.is_valid():
                form.save()
                session['pass'].append('Password has been changed successfully.')
                return authnz.logout(success_redirect='/profile')
            else:
                # BUG FIX: was append(form.errors), which pushed the whole
                # error list as one message; extend() matches every other
                # handler in this module.
                session['fail'].extend(form.errors)
        except HTTPResponse:
            # logout()'s redirect travels as an exception; let it through.
            raise
        except Exception as ex:
            error_message = 'Password change failed. Error: %s' % str(ex)
            session['fail'].append(error_message)
            logging.error(error_message, exc_info=True)
    roles = sorted(
        list(authnz.list_roles()), key=lambda x: x[1], reverse=True)
    return {
        'title': 'Profile',
        'user': user_info,
        'roles': roles,
        'readonly': True,
    }
def activate_account(registration_code):
    """Activate the account tied to *registration_code*, then go home.

    Success or failure is reported via session flash messages.
    """
    msgs = get_session()['messages']
    try:
        authnz.validate_registration(registration_code)
    except Exception as ex:
        msgs['fail'].append("Account activation failed. Error: %s" % str(ex))
    else:
        msgs['pass'].append('Account activated!')
    return redirect('/')
def process_request(self, request):
    """ Writes the signed_request into the Session

    Attaches the facebook session to the request, then — depending on how
    the request arrived — stores the access token:
    * ?access_token=... in the query string (temporary OAuth2.0 fix),
    * a POSTed signed_request from a canvas page (also activates the
      user's locale and stores user_id), or
    * a ?code=... OAuth callback referred from facebook.
    """
    fb = get_session(request)
    setattr(request, 'fb_session', fb)
    application = get_app_dict()
    logger.debug('Request Method = %s\n, AccessToken=%s' % (request.method, fb.access_token))
    if 'feincms' in settings.INSTALLED_APPS:
        # if feincms is installed, try to get the application from the page
        from facebook.feincms.utils import get_application_from_request
        page_app = get_application_from_request(request)
        # NOTE(review): condition tests `application` (just fetched, likely
        # always truthy) — presumably `page_app` was meant; confirm.
        if application:
            application = get_app_dict(page_app)
    # Temporary OAuth2.0 fix due to missing access_token in cookie sr:
    if 'access_token' in request.GET:
        fb.store_token(request.GET.get('access_token'))
    # default POST/GET request from facebook with a signed request
    if 'signed_request' in request.POST:
        parsed_request = parseSignedRequest(request.POST['signed_request'],
                                            application['SECRET'])
        logger.debug(u'got signed_request from facebook: %s' % parsed_request)
        if 'user' in parsed_request:
            # Activate the locale facebook reports for the user.
            language = parsed_request['user']['locale']
            logger.debug('language: %s' % language)
            request.LANGUAGE_CODE = language
            translation.activate(language)
        fb.signed_request = parsed_request
        logger.debug('stored signed_request')
        expires = None
        # rewrite important data
        if 'oauth_token' in parsed_request:
            expires = datetime.fromtimestamp(float(parsed_request['expires']))
            fb.store_token(parsed_request['oauth_token'], expires)
        elif 'access_token' in parsed_request:
            expires = datetime.fromtimestamp(float(parsed_request['expires']))
            fb.store_token(parsed_request['access_token'], expires)
        else:
            # The chance is good that there is already a valid token in the
            # session. Remove it.
            fb.store_token(None)
        if 'user_id' in parsed_request:
            fb.user_id = parsed_request['user_id']
        else:
            logger.debug("Signed Request didn't contain public user info.")
        if expires:
            logger.debug('Signed Request issued at: %s' % datetime.fromtimestamp(
                float(parsed_request['issued_at'])))
    # auth via callback from facebook
    elif 'code' in request.GET and 'facebook' in request.META.get('HTTP_REFERER', u''):
        # Exchange the code for a token; redirect_uri must match the canvas
        # page, so the canvas URL is rewritten to the canvas page.
        authenticate(request.REQUEST['code'], fb, application,
                     request.build_absolute_uri().split('?')[0]
                     .replace(application['CANVAS-URL'], application['CANVAS-PAGE']))
def __init__(self, search_url=BASE_URL, log=python_logger):
    """Open an HTTP session and prepare the default download form fields."""
    self.session = get_session()
    self.search_url = search_url
    self.log = log
    # Randomized 12-18s "time waited" value the site expects from a browser.
    waited = int(floor(random() * 7 + 12))
    self.sub_dict = dict(
        sub_id=-1,
        s=1920,
        code=None,
        sh='yes',
        guest=None,
        timewaited=waited,
    )
def project_sync(project_code):
    """Pull new captures/updates for the project's selected forms.

    The POST names the form set ('project_xforms' or 'project_uforms');
    captures go to db.Capture, updates to db.Update. Each form resumes
    from the count already stored locally. Per-form results are flashed.
    """
    p = db.Project.get_by_code(project_code)
    if not p:
        raise HTTPError(404, 'Project not found: %s' % project_code)
    messages = get_session()['messages']
    # BUG FIX: default to an empty list — the original left this None when
    # neither field was posted, crashing the for-loop below with TypeError.
    form_type, xforms_to_sync = None, []
    for key in ['project_xforms', 'project_uforms']:
        if key in request.forms:
            form_type = key
            xforms_to_sync = request.forms.get(key).split(',')
    sync_target = (db.Capture if form_type == 'project_xforms' else db.Update)
    for xform_id in xforms_to_sync:
        # resume from the number of records already stored for this form
        count = sync_target.count_by_form(xform_id)
        xform = db.XForm.get_by_id(xform_id)
        # transform nerc compliant forms differently; transform content to
        # match previous form entries in order not to break application
        # analysis... forms 08 and above are supposed to be nerc compliant
        transform_func = transform.to_flatten_dict
        try:
            form_no = int(xform_id[7:9])
            if form_no >= 8:
                transform_func = transform.ndc_flatten_dict
        except (TypeError, ValueError):
            # id not in the expected "...NN..." format; keep the default
            pass
        # pull new captures
        try:
            transformed, pull_count = [], 0
            for captures in api.get_captures(int(xform.object_id), start=count):
                logging.debug('# captures pulled: %s', len(captures))
                if captures:
                    pull_count += len(captures)
                    for c in captures:
                        transformed.append(transform_func(c))
                    sync_target.save_many(transformed)
                    transformed = []
            messages['pass'].append('%s captures pulled.' % pull_count)
        except ConnectionError:
            messages['fail'].append('Sync failed. Internet connection required.')
        except Exception as ex:
            messages['fail'].append('Sync failed. %s' % str(ex))
            logging.error('sync failed. %s', str(ex), exc_info=True)
    return redirect('/projects/%s/' % project_code)
def download_subtitles(download_url, dest_path, proxies=None):
    """Fetch a gzip-compressed subtitle and write it to *dest_path*.

    Returns True on success, False when the HTTP open yields nothing.
    """
    session = get_session(proxies=proxies, http_10=True)
    # The server expects a legacy browser UA and an explicit zero length.
    session.addheaders = [('User-Agent', 'Mozilla/4.0 (compatible; Synapse)'),
                          ('Content-Length', 0)]
    res = session.open(download_url)
    if not res:
        return False
    gf = gzip.GzipFile(fileobj=StringIO(res.read()))
    try:
        with open(dest_path, 'wb') as f:
            f.write(gf.read())
            f.flush()
    finally:
        # BUG FIX: close the gzip stream even when decompression/write fails.
        gf.close()
    return True
def __init__(self, dataset, model_type, loss_type, dim_input, dim_output,
             alpha, beta, K, batch_size, is_train, num_updates, norm):
    '''Build the MAML graph, saver, and (in training mode) log dirs.

    dataset: task dataset; `dataset.name` is embedded in the task name
    model_type: model type used for each task, choice: ('fc',)
    loss_type: the form of the objective function
    dim_input: input dimension
    dim_output: desired output dimension
    alpha: fixed learning rate used for the inner gradient step
    beta: learning rate used for the (meta) Adam optimizer
    K: perform K-shot learning
    batch_size: number of tasks sampled in each iteration
    is_train: when True, create summary/checkpoint dirs and a FileWriter
    num_updates: number of inner-loop gradient updates
    norm: normalization scheme forwarded to the model graph
    '''
    self._sess = utils.get_session(1)
    self._is_train = is_train
    self._dataset = dataset
    self._alpha = alpha
    self._K = K
    self._norm = norm
    self._dim_input = dim_input
    self._dim_output = dim_output
    self._batch_size = batch_size
    self._num_updates = num_updates
    self._meta_optimizer = tf.train.AdamOptimizer(beta)
    # Second derivatives are always computed (flag is fixed to False).
    self._avoid_second_derivative = False
    self._task_name = 'MAML.{}_{}-shot_{}-updates_{}-batch_norm-{}'.format(
        dataset.name, self._K, self._num_updates, self._batch_size, self._norm)
    log.infov('Task name: {}'.format(self._task_name))
    # Build placeholder
    self._build_placeholder()
    # Build model
    model = self._import_model(model_type)
    self._construct_weights = model.construct_weights
    # NOTE(review): attribute name has a typo ('contruct') — kept because
    # other methods may reference it; confirm before renaming.
    self._contruct_forward = model.construct_forward
    # Loss function
    self._loss_fn = self._get_loss_fn(loss_type)
    self._build_graph(dim_input, dim_output, norm=norm)
    # Misc: per-task summary/checkpoint locations and a bounded saver.
    self._summary_dir = os.path.join('log', self._task_name)
    self._checkpoint_dir = os.path.join('checkpoint', self._task_name)
    self._saver = tf.train.Saver(max_to_keep=10)
    if self._is_train:
        if not os.path.exists(self._summary_dir):
            os.makedirs(self._summary_dir)
        self._writer = tf.summary.FileWriter(self._summary_dir,
                                             self._sess.graph)
        if not os.path.exists(self._checkpoint_dir):
            os.makedirs(self._checkpoint_dir)
    # Initialize all variables
    log.infov("Initialize all variables")
    self._sess.run(tf.global_variables_initializer())
def test_datastore(app, client):
    # Test that user record is properly set after proper 2FA setup.
    sms_sender = SmsSenderFactory.createSender("test")
    data = dict(email="*****@*****.**", password="******")
    response = client.post("/login", data=json.dumps(data),
                           headers={"Content-Type": "application/json"})
    assert response.jdata["meta"]["code"] == 200
    session = get_session(response)
    # A login without 2FA configured drops the user into setup mode.
    assert session["tf_state"] == "setup_from_login"
    # setup: choose SMS as the method and register a phone number
    data = dict(setup="sms", phone="+111111111111")
    response = client.post("/tf-setup", data=json.dumps(data),
                           headers={"Content-Type": "application/json"})
    assert sms_sender.get_count() == 1
    session = get_session(response)
    assert session["tf_state"] == "validating_profile"
    assert session["tf_primary_method"] == "sms"
    # The one-time code is the last whitespace-separated token of the SMS.
    code = sms_sender.messages[0].split()[-1]
    # submit token and show appropriate response
    response = client.post("/tf-validate", data=dict(code=code),
                           follow_redirects=True)
    assert b"Your token has been confirmed" in response.data
    session = get_session(response)
    # Verify that successful login clears session info
    assert not tf_in_session(session)
    # The datastore should now carry the confirmed 2FA settings.
    with app.app_context():
        user = app.security.datastore.find_user(email="*****@*****.**")
        assert user.tf_primary_method == "sms"
        assert user.tf_phone_number == "+111111111111"
        assert "enckey" in user.tf_totp_secret
def test_gan_loss(logits_real, logits_fake, d_loss_true, g_loss_true):
    """Evaluate gan_loss on the given logits and compare against the
    reference discriminator/generator losses (tolerance 1e-8)."""
    tf.reset_default_graph()
    with get_session() as sess:
        d_loss, g_loss = sess.run(
            gan_loss(tf.constant(logits_real), tf.constant(logits_fake)))
        err_d = rel_error(d_loss_true, d_loss)
        err_g = rel_error(g_loss_true, g_loss)
        print("Maximum error in d_loss: %g" % err_d)
        print("Maximum error in g_loss: %g" % err_g)
        passed = err_d < 1e-8 and err_g < 1e-8
        print("GAN Loss test passed!" if passed else "GAN Loss test failed!")
def lambda_handler(event, context):
    """
    Prepares for an AMI deployment.
    """
    # Unpack the CodePipeline job and its user configuration.
    job = event["CodePipeline.job"]
    input_bucket, input_key = get_input_artifact_location(job)
    output_bucket, output_key = get_output_artifact_location(job)
    params = get_user_parameters(job)
    assume_role_arn = params["AssumeRoleArn"]
    image_parameter_name = params["ImageParameterName"]
    stack_name = params["StackName"]
    template_filename = params["TemplateFilename"]

    # S3 client lives in the pipeline account; CFN/SSM in the target account.
    pipeline_s3_client = get_artifact_s3_client(job)
    target_session = get_session(
        role_arn=assume_role_arn, session_name="prepare-ami-deployment"
    )
    target_cfn_client = target_session.client("cloudformation")
    target_ssm_client = target_session.client("ssm")

    # Read imageDetail.json out of the input artifact zip to find the image.
    with download_zip_file(
        s3_client=pipeline_s3_client, bucket=input_bucket, key=input_key
    ) as zip_file:
        detail_json = zip_file.read("imageDetail.json").decode("utf-8")
        log("IMAGE_DETAIL_STRING", detail_json)
        image = json.loads(detail_json)["ImageURI"]
        log("IMAGE", image)

    # Point the SSM parameter at the image for the CFN deployment stage.
    target_ssm_client.put_parameter(
        Name=image_parameter_name, Value=image, Type="String", Overwrite=True
    )

    # Ship the stack's current template as the output artifact,
    # to be used by the CloudFormation deployment stage of the pipeline.
    template = get_cloudformation_template(
        cfn_client=target_cfn_client, stack_name=stack_name
    )
    with create_zip_file({template_filename: template}) as zip_path:
        pipeline_s3_client.upload_file(zip_path, output_bucket, output_key)
def report_default():
    """Generate the default report for the posted project/date and serve it.

    On failure the error is flashed and logged, and the user is sent home.
    """
    messages = get_session()['messages']
    project_id = request.forms.get('project_id')
    ref_date = request.forms.get('ref_date')
    try:
        # write_report returns (filename, root_dir).
        fname, froot = report.write_report(project_id, ref_date)
        return static_file(fname, root=froot, download=True)
    except Exception as ex:
        messages['fail'].append('Report generation failed. %s' % str(ex))
        # BUG FIX: replaced a stray debug print(ex) with proper logging,
        # and dropped the commented-out flash message.
        logging.error('report generation failed: %s', str(ex), exc_info=True)
    return redirect('/')
def check_call_records(request):
    """Check whether fetching call records needs verification.

    (Original docstring, zh): 抓取通话记录检验是否需要验证码,图片验证码,
    如果需要,返回验证码链接 & 触发发送短信验证码.

    Returns a JSON body {'ok', 'error', 'data'} where `data` carries the
    captcha details on success and `error` the failure reason otherwise.
    """
    session = get_session(request.query['session_id'])
    spider = select_spider(**session)
    ok, ext = spider.check_call_records(session)
    if ok:
        body = {'ok': True, 'error': None, 'data': ext}
    else:
        body = {'ok': False, 'error': ext, 'data': None}
    return web.json_response(body)
def main(args=sys.argv[1:]):
    """Run inference/transfer training for the hand model, then judge it.

    NOTE(review): the default `args` is captured at import time, and TF
    must not be imported before set_tf_environ runs — hence the local
    imports below.
    """
    args, tf_args, generator_args, nms_args, inference_args, transfer_args = parse_args(
        args)
    # Configure the TF environment variables *before* tensorflow is imported.
    from utils import set_tf_environ
    set_tf_environ(**vars(tf_args))
    import tensorflow as tf
    import keras.backend as K
    from utils import get_session, get_name, record_hyperparameters
    from model import build_model, model_path
    K.set_session(get_session())
    import judger_hand
    from model import load_model
    # Fall back to the judge's inputs/output stream when none were given.
    imgs = inference_args.inputs or judger_hand.get_file_names()
    output = inference_args.output or judger_hand.get_output_file_object()
    model = load_model(inference_args.weights, vars(nms_args), compile=True)
    sep = ',' if inference_args.output else ' '
    name = get_name(transfer_args.__dict__.pop('name'), 'transfer')
    log_dir, model_dir = model_path(name)
    print(name)
    writer = tf.summary.FileWriter(log_dir)
    record_hyperparameters(args, writer)
    # Persist the model architecture next to the checkpoints.
    with open('%s/config.yml' % model_dir, 'w') as f:
        f.write(model.to_yaml())
    try:
        buff = inference_train(model, imgs, output=output, sep=sep,
                               **vars(generator_args), **vars(transfer_args),
                               name=name, writer=writer)
    except KeyboardInterrupt:
        # Allow a manual stop; still attempt judging below.
        pass
    if not inference_args.output:
        score, err = judger_hand.judge()
        print('score', score)
        if err is not None:  # in case we failed to judge your submission
            raise Exception(err)
        return score
    return model, name
def train(dbm, xs, init_lr, num_epoch, batch_size, mf_k, pcd_k,
          pcd_chain_size, output_dir):
    """Train a DBM with persistent contrastive divergence (Python 2 / TF).

    xs: training data, shuffled in place each epoch
    mf_k / pcd_k: mean-field and PCD Gibbs steps per update
    output_dir: when given, sample images are plotted after every epoch
    """
    # NOTE(review): Python 2 integer division; the assert requires len(xs)
    # to be an exact multiple of batch_size.
    num_batches = len(xs) / batch_size
    assert num_batches * batch_size == len(xs)
    vis = tf.placeholder(tf.float32, (None, ) + xs.shape[1:], name='vis_input')
    lr = tf.placeholder(tf.float32, (), name='lr')
    # One persistent-chain placeholder (and initial random state) per layer.
    pcd_states = [
        tf.placeholder(tf.float32, (pcd_chain_size, i)) for i in dbm.num_units
    ]
    pcd_vals = [
        np.random.uniform(0, 1, (pcd_chain_size, i)) for i in dbm.num_units
    ]
    loss, updates, new_pcd_states = dbm.train_step(lr, vis, pcd_states, mf_k,
                                                   pcd_k)
    if output_dir is not None:
        sample_imgs = dbm.sample_from_dbm(100, 1000)
    sess = utils.get_session()
    with sess.as_default():
        tf.initialize_all_variables().run()
        for i in range(num_epoch):
            np.random.shuffle(xs)
            t = time.time()
            loss_vals = np.zeros(num_batches)
            for b in range(num_batches):
                batch_xs = xs[b * batch_size:(b + 1) * batch_size]
                feed_dict = {
                    vis: batch_xs,
                    lr: utils.scheduled_lr(init_lr, i, num_epoch)
                }
                # Feed the persistent chain state carried over from the
                # previous step; it is replaced by new_pcd_states below.
                for key, val in zip(pcd_states, pcd_vals):
                    feed_dict[key] = val
                loss_val, _, pcd_vals = sess.run(
                    [loss, updates, new_pcd_states], feed_dict=feed_dict)
                loss_vals[b] = loss_val
            print 'Epoch: %d, Train Loss: %s' % (i, loss_vals.mean())
            print '\tTime took:', time.time() - t
            if output_dir is not None:
                if not os.path.exists(output_dir):
                    os.makedirs(output_dir)
                imgs = sess.run(sample_imgs)
                img_path = os.path.join(output_dir, 'epoch%d-plot.png' % i)
                utils.vis_samples(imgs, 10, 10, (28, 28), img_path)
def get(self, hotelroom_id, reserved_night_date):
    """Booking-curve endpoint (stub): returns empty occupancy/revenue
    series until Question 2 is implemented."""
    # get a database session (not used by the stub yet)
    session = get_session()
    occupancy = []
    revenue_booking_curve = []
    # TODO: Question 2 implementation goes here.
    curve = {
        "occupancy": occupancy,
        "revenue": revenue_booking_curve,
    }
    return {'booking_curve': curve}
def _handle_logout(args: List[str]) -> None:
    """Delete the stored session after confirming with the user.

    Rejects any trailing CLI arguments; does nothing when no session exists.
    """
    if args:
        # BUG FIX: the message referred to `login` in the logout handler.
        print("Please do not put arguments after `logout`")
        return
    sess = get_session()
    if sess is None:
        print("No session found")
        return
    choice = input("Are you sure you want to delete user {}? [y/n]".format(
        sess.cookies.get("dmoj-cli-username", "[UNKNOWN USER]")))
    if choice.strip().lower() == "y":
        delete_session()
    else:
        print("Not deleting!")
def get(self, hotelroom_id, start_date, end_date):
    """Return the room's occupancy percentage over [start_date, end_date].

    Occupancy = (bookings - cancellations) * 100 / (capacity * nights),
    returned as a string rounded to 2 decimal places, or None when the
    room has no available nights.
    """
    # parse the dates as they're sent as a string e.g. 2018-01-01
    start_date = datetime.strptime(start_date, '%Y-%m-%d').date()
    end_date = datetime.strptime(end_date, '%Y-%m-%d').date()
    # get a session/connection to the database
    session = get_session()
    # get the hotelroom object to calculate capacity
    hotelroom = session.query(HotelRooms).get(hotelroom_id)

    def _count_rows(row_type):
        # Count Bookings rows of one type inside the stay window.
        # BUG FIX: use .scalar() — the original called .all(), which returns
        # a list of tuples, and then subtracted the two lists (TypeError).
        # Also normalizes func.Count/func.count to one spelling.
        return session.query(func.count(Bookings)).filter(
            and_(
                Bookings.hotelroom_id == hotelroom_id,
                Bookings.reserved_night_date.between(start_date, end_date),
                Bookings.row_type == row_type,
            )
        ).scalar() or 0

    num_of_bookings = _count_rows('booking')
    # NOTE(review): 'cancellations' (plural) kept from the original while
    # 'booking' is singular — confirm against the data before changing.
    num_of_cancellations = _count_rows('cancellations')

    # calculate numerator and denominator for occupancy
    net_bookings = num_of_bookings - num_of_cancellations
    total_available_rooms = hotelroom.capacity * ((end_date - start_date).days + 1)
    # guard against division by zero
    if total_available_rooms == 0:
        occupancy = None
    else:
        # percentage as a string rounded to 2 decimal places
        occupancy = str(round(net_bookings * 100 / total_available_rooms, 2))
    return {'occupancy': occupancy}
def register():
    """Render the registration form; create the account on a valid POST."""
    msgs = get_session()['messages']
    if request.method != 'POST':
        form = _(username=None, email=None)
    else:
        form = RegisterForm(request, authnz)
        if form.is_valid():
            form.save()
            msgs['pass'].append('Account created!')
            return redirect('/')
        # Invalid submission: flash errors and re-render with the input.
        msgs['fail'].extend(form.errors)
    return {
        'title': 'Register To Use CENTrak',
        'form': _(username=form.username, email=form.email),
    }
def register():
    """Registration page: create the account on POST, else show the form."""
    flash = get_session()['messages']
    is_post = request.method == 'POST'
    form = RegisterForm(request, authnz) if is_post else _(username=None, email=None)
    if is_post:
        if form.is_valid():
            form.save()
            flash['pass'].append('Account created!')
            return redirect('/')
        flash['fail'].extend(form.errors)
    return {
        'title': 'Register To Use CENTrak',
        'form': _(username=form.username, email=form.email),
    }
def test(name, ckpt, image_pack_name=None, output_filename=None): try: external_image = image_pack_name.index('.') != -1 except: external_image = None if image_pack_name is None: data = None elif not external_image: print("Loading image pack {}".format(image_pack_name)) data = load_data(image_pack_name.split(',')) with get_session() as sess: fcn = FCN(sess=sess, name=name) if ckpt != "-1": fcn.load(ckpt) else: fcn.load_absolute(name) if not external_image: errors, _, _, _, ret, conf = fcn.test( scales=[0.5], summary=True, summary_key=123, data=data, eval_speed=False, visualize=output_filename is None) if output_filename is not None: try: os.mkdir('outputs') except: pass with open('outputs/%s.pkl' % output_filename, 'wb') as f: pickle.dump(ret, f) with open('outputs/%s_err.pkl' % output_filename, 'wb') as f: pickle.dump(errors, f) with open('outputs/%s_conf.pkl' % output_filename, 'wb') as f: pickle.dump(conf, f) print ret print 'results dumped to outputs/%s_err.pkl' % output_filename else: img = cv2.imread(image_pack_name) # reverse gamma correction for sRGB img = (img / 255.0)**2.2 * 65536 images = [img] fcn.test_external(images=images, fns=[image_pack_name], show=False)
def test_sample_noise():
    """sample_noise must return a fresh uniform tensor in [-1, 1]."""
    batch_size, dim = 3, 4
    tf.reset_default_graph()
    with get_session() as sess:
        z = sample_noise(batch_size, dim)
        # Check z has the correct shape
        assert z.get_shape().as_list() == [batch_size, dim]
        # Make sure z is a Tensor and not a numpy array
        assert isinstance(z, tf.Tensor)
        # Check that we get different noise for different evaluations
        first = sess.run(z)
        second = sess.run(z)
        assert not numpy.array_equal(first, second)
        # Check that we get the correct range
        assert numpy.all(first >= -1.0) and numpy.all(first <= 1.0)
        print("Sample Noise test passed!")
def __init__(self):
    """Load options, open a session, and build the category lookup tables."""
    self.opts = opts_init()
    self.session = get_session()
    # Category slug -> display name (kept in Chinese, as shown on the site).
    self.cate_d = {
        'pingpang': '乒乓球',
        'badminton': '羽毛球',
        'swim': '游泳',
        'dance': '舞蹈',
    }
    # Category slug -> bookable venue ids.
    badminton_ids = list(range(16, 24)) + list(range(67, 75))
    pingpang_ids = list(range(38, 61)) + list(range(26, 29))
    self.query_d = {
        'badminton': badminton_ids,
        'pingpang': pingpang_ids,
        'swim': [65],
        'dance': [25, 77, 170, 172, 173],
    }
def train():
    """Train the TodAutoEncoder per config.yml, checkpoint periodically,
    and finally export a SavedModel to the configured pb_path."""
    config = load_yaml_config("config.yml")
    display_step = config["model"]["display_step"]
    evaluate_step = config["model"]["evaluate_step"]
    save_step = config["model"]["save_step"]
    checkpoint_path = config["model"]["checkpoint_path"]
    pickle_path = config["data"]["pickle_path"]
    pb_path = config["model"]["pb_path"]
    model = TodAutoEncoder(config)
    print(model.input_x)
    print(model.loss)
    # The pickle holds two records; only the second (the test split's
    # (card, sparse) pairs) is used here — the first is skipped.
    with open(pickle_path, "rb") as f:
        _ = pickle.load(f)
        _, sparse_test = pickle.load(f)
    card, sparse = zip(*sparse_test)
    test = dense_transform(list(sparse))
    sess = get_session()
    sess.run(tf.global_variables_initializer())
    batch_data = get_batch()
    for batch in batch_data:
        _, loss_train, step = model.step(sess, batch)
        if step % display_step == 0:
            print("step: %d => loss: %.4f" % (step, loss_train))
        if step % evaluate_step == 0:
            # NOTE(review): evaluation runs model.step on the test set,
            # which appears to also apply a training update — confirm.
            _, loss_test, _ = model.step(sess, test)
            print("{0:-^30}".format("evaluation loss: %.4f" % loss_test))
            print("")
        if step % save_step == 0:
            model.save(sess, checkpoint_path)
    # Final checkpoint after the last batch.
    model.save(sess, checkpoint_path)
    # Export a fresh SavedModel: input_x in, loss out.
    shutil.rmtree(pb_path, ignore_errors=True)
    builder = tf.saved_model.builder.SavedModelBuilder(pb_path)
    inputs = {'input_x': tf.saved_model.utils.build_tensor_info(model.input_x)}
    outputs = {'output': tf.saved_model.utils.build_tensor_info(model.loss)}
    signature = tf.saved_model.signature_def_utils.build_signature_def(
        inputs=inputs,
        outputs=outputs,
        method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME)
    builder.add_meta_graph_and_variables(sess, [tag_constants.SERVING],
                                         {'my_signature': signature})
    builder.save()
def process_request(self, request):
    """Resolve facebook app-request ids from the query string and stash
    them on the facebook session as `fb.app_requests`.

    NOTE(review): in DEBUG, newly created AppRequest rows are hydrated
    from the Graph API (failures silently ignored).
    """
    app_requests = []
    if request.GET.get('request_ids', None):
        fb = get_session(request)
        request_ids = urllib.unquote(request.GET.get('request_ids'))
        request_ids = request_ids.split(',')
        logger.debug('Got app request ids: %s' % request_ids)
        for id in request_ids:
            r, created = AppRequest.objects.get_or_create(id=int(id))
            if settings.DEBUG and created:
                try:
                    graph = get_graph(request)
                    r.get_from_facebook(graph, save=True)
                except GraphAPIError:
                    # best-effort hydration only; keep the bare row
                    pass
            app_requests.append(r.id)
        if len(app_requests) > 0:
            fb.app_requests = app_requests
def login():
    """Log a user in; on success, honor a stored post-login redirect."""
    if request.method == 'POST':
        session = get_session()
        msgs = session['messages']
        username = request.POST.get('username', '').strip()
        password = request.POST.get('password', '').strip()
        if not (username and password):
            msgs['warn'].append('Username and password required.')
        elif authnz.login(username, password):
            # hack: get redirect url from session if available
            target = session.get('login_redirect_url', '/')
            session['login_redirect_url'] = None
            return redirect(target)
        else:
            msgs['fail'].append('Invalid username and/or password.')
    return {'title': 'Log In'}
def import_station(feeder_code):
    """Import the feeder's stations from an uploaded CSV file."""
    feeder = db.Feeder.get_by_code(feeder_code)
    if not feeder:
        raise HTTPError(404, "Feeder not found: %s" % feeder_code)
    msgs = get_session()['messages']
    if request.method == 'POST':
        form = forms.StationImportForm(request, '.csv')
        if not form.is_valid():
            msgs['fail'].extend(form.errors)
        else:
            result = imxport.feeder_stations(feeder, form._instance.impfile)
            if result.errors:
                msgs['fail'].extend(result.errors)
            else:
                msgs['pass'].append(result.summary())
            return redirect('/admin/feeders/%s/stations/' % feeder_code)
    return {'title': 'Import Feeder Stations'}
def register():
    """Registration form; new accounts await admin activation."""
    flash = get_session()['messages']
    if request.method != 'POST':
        form = _(username=None, email=None)
    else:
        form = RegisterForm(request, authnz)
        if form.is_valid():
            form.save()
            flash['pass'].append(
                "Account has been created. However you'd have to wait on the "
                "administrator to activate the account before you can use it."
            )
            return redirect('/')
        flash['fail'].extend(form.errors)
    return {
        'title': 'Register To Use CENTrak',
        'form': _(username=form.username, email=form.email),
    }
def load_model(mode, output_dir, use_word):
    """Create a session and ChatModel sized to the vocab; restore saved
    weights unless training from scratch.

    Returns (sess, model, vocab_file).
    """
    word_str = "word" if use_word else "char"
    vocab_file = os.path.join(output_dir, f"train_vocab.{word_str}")
    # Vocab size = number of lines in the vocab file.
    with open(vocab_file) as f:
        vocab_size = len(f.readlines())
    print(f"vocab_file: {vocab_file}, vocab_size: {vocab_size}")
    sess = get_session()
    model = ChatModel(vocab_size, config)
    epoch = config["model"]["saved_epoch"]
    # Both paths initialize variables first; restoring then overwrites them.
    sess.run(tf.global_variables_initializer())
    if mode != "train" or config["model"]["resume_train"]:
        model.load(sess, config["model"]["model_path"], f"epoch-{epoch}")
        print('Successfully load model!')
    else:
        print('Init model using tf.global_variables_initializer()')
    return sess, model, vocab_file
def main():
    """Entry point: query rows for a date range, clean and sort them,
    then write the CSV output file."""
    args = get_args()
    utils.configure_logging(verbose=args.verbose, error=args.error,
                            debug=args.debug)
    session = utils.get_session(args.config)
    # Averaging window for the query.
    average = datetime.timedelta(seconds=args.average)
    # Run the query over the requested range.
    start, end = get_time_range(args.date)
    raw_rows = get_data(session, start, end, average)
    # Clean up, then sort chronologically.
    cleaned = (transform(row) for row in raw_rows)
    ordered = sort(cleaned, key='timestamp')
    # Save output file
    write_csv(args.output, rows=ordered)
def like_album(request):
    """Increment an album's like counter for the session's owner.

    Responds with JSON: ret_code 1 on success, 0 (plus a generic msg)
    on any failure.
    """
    data = {'ret_code': 0, 'ret_data': {}}
    try:
        req_data = (json.loads(request.body) if request.method == 'POST'
                    else request.GET)
        sess = utils.get_session(req_data['sess_key'])
        album = Album.objects.get(openid=sess['openid'],
                                  album_id=req_data['album_id'])
        album.like_cnt += 1
        album.save()
        data['ret_code'] = 1
    except Exception as err:
        data['msg'] = 'program or internet error.'
        logger.error(str(err))
    body = json.dumps(data, ensure_ascii=False)
    return HttpResponse(body, content_type="application/json")
def test_incoming_mail_pipeline(soledad_client, tmpdir):
    """End-to-end incoming-mail check: a secret sent by email must come
    back intact through the encrypted incoming pipeline.

    NOTE(review): generator-style test — presumably driven by an
    inlineCallbacks-like decorator applied elsewhere; the yields await
    deferreds.
    """
    # create a user and login
    session = yield get_session(tmpdir)
    # create a OpenPGP key and upload it
    key = gen_key(session.username)
    yield put_key(session.uuid, session.token, str(key.pubkey))
    # get a soledad client for that user
    client = soledad_client(uuid=session.uuid, passphrase='123',
                            token=session.token)
    # send the email
    sent_secret = send_email(session.username)
    # check the incoming blob and compare sent and received secrets
    fd = yield get_incoming_fd(client)
    received_secret = get_received_secret(key, fd)
    assert sent_secret == received_secret
def import_station(feeder_code):
    """Upload a CSV of stations and import them under the given feeder."""
    feeder = db.Feeder.get_by_code(feeder_code)
    if not feeder:
        raise HTTPError(404, "Feeder not found: %s" % feeder_code)
    flash = get_session()['messages']
    if request.method == 'POST':
        form = forms.StationImportForm(request, '.csv')
        if form.is_valid():
            outcome = imxport.feeder_stations(feeder, form._instance.impfile)
            if outcome.errors:
                flash['fail'].extend(outcome.errors)
            else:
                flash['pass'].append(outcome.summary())
            return redirect('/admin/feeders/%s/stations/' % feeder_code)
        flash['fail'].extend(form.errors)
    return {
        'title': 'Import Feeder Stations'
    }
def test_setup_bad_phone(app, client):
    """2FA SMS setup: an invalid phone is rejected (no SMS sent); a valid
    one triggers exactly one SMS whose code validates."""
    creds = dict(email="*****@*****.**", password="******")
    response = client.post("/login", data=creds, follow_redirects=True)
    assert b"Two-factor authentication adds an extra layer of security" in response.data
    sms_sender = SmsSenderFactory.createSender("test")
    # Invalid phone: flagged, and no SMS goes out.
    bad = dict(setup="sms", phone="555-1212")
    response = client.post("/tf-setup", data=bad, follow_redirects=True)
    assert b"Phone number not valid" in response.data
    assert sms_sender.get_count() == 0
    # Valid phone: exactly one SMS with the code as its last token.
    client.post(
        "/tf-setup", data=dict(setup="sms", phone="650-555-1212"), follow_redirects=True
    )
    assert sms_sender.get_count() == 1
    code = sms_sender.messages[0].split()[-1]
    response = client.post("/tf-validate", data=dict(code=code), follow_redirects=True)
    assert b"Your token has been confirmed" in response.data
    assert not tf_in_session(get_session(response))
def dump_result(name, ckpt, image_pack_name=None):
    """Run an FCN checkpoint over a test set and pickle outputs and gts.

    Args:
        name: model name used to construct the FCN and the output filename.
        ckpt: checkpoint identifier to load.
        image_pack_name: optional comma-separated image pack names; when
            None, `fcn.test` falls back to its default data.

    Side effects: writes outputs/<name>-<ckpt>-<image_pack_name>.pkl.
    """
    data = None if image_pack_name is None else load_data(image_pack_name.split(','))
    with get_session() as sess:
        fcn = FCN(sess=sess, name=name)
        fcn.load(ckpt)
        _, _, outputs, gts = fcn.test(
            scales=[0.5], summary=True, summary_key=123, data=data)
    result = {
        'outputs': np.array(outputs),
        'gts': np.array(gts),
    }
    # Original passed a bare open() to pickle.dump and never closed the
    # handle; use a context manager so the file is flushed and closed.
    with open("outputs/%s-%s-%s.pkl" % (name, ckpt, image_pack_name), "wb") as f:
        pickle.dump(result, f)
def delete(zone, name, yes):
    """Delete the A record and PTR reverse record.

    For an A record, the matching PTR record in the reverse zone is looked
    up and (after confirmation, unless --yes) deleted as well.  CNAME
    records are deleted directly with no PTR handling.
    """
    ds = designate_client.Client(session=get_session())

    # Fully-qualified record name inside the zone
    record_name = "%s.%s" % (name, zone)
    record = try_assign(ds.recordsets.get, zone, record_name, exit=True)

    # BUG FIX: the original used `record['type'] in 'A'` / `in 'CNAME'`,
    # which is a substring membership test ('' and 'NAME' would match),
    # not an equality check.
    if record['type'] == 'A':
        _show(record)
        if (yes or click.confirm("Are you sure you want to delete this record?",
                                 abort=True)):
            click.echo("Deleting A record for %s" % record_name)
            old_record_ip = ipaddress.ip_address(record['records'][0])
            old_ptr_record_name, old_ptr_zone = \
                _get_ptr_name_zone(old_record_ip)
            _show(ds.recordsets.delete(zone, record_name))
            ptr_record = try_assign(ds.recordsets.get, old_ptr_zone,
                                    old_ptr_record_name)
            if ptr_record is not None:
                _show(ptr_record)
                if (yes or click.confirm("Do you want to delete PTR record?",
                                         abort=True)):
                    # click.echo for consistency with the other messages
                    # (original mixed in a bare print here)
                    click.echo("Deleting PTR record for %s" % old_ptr_record_name)
                    _show(ds.recordsets.delete(old_ptr_zone, old_ptr_record_name))
    elif record['type'] == 'CNAME':
        _show(record)
        if (yes or click.confirm("Are you sure you want to delete this record?",
                                 abort=True)):
            click.echo("Deleting CNAME record for %s" % record_name)
            _show(ds.recordsets.delete(zone, record_name))
def scrape_youtube(url):
    """Scrape the transcript of a YouTube video.

    Fetches the watch page, extracts the InnerTube API key and context
    from the page HTML, then calls the get_transcript endpoint.

    Returns a tuple (title, transcript_log) where transcript_log is a
    list of (formatted_start_offset, cue_text) pairs.
    """
    def _between(text, start, end):
        # substring of `text` after the first `start` up to the next `end`
        return text.split(start)[1].split(end)[0]

    transcript_log = []
    with get_session() as ses:
        page = ses.get(url)
        html = page.text

        title = _between(html, 'og:title" content="', '">').strip()
        innertube_api_key = _between(html, '"INNERTUBE_API_KEY":"', '"')
        client_screen_nonce = _between(html, '"EVENT_ID":"', '"')
        click_tracking_params = _between(
            html.split('engagement-panel-transcript')[1],
            '"clickTrackingParams":"', '"')
        params = _between(html, 'serializedShareEntity":"', '"')
        ytcfg = json.loads(_between(html, '"INNERTUBE_CONTEXT":', '});'))

        payload = {
            "context": {
                **ytcfg,
                "user": {},
                "clientScreenNonce": client_screen_nonce,
                "clickTracking": {
                    "clickTrackingParams": click_tracking_params
                }
            },
            "params": params
        }
        reply = ses.post(
            'https://www.youtube.com/youtubei/v1/get_transcript?key='
            + innertube_api_key,
            json=payload)
        transcript = reply.json()

        for action in transcript['actions']:
            body = action['updateEngagementPanelAction']['content'][
                'transcriptRenderer']['body']['transcriptBodyRenderer']
            for group in body['cueGroups']:
                renderer = group['transcriptCueGroupRenderer']
                # NOTE: renamed the original local `time` so it no longer
                # shadows the `time` module used elsewhere in this file
                start_offset = renderer['formattedStartOffset']['simpleText']
                for cue in renderer['cues']:
                    cue_text = cue['transcriptCueRenderer']['cue']['simpleText']
                    transcript_log.append((start_offset, cue_text))
    return (title, transcript_log)
def test_incoming_mail_pipeline(soledad_client, tmpdir):
    """End-to-end incoming-mail check: the secret sent in an email must
    equal the secret recovered from the user's incoming blob."""
    # provision a user session
    account = yield get_session(tmpdir)

    # generate an OpenPGP keypair and publish the public half
    keypair = gen_key(account.username)
    yield put_key(account.uuid, account.token, str(keypair.pubkey))

    # soledad client for the same account
    client = soledad_client(
        uuid=account.uuid, passphrase='123', token=account.token)

    # send the email and keep the secret it embeds
    sent_secret = send_email(account.username)

    # pull the incoming blob and verify the round trip
    fd = yield get_incoming_fd(client)
    assert sent_secret == get_received_secret(keypair, fd)
def test_input_gamma(name, ckpt, input_gamma, image_pack_name=None, output_filename=None): config_set_input_gamma(float(input_gamma)) if image_pack_name is None: data = None else: data = load_data(image_pack_name.split(',')) with get_session() as sess: fcn = FCN(sess=sess, name=name) fcn.load(ckpt) _, _, _, _, ret = fcn.test(scales=[0.5], summary=True, summary_key=123, data=data) if output_filename is not None: with open('outputs/%s.pkl' % output_filename, 'wb') as f: pickle.dump(ret, f) print ret print 'results dumped'
def test_evil_validate(app, client):
    """Log in, then post a garbage token to /tf-validate: the app should
    treat it as hostile and log the user out."""
    identity_log = []

    @identity_changed.connect_via(app)
    def on_identity_changed(app, identity):
        identity_log.append(identity.id)

    resp = authenticate(client, "*****@*****.**")
    assert "tf_state" not in get_session(resp)
    # Jill is 4th user to be added in utils.py
    assert identity_log[0] == 4
    del identity_log[:]

    # attempt validation with a bogus code
    resp = client.post("/tf-validate", data=dict(code="?"), follow_redirects=True)
    # the bogus attempt should have logged us out
    assert not identity_log[0]
    del identity_log[:]
def project_sync(project_id):
    """Pull new captures (or updates) for a project's forms from the API.

    Reads either 'project_xforms' or 'project_uforms' from the POSTed form
    data (a comma-separated list of form ids), pulls new records for each
    form starting after the locally stored count, saves them in batches,
    and records pass/fail messages in the session before redirecting back
    to the project page.
    """
    project = db.Project.get_by_id(project_id)
    if not project:
        raise HTTPError(404, 'Project not found: %s' % project_id)
    messages = get_session()['messages']

    # BUG FIX: the original initialized xforms_to_sync to None, so a POST
    # containing neither key crashed with a TypeError at the for-loop.
    # Default to an empty list so such a request just redirects.
    form_type, xforms_to_sync = None, []
    for key in ['project_xforms', 'project_uforms']:
        if key in request.forms:
            form_type = key
            xforms_to_sync = request.forms.get(key).split(',')
    sync_target = (db.Capture if form_type == 'project_xforms' else db.Update)

    for xform_id in xforms_to_sync:
        # existing record count determines where the pull resumes
        count = sync_target.count_by_form(xform_id)
        xform = db.XForm.get_by_id(xform_id)
        try:
            transformed, pull_count = [], 0
            for captures in api.get_captures(xform.id, start=count):
                if captures:
                    pull_count += len(captures)
                    for c in captures:
                        transformed.append(transform.to_flatten_dict(c))
                    # save batch-by-batch to keep memory bounded
                    sync_target.save_many(transformed)
                    transformed = []
            messages['pass'].append('%s captures pulled.' % pull_count)
        except ConnectionError:
            messages['fail'].append('Sync failed. Internet connection required.')
        except Exception as ex:
            messages['fail'].append('Sync failed. %s' % str(ex))
    return redirect('/projects/%s/' % project_id)
def __init__(self, search_url=None, proxies=None):
    """Create an HTTP session (optionally through proxies) and record the
    search URL, falling back to the default sub-domain when none is given."""
    self.session = get_session(proxies=proxies)
    if search_url:
        self.search_url = search_url
    else:
        self.search_url = get_sub_domain()
    # no auth token until one is fetched
    self.token = None
from collections import OrderedDict except: from ordereddict import OrderedDict import utils import trello import settings # Enabling a tiny bit of cache so Trello doesn't block us cache = pyfscache.FSCache('cache', minutes=2) long_cache = pyfscache.FSCache('cache', days=1) # HTML parser for unescaping of html stuff parser = HTMLParser.HTMLParser() session = utils.get_session() client = trello.TrelloClient( api_key=settings.API_KEY, api_secret=settings.API_SECRET, token=settings.TOKEN, token_secret=settings.TOKEN_SECRET, ) class MLStripper(HTMLParser.HTMLParser): def __init__(self): self.reset() self.fed = []