def sync_subscription(request):
    """Kick off a background subscription sync for the logged-in user.

    Requires an OAuth token; without one, remembers 'sync' as the
    post-auth action in the session and redirects to the authorise flow.
    """
    has_token = OauthToken.objects.filter(user_id=request.user.id).exists()
    if not has_token:
        # No token yet: note why we left, then bounce through OAuth.
        request.session[settings.OAUTH_RETURN_SESSION_KEY] = 'sync'
        return HttpResponseRedirect(reverse('authorise'))
    deferred.defer(subscriptions, request.user.id)
    return HttpResponse("Sync started")
def BatchUpdateItems(update_fn_name, cursor=None, num_updated=0, p1=None, p2=None):
    """Apply the named batch update to all Item entities, one page at a time.

    Args:
        update_fn_name: which update to apply ("rename_category" supported).
        cursor: datastore cursor to resume from (None = first page).
        num_updated: running total of entities written so far.
        p1, p2: opaque parameters forwarded to the update function.
    """
    query = Item.query()
    to_put = []
    q, cursor, more = query.fetch_page(BATCH_SIZE, start_cursor=cursor)
    for it in q:
        if update_fn_name == "rename_category":
            batch_rename_category_items(it, p1, p2)
        else:
            # Unknown update function: leave this entity untouched.
            continue
        to_put.append(it)
    if to_put:
        ndb.put_multi(to_put)
        num_updated += len(to_put)
        logging.debug(
            'Put %d items to Datastore for a total of %d',
            len(to_put), num_updated)
    # BUG FIX: previously the chain continued only while the current page
    # produced writes, so a page where every entity was skipped ended the
    # run even though more pages remained. Use the datastore's `more` flag.
    if more:
        deferred.defer(
            BatchUpdateItems, update_fn_name=update_fn_name, cursor=cursor,
            num_updated=num_updated, p1=p1, p2=p2)
    else:
        logging.debug(
            'BatchUpdateItems complete with %d updates!', num_updated)
def get(self):
    """Record an invite/mailing-list signup and notify the admins.

    Reads 'email', 'signup' and optional 'dev_type' request arguments,
    stores a MailingList entry, mails the admins an accept link, and
    schedules a delayed invite email. Responds with the entity key.
    """
    from model.user import MailingList
    email = self.get_argument('email', '')
    # NOTE(review): get_argument('signup') has no default — a request
    # without 'signup' will raise; confirm callers always send it.
    if 'developer' in self.get_argument('signup'):
        signup = 'developer'
    else:
        signup = 'enduser'
    if not email:
        logging.error('invite request w/o email')
        return
    # TODO: check for existing items
    list_item = MailingList(email=email, signup_type=signup)
    if self.get_argument('dev_type', ''):
        list_item.dev_type = self.get_argument('dev_type')
    list_item.put()
    if list_item:  # new item if check is being done from backend
        from backend.admin import send_admin_email
        msg = 'Invite Request - %s <br/>Accept: %s/admin/mail?type=%s&email=%s' % (
            email, self.context['base_url'], 'invite', email)
        deferred.defer(send_admin_email, subject=msg, message=msg)
        # _countdown is seconds, so the invite email goes out ~11 min later.
        deferred.defer(invite_email, email, _countdown=672)
    self.write(str(list_item.key()))
    return
def run(self, start=None, batch_size=100, count=0):
    """Process self.QUERY entities in deferred batches, mapping each one.

    Args:
        start: opaque query cursor to resume from (None = first batch).
        batch_size: number of entities fetched per invocation.
        count: running total of entities mapped so far.
    """
    logging.info("Mapper run start at: %s" % count)
    if start:
        logging.info("start cursor = %s" % start)
        q = set_cursor(self.QUERY, start)
    else:
        q = self.QUERY
    entities = q[0:batch_size]
    try:
        end_cursor = get_cursor(entities)
    except:
        # NOTE(review): bare except — any cursor failure silently ends the
        # whole mapper run after logging the traceback.
        logging.error(traceback.format_exc())
        return
    for entity in entities:
        logging.info("Mapper entity: %s" % entity)
        count += 1
        self.map(entity)
    # A non-empty batch may mean more work; re-defer from the cursor.
    # The chain terminates when a batch comes back empty.
    if entities:
        deferred.defer(self.run, start=end_cursor, count=count, _queue="maintenance")
def _handle_request(self, kwargs, write_result=False):
    """Create an app on the Rogerthat backend, then mirror it on the app
    configurator; on success, defer IYO organization/role setup (and an app
    group for city apps)."""
    # first try to create the app on rogerthat backend
    kwargs['route'] = 'restapi/apps'
    data = json.loads(self.request.body)
    app_type = data['app_type']
    data['type'] = app_type
    data['name'] = data['title']
    self.request.body = json.dumps(data)
    status_code, content, headers = ProxyHandlerRogerthat._handle_request(self, kwargs, write_result)
    if status_code != httplib.OK:
        # Backend refused: relay its response verbatim to the client.
        self.response.headers = headers
        self.response.status = status_code
        self.response.write(content)
    else:
        # Create app on app configurator only if it was created successfully on backend
        kwargs['route'] = '/api/apps'
        data = json.loads(self.request.body)
        backend_server = get_backend_server(kwargs['backend_id'])
        data['app_constants'], data['cloud_constants'] = get_backend_constants(backend_server)
        self.request.body = json.dumps(data)
        ProxyHandlerConfigurator._handle_request(self, kwargs)
        if self.response.status_int in (httplib.OK, httplib.CREATED):
            config = get_config(IYO_NAMESPACE)
            organization = '%(root_org)s.backends.%(backend_id)s.apps.%(app_id)s' % {
                'root_org': config.root_organization.name,
                'backend_id': backend_server.id,
                'app_id': data['app_id']
            }
            username = get_current_session().user_id
            deferred.defer(create_organization_with_roles, organization, PermissionType.APP_PERM, username)
            if not DEBUG and app_type == 1:  # only for cityapps
                deferred.defer(create_app_group, data['app_id'])
def calc_passenger_rides(offset=0, data=None):
    """Count billed passengers with at least one dispatched order vs those
    without, batched over the task queue; emails the tallies when done.

    Args:
        offset: index of the first passenger in this batch.
        data: running {"active": n, "non-active": m} tallies.
    """
    if not data:
        data = {"active": 0, "non-active": 0}
    batch_size = 500
    logging.info("querying passengers %s->%s" % (offset, offset + batch_size))
    passengers = Passenger.objects.all()[offset: offset + batch_size]
    for p in passengers:
        try:
            # Probe: passengers without billing info are skipped entirely.
            bi = p.billing_info
        except BillingInfo.DoesNotExist:
            continue
        orders = list(Order.objects.filter(passenger=p, type__in=[OrderType.SHARED, OrderType.PRIVATE]))
        if not orders:
            continue
        # Active = at least one order was actually dispatched to a ride.
        active = any(o.ride for o in orders)
        if active:
            data["active"] += 1
        else:
            data["non-active"] += 1
    if passengers:
        # BUG FIX: resume at offset + batch_size; the old "+ 1" skipped one
        # passenger per batch (compare the correct calc_users_data_csv).
        deferred.defer(calc_passenger_rides, offset=offset + batch_size, data=data)
    else:
        logging.info("all done, sending report\n%s" % data)
        send_mail_as_noreply("*****@*****.**", "passenger rides", msg="%s" % data)
def calc_passenger_order_freq(offset=0, data=None):
    """Histogram the gaps (days) between consecutive dispatched orders per
    passenger, plus a running average, batched over the task queue.

    Args:
        offset: index of the first passenger in this batch.
        data: running histogram buckets and avg/count accumulators.
    """
    if not data:
        data = {"7": 0, "14": 0, "30": 0, "longer": 0, "avg": 0, "count": 0}
    batch_size = 500
    logging.info("querying passengers %s->%s" % (offset, offset + batch_size))
    passengers = Passenger.objects.all()[offset: offset + batch_size]
    for p in passengers:
        orders = list(Order.objects.filter(passenger=p, type__in=[OrderType.SHARED, OrderType.PRIVATE]))
        orders = sorted(orders, key=lambda o: o.create_date)
        # Only orders that were actually dispatched count toward frequency.
        orders = filter(lambda o: o.ride, orders)
        for i, o in enumerate(orders):
            if len(orders) > i + 1:
                td = orders[i + 1].create_date - o.create_date
                days = td.days
                if not days:
                    # Same-day gap: express the sub-day delta as a day fraction.
                    days = td.seconds / 60.0 / 60.0 / 24.0
                # Incremental mean avoids carrying every sample between tasks.
                data["avg"] = (data["avg"] * data["count"] + days) / float(data["count"] + 1)
                data["count"] += 1
                if days <= 7:
                    data["7"] += 1
                elif days <= 14:
                    data["14"] += 1
                elif days <= 30:
                    data["30"] += 1
                else:
                    data["longer"] += 1
    if passengers:
        # BUG FIX: resume at offset + batch_size; the old "+ 1" skipped one
        # passenger per batch.
        deferred.defer(calc_passenger_order_freq, offset=offset + batch_size, data=data)
    else:
        logging.info("all done, sending report\n%s" % data)
        send_mail_as_noreply("*****@*****.**", "Order freq", msg="%s" % data)
def calc_passenger_ride_freq(offset=0, data=None):
    """Collect (passenger id, dispatched-order count, account age in days)
    rows; the growing table travels between batches as zlib-compressed pickle
    to stay under the deferred payload limit.

    Args:
        offset: index of the first passenger in this batch.
        data: None on the first call, else the compressed pickled table.
    """
    if not data:
        data = [["id", "orders", "days"]]
    else:
        data = pickle.loads(gzip.zlib.decompress(data))
    batch_size = 500
    logging.info("querying passengers %s->%s" % (offset, offset + batch_size))
    passengers = Passenger.objects.all()[offset: offset + batch_size]
    for p in passengers:
        try:
            # Probe: only passengers with billing info are included.
            bi = p.billing_info
        except BillingInfo.DoesNotExist:
            continue
        days = (default_tz_now() - bi.create_date).days
        if days:
            orders = list(Order.objects.filter(passenger=p, type__in=[OrderType.SHARED, OrderType.PRIVATE]))
            orders = filter(lambda o: o.ride, orders)
            data.append([p.id, len(orders), days])
    if passengers:
        data = gzip.zlib.compress(pickle.dumps(data), 9)
        # BUG FIX: resume at offset + batch_size; the old "+ 1" skipped one
        # passenger per batch.
        deferred.defer(calc_passenger_ride_freq, offset=offset + batch_size, data=data)
    else:
        logging.info("all done, sending report\n%s" % data)
        csv_string = ""
        for line in data:
            csv_string += ",".join([str(i) for i in line]) + "\n"
        send_mail_as_noreply("*****@*****.**", "Passenger ride freq",
                             attachments=[("passenger_freq.csv", csv_string)])
def calc_station_rating(offset=0, data=None):
    """Average passenger ratings per station over PICKMEAPP orders, batched
    over the task queue; emails a CSV report when done.

    Args:
        offset: index of the first order in this batch.
        data: running {station_name: (count, avg)} accumulator.
    """
    if not data:
        data = {}
    batch_size = 500
    logging.info("querying orders %s->%s" % (offset, offset + batch_size))
    orders = Order.objects.filter(type=OrderType.PICKMEAPP)[offset: offset + batch_size]
    for o in orders:
        if not o.station:
            continue
        if not o.passenger_rating:
            continue
        station_name = o.station.name
        if station_name in data:
            # Running average, so we never store individual ratings.
            count, avg = data[station_name]
            avg = (count * avg + o.passenger_rating) / float(count + 1)
            count += 1
            data[station_name] = (count, avg)
        else:
            data[station_name] = (1, o.passenger_rating)
    if orders:
        # BUG FIX: resume at offset + batch_size; the old "+ 1" skipped one
        # order per batch.
        deferred.defer(calc_station_rating, offset=offset + batch_size, data=data)
    else:
        logging.info("all done, sending report\n%s" % data)
        csv = [["Station Name", "Ratings", "Avg"]]
        for s in data.keys():
            csv.append([s, data[s][0], data[s][1]])
        csv_string = ""
        logging.info("csv = %s" % csv)
        for line in csv:
            # BUG FIX: counts/averages are numbers — stringify before joining;
            # str.join raised TypeError on every data row.
            csv_string += ",".join([str(i) for i in line]) + "\n"
        send_mail_as_noreply("*****@*****.**", "Shared rides data for NY",
                             attachments=[("stations_ratings.csv", csv_string)])
def calc_kpi3(start_index=0, two_address=0, single_address=0, order_count=0):
    """Count PICKMEAPP orders having both pickup and dropoff addresses vs one.

    Scans from start_index; on a datastore error mid-scan it re-defers itself
    from the last processed position. When the unbounded slice finally comes
    back empty, the tallies are emailed.
    """
    logging.info("calc_kpi3: starting at: %d" % start_index)
    orders = Order.objects.filter(type=OrderType.PICKMEAPP)[start_index:]
    first = False
    order = None
    try:
        for o in orders:
            order = o
            if not first:
                logging.info("First order = %s" % o)
                first = True
            order_count += 1
            if o.from_raw and o.to_raw:
                two_address += 1
            else:
                single_address += 1
    except Exception:
        logging.info("DB timeout raised after %d\nlast order = %s" % (order_count, order))
        deferred.defer(calc_kpi3, start_index=order_count, two_address=two_address,
                       single_address=single_address, order_count=order_count)
        # BUG FIX: stop here — previously execution fell through to the
        # `if orders:` branch below and queued a second, duplicate task.
        return
    if orders:
        deferred.defer(calc_kpi3, start_index=order_count, two_address=two_address,
                       single_address=single_address, order_count=order_count)
    else:
        # send_mail_as_noreply("*****@*****.**", "KPIs", attachments=[("kpis.csv", csv_file)])
        send_mail_as_noreply("*****@*****.**", "KPIs - Order Addresses",
                             msg="count=%d, single=%d, double=%d" % (order_count, single_address, two_address))
def update_form(service_user, form):
    # type: (users.User, DynamicFormTO) -> Form
    """Persist changes to an existing form and queue an SMI version bump.

    Validates that the payload carries a form id before touching the model.
    """
    _validate_property(form, DynamicFormTO.id)
    existing = get_form(form.id, service_user)
    result = _update_form(existing, form)
    deferred.defer(_update_smi_form_version, service_user, form.id)
    return result
def update_app_asset(app_id, asset):
    """Schedule a background update of an app's asset via the task queue.

    Args:
        app_id (unicode)
        asset (AppAssetTO)
    """
    deferred.defer(_run_update_app_asset, app_id, asset)
def _cleanup_old_shows_info(self, latest_datetime):
    """Queue deletion of every scrap-info entity older than latest_datetime."""
    from google.appengine.ext.deferred import deferred
    logging.info('Cleaning show info scrapped before than: {}'.format(latest_datetime))
    stale_entities = ScrapModel.query(ScrapModel.scrap_date_time < latest_datetime).fetch()
    for stale in stale_entities:
        # One deferred task per entity so a failure only retries that delete.
        deferred.defer(self._delete_scrap_info, stale.key)
def send_error(self, *args, **kwargs): self.set_status(500) # always 500? from utils.gae import error_msg error, err_msg = error_msg() logging.error('%s - %s' % (error, err_msg)) if gae_utils.Debug(): logging.info('debug server, not sending admin error report') return super(BaseHandler, self).send_error(*args, **kwargs) else: for bot_agent in ['google','appengine','alexa','yahoo','bot','bing']: if bot_agent in gae_utils.GetUserAgent().lower(): return# self.error_output(error, err_msg) if '405: Method Not Allowed' in err_msg: return self.error_output(error, err_msg) # TODO: include info about user and session # TODO: print out 500 page. And also do the same for 400 page. user = self.get_current_user() if user: user = user.key().name() deferred.defer(send_admin_email, subject='Error - %s' % error, message='Error Traceback - %s' % err_msg, user=user, user_agent=gae_utils.GetUserAgent(), ip=gae_utils.IPAddress(), url=gae_utils.GetUrl()) self._finished = False if getattr(self, 'server_error',''): self.server_error() # TODO: Ajax/API views should have their own error output else: self.error_output(error, err_msg) self._finished = True return
def update_ratings():
    """Refresh item ratings from recent orders while the HIT module is live."""
    module = config.HIT_MODULE
    if not (module and module.status == STATUS_AVAILABLE):
        return
    # Only orders inside the module's configured look-back window count.
    cutoff = datetime.utcnow() - timedelta(days=module.consider_days)
    recent_orders = Order.query(Order.date_created > cutoff).fetch()
    if recent_orders:
        deferred.defer(_update_items_rating, recent_orders)
def get(self):
    """Iterate every datastore namespace and defer module re-application.

    NOTE(review): `config` is a module-level name, so its truthiness is the
    same on every iteration — presumably per-namespace configuration was
    intended here; confirm before relying on the skip.
    """
    for namespace in metadata.get_namespaces():
        namespace_manager.set_namespace(namespace)
        if not config:
            continue
        # defer runs in the namespace set above.
        deferred.defer(defer_apply_module)
def create_new_user(self, user_keyname, password, set_user=True, **kwargs):
    """Create and persist a User entity keyed by user_keyname.

    Notifies the admins and schedules a welcome email (skipped for internal
    and auto-generated accounts), copies optional profile fields from kwargs,
    assigns a random short code, and optionally logs the new user in.
    """
    logging.info('creating user with keyname %s' % user_keyname)
    user_entity = User(key_name=user_keyname, username=user_keyname)
    if password:
        user_entity.password = password
    # Internal test accounts and auto-generated users get no notifications.
    if '@passtiche.appspot.com' not in user_keyname and not user_keyname.startswith('auto_gen'):
        deferred.defer(send_admin_email, subject='New %s User: %s' % (
            self._settings['title'], user_keyname),
            message='User %s just signed up for an account' % user_keyname,
            user=user_keyname, user_agent=gae_utils.GetUserAgent(), url=gae_utils.GetUrl())
    # An email-shaped keyname doubles as the account email address.
    if '@' in user_keyname:
        user_entity.email = user_keyname
        # send_welcome_email(user_entity.email, 'Welcome to %s!' % self._settings['title'])
        deferred.defer(send_welcome_email, user_entity,
                       'Welcome to %s!' % self._settings['title'], _countdown=10)  # 1300 22 minutes
    # optional args
    for k in ['first_name', 'last_name', 'phone', 'organization']:
        if kwargs.get(k):
            v = kwargs.get(k)
            setattr(user_entity, k, v)
    from utils import string as str_utils
    code = str_utils.genkey(length=7)
    user_entity.short_code = code
    user_entity.put()
    if set_user:
        self.set_current_user(user_entity)
    return user_entity
def put_all_static_content(cursor=None):
    """Migration: copy every SolutionStaticContent entity to the new schema
    (parsed coords, visible/provisioned flags set) and delete the originals,
    transactionally, 200 at a time via the migration queue."""
    qry = SolutionStaticContent.all()
    qry.with_cursor(cursor)
    models = qry.fetch(200)
    new_models = list()
    if not models:
        # Nothing left to migrate — end the deferred chain.
        return
    for m in models:
        # Key name is three segments separated by "x"; they are reordered
        # into the coords list. NOTE(review): confirm axis meaning of the
        # "<a>x<b>x<c>" -> [b, c, a] mapping.
        tmp = m.key().name().split("x", 2)
        coords = [int(tmp[1]), int(tmp[2]), int(tmp[0])]
        new_model = SolutionStaticContent(parent=parent_key(
            m.service_user, SOLUTION_COMMON), coords=coords, old_coords=coords, sc_type=m.sc_type,
            icon_label=m.icon_label, icon_name=m.icon_name, text_color=m.text_color,
            background_color=m.background_color, html_content=m.html_content,
            branding_hash=m.branding_hash, visible=True, provisioned=True, deleted=False)
        new_models.append(new_model)

    def trans():
        # Swap old entities for new ones atomically.
        db.put(new_models)
        db.delete(models)
    run_in_transaction(trans, True)
    deferred.defer(put_all_static_content, qry.cursor(), _queue=MIGRATION_QUEUE)
def check_if_node_comes_online(order_key): order = order_key.get() # type: NodeOrder order_id = order.id if not order.odoo_sale_order_id: raise BusinessException( 'Cannot check status of node order without odoo_sale_order_id') node_id = get_node_id_from_odoo(order.odoo_sale_order_id) if not node_id: raise BusinessException( 'Could not find node id for sale order %s on odoo' % order_id) status = get_node_status(node_id) if status == u'running': logging.info('Marking node from node order %s as arrived', order_id) human_user, app_id = get_app_user_tuple(order.app_user) order.populate(arrival_time=now(), status=NodeOrderStatus.ARRIVED) order.put() deferred.defer(set_node_id_on_profile, order.app_user, node_id, _transactional=True) deferred.defer(update_hoster_progress, human_user.email(), app_id, HosterSteps.NODE_POWERED, _transactional=True) else: logging.info('Node from order %s is not online yet', order_id)
def parse_bookxcess_html(document, headers, filename=None):
    """Parses Bookxcess book listings page.

    Extracts every <a>/<area> href, resolves relative URLs against
    BOOKXCESS, and queues one named download task per URL whose extension
    has a registered parser (.htm/.html recurse into this function, .pdf
    goes to the PDF parser). Already-queued tasks are silently skipped.
    """
    soup = BeautifulSoup(document.contents)
    links = soup.findAll(['a', 'area'], href=True)
    parsers = {
        '.htm': parse_bookxcess_html,
        '.html': parse_bookxcess_html,
        '.pdf': parse_bookxcess_pdf
    }
    urls = {}
    for link in links:
        url = link['href'].strip()
        # NOTE(review): only 'http://' is recognised as absolute — an https
        # link would be wrongly prefixed; confirm the site is http-only.
        if not url.startswith('http://'):
            url = BOOKXCESS + url
        urlp = urlsplit(url)
        path = urlp.path.lower()
        args = {"filename": basename(path)}
        ext = splitext(path)[1]
        if ext in parsers:
            parser = parsers[ext]
            urls[url] = (parser, args)
    for url, (parser, args) in urls.items():
        # Naming the task after the URL hash de-duplicates downloads.
        task_name = 'download-%s' % Document.hash_url(url)
        logging.info('parse_bookxcess_html: downloading %s in task %s' % (url, task_name))
        try:
            deferred.defer(download_page, url, callback=parser, args=args,
                           _name=task_name, _queue='downloader')
        except (taskqueue.TaskAlreadyExistsError, taskqueue.TombstonedTaskError):
            # Same URL already queued or recently executed — skip.
            pass
def BatchUpdateVotes(update_fn_name, cursor=None, num_updated=0, p1=None, p2=None):
    """Apply the named batch update to all Vote entities, one page at a time.

    Args:
        update_fn_name: "rename_category" or "reset-votes-json".
        cursor: datastore cursor to resume from (None = first page).
        num_updated: running total of entities written so far.
        p1, p2: opaque parameters forwarded to the update function.
    """
    logging_ext.logging_ext.info("BatchUpdateVotes %s %d" % (update_fn_name, num_updated))
    query = Vote.query()
    to_put = []
    q, cursor, more = query.fetch_page(BATCH_SIZE, start_cursor=cursor)
    for vote in q:
        if update_fn_name == "rename_category":
            batch_rename_category_votes(vote, p1, p2)
        elif update_fn_name == "reset-votes-json":
            batch_reset_vote_json(vote)
        else:
            # Unknown update function: leave this entity untouched.
            continue
        to_put.append(vote)
    if to_put:
        ndb.put_multi(to_put)
        num_updated += len(to_put)
        logging.debug(
            'Put %d votes to Datastore for a total of %d',
            len(to_put), num_updated)
    # BUG FIX: previously the chain continued only while the current page
    # produced writes, so a page where every entity was skipped ended the
    # run even though more pages remained. Use the datastore's `more` flag.
    if more:
        deferred.defer(
            BatchUpdateVotes, update_fn_name=update_fn_name, cursor=cursor,
            num_updated=num_updated, p1=p1, p2=p2)
    else:
        logging_ext.logging_ext.info(
            'BatchUpdateVotes complete with %d updates!' % num_updated)
def get(self):
    """OAuth callback: validate code + state, log the user in, and optionally
    defer fetching extra profile information.

    Renders an error page for any HttpException raised during validation.
    """
    code = self.request.GET.get('code', None)
    state = self.request.GET.get('state', None)
    try:
        # BUG FIX: both parameters are required. The old `not (code or
        # state)` only rejected the request when BOTH were missing, letting
        # a half-formed callback proceed with a None value.
        if not (code and state):
            logging.debug('Code or state are missing.\nCode: %s\nState:%s', code, state)
            raise HttpBadRequestException()
        login_state = OauthState.create_key(state).get()
        if not login_state:
            logging.debug('Login state not found')
            raise HttpBadRequestException()
        config = get_config(NAMESPACE)
        assert isinstance(config, ItsYouOnlineConfiguration)
        if config.login_with_organization:
            username, scopes = get_user_scopes_from_access_token(code, login_state)
            jwt = None
        else:
            jwt, username, scopes = get_jwt(code, login_state)
    except HttpException as e:
        render_error_page(self.response, e.http_code, e.error)
        return
    _, session = login_user(self.response, username, scopes, jwt)
    self.redirect('/')
    if config.fetch_information:
        deferred.defer(set_user_information, Profile.create_key(username), session.key,
                       _queue='iyo-requests')
def pass_file(self, url):
    """Get or create the PassTemplate for (current user, url), refreshing it
    from the pass file when it is new or stale (older than one hour).

    Returns the PassTemplate entity.
    """
    if self.user:
        user_keyname = self.user.key().name()
    else:
        # Anonymous requests share a single bucket.
        user_keyname = '_no_user'
    safe_url = url
    key_name = "%s_%s" % (user_keyname, safe_url)
    pass_template = PassTemplate.get_by_key_name(key_name)
    if pass_template:
        new = False
    else:
        new = True
        pass_template = PassTemplate(key_name=key_name, url=url)
        if self.user:
            pass_template.owner = self.user
        from utils import string as str_utils
        code = str_utils.genkey(length=4)
        pass_template.short_code = code
        pass_template.put()
    # TODO: download and parse file to update it
    # TODO: check last modified, only update if not modified in over x hours
    if new or pass_template.modified < (datetime.datetime.now() - datetime.timedelta(hours=1)):
        logging.info('deferring update from passfile')
        deferred.defer(update_from_passfile, key_name)
    else:
        logging.info('this pass was modified too recently')
    return pass_template
def check_for_news(self):
    """Fetch the municipality RSS feed and defer a news item per entry.

    For each item the longer of the parsed <content>/<description> texts is
    used as the message body; the [%MEDIA%] placeholder is stripped.
    """
    url = u'%s/systems/rss.aspx?pg=1164' % self.BASE_URL
    response = urlfetch.fetch(url, deadline=60)
    if response.status_code != 200:
        logging.error('Could not check for news in be-oudenaarde.\n%s', response.content)
        return
    doc = minidom.parseString(response.content)
    for item in doc.getElementsByTagName('item'):
        title = u'%s' % item.getElementsByTagName(
            'title')[0].firstChild.nodeValue
        content_html = u'%s' % item.getElementsByTagName(
            'content')[0].firstChild.nodeValue
        description_html = item.getElementsByTagName(
            'description')[0].firstChild.nodeValue
        permalink = u'%s' % item.getElementsByTagName(
            'link')[0].firstChild.nodeValue
        content, _, _ = parse_html_content(content_html)
        description, _, _ = parse_html_content(description_html)
        # Prefer whichever rendered text carries more detail.
        if len(description) > len(content):
            message = description
        else:
            message = content
        message = message.replace('[%MEDIA%]', '')
        deferred.defer(self.create_news, self.broadcast_type, title, message, permalink)
def send_venue_email(venue, order, host_url, jinja2, move=False, format_type=DEFAULT_TYPE):
    """Email every configured venue address about a new (or moved) order.

    When EMAIL_REQUESTS is enabled, one-time action keys (done / cancel /
    postpone and, for delivery orders, confirm) are generated, stored on the
    order, and embedded as links in the rendered template.
    """
    if venue.emails:
        if move:
            text = u'Заказ №%s перенесен на эту точку приготовления' % order.key.id()
        else:
            text = u'Новый заказ №%s поступил в систему из мобильного приложения' % order.key.id()
        item_values = order_items_values(order)
        item_values['venue'] = venue
        item_values['delivery_type_str'] = DELIVERY_MAP[order.delivery_type]
        order.payment_type_str = PAYMENT_TYPE_MAP[order.payment_type_id]
        if config.EMAIL_REQUESTS:
            # Random keys gate the email action endpoints linked below.
            order.email_key_done = security.generate_random_string(entropy=256)
            order.email_key_cancel = security.generate_random_string(entropy=256)
            order.email_key_postpone = security.generate_random_string(entropy=256)
            if order.delivery_type == DELIVERY:
                order.email_key_confirm = security.generate_random_string(entropy=256)
            order.put()
            item_values['done_url'] = '%s/email/order/close?key=%s' % (host_url, order.email_key_done)
            item_values['cancel_url'] = '%s/email/order/cancel?key=%s' % (host_url, order.email_key_cancel)
            item_values['postpone_url'] = '%s/email/order/postpone?key=%s' % (host_url, order.email_key_postpone)
            item_values['minutes'] = POSTPONE_MINUTES
            if order.delivery_type == DELIVERY:
                item_values['confirm_url'] = '%s/email/order/confirm?key=%s' % (host_url, order.email_key_confirm)
        for email in venue.emails:
            if email:
                # Template choice depends on the requested layout.
                template = ''
                if format_type == DEFAULT_TYPE:
                    template = '/company/delivery/items.html'
                elif format_type == MINIMIZED:
                    template = '/company/delivery/items_tashir.html'
                deferred.defer(send_email, EMAIL_FROM, email, text,
                               jinja2.render_template(template, **item_values))
def trainee_profile(req):
    """
    ``GET`` @ |ta| + ``/user/current``

    ``PUT`` @ |ta| + ``/user/current``

    - Profile of the current user.
    - Updates the profile of the user.
    - |ul|
    """
    # Fields exposed in the sanitized response payload.
    out = ['id', 'name', 'nickname', 'gender', 'picture', 'avatar', 'birthday', 'country', 'city',
           'language', 'email', 'phone', 'active_club', 'sensors']
    if req.method == "GET":
        return sanitize_json(req.user, out, except_on_missing=False)
    elif req.method == "PUT":
        j_req = json_from_request(req, optional_props=['name', 'nickname', 'gender', 'picture', 'avatar',
                                                       'birthday', 'country', 'city', 'language', 'email',
                                                       'phone', 'activeClub', 'sensors'])
        if 'active_club' in j_req:
            # Only actual members may activate a club.
            membership = APIDB.get_user_club_role(req.user, Key(urlsafe=j_req['active_club']))
            if membership != "MEMBER":
                raise BadRequest("It seems that you want to activate a club that you are not member of")
        update, user = APIDB.update_user(req.user, **j_req)
        s_token = GCAuth.auth_user_token(user)
        # Propagate the profile change to the sync backend off-request.
        deferred.defer(sync_user, user, s_token)
        return sanitize_json(user, out, except_on_missing=False)
def get(self):
    """Forward a Passbook web-service log request to the admins by email."""
    logging.info('received passbook log request')
    from backend.admin import send_admin_email
    from google.appengine.ext.deferred import deferred
    deferred.defer(send_admin_email, subject='Passbook Web Service Log',
                   message='Arguments: %s' % self.request.arguments)
def calc_order_timing(offset=0, hours=None):
    """Histogram PICKMEAPP orders by creation hour, batched over the task
    queue; emails a CSV report when done.

    Args:
        offset: index of the first order in this batch.
        hours: running {hour-string: count} accumulator.
    """
    if not hours:
        hours = {}
    batch_size = 500
    logging.info("querying shared rides %s->%s" % (offset, offset + batch_size))
    orders = Order.objects.filter(type=OrderType.PICKMEAPP)[offset:offset + batch_size]
    for o in orders:
        hour = str(o.create_date.hour)
        if hour in hours:
            hours[hour] += 1
        else:
            hours[hour] = 1
    if orders:
        # BUG FIX: resume at offset + batch_size; the old "+ 1" skipped one
        # order per batch.
        deferred.defer(calc_order_timing, offset=offset + batch_size, hours=hours)
    else:
        logging.info("all done, sending report\n%s" % hours)
        # keys()/values() iterate in matching order since the dict is not
        # mutated between the two calls.
        csv = ",".join(hours.keys()) + "\n" + ",".join(
            [str(v) for v in hours.values()])
        send_mail_as_noreply("*****@*****.**", "Shared rides data for NY",
                             attachments=[("order_timing.csv", csv)])
def get(self):
    """Forward a Passbook web-service log request to the admins by email."""
    logging.info('received passbook log request')
    from backend.admin import send_admin_email
    from google.appengine.ext.deferred import deferred
    deferred.defer(send_admin_email, subject='Passbook Web Service Log',
                   message='Arguments: %s' % self.request.arguments)
def kyc_part_2(message_flow_run_id, member, steps, end_id, end_message_flow_id, parent_message_key, tag,
               result_key, flush_id, flush_message_flow_id, service_identity, user_details, flow_params):
    """Thin wrapper: run the KYC part-2 flow handler as a deferred task.

    All arguments are forwarded unchanged to _kyc_part_2.
    """
    deferred.defer(_kyc_part_2, message_flow_run_id, member, steps, end_id, end_message_flow_id,
                   parent_message_key, tag, result_key, flush_id, flush_message_flow_id,
                   service_identity, user_details, flow_params)
def update(cls, timestamp=None, cursor=None, stats=None):
    """Make deferred call to update Developer stats.

    A missing timestamp defaults to the current time.
    """
    effective_ts = timestamp if timestamp is not None else datetime.datetime.now()
    deferred.defer(cls.update_real, effective_ts, cursor, stats)
def delete_form(service_user, form_id):
    # type: (users.User, int) -> None
    """Delete a form, refusing while any service menu item still links to it.

    Raises:
        FormInUseException: when a menu item references the form.
    """
    menu_item = ServiceMenuDef.list_by_form(service_user, form_id).get()
    if menu_item:
        raise FormInUseException(form_id, menu_item.label)
    get_form(form_id, service_user).key.delete()
    # Submissions are removed in the background after the form itself.
    deferred.defer(delete_form_submissions, service_user, form_id, False)
def calc_orders_data_csv(recipient, batch_size, offset=0, csv_bytestring=u"", calc_cost=False):
    """Build a semicolon-separated CSV of ride/order data in deferred batches
    and mail it to `recipient` when the scan completes.

    Args:
        recipient: destination email address.
        batch_size: rows handled per task invocation.
        offset: index of the first computation in this batch.
        csv_bytestring: accumulated CSV text carried between batches.
        calc_cost: when True, price each order via the station's fixed rules.
    """
    link_domain = "www.waybetter.com"
    logging.info("querying computations %s->%s" % (offset, offset + batch_size))
    start_dt = set_default_tz_time(datetime(2012, 1, 1))
    end_dt = set_default_tz_time(datetime.now())
    station, station_cost_rules = None, []
    computations = []  # TODO_WB: no computations any more... fix if needed
    for computation in computations:
        if computation.debug:
            continue
        rides = computation.rides.all()
        total_interval_orders = sum([ride.orders.count() for ride in rides])
        for ride in rides:
            orders = ride.orders.all()
            count_orders = len(orders)
            for order in orders:
                depart_day = order.depart_time.date().isoformat() if order.depart_time else ""
                depart_time = order.depart_time.time().strftime("%H:%M") if order.depart_time else ""
                arrive_day = order.arrive_time.date().isoformat() if order.arrive_time else ""
                arrive_time = order.arrive_time.time().strftime("%H:%M") if order.arrive_time else ""
                hotspot_type = computation.get_hotspot_type_display()
                ordering_td = (order.depart_time or order.arrive_time) - order.create_date
                ordering_td_format = str(ordering_td).split(".")[0]  # trim microseconds
                passenger_name = order.passenger.full_name
                shared = "yes" if count_orders > 1 else ""
                price = order.get_billing_amount()
                cost = 0
                if calc_cost:
                    if ride.station and ride.station != station:
                        # store the rules in memory to reduce queries
                        # BUG FIX: log before reassigning so "(was %s)"
                        # actually shows the previous station.
                        logging.info("got new prices from station %s (was %s)" % (ride.station, station))
                        station = ride.station
                        station_cost_rules = list(ride.station.fixed_prices.all())
                    for rule in station_cost_rules:
                        if rule.is_active(order.from_lat, order.from_lon, order.to_lat, order.to_lon,
                                          ride.depart_time):
                            cost = rule.price
                link = "http://%s/%s" % (link_domain, reverse(ride_page, args=[ride.id]))
                order_data = [depart_day, depart_time, arrive_day, arrive_time, ordering_td_format,
                              passenger_name, order.from_raw, order.from_lat, order.from_lon, order.to_raw,
                              order.to_lat, order.to_lon, hotspot_type, shared, order.computation_id,
                              total_interval_orders, price, cost, link]
                # Strip the delimiter and quotes so the row stays well-formed.
                csv_bytestring += u";".join([unicode(i).replace(";", "").replace('"', '') for i in order_data])
                csv_bytestring += u"\n"
    if computations:
        # BUG FIX: resume at offset + batch_size; the old "+ 1" skipped one
        # record per batch.
        deferred.defer(calc_orders_data_csv, recipient, batch_size, offset=offset + batch_size,
                       csv_bytestring=csv_bytestring, calc_cost=calc_cost)
    else:
        logging.info("all done, sending data...")
        timestamp = date.today()
        send_mail_as_noreply(recipient, "Orders data %s" % timestamp,
                             attachments=[("orders_data_%s.csv" % timestamp, csv_bytestring)])
def delete(self):
    """Handle a Passbook device unregister request by emailing the admins.

    NOTE(review): `authenticationToken` is assigned but never used, and the
    subject always interpolates auth_token=None — presumably the real token
    should be read from the request's Authorization header; confirm intent.
    """
    logging.info('received DELETE passbook web service request')
    authenticationToken = "e7f3f3f56a2a419dad2f639c5af4858b"
    from backend.admin import send_admin_email
    from google.appengine.ext.deferred import deferred
    auth_token = None
    deferred.defer(send_admin_email, subject='Passbook Web Service Unregister - %s' % auth_token,
                   message='Arguments: %s' % self.request.arguments)
def send_mail_via_mime(from_, to, mime, transactional=None):
    """Queue sending of a prebuilt MIME message on the fast queue.

    Args:
        from_: sender address.
        to: recipient(s); must be non-empty.
        mime: the MIME message to send.
        transactional: tie the task to the current datastore transaction;
            defaults to whether a transaction is currently active.
    """
    try:
        azzert(to)
    except:
        # No recipients is treated as a logged no-op rather than an error.
        # NOTE(review): _suppress is not a stdlib logging kwarg — presumably
        # a project logging wrapper is in scope here; confirm.
        logging.exception('There were no recipients. Not sending out the email.', _suppress=False)
        return
    if transactional is None:
        transactional = db.is_in_transaction()
    deferred.defer(_send_mail_via_mime, from_, to, mime, _transactional=transactional, _queue=FAST_QUEUE)
def _increment_amount_dates_scraped(self, scrap_info_entity_key):
    """Bump the scraped-dates counter; once every date is done, queue cleanup
    of scrap info older than this run.

    NOTE(review): read-modify-write without a transaction — concurrent
    increments could be lost; confirm callers serialize these updates.
    """
    scrap_info_entity = scrap_info_entity_key.get()
    scrap_info_entity.amount_dates_scraped += 1
    scrap_info_entity.put()
    if scrap_info_entity.amount_dates_scraped == scrap_info_entity.amount_dates_to_scrap:
        logging.info('Scraping Complete')
        from google.appengine.ext.deferred import deferred
        deferred.defer(self._cleanup_old_shows_info, scrap_info_entity.scrap_date_time)
def defer_analyze_image():
    """Persist the image, create an empty ImageAnalysis child entity, and
    queue the actual analysis.

    NOTE(review): relies on `i` (the image model) from the enclosing
    closure/module scope — confirm where it is bound.
    """
    image_key = i.put()
    ia = ImageAnalysis(
        parent=image_key,
        id=image_key.id()
    )
    ia.put()
    deferred.defer(analyze_image, image_key.id())
def scrap_and_store_shows_for_dates(self, *args):
    """Scrape and store shows, replacing any scrape already made today."""
    from google.appengine.ext.deferred import deferred
    entity_id = ScrapModel.generate_id_for_new_entity(self.__scraper.TIME_ZONE)
    existing = ScrapModel.get_by_id(entity_id)
    if existing is None:
        self._scrap_and_store(entity_id, args)
    else:
        # Scraping data for today already exists. Delete it and get new.
        deferred.defer(self._replace_today_scrap, existing.key, args)
def calc_users_data_csv(recipient, offset=0, csv_bytestring=u""):
    """Build a per-user activity CSV (logins, order history, payments) in
    deferred batches of 500 and mail the result to `recipient` when done."""
    batch_size = 500
    datetime_format = "%d/%m/%y"
    link_domain = "www.waybetter.com"
    logging.info("querying users %s->%s" % (offset, offset + batch_size))
    users = User.objects.order_by("-last_login")[offset: offset + batch_size]
    for user in users:
        # Defaults are blank so non-passenger users still produce a row.
        link = ""
        last_login = user.last_login.strftime(datetime_format)
        date_joined = user.date_joined.strftime(datetime_format)
        first_name = user.first_name
        last_name = user.last_name
        email = user.email
        phone = ""
        billing_info = ""
        first_order_date = ""
        last_order_date = ""
        num_orders_mobile = ""
        num_orders_website = ""
        num_rides = ""
        total_payment = ""
        try:
            passenger = user.passenger
            link = "http://%s/%s" % (link_domain, reverse(view_passenger_orders, args=[passenger.id]))
            phone = passenger.phone
            if hasattr(passenger, "billing_info"):
                billing_info = "yes"
            orders = sorted(passenger.orders.filter(type=OrderType.SHARED, debug=False),
                            key=lambda order: order.create_date)
            num_orders = len(orders)
            if num_orders:
                first_order_date = orders[0].create_date.strftime(datetime_format)
                last_order_date = orders[-1].create_date.strftime(datetime_format)
            # Only orders assigned to a ride count as dispatched.
            dispatched_orders = filter(lambda o: o.ride, orders)
            total_payment = sum([order.get_billing_amount() for order in dispatched_orders])
            num_rides = len(dispatched_orders)
            num_orders_mobile = len(filter(lambda o: o.mobile, orders))
            num_orders_website = num_orders - num_orders_mobile
        except Passenger.DoesNotExist:
            # User never became a passenger — keep the blank defaults.
            pass
        except Passenger.MultipleObjectsReturned:
            pass
        user_data = [last_login, first_order_date, last_order_date, date_joined, first_name, last_name,
                     email, phone, num_orders_mobile, num_orders_website, num_rides, billing_info,
                     total_payment, link]
        # Commas are stripped from values so each row stays well-formed CSV.
        csv_bytestring += u",".join([unicode(i).replace(",", "") for i in user_data])
        csv_bytestring += u"\n"
    if users:
        deferred.defer(calc_users_data_csv, recipient, offset=offset + batch_size,
                       csv_bytestring=csv_bytestring)
    else:
        logging.info("all done, sending data...")
        timestamp = date.today()
        logging.info(csv_bytestring)
        send_mail_as_noreply(recipient, "Users data %s" % timestamp,
                             attachments=[("users_data_%s.csv" % timestamp, csv_bytestring)])
def get(self):
    """Validate the auth session and queue creation of a signed action.

    Requires 'session_id', 'sig' and 'action' request parameters; responds
    with a JSON acknowledgement carrying the generated action id.

    Raises:
        errors.NoEntityError: when the referenced session does not exist.
    """
    params = self.params(required=['session_id', 'sig', 'action'])
    session = AuthSession.get_by_key_name(params.get('session_id'))
    if not session:
        raise errors.NoEntityError('Session %s does not exist.' % params.get('session_id'))
    # NOTE(review): `.timestamp` is read as an attribute, not called —
    # confirm time_utils.get_now() returns an object exposing it this way.
    params['action_id'] = '%s-action-%s' % (
        params.get('session_id'), str(time_utils.get_now().timestamp))
    # TODO - blockchain interaction
    deferred.defer(_create_action, params=params, _countdown=10)
    self.response.status_int = 200
    self.write({'status': 200,
                'message': 'Action with action_id %s has been created' % params.get('action_id')})
def trans():
    """Transaction body (closure): record `permalink` in the scraper settings
    and, only for new permalinks, queue creation of the news item on commit.

    Relies on `self`, `permalink`, `broadcast_type`, `message` and `title`
    from the enclosing scope.
    """
    sln_news_scraper_settings = SolutionNewsScraperSettings.get(self.scraper_settings_key)
    if not sln_news_scraper_settings:
        sln_news_scraper_settings = SolutionNewsScraperSettings(key=self.scraper_settings_key)
        sln_news_scraper_settings.urls = []
    if permalink not in sln_news_scraper_settings.urls:
        sln_news_scraper_settings.urls.append(permalink)
        sln_news_scraper_settings.put()
        # _transactional: the task is only enqueued if the surrounding
        # transaction commits.
        deferred.defer(create_news_item, self.sln_settings, broadcast_type, message, title, permalink,
                       _transactional=True)
def get(self):
    """Email the admins about a pass being sent, then dispatch to the handler
    method named by the 'action' argument (default: download)."""
    self.set_header('Access-Control-Allow-Origin', '*')
    action = self.get_argument('action', 'download').lower()
    from backend.admin import send_admin_email
    from google.appengine.ext.deferred import deferred
    deferred.defer(send_admin_email,
                   subject='Sent Pass - %s %s' % (self.get_argument('to_email', ''),
                                                  self.get_argument('to_phone', '')),
                   message='Arguments: %s' % self.request.arguments)
    # NOTE(review): `action` is attacker-controlled and used for getattr
    # dispatch — any attribute can be invoked; consider a whitelist.
    getattr(self, action)()
    self.write_json({'status': 'OK'})
def trans():
    """Transaction body (closure): render the order/invoice PDF onto the
    freshly created order, then queue the confirmation email on commit.

    Relies on `new_order_key`, `customer` and `service_user` from the
    enclosing scope.
    """
    new_order = Order.get(new_order_key)
    with closing(StringIO()) as pdf:
        generate_order_or_invoice_pdf(pdf, customer, new_order)
        # getvalue() must happen before closing() releases the buffer.
        new_order.pdf = pdf.getvalue()
    new_order.put()
    # _transactional: only enqueued if the surrounding transaction commits.
    deferred.defer(send_order_email, new_order_key, service_user, _transactional=True)
def post(self):
    """Kick off a BatchUpdateVotes migration from request parameters."""
    migration_name = self.request.get('migration', '')
    param1 = self.request.get('p1', None)
    param2 = self.request.get('p2', None)
    deferred.defer(BatchUpdateVotes, update_fn_name=migration_name, cursor=None,
                   num_updated=0, p1=param1, p2=param2)
    self.response.out.write('Batch update votes successfully initiated.')
def update_embedded_application(name, data):
    """Apply *data* to the embedded application called *name*, bump its
    version, persist it, and defer the update broadcast.

    Returns the updated application entity.
    """
    application = get_embedded_application(name)
    application.populate(tags=data.tags, url_regexes=data.url_regexes)
    # Only upload and repoint the file when a new one was actually supplied.
    if MISSING.default(data.file, None):
        application.file_path = _upload_file(data.file, application.name)
    application.version += 1
    application.put()
    deferred.defer(send_update_embedded_app, name)
    return application
def get(self):
    """Notify the admins (via a deferred email) that a pass was sent, then run
    the requested action handler and reply with an OK payload."""
    self.set_header('Access-Control-Allow-Origin', '*')
    from backend.admin import send_admin_email
    from google.appengine.ext.deferred import deferred
    to_email = self.get_argument('to_email', '')
    to_phone = self.get_argument('to_phone', '')
    deferred.defer(send_admin_email,
                   subject='Sent Pass - %s %s' % (to_email, to_phone),
                   message='Arguments: %s' % self.request.arguments)
    # NOTE(review): dispatches to any attribute named by the request — confirm
    # 'action' values are restricted upstream.
    action = self.get_argument('action', 'download').lower()
    getattr(self, action)()
    self.write_json({'status': 'OK'})
def send_birthday_messages():
    """Send each app's configured birthday message to all users whose birthday
    is today, one deferred sendMessage call per app."""
    messages_per_app = get_all_app_birthday_messages()
    receivers_per_app = defaultdict(list)
    for profile in UserProfile.list_by_birth_day(now()):
        assert (isinstance(profile, UserProfile))
        # Checking the preloaded message dict is faster than querying per app id.
        if profile.app_id in messages_per_app:
            receivers_per_app[profile.app_id].append(
                UserMemberTO(profile.user, Message.ALERT_FLAG_VIBRATE))
    for app_id, receivers in receivers_per_app.iteritems():
        message, branding_hash = messages_per_app[app_id]
        deferred.defer(sendMessage, MC_DASHBOARD, receivers, Message.FLAG_ALLOW_DISMISS, 0,
                       None, message, [], None, branding_hash, None, is_mfr=False)
def get(self):
    """Defer a follow-up push to customers whose last order at a tracked venue
    was exactly DAYS days ago (skipping anyone who ordered again since)."""
    text = u'7 дней после заказа'
    # One-day window ending DAYS days ago.
    start = datetime.utcnow() - timedelta(days=DAYS + 1)
    end = datetime.utcnow() - timedelta(days=DAYS)
    for order in Order.query(Order.date >= start, Order.date <= end).fetch():
        if order.venue_id not in COMPANIES:
            continue
        latest_order = Order.query(
            Order.customer == order.customer).order(-Order.date).get()
        # A newer order exists — the customer came back, no reminder needed.
        if latest_order.date > end:
            continue
        deferred.defer(send_order_screen_push, order, text)
def migrate(dry_run=False):
    """Map legacy InvestmentAgreement statuses onto investor-progress steps and
    defer one update per investor.

    Keeps, per app user, the highest mapped step across all of their agreements.

    Args:
        dry_run: when True, return the computed {app_user: step} dict without
            deferring any updates.

    Returns:
        The {app_user: step} dict when dry_run is True, else None.
    """
    # NOTE(review): fetch(1000) caps the migration at 1000 agreements — confirm
    # the dataset is smaller, or page with a cursor.
    investments = InvestmentAgreement.query().fetch(1000)  # type: list[InvestmentAgreement]
    updates = {}
    for investment in investments:
        new_status = INVESTMENT_TODO_MAPPING[investment.status]
        if investment.app_user not in updates or updates[investment.app_user] < new_status:
            # Reuse new_status instead of re-reading the mapping (was looked up twice).
            updates[investment.app_user] = new_status
    if dry_run:
        return updates
    for app_user, step in updates.iteritems():
        email, app_id = get_app_user_tuple(app_user)
        deferred.defer(update_investor_progress, email.email(), app_id, step)
def defer(method, *args, **kwargs):
    """Defer *method* under a deterministic task name so duplicate submissions
    are dropped instead of re-enqueued.

    Task options such as _queue, _countdown and _eta pass through unchanged;
    _name is always overwritten here.
    """
    # A payload can also be sent
    from google.appengine.ext.deferred import deferred
    from google.appengine.api.labs import taskqueue
    # _queue, _countdown, name, _eta
    kwargs['_name'] = task_name(
        str(method.__name__) + str(args) + str(kwargs.values()))
    try:
        deferred.defer(method, *args, **kwargs)
    except (taskqueue.TaskAlreadyExistsError, taskqueue.TombstonedTaskError):
        # An identically-named task exists or recently ran: treat as already done.
        logging.warning('unable to create task with name %s' % kwargs['_name'],
                        exc_info=True)
    else:
        logging.info('deferred method %s with args %s and kwargs %s'
                     % (method.__name__, args, kwargs))
def create_user(self, params):
    """Defer user creation and reply 202 Accepted while it is pending."""
    # for now, simulate blockchain interaction and return pending response
    deferred.defer(_create_user, params=params, _countdown=30)
    self.response.status_int = 202
    pending_message = ('User account creation for %s is currently processing'
                       % params.get('username'))
    self.write({'status': 202, 'message': pending_message})
def run_method_on_entities(*args, **kwargs):
    """ Run method on *all* of the rows (without maxing out at 1000)
    run_method_on_entities(ModelClass or ModelClass.all().filter('foo','bar'), some_fxn)
    some_fxn(entities): pass
    """
    # Default to the 'fast' queue unless the caller picked one explicitly.
    kwargs.setdefault('_queue', 'fast')
    deferred.defer(_run_method_on_entities, *args, **kwargs)
def _get_all_users_inner(self, param, token=None, callback=None):
    """Page through the users().list API, deferring *callback* once per page.

    Iterates until the API stops returning a nextPageToken; side-effect order
    (one deferred callback per fetched page) is unchanged from the recursive form.
    """
    while True:
        if token is not None:
            param['pageToken'] = token
        response = self.service.users().list(**param).execute()
        if callback:
            deferred.defer(callback, response)
        token = response.get('nextPageToken', None)
        if not token:
            break
def parse_bookxcess_pdf(document, headers, filename=None, pages=None, debug=False):
    """Parses Bookxcess PDF files.

    Converts a page range of the PDF to book records, stores them, and chains a
    named deferred task for the next page range until all pages are processed.

    Args:
        document: object exposing .contents (PDF bytes) and .urlhash.
        headers: passed through to the chained task unchanged.
        filename: source file name; 'fiction.pdf' has author/title columns swapped.
        pages: page indices to process this run; defaults to the first
            PDF_PROCESS_PAGES pages.
        debug: when True, return the parsed records without storing or chaining.
    """
    def _map(item):
        # Map one extracted row to a book dict; None rejects the row.
        if len(item) != 4:
            return None
        if filename == 'fiction.pdf':
            author, title, price, isbn = item
        else:
            title, author, price, isbn = item
        author = author.title()
        title = titlecase(title)
        try:
            price = float(price)
        except ValueError:
            # Non-numeric price column: not a book row.
            return None
        else:
            return {
                "isbn13": isbn,
                "title": title,
                "authors": [author],
                "_prices": [{
                    "source": 'bookxcess',
                    "price": price
                }]
            }

    if not pages:
        pages = range(0, PDF_PROCESS_PAGES)
    result, total = pdf2text.convert(document.contents, pages=pages, mapper=_map)
    logging.info('parse_bookxcess_pdf: %s: pages %s of %d: %d books found'
                 % (filename, pages, total, len(result)))
    if debug:
        return result
    if result:
        store_books(result)
    if total > 1:
        # Renamed from `next`, which shadowed the builtin.
        next_start = pages[-1] + 1
        stop = next_start + PDF_PROCESS_PAGES
        next_pages = range(next_start, stop)
        if next_start < total:
            # Deterministic task name makes re-enqueues of the same range no-ops.
            task_name = 'parse-bookxcess-pdf-%s-%s' % (
                document.urlhash, '-'.join([str(d) for d in next_pages]))
            logging.info('parse_bookxcess_pdf: next: %s' % task_name)
            try:
                deferred.defer(parse_bookxcess_pdf, document, headers, filename,
                               next_pages, _name=task_name)
            except (taskqueue.TaskAlreadyExistsError, taskqueue.TombstonedTaskError):
                # Range already queued or recently processed: nothing to do.
                pass
def handle_upload(self):
    """Handle a multipart upload: write each valid file to the blobstore,
    build per-file result dicts (serving/delete URLs), record an ImageObject
    for the stream, and defer cleanup of the blobs after EXPIRATION_TIME.

    Returns:
        list of per-file result dicts (name, type, size, url, deleteUrl, ...).
    """
    results = []
    blob_keys = []
    for name, fieldStorage in self.request.POST.items():
        log.info(name)
        log.info(fieldStorage)
        # The 'stream_name' field carries the target stream id, not a file.
        if name == 'stream_name':
            stream_name = str(fieldStorage)
        # Plain text fields arrive as unicode; only file fields continue below.
        if type(fieldStorage) is unicode:
            continue
        result = {}
        # Strip any Windows-style path prefix from the client filename.
        result['name'] = re.sub(
            r'^.*\\', '', fieldStorage.filename
        )
        # NOTE(review): image_name is computed but never used afterwards;
        # also raises IndexError if the filename has no non-digit prefix — confirm.
        image_name = str(re.findall('\D+(?=\.)', str(fieldStorage.filename))[0])
        result['type'] = fieldStorage.type
        result['size'] = self.get_file_size(fieldStorage.file)
        if self.validate(result):
            blobKey = self.write_blob(fieldStorage.value, result)
            blob_key = str(blobKey)
            blob_keys.append(blob_key)
            result['deleteType'] = 'DELETE'
            result['deleteUrl'] = self.request.host_url + \
                '/Upload/?key=' + urllib.quote(blob_key, '')
            if (IMAGE_TYPES.match(result['type'])):
                try:
                    result['url'] = images.get_serving_url(
                        blob_key,
                        secure_url=self.request.host_url.startswith(
                            'https'
                        )
                    )
                    result['thumbnailUrl'] = result['url'] + \
                        THUMBNAIL_MODIFICATOR
                except:
                    # Could not get an image serving url
                    pass
            # Non-image (or failed serving-url) fallback: direct blob URL.
            if not 'url' in result:
                result['url'] = self.request.host_url + \
                    '/' + blob_key + '/' + urllib.quote(
                        result['name'].encode('utf-8'), '')
            results.append(result)
    # NOTE(review): stream_name and blobKey are only bound inside the loop —
    # if no 'stream_name' field or no valid file was posted, the lines below
    # raise NameError (or use the last file only when several were uploaded).
    log.info(stream_name)
    log.info(blobKey)
    new_image_profile = ImageObject(stream_id=stream_name, image_blob=blobKey)
    new_image_profile.put()
    # StreamObject.update_image_count(stream_name)
    # Blobs are temporary: schedule their deletion after EXPIRATION_TIME seconds.
    deferred.defer(cleanup, blob_keys, _countdown=EXPIRATION_TIME)
    return results
def run_job(qry_function, qry_function_args, worker_function, worker_function_args,
            mode=MODE_SINGLE, batch_size=50, batch_timeout=0, qry_transactional=False,
            worker_queue=HIGH_LOAD_WORKER_QUEUE):
    """Validate the job arguments, then defer the controller task that runs
    *qry_function* and fans its results out to *worker_function*.

    Args:
        qry_function: plain function producing the query/keys to process.
        qry_function_args: positional args for qry_function (None -> []).
        worker_function: plain function applied to each result (or batch).
        worker_function_args: positional args for worker_function (None -> []).
        mode: MODE_SINGLE (one call per entity) or MODE_BATCH (per batch).
        batch_size: entities per batch; capped at 500 to stay under task limits.
        batch_timeout: delay between batches.
        qry_transactional: whether the query runs in a transaction.
        worker_queue: task queue used for the worker tasks.
    """
    qry_function_args = qry_function_args or []
    worker_function_args = worker_function_args or []
    azzert(inspect.isfunction(qry_function), 'Only functions allowed for argument qry_function')
    azzert(inspect.isfunction(worker_function), 'Only functions allowed for argument worker_function')
    azzert(mode in (MODE_SINGLE, MODE_BATCH))
    azzert(isinstance(qry_function_args, list), 'qry_function_args must be a list')
    azzert(isinstance(worker_function_args, list), 'worker_function_args must be a list')
    azzert(batch_size <= 500)  # batch_size shouldn't be too high in case your keys are large, else you might go over the max task size of 100KB
    # The controller is enqueued transactionally when we are already inside a
    # datastore transaction, so the job only starts if that transaction commits.
    deferred.defer(_run_qry, qry_function, qry_function_args, worker_function,
                   worker_function_args, mode, batch_size, batch_timeout, qry_transactional,
                   worker_queue=worker_queue, _transactional=db.is_in_transaction(),
                   _queue=HIGH_LOAD_CONTROLLER_QUEUE)
def update_real(cls, timestamp, cursor=None, stats=None):
    """ Real update

    Walks Developer entities 100 at a time (chaining itself via a cursor),
    accumulating totals, SDK counts and per-tag counts in *stats*; on the last
    page it persists a DeveloperStats row plus TagStats for the top 100 tags.
    """
    query = Developer.all()
    if cursor:
        query.with_cursor(cursor)
    if stats is None:
        stats = {'total': 0, 'tags': {}, 'python': 0, 'java': 0}
    devs = query.fetch(100)
    stats['total'] += len(devs)
    tag_counts = stats['tags']
    for dev in devs:
        if dev.python_sdk:
            stats['python'] += 1
        if dev.java_sdk:
            stats['java'] += 1
        for tag in dev.tags:
            tag_counts[tag] = tag_counts.get(tag, 0) + 1
    if len(devs) == 100:
        # Full page: more developers likely remain, so continue in a new task.
        # NOTE(review): this defers cls.update, not cls.update_real — confirm
        # update is the intended (re-)entry point.
        deferred.defer(cls.update, timestamp=timestamp, cursor=query.cursor(), stats=stats)
        return None
    dev_stats = DeveloperStats(timestamp=timestamp, total=stats['total'],
                               python=stats['python'], java=stats['java'])
    dev_stats.put()
    # Track the Top 100 tags
    top_tags = sorted(tag_counts.iteritems(), key=lambda t: t[1], reverse=True)[:100]
    db.put([TagStats(developer_stats=dev_stats, timestamp=timestamp, name=tag,
                     total=count, popularity=float(count) / stats['total'])
            for tag, count in top_tags])
    return None