def trans():  # Operates on 2 entity groups
    hookup_with_default_services.schedule(mobile_user, ipaddress)
    deferred.defer(sync_payment_database, mobile_user, _transactional=True)
    if invitor_code and invitor_secret:
        pp = ProfilePointer.get(invitor_code)
        if not pp:
            logging.error("User with userCode %s not found!" % invitor_code)
        else:
            deferred.defer(ack_invitation_by_invitation_secret, mobile_user, pp.user, invitor_secret,
                           _transactional=True, _countdown=10)
    elif invitor_code:
        for ysaaa_hash, static_email in chunks(server_settings.ysaaaMapping, 2):
            if invitor_code == ysaaa_hash:
                service_user = users.User(static_email)
                makeFriends(service_user, mobile_user, original_invitee=None, servicetag=None, origin=ORIGIN_YSAAA)
                break
        else:
            azzert(False, u"ysaaa registration received but not found mapping")
    for _, static_email in chunks(server_settings.staticPinCodes, 2):
        if mobile_user.email() == static_email:
            break
    else:
        deferred.defer(send_messages_after_registration, mobile_key, _transactional=True)
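# These snippets all lean on a `chunks` helper whose definition is not shown in
# this section. A minimal sketch consistent with how it is called throughout
# (flat lists sliced into fixed-size groups, the last group possibly shorter):
def chunks(items, n):
    # Yield successive slices of at most n elements from items.
    for i in xrange(0, len(items), n):
        yield items[i:i + n]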
def nuke_news():
    for keys in chunks(list(NewsItem.all(keys_only=True)), 200):
        db.delete(keys)
    for keys in chunks(list(NewsCoupon.all(keys_only=True)), 200):
        db.delete(keys)
    for keys in chunks(list(NewsItemImage.all(keys_only=True)), 200):
        db.delete(keys)
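# The pattern above (keys-only query, deletes in slices of 200) avoids fetching
# entity payloads that would be discarded anyway, and keeps every delete RPC
# comfortably below the datastore's per-call batch limit.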
def delete_payment_provider(provider_id):
    provider = get_payment_provider(provider_id)
    if not provider:
        raise PaymentProviderNotFoundException(provider_id)
    to_delete = [provider.key]
    to_put = []

    def filter_by_provider(thing):
        return thing.provider_id != provider_id

    for payment_user in PaymentUser.list_by_provider_id(provider_id):
        payment_user.providers = filter(filter_by_provider, payment_user.providers)
        payment_user.assets = filter(filter_by_provider, payment_user.assets)
        if not payment_user.providers and not payment_user.assets:
            to_delete.append(payment_user.key)
        else:
            to_put.append(payment_user)
    services = (PaymentService.list_by_provider_id(provider_id, True).fetch() +
                PaymentService.list_by_provider_id(provider_id, False).fetch())
    for service in services:  # type: PaymentService
        service.providers = filter(filter_by_provider, service.providers)
        service.test_providers = filter(filter_by_provider, service.test_providers)
        if service.providers and service.test_providers:
            to_put.append(service)
        else:
            to_delete.append(service.key)
    for chunk in chunks(to_put, 200):
        ndb.put_multi(chunk)
    for chunk in chunks(to_delete, 200):
        ndb.delete_multi(chunk)
def trans():
    si_chunk = db.get(si_keys_chunk)
    for si in reversed(si_chunk):
        if not si.serviceData:
            si_chunk.remove(si)
            continue
        old_ancestor = si.serviceData._ancestor
        new_ancestor = _create_new_key(job, si.serviceData._ancestor)
        for chunk in chunks(si.serviceData._keys.values(), 5):
            _migrate_models(job, db.get([KVBucket.create_key(bucket_id, old_ancestor) for bucket_id in chunk]))
        for chunk in chunks(si.serviceData._blob_keys.values(), 5):
            for blob_bucket_ids in chunk:
                _migrate_models(job, db.get([KVBlobBucket.create_key(blob_bucket_id, old_ancestor)
                                             for blob_bucket_id in blob_bucket_ids]))
        si.serviceData._ancestor = new_ancestor
    if si_chunk:
        put_and_invalidate_cache(*si_chunk)
    if si_keys:  # there is still work to do
        deferred.defer(_2500_migrate_service_data, job_key, si_keys, _transactional=True, _queue=MIGRATION_QUEUE)
        return False
    return True
def schedule_tasks(tasks, queue_name=deferred._DEFAULT_QUEUE):
    # type: (list[Task], str) -> list[Task]
    queue = Queue(queue_name)
    results = []
    for chunk in chunks(tasks, MAX_TASKS_PER_ADD):
        results.extend(queue.add(chunk))
    return results
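# A hypothetical usage of schedule_tasks, assuming MAX_TASKS_PER_ADD is the
# task queue API's per-call limit of 100: 250 tasks get submitted in three
# Queue.add() calls (100 + 100 + 50).
from google.appengine.api.taskqueue import Task

tasks = [Task(url='/_ah/queue/worker', params={'index': str(i)}) for i in xrange(250)]
scheduled = schedule_tasks(tasks)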
def drop_sessions_of_user(user):
    sessions = list(Session.all().filter("user =", user).filter('deleted', False))
    for session in sessions:
        session.deleted = True
    for chunk in chunks(sessions, 200):
        db.put(chunk)
        invalidate_model_cache(chunk)
def trans():
    to_put = list()
    service_profile = get_service_profile(service_user)
    service_profile.expiredAt = now()
    service_profile.enabled = False
    to_put.append(service_profile)
    service_identity_keys = get_service_identities_query(service_user, True)
    search_configs = db.get(
        [SearchConfig.create_key(create_service_identity_user(users.User(key.parent().name()), key.name()))
         for key in service_identity_keys])
    svc_index = search.Index(name=SERVICE_INDEX)
    loc_index = search.Index(name=SERVICE_LOCATION_INDEX)
    for search_config in search_configs:
        if search_config:
            search_config.enabled = False
            to_put.append(search_config)
            on_trans_committed(_cleanup_search_index, search_config.service_identity_user.email(),
                               svc_index, loc_index)
    for objects_to_put in chunks(to_put, 200):
        put_and_invalidate_cache(*objects_to_put)
    deferred.defer(cleanup_sessions, service_user, _transactional=True)
    deferred.defer(cleanup_friend_connections, service_user, _transactional=True)
def get_facebook_app_info(domain):
    settings = get_solution_server_settings()
    apps = settings.facebook_apps
    for host, app_id, app_secret in chunks(apps, 3):
        if host == domain.lower().strip():
            return app_id, app_secret
def get(self):
    solution_server_settings = get_solution_server_settings()
    for module_name, service_user in chunks(solution_server_settings.solution_news_scrapers, 2):
        try:
            module = importlib.import_module("solutions.common.cron.news.%s" % module_name)
            getattr(module, 'check_for_news')(users.User(service_user))
        except:
            pass
def trans():
    mfd_list = [mfd for mfd in get_message_flow_designs_by_status(service_user, MessageFlowDesign.STATUS_VALID)
                if mfd.xml and not mfd.definition]  # XML-only flows
    render_js_for_message_flow_designs(mfd_list, notify_friends=False)
    for chunk in chunks(mfd_list, 200):
        put_and_invalidate_cache(*chunk)
    schedule_update_all_friends_of_service_user(service_user)
def put_invoices():
    all_invoices = list(Invoice.all())
    mobicage_legal_entity = get_mobicage_legal_entity()
    for invoices in chunks(all_invoices, 200):
        to_put = list()
        for i in invoices:
            manager = i.operator and RegioManager.get(RegioManager.create_key(i.operator.email()))
            i.legal_entity_id = manager and manager.team.legal_entity_id or mobicage_legal_entity.id
            if i.paid and i.paid_timestamp is None:
                i.paid_timestamp = 0 if i.legal_entity_id == mobicage_legal_entity.id else now()
            to_put.append(i)
        db.put(to_put)
def store_stats_api_failures(counters):
    keys = [ServiceAPIFailures.key_from_service_user_email(k) for k in counters.keys()]
    safs = db.get(keys)
    for i in xrange(len(keys)):
        key = keys[i]
        saf = safs[i]
        if not saf:
            saf = ServiceAPIFailures(key=key, creationTime=now(), failedCalls=0, failedCallBacks=0)
            safs[i] = saf
        saf.failedCalls += counters[key.name()][monitoring.SERVICE_API_CALL]
        saf.failedCallBacks += counters[key.name()][monitoring.SERVICE_API_CALLBACK]
    for chunk in chunks(safs, 200):
        db.put(chunk)
def generate_unassigned_short_urls(app_id, amount):
    @db.non_transactional
    def allocate_ids():
        return db.allocate_ids(db.Key.from_path(ShortURL.kind(), 1), amount)  # (start, end)

    result = list()
    id_range = allocate_ids()
    for short_url_id in xrange(id_range[0], id_range[1] + 1):
        user_code = userCode(users.User("%s@%s" % (short_url_id, app_id)))
        result.append(ShortURL(key=db.Key.from_path(ShortURL.kind(), short_url_id), full="/q/i" + user_code))
    for c in chunks(result, 200):
        db.put(c)
    return result
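# db.allocate_ids reserves an inclusive (start, end) id range without writing
# anything, which is why the loop above runs up to id_range[1] + 1: allocating
# 3 ids might, for example, return (1042, 1044) and produce ShortURLs 1042-1044.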
def get_value_for_datastore(self, model_instance):
    if not db.is_in_transaction():
        raise InvalidStateException(u"This model should be put in a transaction")
    kv_store = super(KeyValueProperty, self).get_value_for_datastore(model_instance)
    if kv_store is not None:
        logging.debug('Deleting %s bucket(s)', len(kv_store._to_be_deleted_models))
        if kv_store._to_be_deleted_models:
            logging.debug('\n'.join(map(repr, kv_store._to_be_deleted_models)))
            for chunk in chunks(list(kv_store._to_be_deleted_models), 200):
                db.delete(chunk)
        to_be_put_models = set(kv_store._to_be_put_buckets)
        for blob_buckets in kv_store._to_be_put_blob_buckets.itervalues():
            to_be_put_models.update(blob_buckets)
        logging.debug('Putting %s bucket(s)', len(to_be_put_models))
        if to_be_put_models:
            logging.debug('\n'.join((repr(m.key()) for m in to_be_put_models)))
            for chunk in chunks(list(to_be_put_models), 200):
                db.put(chunk)
        stream = StringIO()
        _serialize_kv_store(stream, kv_store)
        return db.Blob(stream.getvalue())
def trans():  # Operates on 3 entity groups
    email = get_human_user_from_app_user(mobile_user).email()
    for _, static_email in chunks(server_settings.staticPinCodes, 2):
        if email == static_email:
            break
    else:
        deferred.defer(send_welcome_message, mobile_user, _transactional=True, _countdown=5)
    mobile_settings, app_settings = db.get([ms_key, app_settings_key])
    request = UpdateSettingsRequestTO()
    request.settings = SettingsTO.fromDBSettings(mobile_settings, app_settings)
    updateSettings(update_settings_response_handler, logError, mobile_user, request=request)
    deferred.defer(_finishup_mobile_registration_step2, mobile.key(), invitor_code, invitor_secret, ipaddress,
                   mobile_settings.majorVersion, mobile_settings.minorVersion, _transactional=True)
def store_flow_stats(counters):
    to_put = list()
    flow_stats_keys = counters.keys()
    for flow_stats_key, flow_stats in zip(flow_stats_keys, db.get(flow_stats_keys)):
        if not flow_stats:
            flow_stats = FlowStatistics(key=flow_stats_key)
        to_put.append(flow_stats)
        for day, stats_list in sorted(counters[flow_stats_key].iteritems()):
            flow_stats.set_today(datetime.datetime.utcfromtimestamp(day).date())
            for breadcrumbs, statuses, current_step_id, current_btn_id in stats_list:
                for status in statuses:
                    btn_id = current_btn_id if status == FlowStatistics.STATUS_ACKED else None
                    flow_stats.add(breadcrumbs, current_step_id, status, btn_id)
    for chunk in chunks(to_put, 200):
        put_rpcs['flow_stats'].append(db.put_async(chunk))
def post(self):
    version = self.request.get("version", None)
    install_id = self.request.get("install_id", None)
    registration_time = self.request.get("registration_time", None)
    device_id = self.request.get("device_id", None)
    registration_id = self.request.get("registration_id", None)
    signature = self.request.get("signature", None)
    language = self.request.get("language", None)
    country = self.request.get("country", None)
    app_id = self.request.get("app_id", App.APP_ID_ROGERTHAT)
    use_xmpp_kick_channel = self.request.get('use_xmpp_kick', 'true') == 'true'
    GCM_registration_id = self.request.get('GCM_registration_id', '')
    ysaaa_guid = self.request.get("service", None)
    if not ysaaa_guid:
        logging.warn('Missing YSAAA guid!\nPOST params: %s', self.request.POST)
        return self.abort(401)
    server_settings = get_server_settings()
    calculated_signature = sha256_hex(version + " " + install_id + " " + registration_time + " " + device_id + " " +
                                      registration_id + " " + ysaaa_guid +
                                      base64.b64decode(server_settings.registrationMainSignature.encode("utf8")))
    if signature.upper() != calculated_signature.upper():
        logging.error("Invalid request signature.")
        self.response.set_status(500)
        return
    for ysaaa_hash, _ in chunks(server_settings.ysaaaMapping, 2):
        if ysaaa_guid == ysaaa_hash:
            break
    else:
        azzert(False, u"ysaaa registration received but not found mapping")
    if '-' in language:
        language = get_iso_lang(language.lower())
    elif language and country:
        language = '%s_%s' % (language, country)
    user_id = str(uuid.uuid4()).replace("-", "")
    user = users.User("*****@*****.**" % user_id)
    account, _, age_and_gender_set = register_mobile(user, user_id, app_id,
                                                     use_xmpp_kick_channel=use_xmpp_kick_channel,
                                                     GCM_registration_id=GCM_registration_id,
                                                     language=language, ysaaa=True)
    self.response.out.write(json.dumps(dict(result="success", account=account.to_dict(),
                                            age_and_gender_set=age_and_gender_set)))
def _job():
    products = {p.code: p for p in Product.all()}
    order_keys = set()
    for order_item in OrderItem.all():
        if products[order_item.product_code].is_subscription_extension:
            order_keys.add(order_item.order_key)
    orders = db.get(order_keys)
    to_put = list()
    for order in orders:
        if not order.is_subscription_extension_order:
            order.is_subscription_extension_order = True
            customer = order.parent()
            subscription_order = Order.get_by_order_number(customer.id, customer.subscription_order_number)
            order.next_charge_date = subscription_order.next_charge_date
            to_put.append(order)
    for chunk in chunks(to_put, 200):
        db.put(chunk)
def _2000_migrate_user_data(job_key):
    phase = MigrateServiceJob.PHASE_2000_MIGRATE_USER_DATA
    next_phase = MigrateServiceJob.PHASE_2250_CLEANUP_USER_DATA

    # Validate that the job still exists
    job = _get_job(job_key, phase)

    # Do the work
    _log_progress(job)

    # Get all the UserData models (possibly contains None)
    user_data_keys = list()
    for fsic_str_key in job.fsic_keys:
        fsic_key = db.Key(fsic_str_key)
        app_user = users.User(fsic_key.parent().name())
        old_si_user = users.User(fsic_key.name())
        user_data_keys.append(UserData.createKey(app_user, old_si_user))

    if user_data_keys:
        # { app_user : { si_user : user_data } }
        job_user_datas = dict()
        for chunk in chunks(user_data_keys, 200):
            for user_data in db.get(chunk):
                if user_data:
                    app_user_datas = job_user_datas.setdefault(user_data.app_user.email(), dict())
                    if user_data.data:
                        app_user_datas[user_data.service_identity_user.email()] = user_data.data
                    elif user_data.userData:
                        app_user_datas[user_data.service_identity_user.email()] = json.dumps(
                            user_data.userData.to_json_dict())

        if job_user_datas:
            logging.info("Storing job.user_datas: %s", job_user_datas)

            def trans_update_user_datas():
                job = _get_job(job_key)
                job.set_user_datas(job_user_datas)
                job.put()

            db.run_in_transaction(trans_update_user_datas)

    # Set the next phase
    _set_job_in_next_phase(job_key, phase, next_phase)
def _start_building_grid(google_maps_key, app_id, postal_codes, radius, sw_lat, sw_lon, ne_lat, ne_lon, city_name,
                         check_phone_number):
    grid = get_grid(sw_lat, sw_lon, ne_lat, ne_lon, radius)
    todo_count = len(grid)
    logging.info('Total points count: %s', todo_count)
    for points in chunks(grid, 4000):
        todo_count -= len(points)
        is_last = (todo_count == 0)
        deferred.defer(_store_grid_points, google_maps_key, app_id, postal_codes, radius, points, is_last,
                       city_name, check_phone_number, _queue=HIGH_LOAD_WORKER_QUEUE)
def _run_qry(qry_function, qry_function_args, worker_function, worker_function_args, mode, batch_size, batch_timeout,
             batch_timeout_counter=0, qry_transactional=False, cursor=None, worker_queue=HIGH_LOAD_WORKER_QUEUE,
             controller_queue=HIGH_LOAD_CONTROLLER_QUEUE):
    if mode == MODE_SINGLE:
        fetch_size = taskqueue.MAX_TASKS_PER_ADD
    else:
        fetch_size = min(int(taskqueue.MAX_TASKS_PER_ADD * batch_size), datastore_rpc.BaseConnection.MAX_GET_KEYS)

    if qry_transactional:
        items, new_cursor, has_more = db.run_in_transaction(_exec_qry, qry_function, qry_function_args, cursor,
                                                            fetch_size)
    else:
        items, new_cursor, has_more = _exec_qry(qry_function, qry_function_args, cursor, fetch_size)

    if not items:
        return

    tasks = []
    taskargs = {
        'url': deferred._DEFAULT_URL,
        'headers': deferred._TASKQUEUE_HEADERS,
    }
    count_down = batch_timeout_counter
    if mode == MODE_SINGLE:
        for item in items:
            pickled = deferred.serialize(worker_function, item, *worker_function_args)
            tasks.append(taskqueue.Task(payload=pickled, countdown=count_down, **taskargs))
            count_down += batch_timeout
    else:
        for keys in chunks(items, batch_size):
            pickled = deferred.serialize(worker_function, keys, *worker_function_args)
            tasks.append(taskqueue.Task(payload=pickled, countdown=count_down, **taskargs))
            count_down += batch_timeout

    taskqueue.Queue(worker_queue).add(tasks)

    if has_more:
        deferred.defer(_run_qry, qry_function, qry_function_args, worker_function, worker_function_args, mode,
                       batch_size, batch_timeout, count_down, qry_transactional, new_cursor,
                       worker_queue=worker_queue, controller_queue=controller_queue, _queue=controller_queue)
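# A hypothetical invocation of _run_qry (the names below are illustrative, not
# from this codebase). qry_function is expected to yield (items, new_cursor,
# has_more) when called through _exec_qry:
#
#   _run_qry(qry_function=query_all_session_keys,   # hypothetical query fn
#            qry_function_args=[],
#            worker_function=deactivate_sessions,   # hypothetical worker fn
#            worker_function_args=[],
#            mode=MODE_SINGLE,
#            batch_size=50,
#            batch_timeout=2)
#
# In MODE_SINGLE each item gets its own worker task; otherwise items are
# regrouped with chunks(items, batch_size) so each task handles one slice, and
# successive tasks are staggered by batch_timeout seconds via their countdown.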
def put_in_chunks(to_put):
    for chunk in chunks(to_put, 200):
        db.put(chunk)
def migrate():
    files = _get_files()
    for files_list in chunks(files, 50):
        deferred.defer(_do_move_logs, files_list)
def put_charges():
    all_charges = list(Charge.all())
    for charges in chunks(all_charges, 200):
        db.put(charges)
def put_in_chunks(to_put, is_ndb=False):
    if is_ndb:
        ndb.put_multi(to_put)
    else:
        for chunk in chunks(to_put, 200):
            db.put(chunk)
def trans():
    db_puts = list()
    server_settings = get_server_settings()  # fetched up front; both branches below use it

    # Create registration entry.
    installation = Installation.get_by_key_name(install_id) if install_id else None
    app_user = create_app_user(users.User(email), app_id)
    registration = None
    if version == 2:
        registration = Registration.get_by_key_name(registration_id, parent_key(app_user))
        if registration and registration.request_id == request_id:
            InstallationLog(parent=registration.installation, timestamp=now(), registration=registration,
                            pin=registration.pin,
                            description="Received a HTTP request retry for 'request pin'. Not sending a new mail.").put()
            return

    rogerthat_profile = get_service_or_user_profile(users.User(email))
    if rogerthat_profile and isinstance(rogerthat_profile, ServiceProfile):
        # Somebody tries to register a mobile on a service account ?!?
        variables = dict(email=email)
        body = render("somebody_tries_to_register_his_mobile_on_your_service_account_warning", [language], variables)
        html = render("somebody_tries_to_register_his_mobile_on_your_service_account_warning_html", [language],
                      variables)
        logging.info("Sending message to %s\n%s" % (email, body))
        recipients = [email]
        for admin in get_service_admins_non_transactional(app_user):
            recipients.append(admin.user_email)
        msg = MIMEMultipart('alternative')
        msg['Subject'] = "Warning, possibly somebody tries to hack your service account."
        msg['From'] = server_settings.senderEmail
        msg['To'] = ', '.join(recipients)
        msg.attach(MIMEText(body.encode('utf-8'), 'plain', 'utf-8'))
        msg.attach(MIMEText(html.encode('utf-8'), 'html', 'utf-8'))
        send_mail_via_mime(server_settings.senderEmail, recipients, msg)

        warning = InstallationLog(parent=installation, timestamp=now(),
                                  description="Warning somebody tries to register a mobile with the email address "
                                              "of service account %s" % email)
        db_puts.append(warning)
    else:
        profile = get_user_profile(app_user)
        if profile:
            name = profile.name
        else:
            deactivated_profile = get_deactivated_user_profile(app_user)
            name = deactivated_profile.name if deactivated_profile else None

        if not registration:
            registration = Registration(parent=parent_key(app_user), key_name=registration_id)
        registration.timestamp = registration_time
        registration.device_id = device_id
        for pin, static_email in chunks(server_settings.staticPinCodes, 2):
            if email == static_email and len(pin) == 4:
                registration.pin = int(pin)
                pin_str = unicode(registration.pin).rjust(4, '0')
                utils.send_mail(server_settings.dashboardEmail, server_settings.supportWorkers, pin_str,
                                u'Configured pin code %s for %s' % (pin_str, app_user.email()))
                break
        else:
            registration.pin = random.randint(1000, 9999)
        registration.timesleft = 3
        registration.installation = installation
        registration.request_id = request_id
        registration.language = language
        db_puts.append(registration)

        i1 = InstallationLog(parent=registration.installation, timestamp=now(), registration=registration,
                             pin=registration.pin, description="%s requested pin" % email)
        db_puts.append(i1)

        # Send email with pin.
        app = get_app_by_id(app_id)
        variables = dict(pin=registration.pin, name=name, app=app)
        body = render("activation_code_email", [language], variables)
        html = render("activation_code_email_html", [language], variables)
        logging.info("Sending message to %s\n%s" % (email, body))
        msg = MIMEMultipart('alternative')
        msg['Subject'] = localize(language, "%(app_name)s mobile registration", app_name=app.name)
        msg['From'] = server_settings.senderEmail if app.type == App.APP_TYPE_ROGERTHAT \
            else ("%s <%s>" % (app.name, app.dashboard_email_address))
        msg['To'] = email
        msg.attach(MIMEText(body.encode('utf-8'), 'plain', 'utf-8'))
        msg.attach(MIMEText(html.encode('utf-8'), 'html', 'utf-8'))
        send_mail_via_mime(server_settings.senderEmail, email, msg)

        i2 = InstallationLog(parent=registration.installation, timestamp=now(), registration=registration,
                             pin=registration.pin,
                             description="Sent email to %s with pin %s" % (email, registration.pin))
        db_puts.append(i2)

    db.put(db_puts)
def store_news_actions(counters):
    from google.appengine.api import memcache
    from rogerthat.settings import get_server_settings
    from rogerthat.to.news import NewsItemTO
    from mcfw.serialization import serialize
    from rogerthat.bizz.news import setup_news_statistics_count_for_news_item

    news_to_update = counters['news_to_update']
    news_ids = news_to_update.keys()
    news_ds_objects = NewsItem.get_by_id(news_ids)
    news_items = {}
    for news_id, news_item in zip(news_ids, news_ds_objects):
        if news_item:
            news_items[news_id] = news_item
            if news_item.statistics is None:
                news_item.statistics = NewsStatisticPerApp()
                for app_id in news_item.app_ids:
                    news_item.statistics[app_id] = NewsItemStatistics.default_statistics()
        else:
            logging.warn('Skipping summarize for news item %s since it was not found.', news_id)

    # Iterate over a copy of the items so entries can be dropped from the dict
    # while looping (iteritems() would raise a RuntimeError on deletion).
    for news_id, news_item in news_items.items():
        updated = False
        assert isinstance(news_item, NewsItem)
        news_item_datetime = datetime.datetime.utcfromtimestamp(news_item.timestamp)
        if news_item.follow_count < 0:
            setup_news_statistics_count_for_news_item(news_item)
            updated = True

        for app_id, update in news_to_update[news_id].iteritems():
            if app_id == 'sticky_timed_out':
                sponsoring_timed_out = update
                if sponsoring_timed_out and news_item.sticky:
                    news_item.sticky = False
                    updated = True
                continue

            reached = update.get('reached')
            if reached:
                news_item.reach += reached
                updated = True

            rogered = update.get('rogered')
            if rogered:
                news_item.rogered = True
                news_item.users_that_rogered.extend(rogered)
                updated = True

            follow_count = update.get('follow_count')
            if follow_count:
                news_item.follow_count += follow_count
                updated = True

            action_count = update.get('action_count')
            if action_count:
                news_item.action_count += action_count
                updated = True

            for action in (NEWS_REACHED, NEWS_ROGERED, NEWS_NEW_FOLLOWER, NEWS_ACTION):
                added_statistics = update.get('stats_%s' % action)
                if added_statistics:
                    updated = True
                    key = action.replace('news.', '')
                    for prop in ('age', 'gender', 'time'):
                        statistic_property = '%s_%s' % (key, prop)
                        default_stats = getattr(NewsItemStatistics, 'default_%s_stats' % prop)()
                        original_statistics = getattr(news_item.statistics[app_id], statistic_property, default_stats)
                        if prop == 'time':
                            # This is a dict instead of a list
                            for timestamp, value in added_statistics[prop].iteritems():
                                item_date = datetime.datetime.utcfromtimestamp(timestamp)
                                hour_index = NewsItemStatistics.get_time_index(news_item_datetime, item_date)
                                diff = hour_index - len(original_statistics) + 1
                                for i in xrange(diff):
                                    original_statistics.append(0)
                                original_statistics[hour_index] += value
                        else:
                            for i, value in enumerate(added_statistics[prop]):
                                original_statistics[i] += value
                        if news_item.statistics[app_id] is None:
                            news_item.statistics[app_id] = NewsItemStatistics.default_statistics()
                        setattr(news_item.statistics[app_id], statistic_property, original_statistics)

        if not updated:
            del news_items[news_id]  # don't include in db.put

    if len(news_items):
        server_settings = get_server_settings()

        def create_news_item_to(news_item):
            return NewsItemTO.from_model(news_item, server_settings.baseUrl)

        logging.info('Summarize: updating %d news items\n%s', len(news_items), news_items.keys())
        for chunk in chunks(news_items.values(), 200):
            put_rpcs['news_stats'].append(db.put_async(chunk))
            memcache_data = dict()
            for news_item in chunk:
                to = create_news_item_to(news_item)
                memcache_data[str(to.id)] = serialize(NewsItemTO, to)
            memcache.set_multi(memcache_data, time=600, namespace='v1.news')  # @UndefinedVariable
def update_statistic():
    # Completely rebuilds statistics on run.
    current_date = now()
    broadcast_count_dict = {}
    for news_item in get_news_of_last_month():
        service_email = get_service_user_from_service_identity_user(news_item.sender).email()
        if service_email not in broadcast_count_dict:
            broadcast_count_dict[service_email] = 1
        else:
            broadcast_count_dict[service_email] += 1

    future_event_count_dict = {}
    for future_event_key in Event.get_future_event_keys(current_date):
        service_email = future_event_key.parent().name()
        if service_email not in future_event_count_dict:
            future_event_count_dict[service_email] = 1
        else:
            future_event_count_dict[service_email] += 1

    static_content_count_dict = {}
    for static_content_key in SolutionStaticContent.get_all_keys():
        service_email = static_content_key.parent().name()
        if service_email not in static_content_count_dict:
            static_content_count_dict[service_email] = 1
        else:
            static_content_count_dict[service_email] += 1

    unanswered_question_count_dict = {}
    # Find the oldest unanswered question per customer and add it to the statistics.
    for unanswered_question in SolutionInboxMessage.get_all_unanswered_questions(5):
        service_email = unanswered_question.service_user.email()
        if unanswered_question.question_asked_timestamp != 0:
            if service_email not in unanswered_question_count_dict:
                unanswered_question_count_dict[service_email] = unanswered_question.question_asked_timestamp
            elif unanswered_question.question_asked_timestamp < unanswered_question_count_dict[service_email]:
                unanswered_question_count_dict[service_email] = unanswered_question.question_asked_timestamp

    # Dict with the app id of the city as key and the statistics of that city as value.
    statistics = {}
    for customer in Customer.all():
        if len(customer.app_ids) != 0:
            service_email = customer.service_email
            if customer.app_id not in statistics:
                stats = AssociationStatistic(key_name=customer.app_id)
                stats.generated_on = current_date
                statistics[customer.app_id] = stats
            else:
                stats = statistics[customer.app_id]
            if not service_email:
                logging.error(u'Association customer %s(%d) has no service_email!', customer.name, customer.id)
                continue
            stats.customer_emails.append(service_email)
            if service_email in broadcast_count_dict:
                stats.broadcasts_last_month.append(broadcast_count_dict[customer.service_email])
            else:
                stats.broadcasts_last_month.append(0)
            if service_email in future_event_count_dict:
                stats.future_events_count.append(future_event_count_dict[service_email])
            else:
                stats.future_events_count.append(0)
            if service_email in static_content_count_dict:
                stats.static_content_count.append(static_content_count_dict[service_email])
            else:
                stats.static_content_count.append(0)
            if service_email in unanswered_question_count_dict:
                stats.last_unanswered_questions_timestamps.append(unanswered_question_count_dict[service_email])
            else:
                stats.last_unanswered_questions_timestamps.append(0)

    for chunk in chunks(statistics.values(), 200):
        db.put(chunk)
def get_custom_signin_path(self, host):
    settings = get_server_settings()
    paths = settings.customSigninPaths
    mapping = dict((h, p) for h, p in chunks(paths, 2))
    return mapping.get(host)
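# customSigninPaths stores host/path pairs as one flat list; chunks(paths, 2)
# regroups them into pairs. With a hypothetical value
#   ['dashboard.example.com', '/customers/signin', 'www.example.com', '/signin']
# the mapping becomes {'dashboard.example.com': '/customers/signin',
#                      'www.example.com': '/signin'}, so a lookup for
# 'dashboard.example.com' returns '/customers/signin'.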