def post(self):
    """Send a reminder email to the active player each time the opponent
    in a game completes a move (if an email address is on file).

    The body provides the game's urlsafe key, the player's hand, the
    visible draw_card and instructions.  Dispatched via an App Engine
    push task queue; 'user_key' and 'game_key' arrive as task params.
    """
    app_id = app_identity.get_application_id()  # NOTE(review): unused; sender is rebuilt below
    user = get_by_urlsafe(self.request.get('user_key'), User)
    game = get_by_urlsafe(self.request.get('game_key'), Game)
    # Get hand of current player
    if game.active_player == game.player_one:
        hand = game.hand_one
    else:
        hand = game.hand_two
    # Format game data for e-mail
    sorted_hand = sorted(hand)
    string_hand = ' '.join(str(card) for card in sorted_hand)
    # NOTE(review): if draw_card is a plain string this inserts a space
    # between every character — confirm draw_card is a sequence of cards.
    string_card = ' '.join(game.draw_card)
    # Prepare e-mail
    subject = 'Your turn!'
    body = "Hello {}: Your opponent just moved, so it's your turn." \
           " Your hand is {}. The visible card is {}." \
           " When you go to start_move, enter 1 to take visible card" \
           " or 2 to draw from pile. The game key is {}.". \
        format(user.name, string_hand, string_card, game.key.urlsafe())
    logging.debug(body)
    # Arguments to send_mail are: from, to, subject, body
    mail.send_mail('noreply@{}.appspotmail.com'.format(
        app_identity.get_application_id()),
        user.email, subject, body)
def get(self):
    """Send a reminder email to each User with an email address who has
    games in progress.

    The body includes a count of in-progress games and their urlsafe
    keys.  Called every day using a cron job.
    """
    app_id = app_identity.get_application_id()  # NOTE(review): unused; sender is rebuilt below
    users = User.query(User.email != None)
    for user in users:
        q = user.all_games()
        games = q.filter(Game.game_over == False)
        if games.count() > 0:
            subject = 'In Progress game reminder!'
            body = 'Hello {}, you have {} games in progress.' \
                   ' Their keys are: {}'.\
                format(user.name, games.count(),
                       ', '.join(game.key.urlsafe() for game in games))
            logging.debug(body)
            # Send emails
            # Arguments to send_mail are: from, to, subject, body
            mail.send_mail('noreply@{}.appspotmail.com'.
                           format(app_identity.get_application_id()),
                           user.email, subject, body)
def get(self): now=datetime.datetime.now() # we assume that the finalize comes after the start, so we go through the # events in reverse order looking for events that started by never ended unfinished = dict() for event in MapReduceEvent.query().order(MapReduceEvent.start_time): if event.step == "kickoffjob_callback": unfinished[event.mr_id]=event elif event.step == "finalizejob_callback": unfinished.pop(event.mr_id,0) logging.info("logging events %s" % unfinished) timeseries = [] for event in unfinished.itervalues(): timeseries.append({ "timeseriesDesc": {"project":get_application_id(), "metric":DURATION_METRIC, "labels":{MR_ID_LABEL:event.mr_id}}, "point": { "start": now.replace(microsecond=0).isoformat()+"Z", "end": now.replace(microsecond=0).isoformat()+"Z", "doubleValue": (now-event.start_time).total_seconds()}}) if len(timeseries) > 0: cm = cloudmonitoring() cm.timeseries().write( project=get_application_id(), body={"timeseries":timeseries}).execute()
def get(self):
    """Report the running time of in-flight Datastore admin backup
    operations to Cloud Monitoring.

    For each Active/Created DatastoreAdminOperation, finds the earliest
    BackupInformation child and emits (now - earliest start) seconds.
    """
    now = datetime.datetime.now()
    timeseries = []
    for dsbackupop in DatastoreAdminOperation.query(
            DatastoreAdminOperation.status.IN(("Active", "Created"))):
        # Sentinel with start_time=now: any real child record started earlier.
        oldest = BackupInformation(start_time=now)
        for info in BackupInformation.query(ancestor=dsbackupop.key):
            if info.start_time < oldest.start_time:
                oldest = info
        timeseries.append({
            "timeseriesDesc": {"project": get_application_id(),
                               "metric": DS_DURATION_METRIC,
                               "labels": {DS_ID_LABEL: str(dsbackupop.key.id())}},
            "point": {
                "start": now.replace(microsecond=0).isoformat() + "Z",
                "end": now.replace(microsecond=0).isoformat() + "Z",
                "doubleValue": (now - oldest.start_time).total_seconds()}})
    logging.info("logging events %s" % timeseries)
    if len(timeseries) > 0:
        cm = cloudmonitoring()
        cm.timeseries().write(
            project=get_application_id(),
            body={"timeseries": timeseries}).execute()
def server_expired():
    """Notify every Account by email that the POSTed server has expired.

    If the server is marked blocked, also asks the server's manager
    endpoint to enforce the expiry and appends the outcome to the body.
    Returns an empty response for the task-queue caller.
    """
    server = get_server(request.form['server'])
    sender = 'no-reply@' + get_application_id() + '.appspotmail.com'
    site_url = 'http://' + get_application_id() + '.appspot.com'
    for account in Account.query():
        message = mail.EmailMessage(sender=sender, to=account.email)
        message.subject = "Server expire notification: %s" % server.server_name
        show_url = url_for('server.show', server_key=server.key.urlsafe())
        message_body = "Server %s is expired at %s\n %s%s" \
            % (server.server_name, server.expire_date, site_url, show_url)
        if server.blocked:
            # NOTE(review): this manager fetch runs once per account,
            # repeating the same remote call — consider hoisting it out
            # of the loop.
            url = 'http://%s/manager/expired/%s'\
                % (server.ip_address, server.token)
            result = fetch_server_url(url=url, retry_count=3)
            if result.status_code == 200:
                if result.content == 'TRUE':
                    message_body += '\n Server %s is blocked upon expiry, %s'\
                        % (server.server_name, server.expire_date)
                else:
                    message_body += '\n %s' % result.content
            else:
                message_body += '\n Unable to reach %s to block upon expiry, %s'\
                    % (server.server_name, server.expire_date)
        message.body = message_body
        message.send()
    return make_response('')
def Check(EmployeeDataFilePath, emailID, fallOutReport):
    """Validate an employee .xlsx upload and email either a fall-out
    report (on success, when requested) or an error report.

    Args:
        EmployeeDataFilePath: path to the uploaded .xlsx file.
        emailID: recipient address for the result email.
        fallOutReport: 1 to generate/send the fall-out report on success.

    Returns:
        1 if the file validated cleanly, 0 otherwise.
    """
    FileName = EmployeeDataFilePath.split('/')[-1].split(".xlsx")[0]
    EmployeeDataFilePathTsv = XlsxToTsv(EmployeeDataFilePath)
    FieldId, Employees, TotalEmployee = EmployeeData(EmployeeDataFilePathTsv)
    Errors = ErrorList(Employees, TotalEmployee, ProductionTemplateFileName)
    if len(Errors) == 0:
        # NOTE(review): assuming the success email is sent only when
        # fallOutReport == 1 — confirm the original indentation in VCS.
        if fallOutReport == 1:
            FileName = FallOutReportXlsx(FieldId, Employees, TotalEmployee,
                                         ProductionTemplateFileName, FileName)
            Message = open("SuccessEmailBody.txt").read()
            Subject = "Success Factor Upload FallOut Report"
            mail.send_mail(sender=sender_email_id.format(
                app_identity.get_application_id()),
                to=emailID, subject=Subject,
                body=Message, attachments=[(FileName, gcs.open(FileName).read())])
            # Report was staged in GCS only for the attachment; clean up.
            gcs.delete(FileName)
    else:
        FileName = XlsxErrorReport(Errors, FileName)
        Message = open("ErrorEmailBody.txt").read()
        Subject = "Success Factor Upload File Error"
        mail.send_mail(sender=sender_email_id.format(
            app_identity.get_application_id()),
            to=emailID, subject=Subject,
            body=Message, attachments=[(FileName, gcs.open(FileName).read())])
        gcs.delete(FileName)
    if len(Errors) == 0:
        return 1
    else:
        return 0
def draw_page(self, template_name, **render_data):
    """Render ``template_name``.html with the common page context
    (dev flag, user links, admin bit, channel token, locale) and write
    the result to the response."""
    app_id = get_application_id()
    dev = True if app_id in ("the-hat-international", "the-hat-dev") else None
    template = self.jinja2().environment.get_template(
        "{}.html".format(template_name))
    render_data["dev"] = dev
    if self.user:
        render_data["user_link"] = users.create_logout_url("/")
        render_data["user_email"] = users.get_current_user().email()
        locale = self.user_key.get().localization
    else:
        render_data["user_link"] = users.create_login_url(self.request.url)
        render_data["user_email"] = None
        locale = "ru_RU"
    render_data["is_admin"] = users.is_current_user_admin()
    curr_channel = ndb.Key(NotificationChannel, "notifications").get()
    render_data["token"] = curr_channel.channel_token if curr_channel else None
    i18n.get_i18n().set_locale(locale)
    self.response.write(template.render(render_data))
def post_unauthenticated_(self):
    """Create (or look up) a LocalUser for the posted email and mail
    them a login link containing their password.

    Access is guarded only by a shared-secret form field, not by real
    authentication.  User-facing text is Finnish and left untouched.
    """
    if self.request.params['secret'] == "Suomen Partacolliet ry":
        email = self.request.params['email']
        entries = LocalUser.gql("WHERE email = :1", email)
        if entries.count() == 0:
            # First visit: create the user with a random password.
            entry = LocalUser(email = email, nickname = self.request.params['nickname'], password = uuid.uuid4().hex)
            entry.put()
        else:
            entry = entries.get()
        message = mail.EmailMessage()
        message.sender = "*****@*****.**"  # NOTE(review): redacted in source
        message.to = email
        message.subject = "Uusi salasana"
        message.body = ("""Hei! Pyysit salasanaa Suomen Partacolliet ry:n jalostustietokantaan. Voit kirjautua tietokantaan tästä linkistä: <https://%s.appspot.com/LocalLoginRedirect?%s""" % (app_identity.get_application_id(), urllib.urlencode({'email': str(entry.email), 'password': str(entry.password), 'redirect_url': ('https://%s.appspot.com/' % app_identity.get_application_id())})))
        message.send()
        # NOTE(review): reply placement inside the secret-check branch is
        # assumed from the collapsed source — confirm against VCS.
        self.jsonReply({'email': '', 'nick': '', 'secret': '', 'status_message': 'Viesti matkalla'})
def get(self, club_id, email_id):
    """Toggle the admin flag on an email's membership link for a club.

    Refuses if the caller is not a club admin, the email or link is
    missing, or the link is disabled.  Guarantees at least one admin
    remains, and emails the member when they have just been promoted.
    """
    # check credentials
    if not(check_admin(club_id)):
        add_notify("Error", "You do not have the appropriate permissions")
        self.redirect("/")
        return
    user = users.get_current_user()  # NOTE(review): unused in this handler
    club = Club.get_by_key_name(club_id)
    # get the email to be admin-ified
    email = Email.get_by_key_name(email_id)
    if not(email):
        add_notify("Error", "No email to be promoted!")
        self.redirect("/club/%s" % club.slug)
        return
    # find the link to promote it
    query = EmailToClub.all()
    query.filter("email =", email)
    query.filter("club =", club)
    link = query.get()
    if not(link):
        add_notify("Error", "No email to be promoted!")
        self.redirect("/club/%s" % club.slug)
        return
    if not link.enable:
        add_notify("Error", "Can't promote a disabled email")
        self.redirect("/club/%s" % club.slug)
        return
    # flip the admin bit
    link.admin = not(link.admin)
    link.put()
    # make sure we have at least one admin
    query = EmailToClub.all()
    query.filter("club =", club)
    query.filter("admin =", True)
    admin_check = query.get()
    if not(admin_check):
        # reverse the admin
        link.admin = True
        link.put()
    # send an email if you've just been promoted
    if link.admin:
        domain = "http://%s.appspot.com" % get_application_id()
        msg = mail.EmailMessage()
        fromaddr = "noreply@%s.appspotmail.com" % get_application_id()
        msg.sender = "Flyer Guy <%s>" % fromaddr
        msg.to = email.email
        msg.subject = "[Flyer] You've an admin of %s!" % club.name
        msg.html = template.render("templates/email_admin.html",
                                   {"domain": domain, "club": club.name, "email": email.id})
        try:
            msg.send()
        # Python 2 tuple-unpacking except syntax.
        except apiproxy_errors.OverQuotaError, (message,):
            # Log the error
            add_notify("Error", "Could not send email")
            logging.error("Could not send email")
            logging.error(message)
def send_bm(bmk):
    """Email the bookmark entity referenced by key ``bmk`` to its owner."""
    bookmark = bmk.get()
    app_id = app_identity.get_application_id()
    note = mail.EmailMessage()
    note.sender = 'bm@' + "%s" % app_id + '.appspotmail.com'
    note.to = bookmark.user.email()
    note.subject = "(%s) %s" % (app_id, bookmark.title)
    note.html = """ %s (%s)<br>%s<br><br>%s """ % (
        bookmark.title, dtf(bookmark.data), bookmark.url, bookmark.comment)
    note.send()
def post(self):
    """Bind the signed-in Google user to a submitted email address and
    send a verification email containing a one-time link.

    NOTE(review): this block is corrupted in the source — an automated
    redaction replaced part of the "already bound" lookup with
    ``******``, leaving the statements below it unreachable as written.
    Restore the original from version control before shipping.
    """
    user = users.get_current_user()
    if user:
        # find if we're already bound to an email
        email_query = Email.all()
        # NOTE(review): corrupted statement preserved verbatim.
        email_query.filter("user ="******"Notice", "Already bound to an email")
        self.redirect("/")
        return
        # handle the email input
        email_addr = normalize_email(self.request.get("email"))
        if not(email_addr):
            add_notify("Notice", "Not a correct UNI format")
            self.redirect("/")
            return
        # find the email by the email address
        email_key = generate_hash(email_addr)[:10]
        email, made = get_or_make(Email, email_key)
        if not(email.email):
            email.id = email_key
            email.email = email_addr
            email.put()
        # user already tied, don't allow transfers through this interface
        if email.user_enable:
            add_notify("Notice", "User is already enabled")
            self.redirect("/")
            return
        if not(email.user):
            email.user = user
        # generate a new key
        email.user_request_key = generate_random_hash(str(email))
        email.user_request_time = datetime.today()
        email.put()
        # send a verification email
        domain = "http://%s.appspot.com" % get_application_id()
        verify_addr = domain + "/link/email/%s" % email.user_request_key
        msg = mail.EmailMessage()
        fromaddr = "noreply@%s.appspotmail.com" % get_application_id()
        msg.sender = "Flyer Guy <%s>" % fromaddr
        msg.to = email.email
        msg.subject = "[Flyer] Verify your email address"
        msg.html = template.render("templates/email_verify.html",
                                   {'verify_addr': verify_addr})
        try:
            msg.send()
        # Python 2 tuple-unpacking except syntax.
        except apiproxy_errors.OverQuotaError, (message,):
            # Log the error
            add_notify("Error", "Could not send email")
            logging.error("Could not send email")
            logging.error(message)
    self.redirect("/")
def notify_task(queue_id):
    """Work through a CallQueue: phone or SMS the next pending recipient,
    record an AlertTrace, and re-defer itself until the alert is accepted
    or the loop limit (5 restarts) is reached.

    Entry statuses: 'P' = pending, 'C' = contacted.
    """
    try:
        settings = Settings.all().get()
        client = TwilioRestClient(settings.account_sid, settings.auth_code)
        call_queue = CallQueue.get_by_id(queue_id)
        next_entry = call_queue.entries.filter('status =', 'P').order('sequence').get()
        if next_entry and call_queue.status == 'P':
            if next_entry.entry_type == 'phone':
                next_entry.status = 'C'
                next_entry.put()
                client.calls.create(
                    to=next_entry.recipient.phone_number,
                    from_=settings.twilio_number,
                    url='http://%s.appspot.com/call?call_queue_id=%s&queue_entry_id=%s' % (
                        app_identity.get_application_id(), queue_id, next_entry.key().id()),
                    status_callback='http://%s.appspot.com/callStatus?call_queue_id=%s&queue_entry_id=%s' % (
                        app_identity.get_application_id(), queue_id, next_entry.key().id()),
                    method='GET', timeout=15)
                event_log = 'Called %s (%s)' % (next_entry.recipient.name, next_entry.recipient.phone_number)
                alert_trace = AlertTrace(event_log=event_log, call_queue=call_queue)
                alert_trace.put()
            if next_entry.entry_type == 'sms':
                next_entry.status = 'C'
                next_entry.put()
                sms_message = '%s Send 1 to accept' % call_queue.notifier.sms_message
                client.sms.messages.create(to=next_entry.recipient.phone_number,
                                           from_=settings.twilio_number,
                                           body=sms_message)
                # Give the recipient sms_timeout seconds to reply before
                # escalating to the next entry.
                deferred.defer(notify_task, queue_id, _countdown=settings.sms_timeout)
                event_log = 'SMS %s (%s)' % (next_entry.recipient.name, next_entry.recipient.phone_number)
                alert_trace = AlertTrace(event_log=event_log, call_queue=call_queue)
                alert_trace.put()
        elif next_entry == None and call_queue.status == 'P' and call_queue.loop_count <= 5:
            call_queue.loop_count = call_queue.loop_count + 1
            call_queue.put()
            # NOTE(review): the line break inside this literal was mangled
            # in the source; reconstructed as an embedded \n — confirm.
            event_log = 'Alert not accepted yet. Restarting. \nLoop count:%s' % call_queue.loop_count
            alert_trace = AlertTrace(event_log=event_log, call_queue=call_queue)
            alert_trace.put()
            # re start the alert propagation
            entries = []
            for entry in call_queue.entries:
                new_entry = clone_entity(entry, status='P')
                entries.append(new_entry)
            if len(entries) != 0:
                db.put(entries)
            deferred.defer(notify_task, queue_id, _countdown=5 * 60)
    except:
        # raise
        # Bare except is deliberate: swallow everything so the deferred
        # task isn't retried forever; failures are only logged.
        logging.error("notify_task failed for queue_id : %s" % queue_id)
        logging.error(traceback.format_exc())
def send_feedback():
    """Handle the feedback-form POST: mail the submission to the
    configured receiver and return the configured confirmation JSON."""
    payload = flask.request.get_json(True)
    sheet_config = get_config()
    app_id = app_identity.get_application_id()
    feedback_body = ("Email: " + payload['email'] +
                     " Name: " + payload['name'] +
                     " Message: " + payload['message'])
    mail.send_mail(sender="feedback@" + app_id + ".appspotmail.com",
                   to=sheet_config['feedBackEmailReceiver'],
                   subject=app_id + " Feedback Form",
                   body=feedback_body)
    reply = {'message': sheet_config['feedBackSentMessage']}
    return flask.Response(json.dumps(reply), content_type='application/json')
def index():
    """List all examples"""
    form = SignupForm()
    if not form.validate_on_submit():
        return render_template('index.html', form=form)
    # Valid signup: persist the lead and notify the app admins.
    SignupModel(email=form.email.data).put()
    app_id = app_identity.get_application_id()
    mail.send_mail_to_admins(
        sender="landing@%s.appspotmail.com" % app_id,
        subject="New landing lead for %s." % app_id,
        body=form.email.data,
    )
    flash(u'Thank you for your interest!', 'success')
    return redirect(url_for('index'))
def send_full_rsvp_list_email():
    """Email the complete guest RSVP list to the wedding admins.

    One CSV-style line per guest: first name, last name, group,
    filled-out name (commas stripped), then the garba/ceremony/reception
    counts.  Guests are ordered by group then first name.
    """
    sender_address = 'admin@%s.appspotmail.com' % app_identity.get_application_id()
    # NOTE(review): recipient addresses were redacted in the source.
    recepient_address = '[email protected], [email protected]'
    subject = 'Full RSVP List from %s' % app_identity.get_application_id()
    group_members = db.GqlQuery('SELECT * FROM Guest order by group_name, first_name')
    # Collect lines and join once — the original built the body with
    # repeated string += which is quadratic in the number of guests.
    lines = []
    for group_member in group_members:
        filled_out = group_member.filled_out_name.strip(',') \
            if group_member.filled_out_name is not None else ''
        lines.append('%s,%s,%s,%s,%d,%d,%d\n' % (
            group_member.first_name, group_member.last_name,
            group_member.group_name, filled_out,
            group_member.garba, group_member.ceremony, group_member.reception))
    body = ''.join(lines)
    mail.send_mail(sender_address, recepient_address, subject, body)
def get(self):
    """Weekday cron: deactivate flyers (and their jobs) past their event
    date, then email each poster a digest of their outstanding jobs."""
    # is it a week day?
    current_date = datetime.now(CurrentTimeZone())
    day = current_date.weekday()  # starts 0=monday... 6=sunday
    if day < 5:  # weekday
        # get all the active jobs
        job_query = Job.all()
        job_query.filter("active =", True)
        jobs = job_query.fetch(2000)
        # check if the jobs are past their event date
        flyers = set([j.flyer for j in jobs])
        for flyer in flyers:
            flyer_date = flyer.event_date.replace(tzinfo=CurrentTimeZone())
            if current_date > flyer_date:
                flyer.active = False
                flyer.put()
        for job in jobs:
            if not(job.flyer.active):
                job.active = False
                job.put()
        # only get the un-done jobs
        jobs = [j for j in jobs if j.active and j.state != 2]
        # send the emails: bin jobs by email, send
        emails = set([j.email for j in jobs])  # !!!
        # email sending pre-computation
        domain = "http://%s.appspot.com" % get_application_id()
        fromaddr = "noreply@%s.appspotmail.com" % get_application_id()
        date = time.strftime("%Y/%m/%d")
        # send the emails
        for email in emails:
            js = [j for j in jobs if j.email == email]
            msg = mail.EmailMessage(sender="Flyer Guy <%s>" % fromaddr, to=email.email)
            msg.subject = "[Flyer] Reminder (%s)" % date
            msg.html = template.render("templates/email.html",
                                       {"jobs": js, "domain": domain, "email": email.email})
            try:
                msg.send()
            # Python 2 tuple-unpacking except syntax.
            except apiproxy_errors.OverQuotaError, (message,):
                # Log the error.
                logging.error("Could not send email")
                logging.error(message)
    self.response.out.write("Sent emails")
def backup_datastore(bucket=None, kinds=None):
    """Trigger a scheduled Datastore export of all whitelisted entity
    kinds into a GCS bucket path.

    Args:
        bucket: optional GCS bucket override passed to get_backup_path.
        kinds: optional iterable restricting which entity kinds to export.
    """
    if not get_backup_setting("ENABLED", False):
        logger.warning(
            "DJANGAE_BACKUP_ENABLED is False or not set."
            "The datastore backup will not be run."
        )
        return
    # Exclude any blacklisted entity kinds from the export.
    valid_models = _get_valid_export_models(kinds)
    if not valid_models:
        logger.warning("No whitelisted entity kinds to export.")
        return
    export_body = {
        'outputUrlPrefix': get_backup_path(bucket),
        'entityFilter': {
            'kinds': valid_models,
            'namespaceIds': [''],
        }
    }
    # Build the credentialed service object and kick off the export.
    service = _get_service()
    service.projects().export(
        projectId=app_identity.get_application_id(),
        body=export_body,
    ).execute()
def ListActions(self, error=None):
    """Handler for get requests to datastore_admin/confirm_delete.

    Gathers kind statistics (falling back to datastore stats when the
    explicit kind list is unavailable) plus current/pending operations
    and backups, then renders the action-list page.

    Args:
        error: optional error message to surface in the template.
    """
    use_stats_kinds = False
    kinds = []
    try:
        kinds = self.GetKinds()
        if not kinds:
            use_stats_kinds = True
    except datastore_errors.Error:
        # Kind enumeration failed; fall back to the stats-derived kinds.
        use_stats_kinds = True
    last_stats_update, kind_stats = _GetDatastoreStats(
        kinds, use_stats_kinds=use_stats_kinds)
    template_params = {
        'kind_stats': kind_stats,
        'cancel_url': self.request.path + '?' + self.request.query_string,
        'last_stats_update': last_stats_update,
        'app_id': self.request.get('app_id'),
        'hosting_app_id': app_identity.get_application_id(),
        'has_namespace': self.request.get('namespace', None) is not None,
        'namespace': self.request.get('namespace'),
        'action_list': sorted(ENTITY_ACTIONS.keys()),
        'backup_action_list': sorted(BACKUP_ACTIONS.keys()),
        'pending_backup_action_list': sorted(PENDING_BACKUP_ACTIONS.keys()),
        'error': error,
        'completed_operations': self.GetOperations(active=False),
        'active_operations': self.GetOperations(active=True),
        'pending_backups': self.GetPendingBackups(),
        'backups': self.GetBackups(),
        'map_reduce_path': utils.config.MAPREDUCE_PATH + '/detail'
    }
    utils.RenderToResponse(self, 'list_actions.html', template_params)
def _get_pending_auth_db_transaction(): """Used internally to keep track of changes done in the transaction. Returns: Instance of _AuthDBTransaction (stored in the transaction context). """ # Use transaction context to store the object. Note that each transaction # retry gets its own new transaction context which is what we need, # see ndb/context.py, 'transaction' tasklet, around line 982 (for SDK 1.9.6). assert ndb.in_transaction() ctx = ndb.get_context() txn = getattr(ctx, "_auth_db_transaction", None) if txn: return txn # Prepare next AuthReplicationState (auth_db_rev +1). state = replication_state_key().get() if not state: primary_id = app_identity.get_application_id() if is_primary() else None state = AuthReplicationState(key=replication_state_key(), primary_id=primary_id, auth_db_rev=0) # Assert Primary or Standalone. Replicas can't increment auth db revision. if not is_primary() and state.primary_id: raise ValueError("Can't modify Auth DB on Replica") state.auth_db_rev += 1 state.modified_ts = utils.utcnow() # Store the state in the transaction context. Used in replicate_auth_db(...) # later. txn = _AuthDBTransaction(state) ctx._auth_db_transaction = txn return txn
def get(self):
    """Render the sign page from markdown, substituting the app name and
    any error message passed via the query string."""
    err_msg = self.request.get("error", default_value="")
    tools.loadHead(self.response)
    substitutions = {
        "{{app_name}}": get_application_id(),
        "{{error_msg}}": err_msg,
    }
    page_html = tools.parseMarkdown("./page_src/sign.md", substitutions)
    self.response.write(page_html)
def get(self):
    """Send a reminder email to each User with an email who has games
    in progress.

    The body lists a count of active games and their urlsafe keys.
    Called every 12 hours using a cron job.
    """
    for user in User.query(User.email != None):
        games = Game.query(
            ndb.OR(Game.user_1 == user.key, Game.user_2 == user.key)
        ).filter(Game.game_over == False)
        total = games.count()
        if total > 0:
            subject = 'This is a reminder!'
            keys = ', '.join(game.key.urlsafe() for game in games)
            body = 'Hello {}, you have {} games in progress. Their' \
                   ' keys are: {}'.format(user.name, total, keys)
            logging.debug(body)
            # This will send test emails, the arguments to send_mail are:
            # from, to, subject, body
            mail.send_mail('noreply@{}.appspotmail.com'.
                           format(app_identity.get_application_id()),
                           user.email, subject, body)
def get(self):
    """Send a challenge email to each player whose score is below the
    global average.

    The body also includes a count of active games and their urlsafe
    keys.  Called every hour using a cron job.
    """
    users = User.query()
    for user in users:
        subject = 'Is your memory better than average?'
        avg = int(float(MemoryGameAPI._get_average_score()))
        if (user.score < avg):
            diff = avg - user.score
            # BUG FIX: the format string has four placeholders (name,
            # score, diff, avg) but only (diff, avg) were supplied,
            # raising IndexError at runtime.
            body = 'Greetings {}, Your current Memory Game score is: {}. ' \
                   'It is {} less than the average score of {}. Keep going!'.\
                format(user.name, user.score, diff, avg)
            games = Game.query(Game.user == user.key).\
                filter(Game.status == GameState.Active)
            if games.count() > 0:
                # BUG FIX: two placeholders were fed three arguments, so
                # the game count slot showed the user's name instead.
                active_games = 'You have {} games in progress. Their' \
                               ' keys are: {}'.\
                    format(games.count(),
                           ', '.join(game.key.urlsafe() for game in games))
                body += active_games
            logging.debug(body)
            mail.send_mail('noreply@{}.appspotmail.com'.
                           format(app_identity.get_application_id()),
                           user.email, subject, body)
def send_email(to, subject, body, sender=''):
    """Core function to send emails.

    Don't call it directly — enqueue it through SendEmailHandler:
        email_url = self.uri_for('taskqueue-send-email')
        taskqueue.add(url=email_url, params={
            'to': [[contact_recipient]],
            'subject': [[subject]],
            'body': [[body]],
        })

    Args:
        to: recipient address.
        subject: email subject line.
        body: plain-text body.
        sender: optional sender; falls back to config.contact_sender or
            a no-reply address derived from the application id.
    """
    # BUG FIX: the original condition was `sender != '' or not
    # is_email_valid(sender)`, which overwrote every *valid* explicit
    # sender with the configured default.  Fall back only when the
    # caller supplied no sender or an invalid one.
    if sender == '' or not is_email_valid(sender):
        if is_email_valid(config.contact_sender):
            sender = config.contact_sender
        else:
            app_id = app_identity.get_application_id()
            sender = "%s <no-reply@%s.appspotmail.com>" % (app_id, app_id)
    try:
        # Best-effort audit log; a quota/validation failure must not
        # block the actual send.
        logEmail = models.LogEmail(
            sender=sender,
            to=to,
            subject=subject,
            body=body,
            when=get_date_time("datetimeProperty")
        )
        logEmail.put()
    except (apiproxy_errors.OverQuotaError, BadValueError):
        logging.error("Error saving Email Log in datastore")
    mail.send_mail(sender, to, subject, body)
def admin_config_update():
    """Admin view: edit the master Config entity.

    On a valid POST, persists the form, hot-reloads the config module so
    module-level constants pick up the new values, refreshes the cached
    CONFIG_DB, and redirects to the welcome page.  Service-prefixed
    ('/_s/') requests receive JSON instead of HTML.
    """
    config_db = model.Config.get_master_db()
    form = ConfigUpdateForm(obj=config_db)
    if form.validate_on_submit():
        form.populate_obj(config_db)
        config_db.put()
        # Re-import so module-level config constants see the new values.
        reload(config)
        app.config.update(CONFIG_DB=config_db)
        return flask.redirect(flask.url_for('welcome'))
    if flask.request.path.startswith('/_s/'):
        return util.jsonify_model_db(config_db)
    instances_url = None
    if config.PRODUCTION:
        instances_url = '%s?app_id=%s&version_id=%s' % (
            'https://appengine.google.com/instances',
            app_identity.get_application_id(),
            config.CURRENT_VERSION_ID,
        )
    return flask.render_template(
        'admin/config_update.html',
        title='Admin Config',
        html_class='admin-config',
        form=form,
        config_db=config_db,
        instances_url=instances_url,
        has_json=True,
    )
def send(recipient, subject, body, sender=None, reply_to=None, **kwargs):
    """Send ``body`` as an html email to ``recipient`` with ``subject``.

    When ``sender`` is falsy it falls back to ``settings['email_sender']``,
    and failing that to ``noreply@[appid].appspotmail.com``.  Extra
    keyword arguments (e.g. headers) are forwarded to ``mail.send_mail``.
    """
    try:
        sender = sender or settings.get('email_sender', None)
    except settings.ConfigurationError:
        sender = None
    if not sender:
        sender = "noreply@%s.appspotmail.com" % app_identity.get_application_id()
        logging.info("No sender configured, using the default one: %s" % sender)
    res = mail.send_mail(
        sender=sender,
        to=recipient,
        subject=subject,
        body=body,
        html=body,
        reply_to=reply_to or sender,
        **kwargs)
    logging.info('Email sent to %s by %s with subject %s and result %s' % (
        recipient, sender, subject, res))
    return res
def get_app_hostname():
    """Return hostname of a running Endpoints service.

    Returns hostname of a running Endpoints API. It can be
      1) "localhost:PORT" if running on development server, or
      2) "app_id.appspot.com" if running on external app engine prod, or
      3) "app_id.googleplex.com" if running as Google first-party
         Endpoints API, or
      4) None if not running on App Engine (e.g. Tornado Endpoints API).

    Returns:
        A string representing the hostname of the service.
    """
    if not is_running_on_app_engine() or is_running_on_localhost():
        return None
    version = modules.get_current_version_name()
    app_id = app_identity.get_application_id()
    suffix = 'appspot.com'
    # "domain:name" app ids belong to dasher domains; google.com apps
    # are served from googleplex.com instead of appspot.com.
    if ':' in app_id:
        tokens = app_id.split(':')
        api_name = tokens[1]
        if tokens[0] == 'google.com':
            suffix = 'googleplex.com'
    else:
        api_name = app_id
    # Check if this is the default version
    default_version = modules.get_default_version()
    if version == default_version:
        return '{0}.{1}'.format(app_id, suffix)
    else:
        # Non-default versions are addressed with the "-dot-" scheme.
        return '{0}-dot-{1}.{2}'.format(version, api_name, suffix)
def send(recipient, subject, body, text_body=None, sender=None, reply_to=None, **kwargs):
    """Send an html email to ``recipient`` with ``subject`` and ``body``.

    Falls back to ``app_config['email']['sender']`` when ``sender`` is
    falsy, then to ``noreply@[appid].appspotmail.com``.  ``text_body``
    defaults to ``body``.  Extra keyword arguments (e.g. headers) are
    forwarded to ``mail.send_mail``.
    """
    sender = sender or settings.get('email')['sender']
    if not sender:
        sender = "noreply@%s.appspotmail.com" % app_identity.get_application_id()
        logging.info("No sender configured, using the default one: %s" % sender)
    text_body = text_body or body
    res = mail.send_mail(
        sender=sender,
        to=recipient,
        subject=subject,
        body=text_body,
        html=body,
        reply_to=reply_to or sender,
        **kwargs)
    logging.info('Email sent to %s by %s with subject %s and result %s' % (
        recipient, sender, subject, res))
    return res
def setUp(self):
    """Build an isolated GAE testbed plus the fixtures used by the
    XmppVoiceMail tests: owner identities, a recording communications
    fixture, and a subscribed default sender."""
    # Set up Google App Engine testbed
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_app_identity_stub()
    self.contactNumber = "+16135551234"
    # Set up some handy constants
    self._APP_ID = app_identity.get_application_id()
    self.MAIL_SUFFIX = "@" + self._APP_ID + ".appspotmail.com"
    self.XMPP_SUFFIX = "@" + self._APP_ID + ".appspotchat.com"
    self.ownerPhoneNumber = "+16135554444"
    # NOTE(review): these address literals were redacted in the source.
    self.ownerJid = "*****@*****.**"
    self.ownerEmailAddress = "*****@*****.**"
    self.owner = Owner(self.ownerPhoneNumber, self.ownerJid, self.ownerEmailAddress)
    self.xmppvoicemail = XmppVoiceMail(self.owner)
    # Replace the real communications layer with a recording fixture.
    self.communications = self.xmppvoicemail._communications = CommunicationsFixture()
    # Subscribe the default sender.
    defaultSender = Contact.getDefaultSender()
    defaultSender.subscribed = True
    Contact.update(defaultSender)
def receive(self, mail_message):
    """Inbound-mail handler: forward the message's fields to the
    RipRunner email-trigger webhook via an app-identity-authenticated
    POST."""
    logging.info("Received a message from: " + mail_message.sender)
    # Only the first text/plain body part is forwarded.
    body = list(mail_message.bodies(content_type='text/plain'))[0]
    logging.info("Body of message: " + body[1].decode())
    # Point to the Main Email trigger URL which will poll for all Firehalls
    # Example replace: https://svvfd.soft-haus.com/ with the root of you installation
    url = "https://svvfd.soft-haus.com/webhooks/email_trigger_webhook.php"
    form_fields = {
        "sender": mail_message.sender,
        "subject": mail_message.subject,
        "to": mail_message.to,
        "date": mail_message.date,
        "body": body[1].decode()
    }
    form_data = urllib.urlencode(form_fields)
    # App-identity headers let the receiving endpoint authenticate us.
    GAE_APP_ID = app_identity.get_application_id()
    GAE_ACCOUNT_NAME = app_identity.get_service_account_name()
    logging.info("AppID: " + GAE_APP_ID + " SAM: " + GAE_ACCOUNT_NAME)
    result = urlfetch.fetch(url=url,
                            payload=form_data,
                            method=urlfetch.POST,
                            headers={'Content-Type': 'application/x-www-form-urlencoded',
                                     'X-RipRunner-Auth-APPID': GAE_APP_ID,
                                     'X-RipRunner-Auth-ACCOUNTNAME': GAE_ACCOUNT_NAME})
    logging.info(result.status_code)
    logging.info(result.content)
def populateContext(self):
    """Seed the base template context with request/application metadata."""
    ctx = self.context
    ctx['now'] = self.now
    ctx['request_args'] = None
    # Identify the current GAE application and its deployed version.
    ctx['gae_application_id'] = app_identity.get_application_id()
    ctx['build_version'] = os.environ['CURRENT_VERSION_ID']
def get(self):
    """Send a reminder email to each User with an email about games.

    Called every hour using a cron job.
    """
    app_id = app_identity.get_application_id()
    users = User.query(User.email != "")
    for user in users:
        active_games = Game.query(Game.user == user.key).filter(
            Game.status == 'NEW')
        # BUG FIX: an ndb Query object is always truthy, so the original
        # `if active_games:` emailed every user regardless of state.
        # Check whether the query actually returns a result.
        if active_games.get() is not None:
            subject = "'Who Says' game reminder!"
            body = "Hello {}, you have one or more unfinished" \
                   " 'Who Says' games!".format(user.name)
            mail.send_mail('noreply@{}.appspotmail.com'.format(app_id),
                           user.email, subject, body)
def get(self):
    """List stored PyScript entities on the admin scripts page."""
    stored_scripts = PyScript.query().fetch(1000)
    public_url_label = ('Script URL - <b>http://'
                        + str(app_identity.get_application_id())
                        + ".appspot.com/pub/</b>")
    columns = [
        ('codes_name', 'Script Name'),
        # ('codes_content', 'Script Content'),
        ('script_url_ext', public_url_label),
    ]
    return self.render_template('admin/listscripts.html',
                                list_columns=columns,
                                scripts=stored_scripts)
def get_project_id():
    """Gets the project ID for the current App Engine application.

    Returns:
        str: The project ID

    Raises:
        EnvironmentError: If the App Engine APIs are unavailable.
    """
    # pylint: disable=missing-raises-doc
    # Pylint rightfully thinks EnvironmentError is OSError, but doesn't
    # realize it's a valid alias.
    if app_identity is None:
        raise EnvironmentError('The App Engine APIs are not available.')
    return app_identity.get_application_id()
def fill_values(app_context, template_values, raw_answers_job):
    """Populate template_values for the raw-answers analytics page.

    Flags whether job results exist and whether the student count is
    small enough to permit interactive download, then records course and
    deployment identifiers for the template.
    """
    results = jobs.MapReduceJob.get_results(raw_answers_job)
    if not results:
        template_values['any_results'] = False
    else:
        template_values['any_results'] = True
        template_values['max_interactive_download_size'] = (
            RawAnswersDataSource.MAX_INTERACTIVE_DOWNLOAD_SIZE)
        # Results arrive as (key, value) pairs; index them for lookup.
        results = {k: v for k, v in results}
        template_values['interactive_download_allowed'] = (
            results[RawAnswersGenerator.TOTAL_STUDENTS] <=
            RawAnswersDataSource.MAX_INTERACTIVE_DOWNLOAD_SIZE)
    # NOTE(review): the collapsed source does not show whether the
    # following assignments were inside the else branch — confirm.
    template_values['course_slug'] = app_context.get_slug()
    template_values['app_id'] = (app_identity.get_application_id())
    template_values['hostname'] = (
        app_identity.get_default_version_hostname())
def get(self):
    """Send a reminder email to each User with an email about games.
    Called every hour using a cron job"""
    app_id = app_identity.get_application_id()
    for user in User.query(User.email != None):
        unfinished = Game.query(Game.user == user.key,
                                Game.game_over == False).fetch()
        if unfinished:
            # This will send test emails, the arguments to send_mail are:
            # from, to, subject, body
            mail.send_mail(
                'noreply@{}.appspotmail.com'.format(app_id),
                user.email,
                'Unfinished game!',
                'Hello {}, remember to finish your game of Hangman!'.format(
                    user.name))
def post(self):
    """Email a turn/message notification built from POSTed form fields
    (user_name, user_email, game_key, message)."""
    user_name = self.request.get('user_name')
    user_email = self.request.get('user_email')
    game_key = self.request.get('game_key')
    message = self.request.get('message')
    sender = 'noreply@{}.appspotmail.com'.format(
        app_identity.get_application_id())
    body = ('Hello {}, you have a message regarding your Battleship game '
            'with id {}!'.format(user_name, game_key)
            + '\n \n' + message + '\n \n Good luck!')
    # This will send test emails, the arguments to send_mail are:
    # from, to, subject, body
    mail.send_mail(sender, user_email, 'Your turn!', body)
def _get_settings_with_defaults():
    """Returns (rev, cfg) where cfg is a parsed SettingsCfg message.

    If config does not exist, returns (None, <cfg with defaults>).
    The config is cached in the datastore.
    """
    rev, cfg = _get_settings()
    if not cfg:
        cfg = config_pb2.SettingsCfg()
    # Fill in every unset field with its default value.
    if not cfg.default_expiration:
        cfg.default_expiration = 30 * 24 * 60 * 60  # 30 days, in seconds.
    if not cfg.sharding_letters:
        cfg.sharding_letters = 4
    if not cfg.gs_bucket:
        cfg.gs_bucket = app_identity.get_application_id()
    if not cfg.auth.full_access_group:
        cfg.auth.full_access_group = 'administrators'
    if not cfg.auth.readonly_access_group:
        cfg.auth.readonly_access_group = 'administrators'
    return rev, cfg
def get(self):
    """Send a reminder email to each User with at least 1 incomplete game.

    Called every 24 hours using a cron job.
    """
    sender = 'noreply@{}.appspotmail.com'.format(
        app_identity.get_application_id())
    for user in User.query(User.email != None):
        open_games = Game.query(Game.user == user.key).filter(
            Game.game_over == False)
        if open_games.count() > 0:
            subject = 'This is a reminder to prevent an execution!'
            body = 'Hello {}, please complete your Hangman game!'.format(
                user.name)
            mail.send_mail(sender, user.email, subject, body)
def post(self):
    """Send an HTML email with optional GCS attachments and inline images.

    Expects a JSON request body with keys: from, to, subject, body_header,
    body_footer, attach_list (GCS paths) and img_list (GCS paths).
    Requires a valid 'apikey' header. Responds 401 on a bad key, 400 on any
    processing error, 200 on success.
    """
    if self.request.headers['apikey'] != config.authkey:
        self.response.set_status(401)
        return
    try:
        req = json.loads(self.request.body)
        # Prepare sender/recipient and body fragments.
        sender = "{} <no_reply@{}.appspotmail.com>".format(
            req.get("from"), app_identity.get_application_id())
        receiver = req.get("to")
        subject = req.get("subject")
        body_header = req.get("body_header")
        body_footer = req.get("body_footer")
        attach_list = []
        body_img = "<br>"
        # Plain file attachments, read from GCS.
        if len(req.get("attach_list")) > 0:
            for path in req.get("attach_list"):
                blob = gcs.open(path)
                attach_list.append(
                    mail.Attachment(path.split("/")[-1], blob.read()))
        # Inline images: attach with a content id and reference it in the
        # HTML body. (Was zip(range(len(...)), ...); enumerate is clearer.)
        if len(req.get("img_list")) > 0:
            for i, path in enumerate(req.get("img_list")):
                img_file = gcs.open(path)
                attach_list.append(mail.Attachment(
                    path.split("/")[-1], img_file.read(),
                    content_id='<image{}>'.format(i)))
                body_img += '''<img src="cid:image{}" alt="image{}"><br>'''.format(
                    i, i)
        # Assemble and send the message.
        message = mail.EmailMessage(sender=sender, subject=subject)
        message.to = receiver
        message.html = '''<html><pre>%s<br>%s<br>%s</pre></html>''' % (
            body_header, body_img, body_footer)
        if len(attach_list) > 0:
            message.attachments = attach_list
        message.send()
        self.response.set_status(200)
    except Exception:
        # Malformed JSON, missing keys, or GCS failures all map to 400.
        self.response.set_status(400)
def report_event(self, event_type, room_id=None, time_ms=None,
                 client_time_ms=None, host=None):
    """Report an event to BigQuery.

    Args:
        event_type: One of analytics.EventType.
        room_id: Room ID related to the given event type.
        time_ms: Server-side event time in ms; populated with the current
            time if not given explicitly.
        client_time_ms: Client-side event time in ms, if the event
            originated on the client.
        host: Hostname this is being logged on.
    """
    # Be forgiving: strings and unknown numbers are logged as their
    # string value rather than rejected.
    type_name = EventType.Name.get(event_type, str(event_type))
    event = {LogField.EVENT_TYPE: type_name}
    if room_id is not None:
        event[LogField.ROOM_ID] = room_id
    if client_time_ms is not None:
        event[LogField.CLIENT_TIMESTAMP] = self._timestamp_from_millis(
            client_time_ms)
    if host is not None:
        event[LogField.HOST] = host
    if time_ms is None:
        time_ms = time.time() * 1000.
    event[LogField.TIMESTAMP] = self._timestamp_from_millis(time_ms)
    payload = {'rows': [{'json': event}]}
    logging.info('Event: %s', payload)
    # Only write to BigQuery when a client has been configured.
    if self.bigquery is not None:
        insert_response = self.bigquery.tabledata().insertAll(
            projectId=app_identity.get_application_id(),
            datasetId=self.bigquery_dataset,
            tableId=self.bigquery_table,
            body=payload).execute()
        logging.info('BigQuery response: %s', insert_response)
def get(self):
    """Kick off a managed Datastore export to GCS via the Admin API.

    The destination is gs://GCS_BUCKET/<timestamp>; repeatable 'kind' and
    'namespace_id' query params narrow the entity filter.
    """
    access_token, _ = app_identity.get_access_token(
        'https://www.googleapis.com/auth/datastore')
    app_id = app_identity.get_application_id()
    timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
    output_url_prefix = "gs://" + GCS_BUCKET
    # We deliberately do not take the destination from a URL parameter:
    # output_url_prefix = self.request.get('output_url_prefix')
    assert output_url_prefix and output_url_prefix.startswith('gs://')
    if '/' not in output_url_prefix[5:]:
        # Bare bucket name: append a timestamped prefix with a separator.
        output_url_prefix += '/' + timestamp
    else:
        output_url_prefix += timestamp
    entity_filter = {
        'kinds': self.request.get_all('kind'),
        'namespace_ids': self.request.get_all('namespace_id'),
    }
    payload = json.dumps({
        'project_id': app_id,
        'output_url_prefix': output_url_prefix,
        'entity_filter': entity_filter,
    })
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + access_token,
    }
    url = 'https://datastore.googleapis.com/v1/projects/%s:export' % app_id
    try:
        result = urlfetch.fetch(url=url,
                                payload=payload,
                                method=urlfetch.POST,
                                deadline=60,
                                headers=headers)
        # Mirror the API's status into our response and log accordingly.
        if result.status_code == httplib.OK:
            logging.info(result.content)
        elif result.status_code >= 500:
            logging.error(result.content)
        else:
            logging.warning(result.content)
        self.response.status_int = result.status_code
    except urlfetch.Error:
        logging.exception('Failed to initiate export.')
        self.response.status_int = httplib.INTERNAL_SERVER_ERROR
def save_bqrecord(self, pk_id, bqrecord, bqtable, w_dao, w_session, project_id=None):
    """Save the BQRecord object into the bigquery_sync table.

    :param pk_id: primary key id value from source table.
    :param bqrecord: BQRecord object.
    :param bqtable: BQTable object.
    :param w_dao: Writable BigQuerySyncDao object
    :param w_session: Session from a writable BigQuerySyncDao object
    :param project_id: Project ID override value.
    """
    if not w_dao or not w_session:
        raise ValueError('Invalid BigQuerySyncDao dao or session argument.')
    # Honor an explicit project override; otherwise use the App Engine app
    # id, falling back to 'localhost' when running outside of GAE.
    cur_id = project_id
    if not cur_id:
        cur_id = 'localhost'
        try:
            from google.appengine.api import app_identity
            cur_id = app_identity.get_application_id()
            if not cur_id or cur_id == 'None':
                cur_id = 'localhost'
        except (ImportError, AttributeError):
            pass
    for project_id, dataset_id, table_id in bqtable.get_project_map(cur_id):
        # A None dataset means records should not be saved for this project.
        if dataset_id is None:
            continue
        # Re-use the existing sync row's id if this record was saved before.
        existing = w_session.query(BigQuerySync.id).filter(
            BigQuerySync.pk_id == pk_id,
            BigQuerySync.projectId == project_id,
            BigQuerySync.datasetId == dataset_id,
            BigQuerySync.tableId == table_id).first()
        bqs = BigQuerySync()
        bqs.id = existing.id if existing else None
        bqs.pk_id = pk_id
        bqs.projectId = project_id
        bqs.datasetId = dataset_id
        bqs.tableId = table_id
        bqs.resource = bqrecord.to_dict(serialize=True)
        w_dao.upsert_with_session(w_session, bqs)
def export_tables(database, tables, directory, deidentify):
    """Export the given tables from the given DB; deidentifying if requested.

    A deidentified request outputs exports into a different bucket which may
    have less restrictive ACLs than the other export buckets; for this reason
    the tables for these requests are also more restrictive. Deidentification
    also obfuscates participant IDs, as these are known by other systems
    (e.g. HealthPro, PTC). Currently this ID obfuscation is not reversible and
    is not stable across separate exports (note: it is stable across multiple
    tables in a single export request).
    """
    app_id = app_identity.get_application_id()
    # Pick the destination GCS bucket from the environment and database.
    if app_id == 'None':
        bucket_name = app_identity.get_default_gcs_bucket_name()
    elif deidentify:
        bucket_name = '%s-deidentified-export' % app_id
    elif database == 'rdr':
        bucket_name = '%s-rdr-export' % app_id
    elif database in ['cdm', 'voc']:
        bucket_name = '%s-cdm' % app_id
    else:
        raise BadRequest("Invalid database: %s" % database)
    # Validate every table name before starting any work.
    for table_name in tables:
        if not _TABLE_PATTERN.match(table_name):
            raise BadRequest("Invalid table name: %s" % table_name)
    deidentify_salt = None
    if deidentify:
        if database not in _DEIDENTIFY_DB_TABLE_WHITELIST:
            raise BadRequest("deidentified exports are only supported for database: {}".format(
                _DEIDENTIFY_DB_TABLE_WHITELIST.keys()))
        requested = set(tables)
        allowed = _DEIDENTIFY_DB_TABLE_WHITELIST[database]
        if not requested.issubset(allowed):
            raise BadRequest("deidentified exports are unsupported for tables:"
                             "[{}] (must be in [{}])".format(
                                 ', '.join(requested - allowed),
                                 ', '.join(allowed)))
        # This salt must be identical across all tables exported, otherwise
        # the exported participant IDs will not be consistent. Used with
        # sha1, so ensure this value isn't too short.
        deidentify_salt = str(random.getrandbits(256)).encode('utf-8')
    for table_name in tables:
        deferred.defer(TableExporter._export_csv, bucket_name, database,
                       directory, deidentify_salt, table_name)
    return {'destination': 'gs://%s/%s' % (bucket_name, directory)}
def bootstrap(paths, global_env=None, filters=None):
    """Resets cached Jinja2 env to pick up new template paths.

    This is purely additive and idempotent. So consecutive calls to this
    functions with different arguments is fine.

    Args:
      paths: dict {prefix -> template_dir}, templates under template_dir
          would be accessible as <prefix>/<path relative to template_dir>.
      global_env: dict with variables to add to global template environment.
      filters: dict with filters to add to global filter list.
    """
    def additive(registry, new_items):
        # True iff new_items don't conflict with already-registered values.
        return all(registry.get(k, v) == v for k, v in new_items.items())

    assert isinstance(paths, dict), paths
    assert additive(_TEMPLATE_PATHS, paths), paths
    assert all(os.path.isabs(p) for p in paths.values()), paths
    assert all(os.path.isdir(p) for p in paths.values()), paths
    if global_env is not None:
        assert isinstance(global_env, dict), global_env
        assert all(isinstance(k, str) for k in global_env), global_env
        assert additive(_GLOBAL_ENV, global_env), global_env
    if filters is not None:
        assert isinstance(filters, dict), filters
        assert all(
            isinstance(k, str) and callable(v)
            for k, v in filters.items()), filters
        assert additive(_GLOBAL_FILTERS, filters), filters
    _TEMPLATE_PATHS.update(paths)
    if global_env:
        _GLOBAL_ENV.update(global_env)
    # These are immutable once set.
    _GLOBAL_ENV.setdefault('app_id', app_identity.get_application_id())
    _GLOBAL_ENV.setdefault('app_version', utils.get_app_version())
    _GLOBAL_ENV.setdefault('app_revision_url', utils.get_app_revision_url())
    if filters:
        _GLOBAL_FILTERS.update(filters)
    # Drop the cached env so the next access rebuilds it with the new state.
    utils.clear_cache(get_jinja_env)
def get(self):
    """Runs everyday; finds open games last modified more than 72 hours
    ago and sends each game owner a reminder email."""
    cutoff = datetime.now() - timedelta(days=3)
    app_name = app_identity.get_application_id()
    subject = "You have had an open game for 3 days now!"
    stale_games = Game.query(Game.over == False).filter(
        Game.modified < cutoff)
    for game in stale_games:
        user = game.user_name.get()
        if not user.email_address:
            continue
        body = "Hello {}, you have an open game! Please come back and play!".\
            format(user.user_name)
        mail.send_mail('noreply@{}.appspotmail.com'.format(app_name),
                       user.email_address, subject, body)
def post(self):
    """Reconstruct a file from uploaded shares and mail it to the requester.

    On any failure, redirects back to /decrypt with an error flag instead
    of surfacing a crash.
    """
    try:
        files = self.request.POST.getall('raw_file')
        name = self.request.get('name')
        email = self.request.get('email')
        query_params = {
            'decrypt': True,
        }
        self.redirect('/success?' + urllib.urlencode(query_params))
        # Feed every uploaded share into the decoder, then decrypt and
        # reconstruct the original data.
        decoder = algo.CombinedShare()
        for f in files:
            decoder.addNewShare(f.file.read())
        original_data, meta = decoder.decryptAndReconstruct()
        original_data = ''.join(original_data)
        file_name = str(meta.FileName)
        mail.send_mail(
            sender='{}@ece6102assignment4.appspotmail.com'.format(
                app_identity.get_application_id()),
            to=email,
            subject="Reconstructed file",
            body="""
 The reconstructed file is in the attachment!
""",
            attachments=[(file_name, original_data)])
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt and hid the root cause; log it for diagnosis.
        logging.exception('File reconstruction failed.')
        query_params = {
            'except': True,
        }
        self.redirect('/decrypt?' + urllib.urlencode(query_params))
def _create_table_if_needed(self, bigquery, dataset_id, table_id):
    """Create a new table if needed."""
    project_id = app_identity.get_application_id()
    body = {
        'tableReference': {
            'datasetId': dataset_id,
            'projectId': project_id,
            'tableId': table_id,
        },
        # Partition the table by day.
        'timePartitioning': {'type': 'DAY'},
    }
    insert_request = bigquery.tables().insert(
        projectId=project_id, datasetId=dataset_id, body=body)
    return self._execute_insert_request(insert_request)
class CloudConfig(ndb.Model):
    """Datastore-backed configuration for cloud resources.

    NOTE: all property defaults below are evaluated once, at module
    import time, not per entity.
    """
    # GCS path the control files live under.
    control_bucket_path = ndb.StringProperty(
        default=_DEFAULT_CONTROL_BUCKET_PATH)
    # Plain http against the local dev server; https elsewhere.
    setup_scheme = 'http' if _is_devserver() else 'https'
    # Default corpus URL points at this app's default version hostname.
    default_corpus = ndb.StringProperty(
        default='%s://%s' % (setup_scheme,
                             app_identity.get_default_version_hostname()))
    urlfetch_service_id = ndb.StringProperty(default='')
    # GCE project defaults to the current App Engine application id.
    gce_project_name = ndb.StringProperty(
        default=app_identity.get_application_id())
    gce_source_disk_image = ndb.StringProperty(
        default=_DEFAULT_SOURCE_DISK_IMAGE)
    gce_zone = ndb.StringProperty(default=_GCE_DEFAULT_ZONE)
    gce_machine_type = ndb.StringProperty(default=_GCE_DEFAULT_MACHINE_TYPE)
    # Traces upload under the app's default GCS bucket.
    trace_upload_bucket = ndb.StringProperty(
        default='%s/traces' % app_identity.get_default_gcs_bucket_name())
    catapult_path = ndb.StringProperty(default=_DEFAULT_CATAPULT_PATH)
def get(self):
    """Send a reminder email to each User with an email about games.

    Called every day using a cron job.
    """
    app_id = app_identity.get_application_id()
    sender = 'noreply@{}.appspotmail.com'.format(app_id)
    for user in User.query(User.email != None):
        # A user may be player one or player two in any game.
        in_progress = Game.query(
            ndb.OR(Game.user1 == user.key, Game.user2 == user.key)).filter(
                Game.game_over == False)
        if in_progress.count() > 0:
            subject = 'Reminder for you!'
            body = 'Hello {}, you have games in progress'.format(user.name)
            mail.send_mail(sender, user.email, subject, body)
def comment_worker():
    # When a new comment is added, send a mail notification to the post's
    # author (and to the parent commenter when the comment is a reply).
    postid = int(request.values.get('postid'))
    commentid = int(request.values.get('commentid'))
    replyto = request.values.get('replyto')
    post = Post.getpost(postid)
    comment = Comment.get_by_id(commentid)
    if post is None or comment is None:
        # Referenced entities are gone; nothing to notify about.
        return
    appid = app_identity.get_application_id()
    # NOTE(review): this keeps only the part of the app id before '~';
    # for partitioned ids like 's~myapp' that yields 's' — confirm intended.
    idx = appid.find('~')
    if idx > 0:
        appid = appid[0:idx]
    sender = "robot@%s.appspotmail.com" % appid
    if replyto:
        # The comment is a reply: also notify the parent commenter.
        pcomment = Comment.getcomment(int(replyto))
        if pcomment.email:
            # Subject/body are user-facing Chinese strings ("your comment
            # got a new reply"); left byte-identical.
            body = u"""%s 回复了你对 <a href="%s" target="_blank"/>「%s」</a>发表的评论: <br /> <br /> %s<br /> < hr/> %s""" \
                % (comment.username, "http://" + Config()["host"], post.title,
                   pcomment.content, comment.content)
            try:
                message = mail.EmailMessage(sender=sender,
                                            subject=u"你的评论有了新回复")
                message.to = pcomment.email
                message.html = body
                message.send()
            except Exception as e:
                # Mail failure must not fail the task; log and continue.
                logging.error("Send email to %s failed: %s"
                              % (pcomment.email, str(e)))
    # Always notify the post author ("your post got a new comment").
    body = u"""%s 对<a href="%s" target="_blank"/>「%s」</a>发表了评论: <br /> <br /> %s""" % (
        comment.username, "http://" + Config()["host"], post.title,
        comment.content)
    try:
        message = mail.EmailMessage(sender=sender,
                                    subject=u"你发表的文章有了新的评论")
        message.to = post.author.email()
        message.html = body
        message.send()
    except Exception as e:
        logging.error("Send email to %s failed: %s"
                      % (post.author.email(), str(e)))
    # Task queue workers must return 200 so the task is not retried.
    return make_response('', 200)
def bind_tabs(cls):
    """Register the admin dashboard tabs for this action."""
    def bind(key, label, handler, href=None):
        # External links open in a new window; internal tabs route through
        # the admin action URL.
        if href:
            target = '_blank'
        else:
            href = 'admin?action=admin&tab=%s' % key
            target = None
        tabs.Registry.register(cls.ACTION, key, label, contents=handler,
                               href=href, target=target)

    bind('courses', 'Courses', cls.get_courses)
    bind('settings', 'Site Settings', cls.get_settings)
    bind('perf', 'Metrics', cls.get_perf)
    bind('deployment', 'Deployment', cls.get_deployment)
    # Optional tabs, depending on deployment configuration.
    if DIRECT_CODE_EXECUTION_UI_ENABLED:
        bind('console', 'Console', cls.get_console)
    if appengine_config.gcb_appstats_enabled():
        bind('stats', 'Appstats', None, href='/admin/stats/')
    if appengine_config.PRODUCTION_MODE:
        app_id = app.get_application_id()
        bind('gae', 'Google App Engine', None,
             href=('https://appengine.google.com/'
                   'dashboard?app_id=s~%s' % app_id))
    else:
        bind('gae', 'Google App Engine', None,
             href='http://localhost:8000/')
    bind('welcome', 'Welcome', None, href='/admin/welcome')
    bind('help', 'Site Help', None,
         href='https://code.google.com/p/course-builder/wiki/AdminPage')
    bind('news', 'News', None,
         href=('https://groups.google.com/forum/'
               '?fromgroups#!forum/course-builder-announce'))
def load_pii_csv(hpo_id, pii_table_name, source_folder_prefix=""):
    """Load PII file from a bucket into a table in bigquery

    :param hpo_id: ID for the HPO site
    :param pii_table_name: name of the CDM table
    :return: an object describing the associated bigquery job
    """
    if pii_table_name not in common.PII_TABLES:
        raise ValueError(
            '{} is not a valid table to load'.format(pii_table_name))
    bucket = gcs_utils.get_hpo_bucket(hpo_id)
    gcs_object_path = 'gs://%s/%s%s.csv' % (
        bucket, source_folder_prefix, pii_table_name)
    # The field schema for the table lives beside the code as JSON.
    fields_filename = os.path.join(
        resources.fields_path, pii_table_name + '.json')
    return load_csv(fields_filename, gcs_object_path,
                    app_identity.get_application_id(), get_dataset_id(),
                    get_table_id(hpo_id, pii_table_name))
def __init__(self):
    """Detect the runtime environment and set configuration constants.

    Sets the class-level flags Config.is_dev / Config.is_testing /
    Config.is_prod as a side effect; the probing order matters (dev
    server check first, then app id, then prod by elimination).
    """
    try:
        # Check if we are running on the local dev server.
        software = os.environ["SERVER_SOFTWARE"]
        # "testbed" in SERVER_SOFTWARE means unit tests, not the dev server.
        Config.is_dev = software.startswith(
            "Dev") and "testbed" not in software
    except KeyError:
        # SERVER_SOFTWARE unset: leave is_dev at its prior value.
        pass
    try:
        self.APP_NAME = app_identity.get_application_id()
    except AttributeError:
        # We're calling code outside of GAE, so we must be testing.
        self.APP_NAME = "testbed-test"
    if self.APP_NAME == "testbed-test":
        Config.is_testing = True
    # Production is whatever is neither dev nor testing.
    Config.is_prod = not (Config.is_dev or Config.is_testing)

    # The URL of the signup app.
    self.SIGNUP_URL = "https://hd-signup-hrd.appspot.com"
    # The minimum amount of time that must be left between consecutive
    # events, in minutes.
    self.MIN_EVENT_SPACING = 30
    # The maximum amount of future events a single user can have scheduled.
    self.USER_MAX_FUTURE_EVENTS = 10
    # The maximum number of events a single user can have within a
    # four-week period.
    self.USER_MAX_FOUR_WEEKS = 6
    # How long we have to wait after we sign up before we can create an
    # event. (days)
    self.NEW_EVENT_WAIT_PERIOD = 0
    # How long we will keep an event on hold when a user is suspended
    # before it expires. (days)
    self.SUSPENDED_EVENT_EXPIRY = 72
    # The hours that we want to have only one event during. (24-hour time.)
    self.EVENT_HOURS = (9, 17)

    if Config.is_testing:
        logging.debug("Is testing.")
    elif Config.is_dev:
        logging.debug("Is dev server.")
    else:
        logging.debug("Is production server.")
def do(self):
    """Kick off a managed Datastore export (backup) via the Admin API.

    Reads the repeatable 'kind' parameter and the 'bucket' parameter from
    the request; raises if either is missing or the export call fails.
    """
    # BUG FIX: the original line ended with a stray comma --
    #   kinds = self.request.get_all('kind'),
    # -- which wrapped the list in a 1-tuple. The tuple is always truthy,
    # so the emptiness check below never fired, and the entity filter was
    # sent a nested tuple instead of a list of kinds.
    kinds = self.request.get_all('kind')
    if not kinds:
        raise Exception("Backup handler requires kinds.")
    bucket = self.request.get('bucket', None)
    if not bucket:
        raise Exception("Backup handler requires bucket.")
    access_token, _ = app_identity.get_access_token(
        'https://www.googleapis.com/auth/datastore')
    app_id = app_identity.get_application_id()
    entity_filter = {
        'kinds': kinds,
        'namespace_ids': self.request.get_all('namespace_id')
    }
    request = {
        'project_id': app_id,
        'output_url_prefix': 'gs://{}'.format(bucket),
        'entity_filter': entity_filter
    }
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + access_token
    }
    url = 'https://datastore.googleapis.com/v1/projects/{}:export'.format(
        app_id)
    try:
        result = urlfetch.fetch(url=url,
                                payload=json.dumps(request),
                                method=urlfetch.POST,
                                deadline=60,
                                headers=headers)
        # Mirror the API's status into our response and log accordingly.
        if result.status_code == httplib.OK:
            logging.info(result.content)
        elif result.status_code >= 500:
            logging.error(result.content)
        else:
            logging.warning(result.content)
        self.response.status_int = result.status_code
    except urlfetch.Error:
        raise Exception('Failed to initiate export.')
    self.response.write("Export initiated.")
def delete_table(table_id, dataset_id=None):
    """
    Delete bigquery table by id

    Note: This will throw `HttpError` if the table doesn't exist. Use `table_exists` prior if necessary.
    :param table_id: id of the table
    :param dataset_id: id of the dataset (EHR dataset by default)
    :raises ValueError: if table_id names a protected vocabulary table
    :return: the result of executing the delete job
    """
    # Guard with an explicit exception instead of `assert`, which is
    # stripped when Python runs with -O and would then silently allow
    # deleting vocabulary tables. ValueError matches the validation style
    # of sibling helpers (e.g. load_pii_csv).
    if table_id in common.VOCABULARY_TABLES:
        raise ValueError(
            'Refusing to delete vocabulary table {}'.format(table_id))
    app_id = app_identity.get_application_id()
    if dataset_id is None:
        dataset_id = get_dataset_id()
    bq_service = create_service()
    delete_job = bq_service.tables().delete(projectId=app_id,
                                            datasetId=dataset_id,
                                            tableId=table_id)
    logging.debug('Deleting {dataset_id}.{table_id}'.format(
        dataset_id=dataset_id, table_id=table_id))
    return delete_job.execute(num_retries=bq_consts.BQ_DEFAULT_RETRY_COUNT)
def get(self):
    """Send a reminder email to all users with at least one active game.

    The handler is called every 24 hours using a cron job.
    """
    app_id = app_identity.get_application_id()
    reminder = ('\n\nIt looks like you started a Pelmanism game, '
                'but haven\'t finished. Come back and find some matches!')
    for user in User.query(User.email != None):
        for game in Game.query(Game.user == user.key).fetch():
            if game.game_over or game.cancelled:
                continue
            subject = 'Where did you go?'
            body = 'Hi, {}!'.format(user.name) + reminder
            mail.send_mail('noreply@{}.appspotmail.com'.format(app_id),
                           user.email, subject, body)
            # One reminder per user is enough.
            break
def app_id(self):
    """
    Get the Google App Engine app id

    :return: The app id
    :rtype: str
    """
    if not self.is_gae():
        raise RuntimeError(
            'Not running in Google App Engine environment while fetching app_id.'
        )
    # Lazily compute and cache the id on first access; app_identity is
    # only importable inside the GAE runtime.
    try:
        return self._app_id
    except AttributeError:
        from google.appengine.api import app_identity
        self._app_id = app_identity.get_application_id()
        return self._app_id
def clean_dataset(project=None, dataset=None, statements=None):
    """Run each cleaning-rule query against project.dataset.

    :param project: BigQuery project id; defaults to the current app id.
    :param dataset: id of the dataset to clean.
    :param statements: iterable of query templates containing {project}
        and {dataset} placeholders.
    :raises bq_utils.BigQueryJobWaitError: if a submitted job never
        finishes (this aborts the remaining statements).
    """
    if project is None or project == '' or project.isspace():
        project = app_identity.get_application_id()
        LOGGER.debug('Project name not provided. Using default.')
    if statements is None:
        statements = []
    failures = 0
    successes = 0
    for statement in statements:
        rule_query = statement.format(project=project, dataset=dataset)
        try:
            LOGGER.info("Running query %s", rule_query)
            results = bq_utils.query(rule_query)
        except (oauth2client.client.HttpAccessTokenRefreshError,
                googleapiclient.errors.HttpError):
            LOGGER.exception("FAILED: Clean rule not executed:\n%s",
                             rule_query)
            failures += 1
            continue
        # Wait for the submitted job to finish before moving on.
        # (Dropped a duplicate "Executing query" log that fired after the
        # query had already been submitted.)
        query_job_id = results['jobReference']['jobId']
        incomplete_jobs = bq_utils.wait_on_jobs([query_job_id])
        if incomplete_jobs:
            failures += 1
            # NOTE: raising here skips the remaining statements and the
            # summary logging below (pre-existing behavior).
            raise bq_utils.BigQueryJobWaitError(incomplete_jobs)
        successes += 1
    if successes > 0:
        LOGGER.info("Successfully applied %d clean rules for %s.%s",
                    successes, project, dataset)
    else:
        LOGGER.warning("No clean rules successfully applied to %s.%s",
                       project, dataset)
    if failures > 0:
        # Log via LOGGER instead of a bare print() so failures land in the
        # application log with everything else.
        LOGGER.warning("Failed to apply %d clean rules for %s.%s",
                       failures, project, dataset)
def post(self):
    """Turn notification reminder email.

    Will be called from a taskqueue task carrying 'user_id' (urlsafe key)
    and 'game_id' parameters.
    """
    app_id = app_identity.get_application_id()
    # Fetch the user entity once instead of calling key.get() twice
    # (the original resolved the key for the name and again for the email).
    user = ndb.Key(urlsafe=self.request.get('user_id')).get()
    subject = 'This is a reminder!'
    body = ('Hello {}, it is your turn. Please complete your '
            'Tic Tac Toe with gameid = {} !'.format(
                user.name, self.request.get('game_id')))
    # Arguments to send_mail are: from, to, subject, body.
    mail.send_mail('noreply@{}.appspotmail.com'.format(app_id),
                   user.email, subject, body)
def is_config_admin(user_email):
    """Return True iff user_email is the configured admin for this app."""
    app_id = app_identity.get_application_id()
    # Allow clients to simulate an unauthenticated request (for testing)
    # because we haven't found another way to create an unauthenticated
    # request when using dev_appserver. Client tests that verify rejection
    # of unauthenticated requests helpfully add this header. The
    # app_id == "None" check confines this escape hatch to dev_appserver.
    if app_id == "None" and request.headers.get('unauthenticated'):
        user_email = None
    if not user_email:
        return False
    config_admin = CONFIG_ADMIN_MAP.get(
        app_id, 'configurator@{}.iam.gserviceaccount.com'.format(app_id))
    return user_email == config_admin