def get(self):  # pylint: disable=g-bad-name
    """Cron entry point: enqueues one daily Datastore backup task.

    No-ops outside of prod, and no-ops if a backup already ran today.
    """
    # Backups are restricted to the production environment.
    if not env_utils.RunningInProd():
        logging.info('Datastore backups are only run in prod')
        return

    # At most one backup per calendar day.
    if _DailyBackupExists():
        logging.info('A backup was already performed today.')
        return

    # Back up every kind except internal ones (leading underscore).
    kind_names = [
        kind for kind in metadata.get_kinds() if not kind.startswith('_')]

    # One Cloud Storage subdirectory per day.
    bucket_path = '%s/%s' % (
        settings.ENV.DATASTORE_BACKUP_BUCKET, _CreateDayString())

    task_params = {
        'kind': kind_names,
        'name': _BACKUP_PREFIX + '_',  # Date suffix is automatically added.
        'filesystem': 'gs',
        'gs_bucket_name': bucket_path,
        'queue': constants.TASK_QUEUE.BACKUP,
    }

    # Dump the backup onto a task queue. Don't worry about catching Exceptions,
    # anything that gets raised will be dealt with in UpvoteRequestHandler and
    # reported as a 500.
    logging.info('Starting a new Datastore backup')
    taskqueue.add(
        url='/_ah/datastore_admin/backup.create',
        params=task_params,
        target='ah-builtin-python-bundle',
        queue_name=constants.TASK_QUEUE.BACKUP)

    _DATASTORE_BACKUPS.Increment()
def dispatch(self):
    """Gates request dispatch on export permissions.

    Requests proceed when either (a) this is a cron-triggered export running
    in prod (no requesting user), or (b) the requesting user holds the manual
    export permission. Everything else is rejected with a 403.
    """
    gae_user = users.get_current_user()
    logging.info('Request initiated by %s',
                 gae_user.email() if gae_user else 'cron')

    # Cron-triggered exports will not have a requesting user. Only allow these
    # in prod.
    is_prod_cron = env_utils.RunningInProd() and not gae_user
    logging.info('This is%s an automatic production export',
                 '' if is_prod_cron else ' not')

    # If there is a requesting user, only proceed if the user has manual export
    # permissions.
    has_manual_permission = False
    if gae_user:
        upvote_user = user_models.User.GetOrInsert(appengine_user=gae_user)
        has_manual_permission = upvote_user.HasPermissionTo(
            constants.PERMISSIONS.TRIGGER_MANUAL_DATA_EXPORT)
        logging.info('User %s does%s have permission to perform a manual export',
                     gae_user.email(),
                     '' if has_manual_permission else ' not')

    if is_prod_cron or has_manual_permission:
        super(BaseHandler, self).dispatch()
    else:
        self.abort(httplib.FORBIDDEN)
def handle_exception(self, exception, unused_debug_mode):
    """Handle any uncaught exceptions.

    Args:
      exception: The exception that was thrown.
      unused_debug_mode: True if the application is running in debug mode.
    """
    # Default to a 500.
    status_code = httplib.INTERNAL_SERVER_ERROR

    # Calls to abort() raise a child class of HTTPException, so extract the
    # HTTP status and explanation if possible.
    if isinstance(exception, webapp2.HTTPException):
        status_code = getattr(exception, 'code', httplib.INTERNAL_SERVER_ERROR)

        # Write out the exception's explanation to the response body
        # NOTE(review): assumes the body write belongs to the HTTPException
        # branch only — original indentation was lost; confirm upstream.
        self.response.write(_HtmlEscape(str(exception)))

    # If the RequestHandler has a corresponding request counter, increment it.
    if self.RequestCounter is not None:
        self.RequestCounter.Increment(status_code)

    # If the exception occurs within a unit test, make sure the stacktrace is
    # easily discerned from the console.
    if not env_utils.RunningInProd():
        exc_type, exc_value, exc_traceback = sys.exc_info()
        traceback.print_exception(exc_type, exc_value, exc_traceback)

    # Set the response code and log the exception regardless.
    self.response.set_status(status_code)
    logging.exception(exception)
def get(self):  # pylint: disable=g-bad-name
    """Cron entry point: triggers a managed Datastore export to GCS.

    Issues a projects:export call against the Datastore Admin REST API,
    covering all non-internal kinds, into a per-day Cloud Storage prefix.
    """
    # Backups are restricted to the production environment.
    if not env_utils.RunningInProd():
        logging.info('Datastore backups are only run in prod')
        return

    logging.info('Starting a new Datastore backup')

    # Authenticate to the Datastore Admin API as the app's service account.
    access_token, _ = app_identity.get_access_token(
        'https://www.googleapis.com/auth/datastore')
    project_id = app_identity.get_application_id()

    # Configure a backup of all Datastore kinds, stored in a separate Cloud
    # Storage bucket for each day.
    gcs_prefix = 'gs://%s/%s/' % (
        env_utils.ENV.DATASTORE_BACKUP_BUCKET,
        datetime.datetime.utcnow().strftime('%Y_%m_%d'))

    # Export every kind except internal ones (leading underscore).
    kind_names = [
        kind for kind in metadata.get_kinds() if not kind.startswith('_')]

    request_body = {
        'project_id': project_id,
        'output_url_prefix': gcs_prefix,
        'entity_filter': {
            'kinds': kind_names
        }
    }
    request_headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + access_token
    }
    export_url = (
        'https://datastore.googleapis.com/v1/projects/%s:export' % project_id)

    logging.info('Backing up %d kind(s) to %s', len(kind_names), gcs_prefix)

    try:
        fetch_result = urlfetch.fetch(
            url=export_url,
            payload=json.dumps(request_body),
            method=urlfetch.POST,
            deadline=60,
            headers=request_headers)
        if fetch_result.status_code == httplib.OK:
            logging.info(fetch_result.content)
            _DATASTORE_BACKUPS.Increment()
        else:
            # Surface the API's failure status to the cron caller.
            logging.warning(fetch_result.content)
            self.response.status_int = fetch_result.status_code
    except urlfetch.Error:
        logging.exception('Datastore backup failed')
        self.response.status_int = httplib.INTERNAL_SERVER_ERROR
def Send(subject, body, to=None, cc=None, bcc=None, html=False):
    """Sends an email.

    Args:
      subject: The email subject.
      body: The email body.
      to: The TO address(es). Can be either a string or list of strings, and
          each string can be either a username or email address.
      cc: The CC address(es). Can be either a string or list of strings, and
          each string can be either a username or email address.
      bcc: The BCC address(es). Can be either a string or list of strings, and
          each string can be either a username or email address.
      html: Whether the body contains HTML or plain text.

    Raises:
      NoRecipientsError: if the to, cc, and bcc arguments are all empty.
    """
    message = mail.EmailMessage(
        sender=_SENDER,
        reply_to=_REPLY_TO,
        subject='%s %s' % (_SUBJECT_PREFIX, subject))

    # The body goes into exactly one of the two payload fields.
    if html:
        message.html = body
    else:
        message.body = body

    # Normalize each recipient argument into a list of email addresses.
    to_addrs = _SanitizeAddrs(to)
    cc_addrs = _SanitizeAddrs(cc)
    bcc_addrs = _SanitizeAddrs(bcc)

    # EmailMessage rejects empty recipient fields, so only set non-empty ones.
    if to_addrs:
        message.to = to_addrs
    if cc_addrs:
        message.cc = cc_addrs
    if bcc_addrs:
        message.bcc = bcc_addrs

    # Make sure we're actually sending this to someone.
    recipients = sorted(set(to_addrs + cc_addrs + bcc_addrs))
    if not recipients:
        raise NoRecipientsError

    try:
        logging.info('Sending email to %s', recipients)
        message.check_initialized()
        # Only actually deliver mail from prod.
        if env_utils.RunningInProd():
            message.send()
    # If something blows up, log it and move on. Failure to send an email is not
    # something that should take the caller off the rails.
    except Exception:  # pylint: disable=broad-except
        logging.exception('Error encountered while sending email')