def email_exception(logger, etype, evalue, tb):
    """ Send stringified exception to configured email address. """
    from inbox.server.config import config
    exc_email_addr = config.get('EXCEPTION_EMAIL_ADDRESS')
    if exc_email_addr is None:
        logger.error("No EXCEPTION_EMAIL_ADDRESS configured!")
        return
    mailgun_api_endpoint = config.get('MAILGUN_API_ENDPOINT')
    if mailgun_api_endpoint is None:
        logger.error("No MAILGUN_API_ENDPOINT configured!")
        return
    mailgun_api_key = config.get('MAILGUN_API_KEY')
    if mailgun_api_key is None:
        logger.error("No MAILGUN_API_KEY configured!")
        return
    r = requests.post(
        mailgun_api_endpoint,
        auth=("api", mailgun_api_key),
        data={"from": "Inbox App Server <{}>".format(exc_email_addr),
              "to": [exc_email_addr],
              "subject": "Uncaught error! {} {}".format(etype, evalue),
              "text": u"""
Something went wrong on {}. Please investigate. :)

{}
""".format(socket.getfqdn(),
           '\t'.join(traceback.format_exception(etype, evalue, tb)))})
    if r.status_code != requests.codes.ok:
        logger.error("Couldn't send exception email: {}".format(r.json()))
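# Hedged usage sketch (not part of the original module): one plausible way to
# wire email_exception into global exception handling is via sys.excepthook,
# which receives (etype, evalue, tb) and so matches the signature above once
# the logger is bound. The helper name below is illustrative.
import sys
from functools import partial

def install_exception_emailer(logger):
    # Every uncaught exception in this process now triggers an email report.
    sys.excepthook = partial(email_exception, logger)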
def _save_to_s3(self, data):
    assert len(data) > 0, "Need data to save!"

    # TODO: store AWS credentials in a better way.
    assert 'AWS_ACCESS_KEY_ID' in config, "Need AWS key!"
    assert 'AWS_SECRET_ACCESS_KEY' in config, "Need AWS secret!"
    assert 'MESSAGE_STORE_BUCKET_NAME' in config, \
        "Need bucket name to store message data!"

    # Boto pools connections at the class level
    conn = S3Connection(config.get('AWS_ACCESS_KEY_ID'),
                        config.get('AWS_SECRET_ACCESS_KEY'))
    bucket = conn.get_bucket(config.get('MESSAGE_STORE_BUCKET_NAME'))

    # See if it already exists and has the same hash
    data_obj = bucket.get_key(self.data_sha256)
    if data_obj:
        assert data_obj.get_metadata('data_sha256') == self.data_sha256, \
            "Block hash doesn't match what we previously stored on s3!"
        # log.info("Block already exists on S3.")
        return

    data_obj = Key(bucket)
    # if metadata:
    #     assert type(metadata) is dict
    #     for k, v in metadata.iteritems():
    #         data_obj.set_metadata(k, v)
    data_obj.set_metadata('data_sha256', self.data_sha256)
    # data_obj.content_type = self.content_type  # Experimental
    data_obj.key = self.data_sha256
    # log.info("Writing data to S3 with hash {0}".format(self.data_sha256))
    # def progress(done, total):
    #     log.info("%.2f%% done" % (done/total * 100))
    # data_obj.set_contents_from_string(data, cb=progress)
    data_obj.set_contents_from_string(data)
def engine_uri(database=None):
    """ By default doesn't include the specific database. """
    config_prefix = 'RDS' if is_prod() else 'MYSQL'

    username = config.get('{0}_USER'.format(config_prefix), None)
    assert username, "Must have database username to connect!"

    password = config.get('{0}_PASSWORD'.format(config_prefix), None)
    assert password, "Must have database password to connect!"

    host = config.get('{0}_HOSTNAME'.format(config_prefix), None)
    assert host, "Must have database hostname to connect!"

    port = config.get('{0}_PORT'.format(config_prefix), None)
    assert port, "Must have database port to connect!"

    uri_template = ('mysql://{username}:{password}@{host}:{port}/{database}'
                    '?charset=utf8mb4')

    return uri_template.format(
        username=username,
        # http://stackoverflow.com/questions/15728290/sqlalchemy-valueerror-for-slash-in-password-for-create-engine
        # (also applicable to the '+' sign)
        password=urlquote(password),
        host=host,
        port=port,
        database=database if database else '')
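# Illustrative example only (hypothetical config values, not from the original
# code): with MYSQL_USER='inbox', MYSQL_PASSWORD='pa+ss',
# MYSQL_HOSTNAME='localhost' and MYSQL_PORT='3306', engine_uri('inboxdb')
# would return
#   mysql://inbox:pa%2Bss@localhost:3306/inboxdb?charset=utf8mb4
# with the '+' in the password URL-quoted so SQLAlchemy can parse the URI.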
def _get_from_s3(self):
    assert self.data_sha256, "Can't get data with no hash!"
    # Boto pools connections at the class level
    conn = S3Connection(config.get("AWS_ACCESS_KEY_ID"),
                        config.get("AWS_SECRET_ACCESS_KEY"))
    bucket = conn.get_bucket(config.get("MESSAGE_STORE_BUCKET_NAME"))
    data_obj = bucket.get_key(self.data_sha256)
    assert data_obj, "No data returned!"
    return data_obj.get_contents_as_string()
def get_queue():
    # The queue label is set via config to allow multiple distinct Inbox
    # instances to hit the same Redis server without interfering with each
    # other.
    label = config.get('ACTION_QUEUE_LABEL', None)
    assert label, "Must set ACTION_QUEUE_LABEL in config.cfg"

    return Queue(label, connection=Redis())
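# Hedged usage sketch (not from the original code): get_queue() returns a
# standard rq Queue, so jobs are enqueued with the usual rq API. The action
# function below is a hypothetical placeholder, not something defined in the
# original codebase.
def _example_action(account_id):
    # A real action would talk to the mail backend for this account.
    pass

q = get_queue()
q.enqueue(_example_action, 42)  # a worker process pops this off Redis and runs it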
def check_db():
    # Check database revision
    from inbox.server.models.ignition import db_uri  # needs to be after load_config()
    inbox_db_engine = sqlalchemy.create_engine(db_uri())

    # top-level, with setup.sh
    alembic_ini_filename = config.get('ALEMBIC_INI', None)
    assert alembic_ini_filename, 'Must set ALEMBIC_INI config var'
    assert os.path.isfile(alembic_ini_filename), \
        'Must have alembic.ini file at {}'.format(alembic_ini_filename)
    alembic_cfg = alembic_config(alembic_ini_filename)

    try:
        inbox_db_engine.dialect.has_table(inbox_db_engine, 'alembic_version')
    except sqlalchemy.exc.OperationalError:
        sys.exit("Databases don't exist! Run create_db.py")

    if inbox_db_engine.dialect.has_table(inbox_db_engine, 'alembic_version'):
        res = inbox_db_engine.execute('SELECT version_num from alembic_version')
        current_revision = [r for r in res][0][0]
        assert current_revision, \
            'Need current revision in alembic_version table...'

        script = ScriptDirectory.from_config(alembic_cfg)
        head_revision = script.get_current_head()
        log.info('Head database revision: {0}'.format(head_revision))
        log.info('Current database revision: {0}'.format(current_revision))

        if current_revision != head_revision:
            raise Exception(
                'Outdated database! Migrate using `alembic upgrade head`')
        else:
            log.info('[OK] Database schema matches latest')
    else:
        raise Exception(
            'Un-stamped database! `create_db.py` should have done this... '
            'bailing.')
def _data_file_directory(self):
    assert self.data_sha256
    # Nest it 6 items deep so we don't have folders with too many files.
    h = self.data_sha256
    root = config.get("MSG_PARTS_DIRECTORY", None)
    assert root, "Need root path for saving data"
    return os.path.join(root, h[0], h[1], h[2], h[3], h[4], h[5])
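# Illustrative example only (hypothetical values): for a hash beginning with
# 'a1b2c3...' and MSG_PARTS_DIRECTORY='/var/lib/inbox/parts', this returns
#   /var/lib/inbox/parts/a/1/b/2/c/3
# i.e. one directory level per leading hex character, which caps the number of
# files that can accumulate in any single directory.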
def z_search(self):
    """ Proxy function for the ZeroMQ search service. """
    if not self._zmq_search:
        search_srv_loc = config.get('SEARCH_SERVER_LOC', None)
        assert search_srv_loc, "Where is the Search ZMQ service?"
        self._zmq_search = zerorpc.Client(search_srv_loc)
    return self._zmq_search.search
def sync_status(self):
    """ Returns data representing the status of all syncing users, like:

        user_id: {
            state: 'initial sync',
            stored_data: '12127227',
            stored_messages: '50000',
            status: '56%',
        }
        user_id: {
            state: 'poll',
            stored_data: '1000000000',
            stored_messages: '200000',
            status: '2013-06-08 14:00',
        }
    """
    if not self._sync:
        self._sync = zerorpc.Client(config.get('CRISPIN_SERVER_LOC', None))
    status = self._sync.status()
    user_ids = status.keys()
    with session_scope() as db_session:
        users = db_session.query(User).filter(User.id.in_(user_ids))
        for user in users:
            status[user.id]['stored_data'] = 0
            status[user.id]['stored_messages'] = 0
            for account in user.accounts:
                status[user.id]['stored_data'] += \
                    total_stored_data(account.id, db_session)
                status[user.id]['stored_messages'] += \
                    total_stored_messages(account.id, db_session)
        return status
def _keyfile(self, create_dir=True):
    assert self.key

    key_dir = config.get('KEY_DIR', None)
    assert key_dir
    if create_dir:
        mkdirp(key_dir)

    key_filename = '{0}'.format(sha256(self.key).hexdigest())
    return os.path.join(key_dir, key_filename)
def password(self, value):
    assert AUTH_TYPES.get(self.provider) == 'Password'
    assert value is not None

    key_size = int(config.get('KEY_SIZE', 128))
    self.password_aes, key = encrypt_aes(value, key_size)
    self.key = key[:len(key)/2]

    with open(self._keyfile, 'w+') as f:
        f.write(key[len(key)/2:])
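# Hedged sketch, not from the original code: a matching getter for the scheme
# above would re-join the half of the AES key kept on the row (self.key) with
# the half written to the keyfile, then decrypt. `decrypt_aes` is assumed to be
# the counterpart of `encrypt_aes`; treat both names here as illustrative.
def password(self):
    if self.password_aes is None:
        return None
    with open(self._keyfile, 'r') as f:
        key = self.key + f.read()
    return decrypt_aes(self.password_aes, key)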
def create_auth_account(db_session, email_address):
    uri = config.get('GOOGLE_OAUTH_REDIRECT_URI', None)
    assert uri, 'Must define GOOGLE_OAUTH_REDIRECT_URI'

    def is_alive():
        try:
            # Note that we're using a self-signed SSL cert, so we disable
            # verification of the cert chain
            resp = requests.get(uri + '/alive', verify=False)
            if resp.status_code == 200:
                return True
            else:
                raise Exception('OAuth callback server detected, but returned '
                                '{0}'.format(resp.status_code))
        except requests.exceptions.ConnectionError:
            return False

    if uri != 'urn:ietf:wg:oauth:2.0:oob' and not is_alive():
        print """\033[93m \n\n
    Hey you! It looks like you're not using the Google OAuth 'installed'
    app type, meaning you need a web OAuth callback. The easiest way to do
    this is to run the stub Flask app:\n
        sudo tools/oauth_callback_server/start\n
    Make sure that {0} is directed to your VM by editing /etc/hosts on the
    host machine.\n
    Go ahead and start it. I'll wait for a minute...\n
    \033[0m""".format(uri)

        while True:
            if is_alive():
                print 'Good to go!'
                break
            else:
                time.sleep(.5)

    response = auth_account(email_address)
    account = create_account(db_session, email_address, response)

    return account
def trigger_index_update(namespace_id):
    c = zerorpc.Client()
    c.connect(config.get('SEARCH_SERVER_LOC', None))
    c.index(namespace_id)
def get_webhook_client():
    if not hasattr(g, 'webhook_client'):
        g.webhook_client = zerorpc.Client()
        g.webhook_client.connect(config.get('WEBHOOK_SERVER_LOC'))
    return g.webhook_client
import urllib
import requests

from inbox.util.url import url_concat
from inbox.server.log import get_logger
log = get_logger()
from inbox.server.config import config
from inbox.server.basicauth import AuthError

# Google OAuth app credentials
GOOGLE_OAUTH_CLIENT_ID = config.get('GOOGLE_OAUTH_CLIENT_ID', None)
GOOGLE_OAUTH_CLIENT_SECRET = config.get('GOOGLE_OAUTH_CLIENT_SECRET', None)
REDIRECT_URI = config.get('GOOGLE_OAUTH_REDIRECT_URI', None)
assert GOOGLE_OAUTH_CLIENT_ID, 'Missing Google OAuth Client Id'
assert GOOGLE_OAUTH_CLIENT_SECRET, 'Missing Google OAuth Client Secret'
assert REDIRECT_URI, 'Missing Google OAuth redirect URI'

OAUTH_AUTHENTICATE_URL = 'https://accounts.google.com/o/oauth2/auth'
OAUTH_ACCESS_TOKEN_URL = 'https://accounts.google.com/o/oauth2/token'
OAUTH_TOKEN_VALIDATION_URL = 'https://www.googleapis.com/oauth2/v1/tokeninfo'
USER_INFO_URL = 'https://www.googleapis.com/oauth2/v1/userinfo'

OAUTH_SCOPE = ' '.join([
    'https://www.googleapis.com/auth/userinfo.email',    # email address
    'https://www.googleapis.com/auth/userinfo.profile',  # G+ profile
    'https://mail.google.com/',                          # email
    'https://www.google.com/m8/feeds',                   # contacts
    'https://www.googleapis.com/auth/calendar'           # calendar
])
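# Hedged sketch (not from the original module): one way these constants are
# typically combined into a user-facing Google authorization URL. It assumes
# url_concat (imported above) appends a dict of query parameters to a base URL;
# the helper name and the optional login_hint parameter are illustrative.
def authorize_url(email_address=None):
    params = {
        'client_id': GOOGLE_OAUTH_CLIENT_ID,
        'redirect_uri': REDIRECT_URI,
        'response_type': 'code',
        'scope': OAUTH_SCOPE,
        'access_type': 'offline',  # ask for a refresh token
    }
    if email_address:
        params['login_hint'] = email_address
    return url_concat(OAUTH_AUTHENTICATE_URL, params)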
import os
from hashlib import sha256

from sqlalchemy import Column, Integer, String

from inbox.server.config import config
from inbox.server.log import get_logger
log = get_logger()

STORE_MSG_ON_S3 = config.get('STORE_MESSAGES_ON_S3', None)
if STORE_MSG_ON_S3:
    from boto.s3.connection import S3Connection
    from boto.s3.key import Key

from inbox.util.file import mkdirp, remove_file


class JSONSerializable(object):
    def cereal(self):
        """ Override this and return a string of the object serialized for
            the web client.
        """
        raise NotImplementedError("cereal not implemented")


class Blob(object):
    """ A blob of data that can be saved to local or remote (S3) disk. """
    size = Column(Integer, default=0)
    data_sha256 = Column(String(64))

    def save(self, data):
        assert data is not None, \
def _path_from_key(key):
    parts = [safe_filename(part) for part in splitall(key)]
    cache_dir = config.get('CACHE_BASEDIR', None)
    assert cache_dir, \
        "Need directory to store cache! Set CACHE_BASEDIR in config.cfg"
    return os.path.join(cache_dir, *parts)
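# Illustrative example only (hypothetical values): assuming safe_filename
# leaves plain alphanumeric components unchanged and CACHE_BASEDIR is
# '/var/cache/inbox', a key like 'accounts/1/uidvalidity' maps to
#   /var/cache/inbox/accounts/1/uidvalidity
# so each cache key becomes a nested path under the configured cache root.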
import os
from hashlib import sha256

from sqlalchemy import Column, Integer, String

from inbox.server.config import config
from inbox.server.log import get_logger
log = get_logger()

STORE_MSG_ON_S3 = config.get('STORE_MESSAGES_ON_S3', None)
if STORE_MSG_ON_S3:
    from boto.s3.connection import S3Connection
    from boto.s3.key import Key

from inbox.util.file import mkdirp, remove_file


class Blob(object):
    """ A blob of data that can be saved to local or remote (S3) disk. """
    size = Column(Integer, default=0)
    data_sha256 = Column(String(64))

    @property
    def data(self):
        if self.size == 0:
            log.warning("block size is 0")
            # NOTE: This is a placeholder for "empty bytes". If this doesn't
#!/usr/bin/python
import zerorpc

from inbox.server.config import config, load_config
load_config()

API_SERVER_LOC = config.get('API_SERVER_LOC', None)


def get_subjects(n):
    api_client = zerorpc.Client(timeout=5)
    api_client.connect(API_SERVER_LOC)

    subjects = api_client.first_n_subjects(n)

    print """
    The first {0} emails in your inbox...
    """.format(n)

    for s in subjects:
        print """ {0} """.format(s[0])


get_subjects(10)
def db_uri():
    config_prefix = 'RDS' if is_prod() else 'MYSQL'
    database = config.get('{0}_DATABASE'.format(config_prefix), None)
    assert database, "Must have database name to connect!"
    return engine_uri(database)
from flask import Flask, request
import logging as log
import urlparse

from inbox.server.config import config, load_config
load_config()

app = Flask(__name__)
log.basicConfig(level=log.DEBUG)

uri = config.get('GOOGLE_OAUTH_REDIRECT_URI', None)
assert uri, 'You must define GOOGLE_OAUTH_REDIRECT_URI'
path = urlparse.urlparse(uri)
CALLBACK_URI = path.path


@app.route(CALLBACK_URI + '/alive')
def alive():
    return 'Yep'


@app.route(CALLBACK_URI)
def index():
    assert 'code' in request.args
    authorization_code = request.args['code']

    return """
    <!DOCTYPE html>
    <html>
    <head>
        <title>Successful OAuth</title>
        <style type="text/css">