def init_db(settings):
    """Create the database models.

    Builds the store and its schema first, then binds an engine created
    from the ``sqlalchemy.``-prefixed settings, creating tables as needed.
    """
    db_store = api.store.store_from_settings(settings)
    api.store.create_db(db_store)
    db_engine = engine_from_config(settings, 'sqlalchemy.')
    bind_engine(db_engine, should_create=True)
def db_session(request):
    """Session for SQLAlchemy.

    Creates an engine for the backend selected by ``request.param``
    (``sqlite``, ``mysql`` or ``postgresql``), binds a fresh scoped
    session to it, and registers a finalizer that commits and drops all
    tables.

    :param request: pytest fixture request; ``request.param`` selects
        the database backend.
    :returns: the bound ``pyramid_basemodel.Session`` factory.
    :raises ValueError: if ``request.param`` names an unknown backend.
    """
    from pyramid_fullauth.models import Base  # pylint:disable=import-outside-toplevel

    if request.param == "sqlite":
        connection = "sqlite:///fullauth.sqlite"
    elif request.param == "mysql":
        request.getfixturevalue("mysql")  # takes care of creating database
        connection = "mysql+mysqldb://root:@127.0.0.1:3307/tests?charset=utf8"
    elif request.param == "postgresql":
        request.getfixturevalue("postgresql")  # takes care of creating database
        connection = "postgresql+psycopg2://postgres:@127.0.0.1:5433/tests"
    else:
        # BUG FIX: an unknown param previously fell through and raised a
        # confusing UnboundLocalError on ``connection`` below. Fail fast.
        raise ValueError("Unknown database backend: {!r}".format(request.param))

    engine = create_engine(connection, echo=False, poolclass=NullPool)
    pyramid_basemodel.Session = scoped_session(sessionmaker())
    register(pyramid_basemodel.Session)
    pyramid_basemodel.bind_engine(
        engine, pyramid_basemodel.Session, should_create=True, should_drop=True)

    def destroy():
        # Commit any pending work before tearing the schema down.
        transaction.commit()
        Base.metadata.drop_all(engine)

    request.addfinalizer(destroy)
    return pyramid_basemodel.Session
def db_session(request):
    """SQLAlchemy session.

    Selects a connection URL for the parametrized backend, binds a
    Zope-transaction-managed scoped session to a fresh engine, and
    registers a finalizer that commits and drops all tables.
    """
    from pyramid_fullauth.models import Base

    backend = request.param
    if backend == 'sqlite':
        connection = 'sqlite:///fullauth.sqlite'
    elif backend == 'mysql':
        # takes care of creating database
        request.getfuncargvalue('mysqldb')
        connection = 'mysql+mysqldb://root:@127.0.0.1:3307/tests?charset=utf8'
    elif backend == 'postgresql':
        # takes care of creating database
        request.getfuncargvalue('postgresql')
        connection = 'postgresql+psycopg2://postgres:@127.0.0.1:5433/tests'

    db_engine = create_engine(connection, echo=False, poolclass=NullPool)
    session_factory = sessionmaker(extension=ZopeTransactionExtension())
    pyramid_basemodel.Session = scoped_session(session_factory)
    pyramid_basemodel.bind_engine(
        db_engine, pyramid_basemodel.Session,
        should_create=True, should_drop=True)

    def destroy():
        transaction.commit()
        Base.metadata.drop_all(db_engine)

    request.addfinalizer(destroy)
    return pyramid_basemodel.Session
def db_session(request):
    """SQLAlchemy session.

    Parametrized fixture binding a Zope-transaction-managed scoped
    session for sqlite, mysql or postgresql, with commit-and-drop
    teardown.

    :param request: pytest fixture request; ``request.param`` selects
        the database backend.
    :returns: the bound ``pyramid_basemodel.Session`` factory.
    """
    from pyramid_fullauth.models import Base

    if request.param == 'sqlite':
        connection = 'sqlite:///fullauth.sqlite'
    elif request.param == 'mysql':
        request.getfuncargvalue('mysqldb')  # takes care of creating database
        connection = 'mysql+mysqldb://root:@127.0.0.1:3307/tests?charset=utf8'
    elif request.param == 'postgresql':
        request.getfuncargvalue('postgresql')  # takes care of creating database
        connection = 'postgresql+psycopg2://postgres:@127.0.0.1:5433/tests'

    engine = create_engine(connection, echo=False, poolclass=NullPool)
    pyramid_basemodel.Session = scoped_session(
        sessionmaker(extension=ZopeTransactionExtension()))
    # CONSISTENCY FIX: every sibling fixture passes should_create=True so
    # the schema exists before tests run; this one omitted it, presumably
    # by accident — confirm against pyramid_basemodel.bind_engine defaults.
    pyramid_basemodel.bind_engine(
        engine, pyramid_basemodel.Session,
        should_create=True, should_drop=True)

    def destroy():
        transaction.commit()
        Base.metadata.drop_all(engine)

    request.addfinalizer(destroy)
    return pyramid_basemodel.Session
def run():
    """Dispatch every due, unsent notification to its user.

    Binds the ORM to ``DATABASE_URL``, finds unread notifications whose
    dispatch is due and not yet sent, and dispatches them per user using
    that user's (possibly freshly created) notification preferences.
    """
    # Bind to the database.
    engine = create_engine(os.environ['DATABASE_URL'])
    bind_engine(engine, should_create=False)
    # Prepare.
    notification_cls = orm.Notification
    notification_dispatch_cls = orm.NotificationDispatch
    notification_preference_factory = repo.NotificationPreferencesFactory()
    now = datetime.datetime.now()
    # Run the algorithm.
    with transaction.manager:
        # 1. ignore all the notifications from the Notification table that
        # have read field set.
        unread_notifications = notification_dispatch_cls.query.join(
            notification_cls).filter(notification_cls.read == None)
        # 2. get all of the non duplicated user ids who are due to dispatch
        # and have not been sent.
        due_to_dispatch = unread_notifications.filter(
            notification_dispatch_cls.due <= now).filter(
            notification_dispatch_cls.sent == None)
        user_ids_to_dispatch = set()
        for dispatch in due_to_dispatch.all():
            user_ids_to_dispatch.add(dispatch.notification.user_id)
        # 3. for each user id get all of the notifications grouped by channel
        for user_id in user_ids_to_dispatch:
            # Build the NotificationPreference object so we can get the
            # preferences.
            prefs = orm.NotificationPreference.query.filter_by(
                user_id=user_id).all()
            # BUG FIX: the original did ``.all()[-1]``, which raises
            # IndexError when no preference row exists, so the ``is None``
            # fallback below was unreachable. Take the last row if any.
            user = prefs[-1] if prefs else None
            # If we don't have a notification preference object, we just
            # create it on the fly.
            if user is None:
                user = notification_preference_factory(user_id)
            user_notifications = due_to_dispatch.filter(
                notification_cls.user_id == user_id).all()
            dispatch_user_notifications(user, user_notifications)
def db_session(request, settings):
    """SQLAlchemy session.

    Binds a session built by ``_make_session`` to the engine configured
    under the ``sqlalchemy.`` settings prefix; tears down by committing,
    dropping all tables and closing the session.
    """
    db_engine = engine_from_config(settings, 'sqlalchemy.')
    pyramid_basemodel.Session = _make_session()
    pyramid_basemodel.bind_engine(
        db_engine, should_create=True, should_drop=True)

    def destroy():
        transaction.commit()
        pyramid_basemodel.Base.metadata.drop_all(db_engine)
        pyramid_basemodel.Session.close()

    request.addfinalizer(destroy)
    return pyramid_basemodel.Session
def db_session(request):
    """Session for SQLAlchemy.

    Binds a Zope-transaction-managed scoped session to an sqlite engine
    and registers a finalizer that commits and drops all tables.
    """
    from pyramid_localize.models import Base

    db_engine = create_engine(
        'sqlite:///fullauth.sqlite', echo=False, poolclass=NullPool)
    session_factory = sessionmaker(extension=ZopeTransactionExtension())
    pyramid_basemodel.Session = scoped_session(session_factory)
    pyramid_basemodel.bind_engine(
        db_engine, pyramid_basemodel.Session,
        should_create=True, should_drop=True)

    def destroy():
        transaction.commit()
        Base.metadata.drop_all(db_engine)

    request.addfinalizer(destroy)
    return pyramid_basemodel.Session
def db_session(request):
    """Session for SQLAlchemy.

    Binds a zope.sqlalchemy-registered scoped session to an sqlite
    engine and registers a finalizer that commits and drops all tables.
    """
    from pyramid_localize.models import Base  # pylint:disable=import-outside-toplevel

    db_engine = create_engine(
        "sqlite:///localize.sqlite", echo=False, poolclass=NullPool)
    session = scoped_session(sessionmaker())
    pyramid_basemodel.Session = session
    register(session)
    pyramid_basemodel.bind_engine(
        db_engine, session, should_create=True, should_drop=True)

    def destroy():
        transaction.commit()
        Base.metadata.drop_all(db_engine)

    request.addfinalizer(destroy)
    return pyramid_basemodel.Session
def main(args=None):
    """Consume the Twitter Streaming API.

    Writes a pid file, configures logging from the config file, applies
    gevent monkey patches, binds the model classes, then runs a
    ``Manager`` until interrupted.

    :param args: parsed command line args; parsed via ``parse_args()``
        when ``None``.
    """
    # Write a pid file.  BUG FIX: use ``with`` so the handle is closed
    # even if the write raises, instead of manual open/write/close.
    with open('stream.pid', 'w') as f:
        f.write(str(os.getpid()))
    # Parse the command line args.
    if args is None:
        args = parse_args()
    # Read the config file.
    config = ConfigParser.SafeConfigParser()
    config.read(args.config_file)
    # Setup logging.
    logging.config.fileConfig(args.config_file)
    # Patch sockets and threading.
    from gevent import monkey
    monkey.patch_all()
    import gevent_psycopg2
    gevent_psycopg2.monkey_patch()
    # Bind the model classes.
    engine = create_engine(config.get('app:beliveat', 'sqlalchemy.url'))
    bind_engine(engine)
    # Instantiate a ``Manager`` with a redis client and oauth handler and
    # start the manager running.
    client = get_redis_client()
    handler = oauth_handler_factory(config)
    manager = Manager(client, handler, args.input_channel, args.output_channel)
    # Close the db connection
    Session.remove()
    try:
        manager.start()
    except KeyboardInterrupt:
        manager.stop()
def main(args=None):
    """Process the ``INPUT_CHANNEL`` redis queue.

    Writes a pid file, configures logging from the config file, applies
    gevent monkey patches, binds the model classes, then runs a
    ``QueueProcessor`` until interrupted.

    :param args: parsed command line args; parsed via ``parse_args()``
        when ``None``.
    """
    # Write a pid file.  BUG FIX: use ``with`` so the handle is closed
    # even if the write raises, instead of manual open/write/close.
    with open('queue.pid', 'w') as f:
        f.write(str(os.getpid()))
    # Parse the command line args.
    if args is None:
        args = parse_args()
    # Read the config file.
    config = ConfigParser.SafeConfigParser()
    config.read(args.config_file)
    # Setup logging.
    logging.config.fileConfig(args.config_file)
    # Patch sockets, threading and the db driver.
    from gevent import monkey
    monkey.patch_all()
    import gevent_psycopg2
    gevent_psycopg2.monkey_patch()
    # Bind the model classes.
    engine = create_engine(config.get('app:beliveat', 'sqlalchemy.url'))
    bind_engine(engine)
    # Setup the redis queue processor.
    client = get_redis_client()
    processor = QueueProcessor(client, [args.input_channel], handle_data)
    # Close the db connection
    Session.remove()
    try:
        processor.start()
    except KeyboardInterrupt:
        pass
def run():
    """Dispatch every due, unsent notification to its user.

    Binds the ORM to ``DATABASE_URL``, finds unread notifications whose
    dispatch is due and not yet sent, and dispatches them per user using
    that user's (possibly freshly created) notification preferences.
    """
    # Bind to the database.
    engine = create_engine(os.environ['DATABASE_URL'])
    bind_engine(engine, should_create=False)
    # Prepare.
    notification_cls = orm.Notification
    notification_dispatch_cls = orm.NotificationDispatch
    notification_preference_factory = repo.NotificationPreferencesFactory()
    now = datetime.datetime.now()
    # Run the algorithm.
    with transaction.manager:
        # 1. ignore all the notifications from the Notification table that
        # have read field set.
        unread_notifications = notification_dispatch_cls.query.join(
            notification_cls).filter(notification_cls.read == None)
        # 2. get all of the non duplicated user ids who are due to dispatch
        # and have not been sent.
        due_to_dispatch = unread_notifications.filter(
            notification_dispatch_cls.due <= now).filter(
            notification_dispatch_cls.sent == None)
        user_ids_to_dispatch = set()
        for dispatch in due_to_dispatch.all():
            user_ids_to_dispatch.add(dispatch.notification.user_id)
        # 3. for each user id get all of the notifications grouped by channel
        for user_id in user_ids_to_dispatch:
            # Build the NotificationPreference object so we can get the
            # preferences.
            prefs = orm.NotificationPreference.query.filter_by(
                user_id=user_id).all()
            # BUG FIX: the original did ``.all()[-1]``, which raises
            # IndexError when no preference row exists, so the ``is None``
            # fallback below was unreachable. Take the last row if any.
            user = prefs[-1] if prefs else None
            # If we don't have a notification preference object, we just
            # create it on the fly.
            if user is None:
                user = notification_preference_factory(user_id)
            user_notifications = due_to_dispatch.filter(
                notification_cls.user_id == user_id).all()
            dispatch_user_notifications(user, user_notifications)