Exemple #1
0
 def __init__(self, url, pool_size=sqlalchemy_dao.POOL_DEFAULT):
     """Create the engine and session factory for *url*.

     A ``pool_size`` of ``sqlalchemy_dao.POOL_DISABLED`` turns pooling off
     entirely (``NullPool``); any other value configures a bounded pool with
     hourly connection recycling and unlimited overflow.
     """
     if pool_size != sqlalchemy_dao.POOL_DISABLED:
         self._engine = engine.create_engine(
             url,
             pool_size=pool_size,
             pool_recycle=3600,
             max_overflow=sys.maxsize,
         )
     else:
         self._engine = engine.create_engine(url, poolclass=NullPool)
     # pylint: disable=invalid-name
     self._Session = session.sessionmaker(bind=self._engine, class_=self.session_class)
    def __init__(self, environ=None):
        """
        Establish an environ for this instance.

        On first construction this also creates the module-level ENGINE
        (with a MySQL-tuned pool when the configured URL is MySQL) and
        maps the tables exactly once per process.
        """

        global ENGINE, MAPPED

        self.environ = {} if environ is None else environ

        if not ENGINE:
            db_config = self._db_config()
            if 'mysql' not in db_config:
                ENGINE = create_engine(db_config)
            else:
                ENGINE = create_engine(db_config,
                        pool_recycle=3600,
                        pool_size=20,
                        max_overflow=-1,
                        pool_timeout=2)
                # Optional dependency: the checkout listener validates
                # pooled MySQL connections before handing them out.
                try:
                    from tiddlywebplugins.mysql3 import on_checkout
                    from sqlalchemy import event
                    event.listen(ENGINE, 'checkout', on_checkout)
                except ImportError:
                    pass
            METADATA.bind = ENGINE
            SESSION.configure(bind=ENGINE)

        self.session = SESSION()

        if not MAPPED:
            METADATA.create_all(ENGINE)
            MAPPED = True
    def __init__(self, environ=None):
        """
        Establish an environ for this instance.
        """

        # ENGINE/MAPPED are module-level singletons shared by all instances.
        global ENGINE, MAPPED

        if environ is None:
            environ = {}
        self.environ = environ

        if not ENGINE:
            db_config = self._db_config()
            if 'mysql' in db_config:
                # LookLively keeps pooled MySQL connections alive; it is an
                # optional dependency, so fall back to no listeners.
                try:
                    from tiddlywebplugins.mysql2 import LookLively
                    listeners = [LookLively()]
                except ImportError:
                    listeners = []
                ENGINE = create_engine(db_config,
                        pool_recycle=3600,
                        pool_size=20,
                        max_overflow=-1,
                        pool_timeout=2,
                        listeners=listeners)
            else:
                ENGINE = create_engine(db_config)
            METADATA.bind = ENGINE
            SESSION.configure(bind=ENGINE)

        # Each instance gets its own session from the shared factory.
        self.session = SESSION()

        if not MAPPED:
            # Create the tables once per process.
            METADATA.create_all(ENGINE)
            MAPPED = True
Exemple #4
0
    def compute(self):
        """Build a SQLAlchemy engine for the configured connection URL.

        On ImportError (missing DBAPI driver) it tries to install the
        driver for the requested dialect and retries once.

        Raises ModuleError when the dialect is unknown, the driver cannot
        be installed, or the final connection attempt fails.
        """
        url = URL(drivername=self.get_input('protocol'),
                  username=self.force_get_input('user', None),
                  password=self.force_get_input('password', None),
                  host=self.force_get_input('host', None),
                  port=self.force_get_input('port', None),
                  database=self.get_input('db_name'))

        try:
            engine = create_engine(url)
        except ImportError as e:  # `except X, e` was Python-2-only syntax
            driver = url.drivername
            installed = False
            if driver == 'sqlite':
                raise ModuleError(self,
                                  "Python was built without sqlite3 support")
            elif (driver == 'mysql' or
                    driver == 'drizzle'): # drizzle is a variant of MySQL
                installed = install({
                        'pip': 'mysql-python',
                        'linux-debian': 'python-mysqldb',
                        'linux-ubuntu': 'python-mysqldb',
                        'linux-fedora': 'MySQL-python'})
            elif (driver == 'postgresql' or
                    driver == 'postgre'):   # deprecated alias
                installed = install({
                        'pip': 'psycopg2',
                        'linux-debian':'python-psycopg2',
                        'linux-ubuntu':'python-psycopg2',
                        'linux-fedora':'python-psycopg2'})
            elif driver == 'firebird':
                installed = install({
                        'pip': 'fdb',
                        'linux-fedora':'python-fdb'})
            elif driver == 'mssql' or driver == 'sybase':
                installed = install({
                        'pip': 'pyodbc',
                        'linux-debian':'python-pyodbc',
                        'linux-ubuntu':'python-pyodbc',
                        'linux-fedora':'pyodbc'})
            elif driver == 'oracle':
                installed = install({
                        'pip': 'cx_Oracle'})
            else:
                raise ModuleError(self,
                                  "SQLAlchemy couldn't connect: %s" %
                                  debug.format_exception(e))
            if not installed:
                raise ModuleError(self,
                                  "Failed to install required driver")
            try:
                engine = create_engine(url)
            except Exception as e:
                raise ModuleError(self,
                                  "Couldn't connect to the database: %s" %
                                  debug.format_exception(e))
Exemple #5
0
def alchemyEngine() -> Engine:
    """Build the application's SQLAlchemy engine from ``database_url()``.

    SQLite URLs get a connect listener that enables foreign-key
    enforcement; every other backend gets a sized connection pool.
    """
    url = database_url()
    recycle = alchemy_pool_recycle()

    if not url.startswith('sqlite://'):
        return create_engine(url, pool_recycle=recycle,
                             pool_size=30, max_overflow=60)

    engine = create_engine(url, pool_recycle=recycle)

    @event.listens_for(engine, 'connect')
    def setSQLiteFKs(dbapi_con, con_record):
        dbapi_con.execute('PRAGMA foreign_keys=ON')

    return engine
Exemple #6
0
def database_exists(url):
    """Check if a database exists.

    :param url: A SQLAlchemy engine URL.

    Performs backend-specific testing to quickly determine if a database
    exists on the server. ::

        database_exists('postgresql://postgres@localhost/name')  #=> False
        create_database('postgresql://postgres@localhost/name')
        database_exists('postgresql://postgres@localhost/name')  #=> True

    Supports checking against a constructed URL as well. ::

        engine = create_engine('postgresql://postgres@localhost/name')
        database_exists(engine.url)  #=> False
        create_database(engine.url)
        database_exists(engine.url)  #=> True

    """
    url = copy(make_url(url))
    database = url.database
    # Point the URL at a maintenance database (or none) so we can connect
    # even when the target database does not exist yet.
    if url.drivername.startswith('postgres'):
        url.database = 'postgres'
    elif not url.drivername.startswith('sqlite'):
        url.database = None

    engine = create_engine(url)
    dialect = engine.dialect.name

    if dialect == 'postgresql':
        text = "SELECT 1 FROM pg_database WHERE datname='%s'" % database
        return bool(engine.execute(text).scalar())

    if dialect == 'mysql':
        text = ("SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA "
                "WHERE SCHEMA_NAME = '%s'" % database)
        return bool(engine.execute(text).scalar())

    if dialect == 'sqlite':
        # For SQLite the "database" is a file (or the :memory: sentinel).
        return database == ':memory:' or os.path.exists(database)

    # Unknown backend: probe by connecting directly to the target database.
    try:
        url.database = database
        engine = create_engine(url)
        engine.execute('SELECT 1')
        return True

    except (ProgrammingError, OperationalError):
        return False
    def __init__(self, path, delete_everything=False):
        # SQLite needs a thread-affine pool plus per-connection PRAGMAs;
        # other backends use the default pool unchanged.
        if path.startswith('sqlite:'):
            self.engine = create_engine(path, poolclass=SingletonThreadPool)
            sqlalchemy.event.listen(
                self.engine, 'connect', self._apply_pragmas_callback)
        else:
            self.engine = create_engine(path)

        Session.configure(bind=self.engine)

        # Deliberate string sentinel (not True) so the destructive path
        # cannot be triggered by a merely truthy argument.
        if delete_everything == 'yes-really!':
            self._delete_everything()

        Base.metadata.create_all(self.engine)
Exemple #8
0
    def drop_admin_user(self):
        """
        for the tests, we will drop the imported user table
        """

        sqlconnect = self.appconf.get('sqlalchemy.url')
        engine = create_engine(sqlconnect)

        # create the session for the db operation

        Sessionmaker = sessionmaker()
        Sessionmaker.configure(bind=engine)
        session = Sessionmaker()

        # we try to delete the table if it exists; a missing table is not
        # fatal for the tests, so any failure is logged and rolled back
        try:
            SetPasswordHandler.AdminUser.__table__.drop(engine)
            session.commit()
        except Exception as exx:
            # the original caught `(ProgrammingError, Exception)`, which is
            # redundant: ProgrammingError is already an Exception subclass
            log.info("Drop Table failed %r", exx)
            session.rollback()
        finally:
            session.close()
def main():
    """Rebuild the demo webshop database: wipe every table, then reload
    the facts and dimension data."""
    global connection

    # Open source
    engine = create_engine("sqlite:///webshop.sqlite")
    connection = engine.connect()

    # Cleanup: empty each table before re-importing.
    for table in ("dates", "customers", "countries",
                  "products", "sales", "webvisits"):
        connection.execute("DELETE FROM " + table)

    # Import facts and dimension data
    import_sales()
    # Generate all dates
    generate_dates()
    # Generate site visits
    generate_webvisits()

    # Add extra dimensions for left joins
    insert_product("Books", "200 ways of slicing a cube")
Exemple #10
0
def InitDb(*args, **kwds):
    """
    Create a new DBAPI connection pool.

    The most common and only required argument is the connection URL.
    The URL can either be a string or a `sqlalchemy.engine.url.URL`.
    This method has no return value and needs to be called only once per process.

    See `sqlalchemy.engine.create_engine`.
    """
    global _Base
    global _db
    global _session

    if len(args) > 0:
        # inject the foreign key pragma when using SQLite databases to ensure integrity
        # http://docs.sqlalchemy.org/en/rel_0_8/dialects/sqlite.html#foreign-key-support
        # NOTE: the original compared URL.get_dialect() -- which returns a
        # dialect *class* -- against the string 'sqlite', so URL objects
        # were never detected; test the driver name instead.
        if (isinstance(args[0], str) and args[0].startswith('sqlite')) or \
                (isinstance(args[0], URL) and args[0].drivername.startswith('sqlite')):
            @event.listens_for(engine.Engine, "connect")
            def set_sqlite_pragma(dbapi_connection, _):
                cursor = dbapi_connection.cursor()
                cursor.execute("PRAGMA foreign_keys=ON")
                cursor.close()

    _db = engine.create_engine(*args, **kwds)
    _Base.metadata.create_all(_db)
    _session = session.sessionmaker(bind=_db)
    logger.debug("DB bound to %s", _db)
    return None
    def persist_bundle_sensor(self):
        """Integration test: persist a SPOT5 bundle, verify its product row
        and staged files, then clean both up."""
        from madmex.persistence.driver import persist_bundle
        folder = '/LUSTRE/MADMEX/staging/madmex_antares/test_ingest/556_297_041114_dim_img_spot'
        from sqlalchemy import create_engine
        from sqlalchemy.orm.session import sessionmaker
        from madmex.mapper.bundle.spot5 import Bundle
        #from madmex.configuration import SETTINGS

        dummy = Bundle(folder)
        #dummy.target = '/LUSTRE/MADMEX/staging/'
        target_url = getattr(SETTINGS, 'TEST_FOLDER')
        print target_url
        #TODO please fix me, horrible hack
        dummy.target = target_url
        persist_bundle(dummy)
        my_database = getattr(SETTINGS, 'ANTARES_TEST_DATABASE')
        klass = sessionmaker(bind=create_engine(my_database))
        session = klass()
        # Raw SQL check that the bundle's product row was written.
        query = 'SELECT count(*) FROM product WHERE uuid=\'%s\';' % dummy.uuid_id

        try:
            result_set = session.execute(query)
            for row in result_set:
                self.assertGreater(row['count'], 0)
            session.delete(dummy.get_database_object())
            session.commit()
            # Every staged file must exist under the target folder; remove
            # each one after checking so the test leaves no residue.
            for file_name in dummy.get_files():
                full_path = os.path.join(target_url, os.path.basename(file_name))
                self.assertTrue(os.path.isfile(full_path))
                os.remove(full_path)
        except:
            # Bare except is deliberate: roll back, then re-raise.
            session.rollback()
            raise
        finally:
            session.close()
Exemple #12
0
      def __init__(self, **settings):
          # Forward the raw settings dict to the base handler.
          super(SQLAlchemyConnectionHandler, self).__init__(settings)

          from sqlalchemy.engine.url import URL
          from sqlalchemy.engine import create_engine
          # NOTE(review): `__settings` is not defined in this scope and is
          # name-mangled to `_SQLAlchemyConnectionHandler__settings`, so this
          # line raises NameError as written; it presumably intends to read
          # the `settings` argument (or an attribute set by the base class)
          # -- confirm before relying on this handler.
          url = URL(drivername=__settings.ENGINE, username=__settings.USER, password=__settings.PASSWORD, host=__settings.HOST, port=__settings.PORT, database=__settings.DBNAME)
          self.__engine = create_engine(url)
 def persist_bundle(self):
     """Integration test: persist a dummy bundle, verify its product row
     and output files, then clean both up."""
     from madmex.persistence.driver import persist_bundle
     from sqlalchemy import create_engine
     from sqlalchemy.orm.session import sessionmaker
     from madmex.util import remove_file
     dummy = DummyBundle()
     persist_bundle(dummy)
     
     
     my_database = getattr(SETTINGS, 'ANTARES_TEST_DATABASE')
     klass = sessionmaker(bind=create_engine(my_database))
     session = klass()
     # Raw SQL check that the bundle's product row was written.
     query = 'SELECT count(*) FROM product WHERE uuid=\'%s\';' % dummy.uuid_id
     print query
     try:
         result_set = session.execute(query)
         for row in result_set:
             self.assertGreater(row['count'], 0)
         # Delete object from database.
         session.delete(dummy.get_database_object())
         session.commit()
         # Every output file must exist; remove each after checking so the
         # test leaves no residue on disk.
         for file_name in dummy.get_files():
             full_path = os.path.join(dummy.get_output_directory(), os.path.basename(file_name))
             self.assertTrue(os.path.isfile(full_path))
             # Remove file from filesystem.
             remove_file(full_path)
     except:
         # Bare except is deliberate: roll back before re-raising.
         session.rollback()
         raise
     finally:
         session.close()
def _init_engine(database, backend, user=None, password=None, host=None, debug=False):
    """Return a SQLAlchemy engine for *database* on the given *backend*.

    Supported backends: 'mysql' (via oursql) and 'sqlite_memory'.
    Raises ValueError for anything else.
    """
    if backend == 'mysql':
        # Use oursql because other backends do not properly support MySQL cursors,
        # i.e. if you want to stream a large table through memory, other backends
        # will pull every row in the table into memory. The SQLAlchemy docs for yield_per
        # say that only psycopg2 is supported, but OurSQL also streams by default.
        if password:
            credentials = '%s:%s@%s' % (user, password, host)
        else:
            credentials = '%s@%s' % (user, host)
        url = 'mysql+oursql://%s/%s?charset=utf8&use_unicode=1' % (credentials, database)
    elif backend == 'sqlite_memory':
        url = 'sqlite:///:memory:'
    else:
        raise ValueError('Unknown database backend %s' % backend)

    engine = create_engine(url, echo=debug)

    # Enforce referential integrity on sqlite_memory for more rigorous tests.
    # http://stackoverflow.com/a/7831210/281469
    if backend == 'sqlite_memory':
        event.listen(engine, 'connect', _fk_pragma_on_connect)
    return engine
Exemple #15
0
def main():
    """Command-line entry point for sqlacodegen: reflect an existing
    database and write generated SQLAlchemy model code to a file or
    stdout."""
    parser = argparse.ArgumentParser(description='Generates SQLAlchemy model code from an existing database.')
    parser.add_argument('url', nargs='?', help='SQLAlchemy url to the database')
    parser.add_argument('--version', action='store_true', help="print the version number and exit")
    parser.add_argument('--schema', help='load tables from an alternate schema')
    parser.add_argument('--tables', help='tables to process (comma-separated, default: all)')
    parser.add_argument('--noviews', action='store_true', help="ignore views")
    parser.add_argument('--noindexes', action='store_true', help='ignore indexes')
    parser.add_argument('--noconstraints', action='store_true', help='ignore constraints')
    parser.add_argument('--nojoined', action='store_true', help="don't autodetect joined table inheritance")
    parser.add_argument('--noinflect', action='store_true', help="don't try to convert tables names to singular form")
    parser.add_argument('--noclasses', action='store_true', help="don't generate classes, only tables")
    parser.add_argument('--alwaysclasses', action='store_true', help="always generate classes")
    parser.add_argument('--nosequences', action='store_true', help="don't auto-generate postgresql sequences")
    parser.add_argument('--outfile', help='file to write output to (default: stdout)')
    args = parser.parse_args()

    # --version and a missing url both short-circuit before touching the DB.
    if args.version:
        print(sqlacodegen.version)
        return
    if not args.url:
        print('You must supply a url\n', file=sys.stderr)
        parser.print_help()
        return

    engine = create_engine(args.url)
    metadata = MetaData(engine)
    tables = args.tables.split(',') if args.tables else None
    metadata.reflect(engine, args.schema, not args.noviews, tables)
    # Default to stdout when no --outfile is given.
    outfile = codecs.open(args.outfile, 'w', encoding='utf-8') if args.outfile else sys.stdout
    generator = CodeGenerator(metadata, args.noindexes, args.noconstraints, args.nojoined, args.noinflect,
                              args.noclasses, args.alwaysclasses, args.nosequences)
    generator.render(outfile)
def engine(request, sqlalchemy_connect_url, app_config):
    """Engine configuration.
    See http://docs.sqlalchemy.org/en/latest/core/engines.html
    for more details.

    :sqlalchemy_connect_url: Connection URL to the database. E.g
    postgresql://scott:tiger@localhost:5432/mydatabase 
    :app_config: Path to a ini config file containing the sqlalchemy.url
    config variable in the DEFAULT section.
    :returns: Engine instance

    """
    # (A stray dead `pass` statement after the docstring was removed.)
    # Prefer an ini-file configuration when one is supplied; otherwise fall
    # back to a plain connection URL.
    if app_config:
        from sqlalchemy import engine_from_config
        engine = engine_from_config(app_config)
    elif sqlalchemy_connect_url:
        from sqlalchemy.engine import create_engine
        engine = create_engine(sqlalchemy_connect_url)
    else:
        raise RuntimeError("Can not establish a connection to the database")

    def fin():
        # Dispose of the connection pool when the fixture is torn down.
        print ("Disposing engine")
        engine.dispose()

    request.addfinalizer(fin)
    return engine
 def test_url_default(self):
     """A plain presto:// URL with no query options should connect and run SQL."""
     engine = create_engine('presto://localhost:8080/hive')
     try:
         with contextlib.closing(engine.connect()) as connection:
             result = connection.execute('SELECT 1 AS foobar FROM one_row')
             self.assertEqual(result.scalar(), 1)
     finally:
         engine.dispose()
Exemple #18
0
    def create_sa_proxies(self):
        """Build an in-memory SQLite schema with one mapped User class and
        stash a mapped object, a ResultProxy, and a RowProxy on self.

        NOTE(review): relies on legacy (pre-1.4) SQLAlchemy APIs --
        ``orm.mapper``, bound metadata, and implicit execution -- so the
        statement order here matters.
        """

        # create the table and mapper
        metadata = schema.MetaData()
        user_table = schema.Table(
            'user',
            metadata,
            schema.Column('id', types.Integer, primary_key=True),
            schema.Column('first_name', types.Unicode(25)),
            schema.Column('last_name', types.Unicode(25))
        )

        class User(object):
            pass
        orm.mapper(User, user_table)

        # create the session
        engine = create_engine('sqlite:///:memory:')
        metadata.bind = engine
        metadata.create_all()
        session = orm.sessionmaker(bind=engine)()

        # add some dummy data
        user_table.insert().execute([
            {'first_name': 'Jonathan', 'last_name': 'LaCour'},
            {'first_name': 'Yoann', 'last_name': 'Roman'}
        ])

        # get the SA objects
        self.sa_object = session.query(User).first()
        select = user_table.select()
        self.result_proxy = select.execute()
        self.row_proxy = select.execute().fetchone()
Exemple #19
0
def get_exist_sn_list(params, user_name_equal, user_name_not_equal):
    """Return the serial numbers (uppercased) of assets found in the DB
    whose asset_id is in neither exclusion collection.

    NOTE(review): Python 2 code (print statements) with what looks like a
    masked hard-coded DSN below -- move credentials to configuration.
    """
    # postgresql://hr_user:S@[email protected]:5432/hrcore_v2
    engine = create_engine('postgres://*****:*****@localhost/itdb2', echo=False)
    connection = engine.connect()
    Session = sessionmaker(bind=engine)
    session = Session()
    crud = Crud(session)
    asset_infos = []
    for param in params:
        asset_info = crud.get_data_by_asset_sn(param["sn"])
        if asset_info is not None:
            asset_id = asset_info.asset_id.upper()
            # Ad-hoc debug tracing for one specific serial number.
            if param["sn"] == "CN032K5G6418041J0M5U":
                print 11111111111111
                print asset_id
                print asset_id in user_name_equal
                print asset_id in user_name_not_equal
            # Skip assets already present in either exclusion collection.
            if asset_id in user_name_equal or asset_id in user_name_not_equal:
                continue
            if param["sn"] == "CN032K5G6418041J0M5U":
                print u"加入asset——infos"
            asset_infos.append(asset_info.sn.upper())
    print len(asset_infos)
    session.commit()
    return asset_infos
def upgrade():
    """ this script is made to upgrade data from 9 -> 10 """
    from sqlalchemy.engine import create_engine
    engine = create_engine('sqlite:///OSMTM.db')
    # NOTE: this connection is never used directly; the session below binds
    # to the engine itself.
    connection = engine.connect()

    from OSMTM.models import TileHistory, Tile, Job
    from OSMTM.history_meta import VersionedListener
    from sqlalchemy import orm
    from sqlalchemy.sql.expression import and_

    sm = orm.sessionmaker(bind=engine, autoflush=True, autocommit=False,
                expire_on_commit=True,
                extension=[VersionedListener()])
    session = orm.scoped_session(sm)

    jobs = session.query(Job).all()

    for job in jobs:

        # print as a function call so the module also parses on Python 3
        print("job: %s" % job.id)

        # Reset every changed tile of this job; history is recorded by the
        # VersionedListener session extension.
        tiles = session.query(Tile).filter(and_(Tile.change==True, Tile.job_id==job.id))
        for tile in tiles:
            tile.change = False
            tile.username = None
            tile.comment = None
            session.add(tile)

        # Commit per job so a failure does not lose all prior work.
        session.commit()
Exemple #21
0
def setup_module():
    """Set up the module-wide in-memory engine, connection, and schema."""
    global connection, engine

    engine = create_engine("sqlite://")
    session_factory.configure(bind=engine)
    connection = engine.connect()
    # Build the schema on the shared connection so it lives with it.
    Base.metadata.create_all(connection)
Exemple #22
0
    def __init__(self, db_connection, base_model, session_options=None, **kwargs):
        """Create the engine, scoped session, and extended model base.

        :param db_connection: URL/connection string passed to ``create_engine``.
        :param base_model: declarative base class to extend.
        :param session_options: options forwarded to ``create_scoped_session``.
        :param kwargs: extra keyword arguments for ``create_engine``.

        NOTE(review): ``convert_unicode`` is deprecated/removed in modern
        SQLAlchemy -- confirm the pinned version still accepts it.
        """
        self.engine = engine.create_engine(db_connection, convert_unicode=True, **kwargs)

        self.Query = orm.Query
        self.session = self.create_scoped_session(session_options)
        self.Model = self.extend_base_model(base_model)
Exemple #23
0
 def __init__(self, config):
     """Connect to the Marcotti-MLS database described by *config* and
     record the configured season range."""
     logger.info("Marcotti-MLS v{0}: Python {1} on {2}".format(__version__, sys.version, sys.platform))
     self.engine = create_engine(config.database_uri)
     self.connection = self.engine.connect()
     # Log only after the connection actually succeeded (the original
     # logged "Opened connection" before connecting).
     logger.info("Opened connection to {0}".format(self._public_db_uri(config.database_uri)))
     self.start_year = config.START_YEAR
     self.end_year = config.END_YEAR
 def tester():
     """Round-trip 1000 Foo objects through an in-memory SQLite table and
     run ``assertion`` against the read-back.

     NOTE(review): uses ``engine_wrapper``, ``reader_args`` and
     ``assertion`` from the enclosing scope.
     """
     class FooObj(DataObject):
         partition_attribute = 'id'
     
     @schema([IntegerColumn(name='id')])
     class Foo(FooObj):
         identity_key_ = (('id', 'id'),)
         sort_key_ = ('id',)
     
     # Create a test database and table
     engine = create_engine('sqlite://')
     metadata = MetaData(bind=engine)
     foos_table = Foo.to_sqa_table(metadata, 'foos')
     metadata.create_all()
     
     # Define the mapping between tables and objects for writing
     writer_config = {Foo: SqaWriterConfig(foos_table),
                     }
     
     # Define the mapping between tables and objects for reading
     reader_config = {Foo: SqaReaderConfig(foos_table, engine_wrapper(engine), **reader_args)}
     
     # Create some objects
     foos = [Foo(id=i) for i in range(1000)]
     
     # Write the objects to the database
     writer = Foo.writer(writer_config)
     for foo in foos:
         writer.write(foo)
     
     def do_the_reading():
         return [foo for foo in Foo.reader(reader_config)]
     
     assertion(do_the_reading)
Exemple #25
0
def run_with_taskmanager(dsn=None):
    """Example for running PCSE/WOFOST with the task manager.

    Runs PyWOFOST for 6 crop types for one location in water-limited mode
    using an ensemble of 50 members. Each member is initialized with different
    values for the initial soil moisture and each member receives different
    values for rainfall as forcing variable. Executing PyWOFOST runs is done
    through the task manager. Output is writted to the database

    Parameters:
    dsn - SQLAlchemy data source name pointing to the database to be used.

    NOTE(review): Python 2 code (print statements); the `dsn` parameter is
    unused -- the connection string comes from `run_settings.connstr`.
    """

    # Open database connection and empty output table
    db_engine = sa_engine.create_engine(run_settings.connstr)

    # Initialise task manager
    taskmanager = TaskManager(db_engine, dbtype="MySQL")
    # Loop until no tasks are left
    task = taskmanager.get_task()
    while task is not None:
        try:
            task_id = task["task_id"]
            print "Running task: %i" % task_id
            task_runner(db_engine, task)

            # Set status of current task to 'Finished'
            taskmanager.set_task_finished(task)

        except SQLAlchemyError as inst:
            msg = "Database error on task_id %i." % task_id
            print msg
            logging.exception(msg)
            # Break because of error in the database connection
            break

        except PCSEError as inst:
            msg = "Error in PCSE on task_id %i." % task_id
            print msg
            logging.exception(msg)
            # Set status of current task to 'Error'
            taskmanager.set_task_error(task)

        except Exception as inst:
            msg = "General error on task_id %i" % task_id
            print msg
            logging.exception(msg)
            # Set status of current task to 'Error'
            taskmanager.set_task_error(task)

        # Reachable despite the `except Exception` above:
        # KeyboardInterrupt derives from BaseException, not Exception.
        except KeyboardInterrupt:
            msg = "Terminating on user request!"
            print msg
            logging.error(msg)
            taskmanager.set_task_error(task)
            sys.exit()

        finally:
            #Get new task
            task = taskmanager.get_task()
Exemple #26
0
def init_engine(uri, **kwargs):
    """Initialise the module-level engine and scoped Session for *uri*
    and return the engine."""
    global engine
    global Session
    engine = create_engine(uri, **kwargs)
    Session = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, bind=engine))
    return engine
Exemple #27
0
    def setUpClass(cls):
        # Reuse one StaticPool engine across all test classes so the
        # expensive setup runs against a single shared connection.
        # NOTE(review): a @classmethod decorator is presumably applied
        # above this view -- confirm.
        global engine
        if not engine:
            engine = create_engine(TEST_DB_URL, poolclass=StaticPool)

        cls.engine = Base.metadata.bind = engine
        expensive_setup()
Exemple #28
0
def load_connection(connection_string, echo=False):
    """Create and return a connection to the database given in the
    connection string.

    Parameters
    ----------
    connection_string : str
        A string that points to the database connection.  The
        connection string is in the following form:
        dialect+driver://username:password@host:port/database
    echo : bool
        Show all SQL produced.

    Returns
    -------
    session : session object
        Provides a holding zone for all objects loaded or associated
        with the database.
    engine : engine object
        Provides a source of database connectivity and behavior.
    """

    engine = create_engine(connection_string, echo=echo)
    # Returns the sessionmaker factory (callers instantiate sessions).
    Session = sessionmaker(bind=engine)

    return Session, engine
Exemple #29
0
def main():
    """Compute TSUM1/TSUM2 fractions for crops 1..28 and store them in the
    database configured by ``run_settings.connstr``."""
    # Initialise
    recs = []
    db_engine = None

    try:
        # Initialise further
        db_engine = sa_engine.create_engine(run_settings.connstr)
        db_metadata = MetaData(db_engine)

        # Now loop over the crops
        for crop_no in range(1, 29):
            # Report which crop is next (print as a function call: valid on
            # both Python 2 and 3)
            crop_name, mgmt_code = select_crop(db_engine, crop_no)
            msg = "About to get TSUM values for " + crop_name + " (" + mgmt_code + ")"
            print(msg)

            # Now retrieve how to divide over TSUM1 and TSUM2
            cip = CropInfoProvider(crop_name, mgmt_code, run_settings.data_dir)
            cropdata = cip.getCropData()
            fract1, fract2 = split_tsum(cropdata, 1)
            rec = {"crop_no": crop_no,
                   "fract_tsum1": fract1,
                   "fract_tsum2": fract2}
            recs.append(rec)

        cropinfodict = {"crop_no": 1, "fract_tsum1": 0.4, "fract_tsum2": 0.6}
        store_to_database(db_engine, recs, db_metadata, cropinfodict)

    except Exception as e:
        print(str(e))
    finally:
        # Identity test (`is not None`) instead of the original `!= None`.
        if db_engine is not None:
            db_engine.dispose()
Exemple #30
0
def engine(request):
    """Yield an engine built from the --sqlalchemy-connect-url option,
    disposing of it after the fixture is torn down."""
    from sqlalchemy.engine import create_engine

    connect_url = request.config.getoption("--sqlalchemy-connect-url")
    engine = create_engine(connect_url)
    yield engine
    engine.dispose()
Exemple #31
0
def make_rest_app(
    db_filepath: str,
    network_middleware: RestMiddleware,
    federated_only: bool,
    treasure_map_tracker: dict,
    node_tracker: 'FleetStateTracker',
    node_bytes_caster: Callable,
    work_order_tracker: list,
    node_nickname: str,
    node_recorder: Callable,
    stamp: SignatureStamp,
    verifier: Callable,
    suspicious_activity_tracker: dict,
    serving_domains,
    log=Logger("http-application-layer")) -> Tuple:
    """Build the Ursula REST service.

    Creates the SQLite-backed keystore datastore and a Flask application
    exposing this node's REST endpoints: public information, node-metadata
    exchange, policy arrangements, kFrag storage/revocation, re-encryption,
    treasure-map storage/retrieval and an HTML status page.

    Returns
    -------
    (rest_app, datastore) : Tuple
        The Flask application and the KeyStore bound to it.
    """

    forgetful_node_storage = ForgetfulNodeStorage(
        federated_only=federated_only)

    from nucypher.keystore import keystore
    from nucypher.keystore.db import Base
    from sqlalchemy.engine import create_engine

    log.info("Starting datastore {}".format(db_filepath))

    # See: https://docs.sqlalchemy.org/en/rel_0_9/dialects/sqlite.html#connect-strings
    if db_filepath:
        db_uri = f'sqlite:///{db_filepath}'
    else:
        db_uri = 'sqlite://'  # TODO: Is this a sane default? See #667

    engine = create_engine(db_uri)

    Base.metadata.create_all(engine)
    datastore = keystore.KeyStore(engine)
    db_engine = engine

    from nucypher.characters.lawful import Alice, Ursula
    _alice_class = Alice
    _node_class = Ursula

    # BUG FIX: was `rest_app = rest_app = Flask(...)` (duplicated assignment).
    rest_app = Flask("ursula-service")

    @rest_app.route("/public_information")
    def public_information():
        """
        REST endpoint for public keys and address..
        """
        response = Response(response=node_bytes_caster(),
                            mimetype='application/octet-stream')

        return response

    @rest_app.route('/node_metadata', methods=["GET"])
    def all_known_nodes():
        """Serve a signed snapshot of all known nodes (204 when none)."""
        headers = {'Content-Type': 'application/octet-stream'}

        if node_tracker.checksum is NO_KNOWN_NODES:
            return Response(b"", headers=headers, status=204)

        payload = node_tracker.snapshot()

        ursulas_as_vbytes = (VariableLengthBytestring(n) for n in node_tracker)
        ursulas_as_bytes = bytes().join(bytes(u) for u in ursulas_as_vbytes)
        ursulas_as_bytes += VariableLengthBytestring(node_bytes_caster())

        payload += ursulas_as_bytes
        signature = stamp(payload)
        return Response(bytes(signature) + payload, headers=headers)

    @rest_app.route('/node_metadata', methods=["POST"])
    def node_metadata_exchange():
        # If these nodes already have the same fleet state, no exchange is necessary.

        learner_fleet_state = request.args.get('fleet')
        if learner_fleet_state == node_tracker.checksum:
            log.debug(
                "Learner already knew fleet state {}; doing nothing.".format(
                    learner_fleet_state))
            headers = {'Content-Type': 'application/octet-stream'}
            payload = node_tracker.snapshot() + bytes(FLEET_STATES_MATCH)
            signature = stamp(payload)
            return Response(bytes(signature) + payload, headers=headers)

        nodes = _node_class.batch_from_bytes(
            request.data, federated_only=federated_only)  # TODO: 466

        # TODO: This logic is basically repeated in learn_from_teacher_node and remember_node.
        # Let's find a better way.  #555
        for node in nodes:
            if GLOBAL_DOMAIN not in serving_domains:
                if not serving_domains.intersection(node.serving_domains):
                    continue  # This node is not serving any of our domains.

            if node in node_tracker:
                if node.timestamp <= node_tracker[
                        node.checksum_public_address].timestamp:
                    continue

            # BUG FIX: bind `node` as a default argument.  The previous
            # closure captured the loop variable late, so every deferred
            # task could end up verifying the *last* announced node.
            @crosstown_traffic()
            def learn_about_announced_nodes(node=node):

                try:
                    certificate_filepath = forgetful_node_storage.store_node_certificate(
                        certificate=node.certificate)

                    node.verify_node(
                        network_middleware,
                        accept_federated_only=federated_only,  # TODO: 466
                        certificate_filepath=certificate_filepath)

                # Suspicion
                except node.SuspiciousActivity:
                    # TODO: Include data about caller?
                    # TODO: Account for possibility that stamp, rather than interface, was bad.
                    # TODO: Maybe also record the bytes representation separately to disk?
                    message = "Suspicious Activity: Discovered node with bad signature: {}.  Announced via REST."
                    # BUG FIX: the placeholder was never filled in.
                    log.warn(message.format(node))
                    suspicious_activity_tracker['vladimirs'].append(node)

                # Async Sentinel
                except Exception as e:
                    log.critical(str(e))
                    raise

                # Believable
                else:
                    log.info(
                        "Learned about previously unknown node: {}".format(
                            node))
                    node_recorder(node)
                    # TODO: Record new fleet state

                # Cleanup
                finally:
                    forgetful_node_storage.forget()

        # TODO: What's the right status code here?  202?  Different if we already knew about the node?
        return all_known_nodes()

    @rest_app.route('/consider_arrangement', methods=['POST'])
    def consider_arrangement():
        """Record a proposed policy arrangement (acceptance logic TODO)."""
        from nucypher.policy.models import Arrangement
        arrangement = Arrangement.from_bytes(request.data)

        with ThreadedSession(db_engine) as session:
            new_policy_arrangement = datastore.add_policy_arrangement(
                arrangement.expiration.datetime(),
                id=arrangement.id.hex().encode(),
                alice_pubkey_sig=arrangement.alice.stamp,
                session=session,
            )
        # TODO: Make the rest of this logic actually work - do something here
        # to decide if this Arrangement is worth accepting.

        headers = {'Content-Type': 'application/octet-stream'}
        # TODO: Make this a legit response #234.
        return Response(
            b"This will eventually be an actual acceptance of the arrangement.",
            headers=headers)

    @rest_app.route("/kFrag/<id_as_hex>", methods=['POST'])
    def set_policy(id_as_hex):
        """
        REST endpoint for setting a kFrag.
        TODO: Instead of taking a Request, use the apistar typing system to type
            a payload and validate / split it.
        TODO: Validate that the kfrag being saved is pursuant to an approved
            Policy (see #121).
        """
        policy_message_kit = UmbralMessageKit.from_bytes(request.data)

        alices_verifying_key = policy_message_kit.sender_pubkey_sig
        alice = _alice_class.from_public_keys(
            {SigningPower: alices_verifying_key})

        try:
            cleartext = verifier(alice, policy_message_kit, decrypt=True)
        except InvalidSignature:
            # TODO: Perhaps we log this?
            # BUG FIX: Flask's Response takes `status`, not `status_code`
            # (a leftover from the apistar API).
            return Response(status=400)

        kfrag = KFrag.from_bytes(cleartext)

        if not kfrag.verify(signing_pubkey=alices_verifying_key):
            raise InvalidSignature("{} is invalid".format(kfrag))

        with ThreadedSession(db_engine) as session:
            datastore.attach_kfrag_to_saved_arrangement(alice,
                                                        id_as_hex,
                                                        kfrag,
                                                        session=session)

        # TODO: Sign the arrangement here.  #495
        return ""  # TODO: Return A 200, with whatever policy metadata.

    @rest_app.route('/kFrag/<id_as_hex>', methods=["DELETE"])
    def revoke_arrangement(id_as_hex):
        """
        REST endpoint for revoking/deleting a KFrag from a node.
        """
        from nucypher.policy.models import Revocation

        revocation = Revocation.from_bytes(request.data)
        log.info("Received revocation: {} -- for arrangement {}".format(
            bytes(revocation).hex(), id_as_hex))
        try:
            with ThreadedSession(db_engine) as session:
                # Verify the Notice was signed by Alice
                policy_arrangement = datastore.get_policy_arrangement(
                    id_as_hex.encode(), session=session)
                alice_pubkey = UmbralPublicKey.from_bytes(
                    policy_arrangement.alice_pubkey_sig.key_data)

                # Check that the request is the same for the provided revocation
                if id_as_hex != revocation.arrangement_id.hex():
                    log.debug(
                        "Couldn't identify an arrangement with id {}".format(
                            id_as_hex))
                    # BUG FIX: Flask's Response takes `status`, not `status_code`.
                    return Response(status=400)
                elif revocation.verify_signature(alice_pubkey):
                    datastore.del_policy_arrangement(id_as_hex.encode(),
                                                     session=session)
        except (NotFound, InvalidSignature) as e:
            log.debug("Exception attempting to revoke: {}".format(e))
            return Response(
                response='KFrag not found or revocation signature is invalid.',
                status=404)
        else:
            log.info("KFrag successfully removed.")
            return Response(response='KFrag deleted!', status=200)

    @rest_app.route('/kFrag/<id_as_hex>/reencrypt', methods=["POST"])
    def reencrypt_via_rest(id_as_hex):
        """Re-encrypt each capsule in a WorkOrder with the stored kFrag."""
        from nucypher.policy.models import WorkOrder  # Avoid circular import
        arrangement_id = binascii.unhexlify(id_as_hex)
        work_order = WorkOrder.from_rest_payload(arrangement_id, request.data)
        log.info("Work Order from {}, signed {}".format(
            work_order.bob, work_order.receipt_signature))
        with ThreadedSession(db_engine) as session:
            policy_arrangement = datastore.get_policy_arrangement(
                arrangement_id=id_as_hex.encode(), session=session)
        kfrag_bytes = policy_arrangement.kfrag  # Careful!  :-)
        verifying_key_bytes = policy_arrangement.alice_pubkey_sig.key_data

        # TODO: Push this to a lower level. Perhaps to Ursula character? #619
        kfrag = KFrag.from_bytes(kfrag_bytes)
        alices_verifying_key = UmbralPublicKey.from_bytes(verifying_key_bytes)
        cfrag_byte_stream = b""

        alices_address = canonical_address_from_umbral_key(
            alices_verifying_key)
        if not alices_address == work_order.alice_address:
            message = f"This Bob ({work_order.bob}) sent an Alice's ETH address " \
                      f"({work_order.alice_address}) that doesn't match " \
                      f"the one I have ({alices_address})."
            raise SuspiciousActivity(message)

        bob_pubkey = work_order.bob.stamp.as_umbral_pubkey()
        if not work_order.alice_address_signature.verify(
                message=alices_address, verifying_key=bob_pubkey):
            message = f"This Bob ({work_order.bob}) sent an invalid signature of Alice's ETH address"
            raise InvalidSignature(message)

        # This is Bob's signature of Alice's verifying key as ETH address.
        alice_address_signature = bytes(work_order.alice_address_signature)

        for capsule, capsule_signature in zip(work_order.capsules,
                                              work_order.capsule_signatures):
            # This is the capsule signed by Bob
            capsule_signature = bytes(capsule_signature)
            # Ursula signs on top of it. Now both are committed to the same capsule.
            # She signs Alice's address too.
            ursula_signature = stamp(capsule_signature +
                                     alice_address_signature)
            capsule.set_correctness_keys(verifying=alices_verifying_key)
            cfrag = pre.reencrypt(kfrag,
                                  capsule,
                                  metadata=bytes(ursula_signature))
            log.info(f"Re-encrypting for {capsule}, made {cfrag}.")
            signature = stamp(bytes(cfrag) + bytes(capsule))
            cfrag_byte_stream += VariableLengthBytestring(cfrag) + signature

        # TODO: Put this in Ursula's datastore
        work_order_tracker.append(work_order)

        headers = {'Content-Type': 'application/octet-stream'}

        return Response(response=cfrag_byte_stream, headers=headers)

    @rest_app.route('/treasure_map/<treasure_map_id>')
    def provide_treasure_map(treasure_map_id):
        """Serve a stored TreasureMap by its hex id (404 if unknown)."""
        headers = {'Content-Type': 'application/octet-stream'}

        treasure_map_bytes = keccak_digest(binascii.unhexlify(treasure_map_id))

        try:

            treasure_map = treasure_map_tracker[treasure_map_bytes]
            response = Response(bytes(treasure_map), headers=headers)
            log.info("{} providing TreasureMap {}".format(
                node_nickname, treasure_map_id))

        except KeyError:
            log.info("{} doesn't have requested TreasureMap {}".format(
                stamp, treasure_map_id))
            response = Response(
                "No Treasure Map with ID {}".format(treasure_map_id),
                status=404,
                headers=headers)

        return response

    @rest_app.route('/treasure_map/<treasure_map_id>', methods=['POST'])
    def receive_treasure_map(treasure_map_id):
        """Verify and store an uploaded TreasureMap under its keccak digest."""
        from nucypher.policy.models import TreasureMap

        try:
            treasure_map = TreasureMap.from_bytes(
                bytes_representation=request.data, verify=True)
        except TreasureMap.InvalidSignature:
            do_store = False
        else:
            do_store = treasure_map.public_id() == treasure_map_id

        if do_store:
            log.info("{} storing TreasureMap {}".format(
                stamp, treasure_map_id))

            # # # #
            # TODO: Now that the DHT is retired, let's do this another way.
            # self.dht_server.set_now(binascii.unhexlify(treasure_map_id),
            #                         constants.BYTESTRING_IS_TREASURE_MAP + bytes(treasure_map))
            # # # #

            # TODO 341 - what if we already have this TreasureMap?
            treasure_map_tracker[keccak_digest(
                binascii.unhexlify(treasure_map_id))] = treasure_map
            return Response(bytes(treasure_map), status=202)
        else:
            # TODO: Make this a proper 500 or whatever.
            log.info(
                "Bad TreasureMap ID; not storing {}".format(treasure_map_id))
            assert False

    @rest_app.route('/status')
    def status():
        # TODO: Seems very strange to deserialize *this node* when we can just pass it in.
        #       Might be a sign that we need to rethnk this composition.

        headers = {"Content-Type": "text/html", "charset": "utf-8"}
        this_node = _node_class.from_bytes(node_bytes_caster(),
                                           federated_only=federated_only)

        previous_states = list(reversed(node_tracker.states.values()))[:5]

        try:
            content = status_template.render(this_node=this_node,
                                             known_nodes=node_tracker,
                                             previous_states=previous_states)
        except Exception as e:
            # BUG FIX: format string had no placeholder, so the exception
            # text was silently dropped from the log message.
            log.debug("Template Rendering Exception: {}".format(str(e)))
            raise TemplateError(str(e)) from e

        return Response(response=content, headers=headers)

    return rest_app, datastore
        return b


class A(Base, Mixin):
    # Declarative model backed by table "As"; extra behaviour comes from Mixin.
    __tablename__ = "As"
    a_id = Column("id", Integer, primary_key=True)  # maps to DB column "id"

    def __init__(self, b=None):
        # Delegate initialisation to Mixin — presumably it wires up the `b`
        # relationship used by the demo below (TODO: confirm against Mixin).
        Mixin.__init__(self, b)


class B(Base):
    # Minimal declarative model backed by table "Bs".
    __tablename__ = "Bs"
    b_id = Column("id", Integer, primary_key=True)  # maps to DB column "id"


# In-memory SQLite database: everything below lives only for this process.
engine = create_engine("sqlite:///:memory:")
Session = sessionmaker(bind=engine)
session = Session()
Base.metadata.create_all(engine)

# Persist one B so a flushed instance exists in the database.
b = B()
session.add(b)
session.commit()

#print session.query(B).first()

# A() is constructed without an explicit `b`; the assertion relies on the
# Mixin locating the already-persisted B — TODO confirm Mixin's semantics.
a = A()
assert a.b is b
Exemple #33
0
'''
Created on 2019-06-02

@author: F-Monkey
'''
from sqlalchemy.engine import create_engine
from spider.io import settings
from sqlalchemy.orm.session import sessionmaker
from sqlalchemy.ext.declarative.api import declarative_base
import os

#!/usr/bin/python3
'''
    https://www.cnblogs.com/gispathfinder/p/5787313.html
    echo : log sql
'''
# Module-level engine shared by the spider's ORM models.
# echo=True logs every emitted SQL statement.
engine = create_engine(settings.DB_URL, max_overflow=settings.DB_MAX_OVERFLOW, echo=True)

Session = sessionmaker(bind=engine)

session = Session()

# pojos parent
Base = declarative_base()

if __name__ == '__main__':
    # Quick sanity check: show where the settings module was loaded from.
    print(os.path.dirname(settings.__file__))
Exemple #34
0
def metadata():
    """Return a MetaData instance bound to an engine for the module URL."""
    md = MetaData()
    md.bind = create_engine(URL)
    return md
Exemple #35
0
from sqlalchemy.engine import create_engine
from sqlalchemy.orm.session import sessionmaker

from drivr import core

# check_same_thread=False lets the (SQLite) connection be shared across
# threads — NOTE(review): this flag only makes sense for SQLite URIs.
engine = create_engine(
    core.settings.SQLALCHEMY_DATABASE_URI,
    connect_args={"check_same_thread": False},
)

# Session factory with explicit transaction control (no autocommit/autoflush).
SessionLocal = sessionmaker(
    bind=engine,
    autocommit=False,
    autoflush=False,
)
    def get_connection_to_athena(
        self,
        DbName: str,
        region_name: Optional[str] = None,
        S3QueryResultsLocation: Optional[str] = None,
    ) -> Dict[str, Union[str, sa.engine.Engine]]:
        """
        Connect Athena to an existing database

        Parameters
        ----------
        DbName : str
            Name of the glue database name.

        region_name : str, optional
            The region to connect to athena. The default region will be used if receives None.

        S3QueryResultsLocation : str, optional
            The s3 bucket where to store query results. The results will not be saved if received None.


        Returns
        -------
        result : dict
            Dictionary with two keys:
            ``"db_url"`` - a sql alchemy connection string, and
            ``"engine"`` - a ``sqlalchemy.engine.Engine``.

        Example
        --------
        >>> from aws.utils.notebooks.database import AthenaUtils
        >>> from sqlalchemy.engine import create_engine
        >>> from aws.utils.notebooks.common import get_workspace
        >>> result = AthenaUtils().get_connection_to_athena(
        ...     DbName = glue_db,
        ...     region_name = my_region,
        ...     S3QueryResultsLocation = results_location)
        >>> db_url, engine = result["db_url"], result["engine"]
        """

        workspace = get_workspace()
        # PEP 8: compare to None with `is`, not `==`.
        if region_name is None:
            region_name = workspace["region"]

        if S3QueryResultsLocation is None:
            S3QueryResultsLocation = f"{workspace['ScratchBucket']}/athena"

        # awsathena connection string; the staging dir must be URL-encoded.
        template_con_str = (
            "awsathena+rest://athena.{region_name}.amazonaws.com:443/"
            "{schema_name}?s3_staging_dir={s3_staging_dir}")
        conn_str = template_con_str.format(
            region_name=region_name,
            schema_name=DbName,
            s3_staging_dir=quote_plus(S3QueryResultsLocation),
        )

        engine = create_engine(conn_str)
        # Remember the active connection on the instance for later reuse.
        self.db_url = conn_str
        self.current_engine = engine
        self.db_class = "athena"
        return {
            "db_url": self.db_url,
            "engine": self.current_engine,
        }
    def get_connection_to_redshift(
        self,
        clusterIdentifier: str,
        DbName: str,
        DbUser: str,
        lambdaName: Optional[str] = None,
    ) -> Dict[str, Union[str, sa.engine.Engine]]:
        """
        Connect to an existing cluster or create a new cluster if it does not exists.

        Note
        ----
        To connect to existing cluster:
            **Cluster networking is configured correctly (has access to the Orbit Workbench VPC).
            **Cluster ESG was created, assigned to the cluster.
            **The ESG should have an ingress rule to allow inbound tcp on the redshift port from the Orbit Workbench
                instance SG.
            **A new teamspace should be launched from the Service Catalog and the ESG should be passed as a parameter
                to allow external traffic.

        Parameters
        ----------
        clusterIdentifier : str
            A unique name of the Redshift cluster, can be a new/existing cluster as well. If used with existing cluster,
            lambdaName should be given as well.
        DbName : str
            The Redshift DB name to connect to.
        DbUser : str
            The Redshift User name to connect with.
        lambdaName : str, optional
            For new cluster/mandatory for existing cluster connection, the lambda name which is responsible to get the
            cluster credentials.


        Returns
        -------
        result : dict
            Dictionary with ``"db_url"`` (a sql alchemy connection string),
            ``"engine"`` (a ``sqlalchemy.engine.Engine``) and
            ``"redshift_role"`` (the redshift role ARN usable for Glue
            catalog access, or None).

        (e.g. {'db_url': 'redshift+psycopg2://..', 'engine': engine,
        'redshift_role': 'arn:aws:iam::{accountid}:role/...'})

        Example
        --------
        >>> from aws.utils.notebooks.database import get_redshift
        >>> import json, urllib.parse
        >>> from sqlalchemy.engine import create_engine
        >>> response = get_redshift().get_connection_to_redshift(
        ...     clusterIdentifier = "my_cluster",
        ...     DbName = "my_database",
        ...     DBUser = "******",
        ...     lambdaName = None
        ...     )

        """

        redshift = boto3.client("redshift")

        if lambdaName:
            # Trying to get user and temp password from cluster
            # BUG FIX: `data` was referenced in the except clause before it
            # was necessarily assigned (the invoke() call itself can raise),
            # which turned the real error into a NameError.
            data = None
            try:
                lambda_client = boto3.client("lambda")
                response = lambda_client.invoke(
                    InvocationType="RequestResponse",
                    FunctionName=lambdaName,
                    Payload=json.dumps({
                        "DbUser": DbUser,
                        "DbName": DbName,
                        "clusterIdentifier": clusterIdentifier,
                    }),
                )

                # Parsing into JSON format
                data = json.loads(response["Payload"].read().decode("utf-8"))
                DbPassword = data["DbPassword"]
                DbUser = data["DbUser"]
            except Exception:
                logger.error(
                    f"There was an error getting the cluster details: {data}")
                # Bare raise preserves the original traceback.
                raise
        else:
            # No credentials lambda: ask Redshift for temporary credentials.
            response = redshift.get_cluster_credentials(
                DbUser=DbUser,
                DbName=DbName,
                AutoCreate=True,
                ClusterIdentifier=clusterIdentifier,
            )
            # URL-quote so special characters survive in the connection URL.
            DbPassword = urllib.parse.quote(response["DbPassword"])
            DbUser = urllib.parse.quote(response["DbUser"])

        # Get the rest of the cluster properties
        clusterInfo = redshift.describe_clusters(
            ClusterIdentifier=clusterIdentifier)

        hostName = clusterInfo["Clusters"][0]["Endpoint"]["Address"]
        port = clusterInfo["Clusters"][0]["Endpoint"]["Port"]

        self.db_url = "redshift+psycopg2://{}:{}@{}:{}/{}".format(
            DbUser, DbPassword, hostName, port, DbName)
        if clusterInfo["Clusters"][0]["IamRoles"]:
            self.redshift_role = clusterInfo["Clusters"][0]["IamRoles"][0][
                "IamRoleArn"]
        else:
            self.redshift_role = None
        self.current_engine = create_engine(self.db_url)
        self.db_class = "redshift"
        return {
            "db_url": self.db_url,
            "engine": self.current_engine,
            "redshift_role": self.redshift_role,
        }
Exemple #38
0
#!/usr/bin/env python
from sqlalchemy.engine import create_engine
from sqlalchemy.schema import Column, ForeignKey
from sqlalchemy.types import Integer, String
from sqlalchemy.orm import scoped_session, sessionmaker, backref, relationship
from sqlalchemy.ext.declarative import declarative_base

# In-memory database; echo=True logs all emitted SQL to stdout.
engine = create_engine('sqlite:///:memory:', echo=True)
# scoped_session provides a thread-local session registry; autoflush is off,
# so pending objects are only written on explicit flush/commit.
session = scoped_session(sessionmaker(bind=engine, autoflush=False))

Base = declarative_base()

class Author(Base):
    # An author row: surrogate integer key plus a display name.
    __tablename__ = 'author'
    id = Column(Integer, primary_key=True)
    name = Column(String(128))

class Book(Base):
    # A book row linked to its author; the backref gives Author a lazy
    # 'books' query instead of a loaded collection.
    __tablename__ = 'book'
    id = Column(Integer, primary_key=True)
    title = Column(String(128))
    author_id = Column(Integer, ForeignKey(Author.id), nullable=False)
    author = relationship(Author, backref=backref('books', lazy='dynamic'))

Base.metadata.create_all(engine)

# Demo data: adding the book cascades the pending author through the
# relationship; expunge_all() then detaches everything from the session.
author = Author(name="Frank Herbert")
book = Book(title="Dune", author=author)
session.add_all([author, book])
session.commit()
session.expunge_all()
Exemple #39
0
def _get_sqlite_engine(filepath):
    """Build an engine for the SQLite file at *filepath* and verify that a
    connection can be established before returning it."""
    url = "sqlite:///%s" % filepath
    eng = create_engine(url)
    eng.connect()
    return eng
Exemple #40
0
    os.path.abspath(os.path.join(base_path, '../../../../boinc/py')))

import datetime
from utils.logging_helper import config_logger
from archive.processed_galaxy_mod import sort_data, finish_processing
from sqlalchemy.engine import create_engine
from sqlalchemy.sql.expression import select
from config import BOINC_DB_LOGIN, DB_LOGIN, PROCESSED, COMPUTING
from database.boinc_database_support_core import RESULT
from database.database_support_core import GALAXY

LOG = config_logger(__name__)
LOG.info('PYTHONPATH = {0}'.format(sys.path))

# Get the work units still being processed
# (server_state != 5 presumably filters out completed BOINC results —
# TODO confirm against the BOINC schema.)
ENGINE = create_engine(BOINC_DB_LOGIN)
connection = ENGINE.connect()
current_jobs = []
for result in connection.execute(
        select([RESULT]).where(RESULT.c.server_state != 5)):
    current_jobs.append(result[RESULT.c.name])
connection.close()

# Connect to the database - the login string is set in the database package
ENGINE = create_engine(DB_LOGIN)
connection = ENGINE.connect()

# NOTE: dict.iterkeys() is Python 2 only.
sorted_data = sort_data(connection, current_jobs)
for key in sorted(sorted_data.iterkeys()):
    LOG.info('{0}: {1} results'.format(key, len(sorted_data[key])))
#################################################
# Flask Setup
#################################################
app = Flask(__name__)

#################################################
# Database Setup
#################################################

# create route that renders index.html template
from google.cloud import bigquery

# Point the Google SDK at the service-account key shipped with the app.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "static/js/BigQueryCreds.json"

client = bigquery.Client()
# SQLAlchemy engine over the public BigQuery dataset.
engine = create_engine('bigquery://project-1-257523/bigquery-public-data',
                       credentials_path='static/js/BigQueryCreds.json')

# Date window used by the queries below (YYYYMMDD strings).
StartDate = '20170701'
EndDate = '20170703'


# Query the database and send the jsonified results
@app.route("/")
def send():
    filtered_query = bigquery.Client().query("""
        SELECT 
        date,
        SUM ( totals.transactions ) AS total_transactions
        FROM
        `bigquery-public-data.google_analytics_sample.ga_sessions_*`
        WHERE
 def create_engine(self):
     # Build a PyHive/SQLAlchemy engine for a local HiveServer2 instance.
     return create_engine('hive://localhost:10000/default')
Exemple #43
0
from __future__ import print_function
from sqlalchemy.engine import create_engine

from mariadb_connect import db_conn_str

# `echo=True` => log all SQL to stdout
engine = create_engine(db_conn_str('tester'), echo=True)
connection = engine.connect()

connection.execute(
    """
    CREATE TABLE users (
    username VARCHAR(32) PRIMARY KEY,
    password VARCHAR(128) NOT NULL
    );
    """
)

# SECURITY FIX: pass the values as driver-level bind parameters instead of
# interpolating them into the SQL string — the old %-formatting with manual
# quoting was injectable.
connection.execute(
    "INSERT INTO users (username, password) VALUES (%s, %s);",
    ("foo", "bar")
)

# `result` is a `ResultProxy`
result = connection.execute("SELECT username FROM users")
for row in result:
    print("username:", row['username'])

connection.close()
def engine_using_test_dataset(test_service_account_key):
    """Build a BigQuery engine pointed at the test_pybigquery dataset."""
    url = 'bigquery://{}/test_pybigquery'.format(TEST_PROJECT)
    return create_engine(
        url,
        echo=True,
        credentials_path=test_service_account_key)
def engine_with_location(test_service_account_key):
    """Build a BigQuery engine with an explicit dataset location."""
    url = 'bigquery://{}'.format(TEST_PROJECT)
    eng = create_engine(url,
                        echo=True,
                        location="asia-northeast1",
                        credentials_path=test_service_account_key)
    return eng
Exemple #46
0
from sqlalchemy import func
from sqlalchemy.engine import create_engine
from sqlalchemy.orm import sessionmaker

from M2M import Girl

engine = create_engine(
    'mysql+pymysql://root:[email protected]:3306/sqlalchemy?charset=utf8')

# Session factory invoked immediately: one module-level session.
db_session = sessionmaker(engine)()

# g1 = Girl(name='diane', gtb=[Boy(name='jack'), Boy(name='jack2')])
# g2 = Girl(name='angle', gtb=[Boy(name='tom'), Boy(name='tom2')])
# db_session.add_all([g1, g2])
# db_session.commit()
# db_session.close()

# res = db_session.query(Girl).filter(Girl.id > 1).filter(Girl.id == 3).first()
# res = db_session.query(Girl).filter_by(name='echo').all()
# print(res[0].name)

# res = db_session.query(Girl).filter(Girl.name != 'echo').all()
# res = db_session.query(Girl).filter(Girl.name == 'echo').all()
# print([i.name for i in res])

# Base query object reused by the (commented-out) examples below.
query = db_session.query(Girl)

# res = query.filter(Girl.name != 'echo').all()
# print([i.name for i in res])

# # Fuzzy matching
Exemple #47
0
from sqlalchemy.orm import sessionmaker

# Connection settings (credentials masked in this dump).
HOST = '127.0.0.1'
DB = 'flask_sqlalchemy01'
USER = '******'
PASSWORD = '******'
PORT = 3306

app = Flask(__name__)
app.config.update({'DEBUG': True, 'TEMPLATES_AUTO_RELOAD': True})

# mysql+pymysql://root:[email protected]:3306/flask_sqlalchemy01?charset=utf8
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://{user}:{password}@{host}:{port}/{db}?charset=utf8'\
                            .format(user=USER, password=PASSWORD, host=HOST, port=PORT, db=DB)

# Module-level engine, session and declarative base shared by the models.
engine = create_engine(SQLALCHEMY_DATABASE_URI)
session = sessionmaker(engine)()
Base = declarative_base(engine)


@app.route('/')
def index():
    # Root endpoint: returns a plain-text greeting.
    return 'hello world'


class User(Base):
    # Simple user model with a soft-delete flag.
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(64), nullable=False)
    age = Column(Integer)
    is_delete = Column(Boolean, default=False)  # soft-delete marker
Exemple #48
0
'''
New Connect database
'''

# Timestamp captured at import time, e.g. "2019-06-02 12:34:56"
# (%X is the locale's time representation).
NOW = time.strftime('%Y-%m-%d %X', time.localtime())

def tostring(dt):
    """Render datetimes as 'YYYY-MM-DD <locale time>'; return any other
    value unchanged."""
    if not isinstance(dt, datetime.datetime):
        return dt
    return dt.strftime('%Y-%m-%d %X')


# Connection string for the local gift database (credentials masked in dump).
_con_str = "mysql://*****:*****@localhost:3306/mygift?charset=utf8"
dbObj = create_engine(_con_str, echo=False)

metadata = MetaData(dbObj)

#Base = declarative_base()
# Reflect the existing tables from the database instead of declaring models.
product_tb = Table('product', metadata, autoload=True)
product_price_tb = Table('product_price', metadata, autoload=True)


class ProductPriceObj(object):
    # Plain mapper target for rows of the product_price table.
    def _to_dict(self):
        # Serialise selected mapped attributes to a dict; datetimes go
        # through tostring().  NOTE(review): the method body appears
        # truncated in this dump (no return statement is visible).
        _dict = {}
        key_list = ['update_time', 'price']
        for key in key_list:
            if key in self.__dict__.keys():
                _dict[key] = tostring(self.__dict__[key])
Exemple #49
0
def main():
    """CLI entry point: reflect an existing database and emit SQLAlchemy model code.

    Parses command-line options, reflects the schema from the given URL, and
    renders generated model code to ``--outfile`` (or stdout).
    """
    parser = argparse.ArgumentParser(
        description='Generates SQLAlchemy model code from an existing database.'
    )
    parser.add_argument('url',
                        nargs='?',
                        help='SQLAlchemy url to the database')
    parser.add_argument('--version',
                        action='store_true',
                        help="print the version number and exit")
    parser.add_argument('--schema',
                        help='load tables from an alternate schema')
    parser.add_argument(
        '--tables', help='tables to process (comma-separated, default: all)')
    parser.add_argument('--noviews', action='store_true', help="ignore views")
    parser.add_argument('--noindexes',
                        action='store_true',
                        help='ignore indexes')
    parser.add_argument('--noconstraints',
                        action='store_true',
                        help='ignore constraints')
    parser.add_argument('--nojoined',
                        action='store_true',
                        help="don't autodetect joined table inheritance")
    parser.add_argument(
        '--noinflect',
        action='store_true',
        help="don't try to convert tables names to singular form")
    parser.add_argument('--noclasses',
                        action='store_true',
                        help="don't generate classes, only tables")
    parser.add_argument('--outfile',
                        help='file to write output to (default: stdout)')
    parser.add_argument('--audited',
                        help='comma separated list of audited table names')
    parser.add_argument('--auditall',
                        action='store_true',
                        help='audit all tables')
    args = parser.parse_args()

    if args.version:
        version = pkg_resources.get_distribution('sqlacodegen').parsed_version
        print(version.public)
        return
    if not args.url:
        print('You must supply a url\n', file=sys.stderr)
        parser.print_help()
        return

    engine = create_engine(args.url)
    metadata = MetaData(engine)
    tables = args.tables.split(',') if args.tables else None
    metadata.reflect(engine, args.schema, not args.noviews, tables)

    # Audit configuration: --auditall wins; otherwise honor the --audited list.
    # (Collapses the previous triplicated CodeGenerator construction.)
    audit_kwargs = {}
    if args.auditall:
        audit_kwargs['audit_all'] = args.auditall
    elif args.audited:
        audit_kwargs['audited'] = set(args.audited.split(','))
    generator = CodeGenerator(metadata, args.noindexes, args.noconstraints,
                              args.nojoined, args.noinflect, args.noclasses,
                              **audit_kwargs)

    outfile = codecs.open(args.outfile, 'w',
                          encoding='utf-8') if args.outfile else sys.stdout
    try:
        generator.render(outfile)
    finally:
        # Fix: the output file was previously never closed (resource leak);
        # stdout must not be closed, so only close real files.
        if outfile is not sys.stdout:
            outfile.close()
def engine(test_service_account_key):
    """Return a BigQuery engine for TEST_PROJECT using the given credentials file."""
    project_url = 'bigquery://{}'.format(TEST_PROJECT)
    return create_engine(project_url,
                         echo=True,
                         credentials_path=test_service_account_key)
Exemple #51
0
def _new_engine(url):
    """Build a SQLAlchemy engine for *url*, echoing SQL when config.SQL_DEBUG is set."""
    echo_sql = config.SQL_DEBUG
    return create_engine(url, echo=echo_sql)
Exemple #52
0
def make_session(connection_string):
    """Create an engine from *connection_string* and return a (session, engine) pair."""
    db_engine = create_engine(connection_string, echo=False,
                              convert_unicode=True)
    session_factory = sessionmaker(bind=db_engine)
    return session_factory(), db_engine
Exemple #53
0
def create_database(url, encoding='utf8', template=None):
    """Issue the appropriate CREATE DATABASE statement.

    :param url: A SQLAlchemy engine URL.
    :param encoding: The encoding to create the database as.
    :param template:
        The name of the template from which to create the new database. At the
        moment only supported by PostgreSQL driver.

    To create a database, you can pass a simple URL that would have
    been passed to ``create_engine``. ::

        create_database('postgres://postgres@localhost/name')

    You may also pass the url from an existing engine. ::

        create_database(engine.url)

    Has full support for mysql, postgres, and sqlite. In theory,
    other database engines should be supported.
    """

    url = copy(make_url(url))

    database = url.database

    # Point the URL at a maintenance database: the target database does not
    # exist yet, so we must connect elsewhere to issue CREATE DATABASE.
    if url.drivername.startswith('postgresql'):
        url.database = 'template1'
    elif not url.drivername.startswith('sqlite'):
        url.database = None

    engine = create_engine(url)

    try:
        if engine.dialect.name == 'postgresql':
            if engine.driver == 'psycopg2':
                # CREATE DATABASE cannot run inside a transaction, so switch
                # the raw psycopg2 connection to autocommit first.
                from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
                engine.raw_connection().set_isolation_level(
                    ISOLATION_LEVEL_AUTOCOMMIT
                )

            if not template:
                template = 'template0'

            text = "CREATE DATABASE {0} ENCODING '{1}' TEMPLATE {2}".format(
                quote(engine, database),
                encoding,
                quote(engine, template)
            )
            engine.execute(text)

        elif engine.dialect.name == 'mysql':
            text = "CREATE DATABASE {0} CHARACTER SET = '{1}'".format(
                quote(engine, database),
                encoding
            )
            engine.execute(text)

        elif engine.dialect.name == 'sqlite' and database != ':memory:':
            # SQLite "creates" a database by simply touching the file.
            open(database, 'w').close()

        else:
            text = 'CREATE DATABASE {0}'.format(quote(engine, database))
            engine.execute(text)
    finally:
        # Fix: dispose of the temporary engine so the maintenance connection
        # and its pool are released (previously leaked on every call).
        engine.dispose()
Exemple #54
0
from sqlalchemy import schema, types
from sqlalchemy.engine import create_engine
from mariadb_connect import db_conn_str

metadata = schema.MetaData()

# Schema for the 'page' table: integer PK plus name/title/content columns
# with client-side Python defaults.
page_table = schema.Table(
    'page', metadata,
    schema.Column('id', types.Integer, primary_key=True),
    schema.Column('name', types.Unicode(255), default=u''),
    schema.Column('title', types.Unicode(255), default=u'Untitled Page'),
    schema.Column('content', types.Text(), default=u'')
)

# NOTE: Python 2 print statements — this snippet predates Python 3.
for t in metadata.sorted_tables:
    print "Table name: ", t.name
    print "t is page_table: ", t is page_table

for column in page_table.columns:
    print "Column Table name: ", column.type

# Bind the metadata to a live engine and create any missing tables.
engine = create_engine(db_conn_str('tester'))
metadata.bind = engine

metadata.create_all(checkfirst=True)
Exemple #55
0
def engine_using_test_dataset():
    """Return a BigQuery engine bound to the test_pybigquery dataset, with SQL echo on."""
    dataset_engine = create_engine('bigquery:///test_pybigquery', echo=True)
    return dataset_engine
Exemple #56
0
    def __init__(
        self,
        db_name,
        db_filepath,
        network_middleware,
        federated_only,
        treasure_map_tracker,
        node_tracker,
        node_bytes_caster,
        work_order_tracker,
        node_recorder,
        stamp,
        verifier,
        suspicious_activity_tracker,
        certificate_dir,
    ) -> None:
        """Wire up collaborators, REST routes, and a SQLite-backed datastore.

        NOTE(review): the collaborator objects (trackers, caster, recorder,
        stamp, verifier) are opaque from this block — confirm their contracts
        against their own definitions before relying on these names.
        """

        self.network_middleware = network_middleware
        self.federated_only = federated_only

        # Collaborators are stored on private attributes; handlers below use them.
        self._treasure_map_tracker = treasure_map_tracker
        self._work_order_tracker = work_order_tracker
        self._node_tracker = node_tracker
        self._node_bytes_caster = node_bytes_caster
        self._node_recorder = node_recorder
        self._stamp = stamp
        self._verifier = verifier
        self._suspicious_activity_tracker = suspicious_activity_tracker
        self._certificate_dir = certificate_dir
        self.datastore = None  # replaced below once the engine exists

        # REST routing table: path template, HTTP method, bound handler method.
        routes = [
            Route('/kFrag/{id_as_hex}', 'POST', self.set_policy),
            Route('/kFrag/{id_as_hex}/reencrypt', 'POST',
                  self.reencrypt_via_rest),
            Route('/public_information', 'GET', self.public_information),
            Route('/node_metadata', 'GET', self.all_known_nodes),
            Route('/node_metadata', 'POST', self.node_metadata_exchange),
            Route('/consider_arrangement', 'POST', self.consider_arrangement),
            Route('/treasure_map/{treasure_map_id}', 'GET',
                  self.provide_treasure_map),
            Route('/status', 'GET', self.status),
            Route('/treasure_map/{treasure_map_id}', 'POST',
                  self.receive_treasure_map),
        ]

        self.rest_app = App(routes=routes)
        self.db_name = db_name
        self.db_filepath = db_filepath

        # Imports are deferred to construction time, presumably to avoid an
        # import cycle — TODO confirm against module layout.
        from nucypher.keystore import keystore
        from nucypher.keystore.db import Base
        from sqlalchemy.engine import create_engine

        # Create the SQLite schema at db_filepath and wrap it in a KeyStore.
        self.log.info("Starting datastore {}".format(self.db_filepath))
        engine = create_engine('sqlite:///{}'.format(self.db_filepath))
        Base.metadata.create_all(engine)
        self.datastore = keystore.KeyStore(engine)
        self.db_engine = engine

        # Deferred import of character classes (also likely cycle avoidance).
        from nucypher.characters.lawful import Alice, Ursula
        self._alice_class = Alice
        self._node_class = Ursula

        # Load the status-page Jinja template once, at construction time.
        with open(os.path.join(TEMPLATES_DIR, "basic_status.j2"), "r") as f:
            _status_template_content = f.read()
        self._status_template = Template(_status_template_content)
Exemple #57
0
def engine_with_location():
    """Return a BigQuery engine pinned to the asia-northeast1 location, with SQL echo on."""
    located_engine = create_engine('bigquery://',
                                   location="asia-northeast1",
                                   echo=True)
    return located_engine
Exemple #58
0
def test_datastore():
    """Yield a Datastore backed by a fresh in-memory SQLite schema."""
    memory_engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(memory_engine)
    yield datastore.Datastore(memory_engine)
Exemple #59
0
def engine():
    """Return a default BigQuery engine with SQL echo enabled."""
    default_engine = create_engine('bigquery://', echo=True)
    return default_engine
 def __init__(self, engine_args_list, engine_kwargs_dict):
     """Build the wrapped engine by forwarding the given positional and keyword arguments to create_engine."""
     self._engine = create_engine(*engine_args_list, **engine_kwargs_dict)