Code example #1
File: db.py Project: sunghwanJo/ASAP
 def init_test_database(self):
     '''
     Creates an in-memory Database for TEST. And a Session too, of course.
     '''
     engine = db.create_engine('sqlite://', convert_unicode=True, encoding='utf-8', echo=False)
     self.session = sessionmaker(bind=engine, autoflush=True, autocommit=False)
     Base.metadata.create_all(engine)
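Note that sessionmaker() returns a factory, so self.session above holds a factory rather than a live session. A minimal usage sketch (the DB owner class and the User model are assumptions for illustration):

    db = DB()                        # hypothetical owner of init_test_database()
    db.init_test_database()
    session = db.session()           # call the factory to obtain an actual Session
    session.add(User(name=u'test'))  # User is an assumed Base-mapped model
    session.commit()
    session.close()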
Code example #2
File: cli_test.py Project: Snoin/seektam-web
def test_loader_inserts_food_element(  # noqa
    clirunner, monkeypatch, tmpdir, listfile):
    # Mocking for Nurungji-only food list
    monkeypatch.setattr(
        koreafood.requests, 'get',
        lambda _: MockHTTPResponse(listfile.read()))
    monkeypatch.setattr(
        koreafood.requests, 'Session',
        lambda: MockHTTPSession(post=_mock_foodlist_analysis_page))

    food = next(koreafood.get_food_list())
    monkeypatch.setattr(
        koreafood, 'get_food_list', lambda: [food])

    # Trigger SUT command
    url = 'sqlite:///'+tmpdir.join('new.db').strpath
    res = clirunner.invoke(loader, [url])

    # Test result
    assert res.exit_code == 0
    session = sessionmaker(create_engine(url))()

    mfood = session.query(koreafood_model.Food).one()
    assert mfood.name == food.name
    assert mfood.category_big == food.category_big
    assert mfood.category_small == food.category_small
    assert len(mfood.aliments) == 1
Code example #3
 def setUp(self):
     print "setup.."
     #engine = create_engine('sqlite:///data/test.db', echo=True)
     engine = create_engine('sqlite://', echo=True)
     Session = sessionmaker(bind=engine)
     self.session = Session()
     ImageModel.metadata.create_all(engine)
Code example #4
File: database_session.py Project: depsi/timefly
def session_cm():
    session = sessionmaker(bind=db_engine)()
    try:
        yield session
    except Exception:
        # logger.warn(traceback.format_exc())
        raise
Code example #5
 def create_session(self, engine=None, shard=False):
     class_ = QKShardSession if shard else QKSession
     factory = sessionmaker(
         db=self, bind=engine, class_=class_,
         autocommit=False, autoflush=False
     )
     return _scoped_session(factory, scopefunc=self.scopefunc)
Code example #6
File: data_option.py Project: zhoulunhao/spider
def Session_Adapter_Add(data_information):
    Session = sessionmaker()
    Session.configure(bind=engine)
    session = Session()
    session.add(data_information)
    session.flush()
    session.commit()
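Two points worth noting here: commit() performs its own flush, so the explicit flush() is redundant unless generated IDs are needed before the commit, and rebuilding the factory on every call is unnecessary. A sketch of the more conventional arrangement, assuming the same module-level engine:

    Session = sessionmaker(bind=engine)  # build the factory once, reuse everywhere

    def session_adapter_add(data_information):
        session = Session()
        try:
            session.add(data_information)
            session.commit()  # commit flushes pending changes itself
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()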
Code example #7
File: migrate.py Project: 32x32/fufufuu
    def migrate_tanks(self, old_tank_list):
        tank_list = Tag.objects.filter(tag_type=TagType.TANK)
        tank_title_map = dict([(t.name, t) for t in tank_list])

        new_id = Tag.objects.all().aggregate(Max("id"))["id__max"] or 10000
        new_id = max(new_id, 10000)

        session = sessionmaker(bind=SQL_ENGINE)()

        for old_tank in old_tank_list:
            if old_tank.title in tank_title_map:
                LegacyTank.objects.create(id=old_tank.id, tag=tank_title_map[old_tank.title])
                continue

            manga = session.query(OldManga).filter(OldManga.tank_id == old_tank.id).order_by(OldManga.tank_chp)[:]
            if len(manga) > 0:
                cover = manga[0].cover
            else:
                cover = None

            new_id += 1
            tag = Tag(
                id=new_id,
                tag_type=TagType.TANK,
                name=old_tank.title,
                slug=old_tank.slug,
                created_by_id=self.user.id,
                created_on=old_tank.date_created,
                cover=self.get_file(cover),
            )
            tag.save(updated_by=None)
            tank_title_map[old_tank.title] = tag

            LegacyTank.objects.create(id=old_tank.id, tag=tag)
Code example #8
File: Model.py Project: elioth010/lugama
 def __init__(self):
     '''
     Constructor
     '''
     self.base = declarative_base()
     self.SessionFactory = sessionmaker(bind=self.engine)
     self.session = self.SessionFactory()
Code example #9
File: queries.py Project: Mardak/splice
def tile_exists(target_url, bg_color, title, type, image_uri, enhanced_image_uri, locale, conn=None, *args, **kwargs):
    """
    Return the id of a tile having the data provided
    """
    from splice.environment import Environment
    env = Environment.instance()

    if conn is not None:
        sm = sessionmaker(bind=conn)
        session = sm()
    else:
        session = env.db.session

    # we add order_by in the query although it shouldn't be necessary
    # this is because of a previous bug where duplicate tiles could be created
    results = (
        session
        .query(Tile.id)
        .filter(Tile.target_url == target_url)
        .filter(Tile.bg_color == bg_color)
        .filter(Tile.title == title)
        .filter(Tile.image_uri == image_uri)
        .filter(Tile.enhanced_image_uri == enhanced_image_uri)
        .filter(Tile.locale == locale)
        .order_by(asc(Tile.id))
        .first()
    )

    if results:
        return results[0]

    return results
Code example #10
def main(keep_saved, config_location):
    if not config_location:
        config_location = os.path.dirname(os.path.abspath(__file__)) + "\\config.ini"
    try:
        open(config_location)
    except IOError:
        print "Problem opening config.ini at " + config_location
    engine = create_engine(make_connection(config_location), echo=False)
    Session = sessionmaker(bind=engine)
    session = Session()

    Base = declarative_base()

    class FeverItems(Base):
        __tablename__ = 'fever_items'

        id = Column(INTEGER, primary_key=True)
        description = Column(LONGTEXT)
        is_saved = Column(TINYINT)

    records = session.query(FeverItems.description).filter_by(is_saved=1)
    contents = []
    for record in records:
        contents.append(record)
    if not keep_saved:
        session.query(FeverItems).filter_by(is_saved=1).update({"is_saved": 0})
        session.commit()
    return contents


if __name__ == "__main__":
    main(bool(sys.argv[1]), str(sys.argv[2]))
Code example #11
 def __init__(self, name, url, scope_func):
     self._name = name
     self._url, engine_params = self._parse_url(url)
     self._engine = sqlalchemy.create_engine(self._url, **engine_params)
     self._session_maker = sessionmaker(self.engine, class_=Session, expire_on_commit=False)
     self._scoped_session_maker = scoped_session(self._session_maker, scopefunc=scope_func)
     self.Model = declarative_base()
Code example #12
File: database.py Project: blindsightcorp/rigor
	def __init__(self, database, config):
		self.name = database
		self._config = config
		url = Database.build_url(database, config)
		self._engine = sa.create_engine(url)
		self._metadata = sa.MetaData(bind=self._engine, naming_convention=kNamingConvention)
		self._sessionmaker = sessionmaker(bind=self._engine)
Code example #13
File: control.py Project: OskarBun/lecturefb
    def __init__(self, db_url, echo=False, drop_all=False):
        '''
        Constructor
        '''
        self._clients = []
        self._pending = []

        logging.info("connecting to %s",db_url)
        params = dict(echo=echo)
        if 'mysql' in db_url:
            params['encoding']='utf-8'
            params['pool_recycle']=3600
            params['isolation_level']='READ COMMITTED'
        self._engine = create_engine(db_url, **params)
        self._Session = sessionmaker(bind=self._engine,
                                      extension=self._SESSION_EXTENSIONS_,
                                      **self._SESSION_KWARGS_)
        if drop_all is True:
            with self.session as session:
                self._drop_all_(session)
        self._create_all_()
        if drop_all is True:
            with self.session as session:
                session.add(model.Issue(id=1, name="repeat idea", type = model.Issue.TYPE[0]))
                session.add(model.Issue(id=2, name="speed", type = model.Issue.TYPE[1]))
                session.commit()
Code example #14
 def __init__(self, engine):
     '''
     Constructor
     '''
     self.engine = engine
     klass = sessionmaker(bind=self.engine)
     self.session = klass()
Code example #15
File: dao.py Project: CooledCoffee/sqlalchemy-dao
 def __init__(self, url, pool_size=sqlalchemy_dao.POOL_DEFAULT):
     if pool_size == sqlalchemy_dao.POOL_DISABLED:
         self._engine = engine.create_engine(url, poolclass=NullPool)
     else:
         self._engine = engine.create_engine(url, pool_size=pool_size, pool_recycle=3600,
                 max_overflow=sys.maxsize)
     self._Session = session.sessionmaker(bind=self._engine, class_=self.session_class) # pylint: disable=invalid-name
Code example #16
File: sqlite.py Project: LouisChen1905/OneAnalyser
 def add_period(self, period):
     if isinstance(period, Period):
         DBSession = sessionmaker(bind=self.engine)
         session = DBSession()
         
         session.add(period)
         session.commit()
Code example #17
File: archive.py Project: Pylons/repozitory
 def _create_session(self, engine):
     Base.metadata.create_all(engine)
     # Distinguish sessions by thread.
     session = scoped_session(sessionmaker(
         extension=ZopeTransactionExtension()))
     session.configure(bind=engine)
     return session
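The per-thread behavior the comment refers to can be observed directly; a small sketch (independent of repozitory, assuming session is the scoped factory returned above):

    import threading

    def show_identity():
        # scoped_session keeps one Session per thread in its registry
        print(threading.current_thread().name, id(session()))

    worker = threading.Thread(target=show_identity)
    worker.start()
    worker.join()
    show_identity()  # the main thread sees a different Session object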
Code example #18
    def persist_bundle_sensor(self):
        from madmex.persistence.driver import persist_bundle
        folder = '/LUSTRE/MADMEX/staging/madmex_antares/test_ingest/556_297_041114_dim_img_spot'
        from sqlalchemy import create_engine
        from sqlalchemy.orm.session import sessionmaker
        from madmex.mapper.bundle.spot5 import Bundle
        #from madmex.configuration import SETTINGS

        dummy = Bundle(folder)
        #dummy.target = '/LUSTRE/MADMEX/staging/'
        target_url = getattr(SETTINGS, 'TEST_FOLDER')
        print(target_url)
        #TODO please fix me, horrible hack
        dummy.target = target_url
        persist_bundle(dummy)
        my_database = getattr(SETTINGS, 'ANTARES_TEST_DATABASE')
        klass = sessionmaker(bind=create_engine(my_database))
        session = klass()
        query = 'SELECT count(*) FROM product WHERE uuid=\'%s\';' % dummy.uuid_id

        try:
            result_set = session.execute(query)
            for row in result_set:
                self.assertGreater(row['count'], 0)
            session.delete(dummy.get_database_object())
            session.commit()
            for file_name in dummy.get_files():
                full_path = os.path.join(target_url, os.path.basename(file_name))
                self.assertTrue(os.path.isfile(full_path))
                os.remove(full_path)
        except:
            session.rollback()
            raise
        finally:
            session.close()
Code example #19
File: schema.py Project: omersaeed/spire
    def _create_engine(self, url):
        echo = self.configuration.get('echo')
        if echo:
            echo = 'debug'

        engine = self.dialect.create_engine(url, self.schema, echo=echo)
        return engine, sessionmaker(bind=engine)
Code example #20
 def write_table(self, table, db_cols, hstore_col_name):
     logger.debug('pg_copy_writer starting for project %s, table %s' % (self.project,table['name']))
     prefix = self.project
     
     csvdir = conf.TMP_FILES_DIR
     csvfilename = '%s-%s.csv' % (prefix, table['name'])
     
     csv_writer = CsvPlainWriter(csvdir)        
     
     try:
         csv_writer.write_table(table, csvfilename, db_cols, hstore_col_name)
     except EmptyTableException:
         logger.info('pg_copy_writer stopping, no rows to write to csv file')
         return
     
     csvfile = os.path.join(csvdir, csvfilename)
     with open(csvfile, 'r') as csv_file:
         abspath = os.path.abspath(csv_file.name)
         headings = csv_file.readline()
     
     engine = sqlalchemy.create_engine(conf.SQLALCHEMY_DB_URL)
     session = sessionmaker(bind=engine)
     
     copy_file = open(abspath,'r')
     copy_sql = "COPY %s (%s) FROM STDIN DELIMITER ',' CSV HEADER" % (table['name'], headings)
     
     logger.debug('starting pg_copy')
     # need to use raw psycopg here to copy from stdin because with RDS not allowing admin users, can't copy from a file.
     raw_conn = engine.raw_connection()
     raw_cur = raw_conn.cursor()
     raw_cur.copy_expert(copy_sql, copy_file)
     raw_conn.commit()
     raw_cur.close()
     raw_conn.close()
Code example #21
 def persist_bundle(self):
     from madmex.persistence.driver import persist_bundle
     from sqlalchemy import create_engine
     from sqlalchemy.orm.session import sessionmaker
     from madmex.util import remove_file
     dummy = DummyBundle()
     persist_bundle(dummy)
     
     
     my_database = getattr(SETTINGS, 'ANTARES_TEST_DATABASE')
     klass = sessionmaker(bind=create_engine(my_database))
     session = klass()
     query = 'SELECT count(*) FROM product WHERE uuid=\'%s\';' % dummy.uuid_id
      print(query)
     try:
         result_set = session.execute(query)
         for row in result_set:
             self.assertGreater(row['count'], 0)
         # Delete object from database.
         session.delete(dummy.get_database_object())
         session.commit()
         for file_name in dummy.get_files():
             full_path = os.path.join(dummy.get_output_directory(), os.path.basename(file_name))
             self.assertTrue(os.path.isfile(full_path))
             # Remove file from filesystem.
             remove_file(full_path)
     except:
         session.rollback()
         raise
     finally:
         session.close()
Code example #22
File: TestUcscGeneDao.py Project: orenlivne/ober
    def test_intersecting_genes(self):
        '''Test retrieving genes intersecting a base-pair position segment.'''
        # print repr(User.__table__) #@UndefinedVariable
        Session = sessionmaker(bind=self.engine)
        session = Session()
        
        # I believe all of the following are equivalent statements and all are prepared statements
        chrom = 1
        (start, end) = (62151773, 62516683)
        expected = [('NM_032027', 'TM2D1'), ('NM_176877', 'INADL')]
        
        assert_equal([(gene.name, gene.name2) for gene in session.query(Gene).
                      filter(and_(Gene.chrom == 'chr%s' % (chrom,),
                                  greatest(Gene.txStart, start) <= 
                                  least(Gene.txEnd, end))).
                      order_by(Gene.txStart)], expected)

        assert_equal([(gene.name, gene.name2) for gene in session.query(Gene).
                      filter('chrom = :chrom and greatest(txStart, :start) <= least(txEnd, :end)').
                      params(chrom='chr%s' % (chrom,), start=start, end=end).
                      order_by(Gene.txStart)], expected)

        assert_equal([(gene.name, gene.name2) for gene in 
                      self.dao.intersecting_genes(chrom, start, end)], expected)
        
        session.close()
Code example #23
File: db.py Project: sunghwanJo/ASAP
 def init_database(self):
     '''
     Creates the Database and a Session that can connect to it.
     '''
     
     Base.metadata.create_all(self.get_engine())
     self.session = sessionmaker(bind=self.get_engine(), autoflush=True, autocommit=False)()
Code example #24
File: searchreplacedb.py Project: johnraz/sword
def db_searchreplace(db_name, db_user, db_password, db_host, search, replace ):
    engine = create_engine("mysql://%s:%s@%s/%s" % (db_user, db_password, db_host, db_name ))
    #inspector = reflection.Inspector.from_engine(engine)
    #print inspector.get_table_names()
    meta = MetaData()
    meta.bind = engine
    meta.reflect()

    Session = sessionmaker(engine)


    Base = declarative_base(metadata=meta)
    session = Session()

    tableClassDict = {}
    for table_name, table_obj in Base.metadata.tables.items():
        try:
            tableClassDict[table_name] = type(str(table_name), (Base,), {'__tablename__': table_name, '__table_args__':{'autoload' : True, 'extend_existing': True} })
    #        class tempClass(Base):
    #            __tablename__ = table_name
    #            __table_args__ = {'autoload' : True, 'extend_existing': True}
    #            foo_id = Column(Integer, primary_key='temp')
            for row in session.query(tableClassDict[table_name]).all():
                for column in table_obj._columns.keys():
                    data_to_fix = getattr(row, column)
                    fixed_data = recursive_unserialize_replace( search, replace, data_to_fix, False)

                    setattr(row, column, fixed_data)
                    #print fixed_data
        except Exception as e:
            print(e)
Code example #25
File: orm.py Project: dsjl4506/medic
def InitDb(*args, **kwds):
    """
    Create a new DBAPI connection pool.

    The most common and only required argument is the connection URL.
    The URL can either be a string or a `sqlalchemy.engine.url.URL`.
    This method has no return value and needs to be called only once per process.

    See `sqlalchemy.engine.create_engine`.
    """
    global _Base
    global _db
    global _session

    if len(args) > 0:
        # inject the foreign key pragma when using SQLite databases to ensure integrity
        # http://docs.sqlalchemy.org/en/rel_0_8/dialects/sqlite.html#foreign-key-support
        if (isinstance(args[0], str) and args[0].startswith('sqlite')) or \
                (isinstance(args[0], URL) and args[0].get_dialect() == 'sqlite'):
            @event.listens_for(engine.Engine, "connect")
            def set_sqlite_pragma(dbapi_connection, _):
                cursor = dbapi_connection.cursor()
                cursor.execute("PRAGMA foreign_keys=ON")
                cursor.close()

    _db = engine.create_engine(*args, **kwds)
    _Base.metadata.create_all(_db)
    _session = session.sessionmaker(bind=_db)
    logger.debug("DB bound to %s", _db)
    return None
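A hedged usage sketch; only the URL is required, and extra keyword arguments are passed through to create_engine (the file name is illustrative):

    InitDb('sqlite:///medic.db')  # the pragma listener above enforces foreign keys
    # InitDb('postgresql://user@localhost/medic', echo=True)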
Code example #26
File: indexer.py Project: QuLogic/obspy
def _run_indexer(options):
    logging.info("Starting indexer %s:%s ..." % (options.host, options.port))
    # initialize crawler
    service = WaveformIndexer((options.host, options.port), MyHandler)
    service.log = logging
    try:
        # prepare paths
        if ',' in options.data:
            paths = options.data.split(',')
        else:
            paths = [options.data]
        paths = service._prepare_paths(paths)
        if not paths:
            return
        # prepare map file
        if options.mapping_file:
            with open(options.mapping_file, 'r') as f:
                data = f.readlines()
            mappings = parse_mapping_data(data)
            logging.info("Parsed %d lines from mapping file %s" %
                         (len(data), options.mapping_file))
        else:
            mappings = {}
        # create file queue and worker processes
        manager = multiprocessing.Manager()
        in_queue = manager.dict()
        work_queue = manager.list()
        out_queue = manager.list()
        log_queue = manager.list()
        # spawn processes
        for i in range(options.number_of_cpus):
            args = (i, in_queue, work_queue, out_queue, log_queue, mappings)
            p = multiprocessing.Process(target=worker, args=args)
            p.daemon = True
            p.start()
        # connect to database
        engine = create_engine(options.db_uri, encoding=native_str('utf-8'),
                               convert_unicode=True)
        metadata = Base.metadata
        # recreate database
        if options.drop_database:
            metadata.drop_all(engine, checkfirst=True)
        metadata.create_all(engine, checkfirst=True)
        # initialize database + options
        _session = sessionmaker(bind=engine)
        service.session = _session
        service.options = options
        service.mappings = mappings
        # set queues
        service.input_queue = in_queue
        service.work_queue = work_queue
        service.output_queue = out_queue
        service.log_queue = log_queue
        service.paths = paths
        service._reset_walker()
        service._step_walker()
        service.serve_forever(options.poll_interval)
    except KeyboardInterrupt:
        quit()
    logging.info("Indexer stopped.")
Code example #27
File: tests.py Project: sfermigier/WhooshAlchemy
    def setUp(self):

        engine = create_engine("sqlite:///:memory:", echo=True)
        Session = sessionmaker(bind=engine)
        self.session = Session()

        Base = declarative_base()

        class Post(Base):
            __tablename__ = "objectA"
            __searchable__ = ["title", "body"]

            id = Column(Integer, primary_key=True)
            title = Column(Text)
            body = Column(UnicodeText)
            created = Column(DateTime, default=datetime.datetime.utcnow)  # pass the callable so each row gets its own timestamp

            def __repr__(self):
                return "{0}(title={1})".format(self.__class__.__name__, self.title)

        self.Post = Post
        Base.metadata.create_all(engine)

        self.index_manager = whooshalchemy.IndexService(session=self.session)
        self.index_manager.register_class(Post)
Code example #28
File: core.py Project: posborne/putio-sync
 def _ensure_database_exists(self):
     if not os.path.exists(SETTINGS_DIR):
         os.makedirs(SETTINGS_DIR)
     self._db_engine = create_engine("sqlite:///{}".format(DATABASE_FILE))
     self._db_engine.connect()
     self._scoped_session = scoped_session(sessionmaker(self._db_engine))
     DBModelBase.metadata.create_all(self._db_engine)
Code example #29
    def tearDown(self):
        self.db_session.close()

        database.db_session = scoped_session(
            sessionmaker())  # restore original db_session for following test cases

        Base.metadata.drop_all(bind=self.db_engine)
Code example #30
File: environment.py Project: 7scientists/rouster
 def get_session(self):
     if not self.sessionmaker:
         engine = self.get_db_engine()
         self.sessionmaker = scoped_session(sessionmaker(bind=engine))
     session = self.sessionmaker()
     session.rollback()
     return session
Code example #31
File: webscraper.py Project: awardnopoints/BikeFind
def main():
    """Runs an infinite loop, calling DB update functions on each iteration.
    getStaticData is called once, getDynamicData every 5 mins, and getWeatherData
    every 30 mins"""
    # add static data (once-off)
    getStaticData()

    Session = sessionmaker(bind=engine)
    session = Session()

    getCurrentData()

    getForecastData()
    session.close()

    counter = 0
    while True:
        # New DB session for each iteration
        Session = sessionmaker(bind=engine)
        session = Session()

        getDynamicData()

        # update weather every 30 minutes
        if counter % 6 == 0:
            getWeatherData()
            if counter % 12 == 0:
                getForecastData()

        session.close()
        counter += 1
        print("sleeping now", counter)
        # 300 seconds - execution time for one iteration (~55s)
        time.sleep(245)
Code example #32
File: rules.py Project: WyHy/ip_proxy_spider
def get_ip_proxy():
    engine = create_engine('mysql+mysqlconnector://' + MYSQL_USER + ':' + MYSQL_PASSWD + '@' + MYSQL_HOST + ':3306/' + MYSQL_DBNAME)
    DBSession = sessionmaker(bind=engine)
      
    session = DBSession()      
    ## run the query
#     item = session.query(ProxyItem).filter(ProxyItem.id=='4').one()
    items = session.query(ProxyItem).filter(ProxyItem.isvalid=='1')
    ipList = []
    for item in items:
        ip = {'ip':item.ip, 'port':item.port, 'user_pass':''}
        ipList.append(ip)
    session.close()
    
    return ipList
Code example #33
def main(argv=sys.argv):
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    logging.getLogger('sqlalchemy.engine').setLevel(logging.ERROR)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    Session = sessionmaker(extension=ZopeTransactionExtension())  # noqa
    session = Session(bind=engine)
    configure_es_from_config(settings)

    with transaction.manager:
        fill_index(session)
Code example #34
File: postgres.py Project: desaintmartin/remoulade
 def __init__(
     self,
     *,
     namespace: str = "remoulade-state",
     encoder: Optional[Encoder] = None,
     client: Optional[sessionmaker] = None,
     max_size: int = 2000000,
     url: Optional[str] = None,
 ):
     self.url = url or DEFAULT_POSTGRES_URI
     super().__init__(namespace=namespace, encoder=encoder)
     self.client = client or sessionmaker(create_engine(self.url))
     self.init_db()
     self.max_size = max_size
     self.lock = threading.Lock()
Code example #35
File: __init__.py Project: procool/itstructure
    def _get_db_session(self, bind_key=None, access=None, echo=True, **kwargs):
        db_alias = bind_key or self.__class__._db_alias
        if db_alias not in _database._connections:
            engine = self._get_engine(bind_key=bind_key, echo=echo, access=access)
            self.__class__._connections[db_alias] = scoped_session(sessionmaker(
                bind=engine,
                class_=DBSession,
                expire_on_commit=False,
            ))

        connections_ = self.__class__._connections[db_alias]

        session = connections_(**kwargs)
        log.debug(u"DB<%s>: initiated as %s <%s>", id(session), db_alias, session.bind.pool.status())
        return session
Code example #36
class DBConnectionManager:
    '''This class handles connection to the database.'''
    Base = declarative_base()
    conn_url = os.environ.get('DB_CONN_URL')
    engine = create_engine(conn_url)
    Session = sessionmaker(bind=engine)

    @staticmethod
    def get_db():
        '''Returns a session.
        
        :return: The session
        :rtype: Session
        '''
        return DBConnectionManager.Session()
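Usage sketch, assuming DB_CONN_URL is present in the environment before the class body runs:

    session = DBConnectionManager.get_db()
    try:
        # ... issue queries through session ...
        session.commit()
    finally:
        session.close()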
Code example #37
    def __init__(self, url=None, echo=False):

        self.engine = self.init_engine(url)

        self.add_engine_pidguard(self.engine)

        session_factory = sessionmaker(bind=self.engine)

        Session = scoped_session(session_factory)

        self.session = session_factory()

        Base.metadata.create_all(self.engine)

        self.session.commit()
Code example #38
def _connect_to_db(transcript):

    """This method will connect to the database using the information
    contained in the JSON configuration.
    :param transcript: the Transcript instance
    :type transcript: Mikado.loci_objects.transcript.Transcript

    """

    transcript.engine = dbutils.connect(
        transcript.json_conf, transcript.logger)

    transcript.sessionmaker = sessionmaker()
    transcript.sessionmaker.configure(bind=transcript.engine)
    transcript.session = transcript.sessionmaker()
Code example #39
File: db_helper.py Project: dmcconaughy/OnlyFans
def create_database_session(connection_info,
                            connection_type="sqlite:///",
                            autocommit=False,
                            pool_size=5) -> tuple[scoped_session, Engine]:
    kwargs = {}
    if connection_type == "mysql+mysqldb://":
        kwargs["pool_size"] = pool_size
        kwargs["pool_pre_ping"] = True
        kwargs["max_overflow"] = -1

    engine = sqlalchemy.create_engine(
        f'{connection_type}{connection_info}?charset=utf8mb4', **kwargs)
    session_factory = sessionmaker(bind=engine, autocommit=autocommit)
    Session = scoped_session(session_factory)
    return Session, engine
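A hedged call for the MySQL branch above (credentials are placeholders); the returned registry hands out one session per thread:

    Session, engine = create_database_session(
        "user:password@localhost/onlyfans",
        connection_type="mysql+mysqldb://")
    session = Session()   # thread-local Session from the scoped registry
    # ... use session ...
    Session.remove()      # release this thread's session when finished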
Code example #40
    def predict(self, t: str, target_ric: str) -> List[str]:
        # Connect to Postgres
        engine = create_engine(self.config.db_uri)
        SessionMaker = sessionmaker(bind=engine)
        pg_session = SessionMaker()

        rics = self.config.rics \
            if target_ric in self.config.rics \
            else [target_ric] + self.config.rics

        alignments = load_alignments_from_db(pg_session, rics, t, self.seqtypes)

        # Write the prediction data
        self.config.dir_output.mkdir(parents=True, exist_ok=True)
        dest_alignments = self.config.dir_output / Path('alignment-predict.json')
        with dest_alignments.open(mode='w') as f:
            writer = jsonlines.Writer(f)
            writer.write(alignments.to_dict())

        predict_iter = create_dataset(self.config,
                                      self.device,
                                      self.vocab,
                                      rics,
                                      self.seqtypes)

        self.model.eval()

        batch = next(iter(predict_iter))

        times = batch.time
        tokens = batch.token
        raw_short_field = stringify_ric_seqtype(Code.N225.value, SeqType.RawShort)
        latest_vals = [x for x in getattr(batch, raw_short_field).data[:, 0]]
        raw_long_field = stringify_ric_seqtype(Code.N225.value, SeqType.RawLong)
        latest_closing_vals = get_latest_closing_vals(batch, raw_long_field, times)

        loss, pred, attn_weight = self.model(batch,
                                             batch.batch_size,
                                             tokens,
                                             times,
                                             self.criterion,
                                             Phase.Test)

        i_eos = self.vocab.stoi[SpecialToken.EOS.value]
        pred_sents = [remove_bos([self.vocab.itos[i] for i in takeuntil(i_eos, sent)])
                      for sent in zip(*pred)]

        return replace_tags_with_vals(pred_sents[0], latest_closing_vals[0], latest_vals[0])
Code example #41
def tile_exists(target_url, bg_color, title, typ, image_uri, enhanced_image_uri, locale,
                frecent_sites, time_limits, frequency_caps, adgroup_name, explanation, check_inadjacency, channel_id, conn=None, *args, **kwargs):
    """
    Return the id of a tile having the data provided
    """
    from splice.environment import Environment
    env = Environment.instance()

    if conn is not None:
        sm = sessionmaker(bind=conn)
        session = sm()
    else:
        session = env.db.session

    # we add order_by in the query although it shouldn't be necessary
    # this is because of a previous bug where duplicate tiles could be created
    results = (
        session
        .query(Tile.id, Tile.adgroup_id)
        .filter(Tile.target_url == target_url)
        .filter(Tile.bg_color == bg_color)
        .filter(Tile.title == title)
        .filter(Tile.type == typ)
        .filter(Tile.image_uri == image_uri)
        .filter(Tile.enhanced_image_uri == enhanced_image_uri)
        .filter(Adgroup.locale == locale)
        .filter(Adgroup.start_date == time_limits.get('start'))
        .filter(Adgroup.end_date == time_limits.get('end'))
        .filter(Adgroup.start_date_dt == time_limits.get('start_dt'))
        .filter(Adgroup.end_date_dt == time_limits.get('end_dt'))
        .filter(Adgroup.frequency_cap_daily == frequency_caps['daily'])
        .filter(Adgroup.frequency_cap_total == frequency_caps['total'])
        .filter(Adgroup.name == adgroup_name)
        .filter(Adgroup.explanation == explanation)
        .filter(Adgroup.check_inadjacency == check_inadjacency)
        .filter(Adgroup.channel_id == channel_id)
        .join(Adgroup.tiles)
        .order_by(asc(Tile.id))
    )

    if results:
        for tile_id, adgroup_id in results:
            # now check frecent sites for this tile
            db_frecents = get_frecent_sites_for_tile(tile_id, conn)
            if db_frecents == sorted(set(frecent_sites)):
                return tile_id, adgroup_id

    return None, None
Code example #42
def main() -> None:

    output_dir = Path('output')
    output_dir.mkdir(parents=True, exist_ok=True)

    engine = create_engine('sqlite://')
    SessionMaker = sessionmaker(bind=engine)
    session = SessionMaker()
    create_tables(engine)

    result_csv = Path('results/result.csv')
    results = []
    with result_csv.open(mode='r') as f:
        reader = csv.reader(f, delimiter=',')
        next(reader)
        for fields in reader:
            metric = fields[0]
            noun = fields[1]
            verb = fields[2]
            vote = fields[3]
            user_id = fields[4]
            results.append(Result(metric, noun, verb, vote, user_id).to_dict())
    session.execute(Result.__table__.insert(), results)

    with Path('resources/translation.json').open(mode='r') as f:
        t = json.load(f)

    ja_rows = calc_means(session)
    with Path('output/mean-ja.csv').open(mode='w') as ja:
        ja_writer = csv.writer(ja)
        ja_writer.writerows(ja_rows)
    with Path('output/mean-en.csv').open(mode='w') as en:
        en_writer = csv.writer(en)
        en_writer.writerows([(t[j[0]], t[j[1].replace('X', '')], j[2], j[3])
                             for j in ja_rows])

    for (pos, metric) in itertools.product(['noun', 'verb'], list(Metric)):
        ja_rows = sort(session, pos, metric)
        with Path('output/{}s-sorted-by-{}-ja.csv'.format(
                pos, metric.value)).open(mode='w') as ja:
            ja_writer = csv.writer(ja)
            ja_writer.writerows(ja_rows)

        with Path('output/{}s-sorted-by-{}-en.csv'.format(
                pos, metric.value)).open(mode='w') as en:
            en_writer = csv.writer(en)
            en_writer.writerows([(t[j[0].replace('X', '')], j[1])
                                 for j in ja_rows])
Code example #43
File: services.py Project: e7dal/mercury
    def __init__(self, **kwargs):

        self.db_name = kwargs['database']
        self.host = kwargs['host']
        self.port = int(kwargs.get('port', 5432))
        self.username = kwargs['username']
        self.password = kwargs['password']
        self.schema = kwargs['schema']
        self.metadata = None
        self.engine = None
        self.session_factory = None
        self.Base = None

        url_template = '{db_type}://{user}:{passwd}@{host}:{port}/{database}'
        db_url = url_template.format(db_type='postgresql+psycopg2',
                                     user=self.username,
                                     passwd=self.password,
                                     host=self.host,
                                     port=self.port,
                                     database=self.db_name)

        retries = 0
        connected = False
        while not connected and retries < 3:
            try:
                self.engine = sqla.create_engine(db_url, echo=False)
                self.metadata = MetaData(schema=self.schema)
                self.Base = automap_base(bind=self.engine,
                                         metadata=self.metadata)
                self.Base.prepare(self.engine, reflect=True)
                self.metadata.reflect(bind=self.engine)
                self.session_factory = sessionmaker(bind=self.engine,
                                                    autoflush=False,
                                                    autocommit=False)
                connected = True
                print('### Connected to PostgreSQL DB.', file=sys.stderr)

            except Exception as err:
                print(err)
                print(err.__class__.__name__)
                print(err.__dict__)
                time.sleep(1)
                retries += 1

        if not connected:
            raise Exception(
                '!!! Unable to connect to PostgreSQL db on host %s at port %s.'
                % (self.host, self.port))
Code example #44
File: system.py Project: ace-ecosystem/ace2-core
    async def initialize(self):
        """Initializes database connections by creating the SQLAlchemy engine and session objects."""
        # see https://github.com/PyMySQL/PyMySQL/issues/644
        # /usr/local/lib/python3.6/dist-packages/pymysql/cursors.py:170: Warning: (1300, "Invalid utf8mb4 character string: '800363'")
        warnings.filterwarnings(action="ignore", message=".*Invalid utf8mb4 character string.*")

        if not self.db_url:
            self.db_url = await self.get_config_value(CONFIG_DB_URL, env="ACE_DB_URL", default=self.db_url)

        if not self.db_kwargs:
            self.db_kwargs = await self.get_config_value(CONFIG_DB_KWARGS, default=self.db_kwargs)

        get_logger().info(f"connecting to {self.db_url} ({self.db_kwargs})")
        self.engine = create_async_engine(self.db_url, **self.db_kwargs)
        self.async_session = sessionmaker(self.engine, expire_on_commit=False, class_=AsyncSession)
        await super().initialize()
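With class_=AsyncSession, the factory produces sessions that work as async context managers; a sketch of a hypothetical consumer (select comes from SQLAlchemy 1.4+, and Alert is an assumed mapped class):

    from sqlalchemy import select

    async def count_alerts(self):
        # hypothetical query against the assumed mapped class Alert
        async with self.async_session() as session:
            result = await session.execute(select(Alert))
            return len(result.scalars().all())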
Code example #45
def load_folder(path, processor):
    engine = get_engine()
    Session = sessionmaker(bind=engine)
    session = Session()
    try:
        for root, dirs, files in os.walk(path):
            for file in files:
                if not file == 'README':
                    f = os.path.join(root, file)
                    processor(session, f)
                session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
Code example #46
File: cli.py Project: Snoin/seektam-web
def loader(url):
    u"""농식품종합정보시스템 식품 정보(식단명 및 재료)를 DB에 저장합니다.

    :param URL: 저장할 데이터베이스 URL (ex. mysql://scott@tiger:example.com/dbname)
    """
    engine = create_engine(url)
    Session = scoped_session(sessionmaker(engine))
    sess = Session()

    Base.metadata.bind = engine
    Base.metadata.create_all()

    for food in koreafood.get_food_list():
        mfood = koreafood.food_to_model(sess, food)
        sess.merge(mfood)
        sess.commit()
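Code example #2 above exercises this command under test; the equivalent direct invocation through click's test runner looks like this (the URL is illustrative):

    from click.testing import CliRunner
    CliRunner().invoke(loader, ['sqlite:///food.db'])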
Code example #47
File: db.py Project: novolei/Miyagi
    def __init__(self, app):
        self.app = app
        self.config = app.config
        try:
            # Check if we have valid db config
            self.config.DB.type is True
        except AttributeError:
            raise MiyagiDbError(
                'No DB config found. '
                'Please provide the needed parameters inside the "DB" key in the config file.'
            )

        self.SQLAlchemyBase = SQLAlchemyBase
        self.db_engine = create_engine(self.config.db_uri, echo=True)
        self.session_maker = sessionmaker(autoflush=False)
        self.session_maker.configure(bind=self.db_engine)
Code example #48
def set_session(env_setting='local'):
    # 2. Load db config
    file_dir = os.path.dirname(os.path.realpath(__file__))
    file_name = "alembic_%s.ini" % env_setting
    db_config_file = os.path.join(file_dir, file_name)
    config = ConfigParser.ConfigParser()
    config.read(db_config_file)

    # 3. Set SQLAlchemy db engine
    db_engine = config.get('alembic', 'sqlalchemy.url')

    # 4. Configure SQLAlchemy session
    engine = create_engine(db_engine)
    Session = sessionmaker()
    Session.configure(bind=engine)
    return Session()
Code example #49
File: app.py Project: woshitima/library
def books():
    if 'key_word' in request.args:
        key_word = request.args.get("key_word")
        session = sessionmaker(engine)()
        # Bound parameters avoid SQL injection via key_word (text() is sqlalchemy.text).
        books = session.execute(
            text("""
                SELECT *
                FROM "Book"
                WHERE name LIKE :kw
                        OR author LIKE :kw;
                """),
            {"kw": f"%{key_word}%"},
        )
        session.commit()
    else:
        books = db.execute("""SELECT * FROM "Book";""")
        db.commit()

    return render_template("books_table.html", object_list=books)
Code example #50
File: rules.py Project: WyHy/ip_proxy_spider
def get_rules():
    engine = create_engine('mysql+mysqlconnector://' + MYSQL_USER + ':' + MYSQL_PASSWD + '@' + MYSQL_HOST + ':3306/' + MYSQL_DBNAME)
    DBSession = sessionmaker(bind=engine)
      
    session = DBSession()
#     new_user = User(id='4', name='Huangyi')
#     session.add(new_user)
#     session.commit()
    #session.close()
      
    ## run the query
#     item = session.query(ProxyItem).filter(ProxyItem.id=='4').one()
    item = session.query(ProxyRule)[0]
    print('type:', type(item))
    print('object:', item.name + item.start_urls + item.sub_link)
    session.close()
Code example #51
File: driver.py Project: makeling/antares
def get_states_names():
    '''
    '''
    engine = create_engine(
        'postgresql://*****:*****@reddbase.conabio.gob.mx:5432/madmex_database'
    )
    session = sessionmaker(engine)()
    query = 'SELECT nom_ent FROM vectordata.country_mexico_2012'
    try:
        result = session.execute(query)
    except Exception:
        LOGGER.error('Unexpected error while querying state names.')
        raise
    finally:
        session.close()
    return result
Code example #52
    def __init__(self, inputs):
        self.connODM = conection().conODM()
        self.connORM = conection().conORM()

        self.status = 200
        self.msg = "0"
        self.type = "text/plain"
        Session = sessionmaker(bind=self.connORM)
        self.session = Session()
        if inputs["company"] in self.endpoints:
            # This will only apply to systems registered in the endpoint handler
            targetCls=self.endpoints[inputs["company"]]()
            retVal=getattr(targetCls, "process")(inputs)
            self.msg= retVal["value"]
            self.type=retVal["type"]
            self.status=retVal["status"]
Code example #53
def upgrade(migrate_engine):
    conn = migrate_engine.connect()
    Session = sessionmaker(bind=migrate_engine)
    session = Session()

    for s in session.query(Study).all():
        fg = FieldGroup(name=s.name)
        session.add(fg)

        s.field_group = fg
        session.add(s)

        upd = fields.update().where(fields.c.study_id == s.id).values(field_group_id=fg.id)
        conn.execute(upd)

    session.commit()
Code example #54
def setup(app):
    global dburl, engine, meta, sess, questions, assignments, assignment_questions, courses, competency

    app.connect("env-before-read-docs", reset_questions)
    app.connect("build-finished", finalize_updates)
    # the `qbank` option is for the QuestionBank
    # it allows us to populate the database from the question bank
    # but we don't care about populating chapter and subchapter tables and others
    # so we commit each question.
    app.add_config_value("qbank", False, "html")
    try:
        dburl = get_dburl()
        engine = create_engine(dburl,
                               client_encoding="utf8",
                               convert_unicode=True)
        Session = sessionmaker()
        engine.connect()
        Session.configure(bind=engine)
        sess = Session()
    except Exception as e:  # psycopg2.OperationalError
        dburl = None
        engine = None
        sess = None
        print(e)
        print(
            "Skipping all DB operations because environment variables not set up"
        )
    else:
        # If no exceptions are raised, then set up the database.
        meta = MetaData()
        questions = Table("questions",
                          meta,
                          autoload=True,
                          autoload_with=engine)
        assignments = Table("assignments",
                            meta,
                            autoload=True,
                            autoload_with=engine)
        assignment_questions = Table("assignment_questions",
                                     meta,
                                     autoload=True,
                                     autoload_with=engine)
        courses = Table("courses", meta, autoload=True, autoload_with=engine)
        competency = Table("competency",
                           meta,
                           autoload=True,
                           autoload_with=engine)
Code example #55
File: models.py Project: halsayed/kraken-bot
def init_db(db_url: str, clean_open_orders: bool = False) -> None:
    """
    Initializes this module with the given config,
    registers all known command handlers
    and starts polling for message updates
    :param db_url: Database to use
    :param clean_open_orders: Remove open orders from the database.
        Useful for dry-run or if all orders have been reset on the exchange.
    :return: None
    """
    kwargs = {}

    # Take care of thread ownership if in-memory db
    if db_url == 'sqlite://':
        kwargs.update({
            'connect_args': {
                'check_same_thread': False
            },
            'poolclass': StaticPool,
            'echo': False,
        })

    try:
        engine = create_engine(db_url, **kwargs)
    except NoSuchModuleError:
        raise OperationalException(
            f"Given value for db_url: '{db_url}' "
            f"is no valid database URL! (See {_SQL_DOCS_URL})")

    # https://docs.sqlalchemy.org/en/13/orm/contextual.html#thread-local-scope
    # Scoped sessions proxy requests to the appropriate thread-local session.
    # We should use the scoped_session object - not a separately initialized version
    Trade._session = scoped_session(
        sessionmaker(bind=engine, autoflush=True, autocommit=True))
    Trade.query = Trade._session.query_property()
    Order.query = Trade._session.query_property()
    PairLock.query = Trade._session.query_property()

    previous_tables = inspect(engine).get_table_names()
    _DECL_BASE.metadata.create_all(engine)
    check_migrate(engine,
                  decl_base=_DECL_BASE,
                  previous_tables=previous_tables)

    # Clean dry_run DB if the db is not in-memory
    if clean_open_orders and db_url != 'sqlite://':
        clean_dry_run_db()
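The query_property() assignments above are what let the model classes query without an explicit session; a hedged sketch (assumes Trade maps an is_open column):

    # Each class-level .query proxies to the thread-local scoped session.
    open_trades = Trade.query.filter(Trade.is_open.is_(True)).all()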
Code example #56
    def connect(self,
                user=None,
                password=None,
                host=None,
                database=None,
                type=None,
                driver=None):
        if self.engine:
            self._connection.close()
            self._connection = None
            self.engine.dispose()
            self.engine = None

        user = user or self.config.get("user")
        password = password or self.config.get("password")
        database = database or self.config.get("database")
        host = host or self.config.get("host")
        type = type or self.config.get("type")
        driver = driver or self.config.get("driver")
        pool_size = self.config.get("pool_size", 5)
        params = {
            "user": user,
            "password": password,
            "database": database,
            "host": host,
            "type": type,
            "driver": driver,
        }

        connect_url = ""
        if user:
            if not password:
                connect_url = "{type}+{driver}://{user}@{host}/{database}"
            else:
                connect_url = "{type}+{driver}://{user}:{password}@{host}/{database}"
        else:
            connect_url = "{type}+{driver}://{host}/{database}"
        self.engine = create_engine(connect_url.format(**params),
                                    encoding='utf8',
                                    pool_size=pool_size)

        if self.config.get("use_threadlocal", False):
            self.engine.pool._use_threadlocal = True

        self._connection = self.engine.connect()
        self._connection.execute("COMMIT")
        self.Session = sessionmaker(bind=self.engine)
Code example #57
File: services.py Project: e7dal/mercury
    def __init__(self, **kwargs):
        self.host = kwargs['host']
        self.db_name = kwargs['database']
        self.port = kwargs['port']
        self.username = kwargs['username']
        self.schema = kwargs['schema']
        password = kwargs['password']

        self.metadata = None
        self.engine = None
        self.session_factory = None
        self.Base = None

        url_template = '{db_type}://{user}:{passwd}@{host}:{port}/{database}'
        db_url = url_template.format(db_type='redshift+psycopg2',
                                     user=self.username,
                                     passwd=password,
                                     host=self.host,
                                     port=self.port,
                                     database=self.db_name)
        retries = 0

        connected = False
        while not connected and retries < 3:
            try:
                print('### Connecting to Redshift DB...', file=sys.stderr)
                self.engine = sqla.create_engine(db_url)

                #self.Base.prepare(self.engine, reflect=True)
                self.session_factory = sessionmaker(bind=self.engine,
                                                    autoflush=False,
                                                    autocommit=False)
                print('### created session factory', file=sys.stderr)
                connected = True
                print('### Connected to Redshift DB.', file=sys.stderr)

            except Exception as err:
                print(err, file=sys.stderr)
                print(err.__class__.__name__, file=sys.stderr)
                print(err.__dict__, file=sys.stderr)
                time.sleep(1)
                retries += 1

        if not connected:
            raise Exception(
                '!!! Unable to connect to Redshift db on host %s at port %s.' %
                (self.host, self.port))
Code example #58
def create_session():
    """."""

    Session = sessionmaker()

    if DB_TYPE == 'sqlite':
        engine = create_engine('sqlite:///{}'.format(main_db_details['path']))

    elif DB_TYPE == 'mysql':
        engine = create_engine('mysql+pymysql://{}:{}@{}:{}/{}'.format(
            main_db_details['username'], main_db_details['password'],
            main_db_details['host'], main_db_details['port'],
            main_db_details['name']))

    Session.configure(bind=engine)

    return Session()
Code example #59
def initialize_database_seed_values(database_connection_string):
    ''' Performs any application-specific data initialization that must be run on application
    startup vs during database migration.

    Parameters
    ----------
        database_connection_string: string
            The connection string that will be used to connect to the database and perform any
            runtime seeding.
    '''

    Session = sessionmaker()
    engine = create_engine(database_connection_string)
    Session.configure(bind=engine)
    session = Session()

    _populate_configuration_table(session)
Code example #60
def init_scibot(database):
    dburi = config.dbUri(user='******', database=database)
    #dburi = dbUri('postgres')
    engine = create_engine(dburi)
    init(engine, should_create=True, authority='scicrunch')

    Session = sessionmaker()
    Session.configure(bind=engine)
    session = Session()
    file = Path(__file__).parent / '../sql/permissions.sql'
    with open(file.as_posix(), 'rt') as f:
        sql = f.read()
    #args = dict(database=database)
    # FIXME XXX evil replace
    sql_icky = sql.replace(':database', f'"{database}"')
    session.execute(sql_icky)
    session.commit()