Example 1
    def connect(self, dbname, server='sqlite', backup=True):
        "connect to an existing database"
        if server == 'sqlite':
            if not os.path.exists(dbname):
                raise IOError("Database '%s' not found!" % dbname)
            
            if not isMotorDB(dbname):
                raise ValueError("'%s' is not a Motor file!" % dbname)

            if backup:
                save_backup(dbname)
        self.dbname = dbname
        self.engine = make_engine(dbname, server)
        self.conn = self.engine.connect()
        self.session = sessionmaker(bind=self.engine)()

        self.metadata = MetaData(self.engine)
        self.metadata.reflect()
        tables = self.tables = self.metadata.tables

        try:
            clear_mappers()
        except Exception:
            pass

        mapper(MotorsTable,   tables['motors'])
        mapper(InfoTable,   tables['info'])
Example 2
    def test_invocation_systemwide_loaders(self):
        baked.bake_lazy_loaders()
        try:
            User, Address = self._o2m_fixture()

            sess = Session()
            q = sess.query(User).options(lazyload(User.addresses))
            with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
                u1 = q.first()
                u1.addresses
                # invoked
                is_(
                    el.mock_calls[0][1][1],
                    u1._sa_instance_state
                )
        finally:
            baked.unbake_lazy_loaders()

        clear_mappers()
        User, Address = self._o2m_fixture()
        sess = Session()
        q = sess.query(User).options(lazyload(User.addresses))

        with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
            u1 = q.first()
            u1.addresses
            # not invoked
            eq_(el.mock_calls, [])
Example 3
    def setMappers(self):
        clear_mappers()
        self.proprio = getProprio(self.metadata)
        self.proprio.create(checkfirst=True)

        mapper(Proprietaire, self.proprio,
               primary_key=[self.proprio.c.pro_pk], )
Example 4
def delData(doc):
    # get the database configuration
    param_name = 'db_%s' % doc.getParentDatabase().id
    conf = doc.get_properties(params=(param_name, )).values()[0]
    if 'value' not in conf:
        api.portal.show_message(message='Replication not configured')
        return -1
    conf = json.loads(conf['value'])
    # instantiate the SQLAlchemy engine and mapper
    try:
        db = sql.create_engine(conf['conn_string'])
        metadata = sql.schema.MetaData(bind=db,reflect=True,schema=conf['db_schema'])
        table = sql.Table(conf['db_table'], metadata, autoload=True)
        orm.clear_mappers() 
        rowmapper = orm.mapper(plominoData,table)
    except Exception:
        api.portal.show_message(message=u'Errors occurred while connecting to the database', request=doc.REQUEST)
        return -1
    #creating session
    Sess = orm.sessionmaker(bind=db)
    session = Sess()
    #deleting row from database
    docid = doc.getId()
    session.query(plominoData).filter_by(id=docid).delete()
    session.commit()    
Example 5
def reflect(engine, models, schema=None):
    metadata = MetaData()
    metadata.bind = engine

    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category = SAWarning)
        metadata.reflect(schema = schema, views = False)

    if schema is not None:
        tables = dict((table_name.replace(str(schema) + ".", ""), table)
                      for table_name, table in metadata.tables.iteritems())
    else:
        tables = metadata.tables

    clear_mappers()

    mappers = {}
    for table_name, table in tables.iteritems():
        modelname = "".join([word.capitalize() for word in table_name.split("_")])

        try:
            model = getattr(models, modelname)
        except AttributeError:
            stderr.write("Missing model for table %s\n" % table_name)
        else:
            mappers[modelname] = mapper(model, table)

    Session = sessionmaker(bind=engine, autocommit=False, autoflush=True)

    return mappers, tables, Session
Example 6
    def test_broken(self):
        clear_mappers()
        dao = Dao("")
        with self.assertRaises(KeyError):
            dao.load_gtfs(BROKEN_GTFS, lenient=False)

        clear_mappers()
        dao = Dao("")
        dao.load_gtfs(BROKEN_GTFS, lenient=True)

        # The following are based on BROKEN GTFS content,
        # that is the entities count minus broken ones.
        self.assertTrue(len(dao.routes()) == 4)
        self.assertTrue(len(list(dao.stops())) == 12)
        self.assertTrue(len(dao.calendars()) == 2)
        self.assertTrue(len(list(dao.trips())) == 104)
        self.assertTrue(len(dao.stoptimes()) == 500)
        self.assertTrue(len(dao.fare_attributes()) == 2)
        self.assertTrue(len(dao.fare_rules()) == 4)
        # This stop has missing coordinates in the broken file
        stop00 = dao.stop('FUR_CREEK_RES3')
        self.assertAlmostEqual(stop00.stop_lat, 0.0, 5)
        self.assertAlmostEqual(stop00.stop_lon, 0.0, 5)
Example 7
    def __init__(self, dbname='xrayref.db'):
        "connect to an existing database"
        if not os.path.exists(dbname):
            parent, child = os.path.split(__file__)
            dbname = os.path.join(parent, dbname)
            if not os.path.exists(dbname):
                raise IOError("Database '%s' not found!" % dbname)

        if not isxrayDB(dbname):
            raise ValueError("'%s' is not a valid X-ray Database file!" % dbname)

        self.dbname = dbname
        self.engine = make_engine(dbname)
        self.conn = self.engine.connect()
        self.session = sessionmaker(bind=self.engine)()
        self.metadata = MetaData(self.engine)
        self.metadata.reflect()
        tables = self.tables = self.metadata.tables
        try:
            clear_mappers()
        except Exception:
            pass
        mapper(ChantlerTable,            tables['Chantler'])
        mapper(WaasmaierTable,           tables['Waasmaier'])
        mapper(KeskiRahkonenKrauseTable, tables['KeskiRahkonen_Krause'])
        mapper(ElementsTable,            tables['elements'])
        mapper(XrayLevelsTable,          tables['xray_levels'])
        mapper(XrayTransitionsTable,     tables['xray_transitions'])
        mapper(CosterKronigTable,        tables['Coster_Kronig'])
        mapper(PhotoAbsorptionTable,     tables['photoabsorption'])
        mapper(ScatteringTable,          tables['scattering'])
Example 8
def db_commit():
    from openpyxl import load_workbook
    wb = load_workbook(filename=os.path.join(app.config['UPLOAD_FOLDER'],session['filename']), read_only=True)
    sheet_headers = session['sheet_headers']

    ws = wb.active

    tab = session['table_name']
    metadata = MetaData(bind=e)
    t = Table(tab, metadata, Column('id', Integer, primary_key=True),
              *(Column(header, String(8000)) for header in sheet_headers))
    clear_mappers()
    mapper(sheet, t)

    handler_count = session['handler_count']
    handle_size = session['handle_size']
    count = 0
    handle_size_counter = 0
    for r in ws.rows:
        count += 1
        if handler_count > count - 1:
            # skip rows already committed in a previous batch
            continue
        else:
            handle_size_counter += 1
            s = sheet()
            for cou, c in enumerate(r):
                setattr(s, sheet_headers[cou], c.value)
            db_session.add(s)
            if handle_size_counter - 1 == handle_size:
                break
    session['handler_count'] = handler_count + handle_size_counter
    db_session.commit()
    return redirect(url_for('database_handler'))
Example 9
        def go():
            mapper(A, table1, properties={
                "bs": relationship(B, order_by=table2.c.col1)
            })
            mapper(B, table2)

            mapper(A, table1, non_primary=True)

            sess = create_session()
            a1 = A(col2="a1")
            a2 = A(col2="a2")
            a3 = A(col2="a3")
            a1.bs.append(B(col2="b1"))
            a1.bs.append(B(col2="b2"))
            a3.bs.append(B(col2="b3"))
            for x in [a1, a2, a3]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_(
                [
                    A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
                    A(col2="a2", bs=[]),
                    A(col2="a3", bs=[B(col2="b3")])
                ],
                alist)

            for a in alist:
                sess.delete(a)
            sess.flush()
            sess.close()
            clear_mappers()
Example 10
    def saveData(self, doc):
        self.plominoDoc = doc
        plominoDb = doc.getParentDatabase()
        prm = doc.getItem('pg_replication_config',0)

        self.conn_string = plominoDb.connString
        self.db_schema = plominoDb.dbSchema
        self.db_table = plominoDb.dbTable

        db = sql.create_engine(self.conn_string)
        metadata = sql.schema.MetaData(bind=db,reflect=True,schema=self.db_schema)
        table = sql.Table(self.db_table, metadata, autoload=True)
        orm.clear_mappers()
        rowmapper = orm.mapper(plominoData,table)
        Sess = orm.sessionmaker(bind=db)
        
        doc = self.plominoDoc
        data = self.getPlominoValues()
        data = json.loads(json.dumps(data, default=DateTime.DateTime.ISO, use_decimal=True))
        data['id'] = doc.getId()
        data['plominodb'] = doc.getParentDatabase().id
        data['plominoform'] = doc.getForm().getFormName()
        data['owner'] = doc.getItem('owner','')
        data['review_state'] = doc.getItem('iol','')
        row = plominoData(data['id'],data['plominodb'],data['plominoform'],data['owner'],data['review_state'],data) 
        id = data['id']
        session = Sess()
        session.query(plominoData).filter_by(id=row.id).delete()
        session.commit()
        session.add(row)
        session.commit()        
Example 11
        def go():
            m1 = mapper(A, table1, properties={
                "bs":relation(B)
            })
            m2 = mapper(B, table2)

            m3 = mapper(A, table1, non_primary=True)

            sess = create_session()
            a1 = A(col2="a1")
            a2 = A(col2="a2")
            a3 = A(col2="a3")
            a1.bs.append(B(col2="b1"))
            a1.bs.append(B(col2="b2"))
            a3.bs.append(B(col2="b3"))
            for x in [a1,a2,a3]:
                sess.save(x)
            sess.flush()
            sess.clear()

            alist = sess.query(A).all()
            self.assertEquals(
                [
                    A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
                    A(col2="a2", bs=[]),
                    A(col2="a3", bs=[B(col2="b3")])
                ],
                alist)

            for a in alist:
                sess.delete(a)
            sess.flush()
            sess.close()
            clear_mappers()
Example 12
    def setUp(self):
        clear_mappers()
        pysqla.AVAILABLE_OBJECTS = pysqla._marker
        self.session = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
        Base = extended_declarative_base(
            self.session,
            metadata=sa.MetaData('sqlite:///:memory:'))

        class Test1(Base):
            id = sa.Column(sa.Integer, primary_key=True)
            name = sa.Column(sa.String(50))

        class Test2(Base):
            idtest = sa.Column(sa.Integer, primary_key=True)
            name = sa.Column(sa.String(50))

        Base.metadata.create_all()

        self.Test1 = Test1
        self.Test2 = Test2
        with transaction.manager:
            self.value1 = Test1(name='Bob')
            self.session.add(self.value1)
            self.value2 = Test2(name='Bob')
            self.session.add(self.value2)
Example 13
def queryDb():
    try:
        metadata = MetaData(bind=e)
        t = Table(session['table_name'], metadata,
                  Column('id', Integer, primary_key=True),
                  *(Column(header, String(8000)) for header in session['sheet_headers']))
        clear_mappers()
        mapper(sheet, t)
        qu = request.get_json()
        query_var_byUser = qu['col_value']
        query_column_byUser = qu['col_name']
        qargs = {query_column_byUser: query_var_byUser}
        acc = db_session.query(sheet).filter_by(**qargs).all()
        result_list = []
        for item in acc:
            temp_dict = {}
            for head in session['sheet_headers']:
                temp_dict[head] = getattr(item, head)
            result_list.append(temp_dict)

        if not result_list:
            return jsonify([{"No Records available for this Query": ""}]), 200
        return jsonify(result_list), 200
    except Exception:
        return jsonify([{"Enter valid Data": ""}]), 200
Example 14
    def test_noninherited_warning(self):
        A, B, b_table, a_table, Dest, dest_table = (
            self.classes.A,
            self.classes.B,
            self.tables.b_table,
            self.tables.a_table,
            self.classes.Dest,
            self.tables.dest_table,
        )

        mapper(A, a_table, properties={"some_dest": relationship(Dest)})
        mapper(B, b_table, inherits=A, concrete=True)
        mapper(Dest, dest_table)
        b = B()
        dest = Dest()
        assert_raises(AttributeError, setattr, b, "some_dest", dest)
        clear_mappers()

        mapper(A, a_table, properties={"a_id": a_table.c.id})
        mapper(B, b_table, inherits=A, concrete=True)
        mapper(Dest, dest_table)
        b = B()
        assert_raises(AttributeError, setattr, b, "a_id", 3)
        clear_mappers()

        mapper(A, a_table, properties={"a_id": a_table.c.id})
        mapper(B, b_table, inherits=A, concrete=True)
        mapper(Dest, dest_table)
Example 15
    def connect(self, dbname, backup=True):
        "connect to an existing database"
        if not os.path.exists(dbname):
            raise IOError("Database '%s' not found!" % dbname)

        if not isInstrumentDB(dbname):
            raise ValueError("'%s' is not an Instrument file!" % dbname)

        if backup:
            save_backup(dbname)
        self.dbname = dbname
        self.engine = create_engine('sqlite:///%s' % self.dbname,
                                    poolclass=SingletonThreadPool)
        self.conn = self.engine.connect()
        self.session = sessionmaker(bind=self.engine)()

        self.metadata = MetaData(self.engine)
        self.metadata.reflect()
        tables = self.tables = self.metadata.tables

        try:
            clear_mappers()
        except Exception:
            pass

        mapper(Info,     tables['info'])
        mapper(Command,  tables['command'])
        mapper(PV,       tables['pv'])

        mapper(Instrument, tables['instrument'],
               properties={'pvs': relationship(PV,
                                               backref='instrument',
                                               secondary=tables['instrument_pv'])})

        mapper(PVType,   tables['pvtype'],
               properties={'pv':
                           relationship(PV, backref='pvtype')})

        mapper(Position, tables['position'],
               properties={'instrument': relationship(Instrument,
                                                      backref='positions'),
                           'pvs': relationship(Position_PV) })

        mapper(Instrument_PV, tables['instrument_pv'],
               properties={'pv': relationship(PV),
                           'instrument': relationship(Instrument)})

        mapper(Position_PV, tables['position_pv'],
               properties={'pv': relationship(PV)})

        mapper(Instrument_Precommand,  tables['instrument_precommand'],
               properties={'instrument': relationship(Instrument,
                                                      backref='precommands'),
                           'command':   relationship(Command,
                                                     backref='inst_precoms')})
        mapper(Instrument_Postcommand,   tables['instrument_postcommand'],
               properties={'instrument': relationship(Instrument,
                                                      backref='postcommands'),
                           'command':   relationship(Command,
                                                     backref='inst_postcoms')})
Example 16
    def tearDownAll(self):
        global clear_mappers
        if clear_mappers is None:
            from sqlalchemy.orm import clear_mappers

        clear_mappers()
        _otest_metadata.drop_all()
Example 17
    def _create_tables(self):

        metadata = MetaData()
        metadata.bind = self.engine
        self.messages = self._create_table('messages', metadata,
                                           Column('id', String(192), index=True, primary_key=True),
                                           Column('content', Text, nullable=False),
                                           Column('received_at', DateTime, nullable=False),
                                           Column('sent_at', DateTime))

        self.digests = self._create_table('digests', metadata,
                                          Column('msg_id', String(192), ForeignKey('messages.id', ondelete='cascade'),
                                                 primary_key=True),
                                          Column('send_to', String(192), index=True, primary_key=True),
                                          Column('scheduled_at', DateTime, nullable=False),
                                          Column('sent_at', DateTime))

        metadata.bind = self.engine
        metadata.create_all(self.engine)

        clear_mappers()
        mapper(Message, self.messages, properties={
                'digests': relationship(Digest, backref='msg')
                })
        mapper(Digest, self.digests)

        self.session = create_session()
Example 18
    def teardown(self):
        clear_mappers()

        for db in self.dbs:
            db.connect().invalidate()
        for i in range(1, 5):
            os.remove("shard%d_%s.db" % (i, provision.FOLLOWER_IDENT))
Example 19
    def _do_mapper_test(self, configs):
        opts = {
            'lazyload': 'select',
            'joinedload': 'joined',
            'subqueryload': 'subquery',
        }

        for o, i, k, count in configs:
            mapper(User, users, properties={
                'orders': relationship(Order, lazy=opts[o], order_by=orders.c.id),
            })
            mapper(Order, orders, properties={
                'items': relationship(Item,
                                      secondary=order_items, lazy=opts[i], order_by=items.c.id),
            })
            mapper(Item, items, properties={
                'keywords': relationship(Keyword,
                                         lazy=opts[k],
                                         secondary=item_keywords,
                                         order_by=keywords.c.id)
            })
            mapper(Keyword, keywords)
            
            try:
                self._do_query_tests([], count)
            finally:
                clear_mappers()
Example 20
def restore(self):
	"""restores a griffith compressed backup"""
	filename = gutils.file_chooser(_("Restore Griffith backup"), \
		action=gtk.FILE_CHOOSER_ACTION_OPEN, buttons= \
		(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, \
		gtk.STOCK_OPEN, gtk.RESPONSE_OK))
	if filename[0]:
		try:
			zip_file = zipfile.ZipFile(filename[0], 'r')
		except Exception:
			gutils.error(self, _("Can't read backup file"), self.widgets['window'])
			return False
		mypath = os.path.join(self.locations['posters'])
		for each in zip_file.namelist():
			file_to_restore = os.path.split(each)
			if not os.path.isdir(file_to_restore[1]):
				if file_to_restore[1] == '':
					continue
				if file_to_restore[1].endswith('.jpg'):
					myfile = os.path.join(mypath,file_to_restore[1])
				else:
					myfile = os.path.join(self.locations['home'],file_to_restore[1])
				outfile = open(myfile, 'wb')
				outfile.write(zip_file.read(each))
				outfile.flush()
				outfile.close()
		zip_file.close()

		# restore config file
		self.config = config.Config(file=os.path.join(self.locations['home'],'griffith.conf'))
		filename = os.path.join(self.locations['home'], self.config["default_db"])

		self.db.metadata.engine.dispose() # close DB
		from sqlalchemy.orm import clear_mappers
		clear_mappers()

		# check if file needs conversion
		if self.config['default_db'].lower().endswith('.gri'):
			self.debug.show('Old database format detected. Converting...')
			from dbupgrade import convert_from_old_db
			from initialize import location_posters
			if convert_from_old_db(self, filename, os.path.join(self.locations['home'], 'griffith.db')):
				self.config.save()
				location_posters(self.locations, self.config)
			else:
				print "Can't convert old database, exiting."
				import sys
				sys.exit(4)

		self.db = sql.GriffithSQL(self.config, self.debug, self.locations['home'])
		from initialize import dictionaries, people_treeview
		dictionaries(self)
		people_treeview(self)
		# let's refresh the treeview
		self.clear_details()
		self.populate_treeview()
		self.go_last()
		self.treeview_clicked()
		self.count_statusbar()
		gutils.info(self, _("Backup restored"), self.widgets['window'])
Example 21
def main():
    # First create the SQL db that we will dump to
    engine, table = init_db()
    connection = engine.connect()

    # Load the sessions for the deals tables and the output table (quick and dirty, but it works)
    clear_mappers()
    session = loadTables()
    session2 = loadOutput()

    # create a connection to the mongo DB
    client = MongoClient()
    db = client.dealtrader
    collection = db.raw_tweets

    while True:
        # get number of deals in the table
        cnttot = session.query(func.max(Deals.deal_id))
        num_deals = cnttot[0][0]
        #print num_deals

        cntdone = session2.query(func.max(Output.deal_id))
        min_deal = cntdone[0][0] or 0
        #print min_deal

        res = session.query(Deals).all()

        for i in range(min_deal, num_deals):
            tweetid = int(res[i].tweet_id)
            q = session.query(Matches)
            mchres = q.filter(Matches.tweet_id == tweetid).all()
            tweet = collection.find_one( { 'id': tweetid } )
            try:
                deal_id = res[i].deal_id
                origtext = tweet['text']
                tweetts = str(tweet['created_at'])
                itemdescr = res[i].description
                itemprice = res[i].price
                itemurl = res[i].url
                lowest_price = min(list(map(lambda x : x.merchant_price, mchres)))
                best_listings = list(filter(lambda x : x.merchant_price==lowest_price, mchres))
                best_listing = best_listings[0]
                bestprice = str(best_listing.merchant_price)
                bestlink = str(best_listing.url)

                ins = insert(table).values(
                                deal_id = deal_id,
                                tweet_id = tweetid,
                                orig_text = origtext,
                                tweet_ts = tweetts,
                                description = itemdescr,
                                price = itemprice,
                                url = itemurl,
                                best_price = bestprice,
                                best_url = bestlink
                                )
                result = connection.execute(ins)
            except Exception:
                # skip deals whose tweet or match data is missing or malformed
                pass
Example 22
File: api.py Project: roaet/melange
def db_reset(options, *plugins):
    drop_db(options)
    db_sync(options)
    # NOTE(jkoelker) This is bad, but the only way for the models to pick
    #                up columns added in the migrations for unittests
    clear_mappers()
    db_reset_for_plugins(options, *plugins)
    configure_db(options)
Example 23
    def setup_engine(self, url):
        self.engine = create_engine(url)  # e.g. 'sqlite:///:memory:', echo=True
        self.metadata = schema.MetaData()
        self.metadata.bind = self.engine
        orm.clear_mappers()
        sm = orm.sessionmaker(bind=self.engine, autoflush=True, autocommit=False,
                              expire_on_commit=True)
        self.session = orm.scoped_session(sm)
Example 24
    def createMappers(self):
        orm.clear_mappers()

        orm.mapper(models.User, self.users_table, properties={
            'emails': orm.relation(models.Email, lazy=False),
            'phone_numbers': orm.relation(models.PhoneNumber, lazy=True)})
        orm.mapper(models.Email, self.emails_table)
        orm.mapper(models.PhoneNumber, self.phone_numbers_table)
Example 25
    def map_tables(self):
        """Maps the MySQL tables into our Python classes."""
        clear_mappers()
        mapper(Student, self.student_table)
        mapper(Term, self.term_table)
        mapper(Course, self.course_table)
        mapper(Offering, self.offering_table)
        mapper(Enrolled, self.enrolled_table)
        mapper(Received, self.received_table)
Example 26
    def setUpAll(self):
        clear_mappers()
        objectstore.clear()
        global Person, Preferences, Address

        class Person(ActiveMapper):
            class mapping:
                __version_id_col__ = 'row_version'
                full_name   = column(String(128))
                first_name  = column(String(128))
                middle_name = column(String(128))
                last_name   = column(String(128))
                birth_date  = column(DateTime)
                ssn         = column(String(128))
                gender      = column(String(128))
                home_phone  = column(String(128))
                cell_phone  = column(String(128))
                work_phone  = column(String(128))
                row_version = column(Integer, default=0)
                prefs_id    = column(Integer, foreign_key=ForeignKey('preferences.id'))
                addresses   = one_to_many('Address', colname='person_id', backref='person', order_by=['state', 'city', 'postal_code'])
                preferences = one_to_one('Preferences', colname='pref_id', backref='person')

            def __str__(self):
                s =  '%s\n' % self.full_name
                s += '  * birthdate: %s\n' % (self.birth_date or 'not provided')
                s += '  * fave color: %s\n' % (self.preferences.favorite_color or 'Unknown')
                s += '  * personality: %s\n' % (self.preferences.personality_type or 'Unknown')

                for address in self.addresses:
                    s += '  * address: %s\n' % address.address_1
                    s += '             %s, %s %s\n' % (address.city, address.state, address.postal_code)

                return s

        class Preferences(ActiveMapper):
            class mapping:
                __table__        = 'preferences'
                favorite_color   = column(String(128))
                personality_type = column(String(128))

        class Address(ActiveMapper):
            class mapping:
                # note that in other objects, the 'id' primary key is
                # automatically added -- if you specify a primary key,
                # then ActiveMapper will not add an integer primary key
                # for you.
                id          = column(Integer, primary_key=True)
                type        = column(String(128))
                address_1   = column(String(128))
                city        = column(String(128))
                state       = column(String(128))
                postal_code = column(String(128))
                person_id   = column(Integer, foreign_key=ForeignKey('person.id'))

        activemapper.metadata.bind = testing.db
        activemapper.create_tables()
Example 27
def save_sqlite(filename, dbname=None, server='postgresql', **kws):
    """save scandb to sqlite format

    Arguments
    ---------
    filename  name of sqlite3 database to write -- will be clobbered if it exists
    dbname    name of database
    server    server type (only postgresql supported)
    """
    if server.startswith('sqlit'):
        raise ValueError("no need to save sqlite db to sqlite!")

    pg_scandb = ScanDB(dbname=dbname, server=server, **kws)
    if os.path.exists(filename):
        os.unlink(filename)
        time.sleep(0.5)

    tablenames = ('info', 'config', 'slewscanpositioners', 'scanpositioners',
                  'scancounters', 'scandetectors', 'scandefs', 'extrapvs',
                  'macros', 'pv', 'instrument', 'position', 'instrument_pv',
                  'position_pv', 'commands')

    rows, cols = {}, {}
    n = 0
    for tname in tablenames:
        allrows = pg_scandb.select(tname)
        if len(allrows) > 0:
            cols[tname] = allrows[0].keys()
            rows[tname] = [[item for item in row] for row in allrows]
            n += 1
            if n % 10000 == 0:
                print('.', end='')
                sys.stdout.flush()

    pg_scandb.close()
    clear_mappers()

    sdb = ScanDB(dbname=filename, server='sqlite3', create=True)
    sdb.clobber_all_info()
    sdb.commit()
    for tname in tablenames:
        if tname not in rows:
            continue
        cls, table = sdb.get_table(tname)
        ckeys = cols[tname]
        for row in rows[tname]:
            kws = {}
            for k, v in zip(ckeys, row):
                kws[k] = v
                n += 1
                if n % 10000 == 0:
                    print('.', end='')
                    sys.stdout.flush()
            table.insert().execute(**kws)
    sdb.commit()
    print(" Wrote %s " % filename)
Example 28
    def tearDown(self):
        """ Clear the registry, unload the blok manager and drop the database
        """
        registry = RegistryManager.get(Configuration.get('db_name'))
        registry.close()
        RegistryManager.clear()
        BlokManager.unload()
        clear_mappers()
        self.__class__.dropdb()
        super(TestBlok, self).tearDown()
Example 29
    def tearDown(self):
        """ Clear the registry, unload the blok manager and drop the database
        """
        if self.registry:
            self.registry.close()

        RegistryManager.clear()
        BlokManager.unload()
        clear_mappers()
        super(DBTestCase, self).tearDown()
Example 30
    def test_alias_pathing(self):
        metadata = MetaData(self.engine)

        a = Table(
            "a",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("bid", Integer, ForeignKey("b.id")),
            Column("type", String(30)),
        )

        asub = Table(
            "asub",
            metadata,
            Column("id", Integer, ForeignKey("a.id"), primary_key=True),
            Column("data", String(30)),
        )

        b = Table(
            "b",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
        )
        mapper(A, a, polymorphic_identity="a", polymorphic_on=a.c.type)
        mapper(ASub, asub, inherits=A, polymorphic_identity="asub")
        mapper(B, b, properties={"as_": relationship(A)})

        metadata.create_all()
        sess = Session()
        a1 = ASub(data="a1")
        a2 = ASub(data="a2")
        a3 = ASub(data="a3")
        b1 = B(as_=[a1, a2, a3])
        sess.add(b1)
        sess.commit()
        del sess

        # sqlite has a slow enough growth here
        # that we have to run it more times to see the
        # "dip" again
        @profile_memory(maxtimes=120)
        def go():
            sess = Session()
            sess.query(B).options(subqueryload(B.as_.of_type(ASub))).all()
            sess.close()

        try:
            go()
        finally:
            metadata.drop_all()
        clear_mappers()
Example 31
    def test_alias_pathing(self):
        metadata = MetaData(testing.db)

        a = Table("a", metadata,
            Column('id', Integer, primary_key=True,
                                test_needs_autoincrement=True),
            Column('bid', Integer, ForeignKey('b.id')),
            Column('type', String(30))
        )

        asub = Table("asub", metadata,
            Column('id', Integer, ForeignKey('a.id'),
                                primary_key=True),
            Column('data', String(30)))

        b = Table("b", metadata,
            Column('id', Integer, primary_key=True,
                                test_needs_autoincrement=True),
        )
        mapper(A, a, polymorphic_identity='a',
            polymorphic_on=a.c.type)
        mapper(ASub, asub, inherits=A, polymorphic_identity='asub')
        m1 = mapper(B, b, properties={
            'as_': relationship(A)
        })

        metadata.create_all()
        sess = Session()
        a1 = ASub(data="a1")
        a2 = ASub(data="a2")
        a3 = ASub(data="a3")
        b1 = B(as_=[a1, a2, a3])
        sess.add(b1)
        sess.commit()
        del sess

        # sqlite has a slow enough growth here
        # that we have to run it more times to see the
        # "dip" again
        @profile_memory(times=120)
        def go():
            sess = Session()
            sess.query(B).options(subqueryload(B.as_.of_type(ASub))).all()
            sess.close()
        try:
            go()
        finally:
            metadata.drop_all()
        clear_mappers()
Example 32
def write_job_metadata(tool_job_working_directory, job_metadata, set_meta, tool_provided_metadata):
    for i, file_dict in enumerate(tool_provided_metadata.get_new_datasets_for_metadata_collection(), start=1):
        filename = file_dict["filename"]
        new_dataset_filename = os.path.join(tool_job_working_directory, "working", filename)
        new_dataset = galaxy.model.Dataset(id=-i, external_filename=new_dataset_filename)
        extra_files = file_dict.get('extra_files', None)
        if extra_files is not None:
            new_dataset._extra_files_path = os.path.join(tool_job_working_directory, "working", extra_files)
        new_dataset.state = new_dataset.states.OK
        new_dataset_instance = galaxy.model.HistoryDatasetAssociation(id=-i, dataset=new_dataset, extension=file_dict.get('ext', 'data'))
        set_meta(new_dataset_instance, file_dict)
        file_dict['metadata'] = json.loads(new_dataset_instance.metadata.to_JSON_dict())  # storing metadata in external form, need to turn back into dict, then later jsonify

    tool_provided_metadata.rewrite()
    clear_mappers()
Example 33
    def test_no_instrumentation(self):
        users = self.tables.users

        mapper(User, users)
        u1 = User(name="ed")
        u1_pickled = pickle.dumps(u1, -1)

        clear_mappers()

        mapper(User, users)

        u1 = pickle.loads(u1_pickled)
        # this fails unless the InstanceState
        # compiles the mapper
        eq_(str(u1), "User(name='ed')")
Example 34
    def test_path_registry(self):
        metadata = MetaData()
        a = Table("a", metadata,
                  Column('id', Integer, primary_key=True),
                  Column('foo', Integer),
                  Column('bar', Integer)
                  )
        m1 = mapper(A, a)

        @profile_memory()
        def go():
            ma = sa.inspect(aliased(A))
            m1._path_registry[m1.attrs.foo][ma][m1.attrs.bar]
        go()
        clear_mappers()
Example 35
def session_factory():
    clear_mappers()
    engine = create_engine(TEST_DATABASE_URI_IN_MEMORY)
    metadata.create_all(engine)
    for table in reversed(metadata.sorted_tables):
        engine.execute(table.delete())
    map_model_to_tables()
    session_factory = sessionmaker(bind=engine)
    database_repository.populate(session_factory, TEST_DATA_PATH_DATABASE,
                                 "Data1000Movies.csv")
    database_repository.populate_user_comment(engine, TEST_DATA_PATH_DATABASE)

    yield session_factory
    metadata.drop_all(engine)
    clear_mappers()
Example 36
    def teardown_test(self):
        clear_mappers()

        # the tests in this suite don't cleanly close out the Session
        # at the moment so use the reaper to close all connections
        testing_reaper.checkin_all()

        for i in [1, 3]:
            os.remove("shard%d_%s.db" % (i, provision.FOLLOWER_IDENT))

        with self.postgresql_engine.begin() as conn:
            self.tables_test_metadata.drop_all(conn)
            for i in [2, 4]:
                conn.exec_driver_sql("DROP SCHEMA shard%s CASCADE" % (i, ))
        self.postgresql_engine.dispose()
Example 37
    def assert_env_is_clean(self):
        # sanity check: ensure the source has data
        session = RealSession()
        session.clear()
        assert session.query(Product).count()

        # ensure the target is empty
        session = MemSession()
        session.clear()
        eq_(session.query(Product).count(), 0)

        ### FIXME, this shouldn't be so lame
        # clear mappers so that the dataset_generator() can set up mappers on its own:
        from sqlalchemy.orm import clear_mappers
        clear_mappers()
Example 38
    def loadTables(self):

        dbPath = 'tweet.db'
        engine = create_engine('sqlite:///%s' % dbPath, echo=True)

        metadata = MetaData(engine)
        deals = Table('deals', metadata, autoload=True)
        matches = Table('price_check_history', metadata, autoload=True)
        clear_mappers()
        mapper(Deals, deals)
        mapper(Matches, matches)

        Session = sessionmaker(bind=engine)
        session = Session()
        return session
Example 39
    def __init__(self, dbname='xraydb.sqlite', read_only=True):
        "connect to an existing database"
        if not os.path.exists(dbname):
            parent, child = os.path.split(__file__)
            dbname = os.path.join(parent, dbname)
            if not os.path.exists(dbname):
                raise IOError("Database '%s' not found!" % dbname)

        if not isxrayDB(dbname):
            raise ValueError("'%s' is not a valid X-ray Database file!" %
                             dbname)

        self.dbname = dbname
        self.engine = make_engine(dbname)
        self.conn = self.engine.connect()
        kwargs = {}
        if read_only:
            kwargs = {'autoflush': True, 'autocommit': False}

            def readonly_flush(*args, **kwargs):
                return

            self.session = sessionmaker(bind=self.engine, **kwargs)()
            self.session.flush = readonly_flush
        else:
            self.session = sessionmaker(bind=self.engine, **kwargs)()

        self.metadata = MetaData(self.engine)
        self.metadata.reflect()
        tables = self.tables = self.metadata.tables

        clear_mappers()
        mapper(ChantlerTable, tables['Chantler'])
        mapper(WaasmaierTable, tables['Waasmaier'])
        mapper(KeskiRahkonenKrauseTable, tables['KeskiRahkonen_Krause'])
        mapper(KrauseOliverTable, tables['Krause_Oliver'])
        mapper(CoreWidthsTable, tables['corelevel_widths'])
        mapper(ElementsTable, tables['elements'])
        mapper(XrayLevelsTable, tables['xray_levels'])
        mapper(XrayTransitionsTable, tables['xray_transitions'])
        mapper(CosterKronigTable, tables['Coster_Kronig'])
        mapper(PhotoAbsorptionTable, tables['photoabsorption'])
        mapper(ScatteringTable, tables['scattering'])

        self.atomic_symbols = [
            e.element
            for e in self.tables['elements'].select().execute().fetchall()
        ]
Example 40
def session():
    clear_mappers()
    database_engine = create_engine(TEST_DATABASE_URI_IN_MEMORY)
    metadata.create_all(database_engine)
    for table in reversed(metadata.sorted_tables):
        database_engine.execute(table.delete())
    map_model_to_tables()
    session_factory = sessionmaker(autocommit=False,
                                   autoflush=True,
                                   bind=database_engine)
    database_repository.populate(session_factory, TEST_DATA_PATH_DATABASE)
    database_repository.second_populate(session_factory,
                                        TEST_DATA_PATH_DATABASE)
    yield session_factory()
    metadata.drop_all(database_engine)
    clear_mappers()
Example 41
    def saveData(self, item, tableName):
        # set the name of the table to write to
        t.name = tableName
        if not t.exists(bind=engine):
            t.create(bind=engine)
        clear_mappers()
        mapper(HouseDetail, t)
        houseDetail = HouseDetail()
        for key in item:
            if hasattr(houseDetail, key):
                setattr(houseDetail, key, item[key])
        global Session
        session = Session()
        session.add(houseDetail)
        session.commit()
Example 42
def database_engine():
    engine = create_engine(TEST_DATABASE_URI_FILE)
    clear_mappers()
    metadata.create_all(engine)  # Conditionally create database tables.
    for table in reversed(
            metadata.sorted_tables):  # Remove any data from the tables.
        engine.execute(table.delete())
    map_model_to_tables()
    session_factory = sessionmaker(bind=engine)
    database_repository.populate(session_factory, TEST_DATA_PATH_DATABASE,
                                 "Data1000Movies.csv")
    database_repository.populate_user_comment(engine, TEST_DATA_PATH_DATABASE)

    yield engine
    metadata.drop_all(engine)
    clear_mappers()
Example 43
    def test_no_mappers(self):
        users = self.tables.users

        mapper(User, users)
        u1 = User(name="ed")
        u1_pickled = pickle.dumps(u1, -1)

        clear_mappers()

        assert_raises_message(
            orm_exc.UnmappedInstanceError,
            "Cannot deserialize object of type "
            "<class 'sqlalchemy.testing.pickleable.User'> - no mapper()",
            pickle.loads,
            u1_pickled,
        )
Example 44
    def _create_userdb_tables(self, metadata):
        """Called by the datamanager to set up the dbase tables.
        We also map the ORM classes to the dbase tables.
        Returns a dict with the keys set to the dbase table names and
        the values set to the ORM classes."""
        tables = {}
        sqlorm.clear_mappers()
        for table in self.tableslist:
            self._appendcolumns(table)
            if hasattr(ORMS, table.name):
                orm = getattr(ORMS, table.name)
                orm._name = table.name
                sqlorm.mapper(orm, table)
                tables[table.name] = orm

        metadata.create_all()
        return tables
Example 45
    def tearDown(self):
        global Session
        if Session is None:
            from sqlalchemy.orm.session import Session
        Session.close_all()
        global clear_mappers
        if clear_mappers is None:
            from sqlalchemy.orm import clear_mappers

        if not self.keep_mappers:
            clear_mappers()
        if not self.keep_data:
            for t in reversed(_otest_metadata.sorted_tables):
                try:
                    t.delete().execute().close()
                except Exception, e:
                    print "EXCEPTION DELETING...", e
Example 46
    def setUp(self):
        global realmeta, RealSession, memmeta, MemSession
        import sqlalchemy
        from sqlalchemy import MetaData, create_engine
        from sqlalchemy.orm import clear_mappers, scoped_session, sessionmaker, relation
        clear_mappers()

        realmeta = MetaData(bind=create_engine(conf.HEAVY_DSN))
        RealSession = scoped_session(sessionmaker(autoflush=False, transactional=False, bind=realmeta.bind))

        memmeta = MetaData(bind=create_engine(conf.LITE_DSN))
        MemSession = scoped_session(sessionmaker(autoflush=True, transactional=False, bind=memmeta.bind))

        self.setup_mappers()

        session = RealSession()

        # data source:
        categories.create(bind=realmeta.bind)
        products.create(bind=realmeta.bind)
        offers.create(bind=realmeta.bind)

        parkas = Category()
        parkas.name = "parkas"
        session.save(parkas)
        jersey = Product()
        jersey.name = "jersey"
        jersey.category = parkas
        session.save(jersey)

        rebates = Category()
        rebates.name = "rebates"
        rebates.id = 2
        super_cashback = Offer()
        super_cashback.name = "super cash back!"
        super_cashback.product = jersey
        super_cashback.category = rebates
        session.save(super_cashback)
        session.save(rebates)

        session.flush()

        # data target:
        categories.create(bind=memmeta.bind)
        products.create(bind=memmeta.bind)
        offers.create(bind=memmeta.bind)
Example 47
def setup_mappers():
    orm.clear_mappers()
    # Other tests may call clear_mappers(); be robust by setting up
    # the mappers again in this test's own setup.
    m1 = orm.mapper(User,
                    test_users,
                    properties={
                        'skills':
                        orm.relation(Skill,
                                     primaryjoin=test_users.columns['id'] ==
                                     test_skills.columns['user_id']),
                    })
    m2 = orm.mapper(Skill, test_skills)

    m3 = orm.mapper(TestOne, test_one)
    m4 = orm.mapper(TestTwo, test_two)
    return [m1, m2, m3, m4]
Example 48
def database_engine():
    database_engine = create_engine(TEST_DATABASE_URI_FILE)
    clear_mappers()
    metadata.create_all(
        database_engine)  # Conditionally create database tables.
    for table in reversed(
            metadata.sorted_tables):  # Remove any data from the tables.
        database_engine.execute(table.delete())
    map_model_to_tables()
    session_factory = sessionmaker(autocommit=False,
                                   autoflush=True,
                                   bind=database_engine)
    database_repository.populate(session_factory, TEST_DATA_PATH_DATABASE)
    database_repository.second_populate(session_factory,
                                        TEST_DATA_PATH_DATABASE)
    yield database_engine
    metadata.drop_all(database_engine)
    clear_mappers()
Example 49
def DBconnect(Guest_object, Photo_object):
	engine = create_engine('mysql+pymysql://root:123456@localhost:3306\
			/guestbook?charset=utf8mb4', echo=True)  
	# the engine connects with charset utf8mb4 (set in the URL)

	# create the guestbook table if it does not exist
	# !! drop the existing guestbook table in MySQL if its schema differs from the one below
	metadata = MetaData(bind=engine)
	guest_table = Table(
		'guestbook', metadata,
		Column('id', Integer, primary_key=True),
		Column('name', Text, nullable=False),
		Column('message', Text, nullable=False),
		Column('time', DateTime, default=datetime.now),
		mysql_default_charset='utf8mb4')  # required so the table actually stores utf8mb4 text

	photo_table = Table(
		'photo', metadata,
		Column('id', Integer, primary_key=True),
		Column('guestbook_id', Integer, ForeignKey('guestbook.id')),
		Column('image', String(128), nullable=True),
		Column('url', String(256), nullable=True),
		mysql_default_charset='utf8')

#	guest_photo_table = Table(
#		'guestbook_photo', metadata,
#		Column('guestbook_id', None, ForeignKey('guestbook.id'), \
#			primary_key=True),
#		Column('photo_id', None, ForeignKey('photo.id'), \
#			primary_key=True))

	metadata.create_all()  
	# guest_table = Table('guestbook', metadata, autoload=True)  
	# for guest_table already exists in metadata
	clear_mappers()  # clear any SQLAlchemy mappers left over from a previous call
	# build the mappers
	mapper(Photo_object, photo_table)
	mapper(Guest_object, guest_table, properties=dict(
		photos=relationship(Photo_object,
			backref='guestbook')))

	DBSession = sessionmaker(bind=engine)
	session = DBSession()
	return session
Example 50
    def test_compileonattr_rel_backref_b(self):
        m = MetaData()
        t1 = Table(
            "t1",
            m,
            Column("id", Integer, primary_key=True),
            Column("x", Integer),
        )
        t2 = Table(
            "t2",
            m,
            Column("id", Integer, primary_key=True),
            Column("t1_id", Integer, ForeignKey("t1.id")),
        )

        class Base:
            def __init__(self):
                pass

        class Base_AKW:
            def __init__(self, *args, **kwargs):
                pass

        for base in object, Base, Base_AKW:

            class A(base):
                pass

            class B(base):
                pass

            self.mapper_registry.map_imperatively(A, t1)
            self.mapper_registry.map_imperatively(
                B, t2, properties=dict(a=relationship(A, backref="bs")))

            a = A()
            b = B()
            b.a = a

            session = fixture_session()
            session.add(a)
            assert b in session, "base: %s" % base
            clear_mappers()
Example 51
    def run_test(self):
        def go(CommonMixin):
            declarative = registry().mapped

            @declarative
            @dataclasses.dataclass
            class BaseType(CommonMixin):

                discriminator = Column("type", String(50))
                __mapper_args__ = dict(polymorphic_on=discriminator)
                id = Column(Integer, primary_key=True)
                value = Column(Integer())

            @declarative
            @dataclasses.dataclass
            class Single(BaseType):

                __tablename__ = None
                __mapper_args__ = dict(polymorphic_identity="type1")

            @declarative
            @dataclasses.dataclass
            class Joined(BaseType):

                __mapper_args__ = dict(polymorphic_identity="type2")
                id = Column(Integer,
                            ForeignKey("basetype.id"),
                            primary_key=True)

            eq_(BaseType.__table__.name, "basetype")
            eq_(
                list(BaseType.__table__.c.keys()),
                ["timestamp", "type", "id", "value"],
            )
            eq_(BaseType.__table__.kwargs, {"mysql_engine": "InnoDB"})
            assert Single.__table__ is BaseType.__table__
            eq_(Joined.__table__.name, "joined")
            eq_(list(Joined.__table__.c.keys()), ["id"])
            eq_(Joined.__table__.kwargs, {"mysql_engine": "InnoDB"})

        yield go

        clear_mappers()
Example 52
def create_distance_table():
    columns = [
        "id", "name", "address", "avg_covered", "distance", "lat", "lon"
    ]
    metadata = MetaData(bind=engine)
    dist_table = Table(
        'HospitalsDistance',
        metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String),
        Column('address', String),
        Column('avg_covered', DECIMAL(8, 2)),
        Column('distance', DECIMAL(10, 2)),
    )

    metadata.create_all()
    clear_mappers()
    mapper(HospitalsDistance, dist_table)
    session = create_session(bind=engine, autocommit=False, autoflush=True)
    return session
Example 53
def client_with_db_data():
    clear_mappers()
    filename = 'datafiles/Data100Movies.csv'
    movie_file_reader = MovieFileCSVReader(filename)
    movie_file_reader.read_csv_file()

    database_engine = create_engine('sqlite:///')
    map_model()
    session_factory = sessionmaker(autocommit=False,
                                   autoflush=True,
                                   bind=database_engine)
    populate_database(session_factory, database_engine, filename)

    repository = DatabaseRepository(session_factory)

    return create_app(repository, {
        'TESTING': True,
        'WTF_CSRF_ENABLED': False,
        'SECRET_KEY': 'test'
    }).test_client(), repository
Example 54
	def get_query_object(self, tablename, engine):
		metadata = MetaData(bind=engine)

		# use a callable default so the timestamp is computed per row,
		# not once when the table object is defined
		def now_ist():
			return datetime.datetime.utcnow() + ist_delta

		rs_query_monitor = Table(tablename, metadata,
			Column('query_id', Integer(), primary_key=True),
			Column('username', String(255)),
			Column('workmem', String(255)),
			Column('num_diskhits', Integer()),
			Column('inner_bcast_count', Integer()),
			Column('bcast_rows', Integer()),
			Column('exec_time', Integer()),
			Column('slot_count', Integer()),
			Column('queue_time', Integer()),
			Column('starttime', DateTime(), default=now_ist),
			Column('state', String(255)),
			Column('queue', Integer()),
			Column('last_modified_on', DateTime(), default=now_ist))
		rs_query_monitor.create(checkfirst=True)
		clear_mappers()
		mapper(RSQueryMonitor, rs_query_monitor)
		return RSQueryMonitor
Example 55
    def test_concurrent_create(self):
        for i in range(50):
            Base = declarative_base()
            clear_mappers()

            self.make_a(Base)
            result = [False]
            threads = [
                threading.Thread(target=self.make_b, args=(Base,)),
                threading.Thread(target=self.query_a, args=(Base, result)),
            ]

            for t in threads:
                t.start()

            for t in threads:
                t.join()

            if isinstance(result[0], orm_exc.UnmappedClassError):
                raise result[0]
Example 56
def reload_mappers(metadata):
    """all model mapper reload.
    @param metadata: reload MetaData
    @type metadata: sqlalchemy.schema.MetaData
    """
    if metadata.bind.name == 'sqlite':
        _now = sqlalchemy.func.datetime('now', 'localtime')
    else:
        _now = sqlalchemy.func.now()

    t_jobgroup = get_jobgroup_table(metadata, _now)
    t_job = get_job_table(metadata, _now)
    try:
        mapper(JobGroup, t_jobgroup, properties={'jobs': relation(Job)})
        #mapper(JobGroup, t_jobgroup, properties={'jobs': relation(Job, backref='job_group')})
        mapper(Job, t_job)
    except sqlalchemy.exc.ArgumentError, ae:
        clear_mappers()
        mapper(JobGroup, t_jobgroup, properties={'jobs': relation(Job)})
        #mapper(JobGroup, t_jobgroup, properties={'jobs': relation(Job, backref='job_group')})
        mapper(Job, t_job)
Example 57
    def loadTable(self, event):
        """
        Load the table into the ObjectListView widget
        """
        current_table = self.tableCbo.GetValue()
        metadata = MetaData(self.engine)
        table = Table(current_table,
                      metadata,
                      autoload=True,
                      autoload_with=self.engine)
        self.columns = table.columns.keys()

        clear_mappers()
        mapper(GenericDBClass, table)

        Session = sessionmaker(bind=self.engine)
        session = Session()
        self.db_data = session.query(GenericDBClass).all()

        self.setData()
        self.Layout()
Example 58
    def test_noninherited_warning(self):
        A, B, b_table, a_table, Dest, dest_table = (
            self.classes.A,
            self.classes.B,
            self.tables.b_table,
            self.tables.a_table,
            self.classes.Dest,
            self.tables.dest_table,
        )

        self.mapper_registry.map_imperatively(
            A, a_table, properties={"some_dest": relationship(Dest)}
        )
        self.mapper_registry.map_imperatively(
            B, b_table, inherits=A, concrete=True
        )
        self.mapper_registry.map_imperatively(Dest, dest_table)
        b = B()
        dest = Dest()
        assert_raises(AttributeError, setattr, b, "some_dest", dest)
        clear_mappers()

        self.mapper_registry.map_imperatively(
            A, a_table, properties={"a_id": a_table.c.id}
        )
        self.mapper_registry.map_imperatively(
            B, b_table, inherits=A, concrete=True
        )
        self.mapper_registry.map_imperatively(Dest, dest_table)
        b = B()
        assert_raises(AttributeError, setattr, b, "a_id", 3)
        clear_mappers()

        self.mapper_registry.map_imperatively(
            A, a_table, properties={"a_id": a_table.c.id}
        )
        self.mapper_registry.map_imperatively(
            B, b_table, inherits=A, concrete=True
        )
        self.mapper_registry.map_imperatively(Dest, dest_table)
Example 59
    def test_compileonattr_rel_backref_a(self):
        m = MetaData()
        t1 = Table(
            "t1",
            m,
            Column("id", Integer, primary_key=True),
            Column("x", Integer),
        )
        t2 = Table(
            "t2",
            m,
            Column("id", Integer, primary_key=True),
            Column("t1_id", Integer, ForeignKey("t1.id")),
        )

        class Base(object):
            def __init__(self, *args, **kwargs):
                pass

        for base in object, Base:

            class A(base):
                pass

            class B(base):
                pass

            mapper(A, t1, properties=dict(bs=relationship(B, backref="a")))
            mapper(B, t2)

            b = B()
            assert b.a is None
            a = A()
            b.a = a

            session = fixture_session()
            session.add(b)
            assert a in session, "base is %s" % base

            clear_mappers()
Example 60
        def go():
            class A(fixtures.ComparableEntity):
                pass

            class B(A):
                pass

            clear_mappers()
            self.mapper_registry.map_imperatively(
                A,
                table1,
                polymorphic_on=table1.c.col2,
                polymorphic_identity="a",
            )
            self.mapper_registry.map_imperatively(B,
                                                  table2,
                                                  inherits=A,
                                                  polymorphic_identity="b")

            sess = Session(self.engine, autoflush=False)
            a1 = A()
            a2 = A()
            b1 = B(col3="b1")
            b2 = B(col3="b2")
            for x in [a1, a2, b1, b2]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_([A(), A(), B(col3="b1"), B(col3="b2")], alist)

            for a in alist:
                sess.delete(a)
            sess.flush()

            # don't need to clear_mappers()
            del B
            del A