def test_abstract_in_hierarchy(self):
        class Document(Base, AbstractConcreteBase):
            doctype = Column(String)

        class ContactDocument(Document):
            __abstract__ = True

            send_method = Column(String)

        class ActualDocument(ContactDocument):
            __tablename__ = 'actual_documents'
            __mapper_args__ = {
                'concrete': True,
                'polymorphic_identity': 'actual'}

            id = Column(Integer, primary_key=True)

        configure_mappers()
        session = Session()
        self.assert_compile(
            session.query(Document),
            "SELECT pjoin.doctype AS pjoin_doctype, "
            "pjoin.send_method AS pjoin_send_method, "
            "pjoin.id AS pjoin_id, pjoin.type AS pjoin_type "
            "FROM (SELECT actual_documents.doctype AS doctype, "
            "actual_documents.send_method AS send_method, "
            "actual_documents.id AS id, 'actual' AS type "
            "FROM actual_documents) AS pjoin"
        )
Example #2
File: db.py Project: xieyanfu/nbclassify
def get_photos_with_taxa(session, metadata):
    """Return photos with genus, section, and species class.

    This generator returns 4-tuples ``(photo, genus, section, species)``.
    """
    Base = automap_base(metadata=metadata)
    Base.prepare()
    configure_mappers()

    Photo = Base.classes.photos
    Taxon = Base.classes.taxa
    Rank = Base.classes.ranks

    stmt_genus = session.query(Photo.id, Taxon.name.label('genus')).\
        join(Photo.taxa_collection, Taxon.ranks).\
        filter(Rank.name == 'genus').subquery()

    stmt_section = session.query(Photo.id, Taxon.name.label('section')).\
        join(Photo.taxa_collection, Taxon.ranks).\
        filter(Rank.name == 'section').subquery()

    stmt_species = session.query(Photo.id, Taxon.name.label('species')).\
        join(Photo.taxa_collection, Taxon.ranks).\
        filter(Rank.name == 'species').subquery()

    q = session.query(Photo, 'genus', 'section', 'species').\
        join(stmt_genus, stmt_genus.c.id == Photo.id).\
        outerjoin(stmt_section, stmt_section.c.id == Photo.id).\
        join(stmt_species, stmt_species.c.id == Photo.id)

    return q
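The docstring above describes the rows as 4-tuples. A minimal, hypothetical way to consume the returned query, assuming a `session` and `metadata` obtained elsewhere (the calling code is not part of this example):

q = get_photos_with_taxa(session, metadata)
for photo, genus, section, species in q:
    print(photo.id, genus, section, species)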
Example #3
 def initialize_db(cls, drop=False, create=True):
     configure_mappers()
     cls.BaseClass.metadata.bind = cls.engine
     if drop:
         cls.BaseClass.metadata.drop_all(cls.engine, checkfirst=True)
     if create:
         cls.BaseClass.metadata.create_all(cls.engine, checkfirst=True)
Example #4
    def test_mapper_args_declared_attr_two(self):

        # same as test_mapper_args_declared_attr, but we repeat
        # ComputedMapperArgs on both classes for no apparent reason.

        class ComputedMapperArgs:
            @declared_attr
            def __mapper_args__(cls):
                if cls.__name__ == 'Person':
                    return {'polymorphic_on': cls.discriminator}
                else:
                    return {'polymorphic_identity': cls.__name__}

        class Person(Base, ComputedMapperArgs):

            __tablename__ = 'people'
            id = Column(Integer, primary_key=True)
            discriminator = Column('type', String(50))

        class Engineer(Person, ComputedMapperArgs):
            pass

        configure_mappers()
        assert class_mapper(Person).polymorphic_on \
            is Person.__table__.c.type
        eq_(class_mapper(Engineer).polymorphic_identity, 'Engineer')
Example #5
    def test_doc(self):
        """test documentation transfer.

        the documentation situation with @declared_attr is problematic.
        at least see if mapped subclasses get the doc.

        """

        class MyMixin(object):

            @declared_attr
            def type_(cls):
                """this is a document."""

                return Column(String(50))

            @declared_attr
            def t2(cls):
                """this is another document."""

                return column_property(Column(String(50)))

        class MyModel(Base, MyMixin):

            __tablename__ = 'test'
            id = Column(Integer, primary_key=True)

        configure_mappers()
        eq_(MyModel.type_.__doc__, """this is a document.""")
        eq_(MyModel.t2.__doc__, """this is another document.""")
Example #6
    def setUp(self):
        super(TestDBAPI, self).setUp()

        configure_mappers()
        cfg.CONF.set_override('connection', 'sqlite://', 'database')
        neutron_db_api.configure_db()
        neutron_db_api.register_models(models.BASEV2)
Example #7
 def create_all(self):
     from sqlalchemy import orm
     orm.configure_mappers()
     with self.engine.begin() as conn:
         conn.execute("SET search_path TO " + self.metadata.schema)
         self.create_tables(conn)
         self.create_functions(conn)
Example #8
    def test_has_inherited_table_in_mapper_args(self):
        class Test(Base):
            __tablename__ = 'test'
            id = Column(Integer, primary_key=True)
            type = Column(String(20))

            @declared_attr
            def __mapper_args__(cls):
                if not has_inherited_table(cls):
                    ret = {
                        'polymorphic_identity': 'default',
                        'polymorphic_on': cls.type,
                        }
                else:
                    ret = {'polymorphic_identity': cls.__name__}
                return ret

        class PolyTest(Test):
            __tablename__ = 'poly_test'
            id = Column(Integer, ForeignKey(Test.id), primary_key=True)

        configure_mappers()

        assert Test.__mapper__.polymorphic_on is Test.__table__.c.type
        assert PolyTest.__mapper__.polymorphic_on is Test.__table__.c.type
Example #9
File: main.py Project: Governa/Camelot
 def setup_model():
     from sqlalchemy.orm import configure_mappers
     from camelot.core.sql import metadata
     metadata.bind = settings.ENGINE()
     import camelot.model.party
     import camelot.model.authentication
     import camelot.model.i18n
     import camelot.model.fixture
     import camelot.model.memento
     import camelot_example.model
     #
     # setup_all is only needed for those models that rely on elixir
     #
     from elixir import setup_all
     setup_all()
     #
     # create the tables for all models, configure mappers first, to make
     # sure all deferred properties have been handled, as those could
     # create tables or columns
     #
     configure_mappers()
     metadata.create_all()
     from camelot.model.authentication import update_last_login
     update_last_login()
     # 
     # Load sample data with the fixure mechanism
     #
     from camelot_example.fixtures import load_movie_fixtures
     load_movie_fixtures()
     #
     # setup the views
     #
     from camelot_example.view import setup_views
     setup_views()
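The comments in setup_model() above capture the essential ordering: configure_mappers() is called before metadata.create_all() so that deferred properties, which may still add tables or columns, are resolved first. A minimal generic sketch of the same ordering using plain declarative SQLAlchemy (assuming version 1.4+ for the imports; the model and engine URL are illustrative, not part of the Camelot example):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import configure_mappers, declarative_base

Base = declarative_base()

class Movie(Base):
    # illustrative model, not from camelot_example
    __tablename__ = 'movies'
    id = Column(Integer, primary_key=True)
    title = Column(String(100))

engine = create_engine('sqlite://')  # assumed in-memory engine for the sketch
configure_mappers()                  # resolve deferred mapper configuration first
Base.metadata.create_all(engine)     # then emit the CREATE TABLE statements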
Example #10
File: db.py Project: xieyanfu/nbclassify
def get_taxa_photo_count(session, metadata):
    """Return the photo count for each (genus, section, species) combination.

    Taxa are returned as 4-tuples ``(genus, section, species, photo_count)``.
    """
    Base = automap_base(metadata=metadata)
    Base.prepare()
    configure_mappers()

    Photo = Base.classes.photos
    Taxon = Base.classes.taxa
    Rank = Base.classes.ranks

    stmt_genus = session.query(Photo.id, Taxon.name.label('genus')).\
        join(Photo.taxa_collection, Taxon.ranks).\
        filter(Rank.name == 'genus').subquery()

    stmt_section = session.query(Photo.id, Taxon.name.label('section')).\
        join(Photo.taxa_collection, Taxon.ranks).\
        filter(Rank.name == 'section').subquery()

    stmt_species = session.query(Photo.id, Taxon.name.label('species')).\
        join(Photo.taxa_collection, Taxon.ranks).\
        filter(Rank.name == 'species').subquery()

    q = session.query('genus', 'section', 'species',
            functions.count(Photo.id).label('photos')).\
        select_from(Photo).\
        join(stmt_genus, stmt_genus.c.id == Photo.id).\
        outerjoin(stmt_section, stmt_section.c.id == Photo.id).\
        join(stmt_species, stmt_species.c.id == Photo.id).\
        group_by('genus', 'section', 'species')

    return q
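As with get_photos_with_taxa() in Example #2, the returned query can simply be iterated; a hypothetical sketch:

for genus, section, species, photos in get_taxa_photo_count(session, metadata):
    print(genus, section, species, photos)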
Example #11
    def test_o2m_relationship_cascade(self):
        Base = automap_base(metadata=self.metadata)
        Base.prepare()

        configure_mappers()

        b_rel = Base.classes.a.b_collection
        assert not b_rel.property.cascade.delete
        assert not b_rel.property.cascade.delete_orphan
        assert not b_rel.property.passive_deletes

        assert b_rel.property.cascade.save_update

        c_rel = Base.classes.a.c_collection
        assert c_rel.property.cascade.delete
        assert c_rel.property.cascade.delete_orphan
        assert not c_rel.property.passive_deletes

        assert c_rel.property.cascade.save_update

        d_rel = Base.classes.a.d_collection
        assert d_rel.property.cascade.delete
        assert d_rel.property.cascade.delete_orphan
        assert d_rel.property.passive_deletes

        assert d_rel.property.cascade.save_update

        e_rel = Base.classes.a.e_collection
        assert not e_rel.property.cascade.delete
        assert not e_rel.property.cascade.delete_orphan
        assert e_rel.property.passive_deletes

        assert e_rel.property.cascade.save_update
Example #12
def main(argv=sys.argv):
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)

    translation_manager.options.update({
        'locales': settings['available_languages'].split(),
        'get_locale_fallback': True
    })
    configure_mappers()

    postgis_version = DBSession.execute(func.postgis_version()).scalar()
    if not postgis_version.startswith('2.'):
        # With PostGIS 1.x the AddGeometryColumn and DropGeometryColumn
        # management functions should be used.
        Area.__table__.c.geometry.type.management = True
        Task.__table__.c.geometry.type.management = True

    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        geometry = '{"type":"MultiPolygon","coordinates":[[[[0.9439973,48.1124991],[0.9439617,48.1124316],[0.9436539,48.1123846],[0.9435646,48.1123915],[0.9435327,48.1124159],[0.9435101,48.1124985],[0.9434772,48.1125175],[0.9426452,48.1123642],[0.9425722,48.112333],[0.9422699,48.1120818],[0.9421995,48.1120605],[0.9421477,48.1120648],[0.9420632,48.112103],[0.941881,48.1122479],[0.9418038,48.1122679],[0.9417503,48.1122513],[0.9415619,48.1121153],[0.9414559,48.1120661],[0.9413928,48.1120672],[0.9409135,48.1121726],[0.9408424,48.1121538],[0.9407001,48.112081],[0.9406476,48.1120888],[0.940564,48.1121817],[0.9404357,48.1122414],[0.9403099,48.112273],[0.9399965,48.1122711],[0.9396713,48.1123254],[0.9393974,48.1122674],[0.9392938,48.1122742],[0.939251,48.1123041],[0.9391999,48.1123871],[0.9391291,48.1124258],[0.9390605,48.1124212],[0.9387723,48.1123216],[0.9387038,48.112312],[0.9385911,48.112324],[0.938539,48.1123602],[0.9384626,48.1124552],[0.938409,48.1124857],[0.9382915,48.1124816],[0.937814,48.1123474],[0.9373468,48.1121692],[0.9372416,48.1121531],[0.9370728,48.11222],[0.9368963,48.1122388],[0.9368195,48.1122207],[0.9366888,48.1120758],[0.9366305,48.1120462],[0.9365513,48.1120533],[0.9364563,48.1121898],[0.9364051,48.112215],[0.9362435,48.1122447],[0.9360775,48.1122529],[0.935965,48.1122311],[0.9358367,48.1121436],[0.9356261,48.1121597],[0.9354644,48.1121544],[0.935367,48.1121832],[0.9353183,48.1122211],[0.9353043,48.1123266],[0.9352257,48.1123409],[0.9351053,48.1123109],[0.9349266,48.112308],[0.9349437,48.1122208],[0.935028,48.1121111],[0.9350097,48.1120778],[0.9349264,48.1120525],[0.9348114,48.1120507],[0.9345203,48.1121667],[0.934303,48.1121371],[0.9341118,48.1120827],[0.9336163,48.1118631],[0.9334011,48.1118632],[0.9333406,48.1118784],[0.9333018,48.1119087],[0.9332712,48.1120369],[0.9332095,48.1120514],[0.9326641,48.1119703],[0.9324749,48.1119783],[0.9323901,48.1120035],[0.9320996,48.1121894],[0.932001,48.1122266],[0.9318959,48.1122429],[0.9318205,48.1122366],[0.9317522,48.1121982],[0.9315106,48.111858],[0.9314835,48.111855],[0.9313209,48.111955],[0.9311756,48.1119605],[0.9310817,48.111943],[0.9308539,48.111838],[0.93077,48.1118461],[0.9306913,48.1120577],[0.9306603,48.1120802],[0.9304127,48.1120509],[0.9301916,48.1121665],[0.9301366,48.1121803],[0.9300427,48.1121754],[0.9299711,48.112132],[0.9296377,48.1117226],[0.929518,48.1116539],[0.9291236,48.1115423],[0.928688,48.1113557],[0.9284554,48.1112127],[0.928209,48.1111933],[0.9276319,48.1110024],[0.9272926,48.1109949],[0.92716,48.1109442],[0.9268904,48.1107475],[0.9267995,48.1106985],[0.926513,48.1105908],[0.9257587,48.110361],[0.9256844,48.1103175],[0.9252016,48.1099046],[0.9247036,48.1095281],[0.9238873,48.1089805],[0.9234698,48.1087463],[0.9230175,48.1085317],[0.9228205,48.1084023],[0.9226396,48.1082393],[0.9223989,48.1079389],[0.9221707,48.1076004],[0.9220664,48.1074937],[0.9218263,48.1073359],[0.9206929,48.1067142],[0.9202506,48.1064974],[0.9194249,48.1061589],[0.9189929,48.1059179],[0.9185714,48.1056273],[0.918382,48.1054632],[0.9181514,48.1051805],[0.9180088,48.105076],[0.9178621,48.1050116],[0.9177965,48.1049636],[0.917625,48.1047545],[0.9174401,48.1046548],[0.9171922,48.1045622],[0.9171528,48.1045598],[0.9170589,48.1046051],[0.916151,48.1044458],[0.9157368,48.1043942],[0.9155837,48.104331],[0.9153533,48.1043506],[0.9152267,48.1043078],[0.9151469,48.1043302],[0.9144978,48.1042957],[0.9138818,48.1042867],[0.913612,48.1042512],[0.9135344,48.1042158],[0.91342,48.1041083],[0.9133722,48.1040059],[0.9133453,48.1038797],[0.9124999,48.103731
4],[0.9120556,48.103698],[0.9119134,48.1036545],[0.9114243,48.1035828],[0.9110642,48.1034704],[0.9107811,48.1034509],[0.9106821,48.103419],[0.9104466,48.103264],[0.9103186,48.1031233],[0.9102487,48.1031311],[0.9101926,48.103167],[0.9101209,48.1032555],[0.9099347,48.1033954],[0.9097093,48.1035337],[0.9093202,48.1037211],[0.9091969,48.1037633],[0.9088825,48.1038087],[0.908232,48.1038471],[0.9079792,48.1039572],[0.9078613,48.1039921],[0.9073389,48.1040699],[0.9069709,48.1040314],[0.9067374,48.1040984],[0.9062589,48.1041359],[0.9059745,48.1041992],[0.9056679,48.1041682],[0.9050763,48.1042027],[0.904938,48.1041824],[0.9048179,48.1041305],[0.9045452,48.1039603],[0.9044561,48.103924],[0.904196,48.1038809],[0.9036573,48.103662],[0.9034452,48.1035998],[0.903079,48.1035192],[0.9028311,48.103446],[0.9018642,48.1033358],[0.9013742,48.1032336],[0.9005004,48.1031099],[0.9002539,48.1030173],[0.9000271,48.1029553],[0.8980542,48.1026568],[0.8970976,48.1025458],[0.896599,48.1024688],[0.895392,48.1023392],[0.8944552,48.1023009],[0.8943655,48.1022504],[0.893184,48.1021089],[0.892454,48.1020611],[0.8907345,48.102055],[0.8890399,48.1021236],[0.8871747,48.100757],[0.8867015,48.1002951],[0.8856048,48.099084],[0.8853025,48.0987087],[0.8852286,48.0986621],[0.8849884,48.0986016],[0.8850231,48.0983716],[0.8849404,48.0983012],[0.88507,48.0980405],[0.8852975,48.0976653],[0.8852704,48.0976029],[0.8851619,48.0975104],[0.8850349,48.0958233],[0.884925,48.09557],[0.8845941,48.0949965],[0.8844349,48.094641],[0.8839767,48.094183],[0.883877,48.0940455],[0.8838347,48.0939336],[0.8838474,48.0937614],[0.8838277,48.0937378],[0.8836345,48.0937005],[0.8835989,48.0936473],[0.8835399,48.0932616],[0.8835453,48.0931402],[0.8836322,48.0929363],[0.8837115,48.0928221],[0.8839719,48.0925712],[0.8840114,48.0924936],[0.8841869,48.0918922],[0.884514,48.0909761],[0.8845052,48.0909363],[0.8841545,48.0909667],[0.8840605,48.0909899],[0.8840665,48.0909786],[0.88377,48.0910244],[0.8821938,48.0913622],[0.8822637,48.090884],[0.8822353,48.0904922],[0.8816999,48.0878551],[0.8816151,48.0876328],[0.8814922,48.0874268],[0.8814738,48.0874217],[0.8814855,48.0874104],[0.8814306,48.0872591],[0.8815099,48.0869485],[0.8815416,48.086688],[0.8813337,48.0861234],[0.8813029,48.086034],[0.8813103,48.0858493],[0.881293,48.0858045],[0.881064,48.0855132],[0.8809225,48.0852696],[0.8809149,48.0851904],[0.880876,48.0851082],[0.8807837,48.0850312],[0.8805608,48.0849696],[0.87945,48.0845884],[0.8794268,48.0845672],[0.879631,48.0841288],[0.8799079,48.083644],[0.880107,48.0831259],[0.880317,48.0828603],[0.8804452,48.0827907],[0.8808702,48.0826873],[0.8809531,48.0826021],[0.8810799,48.0825662],[0.881479,48.0823449],[0.8818056,48.0820532],[0.8818558,48.081984],[0.8819116,48.0817215],[0.8818362,48.0817037],[0.8819608,48.0813496],[0.8819926,48.0813395],[0.8823055,48.0813597],[0.8823133,48.0812496],[0.8824595,48.0807075],[0.8824582,48.0806392],[0.8823237,48.080407],[0.8821394,48.08017],[0.881799,48.0798468],[0.8816261,48.0796064],[0.8816053,48.0795361],[0.8816229,48.0793828],[0.8815777,48.0788658],[0.8819271,48.0726864],[0.8819837,48.0726738],[0.8827333,48.0724684],[0.8834216,48.0723433],[0.8838909,48.0722909],[0.8861918,48.0721536],[0.8870951,48.07213],[0.8885457,48.0721751],[0.8890476,48.0722146],[0.8897387,48.0722913],[0.8924092,48.0726618],[0.8929554,48.0726915],[0.8938723,48.0726924],[0.8952958,48.0725969],[0.895799,48.0725847],[0.8971856,48.0726643],[0.898136,48.0727578],[0.8990872,48.0728791],[0.899557,48.0729567],[0.9001325,48.0730757],[0.9029573,48.0737891],[0.9038139,48.07
40308],[0.9070093,48.0750725],[0.9081086,48.0753728],[0.9099514,48.0758167],[0.910322,48.0759223],[0.9117924,48.0765358],[0.9126649,48.0768328],[0.9132159,48.0769866],[0.9137789,48.0771191],[0.9143513,48.0772292],[0.9149079,48.0773127],[0.9168405,48.0772668],[0.9194871,48.0771411],[0.9199632,48.0770987],[0.9207016,48.0771334],[0.9227369,48.0770818],[0.9227977,48.0771217],[0.9237378,48.0774408],[0.9242799,48.077677],[0.9245315,48.0777642],[0.9259242,48.078126],[0.9259835,48.0781653],[0.9257486,48.0782691],[0.9250651,48.0786527],[0.9250171,48.0787207],[0.9250181,48.078768],[0.9250785,48.0788498],[0.9251769,48.0789144],[0.9253266,48.078969],[0.9253932,48.0790145],[0.9254937,48.0791402],[0.925567,48.0791978],[0.9257456,48.0791989],[0.9258597,48.0791272],[0.9259201,48.0790553],[0.9259676,48.0788869],[0.9260389,48.07885],[0.9262833,48.078847],[0.9263631,48.0788613],[0.9272144,48.0791422],[0.928243,48.0794004],[0.9287124,48.0795397],[0.9288762,48.0796129],[0.9291839,48.07988],[0.9292587,48.080876],[0.9290689,48.081457],[0.9286252,48.0830512],[0.9286038,48.0830777],[0.92844,48.0830836],[0.9281558,48.0834763],[0.9277933,48.083806],[0.9278103,48.0838341],[0.927992,48.0839684],[0.9281209,48.0840093],[0.9281962,48.0840779],[0.9286212,48.0846534],[0.9288801,48.0845032],[0.9290658,48.0843593],[0.9291169,48.0843561],[0.9291326,48.0843802],[0.9290639,48.0845228],[0.9290714,48.084559],[0.9291115,48.084589],[0.9293888,48.0846715],[0.9297144,48.0849806],[0.9299357,48.0850234],[0.9300327,48.0850594],[0.9300734,48.0851101],[0.9301458,48.0853341],[0.9300687,48.0855351],[0.9301256,48.085745],[0.9300608,48.0859372],[0.9301152,48.0860054],[0.9301412,48.0861604],[0.9301083,48.0863554],[0.9301356,48.0864634],[0.9305376,48.0870193],[0.9306298,48.0870276],[0.9311066,48.0868727],[0.9311625,48.0868716],[0.9313259,48.0869521],[0.9318028,48.0880182],[0.9318266,48.0881523],[0.9319901,48.0884926],[0.9320287,48.0885328],[0.9327689,48.0884882],[0.933996,48.0883107],[0.93406,48.0888072],[0.9358186,48.0885738],[0.9379588,48.088448],[0.9404128,48.0884739],[0.9404298,48.0884615],[0.9404271,48.087987],[0.9409544,48.0880107],[0.9436225,48.0909035],[0.944013,48.0912746],[0.9445871,48.0917695],[0.9446888,48.0918275],[0.9448178,48.0918585],[0.9450145,48.0919347],[0.9452888,48.0920105],[0.9456031,48.0921922],[0.9458274,48.0925722],[0.9461027,48.0929715],[0.9467571,48.0937549],[0.9466013,48.0939368],[0.9457586,48.0946713],[0.9449309,48.0951669],[0.9442778,48.0957071],[0.9431922,48.0967234],[0.9430035,48.0968566],[0.9427329,48.0970077],[0.9428798,48.0970882],[0.9436925,48.0974573],[0.9442749,48.097623],[0.9449722,48.0979461],[0.9449351,48.0979618],[0.9446782,48.0981461],[0.9446701,48.0981728],[0.9447244,48.098264],[0.9447744,48.098652],[0.9448442,48.0989081],[0.9448821,48.0989785],[0.9450293,48.099123],[0.9452603,48.0993019],[0.9454104,48.0994448],[0.9454708,48.0996084],[0.9455327,48.099971],[0.9457118,48.1002634],[0.9457302,48.1003421],[0.9457144,48.1005718],[0.945601,48.1009101],[0.9455879,48.1010401],[0.9454882,48.1012024],[0.9454903,48.1017689],[0.9457282,48.1019203],[0.9459384,48.10198],[0.9459877,48.1020078],[0.9461158,48.1021672],[0.9461662,48.1022786],[0.9461757,48.102371],[0.9464741,48.1027264],[0.9465394,48.1029182],[0.9467153,48.103137],[0.9467236,48.1032315],[0.9466626,48.1033925],[0.9466604,48.1034558],[0.9468874,48.1037169],[0.9469397,48.1038022],[0.9469778,48.1041225],[0.9469578,48.104168],[0.9469827,48.1042705],[0.9469437,48.1044249],[0.9469577,48.1045391],[0.9469388,48.1046384],[0.9469778,48.1047147],[0.9470086,48.1047847
],[0.9473819,48.1050776],[0.9474707,48.1051754],[0.9475253,48.1052809],[0.9475305,48.1054269],[0.9475663,48.1055737],[0.9475898,48.1056804],[0.9475196,48.1057741],[0.9473788,48.1058605],[0.9472747,48.106018],[0.9471141,48.1062086],[0.9471387,48.1063949],[0.94701,48.1067945],[0.9470153,48.1069376],[0.9469803,48.1069933],[0.946806,48.1070801],[0.9466969,48.1071462],[0.9466209,48.1072286],[0.9464116,48.1072491],[0.9462462,48.1072746],[0.9460057,48.107246],[0.9457974,48.1073321],[0.9456807,48.1074036],[0.9453011,48.1076196],[0.9449878,48.1078416],[0.9448832,48.1079474],[0.9448628,48.1082604],[0.944825,48.1086698],[0.9448109,48.1089699],[0.9447343,48.1098883],[0.9447134,48.1101126],[0.9445625,48.1101423],[0.9444986,48.1102203],[0.9444422,48.1103438],[0.9445083,48.1106833],[0.9446679,48.1108109],[0.9446864,48.1108429],[0.9445882,48.1109482],[0.9445629,48.1110658],[0.9445243,48.1111079],[0.9444333,48.1111562],[0.9443598,48.1111432],[0.9442693,48.1112372],[0.9442649,48.1113383],[0.9442985,48.1114212],[0.9444328,48.1115197],[0.9444457,48.1115469],[0.9444051,48.1116867],[0.9444106,48.1118206],[0.9442092,48.1119209],[0.9441154,48.1120517],[0.9439546,48.112119],[0.9438787,48.1122846],[0.9438909,48.1123155],[0.9440504,48.1123422],[0.9440828,48.11236],[0.9439973,48.1124991]]]]}'  # noqa
        geometry = geojson.loads(geometry,
                                 object_hook=geojson.GeoJSON.to_instance)
        geometry = shapely.geometry.asShape(geometry)
        geometry = shape.from_shape(geometry, 4326)

        area = Area(
            geometry
        )
        DBSession.add(area)

        project = Project(
            'Map all primary roads'
        )
        project.area = area
        project.short_description = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."  # noqa
        project.description = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."  # noqa
        DBSession.add(project)

        with project.force_locale('fr'):
            project.name = "Cartographier les routes"

        project.auto_fill(14)

        license = License()
        license.name = 'NextView'
        license.description = "This data is licensed for use by the US Government (USG) under the NextView (NV) license and copyrighted by Digital Globe or GeoEye. The NV license allows the USG to share the imagery and Literal Imagery Derived Products (LIDP) with entities outside the USG when that entity is working directly with the USG, for the USG, or in a manner that is directly beneficial to the USG. The party receiving the data can only use the imagery or LIDP for the original purpose or only as otherwise agreed to by the USG. The party receiving the data cannot share the imagery or LIDP with a third party without express permission from the USG. At no time should this imagery or LIDP be used for other than USG-related purposes and must not be used for commercial gain. The copyright information should be maintained at all times. Your acceptance of these license terms is implied by your use."  # noqa
        license.plain_text = "In other words, you may only use NextView imagery linked from this site for digitizing OpenStreetMap data for humanitarian purposes."  # noqa
        DBSession.add(license)

        license = License()
        license.name = 'Astrium/UNOSAT'
        license.description = "UNOSAT allow any INTERNET USER to use the IMAGE to develop DERIVATIVE WORKS provided that the INTERNET USER includes the DERIVATIVE WORKS he/she created in the OpenStreetMap database under CC-BY-SA licence (http://creativecommons.org/licenses/by-sa/2.0/) and/or Open Database licence (ODbL: http://www.opendatacommons.org/licenses/odbl/), with the credit of the corresponding PRODUCT conspicuously displayed and written in full, in order to allow any OpenStreetMap database user to have access to and to use the DERIVATIVE WORKS. Except for the foregoing, the END USER and/or the INTERNET USER shall not be entitled to sell, distribute, assign, dispose of, lease, sublicense or transfer, directly or indirectly, any DERIVATIVE WORKS to any third party."  # noqa
        license.plain_text = "Astrium GEO-Information Services and UNOSAT are allowing access to this imagery for creating information in OpenStreetMap. Other uses are not allowed."  # noqa
        DBSession.add(license)
Example #13
    def test_uses_get_on_class_col_fk(self):

        # test [ticket:1492]

        class Master(Base):

            __tablename__ = "master"
            id = Column(Integer, primary_key=True, test_needs_autoincrement=True)

        class Detail(Base):

            __tablename__ = "detail"
            id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
            master_id = Column(None, ForeignKey(Master.id))
            master = relationship(Master)

        Base.metadata.create_all()
        configure_mappers()
        assert class_mapper(Detail).get_property("master").strategy.use_get
        m1 = Master()
        d1 = Detail(master=m1)
        sess = create_session()
        sess.add(d1)
        sess.flush()
        sess.expunge_all()
        d1 = sess.query(Detail).first()
        m1 = sess.query(Master).first()

        def go():
            assert d1.master

        self.assert_sql_count(testing.db, go, 0)
Example #14
    def test_string_dependency_resolution_tables(self):
        class User(Base, fixtures.ComparableEntity):

            __tablename__ = "users"
            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            props = relationship(
                "Prop",
                secondary="user_to_prop",
                primaryjoin="User.id==user_to_prop.c.u" "ser_id",
                secondaryjoin="user_to_prop.c.prop_id=" "=Prop.id",
                backref="users",
            )

        class Prop(Base, fixtures.ComparableEntity):

            __tablename__ = "props"
            id = Column(Integer, primary_key=True)
            name = Column(String(50))

        user_to_prop = Table(
            "user_to_prop",
            Base.metadata,
            Column("user_id", Integer, ForeignKey("users.id")),
            Column("prop_id", Integer, ForeignKey("props.id")),
        )
        configure_mappers()
        assert class_mapper(User).get_property("props").secondary is user_to_prop
Example #15
        def define_tables(cls, metadata):
            global User, Address
            Base = decl.declarative_base(metadata=metadata)

            class User(Base, fixtures.ComparableEntity):

                __tablename__ = "users"
                id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
                name = Column(String(50))

            class Address(Base, fixtures.ComparableEntity):

                __tablename__ = "addresses"
                id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
                email = Column(String(50))
                user_id = Column(Integer, ForeignKey("users.id"))
                if inline:
                    if stringbased:
                        user = relationship("User", primaryjoin="User.id==Address.user_id", backref="addresses")
                    else:
                        user = relationship(User, primaryjoin=User.id == user_id, backref="addresses")

            if not inline:
                configure_mappers()
                if stringbased:
                    Address.user = relationship("User", primaryjoin="User.id==Address.user_id", backref="addresses")
                else:
                    Address.user = relationship(User, primaryjoin=User.id == Address.user_id, backref="addresses")
Example #16
    def _automap(self, e):
        Base = automap_base()

        Base.prepare(e, reflect=True)

        time.sleep(.01)
        configure_mappers()
Example #17
File: base.py Project: Anonymike/quark
 def setUp(self):
     super(MySqlBaseFunctionalTest, self).setUp()
     self.context = context.Context('fake', 'fake', is_admin=False)
     configure_mappers()
     engine = neutron_db_api.get_engine()
     models.BASEV2.metadata.create_all(engine)
     quota_driver.Quota.metadata.create_all(engine)
Example #18
 def _configure_driver_mappers(self):
     try:
         configure_mappers()
     except Exception as e:
         logging.error((
             '{}: Unable to configure mappers. '
             'Have you imported your models?'
         ).format(str(e)))
Example #19
    def test_use_get_reverseorder(self):
        mapper(self.classes.A, self.tables.a)
        m_b = mapper(self.classes.B, self.tables.b_differentorder, properties={
            'a': relationship(self.classes.A)
        })

        configure_mappers()
        is_true(m_b.relationships.a.strategy.use_get)
Example #20
File: db.py Project: xieyanfu/nbclassify
def get_photos(session, metadata):
    """Return photo records from the database."""
    Base = automap_base(metadata=metadata)
    Base.prepare()
    configure_mappers()
    Photo = Base.classes.photos
    photos = session.query(Photo)
    return photos
Example #21
File: base.py Project: openstack/quark
 def setUp(self):
     super(BaseFunctionalTest, self).setUp()
     self.context = context.Context('fake', 'fake', is_admin=False)
     cfg.CONF.set_override('connection', 'sqlite://', 'database')
     configure_mappers()
     self.engine = api.context_manager.get_legacy_facade().get_engine()
     models.BASEV2.metadata.create_all(self.engine)
     quota_driver.Quota.metadata.create_all(self.engine)
Example #22
 def setup_mappers(cls):
     global Session
     Session = scoped_session(sessionmaker())
     mapper(User, users, properties={'addresses'
            : relationship(Address, backref='user',
            order_by=addresses.c.id)})
     mapper(Address, addresses)
     configure_mappers()
Example #23
 def setup_model( self ):
     """This function will be called at application startup, it is used to 
     setup the model"""
     from camelot.core.sql import metadata
     from sqlalchemy.orm import configure_mappers
     metadata.bind = self.ENGINE()
     import camelot.model.i18n
     import subs.model
     configure_mappers()
     metadata.create_all()
Example #24
    def test_use_get_sameorder(self):
        mapper(self.classes.A, self.tables.a)
        m_b = mapper(
            self.classes.B,
            self.tables.b_sameorder,
            properties={"a": relationship(self.classes.A)},
        )

        configure_mappers()
        is_true(m_b.relationships.a.strategy.use_get)
Example #25
    def __init__(self, marker, column, slug=None, parent=None, relchain=None):
        self.marker = marker
        self.column = column
        self.slug_column = slug

        assert bool(parent) == bool(relchain)
        self.parent = parent
        self.relchain = relchain

        configure_mappers()
Example #26
def make_histogram(args, what_roi, session, metadata):
    """
    Make a histogram of every image and write the values
    to a file.
    
    Expects the result of an argument parser, a string of what kind
    of roi needs to be used and a connection
    to an existing metadata database via an SQLAlchemy Session
    instance 'session' and an SQLAlchemy MetaData instance
    'metadata' which describes the database tables.

    A connection to the database table 'Photos' is made.
    Every file that is an image is opened and the title of that
    image is taken from the database. A mask is created to isolate
    a part of the image (Region Of Interest (ROI)) and a histogram is
    made of that ROI. The values in the histogram-list are
    normalized and relevant data is written to the outputfile.
    """
    Base = automap_base(metadata=metadata)
    Base.prepare()
    configure_mappers()
    Photo = Base.classes.photos

    # Open outputfile.
    outputfile = open(args.outputfile, 'a')

    # Walk through files.
    for root, dirs, files in os.walk(args.imdir):
        for filename in files:
            sys.stderr.write("File %s is being processed...\n" % filename)

            # Make path to file.
            path = os.path.join(root, filename)

            # Open file and check datatype.
            img = cv2.imread(path, 1)
            if not isinstance(img, np.ndarray):
                sys.stderr.write("File is no image: will be skipped.\n")
                continue

            photo_id = filename.split(".")[0]

            # Get title of image from database.
            # Set default in case there is no database entry for it.
            title = photo_id
            for pic in session.query(Photo).filter(Photo.id == photo_id):
                title = photo_id if pic.title is None else pic.title

            img, contour = create_mask(img, args, what_roi)
            hist = ft.color_bgr_means(img, contour, bins=args.bins)
            means_norm = hist_means(hist)
            write_to_output_file(photo_id, title, means_norm, args, outputfile)

    # Close outputfile.
    outputfile.close()
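A hypothetical driver for make_histogram(), assuming `args` is an argparse namespace carrying the attributes the function reads (imdir, outputfile, bins, meta_file) and reusing the db.session_scope() helper that appears in save_bow() further down; the ROI keyword passed as what_roi is project-specific and assumed here:

with db.session_scope(args.meta_file) as (session, metadata):
    make_histogram(args, 'whole', session, metadata)  # 'whole' is an assumed ROI keyword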
Example #27
        def __init__(self):
            """
            Sets up the connection to the database and initialises a connection check.
            """

            print "Histo DB connection initialised"
            configure_mappers()
            engine = create_engine(os.environ["HISTODB_PATH"])
            Base.metadata.bind = engine
            DBSession = sessionmaker(bind=engine)                
            self.session = DBSession()
Example #28
File: base.py Project: Cerberus98/quark
 def setUp(self):
     super(BaseFunctionalTest, self).setUp()
     self.context = context.Context('fake', 'fake', is_admin=False)
     cfg.CONF.set_override('connection', 'sqlite://', 'database')
     configure_mappers()
     # Must set the neutron's facade to none before each test
     # otherwise the data will be shared between tests
     neutron_db_api._FACADE = None
     self.engine = neutron_db_api.get_engine()
     models.BASEV2.metadata.create_all(self.engine)
     quota_driver.Quota.metadata.create_all(self.engine)
Example #29
    def test_clause_expansion(self):
        self._fixture(False)
        Edge = self.classes.Edge
        from sqlalchemy.orm import configure_mappers
        configure_mappers()

        self.assert_compile(
            select([Edge]).order_by(Edge.start),
            "SELECT edge.id, edge.x1, edge.y1, edge.x2, edge.y2 FROM edge "
            "ORDER BY edge.x1, edge.y1"
        )
Example #30
    def test_dont_use_get_pj_is_different(self):
        mapper(self.classes.A, self.tables.a)
        m_b = mapper(self.classes.B, self.tables.b_sameorder, properties={
            'a': relationship(self.classes.A, primaryjoin=and_(
                self.tables.a.c.id1 == self.tables.b_sameorder.c.a_id1,
                self.tables.a.c.id2 == 12
            ))
        })

        configure_mappers()
        is_false(m_b.relationships.a.strategy.use_get)
Example #31
File: test_m2m.py Project: hinohi/elixir
    def test_simple(self):
        class A(Entity):
            using_options(shortnames=True)
            name = Field(String(60))
            as_ = ManyToMany('A')
            bs_ = ManyToMany('B')

        class B(Entity):
            using_options(shortnames=True)
            name = Field(String(60))
            as_ = ManyToMany('A')

        setup_all(True)
        configure_mappers()

        # check m2m table was generated correctly
        m2m_table = A.bs_.property.secondary
        assert m2m_table.name in metadata.tables

        # check column names
        m2m_cols = m2m_table.columns
        assert 'a_id' in m2m_cols
        assert 'b_id' in m2m_cols

        # check selfref m2m table column names were generated correctly
        m2m_cols = A.as_.property.secondary.columns
        assert 'as__id' in m2m_cols
        assert 'inverse_id' in m2m_cols

        # check the relationships work as expected
        b1 = B(name='b1', as_=[A(name='a1')])

        session.commit()
        session.close()

        a = A.query.one()
        b = B.query.one()

        assert a in b.as_
        assert b in a.bs_
Example #32
def connect_to_database(filename=database_filename):
    """Handles the connection to a pre-existing apartment database

    Returns:
        SQLAlchemy session: Connection to database
    """
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    # Setup database engine
    engine = create_engine('sqlite:///' + filename)
    configure_mappers()
    Base.metadata.bind = engine

    # Create a session instance, for sql query execution
    DBSession = sessionmaker(bind=engine)
    session = DBSession()

    logging.info("Successfully connected to database")

    # Return database session
    return session
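A hypothetical call site for connect_to_database(); the function binds Base.metadata to the engine and hands back a session for running queries:

session = connect_to_database()  # uses the default database_filename
# ... issue queries through `session` ...
session.close()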
Example #33
    def setup_mappers(cls):
        mapper(
            cls.classes.A,
            cls.tables.a,
            properties={"b": relationship(cls.classes.B)},
        )
        bm = mapper(
            cls.classes.B,
            cls.tables.b,
            properties={
                "parent":
                relationship(cls.classes.B, remote_side=cls.tables.b.c.b_id),
                "zc":
                relationship(cls.classes.C),
            },
        )
        mapper(cls.classes.C, cls.tables.c)

        bmp = bm._props
        configure_mappers()
        # Bug is order-dependent, must sort the "zc" property to the end
        bmp.sort()
Example #34
 def initialize(self):
     self.init_services()
     db.base.metadata.create_all(bind=db.engine)
     configure_mappers()
     db.configure_application_events(self)
     self.init_forms()
     self.reset_run_status()
     if not db.fetch("user", allow_none=True, name="admin"):
         self.configure_server_id()
         self.create_admin_user()
         db.session.commit()
         if self.settings["app"]["create_examples"]:
             self.migration_import(
                 name="examples", import_export_types=db.import_classes
             )
             self.update_credentials()
         else:
             self.migration_import(
                 name="default", import_export_types=db.import_classes
             )
         self.get_git_content()
         db.session.commit()
Example #35
    def setup_mappers(cls):
        A, B, C, D, E, F, G = cls.classes("A", "B", "C", "D", "E", "F", "G")
        a, b, c, d, e, f, g = cls.tables("a", "b", "c", "d", "e", "f", "g")

        mapper(A, a, properties={"bs": relationship(B), "gs": relationship(G)})
        mapper(
            B,
            b,
            properties={
                "cs": relationship(C),
                "ds": relationship(D),
                "es": relationship(E),
                "fs": relationship(F),
            },
        )
        mapper(C, c)
        mapper(D, d)
        mapper(E, e)
        mapper(F, f)
        mapper(G, g)

        configure_mappers()
Example #36
File: base.py Project: blaisep/eNMS
 def configure_database(self):
     self.init_services()
     Base.metadata.create_all(bind=engine)
     configure_mappers()
     configure_events(self)
     self.init_forms()
     self.clean_database()
     if not fetch("user", allow_none=True, name="admin"):
         self.configure_server_id()
         self.create_admin_user()
         Session.commit()
         if self.settings["app"]["create_examples"]:
             self.migration_import(
                 name="examples", import_export_types=import_classes
             )
             self.update_credentials()
         else:
             self.migration_import(
                 name="default", import_export_types=import_classes
             )
         self.get_git_content()
         Session.commit()
Example #37
    def test_overlapping_backref_relationship(self):
        A, B, b_table, a_table, Dest, dest_table = (
            self.classes.A,
            self.classes.B,
            self.tables.b_table,
            self.tables.a_table,
            self.classes.Dest,
            self.tables.dest_table,
        )

        # test issue #3630, no error or warning is generated
        mapper(A, a_table)
        mapper(B, b_table, inherits=A, concrete=True)
        mapper(
            Dest,
            dest_table,
            properties={
                "a": relationship(A, backref="dest"),
                "a1": relationship(B, backref="dest"),
            },
        )
        configure_mappers()
Example #38
def main(_, **settings):
    """
    This function returns a Pyramid WSGI application.
    """
    configuration.init(settings.get("app.cfg"))
    settings.update(configuration.get_config())

    config = Configurator(settings=settings)

    c2cwsgiutils.pretty_json.init(config)
    config.include("c2cgeoportal_admin")

    # Initialize the dev dbsession
    settings = config.get_settings()
    settings["tm.manager_hook"] = "pyramid_tm.explicit_manager"

    configure_mappers()
    engine = engine_from_config(settings)
    session_factory = sessionmaker()
    session_factory.configure(bind=engine)

    def get_tm_session(session_factory, transaction_manager):
        dbsession = session_factory()
        zope.sqlalchemy.register(dbsession,
                                 transaction_manager=transaction_manager)
        return dbsession

    # Make request.dbsession available for use in Pyramid
    config.add_request_method(
        # request.tm is the transaction manager used by pyramid_tm
        lambda request: get_tm_session(session_factory, request.tm),
        "dbsession",
        reify=True,
    )

    config.add_subscriber(add_renderer_globals, BeforeRender)
    config.add_subscriber(add_localizer, NewRequest)

    return config.make_wsgi_app()
Example #39
 def initialize_database(self):
     self.init_plugins()
     self.init_services()
     db.private_properties_set |= set(
         sum(db.private_properties.values(), []))
     db.base.metadata.create_all(bind=db.engine)
     configure_mappers()
     db.configure_model_events(self)
     if self.cli_command:
         return
     self.init_forms()
     if not db.get_user("admin"):
         self.create_admin_user()
         self.migration_import(
             name=self.settings["app"].get("startup_migration", "default"),
             import_export_types=db.import_export_models,
         )
         self.update_credentials()
         self.get_git_content()
     self.configure_server_id()
     self.reset_run_status()
     db.session.commit()
Example #40
def dbinit():
    '''
    Initialise the database connections from the config
    '''
    global ENGINE, SESSION, SESSION_FACTORY, BASE

    #config.Config.CONF

    db_uri = config.Config.CONF.get('database', 'URI')
    db_opts = dict()
    for option in config.Config.CONF.options('database'):
        if option.startswith('option.'):
            oopt = option.split('.', 1)[1]
            db_opts[oopt] = config.Config.CONF.get('database', option)

    session_autocommit = config.Config.CONF.getboolean('session', 'autocommit')

    ENGINE = create_engine(db_uri, **db_opts)
    SESSION_FACTORY = sessionmaker(bind=ENGINE, autocommit=session_autocommit)

    BASE = declarative_base()
    configure_mappers()
Example #41
def save_bow(args, bow, nclusters, imglist):
    """
    The function takes the result of an argument parser,
    a dictionary with image 'words' (Bag-Of-Words = bow), 
    a number of clusters and a list with image filenames.
    A header is created and saved to a .tsv file.
    A connection to the database is made. For every
    image in the imagelist, a photo_id is taken out
    of the filename. A title for the image is taken
    from the database. The photo_id, title and
    all values in the bow of this image are written
    to the .tsv file.
    """
    print("Saving bag-of-words...")
    if not args.bow.endswith(".tsv"):
        args.bow += ".tsv"
    outputfile = open(args.bow, "w")
    header = make_header(nclusters)
    outputfile.write(header)
    # Connect to database.
    with db.session_scope(args.meta_file) as (session, metadata):
        Base = automap_base(metadata=metadata)
        Base.prepare()
        configure_mappers()
        Photo = Base.classes.photos
        for filename in imglist:
            photo_id = filename.split(".")[0]
            title = photo_id
            for pic in session.query(Photo).filter(Photo.id == photo_id):
                title = photo_id if pic.title is None else pic.title
            valuelist = [photo_id, title]
            words = bow[filename.rstrip()]
            for item in words:
                valuelist.append(str(item))
            row = "\t".join(valuelist)
            row += "\n"
            outputfile.write(row)
    outputfile.close()
Example #42
def get_filtered_photos_with_taxon(session, metadata, filter_):
    """Return photos with corresponding class for a filter.

    Returns all photos with corresponding taxon, as filterd by `filter_`. The
    taxon returned per photo is defined by the `class` attribute of the filter.
    Taxa to filter photos by is set in the `where` attribute of the filter.
    Filters are those as returned by
    :meth:`~nbclassify.functions.classification_hierarchy_filters`. Returned
    rows are 2-tuples ``(photo, taxon_name)``.
    """
    if not isinstance(filter_, dict):
        raise ValueError("Expected a dict as filter")

    Base = automap_base(metadata=metadata)
    Base.prepare()
    configure_mappers()

    # Get the table classes.
    Photo = Base.classes.photos
    Taxon = Base.classes.taxa
    Rank = Base.classes.ranks

    # Use a subquery because we want photos to be returned even if they don't
    # have taxa for the given class.
    class_ = filter_.get('class')
    stmt_genus, stmt_section, stmt_species = get_subqueries(
        session, Photo, Taxon, Rank)

    # Construct the main query.
    q = session.query(Photo, class_).\
        join(stmt_genus, stmt_genus.c.id == Photo.id).\
        outerjoin(stmt_section, stmt_section.c.id == Photo.id).\
        join(stmt_species, stmt_species.c.id == Photo.id)

    # Filter on each taxon in the where attribute of the filter.
    q = filter_query(filter_, q, stmt_genus, stmt_section, stmt_species)

    return q
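The docstring above describes the filter as carrying a `class` attribute (the rank returned per photo) and a `where` attribute with taxa to filter on. A purely illustrative filter and call; the exact shape of `where` is assumed here, not taken from the project:

filter_ = {
    'class': 'species',                 # rank returned alongside each photo
    'where': {'genus': 'Cypripedium'},  # assumed structure of the taxon constraint
}
for photo, taxon_name in get_filtered_photos_with_taxon(session, metadata, filter_):
    print(photo.id, taxon_name)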
Example #43
def get_photos_with_taxa(session, metadata):
    """Return photos with genus, section, and species class.

    This generator returns 4-tuples ``(photo, genus, section, species)``.
    """
    Base = automap_base(metadata=metadata)
    Base.prepare()
    configure_mappers()

    Photo = Base.classes.photos
    Taxon = Base.classes.taxa
    Rank = Base.classes.ranks

    # Make subqueries.
    stmt_genus, stmt_section, stmt_species = get_subqueries(
        session, Photo, Taxon, Rank)

    q = session.query(Photo, 'genus', 'section', 'species').\
        join(stmt_genus, stmt_genus.c.id == Photo.id).\
        outerjoin(stmt_section, stmt_section.c.id == Photo.id).\
        join(stmt_species, stmt_species.c.id == Photo.id)

    return q
Example #44
    def test_mapper_args_declared_attr(self):

        class ComputedMapperArgs:

            @declared_attr
            def __mapper_args__(cls):
                if cls.__name__ == 'Person':
                    return {'polymorphic_on': cls.discriminator}
                else:
                    return {'polymorphic_identity': cls.__name__}

        class Person(Base, ComputedMapperArgs):
            __tablename__ = 'people'
            id = Column(Integer, primary_key=True)
            discriminator = Column('type', String(50))

        class Engineer(Person):
            pass

        configure_mappers()
        assert class_mapper(Person).polymorphic_on \
            is Person.__table__.c.type
        eq_(class_mapper(Engineer).polymorphic_identity, 'Engineer')
Example #45
    def test_concrete_extension_warn_concrete_disc_resolves_overlap(self):
        class Employee(ConcreteBase, Base, fixtures.ComparableEntity):
            _concrete_discriminator_name = "_type"

            __tablename__ = "employee"

            employee_id = Column(Integer,
                                 primary_key=True,
                                 test_needs_autoincrement=True)
            name = Column(String(50))
            __mapper_args__ = {
                "polymorphic_identity": "employee",
                "concrete": True,
            }

        class Manager(Employee):
            __tablename__ = "manager"
            employee_id = Column(Integer,
                                 primary_key=True,
                                 test_needs_autoincrement=True)
            type = Column(String(50))
            __mapper_args__ = {
                "polymorphic_identity": "manager",
                "concrete": True,
            }

        configure_mappers()
        self.assert_compile(
            select(Employee),
            "SELECT pjoin.employee_id, pjoin.name, pjoin._type, pjoin.type "
            "FROM (SELECT employee.employee_id AS employee_id, "
            "employee.name AS name, CAST(NULL AS VARCHAR(50)) AS type, "
            "'employee' AS _type FROM employee UNION ALL "
            "SELECT manager.employee_id AS employee_id, "
            "CAST(NULL AS VARCHAR(50)) AS name, manager.type AS type, "
            "'manager' AS _type FROM manager) AS pjoin",
        )
Example #46
 def __init__(self, p_tables, autoflush=True):
     # Database tables schema
     self.tables = p_tables
     # Make basic connection and setup declarative
     self.engine = create_engine(URL(**DATABASE))
     orm.configure_mappers()  # Important for full text search index
     try:
         session_mark = scoped_session(
             sessionmaker(autoflush=autoflush, bind=self.engine))
         session = session_mark()
         if session:
             print("Database connection OK")
             logging.debug(inspect.stack()[0][3],
                           "Database session opened successfully")
             self.session = session
             # Registering the index service
             # self._index_tables()
         else:
             print("Failed to open a database session")
             logging.error(inspect.stack()[0][3],
                           "Failed to open database session")
             raise Exception("Failed to open database session")
     except OperationalError:
         print("Database arguments are invalid")
Example #47
File: test_m2m.py Project: hinohi/elixir
    def test_custom_global_column_nameformat(self):
        # this needs to be done before declaring the classes
        elixir.options.M2MCOL_NAMEFORMAT = elixir.options.OLD_M2MCOL_NAMEFORMAT

        class A(Entity):
            bs_ = ManyToMany('B')

        class B(Entity):
            as_ = ManyToMany('A')

        setup_all(True)

        # revert to original format
        elixir.options.M2MCOL_NAMEFORMAT = elixir.options.NEW_M2MCOL_NAMEFORMAT

        # check m2m table was generated correctly
        configure_mappers()
        m2m_table = A.bs_.property.secondary
        assert m2m_table.name in metadata.tables

        # check column names
        m2m_cols = m2m_table.columns
        assert '%s_id' % A.table.name in m2m_cols
        assert '%s_id' % B.table.name in m2m_cols
Example #48
    def setup_mappers(cls):
        A, B, C, D, E, F, G = cls.classes("A", "B", "C", "D", "E", "F", "G")
        a, b, c, d, e, f, g = cls.tables("a", "b", "c", "d", "e", "f", "g")

        cls.mapper_registry.map_imperatively(
            A, a, properties={"bs": relationship(B), "gs": relationship(G)}
        )
        cls.mapper_registry.map_imperatively(
            B,
            b,
            properties={
                "cs": relationship(C),
                "ds": relationship(D),
                "es": relationship(E),
                "fs": relationship(F),
            },
        )
        cls.mapper_registry.map_imperatively(C, c)
        cls.mapper_registry.map_imperatively(D, d)
        cls.mapper_registry.map_imperatively(E, e)
        cls.mapper_registry.map_imperatively(F, f)
        cls.mapper_registry.map_imperatively(G, g)

        configure_mappers()
Example #49
    def test_overlapping_forwards_relationship(self):
        A, B, b_table, a_table, Dest, dest_table = (
            self.classes.A,
            self.classes.B,
            self.tables.b_table,
            self.tables.a_table,
            self.classes.Dest,
            self.tables.dest_table,
        )

        # this is the opposite mapping as that of #3630, never generated
        # an error / warning
        mapper(A,
               a_table,
               properties={"dest": relationship(Dest, backref="a")})
        mapper(
            B,
            b_table,
            inherits=A,
            concrete=True,
            properties={"dest": relationship(Dest, backref="a1")},
        )
        mapper(Dest, dest_table)
        configure_mappers()
Example #50
def configure_db(app):
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True

    if not app.config['TESTING']:
        if config.SQLALCHEMY_DATABASE_URI is None:
            raise Exception(
                "No proper SQLAlchemy store has been configured. Please edit your indico.conf"
            )

        app.config['SQLALCHEMY_DATABASE_URI'] = config.SQLALCHEMY_DATABASE_URI
        app.config['SQLALCHEMY_RECORD_QUERIES'] = False
        app.config['SQLALCHEMY_POOL_SIZE'] = config.SQLALCHEMY_POOL_SIZE
        app.config['SQLALCHEMY_POOL_TIMEOUT'] = config.SQLALCHEMY_POOL_TIMEOUT
        app.config['SQLALCHEMY_POOL_RECYCLE'] = config.SQLALCHEMY_POOL_RECYCLE
        app.config['SQLALCHEMY_MAX_OVERFLOW'] = config.SQLALCHEMY_MAX_OVERFLOW

    import_all_models()
    db.init_app(app)
    if not app.config['TESTING']:
        apply_db_loggers(app)

    plugins_loaded.connect(lambda sender: configure_mappers(), app, weak=False)
    models_committed.connect(on_models_committed, app)
Example #51
def setup_schema():
    # noinspection PyProtectedMember
    for class_ in Model._decl_class_registry.values():
        if not hasattr(class_, '__tablename__'):
            continue  # Skip abstract classes that don't have an underlying table.

        if hasattr(class_, 'SchemaClass'):
            continue

        # if class_.__name__.endswith('Schema'):
        #     raise ModelConversionError(
        #         "For safety, setup_schema can not be used when a Model class ends with 'Schema'"
        #     )

        # Determine schema metaclass
        meta_base = getattr(class_, 'Meta', object)
        if meta_base is not object and hasattr(meta_base, 'model'):
            Meta = meta_base
        else:
            class Meta(meta_base):
                model = class_

        schema_class = type(
            "{}Schema".format(class_.__name__),  # Name of new class
            (latci.schema.Schema,),  # Subclasses
            {'Meta': Meta}  # Members
        )
        setattr(class_, 'SchemaClass', schema_class)

configure_mappers()
Example #52
                        default=datetime.datetime.now,
                        onupdate=datetime.datetime.now)
    created_at = Column(DateTime, default=datetime.datetime.now)
    name = Column(String)
    data = Column(
        Json
    )  # A JSON -- should be everything returned by `get_model_data`, I think
    to_predict = Column(Array)
    company_id = Column(Integer)
    version = Column(
        Integer, default=entitiy_version
    )  # mindsdb_native version, can be used in the future for BC
    datasource_id = Column(Integer, ForeignKey('datasource.id'))
    is_custom = Column(Boolean)


class Log(Base):
    __tablename__ = 'log'

    id = Column(Integer, primary_key=True)
    created_at = Column(DateTime, default=datetime.datetime.now)
    log_type = Column(String)  # log, info, warning, traceback etc
    source = Column(String)  # file + line
    company_id = Column(Integer)
    payload = Column(String)
    created_at_index = Index("some_index", "created_at")  # secondary index on the created_at column


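# create all tables and finalize mapper configuration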
Base.metadata.create_all(engine)
orm.configure_mappers()
Example #53
    def __init__(self, server_iface):
        super().__init__(server_iface)

        self.server_iface = server_iface
        self.initialized = False

        try:
            config.init(
                os.environ.get("GEOMAPFISH_CONFIG",
                               "/etc/qgisserver/geomapfish.yaml"))

            c2cwsgiutils.broadcast.init()

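            # make sure all mappers are configured before the engine and session are created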
            configure_mappers()
            db_match = re.match(".*(@[^@]+)$",
                                config.get("sqlalchemy_slave.url"))
            QgsMessageLog.logMessage(
                "Connect to the database: ***{}".format(
                    db_match.group(1) if db_match else ""),
                "GeoMapFishAccessControl",
                level=Qgis.Info,
            )
            engine = sqlalchemy.create_engine(
                config["sqlalchemy_slave.url"],
                **(config.get_config().get("sqlalchemy", {})))
            session_factory = sessionmaker()
            session_factory.configure(bind=engine)
            DBSession = scoped_session(session_factory)  # noqa: N806

            if "GEOMAPFISH_OGCSERVER" in os.environ:
                self.single = True
                self.ogcserver_accesscontrol = OGCServerAccessControl(
                    server_iface, os.environ["GEOMAPFISH_OGCSERVER"],
                    config.get("srid"), DBSession)

                QgsMessageLog.logMessage(
                    "Use OGC server named '{}'.".format(
                        os.environ["GEOMAPFISH_OGCSERVER"]),
                    "GeoMapFishAccessControl",
                    level=Qgis.Info,
                )
                self.initialized = True
            elif "GEOMAPFISH_ACCESSCONTROL_CONFIG" in os.environ:
                self.single = False
                self.ogcserver_accesscontrols = {}
                with open(os.environ["GEOMAPFISH_ACCESSCONTROL_CONFIG"]
                          ) as ac_config_file:
                    ac_config = yaml.safe_load(ac_config_file.read())

                for map_, map_config in ac_config.get("map_config").items():
                    map_config["access_control"] = OGCServerAccessControl(
                        server_iface, map_config["ogc_server"],
                        config.get("srid"), DBSession)
                    self.ogcserver_accesscontrols[map_] = map_config
                QgsMessageLog.logMessage(
                    "Use config '{}'.".format(
                        os.environ["GEOMAPFISH_ACCESSCONTROL_CONFIG"]),
                    "GeoMapFishAccessControl",
                    level=Qgis.Info,
                )
                self.initialized = True
            else:
                QgsMessageLog.logMessage(
                    "The environment variable 'GEOMAPFISH_OGCSERVER' or "
                    "'GEOMAPFISH_ACCESSCONTROL_CONFIG' is not defined.",
                    "GeoMapFishAccessControl",
                    level=Qgis.Critical,
                )

        except Exception:
            print("".join(traceback.format_exception(*sys.exc_info())))
            QgsMessageLog.logMessage(
                "".join(traceback.format_exception(*sys.exc_info())),
                "GeoMapFishAccessControl",
                level=Qgis.Critical,
            )

        server_iface.registerAccessControl(
            self, int(os.environ.get("GEOMAPFISH_POSITION", 100)))
Example #54
    def setup_mappers(cls):
        Node, composite_pk_table, users, Keyword, items, Dingaling, \
            order_items, item_keywords, Item, User, dingalings, \
            Address, keywords, CompositePk, nodes, Order, orders, \
            addresses = cls.classes.Node, \
            cls.tables.composite_pk_table, cls.tables.users, \
            cls.classes.Keyword, cls.tables.items, \
            cls.classes.Dingaling, cls.tables.order_items, \
            cls.tables.item_keywords, cls.classes.Item, \
            cls.classes.User, cls.tables.dingalings, \
            cls.classes.Address, cls.tables.keywords, \
            cls.classes.CompositePk, cls.tables.nodes, \
            cls.classes.Order, cls.tables.orders, cls.tables.addresses

        mapper(
            User,
            users,
            properties={
                'addresses':
                relationship(Address, backref='user', order_by=addresses.c.id),
                # o2m, m2o
                'orders':
                relationship(Order, backref='user', order_by=orders.c.id)
            })
        mapper(
            Address,
            addresses,
            properties={
                # o2o
                'dingaling':
                relationship(Dingaling, uselist=False, backref="address")
            })
        mapper(Dingaling, dingalings)
        mapper(
            Order,
            orders,
            properties={
                # m2m
                'items':
                relationship(Item, secondary=order_items, order_by=items.c.id),
                'address':
                relationship(Address),  # m2o
            })
        mapper(
            Item,
            items,
            properties={
                'keywords': relationship(Keyword,
                                         secondary=item_keywords)  # m2m
            })
        mapper(Keyword, keywords)

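        # self-referential adjacency list: each Node has children and a parent backref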
        mapper(Node,
               nodes,
               properties={
                   'children':
                   relationship(Node,
                                backref=backref('parent',
                                                remote_side=[nodes.c.id]))
               })

        mapper(CompositePk, composite_pk_table)

        configure_mappers()
Example #55
    def __init__(self, db_type=database["db_type"]):
        self.db = self.create_db_connection(database, metadata, db_type=db_type)
        self.db.session_options = {"autocommit": True}
        metadata.create_all(self.db.engine)
        orm.configure_mappers()
Example #56
    def define_tables(cls, metadata):
        global Table1, Table1B, Table2, Table3, Data
        table1 = Table(
            "table1",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column(
                "related_id", Integer, ForeignKey("table1.id"), nullable=True
            ),
            Column("type", String(30)),
            Column("name", String(30)),
        )

        table2 = Table(
            "table2",
            metadata,
            Column("id", Integer, ForeignKey("table1.id"), primary_key=True),
        )

        table3 = Table(
            "table3",
            metadata,
            Column("id", Integer, ForeignKey("table1.id"), primary_key=True),
        )

        data = Table(
            "data",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("node_id", Integer, ForeignKey("table1.id")),
            Column("data", String(30)),
        )

        # join = polymorphic_union(
        #   {
        #   'table3' : table1.join(table3),
        #   'table2' : table1.join(table2),
        #   'table1' : table1.select(table1.c.type.in_(['table1', 'table1b'])),
        #   }, None, 'pjoin')

        with testing.expect_deprecated_20(
            r"The Join.alias\(\) method is considered legacy"
        ):
            join = table1.outerjoin(table2).outerjoin(table3).alias("pjoin")
            # join = None

        class Table1(object):
            def __init__(self, name, data=None):
                self.name = name
                if data is not None:
                    self.data = data

            def __repr__(self):
                return "%s(%s, %s, %s)" % (
                    self.__class__.__name__,
                    self.id,
                    repr(str(self.name)),
                    repr(self.data),
                )

        class Table1B(Table1):
            pass

        class Table2(Table1):
            pass

        class Table3(Table1):
            pass

        class Data(object):
            def __init__(self, data):
                self.data = data

            def __repr__(self):
                return "%s(%s, %s)" % (
                    self.__class__.__name__,
                    self.id,
                    repr(str(self.data)),
                )

        try:
            # this is how the mapping used to work.  ensure that this raises an
            # error now
            table1_mapper = mapper(
                Table1,
                table1,
                select_table=join,
                polymorphic_on=table1.c.type,
                polymorphic_identity="table1",
                properties={
                    "nxt": relationship(
                        Table1,
                        backref=backref(
                            "prev", foreignkey=join.c.id, uselist=False
                        ),
                        uselist=False,
                        primaryjoin=join.c.id == join.c.related_id,
                    ),
                    "data": relationship(mapper(Data, data)),
                },
            )
            configure_mappers()
            assert False
        except Exception:
            assert True
            clear_mappers()

        # currently, the "eager" relationships degrade to lazy relationships
        # due to the polymorphic load.
        # the "nxt" relationship used to have a "lazy='joined'" on it, but the
        # EagerLoader raises the "self-referential"
        # exception now.  since eager loading would never work for that
        # relationship anyway, its better that the user
        # gets an exception instead of it silently not eager loading.
        # NOTE: using "nxt" instead of "next" to avoid 2to3 turning it into
        # __next__() for some reason.
        table1_mapper = mapper(
            Table1,
            table1,
            # select_table=join,
            polymorphic_on=table1.c.type,
            polymorphic_identity="table1",
            properties={
                "nxt": relationship(
                    Table1,
                    backref=backref(
                        "prev", remote_side=table1.c.id, uselist=False
                    ),
                    uselist=False,
                    primaryjoin=table1.c.id == table1.c.related_id,
                ),
                "data": relationship(
                    mapper(Data, data), lazy="joined", order_by=data.c.id
                ),
            },
        )

        mapper(Table1B, inherits=table1_mapper, polymorphic_identity="table1b")

        mapper(
            Table2,
            table2,
            inherits=table1_mapper,
            polymorphic_identity="table2",
        )

        mapper(
            Table3,
            table3,
            inherits=table1_mapper,
            polymorphic_identity="table3",
        )

        configure_mappers()
        assert table1_mapper.primary_key == (
            table1.c.id,
        ), table1_mapper.primary_key
Example #57
#!/usr/bin/env python3

from sqlalchemy.orm import configure_mappers

from getpost.models import Base
from getpost.orm import engine

configure_mappers()  # sqlalchemy-searchable
Base.metadata.create_all(engine)
Example #58
    def test_with_polymorphic(self):
        metadata = MetaData(testing.db)

        order = Table(
            "orders",
            metadata,
            Column("id", Integer, primary_key=True),
            Column(
                "employee_id",
                Integer,
                ForeignKey("employees.id"),
                nullable=False,
            ),
            Column("type", Unicode(16)),
        )

        employee = Table(
            "employees",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("name", Unicode(16), unique=True, nullable=False),
        )

        product = Table("products", metadata,
                        Column("id", Integer, primary_key=True))

        orderproduct = Table(
            "orderproducts",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("order_id",
                   Integer,
                   ForeignKey("orders.id"),
                   nullable=False),
            Column(
                "product_id",
                Integer,
                ForeignKey("products.id"),
                nullable=False,
            ),
        )

        class Order(object):
            pass

        class Employee(object):
            pass

        class Product(object):
            pass

        class OrderProduct(object):
            pass

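        # use a simple aliased SELECT of the orders table as the with_polymorphic selectable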
        order_join = order.select().alias("pjoin")

        mapper(
            Order,
            order,
            with_polymorphic=("*", order_join),
            polymorphic_on=order_join.c.type,
            polymorphic_identity="order",
            properties={
                "orderproducts":
                relationship(OrderProduct, lazy="select", backref="order")
            },
        )

        mapper(
            Product,
            product,
            properties={
                "orderproducts":
                relationship(OrderProduct, lazy="select", backref="product")
            },
        )

        mapper(
            Employee,
            employee,
            properties={
                "orders": relationship(Order,
                                       lazy="select",
                                       backref="employee")
            },
        )

        mapper(OrderProduct, orderproduct)

        # this requires that the compilation of order_mapper's "surrogate
        # mapper" occur after the initial setup of MapperProperty objects on
        # the mapper.
        configure_mappers()
Example #59
def install_activity() -> None:
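    # configure mappers first so the versioning extension can generate its tables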
    orm.configure_mappers()

    create_versionning_tables()

    db.session.commit()
Example #60
def create_app(script_info=None):
    app = Flask(__name__)
    app.config.from_object('config')

    db.init_app(app)
    migrate.init_app(app)
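    # make sure all model mappers are fully configured while the app is being created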
    orm.configure_mappers()

    talisman.init_app(
        app,
        force_https=False,
        session_cookie_secure=False,
        content_security_policy=csp,
        strict_transport_security=FLASK_STRICT_TRANSPORT_SECURITY,
        referrer_policy='no-referrer')

    login_manager.init_app(app)
    login_manager.session_protection = FLASK_SESSION_PROTECTION
    login_manager.login_view = 'tracker.login'

    app.url_map.converters['regex'] = RegexConverter
    app.jinja_env.globals['ATOM_FEEDS'] = atom_feeds
    app.jinja_env.globals['SSO_ENABLED'] = SSO_ENABLED

    if SSO_ENABLED:
        app.config["IDP_CLIENT_ID"] = SSO_CLIENT_ID
        app.config["IDP_CLIENT_SECRET"] = SSO_CLIENT_SECRET

        oauth.init_app(app)
        oauth.register(name='idp',
                       server_metadata_url=SSO_METADATA_URL,
                       client_kwargs={'scope': 'openid email'})
        login_manager.unauthorized_handler(handle_unauthorized_access_with_sso)

    from tracker.view.error import error_handlers
    for error_handler in error_handlers:
        app.register_error_handler(error_handler['code_or_exception'],
                                   error_handler['func'])

    from tracker.view.blueprint import blueprint
    app.register_blueprint(tracker)
    app.register_blueprint(blueprint)

    @app.shell_context_processor
    def make_shell_context():
        from tracker.model import CVE
        from tracker.model import Advisory
        from tracker.model import CVEGroup
        from tracker.model import CVEGroupEntry
        from tracker.model import CVEGroupPackage
        from tracker.model import Package
        from tracker.model import User
        return dict(db=db,
                    migrate=migrate,
                    talisman=talisman,
                    login_manager=login_manager,
                    tracker=tracker,
                    Advisory=Advisory,
                    CVE=CVE,
                    CVEGroup=CVEGroup,
                    CVEGroupEntry=CVEGroupEntry,
                    CVEGroupPackage=CVEGroupPackage,
                    User=User,
                    Package=Package,
                    oauth=oauth)

    return app