Esempio n. 1
0
    def setUp(self):
        """Create an in-memory SQLite database, map the test model classes
        with SQLAlchemy, and register them with PyAMF for AMF remoting.

        Skips the test when the optional ``sqlalchemy`` dependency is
        not importable.
        """
        if not sqlalchemy:
            self.skipTest("'sqlalchemy' is not available")

        # Create DB and map objects
        self.metadata = MetaData()
        self.engine = create_engine("sqlite:///:memory:", echo=False)

        Session = sessionmaker(bind=self.engine)

        self.session = Session()
        # Table objects keyed by table name.
        self.tables = {}

        self.tables["users"] = Table(
            "users", self.metadata, Column("id", Integer, primary_key=True), Column("name", String(64))
        )

        self.tables["addresses"] = Table(
            "addresses",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("user_id", Integer, ForeignKey("users.id")),
            Column("email_address", String(128)),
        )

        self.tables["lazy_loaded"] = Table(
            "lazy_loaded",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("user_id", Integer, ForeignKey("users.id")),
        )

        self.tables["another_lazy_loaded"] = Table(
            "another_lazy_loaded",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("user_id", Integer, ForeignKey("users.id")),
        )

        # Mapper objects keyed by model name.
        self.mappers = {}

        # lazy=False eager-loads addresses; the two *lazy_loaded relations
        # are loaded on first attribute access (lazy=True).
        self.mappers["user"] = mapper(
            User,
            self.tables["users"],
            properties={
                "addresses": relation(Address, backref="user", lazy=False),
                "lazy_loaded": relation(LazyLoaded, lazy=True),
                "another_lazy_loaded": relation(AnotherLazyLoaded, lazy=True),
            },
        )

        self.mappers["addresses"] = mapper(Address, self.tables["addresses"])
        self.mappers["lazy_loaded"] = mapper(LazyLoaded, self.tables["lazy_loaded"])
        self.mappers["another_lazy_loaded"] = mapper(AnotherLazyLoaded, self.tables["another_lazy_loaded"])

        self.metadata.create_all(self.engine)

        # Register AMF class aliases.
        # NOTE(review): AnotherLazyLoaded is not registered with pyamf —
        # confirm this is intentional.
        pyamf.register_class(User, "server.User")
        pyamf.register_class(Address, "server.Address")
        pyamf.register_class(LazyLoaded, "server.LazyLoaded")
Esempio n. 2
0
def orm_load_baseordereddataset(man):
    """Define the baseordereddataset / ordereddata_items tables on *man*'s
    metadata, create them, and map the ordered-dataset classes.

    BaseOrderedDataSet is a polymorphic base (discriminator column 'type');
    its data_items are kept in a dict-like collection keyed by the
    association row's 'index' column.
    """
    man.baseordereddataset_table = Table('baseordereddataset', man.metadata,
                                         Column('id', Integer, primary_key=True),
                                         Column('created', DateTime),
                                         Column('label', String(50), nullable=False, unique=True),
                                         Column('type', String(30), nullable=False))

    # Association table: one row per (dataset, item) pair, positioned by 'index'.
    man.ordereditems_table = Table('ordereddata_items', man.metadata,
                         Column('dataset_id', Integer, ForeignKey('baseordereddataset.id'),
                                primary_key=True),
                         Column('item_id', Integer, ForeignKey('basedata.basedata_id'),
                                primary_key=True),
                         Column('index', Integer, nullable=False)
                         )
    
    man.metadata.create_all()

    mapper(BaseOrderedDataSet, man.baseordereddataset_table,
           polymorphic_on=man.baseordereddataset_table.c.type, polymorphic_identity='base_ordered_dataset',
           properties={'data_items': relation(OrderedDataSetItem,
                                                  backref='ordered_datasets_items',
                                                  cascade='all, delete-orphan',
                                                  collection_class=column_mapped_collection(man.ordereditems_table.c.index))
                       }
           )
    mapper(OrderedDataSetItem, man.ordereditems_table, properties={
        'item': relation(BaseData, lazy='joined', backref='dataitem')
        })
Esempio n. 3
0
    def _setup_mappers(self, tables, mappers):
        """Map the database Tables to SQLAlchemy Mapper objects.

        :param tables: dict mapping table name -> Table
        :param mappers: dict to fill with table name -> Mapper (mutated in place)
        """
        # Simple tables whose classes need no relation properties.
        standalone_table_tuple_ls = [
            ("snps", SNPs),
            ("call_method", CallMethod),
            ("readme", README),
            ("accession", Accession),
        ]
        for table_name, table_class in standalone_table_tuple_ls:
            mappers[table_name] = mapper(table_class, tables[table_name])

        # snpset <-> snps is many-to-many through the snps2snpset table.
        mappers["snpset"] = mapper(
            SNPset,
            tables["snpset"],
            properties={"snps": relation(SNPs, secondary=tables["snps2snpset"], backref="snpset")},
        )
        mappers["snps2snpset"] = mapper(
            SNPs2SNPset, tables["snps2snpset"], properties={"snps": relation(SNPs), "snpset": relation(SNPset)}
        )
        mappers["calls"] = mapper(
            Calls,
            tables["calls"],
            properties={"snps": relation(SNPs), "call_method": relation(CallMethod), "accession": relation(Accession)},
        )
        mappers["snps_ab_allele_mapping"] = mapper(
            SNPsABAlleleMapping,
            tables["snps_ab_allele_mapping"],
            properties={"snps": relation(SNPs, backref="snps_ab_allele_mapping"), "readme": relation(README)},
        )
def initialize_mapper():
    """Map Content, Relation and File.

    Content is polymorphic on object_type and forms a self-referential
    parent/children tree; File is mapped polymorphically on files.c.type
    with identity 'db-file'.
    """
    orm.mapper(Content, content,
               polymorphic_on=content.c.object_type,
               polymorphic_identity='content',
               properties = {
                   # parent/children tree keyed on content_id.
                   'children': orm.relation(
                       Content,
                       backref=orm.backref(
                           'parent',
                           remote_side=[content.c.content_id])),
                   # Outgoing relations are deleted with their source content.
                   'relations': orm.relation(
                       Relation,
                       cascade="all, delete-orphan",
                       primaryjoin=content.c.content_id==relations.c.source_id,
                       backref=orm.backref("source"))})

    orm.mapper(Relation, relations,
               properties = {
               'target': orm.relation(
                   Content, uselist=False,
                   primaryjoin=content.c.content_id==relations.c.target_id)})

    orm.mapper(File, files,
               polymorphic_on=files.c.type,
               polymorphic_identity='db-file')
Esempio n. 5
0
def initModels():
    """Initialize timebot DB tables and mappers."""
    engine = db.getEngine()
    meta = db.getMeta()
    session = db.getSession()

    # Schema: a user belongs to a company and accumulates worktime entries.
    user_table = Table(
            'tb_user', meta,
            Column('id', Integer, primary_key=True, autoincrement=True),
            Column('company_id', None, ForeignKey('tb_company.id'), nullable=True),
            Column('jid', Unicode(50), unique=True, nullable=False),
            Column('name', Unicode(50)),
            Column('rate', Integer))

    worktime_table = Table(
            'tb_time', meta,
            Column('id', Integer, primary_key=True),
            Column('user_id', None, ForeignKey('tb_user.id'), nullable=False),
            Column('start', DateTime, default=datetime.now),
            Column('stop', DateTime, nullable=True))

    company_table = Table(
            'tb_company', meta,
            Column('id', Integer, primary_key=True),
            Column('name', Unicode(50), nullable=True))

    meta.create_all()

    # Wire the model classes to their tables.
    mapper(TUser, user_table, properties={
            'worktime': relation(TWorktime),
            'company': relation(TCompany)})
    mapper(TWorktime, worktime_table, properties={
            'user': relation(TUser)})
    mapper(TCompany, company_table, properties={
            'users': relation(TUser)})
Esempio n. 6
0
 def versions_properties(item_class, change_class, versions_table):
     """Return the common mapper properties for a versioned item class."""
     properties = {
         "change": relation(change_class, uselist=False),
         "head": relation(item_class, uselist=False),
     }
     # Notes:
     # - domain.AttachedFile is the only versionable type that is
     # not a ParliamentaryItem.
     # - !+IVersionable(mr, jul-2011) an AttachedFile does not have
     # attached_files; but, this violates the meaning of IVersionable?
     # Or, the ability to have attached_files should be independent of
     # being versionable? IMayAttachFiles
     if item_class is not domain.AttachedFile:
         afv = schema.attached_file_versions
         properties["attached_files"] = relation(
             domain.AttachedFileVersion,
             primaryjoin=rdb.and_(
                 versions_table.c.content_id == afv.c.item_id,
                 versions_table.c.version_id == afv.c.file_version_id,
             ),
             foreign_keys=[afv.c.item_id, afv.c.file_version_id],
         )
     return properties
Esempio n. 7
0
	def InitMapper( cls, metadata, ObjectType ):
		"""Create this class's table, position index and polymorphic mapper.

		Builds a self-referential tree (parent/children via parent_id),
		a composite 3D position from pos_x/pos_y/pos_z, and a mapper that
		is polymorphic on type_id.
		"""
		cls.__table__ = Table( cls.__tablename__, metadata,
				Column('id',	    Integer,     index = True, primary_key = True),
				Column('type_id',	Integer,     ForeignKey( ObjectType.id ), nullable = False),
				Column('parent_id', Integer,     ForeignKey( "%s.id" % cls.__tablename__ ), nullable = True),
				Column('name',      Text,        nullable = False),
				Column('size',      Integer(64), nullable = False, default = 0),
				Column('pos_x',     Integer(64), nullable = False, default = 0),
				Column('pos_y',     Integer(64), nullable = False, default = 0),
				Column('pos_z',     Integer(64), nullable = False, default = 0),
				Column('mtime',	    DateTime,    nullable = False,
					onupdate = func.current_timestamp(), default = func.current_timestamp()))

		cols = cls.__table__.c

		# Composite index to speed up spatial lookups by coordinates.
		Index('ix_%s_position' % cls.__tablename__, cols.pos_x, cols.pos_y, cols.pos_z)

		mapper( cls, cls.__table__, polymorphic_on = cols.type_id, properties = {
			'type': relation( ObjectType,
				uselist = False,
				backref = backref( 'objects' )),
			# Tree like hierarchy for objects ie. Universe => Solar systems => Planets => etc.
			'children': relation( cls,
				backref = backref( 'parent', remote_side = [ cols.id ] )),
			# Object position in 3D space
			'position': composite( Vector3D, cols.pos_x, cols.pos_y, cols.pos_z ),
			})
Esempio n. 8
0
def create_tables():
    """Create tables filled with test data.

    Defines occupations/users/addresses (users reference an occupation,
    addresses reference a user), maps the model classes with eager-loaded
    relations, and creates the schema, working around an old PySqlite bug.
    """

    occupations_table = Table('occupations', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(20)))
    users_table = Table('users', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(20)),
        Column('occupation_id', Integer, ForeignKey("occupations.id")))
    addresses_table = Table('addresses', metadata,
        Column('id', Integer, primary_key=True),
        Column('user_id', Integer, ForeignKey("users.id")),
        Column('street', String(50)),
        Column('city', String(40)))

    # lazy=False: both User relations are eager-loaded.
    mapper(Occupation, occupations_table)
    mapper(User, users_table, properties={
        'occupation' : relation(Occupation, lazy=False),
        'addresses': relation(Address, backref='user', lazy=False)})
    mapper(Address, addresses_table)

    try:
        metadata.create_all()
    # Fixed: "except Exception, error" is Python-2-only syntax (a SyntaxError
    # on Python 3); "as" works on Python 2.6+ and Python 3.
    except Exception as error:
        # workaround for a problem with PySqlite < 2.6 and SQLAlchemy < 0.5
        if 'Cannot operate on a closed cursor' in str(error):
            metadata.create_all(checkfirst=False)
        else:
            raise
Esempio n. 9
0
def init_model(engine):
	"""Call me before using any of the tables or classes in the model"""
	# Prefixes a user may be granted access to.
	global prefix_table
	prefix_table = schema.Table('prefix', meta.metadata, 
			schema.Column('id', types.Integer,
				schema.Sequence('prefix_id_seq'), primary_key=True),
			schema.Column('prefix', types.Text())
			)
	
	global users_table
	users_table = schema.Table('users', meta.metadata, 
			schema.Column('id', types.Integer,
				schema.Sequence('users_id_seq'), primary_key=True),
			schema.Column('name', types.Text()),
			schema.Column('password', types.Text()),
			schema.Column('real_name', types.Text())
			)
	
	# Association table: which user may access which prefix (plus write flag).
	global users_prefix_access
	users_prefix_access = schema.Table('user_prefix_access', meta.metadata, 
			schema.Column('user_id', types.Integer, schema.ForeignKey('users.id'), primary_key=True),
			schema.Column('prefix_id', types.Integer, schema.ForeignKey('prefix.id'), primary_key=True),
			schema.Column('write_access', types.Integer)
			)

	orm.mapper(Users, users_table, properties = { 'base':orm.relation(UsersPrefixAssociation) })
	orm.mapper(UsersPrefixAssociation, users_prefix_access, properties = { 'prefix':orm.relation(Prefix) })
	orm.mapper(Prefix, prefix_table)
	
	# Bind the session factory and expose the engine module-wide.
	meta.Session.configure(bind=engine)
	meta.engine = engine
Esempio n. 10
0
    def test_pending_expunge(self):
        """Removing a pending Item from its Order must expunge the Item and
        its cascaded Attribute so that neither row is inserted on flush."""
        class Order(_fixtures.Base):
            pass
        class Item(_fixtures.Base):
            pass
        class Attribute(_fixtures.Base):
            pass

        mapper(Attribute, attributes)
        mapper(Item, items, properties=dict(
            attributes=relation(Attribute, cascade="all,delete-orphan", backref="item")
        ))
        mapper(Order, orders, properties=dict(
            items=relation(Item, cascade="all,delete-orphan", backref="order")
        ))

        s = create_session()
        order = Order(name="order1")
        s.add(order)

        attr = Attribute(name="attr1")
        item = Item(name="item1", attributes=[attr])

        # append then remove before flush: delete-orphan cascade should
        # expunge the now-orphaned item (and its attribute) from the session.
        order.items.append(item)
        order.items.remove(item)

        assert item not in s
        assert attr not in s

        s.flush()
        # Only the order row reaches the database.
        assert orders.count().scalar() == 1
        assert items.count().scalar() == 0
        assert attributes.count().scalar() == 0
Esempio n. 11
0
def setup_orm():
    """Map the i18n text, language and country tables."""
    tables = meta.metadata.tables
    orm.mapper(Language, tables['languages'])
    orm.mapper(I18nText, tables['i18n_texts'],
               properties={
                   'versions': relation(I18nTextVersion,
                                        order_by=tables['i18n_texts_versions'].c.language_id.asc())
               })

    orm.mapper(I18nTextVersion, tables['i18n_texts_versions'],
               properties={ 'language': relation(Language) })


    # LanguageText is deprecated until it uses I18nText
    orm.mapper(LanguageText,
               tables['language_texts'],
               properties={
                   'language': relation(Language,
                                        backref=backref('texts',
                                                        order_by=tables['language_texts'].c.id.asc(),
                                                        cascade='all, delete-orphan'))})

    orm.mapper(Country,
               tables['countries'],
               properties={
                   'language': relation(Language,
                                        backref=backref('countries',
                                                        cascade='all, delete-orphan',
                                                        order_by=tables['countries'].c.id.asc()))})
Esempio n. 12
0
def reload_mapper(metadata, now):
    """Reload the mapper for the Machine model.

    @param metadata: MetaData to reload against
    @type metadata: sqlalchemy.schema.MetaData
    @param now: current time, passed through to get_machine_table()
    @type now: datetime
    """
    t_machine = get_machine_table(metadata, now)
    t_machine_tag = metadata.tables['machine2tag']
    t_user = metadata.tables['user']
    
    # Self-referential parent/children tree, plus notebook/user/tag relations.
    # Two FKs point at user, so created_user/modified_user need explicit joins.
    mapper(Machine, t_machine, properties={
        'children' : relation(Machine,
                              backref=backref('parent',
                                              remote_side=[t_machine.c.id])),
        'notebook' : relation(karesansui.db.model.notebook.Notebook),
        'created_user' : relation(karesansui.db.model.user.User,
                                  primaryjoin=t_machine.c.created_user_id==t_user.c.id),
        'modified_user' : relation(karesansui.db.model.user.User,
                                  primaryjoin=t_machine.c.modified_user_id==t_user.c.id),
        'tags' : relation(karesansui.db.model.tag.Tag,
                         secondary=t_machine_tag,
                         backref="machine"),
        })
Esempio n. 13
0
def reload_mapper(metadata, now):
    """Reload the mapper for the Machine2Jobgroup model.

    @param metadata: MetaData to reload against
    @type metadata: sqlalchemy.schema.MetaData
    @param now: current time, passed through to get_machine2jobgroup_table()
    @type now: datetime
    """
    t_machine2jobgroup = get_machine2jobgroup_table(metadata, now)
    t_machine = metadata.tables["machine"]
    t_user = metadata.tables["user"]
    # Two FKs into user require explicit primaryjoins for each relation.
    mapper(Machine2Jobgroup, t_machine2jobgroup, properties={
        'created_user' : relation(karesansui.db.model.user.User,
                                  primaryjoin=t_machine2jobgroup.c.created_user_id==t_user.c.id),
        
        'modified_user' : relation(karesansui.db.model.user.User,
                                   primaryjoin=t_machine2jobgroup.c.modified_user_id==t_user.c.id),
        
        'machine' : relation(karesansui.db.model.machine.Machine,
                             primaryjoin=t_machine2jobgroup.c.machine_id==t_machine.c.id,
                             ),
        })
Esempio n. 14
0
    def testone(self):
        """
        Tests eager load of a many-to-one attached to a one-to-many.  this
        testcase illustrated the bug, which is that when the single Company is
        loaded, no further processing of the rows occurred in order to load
        the Company's second Address object.

        """
        mapper(Address, addresses)

        # Both relations are eager (lazy=False).
        mapper(Company, companies, properties={"addresses": relation(Address, lazy=False)})

        mapper(Invoice, invoices, properties={"company": relation(Company, lazy=False)})

        a1 = Address(address="a1 address")
        a2 = Address(address="a2 address")
        c1 = Company(company_name="company 1", addresses=[a1, a2])
        i1 = Invoice(date=datetime.datetime.now(), company=c1)

        session = create_session()
        session.add(i1)
        session.flush()

        company_id = c1.company_id
        invoice_id = i1.invoice_id

        # Clear the identity map so each get() below reloads from the DB.
        session.expunge_all()
        c = session.query(Company).get(company_id)

        session.expunge_all()
        i = session.query(Invoice).get(invoice_id)

        eq_(c, i.company)
Esempio n. 15
0
    def make_tables(me):
        """Create two mutually referencing tables and map A and B onto them.

        The table2 -> table1 FK uses use_alter so the circular FK pair can
        be created; the B -> A relation uses post_update for the same reason.
        """
        table_one = Table(
            'table1', me.meta,
            Column('name', Text),
            Column('id', Integer, primary_key=True),
            Column('t2_id', Integer, ForeignKey('table2.id')),
        )

        table_two = Table(
            'table2', me.meta,
            Column('name', Text),
            Column('id', Integer, primary_key=True),
            Column('t1_id', Integer,
                   ForeignKey('table1.id', use_alter=True, name='zt2id_fk')),
        )

        me.meta.create_all()

        mapper(A, table_one, properties={
            'link': relation(B, primaryjoin=table_one.c.t2_id == table_two.c.id),
        })

        mapper(B, table_two, properties={
            'link': relation(A, primaryjoin=table_two.c.t1_id == table_one.c.id,
                             post_update=True),
        })
Esempio n. 16
0
def setup_orm():
    """Map GroupMailingListMessage as a polymorphic ContentItem subtype.

    reply_to and thread are self-referential relations keyed on the
    message's machine ids; 'attachments' is a synonym for the mapped
    "files" property.
    """
    tables = meta.metadata.tables
    columns = tables['group_mailing_list_messages'].c
    orm.mapper(GroupMailingListMessage,
               tables['group_mailing_list_messages'],
               inherits=ContentItem,
               polymorphic_identity='mailing_list_message',
               polymorphic_on=tables['content_items'].c.content_type,
               properties = {
                             # Message this one replies to (self-referential).
                             'reply_to': relation(GroupMailingListMessage,
                                                  backref=backref('replies'),
                                                  foreign_keys=(columns.reply_to_message_machine_id),
                                                  primaryjoin=columns.id == columns.reply_to_message_machine_id,
                                                  remote_side=(columns.id)),
                             # Root message of the thread; posts ordered by send time.
                             'thread': relation(GroupMailingListMessage,
                                                post_update=True,
                                                order_by=[asc(columns.sent)],
                                                backref=backref('posts'),
                                                foreign_keys=(columns.thread_message_machine_id),
                                                primaryjoin=columns.id == columns.thread_message_machine_id,
                                                remote_side=(columns.id)),
                             'author': relation(User,
                                                backref=backref('messages')),
                             'group': relation(Group,
                                               primaryjoin=(columns.group_id == tables['groups'].c.id)),
                             'attachments': synonym("files")
                             })
Esempio n. 17
0
    def test_bidirectional(self):
        """tests a many-to-many backrefs"""
        Place.mapper = mapper(Place, place)
        # Two many-to-many relations via separate association tables,
        # each with a backref onto Place.
        Transition.mapper = mapper(Transition, transition, properties = dict(
            inputs = relation(Place.mapper, place_output, lazy=True, backref='inputs'),
            outputs = relation(Place.mapper, place_input, lazy=True, backref='outputs'),
            )
        )

        t1 = Transition('transition1')
        t2 = Transition('transition2')
        t3 = Transition('transition3')
        p1 = Place('place1')
        p2 = Place('place2')
        p3 = Place('place3')

        # Link from both sides; backrefs keep the collections in sync.
        t1.inputs.append(p1)
        t1.inputs.append(p2)
        t1.outputs.append(p3)
        t2.inputs.append(p1)
        p2.inputs.append(t2)
        p3.inputs.append(t2)
        p1.outputs.append(t1)
        sess = create_session()
        sess.add_all((t1, t2, t3,p1, p2, p3))
        sess.flush()

        self.assert_result([t1], Transition, {'outputs': (Place, [{'name':'place3'}, {'name':'place1'}])})
        self.assert_result([p2], Place, {'inputs': (Transition, [{'name':'transition1'},{'name':'transition2'}])})
Esempio n. 18
0
    def test_double(self):
        """test that a mapper can have two eager relations to the same table, via
        two different association tables.  aliases are required."""

        Place.mapper = mapper(Place, place, properties = {
            'thingies':relation(mapper(PlaceThingy, place_thingy), lazy=False)
        })

        # Both relations target Place but through different secondary tables.
        Transition.mapper = mapper(Transition, transition, properties = dict(
            inputs = relation(Place.mapper, place_output, lazy=False),
            outputs = relation(Place.mapper, place_input, lazy=False),
            )
        )

        tran = Transition('transition1')
        tran.inputs.append(Place('place1'))
        tran.outputs.append(Place('place2'))
        tran.outputs.append(Place('place3'))
        sess = create_session()
        sess.add(tran)
        sess.flush()

        # Reload from the DB and verify both eager collections populate.
        sess.expunge_all()
        r = sess.query(Transition).all()
        self.assert_unordered_result(r, Transition,
            {'name': 'transition1',
            'inputs': (Place, [{'name':'place1'}]),
            'outputs': (Place, [{'name':'place2'}, {'name':'place3'}])
            })
Esempio n. 19
0
def initialize_sql(engine):
    """Called by the app on startup to setup bindings to the DB"""
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine

    # only if we are on sqlite do we have this relation
    if 'sqlite' in str(DBSession.bind):

        # hasattr guards make this safe to call more than once: the
        # fulltext relations are attached to the classes only on first run.
        if not hasattr(SqliteBmarkFT, 'bmark'):
            Bmark.fulltext = relation(SqliteBmarkFT,
                         backref='bmark',
                         uselist=False,
                         cascade="all, delete, delete-orphan",
                         )

        if not hasattr(SqliteContentFT, 'readable'):
            Readable.fulltext = relation(SqliteContentFT,
                         backref='readable',
                         uselist=False,
                         cascade="all, delete, delete-orphan",
                         )

        # this is purely to make queries easier. If I've searched the content,
        # I want to get back to the hashed->bmark as quickly as possible. Since
        # there's only one fulltext result for each hashed anyway, it's ok to
        # join it directly without going through the Readable table object
        if not hasattr(SqliteContentFT, 'hashed'):
            Hashed.fulltext = relation(SqliteContentFT,
                         backref='hashed',
                         uselist=False,
                         primaryjoin=Hashed.hash_id == SqliteContentFT.hash_id,
                         foreign_keys=[SqliteContentFT.hash_id],
                         cascade="all, delete, delete-orphan",
                         )
Esempio n. 20
0
    def test_one(self):
        """Relations added incrementally with add_property() still compile
        and eager-load correctly."""
        p_m = mapper(Part, parts)

        mapper(InheritedPart, inherited_part, properties=dict(
            part=relation(Part, lazy=False)))

        d_m = mapper(Design, design, properties=dict(
            inheritedParts=relation(InheritedPart,
                                    cascade="all, delete-orphan",
                                    backref="design")))

        mapper(DesignType, design_types)

        # Properties attached after the initial mapper() calls.
        d_m.add_property(
            "type", relation(DesignType, lazy=False, backref="designs"))

        p_m.add_property(
            "design", relation(
                Design, lazy=False,
                backref=backref("parts", cascade="all, delete-orphan")))


        d = Design()
        sess = create_session()
        sess.add(d)
        sess.flush()
        sess.expunge_all()
        # Reload and touch the lazy collection to exercise the mappers.
        x = sess.query(Design).get(1)
        x.inheritedParts
Esempio n. 21
0
 def setup_mappers(cls):
     """Map Extra, Pref and User with delete / delete-orphan cascades."""
     mapper(Extra, extra)
     mapper(Pref, prefs, properties={
         'extra': relation(Extra, cascade="all, delete"),
     })
     mapper(User, users, properties={
         'pref': relation(Pref, lazy=False, cascade="all, delete-orphan",
                          single_parent=True),
     })
Esempio n. 22
0
    def createMappers(self):
        """Reset all mappers, then (re)map User, Email and PhoneNumber."""
        orm.clear_mappers()

        # emails are eager-loaded, phone numbers lazy-loaded.
        user_properties = {
            'emails': orm.relation(models.Email, lazy=False),
            'phone_numbers': orm.relation(models.PhoneNumber, lazy=True),
        }
        orm.mapper(models.User, self.users_table, properties=user_properties)
        orm.mapper(models.Email, self.emails_table)
        orm.mapper(models.PhoneNumber, self.phone_numbers_table)
Esempio n. 23
0
 def setup_mappers(cls):
     """Map T1 <-> T2 with explicit join conditions, and T3 to both."""
     join_t1_to_t2 = t1.c.t2id == t2.c.id
     join_t2_to_t1 = t2.c.t1id == t1.c.id
     mapper(T1, t1, properties={'t2': relation(T2, primaryjoin=join_t1_to_t2)})
     mapper(T2, t2, properties={'t1': relation(T1, primaryjoin=join_t2_to_t1)})
     mapper(T3, t3, properties={'t1': relation(T1), 't2': relation(T2)})
Esempio n. 24
0
def init_model():
    """Define the blog tables (posts, tags, comments, users, roles,
    resources) on the shared metadata and map their model classes."""
    engine = meta.engine
    
    bb_post_table = sa.Table('bb_post', meta.metadata,
            sa.Column('id', types.Integer, primary_key=True),
            sa.Column('title', types.String(255)),
            sa.Column('slug', types.String(255)),
            sa.Column('body', types.Text),
            sa.Column('userId', types.Integer, sa.ForeignKey('bb_user.id')),
            sa.Column('posted', types.DateTime, onupdate=datetime.datetime.now),
            sa.Column('edited', types.DateTime))
    orm.mapper(Post, bb_post_table, \
            properties={'user':orm.relation(User), \
                        'comments':orm.relation(Comment)})

    bb_tag_table = sa.Table('bb_tag', meta.metadata,
            sa.Column('id', types.Integer, primary_key=True),
            sa.Column('name', types.String(255)),
            sa.Column('slug', types.String(255)))
    orm.mapper(Tag, bb_tag_table)

    # NOTE(review): bb_post_tag is defined but no mapper/secondary uses it
    # here — confirm it is consumed elsewhere.
    bb_post_tag_table = sa.Table('bb_post_tag', meta.metadata,
            sa.Column('id', types.Integer, primary_key=True),
            sa.Column('postId', types.Integer, sa.ForeignKey('bb_post.id')),
            sa.Column('tagId', types.Integer, sa.ForeignKey('bb_tag.id')))

    # NOTE(review): the table is literally named 'bb_comment_table' (with the
    # '_table' suffix), unlike the other 'bb_*' names — confirm intentional.
    bb_comment_table = sa.Table('bb_comment_table', meta.metadata,
            sa.Column('id', types.Integer, primary_key=True),
            sa.Column('name', types.String(60)),
            sa.Column('email', types.String(100)),
            sa.Column('url', types.String(255)),
            sa.Column('body', types.Text),
            sa.Column('postId', types.Integer, sa.ForeignKey('bb_post.id')),
            sa.Column('userId', types.Integer, sa.ForeignKey('bb_user.id')),
            sa.Column('posted', types.DateTime, onupdate=datetime.datetime.now))
    orm.mapper(Comment, bb_comment_table, \
            properties={'post':orm.relation(Post)})

    bb_user_table = sa.Table('bb_user', meta.metadata,
            sa.Column('id', types.Integer, primary_key=True),
            sa.Column('username', types.String(255)),
            sa.Column('password', types.String(64)),
            sa.Column('nickname', types.String(255)),
            sa.Column('roleId', types.Integer, sa.ForeignKey('bb_role.id')),
            sa.Column('status', types.String(30)))
    orm.mapper(User, bb_user_table, properties={'role':orm.relation(Role)})

    bb_role_table = sa.Table('bb_role', meta.metadata,
            sa.Column('id', types.Integer, primary_key=True),
            sa.Column('name', types.String(60)))
    orm.mapper(Role, bb_role_table)

    bb_resource_table = sa.Table('bb_resource', meta.metadata,
            sa.Column('id', types.Integer, primary_key=True),
            sa.Column('name', types.String(90)))
    orm.mapper(Resource, bb_resource_table)

    meta.Session.configure(bind=engine)
Esempio n. 25
0
 def changes_properties(item_class, change_tbl):
     """Return the mapper properties shared by all change-tracking tables."""
     user_join = change_tbl.c.user_id == schema.users.c.user_id
     return {
         "user": relation(domain.User, primaryjoin=user_join,
                          uselist=False, lazy=True),
         "head": relation(item_class, uselist=False),
     }
Esempio n. 26
0
 def test_error(self):
     """Conflicting backrefs on both sides of the same secondary table must
     raise ArgumentError when the mappers are compiled."""
     mapper(Place, place, properties={
         'transitions':relation(Transition, secondary=place_input, backref='places')
     })
     mapper(Transition, transition, properties={
         'places':relation(Place, secondary=place_input, backref='transitions')
     })
     assert_raises_message(sa.exc.ArgumentError, "Error creating backref",
                              sa.orm.compile_mappers)
Esempio n. 27
0
def init_model(bind):
  """Call me before using any of the tables or classes in the model"""
  # Fixed: the docstring above must be the function's first statement
  # (PEP 257); previously it followed the global statement, so it was a
  # no-op string expression and init_model.__doc__ was None.
  global engine, Session
  engine = bind
  Session = orm.scoped_session(
    orm.sessionmaker(autocommit=False, autoflush=True, bind=bind))
  # Map the Yubikey AES-key, key and API tables and their relations.
  orm.mapper(id_AES, aes_table, properties={'yubikey': orm.relation(Key)} )
  orm.mapper(Key, key_table, properties={'yubi_aes': orm.relation(id_AES), 'api': orm.relation(API)} )
  orm.mapper(API, api_table)
Esempio n. 28
0
    def testtwo(self):
        """The original testcase that includes various complicating factors"""

        mapper(Phone, phone_numbers)

        mapper(Address, addresses, properties={
            'phones': relation(Phone, lazy=False, backref='address',
                               order_by=phone_numbers.c.phone_id)})

        mapper(Company, companies, properties={
            'addresses': relation(Address, lazy=False, backref='company',
                                  order_by=addresses.c.address_id)})

        mapper(Item, items)

        mapper(Invoice, invoices, properties={
            'items': relation(Item, lazy=False, backref='invoice',
                              order_by=items.c.item_id),
            'company': relation(Company, lazy=False, backref='invoices')})

        c1 = Company(company_name='company 1', addresses=[
            Address(address='a1 address',
                    phones=[Phone(type='home', number='1111'),
                            Phone(type='work', number='22222')]),
            Address(address='a2 address',
                    phones=[Phone(type='home', number='3333'),
                            Phone(type='work', number='44444')])
            ])

        session = create_session()
        session.add(c1)
        session.flush()

        company_id = c1.company_id

        # Clear the identity map so the next get() reloads from the DB.
        session.expunge_all()

        a = session.query(Company).get(company_id)

        # set up an invoice
        i1 = Invoice(date=datetime.datetime.now(), company=a)

        item1 = Item(code='aaaa', qty=1, invoice=i1)
        item2 = Item(code='bbbb', qty=2, invoice=i1)
        item3 = Item(code='cccc', qty=3, invoice=i1)

        session.flush()
        invoice_id = i1.invoice_id

        session.expunge_all()
        c = session.query(Company).get(company_id)

        session.expunge_all()
        i = session.query(Invoice).get(invoice_id)

        eq_(c, i.company)
Esempio n. 29
0
def orm_load_floatdelta(man):
    """Define the `floatdelta` table on *man* and map FloatDelta to it.

    FloatDelta joins the BaseData inheritance hierarchy
    (polymorphic_identity='floatdelta') and records a float delta between
    two Channel rows.  Both channel columns reference the same table, so
    each relation needs an explicit primaryjoin.
    """
    man.floatdelta_table = Table('floatdelta', man.metadata,
                            Column('basedata_id', Integer, ForeignKey('basedata.basedata_id'), primary_key=True),
                            Column('channel_1_id', Integer, ForeignKey('channel.id')),
                            Column('channel_2_id', Integer, ForeignKey('channel.id')),
                            Column('delta', Float))
    #man.metadata.create_all()
    mapper(FloatDelta, man.floatdelta_table, inherits=BaseData, polymorphic_identity='floatdelta',
           properties={'channel_1': relation(Channel, primaryjoin=man.floatdelta_table.c.channel_1_id==man.channel_table.c.id),
                       'channel_2': relation(Channel, primaryjoin=man.floatdelta_table.c.channel_2_id==man.channel_table.c.id)})
Esempio n. 30
0
 def setup_mappers(cls):
     """Set up classical mappers for the User/Address/Order/Dingaling
     fixtures; addresses and orders are delete-orphan cascaded from User.
     """
     mapper(Address, addresses)
     mapper(User, users, properties = dict(
         addresses = relation(Address, cascade="all, delete-orphan", backref="user"),
         orders = relation(
             mapper(Order, orders), cascade="all, delete-orphan")
     ))
     mapper(Dingaling,dingalings, properties={
         'address':relation(Address)
     })
Esempio n. 31
0
                       Column("create_time", DateTime, default=now),
                       Column("update_time", DateTime, default=now, onupdate=now),
                       Column("name", TrimmedString(255), index=True, unique=True),
                       Column("description", TEXT),
                       Column("deleted", Boolean, index=True, default=False))

# Self-referential tag hierarchy: parent_id points back to tag.id and
# tag names are globally unique.
Tag.table = Table("tag", metadata,
                  Column("id", Integer, primary_key=True),
                  Column("type", Integer),
                  Column("parent_id", Integer, ForeignKey("tag.id")),
                  Column("name", TrimmedString(255)),
                  UniqueConstraint("name"))

# With the tables defined we can define the mappers and setup the
# relationships between the model objects.
# A user's active_repositories exclude deleted repositories; sessions and
# API keys are ordered newest-first.
mapper(User, User.table,
       properties=dict(active_repositories=relation(Repository, primaryjoin=((Repository.table.c.user_id == User.table.c.id) & (not_(Repository.table.c.deleted))), order_by=(Repository.table.c.name)),
                       galaxy_sessions=relation(GalaxySession, order_by=desc(GalaxySession.table.c.update_time)),
                       api_keys=relation(APIKeys, backref="user", order_by=desc(APIKeys.table.c.create_time))))

mapper(PasswordResetToken, PasswordResetToken.table,
       properties=dict(user=relation(User, backref="reset_tokens")))

mapper(APIKeys, APIKeys.table, properties={})

mapper(Group, Group.table,
       properties=dict(users=relation(UserGroupAssociation)))

mapper(Role, Role.table,
       properties=dict(
           repositories=relation(RepositoryRoleAssociation,
                                 primaryjoin=((Role.table.c.id == RepositoryRoleAssociation.table.c.role_id) & (RepositoryRoleAssociation.table.c.repository_id == Repository.table.c.id))),
Esempio n. 32
0
def define_harvester_tables():
    """Define the harvest extension's tables on the shared metadata,
    assign them to the module-level globals below, and set up the
    classical mappers plus the before_insert listener.

    Tables must be defined before the mappers that reference them.
    """

    global harvest_source_table
    global harvest_job_table
    global harvest_object_table
    global harvest_object_extra_table
    global harvest_gather_error_table
    global harvest_object_error_table
    global harvest_log_table

    harvest_source_table = Table(
        'harvest_source',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('url', types.UnicodeText, nullable=False),
        Column('title', types.UnicodeText, default=u''),
        Column('description', types.UnicodeText, default=u''),
        Column('config', types.UnicodeText, default=u''),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
        Column('type', types.UnicodeText, nullable=False),
        Column('active', types.Boolean, default=True),
        Column('user_id', types.UnicodeText, default=u''),
        Column('publisher_id', types.UnicodeText, default=u''),
        Column('frequency', types.UnicodeText, default=u'MANUAL'),
        Column('next_run', types.DateTime),
    )
    # Was harvesting_job
    harvest_job_table = Table(
        'harvest_job',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
        Column('gather_started', types.DateTime),
        Column('gather_finished', types.DateTime),
        Column('finished', types.DateTime),
        Column('source_id', types.UnicodeText,
               ForeignKey('harvest_source.id')),
        # status: New, Running, Finished
        Column('status', types.UnicodeText, default=u'New', nullable=False),
    )
    # A harvest_object contains a representation of one dataset during a
    # particular harvest
    harvest_object_table = Table(
        'harvest_object',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        # The guid is the 'identity' of the dataset, according to the source.
        # So if you reharvest it, then the harvester knows which dataset to
        # update because of this identity. The identity needs to be unique
        # within this CKAN.
        Column('guid', types.UnicodeText, default=u''),
        # When you harvest a dataset multiple times, only the latest
        # successfully imported harvest_object should be flagged 'current'.
        # The import_stage usually reads and writes it.
        Column('current', types.Boolean, default=False),
        Column('gathered', types.DateTime, default=datetime.datetime.utcnow),
        Column('fetch_started', types.DateTime),
        Column('content', types.UnicodeText, nullable=True),
        Column('fetch_finished', types.DateTime),
        Column('import_started', types.DateTime),
        Column('import_finished', types.DateTime),
        # state: WAITING, FETCH, IMPORT, COMPLETE, ERROR
        Column('state', types.UnicodeText, default=u'WAITING'),
        Column('metadata_modified_date', types.DateTime),
        Column('retry_times', types.Integer, default=0),
        Column('harvest_job_id', types.UnicodeText,
               ForeignKey('harvest_job.id')),
        Column('harvest_source_id', types.UnicodeText,
               ForeignKey('harvest_source.id')),
        Column('package_id',
               types.UnicodeText,
               ForeignKey('package.id', deferrable=True),
               nullable=True),
        # report_status: 'added', 'updated', 'not modified', 'deleted', 'errored'
        Column('report_status', types.UnicodeText, nullable=True),
        Index('harvest_job_id_idx', 'harvest_job_id'),
    )

    # New table
    harvest_object_extra_table = Table(
        'harvest_object_extra',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('harvest_object_id', types.UnicodeText,
               ForeignKey('harvest_object.id')),
        Column('key', types.UnicodeText),
        Column('value', types.UnicodeText),
    )

    # New table
    harvest_gather_error_table = Table(
        'harvest_gather_error',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('harvest_job_id', types.UnicodeText,
               ForeignKey('harvest_job.id')),
        Column('message', types.UnicodeText),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
    )
    # New table
    harvest_object_error_table = Table(
        'harvest_object_error',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('harvest_object_id', types.UnicodeText,
               ForeignKey('harvest_object.id')),
        Column('message', types.UnicodeText),
        Column('stage', types.UnicodeText),
        Column('line', types.Integer),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
    )
    # Harvest Log table
    harvest_log_table = Table(
        'harvest_log',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('content', types.UnicodeText, nullable=False),
        Column(
            'level',
            types.Enum('DEBUG',
                       'INFO',
                       'WARNING',
                       'ERROR',
                       'CRITICAL',
                       name='log_level')),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
    )

    # Mappers: sources own their jobs; objects link back to package, job
    # and source; errors/extras cascade delete-orphan from their object.
    mapper(
        HarvestSource,
        harvest_source_table,
        properties={
            'jobs':
            relation(
                HarvestJob,
                lazy=True,
                backref=u'source',
                order_by=harvest_job_table.c.created,
            ),
        },
    )

    mapper(
        HarvestJob,
        harvest_job_table,
    )

    mapper(
        HarvestObject,
        harvest_object_table,
        properties={
            'package': relation(
                Package,
                lazy=True,
                backref='harvest_objects',
            ),
            'job': relation(
                HarvestJob,
                lazy=True,
                backref=u'objects',
            ),
            'source': relation(
                HarvestSource,
                lazy=True,
                backref=u'objects',
            ),
        },
    )

    mapper(
        HarvestGatherError,
        harvest_gather_error_table,
        properties={
            'job': relation(HarvestJob, backref='gather_errors'),
        },
    )

    mapper(
        HarvestObjectError,
        harvest_object_error_table,
        properties={
            'object':
            relation(HarvestObject,
                     backref=backref('errors', cascade='all,delete-orphan')),
        },
    )

    mapper(
        HarvestObjectExtra,
        harvest_object_extra_table,
        properties={
            'object':
            relation(HarvestObject,
                     backref=backref('extras', cascade='all,delete-orphan')),
        },
    )

    mapper(
        HarvestLog,
        harvest_log_table,
    )

    event.listen(HarvestObject, 'before_insert',
                 harvest_object_before_insert_listener)
Esempio n. 33
0
class TVDBSeries(TVDBContainer, Base):
    """ORM model of a thetvdb.com series record, with helpers to refresh
    the record from the XML API and to cache its poster image locally."""

    __tablename__ = "tvdb_series"

    # `id` is the tvdb series id, assigned by the API (not autoincremented).
    id = Column(Integer, primary_key=True, autoincrement=False)
    lastupdated = Column(Integer)
    expired = Column(Boolean)
    seriesname = Column(Unicode)
    language = Column(Unicode)
    rating = Column(Float)
    status = Column(Unicode)
    runtime = Column(Integer)
    airs_time = Column(Unicode)
    airs_dayofweek = Column(Unicode)
    contentrating = Column(Unicode)
    network = Column(Unicode)
    overview = Column(Unicode)
    imdb_id = Column(String)
    zap2it_id = Column(String)
    banner = Column(String)
    fanart = Column(String)
    poster = Column(String)
    poster_file = Column(Unicode)
    # genre is stored pipe-delimited; exposed as a list via the synonym.
    _genre = Column('genre', Unicode)
    genre = pipe_list_synonym('_genre')
    _firstaired = Column('firstaired', DateTime)
    firstaired = text_date_synonym('_firstaired')

    episodes = relation('TVDBEpisode',
                        backref='series',
                        cascade='all, delete, delete-orphan')

    def update(self, tvdb_id=None):
        """Refresh this record from thetvdb for ``tvdb_id`` (defaults to
        this row's id).

        Raises LookupError if the HTTP request fails or the response
        contains no <Series> element.
        """
        tvdb_id = tvdb_id or self.id
        url = get_mirror() + api_key + '/series/%s/%s.xml' % (tvdb_id,
                                                              language)
        try:
            data = requests.get(url).content
        except RequestException as e:
            raise LookupError('Request failed %s' % url)
        result = ElementTree.fromstring(data).find('Series')
        if result is not None:
            self.update_from_xml(result)
        else:
            raise LookupError('Could not retrieve information from thetvdb')

    def get_poster(self, only_cached=False):
        """Downloads this poster to a local cache and returns the path"""
        from flexget.manager import manager
        base_dir = os.path.join(manager.config_base, 'userstatic')
        if os.path.isfile(os.path.join(base_dir, self.poster_file or '')):
            return self.poster_file
        elif only_cached:
            return
        # If we don't already have a local copy, download one.
        url = get_mirror('banner') + self.poster
        log.debug('Downloading poster %s' % url)
        dirname = os.path.join('tvdb', 'posters')
        # Create folders if they don't exist
        fullpath = os.path.join(base_dir, dirname)
        if not os.path.isdir(fullpath):
            os.makedirs(fullpath)
        filename = os.path.join(dirname, posixpath.basename(self.poster))
        # Bug fix: the previous code used the Python-2-only `file()` builtin
        # and never closed the handle; a context manager guarantees the file
        # is closed even if the write fails.
        with open(os.path.join(base_dir, filename), 'wb') as poster_fh:
            poster_fh.write(requests.get(url).content)
        self.poster_file = filename
        # If we are detached from a session, update the db
        if not Session.object_session(self):
            with Session() as session:
                session.query(TVDBSeries).filter(
                    TVDBSeries.id == self.id).update(
                        values={'poster_file': filename})
        return filename

    def __repr__(self):
        return '<TVDBSeries name=%s,tvdb_id=%s>' % (self.seriesname, self.id)
Esempio n. 34
0
__author__ = 'Jonny Lamb'
__copyright__ = 'Copyright © 2008 Jonny Lamb'
__license__ = 'MIT'

import sqlalchemy as sa
from sqlalchemy import orm

from debexpo.model import meta, OrmObject
from debexpo.model.package_versions import PackageVersion

# One row per built binary package, tied to the source package version it
# was built from; `arch` records the architecture string.
t_binary_packages = sa.Table(
    'binary_packages',
    meta.metadata,
    sa.Column('id', sa.types.Integer, primary_key=True),
    sa.Column('package_version_id', sa.types.Integer,
              sa.ForeignKey('package_versions.id')),
    sa.Column('arch', sa.types.String(200), nullable=False),
)


class BinaryPackage(OrmObject):
    """A binary package built from a PackageVersion."""
    # NOTE(review): `foreign` appears to tell OrmObject which attributes are
    # relations rather than plain columns — confirm against debexpo.model.
    foreign = ['package_version']


# Map BinaryPackage; PackageVersion gains a `binary_packages` backref.
orm.mapper(BinaryPackage,
           t_binary_packages,
           properties={
               'package_version':
               orm.relation(PackageVersion, backref='binary_packages'),
           })
Esempio n. 35
0
__all__ = ['GroupExtra', 'group_extra_table']

# Key/value extras attached to a group; each row carries CKAN's stateful
# `state` column (active by default).
group_extra_table = Table('group_extra', meta.metadata,
    Column('id', types.UnicodeText, primary_key=True, default=_types.make_uuid),
    Column('group_id', types.UnicodeText, ForeignKey('group.id')),
    Column('key', types.UnicodeText),
    Column('value', types.UnicodeText),
    Column('state', types.UnicodeText, default=core.State.ACTIVE),
)


class GroupExtra(core.StatefulObjectMixin,
                 domain_object.DomainObject):
    """A single key/value extra belonging to a Group."""
    pass

# Map GroupExtra; `Group._extras` becomes a dict keyed by the extra's `key`,
# and extras are deleted together with their group (delete-orphan cascade).
meta.mapper(GroupExtra, group_extra_table, properties={
    'group': orm.relation(group.Group,
        backref=orm.backref('_extras',
            collection_class=orm.collections.attribute_mapped_collection(u'key'),
            cascade='all, delete, delete-orphan',
            ),
        )
    }
)

def _create_extra(key, value):
    """Creator for the `extras` association proxy: coerce the key to a
    plain string and wrap the pair in a GroupExtra row."""
    extra = GroupExtra(key=str(key), value=value)
    return extra


# Expose `_extras` (a dict of GroupExtra rows keyed by `key`) as a plain
# key -> value mapping on Group.
group.Group.extras = association_proxy(
    '_extras', 'value', creator=_create_extra)
Esempio n. 36
0
    pass


# Straight table mappings; the commented-out `member` relations are kept
# as-is for reference.
orm.mapper(SoftwareVersion, software_version_table)
orm.mapper(SoftwareFollower, software_follower_table)
orm.mapper(SoftwareReviewDigger, software_review_digger_table
           #, properties={
           #    'member': orm.relation(Member),
           #}
           )

# Reviews expose their diggers as a relation.
orm.mapper(
    SoftwareReview,
    software_review_table,
    properties={
        'diggers': orm.relation(SoftwareReviewDigger),
        #    'member': orm.relation(Member),
    })


class Software(object):
    """Domain object for a software entry.

    NOTE(review): queries rely on a module-level `session` not visible in
    this chunk — confirm it is bound before these methods are called.
    """

    def __init__(self, name=''):
        self.name = name

    def get_android_apps(self):
        # Returns a query (not a list) for all software rows of type '1'.
        return session.query(Software).filter(Software.type == '1')

    def get_followers(self, limit=20):
        # Returns a query for this software's followers.
        # NOTE(review): `limit` is accepted but never applied to the query —
        # confirm whether `.limit(limit)` was intended before changing it.
        fs = session.query(SoftwareFollower).filter(
            SoftwareFollower.software_id == self.id)
        return fs
Esempio n. 37
0
class TableApp(Base):
    """ORM model of an app-store application record, populated from a
    scraped/API dict in ``__init__``."""

    __tablename__ = 'app'
    id = Column(Integer, primary_key=True)
    app_id = Column(String(255), unique=True)
    title = Column(String(255))
    creator = Column(String(255))
    creator_id = Column(String(255))
    package_name = Column(String(255), unique=True)
    version = Column(String(255))
    version_code = Column(Integer)
    rating = Column(String(255))
    promo_text = Column(Text)
    ratings_count = Column(Integer)
    install_size = Column(Integer)
    downloads_count_text = Column(String(255))
    permission_id_list = Column(String(255))
    permission_num = Column(Integer)
    category_name = Column(String(255))
    contact_email = Column(String(255))
    contact_website = Column(String(255))
    recent_changes = Column(String(255))
    description = Column(Text)
    price_currency = Column(String(255))
    created_on = Column(DateTime)
    updated_on = Column(DateTime)

    category_id = Column(Integer, ForeignKey('category.id'))
    category = relation('TableCategory', backref='category')

    # Many-to-many to APKs through the app_apk association table.
    apks = relation('TableApk',
                    order_by='TableApk.id',
                    uselist=True,
                    backref='app',
                    secondary=app_apk)

    def __init__(self, obj):
        """Populate the row from a dict of scraped fields.

        Raises KeyError if any expected key is missing from *obj*.
        """
        self.app_id = obj['appId']
        self.title = obj['title']
        self.creator = obj['creator']
        self.creator_id = obj['creatorId']
        self.package_name = obj['packageName']
        self.version = obj['version']
        self.version_code = obj['versionCode']
        self.rating = obj['rating']
        self.promo_text = obj['promoText']
        self.ratings_count = obj['ratingsCount']
        self.install_size = obj['installSize']
        self.downloads_count_text = obj['downloadsCountText']
        self.permission_id_list = obj['permissionIdList']
        self.permission_num = obj['permissionNum']
        self.category_name = obj['category']
        self.contact_email = obj['contactEmail']
        self.contact_website = obj['contactWebsite']
        self.recent_changes = obj['recentChanges']
        self.description = obj['description']
        self.price_currency = obj['priceCurrency']

        # Both timestamps start equal; updated_on is refreshed elsewhere.
        self.created_on = datetime.utcnow()
        self.updated_on = datetime.utcnow()

    def __repr__(self):
        return '<TableApp: %r>' % self.app_id
Esempio n. 38
0
    def _test_onetomany(self, passive_updates):
        """Exercise primary-key mutation propagating to a one-to-many
        relation, with and without passive_updates.

        NOTE(review): several string literals in this test appear scrubbed
        to '******' in this copy, so some assertions look inconsistent with
        the values assigned — compare against the upstream SQLAlchemy test
        before relying on exact values.
        """
        mapper(User,
               users,
               properties={
                   'addresses': relation(Address,
                                         passive_updates=passive_updates)
               })
        mapper(Address, addresses)

        sess = create_session()
        u1 = User(username='******', fullname='jack')
        u1.addresses.append(Address(email='jack1'))
        u1.addresses.append(Address(email='jack2'))
        sess.add(u1)
        sess.flush()
        a1 = u1.addresses[0]

        eq_(
            sa.select([addresses.c.username]).execute().fetchall(),
            [('jack', ), ('jack', )])

        assert sess.query(Address).get(a1.id) is u1.addresses[0]

        # Renaming the user must propagate to the addresses' FK column.
        u1.username = '******'
        sess.flush()
        assert u1.addresses[0].username == 'ed'
        eq_(
            sa.select([addresses.c.username]).execute().fetchall(), [('ed', ),
                                                                     ('ed', )])

        sess.expunge_all()
        eq_([Address(username='******'),
             Address(username='******')],
            sess.query(Address).all())

        u1 = sess.query(User).get(u1.id)
        u1.username = '******'

        def go():
            sess.flush()

        # SQL statement count depends on whether the DB does the cascade
        # (passive_updates=True) or the ORM loads and updates each row.
        if not passive_updates:
            self.assert_sql_count(
                testing.db, go, 4
            )  # test passive_updates=False; load addresses, update user, update 2 addresses
        else:
            self.assert_sql_count(testing.db, go,
                                  1)  # test passive_updates=True; update user
        sess.expunge_all()
        assert User(username='******',
                    addresses=[
                        Address(username='******'),
                        Address(username='******')
                    ]) == sess.query(User).get(u1.id)
        sess.expunge_all()

        # Emptying the collection should null out the FK on the addresses.
        u1 = sess.query(User).get(u1.id)
        u1.addresses = []
        u1.username = '******'
        sess.flush()
        sess.expunge_all()
        a1 = sess.query(Address).get(a1.id)
        eq_(a1.username, None)

        eq_(
            sa.select([addresses.c.username]).execute().fetchall(), [(None, ),
                                                                     (None, )])

        u1 = sess.query(User).get(u1.id)
        eq_(User(username='******', fullname='jack'), u1)
Esempio n. 39
0
def define_harvester_tables():
    """Define the harvest tables (including Gemini coupled resources) on
    the shared metadata, assign them to the module-level globals below,
    and set up the classical mappers plus the before_insert listener.
    """

    global harvest_source_table
    global harvest_job_table
    global harvest_object_table
    global harvest_gather_error_table
    global harvest_object_error_table
    global harvest_coupled_resource_table

    harvest_source_table = Table(
        'harvest_source',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('url', types.UnicodeText, nullable=False),
        Column('title', types.UnicodeText, default=u''),
        Column('description', types.UnicodeText, default=u''),
        Column('config', types.UnicodeText, default=u''),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
        Column('type', types.UnicodeText, nullable=False),
        Column('active', types.Boolean, default=True),
        Column('user_id', types.UnicodeText, default=u''),
        Column('publisher_id', types.UnicodeText, default=u''),
    )
    # Was harvesting_job
    harvest_job_table = Table(
        'harvest_job',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
        Column('gather_started', types.DateTime),
        Column('gather_finished', types.DateTime),
        Column('source_id', types.UnicodeText,
               ForeignKey('harvest_source.id')),
        Column('status', types.UnicodeText, default=u'New', nullable=False),
    )
    # Was harvested_document
    harvest_object_table = Table(
        'harvest_object',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('guid', types.UnicodeText, default=u''),
        Column('current', types.Boolean, default=False),
        Column('gathered', types.DateTime, default=datetime.datetime.utcnow),
        Column('fetch_started', types.DateTime),
        Column('content', types.UnicodeText, nullable=True),
        Column('fetch_finished', types.DateTime),
        Column('metadata_modified_date', types.DateTime),
        Column('retry_times', types.Integer),
        Column('harvest_job_id', types.UnicodeText,
               ForeignKey('harvest_job.id')),
        Column('harvest_source_id', types.UnicodeText,
               ForeignKey('harvest_source.id')),
        Column(
            'harvest_source_reference', types.UnicodeText
        ),  # id according to the Harvest Source, for Gemini Coupled Resources
        Column('package_id',
               types.UnicodeText,
               ForeignKey('package.id'),
               nullable=True),
    )
    # New table
    harvest_gather_error_table = Table(
        'harvest_gather_error',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('harvest_job_id', types.UnicodeText,
               ForeignKey('harvest_job.id')),
        Column('message', types.UnicodeText),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
    )
    # New table
    harvest_object_error_table = Table(
        'harvest_object_error',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('harvest_object_id', types.UnicodeText,
               ForeignKey('harvest_object.id')),
        Column('message', types.UnicodeText),
        Column('stage', types.UnicodeText),
        Column('created', types.DateTime, default=datetime.datetime.utcnow),
    )
    # Links a service record package to a dataset record package via the
    # source's own reference (Gemini coupled resources).
    harvest_coupled_resource_table = Table(
        'harvest_coupled_resource',
        metadata,
        Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
        Column('service_record_package_id',
               types.UnicodeText,
               ForeignKey('package.id'),
               nullable=True),
        Column('harvest_source_reference', types.UnicodeText, nullable=False),
        Column('dataset_record_package_id',
               types.UnicodeText,
               ForeignKey('package.id'),
               nullable=True),
    )

    mapper(
        HarvestSource,
        harvest_source_table,
        properties={
            'jobs':
            relation(
                HarvestJob,
                lazy=True,
                backref=u'source',
                order_by=harvest_job_table.c.created,
            ),
        },
    )

    mapper(
        HarvestJob,
        harvest_job_table,
    )

    mapper(
        HarvestObject,
        harvest_object_table,
        properties={
            'package': relation(
                Package,
                lazy=True,
                backref='harvest_objects',
            ),
            'job': relation(
                HarvestJob,
                lazy=True,
                backref=u'objects',
            ),
            'source': relation(
                HarvestSource,
                lazy=True,
                backref=u'objects',
            ),
        },
    )

    mapper(
        HarvestGatherError,
        harvest_gather_error_table,
        properties={
            'job': relation(HarvestJob, backref='gather_errors'),
        },
    )

    mapper(
        HarvestObjectError,
        harvest_object_error_table,
        properties={
            'object': relation(HarvestObject, backref='errors'),
        },
    )

    # Both relations target Package, so each needs an explicit primaryjoin
    # on its own FK column.
    mapper(
        HarvestCoupledResource,
        harvest_coupled_resource_table,
        properties={
            'service_record':
            relation(
                Package,
                primaryjoin=harvest_coupled_resource_table.c.
                service_record_package_id == Package.id,
                lazy=True,
                backref='coupled_dataset',
            ),
            'dataset_record':
            relation(
                Package,
                primaryjoin=harvest_coupled_resource_table.c.
                dataset_record_package_id == Package.id,
                lazy=True,
                backref='coupled_service',
            ),
        },
    )

    event.listen(HarvestObject, 'before_insert',
                 harvest_object_before_insert_listener)
Esempio n. 40
0
            self.data = {}
        else:
            self.data = data

    @classmethod
    def by_activity_id(cls: Type[TActivityDetail],
                       activity_id: str) -> list["TActivityDetail"]:
        """Return all detail rows recorded for the given activity id."""
        return ckan.model.Session.query(cls) \
            .filter_by(activity_id=activity_id).all()


# Map ActivityDetail; each Activity gains an `activity_detail` backref
# listing its detail rows.
meta.mapper(ActivityDetail,
            activity_detail_table,
            properties={
                'activity':
                orm.relation(Activity, backref=orm.backref('activity_detail'))
            })


def _activities_limit(q: QActivity,
                      limit: int,
                      offset: Optional[int] = None) -> QActivity:
    '''
    Return an SQLAlchemy query for all activities at an offset with a limit.
    '''
    import ckan.model as model
    # type_ignore_reason: incomplete SQLAlchemy types
    q = q.order_by(desc(model.Activity.timestamp))  # type: ignore
    if offset:
        q = q.offset(offset)
    if limit:
Esempio n. 41
0
class TVDBSeries(Base):
    """ORM model of a thetvdb.com series fetched from the JSON API.

    Core fields are populated eagerly in ``__init__``; actors and posters
    are fetched lazily on first access and cached in their columns.
    """

    __tablename__ = "tvdb_series"

    # tvdb's own series id, assigned by the API (not autoincremented).
    id = Column(Integer, primary_key=True, autoincrement=False)
    last_updated = Column(Integer)
    expired = Column(Boolean)
    name = Column(Unicode)
    language = Column(Unicode)
    rating = Column(Float)
    status = Column(Unicode)
    runtime = Column(Integer)
    airs_time = Column(Unicode)
    airs_dayofweek = Column(Unicode)
    content_rating = Column(Unicode)
    network = Column(Unicode)
    overview = Column(Unicode)
    imdb_id = Column(Unicode)
    zap2it_id = Column(Unicode)
    _banner = Column('banner', Unicode)

    _first_aired = Column('first_aired', DateTime)
    first_aired = text_date_synonym('_first_aired')
    # aliases/actors/posters are stored as JSON strings behind synonyms.
    _aliases = Column('aliases', Unicode)
    aliases = json_synonym('_aliases')
    _actors = Column('actors', Unicode)
    actors_list = json_synonym('_actors')
    _posters = Column('posters', Unicode)
    posters_list = json_synonym('_posters')

    _genres = relation('TVDBGenre', secondary=genres_table)
    genres = association_proxy('_genres', 'name')

    episodes = relation('TVDBEpisode',
                        backref='series',
                        cascade='all, delete, delete-orphan')

    def __init__(self, tvdb_id):
        """
        Looks up movie on tvdb and creates a new database model for it.
        These instances should only be added to a session via `session.merge`.
        """
        self.id = tvdb_id

        try:
            series = TVDBRequest().get('series/%s' % self.id)
        except requests.RequestException as e:
            raise LookupError('Error updating data from tvdb: %s' % e)

        self.language = 'en'
        self.last_updated = series['lastUpdated']
        self.name = series['seriesName']
        self.rating = float(
            series['siteRating']) if series['siteRating'] else 0.0
        self.status = series['status']
        self.runtime = int(series['runtime']) if series['runtime'] else 0
        self.airs_time = series['airsTime']
        self.airs_dayofweek = series['airsDayOfWeek']
        self.content_rating = series['rating']
        self.network = series['network']
        self.overview = series['overview']
        self.imdb_id = series['imdbId']
        self.zap2it_id = series['zap2itId']
        self.first_aired = series['firstAired']
        self.expired = False
        self.aliases = series['aliases']
        self._banner = series['banner']
        self._genres = [TVDBGenre(
            id=name) for name in series['genre']] if series['genre'] else []

        # Actors and Posters are lazy populated
        self._actors = None
        self._posters = None

    def __repr__(self):
        return '<TVDBSeries name=%s,tvdb_id=%s>' % (self.name, self.id)

    @property
    def banner(self):
        # Full banner URL, or None when the series has no banner.
        if self._banner:
            return TVDBRequest.BANNER_URL + self._banner

    @property
    def actors(self):
        return self.get_actors()

    @property
    def posters(self):
        return self.get_posters()

    def get_actors(self):
        """Return the actor name list, fetching and caching it on first use.

        A 404 from tvdb is treated as "no actors"; other request errors
        raise LookupError.
        """
        if not self._actors:
            log.debug('Looking up actors for series %s' % self.name)
            try:
                actors_query = TVDBRequest().get('series/%s/actors' % self.id)
                self.actors_list = [a['name'] for a in actors_query
                                    ] if actors_query else []
            except requests.RequestException as e:
                if None is not e.response and e.response.status_code == 404:
                    self.actors_list = []
                else:
                    raise LookupError('Error updating actors from tvdb: %s' %
                                      e)

        return self.actors_list

    def get_posters(self):
        """Return full poster URLs (top 5), fetching and caching file names
        on first use.  A 404 means "no posters"; other errors raise
        LookupError.
        """
        if not self._posters:
            log.debug('Getting top 5 posters for series %s' % self.name)
            try:
                poster_query = TVDBRequest().get('series/%s/images/query' %
                                                 self.id,
                                                 keyType='poster')
                self.posters_list = [p['fileName'] for p in poster_query[:5]
                                     ] if poster_query else []
            except requests.RequestException as e:
                if None is not e.response and e.response.status_code == 404:
                    self.posters_list = []
                else:
                    raise LookupError('Error updating posters from tvdb: %s' %
                                      e)

        return [TVDBRequest.BANNER_URL + p for p in self.posters_list]

    def to_dict(self):
        """Serialize the series to a plain dict for API/JSON output."""
        return {
            'tvdb_id':
            self.id,
            'last_updated':
            datetime.fromtimestamp(
                self.last_updated).strftime('%Y-%m-%d %H:%M:%S'),
            'expired':
            self.expired,
            'series_name':
            self.name,
            'language':
            self.language,
            'rating':
            self.rating,
            'status':
            self.status,
            'runtime':
            self.runtime,
            'airs_time':
            self.airs_time,
            'airs_dayofweek':
            self.airs_dayofweek,
            'content_rating':
            self.content_rating,
            'network':
            self.network,
            'overview':
            self.overview,
            'imdb_id':
            self.imdb_id,
            'zap2it_id':
            self.zap2it_id,
            'banner':
            self.banner,
            'posters':
            self.posters,
            'genres': [g for g in self.genres],
            'actors':
            self.actors,
            'first_aired':
            self.first_aired,
        }
Esempio n. 42
0
class BlurayMovie(Base):
    """Movie record scraped from blu-ray.com.

    Instantiating performs the remote quicksearch plus a detail-page
    scrape and fills in all mapped columns. Raises LookupError when the
    search request fails or no result matches the given title (and year).
    """

    __tablename__ = 'bluray_movies'

    id = Column(Integer, primary_key=True, autoincrement=False, nullable=False)
    name = Column(Unicode)
    url = Column(Unicode)
    release_date = Column(Date)
    year = year_property('release_date')
    runtime = Column(Integer)
    overview = Column(Unicode)
    country = Column(Unicode)
    studio = Column(Unicode)
    rating = Column(Float)
    bluray_rating = Column(Integer)
    certification = Column(Unicode)
    _genres = relation('BlurayGenre', secondary=genres_table, backref='movies')
    genres = association_proxy('_genres', 'name')
    updated = Column(DateTime, default=datetime.now, nullable=False)

    def __init__(self, title, year):
        """Look up *title* (optionally restricted to *year*) on blu-ray.com."""
        if year:
            title_year = '{} ({})'.format(title, year)
        else:
            title_year = title

        params = {
            'section': 'bluraymovies',
            'country': 'ALL',
            'keyword': title,
            '_': str(int(time.time() * 1000)),  # cache-busting timestamp
        }

        country_params = {'_': params['_']}
        try:
            response = bluray_request('quicksearch/search.php', **params)

            if not response or 'items' not in response:
                raise LookupError(
                    'No search results found for {} on blu-ray.com'.format(title_year)
                )

            search_results = response['items']
            countries = bluray_request('countries.json.php', **country_params) or {}

            # earliest release first, so the first matching result below wins
            search_results = sorted(
                search_results, key=lambda k: extract_release_date(k)
            )
        except requests.RequestException as e:
            raise LookupError('Error searching for {} on blu-ray.com: {}'.format(title_year, e))

        # Simply take the first result unless year does not match
        for result in search_results:
            if year and str(year) != result['year']:
                continue

            self.id = int(result['url'].split('/')[-2])
            self.name = result['title']

            flag = result['flag']
            country_code = (
                flag.split('/')[-1].split('.')[0].lower()
            )  # eg. http://some/url/UK.png -> uk
            # find country based on flag url, default United States
            country = 'United States'
            # .get() guards against a failed/empty countries lookup, which
            # previously raised KeyError here (countries defaults to {})
            for c in countries.get('countries', []):
                if c['c'].lower() == country_code:
                    country = c['n']
            self.country = country
            self.release_date = extract_release_date(result)
            self.bluray_rating = int(result['rating']) if result['rating'] else None

            # Used for parsing some more data, sadly with soup
            self.url = result['url']

            # NOTE(review): no timeout set; consider requests.get(self.url, timeout=...)
            movie_info_response = requests.get(self.url).content

            movie_info = get_soup(movie_info_response)

            # runtime and rating, should be the last span tag with class subheading
            bluray_info = movie_info.find('div', attrs={'class': 'bluray'})
            bluray_info = bluray_info.find_all('span', attrs={'class': 'subheading'})[
                -1
            ].text.split('|')

            self.studio = bluray_info[0].strip()

            for info in bluray_info[1:]:
                if 'min' in info:
                    self.runtime = int(info.replace('min', '').strip())
                elif 'Rated' in info:
                    self.certification = info.replace('Rated', '').strip()

            # rating
            rating_tag = movie_info.find('div', id='ratingscore')
            self.rating = float(rating_tag.text.strip()) if rating_tag else None

            # Third onecol_content contains some information we want
            onecol_content = movie_info.find_all('div', attrs={'class': 'onecol_content'})[2]

            # overview, genres etc
            contents = onecol_content.find('div').find('div')

            overview_tag = contents.find('p')
            self.overview = overview_tag.text.strip() if overview_tag else None

            # genres; local name chosen so it no longer shadows the
            # module-level 'genres_table' association table used above
            genres_tag = contents.find('table')
            if not genres_tag:
                break

            genre_rows = genres_tag.find_all('tr')
            if not genre_rows:
                break

            genre_names = set()
            for row in genre_rows:
                genre_names.add(row.find('td').text.strip())
            self._genres = [BlurayGenre(name=genre) for genre in genre_names]
            break
        else:
            # loop exhausted without a break: nothing matched the year filter
            raise LookupError('No search results found for {} on blu-ray.com'.format(title_year))
# Esempio n. 43
# 0
class GentooInstalledPackagePropertiesRel(object):
    """Plain record linking an installed package to its per-version
    properties (keyword status, mask state, world membership, repo)."""

    def __init__(self, installed_package_id, version_id, keyword_status,
                 masked, unmasked, world, repo_id):
        # Store every constructor argument verbatim as an instance attribute.
        self.__dict__.update(
            installed_package_id=installed_package_id,
            version_id=version_id,
            keyword_status=keyword_status,
            masked=masked,
            unmasked=unmasked,
            world=world,
            repo_id=repo_id,
        )


# Classic (non-declarative) mapping: attach the properties-record class to its
# table and expose the owning installed-package row via the 'install' relation.
mapper(GentooInstalledPackagePropertiesRel,
       _gentoo_installed_package_props_table,
       properties={
           'install': relation(GentooInstalledPackagesRel),
       })

_gentoo_installed_package_use_flags_table = Table(
    'gentoo_installed_package_use_flags',
    metadata,
    Column('id', Integer, primary_key=True, autoincrement=True),
    Column('installed_package_id',
           Integer,
           ForeignKey('%s.id' % 'gentoo_installed_packages'),
           nullable=False),
    Column('use_flag_id',
           Integer,
           ForeignKey('%s.id' % 'gentoo_use_flag_pool'),
           nullable=False),
    Column('enabled', SmallInteger,
# Esempio n. 44
# 0
class Answer(db.Model, SerializerMixin):
    """One answer row belonging to a question (table 'answers')."""
    __tablename__ = 'answers'
    id = Column(Integer, primary_key=True, autoincrement=True)  # surrogate key
    text = Column(String)  # answer text
    qid = Column(Integer, ForeignKey("questions.id"))  # owning question id
    question = orm.relation("Question")  # link to the questions table
# Esempio n. 45
# 0
# Module-level logger for this model module.
log = getLogger(__name__)

# One DOI row per package; rows follow the package on update/delete via the
# CASCADE foreign key, and package_id is unique (at most one DOI per package).
doi_table = Table(
    u'doi',
    meta.metadata,
    Column(u'identifier', types.UnicodeText, primary_key=True),
    Column(u'package_id',
           types.UnicodeText,
           ForeignKey(u'package.id', onupdate=u'CASCADE', ondelete=u'CASCADE'),
           nullable=False,
           unique=True),
    Column(u'published', types.DateTime, nullable=True),
    # Date DOI was published to DataCite
)


class DOI(DomainObject):
    """Domain object backing one row of the ``doi`` table."""
    pass


# Map DOI onto doi_table and link each DOI to its dataset; the backref gives
# Package a 'doi' attribute whose rows are removed with the package
# (cascade='all, delete-orphan').
meta.mapper(DOI,
            doi_table,
            properties={
                u'dataset':
                relation(Package,
                         backref=backref(u'doi',
                                         cascade=u'all, delete-orphan'),
                         primaryjoin=doi_table.c.package_id.__eq__(Package.id))
            })
# Esempio n. 46
# 0
    def __init__(self, username, password, database, label, hidden=False):
        self.database = database
        self.username = username
        self.password = password
        self.label = label
        self.hidden = hidden

        url = URL(drivername=config.DATABASE_DRIVER, username=username,
                  password=password, host=config.DATABASE_HOST,
                  port=config.DATABASE_PORT, database=database,
                  query={'charset': 'utf8', 'use_unicode': 0})
        self.engine = create_engine(url, convert_unicode=True, pool_recycle=3600)
        self.metadata = metadata = MetaData(bind=self.engine)

        class Solver(object):
            """ ORM mapping class for the Solver table. """
            pass

        class SolverConfiguration(object):
            """ Maps the SolverConfig table.

                A solver configuration pairs a solver with a concrete set
                of parameters and their values.
            """

            def __str__(self):
                # delegate so subclasses overriding get_name() are honored
                return self.get_name()

            def get_name(self):
                """ Return the name of this solver configuration. """
                return self.name

        class Parameter(object):
            """ ORM mapping class for the Parameters table. """
            pass

        class ParameterInstance(object):
            """ Maps the SolverConfig_has_Parameters n:m association table.

                Each row gives a parameter's value within one particular
                solver configuration.
            """
            pass

        class Instance(object):
            """ Maps the Instances table.

                Provides helpers for the instance's hierarchical class
                name and for transparently (de)compressing the instance
                blob stored in the database.
            """

            def __str__(self):
                return self.get_name()

            def _parent_class_names(self):
                """ Names of the first instance class and all its ancestors,
                    innermost first. Assumes instance_classes[0] is not None. """
                pc = self.instance_classes[0]
                names = [pc.name]
                while pc.parent_class:
                    pc = pc.parent_class
                    names.append(pc.name)
                return names

            def get_name(self):
                """ Instance name prefixed with its class hierarchy path
                    (outermost class first), or the bare name if the
                    instance has no class. """
                if self.instance_classes[0] is None:
                    return self.name
                # The original code had a dead post-loop branch with two
                # identical returns; collapsed into one.
                return '/'.join(reversed(self._parent_class_names())) + "/" + self.name

            def get_class_hierarchy(self):
                """ Reversed iterator over the class hierarchy names
                    (outermost first), or the bare name if unclassified. """
                if self.instance_classes[0] is None:
                    return self.name
                return reversed(self._parent_class_names())

            def get_property_value(self, property, db):
                """ Value of the instance property with the given id, or
                    None on any failure.

                    ``property`` keeps its builtin-shadowing name for
                    keyword-call compatibility.
                """
                try:
                    for p in self.properties:
                        if p.idProperty == int(property):
                            return p.get_value()
                except Exception:  # was a bare except; keep best-effort None
                    return None

            def get_instance(self, db):
                """
                    Decompresses the instance blob if necessary and returns it as string.
                    EDACC can store compressed and uncompressed instances. To distinguish
                    between them, we prepend the ASCII characters "LZMA" to a compressed instance.
                """
                table = db.metadata.tables['Instances']
                c_instance = table.c['instance']
                c_id = table.c['idInstance']
                conn = db.session.connection()
                # first four bytes distinguish compressed ("LZMA") from raw blobs
                instance_header = conn.execute(select([func.substring(c_instance, 1, 4)],
                                                      c_id == self.idInstance).select_from(table)).first()[0]
                data_length = conn.execute(select([func.length(c_instance)],
                                                  c_id == self.idInstance).select_from(table)).first()[0]
                if data_length > 32 * 1024 * 1024:
                    return "Instance too large for processing. Please use the EDACC GUI application."
                if instance_header == 'LZMA':  # compressed instance?
                    # fetch the blob without the 4-byte LZMA prefix
                    instance_blob = conn.execute(select([func.substring(c_instance, 5)],
                                                        c_id == self.idInstance).select_from(table)).first()[0]
                    return utils.lzma_decompress(instance_blob)
                return self.instance

            def get_compressed_instance(self, db):
                """ Return (blob, is_compressed). The blob has the 4-byte
                    LZMA prefix stripped when compressed. """
                table = db.metadata.tables['Instances']
                c_instance = table.c['instance']
                c_id = table.c['idInstance']
                conn = db.session.connection()
                instance_header = conn.execute(select([func.substring(c_instance, 1, 4)],
                                                      c_id == self.idInstance).select_from(table)).first()[0]
                # (a second query fetching the blob length, whose result was
                # never used, was removed here)
                if instance_header == 'LZMA':  # compressed instance?
                    instance_blob = conn.execute(select([func.substring(c_instance, 5)],
                                                        c_id == self.idInstance).select_from(table)).first()[0]
                    return instance_blob, True
                return self.instance, False

            def set_instance(self, uncompressed_instance):
                """ Compresses the instance and sets the instance blob attribute """
                self.instance = "LZMA" + utils.lzma_compress(uncompressed_instance)


        class Experiment(object):
            """ Maps the Experiment table.

                All query helpers take the db facade as an argument; an
                experiment's jobs live in the ExperimentResults table. A
                run is "successful" when status == 1 and its resultCode
                starts with '1'.
            """

            def get_num_jobs(self, db):
                """ Total number of jobs (runs) of this experiment. """
                return db.session.query(db.ExperimentResult).filter_by(experiment=self).count()

            def _solved_instances_by_solver(self, db, instances, solver_configs):
                """ Map idSolverConfig -> set of idInstance that config
                    solved, restricted to the given instances and solver
                    configs. Shared by the three solved-set helpers below
                    (previously duplicated three times).
                """
                solver_config_ids = [sc.idSolverConfig for sc in solver_configs]
                instance_ids = [i.idInstance for i in instances]
                successful_runs = db.session.query(db.ExperimentResult.SolverConfig_idSolverConfig,
                                                   db.ExperimentResult.Instances_idInstance) \
                    .filter_by(experiment=self).filter(db.ExperimentResult.resultCode.like("1%")) \
                    .filter(db.ExperimentResult.Instances_idInstance.in_(instance_ids)) \
                    .filter(db.ExperimentResult.SolverConfig_idSolverConfig.in_(solver_config_ids)) \
                    .filter_by(status=1).all()
                solved_instances = dict((sc.idSolverConfig, set()) for sc in solver_configs)
                for run in successful_runs:
                    solved_instances[run.SolverConfig_idSolverConfig].add(run.Instances_idInstance)
                return solved_instances

            def get_solved_instance_ids_by_solver_id(self, db, instances, solver_configs):
                """ Dict idSolverConfig -> set of solved idInstance.
                    Returns [] when either argument list is empty
                    (historical quirk, kept for callers). """
                if not solver_configs or not instances: return []
                return self._solved_instances_by_solver(db, instances, solver_configs)

            def get_sota_solvers(self, db, instances, solver_configs):
                """
                    Returns the set of state-of-the-art solvers of the experiment.
                    A solver is considered SOTA if no other solver solves a strict
                    superset of its solved instances.
                """
                if not solver_configs or not instances: return []
                solved_instances = self._solved_instances_by_solver(db, instances, solver_configs)

                sota_solvers = []
                for solver in solved_instances:
                    is_sota = True
                    # plain dict iteration replaces .iterkeys() (Python 3 compatible)
                    for othersolver in solved_instances:
                        if solver == othersolver: continue
                        # strict superset check between sets -> dominated solver
                        if solved_instances[othersolver] > solved_instances[solver]:
                            is_sota = False
                            break
                    if is_sota: sota_solvers.append(solver)

                return [sc for sc in solver_configs if sc.idSolverConfig in sota_solvers]

            def unique_solver_contributions(self, db, instances, solver_configs):
                """
                    Returns a dictionary that for each solver configuration specifies the set of IDs
                    of the instances that only this solver config solved.
                """
                if not solver_configs or not instances: return {}
                solved_instances = self._solved_instances_by_solver(db, instances, solver_configs)

                sc_by_id = dict((sc.idSolverConfig, sc) for sc in solver_configs)
                unique_solver_contribs = dict()
                for solver in solved_instances:
                    solved = set(solved_instances[solver])
                    for othersolver in solved_instances:
                        if othersolver == solver: continue
                        solved = solved.difference(solved_instances[othersolver])
                    unique_solver_contribs[sc_by_id[solver]] = solved

                return unique_solver_contribs

            def get_max_num_runs(self, db):
                """ Returns the number of runs of the experiment
                    (highest run index + 1, or 0 if there are no jobs). """
                res = db.session.query(func.max(db.ExperimentResult.run)).filter_by(experiment=self).first()
                if res is None or res[0] is None: return 0
                return res[0] + 1

            def get_solved_instances(self, db):
                """ Returns the instances of the experiment that any solver
                solved in any of its runs
                """
                instance_ids = [i[0] for i in db.session.query(db.ExperimentResult.Instances_idInstance) \
                    .filter_by(experiment=self).filter(db.ExperimentResult.resultCode.like('1%')) \
                    .filter_by(status=1).distinct().all()]
                return db.session.query(db.Instance).filter(db.Instance.idInstance.in_(instance_ids)).all()

            def get_fully_solved_instances(self, db):
                """ Returns the instances of the experiment that all solvers
                solved in all of their runs
                """
                numInstances = db.session.query(db.Instance).options(joinedload_all('properties')) \
                    .filter(db.Instance.experiments.contains(self)).distinct().count()
                if numInstances == 0: return 0
                # '//' keeps the historical integer division under Python 3 too
                num_jobs_per_instance = db.session.query(db.ExperimentResult) \
                                            .filter_by(experiment=self).count() // numInstances
                instances = []
                for i in self.instances:
                    if db.session.query(db.ExperimentResult) \
                        .filter(db.ExperimentResult.resultCode.like('1%')) \
                        .filter_by(experiment=self, instance=i, status=1) \
                        .count() == num_jobs_per_instance:
                        instances.append(i)
                return instances

            def get_unsolved_instances(self, db):
                """ Instances of the experiment without a single successful run. """
                t_results = db.metadata.tables['ExperimentResults']
                s = select([t_results.c['Instances_idInstance']],
                           and_(t_results.c['Experiment_idExperiment'] == self.idExperiment,
                                t_results.c['resultCode'].like('1%'),
                                t_results.c['status'] == 1),
                           from_obj=t_results).distinct()
                ids = db.session.connection().execute(s).fetchall()
                return db.session.query(db.Instance).options(joinedload_all('properties')).filter(
                    db.Instance.experiments.contains(self)).filter(
                    not_(db.Instance.idInstance.in_(list(r[0] for r in ids)))).all()

            def get_instances(self, db):
                """ All distinct instances of the experiment, properties eager-loaded. """
                return db.session.query(db.Instance).options(joinedload_all('properties')) \
                    .filter(db.Instance.experiments.contains(self)).distinct().all()

            def get_num_solver_configs(self, db):
                """ Number of distinct solver configurations of the experiment. """
                return db.session.query(db.SolverConfiguration) \
                    .filter_by(experiment=self).distinct().count()

            def get_num_instances(self, db):
                """ Number of distinct instances of the experiment. """
                return db.session.query(db.Instance) \
                    .filter(db.Instance.experiments.contains(self)).distinct().count()

            def get_total_instance_blob_size(self, db):
                """ Total size in bytes of all instance blobs used by the experiment. """
                table = db.metadata.tables['Instances']
                c_instance = table.c['instance']
                c_id = table.c['idInstance']
                instance_ids = [i.idInstance for i in self.get_instances(db)]
                instance_sizes = db.session.connection().execute(select([func.length(c_instance)],
                                                                        c_id.in_(instance_ids)).select_from(
                    table)).fetchall()
                return sum(i[0] for i in instance_sizes or [(0,)])

            def get_results_by_instance(self, db, solver_configs, instance, penalize_incorrect, penalize_factor=1,
                                        result_property='resultTime'):
                """ Dict idSolverConfig -> dict run -> result row for one
                    instance. When penalize_incorrect is true, finished but
                    unsuccessful jobs are charged limit * penalize_factor
                    as their cost.
                """
                results_by_instance = dict((sc.idSolverConfig, dict()) for sc in solver_configs)
                if not solver_configs: return results_by_instance
                solver_config_ids = [sc.idSolverConfig for sc in solver_configs]

                table = db.metadata.tables['ExperimentResults']
                table_result_codes = db.metadata.tables['ResultCodes']
                table_status_codes = db.metadata.tables['StatusCodes']
                c = table.c
                c_rc = table_result_codes.c
                c_sc = table_status_codes.c

                # pick cost and limit columns based on the requested property
                if result_property == 'resultTime':
                    c_cost = c['resultTime']
                    c_limit = c['CPUTimeLimit']
                elif result_property == 'wallTime':
                    c_cost = c['wallTime']
                    c_limit = c['wallClockTimeLimit']
                elif result_property == 'cost':
                    c_cost = c['cost']
                    c_limit = -1
                else:
                    # result property table lookups are not handled here
                    c_cost = 0
                    c_limit = -1

                join_expression = table.join(table_result_codes).join(table_status_codes)
                select_statement = select(
                    [
                        c['idJob'],
                        c['status'],
                        c['resultCode'],
                        expression.label('result_code_description', c_rc['description']),
                        expression.label('status_code_description', c_sc['description']),
                        c['Instances_idInstance'],
                        c['SolverConfig_idSolverConfig'],
                        c['run'],
                        expression.label(
                            'cost',
                            expression.case(
                                [(c['status'] > 0,  # only consider cost of "finished" jobs
                                  c_cost if not penalize_incorrect else
                                  expression.case(
                                      [(table.c['resultCode'].like('1%'), c_cost)],
                                      else_=c_limit * penalize_factor))],
                                else_=None)),
                        expression.label('limit', c_limit),
                    ],
                    and_(
                        c['Experiment_idExperiment'] == self.idExperiment,
                        c['Instances_idInstance'] == instance.idInstance,
                        c['SolverConfig_idSolverConfig'].in_(solver_config_ids),
                    ),
                    from_obj=join_expression,
                )

                results = db.session.connection().execute(select_statement)
                for row in results:
                    results_by_instance[row.SolverConfig_idSolverConfig][row.run] = row

                return results_by_instance

            def get_result_matrix(self, db, solver_configs, instances, cost='resultTime', fixed_limit=None):
                """ Returns the results as matrix of lists of result tuples, i.e.
                    Dict<idInstance, Dict<idSolverConfig, List of runs>>, plus
                    per-cell counts of successful and completed runs.
                    fixed_limit caps time costs and rewrites status/resultCode
                    of capped runs to timeout codes.
                """
                num_successful = dict(
                    (i.idInstance, dict((sc.idSolverConfig, 0) for sc in solver_configs)) for i in instances)
                num_completed = dict(
                    (i.idInstance, dict((sc.idSolverConfig, 0) for sc in solver_configs)) for i in instances)
                M = dict((i.idInstance, dict((sc.idSolverConfig, list()) for sc in solver_configs)) for i in instances)
                solver_config_ids = [sc.idSolverConfig for sc in solver_configs]
                instance_ids = [i.idInstance for i in instances]
                if not solver_config_ids or not instance_ids:
                    return M, 0, 0
                table = db.metadata.tables['ExperimentResults']
                table_result_codes = db.metadata.tables['ResultCodes']
                from_table = table
                table_has_prop = db.metadata.tables['ExperimentResult_has_Property']
                table_has_prop_value = db.metadata.tables['ExperimentResult_has_PropertyValue']

                # was only defined on some branches before; define once up front
                inf = float('inf')
                status_column = table.c['status']
                result_code_column = table.c['resultCode']
                if cost == 'resultTime':
                    cost_column = table.c['resultTime']
                    cost_limit_column = table.c['CPUTimeLimit']

                    if fixed_limit:
                        # cap at fixed_limit and mark capped runs as CPU timeouts
                        cost_column = expression.case([(table.c['resultTime'] > fixed_limit, fixed_limit)],
                                                      else_=table.c['resultTime'])
                        cost_limit_column = literal(fixed_limit)
                        status_column = expression.case([(table.c['resultTime'] > fixed_limit, literal(21))],
                                                        else_=table.c['status'])
                        result_code_column = expression.case([(table.c['resultTime'] > fixed_limit, literal(-21))],
                                                             else_=table.c['resultCode'])
                elif cost == 'wallTime':
                    cost_column = table.c['wallTime']
                    cost_limit_column = table.c['wallClockTimeLimit']

                    if fixed_limit:
                        # cap at fixed_limit and mark capped runs as wall-clock timeouts
                        cost_column = expression.case([(table.c['wallTime'] > fixed_limit, fixed_limit)],
                                                      else_=table.c['wallTime'])
                        cost_limit_column = literal(fixed_limit)
                        status_column = expression.case([(table.c['wallTime'] > fixed_limit, literal(22))],
                                                        else_=table.c['status'])
                        result_code_column = expression.case([(table.c['wallTime'] > fixed_limit, literal(-22))],
                                                             else_=table.c['resultCode'])
                elif cost == 'cost':
                    cost_column = table.c['cost']
                    cost_limit_column = table.c['CPUTimeLimit']  # doesnt matter
                else:
                    # numeric result property id: join the property value table
                    cost_column = table_has_prop_value.c['value']
                    cost_limit_column = table.c['CPUTimeLimit']
                    from_table = table.join(table_has_prop, and_(table_has_prop.c['idProperty'] == int(cost),
                                                                 table_has_prop.c['idExperimentResults'] == table.c[
                                                                     'idJob'])).join(table_has_prop_value)

                s = select([table.c['idJob'], expression.label('resultCode', result_code_column),
                            expression.label('cost', cost_column), expression.label('status', status_column),
                            table.c['SolverConfig_idSolverConfig'], table.c['Instances_idInstance'],
                            table_result_codes.c['description'], expression.label('limit', cost_limit_column)],
                           and_(table.c['Experiment_idExperiment'] == self.idExperiment,
                                table.c['SolverConfig_idSolverConfig'].in_(solver_config_ids),
                                table.c['Instances_idInstance'].in_(instance_ids)),
                           from_obj=from_table.join(table_result_codes))

                Run = namedtuple('Run', ['idJob', 'status', 'result_code_description', 'resultCode', 'resultTime',
                                         'successful', 'penalized_time10', 'idSolverConfig', 'idInstance',
                                         'penalized_time1', 'censored'])

                for r in db.session.connection().execute(s):
                    if r.Instances_idInstance not in M: continue
                    if r.SolverConfig_idSolverConfig not in M[r.Instances_idInstance]: continue
                    successful = str(r.resultCode).startswith('1')
                    if successful:
                        num_successful[r.Instances_idInstance][r.SolverConfig_idSolverConfig] += 1
                    if r.status not in STATUS_PROCESSING:
                        num_completed[r.Instances_idInstance][r.SolverConfig_idSolverConfig] += 1
                    # penalized cost: real cost when successful, otherwise the
                    # limit (or +inf for non-time costs), optionally times 10
                    penalty_base = inf if cost not in ('resultTime', 'wallTime') else float(r.limit)
                    M[r.Instances_idInstance][r.SolverConfig_idSolverConfig].append(
                        Run(r.idJob, int(r.status), r[6], int(r.resultCode),
                            None if int(r.status) <= 0 else float(r.cost),
                            successful,
                            float(r.cost) if successful else penalty_base * 10,
                            r.SolverConfig_idSolverConfig, r.Instances_idInstance,
                            float(r.cost) if successful else penalty_base,
                            not successful))
                return M, num_successful, num_completed

        class ExperimentResult(object):
            """ Maps the ExperimentResult table. Provides a function
                to obtain a result property of a job.
            """

            def get_time(self):
                """ Returns the CPU time needed for this result, or None
                    if the job is still being processed (status <= 0) or
                    did not finish with a recognized status.
                """
                # if the job is being processed or the CC had a crash return None
                if self.status <= 0:
                    return None

                # only finished jobs (STATUS_FINISHED or status 21) have a
                # meaningful resultTime
                if self.status in (STATUS_FINISHED, 21):
                    return self.resultTime

                return None

            def get_penalized_time(self, p_factor=10):
                """ Returns the CPU time limit multiplied by the penalty
                    factor p_factor, or infinity when no limit is set (-1).
                """
                if self.CPUTimeLimit == -1:
                    return float('inf')
                else:
                    return self.CPUTimeLimit * p_factor

            def get_property_value(self, property, db):
                """ Returns the value of the property with the given name.
                    'resultTime', 'wallTime' and 'cost' are handled directly;
                    anything else is interpreted as the integer id of an
                    associated result property. Returns None when the lookup
                    or the int conversion fails.
                """
                if property == 'resultTime':
                    return self.get_time()
                elif property == 'wallTime':
                    return self.wallTime
                elif property == 'cost':
                    return self.cost
                else:
                    # Narrowed from a bare ``except:`` which would also have
                    # swallowed KeyboardInterrupt/SystemExit; these are the
                    # errors a bad property id or missing data can raise.
                    try:
                        for pv in self.properties:
                            if pv.idProperty == int(property):
                                return pv.get_value()
                    except (ValueError, TypeError, AttributeError):
                        return None

            def to_json(self):
                """ Serializable dict representation of this job row. """
                return {
                    'idJob': self.idJob,
                    'Experiment_idExperiment': self.Experiment_idExperiment,
                    'Instances_idInstance': self.Instances_idInstance,
                    'run': self.run,
                    'resultCode': self.resultCode,
                    'resultTime': self.resultTime,
                    'status': self.status,
                    'seed': self.seed,
                    'startTime': str(self.startTime),
                    'computeQueue': self.computeQueue,
                    'priority': self.priority,
                }

        class ExperimentResultOutput(object):
            """ Maps the ExperimentResultsOutput table. """

        class InstanceClass(object):
            """ Maps the instanceClass table. """

            def __str__(self):
                # an instance class is displayed by its name
                return self.name

        class ResultCodes(object):
            """ Maps the ResultCodes lookup table. """

            def to_json(self):
                """ Serializable dict representation of this result code. """
                json_dict = {
                    'code': self.resultCode,
                    'description': self.description,
                }
                return json_dict

        class StatusCodes(object):
            """ Maps the StatusCodes lookup table. """

            def to_json(self):
                """ Serializable dict representation of this status code. """
                json_dict = {
                    'code': self.statusCode,
                    'description': self.description,
                }
                return json_dict

        class SolverBinary(object):
            """ Maps the SolverBinaries table. """

        class Client(object):
            """ Maps the Client table. """

        class Experiment_has_Client(object):
            """ Maps the Experiment_has_Client association table. """

        class GridQueue(object):
            """ Maps the gridQueue table. """

        class Course(object):
            """ Maps the Course table. """

        class ConfigurationScenario(object):
            """ Maps the ConfigurationScenario table. """

            def get_parameter_domain(self, parameter_name):
                """ Returns the domain name of a parameter. This can
                    be one of the following:
                    realDomain, flagDomain, categoricalDomain, ordinalDomain,
                    integerDomain, mixedDomain, optionalDomain.
                """
                # serialized parameter-graph XML stored with the scenario's solver
                pgraph = self.solver_binary.solver.parameter_graph[0].serializedGraph
                if pgraph is None: return None
                tree = etree.parse(StringIO(pgraph))
                if tree is None: return None
                root = tree.getroot()
                # NOTE(review): relies on a fixed child order inside each
                # "parameters" node (child [1] = name element, child [0] =
                # domain element) and on ``attrib.values()[0]`` being
                # subscriptable (Python 2 dict semantics, single attribute) --
                # confirm against the serialized graph format.
                for node in root:
                    if node.tag == "parameters" and node[1].text == parameter_name:
                        return node[0].attrib.values()[0]

        class ConfigurationScenarioParameter(object):
            """ Maps the ConfigurationScenario_has_Parameters table. """

        class ParameterGraph(object):
            """ Maps the ParameterGraph table. """

        # competition tables

        class User(object):
            """ Maps the User table. """

        class DBConfiguration(object):
            """ Maps the DBConfiguration table (row 0 holds the config). """

        class CompetitionCategory(object):
            """ Maps the CompetitionCategory table. """

            def __str__(self):
                # a category is displayed by its name
                return self.name

        class BenchmarkType(object):
            """ Maps the BenchmarkType table. """

            def __str__(self):
                # a benchmark type is displayed by its name
                return self.name

        # result and instance properties

        class Property(object):
            """ Maps the Property table. A property is either attached to
                experiment results or to instances. """

            def is_result_property(self):
                """ True when this is a result property. """
                return self.propertyType == RESULT_PROPERTY_TYPE

            def is_instance_property(self):
                """ True when this is an instance property. """
                return self.propertyType == INSTANCE_PROPERTY_TYPE

            def is_simple(self):
                """ Whether the stored value is directly castable to a
                    Python object. """
                return self.propertyValueType.lower() in {'float', 'double',
                                                          'int', 'integer',
                                                          'string'}

            def is_plotable(self):
                """ Like is_simple(), but restricted to numeric value
                    types. """
                return self.propertyValueType.lower() in {'float', 'double',
                                                          'int', 'integer'}

        class PropertyValueType(object):
            """ Maps the PropertyValueType lookup table. """

        class ExperimentResultProperty(object):
            """ Maps the ExperimentResult_has_Property table. """

            def get_value(self):
                """ Cast the first stored value to the property's value
                    type. Returns None for non-numeric types or when the
                    conversion/lookup fails. """
                value_type = self.property.propertyValueType.lower()
                try:
                    raw = self.values[0].value
                    if value_type in ('float', 'double'):
                        return float(raw)
                    if value_type in ('int', 'integer'):
                        return int(raw)
                    return None
                except Exception:
                    return None

        class ResultPropertyValue(object):
            """ Maps the ExperimentResult_has_PropertyValue table. """

        class InstanceProperties(object):
            """ Maps the Instance_has_Property table. """

            def get_value(self):
                """ Cast self.value to the property's value type; returns
                    None for unknown types or when the cast fails. """
                value_type = self.property.propertyValueType.lower()
                casts = {'float': float, 'double': float,
                         'int': int, 'integer': int,
                         'string': str}
                cast = casts.get(value_type)
                if cast is None:
                    return None
                try:
                    return cast(self.value)
                except ValueError:
                    return None

        class VerifierConfig(object):
            """ Maps the VerifierConfig table. """

        class VerifierConfigParameter(object):
            """ Maps the VerifierConfig_has_VerifierParameter table. """

        # Expose all mapped classes as attributes of this database object so
        # client code can reach them (e.g. db.ExperimentResult, db.Instance).
        self.Solver = Solver
        self.SolverConfiguration = SolverConfiguration
        self.Parameter = Parameter
        self.ParameterInstance = ParameterInstance
        self.Instance = Instance
        self.Experiment = Experiment
        self.ExperimentResult = ExperimentResult
        self.ExperimentResultOutput = ExperimentResultOutput
        self.InstanceClass = InstanceClass
        self.GridQueue = GridQueue
        self.ResultCodes = ResultCodes
        self.StatusCodes = StatusCodes
        self.SolverBinary = SolverBinary
        self.Client = Client
        self.Experiment_has_Client = Experiment_has_Client
        self.ConfigurationScenario = ConfigurationScenario
        self.ConfigurationScenarioParameter = ConfigurationScenarioParameter
        self.Course = Course
        self.VerifierConfig = VerifierConfig
        self.VerifierConfigParameter = VerifierConfigParameter

        # competition-related classes
        self.User = User
        self.DBConfiguration = DBConfiguration
        self.CompetitionCategory = CompetitionCategory
        self.BenchmarkType = BenchmarkType

        # result/instance property classes
        self.Property = Property
        self.PropertyValueType = PropertyValueType
        self.ExperimentResultProperty = ExperimentResultProperty
        self.ResultPropertyValue = ResultPropertyValue
        self.InstanceProperties = InstanceProperties
        self.ParameterGraph = ParameterGraph

        # load all table definitions from the connected database schema
        metadata.reflect()

        # Re-declare some tables with explicit column definitions merged into
        # the reflected ones (useexisting=True + autoload=True).
        # NOTE(review): presumably this overrides column types/FKs that
        # reflection does not pick up correctly -- confirm.
        schema.Table("instanceClass", metadata,
                     schema.Column('parent', sqlalchemy.Integer, schema.ForeignKey("instanceClass.idinstanceClass")),
                     useexisting=True, autoload=True
        )

        schema.Table("ExperimentResults", metadata,
                     schema.Column('cost', sqlalchemy.Float),
                     useexisting=True, autoload=True
        )

        schema.Table("Experiment", metadata,
                     schema.Column('costPenalty', sqlalchemy.Float),
                     useexisting=True, autoload=True
        )

        schema.Table("SolverConfig", metadata,
                     schema.Column('cost', sqlalchemy.Float),
                     useexisting=True, autoload=True
        )

        # Table-Class mapping: classical (non-declarative) SQLAlchemy mappers
        # wiring the plain classes above onto the reflected tables.  Large
        # BLOB-like columns are wrapped in deferred() so they are only loaded
        # on explicit attribute access.
        mapper(VerifierConfig, metadata.tables['VerifierConfig'],
               properties={
                   'parameters': relation(VerifierConfigParameter, backref='verifier_config'),
               }
        )
        mapper(VerifierConfigParameter, metadata.tables['VerifierConfig_has_VerifierParameter'])
        mapper(Course, metadata.tables['Course'])
        mapper(GridQueue, metadata.tables['gridQueue'])
        mapper(Client, metadata.tables['Client'],
               properties={
                   'grid_queue': relationship(GridQueue, backref='clients'),
                   'experiments': relationship(Experiment,
                                               secondary=metadata.tables['Experiment_has_Client'], backref='clients'),
               }
        )
        mapper(Experiment_has_Client, metadata.tables['Experiment_has_Client'])
        mapper(Parameter, metadata.tables['Parameters'])
        # self-referential mapping: instance classes form a tree via 'parent';
        # join_depth=10 limits how deep the eager (joined) load follows it
        mapper(InstanceClass, metadata.tables['instanceClass'],
               properties={
                   'parent_class': relationship(InstanceClass,
                                                remote_side=metadata.tables['instanceClass'].c['idinstanceClass'],
                                                lazy="joined", join_depth=10, backref='subclasses')
               }
        )
        mapper(Instance, metadata.tables['Instances'],
               properties={
                   'instance': deferred(metadata.tables['Instances'].c.instance),
                   'instance_classes': relationship(InstanceClass,
                                                    secondary=metadata.tables['Instances_has_instanceClass'],
                                                    backref='instances',
                                                    lazy="joined"),
                   'properties': relation(InstanceProperties, backref='instance'),
               }
        )
        mapper(Solver, metadata.tables['Solver'],
               properties={
                   'code': deferred(metadata.tables['Solver'].c.code),
                   'binaries': relation(SolverBinary, backref='solver'),
                   'parameters': relation(Parameter, backref='solver'),
                   'competition_categories': relationship(
                       CompetitionCategory,
                       backref='solvers',
                       secondary=metadata.tables['Solver_has_CompetitionCategory']),
                   'parameter_graph': relation(ParameterGraph),
                   'description_pdf': deferred(metadata.tables['Solver'].c.description_pdf),
               }
        )
        mapper(SolverBinary, metadata.tables['SolverBinaries'],
               properties={
                   'binaryArchive': deferred(metadata.tables['SolverBinaries'].c.binaryArchive),
               }
        )
        mapper(ParameterInstance, metadata.tables['SolverConfig_has_Parameters'],
               properties={
                   'parameter': relation(Parameter)
               }
        )
        mapper(SolverConfiguration, metadata.tables['SolverConfig'],
               properties={
                   'parameter_instances': relation(ParameterInstance, backref="solver_configuration"),
                   'solver_binary': relation(SolverBinary, backref="solver_configurations"),
                   'experiment': relation(Experiment),
               }
        )
        mapper(ParameterGraph, metadata.tables['ParameterGraph'])
        mapper(ConfigurationScenarioParameter, metadata.tables['ConfigurationScenario_has_Parameters'],
               properties={
                   'parameter': relation(Parameter)
               }
        )
        mapper(ConfigurationScenario, metadata.tables['ConfigurationScenario'],
               properties={
                   'parameters': relation(ConfigurationScenarioParameter),
                   'solver_binary': relation(SolverBinary),
                   'course': relation(Course, backref='configuration_scenario'),
               }
        )
        mapper(Experiment, metadata.tables['Experiment'],
               properties={
                   'instances': relationship(Instance,
                                             secondary=metadata.tables['Experiment_has_Instances'],
                                             backref='experiments'),
                   'solver_configurations': relation(SolverConfiguration),
                   'grid_queue': relationship(GridQueue,
                                              secondary=metadata.tables['Experiment_has_gridQueue']),
                   'results': relation(ExperimentResult),
                   'configuration_scenario': relation(ConfigurationScenario, uselist=False),
                   'verifier_config': relation(VerifierConfig, backref='experiment', uselist=False),
               }
        )
        mapper(StatusCodes, metadata.tables['StatusCodes'])
        mapper(ResultCodes, metadata.tables['ResultCodes'])
        mapper(ExperimentResultOutput, metadata.tables['ExperimentResultsOutput'])
        mapper(ExperimentResult, metadata.tables['ExperimentResults'],
               properties={
                   'output': relation(ExperimentResultOutput, backref='result', uselist=False),
                   'solver_configuration': relation(SolverConfiguration, backref=backref('runs', passive_deletes=True)),
                   'properties': relationship(ExperimentResultProperty, backref='experiment_result'),
                   'experiment': relation(Experiment, backref='experiment_results'),
                   'instance': relation(Instance, backref='results'),
                   'status_code': relation(StatusCodes, uselist=False),
                   'result_code': relation(ResultCodes, uselist=False),
                   'computeNode': deferred(metadata.tables['ExperimentResults'].c.computeNode),
                   'computeNodeIP': deferred(metadata.tables['ExperimentResults'].c.computeNodeIP),
                   'client': relation(Client),
               }
        )

        # competition tables
        mapper(User, metadata.tables['User'],
               properties={
                   'solvers': relation(Solver, backref='user'),
                   'source_classes': relation(InstanceClass, backref='user'),
                   'benchmark_types': relation(BenchmarkType, backref='user')
               }
        )
        mapper(DBConfiguration, metadata.tables['DBConfiguration'])
        mapper(CompetitionCategory, metadata.tables['CompetitionCategory'])
        mapper(BenchmarkType, metadata.tables['BenchmarkType'],
               properties={
                   'instances': relation(Instance, backref='benchmark_type')
               }
        )

        # result/instance property tables
        mapper(Property, metadata.tables['Property'])
        mapper(PropertyValueType, metadata.tables['PropertyValueType'])
        mapper(ExperimentResultProperty, metadata.tables['ExperimentResult_has_Property'],
               properties={
                   'property': relationship(Property, backref='experiment_results', lazy='joined'),
                   'values': relation(ResultPropertyValue, backref='experiment_result_property', lazy='joined')
               }
        )
        mapper(ResultPropertyValue, metadata.tables['ExperimentResult_has_PropertyValue'])
        mapper(InstanceProperties, metadata.tables['Instance_has_Property'],
               properties={
                   'property': relationship(Property, backref='instances')
               }
        )

        # thread-local session factory bound to this engine; commits and
        # flushes are explicit
        self.session = scoped_session(sessionmaker(bind=self.engine, autocommit=False,
                                                   autoflush=False))

        # initialize DBConfiguration table if not already done
        # (row with id 0 acts as the single configuration record)
        if self.session.query(DBConfiguration).get(0) is None:
            dbConfig = DBConfiguration()
            dbConfig.id = 0
            dbConfig.competition = False
            dbConfig.competitionPhase = None
            self.session.add(dbConfig)
            self.session.commit()

        # cache competition flag and phase from the configuration row
        self.db_is_competition = self.session.query(self.DBConfiguration).get(0).competition
        if not self.db_is_competition:
            self.db_competition_phase = None
        else:
            self.db_competition_phase = self.session.query(self.DBConfiguration).get(0).competitionPhase
Esempio n. 47
0
class PackageExtra(core.StatefulObjectMixin, domain_object.DomainObject):
    """A single key/value extra attached to a package."""

    def related_packages(self):
        # the only package related to an extra is the one that owns it
        return [self.package]


# Map PackageExtra onto its table.  The backref gives Package a dict-like
# '_extras' collection keyed by each extra's 'key' column; the cascade makes
# extras follow their package's lifetime.
meta.mapper(
    PackageExtra,
    package_extra_table,
    properties={
        'package':
        orm.relation(
            _package.Package,
            backref=orm.backref(
                '_extras',
                collection_class=orm.collections.attribute_mapped_collection(
                    u'key'),
                cascade='all, delete, delete-orphan',
            ),
        ),
    },
    order_by=[package_extra_table.c.package_id, package_extra_table.c.key],
    extension=[extension.PluginMapperExtension()],
)


def _create_extra(key, value):
    """Factory used by the association proxy to build a PackageExtra."""
    extra_key = text_type(key)
    return PackageExtra(key=extra_key, value=value)


_package.Package.extras = association_proxy('_extras',
Esempio n. 48
0
class Corpus(Base):
    """Declarative model for the 'corpus' table."""

    __tablename__ = 'corpus'

    def __repr__(self):
        return "<Corpus (%s)>" % self.id

    id = Column(Integer, Sequence('corpus_seq_id', optional=True), primary_key=True)
    UUID = Column(Unicode(36))
    name = Column(Unicode(255))
    description = Column(UnicodeText)
    content = Column(UnicodeText(length=2**31))
    enterer_id = Column(Integer, ForeignKey('user.id', ondelete='SET NULL'))
    enterer = relation('User', primaryjoin='Corpus.enterer_id==User.id')
    modifier_id = Column(Integer, ForeignKey('user.id', ondelete='SET NULL'))
    modifier = relation('User', primaryjoin='Corpus.modifier_id==User.id')
    form_search_id = Column(Integer, ForeignKey('formsearch.id', ondelete='SET NULL'))
    form_search = relation('FormSearch')
    datetime_entered = Column(DateTime)
    datetime_modified = Column(DateTime, default=now)
    tags = relation('Tag', secondary=CorpusTag.__table__)
    forms = relation('Form', secondary=CorpusForm.__table__, backref='corpora')

    # ``files`` attribute holds references to ``CorpusFile`` models, not ``File``
    # models.  This is a one-to-many relation, like form.translations.
    files = relation('CorpusFile', backref='corpus', cascade='all, delete, delete-orphan')

    def get_dict(self):
        """Return a Python dictionary representation of the Corpus.  This
        facilitates JSON-stringification, cf. utils.JSONOLDEncoder.  Relational
        data are truncated, e.g., corpus_dict['enterer'] is a dict with keys
        for 'id', 'first_name' and 'last_name' (cf. get_mini_user_dict above) and
        lacks keys for other attributes such as 'username',
        'personal_page_content', etc.
        """

        return {
            'id': self.id,
            'UUID': self.UUID,
            'name': self.name,
            'description': self.description,
            'content': self.content,
            'enterer': self.get_mini_user_dict(self.enterer),
            'modifier': self.get_mini_user_dict(self.modifier),
            'form_search': self.get_mini_form_search_dict(self.form_search),
            'datetime_entered': self.datetime_entered,
            'datetime_modified': self.datetime_modified,
            'tags': self.get_tags_list(self.tags),
            'files': self.get_corpus_files_list(self.files)
        }

    def get_full_dict(self):
        """Like get_dict() but additionally includes the (potentially large)
        list of member forms."""
        result = self.get_dict()
        result['forms'] = self.get_forms_list(self.forms)
        return result

    # character-filter factory; NOTE(review): ``Keeper`` is defined elsewhere
    # in this module and presumably builds a callable that keeps only the
    # given characters -- confirm.
    makefilter = Keeper

    @classmethod
    def get_int(cls, input_):
        """Best-effort int conversion; returns None when it fails."""
        try:
            return int(input_)
        except Exception:
            return None

    @classmethod
    def get_form_references(cls, content):
        """Similar to ``get_ids_of_forms_referenced`` except that references are
        assumed to be comma-delimited strings of digits -- all other text is
        filtered out.
        """
        # keep only digits and commas, split on commas, convert each chunk to
        # int and drop the failures (None) and zeros.
        # NOTE(review): filter/map here rely on Python 2 list semantics; under
        # Python 3 this returns a lazy filter object.
        digits_comma_only = cls.makefilter('1234567890,')
        return filter(None, map(cls.get_int, digits_comma_only(content).split(',')))
Esempio n. 49
0
    Column('pospoints', SmallInteger),
    UniqueConstraint('eventid', 'carid', name='eridx_2'))
Index('eridx_1', t_eventresults.c.eventid)


class EventResult(object):
    """One car's result row for an event (mapped onto t_eventresults)."""

    def __init__(self, **kwargs):
        """Copy known keyword arguments onto the instance.

        Only attributes that already exist (i.e. mapped columns) are set;
        unknown keys are silently ignored.
        """
        # dict.items() iterates identically on Python 2 and 3, unlike the
        # Python-2-only iteritems() used previously (AttributeError on py3).
        for key, value in kwargs.items():
            if hasattr(self, key):
                setattr(self, key, value)


# Classical mapping of EventResult onto t_eventresults.
# NOTE: 'class' is a mapped property name; since it is a Python keyword it
# cannot be read with dot syntax -- use getattr(obj, 'class').
mapper(EventResult,
       t_eventresults,
       properties={
           'car': relation(Car, backref='results'),
           'class': relation(Class)
       })

## Announcer information precalculated
t_announcer = Table(
    'announcer',
    metadata,
    Column('id', Integer, primary_key=True),
    Column('eventid', Integer, ForeignKey('events.id')),
    Column('carid', Integer, ForeignKey('cars.id')),
    Column('classcode', String(16), ForeignKey('classlist.code')),
    Column('lastcourse', Integer),
    Column('rawdiff', Float),
    Column('netdiff', Float),
    Column(
Esempio n. 50
0
File: fit.py Progetto: njdaniel/Pyfa
# Relationship from a booster fit to the CommandFit rows it boosts, exposed
# as a dict keyed by the boosted fit's ID; deleting the fit deletes the links.
boostedOntoRel = relationship(
    CommandFit,
    primaryjoin=commandFits_table.c.boosterID == fits_table.c.ID,
    backref='booster_fit',
    collection_class=attribute_mapped_collection('boostedID'),
    cascade='all, delete, delete-orphan')

mapper(
    es_Fit,
    fits_table,
    properties={
        "_Fit__modules":
        relation(
            Module,
            collection_class=HandledModuleList,
            primaryjoin=and_(modules_table.c.fitID == fits_table.c.ID,
                             modules_table.c.projected == False),  # noqa
            order_by=modules_table.c.position,
            cascade='all, delete, delete-orphan'),
        "_Fit__projectedModules":
        relation(Module,
                 collection_class=HandledProjectedModList,
                 cascade='all, delete, delete-orphan',
                 single_parent=True,
                 primaryjoin=and_(modules_table.c.fitID == fits_table.c.ID,
                                  modules_table.c.projected == True)),  # noqa
        "owner":
        relation(User, backref="fits"),
        "itemID":
        fits_table.c.shipID,
        "shipID":
Esempio n. 51
0

def setup():
    """Create the user_extra table when the core user table already exists
    and the extra table does not."""
    if not model.user_table.exists():
        return
    if user_extra_table.exists():
        return
    user_extra_table.create()
    log.debug('User extra table created')


def _create_extra(key, value):
    """Factory used by the association proxy to build a UserExtra."""
    extra_key = unicode(key)
    return UserExtra(key=extra_key, value=value)


# Map UserExtra onto its table.  The backref gives User a dict-like
# '_extras' collection keyed by each extra's 'key' column.
meta.mapper(UserExtra,
            user_extra_table,
            properties={
                'user':
                orm.relation(
                    user.User,
                    backref=orm.backref('_extras',
                                        collection_class=orm.collections.
                                        attribute_mapped_collection(u'key'),
                                        cascade='all, delete, delete-orphan'))
            },
            order_by=[user_extra_table.c.user_id, user_extra_table.c.key])

# Expose a stateful view of '_extras' plus a value-level association proxy so
# user.extras behaves like a plain dict of key -> value.
_extras_active = vdm.sqlalchemy.stateful.DeferredProperty(
    '_extras', vdm.sqlalchemy.stateful.StatefulDict)
setattr(user.User, 'extras_active', _extras_active)
user.User.extras = vdm.sqlalchemy.stateful.OurAssociationProxy(
    'extras_active', 'value', creator=_create_extra)
Esempio n. 52
0
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos.  If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================

from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Table
from sqlalchemy.orm import relation, mapper, synonym, deferred

from eos.db import gamedata_meta
from eos.types import Group, Icon, Category

# "invgroups" game-data table: each group references a category and an icon.
groups_table = Table(
    "invgroups", gamedata_meta, Column("groupID", Integer, primary_key=True),
    Column("groupName", String), Column("description", String),
    Column("published", Boolean),
    Column("categoryID", Integer, ForeignKey("invcategories.categoryID")),
    Column("iconID", Integer, ForeignKey("icons.iconID")))

# Classical mapping for Group: synonyms provide friendlier 'ID'/'name'
# aliases for the raw column names, and the description text is deferred so
# it is only loaded on access.
mapper(Group,
       groups_table,
       properties={
           "category": relation(Category, backref="groups"),
           "icon": relation(Icon),
           "ID": synonym("groupID"),
           "name": synonym("groupName"),
           "description": deferred(groups_table.c.description)
       })
Esempio n. 53
0
class TVMazeSeries(Base):
    """Declarative model caching TVMaze show metadata in the local DB."""

    __tablename__ = 'tvmaze_series'

    tvmaze_id = Column(Integer, primary_key=True)
    status = Column(Unicode)
    rating = Column(Float)
    genres = relation(TVMazeGenre, secondary=genres_table)
    weight = Column(Integer)
    updated = Column(DateTime)  # last time show was updated at tvmaze
    name = Column(Unicode)
    language = Column(Unicode)
    # schedule is stored as a JSON string; json_synonym handles (de)serialization
    _schedule = Column('schedule', Unicode)
    schedule = json_synonym('_schedule')
    url = Column(String)
    original_image = Column(String)
    medium_image = Column(String)
    tvdb_id = Column(Integer)
    tvrage_id = Column(Integer)
    premiered = Column(DateTime)
    year = Column(Integer)
    summary = Column(Unicode)
    webchannel = Column(String)
    runtime = Column(Integer)
    show_type = Column(String)
    network = Column(Unicode)
    episodes = relation('TVMazeEpisodes',
                        order_by='TVMazeEpisodes.season_number',
                        cascade='all, delete, delete-orphan',
                        backref='series')
    last_update = Column(DateTime)  # last time we updated the db for the show

    def __init__(self, series, session):
        """Create a row from a TVMaze API 'series' dict and populate it."""
        self.tvmaze_id = series['id']
        self.update(series, session)

    def to_dict(self):
        """Plain-dict representation of the cached show (genres flattened
        to their names)."""
        return {
            'tvmaze_id': self.tvmaze_id,
            'status': self.status,
            'rating': self.rating,
            'genres': [genre.name for genre in self.genres],
            'weight': self.weight,
            'updated': self.updated,
            'name': self.name,
            'language': self.language,
            'schedule': self.schedule,
            'url': self.url,
            'original_image': self.original_image,
            'medium_image': self.medium_image,
            'tvdb_id': self.tvdb_id,
            'tvrage_id': self.tvrage_id,
            'premiered': self.premiered,
            'year': self.year,
            'summary': self.summary,
            'webchannel': self.webchannel,
            'runtime': self.runtime,
            'show_type': self.show_type,
            'network': self.network,
            'last_update': self.last_update
        }

    def update(self, series, session):
        """Refresh all columns from a TVMaze API 'series' dict; optional
        fields (image, premiered, web_channel, network, externals) fall
        back to None when absent."""
        self.status = series['status']
        self.rating = series['rating']['average']
        self.weight = series['weight']
        # TVMaze reports 'updated' as a unix timestamp
        self.updated = datetime.fromtimestamp(series['updated'])
        self.name = series['name']
        self.language = series['language']
        self.schedule = series['schedule']
        self.url = series['url']
        self.original_image = series.get('image').get(
            'original') if series.get('image') else None
        self.medium_image = series.get('image').get('medium') if series.get(
            'image') else None
        self.tvdb_id = series['externals'].get('thetvdb')
        self.tvrage_id = series['externals'].get('tvrage')
        self.premiered = parser.parse(
            series.get('premiered'),
            ignoretz=True) if series.get('premiered') else None
        # premiere year is taken from the first 4 chars of the date string
        self.year = int(
            series.get('premiered')[:4]) if series.get('premiered') else None
        self.summary = series['summary']
        self.webchannel = series.get('web_channel')['name'] if series.get(
            'web_channel') else None
        self.runtime = series['runtime']
        self.show_type = series['type']
        self.network = series.get('network')['name'] if series.get(
            'network') else None
        self.last_update = datetime.now()

        # replace the genre collection in place
        self.genres[:] = get_db_genres(series['genres'], session)

    def __repr__(self):
        return '<TVMazeSeries(title=%s,id=%s,last_update=%s)>' % (
            self.name, self.tvmaze_id, self.last_update)

    def __str__(self):
        return self.name

    @property
    def expired(self):
        """True when the cached row is older than UPDATE_INTERVAL days (or
        was never updated) and should be refreshed from TVMaze."""
        if not self.last_update:
            log.debug('no last update attribute, series set for update')
            return True
        time_dif = datetime.now() - self.last_update
        expiration = time_dif.days > UPDATE_INTERVAL
        return expiration
Esempio n. 54
0
import sqlalchemy as rdb
from sqlalchemy.orm import mapper, relation, column_property, backref

import schema
import domain

# Users
# General representation of a person.  Each User carries a
# "user_addresses" collection of UserAddress rows (presumably
# one-to-many via a foreign key on the address table — confirm in schema).
mapper(domain.User,
       schema.users,
       properties={"user_addresses": relation(domain.UserAddress)})

# Groups

mapper(
    domain.Group,
    schema.groups,
    primary_key=[schema.groups.c.group_id],
    properties={
        "members":
        relation(domain.GroupMembership),
        "group_principal_id":
        column_property(
            #
            # !+ ATTENTION: the following sqlalchemy str concat (on c.type)
            # gives some VERY strange behaviour :
            #
            # print "group." + schema.groups.c.type + "."
            # >>> :type_1 || groups.type || :param_1
            #
            # print group.groups.type.
Esempio n. 55
0
File: group.py Progetto: arkka/ckan
    extension=[
        vdm.sqlalchemy.Revisioner(group_revision_table),
    ],
)

# Make Group revisioned: stamp each row with a Revision and a State, and
# generate the companion GroupRevision class backed by group_revision_table.
vdm.sqlalchemy.modify_base_object_mapper(Group, core.Revision, core.State)
GroupRevision = vdm.sqlalchemy.create_object_version(meta.mapper, Group,
                                                     group_revision_table)

# Map Member rows.  The 'member_all' backref on Group uses
# 'all, delete-orphan' so memberships are removed with their group, and
# the Revisioner extension records every change.
meta.mapper(
    Member,
    member_table,
    properties={
        'group':
        orm.relation(Group,
                     backref=orm.backref('member_all',
                                         cascade='all, delete-orphan')),
    },
    extension=[
        vdm.sqlalchemy.Revisioner(member_revision_table),
    ],
)

# Members are revisioned the same way as groups.
vdm.sqlalchemy.modify_base_object_mapper(Member, core.Revision, core.State)
MemberRevision = vdm.sqlalchemy.create_object_version(meta.mapper, Member,
                                                      member_revision_table)

# TODO: replace this ad-hoc lambda with a proper method for computing a
# member revision's related packages.
MemberRevision.related_packages = lambda self: [self.continuity.package]

HIERARCHY_CTE = """
Esempio n. 56
0
        res_dict = ckan.lib.dictization.table_dictize(self,
                                                      context={'model': model})
        return activity.ActivityDetail(activity_id, self.id, u"Resource",
                                       activity_type,
                                       {'resource': res_dict})



## Mappers

# Resource -> Package: the 'resources_all' backref keeps resources ordered
# by their 'position' column (ordering_list maintains the index), and
# deleting a package cascades to its resources.  The Revisioner extension
# records every change into resource_revision_table.
meta.mapper(Resource, resource_table, properties={
    'package': orm.relation(
        Package,
        # all resources including deleted
        # formally package_resources_all
        backref=orm.backref('resources_all',
                            collection_class=ordering_list('position'),
                            cascade='all, delete',
                            order_by=resource_table.c.position,
                            ),
    )
},
order_by=[resource_table.c.package_id],
extension=[vdm.sqlalchemy.Revisioner(resource_revision_table),
           extension.PluginMapperExtension(),
           ],
)


## VDM

# Attach Revision/State bookkeeping to Resource as well.
vdm.sqlalchemy.modify_base_object_mapper(Resource, core.Revision, core.State)
Esempio n. 57
0
    q = q.filter(AuthorizationGroup.users.contains(user))
    return q.count() == 1

def add_user_to_authorization_group(user, authorization_group, role):
    """Add *user* to *authorization_group* and grant them *role* on it.

    Raises:
        AssertionError: if the user is already a member of the group.
    """
    # Explicit precondition check instead of a bare ``assert``: assert
    # statements are stripped under ``python -O``, which would silently
    # allow duplicate membership.  AssertionError is kept so existing
    # callers that catch it keep working.
    if user_in_authorization_group(user, authorization_group):
        raise AssertionError('user is already in the authorization group')
    from authz import add_user_to_role
    meta.Session.add(authorization_group)
    authorization_group.users.append(user)
    add_user_to_role(user, role, authorization_group)

def remove_user_from_authorization_group(user, authorization_group):
    """Remove *user* from *authorization_group* and revoke every role
    they held on that group.

    Raises:
        AssertionError: if the user is not currently a member.
    """
    # Explicit precondition check instead of a bare ``assert``: assert
    # statements are stripped under ``python -O``.  AssertionError is kept
    # so existing callers that catch it keep working.
    if not user_in_authorization_group(user, authorization_group):
        raise AssertionError('user is not in the authorization group')
    from authz import remove_user_from_role, AuthorizationGroupRole
    meta.Session.add(authorization_group)
    authorization_group.users.remove(user)
    q = meta.Session.query(AuthorizationGroupRole)
    q = q.filter_by(authorization_group=authorization_group,
                    user=user)
    for agr in q:
        remove_user_from_role(user, agr.role, authorization_group)



# AuthorizationGroup <-> User is many-to-many through
# authorization_group_user_table; both sides of the relation load lazily,
# and groups list in name order by default.
meta.mapper(AuthorizationGroup, authorization_group_table, properties={
       'users': orm.relation(user.User, lazy=True, secondary=authorization_group_user_table,
                         backref=orm.backref('authorization_groups', lazy=True))
       },
       order_by=authorization_group_table.c.name)

# Plain mapping for the association rows themselves.
meta.mapper(AuthorizationGroupUser, authorization_group_user_table)
Esempio n. 58
0
class Fqdn(Base):
    """A fully qualified domain name, stored as a label (``name``) plus a
    reference to its ``DnsDomain``, scoped to a ``DnsEnvironment``.

    The same (name, dns_domain) pair may exist once per DNS environment;
    see the unique constraint in ``__table_args__``.
    """

    __tablename__ = _TN
    _instance_label = 'fqdn'

    id = Column(Integer, Sequence('%s_id_seq' % _TN), primary_key=True)

    # The leftmost DNS label (e.g. "www"); the rest comes from dns_domain.
    name = Column(AqStr(63), nullable=False)

    dns_domain_id = Column(Integer,
                           ForeignKey('dns_domain.id',
                                      name='%s_dns_domain_fk' % _TN),
                           nullable=False)

    dns_environment_id = Column(Integer,
                                ForeignKey('dns_environment.id',
                                           name='%s_dns_env_fk' % _TN),
                                nullable=False)

    # deferred(): the column is only loaded from the DB when accessed.
    creation_date = deferred(
        Column(DateTime, default=datetime.now, nullable=False))

    dns_domain = relation(DnsDomain, innerjoin=True)

    dns_environment = relation(DnsEnvironment, innerjoin=True)

    # A name must be unique within its domain and environment.
    __table_args__ = (UniqueConstraint(dns_domain_id,
                                       name,
                                       dns_environment_id,
                                       name='%s_domain_name_env_uk' % _TN),
                      Index('%s_dns_env_idx' % _TN, dns_environment_id))

    @property
    def fqdn(self):
        """The full dotted name, e.g. ``"host.example.com"``."""
        return self.name + '.' + self.dns_domain.name

    @classmethod
    def get_unique(cls,
                   session,
                   fqdn=None,
                   dns_environment=None,
                   name=None,
                   dns_domain=None,
                   **kwargs):
        """Look up an Fqdn record.

        Accepts either a full ``fqdn`` string (which is parsed into its
        name/domain parts) or an explicit ``name`` + ``dns_domain`` pair,
        never both.  A non-DnsEnvironment ``dns_environment`` value is
        resolved via DnsEnvironment.get_unique_or_default().
        """
        if fqdn:
            if name or dns_domain:  # pragma: no cover
                raise TypeError("fqdn and name/dns_domain should not be mixed")
            (name, dns_domain) = parse_fqdn(session, fqdn)

        if not isinstance(dns_environment, DnsEnvironment):
            dns_environment = DnsEnvironment.get_unique_or_default(
                session, dns_environment)
        return super(Fqdn, cls).get_unique(session,
                                           name=name,
                                           dns_domain=dns_domain,
                                           dns_environment=dns_environment,
                                           **kwargs)

    @classmethod
    def get_or_create(cls,
                      session,
                      dns_environment=None,
                      preclude=False,
                      ignore_name_check=False,
                      query_options=None,
                      **kwargs):
        """Return the matching Fqdn, or create one and add it to the session.

        With ``preclude`` set, an existing match raises (via _raise_custom)
        instead of being returned.
        """
        fqdn = cls.get_unique(session,
                              dns_environment=dns_environment,
                              query_options=query_options,
                              **kwargs)
        if fqdn:
            if preclude:
                _raise_custom(preclude, ArgumentError,
                              "{0} already exists.".format(fqdn))
            return fqdn

        if not isinstance(dns_environment, DnsEnvironment):
            dns_environment = DnsEnvironment.get_unique_or_default(
                session, dns_environment)

        fqdn = cls(session=session,
                   dns_environment=dns_environment,
                   ignore_name_check=ignore_name_check,
                   **kwargs)
        session.add(fqdn)
        return fqdn

    @classmethod
    def check_name(cls, name, dns_domain, ignore_name_check=False):
        """ Validate the name parameter """

        # NOTE: basestring implies this module targets Python 2.
        if not isinstance(name, basestring):  # pragma: no cover
            raise TypeError("%s: name must be a string." % cls.name)
        if not isinstance(dns_domain, DnsDomain):  # pragma: no cover
            raise TypeError("%s: dns_domain must be a DnsDomain." % cls.name)

        # Allow SRV records to opt out from this test
        if not ignore_name_check:
            DnsDomain.check_label(name)

        # The limit for DNS name length is 255, assuming wire format. This
        # translates to 253 for simple ASCII text; see:
        # http://www.ops.ietf.org/lists/namedroppers/namedroppers.2003/msg00964.html
        if len(name) + 1 + len(dns_domain.name) > 253:
            raise ArgumentError('The fully qualified domain name is too long.')

    def _check_session(self, session):
        """Guard: raise InternalError unless a usable Session was supplied."""
        if not session or not isinstance(session, Session):  # pragma: no cover
            raise InternalError("%s needs a session." %
                                self._get_class_label())

    def __init__(self,
                 session=None,
                 name=None,
                 dns_domain=None,
                 fqdn=None,
                 dns_environment=None,
                 ignore_name_check=False,
                 **kwargs):
        # Same convention as get_unique(): either a full fqdn string or an
        # explicit name/dns_domain pair, never both.  The session is only
        # required when parsing an fqdn or resolving the environment.
        if fqdn:
            if name or dns_domain:  # pragma: no cover
                raise TypeError("fqdn and name/dns_domain should not be mixed")
            self._check_session(session)
            (name, dns_domain) = parse_fqdn(session, fqdn)

        self.check_name(name, dns_domain, ignore_name_check)

        if not isinstance(dns_environment, DnsEnvironment):
            self._check_session(session)
            dns_environment = DnsEnvironment.get_unique_or_default(
                session, dns_environment)

        super(Fqdn, self).__init__(name=name,
                                   dns_domain=dns_domain,
                                   dns_environment=dns_environment,
                                   **kwargs)
    @classmethod
    def name_exists(cls, name):
        """Return True if a vocabulary with this name already exists
        (comparison is case-insensitive)."""
        match = (meta.Session.query(cls)
                 .filter(func.lower(cls.name) == func.lower(name))
                 .first())
        return match is not None


class VocabularyServiceTerm(DomainObject):
    """A single term drawn from an external vocabulary service, used to
    populate and constrain a metadata schema field."""

    def __init__(self, vocabulary_service_id=None, label=None, uri=None):
        self.vocabulary_service_id = vocabulary_service_id
        self.label = label
        self.uri = uri

    @classmethod
    def get(cls, reference):
        """Look up a term by its id; returns None when no row matches."""
        return meta.Session.query(cls).filter(cls.id == reference).first()


# VocabularyService -> its terms, ordered by label.  The lambdas defer
# evaluation of VocabularyServiceTerm, presumably so the mapper does not
# depend on class definition order — verify against the original module.
meta.mapper(VocabularyService, vocabulary_service_table, properties={
    'terms': relation(lambda: VocabularyServiceTerm, order_by=lambda: VocabularyServiceTerm.label)
})
meta.mapper(VocabularyServiceTerm, vocabulary_service_term_table)
Esempio n. 60
0
        return cls.extra_columns

        ## Mappers


# Resource -> ResourceGroup: the 'resources_all' backref keeps resources
# ordered by their 'position' column (ordering_list maintains the index),
# and deleting a resource group cascades to its resources.  Changes are
# recorded by the Revisioner into resource_revision_table.
meta.mapper(
    Resource,
    resource_table,
    properties={
        'resource_group':
        orm.relation(
            ResourceGroup,
            # all resources including deleted
            # formally package_resources_all
            backref=orm.backref(
                'resources_all',
                collection_class=ordering_list('position'),
                cascade='all, delete',
                order_by=resource_table.c.position,
            ),
        )
    },
    order_by=[resource_table.c.resource_group_id],
    extension=[
        vdm.sqlalchemy.Revisioner(resource_revision_table),
        extension.PluginMapperExtension(),
    ],
)

meta.mapper(
    ResourceGroup,