Exemplo n.º 1
0
from app import db
from sqlalchemy import PrimaryKeyConstraint

# Table to manage many to many relationship between device and sims
devices_sims = db.Table('devices_sims',
                        # FK to the owning device
                        db.Column('device_id', db.String(50), db.ForeignKey('devices.device_id')),
                        # FK to the SIM card's serial number
                        db.Column('sim_id', db.String(50), db.ForeignKey('sims.serial_number')),
                        # composite PK prevents duplicate device/SIM pairings
                        PrimaryKeyConstraint('device_id', 'sim_id', name='device_sim_id'),
                        )
class OFXTransaction(Base, ModelAsDict):
    """
    Model for a single OFX transaction, uniquely identified by the
    (account_id, fitid) composite primary key.
    """

    __tablename__ = 'ofx_trans'
    __table_args__ = (PrimaryKeyConstraint('account_id', 'fitid'), {
        'mysql_engine': 'InnoDB'
    })

    #: Account ID this transaction is associated with
    account_id = Column(Integer, ForeignKey('accounts.id'), nullable=False)

    #: Account this transaction is associated with
    account = relationship('Account', uselist=False)

    #: OFXStatement ID this transaction was last seen in
    statement_id = Column(Integer,
                          ForeignKey('ofx_statements.id'),
                          nullable=False)

    #: OFXStatement this transaction was last seen in
    statement = relationship("OFXStatement", backref="ofx_trans")

    #: OFX - FITID
    fitid = Column(String(255))

    #: OFX - Transaction Type
    trans_type = Column(String(50))

    #: OFX - Date Posted
    date_posted = Column(UtcDateTime)

    #: OFX - Amount
    amount = Column(Numeric(precision=10, scale=4))  # Amount format from OFX

    #: OFX - Name
    name = Column(String(255), index=True)

    #: OFX - Memo
    memo = Column(String(255), index=True)

    #: OFX - SIC
    sic = Column(String(255))

    #: OFX - MCC
    mcc = Column(String(255))

    #: OFX - Checknum
    checknum = Column(String(32))

    # app-specific fields

    #: Description
    description = Column(String(254), index=True)

    #: Notes
    notes = Column(Text)

    #: Account's ``re_payment`` matched
    is_payment = Column(Boolean, default=False)

    #: Account's ``re_late_fee`` matched
    is_late_fee = Column(Boolean, default=False)

    #: Account's ``re_interest_charge`` matched
    is_interest_charge = Column(Boolean, default=False)

    #: Account's ``re_fee`` matched
    is_other_fee = Column(Boolean, default=False)

    #: Account's ``re_interest_paid`` matched
    is_interest_payment = Column(Boolean, default=False)

    #: The reconcile_id for the OFX Transaction
    reconcile_id = Column(Integer, ForeignKey('txn_reconciles.id'))

    def __repr__(self):
        """Return a concise representation keyed by the composite PK."""
        return "<OFXTransaction(account_id='%s', fitid='%s')>" % (
            self.account_id, self.fitid)

    @staticmethod
    def params_from_ofxparser_transaction(t, acct_id, stmt, cat_memo=False):
        """
        Given an ofxparser.ofxparser.Transaction object, generate and return
        a dict of kwargs to create a new OFXTransaction.

        :param t: ofxparser transaction
        :type t: ``ofxparser.ofxparser.Transaction``
        :param acct_id: OFXAccount ID
        :type acct_id: int
        :param stmt: OFXStatement this transaction was on
        :type stmt: biweeklybudget.models.ofx_statement.OFXStatement
        :param cat_memo: whether or not to concatenate OFX Memo to Name
        :type cat_memo: bool
        :return: dict of kwargs to create an OFXTransaction
        :rtype: dict
        :raises RuntimeError: if the transaction has no FITID
        """
        if t.id is None:
            # BUG FIX: the message must be %-formatted here; the original
            # passed ``vars(t)`` as a second (logging-style) argument, so
            # the exception message was never formatted.
            raise RuntimeError('Transaction has no ID: %s' % vars(t))
        kwargs = {
            'account_id': acct_id,
            'statement': stmt,
            'memo': t.memo,
            'name': t.payee,
            'amount': t.amount,
            'trans_type': t.type,
            # Note that as of 0.16, OfxParser returns tz-naive UTC datetimes
            'date_posted': t.date.replace(tzinfo=UTC),
            'fitid': t.id,
            'sic': t.sic,
            'mcc': t.mcc
        }
        if cat_memo:
            del kwargs['memo']
            # guard against a None memo so concatenation cannot raise
            kwargs['name'] = t.payee + (t.memo or '')
        for x in ['mcc', 'sic', 'checknum']:
            # only set optional OFX fields that are present and non-empty
            if not hasattr(t, x):
                continue
            val = getattr(t, x)
            if val is not None and val != '':
                kwargs[x] = val
        return kwargs

    @property
    def account_amount(self):
        """
        Return the amount of the transaction, appropriately negated if the
        :py:class:`~.Account` for this transaction has
        :py:attr:`~.Account.negate_ofx_amounts` True.

        :return: amount, negated as appropriate
        :rtype: decimal.Decimal
        """
        if not self.account.negate_ofx_amounts:
            return self.amount
        # construct the Decimal from an int (not the float -1.0) so no
        # float value is involved in the negation
        return self.amount * Decimal(-1)

    @staticmethod
    def unreconciled(db):
        """
        Return a query to match all unreconciled OFXTransactions.

        :param db: active database session to use for queries
        :type db: sqlalchemy.orm.session.Session
        :return: query to match all unreconciled OFXTransactions
        :rtype: sqlalchemy.orm.query.Query
        """
        # transactions posted before RECONCILE_BEGIN_DATE are ignored
        cutoff_date = datetime(RECONCILE_BEGIN_DATE.year,
                               RECONCILE_BEGIN_DATE.month,
                               RECONCILE_BEGIN_DATE.day,
                               0,
                               0,
                               0,
                               tzinfo=UTC)
        # NOTE(review): ``reconcile`` is presumably a backref from the
        # TxnReconcile model (via reconcile_id) — confirm against that model.
        return db.query(OFXTransaction).filter(
            OFXTransaction.reconcile.__eq__(null()),
            OFXTransaction.date_posted.__ge__(cutoff_date),
            OFXTransaction.account.has(reconcile_trans=True))

    @property
    def first_statement_by_date(self):
        """
        Return the first OFXStatement on or after `self.date_posted`.

        :return: first OFXStatement on or after `self.date_posted`
        :rtype: biweeklybudget.models.ofx_statement.OFXStatement
        """
        sess = inspect(self).session
        res = sess.query(OFXStatement).filter(
            OFXStatement.account.__eq__(self.account),
            OFXStatement.as_of.__ge__(self.date_posted)).order_by(
                OFXStatement.as_of.asc()).first()
        logger.debug('First statement for %s: %s', self, res)
        return res
Exemplo n.º 3
0
# Secondary indexes on the (elsewhere-defined) twitterTweets table to speed
# lookups of tweets by creation time and by storage time.
Index("created_at_idx", twitterTweets.c.created_at)
Index("stored_at_idx", twitterTweets.c.stored_at)

# Twitter metric data samples from twitter_direct_agent
# NOTE: some messages may match multiple metrics
twitterTweetSamples = Table(
    "twitter_tweet_samples",
    metadata,

    # Sequence number; we save the sequence number of the most recently dispatched
    # non-metric data item in the "emitted_non_metric_tracker" table
    Column("seq",
           mysql.BIGINT(unsigned=True),
           autoincrement=True,
           nullable=False),
    PrimaryKeyConstraint("seq", name="twitter_tweet_samples_pk"),

    # Metric name
    Column("metric", mysql.VARCHAR(length=METRIC_NAME_MAX_LEN),
           nullable=False),

    # UID of message in twitter_tweets table
    Column("msg_uid",
           mysql.VARCHAR(length=_MAX_TWEET_MSG_ID_LEN),
           ForeignKey(twitterTweets.c.uid,
                      name="twitter_tweet_samples_to_twitter_fk",
                      onupdate="CASCADE",
                      ondelete="CASCADE"),
           nullable=False,
           server_default=""),
Exemplo n.º 4
0
class Product(BASE):
    """ORM model for the ``product`` table."""

    __tablename__ = 'product'

    product_id = Column('product_id',
                        SMALLINT(unsigned=True),
                        nullable=False,
                        primary_key=True,
                        autoincrement=True)
    name = Column('name', String(45), nullable=False)
    description = Column('description', String(45), nullable=False)
    supplier_id = Column('supplier_id',
                         SMALLINT(unsigned=True),
                         nullable=False)
    category_id = Column('category_id',
                         SMALLINT(unsigned=True),
                         nullable=False)
    cost = Column('cost', Float, nullable=False)
    reorder_level = Column('reorder_level',
                           SMALLINT(unsigned=True),
                           nullable=False)
    weight_unit_of_measure = Column('weight_unit_of_measure',
                                    String(3),
                                    nullable=False)
    weight = Column('weight', Float, nullable=False)
    last_update = Column('last_update', TIMESTAMP, nullable=False)

    # How to handle associated films
    #
    # Since the secondary table is also explicitly mapped elsewhere as Film_Actor
    # the relationship should have the viewonly flag so that we can save actors
    # independently of films

    # Commented out so flask will work for now. These would need to be created in the future for the real project
    # ratings = relationship('customer', secondary='customer_rating', viewonly=True)
    # orders = relationship('customer', secondary='customer_order', viewonly=True)
    category = relationship('Category', backref=backref('product'))

    __table_args__ = (
        PrimaryKeyConstraint('product_id', name='PRIMARY'),
        ForeignKeyConstraint(['supplier_id'], ['supplier.supplier_id']),
        ForeignKeyConstraint(['category_id'], ['category.category_id']),
    )

    # The constructor with only the fields we care about from Product table.
    # product_id is auto-incremented and last_update is stamped here.
    def __init__(self, name, description, supplier_id, category_id, cost,
                 reorder_level, weight_unit_of_measure, weight):
        self.name = name
        self.description = description
        self.supplier_id = supplier_id
        self.category_id = category_id
        self.cost = cost
        self.reorder_level = reorder_level
        self.weight_unit_of_measure = weight_unit_of_measure
        self.weight = weight
        self.last_update = datetime.today()

    # A __repr__ method defines how the object should be represented (like toString() in Java)
    # It typically is the constructor call required to re-create the instance.
    # BUG FIX: the original printed ``description`` twice and never printed
    # ``name``; the second line now shows the product name.
    def __repr__(self):
        return "\nProduct(product_id = {self.product_id}, " \
               "\n\tname = {self.name}," \
               "\n\tdescription = {self.description}," \
               "\n\tsupplier_id = {self.supplier_id}," \
               "\n\tcategory_id = {self.category_id}," \
               "\n\tcost = {self.cost}," \
               "\n\treorder_level = {self.reorder_level}," \
               "\n\tweight_unit_of_measure = {self.weight_unit_of_measure}," \
               "\n\tweight = {self.weight}," \
               "\n\tlast_update = {self.last_update})".format(self=self)
Exemplo n.º 5
0
from sqlalchemy.orm import relationship
from sqlalchemy_utils import TSVectorType

from dispatch.database.core import Base
from dispatch.incident_priority.models import IncidentPriorityCreate, IncidentPriorityRead
from dispatch.incident_type.models import IncidentTypeCreate, IncidentTypeRead
from dispatch.term.models import TermCreate
from dispatch.models import ContactBase, ContactMixin, DispatchBase, TermReadNested

# Association table: many-to-many between team contacts and incident priorities.
assoc_team_contact_incident_priorities = Table(
    "team_contact_incident_priority",
    Base.metadata,
    Column("incident_priority_id", Integer,
           ForeignKey("incident_priority.id")),
    Column("team_contact_id", Integer, ForeignKey("team_contact.id")),
    # composite PK prevents duplicate pairings
    PrimaryKeyConstraint("incident_priority_id", "team_contact_id"),
)

# Association table: many-to-many between team contacts and incident types.
assoc_team_contact_incident_types = Table(
    "team_contact_incident_type",
    Base.metadata,
    Column("incident_type_id", Integer, ForeignKey("incident_type.id")),
    Column("team_contact_id", Integer, ForeignKey("team_contact.id")),
    # composite PK prevents duplicate pairings
    PrimaryKeyConstraint("incident_type_id", "team_contact_id"),
)

assoc_team_contact_incidents = Table(
    "team_contact_incident",
    Base.metadata,
    Column("incident_id", Integer, ForeignKey("incident.id")),
    Column("team_contact_id", Integer, ForeignKey("team_contact.id")),
Exemplo n.º 6
0
from sqlalchemy.sql.elements import BooleanClauseList, ClauseElement

from cloudbot import hook
from cloudbot.client import Client
from cloudbot.clients.irc import IrcClient
from cloudbot.util import database
from cloudbot.util.irc import parse_mode_string
from plugins.core.server_info import get_channel_modes, get_server_info

# Stored channel keys, one row per (connection, channel) pair.
table = Table(
    "channel_keys",
    database.metadata,
    Column("conn", String),
    Column("chan", String),
    Column("key", String),
    PrimaryKeyConstraint("conn", "chan"),
)


@hook.connect(clients=["irc"])
def load_keys(conn: IrcClient, db) -> None:
    """
    Refresh the client's channel-key cache from the database.
    """
    # only rows belonging to this connection (names stored lowercase)
    key_query = select([table.c.chan, table.c.key],
                       table.c.conn == conn.name.lower())
    conn.clear_channel_keys()
    for record in db.execute(key_query):
        conn.set_channel_key(record["chan"], record["key"])

Exemplo n.º 7
0
class AccountUpdateOperation(Base):
    """

    Steem Blockchain Example
    ======================
    {
      "json_metadata": "",
      "account": "theoretical",
      "memo_key": "STM6FATHLohxTN8RWWkU9ZZwVywXo6MEDjHHui1jEBYkG2tTdvMYo",
      "posting": {
        "key_auths": [
          [
            "STM6FATHLohxTN8RWWkU9ZZwVywXo6MEDjHHui1jEBYkG2tTdvMYo",
            1
          ],
          [
            "STM76EQNV2RTA6yF9TnBvGSV71mW7eW36MM7XQp24JxdoArTfKA76",
            1
          ]
        ],
        "account_auths": [],
        "weight_threshold": 1
      }
    }

    """

    __tablename__ = 'sbds_op_account_updates'
    __table_args__ = (
        PrimaryKeyConstraint('block_num', 'transaction_num', 'operation_num'),
        ForeignKeyConstraint(['account'], ['sbds_meta_accounts.name'],
                             deferrable=True,
                             initially='DEFERRED',
                             use_alter=True),
    )

    # Composite PK: position of this operation within the blockchain
    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40), nullable=False)
    timestamp = Column(DateTime(timezone=False))
    account = Column(String(16))  # steem_type:account_name_type
    owner = Column(JSONB)  # steem_type:optional< authority>
    active = Column(JSONB)  # steem_type:optional< authority>
    posting = Column(JSONB)  # steem_type:optional< authority>
    memo_key = Column(String(60), nullable=False)  # steem_type:public_key_type
    json_metadata = Column(JSONB)  # name:json_metadata
    operation_type = Column(operation_types_enum,
                            nullable=False,
                            index=True,
                            default='account_update')

    # Per-field extractors applied when converting a raw op dict to row values
    _fields = dict(
        owner=lambda x: json_string_field(x.get('owner')
                                          ),  # steem_type:optional< authority>
        active=lambda x: json_string_field(x.get('active')),  # name:active
        posting=lambda x: json_string_field(x.get('posting')),  # name:posting
        json_metadata=lambda x: json_string_field(x.get('json_metadata')
                                                  ),  # name:json_metadata
    )

    # Columns that hold account names (used for meta-account bookkeeping)
    _account_fields = frozenset([
        'account',
    ])

    def dump(self):
        """Return this row's attribute dict minus the SQLAlchemy state entry."""
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        """Return column values; decode the json_metadata string when requested."""
        data_dict = self.dump()
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = sbds.sbds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        """Serialize this operation to a JSON string."""
        data_dict = self.to_dict()
        return sbds.sbds_json.dumps(data_dict)

    def __repr__(self):
        """Debug representation showing the composite PK and available keys."""
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__, self.block_num, self.transaction_num,
            self.operation_num, tuple(self.dump().keys()))

    def __str__(self):
        """Return the dumped attribute dict rendered as a string."""
        return str(self.dump())
Exemplo n.º 8
0
	def __init__(self, engine, db_prefix='', install=False, connection=None):
		"""
		Bind sticker tables to the given engine.

		:param engine: SQLAlchemy engine to bind to
		:param db_prefix: optional prefix prepended to every table name
		:param install: when True, create missing tables on the engine
		:param connection: existing connection to reuse; a new one is
			opened from the engine when not supplied
		"""
		self.engine = engine
		self.engine_session = sessionmaker(bind=self.engine)()

		self.db_prefix = db_prefix

		# maximum lengths for the string columns below
		self.name_length = 16
		self.display_length = 32
		self.category_length = 16

		metadata = MetaData()

		# 16 zero bytes, matching the Binary(16) id/user/sticker columns.
		# BUG FIX: the original ``0b0 * 16`` evaluates to the integer 0
		# (0 * 16), not a 16-byte value as the name implies.
		default_bytes = b'\x00' * 16

		# MySQL needs its own BINARY type; other dialects use the generic one
		if 'mysql' == self.engine_session.bind.dialect.name:
			Binary = mysql_binary
		else:
			Binary = sqla_binary

		# stickers tables
		self.stickers = Table(
			self.db_prefix + 'stickers',
			metadata,
			Column('id', Binary(16), default=default_bytes),
			Column('creation_time', Integer, default=0),
			Column('name', String(self.name_length)),
			Column('display', String(self.display_length)),
			Column('category', String(self.category_length)),
			Column('category_order', Integer, default=0),
			Column('group_bits', Integer, default=0),
			PrimaryKeyConstraint('id'),
		)

		# collected stickers tables
		self.collected_stickers = Table(
			self.db_prefix + 'collected_stickers',
			metadata,
			Column('id', Binary(16), default=default_bytes),
			Column('receive_time', Integer, default=0),
			Column('user_id', Binary(16), default=default_bytes),
			Column('sticker_id', Binary(16), default=default_bytes),
			PrimaryKeyConstraint('id'),
		)

		# placed stickers tables
		self.sticker_placements = Table(
			self.db_prefix + 'sticker_placements',
			metadata,
			Column('id', Binary(16), default=default_bytes),
			Column('placement_time', Integer, default=0),
			Column('subject_id', Binary(16), default=default_bytes),
			Column('user_id', Binary(16), default=default_bytes),
			Column('sticker_id', Binary(16), default=default_bytes),
			Column('position_x', Float, default=0),
			Column('position_y', Float, default=0),
			Column('rotation', Float, default=0),
			Column('scale', Float, default=0),
			PrimaryKeyConstraint('id'),
		)

		if connection:
			self.connection = connection
		else:
			self.connection = self.engine.connect()

		if install:
			for table in [
					self.stickers,
					self.collected_stickers,
					self.sticker_placements,
				]:
				table.create(bind=self.engine, checkfirst=True)
Exemplo n.º 9
0
from sqlalchemy.orm import backref, relationship
from sqlalchemy_utils import TSVectorType

from dispatch.database import Base
from dispatch.incident_priority.models import IncidentPriorityCreate, IncidentPriorityRead
from dispatch.incident_type.models import IncidentTypeCreate, IncidentTypeRead
from dispatch.messaging import INCIDENT_DOCUMENT_DESCRIPTIONS
from dispatch.models import DispatchBase, ResourceMixin, TermNested, TermReadNested, TimeStampMixin

# Association tables for many to many relationships
# Maps documents to incident priorities (many-to-many).
assoc_document_incident_priorities = Table(
    "document_incident_priority",
    Base.metadata,
    Column("incident_priority_id", Integer, ForeignKey("incident_priority.id")),
    Column("document_id", Integer, ForeignKey("document.id")),
    # composite PK prevents duplicate pairings
    PrimaryKeyConstraint("incident_priority_id", "document_id"),
)

# Maps documents to incident types (many-to-many).
assoc_document_incident_types = Table(
    "document_incident_type",
    Base.metadata,
    Column("incident_type_id", Integer, ForeignKey("incident_type.id")),
    Column("document_id", Integer, ForeignKey("document.id")),
    # composite PK prevents duplicate pairings
    PrimaryKeyConstraint("incident_type_id", "document_id"),
)

assoc_document_incidents = Table(
    "document_incident",
    Base.metadata,
    Column("incident_id", Integer, ForeignKey("incident.id")),
    Column("document_id", Integer, ForeignKey("document.id")),
Exemplo n.º 10
0
 def test_deferrable_pk(self):
     """Verify PrimaryKeyConstraint accepts deferrable-related kwargs."""
     # A named function instead of a lambda assignment (PEP 8 E731)
     def factory(**kw):
         return PrimaryKeyConstraint('a', **kw)

     self._test_deferrable(factory)
Exemplo n.º 11
0
class SqlRun(Base):
    """
    DB model for :py:class:`mlflow.entities.Run`. These are recorded in ``runs`` table.
    """
    __tablename__ = 'runs'

    run_uuid = Column(String(32), nullable=False)
    """
    Run UUID: `String` (limit 32 characters). *Primary Key* for ``runs`` table.
    """
    name = Column(String(250))
    """
    Run name: `String` (limit 250 characters).
    """
    source_type = Column(String(20), default="LOCAL")
    """
    Source Type: `String` (limit 20 characters). Can be one of ``NOTEBOOK``, ``JOB``, ``PROJECT``,
                 ``LOCAL`` (default), or ``UNKNOWN``.
    """
    source_name = Column(String(500))
    """
    Name of source recording the run: `String` (limit 500 characters).
    """
    entry_point_name = Column(String(50))
    """
    Entry-point name that launched the run: `String` (limit 50 characters).
    """
    user_id = Column(String(256), nullable=True, default=None)
    """
    User ID: `String` (limit 256 characters). Defaults to ``null``.
    """
    status = Column(String(20), default="SCHEDULED")
    """
    Run Status: `String` (limit 20 characters). Can be one of ``RUNNING``, ``SCHEDULED`` (default),
                ``FINISHED``, ``FAILED``.
    """
    start_time = Column(BigInteger, default=lambda: int(time.time()))
    """
    Run start time: `BigInteger`. Defaults to the current system time at row
    insertion. (BUG FIX: the default is now a callable, evaluated per insert;
    the original ``int(time.time())`` was evaluated once at import time, so
    every run got the module-load timestamp.)
    """
    end_time = Column(BigInteger, nullable=True, default=None)
    """
    Run end time: `BigInteger`.
    """
    source_version = Column(String(50))
    """
    Source version: `String` (limit 50 characters).
    """
    lifecycle_stage = Column(String(20), default="active")
    """
    Lifecycle Stage of run: `String` (limit 32 characters).
                            Can be either ``active`` (default) or ``deleted``.
    """
    artifact_uri = Column(String(200), default=None)
    """
    Default artifact location for this run: `String` (limit 200 characters).
    """
    experiment_id = Column(Integer, ForeignKey('experiments.experiment_id'))
    """
    Experiment ID to which this run belongs to: *Foreign Key* into ``experiment`` table.
    """
    experiment = relationship('SqlExperiment', backref=backref('runs', cascade='all'))
    """
    SQLAlchemy relationship (many:one) with :py:class:`mlflow.store.dbmodels.models.SqlExperiment`.
    """

    __table_args__ = (
        CheckConstraint(source_type.in_(SourceTypes), name='source_type'),
        CheckConstraint(status.in_(RunStatusTypes), name='status'),
        CheckConstraint(lifecycle_stage.in_(["active", "deleted"]),
                        name='lifecycle_stage'),
        PrimaryKeyConstraint('run_uuid', name='run_pk')
    )
Exemplo n.º 12
0
class WitnessUpdateOperation(Base):
    """

    dPay Blockchain Example
    ======================
    {
      "fee": "0.000 BEX",
      "url": "jared/dpayd-docker",
      "props": {
        "bbd_interest_rate": 1000,
        "account_creation_fee": "100.000 BEX",
        "maximum_block_size": 131072
      },
      "owner": "jared",
      "block_signing_key": "DWB8LoQjQqJHvotqBo7HjnqmUbFW9oJ2theyqonzUd9DdJ7YYHsvD"
    }

    """

    __tablename__ = 'dpds_op_witness_updates'
    __table_args__ = (
        PrimaryKeyConstraint('block_num', 'transaction_num', 'operation_num'),
        ForeignKeyConstraint(['owner'], ['dpds_meta_accounts.name'],
                             deferrable=True,
                             initially='DEFERRED',
                             use_alter=True),
    )

    # Composite PK: position of this operation within the blockchain
    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40), nullable=False)
    timestamp = Column(DateTime(timezone=False))
    owner = Column(String(16))  # dpay_type:account_name_type
    url = Column(UnicodeText)  # dpay_type:string -> default
    block_signing_key = Column(String(60),
                               nullable=False)  # dpay_type:public_key_type
    props = Column(JSONB)  # dpay_type:chain_properties
    fee = Column(Numeric(20, 6), nullable=False)  # dpay_type:asset
    fee_symbol = Column(String(5))  # dpay_type:asset
    operation_type = Column(operation_types_enum,
                            nullable=False,
                            index=True,
                            default='witness_update')

    # Per-field extractors applied when converting a raw op dict to row values
    _fields = dict(
        props=lambda x: json_string_field(x.get('props')
                                          ),  # dpay_type:chain_properties
        fee=lambda x: amount_field(x.get('fee'), num_func=float
                                   ),  # dpay_type:asset
        fee_symbol=lambda x: amount_symbol_field(x.get('fee')
                                                 ),  # dpay_type:asset
    )

    # Columns that hold account names (used for meta-account bookkeeping)
    _account_fields = frozenset([
        'owner',
    ])

    def dump(self):
        """Return this row's attribute dict minus the SQLAlchemy state entry."""
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        """Return column values; decode 'json_metadata' when requested."""
        data_dict = self.dump()
        # NOTE(review): this class defines no json_metadata column, so this
        # branch appears to be dead code copied from a sibling model — confirm.
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = dpds.dpds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        """Serialize this operation to a JSON string."""
        data_dict = self.to_dict()
        return dpds.dpds_json.dumps(data_dict)

    def __repr__(self):
        """Debug representation showing the composite PK and available keys."""
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__, self.block_num, self.transaction_num,
            self.operation_num, tuple(self.dump().keys()))

    def __str__(self):
        """Return the dumped attribute dict rendered as a string."""
        return str(self.dump())
Exemplo n.º 13
0
class CustomJsonOperation(Base):
    """

    dPay Blockchain Example
    ======================
    {
      "required_auths": [],
      "id": "follow",
      "json": "{\"follower\":\"jared\",\"following\":\"stan\",\"what\":[\"posts\"]}",
      "required_posting_auths": [
        "jared"
      ]
    }

    """

    __tablename__ = 'dpds_op_custom_jsons'
    __table_args__ = (PrimaryKeyConstraint('block_num', 'transaction_num',
                                           'operation_num'), )

    # Composite PK: position of this operation within the blockchain
    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40), nullable=False)
    timestamp = Column(DateTime(timezone=False))
    required_auths = Column(JSONB)  # dpay_type:flat_set< account_name_type>
    required_posting_auths = Column(
        JSONB)  # dpay_type:flat_set< account_name_type>
    id = Column(UnicodeText)  # dpay_type:string -> default
    json = Column(JSONB)  # name:json
    operation_type = Column(operation_types_enum,
                            nullable=False,
                            index=True,
                            default='custom_json')

    # Per-field extractors applied when converting a raw op dict to row values
    _fields = dict(
        required_auths=lambda x: json_string_field(x.get(
            'required_auths')),  # dpay_type:flat_set< account_name_type>
        required_posting_auths=lambda x: json_string_field(
            x.get('required_posting_auths')
        ),  # dpay_type:flat_set< account_name_type>
        json=lambda x: json_string_field(x.get('json')),  # name:json
    )

    # No columns in this model hold account names
    _account_fields = frozenset([])

    def dump(self):
        """Return this row's attribute dict minus the SQLAlchemy state entry."""
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        """Return column values; decode 'json_metadata' when requested."""
        data_dict = self.dump()
        # NOTE(review): this class defines no json_metadata column, so this
        # branch appears to be dead code copied from a sibling model — confirm.
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = dpds.dpds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        """Serialize this operation to a JSON string."""
        data_dict = self.to_dict()
        return dpds.dpds_json.dumps(data_dict)

    def __repr__(self):
        """Debug representation showing the composite PK and available keys."""
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__, self.block_num, self.transaction_num,
            self.operation_num, tuple(self.dump().keys()))

    def __str__(self):
        """Return the dumped attribute dict rendered as a string."""
        return str(self.dump())
Exemplo n.º 14
0
    DispatchBase,
    ResourceBase,
    ProjectMixin,
    ResourceMixin,
)

from dispatch.search_filter.models import SearchFilterRead
from dispatch.project.models import ProjectRead

# Association tables for many to many relationships
# Maps documents to search filters (many-to-many); association rows are
# removed when either side is deleted (ondelete CASCADE).
assoc_document_filters = Table(
    "assoc_document_filters",
    Base.metadata,
    Column("document_id", Integer, ForeignKey("document.id", ondelete="CASCADE")),
    Column("search_filter_id", Integer, ForeignKey("search_filter.id", ondelete="CASCADE")),
    PrimaryKeyConstraint("document_id", "search_filter_id"),
)


class Document(ProjectMixin, ResourceMixin, Base):
    # Surrogate primary key
    id = Column(Integer, primary_key=True)
    name = Column(String)
    description = Column(String)
    # Optional links back to the owning report/incident; rows go away with
    # the parent (ondelete CASCADE). use_alter defers the incident FK to
    # avoid a circular create-order dependency.
    report_id = Column(Integer, ForeignKey("report.id", ondelete="CASCADE"))
    incident_id = Column(Integer, ForeignKey("incident.id", ondelete="CASCADE", use_alter=True))

    # Search filters attached to this document (m2m via assoc_document_filters)
    filters = relationship("SearchFilter", secondary=assoc_document_filters, backref="documents")

    # Evergreen: periodic reminders to keep the document up to date
    evergreen = Column(Boolean)
    evergreen_owner = Column(String)
    evergreen_reminder_interval = Column(Integer, default=90)  # number of days
Exemplo n.º 15
0
 def __table_args__(self):
     # Return a one-element tuple holding the PK constraint; explicit
     # parentheses replace the original bare trailing comma.
     return (PrimaryKeyConstraint(self.id),)
Exemplo n.º 16
0
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import Table
from sqlalchemy import ForeignKey
from sqlalchemy import PrimaryKeyConstraint

from src.database_models.base import Base

# Association table linking profiles to the groups they are responsible for;
# rows are removed when either side is deleted (ondelete CASCADE).
responsibility = Table(
    'responsibilities', Base.metadata,
    Column("profile_id",
           Integer,
           ForeignKey('profiles.id', ondelete='CASCADE'),
           nullable=False),
    Column("group_id",
           Integer,
           ForeignKey('groups.id', ondelete='CASCADE'),
           nullable=False),
    # composite PK: one row per (profile, group) pair
    PrimaryKeyConstraint('profile_id', 'group_id', name='responsibilities_pk'))
Exemplo n.º 17
0
from time import time
from collections import defaultdict
from sqlalchemy import Table, Column, String, Integer, PrimaryKeyConstraint, desc
from sqlalchemy.sql import select
from cloudbot import hook
from cloudbot.event import EventType
from cloudbot.util import database

# ASCII-art duck pieces used to render a duck in channel.
duck_tail = "・゜゜・。。・゜゜"
# BUG FIX: "\_" is an invalid escape sequence (DeprecationWarning, and a
# SyntaxWarning from Python 3.12). Raw strings / doubled backslashes keep
# the exact same string values without relying on the lenient fallback.
duck = [r"\_o< ", r"\_O< ", r"\_0< ", "\\_\u00f6< ", "\\_\u00f8< ", "\\_\u00f3< "]
duck_noise = ["QUACK!", "FLAP FLAP!", "quack!"]

# Per-channel duck hunt scores, one row per (name, chan, network).
table = Table('duck_hunt', database.metadata, Column('network', String),
              Column('name', String), Column('shot', Integer),
              Column('befriend', Integer), Column('chan', String),
              PrimaryKeyConstraint('name', 'chan', 'network'))

# Channels that opted out of the hunt, one row per (chan, network).
optout = Table('nohunt', database.metadata, Column('network', String),
               Column('chan', String), PrimaryKeyConstraint('chan', 'network'))
"""
game_status structure 
{ 
    'network':{
        '#chan1':{
            'duck_status':0|1|2, 
            'next_duck_time':'integer', 
            'game_on':0|1,
            'no_duck_kick': 0|1,
            'duck_time': 'float', 
            'shoot_time': 'float',
            'messages': integer,
Exemplo n.º 18
0
class Monitoring(MonitoringDb.Base, idbutils.DbObject):
    """A table containing monitoring data."""

    __tablename__ = 'monitoring'

    db = MonitoringDb
    table_version = 2

    timestamp = Column(DateTime, nullable=False)
    activity_type = Column(Enum(fitfile.field_enums.ActivityType))
    intensity = Column(Integer)
    duration = Column(Time, nullable=False, default=datetime.time.min)
    distance = Column(Float)
    cum_active_time = Column(Time, nullable=False, default=datetime.time.min)
    active_calories = Column(Integer)
    steps = Column(Integer)
    strokes = Column(Integer)
    cycles = Column(Float)

    # Composite primary key: one row per (timestamp, activity_type) pair.
    __table_args__ = (PrimaryKeyConstraint("timestamp", "activity_type"), )

    @classmethod
    def s_get_from_dict(cls, session, values_dict):
        """Return the single row matching the timestamp and activity_type in
        *values_dict*, or None when no such row exists."""
        query = session.query(cls).filter(
            cls.timestamp == values_dict['timestamp'],
            cls.activity_type == values_dict['activity_type'])
        return query.one_or_none()

    @classmethod
    def get_active_calories(cls, session, activity_type, start_ts, end_ts):
        """Return the total calories burned during activity during the indicated period."""
        calories = cls.s_get_col_avg_of_max_per_day_for_value(
            session, cls.active_calories, cls.activity_type, activity_type,
            start_ts, end_ts)
        if calories is None:
            return 0
        return calories

    @classmethod
    def get_stats(cls, session, func, start_ts, end_ts):
        """Return a dict of stats for table entries within the time span."""
        steps_stat = func(session, cls.steps, start_ts, end_ts)
        # Sum active calories across the three tracked activity types.
        running = cls.get_active_calories(
            session, fitfile.field_enums.ActivityType.running, start_ts,
            end_ts)
        cycling = cls.get_active_calories(
            session, fitfile.field_enums.ActivityType.cycling, start_ts,
            end_ts)
        walking = cls.get_active_calories(
            session, fitfile.field_enums.ActivityType.walking, start_ts,
            end_ts)
        return {
            'steps': steps_stat,
            'calories_active_avg': running + cycling + walking,
        }

    @classmethod
    def get_daily_stats(cls, session, day_ts):
        """Return a dict of stats for table entries for the given day."""
        day_stats = cls.get_stats(session, cls.s_get_col_max, day_ts,
                                  day_ts + datetime.timedelta(1))
        day_stats['day'] = day_ts
        return day_stats

    @classmethod
    def get_weekly_stats(cls, session, first_day_ts):
        """Return a dict of stats for table entries for the given week."""
        week_stats = cls.get_stats(session, cls.s_get_col_sum_of_max_per_day,
                                   first_day_ts,
                                   first_day_ts + datetime.timedelta(7))
        week_stats['first_day'] = first_day_ts
        return week_stats

    @classmethod
    def get_monthly_stats(cls, session, first_day_ts, last_day_ts):
        """Return a dict of stats for table entries for the given month."""
        month_stats = cls.get_stats(session, cls.s_get_col_sum_of_max_per_day,
                                    first_day_ts, last_day_ts)
        month_stats['first_day'] = first_day_ts
        return month_stats
Exemplo n.º 19
0
def make_dataset_table(table_name: str, create: bool = False) -> tuple:
    """Create or reflect the customized dataset table for *table_name*.

    The physical table is named ``dataset_<table_name>`` and lives in the
    ``Config.SAMPLEDB_SCHEMA`` schema.

    Args:
        table_name - Suffix of the dataset table name.
        create - When True, create the table (as a PostgreSQL typed table OF
            ``dataset_type``), its id sequence, column constraints/defaults,
            primary key, foreign key and indexes -- provided it does not
            already exist.

    Returns:
        Tuple ``(table, sequence_name)`` -- the SQLAlchemy ``Table`` and the
        qualified name of its id sequence.

    Raises:
        RuntimeError - when ``create`` is True and the table already exists.
    """
    # Qualified name of the sequence feeding the table's ``id`` column.
    s_name = f"{Config.SAMPLEDB_SCHEMA}.dataset_{table_name}_id_seq"

    if create:
        if not sqlalchemy.inspect(
                db.engine).has_table(table_name=f'dataset_{table_name}',
                                     schema=Config.SAMPLEDB_SCHEMA):
            # Typed table: the column layout comes from the composite type
            # ``dataset_type`` (PostgreSQL ``CREATE TABLE ... OF``).
            db.engine.execute(
                f"CREATE TABLE {Config.SAMPLEDB_SCHEMA}.dataset_{table_name} OF dataset_type"
            )
            db.engine.execute(f"CREATE SEQUENCE {s_name}")

            # Reflect the freshly created table into SQLAlchemy metadata.
            klass = Table(f'dataset_{table_name}',
                          metadata,
                          autoload=True,
                          autoload_with=db.engine,
                          extend_existing=True)

            # Add index, primary key and foreign key
            db.engine.execute(
                f"ALTER TABLE {Config.SAMPLEDB_SCHEMA}.{klass.name} ALTER COLUMN {klass.c.class_id.name} SET NOT NULL"
            )
            db.engine.execute(
                f"ALTER TABLE {Config.SAMPLEDB_SCHEMA}.{klass.name} ALTER COLUMN {klass.c.start_date.name} SET NOT NULL"
            )
            db.engine.execute(
                f"ALTER TABLE {Config.SAMPLEDB_SCHEMA}.{klass.name} ALTER COLUMN {klass.c.end_date.name} SET NOT NULL"
            )
            db.engine.execute(
                f"ALTER TABLE {Config.SAMPLEDB_SCHEMA}.{klass.name} ALTER COLUMN {klass.c.created_at.name} SET DEFAULT CURRENT_TIMESTAMP"
            )
            db.engine.execute(
                f"ALTER TABLE {Config.SAMPLEDB_SCHEMA}.{klass.name} ALTER COLUMN {klass.c.updated_at.name} SET DEFAULT CURRENT_TIMESTAMP"
            )

            # Wire the id column to the sequence and tie the sequence's
            # lifetime to the column (dropped together with it).
            db.engine.execute(
                f"ALTER TABLE {Config.SAMPLEDB_SCHEMA}.{klass.name} ALTER {klass.c.id.name} SET DEFAULT NEXTVAL('{s_name}');"
            )

            db.engine.execute(
                f"ALTER SEQUENCE {s_name} owned by {Config.SAMPLEDB_SCHEMA}.{klass.c.id};"
            )

            db.engine.execute(AddConstraint(PrimaryKeyConstraint(klass.c.id)))
            db.engine.execute(CreateIndex(Index(None, klass.c.user_id)))
            db.engine.execute(CreateIndex(Index(None, klass.c.class_id)))
            # GiST index to support spatial queries on the location geometry.
            db.engine.execute(
                CreateIndex(
                    Index(None, klass.c.location, postgresql_using='gist')))
            db.engine.execute(CreateIndex(Index(None, klass.c.start_date)))
            db.engine.execute(CreateIndex(Index(None, klass.c.end_date)))
            db.engine.execute(CreateIndex(Index(None,
                                                klass.c.collection_date)))
            db.engine.execute(CreateIndex(Index(None, klass.c.created_at)))
            db.engine.execute(CreateIndex(Index(None, klass.c.updated_at)))
            # NOTE(review): this composite index is only attached to the
            # metadata, never emitted via CreateIndex here -- confirm whether
            # it is intentionally created elsewhere (e.g. metadata.create_all).
            Index(f'idx_{klass.name}_start_end_date', klass.c.start_date,
                  klass.c.end_date)

            db.engine.execute(
                AddConstraint(
                    ForeignKeyConstraint(
                        name=
                        f"dataset_{table_name}_{klass.c.class_id.name}_fkey",
                        columns=[klass.c.class_id],
                        refcolumns=[LucClass.id],
                        onupdate="CASCADE",
                        ondelete="CASCADE")))
        else:
            raise RuntimeError(f'Table {table_name} already exists')
    else:
        # Table already exists (or creation not requested): just reflect it.
        klass = Table(f'dataset_{table_name}',
                      metadata,
                      autoload=True,
                      autoload_with=db.engine)

    return klass, s_name
Exemplo n.º 20
0
class UTDSTranscriptGeneConflict(BaseDS):

    # =============================================================================
    # /!\ This table is built during the insertion of data. For each unique
    #     (transcript_id, data_source) pair for which there is a conflict
    #     about the Gene entry it is related to, it stores the list of gene
    #     IDs. This handles the cases where conflicts about the gene are
    #     found several times for a particular DSTranscript entry.
    # =============================================================================

    __tablename__ = 'UTDSTranscriptGeneConflict'

    transcript_id = Column(
        String(
            255,
            collation=SQLCollationManager.get_instance().get_db_collation()))
    data_source = Column(
        String(
            50,
            collation=SQLCollationManager.get_instance().get_db_collation()))
    gene_ids = Column(
        Text(collation=SQLCollationManager.get_instance().get_db_collation()))

    # Composite primary key on the (transcript_id, data_source) pair
    __table_args__ = (PrimaryKeyConstraint('transcript_id', 'data_source'), )

    def __eq__(self, other):
        """Test equality between two instances of this class.

        Two instances are considered equal when they are of the same class
        and all of their primary-key attributes are equal.
        """
        return (type(other) == type(self)
                and self.transcript_id == other.transcript_id
                and self.data_source == other.data_source)

    def __hash__(self):
        """Return the hash value of this object.

        The hash is computed from the primary-key attributes, consistently
        with ``__eq__``.
        """
        return hash((self.transcript_id, self.data_source))
Exemplo n.º 21
0
from dispatch.report.enums import ReportTypes
from dispatch.report.models import ReportRead
from dispatch.storage.models import StorageRead
from dispatch.tag.models import TagRead
from dispatch.ticket.models import TicketRead
from dispatch.workflow.models import WorkflowInstanceRead

from .enums import IncidentStatus

# Many-to-many association: incidents <-> terms. Association rows are
# removed automatically when either endpoint row is deleted.
assoc_incident_terms = Table(
    "assoc_incident_terms",
    Base.metadata,
    Column("incident_id", Integer,
           ForeignKey("incident.id", ondelete="CASCADE")),
    Column("term_id", Integer, ForeignKey("term.id", ondelete="CASCADE")),
    PrimaryKeyConstraint("incident_id", "term_id"),
)

# Many-to-many association: incidents <-> tags, with the same CASCADE
# behavior as above.
assoc_incident_tags = Table(
    "assoc_incident_tags",
    Base.metadata,
    Column("incident_id", Integer,
           ForeignKey("incident.id", ondelete="CASCADE")),
    Column("tag_id", Integer, ForeignKey("tag.id", ondelete="CASCADE")),
    PrimaryKeyConstraint("incident_id", "tag_id"),
)


class Incident(Base, TimeStampMixin):
    id = Column(Integer, primary_key=True)
    name = Column(String)
Exemplo n.º 22
0
                         db.ForeignKey("sound.id"),
                         nullable=False)


# Association table between Sound and SoundTag.
# NOTE(review): each column is flagged ``primary_key=True`` AND an explicit
# PrimaryKeyConstraint is declared; either alone defines the same composite
# key, so the double specification is redundant (though harmless).
sound_tags = db.Table(
    "sound_tags",
    db.Column("tag_id", db.Integer, db.ForeignKey("sound_tag.id"),
              primary_key=True),
    db.Column("sound_id", db.Integer, db.ForeignKey("sound.id"),
              primary_key=True),
    PrimaryKeyConstraint("tag_id", "sound_id"),
)
# Association table between Album and SoundTag (same shape as sound_tags).
album_tags = db.Table(
    "album_tags",
    db.Column("tag_id", db.Integer, db.ForeignKey("sound_tag.id"),
              primary_key=True),
    db.Column("album_id", db.Integer, db.ForeignKey("album.id"),
              primary_key=True),
    PrimaryKeyConstraint("tag_id", "album_id"),
)
Exemplo n.º 23
0
from irclib.util.compare import match_mask
from sqlalchemy import Table, Column, UniqueConstraint, PrimaryKeyConstraint, String, Boolean

from cloudbot import hook
from cloudbot.util import database

# Ignore list: one row per (connection, channel, mask); ``status`` records
# whether the ignore is currently active (defaults to True).
table = Table("ignored", database.metadata,
              Column("connection", String),
              Column("channel", String),
              Column("mask", String),
              Column("status", Boolean, default=True),
              UniqueConstraint("connection", "channel", "mask", "status"),
              PrimaryKeyConstraint("connection", "channel", "mask"))

# In-memory mirror of (connection, channel, mask) rows, rebuilt on start.
ignore_cache = []


@hook.on_start
def load_cache(db):
    """Rebuild the in-memory ignore cache from the database.

    :type db: sqlalchemy.orm.Session
    """
    rebuilt = [(row["connection"], row["channel"], row["mask"])
               for row in db.execute(table.select())]

    # Swap contents in place so other code holding a reference to
    # ignore_cache keeps seeing the refreshed data.
    ignore_cache.clear()
    ignore_cache.extend(rebuilt)

Exemplo n.º 24
0
class ConvertOperation(Base):
    """SQLAlchemy model for a ``convert`` operation on the Steem blockchain.

    Steem Blockchain Example
    ======================
    {
      "amount": "5.000 SBD",
      "requestid": 1467592156,
      "owner": "summon"
    }

    """

    __tablename__ = 'sbds_op_converts'
    # Composite PK: an operation is uniquely identified by its position in
    # the chain (block, transaction, operation). The account FK is deferred
    # (and created via ALTER) so rows can be inserted before the referenced
    # account row exists.
    __table_args__ = (
        PrimaryKeyConstraint('block_num', 'transaction_num', 'operation_num'),
        ForeignKeyConstraint(['owner'], ['sbds_meta_accounts.name'],
            deferrable=True, initially='DEFERRED', use_alter=True),)

    
    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40),nullable=False)
    timestamp = Column(DateTime(timezone=False))
    owner = Column(String(16)) # steem_type:account_name_type
    requestid = Column(Numeric) # steem_type:uint32_t
    amount = Column(Numeric(20,6), nullable=False) # steem_type:asset
    amount_symbol = Column(String(5)) # steem_type:asset
    operation_type = Column(operation_types_enum,nullable=False,index=True,default='convert')


    # Extractors mapping a raw operation dict to column values.
    _fields = dict(
        amount=lambda x: amount_field(x.get('amount'), num_func=float), # steem_type:asset
        amount_symbol=lambda x: amount_symbol_field(x.get('amount')), # steem_type:asset
        
    )

    # Columns that hold account names (used for account bookkeeping).
    _account_fields = frozenset(['owner',])

    def dump(self):
        """Return this row's attribute dict without the SQLAlchemy state."""
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        """Return a plain dict of this row, optionally decoding the
        ``json_metadata`` field from its JSON string form."""
        data_dict = self.dump()
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = sbds.sbds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        """Return this row serialized as a JSON string."""
        data_dict = self.to_dict()
        return sbds.sbds_json.dumps(data_dict)

    def __repr__(self):
        """Concise identity representation (PK position plus loaded keys)."""
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__, self.block_num, self.transaction_num,
            self.operation_num, tuple(self.dump().keys()))

    def __str__(self):
        """Full dump of the row's attributes."""
        return str(self.dump())
Exemplo n.º 25
0
from datetime import datetime

import requests
from sqlalchemy import Table, Column, PrimaryKeyConstraint, String

from cloudbot import hook
from cloudbot.util import timeformat, web, database

# libre.fm exposes a last.fm-compatible API; always request JSON responses.
api_url = "https://libre.fm/2.0/?format=json"

# Some of the libre.fm API calls do not have equivalent last.fm calls.
unsupported_msg = "This feature is not supported in the libre.fm API"

# Maps an IRC nick to its libre.fm account name.
table = Table("librefm", database.metadata,
              Column('nick', String),
              Column('acc', String),
              PrimaryKeyConstraint('nick'))

# In-memory cache of the table contents.
last_cache = []


def api_request(method, **params):
    """Call the given libre.fm API *method* with *params* as query args.

    Returns a ``(response_json, error_message)`` pair where exactly one of
    the two elements is None.
    """
    params.update(method=method)
    resp = requests.get(api_url, params=params)

    if resp.status_code != requests.codes.ok:
        return None, "Failed to fetch info ({})".format(resp.status_code)

    return resp.json(), None


@hook.on_start()
Exemplo n.º 26
0
from datetime import datetime
from data.config import Config
import logging
import os
import sys
import utilities as utils

# Enable automatic value coercion for the sqlalchemy_utils column types
# used below (EncryptedType, etc.) -- presumably required before any model
# is declared; confirm against sqlalchemy_utils docs.
force_auto_coercion()

Base = declarative_base()

# Association table for the many-to-many User <-> MangaVolume relationship.
user_manga_table = Table(
    'user_manga',
    Base.metadata,
    Column('user_id', Integer, ForeignKey('user.id')),
    Column('manga_volume_id', Integer, ForeignKey('manga_volume.id')),
    PrimaryKeyConstraint('user_id', 'manga_volume_id'),
)


class User(Base):
    __tablename__ = 'user'
    id = Column(BigInteger, primary_key=True)
    language_code = Column(String(2))
    email = Column(EncryptedType(String, Config.SECRET_KEY))
    password = Column(EncryptedType(String, Config.SECRET_KEY))
    save_credentials = Column(Boolean(create_constraint=False))
    file_format = Column(Enum(FileFormat), default=FileFormat.pdf)
    cache_expire_date = Column(TIMESTAMP(timezone=False),
                               default=datetime.now())
    now_caching = Column(Boolean(create_constraint=False), default=False)
    cache_built = Column(Boolean(create_constraint=False), default=False)
    login_error = Column(Boolean(create_constraint=False), default=False)
Exemplo n.º 27
0
    Column('source_user_id', BIGINT, index=True, nullable=False),
    Column('tweet_id',
           BIGINT,
           primary_key=True,
           autoincrement=False,
           index=True,
           nullable=False),
    Column('polling_time', DateTime, index=True, nullable=False),
    Column('raw_tweet', JSONB, nullable=False),
    Column('depth', INTEGER, index=True))

# table with followers: one row per (user_id, follower_id) edge
followers = Table(
    'followers', metadata,
    Column('user_id', BIGINT),
    Column('follower_id', BIGINT),
    PrimaryKeyConstraint('user_id', 'follower_id', name='follower_pk'))

# table with friends: one row per (user_id, friend_id) edge
friends = Table(
    'friends', metadata,
    Column('user_id', BIGINT),
    Column('friend_id', BIGINT),
    PrimaryKeyConstraint('user_id', 'friend_id', name='friend_pk'))

# tracking last tweet_id processed for a given user_id when doing BFS traversal
cursor_tweets = Table(
    'cursor_tweets', metadata,
    Column('user_id', BIGINT, primary_key=True, autoincrement=False),
    Column('next_tid', BIGINT))

# tracking last follower_id processed for a given user_id when doing BFS traversal
cursor_followers = Table(
    'cursor_followers', metadata,
Exemplo n.º 28
0
# Duck bodies with assorted heads. ``\_`` is not a valid escape sequence and
# triggers a SyntaxWarning/DeprecationWarning on modern Python when written
# in a plain string literal, so use raw strings (or an escaped backslash
# where a ``\u`` escape must stay live). The resulting values are unchanged.
duck = [
    "\\_\u00f8< ", "\\_\u00d8< ",
    r"\_o< ", r"\_O< ", r"\_0< ",
    "\\_\u00f6< ", "\\_\u00f8< ", "\\_\u00f3< "
]
# Sounds a duck makes when it appears.
duck_noise = ["QUACK!", "FLAP FLAP!", "quack!", "QuAaAaAACk"]

# One score row per hunter per channel per network.
table = Table('duck_hunt', database.metadata,
              Column('network', String),
              Column('name', String),
              Column('shot', Integer),
              Column('befriend', Integer),
              Column('chan', String),
              PrimaryKeyConstraint('name', 'chan', 'network'))

# Channels (per network) that have opted out of the duck hunt.
optout = Table('nohunt', database.metadata,
               Column('network', String),
               Column('chan', String),
               PrimaryKeyConstraint('chan', 'network'))

status_table = Table(
    'duck_status',
    database.metadata,
    Column('network', String),
    Column('chan', String),
Exemplo n.º 29
0
def _createXigniteGlobalnewsSchema(schemaName, metadata):
    """Build the SQLAlchemy Table definition for a xignite global-news table.

    :param schemaName: name for the new table; also used to derive the names
        of its foreign-key and primary-key constraints.
    :param metadata: the sqlalchemy MetaData instance the table is bound to.
    :returns: the newly constructed sqlalchemy Table object.
    """
    schema = Table(
        schemaName,
        metadata,

        # Foreign key reference into xignite_securty.symbol column
        Column("symbol",
               mysql.VARCHAR(length=_FIN_SECURITY_SYMBOL_MAX_LEN,
                             **_ASCII_TEXT_KWARGS),
               ForeignKey(xigniteSecurity.c.symbol,
                          name=schemaName + "_to_security_fk",
                          onupdate="CASCADE",
                          ondelete="CASCADE"),
               nullable=False,
               server_default=""),

        # The title for this headline
        Column("title", mysql.VARCHAR(length=500), nullable=True),

        # The date when this headline was published (or re-published by another
        # source)
        Column("local_pub_date", DATE, nullable=False),

        # The UTC offset for the local_pub_date field
        Column("utc_offset", FLOAT, autoincrement=False, nullable=False),

        # The UTC date/time when this press release was discovered by our agent
        Column("discovered_at", DATETIME, nullable=False),

        # The originating journal/website for this headline. NOTE: the same article
        # URL can originate from multiple sources (e.g., "Clusterstock" and
        # "Business Insider: Finance")
        Column("source",
               mysql.VARCHAR(length=MAX_UTF8_KEY_LENGTH),
               nullable=False),

        # The URL for the headline
        # NOTE: max key length in SQL is 767 bytes
        Column("url",
               mysql.VARCHAR(length=767, **_ASCII_TEXT_KWARGS),
               nullable=False),

        # JSON list that contains URLs of all images associated with this headline
        Column("image_urls",
               mysql.MEDIUMTEXT(convert_unicode=True),
               nullable=True),

        # JSON list that contains all tags associated with this headline, broken
        # down by tag groups; the original is flattened; example:
        #   [{"Companies": ["American Airlines Group Inc.", "S&P Capital IQ"]},
        #    {"Sectors": ["Finance", "Transportation"]},
        #    {"Symbols": ["DAL", "AAL"]}, {"Topics": ["Business_Finance"]}]
        # Source: xignite SecurityHeadline.Tags
        Column("tags", TEXT(convert_unicode=True), nullable=True),

        # The time taken (in seconds) to process the request on xignite servers.
        Column("proc_dur", FLOAT, nullable=False),

        # An abbreviated version(usually 2-3 paragraphs) of the full article; NULL
        # if unknown
        # Source: GetMarketNewsDetails MarketNewsItem.Summary
        Column("summary", mysql.TEXT(convert_unicode=True), nullable=True),

        # The UTC date/time when this news article was (originally) published; NULL
        # if unknown
        # Source: GetMarketNewsDetails MarketNewsItem.Time
        Column("orig_pub_time", DATETIME, nullable=True),

        # The originating journal/website for this headline; NULL if not known
        # Source: GetMarketNewsDetails MarketNewsItem.Source
        Column("orig_source", mysql.TEXT(convert_unicode=True), nullable=True),

        # The time taken (in seconds) to process the GetMarketNewsDetails request on
        # xignite servers.
        # Source: GetMarketNewsDetails MarketNewsItem.Delay
        Column("details_proc_dur", FLOAT, nullable=True),
        # Composite PK: the same headline can be (re)published by multiple
        # sources on multiple dates, so all four columns are needed.
        PrimaryKeyConstraint("symbol",
                             "local_pub_date",
                             "url",
                             "source",
                             name=schemaName + "_pk"),
        Index("discovered_at_idx", "discovered_at", unique=False))

    return schema
Exemplo n.º 30
0
        literal(10).label('Priority'),
    ]).statement,
)
hades_view_ddl.add_view(radius_property, radgroupcheck)

# Per-user RADIUS reply attributes, keyed by user, NAS, port and priority.
radreply = Table(
    'radreply', ModelBase.metadata,
    Column('Priority', Integer),
    Column('UserName', String(64), nullable=False),
    Column('NASIPAddress', String(15), nullable=False),
    Column('NASPortId', String(50), nullable=False),
    Column('Attribute', String(64), nullable=False),
    Column('Op', String(2), nullable=False),
    Column('Value', String(253), nullable=False),
    PrimaryKeyConstraint('UserName', 'NASIPAddress', 'NASPortId', 'Priority'))

# Base table feeding the radgroupreply view; every column participates in
# the primary key, making each (group, attribute, op, value) row unique.
radgroupreply_base = Table(
    'radgroupreply_base', ModelBase.metadata,
    Column('GroupName', String),
    Column('Attribute', String),
    Column('Op', String),
    Column('Value', String),
    PrimaryKeyConstraint('GroupName', 'Attribute', 'Op', 'Value'))

radgroupreply = View(
    name='radgroupreply',
    query=union_all(