Example 1
class Lot(DeclarativeBaseGuid):
    """
    A GnuCash Lot. Each lot is linked to an account. Splits in this account can be associated with a Lot. Whenever
    the balance of the splits reaches 0, the Lot is closed (otherwise it is open).

    Attributes:
        is_closed (int) : 1 if lot is closed, 0 otherwise
        account (:class:`piecash.core.account.Account`): account of the Lot
        splits (:class:`piecash.core.transaction.Split`): splits associated to the Lot
    """
    __tablename__ = 'lots'

    __table_args__ = {}

    # column definitions
    account_guid = Column('account_guid', VARCHAR(length=32),
                          ForeignKey('accounts.guid'))
    is_closed = Column('is_closed', INTEGER(), nullable=False)

    title = pure_slot_property('title')
    notes = pure_slot_property('notes')

    # relation definitions
    account = relation(
        'Account',
        back_populates='lots',
    )
    splits = relation(
        'Split',
        back_populates='lot',
        collection_class=CallableList,
    )

    def __init__(self, title, account, notes="", splits=None, is_closed=0):
        self.title = title
        self.account = account
        self.notes = notes
        if splits:
            self.splits[:] = splits
        self.is_closed = is_closed

    @validates("splits", "account")
    def check_no_change_if_lot_is_close(self, key, value):
        if self.is_closed:
            raise ValueError(
                "Lot is closed and cannot be changed (adding splits or changing account"
            )
        return value

    def object_to_validate(self, change):
        yield self

    def validate(self):
        # check all splits have same account
        for sp in self.splits:
            if sp.account != self.account:
                raise ValueError(
                    "Split {} is not in the same commodity of the lot {}".
                    format(sp, self))

    def __unirepr__(self):
        return u"Lot<'{}' on {}>".format(self.title, self.account.name)
Example 2
class Transaction(DeclarativeBaseGuid):
    """
    A GnuCash Transaction.

    Attributes:
        currency (:class:`piecash.core.commodity.Commodity`): currency of the transaction. This attribute is
            write-once (i.e. one cannot change it after being set)
        description (str): description of the transaction
        enter_date (:class:`datetime.datetime`): datetime at which the transaction is entered
        post_date (:class:`datetime.date`): day on which transaction is posted
        num (str): user provided transaction number
        splits (list of :class:`Split`): list of the splits of the transaction
        scheduled_transaction  (:class:`ScheduledTransaction`): scheduled transaction behind the transaction
        notes (str): notes on the transaction (provided via a slot)
    """
    __tablename__ = 'transactions'

    __table_args__ = {}

    # column definitions
    currency_guid = Column('currency_guid', VARCHAR(length=32), ForeignKey('commodities.guid'), nullable=False)
    num = Column('num', VARCHAR(length=2048), nullable=False)
    _post_date = Column('post_date', _DateAsDateTime(neutral_time=True), index=True)
    post_date = mapped_to_slot_property(_post_date,
                                        slot_name="date-posted",
                                        # slot_transform=lambda x: x.date() if x else None
                                        )
    enter_date = Column('enter_date', _DateTime)
    description = Column('description', VARCHAR(length=2048))
    notes = pure_slot_property('notes')

    scheduled_transaction = pure_slot_property('from-sched-xaction', ignore_invalid_slot=True)

    # relation definitions
    currency = relation('Commodity',
                        back_populates='transactions',
                        )
    splits = relation('Split',
                      back_populates="transaction",
                      cascade='all, delete-orphan',
                      collection_class=CallableList,
                      )

    def __init__(self,
                 currency,
                 description="",
                 notes=None,
                 splits=None,
                 enter_date=None,
                 post_date=None,
                 num="",
                 ):

        assert enter_date is None or isinstance(enter_date, datetime.datetime), "enter_date should be a datetime object"
        assert post_date is None or (
                isinstance(post_date, datetime.date)
                and not isinstance(post_date, datetime.datetime)), "post_date should be a date object"

        self.currency = currency
        self.description = description
        self.enter_date = (enter_date
                           if enter_date
                           else datetime.datetime.now()).replace(microsecond=0)
        self.post_date = (post_date if post_date else datetime.date.today())
        self.num = num
        if notes is not None:
            self.notes = notes
        if splits:
            self.splits = splits

    def __unirepr__(self):
        return u"Transaction<[{}] '{}' on {:%Y-%m-%d}{}>".format(self.currency.mnemonic,
                                                                 self.description,
                                                                 self.post_date,
                                                                 " (from sch tx)" if self.scheduled_transaction else "")

    def object_to_validate(self, change):
        yield self

    def validate(self):
        old = self.get_all_changes()

        if old["STATE_CHANGES"][-1] == "deleted":
            return

        if self.currency.namespace != "CURRENCY":
            raise GncValidationError("You are assigning a non currency commodity to a transaction")

        # check all accounts related to the splits of the transaction are not placeholder(=frozen)
        for sp in self.splits:
            if sp.account.placeholder != 0:
                raise GncValidationError("Account '{}' used in the transaction is a placeholder".format(sp.account))

        # check same currency
        if "currency" in old and old["currency"] is not NEVER_SET:
            raise GncValidationError("You cannot change the currency of a transaction once it has been set")

        # validate the splits
        if hasattr(self, "_recalculate_balance"):
            del self._recalculate_balance
            value_imbalance, quantity_imbalances = self.calculate_imbalances()
            if value_imbalance:
                # raise exception instead of creating an imbalance entry as probably an error
                # (in the gnucash GUI, another decision taken because need of "save unfinished transaction")
                raise GncImbalanceError("The transaction {} is not balanced on its value".format(self))

            if any(quantity_imbalances.values()) and self.book.use_trading_accounts:
                self.normalize_trading_accounts()

        # normalise post_date to 10:59AM
        # if self.post_date:
        #    self.post_date = self.post_date.replace(hour=10, minute=59, second=0, microsecond=0, tzinfo=utc)

    def calculate_imbalances(self):
        """Calculate value and quantity imbalances of a transaction"""
        value_imbalance = Decimal(0)  # hold imbalance on split.value
        quantity_imbalances = defaultdict(Decimal)  # hold imbalance on split.quantity per cdty

        # collect imbalance information
        for sp in self.splits:
            value_imbalance += sp.value
            quantity_imbalances[sp.account.commodity] += sp.quantity

        return value_imbalance, quantity_imbalances

    def normalize_trading_accounts(self):
        # collect imbalance information
        classic_splits = defaultdict(list)
        trading_splits = defaultdict(list)
        trading_target_value = defaultdict(Decimal)
        trading_target_quantity = defaultdict(Decimal)

        for sp in self.splits:
            cdty = sp.account.commodity

            if sp.account.type == "TRADING":
                trading_splits[cdty].append(sp)
            else:
                classic_splits[cdty].append(sp)
            trading_target_value[cdty] += sp.value
            trading_target_quantity[cdty] += sp.quantity

        root = self.book.root_account
        # imbalance in quantities to be settled using trading accounts
        for cdty, v in trading_target_value.items():
            q = trading_target_quantity[cdty]

            # if commodity is balanced, do not do anything
            if (v == q == 0): continue

            # otherwise, look if there is some trading imbalance (ie a split with the trading account already exists!)
            if cdty in trading_splits:
                # and adjust the related split to rebalance
                sp, = trading_splits[cdty]
                sp.value -= v
                sp.quantity -= q
            else:
                # otherwise, we must create the split related to the trading account
                # assume trading account exists
                t_acc = self.book.trading_account(cdty)
                sp = Split(account=t_acc,
                           value=-v,
                           quantity=-q,
                           transaction=self,
                           )
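Because validate() raises GncImbalanceError whenever the split values do not sum to zero, a transaction is normally created with balancing splits. A minimal sketch under assumptions: a piecash book whose commodities include EUR and which has non-placeholder accounts named "Checking" and "Groceries" (the file path and account names are placeholders; Split is the regular piecash class).

import datetime
from decimal import Decimal

import piecash
from piecash import Split

with piecash.open_book("accounts.gnucash", readonly=False) as book:
    eur = book.commodities(mnemonic="EUR")
    checking = book.accounts(name="Checking")
    groceries = book.accounts(name="Groceries")

    tx = Transaction(
        currency=eur,
        description="weekly shopping",
        post_date=datetime.date(2021, 3, 1),
        splits=[
            Split(account=checking, value=Decimal("-45")),
            Split(account=groceries, value=Decimal("45")),
        ],
    )
    book.save()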
Example 3
class Economic(Base):
    # __tablename__ is required by the declarative base; the name below is assumed
    __tablename__ = 'economic'

    # the generic Integer type takes no display-width argument; use
    # sqlalchemy.dialects.mysql.INTEGER(display_width=11) if the MySQL width matters
    id = Column(Integer, primary_key=True)
    eco_name = Column(VARCHAR(24), nullable=False)
    eco_unit = Column(VARCHAR(10), nullable=False)
    city_id = Column(Integer)
    data = Column(Integer, nullable=False)
Example 4
class TiKu(Base):
    __tablename__ = 'spider_tiku'
    id = Column(Integer(), primary_key=True, autoincrement=True)
    kaoshi = Column(VARCHAR(200), nullable=True)
    subject = Column(VARCHAR(200), nullable=True)
    chapter = Column(VARCHAR(200), nullable=True)
    section = Column(VARCHAR(200), nullable=True)
    tixing = Column(VARCHAR(20), nullable=True)
    question_case = Column(Text(), nullable=True)
    question = Column(Text, nullable=True)
    a = Column(VARCHAR(1000), nullable=True)
    b = Column(VARCHAR(1000), nullable=True)
    c = Column(VARCHAR(1000), nullable=True)
    d = Column(VARCHAR(1000), nullable=True)
    e = Column(VARCHAR(1000), nullable=True)
    other_sections = Column(VARCHAR(3000), nullable=True)
    answer = Column(VARCHAR(500), nullable=True)
    analysis = Column(Text(), nullable=True)
    source = Column(Text(), nullable=True)

    def __str__(self):
        return '< {} {} {}>'.format(self.kaoshi, self.subject, self.chapter)
Example 5
        "recurrences": 2,
        "schedxactions": 1,
        "slots": 4,
        "splits": 5,
        "taxtable_entries": 3,
        "taxtables": 2,
        "transactions": 4,
        "vendors": 1,
    },
}

# this is not a declarative class as it is used before binding the session to an engine.
gnclock = Table(
    u"gnclock",
    DeclarativeBase.metadata,
    Column("hostname", VARCHAR(length=255)),
    Column("pid", INTEGER()),
)
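The comment above is the point of keeping gnclock as a plain Core Table: it can be used with a bare engine connection before any ORM session exists. A short sketch under assumptions (the engine URL is a placeholder and the lock check merely illustrates what a GnuCash-aware tool does before opening a book):

import os
import socket

from sqlalchemy import create_engine

engine = create_engine("sqlite:///book.gnucash")  # placeholder URL
with engine.begin() as conn:
    # the lock table can be queried without any ORM session
    locks = conn.execute(gnclock.select()).fetchall()
    if locks:
        raise RuntimeError("book already locked by {}".format(locks[0]))
    conn.execute(gnclock.insert().values(hostname=socket.gethostname(), pid=os.getpid()))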


class Version(DeclarativeBase):
    """The declarative class for the 'versions' table."""

    __tablename__ = "versions"

    __table_args__ = {}

    # column definitions
    #: The name of the table
    table_name = Column(
        "table_name", VARCHAR(length=50), primary_key=True, nullable=False
    )
Example 6
class OperateRevenueRanking(Base):
    __tablename__ = 'revenue_ranking'
    day = Column(VARCHAR(255), primary_key=True)
    Admin = Column(VARCHAR(5))
    ranking = Column(VARCHAR(255))
Example 7
File: eew.py Project: yupix/ssm
class Eew(Base):
    __tablename__ = 'eew'

    id = Column(Integer, primary_key=True, autoincrement=True)
    event_id = Column(VARCHAR(256), unique=True)
Example 8
class Product(Base):
    __tablename__ = "product"
    product_id = Column(Integer, primary_key=True, autoincrement=True)
    product_name = Column(VARCHAR(100))
    quantity = Column(Integer)
    mrp = Column(Float)
    batch_num = Column(VARCHAR(100))
    batch_date = Column(VARCHAR(100))
    vendor_name = Column(VARCHAR(100))
    user_id = Column(Integer, ForeignKey("user.user_id"))
    status = Column(Enum("active", "inactive"))  # Column takes a single type; passing both VARCHAR(50) and Enum raises an error
    created_datetime = Column(DATETIME)

    def __init__(self, product_name=None, quantity=None, mrp=None, batch_num=None, batch_date=None,
                 vendor_name=None, user_id=None, status=None, created_datetime=None):
        self.product_name = product_name
        self.quantity = quantity
        self.batch_num = batch_num
        self.batch_date = batch_date
        self.mrp = mrp
        self.vendor_name = vendor_name
        self.status = status
        self.user_id = user_id
        self.created_datetime = created_datetime if created_datetime else datetime.datetime.now()

    def add_product_to_inventory(self):
        product_obj = Product(product_name=self.product_name, quantity=self.quantity,
                              mrp=self.mrp, batch_num=self.batch_num, batch_date=self.batch_date,
                              vendor_name=self.vendor_name, user_id=self.user_id, status=self.status,
                              created_datetime=self.created_datetime)
        session.add(product_obj)
        session.commit()
        return product_obj.product_id

    def update_product_in_inventory(self, product_id):
        session.query(Product).filter(Product.product_id == product_id).update({"product_name": self.product_name,
                                                                                   'quantity': self.quantity,
                                                                                   "mrp": self.mrp,
                                                                                   "batch_num": self.batch_num,
                                                                                   "batch_date": self.batch_date,
                                                                                   "vendor_name": self.vendor_name,
                                                                                   "status": "active"})
        session.commit()

    def remove_product_from_inventory(self, product_id):
        product_obj = session.query(Product).filter(Product.product_id == product_id).first()
        session.delete(product_obj)
        session.commit()

    def get_all_products(self):
        product_list = session.query(Product).all()
        session.commit()
        return product_list

    def get_all_products_by_user_id(self, user_id):
        product_list = session.query(Product).filter(Product.user_id == user_id).all()
        session.commit()
        return product_list

    def get_user_by_product_id(self, product_id):
        product_list = session.query(Product).filter(Product.product_id == product_id).first()
        user_list = session.query(UserRoleMap).filter(UserRoleMap.user_id == product_list.user_id).first()
        response = [product_list, user_list]
        session.commit()
        return response

    def get_active_products(self):
        acc_product_list = session.query(Product).filter(Product.status == "active").all()
        return acc_product_list

    def get_inactive_products(self):
        pen_product_obj = session.query(Product).filter(Product.status == "inactive").all()
        return pen_product_obj
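A brief usage sketch of the helpers above; it assumes the module-level session and the user table referenced by the ForeignKey already exist, and all literal values are placeholders.

# create a product, re-save it, list a user's inventory, then delete it
p = Product(product_name="Paracetamol 500mg", quantity=120, mrp=2.5,
            batch_num="B-042", batch_date="2021-06-01",
            vendor_name="Acme Pharma", user_id=1, status="active")
new_id = p.add_product_to_inventory()

p.update_product_in_inventory(new_id)         # rewrites the same fields and forces status to "active"
inventory = p.get_all_products_by_user_id(1)  # list of Product rows for user 1
p.remove_product_from_inventory(new_id)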
Example 9
class Neighbours(Base):
    __tablename__ = "neighbours"

    id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
    router_name = Column(VARCHAR(80))
    neighbour = Column(VARCHAR(80))
Example 10
class IfBscPsopreferstock(Base):
    __tablename__ = 'if_bsc_psopreferstock'

    vc_scode = Column(VARCHAR(16), primary_key=True)
    vc_code = Column(VARCHAR(16))
    vc_sname = Column(VARCHAR(128))
    c_is_list = Column(CHAR(1))
    l_market = Column(VARCHAR(50))
    vc_kind = Column(VARCHAR(8))
    l_listdate = Column(VARCHAR(50))
    l_delistdate = Column(VARCHAR(50))
    vc_valuecur = Column(VARCHAR(3))
    en_nst_volume = Column(VARCHAR(50))
    en_nst_scale = Column(VARCHAR(50))
    en_couponrate = Column(VARCHAR(50))
    l_begin_date = Column(VARCHAR(50))
    l_couponrate_adjust = Column(VARCHAR(50))
    l_adjust_unit = Column(VARCHAR(50))
    vc_adjust_desc = Column(VARCHAR(1024))
    vc_repricing_date = Column(VARCHAR(128))
    en_premium = Column(VARCHAR(50))
    l_day_type = Column(VARCHAR(50))
    l_day_mode = Column(VARCHAR(50))
    l_interestpay_times = Column(VARCHAR(50))
    vc_yearpay_date = Column(VARCHAR(128))
    l_nextpay_date = Column(VARCHAR(50))
    c_is_accumulate = Column(CHAR(1))
    c_is_merge = Column(CHAR(1))
    c_is_switch = Column(CHAR(1))
    cl_transfershare_memo = Column(Text)
    en_current_price = Column(VARCHAR(50))
    c_is_call = Column(CHAR(1))
    cl_call_desc = Column(Text)
    c_is_putclause = Column(CHAR(1))
    cl_putclause_desc = Column(Text)
    l_repaymode = Column(VARCHAR(50))
    vc_chgsecode = Column(VARCHAR(36))
    vc_chgsymbol = Column(VARCHAR(36))
    c_as_bond_or_equity = Column(CHAR(1), server_default=text("'3'"))
    vc_eventcode = Column(VARCHAR(20))
    d_updatetime = Column(DateTime)
    vc_source = Column(VARCHAR(20))
    vc_update_operater = Column(VARCHAR(20))

    def __str__(self):
        return self.vc_scode  # return the attribute value rather than the literal column name
Example 11
class BuggyDomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
    """Test Firebird domains (and some other reflection bumps),
    see [ticket:1663] and http://tracker.firebirdsql.org/browse/CORE-356"""

    __only_on__ = "firebird"

    # NB: spacing and newlines are *significant* here!
    # PS: this test is superfluous on recent FB, where the issue 356 is
    # probably fixed...

    AUTOINC_DM = """\
CREATE DOMAIN AUTOINC_DM
AS
NUMERIC(18,0)
"""

    MONEY_DM = """\
CREATE DOMAIN MONEY_DM
AS
NUMERIC(15,2)
DEFAULT 0
CHECK (VALUE BETWEEN -
9999999999999.99 AND +9999999999999.99)
"""

    NOSI_DM = """\
CREATE DOMAIN
NOSI_DM AS
CHAR(1)
DEFAULT 'N'
NOT NULL
CHECK (VALUE IN
('S', 'N'))
"""

    RIT_TESORERIA_CAPITOLO_DM = """\
CREATE DOMAIN RIT_TESORERIA_CAPITOLO_DM
AS
VARCHAR(6)
CHECK ((VALUE IS NULL) OR (VALUE =
UPPER(VALUE)))
"""

    DEF_ERROR_TB = """\
CREATE TABLE DEF_ERROR (
RITENUTAMOV_ID AUTOINC_DM
NOT NULL,
RITENUTA MONEY_DM,
INTERESSI MONEY_DM
DEFAULT
0,
STAMPATO_MODULO NOSI_DM DEFAULT 'S',
TESORERIA_CAPITOLO
RIT_TESORERIA_CAPITOLO_DM)
"""

    DEF_ERROR_NODOM_TB = """\
CREATE TABLE
DEF_ERROR_NODOM (
RITENUTAMOV_ID INTEGER NOT NULL,
RITENUTA NUMERIC(15,2) DEFAULT 0,
INTERESSI NUMERIC(15,2)
DEFAULT
0,
STAMPATO_MODULO CHAR(1) DEFAULT 'S',
TESORERIA_CAPITOLO
CHAR(1))
"""

    DOM_ID = """
CREATE DOMAIN DOM_ID INTEGER NOT NULL
"""

    TABLE_A = """\
CREATE TABLE A (
ID DOM_ID /* INTEGER NOT NULL */ DEFAULT 0 )
"""

    # the 'default' keyword is lower case here
    TABLE_B = """\
CREATE TABLE B (
ID DOM_ID /* INTEGER NOT NULL */ default 0 )
"""

    @classmethod
    def setup_class(cls):
        con = testing.db.connect()
        con.exec_driver_sql(cls.AUTOINC_DM)
        con.exec_driver_sql(cls.MONEY_DM)
        con.exec_driver_sql(cls.NOSI_DM)
        con.exec_driver_sql(cls.RIT_TESORERIA_CAPITOLO_DM)
        con.exec_driver_sql(cls.DEF_ERROR_TB)
        con.exec_driver_sql(cls.DEF_ERROR_NODOM_TB)

        con.exec_driver_sql(cls.DOM_ID)
        con.exec_driver_sql(cls.TABLE_A)
        con.exec_driver_sql(cls.TABLE_B)

    @classmethod
    def teardown_class(cls):
        con = testing.db.connect()
        con.exec_driver_sql("DROP TABLE a")
        con.exec_driver_sql("DROP TABLE b")
        con.exec_driver_sql("DROP DOMAIN dom_id")
        con.exec_driver_sql("DROP TABLE def_error_nodom")
        con.exec_driver_sql("DROP TABLE def_error")
        con.exec_driver_sql("DROP DOMAIN rit_tesoreria_capitolo_dm")
        con.exec_driver_sql("DROP DOMAIN nosi_dm")
        con.exec_driver_sql("DROP DOMAIN money_dm")
        con.exec_driver_sql("DROP DOMAIN autoinc_dm")

    def test_tables_are_reflected_same_way(self):
        metadata = MetaData(testing.db)

        table_dom = Table("def_error", metadata, autoload=True)
        table_nodom = Table("def_error_nodom", metadata, autoload=True)

        eq_(
            table_dom.c.interessi.server_default.arg.text,
            table_nodom.c.interessi.server_default.arg.text,
        )
        eq_(
            table_dom.c.ritenuta.server_default.arg.text,
            table_nodom.c.ritenuta.server_default.arg.text,
        )
        eq_(
            table_dom.c.stampato_modulo.server_default.arg.text,
            table_nodom.c.stampato_modulo.server_default.arg.text,
        )

    def test_intermixed_comment(self):
        metadata = MetaData(testing.db)

        table_a = Table("a", metadata, autoload=True)

        eq_(table_a.c.id.server_default.arg.text, "0")

    def test_lowercase_default_name(self):
        metadata = MetaData(testing.db)

        table_b = Table("b", metadata, autoload=True)

        eq_(table_b.c.id.server_default.arg.text, "0")
Example 12
class MySQLInterface(DBInterface):
    _type_mapper = {
        'datetime': DateTime,
        'date': Date,
        'float': Float,
        'double': DOUBLE,
        'str': Text,
        'int': Integer,
        'varchar': VARCHAR(20),
        'boolean': Boolean
    }

    def __init__(self,
                 engine: sa.engine.Engine,
                 init: bool = False,
                 db_schema_loc: str = None) -> None:
        """ MySQL server reads and writes interface

        :param engine: sqlalchemy engine
        :param init: if needed to initialize database tables
        :param db_schema_loc: database schema description if you have custom schema
        """
        super().__init__()
        assert engine.name == 'mysql', 'This class is MySQL database ONLY!!!'
        self.engine = engine

        self.meta = sa.MetaData(bind=self.engine)
        self.meta.reflect()
        if init:
            self._create_db_schema_tables(db_schema_loc)

    def _create_db_schema_tables(self, db_schema_loc):
        self._db_parameters = utils.load_param('db_schema.json', db_schema_loc)
        for special_item in ['资产负债表', '现金流量表', '利润表']:  # balance sheet, cash flow statement, income statement
            tmp_item = self._db_parameters.pop(special_item)
            tmp_item['q1'] = 'date'
            tmp_item['q2'] = 'date'
            tmp_item['q4'] = 'date'
            tmp_item['q5'] = 'date'
            tmp_item['y1'] = 'date'
            tmp_item['y2'] = 'date'
            tmp_item['y3'] = 'date'
            tmp_item['y5'] = 'date'
            for prefix in ['合并', '母公司']:  # 'consolidated' and 'parent company' prefixes
                self._db_parameters[prefix + special_item] = tmp_item
        for table_name, table_schema in self._db_parameters.items():
            self.create_table(table_name, table_schema)

    def get_table_names(self) -> List[str]:
        return list(self.meta.tables.keys())

    def get_columns_names(self, table_name: str) -> List[str]:
        table = sa.Table(table_name, self.meta)
        return [str(it.name) for it in table.columns]

    def create_table(self, table_name: str,
                     table_schema: Mapping[str, str]) -> None:
        """
        Create a table

        :param table_name: table name
        :param table_schema: dict of {column name: type}
        """
        col_names = list(table_schema.keys())
        col_types = [self._type_mapper[it] for it in table_schema.values()]
        if 'id' in col_names:
            primary_keys = ['id']
        else:
            primary_keys = [
                it for it in ['DateTime', 'ID', '报告期', 'ConstituteTicker']
                if it in col_names
            ]

        existing_tables = [it.lower() for it in self.meta.tables]
        if table_name.lower() in existing_tables:
            logging.getLogger(__name__).debug(f'Table {table_name} already exists.')
            return

        new_table = Table(
            table_name, self.meta,
            *(Column(col_name, col_type)
              for col_name, col_type in zip(col_names, col_types)),
            sa.PrimaryKeyConstraint(*primary_keys))
        new_table.create()
        self.meta.reflect()
        logging.getLogger(__name__).info(f'Table {table_name} created successfully.')

    def drop_all_tables(self) -> None:
        """删除database内所有的表, 谨慎使用!!!"""
        logging.getLogger(__name__).debug('DROPPING ALL TABLES')
        for table in self.meta.tables.values():
            table.drop()
        self.meta.reflect()

    def purge_table(self, table_name: str) -> None:
        assert table_name in self.meta.tables.keys(), \
            f'No table named {table_name} in the database'
        table = self.meta.tables[table_name]
        conn = self.engine.connect()
        conn.execute(table.delete())
        logging.getLogger(__name__).debug(f'table {table_name} purged')

    def insert_df(self, df: Union[pd.Series, pd.DataFrame],
                  table_name: str) -> None:
        if df.empty:
            return

        start_timestamp = time.time()
        df.to_sql(table_name, self.engine, if_exists='append')
        end_timestamp = time.time()
        logging.getLogger(__name__).debug(
            f'Inserting the data took {(end_timestamp - start_timestamp):.2f} seconds.')

    def update_df(self, df: Union[pd.Series, pd.DataFrame],
                  table_name: str) -> None:
        """ 将DataFrame写入数据库"""
        if df is None:
            return
        if df.empty:
            return

        metadata = sa.MetaData(self.engine)
        metadata.reflect()
        table = metadata.tables[table_name.lower()]
        flat_df = df.reset_index()

        date_cols = flat_df.select_dtypes(
            np.datetime64).columns.values.tolist()
        for col in date_cols:
            flat_df[col] = flat_df[col].apply(self._date2str)

        # replace nan to None so that insert will not error out
        # it seems that this operation changes dtypes. so do it last
        start_timestamp = time.time()
        for col in flat_df.columns:
            flat_df[col] = flat_df[col].where(flat_df[col].notnull(),
                                              other=None)
        for _, row in flat_df.iterrows():
            insert_statement = insert(table).values(**row.to_dict())
            statement = insert_statement.on_duplicate_key_update(
                **row.to_dict())
            self.engine.execute(statement)
        end_timestamp = time.time()
        logging.getLogger(__name__).debug(
            f'Inserting the data took {(end_timestamp - start_timestamp):.2f} seconds.')

    def update_compact_df(self,
                          df: pd.Series,
                          table_name: str,
                          old_df: pd.Series = None) -> None:
        if df.empty:
            return

        existing_data = self.read_table(
            table_name) if old_df is None else old_df
        if existing_data.empty:
            self.update_df(df, table_name)
        else:
            current_date = df.index.get_level_values(
                'DateTime').to_pydatetime()[0]
            existing_data = existing_data.loc[
                existing_data.index.get_level_values(
                    'DateTime') < current_date]
            new_info = compute_diff(df, existing_data)
            self.update_df(new_info, table_name)

    def get_latest_timestamp(
            self,
            table_name: str,
            default_ts: dt.datetime = None,
            column_condition: (str, str) = None) -> Optional[dt.datetime]:
        """
        Return the latest timestamp in a database table

        :param table_name: table name
        :param default_ts: default value returned when there is no matching row
        :param column_condition: condition tuple: (column name, value the column must equal)
        :return: latest timestamp
        """
        assert table_name.lower() in self.meta.tables.keys(), \
            f'No table named {table_name} in the database'
        table = self.meta.tables[table_name.lower()]
        if 'DateTime' in table.columns.keys():
            session = Session(self.engine)
            q = session.query(func.max(table.c.DateTime))
            if column_condition:
                q = q.filter(
                    table.columns[column_condition[0]] == column_condition[1])
            ret = q.one()[0]
            session.close()
            if isinstance(ret, dt.date):
                ret = dt.datetime.combine(ret, dt.time())
            elif ret is None:
                ret = default_ts
            return ret

    def get_column_min(self, table_name: str, column: str):
        """
        Return the minimum value of a column in a database table

        :param table_name: table name
        :param column: column name
        :return: minimum value of the column
        """
        assert table_name.lower() in self.meta.tables.keys(), \
            f'No table named {table_name} in the database'
        table = self.meta.tables[table_name.lower()]
        if 'DateTime' in table.columns.keys():
            session = Session(self.engine)
            q = session.query(func.min(table.c[column]))
            ret = q.one()[0]
            session.close()
            return ret

    def get_column_max(self, table_name: str, column: str):
        """
        Return the maximum value of a column in a database table

        :param table_name: table name
        :param column: column name
        :return: maximum value of the column
        """
        assert table_name.lower() in self.meta.tables.keys(), \
            f'No table named {table_name} in the database'
        table = self.meta.tables[table_name.lower()]
        if 'DateTime' in table.columns.keys():
            session = Session(self.engine)
            q = session.query(func.max(table.c[column]))
            ret = q.one()[0]
            session.close()
            return ret

    def get_all_id(self, table_name: str) -> Optional[List[str]]:
        """
        Return all security tickers stored in a database table

        :param table_name: table name
        :return: list of security tickers
        """
        return self.get_column(table_name, 'ID')

    def get_column(self, table_name: str, column_name: str) -> Optional[List]:
        """
        Return the distinct values of the `column_name` column of a database table

        :param table_name: table name
        :param column_name: column name
        :return: distinct values of the `column_name` column
        """
        assert table_name.lower() in self.meta.tables.keys(), \
            f'No table named {table_name} in the database'
        table = self.meta.tables[table_name.lower()]
        if column_name in table.columns.keys():
            logging.getLogger(__name__).debug(
                f'Found column {column_name} in table {table_name}')
            session = Session(self.engine)
            tmp = session.query(table.columns[column_name]).distinct().all()
            session.close()
            return [it[0] for it in tmp]

    # todo: TBD
    def clean_db(self, table_name: str) -> None:
        """清理表中多余的数据. 未实现"""
        metadata = sa.MetaData(self.engine)
        metadata.reflect()
        assert table_name in metadata.tables.keys(), f'数据库中无名为 {table_name} 的表'
        table = metadata.tables[table_name]
        session = Session(self.engine)
        data = self.read_table(table_name).unstack()

    @staticmethod
    def _date2str(date) -> Optional[str]:
        if isinstance(date, pd.Timestamp):
            return date.strftime('%Y-%m-%d %H:%M:%S')

    def read_table(
            self,
            table_name: str,
            columns: Union[str, Sequence[str]] = None,
            start_date: dt.datetime = None,
            end_date: dt.datetime = None,
            dates: Union[Sequence[dt.datetime], dt.datetime] = None,
            report_period: dt.datetime = None,
            report_month: int = None,
            ids: Union[str, Sequence[str]] = None,
            constitute_ticker: str = None,
            text_statement: str = None) -> Union[pd.Series, pd.DataFrame]:
        """ 读取数据库中的表

        :param table_name: 表名
        :param columns: 所需的列名
        :param start_date: 开始时间
        :param end_date: 结束时间
        :param dates: 查询日期
        :param report_period: 报告期
        :param report_month: 报告月份
        :param ids: 合约代码
        :param constitute_ticker: 成分股代码
        :param text_statement: SQL指令
        :return:
        """
        table_name = table_name.lower()
        index_col = self.get_table_primary_keys(table_name)

        session = Session(self.engine)
        t = self.meta.tables[table_name]
        q = session.query()
        if columns:
            if isinstance(columns, str):
                columns = [columns]
            columns.extend(index_col)
        else:
            columns = [it.name for it in t.columns]
        for it in columns:
            q = q.add_columns(t.c[it])
        if dates is not None:
            if isinstance(dates, Sequence):
                q = q.filter(t.columns['DateTime'].in_(dates))
            else:
                q = q.filter(t.columns['DateTime'] == dates)
        elif end_date is not None:
            q = q.filter(t.columns['DateTime'] <= end_date)
            if start_date is not None:
                q = q.filter(t.columns['DateTime'] >= start_date)
        if report_period is not None:
            q = q.filter(t.columns['报告期'] == report_period)
        if report_month is not None:
            q = q.filter(extract('month', t.columns['报告期']) == report_month)
        if text_statement:
            q = q.filter(text(text_statement))
        if (ids is not None) and ('ID' in columns):
            if isinstance(ids, str):
                q = q.filter(t.columns['ID'] == ids)
            else:
                q = q.filter(t.columns['ID'].in_(ids))
        if (constitute_ticker is not None) and ('ConstituteTicker' in columns):
            q = q.filter(t.columns['ConstituteTicker'] == constitute_ticker)

        ret = pd.read_sql(q.statement, con=self.engine)
        session.close()
        if index_col:
            if 'DateTime' in index_col:
                ret.DateTime = pd.to_datetime(ret.DateTime)
            if '报告期' in index_col:
                ret['报告期'] = pd.to_datetime(ret['报告期'])
            ret = ret.set_index(index_col, drop=True)

        if ret.shape[1] == 1:
            ret = ret.iloc[:, 0]
        return ret

    def exist_table(self, table_name: str) -> bool:
        """ 数据库中是否存在该表"""
        table_name = table_name.lower()
        return table_name in self.meta.tables.keys()

    def get_table_primary_keys(self, table_name: str) -> Optional[List[str]]:
        table_name = table_name.lower()
        table = self.meta.tables[table_name]
        primary_key = [it.name for it in table.primary_key]
        if primary_key:
            return primary_key

    def delete_datetime_records(self, table_name: str, datetime: dt.datetime):
        table_name = table_name.lower()
        t = self.meta.tables[table_name]
        stmt = t.delete().where(t.c.DateTime == datetime)
        conn = self.engine.connect()
        conn.execute(stmt)

    def delete_id_records(self, table_name: str,
                          tickers: Union[str, Sequence[str]]):
        table_name = table_name.lower()
        t = self.meta.tables[table_name]
        if isinstance(tickers, str):
            stmt = t.delete().where(t.c.ID == tickers)
        else:
            stmt = t.delete().where(t.c.ID.in_(tickers))
        conn = self.engine.connect()
        conn.execute(stmt)
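A short usage sketch for the interface above; the DSN, driver and table/column values are placeholders, and DBInterface, utils and compute_diff are assumed importable from the surrounding project.

import datetime as dt

import pandas as pd
import sqlalchemy as sa

engine = sa.create_engine("mysql+pymysql://user:pwd@localhost/quant")  # placeholder DSN
db = MySQLInterface(engine)

# create a table from a {column name: type} schema, then append a small DataFrame
db.create_table("stock_close", {"DateTime": "datetime", "ID": "varchar", "close": "double"})
df = pd.DataFrame(
    {"close": [10.0, 10.5]},
    index=pd.MultiIndex.from_tuples(
        [(dt.datetime(2021, 1, 4), "000001.SZ"), (dt.datetime(2021, 1, 5), "000001.SZ")],
        names=["DateTime", "ID"]),
)
db.insert_df(df, "stock_close")

latest = db.get_latest_timestamp("stock_close")         # newest DateTime in the table
closes = db.read_table("stock_close", ids="000001.SZ")  # Series indexed by (DateTime, ID)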
Example 13
class TestModel(Base, BaseModel):
    __tablename__ = 'test'

    id = Column(Integer, primary_key=True)
    name = Column(VARCHAR(255))
Example 14
class CompanyBasicModel(Base, BaseModel):
    __tablename__ = 'company_basic'
    __table_args__ = (Index('wy', 'no', 'creditno', unique=True), )

    id = Column(Integer, primary_key=True)
    name = Column(VARCHAR(255))
    no = Column(VARCHAR(80), index=True)
    web_key = Column(VARCHAR(80), index=True)
    creditno = Column(VARCHAR(80), index=True)
    is_del = Column(Integer)
    u_ts = Column(Integer)
    c_ts = Column(Integer)

    def __init__(self, *arg, **kw):
        self.c_ts = kw.get("c_ts", None)
        self.creditno = kw.get("creditno", None)
        self.get = kw.get("get", None)
        self.getAll = kw.get("getAll", None)
        self.getAllIds = kw.get("getAllIds", None)
        self.getByFromId = kw.get("getByFromId", None)
        self.getByFromIdAndMod = kw.get("getByFromIdAndMod", None)
        self.getByName = kw.get("getByName", None)
        self.id = kw.get("id", None)
        self.is_del = kw.get("is_del", None)
        self.metadata = kw.get("metadata", None)
        self.name = kw.get("name", None)
        self.web_key = kw.get("web_key", None)
        self.no = kw.get("no", None)
        self.u_ts = kw.get("u_ts", None)

    @staticmethod
    def getByNoOrCreditNo(no="", creditno="", session=None):
        return session.query(CompanyBasicModel).filter(
            or_(CompanyBasicModel.no == no,
                CompanyBasicModel.creditno == creditno)).limit(1).all()

    @staticmethod
    def updateComKey(id_, web_key, session):
        infos = session.query(CompanyBasicModel).filter(
            CompanyBasicModel.id == id_).update(
                {CompanyBasicModel.web_key: web_key})
        session.commit()
        return infos

    @staticmethod
    def addBasic(data, session):
        if data.get("no"):
            exists = CompanyBasicModel.getByNo(data.get("no"), session)
        elif data.get("web_key"):
            exists = CompanyBasicModel.getByCreditNo(data.get("web_key"),
                                                     session)
        elif data.get("creditno"):
            exists = CompanyBasicModel.getByCreditNo(data.get("creditno"),
                                                     session)
        elif data.get("name"):
            exists = CompanyBasicModel.getByName(data.get("name"), session)
        else:
            return None
        if not exists:
            basic = CompanyBasicModel(**data)
            CompanyBasicModel.addModel(basic, session)
            return basic.id
        return exists[0].id
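A sketch of how addBasic's dedup path is typically driven; the session, the BaseModel helpers it relies on (addModel, getByNo, getByCreditNo, getByName), and the literal values are assumptions from the surrounding project.

data = {"name": "Acme Trading Co.", "no": "310000001", "creditno": "91310000XYZ"}

# returns the id of an existing row matched by no/creditno/name, otherwise inserts a new one
company_id = CompanyBasicModel.addBasic(data, session)

# direct lookup by either registration number
rows = CompanyBasicModel.getByNoOrCreditNo(no="310000001", creditno="91310000XYZ", session=session)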
Example 15
class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL):
    __dialect__ = oracle.OracleDialect()

    def test_no_clobs_for_string_params(self):
        """test that simple string params get a DBAPI type of
        VARCHAR, not CLOB. This is to prevent setinputsizes
        from setting up cx_oracle.CLOBs on
        string-based bind params [ticket:793]."""
        class FakeDBAPI:
            def __getattr__(self, attr):
                return attr

        dialect = oracle.OracleDialect()
        dbapi = FakeDBAPI()

        b = bindparam("foo", "hello world!")
        eq_(b.type.dialect_impl(dialect).get_dbapi_type(dbapi), "STRING")

        b = bindparam("foo", "hello world!")
        eq_(b.type.dialect_impl(dialect).get_dbapi_type(dbapi), "STRING")

    def test_long(self):
        self.assert_compile(oracle.LONG(), "LONG")

    @testing.combinations(
        (Date(), cx_oracle._OracleDate),
        (oracle.OracleRaw(), cx_oracle._OracleRaw),
        (String(), String),
        (VARCHAR(), cx_oracle._OracleString),
        (DATE(), cx_oracle._OracleDate),
        (oracle.DATE(), oracle.DATE),
        (String(50), cx_oracle._OracleString),
        (Unicode(), cx_oracle._OracleUnicodeStringCHAR),
        (Text(), cx_oracle._OracleText),
        (UnicodeText(), cx_oracle._OracleUnicodeTextCLOB),
        (CHAR(), cx_oracle._OracleChar),
        (NCHAR(), cx_oracle._OracleNChar),
        (NVARCHAR(), cx_oracle._OracleUnicodeStringNCHAR),
        (oracle.RAW(50), cx_oracle._OracleRaw),
    )
    def test_type_adapt(self, start, test):
        dialect = cx_oracle.dialect()

        assert isinstance(
            start.dialect_impl(dialect),
            test), "wanted %r got %r" % (test, start.dialect_impl(dialect))

    @testing.combinations(
        (String(), String),
        (VARCHAR(), cx_oracle._OracleString),
        (String(50), cx_oracle._OracleString),
        (Unicode(), cx_oracle._OracleUnicodeStringNCHAR),
        (Text(), cx_oracle._OracleText),
        (UnicodeText(), cx_oracle._OracleUnicodeTextNCLOB),
        (NCHAR(), cx_oracle._OracleNChar),
        (NVARCHAR(), cx_oracle._OracleUnicodeStringNCHAR),
    )
    def test_type_adapt_nchar(self, start, test):
        dialect = cx_oracle.dialect(use_nchar_for_unicode=True)

        assert isinstance(
            start.dialect_impl(dialect),
            test), "wanted %r got %r" % (test, start.dialect_impl(dialect))

    def test_raw_compile(self):
        self.assert_compile(oracle.RAW(), "RAW")
        self.assert_compile(oracle.RAW(35), "RAW(35)")

    def test_char_length(self):
        self.assert_compile(VARCHAR(50), "VARCHAR(50 CHAR)")

        oracle8dialect = oracle.dialect()
        oracle8dialect.server_version_info = (8, 0)
        self.assert_compile(VARCHAR(50), "VARCHAR(50)", dialect=oracle8dialect)

        self.assert_compile(NVARCHAR(50), "NVARCHAR2(50)")
        self.assert_compile(CHAR(50), "CHAR(50)")

    @testing.combinations(
        (String(50), "VARCHAR2(50 CHAR)"),
        (Unicode(50), "VARCHAR2(50 CHAR)"),
        (NVARCHAR(50), "NVARCHAR2(50)"),
        (VARCHAR(50), "VARCHAR(50 CHAR)"),
        (oracle.NVARCHAR2(50), "NVARCHAR2(50)"),
        (oracle.VARCHAR2(50), "VARCHAR2(50 CHAR)"),
        (String(), "VARCHAR2"),
        (Unicode(), "VARCHAR2"),
        (NVARCHAR(), "NVARCHAR2"),
        (VARCHAR(), "VARCHAR"),
        (oracle.NVARCHAR2(), "NVARCHAR2"),
        (oracle.VARCHAR2(), "VARCHAR2"),
    )
    def test_varchar_types(self, typ, exp):
        dialect = oracle.dialect()
        self.assert_compile(typ, exp, dialect=dialect)

    @testing.combinations(
        (String(50), "VARCHAR2(50 CHAR)"),
        (Unicode(50), "NVARCHAR2(50)"),
        (NVARCHAR(50), "NVARCHAR2(50)"),
        (VARCHAR(50), "VARCHAR(50 CHAR)"),
        (oracle.NVARCHAR2(50), "NVARCHAR2(50)"),
        (oracle.VARCHAR2(50), "VARCHAR2(50 CHAR)"),
        (String(), "VARCHAR2"),
        (Unicode(), "NVARCHAR2"),
        (NVARCHAR(), "NVARCHAR2"),
        (VARCHAR(), "VARCHAR"),
        (oracle.NVARCHAR2(), "NVARCHAR2"),
        (oracle.VARCHAR2(), "VARCHAR2"),
    )
    def test_varchar_use_nchar_types(self, typ, exp):
        dialect = oracle.dialect(use_nchar_for_unicode=True)
        self.assert_compile(typ, exp, dialect=dialect)

    @testing.combinations(
        (oracle.INTERVAL(), "INTERVAL DAY TO SECOND"),
        (oracle.INTERVAL(day_precision=3), "INTERVAL DAY(3) TO SECOND"),
        (oracle.INTERVAL(second_precision=5), "INTERVAL DAY TO SECOND(5)"),
        (
            oracle.INTERVAL(day_precision=2, second_precision=5),
            "INTERVAL DAY(2) TO SECOND(5)",
        ),
    )
    def test_interval(self, type_, expected):
        self.assert_compile(type_, expected)

    def test_interval_coercion_literal(self):
        expr = column("bar", oracle.INTERVAL) == datetime.timedelta(days=1)
        eq_(expr.right.type._type_affinity, sqltypes.Interval)
Example 16
class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
    """Tests MySQL-dialect specific compilation."""

    __dialect__ = mysql.dialect()

    def test_precolumns(self):
        dialect = self.__dialect__

        def gen(distinct=None, prefixes=None):
            stmt = select(column("q"))
            if distinct:
                stmt = stmt.distinct()
            if prefixes is not None:
                stmt = stmt.prefix_with(*prefixes)

            return str(stmt.compile(dialect=dialect))

        eq_(gen(None), "SELECT q")
        eq_(gen(True), "SELECT DISTINCT q")

        eq_(gen(prefixes=["ALL"]), "SELECT ALL q")
        eq_(gen(prefixes=["DISTINCTROW"]), "SELECT DISTINCTROW q")

        # Interaction with MySQL prefix extensions
        eq_(gen(None, ["straight_join"]), "SELECT straight_join q")
        eq_(
            gen(False, ["HIGH_PRIORITY", "SQL_SMALL_RESULT", "ALL"]),
            "SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL q",
        )
        eq_(
            gen(True, ["high_priority", sql.text("sql_cache")]),
            "SELECT high_priority sql_cache DISTINCT q",
        )

    def test_backslash_escaping(self):
        self.assert_compile(
            sql.column("foo").like("bar", escape="\\"),
            "foo LIKE %s ESCAPE '\\\\'",
        )

        dialect = mysql.dialect()
        dialect._backslash_escapes = False
        self.assert_compile(
            sql.column("foo").like("bar", escape="\\"),
            "foo LIKE %s ESCAPE '\\'",
            dialect=dialect,
        )

    def test_limit(self):
        t = sql.table("t", sql.column("col1"), sql.column("col2"))

        self.assert_compile(
            select(t).limit(10).offset(20),
            "SELECT t.col1, t.col2 FROM t  LIMIT %s, %s",
            {
                "param_1": 20,
                "param_2": 10
            },
        )
        self.assert_compile(
            select(t).limit(10),
            "SELECT t.col1, t.col2 FROM t  LIMIT %s",
            {"param_1": 10},
        )

        self.assert_compile(
            select(t).offset(10),
            "SELECT t.col1, t.col2 FROM t  LIMIT %s, 18446744073709551615",
            {"param_1": 10},
        )

    @testing.combinations(
        (String, ),
        (VARCHAR, ),
        (String(), ),
        (VARCHAR(), ),
        (NVARCHAR(), ),
        (Unicode, ),
        (Unicode(), ),
    )
    def test_varchar_raise(self, type_):
        type_ = sqltypes.to_instance(type_)
        assert_raises_message(
            exc.CompileError,
            "VARCHAR requires a length on dialect mysql",
            type_.compile,
            dialect=mysql.dialect(),
        )

        t1 = Table("sometable", MetaData(), Column("somecolumn", type_))
        assert_raises_message(
            exc.CompileError,
            r"\(in table 'sometable', column 'somecolumn'\)\: "
            r"(?:N)?VARCHAR requires a length on dialect mysql",
            schema.CreateTable(t1).compile,
            dialect=mysql.dialect(),
        )

    def test_update_limit(self):
        t = sql.table("t", sql.column("col1"), sql.column("col2"))

        self.assert_compile(t.update(values={"col1": 123}),
                            "UPDATE t SET col1=%s")
        self.assert_compile(
            t.update().values({
                "col1": 123
            }).with_dialect_options(mysql_limit=5),
            "UPDATE t SET col1=%s LIMIT 5",
        )
        self.assert_compile(
            t.update().values({
                "col1": 123
            }).with_dialect_options(mysql_limit=None),
            "UPDATE t SET col1=%s",
        )
        self.assert_compile(
            t.update().where(t.c.col2 == 456).values({
                "col1": 123
            }).with_dialect_options(mysql_limit=1),
            "UPDATE t SET col1=%s WHERE t.col2 = %s LIMIT 1",
        )

    def test_utc_timestamp(self):
        self.assert_compile(func.utc_timestamp(), "utc_timestamp()")

    def test_utc_timestamp_fsp(self):
        self.assert_compile(
            func.utc_timestamp(5),
            "utc_timestamp(%s)",
            checkparams={"utc_timestamp_1": 5},
        )

    def test_sysdate(self):
        self.assert_compile(func.sysdate(), "SYSDATE()")

    m = mysql

    @testing.combinations(
        (Integer, "CAST(t.col AS SIGNED INTEGER)"),
        (INT, "CAST(t.col AS SIGNED INTEGER)"),
        (m.MSInteger, "CAST(t.col AS SIGNED INTEGER)"),
        (m.MSInteger(unsigned=True), "CAST(t.col AS UNSIGNED INTEGER)"),
        (SmallInteger, "CAST(t.col AS SIGNED INTEGER)"),
        (m.MSSmallInteger, "CAST(t.col AS SIGNED INTEGER)"),
        (m.MSTinyInteger, "CAST(t.col AS SIGNED INTEGER)"),
        # 'SIGNED INTEGER' is a bigint, so this is ok.
        (m.MSBigInteger, "CAST(t.col AS SIGNED INTEGER)"),
        (m.MSBigInteger(unsigned=False), "CAST(t.col AS SIGNED INTEGER)"),
        (m.MSBigInteger(unsigned=True), "CAST(t.col AS UNSIGNED INTEGER)"),
        # this is kind of sucky.  thank you default arguments!
        (NUMERIC, "CAST(t.col AS DECIMAL)"),
        (DECIMAL, "CAST(t.col AS DECIMAL)"),
        (Numeric, "CAST(t.col AS DECIMAL)"),
        (m.MSNumeric, "CAST(t.col AS DECIMAL)"),
        (m.MSDecimal, "CAST(t.col AS DECIMAL)"),
        (TIMESTAMP, "CAST(t.col AS DATETIME)"),
        (DATETIME, "CAST(t.col AS DATETIME)"),
        (DATE, "CAST(t.col AS DATE)"),
        (TIME, "CAST(t.col AS TIME)"),
        (DateTime, "CAST(t.col AS DATETIME)"),
        (Date, "CAST(t.col AS DATE)"),
        (Time, "CAST(t.col AS TIME)"),
        (DateTime, "CAST(t.col AS DATETIME)"),
        (Date, "CAST(t.col AS DATE)"),
        (m.MSTime, "CAST(t.col AS TIME)"),
        (m.MSTimeStamp, "CAST(t.col AS DATETIME)"),
        (String, "CAST(t.col AS CHAR)"),
        (Unicode, "CAST(t.col AS CHAR)"),
        (UnicodeText, "CAST(t.col AS CHAR)"),
        (VARCHAR, "CAST(t.col AS CHAR)"),
        (NCHAR, "CAST(t.col AS CHAR)"),
        (CHAR, "CAST(t.col AS CHAR)"),
        (m.CHAR(charset="utf8"), "CAST(t.col AS CHAR CHARACTER SET utf8)"),
        (CLOB, "CAST(t.col AS CHAR)"),
        (TEXT, "CAST(t.col AS CHAR)"),
        (m.TEXT(charset="utf8"), "CAST(t.col AS CHAR CHARACTER SET utf8)"),
        (String(32), "CAST(t.col AS CHAR(32))"),
        (Unicode(32), "CAST(t.col AS CHAR(32))"),
        (CHAR(32), "CAST(t.col AS CHAR(32))"),
        (m.MSString, "CAST(t.col AS CHAR)"),
        (m.MSText, "CAST(t.col AS CHAR)"),
        (m.MSTinyText, "CAST(t.col AS CHAR)"),
        (m.MSMediumText, "CAST(t.col AS CHAR)"),
        (m.MSLongText, "CAST(t.col AS CHAR)"),
        (m.MSNChar, "CAST(t.col AS CHAR)"),
        (m.MSNVarChar, "CAST(t.col AS CHAR)"),
        (LargeBinary, "CAST(t.col AS BINARY)"),
        (BLOB, "CAST(t.col AS BINARY)"),
        (m.MSBlob, "CAST(t.col AS BINARY)"),
        (m.MSBlob(32), "CAST(t.col AS BINARY)"),
        (m.MSTinyBlob, "CAST(t.col AS BINARY)"),
        (m.MSMediumBlob, "CAST(t.col AS BINARY)"),
        (m.MSLongBlob, "CAST(t.col AS BINARY)"),
        (m.MSBinary, "CAST(t.col AS BINARY)"),
        (m.MSBinary(32), "CAST(t.col AS BINARY)"),
        (m.MSVarBinary, "CAST(t.col AS BINARY)"),
        (m.MSVarBinary(32), "CAST(t.col AS BINARY)"),
        (Interval, "CAST(t.col AS DATETIME)"),
    )
    def test_cast(self, type_, expected):
        t = sql.table("t", sql.column("col"))
        self.assert_compile(cast(t.c.col, type_), expected)

    def test_cast_type_decorator(self):
        class MyInteger(sqltypes.TypeDecorator):
            impl = Integer

        type_ = MyInteger()
        t = sql.table("t", sql.column("col"))
        self.assert_compile(cast(t.c.col, type_),
                            "CAST(t.col AS SIGNED INTEGER)")

    def test_cast_literal_bind(self):
        expr = cast(column("foo", Integer) + 5, Integer())

        self.assert_compile(expr,
                            "CAST(foo + 5 AS SIGNED INTEGER)",
                            literal_binds=True)

    def test_unsupported_cast_literal_bind(self):
        expr = cast(column("foo", Integer) + 5, Float)

        with expect_warnings(
                "Datatype FLOAT does not support CAST on MySQL/MariaDb;"):
            self.assert_compile(expr, "(foo + 5)", literal_binds=True)

    m = mysql

    @testing.combinations(
        (m.MSBit, "t.col"),
        (FLOAT, "t.col"),
        (Float, "t.col"),
        (m.MSFloat, "t.col"),
        (m.MSDouble, "t.col"),
        (m.MSReal, "t.col"),
        (m.MSYear, "t.col"),
        (m.MSYear(2), "t.col"),
        (Boolean, "t.col"),
        (BOOLEAN, "t.col"),
        (m.MSEnum, "t.col"),
        (m.MSEnum("1", "2"), "t.col"),
        (m.MSSet, "t.col"),
        (m.MSSet("1", "2"), "t.col"),
    )
    def test_unsupported_casts(self, type_, expected):

        t = sql.table("t", sql.column("col"))
        with expect_warnings(
                "Datatype .* does not support CAST on MySQL/MariaDb;"):
            self.assert_compile(cast(t.c.col, type_), expected)

    @testing.combinations(
        (m.FLOAT, "CAST(t.col AS FLOAT)"),
        (Float, "CAST(t.col AS FLOAT)"),
        (FLOAT, "CAST(t.col AS FLOAT)"),
        (m.DOUBLE, "CAST(t.col AS DOUBLE)"),
        (m.FLOAT, "CAST(t.col AS FLOAT)"),
        argnames="type_,expected",
    )
    @testing.combinations(True, False, argnames="maria_db")
    def test_float_cast(self, type_, expected, maria_db):

        dialect = mysql.dialect()
        if maria_db:
            dialect.is_mariadb = maria_db
            dialect.server_version_info = (10, 4, 5)
        else:
            dialect.server_version_info = (8, 0, 17)
        t = sql.table("t", sql.column("col"))
        self.assert_compile(cast(t.c.col, type_), expected, dialect=dialect)

    def test_cast_grouped_expression_non_castable(self):
        with expect_warnings(
                "Datatype FLOAT does not support CAST on MySQL/MariaDb;"):
            self.assert_compile(cast(sql.column("x") + sql.column("y"), Float),
                                "(x + y)")

    def test_extract(self):
        t = sql.table("t", sql.column("col1"))

        for field in "year", "month", "day":
            self.assert_compile(
                select(extract(field, t.c.col1)),
                "SELECT EXTRACT(%s FROM t.col1) AS anon_1 FROM t" % field,
            )

        # milliseconds to millisecond
        self.assert_compile(
            select(extract("milliseconds", t.c.col1)),
            "SELECT EXTRACT(millisecond FROM t.col1) AS anon_1 FROM t",
        )

    def test_too_long_index(self):
        exp = "ix_zyrenian_zyme_zyzzogeton_zyzzogeton_zyrenian_zyme_zyz_5cd2"
        tname = "zyrenian_zyme_zyzzogeton_zyzzogeton"
        cname = "zyrenian_zyme_zyzzogeton_zo"

        t1 = Table(tname, MetaData(), Column(cname, Integer, index=True))
        ix1 = list(t1.indexes)[0]

        self.assert_compile(
            schema.CreateIndex(ix1),
            "CREATE INDEX %s "
            "ON %s (%s)" % (exp, tname, cname),
        )

    def test_innodb_autoincrement(self):
        t1 = Table(
            "sometable",
            MetaData(),
            Column("assigned_id",
                   Integer(),
                   primary_key=True,
                   autoincrement=False),
            Column("id", Integer(), primary_key=True, autoincrement=True),
            mysql_engine="InnoDB",
        )
        self.assert_compile(
            schema.CreateTable(t1),
            "CREATE TABLE sometable (assigned_id "
            "INTEGER NOT NULL, id INTEGER NOT NULL "
            "AUTO_INCREMENT, PRIMARY KEY (id, assigned_id)"
            ")ENGINE=InnoDB",
        )

        t1 = Table(
            "sometable",
            MetaData(),
            Column("assigned_id",
                   Integer(),
                   primary_key=True,
                   autoincrement=True),
            Column("id", Integer(), primary_key=True, autoincrement=False),
            mysql_engine="InnoDB",
        )
        self.assert_compile(
            schema.CreateTable(t1),
            "CREATE TABLE sometable (assigned_id "
            "INTEGER NOT NULL AUTO_INCREMENT, id "
            "INTEGER NOT NULL, PRIMARY KEY "
            "(assigned_id, id))ENGINE=InnoDB",
        )

    def test_innodb_autoincrement_reserved_word_column_name(self):
        t1 = Table(
            "sometable",
            MetaData(),
            Column("id", Integer(), primary_key=True, autoincrement=False),
            Column("order", Integer(), primary_key=True, autoincrement=True),
            mysql_engine="InnoDB",
        )
        self.assert_compile(
            schema.CreateTable(t1),
            "CREATE TABLE sometable ("
            "id INTEGER NOT NULL, "
            "`order` INTEGER NOT NULL AUTO_INCREMENT, "
            "PRIMARY KEY (`order`, id)"
            ")ENGINE=InnoDB",
        )

    def test_create_table_with_partition(self):
        t1 = Table(
            "testtable",
            MetaData(),
            Column("id", Integer(), primary_key=True, autoincrement=True),
            Column("other_id",
                   Integer(),
                   primary_key=True,
                   autoincrement=False),
            mysql_partitions="2",
            mysql_partition_by="KEY(other_id)",
        )
        self.assert_compile(
            schema.CreateTable(t1),
            "CREATE TABLE testtable ("
            "id INTEGER NOT NULL AUTO_INCREMENT, "
            "other_id INTEGER NOT NULL, "
            "PRIMARY KEY (id, other_id)"
            ")PARTITION BY KEY(other_id) PARTITIONS 2",
        )

    def test_create_table_with_subpartition(self):
        t1 = Table(
            "testtable",
            MetaData(),
            Column("id", Integer(), primary_key=True, autoincrement=True),
            Column("other_id",
                   Integer(),
                   primary_key=True,
                   autoincrement=False),
            mysql_partitions="2",
            mysql_partition_by="KEY(other_id)",
            mysql_subpartition_by="HASH(some_expr)",
            mysql_subpartitions="2",
        )
        self.assert_compile(
            schema.CreateTable(t1),
            "CREATE TABLE testtable ("
            "id INTEGER NOT NULL AUTO_INCREMENT, "
            "other_id INTEGER NOT NULL, "
            "PRIMARY KEY (id, other_id)"
            ")PARTITION BY KEY(other_id) PARTITIONS 2 "
            "SUBPARTITION BY HASH(some_expr) SUBPARTITIONS 2",
        )

    def test_create_table_with_partition_hash(self):
        t1 = Table(
            "testtable",
            MetaData(),
            Column("id", Integer(), primary_key=True, autoincrement=True),
            Column("other_id",
                   Integer(),
                   primary_key=True,
                   autoincrement=False),
            mysql_partitions="2",
            mysql_partition_by="HASH(other_id)",
        )
        self.assert_compile(
            schema.CreateTable(t1),
            "CREATE TABLE testtable ("
            "id INTEGER NOT NULL AUTO_INCREMENT, "
            "other_id INTEGER NOT NULL, "
            "PRIMARY KEY (id, other_id)"
            ")PARTITION BY HASH(other_id) PARTITIONS 2",
        )

    def test_create_table_with_partition_and_other_opts(self):
        t1 = Table(
            "testtable",
            MetaData(),
            Column("id", Integer(), primary_key=True, autoincrement=True),
            Column("other_id",
                   Integer(),
                   primary_key=True,
                   autoincrement=False),
            mysql_stats_sample_pages="2",
            mysql_partitions="2",
            mysql_partition_by="HASH(other_id)",
        )
        self.assert_compile(
            schema.CreateTable(t1),
            "CREATE TABLE testtable ("
            "id INTEGER NOT NULL AUTO_INCREMENT, "
            "other_id INTEGER NOT NULL, "
            "PRIMARY KEY (id, other_id)"
            ")STATS_SAMPLE_PAGES=2 PARTITION BY HASH(other_id) PARTITIONS 2",
        )

    def test_create_table_with_collate(self):
        # issue #5411
        t1 = Table(
            "testtable",
            MetaData(),
            Column("id", Integer(), primary_key=True, autoincrement=True),
            mysql_engine="InnoDB",
            mysql_collate="utf8_icelandic_ci",
            mysql_charset="utf8",
        )
        first_part = ("CREATE TABLE testtable ("
                      "id INTEGER NOT NULL AUTO_INCREMENT, "
                      "PRIMARY KEY (id))")
        try:
            self.assert_compile(
                schema.CreateTable(t1),
                first_part +
                "ENGINE=InnoDB CHARSET=utf8 COLLATE utf8_icelandic_ci",
            )
        except AssertionError:
            self.assert_compile(
                schema.CreateTable(t1),
                first_part +
                "CHARSET=utf8 ENGINE=InnoDB COLLATE utf8_icelandic_ci",
            )

    def test_inner_join(self):
        t1 = table("t1", column("x"))
        t2 = table("t2", column("y"))

        self.assert_compile(t1.join(t2, t1.c.x == t2.c.y),
                            "t1 INNER JOIN t2 ON t1.x = t2.y")

    def test_outer_join(self):
        t1 = table("t1", column("x"))
        t2 = table("t2", column("y"))

        self.assert_compile(
            t1.outerjoin(t2, t1.c.x == t2.c.y),
            "t1 LEFT OUTER JOIN t2 ON t1.x = t2.y",
        )

    def test_full_outer_join(self):
        t1 = table("t1", column("x"))
        t2 = table("t2", column("y"))

        self.assert_compile(
            t1.outerjoin(t2, t1.c.x == t2.c.y, full=True),
            "t1 FULL OUTER JOIN t2 ON t1.x = t2.y",
        )
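As a hedged aside (not part of the test suite above), the partitioned DDL exercised by these assertions can also be rendered directly by compiling a CreateTable construct against the MySQL dialect; the table definition below is copied from the partition test:

from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.dialects import mysql
from sqlalchemy.schema import CreateTable

t = Table(
    "testtable",
    MetaData(),
    Column("id", Integer, primary_key=True, autoincrement=True),
    Column("other_id", Integer, primary_key=True, autoincrement=False),
    mysql_partitions="2",
    mysql_partition_by="KEY(other_id)",
)

# prints: CREATE TABLE testtable (... PRIMARY KEY (id, other_id))PARTITION BY KEY(other_id) PARTITIONS 2
print(CreateTable(t).compile(dialect=mysql.dialect()))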
Exemplo n.º 17
0
class Document(Base):
    """
    Represents a user-submitted document for processing. (A single document
    may be processed by multiple processors.)
    """
    __tablename__ = 'documents'

    document_id = Column(
        "document_id",
        VARCHAR(MAX_DOC_ID_LEN), primary_key=True,
        comment="Primary key (UUID) for the document"
    )  # type: str
    doctext = deferred(Column(
        "doctext",
        Text,
        comment="Text contents of the document"
    ))  # type: Optional[str]
    client_job_id = Column(
        "client_job_id",
        VARCHAR(MAX_JOB_ID_LEN),
        comment="Client job ID (supplied by the client)",
        index=True
    )  # type: Optional[str]
    queue_id = Column(
        "queue_id",
        VARCHAR(MAX_QUEUE_ID_LEN),
        comment="The UUID of the client request, if in queued mode",
        index=True
    )  # type: Optional[str]
    username = Column(
        "username",
        VARCHAR(MAX_USERNAME_LEN),
        comment="Username that submitted this document",
        nullable=False,
        index=True,
    )  # type: Optional[str]
    client_metadata = deferred(Column(
        "client_metadata",
        Text,
        comment="Metadata submitted by the client"
    ))  # type: Optional[str]
    include_text = Column(
        "include_text",
        Boolean,
        nullable=False,
        default=False,
        comment="Include the source text in the reply?"
    )  # type: Optional[bool]
    datetime_submitted_utc = Column(
        "datetime_submitted_utc",
        DateTime,
        nullable=False,
        # Is the following OK, given that it's not exactly when it was
        # submitted?
        default=datetime.datetime.utcnow,
        comment="Date/time when the request was submitted (in UTC)"
    )  # type: Optional[datetime.datetime]

    docprocrequests = relationship(
        "DocProcRequest",
        cascade="all, delete-orphan",
        passive_deletes=True,
        back_populates="document",
        lazy="select",
        # https://docs.sqlalchemy.org/en/13/orm/collections.html#using-passive-deletes  # noqa
    )  # type: List[DocProcRequest]

    @property
    def datetime_submitted_pendulum(self) -> Optional[Pendulum]:
        return coerce_to_pendulum(self.datetime_submitted_utc,
                                  assume_local=False)
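Because doctext and client_metadata are wrapped in deferred(), they are loaded only when first accessed. A minimal query sketch follows; the session object and the username value are assumptions for illustration, not part of the source:

from sqlalchemy.orm import undefer

# fetch one user's documents and eagerly load the deferred text column
docs = (
    session.query(Document)
    .filter(Document.username == "some_user")
    .options(undefer(Document.doctext))
    .all()
)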
Exemplo n.º 18
0
class TBscSecuritycomp(Base):
    __tablename__ = 't_bsc_securitycomp'

    l_publish_date = Column(VARCHAR(50), primary_key=True, nullable=False)
    vc_scode = Column(VARCHAR(20), primary_key=True, nullable=False)
    vc_stock_code = Column(VARCHAR(16), nullable=False)
    l_market = Column(VARCHAR(50), nullable=False)
    vc_relation_type = Column(VARCHAR(32), primary_key=True, nullable=False)
    vc_relation_code = Column(VARCHAR(20), primary_key=True, nullable=False)
    l_relation_market = Column(VARCHAR(50), primary_key=True, nullable=False)
    l_begin_date = Column(VARCHAR(50))
    l_end_date = Column(VARCHAR(50))
    c_enable = Column(CHAR(1), nullable=False)
    d_updatetime = Column(DateTime, nullable=False)
    vc_md5 = Column(VARCHAR(32))
    vc_source = Column(VARCHAR(20))
    vc_update_operater = Column(VARCHAR(20))

    def __str__(self):
        return str(self.l_publish_date) + str(self.vc_scode) + str(
            self.l_relation_market) + str(self.vc_relation_code) + str(
                self.vc_relation_type) + '$*' + str(self.vc_md5)
Exemplo n.º 19
0
class OperateRevenue2(Base):
    __tablename__ = 'operate_his2'
    ID = Column(BIGINT, primary_key=True)
    EQUIPMENT_ID = Column(VARCHAR(30))
    PLATENO = Column(VARCHAR(24))
    COMPANY_ID = Column(VARCHAR(10))
    TEAM_CODE = Column(VARCHAR(8))
    EMPTY_MILE = Column(VARCHAR(24))
    EMPTY_BEGIN_TIME = Column(VARCHAR(255))
    WORK_BEGIN_TIME = Column(VARCHAR(255))
    WORK_END_TIME = Column(VARCHAR(255))
    UNIT_PRICE = Column(VARCHAR(7))
    LOAD_MILE = Column(VARCHAR(6))
    SLOW_COUNT_TIME = Column(VARCHAR(11))
    OPERATE_MONEY = Column(VARCHAR(20))
    EVALUATE = Column(VARCHAR(11))
    TRADE_CODE = Column(VARCHAR(96))
    GET_ON_LONGITUDE = Column(VARCHAR(16))
    GET_ON_LATITUDE = Column(VARCHAR(16))
    GET_OFF_LONGITUDE = Column(VARCHAR(16))
    GET_OFF_LATITUDE = Column(VARCHAR(16))
    TIME_REPRE = Column(VARCHAR(11))
    GEOHASH5 = Column(VARCHAR(30))
    MIN_REPRE = Column(VARCHAR(11))
    GEOHASH7 = Column(VARCHAR(30))
    GEOEND7 = Column(VARCHAR(765))
    CHAUFFEUR_NO = Column(VARCHAR(8))
    ROW_KEY = Column(VARCHAR(120))
Exemplo n.º 20
0
    def current_data(self, adjust_no_use=True):
        post_data = {
            'btype': 'C',
            'listed': 'Y',
            'rp': '50',
            'is_search': 'N',
        }
        js = self.download(self.url, data=post_data)
        if not js:
            return None
        ret = js.json()
        bond_list = ret.get('rows', {})
        cell_list = []
        for item in bond_list:
            cell_list.append(pd.Series(item.get('cell')))
        df = pd.DataFrame(cell_list)

        # the columns below are not needed for now
        if adjust_no_use:
            # del df['active_fl']
            # del df['adq_rating']
            # del df['list_dt']
            # del df['left_put_year']
            # del df['owned']
            # del df['put_dt']
            # del df['real_force_redeem_price']
            # del df['redeem_dt']
            # del df['apply_cd']
            # del df['force_redeem']
            # del df['stock_id']
            # del df['full_price']
            # del df['pre_bond_id']
            # del df['ytm_rt']
            # del df['ytm_rt_tax']
            # del df['repo_cd']
            # del df['last_time']
            # del df['pinyin']
            # del df['put_real_days']
            # del df['price_tips']
            # del df['btype']
            # del df['repo_valid']
            # del df['repo_valid_to']
            # del df['repo_valid_from']
            # del df['repo_discount_rt']
            # del df['adjust_tc']
            # del df['cpn_desc']
            # del df['market']
            # del df['stock_net_value']

            # type conversions; some fields contain a '%' sign

            df['premium_rt'] = df['premium_rt'].map(
                lambda x: float(x.replace('%', '')))
            df['price'] = df['price'].astype('float64')
            df['convert_price'] = df['convert_price'].astype('float64')
            df['premium_rt'] = df['premium_rt'].astype('float64')
            df['redeem_price'] = df['redeem_price'].astype('float64')

            # helpers that coerce messy string fields to floats, returning None on failure
            def convert_float(x):
                try:
                    ret_float = float(x)
                except (TypeError, ValueError):
                    ret_float = None
                return ret_float

            def convert_percent(x):
                try:
                    ret = float(x) * 100
                except (TypeError, ValueError):
                    ret = None
                return ret

            def remove_percent(x):
                try:
                    ret = x.replace(r'%', '')
                    ret = float(ret)
                except (AttributeError, ValueError):
                    ret = None

                return ret

            df['put_convert_price'] = df['put_convert_price'].map(
                convert_float)
            df['sprice'] = df['sprice'].map(convert_float)
            df['ration'] = df['ration'].map(convert_percent)
            df['volume'] = df['volume'].map(convert_float)
            df['convert_amt_ratio'] = df['convert_amt_ratio'].map(
                remove_percent)
            df['ration_rt'] = df['ration_rt'].map(convert_float)
            df['increase_rt'] = df['increase_rt'].map(remove_percent)
            df['sincrease_rt'] = df['sincrease_rt'].map(remove_percent)

            rename_columns = {
                'bond_id': '可转债代码',
                'bond_nm': '可转债名称',
                'price': '可转债价格',
                'stock_nm': '正股名称',
                'stock_cd': '正股代码',
                'sprice': '正股现价',
                'sincrease_rt': '正股涨跌幅',
                'convert_price': '最新转股价',
                'premium_rt': '溢价率',
                'increase_rt': '可转债涨幅',
                'put_convert_price': '回售触发价',
                'convert_dt': '转股起始日',
                'short_maturity_dt': '到期时间',
                'volume': '成交额(万元)',
                'redeem_price': '强赎价格',
                'year_left': '剩余时间',
                'next_put_dt': '回售起始日',
                'rating_cd': '评级',
                # 'issue_dt': '发行时间',
                # 'redeem_tc': '强制赎回条款',
                # 'adjust_tc': '下修条件',
                'adjust_tip': '下修提示',
                # 'put_tc': '回售',
                'adj_cnt': '下调次数',
                #   'ration':'已转股比例'
                'convert_amt_ratio': '转债剩余占总市值比',
                'curr_iss_amt': '剩余规模',
                'orig_iss_amt': '发行规模',
                'ration_rt': '股东配售率',
            }

            df = df.rename(columns=rename_columns)
            df = df[list(rename_columns.values())]
            df['更新日期'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M')

        # dfx = df[['可转债代码', '可转债名称', '可转债涨幅', '可转债价格', '正股名称', '正股代码',
        #           '正股涨跌幅', '正股现价', '最新转股价', '溢价率', '评级',
        #           '转股起始日', '回售起始日', '回售触发价', '剩余时间',
        #           '更新日期']]

        df = df.set_index('可转债代码', drop=True)
        try:

            df.to_sql('tb_jsl_{}'.format(self.date),
                      engine,
                      if_exists='replace',
                      dtype={'可转债代码': VARCHAR(10)})
            engine2 = get_engine('db_stock')
            df.to_sql('tb_bond_jisilu',  # the name has no placeholder, so the former .format(self.date) was a no-op
                      engine2,
                      if_exists='replace',
                      dtype={'可转债代码': VARCHAR(10)})
        except Exception as e:
            logger.info(e)
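The helpers above do the heavy lifting of turning strings such as '12.3%' into floats. A standalone sketch of the same conversion on toy data (values invented for illustration):

import pandas as pd


def remove_percent(x):
    try:
        return float(str(x).replace('%', ''))
    except (TypeError, ValueError):
        return None


toy = pd.DataFrame({'premium_rt': ['12.3%', '-0.5%', None]})
toy['premium_rt'] = toy['premium_rt'].map(remove_percent)
print(toy['premium_rt'].tolist())  # [12.3, -0.5, nan] -- missing values become NaN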
Exemplo n.º 21
0
class Medico(Base):
    # the excerpt omits a table name and primary key; assumed values added so the
    # mapping is complete (SQLAlchemy requires at least one primary-key column)
    __tablename__ = 'medico'
    id = Column(Integer, primary_key=True, autoincrement=True)

    id_especialidade = Column(ForeignKey('especialidade.id'))
    nome = Column(VARCHAR(50), nullable=False)
    senha = Column(VARCHAR(32), nullable=False)
    email = Column(VARCHAR(50), unique=True, nullable=False)
    foto = Column(VARCHAR(50), nullable=False)
    celular = Column(VARCHAR(12), unique=True, nullable=False)
    sexo = Column(VARCHAR(11), nullable=False)
    nascimento = Column(Date(), nullable=False)
    cpf = Column(VARCHAR(11), nullable=False)
    cfm_cfp = Column(VARCHAR(25), nullable=False)
    estado = Column(VARCHAR(2), nullable=False)
    cidade = Column(VARCHAR(50), nullable=False)
    bairro = Column(VARCHAR(100), nullable=False)
    rua = Column(VARCHAR(100), nullable=False)
    numero = Column(VARCHAR(20), nullable=False)
    atendimento_online = Column(Boolean(), nullable=False)
    receber_noticias = Column(Boolean(), nullable=False)
    valor = Column(Float(), nullable=False)
    verificado = Column(Boolean(), nullable=False)
    codigo_troca_senha = Column(VARCHAR(32))
    expiracao = Column(Date())
    saldo = Column(Float(), nullable=False)
Exemplo n.º 22
0
class IfBscSuspensionstockquote(Base):
    __tablename__ = 'if_bsc_suspensionstockquote'

    vc_scode = Column(VARCHAR(20), primary_key=True, nullable=False)
    vc_stock_code = Column(VARCHAR(16), nullable=False)
    l_market = Column(VARCHAR(50), nullable=False)
    l_estimate_date = Column(VARCHAR(50), primary_key=True, nullable=False)
    vc_index_code = Column(VARCHAR(16))
    vc_index_name = Column(VARCHAR(100))
    l_trade_date = Column(VARCHAR(50))
    en_suspension_price = Column(VARCHAR(50))
    en_suspension_index = Column(VARCHAR(50))
    en_latest_index = Column(VARCHAR(50))
    en_price = Column(VARCHAR(50))
    d_updatetime = Column(DateTime, nullable=False)
    vc_source = Column(VARCHAR(20))
    vc_update_operater = Column(VARCHAR(20))

    def __str__(self):
        return str(self.l_estimate_date) + '$*' + str(self.vc_scode)
Exemplo n.º 23
0
class Index(Base, DBBase):
    __tablename__ = 'indexes'
    db = db_init()

    idx_id = Column(Integer, primary_key=True, autoincrement=True)
    label = Column(VARCHAR(20))
    desc = Column(Text(1000))
    overall_qualified_draws = Column(Integer)
    overall_qualified_nums = Column(Integer)
    overall_wins = Column(Integer)
    overall_efficiency = Column(DECIMAL(5, 4))
    overall_best = Column(Integer)
    overall_worst = Column(Integer)
    overall_avg = Column(DECIMAL(6, 4))

    explores = relationship("iExplore", backref='my_index')

    #explores = relationship("iexplore.iExplore", foreign_keys=idx_id, backref='my_index', primary_join="iExplore.idx_id==Index.idx_id")

    def __init__(self, Label=None, Desc=None, idxID=0):
        self.label = Label
        self.desc = Desc
        self.idx_id = idxID
        if self.label is not None or self.idx_id > 0:
            self.setup()

        super().setupDBBase(Index, Index.idx_id, self.idx_id)

    def register(self):
        print("Index.register(): Want to register %s %s" %
              (self.label, self.desc))
        d = self.db.session.query(Index).filter(
            Index.label == self.label).first()
        if d is None:
            self.db_save()
            print("Registered index %s %s" % (self.label, self.desc))
        else:
            print("Index already registered")

    def reset(self):
        print("Resetting Index")
        self.label = None
        self.desc = None
        self.overall_qualified_draws = 0
        self.overall_qualified_nums = 0
        self.overall_wins = 0
        self.overall_efficiency = 0
        self.overall_best = 0
        self.overall_worst = 0
        self.overall_avg = 0

    def setup(self):
        d = None
        print("Setting up index %s" % self.label)
        if self.label is not None:
            d = self.db.session.query(Index).filter(
                Index.label == self.label).first()
            print("pulling index by label")
        else:
            d = self.db.session.query(Index).filter(
                Index.idx_id == self.idx_id).first()
            print("pulling index by id")

        if d is not None:
            self.idx_id = d.idx_id
            self.label = d.label
            self.desc = d.desc
            self.overall_qualified_draws = d.overall_qualified_draws
            self.overall_qualified_nums = d.overall_qualified_nums
            self.overall_wins = d.overall_wins
            self.overall_efficiency = d.overall_efficiency
            self.overall_best = d.overall_best
            self.overall_worst = d.overall_worst
            self.overall_avg = d.overall_avg
Exemplo n.º 24
0
class ChannelTransactionHistory(Base):
    __tablename__ = "channel_transaction_history"
    row_id = Column("row_id", Integer, primary_key=True, autoincrement=True)
    order_id = Column("order_id", VARCHAR(128), nullable=False)
    amount = Column("amount", Integer, nullable=False)
    currency = Column("currency", VARCHAR(64), nullable=False)
    type = Column("type", VARCHAR(128), nullable=True, default=null)
    address = Column("address", VARCHAR(255), nullable=True, default=null)
    recipient = Column("recipient", VARCHAR(255), nullable=True, default=null)
    signature = Column("signature", VARCHAR(255), nullable=True, default=null)
    org_id = Column("org_id", VARCHAR(255), nullable=True, default=null)
    group_id = Column("group_id", VARCHAR(255), nullable=True, default=null)
    request_parameters = Column("request_parameters", VARCHAR(255), nullable=True, default=null)
    transaction_hash = Column("transaction_hash", VARCHAR(255), nullable=True, default=null)
    status = Column("status", VARCHAR(255), nullable=True, default=null)
    # pass the callable (no parentheses) so the timestamp is evaluated per INSERT,
    # not fixed once at import time
    row_created = Column("row_created", TIMESTAMP(timezone=False), nullable=True, default=datetime.utcnow)
    row_updated = Column("row_updated", TIMESTAMP(timezone=False), nullable=True, default=datetime.utcnow)
    UniqueConstraint(order_id, name="uq_cht_hs")
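Note the column defaults above: passing the datetime.utcnow callable (no parentheses) makes SQLAlchemy evaluate it for every INSERT, whereas calling it would freeze a single import-time timestamp into the schema. A small, hedged illustration (class and table names invented):

import datetime

from sqlalchemy import Column, Integer, TIMESTAMP
from sqlalchemy.ext.declarative import declarative_base

SketchBase = declarative_base()


class EventSketch(SketchBase):
    __tablename__ = "event_sketch"
    id = Column(Integer, primary_key=True)
    # evaluated once at import time -- every row would share this timestamp
    created_frozen = Column(TIMESTAMP, default=datetime.datetime.utcnow())
    # evaluated at INSERT time -- each row gets its own timestamp
    created_per_row = Column(TIMESTAMP, default=datetime.datetime.utcnow)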
Exemplo n.º 25
0
"""
import os

from sqlalchemy_aio import ASYNCIO_STRATEGY

from sqlalchemy import Column, Integer, MetaData, Table, VARCHAR, create_engine

from owllook_gui.config import Config

path = os.path.join(Config.BASE_DIR, 'database')
db_path = os.path.join(path, 'owllook_gui.db')

metadata = MetaData()

books = Table(
    'books',
    metadata,
    Column('id', Integer, primary_key=True, autoincrement=True),
    Column('title', VARCHAR(100)),
    Column('url', VARCHAR(200)),
    Column('latest_chapter_name', VARCHAR(50), default=None),
    Column('latest_chapter_url', VARCHAR(200), default=None),
)

engine = create_engine(
    # In-memory sqlite database cannot be accessed from different
    # threads, use file.
    'sqlite:///' + '/tmp/owllook_gui.db',
    strategy=ASYNCIO_STRATEGY)
metadata.create_all(engine._engine)
Exemplo n.º 26
0
from sqlalchemy import Column, VARCHAR, ForeignKey, Table
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship

from Util import __generate_id__

Base = declarative_base()

pessoa_habilidade = Table(
    'pessoa_habilidade',
    Base.metadata,
    Column('pessoa_id', VARCHAR(36), ForeignKey('pessoa.email')),
    Column('habilidade_id', VARCHAR(36), ForeignKey('habilidade.id')),
)

pessoa_formacao = Table(
    'pessoa_formacao',
    Base.metadata,
    Column('pessoa_id', VARCHAR(36), ForeignKey('pessoa.email')),
    Column('formacao_id', VARCHAR(36), ForeignKey('formacao.id')),
)

pessoa_experiencia = Table(
    'pessoa_experiencia',
    Base.metadata,
    Column('pessoa_id', VARCHAR(36), ForeignKey('pessoa.email')),
    Column('experiencia_id', VARCHAR(36), ForeignKey('experiencia.id')),
)


class Pessoa(Base):
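The excerpt stops at the Pessoa class body. Purely as a hedged sketch (the primary key, the class name, and the related Habilidade/Formacao/Experiencia classes are assumptions, not the author's code), the three association tables above would typically be wired in with relationship(secondary=...):

# hedged sketch only -- every name below is assumed for illustration
class PessoaSketch(Base):
    __tablename__ = 'pessoa'  # matches the ForeignKey('pessoa.email') targets above
    email = Column(VARCHAR(36), primary_key=True)

    habilidades = relationship('Habilidade', secondary=pessoa_habilidade)
    formacoes = relationship('Formacao', secondary=pessoa_formacao)
    experiencias = relationship('Experiencia', secondary=pessoa_experiencia)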
Exemplo n.º 27
0
class TestInfo(Base):

    __tablename__ = "test_info"

    id = Column(Integer, primary_key=True, autoincrement=True)
    nick_name = Column(VARCHAR(length=255))
Exemplo n.º 28
0
class VdiEtlInstructionDetail(Base):
    __tablename__ = 'vdi_etl_instruction_detail'

    key_etl_instruction = Column(VARCHAR(50), primary_key=True)
    id_ind_port_code = Column(VARCHAR(30))
    id_ins_data_bgndate = Column(DateTime)
    id_ins_data_enddate = Column(DateTime)
    id_ins_data_kind = Column(VARCHAR(50))
    id_ins_exec_date = Column(DateTime)
    id_ins_exec_groupno = Column(VARCHAR(50))
    ins_exec_update_style = Column(VARCHAR(50))
    ins_exec_rec_source = Column(VARCHAR(50))
    ins_exec_bgntime = Column(DateTime)
    ins_exec_endtime = Column(DateTime)
    ins_exec_state = Column(VARCHAR(50))
    ins_rec_time = Column(DateTime)
    ins_remark = Column(VARCHAR(200))
    src_port_code = Column(VARCHAR(60))
    ind_rec_time = Column(DateTime)
    vc_source = Column(VARCHAR(20))
    vc_update_operater = Column(VARCHAR(20))
    d_updatetime = Column(DateTime)

    def __str__(self):
        return str(self.key_etl_instruction)
Exemplo n.º 29
0
class Split(DeclarativeBaseGuid):
    """
    A GnuCash Split.

    .. note::

        A split used in a scheduled transaction has its main attributes in form of slots.

    Attributes:
        transaction(:class:`piecash.core.transaction.Transaction`): transaction of the split
        account(:class:`piecash.core.account.Account`): account of the split
        lot(:class:`piecash.business.Lot`): lot to which the split pertains
        memo(str): memo of the split
        value(:class:`decimal.Decimal`): amount expressed in the currency of the transaction of the split
        quantity(:class:`decimal.Decimal`): amount expressed in the commodity of the account of the split
        reconcile_state(str): 'n', 'c' or 'y'
        reconcile_date(:class:`datetime.datetime`): time
        action(str): describes the type of action behind the split (free-form string, but with a dropdown in the GUI)
    """
    __tablename__ = 'splits'

    __table_args__ = (
        # indices
        Index('splits_tx_guid_index', 'tx_guid'),
        Index('splits_account_guid_index', 'account_guid'),
    )

    # column definitions
    # the transaction_guid is not mandatory at construction time because it can be set through a tr.splits.append(...) operation
    # however, in the validation of the object, we raise an error if there is no transaction set at that time
    transaction_guid = Column('tx_guid', VARCHAR(length=32), ForeignKey('transactions.guid'))
    account_guid = Column('account_guid', VARCHAR(length=32), ForeignKey('accounts.guid'), nullable=False)
    memo = Column('memo', VARCHAR(length=2048), nullable=False)
    action = Column('action', VARCHAR(length=2048), nullable=False)

    reconcile_state = Column('reconcile_state', VARCHAR(length=1), nullable=False)
    reconcile_date = Column('reconcile_date', _DateTime())

    _value_num = Column('value_num', BIGINT(), nullable=False)
    _value_denom = Column('value_denom', BIGINT(), nullable=False)
    _value_denom_basis = None
    value = hybrid_property_gncnumeric(_value_num, _value_denom)
    _quantity_num = Column('quantity_num', BIGINT(), nullable=False)
    _quantity_denom = Column('quantity_denom', BIGINT(), nullable=False)
    _quantity_denom_basis = None
    quantity = hybrid_property_gncnumeric(_quantity_num, _quantity_denom)

    lot_guid = Column('lot_guid', VARCHAR(length=32), ForeignKey('lots.guid'))

    # relation definitions
    account = relation('Account', back_populates='splits')
    lot = relation('Lot', back_populates='splits')
    transaction = relation('Transaction', back_populates='splits')

    @property
    def is_credit(self):
        return self.value < 0

    @property
    def is_debit(self):
        return self.value > 0

    def __init__(self,
                 account,
                 value,
                 quantity=None,
                 transaction=None,
                 memo="",
                 action="",
                 reconcile_date=None,
                 reconcile_state="n",
                 lot=None,
                 ):
        self.transaction = transaction
        self.account = account
        self.value = value
        self.quantity = value if quantity is None else quantity
        self.memo = memo
        self.action = action
        self.reconcile_date = reconcile_date
        self.reconcile_state = reconcile_state
        self.lot = lot

    def __str__(self):
        try:
            cur = self.transaction.currency.mnemonic
            acc = self.account
            com = acc.commodity.mnemonic
            if com == "template":
                # case of template split from scheduled transaction
                sched_xaction = self["sched-xaction"]
                credit = sched_xaction["credit-formula"].value
                debit = sched_xaction["debit-formula"].value
                return "SplitTemplate<{} {} {}>".format(sched_xaction["account"].value,
                                                        "credit={}".format(credit) if credit else "",
                                                        "debit={}".format(debit) if debit else "",

                                                        )
            elif cur == com:
                # case of same currency split
                return "Split<{} {} {}>".format(acc,
                                                self.value, cur)
            else:
                # case of non currency split
                return "Split<{} {} {} [{} {}]>".format(acc,
                                                        self.value, cur,
                                                        self.quantity, com)
        except AttributeError:
            return "Split<{}>".format(self.account)

    def object_to_validate(self, change):
        yield self
        if self.transaction:
            yield self.transaction
        if self.lot:
            yield self.lot

    def validate(self):
        old = self.get_all_changes()

        if old["STATE_CHANGES"][-1] == "deleted":
            return

        if '_quantity_num' in old or '_value_num' in old:
            self.transaction._recalculate_balance = True

        if self.transaction_guid is None:
            raise GncValidationError("The split is not linked to a transaction")

        if self.transaction.currency == self.account.commodity:
            if self.quantity != self.value:
                raise GncValidationError("The split has a quantity diffeerent from value "
                                         "while the transaction currency and the account commodity is the same")
        else:
            if self.quantity is None:
                raise GncValidationError(
                    "The split quantity is not defined while the split is on a commodity different from the transaction")
            if self.quantity.is_signed() != self.value.is_signed():
                raise GncValidationError("The split quantity has not the same sign as the split value")

        # everything is fine, let us normalise the value with respect to the currency/commodity precisions
        self._quantity_denom_basis = self.account.commodity_scu
        self._value_denom_basis = self.transaction.currency.fraction

        if self.transaction.currency != self.account.commodity and self.quantity != 0:
            # let us also add a Price
            from piecash import Price

            value = (self.value / self.quantity).quantize(Decimal("0.000001"))
            try:
                # find existing price if any and if so, do nothing
                pr = self.book.prices(commodity=self.account.commodity,
                                      currency=self.transaction.currency,
                                      date=self.transaction.post_date,
                                      )

            except KeyError:
                # otherwise, add a price in the database
                pr = Price(commodity=self.account.commodity,
                           currency=self.transaction.currency,
                           date=self.transaction.post_date,
                           value=value,
                           type="transaction",
                           source="user:split-register")

            # and an action if not yet defined
            if self.action == "":
                self.action = "Sell" if self.quantity.is_signed() else "Buy"
Exemplo n.º 30
0
class Account(Base):
    __tablename__ = "accounts"

    account_id = Column(Integer, primary_key=True, autoincrement=True)
    user_name = Column(VARCHAR(255), nullable=False)