""" # master table atoms = Table( 'atoms', metadata, Column('atom_id', Integer, primary_key=True), Column('biomolecule_id', Integer, nullable=False), Column('residue_id', Integer, nullable=False), Column('path', PTree), Column('atom_serial', Integer, nullable=False), Column('group_pdb', String(7), nullable=False), # HETATM/ATOM Column('atom_name', String(4), nullable=False), Column('alt_loc', String(1), nullable=False), Column('coords', Vector3D, nullable=False), Column('occupancy', Float(3, 2), nullable=False), Column('b_factor', Float(4, 2), nullable=False), Column('element', String(2)), Column('hyb', SmallInteger, nullable=False), Column('tripos_atom_type', String(5)), Column('is_donor', Boolean(create_constraint=False), DefaultClause('false'), nullable=False), Column('is_acceptor', Boolean(create_constraint=False), DefaultClause('false'), nullable=False), Column('is_aromatic', Boolean(create_constraint=False), DefaultClause('false'),
100xp
Previously, we used the Table object to reflect a table from an existing database, but what if we want to create a new table? We still use the Table object; however, we replace the autoload keyword arguments with Column objects. The Column object takes a name, a SQLAlchemy type with an optional format, and optional keyword arguments for different constraints.

With the table defined, we're now ready to create the table in the database by using the create_all method on metadata and supplying the engine as the only parameter. When building the table, recall how in the video we passed in 255 as the maximum length of a String, but there were no such constraints for the other types.

Instructions
Import Table, Column, String, Integer, Float, Boolean from sqlalchemy.
Build a new table called data with name (String), count (Integer), amount (Float), and valid (Boolean) columns.
Create the table in the database.
"""

# Import Table, Column, String, Integer, Float, Boolean from sqlalchemy
from sqlalchemy import Table, Column, String, Integer, Float, Boolean

# Define a new table with a name, count, amount, and valid column: data
data = Table('data', metadata,
             Column('name', String(255)),
             Column('count', Integer()),
             Column('amount', Float()),
             Column('valid', Boolean()))

# Use the metadata to create the table
metadata.create_all(engine)

# Print table repr
print(repr(data))

"""
IPython output

<script.py> output:
    Table('data', MetaData(bind=None), Column('name', String(length=255), table=<data>), Column('count', Integer(), table=<data>), Column('amount', Float(), table=<data>), Column('valid', Boolean(), table=<data>), schema=None)
"""

"""
Constraints and Data Defaults
100xp
Often, you need to make sure that a column is unique, nullable, a positive value, or related to a column in another table. These are called constraints. Many constraints are keywords on the column itself; however, they can also be passed directly to the Table object. In addition to constraints, you can also set a default value for the column if no data is passed to it via the default keyword on the column. There is also an onupdate keyword for setting the column value when the row is updated. This is extremely useful for keeping datetime stamps for auditing purposes.
"""
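A minimal sketch of the constraints and defaults described above, reusing the metadata and engine objects from the exercise; the users table and its columns are purely illustrative and not part of the original code:

# Sketch only: demonstrates unique/nullable constraints plus default and onupdate values
from datetime import datetime
from sqlalchemy import Table, Column, String, Integer, DateTime

users = Table('users', metadata,
              Column('username', String(255), unique=True, nullable=False),
              Column('count', Integer(), default=1),        # used when no value is supplied on insert
              Column('updated_on', DateTime(),
                     default=datetime.now,                   # stamped on insert
                     onupdate=datetime.now))                 # refreshed whenever the row is updated

metadata.create_all(engine)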
db = Database(credentials=credentials)
db_schema = None  # set if you are not using the default

'''
To do anything with IoT Platform Analytics, you will need one or more entity types. You can create entity types through the IoT Platform or using the Python API as shown below. The database schema is only needed if you are not using the default schema. You can also rename the timestamp.
'''

entity_name = 'Turbines'
db_schema = 'dash100462'  # dash100462 is used in dev2
# db_schema = None  # replace if you are not using the default schema
# db.drop_table(entity_name, schema=db_schema)

entity = EntityType(entity_name, db,
                    Column('Turbine_ID', String(50)),
                    Column('Temperature', Float()),
                    Column('Pressure', Float()),
                    Column('Volume', Float()),
                    DemoHTTPPreload(request='GET',
                                    url='internal_test',
                                    output_item='http_preload_done'),
                    bif.PythonExpression(expression='df["Temperature"]*df["Pressure"]',
                                         output_name='Volume'),
                    **{'_timestamp': 'evt_timestamp',
                       '_db_schema': db_schema})

'''
When creating an EntityType object you will need to specify the name of the entity and the database object that will contain the entity data. After creating an EntityType you will need to register it so that it is visible in the UI. To also register the functions and constants associated with the entity type, specify 'publish_kpis' = True.
'''
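A hedged sketch of the registration step described in the note above; register is part of the iotfunctions EntityType API, but the exact keyword arguments (including raise_error) may differ between releases:

# Register the entity type so it becomes visible in the UI.
# publish_kpis=True also registers the functions and constants associated
# with the entity type, as described above; raise_error is an assumption here.
entity.register(raise_error=True, publish_kpis=True)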
def load_unicode_test_data(): """Loading unicode test dataset from a csv file in the repo""" data = get_example_data("unicode_utf8_unixnl_test.csv", is_gzip=False, make_bytes=True) df = pd.read_csv(data, encoding="utf-8") # generate date/numeric data df["dttm"] = datetime.datetime.now().date() df["value"] = [random.randint(1, 100) for _ in range(len(df))] df.to_sql( # pylint: disable=no-member "unicode_test", db.engine, if_exists="replace", chunksize=500, dtype={ "phrase": String(500), "short_phrase": String(10), "with_missing": String(100), "dttm": Date(), "value": Float(), }, index=False, ) print("Done loading table!") print("-" * 80) print("Creating table [unicode_test] reference") obj = db.session.query(TBL).filter_by(table_name="unicode_test").first() if not obj: obj = TBL(table_name="unicode_test") obj.main_dttm_col = "dttm" obj.database = utils.get_or_create_main_db() db.session.merge(obj) db.session.commit() obj.fetch_metadata() tbl = obj slice_data = { "granularity_sqla": "dttm", "groupby": [], "metric": { "aggregate": "SUM", "column": { "column_name": "value" }, "expressionType": "SIMPLE", "label": "Value", }, "row_limit": config.get("ROW_LIMIT"), "since": "100 years ago", "until": "now", "where": "", "viz_type": "word_cloud", "size_from": "10", "series": "short_phrase", "size_to": "70", "rotation": "square", "limit": "100", } print("Creating a slice") slc = Slice( slice_name="Unicode Cloud", viz_type="word_cloud", datasource_type="table", datasource_id=tbl.id, params=get_slice_json(slice_data), ) merge_slice(slc) print("Creating a dashboard") dash = db.session.query(Dash).filter_by( dashboard_title="Unicode Test").first() if not dash: dash = Dash() js = """\ { "CHART-Hkx6154FEm": { "children": [], "id": "CHART-Hkx6154FEm", "meta": { "chartId": 2225, "height": 30, "sliceName": "slice 1", "width": 4 }, "type": "CHART" }, "GRID_ID": { "children": [ "ROW-SyT19EFEQ" ], "id": "GRID_ID", "type": "GRID" }, "ROOT_ID": { "children": [ "GRID_ID" ], "id": "ROOT_ID", "type": "ROOT" }, "ROW-SyT19EFEQ": { "children": [ "CHART-Hkx6154FEm" ], "id": "ROW-SyT19EFEQ", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "type": "ROW" }, "DASHBOARD_VERSION_KEY": "v2" } """ dash.dashboard_title = "Unicode Test" pos = json.loads(js) update_slice_ids(pos, [slc]) dash.position_json = json.dumps(pos, indent=4) dash.slug = "unicode-test" dash.slices = [slc] db.session.merge(dash) db.session.commit()
def test_float_setinputsizes(self): self._test_setinputsizes(Float(15), 25.34534, None)
def test_compare_float_text(self): self._compare_default_roundtrip( Float(), text("5.2"), )
class Quote(Base): __tablename__ = 'quotes' id = Column(Integer, primary_key=True) stock_id = Column(ForeignKey('stocks.id')) stock = relationship('Stock', back_populates='quotes') symbol = Column(String(30)) symbol_name = Column(String(255)) stock_exchange = Column(String(30)) trade_currency = Column(String(30)) date_inserted = Column(DateTime, default=now) date_dividend_ex = Column(DateTime) dividend_pay_date = Column(DateTime) dividend_share = Column(Float()) dividend_yield = Column(Float()) date_last_traded = Column(DateTime) ebitda = Column(Float()) eps_current = Column(Float()) eps_current_year = Column(Float()) eps_next_quarter = Column(Float()) eps_next_year = Column(Float()) eps_price_est_current_year = Column(Float()) eps_price_est_next_year = Column(Float()) errored_symbol = Column(Boolean(), default=False) limit_high = Column(Float()) limit_low = Column(Float()) market_cap = Column(BigInteger) pct_change_200_day_avg = Column(Float(precision=4)) pct_change_50_day_avg = Column(Float(precision=4)) pct_change_current = Column(Float(precision=4)) pct_change_today = Column(Float(precision=4)) pct_change_year_high = Column(Float(precision=4)) pct_change_year_low = Column(Float(precision=4)) pct_day_change = Column(Float(precision=4)) pe_ratio = Column(Float(precision=4)) peg_ratio = Column(Float(precision=4)) price_200_day_moving_avg = Column(Float(precision=4)) price_50_day_moving_avg = Column(Float(precision=4)) price_ask = Column(Float(precision=4)) price_bid = Column(Float(precision=4)) price_book = Column(Float(precision=4)) price_book_value = Column(Float(precision=4)) price_change_200_day_avg = Column(Float(precision=4)) price_change_50_day_avg = Column(Float(precision=4)) price_change_year_high = Column(Float(precision=4)) price_change_year_low = Column(Float(precision=4)) price_day_change = Column(Float(precision=4)) price_day_high = Column(Float(precision=4)) price_day_low = Column(Float(precision=4)) price_day_open = Column(Float(precision=4)) price_last_close = Column(Float(precision=4)) price_last_trade = Column(Float(precision=4)) price_sales = Column(Float(precision=4)) price_year_high = Column(Float(precision=4)) price_year_low = Column(Float(precision=4)) price_year_target = Column(Float(precision=4)) range_day = Column(String(30)) range_year = Column(String(30)) short_ratio = Column(Float(precision=4)) volume_day = Column(Integer) volume_day_avg = Column(Integer) def __repr__(self): return """Quote(symbol='{}', symbol_name='{}', date_last_traded='{}', price_last_trade='{}', price_last_close='{}', date_inserted='{}'""".format( self.symbol, self.symbol_name, self.date_last_traded, self.price_last_trade, self.price_last_close, self.date_inserted)
def load_energy(): """Loads an energy related dataset to use with sankey and graphs""" tbl_name = 'energy_usage' with gzip.open(os.path.join(DATA_FOLDER, 'energy.json.gz')) as f: pdf = pd.read_json(f) pdf.to_sql( tbl_name, db.engine, if_exists='replace', chunksize=500, dtype={ 'source': String(255), 'target': String(255), 'value': Float(), }, index=False) print("Creating table [wb_health_population] reference") tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first() if not tbl: tbl = TBL(table_name=tbl_name) tbl.description = "Energy consumption" tbl.is_featured = True tbl.database = utils.get_or_create_main_db(caravel) db.session.merge(tbl) db.session.commit() tbl.fetch_metadata() merge_slice( Slice( slice_name="Energy Sankey", viz_type='sankey', datasource_type='table', datasource_id=tbl.id, params=textwrap.dedent("""\ { "collapsed_fieldsets": "", "datasource_id": "3", "datasource_name": "energy_usage", "datasource_type": "table", "flt_col_0": "source", "flt_eq_0": "", "flt_op_0": "in", "groupby": [ "source", "target" ], "having": "", "metric": "sum__value", "row_limit": "5000", "slice_id": "", "slice_name": "Energy Sankey", "viz_type": "sankey", "where": "" } """)) ) merge_slice( Slice( slice_name="Energy Force Layout", viz_type='directed_force', datasource_type='table', datasource_id=tbl.id, params=textwrap.dedent("""\ { "charge": "-500", "collapsed_fieldsets": "", "datasource_id": "1", "datasource_name": "energy_usage", "datasource_type": "table", "flt_col_0": "source", "flt_eq_0": "", "flt_op_0": "in", "groupby": [ "source", "target" ], "having": "", "link_length": "200", "metric": "sum__value", "row_limit": "5000", "slice_id": "229", "slice_name": "Force", "viz_type": "directed_force", "where": "" } """)) ) merge_slice( Slice( slice_name="Heatmap", viz_type='heatmap', datasource_type='table', datasource_id=tbl.id, params=textwrap.dedent("""\ { "all_columns_x": "source", "all_columns_y": "target", "canvas_image_rendering": "pixelated", "collapsed_fieldsets": "", "datasource_id": "1", "datasource_name": "energy_usage", "datasource_type": "table", "flt_col_0": "source", "flt_eq_0": "", "flt_op_0": "in", "having": "", "linear_color_scheme": "blue_white_yellow", "metric": "sum__value", "normalize_across": "heatmap", "slice_id": "229", "slice_name": "Heatmap", "viz_type": "heatmap", "where": "", "xscale_interval": "1", "yscale_interval": "1" } """)) )
def load_unicode_test_data(): """Loading unicode test dataset from a csv file in the repo""" df = pd.read_csv(os.path.join(DATA_FOLDER, 'unicode_utf8_unixnl_test.csv'), encoding="utf-8") # generate date/numeric data df['date'] = datetime.datetime.now().date() df['value'] = [random.randint(1, 100) for _ in range(len(df))] df.to_sql( 'unicode_test', db.engine, if_exists='replace', chunksize=500, dtype={ 'phrase': String(500), 'short_phrase': String(10), 'with_missing': String(100), 'date': Date(), 'value': Float(), }, index=False) print("Done loading table!") print("-" * 80) print("Creating table [unicode_test] reference") obj = db.session.query(TBL).filter_by(table_name='unicode_test').first() if not obj: obj = TBL(table_name='unicode_test') obj.main_dttm_col = 'date' obj.database = utils.get_or_create_main_db(caravel) obj.is_featured = False db.session.merge(obj) db.session.commit() obj.fetch_metadata() tbl = obj slice_data = { "datasource_id": "3", "datasource_name": "unicode_test", "datasource_type": "table", "flt_op_1": "in", "granularity": "date", "groupby": [], "metric": 'sum__value', "row_limit": config.get("ROW_LIMIT"), "since": "100 years ago", "until": "now", "where": "", "viz_type": "word_cloud", "size_from": "10", "series": "short_phrase", "size_to": "70", "rotation": "square", "limit": "100", } print("Creating a slice") slc = Slice( slice_name="Unicode Cloud", viz_type='word_cloud', datasource_type='table', datasource_id=tbl.id, params=get_slice_json(slice_data), ) merge_slice(slc) print("Creating a dashboard") dash = db.session.query(Dash).filter_by(dashboard_title="Unicode Test").first() if not dash: dash = Dash() pos = { "size_y": 4, "size_x": 4, "col": 1, "row": 1, "slice_id": slc.id, } dash.dashboard_title = "Unicode Test" dash.position_json = json.dumps([pos], indent=4) dash.slug = "unicode-test" dash.slices = [slc] db.session.merge(dash) db.session.commit()
class Place(BaseModel, Base):
    """This is the class for Place
    Attributes:
        city_id: city id
        user_id: user id
        name: name input
        description: string of description
        number_rooms: number of rooms in int
        number_bathrooms: number of bathrooms in int
        max_guest: maximum guests in int
        price_by_night: price for a stay in int
        latitude: latitude in float
        longitude: longitude in float
        amenity_ids: list of Amenity ids
    """
    if os_type_storage == "db":
        __tablename__ = "places"
        city_id = Column(String(60), ForeignKey('cities.id'), nullable=False)
        user_id = Column(String(60), ForeignKey('users.id'), nullable=False)
        name = Column(String(128), nullable=False)
        description = Column(String(1024), nullable=True)
        number_rooms = Column(Integer, default=0, nullable=False)
        number_bathrooms = Column(Integer, default=0, nullable=False)
        max_guest = Column(Integer, default=0, nullable=False)
        price_by_night = Column(Integer, default=0, nullable=False)
        latitude = Column(Float(), nullable=True)
        longitude = Column(Float(), nullable=True)
        reviews = relationship('Review', cascade="all, delete", backref='place')
        amenities = relationship("Amenity", secondary=place_amenity,
                                 viewonly=False, backref='place_amenities')
    else:
        city_id = ""
        user_id = ""
        name = ""
        description = ""
        number_rooms = 0
        number_bathrooms = 0
        max_guest = 0
        price_by_night = 0
        latitude = 0.0
        longitude = 0.0
        amenity_ids = []

        @property
        def get_reviews(self):
            """Return the Review instances whose place_id matches this Place."""
            my_list = []
            reviews_dict = models.storage.all(Review)
            for key, value in reviews_dict.items():
                if self.id == value.place_id:
                    my_list.append(value)
            return my_list

        @property
        def amenities(self):
            """Return the Amenity instances whose ids are listed in amenity_ids."""
            my_list = []
            amen_dict = models.storage.all(models.amenity.Amenity)
            for item in amen_dict.values():
                if item.id in self.amenity_ids:
                    my_list.append(item)
            return my_list

        @amenities.setter
        def amenities(self, obj):
            if isinstance(obj, Amenity):
                self.amenity_ids.append(obj.id)
class FundStatsHF(Base): __tablename__ = 'fund_statistics' __table_args__ = {'schema': 'cbaas'} id = Column(Integer, primary_key=True) created = Column(String(255)) dateAdd = Column(DateTime) lastUpdated = Column(DateTime) lastUpdatedby = Column(String(255)) fund_id = Column(Integer) S_INFO_WINDCODE = Column(String(255)) as_of_date_ = Column(Date) aum = Column(Float(6)) hasExceptio = Column(Integer) latest = Column(Integer) nav = Column(Float(6)) pre_sycamore_scoring_ty = Column(Integer) pre_sycamore_scoring_value = Column(Float(6)) rawDate = Column(Date) rawReturns = Column(Float(6)) rawRiskFreeRate = Column(Float(6)) reCalculate = Column(Float(6)) return_ = Column(Float(6)) start_date_ = Column(Date) sycamore_scoring_ty = Column(Integer) sycamore_scoring_value = Column(Float(6)) CAPM_Alpha1Y = Column(Float(6)) CAPM_Alpha2Y = Column(Float(6)) CAPM_Alpha3M = Column(Float(6)) CAPM_Alpha3Y = Column(Float(6)) CAPM_Alpha5Y = Column(Float(6)) CAPM_Alpha6M = Column(Float(6)) CAPM_Beta1Y = Column(Float(6)) CAPM_Beta2Y = Column(Float(6)) CAPM_Beta3M = Column(Float(6)) CAPM_Beta3Y = Column(Float(6)) CAPM_Beta5Y = Column(Float(6)) CAPM_Beta6M = Column(Float(6)) GII_Alpha1Y = Column(Float(6)) GII_Alpha2Y = Column(Float(6)) GII_Alpha3M = Column(Float(6)) GII_Alpha3Y = Column(Float(6)) GII_Alpha5Y = Column(Float(6)) GII_Alpha6M = Column(Float(6)) GII_Alpha_tstat1Y = Column(Float(6)) GII_Alpha_tstat2Y = Column(Float(6)) GII_Alpha_tstat3M = Column(Float(6)) GII_Alpha_tstat3Y = Column(Float(6)) GII_Alpha_tstat5Y = Column(Float(6)) GII_Alpha_tstat6M = Column(Float(6)) GII_Beta1Y = Column(Float(6)) GII_Beta2Y = Column(Float(6)) GII_Beta3M = Column(Float(6)) GII_Beta3Y = Column(Float(6)) GII_Beta5Y = Column(Float(6)) GII_Beta6M = Column(Float(6)) GII_Gamma1Y = Column(Float(6)) GII_Gamma2Y = Column(Float(6)) GII_Gamma3M = Column(Float(6)) GII_Gamma3Y = Column(Float(6)) GII_Gamma5Y = Column(Float(6)) GII_Gamma6M = Column(Float(6)) GII_gamma_tstat1Y = Column(Float(6)) GII_gamma_tstat2Y = Column(Float(6)) GII_gamma_tstat3M = Column(Float(6)) GII_gamma_tstat3Y = Column(Float(6)) GII_gamma_tstat5Y = Column(Float(6)) GII_gamma_tstat6M = Column(Float(6)) MH_Alpha1Y = Column(Float(6)) MH_Alpha2Y = Column(Float(6)) MH_Alpha3M = Column(Float(6)) MH_Alpha3Y = Column(Float(6)) MH_Alpha5Y = Column(Float(6)) MH_Alpha6M = Column(Float(6)) MH_Gamma1Y = Column(Float(6)) MH_Gamma2Y = Column(Float(6)) MH_Gamma3M = Column(Float(6)) MH_Gamma3Y = Column(Float(6)) MH_Gamma5Y = Column(Float(6)) MH_Gamma6M = Column(Float(6)) MH_Gamma_tstat1Y = Column(Float(6)) MH_Gamma_tstat2Y = Column(Float(6)) MH_Gamma_tstat3M = Column(Float(6)) MH_Gamma_tstat3Y = Column(Float(6)) MH_Gamma_tstat5Y = Column(Float(6)) MH_Gamma_tstat6M = Column(Float(6)) TM_Alpha1Y = Column(Float(6)) TM_Alpha2Y = Column(Float(6)) TM_Alpha3M = Column(Float(6)) TM_Alpha3Y = Column(Float(6)) TM_Alpha5Y = Column(Float(6)) TM_Alpha6M = Column(Float(6)) TM_Gamma1Y = Column(Float(6)) TM_Gamma2Y = Column(Float(6)) TM_Gamma3M = Column(Float(6)) TM_Gamma3Y = Column(Float(6)) TM_Gamma5Y = Column(Float(6)) TM_Gamma6M = Column(Float(6)) TM_Gamma_tstat1Y = Column(Float(6)) TM_Gamma_tstat2Y = Column(Float(6)) TM_Gamma_tstat3M = Column(Float(6)) TM_Gamma_tstat3Y = Column(Float(6)) TM_Gamma_tstat5Y = Column(Float(6)) TM_Gamma_tstat6M = Column(Float(6)) activePremium1Y = Column(Float(6)) activePremium2Y = Column(Float(6)) activePremium3M = Column(Float(6)) activePremium3Y = Column(Float(6)) activePremium5Y = Column(Float(6)) activePremium6M = Column(Float(6)) annualizedAlpha1Y = 
Column(Float(6)) annualizedAlpha2Y = Column(Float(6)) annualizedAlpha3M = Column(Float(6)) annualizedAlpha3Y = Column(Float(6)) annualizedAlpha5Y = Column(Float(6)) annualizedAlpha6M = Column(Float(6)) appraisalRatio1Y = Column(Float(6)) appraisalRatio2Y = Column(Float(6)) appraisalRatio3M = Column(Float(6)) appraisalRatio3Y = Column(Float(6)) appraisalRatio5Y = Column(Float(6)) appraisalRatio6M = Column(Float(6)) autoCorrelation1Y = Column(Float(6)) autoCorrelation2Y = Column(Float(6)) autoCorrelation3M = Column(Float(6)) autoCorrelation3Y = Column(Float(6)) autoCorrelation5Y = Column(Float(6)) autoCorrelation6M = Column(Float(6)) calmarRatio1Y = Column(Float(6)) calmarRatio2Y = Column(Float(6)) calmarRatio3M = Column(Float(6)) calmarRatio3Y = Column(Float(6)) calmarRatio5Y = Column(Float(6)) calmarRatio6M = Column(Float(6)) conditionalVar_nonParametric1Y = Column(Float(6)) conditionalVar_nonParametric2Y = Column(Float(6)) conditionalVar_nonParametric3M = Column(Float(6)) conditionalVar_nonParametric3Y = Column(Float(6)) conditionalVar_nonParametric5Y = Column(Float(6)) conditionalVar_nonParametric6M = Column(Float(6)) d4Ratio1Y = Column(Float(6)) d4Ratio2Y = Column(Float(6)) d4Ratio3M = Column(Float(6)) d4Ratio3Y = Column(Float(6)) d4Ratio5Y = Column(Float(6)) d4Ratio6M = Column(Float(6)) d5Ratio1Y = Column(Float(6)) d5Ratio2Y = Column(Float(6)) d5Ratio3M = Column(Float(6)) d5Ratio3Y = Column(Float(6)) d5Ratio5Y = Column(Float(6)) d5Ratio6M = Column(Float(6)) downsideDeviationRatio1Y = Column(Float(6)) downsideDeviationRatio2Y = Column(Float(6)) downsideDeviationRatio3M = Column(Float(6)) downsideDeviationRatio3Y = Column(Float(6)) downsideDeviationRatio5Y = Column(Float(6)) downsideDeviationRatio6M = Column(Float(6)) excessReturnsRatio1Y = Column(Float(6)) excessReturnsRatio2Y = Column(Float(6)) excessReturnsRatio3M = Column(Float(6)) excessReturnsRatio3Y = Column(Float(6)) excessReturnsRatio5Y = Column(Float(6)) excessReturnsRatio6M = Column(Float(6)) ff3_alpha1Y = Column(Float(6)) ff3_alpha2Y = Column(Float(6)) ff3_alpha3M = Column(Float(6)) ff3_alpha3Y = Column(Float(6)) ff3_alpha5Y = Column(Float(6)) ff3_alpha6M = Column(Float(6)) ff3_rsq1Y = Column(Float(6)) ff3_rsq2Y = Column(Float(6)) ff3_rsq3M = Column(Float(6)) ff3_rsq3Y = Column(Float(6)) ff3_rsq5Y = Column(Float(6)) ff3_rsq6M = Column(Float(6)) ff3_sysrisk1Y = Column(Float(6)) ff3_sysrisk2Y = Column(Float(6)) ff3_sysrisk3M = Column(Float(6)) ff3_sysrisk3Y = Column(Float(6)) ff3_sysrisk5Y = Column(Float(6)) ff3_sysrisk6M = Column(Float(6)) ff3_tstat1Y = Column(Float(6)) ff3_tstat2Y = Column(Float(6)) ff3_tstat3M = Column(Float(6)) ff3_tstat3Y = Column(Float(6)) ff3_tstat5Y = Column(Float(6)) ff3_tstat6M = Column(Float(6)) fungHsiehAlpha1Y = Column(Float(6)) fungHsiehAlpha2Y = Column(Float(6)) fungHsiehAlpha3M = Column(Float(6)) fungHsiehAlpha3Y = Column(Float(6)) fungHsiehAlpha5Y = Column(Float(6)) fungHsiehAlpha6M = Column(Float(6)) fungHsiehAlphatStat1Y = Column(Float(6)) fungHsiehAlphatStat2Y = Column(Float(6)) fungHsiehAlphatStat3M = Column(Float(6)) fungHsiehAlphatStat3Y = Column(Float(6)) fungHsiehAlphatStat5Y = Column(Float(6)) fungHsiehAlphatStat6M = Column(Float(6)) fungHsiehRsq1Y = Column(Float(6)) fungHsiehRsq2Y = Column(Float(6)) fungHsiehRsq3M = Column(Float(6)) fungHsiehRsq3Y = Column(Float(6)) fungHsiehRsq5Y = Column(Float(6)) fungHsiehRsq6M = Column(Float(6)) fungHsiehSysRisk1Y = Column(Float(6)) fungHsiehSysRisk2Y = Column(Float(6)) fungHsiehSysRisk3M = Column(Float(6)) fungHsiehSysRisk3Y = Column(Float(6)) 
fungHsiehSysRisk5Y = Column(Float(6)) fungHsiehSysRisk6M = Column(Float(6)) informationRatio1Y = Column(Float(6)) informationRatio2Y = Column(Float(6)) informationRatio3M = Column(Float(6)) informationRatio3Y = Column(Float(6)) informationRatio5Y = Column(Float(6)) informationRatio6M = Column(Float(6)) kurtosis1Y = Column(Float(6)) kurtosis2Y = Column(Float(6)) kurtosis3M = Column(Float(6)) kurtosis3Y = Column(Float(6)) kurtosis5Y = Column(Float(6)) kurtosis6M = Column(Float(6)) maxDrawdownRatio1Y = Column(Float(6)) maxDrawdownRatio2Y = Column(Float(6)) maxDrawdownRatio3M = Column(Float(6)) maxDrawdownRatio3Y = Column(Float(6)) maxDrawdownRatio5Y = Column(Float(6)) maxDrawdownRatio6M = Column(Float(6)) mean1Y = Column(Float(6)) mean2Y = Column(Float(6)) mean3M = Column(Float(6)) mean3Y = Column(Float(6)) mean5Y = Column(Float(6)) mean6M = Column(Float(6)) median1Y = Column(Float(6)) median2Y = Column(Float(6)) median3M = Column(Float(6)) median3Y = Column(Float(6)) median5Y = Column(Float(6)) median6M = Column(Float(6)) r3Ratio1Y = Column(Float(6)) r3Ratio2Y = Column(Float(6)) r3Ratio3M = Column(Float(6)) r3Ratio3Y = Column(Float(6)) r3Ratio5Y = Column(Float(6)) r3Ratio6M = Column(Float(6)) returns1Y = Column(Float(6)) returns2Y = Column(Float(6)) returns3M = Column(Float(6)) returns3Y = Column(Float(6)) returns5Y = Column(Float(6)) returns6M = Column(Float(6)) returns1M = Column(Float(6)) returnsMTD = Column(Float(6)) returnsTotal = Column(Float(6)) returnsYTD = Column(Float(6)) riskAdjustedReturns1Y = Column(Float(6)) riskAdjustedReturns2Y = Column(Float(6)) riskAdjustedReturns3M = Column(Float(6)) riskAdjustedReturns3Y = Column(Float(6)) riskAdjustedReturns5Y = Column(Float(6)) riskAdjustedReturns6M = Column(Float(6)) riskAdjustedReturns = Column(Float(6)) riskAdjustedReturnsMTD = Column(Float(6)) riskAdjustedReturnsTotal = Column(Float(6)) riskAdjustedReturnsYTD = Column(Float(6)) sharpRatio1Y = Column(Float(6)) sharpRatio2Y = Column(Float(6)) sharpRatio3M = Column(Float(6)) sharpRatio3Y = Column(Float(6)) sharpRatio5Y = Column(Float(6)) sharpRatio6M = Column(Float(6)) skewness1Y = Column(Float(6)) skewness2Y = Column(Float(6)) skewness3M = Column(Float(6)) skewness3Y = Column(Float(6)) skewness5Y = Column(Float(6)) skewness6M = Column(Float(6)) sortinoRatio1Y = Column(Float(6)) sortinoRatio2Y = Column(Float(6)) sortinoRatio3M = Column(Float(6)) sortinoRatio3Y = Column(Float(6)) sortinoRatio5Y = Column(Float(6)) sortinoRatio6M = Column(Float(6)) standardDeviationRatio1Y = Column(Float(6)) standardDeviationRatio2Y = Column(Float(6)) standardDeviationRatio3M = Column(Float(6)) standardDeviationRatio3Y = Column(Float(6)) standardDeviationRatio5Y = Column(Float(6)) standardDeviationRatio6M = Column(Float(6)) tailRisk_nonParametric1Y = Column(Float(6)) tailRisk_nonParametric2Y = Column(Float(6)) tailRisk_nonParametric3M = Column(Float(6)) tailRisk_nonParametric3Y = Column(Float(6)) tailRisk_nonParametric5Y = Column(Float(6)) tailRisk_nonParametric6M = Column(Float(6)) trackingError1Y = Column(Float(6)) trackingError2Y = Column(Float(6)) trackingError3M = Column(Float(6)) trackingError3Y = Column(Float(6)) trackingError5Y = Column(Float(6)) trackingError6M = Column(Float(6)) treynorRatio1Y = Column(Float(6)) treynorRatio2Y = Column(Float(6)) treynorRatio3M = Column(Float(6)) treynorRatio3Y = Column(Float(6)) treynorRatio5Y = Column(Float(6)) treynorRatio6M = Column(Float(6)) upsideDeviationRatio1Y = Column(Float(6)) upsideDeviationRatio2Y = Column(Float(6)) upsideDeviationRatio3M = 
Column(Float(6)) upsideDeviationRatio3Y = Column(Float(6)) upsideDeviationRatio5Y = Column(Float(6)) upsideDeviationRatio6M = Column(Float(6)) var_nonParametric1Y = Column(Float(6)) var_nonParametric2Y = Column(Float(6)) var_nonParametric3M = Column(Float(6)) var_nonParametric3Y = Column(Float(6)) var_nonParametric5Y = Column(Float(6)) var_nonParametric6M = Column(Float(6))
FutureFactor = Table( "future_factor", metadata, Column("id", Integer, primary_key=True), Column("futureId", Integer), Column("factorId", Integer), Column("tableNames", String(500)), # perhaps exceed the limitation Column("modelId", Integer), Column("name", String(100)), Column("version", String(50)), Column("component", String(1000)), Column("fCate", Integer), Column("generateTime", DateTime), Column("onlineTime", DateTime), Column("predict", Integer), Column("predictValue", Float(precision=6)), Column("confidence", Float(precision=3)), Column("accuracy1m", Float(precision=3)), Column("accuracy3m", Float(precision=3)), Column("accuracy6m", Float(precision=3)), Column("accuracy12m", Float(precision=3)), Column("trainInfo", String(1000)), Column("testInfo", String(1000)), Column("loadIndex", Float(precision=3)), Column("corrIndex", Float(precision=3)), Column("corr", String(255)), Column("mi", String(255)), Column("heatMap", String(255)), Column("enable", Integer), Column("status", Integer))
class Exposure(Base): __tablename__ = 'risk_exposure' __table_args__ = (Index('trade_date', 'security_code', unique=True), ) trade_date = Column(DateTime, primary_key=True, nullable=False) # symbol = Column(String, primary_key=True, nullable=False) security_code = Column(VARCHAR(24), primary_key=True) BETA = Column(Float(53)) MOMENTUM = Column(Float(53)) SIZE = Column(Float(53)) EARNYILD = Column(Float(53)) RESVOL = Column(Float(53)) GROWTH = Column(Float(53)) BTOP = Column(Float(53)) LEVERAGE = Column(Float(53)) LIQUIDTY = Column(Float(53)) SIZENL = Column(Float(53)) Bank = Column(Float(53)) RealEstate = Column(Float(53)) Health = Column(Float(53)) Transportation = Column(Float(53)) Mining = Column(Float(53)) NonFerMetal = Column(Float(53)) HouseApp = Column(Float(53)) LeiService = Column(Float(53)) MachiEquip = Column(Float(53)) BuildDeco = Column(Float(53)) CommeTrade = Column(Float(53)) CONMAT = Column(Float(53)) Auto = Column(Float(53)) Textile = Column(Float(53)) FoodBever = Column(Float(53)) Electronics = Column(Float(53)) Computer = Column(Float(53)) LightIndus = Column(Float(53)) Utilities = Column(Float(53)) Telecom = Column(Float(53)) AgriForest = Column(Float(53)) CHEM = Column(Float(53)) Media = Column(Float(53)) IronSteel = Column(Float(53)) NonBankFinan = Column(Float(53)) ELECEQP = Column(Float(53)) AERODEF = Column(Float(53)) Conglomerates = Column(Float(53)) COUNTRY = Column(Float(53))
class Headers(Base): __tablename__ = "headers" id = Column(Integer, primary_key=True) filetype = Column(String(67)) instrume = Column(String(3)) rootname = Column(String(9)) imagetyp = Column(String(20)) targname = Column(String(67)) ra_targ = Column(Float(20)) dec_targ = Column(Float(20)) proposid = Column(Integer) qualcom1 = Column(String(67)) qualcom2 = Column(String(67)) qualcom3 = Column(String(67)) quality = Column(String(67)) opus_ver = Column(String(30)) postarg1 = Column(Float(32)) postarg2 = Column(Float) cal_ver = Column(String(30)) proctime = Column(Float) obstype = Column(String(20)) obsmode = Column(String(20)) exptype = Column(String(20)) detector = Column(String(20)) segment = Column(String(20)) detecthv = Column(String(20)) life_adj = Column(Integer) fppos = Column(Integer) exp_num = Column(Integer) cenwave = Column(Integer) propaper = Column(String(20)) apmpos = Column(String(20)) aperxpos = Column(Float) aperypos = Column(Float) aperture = Column(String(8)) opt_elem = Column(String(7)) shutter = Column(String(20)) extended = Column(String(20)) obset_id = Column(String(2)) asn_id = Column(String(9)) asn_tab = Column(String(18)) randseed = Column(BigInteger) asn_mtyp = Column(String(20)) overflow = Column(Integer) nevents = Column(Integer) neventsa = Column(Float) neventsb = Column(Float) dethvla = Column(Integer) dethvlb = Column(Integer) deventa = Column(Float) deventb = Column(Float) feventa = Column(Float) feventb = Column(Float) hvlevela = Column(Integer) hvlevelb = Column(Integer) dpixel1a = Column(Float) dpixel1b = Column(Float) date_obs = Column(String(10)) time_obs = Column(String(8)) expstart = Column(Float) expend = Column(Float) exptime = Column(Float) numflash = Column(Integer) ra_aper = Column(Float) dec_aper = Column(Float) shift1a = Column(Float) shift2a = Column(Float) shift1b = Column(Float) shift2b = Column(Float) shift1c = Column(Float) shift2c = Column(Float) sp_loc_a = Column(Float) sp_loc_b = Column(Float) sp_loc_c = Column(Float) sp_nom_a = Column(Float) sp_nom_b = Column(Float) sp_nom_c = Column(Float) sp_off_a = Column(Float) sp_off_b = Column(Float) sp_off_c = Column(Float) sp_err_a = Column(Float) sp_err_b = Column(Float) sp_err_c = Column(Float) #NUV keywords dethvl = Column(Float) cycle = Column(Integer) file_id = Column(Integer, ForeignKey('files.id')) #file = relationship("Files", backref=backref('headers', order_by=id)) __table_args__ = (Index('idx_rootname', 'rootname', unique=False), ) __table_args__ = (Index('idx_config', 'segment', 'fppos', 'cenwave', 'opt_elem', unique=False), )
def result(self, context: TelRootContext) -> TelQueryResult: return TelQueryResult(literal(self._value, Float()), dialect=context.husky_dialect)
def load_long_lat_data(): """Loading lat/long data from a csv file in the repo""" with gzip.open(os.path.join(DATA_FOLDER, 'san_francisco.csv.gz')) as f: pdf = pd.read_csv(f, encoding="utf-8") pdf['date'] = datetime.datetime.now().date() pdf['occupancy'] = [random.randint(1, 6) for _ in range(len(pdf))] pdf['radius_miles'] = [random.uniform(1, 3) for _ in range(len(pdf))] pdf.to_sql( 'long_lat', db.engine, if_exists='replace', chunksize=500, dtype={ 'longitude': Float(), 'latitude': Float(), 'number': Float(), 'street': String(100), 'unit': String(10), 'city': String(50), 'district': String(50), 'region': String(50), 'postcode': Float(), 'id': String(100), 'date': Date(), 'occupancy': Float(), 'radius_miles': Float(), }, index=False) print("Done loading table!") print("-" * 80) print("Creating table reference") obj = db.session.query(TBL).filter_by(table_name='long_lat').first() if not obj: obj = TBL(table_name='long_lat') obj.main_dttm_col = 'date' obj.database = utils.get_or_create_main_db(caravel) obj.is_featured = False db.session.merge(obj) db.session.commit() obj.fetch_metadata() tbl = obj slice_data = { "datasource_id": "7", "datasource_name": "long_lat", "datasource_type": "table", "granularity": "day", "since": "2014-01-01", "until": "2016-12-12", "where": "", "viz_type": "mapbox", "all_columns_x": "LON", "all_columns_y": "LAT", "mapbox_style": "mapbox://styles/mapbox/light-v9", "all_columns": ["occupancy"], "row_limit": 500000, } print("Creating a slice") slc = Slice( slice_name="Mapbox Long/Lat", viz_type='mapbox', datasource_type='table', datasource_id=tbl.id, params=get_slice_json(slice_data), ) merge_slice(slc)
def test_compare_float_str(self): self._compare_default_roundtrip( Float(), "5.2", )
def load_long_lat_data(only_metadata: bool = False, force: bool = False) -> None: """Loading lat/long data from a csv file in the repo""" tbl_name = "long_lat" database = utils.get_example_database() table_exists = database.has_table_by_name(tbl_name) if not only_metadata and (not table_exists or force): data = get_example_data("san_francisco.csv.gz", make_bytes=True) pdf = pd.read_csv(data, encoding="utf-8") start = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) pdf["datetime"] = [ start + datetime.timedelta(hours=i * 24 / (len(pdf) - 1)) for i in range(len(pdf)) ] pdf["occupancy"] = [random.randint(1, 6) for _ in range(len(pdf))] pdf["radius_miles"] = [random.uniform(1, 3) for _ in range(len(pdf))] pdf["geohash"] = pdf[["LAT", "LON"]].apply(lambda x: geohash.encode(*x), axis=1) pdf["delimited"] = pdf["LAT"].map(str).str.cat(pdf["LON"].map(str), sep=",") pdf.to_sql( tbl_name, database.get_sqla_engine(), if_exists="replace", chunksize=500, dtype={ "longitude": Float(), "latitude": Float(), "number": Float(), "street": String(100), "unit": String(10), "city": String(50), "district": String(50), "region": String(50), "postcode": Float(), "id": String(100), "datetime": DateTime(), "occupancy": Float(), "radius_miles": Float(), "geohash": String(12), "delimited": String(60), }, index=False, ) print("Done loading table!") print("-" * 80) print("Creating table reference") table = get_table_connector_registry() obj = db.session.query(table).filter_by(table_name=tbl_name).first() if not obj: obj = table(table_name=tbl_name) obj.main_dttm_col = "datetime" obj.database = database obj.filter_select_enabled = True db.session.merge(obj) db.session.commit() obj.fetch_metadata() tbl = obj slice_data = { "granularity_sqla": "day", "since": "2014-01-01", "until": "now", "viz_type": "mapbox", "all_columns_x": "LON", "all_columns_y": "LAT", "mapbox_style": "mapbox://styles/mapbox/light-v9", "all_columns": ["occupancy"], "row_limit": 500000, } print("Creating a slice") slc = Slice( slice_name="Mapbox Long/Lat", viz_type="mapbox", datasource_type="table", datasource_id=tbl.id, params=get_slice_json(slice_data), ) misc_dash_slices.add(slc.slice_name) merge_slice(slc)
def test_compare_float_no_diff6(self): self._compare_default_roundtrip(Float(), "5", text("5.0"), diff_expected=False)
class UT_table: # connection string postgres" internal_connection = Connection() # create engine and metadata engine = create_engine(internal_connection.conn_str(), echo=False, convert_unicode=True) metadata = MetaData(engine) # define tables ut_table = Table( 'ut_table', metadata, Column('id_ut', Integer, primary_key=True), # 0 Column('progetto', String(100)), # 1 Column('nr_ut', Integer), # 2 Column('ut_letterale', String(100)), # 3 Column('def_ut', String(100)), # 4 Column('descrizione_ut', Text), # 5 Column('interpretazione_ut', String(100)), # 6 Column('nazione', String(100)), # 7 Column('regione', String(100)), # 8 Column('provincia', String(100)), # 9 Column('comune', String(100)), # 10 Column('frazione', String(100)), # 11 Column('localita', String(100)), # 12 Column('indirizzo', String(100)), # 13 Column('nr_civico', String(100)), # 14 Column('carta_topo_igm', String(100)), # 15 Column('carta_ctr', String(100)), # 16 Column('coord_geografiche', String(100)), # 17 Column('coord_piane', String(100)), # 18 Column('quota', Float(3, 2)), # 19 Column('andamento_terreno_pendenza', String(100)), # 20 Column('utilizzo_suolo_vegetazione', String(100)), # 21 Column('descrizione_empirica_suolo', Text), # 22 Column('descrizione_luogo', Text), # 23 Column('metodo_rilievo_e_ricognizione', String(100)), # 24 Column('geometria', String(100)), # 25 Column('bibliografia', Text), # 26 Column('data', String(100)), # 27 Column('ora_meteo', String(100)), # 28 Column('responsabile', String(100)), # 29 Column('dimensioni_ut', String(100)), # 30 Column('rep_per_mq', String(100)), # 31 Column('rep_datanti', String(100)), # 32 Column('periodo_I', String(100)), # 33 Column('datazione_I', String(100)), # 34 Column('interpretazione_I', String(100)), # 35 Column('periodo_II', String(100)), # 36 Column('datazione_II', String(100)), # 37 Column('interpretazione_II', String(100)), # 38 Column('documentazione', Text), # 39 Column('enti_tutela_vincoli', String(100)), # 40 Column('indagini_preliminari', String(100)), # 41 # explicit/composite unique constraint. 'name' is optional. UniqueConstraint('progetto', 'nr_ut', 'ut_letterale', name='ID_ut_unico')) metadata.create_all(engine)
# Create an engine that connects to the MyStocks.sqlite file: engine
engine = create_engine('sqlite:///MyStocks.sqlite')
metadata = MetaData()

# Print table names
print(engine.table_names())

# Define a new table with the OHLCV columns plus BigMove/Direction flags: data
data = Table(
    'Stocks', metadata,
    Column('Period', String(20)),  # Monthly, Weekly, Daily, Hourly, Minutely
    Column('Symbol', String(20)),
    Column('Date', Date()),
    Column('Open', Float()),
    Column('High', Float()),
    Column('Low', Float()),
    Column('Close', Float()),
    Column('Volume', Float()),
    Column('BigMove', Boolean(), default=False),
    Column('Direction', Integer(), default=0)  # 1 = up, -1 = down, 0 = default (no big move)
)

# Optional companion table describing each symbol
# data = Table('Company', metadata,
#              Column('Symbol', String(10), unique=True),
#              Column('Name', String(100), unique=True),
#              Column('Sector', String(100)),
#              Column('Year', Integer()),
class RoadWeatherDB(DeclarativeBase): __tablename__ = "road_table" id = Column(Integer, primary_key=True) precipitation = Column('precipitation', String(40)) air_temperature_trend = Column('air_temperature_trend', String(40)) dew_point = Column('dew_point', Float()) freezing_point2 = Column('freezing_point2', Float()) road_warning1 = Column('road_warning1', String(40)) road_warning2 = Column('road_warning2', String(40)) freezing_point1 = Column('freezing_point1', Float()) intensity = Column('intensity', Float()) visibility = Column('visibility', Float()) road_temperature1_trend = Column('road_temperature1_trend', Float()) road_temperature2_trend = Column('road_temperature2_trend', Float()) air_temperature = Column('air_temperature', Float()) time = Column('time', String(40)) road_condition2 = Column('road_condition2', String(40)) air_humidity = Column('air_humidity', Float()) station_name = Column('station_name', String(100)) road_temperature2 = Column('road_temperature2', Float()) road_condition1 = Column('road_condition1', String(40)) road_temperature1 = Column('road_temperature1', Float())
class Indicator(BaseModel): # 继承生成的orm基类 __tablename__ = "indicator" # 表名 bar_id = Column(Integer, ForeignKey("bar_daily.id")) ma5 = Column(Float(4)) ma10 = Column(Float(4)) ma30 = Column(Float(4)) ma60 = Column(Float(4)) ma120 = Column(Float(4)) ma240 = Column(Float(4)) boll_up = Column(Float(4)) boll_mb = Column(Float(4)) boll_dn = Column(Float(4)) macd = Column(Float(4)) rsi = Column(Float(4))
def test_float_as_decimal(self): self._do_test( Float(precision=8, asdecimal=True), [15.7563, decimal.Decimal("15.7563"), None], [decimal.Decimal("15.7563"), None], )
def test_numerics_broken_inspection(self): """Numeric scenarios where Oracle type info is 'broken', returning us precision, scale of the form (0, 0) or (0, -127). We convert to Decimal and let int()/float() processors take over. """ metadata = self.metadata # this test requires cx_oracle 5 foo = Table( "foo", metadata, Column("idata", Integer), Column("ndata", Numeric(20, 2)), Column("ndata2", Numeric(20, 2)), Column("nidata", Numeric(5, 0)), Column("fdata", Float()), ) foo.create() foo.insert().execute({ "idata": 5, "ndata": decimal.Decimal("45.6"), "ndata2": decimal.Decimal("45.0"), "nidata": decimal.Decimal("53"), "fdata": 45.68392, }) stmt = "SELECT idata, ndata, ndata2, nidata, fdata FROM foo" row = testing.db.execute(stmt).fetchall()[0] eq_( [type(x) for x in row], [int, decimal.Decimal, decimal.Decimal, int, float], ) eq_( row, ( 5, decimal.Decimal("45.6"), decimal.Decimal("45"), 53, 45.683920000000001, ), ) # with a nested subquery, # both Numeric values that don't have decimal places, regardless # of their originating type, come back as ints with no useful # typing information beyond "numeric". So native handler # must convert to int. # this means our Decimal converters need to run no matter what. # totally sucks. stmt = """ SELECT (SELECT (SELECT idata FROM foo) FROM DUAL) AS idata, (SELECT CAST((SELECT ndata FROM foo) AS NUMERIC(20, 2)) FROM DUAL) AS ndata, (SELECT CAST((SELECT ndata2 FROM foo) AS NUMERIC(20, 2)) FROM DUAL) AS ndata2, (SELECT CAST((SELECT nidata FROM foo) AS NUMERIC(5, 0)) FROM DUAL) AS nidata, (SELECT CAST((SELECT fdata FROM foo) AS FLOAT) FROM DUAL) AS fdata FROM dual """ row = testing.db.execute(stmt).fetchall()[0] eq_( [type(x) for x in row], [int, decimal.Decimal, int, int, decimal.Decimal], ) eq_( row, (5, decimal.Decimal("45.6"), 45, 53, decimal.Decimal("45.68392")), ) row = testing.db.execute( text( stmt, typemap={ "idata": Integer(), "ndata": Numeric(20, 2), "ndata2": Numeric(20, 2), "nidata": Numeric(5, 0), "fdata": Float(), }, )).fetchall()[0] eq_( [type(x) for x in row], [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float], ) eq_( row, ( 5, decimal.Decimal("45.6"), decimal.Decimal("45"), decimal.Decimal("53"), 45.683920000000001, ), ) stmt = """ SELECT anon_1.idata AS anon_1_idata, anon_1.ndata AS anon_1_ndata, anon_1.ndata2 AS anon_1_ndata2, anon_1.nidata AS anon_1_nidata, anon_1.fdata AS anon_1_fdata FROM (SELECT idata, ndata, ndata2, nidata, fdata FROM ( SELECT (SELECT (SELECT idata FROM foo) FROM DUAL) AS idata, (SELECT CAST((SELECT ndata FROM foo) AS NUMERIC(20, 2)) FROM DUAL) AS ndata, (SELECT CAST((SELECT ndata2 FROM foo) AS NUMERIC(20, 2)) FROM DUAL) AS ndata2, (SELECT CAST((SELECT nidata FROM foo) AS NUMERIC(5, 0)) FROM DUAL) AS nidata, (SELECT CAST((SELECT fdata FROM foo) AS FLOAT) FROM DUAL) AS fdata FROM dual ) WHERE ROWNUM >= 0) anon_1 """ row = testing.db.execute(stmt).fetchall()[0] eq_( [type(x) for x in row], [int, decimal.Decimal, int, int, decimal.Decimal], ) eq_( row, (5, decimal.Decimal("45.6"), 45, 53, decimal.Decimal("45.68392")), ) row = testing.db.execute( text( stmt, typemap={ "anon_1_idata": Integer(), "anon_1_ndata": Numeric(20, 2), "anon_1_ndata2": Numeric(20, 2), "anon_1_nidata": Numeric(5, 0), "anon_1_fdata": Float(), }, )).fetchall()[0] eq_( [type(x) for x in row], [int, decimal.Decimal, decimal.Decimal, decimal.Decimal, float], ) eq_( row, ( 5, decimal.Decimal("45.6"), decimal.Decimal("45"), decimal.Decimal("53"), 45.683920000000001, ), ) row = testing.db.execute( text( stmt, typemap={ "anon_1_idata": 
Integer(), "anon_1_ndata": Numeric(20, 2, asdecimal=False), "anon_1_ndata2": Numeric(20, 2, asdecimal=False), "anon_1_nidata": Numeric(5, 0, asdecimal=False), "anon_1_fdata": Float(asdecimal=True), }, )).fetchall()[0] eq_([type(x) for x in row], [int, float, float, float, decimal.Decimal]) eq_(row, (5, 45.6, 45, 53, decimal.Decimal("45.68392")))
class CompetenceOption(ConfigurableOption): """ A competence model (both for the main one and the sub-competences) :param int required_id: The id of the bareme element needed """ __table_args__ = default_table_args __colanderalchemy_config__ = { "title": u"Grille de compétences", "seq_widget_options": { "add_subitem_text_template": u"Ajouter une compétence", }, "validation_msg": u"La grille de compétences a bien été configurée", "help_msg": u"Définissez des compétences, celles-ci sont \ composées: <ul><li>D'un libellé</li>\ <li>D'un ensemble de sous-compétences</li></ul>" } id = get_id_foreignkey_col('configurable_option.id') # To be removed in 3.2 requirement = Column(Float(), default=0, info={'colanderalchemy': { "exclude": True }}) children = relationship( "CompetenceSubOption", primaryjoin="CompetenceOption.id==CompetenceSubOption.parent_id", info={ 'colanderalchemy': { 'title': u"Sous-compétences associées", "widget": deform.widget.SequenceWidget( add_subitem_text_template=u"Ajouter une \ sous-compétence", min_len=1, ) }, }, back_populates='parent', ) @classmethod def query(cls, active=True, *args): query = super(CompetenceOption, cls).query(*args) query = query.filter_by(active=active) return query def __json__(self, request): return dict( id=self.id, label=self.label, requirements=[req.__json__(request) for req in self.requirement], children=[child.__json__(request) for child in self.children], ) @classmethod def __radar_datas__(cls, deadline_id): result = [] for option in cls.query(): result.append({ 'axis': option.label, 'value': option.get_requirement(deadline_id) }) return result def get_requirement(self, deadline_id): for req in self.requirements: if req.deadline_id == deadline_id: return req.requirement return 0
class Filingtype(RefTypeMixin, AuditMixin, Model): __tablename__ = 'filingtype' id = Column(Integer, primary_key=True, autoincrement=True) cost = Column(Float(53), nullable=False) perpagecost = Column(Float(53), nullable=False)
class CompetenceRequirement(DBBASE): __colanderalchemy_config__ = { "title": u"Niveau de référence de la grille de compétence", "validation_msg": u"Les niveaux de référence de la grille de \ compétences ont bien été configurés", "help_msg": u"Pour chaque compétence, définissez les niveaux de \ référence à chaque échéance.", "seq_widget_options": { "add_subitem_text_template": u"Ajouter un niveau de référence", }, } competence_id = Column( ForeignKey('competence_option.id'), primary_key=True, info={'colanderalchemy': get_hidden_field_conf()}, ) deadline_id = Column( ForeignKey('competence_deadline.id'), primary_key=True, info={'colanderalchemy': get_hidden_field_conf()}, ) requirement = Column(Float(), default=0, info={ 'colanderalchemy': { "title": u"Niveau de référence", "widget": get_deferred_model_select( CompetenceScale, mandatory=True, keys=('value', 'label'), ) } }) competence = relationship( 'CompetenceOption', backref=backref('requirements', info={ 'colanderalchemy': { "title": u"Niveaux de référence pour cette compétence", }, }), info={ 'colanderalchemy': { "exclude": True }, }) deadline = relationship("CompetenceDeadline", backref=backref('requirements', info={ 'colanderalchemy': { "exclude": True }, }), info={ 'colanderalchemy': { "exclude": True }, }) def __json__(self, request): return dict( deadline_id=self.deadline_id, competence_id=self.competence_id, reqirement=self.requirement, deadline_label=self.deadline.label, )
def upgrade(migrate_engine): meta = MetaData(bind=migrate_engine) meter = Table('meter', meta, autoload=True) meter.c.counter_volume.alter(type=Float(53))
class Uqer(Base): __tablename__ = 'uqer' __table_args__ = (Index('uqer_idx', 'trade_date', 'code', unique=True), ) trade_date = Column(DateTime, primary_key=True, nullable=False) code = Column(Integer, primary_key=True, nullable=False) AccountsPayablesTDays = Column(Float(53)) AccountsPayablesTRate = Column(Float(53)) AdminiExpenseRate = Column(Float(53)) ARTDays = Column(Float(53)) ARTRate = Column(Float(53)) ASSI = Column(Float(53)) BLEV = Column(Float(53)) BondsPayableToAsset = Column(Float(53)) CashRateOfSales = Column(Float(53)) CashToCurrentLiability = Column(Float(53)) CMRA = Column(Float(53)) CTOP = Column(Float(53)) CTP5 = Column(Float(53)) CurrentAssetsRatio = Column(Float(53)) CurrentAssetsTRate = Column(Float(53)) CurrentRatio = Column(Float(53)) DAVOL10 = Column(Float(53)) DAVOL20 = Column(Float(53)) DAVOL5 = Column(Float(53)) DDNBT = Column(Float(53)) DDNCR = Column(Float(53)) DDNSR = Column(Float(53)) DebtEquityRatio = Column(Float(53)) DebtsAssetRatio = Column(Float(53)) DHILO = Column(Float(53)) DilutedEPS = Column(Float(53)) DVRAT = Column(Float(53)) EBITToTOR = Column(Float(53)) EGRO = Column(Float(53)) EMA10 = Column(Float(53)) EMA120 = Column(Float(53)) EMA20 = Column(Float(53)) EMA5 = Column(Float(53)) EMA60 = Column(Float(53)) EPS = Column(Float(53)) EquityFixedAssetRatio = Column(Float(53)) EquityToAsset = Column(Float(53)) EquityTRate = Column(Float(53)) ETOP = Column(Float(53)) ETP5 = Column(Float(53)) FinancialExpenseRate = Column(Float(53)) FinancingCashGrowRate = Column(Float(53)) FixAssetRatio = Column(Float(53)) FixedAssetsTRate = Column(Float(53)) GrossIncomeRatio = Column(Float(53)) HBETA = Column(Float(53)) HSIGMA = Column(Float(53)) IntangibleAssetRatio = Column(Float(53)) InventoryTDays = Column(Float(53)) InventoryTRate = Column(Float(53)) InvestCashGrowRate = Column(Float(53)) LCAP = Column(Float(53)) LFLO = Column(Float(53)) LongDebtToAsset = Column(Float(53)) LongDebtToWorkingCapital = Column(Float(53)) LongTermDebtToAsset = Column(Float(53)) MA10 = Column(Float(53)) MA120 = Column(Float(53)) MA20 = Column(Float(53)) MA5 = Column(Float(53)) MA60 = Column(Float(53)) MAWVAD = Column(Float(53)) MFI = Column(Float(53)) MLEV = Column(Float(53)) NetAssetGrowRate = Column(Float(53)) NetProfitGrowRate = Column(Float(53)) NetProfitRatio = Column(Float(53)) NOCFToOperatingNI = Column(Float(53)) NonCurrentAssetsRatio = Column(Float(53)) NPParentCompanyGrowRate = Column(Float(53)) NPToTOR = Column(Float(53)) OperatingExpenseRate = Column(Float(53)) OperatingProfitGrowRate = Column(Float(53)) OperatingProfitRatio = Column(Float(53)) OperatingProfitToTOR = Column(Float(53)) OperatingRevenueGrowRate = Column(Float(53)) OperCashGrowRate = Column(Float(53)) OperCashInToCurrentLiability = Column(Float(53)) PB = Column(Float(53)) PCF = Column(Float(53)) PE = Column(Float(53)) PS = Column(Float(53)) PSY = Column(Float(53)) QuickRatio = Column(Float(53)) REVS10 = Column(Float(53)) REVS20 = Column(Float(53)) REVS5 = Column(Float(53)) ROA = Column(Float(53)) ROA5 = Column(Float(53)) ROE = Column(Float(53)) ROE5 = Column(Float(53)) RSI = Column(Float(53)) RSTR12 = Column(Float(53)) RSTR24 = Column(Float(53)) SalesCostRatio = Column(Float(53)) SaleServiceCashToOR = Column(Float(53)) SUE = Column(Float(53)) TaxRatio = Column(Float(53)) TOBT = Column(Float(53)) TotalAssetGrowRate = Column(Float(53)) TotalAssetsTRate = Column(Float(53)) TotalProfitCostRatio = Column(Float(53)) TotalProfitGrowRate = Column(Float(53)) VOL10 = Column(Float(53)) VOL120 = Column(Float(53)) VOL20 = 
Column(Float(53)) VOL240 = Column(Float(53)) VOL5 = Column(Float(53)) VOL60 = Column(Float(53)) WVAD = Column(Float(53)) REC = Column(Float(53)) DAREC = Column(Float(53)) GREC = Column(Float(53)) FY12P = Column(Float(53)) DAREV = Column(Float(53)) GREV = Column(Float(53)) SFY12P = Column(Float(53)) DASREV = Column(Float(53)) GSREV = Column(Float(53)) FEARNG = Column(Float(53)) FSALESG = Column(Float(53)) TA2EV = Column(Float(53)) CFO2EV = Column(Float(53)) ACCA = Column(Float(53)) DEGM = Column(Float(53)) SUOI = Column(Float(53)) EARNMOM = Column(Float(53)) FiftyTwoWeekHigh = Column(Float(53)) Volatility = Column(Float(53)) Skewness = Column(Float(53)) ILLIQUIDITY = Column(Float(53)) BackwardADJ = Column(Float(53)) MACD = Column(Float(53)) ADTM = Column(Float(53)) ATR14 = Column(Float(53)) ATR6 = Column(Float(53)) BIAS10 = Column(Float(53)) BIAS20 = Column(Float(53)) BIAS5 = Column(Float(53)) BIAS60 = Column(Float(53)) BollDown = Column(Float(53)) BollUp = Column(Float(53)) CCI10 = Column(Float(53)) CCI20 = Column(Float(53)) CCI5 = Column(Float(53)) CCI88 = Column(Float(53)) KDJ_K = Column(Float(53)) KDJ_D = Column(Float(53)) KDJ_J = Column(Float(53)) ROC6 = Column(Float(53)) ROC20 = Column(Float(53)) SBM = Column(Float(53)) STM = Column(Float(53)) UpRVI = Column(Float(53)) DownRVI = Column(Float(53)) RVI = Column(Float(53)) SRMI = Column(Float(53)) ChandeSD = Column(Float(53)) ChandeSU = Column(Float(53)) CMO = Column(Float(53)) DBCD = Column(Float(53)) ARC = Column(Float(53)) OBV = Column(Float(53)) OBV6 = Column(Float(53)) OBV20 = Column(Float(53)) TVMA20 = Column(Float(53)) TVMA6 = Column(Float(53)) TVSTD20 = Column(Float(53)) TVSTD6 = Column(Float(53)) VDEA = Column(Float(53)) VDIFF = Column(Float(53)) VEMA10 = Column(Float(53)) VEMA12 = Column(Float(53)) VEMA26 = Column(Float(53)) VEMA5 = Column(Float(53)) VMACD = Column(Float(53)) VOSC = Column(Float(53)) VR = Column(Float(53)) VROC12 = Column(Float(53)) VROC6 = Column(Float(53)) VSTD10 = Column(Float(53)) VSTD20 = Column(Float(53)) KlingerOscillator = Column(Float(53)) MoneyFlow20 = Column(Float(53)) AD = Column(Float(53)) AD20 = Column(Float(53)) AD6 = Column(Float(53)) CoppockCurve = Column(Float(53)) ASI = Column(Float(53)) ChaikinOscillator = Column(Float(53)) ChaikinVolatility = Column(Float(53)) EMV14 = Column(Float(53)) EMV6 = Column(Float(53)) plusDI = Column(Float(53)) minusDI = Column(Float(53)) ADX = Column(Float(53)) ADXR = Column(Float(53)) Aroon = Column(Float(53)) AroonDown = Column(Float(53)) AroonUp = Column(Float(53)) DEA = Column(Float(53)) DIFF = Column(Float(53)) DDI = Column(Float(53)) DIZ = Column(Float(53)) DIF = Column(Float(53)) MTM = Column(Float(53)) MTMMA = Column(Float(53)) PVT = Column(Float(53)) PVT6 = Column(Float(53)) PVT12 = Column(Float(53)) TRIX5 = Column(Float(53)) TRIX10 = Column(Float(53)) UOS = Column(Float(53)) MA10RegressCoeff12 = Column(Float(53)) MA10RegressCoeff6 = Column(Float(53)) PLRC6 = Column(Float(53)) PLRC12 = Column(Float(53)) SwingIndex = Column(Float(53)) Ulcer10 = Column(Float(53)) Ulcer5 = Column(Float(53)) Hurst = Column(Float(53)) ACD6 = Column(Float(53)) ACD20 = Column(Float(53)) EMA12 = Column(Float(53)) EMA26 = Column(Float(53)) APBMA = Column(Float(53)) BBI = Column(Float(53)) BBIC = Column(Float(53)) TEMA10 = Column(Float(53)) TEMA5 = Column(Float(53)) MA10Close = Column(Float(53)) AR = Column(Float(53)) BR = Column(Float(53)) ARBR = Column(Float(53)) CR20 = Column(Float(53)) MassIndex = Column(Float(53)) BearPower = Column(Float(53)) BullPower = Column(Float(53)) 
Elder = Column(Float(53)) NVI = Column(Float(53)) PVI = Column(Float(53)) RC12 = Column(Float(53)) RC24 = Column(Float(53)) JDQS20 = Column(Float(53)) Variance20 = Column(Float(53)) Variance60 = Column(Float(53)) Variance120 = Column(Float(53)) Kurtosis20 = Column(Float(53)) Kurtosis60 = Column(Float(53)) Kurtosis120 = Column(Float(53)) Alpha20 = Column(Float(53)) Alpha60 = Column(Float(53)) Alpha120 = Column(Float(53)) Beta20 = Column(Float(53)) Beta60 = Column(Float(53)) Beta120 = Column(Float(53)) SharpeRatio20 = Column(Float(53)) SharpeRatio60 = Column(Float(53)) SharpeRatio120 = Column(Float(53)) TreynorRatio20 = Column(Float(53)) TreynorRatio60 = Column(Float(53)) TreynorRatio120 = Column(Float(53)) InformationRatio20 = Column(Float(53)) InformationRatio60 = Column(Float(53)) InformationRatio120 = Column(Float(53)) GainVariance20 = Column(Float(53)) GainVariance60 = Column(Float(53)) GainVariance120 = Column(Float(53)) LossVariance20 = Column(Float(53)) LossVariance60 = Column(Float(53)) LossVariance120 = Column(Float(53)) GainLossVarianceRatio20 = Column(Float(53)) GainLossVarianceRatio60 = Column(Float(53)) GainLossVarianceRatio120 = Column(Float(53)) RealizedVolatility = Column(Float(53)) REVS60 = Column(Float(53)) REVS120 = Column(Float(53)) REVS250 = Column(Float(53)) REVS750 = Column(Float(53)) REVS5m20 = Column(Float(53)) REVS5m60 = Column(Float(53)) REVS5Indu1 = Column(Float(53)) REVS20Indu1 = Column(Float(53)) Volumn1M = Column(Float(53)) Volumn3M = Column(Float(53)) Price1M = Column(Float(53)) Price3M = Column(Float(53)) Price1Y = Column(Float(53)) Rank1M = Column(Float(53)) CashDividendCover = Column(Float(53)) DividendCover = Column(Float(53)) DividendPaidRatio = Column(Float(53)) RetainedEarningRatio = Column(Float(53)) CashEquivalentPS = Column(Float(53)) DividendPS = Column(Float(53)) EPSTTM = Column(Float(53)) NetAssetPS = Column(Float(53)) TORPS = Column(Float(53)) TORPSLatest = Column(Float(53)) OperatingRevenuePS = Column(Float(53)) OperatingRevenuePSLatest = Column(Float(53)) OperatingProfitPS = Column(Float(53)) OperatingProfitPSLatest = Column(Float(53)) CapitalSurplusFundPS = Column(Float(53)) SurplusReserveFundPS = Column(Float(53)) UndividedProfitPS = Column(Float(53)) RetainedEarningsPS = Column(Float(53)) OperCashFlowPS = Column(Float(53)) CashFlowPS = Column(Float(53)) NetNonOIToTP = Column(Float(53)) NetNonOIToTPLatest = Column(Float(53)) PeriodCostsRate = Column(Float(53)) InterestCover = Column(Float(53)) NetProfitGrowRate3Y = Column(Float(53)) NetProfitGrowRate5Y = Column(Float(53)) OperatingRevenueGrowRate3Y = Column(Float(53)) OperatingRevenueGrowRate5Y = Column(Float(53)) NetCashFlowGrowRate = Column(Float(53)) NetProfitCashCover = Column(Float(53)) OperCashInToAsset = Column(Float(53)) CashConversionCycle = Column(Float(53)) OperatingCycle = Column(Float(53)) PEG3Y = Column(Float(53)) PEG5Y = Column(Float(53)) PEIndu = Column(Float(53)) PBIndu = Column(Float(53)) PSIndu = Column(Float(53)) PCFIndu = Column(Float(53)) PEHist20 = Column(Float(53)) PEHist60 = Column(Float(53)) PEHist120 = Column(Float(53)) PEHist250 = Column(Float(53)) StaticPE = Column(Float(53)) ForwardPE = Column(Float(53)) EnterpriseFCFPS = Column(Float(53)) ShareholderFCFPS = Column(Float(53)) ROEDiluted = Column(Float(53)) ROEAvg = Column(Float(53)) ROEWeighted = Column(Float(53)) ROECut = Column(Float(53)) ROECutWeighted = Column(Float(53)) ROIC = Column(Float(53)) ROAEBIT = Column(Float(53)) ROAEBITTTM = Column(Float(53)) OperatingNIToTP = Column(Float(53)) 
OperatingNIToTPLatest = Column(Float(53)) InvestRAssociatesToTP = Column(Float(53)) InvestRAssociatesToTPLatest = Column(Float(53)) NPCutToNP = Column(Float(53)) SuperQuickRatio = Column(Float(53)) TSEPToInterestBearDebt = Column(Float(53)) DebtTangibleEquityRatio = Column(Float(53)) TangibleAToInteBearDebt = Column(Float(53)) TangibleAToNetDebt = Column(Float(53)) NOCFToTLiability = Column(Float(53)) NOCFToInterestBearDebt = Column(Float(53)) NOCFToNetDebt = Column(Float(53)) TSEPToTotalCapital = Column(Float(53)) InteBearDebtToTotalCapital = Column(Float(53)) NPParentCompanyCutYOY = Column(Float(53)) SalesServiceCashToORLatest = Column(Float(53)) CashRateOfSalesLatest = Column(Float(53)) NOCFToOperatingNILatest = Column(Float(53)) TotalAssets = Column(Float(53)) MktValue = Column(Float(53)) NegMktValue = Column(Float(53)) TEAP = Column(Float(53)) NIAP = Column(Float(53)) TotalFixedAssets = Column(Float(53)) IntFreeCL = Column(Float(53)) IntFreeNCL = Column(Float(53)) IntCL = Column(Float(53)) IntDebt = Column(Float(53)) NetDebt = Column(Float(53)) NetTangibleAssets = Column(Float(53)) WorkingCapital = Column(Float(53)) NetWorkingCapital = Column(Float(53)) TotalPaidinCapital = Column(Float(53)) RetainedEarnings = Column(Float(53)) OperateNetIncome = Column(Float(53)) ValueChgProfit = Column(Float(53)) NetIntExpense = Column(Float(53)) EBIT = Column(Float(53)) EBITDA = Column(Float(53)) EBIAT = Column(Float(53)) NRProfitLoss = Column(Float(53)) NIAPCut = Column(Float(53)) FCFF = Column(Float(53)) FCFE = Column(Float(53)) DA = Column(Float(53)) TRevenueTTM = Column(Float(53)) TCostTTM = Column(Float(53)) RevenueTTM = Column(Float(53)) CostTTM = Column(Float(53)) GrossProfitTTM = Column(Float(53)) SalesExpenseTTM = Column(Float(53)) AdminExpenseTTM = Column(Float(53)) FinanExpenseTTM = Column(Float(53)) AssetImpairLossTTM = Column(Float(53)) NPFromOperatingTTM = Column(Float(53)) NPFromValueChgTTM = Column(Float(53)) OperateProfitTTM = Column(Float(53)) NonOperatingNPTTM = Column(Float(53)) TProfitTTM = Column(Float(53)) NetProfitTTM = Column(Float(53)) NetProfitAPTTM = Column(Float(53)) SaleServiceRenderCashTTM = Column(Float(53)) NetOperateCFTTM = Column(Float(53)) NetInvestCFTTM = Column(Float(53)) NetFinanceCFTTM = Column(Float(53)) GrossProfit = Column(Float(53)) Beta252 = Column(Float(53)) RSTR504 = Column(Float(53)) EPIBS = Column(Float(53)) CETOP = Column(Float(53)) DASTD = Column(Float(53)) CmraCNE5 = Column(Float(53)) HsigmaCNE5 = Column(Float(53)) SGRO = Column(Float(53)) EgibsLong = Column(Float(53)) STOM = Column(Float(53)) STOQ = Column(Float(53)) STOA = Column(Float(53)) NLSIZE = Column(Float(53))