class AdserverTargetedSpentPenniesByCodename(Base):
    """Spend for ads, correlated by ad campaign."""

    __tablename__ = "adserver_traffic_spentpenniestarget"

    codename = Column("fullname", String(), nullable=False, primary_key=True)
    subreddit = Column(String(), nullable=False, primary_key=True)
    date = Column(DateTime(), nullable=False, primary_key=True)
    interval = Column(String(), nullable=False, primary_key=True)
    # stored column names differ from attribute names ("unique"/"total")
    unique_count = Column("unique", Integer())
    pageview_count = Column("total", Integer())

    @classmethod
    @memoize_traffic(time=3600)
    def promotion_history(cls, codename, start, stop):
        """Spend history for one codename between start and stop."""
        # NOTE(review): unlike sibling traffic classes, this passes
        # cls.unique_count explicitly to the helper -- confirm intended.
        return promotion_history(cls, cls.unique_count, codename, start, stop,
                                 interval="day")

    @classmethod
    @memoize_traffic(time=3600)
    def total_by_codename(cls, codenames):
        """Spend totals for each of the given codenames."""
        return total_by_codename(cls, codenames, interval="day")

    @classmethod
    # FIX: this was the only query classmethod without @memoize_traffic,
    # inconsistent with the sibling TargetedImpressionsByCodename class.
    @memoize_traffic(time=3600)
    def campaign_history(cls, codenames, start, stop):
        """Per-campaign spend history for the given codenames."""
        return campaign_history(cls, codenames, start, stop, interval="day")
class sys_op_ship(BaseModel):
    """Association table: which game, which cluster, and which kind of
    service an IP belongs to, plus the asset record it corresponds to.
    """
    __tablename__ = 'sys_op_ship'

    id = Column(Integer, primary_key=True)
    # NOTE(review): unlike every FK below, ip_id carries no
    # ON DELETE/ON UPDATE CASCADE -- confirm whether that is intentional.
    ip_id = Column(Integer(), ForeignKey('ip.id'), nullable=False)
    game_id = Column(Integer,
                     ForeignKey('game.id', ondelete='CASCADE',
                                onupdate='CASCADE'),
                     nullable=False)
    cluster_id = Column(Integer,
                        ForeignKey('cluster.id', ondelete='CASCADE',
                                   onupdate='CASCADE'),
                        nullable=False)
    services_id = Column(Integer,
                         ForeignKey('services.id', ondelete='CASCADE',
                                    onupdate='CASCADE'),
                         nullable=False)
    asset_id = Column(Integer,
                      ForeignKey('asset.id', ondelete='CASCADE',
                                 onupdate='CASCADE'),
                      nullable=False)
    # soft-delete flag; defaults to 0 (presumably 0 = active -- verify)
    is_delete = Column(Integer(), nullable=False, default=0)
class idc_op_ship(BaseModel):
    """Association table: which IDC machine room and which cabinet an asset
    sits in, which game and cluster it belongs to, and what its IP is.
    """
    __tablename__ = 'idc_op_ship'

    id = Column(Integer, primary_key=True)
    # NOTE(review): unlike every FK below, asset_id carries no
    # ON DELETE/ON UPDATE CASCADE -- confirm whether that is intentional.
    asset_id = Column(Integer(), ForeignKey('asset.id'), nullable=False)
    idc_id = Column(Integer,
                    ForeignKey('idc.id', ondelete='CASCADE',
                               onupdate='CASCADE'),
                    nullable=False)
    cabinet_id = Column(Integer,
                        ForeignKey('cabinet.id', ondelete='CASCADE',
                                   onupdate='CASCADE'),
                        nullable=False)
    game_id = Column(Integer,
                     ForeignKey('game.id', ondelete='CASCADE',
                                onupdate='CASCADE'),
                     nullable=False)
    cluster_id = Column(Integer,
                        ForeignKey('cluster.id', ondelete='CASCADE',
                                   onupdate='CASCADE'),
                        nullable=False)
    ip_id = Column(Integer,
                   ForeignKey('ip.id', ondelete='CASCADE',
                              onupdate='CASCADE'),
                   nullable=False)
    # soft-delete flag; defaults to 0 (presumably 0 = active -- verify)
    is_delete = Column(Integer(), nullable=False, default=0)
class TokenPrivlege(Base):
    """Association table linking tokens to privileges."""

    __tablename__ = 'token_privlege'

    id = Column(Integer(), primary_key=True)
    # BUG FIX: token_id was assigned a bare ForeignKey(...) without a
    # Column() wrapper, so no token_id column was ever created on the table.
    token_id = Column(
        Integer(),
        ForeignKey('token.id', onupdate='CASCADE', ondelete='CASCADE'))
    # NOTE(review): attribute spelled "privlege" but FK targets "privilege";
    # the misspelling is part of the public interface, so it is kept.
    privlege_id = Column(
        Integer(),
        ForeignKey('privilege.id', onupdate='CASCADE', ondelete='CASCADE'))
class UserPrivlege(Base):
    """Association table linking users to privileges."""

    __tablename__ = 'user_privlege'

    id = Column(Integer(), primary_key=True)
    # BUG FIX: user_id was assigned a bare ForeignKey(...) without a
    # Column() wrapper, so no user_id column was ever created on the table.
    user_id = Column(
        Integer(),
        ForeignKey('user.id', onupdate='CASCADE', ondelete='CASCADE'))
    # NOTE(review): attribute spelled "privlege" but FK targets "privilege";
    # the misspelling is part of the public interface, so it is kept.
    privlege_id = Column(
        Integer(),
        ForeignKey('privilege.id', onupdate='CASCADE', ondelete='CASCADE'))
def save_owned_games(engine):
    """Parse data/steam_owned_games.txt and bulk-load played games into the
    steam_owned_games table (replacing any existing table).

    Each input line is a JSON object of the form {user_id: [inventory, ...]};
    only games with nonzero playtime_forever are kept.
    """
    dic_owned_games = {}
    with open('data/steam_owned_games.txt', 'r') as f:
        # FIX: iterate the file handle directly -- readlines() held the
        # entire file in memory at once.
        for raw_string in tqdm(f, desc='Process Owned Games'):
            user_id, lst_inventory = list(json.loads(raw_string).items())[0]
            for i in lst_inventory or ():
                app_id = i.get('appid')
                playtime_forever = i.get('playtime_forever', 0)
                if playtime_forever > 0:
                    # keyed by (user, app) so duplicate lines de-duplicate;
                    # direct assignment instead of dict.update({...})
                    dic_owned_games[(user_id, app_id)] = {
                        'user_id': user_id,
                        'app_id': app_id,
                        'playtime_forever': playtime_forever,
                    }
    df_owned_games = pd.DataFrame.from_dict(dic_owned_games, 'index')
    df_owned_games.to_sql('steam_owned_games', engine, if_exists='replace',
                          index=False,
                          dtype={'user_id': BigInteger(),
                                 'app_id': Integer(),
                                 'playtime_forever': Integer()},
                          chunksize=10000)
class Contacts(Base):
    """A contact-directory entry (employee lookup)."""

    __tablename__ = "contacts"
    #c.categories = ['mysqlg1','apperrorlog','tmc']

    id = Column(Integer, primary_key=True)
    eid = Column(String(100))
    name = Column(String(100))
    ename = Column(String(200))
    title = Column(String(100))
    # NOTE(review): phone-number columns typed as Integer with legacy
    # display widths; strings would be safer -- left unchanged to preserve
    # the existing schema.
    subtel = Column(Integer(2))
    mobile = Column(Integer(1))
    email = Column(String(50))
    forsearch = Column(String(50))

    def __init__(self, datei='', _update=''):
        # NOTE(review): both parameters are accepted but unused; every
        # column is blanked, including "" assigned to the integer columns.
        self.id = 0
        self.eid = ""
        self.name = ""
        self.ename = ""
        self.title = ""
        self.subtel = ""
        self.mobile = ""
        self.email = ""
        self.forsearch = ""

    def __repr__(self):
        # BUG FIX: previously returned "<Categories('%s')" -- the wrong
        # class name (copy/paste from another model) and no closing ">".
        return "<Contacts('%s')>" % self.name
def test_edge_generation_from_foreign_keys(self):
    """A single-column foreign key should yield exactly one direct edge
    descriptor, pointing from the referencing vertex/column to the
    referenced vertex/column."""
    metadata = MetaData()
    table1 = Table(
        "Table1",
        metadata,
        Column("primary_key_column", Integer(), primary_key=True),
        Column("foreign_key_column", Integer(),
               ForeignKey("Table2.primary_key_column")),
    )
    table2 = Table("Table2", metadata,
                   Column("primary_key_column", Integer, primary_key=True))
    # Vertex names deliberately differ from the SQL table names, showing
    # the descriptors are keyed by vertex name rather than table name.
    vertex_name_to_table = {
        "TableWithForeignKey": table1,
        "TableWithReferencedPrimaryKey": table2,
    }
    direct_edge_descriptors = generate_direct_edge_descriptors_from_foreign_keys(
        vertex_name_to_table)
    self.assertEqual(
        direct_edge_descriptors,
        {
            DirectEdgeDescriptor(
                from_vertex="TableWithForeignKey",
                from_column="foreign_key_column",
                to_vertex="TableWithReferencedPrimaryKey",
                to_column="primary_key_column",
            ),
        },
    )
def test_warning_for_ignored_foreign_keys(self):
    """A composite (multi-column) foreign key cannot be represented as a
    direct edge: it should be skipped with a warning and produce no edge
    descriptors."""
    metadata = MetaData()
    table1 = Table(
        "Table1",
        metadata,
        Column("primary_key_column", Integer(), primary_key=True),
        Column("foreign_key_column1", Integer()),
        Column("foreign_key_column2", Integer()),
        # two-column FK -> triggers the "ignored" path under test
        ForeignKeyConstraint(
            ("foreign_key_column1", "foreign_key_column2"),
            ("Table2.primary_key_column1", "Table2.primary_key_column2"),
        ),
    )
    table2 = Table(
        "Table2",
        metadata,
        Column("primary_key_column1", Integer, primary_key=True),
        Column("primary_key_column2", Integer, primary_key=True),
    )
    vertex_name_to_table = {
        "TableWithForeignKey": table1,
        "TableWithReferencedPrimaryKey": table2,
    }
    with pytest.warns(Warning):
        direct_edge_descriptors = generate_direct_edge_descriptors_from_foreign_keys(
            vertex_name_to_table)
    # the composite key is dropped entirely, not partially translated
    self.assertEqual(direct_edge_descriptors, set())
def upgrade():
    """Create the flight_path_chunks table and its supporting indexes."""
    ### commands auto generated by Alembic - please adjust! ###
    # Each row holds one chunk of a flight's path: a PostGIS LINESTRING
    # (SRID 4326) plus a parallel array of timestamps; rows cascade-delete
    # with their parent flight.
    op.create_table(
        'flight_path_chunks',
        sa.Column('id', Integer(), nullable=False),
        sa.Column('time_created', DateTime(), nullable=False),
        sa.Column('time_modified', DateTime(), nullable=False),
        sa.Column('timestamps', postgresql.ARRAY(DateTime()), nullable=False),
        sa.Column('locations',
                  Geometry(geometry_type='LINESTRING', srid=4326),
                  nullable=False),
        sa.Column('start_time', DateTime(), nullable=False),
        sa.Column('end_time', DateTime(), nullable=False),
        sa.Column('flight_id', Integer(), nullable=False),
        sa.ForeignKeyConstraint(['flight_id'], ['flights.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    # indexes for time-range scans and per-flight chunk lookups
    op.create_index('ix_flight_path_chunks_end_time', 'flight_path_chunks',
                    ['end_time'], unique=False)
    op.create_index('ix_flight_path_chunks_start_time', 'flight_path_chunks',
                    ['start_time'], unique=False)
    op.create_index('ix_flight_path_chunks_flight_id', 'flight_path_chunks',
                    ['flight_id'], unique=False)
def step3_match_controls_to_sql(self):
    """Load the matched-controls CSV and replace the matched_controls_hourly
    table in Postgres with its contents."""
    path = os.path.join(head, 'data', 'interim')
    file = "q13_matched_controls.csv"
    t_print("reading csv..")
    # BUG FIX: `path + file` concatenated without a path separator
    # (os.path.join never appends a trailing one), producing
    # ".../interimq13_matched_controls.csv".
    mc = pd.read_csv(os.path.join(path, file))
    t_print("read")
    print_time()
    types = {
        "icustay_id": Integer(),
        "hadm_id": Integer(),
        "intime": DateTime(),
        "outtime": DateTime(),
        "length_of_stay": Numeric(),
        "control_onset_hour": Numeric(),
        "control_onset_time": DateTime(),
        "matched_case_icustay_id": Integer()
    }
    t_print("saving to SQL...")
    # somehow we cannot overwrite tables directly with "to_sql" so let's
    # drop the existing table first
    conn = psycopg2.connect(dbname=self.dbname, user=self.sqluser,
                            password=self.sqlpass, host=self.host)
    try:
        cur = conn.cursor()
        cur.execute(self.query_schema +
                    "drop table IF EXISTS matched_controls_hourly cascade")
        conn.commit()
    finally:
        # FIX: the connection was previously never closed (leak)
        conn.close()
    # NOTE(review): hard-coded schema name -- consider making configurable
    mc[mc.columns].to_sql("matched_controls_hourly", self.engine,
                          if_exists='append', schema="mimic3_mrosnati",
                          dtype=types)
    t_print("saved")
class Rating(BaseModel):
    """ Rating table. """

    __tablename__ = "ratings"

    #{ Columns
    rating_id = Column(Integer(), autoincrement=True, primary_key=True)
    # FIX: SQLAlchemy's Integer takes no length/display-width argument;
    # the old Integer(6)/Integer(5)/Integer(length=1) forms raise TypeError
    # on modern versions and never affected storage anyway.
    rater_id = Column(Integer(), ForeignKey("users.user_id"))
    movie_id = Column(Integer(), ForeignKey("movies.movie_id"))
    rating_date = Column(Date(), nullable=True)
    rating_value = Column(Integer())

    #{ Relationships
    rater = relation(User, backref="rated_movies")
    movie = relation(Movie, backref="ratings")
    #}

    @classmethod
    def get_average_global_rating(cls, db_session):
        """Return the average global rating."""
        query = db_session.query(func.avg(cls.rating_value)).one()
        return query[0]
def put_new_trainees(new_df):
    """Append new enrolment records to the enrolment_record table.

    No-op when new_df is empty.  On any database error, reports error code
    '23' and exits the process (preserving the original contract).
    """
    engine = helpers.db_engine()
    # FIX: removed `conn = engine.connect()` -- the connection was never
    # used and never closed, leaking a pooled connection on every call.
    if new_df.empty:
        return
    try:
        new_df.to_sql('enrolment_record', if_exists='append', con=engine,
                      index=False, chunksize=100,
                      dtype={'id': Integer(),
                             'phone': String(50),
                             'jcn': String(50),
                             'jc_status': Integer(),
                             'time_pref': String(50),
                             'time_pref_label': String(50),
                             'file_name_s3': String(50),
                             'file_upload_to_s3_date': String(50),
                             'breastfeeding': String(50),
                             'pregnant': String(50),
                             'children_under6': String(50),
                             'teenage_girls': String(50),
                             'nocategory': String(50),
                             'health_category': String(50),
                             'insert_date': String(50),
                             'enrolment_date': String(50),
                             'pilot': TINYINT(2)})
    except Exception:
        er.handle_error(error_code='23', data={})
        sys.exit()
class Priority(TableBase):
    """Priority a package can have."""

    __tablename__ = 'priorities'

    # identity, display and reporting attributes
    ident = Column(Unicode(), primary_key=True, nullable=False,
                   doc=u"Machine-friendly name")
    name = Column(Unicode(), nullable=False,
                  doc=u"Display name")
    abbrev = Column(Unicode(), nullable=False,
                    doc=u"Abbreviation for reports")
    color = Column(Unicode(), nullable=False,
                   doc=u"Color for reports (RRGGBB)")
    # sorting attributes
    order = Column(Integer(), nullable=False,
                   doc=u"Index for sorting")
    weight = Column(Integer(), nullable=False,
                    doc=u"Weight for sorting packages")
    term = Column(Unicode(), nullable=False,
                  doc=u"Terminal representation")

    def __repr__(self):
        return '<%s %s>' % (type(self).__qualname__, self.ident)

    def __str__(self):
        return '%s priority' % self.name
def _add_folder_to_sqlite(dbcon, folder_info):
    """Append one folder record to the sqlite ``folders`` table.

    Keys of folder_info that are not part of the fixed schema below are
    silently dropped so the inserted row always matches the table.
    """
    # modify folder info to prep for appending to sqlite table
    folder_info_dtypes = {
        '_accessLevel': Integer(),
        '_id': String(),
        '_modelType': String(),
        'baseParentId': String(),
        'baseParentType': String(),
        'created': String(),
        'creatorId': String(),
        'description': String(),
        'name': String(),
        'parentCollection': String(),
        'parentId': String(),
        'public': Boolean(),
        'size': Integer(),
        'updated': String(),
        'folder_path': String(),
    }
    # in case anything is not in the schema, drop it
    # (idiom fix: membership test against the dict itself, not .keys())
    folder_info = {
        k: v for k, v in folder_info.items() if k in folder_info_dtypes
    }
    # convert to a one-row frame and append to the folders table
    folder_info_df = DataFrame.from_dict(folder_info, orient='index').T
    folder_info_df.to_sql(
        name='folders', con=dbcon, if_exists='append',
        dtype=folder_info_dtypes, index=False)
class PageviewsBySubverbify(Base):
    """Pageviews within a subverbify (i.e. /r/something/...)."""

    __tablename__ = "traffic_subverbifys"

    subverbify = Column(String(), nullable=False, primary_key=True)
    date = Column(DateTime(), nullable=False, primary_key=True)
    interval = Column(String(), nullable=False, primary_key=True)
    # stored column names differ from attribute names ("unique"/"total")
    unique_count = Column("unique", Integer())
    pageview_count = Column("total", Integer())

    @classmethod
    @memoize_traffic(time=3600)
    def history(cls, interval, subverbify):
        """Uniques/pageviews time series for a single subverbify."""
        time_points, query = make_history_query(cls, interval)
        query = query.filter(cls.subverbify == subverbify)
        return fill_gaps(time_points, query, "unique_count",
                         "pageview_count")

    @classmethod
    @memoize_traffic(time=3600 * 6)
    def top_last_month(cls, num=None):
        """Top subverbifys by traffic over the last month."""
        return top_last_month(cls, "subverbify", num=num)

    @classmethod
    @memoize_traffic(time=3600 * 6)
    def last_month(cls, srs):
        """Last-month traffic for the given subverbifys."""
        names = [sr.name for sr in srs]
        return top_last_month(cls, "subverbify", ids=names)
def _add_annotation_docs_to_sqlite(dbcon, annotation_docs, item):
    """Append annotation document records to the sqlite
    ``annotation_docs`` table, tagging each row with the owning item's name.

    NOTE(review): mutates the caller's ``annotation_docs`` frame in place
    (adds the ``item_name`` column) -- confirm callers do not reuse it.
    """
    # add full item path for convenience
    annotation_docs.loc[:, "item_name"] = item['name']
    # save tables to sqlite
    annotation_docs.to_sql(
        name='annotation_docs', con=dbcon, if_exists='append',
        dtype={
            'annotation_girder_id': String(),
            '_modelType': String(),
            '_version': Integer(),
            'itemId': String(),
            'item_name': String(),
            'created': String(),
            'creatorId': String(),
            'public': Boolean(),
            'updated': String(),
            'updatedId': String(),
            'groups': String(),
            'element_count': Integer(),
            'element_details': Integer(),
        },
        index=False,
    )
def _add_annotation_elements_to_sqlite(dbcon, annotation_elements):
    """Append annotation element records to the sqlite
    ``annotation_elements`` table.

    NOTE(review): drops the ``annidx``/``elementidx`` columns from the
    caller's frame in place -- confirm callers do not rely on them after
    this call.
    """
    # drop index relative to JSON since its pretty arbitrary and would
    # change if the same girder client was used to get annotations twice
    # the actual girder ID string is what really matters and should be used
    annotation_elements.drop(labels=['annidx', 'elementidx'], axis=1,
                             inplace=True)
    annotation_elements.to_sql(
        name='annotation_elements', con=dbcon, if_exists='append',
        dtype={
            'annotation_girder_id': String(),
            'element_girder_id': String(),
            'type': String(),
            'group': String(),
            'label': String(),
            'color': String(),
            'xmin': Integer(),
            'xmax': Integer(),
            'ymin': Integer(),
            'ymax': Integer(),
            'bbox_area': Integer(),
            'coords_x': String(),
            'coords_y': String(),
        },
        index=False,
    )
class Watch(Base):
    """Watch target and user."""

    __tablename__ = 'toranoana_watch'

    id = Column(Integer, primary_key=True)
    # external identifier of the watched target, stored as a string --
    # presumably a Toranoana print/item id; verify against callers
    print_target_id = Column(String, nullable=False)
    genre_id = Column(Integer, ForeignKey(Genre.id))
    genre = relationship(Genre)
    # Target enum values persisted as integers via ChoiceType
    male = Column(
        ChoiceType(Target, impl=Integer()),
        nullable=False,
    )
    female = Column(
        ChoiceType(Target, impl=Integer()),
        nullable=False,
    )

    @hybrid_property
    def male_text(self) -> str:
        """Human-readable label for the ``male`` target value."""
        return TARGET_LABEL[self.male]

    @hybrid_property
    def female_text(self) -> str:
        """Human-readable label for the ``female`` target value."""
        return TARGET_LABEL[self.female]
def test_manual_table_auto_joins(self):
    """A ManyToMany over a manually defined association table -- with a
    composite foreign key toward A and a simple one toward B -- should
    auto-join correctly from both sides."""
    from sqlalchemy import Table, Column, ForeignKey, ForeignKeyConstraint
    # association table: (a_key1, a_key2) -> A's composite PK, b_id -> B.id
    a_b = schema.Table('a_b', self.metadata,
                       schema.Column('a_key1', Integer()),
                       schema.Column('a_key2', String(40)),
                       schema.Column('b_id', Integer(),
                                     schema.ForeignKey('b.id')),
                       schema.ForeignKeyConstraint(['a_key1', 'a_key2'],
                                                   ['a.key1', 'a.key2']))

    class A(self.Entity):
        using_options(shortnames=True)
        key1 = Field(Integer, primary_key=True, autoincrement=False)
        key2 = Field(String(40), primary_key=True)
        bs_ = ManyToMany('B', table=a_b)

    class B(self.Entity):
        using_options(shortnames=True)
        name = Field(String(60))
        as_ = ManyToMany('A', table=a_b)

    self.create_all()
    with self.session.begin():
        b1 = B(name='b1', as_=[A(key1=10, key2='a1')])
    # force a reload so the assertions exercise the generated joins
    self.session.expire_all()
    a = A.query.one()
    b = B.query.one()
    # the association must be visible from both sides
    assert a in b.as_
    assert b in a.bs_
class AdImpressionsByCodename(Base):
    """Ad impression counts, keyed by codename."""

    __tablename__ = "traffic_thing"

    codename = Column("fullname", String(), nullable=False, primary_key=True)
    date = Column(DateTime(), nullable=False, primary_key=True)
    interval = Column(String(), nullable=False, primary_key=True)
    # stored column names differ from attribute names ("unique"/"total")
    unique_count = Column("unique", Integer())
    pageview_count = Column("total", Integer())

    @classmethod
    @memoize_traffic(time=3600)
    def history(cls, interval, codename):
        """Impression time series for a single codename."""
        period_start, period_end, query = make_history_query(cls, interval)
        query = query.filter(cls.codename == codename)
        return fill_gaps(interval, period_start, period_end, query,
                         "unique_count", "pageview_count")

    @classmethod
    @memoize_traffic(time=3600)
    def promotion_history(cls, codename, start, stop):
        """Impressions for one codename between start and stop."""
        return promotion_history(cls, codename, start, stop)

    @classmethod
    @memoize_traffic(time=3600)
    def historical_totals(cls, interval):
        """Overall impression totals at the given interval."""
        return totals(cls, interval)

    @classmethod
    @memoize_traffic(time=3600)
    def top_last_month(cls):
        """Top codenames by impressions over the last month."""
        return top_last_month(cls, "codename")
class Job(Base):
    """A job posting."""

    __tablename__ = 'jobs'

    job_id = Column(Integer(), primary_key=True)
    title = Column(String(255), nullable=False)
    company = Column(String(255))
    # location of the position
    region = Column(String(255), nullable=False)
    city = Column(String(255), nullable=False)
    country = Column(String(255), nullable=False)
    job_type = Column(Enum(JobType), nullable=False)
    description = Column(String(), nullable=False)
    requirements = Column(String(), nullable=False)
    # poster identity / contact
    posted_by = Column(String(100), nullable=False)
    poster_family_name = Column(String(100), nullable=False)
    poster_given_name = Column(String(100), nullable=False)
    can_contact = Column(Boolean(), nullable=False)
    # counters default to 0 rather than NULL
    people_contacted = Column(Integer(), default=0)
    job_status = Column(Enum(JobStatus), nullable=False)
    job_tags = Column(ARRAY(Enum(JobTags)), nullable=False)
    salary = Column(Integer())
    times_viewed = Column(Integer(), default=0)
    deadline = Column(DateTime(), nullable=False)
    # timestamps computed client-side via datetime.now (naive local time)
    created_on = Column(DateTime(), default=datetime.now)
    updated_on = Column(DateTime(), default=datetime.now,
                        onupdate=datetime.now)
    job_applications = relationship("JobApplication")
class TargetedImpressionsByCodename(Base):
    """Impressions for ads, correlated by ad campaign."""

    __tablename__ = "traffic_thingtarget"

    codename = Column("fullname", String(), nullable=False, primary_key=True)
    subreddit = Column(String(), nullable=False, primary_key=True)
    date = Column(DateTime(), nullable=False, primary_key=True)
    interval = Column(String(), nullable=False, primary_key=True)
    # stored column names differ from attribute names ("unique"/"total")
    unique_count = Column("unique", Integer())
    pageview_count = Column("total", Integer())

    @classmethod
    @memoize_traffic(time=3600)
    def promotion_history(cls, codename, start, stop):
        """Impression history for one codename between start and stop."""
        return promotion_history(cls, codename, start, stop)

    @classmethod
    @memoize_traffic(time=3600)
    def total_by_codename(cls, codenames):
        """Impression totals for each of the given codenames."""
        return total_by_codename(cls, codenames)

    @classmethod
    @memoize_traffic(time=3600)
    def campaign_history(cls, codenames, start, stop):
        """Per-campaign impression history for the given codenames."""
        return campaign_history(cls, codenames, start, stop)
class ClickthroughsByCodename(Base):
    """Clickthrough counts for ads."""

    __tablename__ = "traffic_click"

    codename = Column("fullname", String(), nullable=False, primary_key=True)
    date = Column(DateTime(), nullable=False, primary_key=True)
    interval = Column(String(), nullable=False, primary_key=True)
    # stored column names differ from attribute names ("unique"/"total")
    unique_count = Column("unique", Integer())
    pageview_count = Column("total", Integer())

    @classmethod
    @memoize_traffic(time=3600)
    def history(cls, interval, codename):
        """Clickthrough time series for a single codename."""
        time_points, query = make_history_query(cls, interval)
        query = query.filter(cls.codename == codename)
        return fill_gaps(time_points, query, "unique_count",
                         "pageview_count")

    @classmethod
    @memoize_traffic(time=3600)
    def promotion_history(cls, codename, start, stop):
        """Clickthroughs for one codename between start and stop."""
        return promotion_history(cls, codename, start, stop)

    @classmethod
    @memoize_traffic(time=3600)
    def historical_totals(cls, interval):
        """Overall clickthrough totals at the given interval."""
        return totals(cls, interval)

    @classmethod
    @memoize_traffic(time=3600)
    def total_by_codename(cls, codenames):
        """Clickthrough totals for each of the given codenames."""
        return total_by_codename(cls, codenames)
def put_scripts(scripts):
    """Replace the scripts table with the given frame.

    No-op when scripts is empty.  On any database error, reports error code
    '23' and exits the process (preserving the original contract).
    """
    engine = helpers.db_engine()
    # FIX: removed `conn = engine.connect()` -- the connection was never
    # used and never closed, leaking a pooled connection on every call.
    if scripts.empty:
        return
    try:
        scripts.to_sql('scripts', if_exists='replace', con=engine,
                       index=False, chunksize=100,
                       dtype={'id': Integer(),
                              'phone': String(50),
                              'time_pref': String(50),
                              'time_pref_label': String(50),
                              'amount': Integer(),
                              'transact_date': String(50),
                              'rejection_reason': String(50),
                              'day1': String(50),
                              'file_name_s3': String(50),
                              'file_upload_to_s3_date': String(50),
                              'insert_date': String(50)})
    except Exception:
        er.handle_error(error_code='23', data={})
        sys.exit()
def save_app_details():
    """Parse data/steam_app_details.txt and load the parsed app details into
    the steam_app_details MySQL table (replacing any existing table).

    Each input line is a JSON object mapping app_id -> API response; only
    responses flagged success are parsed and kept.
    """
    dic_app_details = {}
    # FIX: close the config file -- yaml.safe_load(open(...)) leaked the
    # file handle.
    with open('config.yaml') as cfg_file:
        config = yaml.safe_load(cfg_file)
    db_username = config['mysql']['username']
    db_password = config['mysql']['password']
    db_endpoint = config['mysql']['endpoint']
    db_database = config['mysql']['database']
    engine = create_engine('mysql+pymysql://{}:{}@{}/{}?charset=utf8mb4'
                           .format(db_username, db_password, db_endpoint,
                                   db_database))
    with open('data/steam_app_details.txt', 'r') as f:
        # FIX: iterate the handle directly -- readlines() held the whole
        # file in memory at once.
        for line in tqdm(f, desc='Process App Details'):
            try:
                for app_id, dic_response in json.loads(line).items():
                    if dic_response.get('success'):
                        dic_app_details[app_id] = parse_steam_app_details(
                            dic_response.get('data', {}))
            except Exception:
                # FIX: was a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit; malformed lines are still
                # skipped best-effort.
                pass
    df_steam_app = pd.DataFrame.from_dict(dic_app_details, 'index')
    df_steam_app.index.name = 'app_id'
    df_steam_app.reset_index(inplace=True)
    df_steam_app.to_sql('steam_app_details', engine, if_exists='replace',
                        index=False, chunksize=10000,
                        dtype={'app_id': Integer(),
                               'required_age': Integer()})
class Update(Base):
    """A submitted status update for a store."""

    __tablename__ = "StoreUpdates"

    id = Column(Integer(), primary_key=True)
    storeId = Column(Integer(), ForeignKey(u'StoreInfos.id'))
    # submitter details
    ip = Column(String(15))  # NOTE(review): 15 chars fits IPv4 only
    userId = Column(String(50))
    # free-text status fields
    availabilityInfo = Column(Text())
    safetyInfo = Column(Text())
    openingTime = Column(DateTime)
    closingTime = Column(DateTime)
    createdAt = Column(DateTime(timezone=True))
    updatedAt = Column(DateTime(timezone=True))
    # moderation state
    flag = Column(String(80), default=None)
    deleted = Column(Boolean(), default=False)
    reviewed = Column(Boolean(), default=False)

    # eagerly loaded so to_dict() can read store fields without extra queries
    Store = relationship('Store', foreign_keys=[storeId], lazy='joined')

    def to_dict(self):
        """Serialize this update plus its store's name/location for the API.

        Note: ``userId``, ``reviewed`` and the created/updated timestamps
        are deliberately not included.
        """
        return {
            "id": self.id,
            "name": self.Store.name,
            "latitude": self.Store.latitude,
            "longitude": self.Store.longitude,
            "address": self.Store.address,
            "ip": self.ip,
            "availabilityInfo": self.availabilityInfo,
            "safetyInfo": self.safetyInfo,
            "openingTime": self.openingTime,
            "closingTime": self.closingTime,
            "flag": self.flag,
            "deleted": self.deleted
        }
class BlockBase(Base): __tablename__ = 'blocks' # User id. This field is auto generated. id = Column('id', Integer(), primary_key=True) user_id = Column('user_id', Integer(), nullable=False) person_id = Column('person_id', Integer(), nullable=False)
class PageviewsBySubredditAndPath(Base):
    """Traffic rollup keyed by subreddit+path ("srpath"), date, and
    reporting interval."""

    __tablename__ = "traffic_srpaths"

    srpath = Column(String(), nullable=False, primary_key=True)
    date = Column(DateTime(), nullable=False, primary_key=True)
    interval = Column(String(), nullable=False, primary_key=True)
    # stored column names differ from attribute names ("unique"/"total")
    unique_count = Column("unique", Integer())
    pageview_count = Column("total", Integer())
class Person(Base):
    """A person record with self-referential parent/child links and
    several many-to-many associations (addresses, emails, files, phones,
    places, tags)."""

    __tablename__ = "people"

    id = Column(Integer(), primary_key = True)

    # names and basic demographics
    given_name = Column(String(255))
    family_name = Column(String(255))
    maiden_name = Column(String(255))
    suffix = Column(String(255))
    gender = Column(String(1))

    # birth details
    birthday = Column(DateTime())
    birthplace_id = Column(Integer(), ForeignKey("places.id"), index=True)
    birthplace = relationship("Place",
        primaryjoin = "(Person.birthplace_id == Place.id)",
        uselist = False,
    )

    # death details
    deathday = Column(DateTime())
    deathplace_id = Column(Integer(), ForeignKey("places.id"), index=True)
    deathplace = relationship("Place",
        primaryjoin = "(Person.deathplace_id == Place.id)",
        uselist = False,
    )

    # parents: self-referential many-to-one links
    father_id = Column(Integer(), ForeignKey("people.id"), index=True)
    father = relationship("Person",
        remote_side = [id],
        primaryjoin = "(Person.father_id == Person.id)",
        uselist = False,
    )
    mother_id = Column(Integer(), ForeignKey("people.id"), index=True)
    mother = relationship("Person",
        remote_side = [id],
        primaryjoin = "(Person.mother_id == Person.id)",
        uselist = False,
    )

    # children: anyone whose mother_id or father_id points at this row
    children = relationship("Person",
        remote_side = [mother_id, father_id],
        primaryjoin = "or_(Person.id == Person.mother_id, " \
            +"Person.id == Person.father_id)",
    )

    # disabled marriages relationship, kept for reference
    """
    marriages = relationship("Marriage",
        remote_side = [id],
        primaryjoin = "or_(Person.id == Marriage.person1_id, " \
            +"Person.id == Marriage.person2_id)",
    )
    """

    # many-to-many associations through secondary tables defined elsewhere
    # in this module
    addresses = relationship("Place", secondary = people_addresses,
        backref="people")
    emails = relationship("Email", secondary = people_emails,
        backref="people")
    files = relationship("File", secondary = people_files,
        backref="people")
    phones = relationship("Phone", secondary = people_phones,
        backref="people")
    places = relationship("Place", secondary = people_places,
        backref="people")
    tags = relationship("Tag", secondary = people_tags,
        backref="people")