def _date_columns(use_primary_key: bool = True) -> List[Column]:
    """Return the standard audit columns: load timestamp and deleted flag."""
    load_dt = Column(
        "jetavator_load_dt",
        DateTime(),
        nullable=True,
        primary_key=use_primary_key,
    )
    # TODO: Loading as integer saves space in CSVs.
    # Does this make sense for other file formats?
    # Is there a more general solution?
    deleted_ind = Column(
        "jetavator_deleted_ind",
        Integer(),
        nullable=True,
        default=0,
    )
    return [load_dt, deleted_ind]
class Items(Base):
    """ORM model for the ``items`` table."""
    __tablename__ = 'items'

    id = Column(Integer, primary_key=True)
    name = Column(String(256))
    quantity = Column(Integer)
    description = Column(String(256))
    date_added = Column(DateTime())

    def __repr__(self):
        """Render the row using the original brace-wrapped %-template."""
        template = '{ Item_id : %s, name:%s, quantity:%s, description:%s, date_added:%s}'
        values = (self.id, self.name, self.quantity,
                  self.description, self.date_added)
        return template % values
class Items(Base):
    """ORM model for the ``items`` table."""
    __tablename__ = 'items'

    id = Column(Integer, primary_key=True)
    name = Column(String(256))
    quantity = Column(Integer)
    description = Column(String(256))
    date_added = Column(DateTime())

    def __repr__(self):
        """Render the row as a dict-like string (keys quoted, %-formatted)."""
        template = ("{'id' : %d, 'name' : '%s', 'quantity' : %d, "
                    "'description' : '%s', 'date_added' : %s}")
        fields = (self.id, self.name, self.quantity,
                  self.description, self.date_added)
        return template % fields
def insert_data(self, dataframe):
    """Append *dataframe* to the ``adhoc_parser.etl_hh`` table.

    Connection parameters come from instance attributes; column types are
    pinned explicitly so pandas does not infer them on each load.
    """
    self.logger.info('create engine and connect to database')
    connection_url = f'{self.database_type}://{self.database_login}:{self.database_password}@{self.database_url}/{self.database_name}'
    engine = sqlalchemy.create_engine(connection_url)
    self.logger.info('prepare to insert data')
    column_types = {
        'id': INT(),
        'name': String(255),
        'has_test': Boolean(),
        'published_at': DateTime(),
        'created_at': DateTime(),
        'url': String(255),
        'area_name': String(255),
        'salary_from': INT(),
        'salary_to': INT(),
        'salary_currency': String(10),
        'salary.gross': Boolean(),
        'address.city': String(255),
        'address.street': String(255),
        'address_building': String(255),
        'address_raw': String(500),
        'metro_name': String(255),
        'employer_id': INT(),
        'employer_name': String(255),
        'snippet_requirement': TEXT(),
        'snippet_responsibility': TEXT(),
        'contacts_name': String(255),
        'contacts_email': String(255),
    }
    dataframe.to_sql('etl_hh',
                     schema='adhoc_parser',
                     con=engine,
                     if_exists='append',
                     index=False,
                     dtype=column_types)
    self.logger.warning('data are inserted now')
class Items(Base):
    """ORM model for the ``items`` table."""
    __tablename__ = 'items'

    id = Column(Integer, primary_key=True)
    name = Column(String(256))
    quantity = Column(Integer)
    description = Column(String(256))
    date_added = Column(DateTime())

    def __repr__(self):
        """Render the row as a readable sentence (same text as before)."""
        return ("id is %s, name is %s, quantity is %s, "
                "description is %s, date added is %s."
                % (self.id, self.name, self.quantity,
                   self.description, self.date_added))
def test_datetime_properties(client):
    """A DateTime-typed property must serialise to ISO-8601 strings."""
    rest = UnRest(client.app, client.session, framework=client.__framework__)
    birthday = rest.Property('birthday', type=DateTime())
    rest(Fruit, only=[], properties=[birthday])

    code, json = client.fetch('/api/fruit')
    assert code == 200
    assert json['occurences'] == 5

    expected = [
        {'fruit_id': 1, 'birthday': '2019-12-19T22:45:00'},
        {'fruit_id': 2, 'birthday': '2019-11-12T23:56:09.787000'},
        {'fruit_id': 3, 'birthday': '2020-01-01T00:00:00'},
        {'fruit_id': 4, 'birthday': '2019-12-31T23:20:00'},
        {'fruit_id': 5, 'birthday': '2019-12-31T21:59:59.999988'},
    ]
    assert idsorted(json['objects'], 'fruit_id') == expected
class User(db.Base): __tablename__ = "user" # Auto incrementing ID id = Column(Integer, primary_key=True) first_name = Column(String(256)) last_name = Column(String(256)) email = Column(String(256), unique=True) date_signed_up = Column(DateTime()) available_units = Column(BigInteger) used_units = Column(BigInteger) # Foreign Key api_key_id = Column(Integer, ForeignKey(ApiKey.id)) api_key = relationship(ApiKey)
def upgrade():
    """Create the ``flight_path_chunks`` table and its three lookup indexes."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "flight_path_chunks",
        sa.Column("id", Integer(), nullable=False),
        sa.Column("time_created", DateTime(), nullable=False),
        sa.Column("time_modified", DateTime(), nullable=False),
        # Timestamps stored as an array alongside the LINESTRING geometry --
        # presumably one entry per line vertex; confirm with the writer code.
        sa.Column("timestamps", postgresql.ARRAY(DateTime()), nullable=False),
        sa.Column("locations", Geometry(geometry_type="LINESTRING", srid=4326), nullable=False),
        sa.Column("start_time", DateTime(), nullable=False),
        sa.Column("end_time", DateTime(), nullable=False),
        sa.Column("flight_id", Integer(), nullable=False),
        # Chunks are removed automatically when the parent flight is deleted.
        sa.ForeignKeyConstraint(["flight_id"], ["flights.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    # Indexes on the time range boundaries and the owning flight.
    op.create_index(
        "ix_flight_path_chunks_end_time",
        "flight_path_chunks",
        ["end_time"],
        unique=False,
    )
    op.create_index(
        "ix_flight_path_chunks_start_time",
        "flight_path_chunks",
        ["start_time"],
        unique=False,
    )
    op.create_index(
        "ix_flight_path_chunks_flight_id",
        "flight_path_chunks",
        ["flight_id"],
        unique=False,
    )
class SitewidePageviews(Base):
    """Pageviews across all areas of the site."""
    __tablename__ = "traffic_aggregate"

    # Composite primary key: one row per (date, interval) aggregation bucket.
    date = Column(DateTime(), nullable=False, primary_key=True)
    interval = Column(String(), nullable=False, primary_key=True)
    # Attribute names differ from the DB column names ("unique", "total").
    unique_count = Column("unique", Integer())
    pageview_count = Column("total", BigInteger())

    @classmethod
    @memoize_traffic(time=3600)  # cached for an hour
    def history(cls, interval):
        """Return unique/pageview history for *interval*, with gaps filled."""
        time_points, q = make_history_query(cls, interval)
        return fill_gaps(time_points, q, "unique_count", "pageview_count")
def save_to_postgres(self):
    """Replace the ``sepsis_onset`` table with the current sepsis-onset scores.

    Drops the existing table via a raw psycopg2 connection first (to_sql's
    append mode cannot overwrite it), then bulk-loads the selected columns of
    ``self.sofa_within_si`` and re-runs the dependent SQL file.
    """
    # Credentials/host come from instance attributes set elsewhere.
    engine = create_engine('postgresql+psycopg2://{0}:{1}@{2}:5432/mimic3'.format(self.sqluser, self.sqlpass, self.host))
    # NOTE: mutates the dataframe in place before writing.
    self.sofa_within_si.rename(columns={"sofa_delta": "delta_score"}, inplace=True)
    # somehow we cannot overwrite tables directly with "to_sql" so let's do that before
    conn = psycopg2.connect(dbname=self.dbname, user=self.sqluser, password=self.sqlpass, host=self.host)
    cur = conn.cursor()
    cur.execute(self.query_schema + "drop table IF EXISTS sepsis_onset cascade")
    conn.commit()
    # now let's fill it again
    self.sofa_within_si[['hadm_id', 'sofa', 'sofaresp', 'sofacoag', 'sofaliv', 'sofacardio', 'sofagcs',
                         'sofaren', 'sepsis_time', 'sepsis_onset', 'delta_score', 'sofaresp_delta',
                         'sofacoag_delta', 'sofaliv_delta', 'sofacardio_delta', 'sofagcs_delta',
                         'sofaren_delta']] \
        .to_sql("sepsis_onset", engine, if_exists='append', schema="mimic3_mrosnati",
                dtype={"hadm_id": Integer(), "sofa": Integer(), 'sofaresp': Integer(),
                       'sofacoag': Integer(), 'sofaliv': Integer(), 'sofacardio': Integer(),
                       'sofagcs': Integer(), 'sofaren': Integer(), "sepsis_time": DateTime(),
                       "delta_score": Integer(), 'sofaresp_delta': Integer(),
                       'sofacoag_delta': Integer(), 'sofaliv_delta': Integer(),
                       'sofacardio_delta': Integer(), 'sofagcs_delta': Integer(),
                       'sofaren_delta': Integer()})
    # Re-create dependent objects defined in the SQL file.
    self.create_table(sqlfile="/sofa_delta.sql")
class DbGroup(Base):
    """SQLAlchemy mapping of the ``db_dbgroup`` table (Django-backend schema).

    A group of DbNodes, owned by a user; (name, type) pairs are unique.
    """
    __tablename__ = "db_dbgroup"

    id = Column(Integer, primary_key=True)
    uuid = Column(UUID(as_uuid=True), default=uuid_func)
    name = Column(String(255), index=True)
    type = Column(String(255), default="", index=True)  # "" marks a user-defined group (see __str__)
    time = Column(DateTime(timezone=True), default=timezone.now)
    description = Column(Text, nullable=True)
    user_id = Column(
        Integer,
        ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially="DEFERRED"))
    user = relationship('DbUser', backref=backref('dbgroups', cascade='merge'))
    # Many-to-many node membership; lazy='dynamic' yields a query, not a list.
    dbnodes = relationship('DbNode', secondary=table_groups_nodes,
                           backref="dbgroups", lazy='dynamic')

    __table_args__ = (UniqueConstraint('name', 'type'), )

    def __str__(self):
        if self.type:
            return '<DbGroup [type: {}] "{}">'.format(self.type, self.name)
        else:
            return '<DbGroup [user-defined] "{}">'.format(self.name)

    def get_aiida_class(self):
        """Wrap this row in the Django-backend AiiDA ``Group`` class.

        Imports are local -- presumably to avoid circular imports; confirm.
        """
        from aiida.orm.implementation.django.group import Group as DjangoAiidaGroup
        from aiida.backends.djsite.db.models import DbGroup as DjangoSchemaDbGroup
        dbgroup = DjangoSchemaDbGroup(
            id=self.id,
            type=self.type,
            uuid=self.uuid,
            name=self.name,
            time=self.time,
            description=self.description,
            user_id=self.user_id,
        )
        return DjangoAiidaGroup(dbgroup=dbgroup)
class Items(Base):
    """ORM model for the ``items`` table."""
    __tablename__ = 'items'

    id = Column(Integer, primary_key=True)
    name = Column(String(256))
    quantity = Column(Integer)
    description = Column(String(256))
    date_added = Column(DateTime())

    def print_as_string(self):
        """Render the row as a brace-wrapped string of its fields."""
        rendered = '{name: ' + self.name
        rendered += ' quantity: ' + str(self.quantity)
        rendered += ' description: ' + self.description
        rendered += ' date_added: ' + str(self.date_added) + '}'
        return rendered
class Group(Base):
    """A named group; protected groups are flagged and names are unique."""
    __tablename__ = "groups"

    id = Column(Integer, primary_key=True)
    created = Column(DateTime())
    protected = Column(Boolean, default=False)
    name = Column(String(100), unique=True)

    # Permissions are linked to groups from within the Permissions object.
    # Permissions will be available here by the 'permissions' backref.

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return "<Group('{0}')>".format(self.name)
class File(Base):
    """ Store the parsed files for logging purpose. """
    __tablename__ = "tbl_files"

    # The file name itself is the primary key, so each file is recorded once.
    filename = Column(String(200), primary_key=True)
    # Row creation timestamp. NOTE(review): utcnow() returns a naive datetime
    # even though the column is timezone-aware -- confirm this is intended.
    date = Column(DateTime(timezone=True), default=datetime.datetime.utcnow)

    # __table_args__ = (
    #     {'schema': Base.metadata.schema}
    # )

    def __init__(self, filename):
        self.filename = filename
class Post(Base):
    """ Blog post. """
    __tablename__ = 'posts'

    id = Column(Integer, primary_key=True)
    # FIX: the original wrote DateTime(20). DateTime takes no length -- the 20
    # was silently passed as the positional ``timezone`` flag (truthy), so the
    # column compiled as timezone-aware. Make that explicit.
    date = Column(DateTime(timezone=True), unique=True)
    title = Column(Unicode(50))
    content = Column(UnicodeText())
    # Each post belongs to one category; 'posts' backref on Category.
    category_id = Column(Integer, ForeignKey('categories.id'))
    category = relationship('Category', backref='posts')
    # Many-to-many tags through the association table.
    tags = relationship('Tag', backref='posts', secondary=tag_post_table)

    def __unicode__(self):
        return "%s" % self.id
class AuthorizeBatchTransaction(Base):
    """Settled transactions by settlement batch."""
    __tablename__ = 'authorize_batch_transactions'

    # NotNullColumn is a project helper -- presumably Column(..., nullable=False); confirm.
    batch_id = NotNullColumn(BigInteger(), index=True)
    # Transaction id is assigned upstream, so no autoincrement on the PK.
    trans_id = NotNullColumn(BigInteger(), primary_key=True, autoincrement=False)
    submit_time = NotNullColumn(DateTime())
    transaction_status = NotNullColumn(String(), index=True)
    invoice_number = Column(String())
    first_name = Column(String())
    last_name = Column(String())
    account_type = NotNullColumn(String())
    account_number = NotNullColumn(String())
    # asdecimal=True keeps money as Decimal instead of binary float.
    settle_amount = NotNullColumn(Float(precision=2, asdecimal=True))
class ProductObject(Base):
    """Product catalogue entry -- presumably scraped (see parse_date); confirm."""
    __tablename__ = 'product_object'

    id = Column(String(256), primary_key=True)
    code = Column(String(256))
    name = Column(String(256))
    group_code = Column(String(256))
    group_name = Column(String(256))
    # Prices kept as raw strings as received -- NOTE(review): a numeric type
    # would be needed for sorting/comparison; confirm downstream usage.
    price = Column(String(256))
    prof1_price = Column(String(256))
    prof2_price = Column(String(256))
    is_order_allow = Column(Integer)  # integer flag -- presumably 0/1; confirm
    producer_name = Column(String(256))
    is_universal = Column(Integer)
    onstock = Column(Integer)
    parse_date = Column(DateTime())
class Posts(Base):
    """Forum posts: one row per reply within a discussion URL."""
    __tablename__ = 'forumposts1'

    # Composite key: discussion URL plus the reply's index within it.
    URL = Column(String(200), primary_key=True)
    replyid = Column(Integer, primary_key=True)
    pid = Column(Integer)  # post id
    title = Column(String(500))
    category = Column(String(500))  # discussion category
    categoryURL = Column(String(500))
    # user id; cascades keep posts in sync when the user row changes/disappears
    uid = Column(String(20),
                 ForeignKey('forumusers1.uid', onupdate="CASCADE", ondelete='CASCADE'))
    replyTo = Column(Integer)  # This is the first post id of the discussion
    postTime = Column(
        DateTime(timezone=True))  # precise to hour eg. 2017-02-11 19:00:00
    body = Column(Text)
class LuxPrintServers(Base):
    """Print server registry; ColanderAlchemy config drives the admin form."""
    __tablename__ = 'lux_print_servers'
    __table_args__ = {'schema': _schema}
    # Form titles (translated via _) for the generated admin UI.
    __colanderalchemy_config__ = {
        'title': _('Print server'),
        'plural': _('Print servers')
    }

    id = Column(Integer, primary_key=True, info={
        'colanderalchemy': {'widget': HiddenWidget()}  # hidden in the edit form
    })
    url = Column(Unicode, nullable=False, info={
        'colanderalchemy': {'title': _('Url')}
    })
    creation = Column(DateTime(timezone=False), info={
        'colanderalchemy': {'title': _('Creation date')}
    })
class Agency(Base):
    """GTFS agency record, loaded from agency.txt."""
    datasource = config.DATASOURCE_GTFS
    filename = 'agency.txt'
    __tablename__ = 'agency'

    #id = Column(Integer, Sequence(None, optional=True), primary_key=True, nullable=True)
    # Composite primary key: the same agency_id may recur per transport mode
    # and per modification date -- presumably versioned snapshots; confirm.
    agency_id = Column(String(255), primary_key=True, index=True)
    agency_name = Column(String(255), nullable=False)
    agency_url = Column(String(255), nullable=False)
    agency_timezone = Column(String(50), nullable=False)
    agency_lang = Column(String(10))
    agency_phone = Column(String(50))
    agency_fare_url = Column(String(255))
    transport_mode = Column(String(50), primary_key=True)
    date_modified = Column(DateTime(timezone=True), primary_key=True)
class Items(Base):
    """ ORM model for the ``items`` table. """
    __tablename__ = 'items'

    id = Column(Integer, primary_key=True)
    name = Column(String(256))
    quantity = Column(Integer)
    description = Column(String(256))
    date_added = Column(DateTime())

    def to_json(self):
        """Return the instance's column values as a plain dict.

        FIX: the original bound ``self.__dict__`` directly (under the name
        ``dict``, shadowing the builtin) and deleted ``_sa_instance_state``
        from it -- mutating the live instance and breaking SQLAlchemy's
        session bookkeeping. Build a filtered copy instead.
        """
        return {key: value for key, value in self.__dict__.items()
                if key != "_sa_instance_state"}
class TargetedImpressionsByCodename(Base):
    """Impressions for ads, correlated by ad campaign."""
    __tablename__ = "traffic_thingtarget"

    # Attribute names differ from DB column names ("fullname", "unique", "total").
    # Composite key: (codename, subreddit, date, interval) per row.
    codename = Column("fullname", String(), nullable=False, primary_key=True)
    subreddit = Column(String(), nullable=False, primary_key=True)
    date = Column(DateTime(), nullable=False, primary_key=True)
    interval = Column(String(), nullable=False, primary_key=True)
    unique_count = Column("unique", Integer())
    pageview_count = Column("total", Integer())

    @classmethod
    @memoize_traffic(time=3600)  # cached for an hour
    def total_by_codename(cls, codenames):
        """Delegate to the module-level total_by_codename for this model."""
        return total_by_codename(cls, codenames)
class StatAgentPeriodic(Base):
    """Periodic per-agent statistics: login/pause/wrapup durations per time slot."""
    __tablename__ = 'stat_agent_periodic'
    __table_args__ = (
        Index('stat_agent_periodic__idx__stat_agent_id', 'stat_agent_id'),
        Index('stat_agent_periodic__idx__time', 'time'),
    )

    id = Column(Integer, primary_key=True)
    time = Column(DateTime(timezone=True), nullable=False)
    # Durations stored as Postgres INTERVALs; server default is a zero interval.
    login_time = Column(INTERVAL, nullable=False, server_default='0')
    pause_time = Column(INTERVAL, nullable=False, server_default='0')
    wrapup_time = Column(INTERVAL, nullable=False, server_default='0')
    stat_agent_id = Column(Integer, ForeignKey("stat_agent.id"))
    stat_agent = relationship(StatAgent, foreign_keys=stat_agent_id)
def product_out():
    """Write the module-level ``data`` frame to ``table_name``, replacing any
    existing table and using the frame index as the ``Id`` column."""
    column_types = {
        'waktu': DateTime(),
        'sku': String(1000),
        'nama_barang': String(1000),
        'jumlah_keluar': Integer,
        'harga_jual': Numeric(),
        'total': Numeric(),
        'catatan': String(1000),
    }
    data.to_sql(table_name,
                conn,
                if_exists='replace',
                chunksize=500,
                index=True,
                index_label='Id',
                dtype=column_types)
class UtcDateTime(TypeDecorator):
    """DateTime that only accepts timezone-aware values, normalised to UTC."""

    impl = DateTime(timezone=True)

    def process_bind_param(self, value, dialect):
        """Validate and convert an outgoing value to UTC (None passes through)."""
        if value is None:
            return value
        if not isinstance(value, datetime.datetime):
            raise TypeError('expected datetime.datetime, not ' + repr(value))
        if value.tzinfo is None:
            raise ValueError('naive datetime is not allowed')
        return value.astimezone(datetime.timezone.utc)

    def process_result_value(self, value, dialect):
        """Tag naive datetimes coming back from the driver as UTC."""
        if value is None:
            return value
        if value.tzinfo is None:
            value = value.replace(tzinfo=datetime.timezone.utc)
        return value
class AdserverImpressionsByCodename(Base):
    """Impressions for ads."""
    __tablename__ = "adserver_traffic_thing"

    # Attribute names differ from DB column names ("fullname", "unique", "total").
    codename = Column("fullname", String(), nullable=False, primary_key=True)
    date = Column(DateTime(), nullable=False, primary_key=True)
    interval = Column(String(), nullable=False, primary_key=True)
    unique_count = Column("unique", Integer())
    pageview_count = Column("total", BigInteger())

    @classmethod
    @memoize_traffic(time=3600)  # cached for an hour
    def history(cls, interval, codename):
        """Gap-filled history for one codename; only daily data exists."""
        if interval != "day":
            raise NotImplementedError
        time_points, q = make_history_query(cls, interval)
        q = q.filter(cls.codename == codename)
        return fill_gaps(time_points, q, "unique_count", "pageview_count")

    @classmethod
    @memoize_traffic(time=3600)
    def promotion_history(cls, codename, start, stop):
        """Pageview history for a promotion between start and stop (daily)."""
        return promotion_history(cls, cls.pageview_count, codename, start, stop,
                                 interval="day")

    @classmethod
    @memoize_traffic(time=3600)
    def historical_totals(cls, interval):
        """Totals across all codenames; only daily data exists."""
        if interval != "day":
            raise NotImplementedError
        return totals(cls, interval)

    @classmethod
    @memoize_traffic(time=3600)
    def top_last_month(cls):
        return top_last_month(cls, "codename")

    @classmethod
    def recent_codenames(cls, fullname):
        # Not supported for adserver data.
        raise NotImplementedError

    @classmethod
    @memoize_traffic(time=3600)
    def total_by_codename(cls, codename):
        return total_by_codename(cls, codename, interval="day")
class DbGroup(Base):
    """SQLAlchemy mapping of the ``db_dbgroup`` table.

    A group of DbNodes (many-to-many via ``table_groups_nodes``), owned by a
    user; (name, type) pairs are unique.  Commented-out association-proxy
    experiments from the original have been removed.
    """
    __tablename__ = "db_dbgroup"

    id = Column(Integer, primary_key=True)
    uuid = Column(UUID(as_uuid=True), default=uuid_func)
    name = Column(String(255), index=True)
    type = Column(String(255), default="", index=True)  # "" marks a user-defined group
    time = Column(DateTime(timezone=True), default=timezone.now)
    description = Column(Text, nullable=True)
    user_id = Column(Integer, ForeignKey('db_dbuser.id', ondelete='CASCADE',
                                         deferrable=True, initially="DEFERRED"))
    user = relationship('DbUser', backref=backref('dbgroups', cascade='merge'))

    # lazy='dynamic' exposes membership as a filterable query, not a list.
    dbnodes = relationship('DbNode', secondary=table_groups_nodes,
                           backref="dbgroups", lazy='dynamic')

    __table_args__ = (
        UniqueConstraint('name', 'type'),
    )

    @property
    def pk(self):
        """Alias for the primary key."""
        return self.id

    def __str__(self):
        if self.type:
            return '<DbGroup [type: {}] "{}">'.format(self.type, self.name)
        else:
            return '<DbGroup [user-defined] "{}">'.format(self.name)

    def get_aiida_class(self):
        """Wrap this row in the AiiDA ORM Group (local import to avoid a cycle)."""
        from aiida.orm.implementation.sqlalchemy.group import Group
        return Group(dbgroup=self)
class Items(Base):
    """ORM model for the ``items`` table."""
    __tablename__ = 'items'

    id = Column(Integer, primary_key=True)
    name = Column(String(256))
    quantity = Column(Integer)
    description = Column(String(256))
    date_added = Column(DateTime())

    def __repr__(self):
        """Render the row's fields, newline-terminated (same text as before)."""
        return ("name: %s, quantity: %s, description: %s, date_added: %s\n"
                % (self.name, self.quantity, self.description, self.date_added))
def test_defaults(self):
    """Each argument-less type must render its default DDL string."""
    cases = [
        (Integer(), 'INTEGER'),
        (SmallInteger(), 'SMALLINT'),
        (BigInteger(), 'BIGINT'),
        (Numeric(), 'NUMERIC'),
        (Float(), 'FLOAT'),
        (DateTime(), 'TIMESTAMP(6)'),
        (Date(), 'DATE'),
        (Time(), 'TIME(6)'),
        (String(), 'LONG VARCHAR'),
        (Text(), 'CLOB'),
        (Unicode(), 'LONG VARCHAR CHAR SET UNICODE'),
        (UnicodeText(), 'CLOB CHAR SET UNICODE'),
        (Boolean(), 'BYTEINT'),
    ]
    for sqla_type, expected_ddl in cases:
        assert self._comp(sqla_type) == expected_ddl
class GoldPartnerDealCode(Base):
    """Promo codes for deals from reddit gold partners."""
    __tablename__ = "reddit_gold_partner_deal_codes"

    id = Column(Integer, primary_key=True)
    deal = Column(String, nullable=False)
    code = Column(String, nullable=False)
    # NULL user means the code is still unclaimed.
    user = Column(Integer, nullable=True)
    # Set when the code is claimed.
    date = Column(DateTime(timezone=True), nullable=True)

    @classmethod
    @with_sqlalchemy_session
    def get_codes_for_user(cls, user):
        """Return {deal: code} for every code this user has claimed."""
        results = Session.query(cls).filter(cls.user == user._id)
        codes = {r.deal: r.code for r in results}
        return codes

    @classmethod
    @with_sqlalchemy_session
    def claim_code(cls, user, deal):
        """Assign an unclaimed code for *deal* to *user* and return it.

        Idempotent: an already-claimed code is returned as-is.  Raises
        GoldPartnerCodesExhaustedError when no unclaimed codes remain.
        """
        # check if they already have a code for this deal and return it
        try:
            result = (Session.query(cls).filter(
                and_(cls.user == user._id, cls.deal == deal)).one())
            return result.code
        except NoResultFound:
            pass

        # select an unclaimed code, assign it to the user, and return it
        # pg_try_advisory_lock guards against two sessions grabbing the same
        # row; only a row we can lock is eligible.
        try:
            claiming = (Session.query(cls).filter(
                and_(cls.deal == deal, cls.user == None,
                     func.pg_try_advisory_lock(cls.id))).limit(1).one())
        except NoResultFound:
            raise GoldPartnerCodesExhaustedError

        claiming.user = user._id
        claiming.date = datetime.now(g.tz)
        Session.add(claiming)
        Session.commit()

        # release the lock
        Session.query(func.pg_advisory_unlock_all()).all()

        return claiming.code