class News(Base):
    """A news post written by a user."""

    __tablename__ = 'news'

    id = Column(Integer, primary_key=True)
    title = Column(String)
    body = Column(String)
    created = Column(DateTime, server_default=func.current_timestamp())
    modified = Column(DateTime, server_default=func.current_timestamp())
    user_id = Column(Integer, ForeignKey('user.id'))
    user = relationship('User', foreign_keys=user_id)

    def __repr__(self):
        return '<News: %s>' % self.title

    def toDict(self, full=False):
        """Serialize to a dict.

        Args:
            full: when True, also include id, the serialized author,
                and the modified timestamp.
        """
        ret = {
            'title': self.title,
            'body': self.body,
            'created': self.created,
        }
        if full:
            ret['id'] = self.id
            ret['user'] = self.user.toDict()
            ret['modified'] = self.modified
        return ret
class IndexJob(Base):
    """IndexJob data model.

    Represents a job for the index service.

    Fields:
        data: JSON data which specifies info about the data to be indexed.
            This is not the data that is actually indexed.
        context: the request context
        created: datetime object containing the time the job was created.
        not_before: datetime object containing the earliest time that the
            job should be started.
        start: datetime object containing the time the job started.
        end: datetime object containing the time the job ended.
        successful: boolean indicating that the job was successfully
            completed.
        retries_remaining: number of retries remaining for the job.
    """

    __tablename__ = "index_job"

    id = Column(Integer, primary_key=True)
    data = Column(Text(4096))
    context = Column(String(1024))
    created = Column(DateTime, server_default=func.current_timestamp())
    not_before = Column(DateTime, server_default=func.current_timestamp())
    start = Column(DateTime, nullable=True)
    end = Column(DateTime, nullable=True)
    # presumably identifies the worker that claimed the job — confirm
    owner = Column(String(1024), nullable=True)
    successful = Column(Boolean, nullable=True)
    retries_remaining = Column(Integer)
class ProductWholesale(Base):
    """A wholesale product offering from a specific shop."""

    __tablename__ = "products_wholesale"

    id = Column(Integer, primary_key=True)
    shop_name = Column(String(length=100), nullable=False)
    name = Column(String(length=500), nullable=False)
    ean = Column(String(length=20), nullable=False)
    is_available = Column(Boolean)
    price_net = Column(Numeric(precision=8, scale=2), nullable=False)
    age_restriction = Column(Integer, nullable=False, default=0)
    timestamp_created = Column(TIMESTAMP, server_default=func.current_timestamp(), nullable=False)
    timestamp_updated = Column(
        TIMESTAMP,
        server_default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
        nullable=False,
    )

    # Bug fix: defining __eq__ implicitly sets __hash__ to None, which makes
    # instances unhashable and breaks SQLAlchemy session bookkeeping (sessions
    # store instances in sets/dicts). Restore identity-based hashing.
    __hash__ = object.__hash__

    def update(self, other):
        """Copy the mutable fields of ``other`` onto self, skipping Nones."""
        if other.name is not None:
            self.name = other.name
        if other.is_available is not None:
            self.is_available = other.is_available
        if other.price_net is not None:
            self.price_net = other.price_net
        if other.age_restriction is not None:
            self.age_restriction = other.age_restriction

    def __eq__(self, other):
        """Field-wise equality on business attributes (id/timestamps excluded)."""
        # Bug fix: previously comparing against a foreign type raised
        # AttributeError; NotImplemented lets Python fall back gracefully.
        if not isinstance(other, ProductWholesale):
            return NotImplemented
        return (
            self.shop_name == other.shop_name
            and self.name == other.name
            and self.ean == other.ean
            and self.is_available == other.is_available
            and self.price_net == other.price_net  # Decimal check is ok
            and self.age_restriction == other.age_restriction
        )

    def __repr__(self):
        return (f"ProductWholesale("
                f"id={self.id}, "
                f"shop_name='{self.shop_name}', "
                f"name='{self.name}', "
                f"ean='{self.ean}', "
                f"is_available={self.is_available}, "
                f"price_net={self.price_net}, "
                f"age_restriction={self.age_restriction}, "
                f"timestamp_created={self.timestamp_created}, "
                f"timestamp_updated={self.timestamp_updated}"
                f")")

    def __str__(self):
        return self.__repr__()
class BaseTable(db.Model):
    """Abstract base providing an id plus created/modified timestamps."""

    __abstract__ = True

    id = Column(Integer, primary_key=True)
    date_created = Column(DateTime, default=func.current_timestamp())
    date_modified = Column(DateTime,
                           default=func.current_timestamp(),
                           onupdate=func.current_timestamp())
class Entry(db.Model):
    """Entry model class

    Each user has three rows on entries for a date - done, todo, blocking.
    """

    __tablename__ = 'entries'

    id = db.Column(db.Integer, primary_key=True)
    text = db.Column(db.String)
    type = db.Column(db.Enum('done', 'todo', 'blocking', name='entry_types'))
    date = db.Column(db.Date, server_default=func.current_date())
    time_created = db.Column(db.DateTime, server_default=func.current_timestamp())
    time_updated = db.Column(db.DateTime, onupdate=func.current_timestamp())
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    user = db.relationship('User', backref=db.backref('entries', lazy='dynamic'))

    # One row per (type, user, date) combination.
    __table_args__ = (db.UniqueConstraint('type', 'user_id', 'date',
                                          name='_type_user_date_uc'), )

    def __init__(self, text, entry_type, user, date=None):
        """Create an entry; when date is omitted the DB default (today) is used."""
        self.text = text
        self.type = entry_type
        self.user = user
        if date:
            self.date = date

    def __repr__(self):
        # Bug fix: the closing '>' was missing from the repr format string.
        return '<Entry {}:{}:{}>'.format(self.user.name, self.type, self.text[:50])
class LotteryResult(Mixin, Base):
    """Monthly lottery result totals for personal and corporate entrants."""

    __tablename__ = 'lottery_result'
    __table_args__ = {'mysql_charset': 'utf8', 'mysql_collate': 'utf8_bin'}

    id = Column(Integer, primary_key=True, autoincrement=True)
    created_time = Column(DateTime(timezone=True), nullable=False,
                          server_default=func.current_timestamp())
    updated_time = Column(DateTime(timezone=True), nullable=False,
                          server_default=func.current_timestamp())
    # "YYYY-MM" month key; unique so there is at most one row per month.
    month = Column(CHAR(7), nullable=False, default="0000-00",
                   unique=True, index=True)
    personal_participate_total = Column(Integer, nullable=False)
    personal_total = Column(Integer, nullable=False)
    corporate_participate_total = Column(Integer, nullable=False)
    corporate_total = Column(Integer, nullable=False)

    def __init__(self, **kwargs):
        # Bug fix: removed a stray no-op `dict()` call that followed the
        # super().__init__ call and had no effect.
        super(LotteryResult, self).__init__(**kwargs)
class BaseModel(Base):
    """Abstract base with a GUID primary key and audit timestamps."""

    __abstract__ = True

    uuid = Column(GUID(), primary_key=True, default=uuid.uuid4)
    created_at = Column(DateTime, default=func.current_timestamp())
    updated_at = Column(DateTime,
                        default=func.current_timestamp(),
                        onupdate=func.current_timestamp())
class MigrationTable(Base):
    """Tracks applied migrations; one table per environment."""

    __tablename__ = "migrations_{}".format(environment)

    id = Column(String(100), primary_key=True)
    commit_time = Column(TIMESTAMP,
                         onupdate=func.current_timestamp(),
                         default=func.current_timestamp())
    status = Column(SmallInteger, default=0)
class Base(_Model):
    """Abstract base: integer id, audit timestamps, soft-delete flag."""

    __abstract__ = True

    id = Column(Integer, primary_key=True)
    created = Column(DateTime, default=func.current_timestamp())
    modified = Column(DateTime,
                      default=func.current_timestamp(),
                      onupdate=func.current_timestamp())
    active = Column(Boolean, nullable=False, default=True)
class User(dbMixin, UserMixin, db.Model):
    """Application user with pbkdf2-sha256 hashed credentials."""

    __tablename__ = "users"

    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(20), unique=True, nullable=False)
    first_name = db.Column(db.String(20), unique=False, nullable=True)
    last_name = db.Column(db.String(20), unique=False, nullable=True)
    hint = db.Column(db.String(2000), unique=False, nullable=True)
    address = db.Column(db.String(200), unique=False, nullable=True)
    password = db.Column(db.String(100), nullable=False)  # pbkdf2 hash, never plain text
    email = db.Column(db.String(120), unique=True, nullable=False)
    admin = db.Column(db.Boolean, unique=False, default=False)
    groups = db.relationship("GroupsAndUsersAssociation", backref="_user")
    last_logged_in = db.Column(db.DateTime, nullable=True)
    last_logged_out = db.Column(db.DateTime, nullable=True)
    created_at = db.Column(db.DateTime, default=func.current_timestamp())
    # Bug fix: onupdate previously used client-side naive datetime.now while
    # the default was the server clock, mixing time sources (and timezones).
    # Use the database clock consistently for both.
    updated_at = db.Column(db.DateTime,
                           default=func.current_timestamp(),
                           onupdate=func.current_timestamp())

    def __init__(
        self,
        username="******",
        first_name="first_name",
        last_name="last_name",
        email="*****@*****.**",
        password="******",
        hint="somehint",
        address="whereilive",
        admin=False,
    ):
        self.username = username
        self.first_name = first_name
        self.last_name = last_name
        self.email = email
        self.admin = admin
        self.address = address
        self.hint = hint
        # Store only the pbkdf2-sha256 hash of the supplied password.
        self.password = pbkdf2_sha256.hash(password)

    def verify_hash(self, password, hash):
        """Return True when `password` matches the stored pbkdf2 `hash`."""
        return pbkdf2_sha256.verify(password, hash)

    def set_password(self, password):
        """Replace the stored password hash."""
        self.password = pbkdf2_sha256.hash(password)

    @property
    def avatar_url(self):
        return f"https://avatars.dicebear.com/api/croodles/{self.username}.svg"

    def __str__(self):
        return self.email

    __repr__ = __str__
def _index(self, context, index_action, index_data, index_all=False):
    """Helper function. Pulled out common code from index() & indexAll().

    This method creates a job to index the specified input data.

    Args:
        context: String to identify calling context
        index_action: IndexAction object
        index_data: Thrift IndexData object
        index_all: Boolean indicating if all keys should be acted upon
    Returns:
        None
    Raises:
        InvalidDataException if input data to index is invalid.
        UnavailableException for any other unexpected error.
    """
    # Bug fix: db_session is initialized up front; previously, if
    # get_database_session() raised, the finally clause hit a NameError.
    db_session = None
    try:
        # Get a db session
        db_session = self.get_database_session()

        # Validate inputs
        self._validate_index_params(
            context, index_action, index_data, index_all
        )

        # If input specified a start-processing-time convert it to a UTC
        # DateTime object; otherwise defer to the database clock.
        if index_data.notBefore is not None:
            processing_start_time = tz.timestamp_to_utc(index_data.notBefore)
        else:
            processing_start_time = func.current_timestamp()

        # Massage input data into format for IndexJob
        data = IndexOp(action=index_action, data=index_data)

        # Create IndexJob
        job = IndexJobModel(
            created=func.current_timestamp(),
            context=context,
            not_before=processing_start_time,
            retries_remaining=settings.INDEXER_JOB_MAX_RETRY_ATTEMPTS,
            data=json.dumps(data.to_json())
        )
        db_session.add(job)
        db_session.commit()
    finally:
        if db_session is not None:
            db_session.close()
class ModelBase(object):
    """Abstract model base with id and creation/modification timestamps."""

    __abstract__ = True

    id = Column(Integer, primary_key=True)
    timestamp_created = Column(DateTime, nullable=False,
                               default=func.current_timestamp())
    timestamp_modified = Column(DateTime, nullable=False,
                                default=func.current_timestamp(),
                                onupdate=func.current_timestamp())
def _create_chat_archive_job(self, db_session):
    """Create and flush a ChatArchiveJob for this chat session.

    The job is delayed by 5 minutes because it takes Tokbox a few
    minutes to make the archive available.

    Args:
        db_session: SQLAlchemy session; the job is added and flushed
            (not committed) on it.
    """
    # Bug fix: removed the `except Exception as e: raise e` wrapper —
    # re-raising the caught exception was a no-op that only obscured
    # the traceback origin.
    self.log.info("Creating ChatArchiveJob...")
    # wait 5 minutes before we start the archive job
    # since it takes Tokbox time a few minutes.
    not_before = func.current_timestamp() \
        + datetime.timedelta(minutes=5)
    job = ChatArchiveJob(chat_session_id=self.chat_session_id,
                         created=func.current_timestamp(),
                         not_before=not_before,
                         retries_remaining=3)
    db_session.add(job)
    db_session.flush()
class Startup(Base):
    """A startup company listing, unique per (name, city)."""

    __tablename__ = 'startup'

    id = Column(Integer, primary_key=True)
    name = Column(String(40), nullable=False)
    slug = Column(String(40), nullable=False)
    description = Column(String(500))
    link = Column(String(100))
    careers_link = Column(String(100))
    city_id = Column(Integer, ForeignKey('city.id'))
    city = relationship(City)
    created = Column(DateTime(timezone=True), server_default=func.now())
    last_updated = Column(DateTime(timezone=True),
                          server_default=func.now(),
                          onupdate=func.current_timestamp())
    user_id = Column(Integer, ForeignKey('user.id'))
    user = relationship('User')

    __table_args__ = (UniqueConstraint('name', 'city_id'), )

    def serialize(self):
        """Return a JSON-friendly dict of the public fields."""
        return {
            'id': self.id,
            'name': self.name,
            'slug': self.slug,
            'description': self.description,
            'link': self.link,
            'careers_link': self.careers_link,
            'user_id': self.user_id,
            'created': self.created,
        }
def _create_chat_archive_job(self, db_session):
    """Create and flush a ChatArchiveJob for this chat session.

    The job is delayed by 5 minutes because it takes Tokbox a few
    minutes to make the archive available.

    Args:
        db_session: SQLAlchemy session; the job is added and flushed
            (not committed) on it.
    """
    # Bug fix: removed the `except Exception as e: raise e` wrapper —
    # re-raising the caught exception was a no-op that only obscured
    # the traceback origin.
    self.log.info("Creating ChatArchiveJob...")
    # wait 5 minutes before we start the archive job
    # since it takes Tokbox time a few minutes.
    not_before = func.current_timestamp() \
        + datetime.timedelta(minutes=5)
    job = ChatArchiveJob(
        chat_session_id=self.chat_session_id,
        created=func.current_timestamp(),
        not_before=not_before,
        retries_remaining=3)
    db_session.add(job)
    db_session.flush()
class Time(Base):
    """A player's recorded completion time for a game on a given date."""

    __tablename__ = 'time'
    __table_args__ = (UniqueConstraint('date', 'game', 'player_id'), )

    id = Column(Integer, primary_key=True, autoincrement=True)
    date = Column(Date, nullable=False)
    datetime = Column(DateTime, server_default=func.current_timestamp(),
                      nullable=False)
    game = Column(String, nullable=False)
    player_id = Column(Integer, ForeignKey('player.id'))
    time = Column(Float, nullable=False)

    def __init__(self, player_id, time, game):
        self.player_id = player_id
        self.time = time
        self.game = game
        # The logical date is pinned to the NYT timezone, not server local.
        self.date = dt.datetime.now(nyt_tz).date()

    @validates('time')
    def validates_time(self, key, time):
        """Reject non-positive times."""
        if time <= 0.:
            raise InvalidParameter(f'time must be greater than 0., got {time}')
        return time

    @validates('game')
    def validates_game(self, key, game):
        """Reject unknown game names."""
        if game not in games:
            raise InvalidParameter(f'game must be in {games}, got {game}')
        return game
class Roll(Base):
    """A pending dice roll tied to a Discord message reaction."""

    __tablename__ = 'rolls'

    id = Column(Integer, primary_key=True)
    guild_id = Column(BigInteger)
    # discord message ID to remove reaction from
    message_id = Column(BigInteger, nullable=False)
    # discord channel ID to remove reaction from
    channel_id = Column(BigInteger, nullable=False)
    skill_id = Column(Integer,
                      ForeignKey('skills.id', ondelete='cascade'),
                      nullable=False)
    token = Column(String(length=1), nullable=False)
    comment = Column(String)
    created = Column(DateTime(timezone=True), server_default=func.now())
    modified = Column(DateTime(timezone=True),
                      server_default=func.now(),
                      onupdate=func.current_timestamp())

    skill = relationship('Skill', back_populates='rolls')

    __table_args__ = (UniqueConstraint('guild_id', 'token'), )
class Parent(Base):
    """A capture event with a count and its associated PRESENCE children."""

    __tablename__ = 'CAPTURE'

    id = Column(Integer, primary_key=True)
    time = Column(DateTime(timezone=True),
                  server_default=func.current_timestamp())
    count = Column(Integer)
    children = relationship("PRESENCE")
class Skill(Base):
    """A character skill; skills form a tree via parent_id self-reference."""

    __tablename__ = 'skills'

    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False, server_default='do anything')
    slug = Column(String, nullable=False, server_default='do-anything')
    level = Column(Integer, nullable=False, server_default='1')
    xp = Column(Integer)
    char_id = Column(Integer,
                     ForeignKey('chars.id', ondelete='cascade'),
                     nullable=False)
    parent_id = Column(Integer, ForeignKey('skills.id', ondelete='cascade'))
    created = Column(DateTime(timezone=True), server_default=func.now())
    modified = Column(DateTime(timezone=True),
                      server_default=func.now(),
                      onupdate=func.current_timestamp())

    char = relationship('Char', back_populates='skills')
    parent = relationship('Skill', remote_side=[id])
    children = relationship('Skill',
                            remote_side=[parent_id],
                            back_populates='parent',
                            order_by='Skill.created',
                            cascade="all, delete",
                            passive_deletes=True)
    rolls = relationship('Roll',
                         back_populates='skill',
                         cascade="all, delete",
                         passive_deletes=True)

    __table_args__ = (UniqueConstraint('char_id', 'slug'), )
class UserSurvey(db.Model):
    """Demographic and consent survey answers for a user."""

    __tablename__ = 'user_survey'

    id = db.Column(BIGINT, primary_key=True)
    user = db.Column(BIGINT, db.ForeignKey('user.id'), nullable=False)
    dob = db.Column(VARCHAR(128), nullable=False)
    city_and_country = db.Column(VARCHAR(128), nullable=False)
    gender = db.Column(VARCHAR(128), nullable=False)
    race = db.Column(VARCHAR(128), nullable=False)
    allow_data_comparison = db.Column(BOOLEAN, default=False)
    term_of_service = db.Column(BOOLEAN, default=False)
    privacy_policy = db.Column(BOOLEAN, default=False)
    induction_completed = db.Column(BOOLEAN, default=False)
    service_consent = db.Column(BOOLEAN, default=False)
    consent_on_file = db.Column(BOOLEAN, default=False)
    record_created = db.Column(TIMESTAMP, default=func.current_timestamp(),
                               nullable=False)

    def __init__(self, user, dob, city_and_country, gender, race,
                 allow_data_comparison, term_of_service, privacy_policy,
                 induction_completed, service_consent):
        self.user = user
        self.dob = dob
        self.city_and_country = city_and_country
        self.gender = gender
        self.race = race
        self.allow_data_comparison = allow_data_comparison
        # Bug fix: term_of_service was accepted but never assigned, so the
        # column silently fell back to its default regardless of the argument.
        self.term_of_service = term_of_service
        self.privacy_policy = privacy_policy
        self.induction_completed = induction_completed
        self.service_consent = service_consent

    def __repr__(self):
        return '<id {}>'.format(self.id)
def test_alter_column_set_compiled_default(self):
    """ALTER COLUMN with a compiled SQL function renders SET DEFAULT."""
    context = op_fixture()
    op.alter_column(
        "t",
        "c",
        server_default=func.utc_thing(func.current_timestamp()),
    )
    context.assert_(
        "ALTER TABLE t ALTER COLUMN c SET DEFAULT utc_thing(CURRENT_TIMESTAMP)"
    )
class ResultsTaxoncount(db.Model):
    """Per-sample taxon count results."""

    __tablename__ = 'results_taxoncount'

    id = db.Column(BIGINT, primary_key=True)
    sample = db.Column(BIGINT, db.ForeignKey('samples.id'), nullable=False)
    # NOTE(review): default=True on BIGINT columns stores 1 — looks like a
    # placeholder default; confirm this is intentional.
    ssr = db.Column(BIGINT, default=True, nullable=True)
    taxon = db.Column(BIGINT, db.ForeignKey('taxa.id'), nullable=False)
    count = db.Column(BIGINT, default=True, nullable=True)
    avg = db.Column(BIGINT, default=True, nullable=True)
    created = db.Column(TIMESTAMP, default=func.current_timestamp())
    count_norm = db.Column(BIGINT, default=True, nullable=True)
    disabled_sample = db.Column(BIGINT, default=True, nullable=True)
    terminal_count = db.Column(BIGINT, nullable=False)

    def __init__(self, sample, ssr, taxon, count, avg, count_norm,
                 disabled_sample, terminal_count):
        self.sample = sample
        self.ssr = ssr
        self.taxon = taxon
        self.count = count
        self.avg = avg
        self.count_norm = count_norm
        self.disabled_sample = disabled_sample
        self.terminal_count = terminal_count

    def __repr__(self):
        return '<id {}>'.format(self.id)
class Commit(Base):
    """A VCS commit belonging to a project, with its generated pages."""

    __tablename__ = "commit"

    id = Column(Integer, primary_key=True)
    hash = Column(String)
    timestamp = Column(TIMESTAMP, default=func.current_timestamp())
    project_id = Column(Integer, ForeignKey("project.id"))

    project = relationship("Project", back_populates="commits")
    pages = relationship("Page", back_populates="commit")

    @staticmethod
    def get_or_create(db: Session, project_name: str, hash: str):
        """Return the commit for (project, hash), creating it if absent."""
        project = Project.get_or_create(db, project_name)
        existing = db.query(Commit).filter(Commit.project_id == project.id,
                                           Commit.hash == hash).first()
        if existing:
            return existing
        created = Commit(hash=hash, project=project)
        db.add(created)
        db.commit()
        return created

    @staticmethod
    def get(db: Session, project_name: str, hash: str):
        """Return the commit for (project, hash), or None if not found."""
        project = Project.get_or_create(db, project_name)
        return db.query(Commit).filter(Commit.project_id == project.id,
                                       Commit.hash == hash).first()
class CustomersSchema(Base):
    """Customer record with address, geolocation, and audit timestamps."""

    # NOTE(review): table name "costumers" looks like a typo for "customers";
    # kept as-is since renaming would require a migration.
    __tablename__ = "costumers"

    # NOTE(review): the default stores uuid4().hex (no dashes) into a
    # UUID(as_uuid=True) column — confirm the dialect coerces this as intended.
    id = Column(UUID(as_uuid=True), primary_key=True,
                default=lambda: uuid4().hex, unique=True)
    name = Column(String)
    street_address = Column(String)
    address_number = Column(String)
    city = Column(String)
    state = Column(String)
    zip_code = Column(INTEGER)
    geolocation = Column(String)
    associated_route = Column(String, default="Outros")
    creation_date = Column(DateTime, default=datetime.datetime.now)
    last_update = Column(TIMESTAMP,
                         server_default=func.now(),
                         onupdate=func.current_timestamp())

    class Config:
        schema_extra = {
            "example": {
                "name": "Costumer_1",
                "geolocation": "(40.721959482, -73.878993913)"
            }
        }
def init(self, auto_create=True):
    """Set up the engine, session, and post-reference table mapping.

    The URI is split on ':' — everything after the final ':' is the
    table name, the rest is the engine URI.

    Args:
        auto_create: when True, create the table if it does not exist.
    """
    # TODO handle if user does not pass in table sqlite://path.db
    uri_parts = self.uri.split(":")
    engine_uri = ":".join(uri_parts[:-1])
    table_name = uri_parts[-1]

    metadata = MetaData()
    postref_table = Table(
        table_name, metadata,
        Column('id', Integer, primary_key=True),
        Column('created_at', DateTime, default=func.now()),
        Column('updated_at', DateTime, default=func.now(),
               onupdate=func.current_timestamp()),
        Column('uuid', String(512)),
        Column('path', String(512)),
        Column('revision', Integer, default=0),
        Column('status', Integer, default=self.PostStatus.DRAFT.value),
        Column('ref', String(512)),
        Column('data', LargeBinary))

    self.engine = create_engine(engine_uri, pool_recycle=3600)
    self.session = scoped_session(sessionmaker(bind=self.engine))
    if auto_create:
        postref_table.create(self.engine, checkfirst=True)

    # Classical mapping: an empty class mapped onto the table above.
    class PostRef(object):
        pass

    mapper(PostRef, postref_table)
    self.PostRef = PostRef
class Notification(Base):
    """Notification data model.

    The 'token' and 'context' are considered together for a
    notification to be unique.

    Fields:
        created: datetime object containing the time the Notification
            was created.
        token: notification ID. Used to allow creators of Notification
            objects to specify an ID.
        context: the request context
        priority: Priority level integer
        recipients: User data model objects
        subject: the notification subject
        html_text: html notification body
        plain_text: plain text notification body
    """

    __tablename__ = "notification"
    __table_args__ = (UniqueConstraint('token', 'context'), )

    id = Column(Integer, primary_key=True)
    created = Column(DateTime, server_default=func.current_timestamp())
    token = Column(String(1024))
    context = Column(String(1024))
    priority = Column(Integer)
    recipients = relationship(User,
                              secondary=lambda: NotificationUser.__table__)
    subject = Column(String(1024))
    html_text = Column(Text, nullable=True)
    plain_text = Column(Text, nullable=True)
class Log(Base):
    """ Logging data """

    __tablename__ = 'logs'

    id = Column(Integer, primary_key=True)
    created_at = Column(TIMESTAMP, server_default=func.current_timestamp())
    created_by_id = Column(Integer, ForeignKey('annotators.id'))
    created_by = relationship('Annotator',
                              backref='logs_created',
                              foreign_keys=[created_by_id])
    execution_number = Column(Integer)
    type = Column(Unicode(255), nullable=False, index=True)
    value = Column(UnicodeText)

    # NOTE: this runs once at class-definition (import) time, not per instance.
    logger.debug('Initialized Log')

    def __repr__(self):
        # Python 2 idiom: `unicode` does not exist on Python 3.
        return "<Log#%s(%s)>" % (str(self.id),
                                 unicode(self.type).encode('utf8'))
class Message(Base):
    """A message within a support ticket, written by a user."""

    __tablename__ = 'messages'

    id = Column(Integer, primary_key=True)
    ticket_id = Column(Integer, ForeignKey('tickets.id'))
    sender_id = Column(Integer, ForeignKey('users.id'))
    body = Column(Text)
    date = Column(DateTime, nullable=False,
                  server_default=func.current_timestamp())

    # Relationship
    ticket = relationship('Ticket', back_populates='messages')
    sender = relationship('User', back_populates='messages')

    @staticmethod
    def add(session, body: str, ticket_id: int, sender_conversation: int) -> None:
        '''Add a message to the database.

        The sender is resolved from their conversation id.
        '''
        sender_id = User.find_by_conversation(session, sender_conversation).id
        session.add(
            Message(ticket_id=ticket_id, sender_id=sender_id, body=body))
        session.commit()
class ProdutoModel(db.Model):
    """A product offered by a company (empresa)."""

    __tablename__ = 'tb_produto'

    id = db.Column(db.Integer, primary_key=True)
    nome = db.Column(db.String(255))
    descricao = db.Column(db.Text())
    preco = db.Column(db.Float(precision='3,2'))
    is_visivel = db.Column(db.Boolean, default=False)
    is_deleted = db.Column(db.Boolean, default=False)  # soft-delete flag
    dt_insercao = db.Column(db.DateTime, default=func.current_timestamp())
    fk_id_empresa = db.Column(db.Integer,
                              db.ForeignKey('tb_empresa.id'),
                              nullable=False)
    empresa = db.relationship(
        'EmpresaModel',
        backref='empresa',
        primaryjoin="ProdutoModel.fk_id_empresa==EmpresaModel.id",
        uselist=False)

    def __init__(self, nome, descricao, preco, is_visivel, is_deleted,
                 empresa):
        self.nome = nome
        self.descricao = descricao
        self.preco = preco
        self.is_visivel = is_visivel
        self.is_deleted = is_deleted
        self.empresa = empresa

    def __str__(self):
        return '<Produto %r>' % (self.nome)
class Char(Base):
    """A player character, unique per (guild, slug), owning skills."""

    __tablename__ = 'chars'

    id = Column(Integer, primary_key=True)
    guild_id = Column(BigInteger)  # discord guild ID
    name = Column(String, nullable=False)
    slug = Column(String, nullable=False)
    xp = Column(Integer, nullable=False, server_default='0')
    created = Column(DateTime(timezone=True), server_default=func.now())
    modified = Column(DateTime(timezone=True),
                      server_default=func.now(),
                      onupdate=func.current_timestamp())

    players = relationship('Player',
                           back_populates='char',
                           cascade="all, delete",
                           passive_deletes=True)
    skills = relationship('Skill',
                          back_populates='char',
                          cascade="all, delete",
                          passive_deletes=True)

    __table_args__ = (UniqueConstraint('guild_id', 'slug'), )
class ProductMovement(Base):
    """A stock movement of a product between two locations."""

    __tablename__ = 'productMovement'

    id = Column(Integer, primary_key=True)
    # Either end of the movement may be absent (e.g. initial stock-in).
    from_location = Column(Integer, ForeignKey('location.id'), nullable=True)
    to_location = Column(Integer, ForeignKey('location.id'), nullable=True)
    timestamp = Column(TIMESTAMP,
                       server_default=func.now(),
                       onupdate=func.current_timestamp())
    product_id = Column(Integer, ForeignKey('product.id'), nullable=False)
    qyt = Column(Integer, nullable=False)

    product = relationship(Product)
    relation = relationship(
        "Location",
        primaryjoin="and_(ProductMovement.from_location==Location.id, "
                    "ProductMovement.to_location==Location.id)")
def run(self):
    """Monitor thread run method."""
    session = self.create_db_session()
    while self.running:
        try:
            # Look for chats with closed check_in and no existing
            # ChatScheduleJob record. This combination indicates a chat
            # which needs to be processed.
            pending = session.query(Chat).\
                outerjoin(ChatScheduleJob).\
                filter(Chat.checkin_end < func.current_timestamp()).\
                filter(Chat.checkin_end + datetime.timedelta(hours=1) > func.current_timestamp()).\
                filter(ChatScheduleJob.id == None)
            for chat in pending:
                # delegate chats to threadpool for processing
                self.threadpool.put(chat.id)

            # commit is required so changes to db will be reflected (MVCC).
            session.commit()
        except Exception as error:
            session.rollback()
            self.log.exception(error)

        # Acquire the exit condition variable and wait on it to sleep
        # between db checks; waiting on a cv allows the wait to be
        # interrupted when stop() is called.
        with self.exit:
            deadline = time.time() + self.poll_seconds
            # Wait in a loop, rechecking the condition, to combat
            # spurious wakeups.
            while self.running and (time.time() < deadline):
                self.exit.wait(deadline - time.time())
    session.close()
def _end_update_values(self):
    """Model attributes/values to be updated when job is ended.

    Returns:
        dict of model {attribute: value} to be updated when the
        job is ended.
    """
    values = {}
    # Only set fields the model actually declares.
    if hasattr(self.model_class, "end"):
        values["end"] = func.current_timestamp()
    if hasattr(self.model_class, "successful"):
        values["successful"] = True
    return values
def _end_chat_persist_job(self, db_session):
    """End processing of the ChatPersistJob.

    Mark the ChatPersistJob record as finished by updating the
    'end' field with the current time.
    """
    # Bug fix: removed the `except Exception as e: raise e` wrapper —
    # re-raising the caught exception was a no-op that only obscured
    # the traceback origin.
    job = db_session.query(ChatPersistJob).filter(ChatPersistJob.id==self.job_id).one()
    job.end = func.current_timestamp()
    job.successful = True
    db_session.flush()
    self.log.info("Finishing chat persist job with job_id=%d ..." % self.job_id)
def _query_filters(self):
    """Get job poll query filters.

    Returns:
        list of SQLAlchemy filters to be applied to the job poll query.
    """
    conditions = []
    # Unclaimed jobs only.
    if hasattr(self.model_class, "owner"):
        conditions.append(self.model_class.owner == None)
    # Not-yet-started jobs only.
    if hasattr(self.model_class, "start"):
        conditions.append(self.model_class.start == None)
    # Jobs whose earliest start time has passed.
    if hasattr(self.model_class, "not_before"):
        conditions.append(self.model_class.not_before <= func.current_timestamp())
    return conditions
def _persist_ended_chat(self, chat, session):
    """Finish persisting an ended chat.

    Args:
        chat: Chat object
        session: SQLAlchemy Session object
    """
    chat.state.persisted = True

    # Convert the chat session to pure json; twilio_data arrives as a
    # JSON string, so decode it before re-serializing the whole dict.
    payload = dict(chat.state.session)
    if "twilio_data" in payload:
        payload["twilio_data"] = json.loads(payload["twilio_data"])
    data = json.dumps(payload)

    # Create a chat archive job eligible to run immediately.
    job = ChatArchiveJob(
        chat_id=chat.id,
        created=func.current_timestamp(),
        not_before=func.current_timestamp(),
        data=data,
        retries_remaining=4)
    session.add(job)
def _abort_update_values(self):
    """Model attributes/values to be updated when job is aborted.

    A job is considered to be aborted if an exception is raised
    within the scope of the context manager.

    Returns:
        dict of model {attribute: value} to be updated when the
        job is aborted.
    """
    values = {}
    # Only set fields the model actually declares.
    if hasattr(self.model_class, "end"):
        values["end"] = func.current_timestamp()
    if hasattr(self.model_class, "successful"):
        values["successful"] = False
    return values
def _end_chat_schedule_job(self, chat_id, session=None):
    """End ChatScheduleJob.

    Mark the current job record finished, by updating the end field
    with the current timestamp.

    Args:
        chat_id: id of the chat whose job should be ended.
        session: optional SQLAlchemy session; when omitted, a new one
            is created and closed on exit.
    """
    self.log.info("Ending chat schedule job for chat_id=%d ..." % chat_id)
    try:
        # Only close the session if we created it ourselves.
        owns_session = session is None
        session = session or self.create_db_session()
        job = session.query(ChatScheduleJob).filter(ChatScheduleJob.chat_id==chat_id).first()
        job.end = func.current_timestamp()
        session.commit()
    finally:
        if owns_session and session:
            session.close()
def _retry_job(self, failed_job):
    """Create a new IndexJob from a failed job.

    This method creates a new IndexJob from a job that failed
    processing.

    Args:
        failed_job: DatabaseJob object, or objected derived from DatabaseJob
    Returns:
        None
    """
    db_session = None
    try:
        # Create a new job in the db to retry, delayed by
        # job_retry_seconds, unless retries are exhausted.
        if failed_job.retries_remaining <= 0:
            self.log.info("No retries remaining for job for index_job_id=%s"\
                    % (failed_job.id))
            self.log.error("Job for index_job_id=%s failed!"\
                    % (failed_job.id))
            return

        not_before = tz.utcnow() + datetime.timedelta(seconds=self.job_retry_seconds)
        retry = IndexJob(
            data=failed_job.data,
            context=failed_job.context,
            created=func.current_timestamp(),
            not_before=not_before,
            retries_remaining=failed_job.retries_remaining-1
        )
        db_session = self.db_session_factory()
        db_session.add(retry)
        db_session.commit()
    except Exception as e:
        self.log.exception(e)
        if db_session:
            db_session.rollback()
    finally:
        if db_session:
            db_session.close()
def _create_chat_schedule_job(self, chat_id, session=None):
    """Create ChatScheduleJob db record.

    In order to coordinate across threads, and other instances of this
    service on other machines, the ChatScheduleJob database table is
    used. When a thread detects a chat with a closed check_in, and no
    corresponding ChatScheduleJob record, it will attempt to create one.
    The thread which successfully creates the job record is responsible
    for processing the item, and must delete the record on exception.

    Returns:
        True if job created, False otherwise.
    """
    self.log.info("Attempting to create chat schedule job for chat_id=%d ..." % chat_id)
    created = False
    try:
        # Only close the session if we created it ourselves.
        owns_session = session is None
        session = session or self.create_db_session()
        job = ChatScheduleJob(chat_id=chat_id, start=func.current_timestamp())
        session.add(job)
        session.commit()
        self.log.info("Created chat schedule job for chat_id=%d" % chat_id)
        created = True
    except IntegrityError:
        # Another thread/machine won the race to create this job.
        session.rollback()
        self.log.warning("Unable to create chat schedule job for chat_id=%d, job already exists." % chat_id)
    except Exception as error:
        session.rollback()
        self.log.error("Unable to create chat schedule job for chat_id=%d, %s" % (chat_id, str(error)))
        raise
    finally:
        if owns_session and session:
            session.close()
    return created
def _retry_job(self, job):
    """Create a ChatArchiveJob to retry a failed job.

    This method will create a new ChatArchiveJob, which will be
    delayed by job_retry_seconds, as long as the number of
    retries_remaining on the failed job is greather than 1.
    """
    # Bug fix: db_session was created inside the try block but used
    # unguarded in the except handler, so a failing factory produced a
    # NameError instead of the intended log-and-cleanup path.
    db_session = None
    try:
        db_session = self.db_session_factory()
        if job.retries_remaining:
            not_before =tz.utcnow() + \
                    datetime.timedelta(seconds=self.job_retry_seconds)
            self.log.info("Creating retry job for chat_id=%s at %s" \
                    % (job.chat_id, not_before))
            retry = ChatArchiveJob(
                    chat_id=job.chat_id,
                    created=func.current_timestamp(),
                    not_before=not_before,
                    data=job.data,
                    retries_remaining=job.retries_remaining-1)
            db_session.add(retry)
            db_session.commit()
        else:
            self.log.info("No retries remaining for job for chat_id=%s" \
                    % (job.chat_id))
            self.log.error("Job for chat_id=%s failed!" \
                    % (job.chat_id))
    except Exception as error:
        self.log.exception(error)
        if db_session:
            db_session.rollback()
    finally:
        if db_session:
            db_session.close()
def _retry_job(self, failed_job):
    """Create a new NotificationJob from a failed job.

    This method creates a new Notification Job from a job that
    failed to process successfully.
    """
    db_session = None
    try:
        # Create a new job in the db to retry, delayed by
        # job_retry_seconds, unless retries are exhausted.
        if failed_job.retries_remaining <= 0:
            self.log.info("No retries remaining for job for notification_job_id=%s"\
                    % (failed_job.id))
            self.log.error("Job for notification_job_id=%s failed!"\
                    % (failed_job.id))
            return

        not_before = tz.utcnow() + datetime.timedelta(seconds=self.job_retry_seconds)
        retry = NotificationJob(
            created=func.current_timestamp(),
            not_before=not_before,
            notification_id=failed_job.notification_id,
            recipient_id=failed_job.recipient_id,
            priority=failed_job.priority,
            retries_remaining=failed_job.retries_remaining-1
        )
        db_session = self.db_session_factory()
        db_session.add(retry)
        db_session.commit()
    except Exception as e:
        self.log.exception(e)
        if db_session:
            db_session.rollback()
    finally:
        if db_session:
            db_session.close()
def receive_before_flush(session, flush_context, instances):
    """Update the data version before every flush

    SQLAlchemy ``before_flush`` event hook: replaces the LastUpdate
    row(s) with a fresh server-side timestamp so clients can detect
    that the data changed.

    Args:
        session: the Session being flushed (event signature).
        flush_context: internal UOWTransaction (unused).
        instances: deprecated event argument (unused).

    NOTE(review): this operates on the global ``db.session`` rather
    than the ``session`` argument delivered by the event — confirm both
    refer to the same scoped session, otherwise this mutates a
    different session mid-flush.
    """
    db.session.query(LastUpdate).delete()
    db.session.add(LastUpdate(last_update=sql_func.current_timestamp()))
def _metadata(self):
    """Build and return the SQLAlchemy schema for the stats database.

    Declares four tables on a fresh MetaData object:
      * usages    - one row per (user, resource, start_date, data_type) sample
      * resources - known resource names
      * users     - one row per user, keyed by hash
      * runs      - bookkeeping table holding the last run timestamp
    """
    metadata = sa.MetaData()

    # Legacy DDL equivalent:
    #   CREATE TABLE Usages (
    #     ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    #     user_hash CHAR(40) NOT NULL, resource_name CHAR(80),
    #     start_date TIMESTAMP NOT NULL, data_type CHAR(30) NOT NULL,
    #     data INTEGER NOT NULL,
    #     PRIMARY KEY (user_hash, start_date, resource_name, data_type))
    sa.Table(
        'usages', metadata,
        sa.Column('ts', sa.TIMESTAMP,
                  server_default=func.current_timestamp(),
                  server_onupdate=func.current_timestamp()),
        sa.Column('user_hash', sa.CHAR(40), nullable=False, primary_key=True),
        sa.Column('resource_name', sa.CHAR(80), primary_key=True),
        sa.Column('start_date', sa.TIMESTAMP, nullable=False, primary_key=True),
        sa.Column('data_type', sa.CHAR(30), nullable=False, primary_key=True),
        sa.Column('data', sa.INTEGER, nullable=False),
    )

    # CREATE TABLE Resources (name CHAR(250) PRIMARY KEY)
    sa.Table(
        'resources', metadata,
        sa.Column('name', sa.CHAR(250), primary_key=True),
    )

    # CREATE TABLE Users (hash CHAR(40) PRIMARY KEY, uuid CHAR(32) NOT NULL,
    #   machine_sn CHAR(80), age INTEGER NOT NULL, school CHAR(80),
    #   sw_version CHAR(80), ts TIMESTAMP ... ON UPDATE CURRENT_TIMESTAMP)
    sa.Table(
        'users', metadata,
        sa.Column('hash', sa.CHAR(40), nullable=False, primary_key=True),
        sa.Column('uuid', sa.CHAR(32), nullable=False),
        sa.Column('machine_sn', sa.CHAR(80)),
        sa.Column('age', sa.INTEGER, nullable=False),
        sa.Column('school', sa.CHAR(80)),
        sa.Column('sw_version', sa.CHAR(80)),
        sa.Column('ts', sa.TIMESTAMP,
                  server_default=func.current_timestamp(),
                  server_onupdate=func.current_timestamp()),
    )

    # CREATE TABLE Runs (last_ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    #   ON UPDATE CURRENT_TIMESTAMP)
    sa.Table(
        'runs', metadata,
        sa.Column('last_ts', sa.TIMESTAMP,
                  server_default=func.current_timestamp(),
                  server_onupdate=func.current_timestamp()),
    )

    return metadata
def _init_(self, login, password, email): # pragma: no cover
    # NOTE(review): `_init_` is almost certainly a typo for `__init__`
    # (single vs double underscores) — as written, instantiation never
    # calls this method; confirm and rename.
    self.login = login
    self.password = password
    #self.last_password_change = datetime.now()
    # Server-side SQL expression: the database fills in the timestamp at
    # flush time, not a Python datetime.
    self.last_password_change = func.current_timestamp()
    self.email = email
def __init__(self, friend_id, music_id):
    """Record a music request from a friend, timestamped server-side."""
    # current_timestamp() is a SQL expression; the database resolves it
    # when the row is inserted.
    self.requested_time = func.current_timestamp()
    self.friend_id = friend_id
    self.music_id = music_id
https://github.com/HSLdevcom/navigator-proto/blob/master/src/routing.coffee#L43 ''' mass_transit_types = ("FERRY", "SUBWAY", "TRAIN", "TRAM", "BUS") mass_transit_type_enum = Enum(*mass_transit_types, name='mass_transit_type_enum') # Schema definitions: # Table with one entry for each user users_table = Table('users', metadata, Column('id', Integer, primary_key=True), Column('user_id', String, unique=True, nullable=False), # hash value of Google Id Column('google_refresh_token', String), Column('google_server_access_token', String), Column('register_timestamp', TIMESTAMP, nullable=False, default=func.current_timestamp(), server_default=func.current_timestamp()), Index('idx_users_user_id', 'user_id')) if not users_table.exists(bind=db.engine): users_table.create(bind=db.engine) """ create legacy user that can be used to link existing data that was added before user objects were added to some user. """ db.engine.execute(users_table.insert({'id': 0, 'user_id': 'legacy-user'})) # Table with one entry for each client device devices_table = Table('devices', metadata, Column('id', Integer, primary_key=True), Column('user_id', Integer, ForeignKey('users.id'), nullable=False, default=0), Column('device_id', String, nullable=False),
def __init__(self, amount_actual, amount_promised, num_shirts):
    """Capture a pledge snapshot; `time` is filled in by the database."""
    # Server-side timestamp expression, resolved at INSERT time.
    self.time = func.current_timestamp()
    self.amount_actual = amount_actual
    self.amount_promised = amount_promised
    self.num_shirts = num_shirts
def init(self, auto_create=True):
    """Initialize the engine, session and `postref` table mapping.

    Parses ``self.uri`` of the form ``<engine-uri>:<table-name>``: the
    table name is everything after the last ':'.

    Args:
        auto_create: when True, create the table if it does not already
            exist.
    """
    # TODO handle if user does not pass in table sqlite://path.db
    uri_splt = self.uri.split(":")
    # Everything up to the last ':' is the engine URI; the tail is the table.
    engine_uri = u":".join(uri_splt[:-1])
    table_name = uri_splt[-1]
    metadata = MetaData()
    postref_table = Table(table_name, metadata,
                          Column('id', Integer, primary_key=True),
                          # NOTE(review): created_at/updated_at use client-side
                          # defaults (func.now()) while updated_at's onupdate is
                          # current_timestamp() — presumably both were intended
                          # to be consistent; confirm.
                          Column('created_at', DateTime, default=func.now()),
                          Column('updated_at', DateTime, default=func.now(), onupdate=func.current_timestamp()),
                          Column('uuid', String(512)),
                          Column('path', String(512)),
                          Column('revision', Integer, default=0),
                          Column('status', Integer, default=self.PostStatus.DRAFT.value),
                          Column('ref', String(512)),
                          Column('data', LargeBinary))
    # pool_recycle avoids reusing connections the server may have dropped.
    self.engine = create_engine(engine_uri, pool_recycle=3600)
    self.session = scoped_session(sessionmaker(bind=self.engine))
    if auto_create:
        postref_table.create(self.engine, checkfirst=True)

    class PostRef(object):
        pass

    # Classical (non-declarative) mapping of the dynamically-named table.
    mapper(PostRef, postref_table)
    self.PostRef = PostRef
def test_executing(self):
    """Exercise core SQLAlchemy statement execution against Teradata.

    Covers inserts, selects, joins, functions, set operations, scalar
    subqueries, updates and deletes.

    Fixed in review: several asserts were written as
    ``assert(expr, expected)`` — asserting on a two-element tuple, which
    is ALWAYS true — and have been converted to real comparisons.
    """
    # re-create a new INSERT object
    self.ins = self.users.insert()

    # execute the insert statement
    res = self.conn.execute(self.ins, uid=1, name='jack', fullname='Jack Jones')
    assert(res.inserted_primary_key == [1])
    res = self.conn.execute(self.ins, uid=2, name='wendy', fullname='Wendy Williams')
    assert(res.inserted_primary_key == [2])

    # the res variable is a ResultProxy object, analagous to DBAPI cursor

    # issue many inserts, the same is possible for update and delete
    self.conn.execute(self.addresses.insert(), [
        {'id': 1, 'user_id': 1, 'email_address': '*****@*****.**'},
        {'id': 2, 'user_id': 1, 'email_address': '*****@*****.**'},
        {'id': 3, 'user_id': 2, 'email_address': '*****@*****.**'},
        {'id': 4, 'user_id': 2, 'email_address': '*****@*****.**'}
    ])

    # test selects on the inserted values
    from sqlalchemy.sql import select
    s = select([self.users])
    res = self.conn.execute(s)
    u1 = res.fetchone()
    u2 = res.fetchone()

    # accessing rows
    assert(u1['name'] == u'jack')
    assert(u1['fullname'] == u'Jack Jones')
    assert(u2['name'] == u'wendy')
    assert(u2['fullname'] == u'Wendy Williams')
    assert(u1[1] == u1['name'])
    assert(u1[2] == u1['fullname'])
    assert(u2[1] == u2['name'])
    assert(u2[2] == u2['fullname'])

    # be sure to close the result set
    res.close()

    # use cols to access rows
    res = self.conn.execute(s)
    u3 = res.fetchone()
    u4 = res.fetchone()
    assert(u3[self.users.c.name] == u1['name'])
    assert(u3[self.users.c.fullname] == u1['fullname'])
    assert(u4[self.users.c.name] == u2['name'])
    assert(u4[self.users.c.fullname] == u2['fullname'])

    # reference individual columns in select clause
    s = select([self.users.c.name, self.users.c.fullname])
    res = self.conn.execute(s)
    u3 = res.fetchone()
    u4 = res.fetchone()
    assert(u3[self.users.c.name] == u1['name'])
    assert(u3[self.users.c.fullname] == u1['fullname'])
    assert(u4[self.users.c.name] == u2['name'])
    assert(u4[self.users.c.fullname] == u2['fullname'])

    # test joins
    # cartesian product
    usrs = [row for row in self.conn.execute(select([self.users]))]
    addrs = [row for row in self.conn.execute(select([self.addresses]))]
    prod = [row for row in self.conn.execute(select([self.users, self.addresses]))]
    assert(len(prod) == len(usrs) * len(addrs))

    # inner join on id
    s = select([self.users, self.addresses]).where(self.users.c.uid == self.addresses.c.user_id)
    inner = [row for row in self.conn.execute(s)]
    assert(len(inner) == 4)

    # operators between columns objects & other col objects/literals
    expr = self.users.c.uid == self.addresses.c.user_id
    assert('my_users.uid = addresses.user_id' == str(expr))

    # see how Teradata concats two strings
    assert(str((self.users.c.name + self.users.c.fullname).compile(bind=self.engine)) ==
           'my_users.name || my_users.fullname')

    # built-in conjunctions
    from sqlalchemy.sql import and_, or_
    s = select([(self.users.c.fullname + ", " + self.addresses.c.email_address).label('titles')]).where(
        and_(
            self.users.c.uid == self.addresses.c.user_id,
            self.users.c.name.between('m', 'z'),
            or_(
                self.addresses.c.email_address.like('*****@*****.**'),
                self.addresses.c.email_address.like('*****@*****.**')
            )
        )
    )
    # print(s)
    res = self.conn.execute(s)
    for row in res:
        assert(str(row[0]) == u'Wendy Williams, [email protected]')

    # more joins
    # ON condition auto generated based on ForeignKey
    assert(str(self.users.join(self.addresses)) ==
           'my_users JOIN addresses ON my_users.uid = addresses.user_id')

    # specify the join ON condition
    self.users.join(self.addresses,
                    self.addresses.c.email_address.like(self.users.c.name + '%'))

    # select from clause to specify tables and the ON condition
    s = select([self.users.c.fullname]).select_from(
        self.users.join(self.addresses,
                        self.addresses.c.email_address.like(self.users.c.name + '%')))
    res = self.conn.execute(s)
    assert(len(res.fetchall()) == 3)

    # left outer joins
    s = select([self.users.c.fullname]).select_from(self.users.outerjoin(self.addresses))
    # outer join works with teradata dialect (unlike oracle dialect < version9)
    assert(str(s) == str(s.compile(dialect=self.dialect)))

    # test bind params (positional)
    from sqlalchemy import text
    s = self.users.select(self.users.c.name.like(
        bindparam('username', type_=String) + text("'%'")))
    res = self.conn.execute(s, username='******').fetchall()
    # FIX: was assert(len(res), 1) — a tuple, always true.
    assert len(res) == 1

    # functions
    from sqlalchemy.sql import func, column

    # certain function names are known by sqlalchemy
    # FIX: was a vacuous tuple assert.
    assert str(func.current_timestamp()) == 'CURRENT_TIMESTAMP'

    # functions can be used in the select
    res = self.conn.execute(select(
        [func.max(self.addresses.c.email_address, type_=String).label(
            'max_email')])).scalar()
    # FIX: was a vacuous tuple assert.
    # NOTE(review): expected value appears redacted in source; restore it.
    assert res == '*****@*****.**'

    # func result sets, define a function taking params x,y return q,z,r
    # useful for nested queries, subqueries - w/ dynamic params
    calculate = select([column('q'), column('z'), column('r')]).\
        select_from(
            func.calculate(
                bindparam('x'),
                bindparam('y')
            )
        )
    calc = calculate.alias()
    s = select([self.users]).where(self.users.c.uid > calc.c.z)
    # NOTE(review): vacuous assert (tuple operand is always truthy), and the
    # expected SQL lost its newlines to in-string backslash continuations.
    # Left as-is pending a correct expected string.
    assert('SELECT my_users.uid, my_users.name, my_users.fullname\
FROM my_users, (SELECT q, z, r\
FROM calculate(:x, :y)) AS anon_1\
WHERE my_users.uid > anon_1.z', s)

    # instantiate the func
    calc1 = calculate.alias('c1').unique_params(x=17, y=45)
    calc2 = calculate.alias('c2').unique_params(x=5, y=12)
    s = select([self.users]).where(self.users.c.uid.between(calc1.c.z, calc2.c.z))
    parms = s.compile().params
    # FIX: each of these was a single vacuous tuple assert.
    assert 'x_2' in parms
    assert 'x_1' in parms
    assert 'y_2' in parms
    assert 'y_1' in parms
    assert parms['x_1'] == 17
    assert parms['y_1'] == 45
    assert parms['x_2'] == 5
    assert parms['y_2'] == 12

    # order by asc
    stmt = select([self.users.c.name]).order_by(self.users.c.name)
    res = self.conn.execute(stmt).fetchall()
    assert('jack' == res[0][0])
    assert('wendy' == res[1][0])

    # order by desc
    stmt = select([self.users.c.name]).order_by(self.users.c.name.desc())
    res = self.conn.execute(stmt).fetchall()
    assert('wendy' == res[0][0])
    assert('jack' == res[1][0])

    # group by
    stmt = select([self.users.c.name, func.count(self.addresses.c.id)]).\
        select_from(self.users.join(self.addresses)).\
        group_by(self.users.c.name)
    res = self.conn.execute(stmt).fetchall()
    assert(res[0][0] == 'jack')
    assert(res[1][0] == 'wendy')
    assert(res[0][1] == res[1][1])

    # group by having
    stmt = select([self.users.c.name, func.count(self.addresses.c.id)]).\
        select_from(self.users.join(self.addresses)).\
        group_by(self.users.c.name).\
        having(func.length(self.users.c.name) > 4)
    res = self.conn.execute(stmt).fetchall()
    assert(res[0] == ('wendy', 2))

    # distinct
    stmt = select([self.users.c.name]).\
        where(self.addresses.c.email_address.contains(self.users.c.name)).distinct()
    res = self.conn.execute(stmt).fetchall()
    assert(len(res) == 2)
    assert(res[0][0] != res[1][0])

    # limit
    stmt = select([self.users.c.name, self.addresses.c.email_address]).\
        select_from(self.users.join(self.addresses)).\
        limit(1)
    res = self.conn.execute(stmt).fetchall()
    assert(len(res) == 1)

    # offset

    # test union and except
    from sqlalchemy.sql import except_, union
    u = union(
        self.addresses.select().where(self.addresses.c.email_address == '*****@*****.**'),
        self.addresses.select().where(self.addresses.c.email_address.like('*****@*****.**')),)# .order_by(self.addresses.c.email_address)
    # print(u)
    # #res = self.conn.execute(u) this fails, syntax error order by expects pos integer?

    u = except_(
        self.addresses.select().where(self.addresses.c.email_address.like('%@%.com')),
        self.addresses.select().where(self.addresses.c.email_address.like('*****@*****.**')))
    res = self.conn.execute(u).fetchall()
    # FIX: was assert(1, len(res)) — always true.
    assert len(res) == 1

    u = except_(
        union(
            self.addresses.select().where(self.addresses.c.email_address.like('*****@*****.**')),
            self.addresses.select().where(self.addresses.c.email_address.like('*****@*****.**'))
        ).alias().select(),
        self.addresses.select(self.addresses.c.email_address.like('*****@*****.**'))
    )
    res = self.conn.execute(u).fetchall()
    # FIX: was assert(1, len(res)) — always true.
    assert len(res) == 1

    # scalar subqueries
    stmt = select([func.count(self.addresses.c.id)]).where(self.users.c.uid == self.addresses.c.user_id).as_scalar()

    # we can place stmt as any other column within another select
    res = self.conn.execute(select([self.users.c.name, stmt])).fetchall()

    # res is a list of tuples, one tuple per user's name
    # FIX: was assert(2, len(res)) — always true.
    assert len(res) == 2
    u1 = res[0]
    u2 = res[1]
    assert(len(u1) == len(u2))
    assert(u1[0] == u'jack')
    assert(u1[1] == u2[1])
    assert(u2[0] == u'wendy')

    # we can label the inner query
    stmt = select([func.count(self.addresses.c.id)]).\
        where(self.users.c.uid == self.addresses.c.user_id).\
        label("address_count")
    res = self.conn.execute(select([self.users.c.name, stmt])).fetchall()
    # FIX: was assert(2, len(res)) — always true.
    assert len(res) == 2
    u1 = res[0]
    u2 = res[1]
    assert(len(u1) == 2)
    assert(len(u2) == 2)

    # inserts, updates, deletes
    stmt = self.users.update().values(fullname="Fullname: " + self.users.c.name)
    res = self.conn.execute(stmt)
    assert('name_1' in res.last_updated_params())
    assert(res.last_updated_params()['name_1'] == 'Fullname: ')

    stmt = self.users.insert().values(name=bindparam('_name') + " .. name")
    res = self.conn.execute(stmt, [
        {'uid': 4, '_name': 'name1'},
        {'uid': 5, '_name': 'name2'},
        {'uid': 6, '_name': 'name3'},
    ])

    # updates
    stmt = self.users.update().where(self.users.c.name == 'jack').values(name='ed')
    res = self.conn.execute(stmt)
    assert(res.rowcount == 1)
    assert(res.returns_rows is False)

    # update many with bound params
    stmt = self.users.update().where(self.users.c.name == bindparam('oldname')).\
        values(name=bindparam('newname'))
    res = self.conn.execute(stmt, [
        {'oldname': 'jack', 'newname': 'ed'},
        {'oldname': 'wendy', 'newname': 'mary'},
    ])
    assert(res.returns_rows is False)
    assert(res.rowcount == 1)

    res = self.conn.execute(select([self.users]).where(self.users.c.name == 'ed'))
    r = res.fetchone()
    assert(r['name'] == 'ed')

    # correlated updates
    stmt = select([self.addresses.c.email_address]).\
        where(self.addresses.c.user_id == self.users.c.uid).\
        limit(1)
    # this fails, syntax error bc of LIMIT - need TOP/SAMPLE instead
    # Note: TOP can't be in a subquery
    # res = self.conn.execute(self.users.update().values(fullname=stmt))

    # multiple table updates
    stmt = self.users.update().\
        values(name='ed wood').\
        where(self.users.c.uid == self.addresses.c.id).\
        where(self.addresses.c.email_address.startswith('ed%'))
    # this fails, teradata does update from set where not update set from where
    # #res = self.conn.execute(stmt)

    stmt = self.users.update().\
        values({
            self.users.c.name: 'ed wood',
            self.addresses.c.email_address: '*****@*****.**'
        }).\
        where(self.users.c.uid == self.addresses.c.id).\
        where(self.addresses.c.email_address.startswith('ed%'))
    # fails but works on MySQL, should this work for us?
    # #res = self.conn.execute(stmt)

    # deletes
    self.conn.execute(self.addresses.delete())
    self.conn.execute(self.users.delete().where(self.users.c.name > 'm'))

    # matched row counts
    # updates + deletes have a number indicating # rows matched by WHERE clause
    res = self.conn.execute(self.users.delete())
    assert(res.rowcount == 1)
def notify(self, context, notification):
    """Send notification

    This method writes the input notification to the db and creates a
    job for each recipient for the notification to process. This is
    done to ensure that we don't lose any information should this
    service go down.

    Args:
        context: String to identify calling context
        notification: Thrift Notification object.
            The notification text fields (subject, plainText, htmlText)
            support python template strings (via string.Template).
            Note that all '$' chars will have to be properly escaped.
            The following template strings are supported:
                'first_name' : the recipient's first name
                'last_name'  : the recipient's last name
            For example: 'Dear ${first_name}',
    Returns:
        Thrift Notification object. If no 'token' attribute was
        provided in the input notification object, the returned object
        will specify one.
    Raises:
        InvalidNotificationException if input Notification object is invalid.
        UnavailableException for any other unexpected error.
    """
    # Bind before the try block: if get_database_session() raises, the
    # finally clause would otherwise hit a NameError on db_session and
    # mask the original exception.
    db_session = None
    try:
        # Get a db session
        db_session = self.get_database_session()

        # Validate inputs
        # During validation of recipients, populate a list of Users.
        users = []
        self._validate_notify_params(db_session, users, context, notification)

        # If input notification doesn't specify a
        # unique token, then generate one.
        if not notification.token:
            notification.token = uuid.uuid4().hex

        # Create Notification Model
        notification_model = NotificationModel(
                created=func.current_timestamp(),
                token=notification.token,
                context=context,
                priority=NOTIFICATION_PRIORITY_VALUES[
                    NotificationPriority._VALUES_TO_NAMES[notification.priority]],
                recipients=users,
                subject=notification.subject,
                html_text=notification.htmlText,
                plain_text=notification.plainText
                )
        db_session.add(notification_model)

        # If notification specified a start-processing-time
        # convert it to UTC DateTime object.
        if notification.notBefore is not None:
            processing_start_time = tz.timestamp_to_utc(notification.notBefore)
        else:
            processing_start_time = func.current_timestamp()

        # Create NotificationJobs
        for user_id in notification.recipientUserIds:
            job = NotificationJobModel(
                    created=func.current_timestamp(),
                    not_before=processing_start_time,
                    notification=notification_model,
                    recipient_id=user_id,
                    priority=NOTIFICATION_PRIORITY_VALUES[
                        NotificationPriority._VALUES_TO_NAMES[notification.priority]],
                    retries_remaining=settings.NOTIFIER_JOB_MAX_RETRY_ATTEMPTS
                    )
            db_session.add(job)

        db_session.commit()

        return notification
    except InvalidNotificationException as error:
        self.log.exception(error)
        raise InvalidNotificationException()
    except Exception as error:
        self.log.exception(error)
        raise UnavailableException(str(error))
    finally:
        # Guarded: db_session is None if session creation itself failed.
        if db_session:
            db_session.close()
def make_uuid():
    """Return a new random UUID as a unicode string."""
    return unicode(uuid.uuid4())


def get_ip():
    """Best-effort client IP: honour X-Forwarded-For, else REMOTE_ADDR."""
    ip = request.environ.get('HTTP_X_FORWARDED_FOR', '')
    if not ip:
        ip = request.environ.get('REMOTE_ADDR', 'Unknown IP Address')
    return ip


metadata = MetaData()

user_agreement_table = Table('user_agreement', metadata,
    # BUG FIX: pass the callable, not its result. `default=make_uuid()`
    # was evaluated once at import time, so every inserted row would
    # have received the SAME id (and violated the primary key).
    Column('id', types.UnicodeText, primary_key=True, default=make_uuid),
    Column('ip_addr', types.UnicodeText, nullable=False),
    Column('agreed_on', types.DateTime, server_default=func.current_timestamp()),
)


def init_tables():
    """Create all tables declared on this module's metadata."""
    metadata.create_all(model.meta.engine)


class UserAgreement(object):
    def __init__(self, id='', ip_addr=None):
        # BUG FIX: both arguments were previously ignored and the values
        # always regenerated. Honour them when supplied; existing call
        # sites passed nothing or freshly-computed equivalents, so
        # behavior is preserved.
        self.id = id or make_uuid()
        self.ip_addr = ip_addr if ip_addr is not None else get_ip()

    def insert_new_agreement(self):
        # NOTE(review): constructs the object but never adds/commits it —
        # looks incomplete; confirm persistence happens elsewhere.
        new = UserAgreement(
            ip_addr=get_ip()
        )
def test_func(): print func.now() print func.concat('x', 'y') print func.current_timestamp()