class Bet(Base):
    """A user's bet, linking the odds at placement time and at settlement."""
    __tablename__ = 'bets'

    # Status codes stored in the `status` column.
    PENDING = 'P'
    STARTED = 'S'
    SETTLED = 'T'

    # Composite primary key: (id, userid).
    id = Column(Integer, primary_key=True)
    userid = Column(Integer, ForeignKey('users.id'), nullable=False, primary_key=True)
    # Odds rows live in the `pinn` schema.
    starting_oddsid = Column(INTEGER(unsigned=True), ForeignKey('pinn.odds.id'))
    settled_oddsid = Column(INTEGER(unsigned=True), ForeignKey('pinn.odds.id'))
    stake = Column(Numeric(8, 2), nullable=False)
    duration = Column(Integer)
    status = Column(String(2), nullable=False)
    placedat = Column(DateTime, nullable=False)
    settledat = Column(DateTime)
    # Row audit timestamps (UTC).
    reccreatedat = Column(DateTime, default=func.utc_timestamp())
    recupdatedat = Column(DateTime, default=func.utc_timestamp(), onupdate=func.utc_timestamp())

    user = relationship(User)
    # Two relationships to the same Odds table, disambiguated via foreign_keys.
    starting_odds_inst = relationship(Odds, foreign_keys=starting_oddsid,
                                      backref=backref('bets_starting', order_by=id))
    settled_odds_inst = relationship(Odds, foreign_keys=settled_oddsid,
                                     backref=backref('bets_settled', order_by=id))
class BaseMixin:
    """Mixin providing a surrogate primary key and UTC audit timestamps."""
    id = Column(Integer, primary_key=True, index=True)
    created_at = Column(DateTime, nullable=False, default=func.utc_timestamp())
    updated_at = Column(DateTime, nullable=False, default=func.utc_timestamp(), onupdate=func.utc_timestamp())
class Post(Base):
    """A Reddit post ingested for repost detection (table `reddit_post`)."""

    def __lt__(self, other):
        # NOTE(review): image_hash is not a mapped column on this class --
        # presumably assigned elsewhere before posts are sorted; confirm.
        return self.image_hash < other.image_hash

    def __repr__(self) -> str:
        return 'Post ID: {} - Type: {} - URL: {} - Source: {} - Created: {}'.format(
            self.post_id, self.post_type, self.url, self.ingested_from, self.created_at)

    # TODO - Move to_dict methods into JSON encoders
    __tablename__ = 'reddit_post'
    __table_args__ = (
        Index('ingest_source', 'created_at', 'ingested_from'),
        Index('ingest_graph', 'ingested_at', 'post_type', unique=False),
    )
    id = Column(Integer, primary_key=True)
    post_id = Column(String(100), nullable=False, unique=True)
    url = Column(String(2000, collation='utf8mb4_general_ci'), nullable=False)
    shortlink = Column(String(300))
    perma_link = Column(String(1000, collation='utf8mb4_general_ci'))
    post_type = Column(String(20))
    author = Column(String(100), nullable=False)
    selftext = Column(Text(75000, collation='utf8mb4_general_ci'))
    created_at = Column(DateTime)
    ingested_at = Column(DateTime, default=func.utc_timestamp())
    subreddit = Column(String(100), nullable=False)
    title = Column(String(1000, collation='utf8mb4_general_ci'), nullable=False)
    crosspost_parent = Column(String(200))
    # Perceptual hashes used for image matching.
    dhash_v = Column(String(64))
    dhash_h = Column(String(64))
    ahash = Column(String(64))
    checked_repost = Column(Boolean, default=False)
    crosspost_checked = Column(Boolean, default=False)
    last_deleted_check = Column(DateTime, default=func.utc_timestamp())
    url_hash = Column(String(32))  # Needed to index URLs for faster lookups
    ingested_from = Column(String(40))
    left_comment = Column(Boolean, default=False)
    bad_url = Column(Boolean, default=False)
    repost_count = Column(Integer, default=0)
    #fullname = Column(String(30))

    def to_dict(self):
        # NOTE(review): created_at is nullable, yet .timestamp() is called
        # unguarded -- raises AttributeError when unset; confirm callers.
        return {
            'post_id': self.post_id,
            'url': self.url,
            'shortlink': self.shortlink,
            'perma_link': self.perma_link,
            'title': self.title,
            'dhash_v': self.dhash_v,
            'dhash_h': self.dhash_h,
            'created_at': self.created_at.timestamp(),
            'author': self.author,
            'subreddit': self.subreddit
        }
class User(Base):
    """Application user with password hash and audit timestamps."""
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    username = Column(String(50), unique=True, nullable=False)
    pwhash = Column(String(120), nullable=False)
    reccreatedat = Column(DateTime, default=func.utc_timestamp())
    recupdatedat = Column(DateTime, default=func.utc_timestamp(), onupdate=func.utc_timestamp())

    def get_balance(self):
        """Current balance: sum of initial balances plus all transactions.

        Relies on the `init_balances` backref (see InitBalance) and a
        `transactions` relationship defined elsewhere.
        """
        return (sum(b.balance for b in self.init_balances)
                + sum(t.amount for t in self.transactions))
def user_schema(metadata):
    """Build and return the ``user`` table definition.

    :param metadata: SQLAlchemy MetaData the table is registered on
    :return: the constructed Table object
    """
    # NOTE(review): 'userr_id' looks like a typo for 'user_id', but renaming
    # it changes the DB schema -- confirm with migrations before fixing.
    customer = Table(
        'user', metadata,
        Column('userr_id', BIGINT, primary_key=True, nullable=False),
        Column('email', VARCHAR(length=255), nullable=False, unique=True),
        Column('password', VARCHAR(length=255), nullable=False, unique=False),
        Column('created_at', DATETIME(), nullable=False, default=func.utc_timestamp()),
        # NOTE(review): no onupdate here, so updated_at is only set on insert.
        Column('updated_at', DATETIME(), nullable=False, default=func.utc_timestamp()),
    )
    return customer
def sp_schema(metadata):
    """Build and return the ``sp_devices`` table definition.

    :param metadata: SQLAlchemy MetaData the table is registered on
    :return: the constructed Table object
    """
    # (Removed leftover debug print that carried a copy-pasted message
    # from user_schema.)
    sp = Table(
        'sp_devices', metadata,
        Column('id', VARCHAR(length=11), primary_key=True),
        Column('deviceid', VARCHAR(length=100)),
        Column('publickey', VARCHAR(length=500)),
        Column('created_at', DATETIME(), nullable=False, default=func.utc_timestamp()),
        # NOTE(review): no onupdate, so updated_at is only set on insert.
        Column('updated_at', DATETIME(), nullable=False, default=func.utc_timestamp()),
        Column('userid', VARCHAR(length=100)),
        Column('devicetype', VARCHAR(length=100)),
    )
    return sp
class InitBalance(Base):
    """A user's opening balance; exposed on User via the `init_balances` backref."""
    __tablename__ = 'initbalances'
    userid = Column(Integer, ForeignKey('users.id'), nullable=False, primary_key=True)
    balance = Column(Numeric(4, 0), nullable=False)
    reccreatedat = Column(DateTime, default=func.utc_timestamp())
    recupdatedat = Column(DateTime, default=func.utc_timestamp(), onupdate=func.utc_timestamp())
    user = relationship(User, backref=backref('init_balances'))
class shopping_api(Base):
    """A store price quote for an ingredient, fetched from a shopping API."""
    __tablename__ = "shopping_api"
    shopping_api_id = Column(Integer, primary_key=True, index=True)
    ingredient_id = Column(Integer, ForeignKey("ingredient.ingredient_id"), nullable=False)
    shopping_api_title = Column(VARCHAR(128), nullable=True)
    shopping_api_price = Column(Integer, nullable=True)
    shopping_api_store = Column(VARCHAR(20), nullable=True)
    # NOTE(review): Date column defaulted from utc_timestamp() -- presumably
    # truncated to a date by the backend; confirm intended behavior.
    shopping_api_date = Column(Date, default=func.utc_timestamp())
    shopping_api_link = Column(VARCHAR(255), nullable=True)
    ingredient = relationship("ingredient", back_populates='shopping_api')

    def __init__(self, ingredient_id, shopping_api_title, shopping_api_price,
                 shopping_api_store, shopping_api_link):
        """Create a quote; date and id are filled by column defaults/DB."""
        self.ingredient_id = ingredient_id
        self.shopping_api_title = shopping_api_title
        self.shopping_api_price = shopping_api_price
        self.shopping_api_store = shopping_api_store
        self.shopping_api_link = shopping_api_link

    def __repr__(self):
        return "<shopping_api('%s', '%s', '%s', '%s', '%s')>" % (
            self.ingredient_id, self.shopping_api_title, self.shopping_api_price,
            self.shopping_api_store, self.shopping_api_link)
class Post(Base):
    """A simple bulletin-board post (table `Posts`)."""
    __tablename__ = 'Posts'
    id = Column(Integer, primary_key=True)
    head = Column(String(1024))
    content = Column(String(4048))
    author = Column(String(128))
    registdt = Column(DateTime, default=func.utc_timestamp())
    updatedt = Column(DateTime)

    def __init__(self, head, content, author):
        self.head = head
        self.content = content
        self.author = author

    def getupdatedt(self):
        """Return the last-update value (may be a pending SQL expression)."""
        return self.updatedt

    def setupdatedt(self):
        # Assigns the SQL expression; it is evaluated server-side on flush.
        self.updatedt = func.utc_timestamp()
        return self.getupdatedt()

    def __repr__(self):
        return '<Post %s by %s>' % (self.head, self.author)

    def _jsonify(self):
        """Serialize head/content/author to a JSON string (non-ASCII kept)."""
        return json.dumps({"head": self.head, "content": self.content, \
                           "author": self.author}, ensure_ascii=False)

    def _getjson(self):
        """Return a plain dict including the primary key."""
        return {"head": self.head, "content": self.content, \
                "author": self.author, "id": self.id}
def test_update_executemany(self):
    """executemany UPDATE with a SQL-function SET and a bindparam WHERE."""
    with testing.db.connect() as conn:
        ts = datetime.datetime(2015, 4, 17, 18, 5, 2)
        # Seed three rows sharing the same timestamp.
        conn.execute(
            self.tables.t.insert(),
            [{"x": x, "data": ts} for x in (5, 6, 7)],
        )
        stmt = (
            self.tables.t.update()
            .values(data=func.utc_timestamp())
            .where(self.tables.t.c.x == bindparam("xval"))
        )
        conn.execute(stmt, [{"xval": x} for x in (5, 6, 7)])
class SentNotification(Base):
    """Record of a notification sent for a watch hit."""
    __tablename__ = 'sent_notification'
    id = Column(Integer, primary_key=True)
    sent_at = Column(DateTime, default=func.utc_timestamp())
    triggered_post = Column(String(6))
    watch_id = Column(Integer, ForeignKey('watch.id'))
    watch = relationship("Watch", back_populates='sent_notifications')
class AudioFingerPrint(Base):
    """One audio fingerprint hash/offset pair for a post."""
    __tablename__ = 'audio_fingerprints'
    id = Column(Integer, primary_key=True)
    post_id = Column(String(100), nullable=False)
    hash = Column(String(30), nullable=False)
    offset = Column(Integer, nullable=False)
    created_at = Column(DateTime, default=func.utc_timestamp())
class LinkRepost(Base):
    """A detected link repost: `post_id` reposts `repost_of`."""
    __tablename__ = 'link_reposts'
    __table_args__ = (
        Index('Index 3', 'repost_of', unique=False),
        Index('idx_author', 'author', unique=False),
        Index('idx_detected_at', 'detected_at', unique=False),
        Index('idx_repost_of_date', 'detected_at', 'author', unique=False),
    )
    id = Column(Integer, primary_key=True)
    post_id = Column(String(100), nullable=False, unique=True)
    repost_of = Column(String(100), nullable=False)
    detected_at = Column(DateTime, default=func.utc_timestamp())
    author = Column(String(100))
    subreddit = Column(String(100), nullable=False)
    source = Column(String(100))

    def to_dict(self):
        """Plain-dict view; detected_at serialized as a Unix timestamp or None."""
        return {
            'id': self.id,
            'post_id': self.post_id,
            'repost_of': self.repost_of,
            'detected_at': self.detected_at.timestamp() if self.detected_at else None,
            'author': self.author,
            'subreddit': self.subreddit,
            'source': self.source,
        }
class Comment(Base):
    """An ingested Reddit comment body."""
    __tablename__ = 'reddit_comments'
    id = Column(Integer, primary_key=True)
    comment_id = Column(String(100), nullable=False, unique=True)
    body = Column(Text(collation='utf8mb4_general_ci'))
    ingested_at = Column(DateTime, default=func.utc_timestamp())
def upgrade():
    """Alembic migration: add login-tracking columns to `user` and create
    the `roles_users` association table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'user',
        sa.Column('last_login_ip', sa.String(length=50), nullable=True))
    op.add_column(
        'user',
        sa.Column('current_login_ip', sa.String(length=50), nullable=True))
    op.add_column('user', sa.Column('last_login_at', sa.DateTime(),
                                    nullable=True))
    # NOTE(review): `default=` is a client-side default and has no effect in
    # DDL emitted by add_column; server_default would -- confirm intent.
    op.add_column(
        'user',
        sa.Column('current_login_at', sa.DateTime(), nullable=True,
                  default=func.utc_timestamp()))
    op.add_column('user', sa.Column('login_count', sa.Integer(),
                                    nullable=True))
    op.create_table('roles_users',
                    sa.Column('user_id', sa.Integer(), nullable=True),
                    sa.Column('role_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(
                        ['role_id'],
                        ['role.id'],
                    ),
                    sa.ForeignKeyConstraint(
                        ['user_id'],
                        ['user.id'],
                    ),
                    mysql_default_charset='utf8')
def update_ban_list(uowm: UnitOfWorkManager, reddit: Reddit, notification_svc: NotificationService = None) -> NoReturn:
    """
    Go through banned subs and see if we're still banned
    :rtype: NoReturn
    :param uowm: UnitOfWorkManager
    :param reddit: Reddit
    """
    log.info('Starting Job: Update Subreddit Bans')
    with uowm.start() as uow:
        bans = uow.banned_subreddit.get_all()
        for ban in bans:
            # Re-check at most once per day per subreddit.
            last_checked_delta = (datetime.utcnow() - ban.last_checked).days
            if last_checked_delta < 1:
                log.debug('Banned sub %s last checked %s days ago.  Skipping',
                          ban.subreddit, last_checked_delta)
                continue
            if is_bot_banned(ban.subreddit, reddit):
                log.info('[Subreddit Ban Check] Still banned on %s', ban.subreddit)
                # Server-side UTC timestamp, evaluated on flush.
                ban.last_checked = func.utc_timestamp()
            else:
                log.info('[Subreddit Ban Check] No longer banned on %s', ban.subreddit)
                uow.banned_subreddit.remove(ban)
                if notification_svc:
                    notification_svc.send_notification(
                        f'Removed {ban.subreddit} from ban list',
                        subject='Subreddit Removed From Ban List!')
            # Commit after each ban so partial progress survives a failure.
            uow.commit()
class RepostWatch(Base):
    """A user's request to be notified when a post is reposted."""
    __tablename__ = 'reddit_repost_watch'
    id = Column(Integer, primary_key=True)
    post_id = Column(String(100), nullable=False)
    user = Column(String(100), nullable=False)
    created_at = Column(DateTime, default=func.utc_timestamp())
    last_detection = Column(DateTime)
    same_sub = Column(Boolean, default=False, nullable=False)
    expire_after = Column(Integer)
    enabled = Column(Boolean, default=True)
    source = Column(String(100))

    def to_dict(self):
        # NOTE(review): created_at.timestamp() is unguarded while
        # last_detection is guarded -- raises if created_at unset; confirm.
        return {
            'id': self.id,
            'enabled': self.enabled,
            'post_id': self.post_id,
            'user': self.user,
            'created_at': self.created_at.timestamp(),
            'last_detection': self.last_detection.timestamp() if self.last_detection else None,
            'expire_after': self.expire_after,
            'source': self.source
        }
class VideoHash(Base):
    """Frame-hash signature for a Reddit-hosted video post."""
    __tablename__ = 'reddit_video_hashes'
    id = Column(Integer, primary_key=True)
    post_id = Column(String(100), nullable=False, unique=True)
    created_at = Column(DateTime, default=func.utc_timestamp())
    hashes = Column(String(1300))
    length = Column(Integer)
def persistOperation(self, operation):
    """ update or insert request into db
        Also release the assignment tag

    :param operation: FTS3Operation instance
    :returns: S_OK(operationID) or S_ERROR
    """
    session = self.dbSession(expire_on_commit=False)

    # set the assignment to NULL
    # so that another agent can work on the request
    operation.assignment = None

    # because of the merge we have to explicitely set lastUpdate
    operation.lastUpdate = func.utc_timestamp()
    try:
        # Merge it in case it already is in the DB
        operation = session.merge(operation)
        # NOTE(review): add() after merge() looks redundant -- merge already
        # places the instance in the session; confirm before removing.
        session.add(operation)
        session.commit()
        session.expunge_all()

        return S_OK(operation.operationID)

    except SQLAlchemyError as e:
        session.rollback()
        self.log.exception("persistOperation: unexpected exception", lException=e)
        return S_ERROR("persistOperation: unexpected exception %s" % e)
    finally:
        session.close()
class Seebug(Base):
    """ Database mapping for Seebug vulnerability records. """
    __tablename__ = "vul_seebug"
    id = Column(Integer, primary_key=True)
    title = Column(String(256), nullable=False)
    ssvid = Column(String(32), nullable=False)
    discover_time = Column(Date, nullable=True)
    commit_time = Column(Date, nullable=True)
    #PubTime = Column(Date,nullable=True)
    danger_level = Column(String(8), nullable=False)
    bug_type = Column(String(32), nullable=True)
    cveid = Column(String(16), nullable=True)
    cnnydid = Column(String(16), nullable=True)
    cnvdid = Column(String(16), nullable=True)
    author = Column(String(16), nullable=True)
    commitor = Column(String(16), nullable=True)
    zoomeye_dork = Column(String(16), nullable=True)
    influence_component = Column(String(16), nullable=True)
    bug_abstract = Column(String(512), nullable=True)
    url = Column(String(256), nullable=False)
    # NOTE(review): default=func.md5(url) builds a SQL expression over the
    # `url` column object at class-definition time -- presumably intended to
    # hash the row's own url on insert; verify it works on this backend.
    url_md5 = Column(String(32), nullable=False, unique=True, default=func.md5(url))
    # NOTE(review): save_time uses now() (session TZ) while last_modified uses
    # utc_timestamp() -- mixed timezones; confirm this is intentional.
    save_time = Column(DateTime, default=func.now())
    last_modified = Column(DateTime, onupdate=func.utc_timestamp())
class StakeWindow(Base):
    """One staking window for a blockchain (submission/approval/withdraw periods)."""
    __tablename__ = "stake_window"
    # BUG FIX: a bare UniqueConstraint(...) call in a declarative class body
    # is discarded; it must go through __table_args__ to reach the table DDL.
    __table_args__ = (UniqueConstraint("blockchain_id", name="uq_stake_window"),)
    id = Column("id", Integer, primary_key=True, autoincrement=True)
    blockchain_id = Column("blockchain_id", Integer, nullable=False)
    # Window phase boundaries (block/period numbers).
    start_period = Column("start_period", Integer, nullable=False)
    submission_end_period = Column("submission_end_period", Integer, nullable=False)
    approval_end_period = Column("approval_end_period", Integer, nullable=False)
    request_withdraw_start_period = Column("request_withdraw_start_period", Integer, nullable=False)
    end_period = Column("end_period", Integer, nullable=False)
    min_stake = Column("min_stake", BIGINT, nullable=False)
    open_for_external = Column("open_for_external", BOOLEAN, nullable=False)
    total_stake = Column("total_stake", BIGINT, nullable=False)
    reward_amount = Column("reward_amount", BIGINT, nullable=False)
    token_operator = Column("token_operator", VARCHAR(50), nullable=False)
    # NOTE(review): created_on has no default while updated_on defaults to
    # utc_timestamp() -- callers must supply created_on; confirm.
    created_on = Column("created_on", TIMESTAMP(timezone=False), nullable=False)
    updated_on = Column("updated_on", TIMESTAMP(timezone=False), nullable=False,
                        default=func.utc_timestamp())
def test_update_executemany_w_default(self, connection):
    """executemany UPDATE against a table whose column carries a SQL default."""
    ts = datetime.datetime(2015, 4, 17, 18, 5, 2)
    # Seed three rows through the provided connection fixture.
    connection.execute(
        self.tables.t_default.insert(),
        [{"x": x, "idata": ts} for x in (5, 6, 7)],
    )
    stmt = (
        self.tables.t_default.update()
        .values(idata=func.utc_timestamp())
        .where(self.tables.t_default.c.x == bindparam("xval"))
    )
    connection.execute(stmt, [{"xval": x} for x in (5, 6, 7)])
def kickStuckJobs(self, limit=20, kickDelay=2):
    """finds jobs that have not been updated for more than a given
        time but are still assigned and resets the assignment

    :param int limit: number of jobs to treat
    :param int kickDelay: age of the lastUpdate in hours

    :returns: S_OK/S_ERROR with number of kicked jobs
    """
    session = self.dbSession(expire_on_commit=False)
    try:
        # Candidate jobs: still assigned, lastUpdate older than kickDelay hours.
        ftsJobs = session.query(FTS3Job.jobID)\
            .filter(FTS3Job.lastUpdate < (func.date_sub(func.utc_timestamp(), text('INTERVAL %d HOUR' % kickDelay ))))\
            .filter(~FTS3Job.assignment.is_(None))\
            .limit(limit)
        jobIDs = [jobTuple[0] for jobTuple in ftsJobs]
        rowCount = 0
        if jobIDs:
            # The age condition is repeated inside the UPDATE so a job that was
            # refreshed between SELECT and UPDATE is not kicked.
            result = session.execute(
                update(FTS3Job)
                .where(
                    FTS3Job.jobID.in_(jobIDs))
                .where(
                    FTS3Job.lastUpdate < (
                        func.date_sub(
                            func.utc_timestamp(),
                            text(
                                'INTERVAL %d HOUR' % kickDelay))))
                .values(
                    {
                        'assignment': None}))
            rowCount = result.rowcount
        session.commit()
        session.expunge_all()
        return S_OK(rowCount)
    except SQLAlchemyError as e:
        session.rollback()
        return S_ERROR("kickStuckJobs: unexpected exception : %s" % e)
    finally:
        session.close()
class BaseMixin:
    """Mixin with surrogate key, UTC audit timestamps, and CRUD helpers."""
    id = Column(Integer, primary_key=True, index=True)
    created_at = Column(DateTime, nullable=False, default=func.utc_timestamp())
    updated_at = Column(DateTime, nullable=False, default=func.utc_timestamp(), onupdate=func.utc_timestamp())

    def all_columns(self):
        """Return the writable columns (everything but the PK and created_at)."""
        return [c for c in self.__table__.columns
                if c.primary_key is False and c.name != "created_at"]

    def __hash__(self):
        # Hash by primary key so ORM instances can live in sets/dicts.
        return hash(self.id)

    @classmethod
    def create(cls, session: Session, auto_commit=False, **kwargs):
        """
        Table Data loading function

        Copies only kwargs that match writable columns onto a new instance,
        flushes it (so defaults/PK are populated), and optionally commits.

        :param session: active SQLAlchemy session
        :param auto_commit: commit after flushing when True
        :param kwargs: column-name/value pairs
        :return: the new instance
        """
        obj = cls()
        for col in obj.all_columns():
            col_name = col.name
            if col_name in kwargs:
                setattr(obj, col_name, kwargs.get(col_name))
        session.add(obj)
        session.flush()
        if auto_commit:
            session.commit()
        return obj

    @classmethod
    def get(cls, **kwargs):
        """
        Simply get a row

        Filters by equality on each kwarg; raises if more than one row matches.
        NOTE(review): count() issues an extra query before first() -- one()
        would do this in a single round-trip; confirm before changing.

        :param kwargs: column-name/value equality filters
        :return: the matching instance or None
        """
        session = next(db.session())
        query = session.query(cls)
        for key, val in kwargs.items():
            col = getattr(cls, key)
            query = query.filter(col == val)
        if query.count() > 1:
            raise Exception("Only one row is supposed to be returned, but got more than one.")
        return query.first()
def define_tables(cls, metadata):
    """Create the test fixtures: `t` (plain DateTime column) and `t_default`
    (DateTime columns with insert default and onupdate SQL functions)."""
    Table(
        "t",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("x", Integer),
        Column("data", DateTime),
    )
    Table(
        "t_default",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("x", Integer),
        Column("idata", DateTime, default=func.utc_timestamp()),
        Column("udata", DateTime, onupdate=func.utc_timestamp()),
    )
class BannedUser(Base):
    """A user banned from interacting with the bot, with optional expiry."""
    __tablename__ = 'banned_users'
    id = Column(Integer, primary_key=True)
    name = Column(String(100), nullable=False, unique=True)
    reason = Column(String(150), nullable=False)
    banned_at = Column(DateTime, default=func.utc_timestamp(), nullable=False)
    expires_at = Column(DateTime)
    notes = Column(String(500))
class ImageSearch(Base):
    """One image repost search: parameters used, timings, and results."""
    __tablename__ = 'reddit_image_search'
    __table_args__ = (
        Index('subsearched', 'subreddit', 'source', 'matches_found', unique=False),
        Index('Index 2', 'post_id', unique=False),
        Index('idx_source', 'source', unique=False),
    )
    id = Column(Integer, primary_key=True)
    post_id = Column(String(100), nullable=False)
    source = Column(String(50), nullable=False)
    used_historical_index = Column(Boolean, nullable=False)
    used_current_index = Column(Boolean, nullable=False)
    # Matching thresholds used for this search.
    target_hamming_distance = Column(Integer, nullable=False)
    target_annoy_distance = Column(Float, nullable=False)
    same_sub = Column(Boolean, nullable=False)
    max_days_old = Column(Integer)
    filter_dead_matches = Column(Boolean, nullable=False)
    only_older_matches = Column(Boolean, nullable=False)
    meme_filter = Column(Boolean, nullable=False)
    target_title_match = Column(Integer, nullable=True)
    meme_template_used = Column(Integer)
    # Timing breakdown in seconds.
    search_time = Column(Float, nullable=False)
    index_search_time = Column(Float)
    total_filter_time = Column(Float)
    matches_found = Column(Integer, nullable=False)
    searched_at = Column(DateTime, default=func.utc_timestamp(), nullable=True)
    search_results = Column(Text(75000, collation='utf8mb4_general_ci'))
    subreddit = Column(String(100), nullable=False)
    target_image_match = Column(Integer, default=92)
    target_image_meme_match = Column(Integer, default=97)

    def to_dict(self):
        """Plain-dict view; searched_at serialized as a Unix timestamp or None.

        BUG FIX: searched_at is a nullable column but was dereferenced with
        .timestamp() unguarded; now guarded the same way LinkRepost.to_dict
        guards detected_at.
        """
        return {
            'id': self.id,
            'post_id': self.post_id,
            'source': self.source,
            'target_hamming_distance': self.target_hamming_distance,
            'used_historical_index': self.used_historical_index,
            'used_current_index': self.used_current_index,
            'same_sub': self.same_sub,
            'max_days_old': self.max_days_old,
            'filter_dead_matches': self.filter_dead_matches,
            'only_older_matches': self.only_older_matches,
            'meme_filter': self.meme_filter,
            'meme_template_used': self.meme_template_used,
            'search_time': self.search_time,
            'index_search_time': self.index_search_time,
            'total_filter_time': self.total_filter_time,
            'searched_at': self.searched_at.timestamp() if self.searched_at else None,
            'matches_found': self.matches_found,
            'subreddit': self.subreddit,
            'target_image_match': self.target_image_match,
            'target_image_meme_match': self.target_image_meme_match
        }
class Customer(Base):
    """Customer Entity"""
    __tablename__ = "customers"
    id = Column(Integer, primary_key=True)
    name = Column(String)
    email = Column(String, index=True, unique=True)
    # NOTE(review): create_date uses now() (session TZ) while modified_date
    # uses utc_timestamp() -- mixed timezones; confirm this is intentional.
    create_date = Column(DateTime, default=func.now())
    modified_date = Column(DateTime, onupdate=func.utc_timestamp())
def clearOldNotifications(conn):
    """Clear old notifications

    Deletes notification rows whose timestamp is more than 30 days old,
    with the age comparison evaluated server-side in UTC.

    :param conn: SQLAlchemy connection object
    :type conn: sqlalchemy.engine.base.Connection
    """
    olderThan30Days = (
        func.date_add(schema.notification.c.timestamp, text("INTERVAL 30 DAY"))
        < func.utc_timestamp())
    stmt = schema.notification.delete().where(olderThan30Days)  # pylint: disable=E1120
    conn.execute(stmt)
def notify_watch(self, watches: List[Dict[SearchMatch, RepostWatch]], repost: Post):
    """Notify watch owners about a repost, then record the detection time
    on each watch and persist the updates."""
    repost_watch_notify(watches, self.reddit, self.response_handler, repost)
    with self.uowm.start() as uow:
        for w in watches:
            # Server-side UTC timestamp, evaluated on flush.
            w['watch'].last_detection = func.utc_timestamp()
            uow.repostwatch.update(w['watch'])
        try:
            uow.commit()
        except Exception as e:
            # NOTE(review): logs the last `w` from the loop above; all watches
            # in this unit of work failed to save -- confirm intended.
            log.exception('Failed to save repost watch %s', w['watch'].id, exc_info=True)
class BotPrivateMessage(Base):
    """A private message the bot sent, with what triggered it."""
    __tablename__ = 'reddit_bot_private_message'
    id = Column(Integer, primary_key=True)
    subject = Column(String(200), nullable=False)
    body = Column(String(1000), nullable=False)
    in_response_to_comment = Column(String(20))
    in_response_to_post = Column(String(100))
    recipient = Column(String(150), nullable=False)
    triggered_from = Column(String(50), nullable=False)
    message_sent_at = Column(DateTime, default=func.utc_timestamp())
def deleteStaleNotificationDevices(conn, days):
    """Deletes devices from notifications if they haven't been active recently.

    :param conn: SQLAlchemy connection object
    :type conn: sqlalchemy.engine.base.Connection
    :param days: the number of days of absense before considering a device stale
    :type days: int
    """
    # Delete rows whose last_timestamp + <days> DAY is already in the past.
    # NOTE(review): days is %-interpolated into raw SQL; safe only while
    # callers always pass a trusted int -- confirm.
    query = schema.notification_settings.delete().where(  # pylint: disable=E1120
        func.date_add(schema.notification_settings.c.last_timestamp,
                      text("INTERVAL %i DAY" % days)) < func.utc_timestamp())
    conn.execute(query)
def updateJobStatus(self, jobStatusDict):
    """ Update the job Status and error
        The update is only done if the job is not in a final state
        The assignment flag is released

    :param jobStatusDict : { jobID : { status , error, completeness } }
    :returns: S_OK() or S_ERROR
    """
    session = self.dbSession()
    try:
        # BUG FIX: dict.iteritems() exists only on Python 2; items() behaves
        # the same here and works on both Python 2 and 3.
        for jobID, valueDict in jobStatusDict.items():
            updateDict = {FTS3Job.status: valueDict['status']}
            # We only update error if it is specified
            if 'error' in valueDict:
                newError = valueDict['error']
                # Replace empty string with None
                if not newError:
                    newError = None
                updateDict[FTS3Job.error] = newError
            if 'completeness' in valueDict:
                updateDict[FTS3Job.completeness] = valueDict['completeness']
            if valueDict.get('lastMonitor'):
                # Monitoring time is recorded server-side in UTC.
                updateDict[FTS3Job.lastMonitor] = func.utc_timestamp()
            # Release the assignment so another agent can pick the job up.
            updateDict[FTS3Job.assignment] = None
            session.execute(update(FTS3Job)
                            .where(and_(FTS3Job.jobID == jobID,
                                        ~ FTS3Job.status.in_(FTS3Job.FINAL_STATES)
                                        )
                                   )
                            .values(updateDict)
                            )
        session.commit()
        return S_OK()
    except SQLAlchemyError as e:
        session.rollback()
        self.log.exception("updateJobStatus: unexpected exception", lException=e)
        return S_ERROR("updateJobStatus: unexpected exception %s" % e)
    finally:
        session.close()
def updateNotificationDeviceTimestamp(conn, deviceId):
    """Updates last access timestamp for the specified device.

    :param conn: SQLAlchemy connection object
    :type conn: sqlalchemy.engine.base.Connection
    :param deviceId: Device uid
    :type deviceId: str
    :raises: ObjectNotFoundError when there is no device with deviceId
             configured
    """
    settings = schema.notification_settings
    stmt = settings.update()
    stmt = stmt.where(settings.c.uid == deviceId)
    # last_timestamp is set server-side in UTC.
    stmt = stmt.values(last_timestamp=func.utc_timestamp())
    result = conn.execute(stmt)
    if result.rowcount == 0:
        raise ObjectNotFoundError("No notification settings for device: %s"
                                  % deviceId)
def test_update_executemany_w_default(self):
    """executemany UPDATE on a table whose target column has a SQL default."""
    with testing.db.connect() as conn:
        ts = datetime.datetime(2015, 4, 17, 18, 5, 2)
        seed = [{"x": x, "idata": ts} for x in (5, 6, 7)]
        conn.execute(self.tables.t_default.insert(), seed)
        conn.execute(
            self.tables.t_default.update()
            .values(idata=func.utc_timestamp())
            .where(self.tables.t_default.c.x == bindparam("xval")),
            [{"xval": x} for x in (5, 6, 7)],
        )
def deleteFinalOperations(self, limit=20, deleteDelay=180):
    """deletes operation in final state that are older than given time

    :param int limit: number of operations to treat
    :param int deleteDelay: age of the lastUpdate in days

    :returns: S_OK/S_ERROR with number of deleted operations
    """
    session = self.dbSession(expire_on_commit=False)
    try:
        # Candidate operations: final state, lastUpdate older than deleteDelay days.
        ftsOps = session.query(FTS3Operation.operationID)\
            .filter(FTS3Operation.lastUpdate < (func.date_sub(func.utc_timestamp(), text('INTERVAL %d DAY' % deleteDelay))))\
            .filter(FTS3Operation.status.in_(FTS3Operation.FINAL_STATES))\
            .limit(limit)
        opIDs = [opTuple[0] for opTuple in ftsOps]
        rowCount = 0
        if opIDs:
            result = session.execute(delete(FTS3Operation)
                                     .where(FTS3Operation.operationID.in_(opIDs)))
            rowCount = result.rowcount
        session.commit()
        session.expunge_all()
        return S_OK(rowCount)
    except SQLAlchemyError as e:
        session.rollback()
        return S_ERROR("deleteFinalOperations: unexpected exception : %s" % e)
    finally:
        session.close()
def is_stuck(self, mins=10):
    """Build a SQL predicate: updated_at is NULL, or older than `mins` minutes
    relative to the server's UTC clock."""
    age_minutes = func.timestampdiff(text('minute'), self.updated_at,
                                     func.utc_timestamp())
    never_updated = self.updated_at == None  # noqa: E711 -- renders as IS NULL
    return never_updated | (age_minutes > mins)
from DIRAC.DataManagementSystem.Client.FTS3Job import FTS3Job
from DIRAC.ConfigurationSystem.Client.Utilities import getDBParameters

__RCSID__ = "$Id$"

metadata = MetaData()

# Classical SQLAlchemy table mapping for FTS3File (DIRAC does not use
# declarative models here).
fts3FileTable = Table('Files', metadata,
                      Column('fileID', Integer, primary_key=True),
                      Column('operationID', Integer,
                             ForeignKey('Operations.operationID', ondelete='CASCADE'),
                             nullable=False),
                      Column('attempt', Integer, server_default='0'),
                      # Touched automatically (UTC) on every row update.
                      Column('lastUpdate', DateTime, onupdate=func.utc_timestamp()),
                      Column('rmsFileID', Integer, server_default='0'),
                      Column('lfn', String(1024)),
                      Column('checksum', String(255)),
                      Column('size', BigInteger),
                      Column('targetSE', String(255), nullable=False),
                      Column('error', String(2048)),
                      Column('ftsGUID', String(255)),
                      Column('status', Enum(*FTS3File.ALL_STATES),
                             server_default=FTS3File.INIT_STATE,
                             index=True),
                      mysql_engine='InnoDB',
                      )
mapper(FTS3File, fts3FileTable)
def test_utc_timestamp(self):
    # utc_timestamp() should compile to the bare UTC_TIMESTAMP keyword.
    expr = func.utc_timestamp()
    self.assert_compile(expr, "UTC_TIMESTAMP")
def test_utc_timestamp_fsp(self):
    # A fractional-seconds-precision argument compiles to a bound parameter.
    expected_sql = "utc_timestamp(%s)"
    self.assert_compile(
        func.utc_timestamp(5),
        expected_sql,
        checkparams={"utc_timestamp_1": 5},
    )
def test_utc_timestamp(self):
    # Zero-argument utc_timestamp() compiles with empty parentheses here.
    expected_sql = "utc_timestamp()"
    self.assert_compile(func.utc_timestamp(), expected_sql)
)
# Lookup table of annotation type names (unique).
annotation_types = Table('annotation_types', meta,
    Column('annotation_type_id', Integer, primary_key=True),
    Column('name', String(255), nullable=False),
    UniqueConstraint('name', name='uc_annotation_type_name')
    )
# Annotations attach a typed, identified comment by a creator to a document.
annotations = Table('annotations', meta,
    Column('annotation_id', Integer, primary_key=True),
    Column('document_id', Integer, nullable=False),
    Column('annotation_type_id', Integer, nullable=False),
    Column('identifier_id', Integer, nullable=False),
    Column('creator_id', Integer, nullable=False),
    # Creation time defaults to UTC server time.
    Column('created_ts', DateTime(timezone=True), default=func.utc_timestamp()),
    Column('comment', Text, nullable=False),
    ForeignKeyConstraint(['document_id'], ['documents.document_id']),
    ForeignKeyConstraint(['annotation_type_id'], ['annotation_types.annotation_type_id']),
    ForeignKeyConstraint(['identifier_id'], ['identifiers.identifier_id']),
    ForeignKeyConstraint(['creator_id'], ['creators.creator_id']),
    )
# Lookup table of scanner names (unique).
scanners = Table('scanners', meta,
    Column('scanner_id', Integer, primary_key=True),
    Column('name', String(255), nullable=False),
    UniqueConstraint('name', name='uc_scanner_name')
    )
packages_scans = Table('packages_scans', meta,
def test_insert_executemany(self):
    """executemany INSERT where one column value is a SQL function."""
    with testing.db.connect() as conn:
        stmt = self.tables.t.insert().values(data=func.utc_timestamp())
        conn.execute(stmt, [{"x": x} for x in (5, 6, 7)])
#!/usr/bin/env python # -*- coding:utf8 -*- from datetime import datetime from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import Column, Integer, Date from sqlalchemy.types import TypeDecorator from sqlalchemy import DateTime import time from sqlalchemy import func from sqlalchemy.sql.expression import select, text dsn = "mysql://*****:*****@localhost/xxxxx?charset=utf8" engine = create_engine(dsn, convert_unicode=True, echo=False) db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine)) Base = declarative_base() Base.query = db_session.query_property() if __name__ == "__main__": r = db_session.execute(select([(func.utc_timestamp() - text("INTERVAL 2 DAY"))])).first() print r[0].strftime("%Y/%m/%d %H:%I:%S") # func => SQLの関数 # text => SQLの文
def _getCloudwatchMetricReadinessPredicate(conn): """ Generate an sqlAlchemy predicate that determines whether the metric is ready for data collection. :returns: sqlAlchemy predicate for use in `where` clause """ # NOTE: the time arithmetic must be coordinated with # grok.app.aws.cloudwatch_utils.getMetricCollectionBackoffSeconds() # NOTE: the time difference logic here is a heuristic fine-tuned for # cloudwatch-based metrics as follows: # * Cloudwatch metrics aggregated over a specific period (e.g., 5 minutes) # appear to be arranged in contiguous time buckets each with a specific # start and end time; we don't have visibility into the actual start time # of any bucket, which appears to depend on when the cloudwatch metric was # created. # * In the higher-level logic, the first time we query a metric, we pick the # metric's virtual starting time based on a 14-day backoff from current # time (which is not the actual metric time bucket's start time) and # subsequently add the metric's period to arrive at the next metric # value's virtual start time, and so on. # * Based on experiments with 5-minute-aggregated metrics, it appears that a # metric's aggregated value becomes availabe one period after the true end # of the metric value's time bucket. If you don't wait long enough, you # either don't get any value from cloudwatch (which is a wasted slow call # that contributes to API throttling) or you might get a non-final # value. 
# * Since we don't know the true start time of the time bucket, we # compensate for it: first, we add the metric period to the virtual start # time, which should get us at least to the end of the true time bucket; # then we add another period to get us at least to the point in time where # the time-bucket's data becomes available, and finally add a fudge value # (60 seconds at time of this writing) for the metric value to stabilize return ( (schema.metric.c.last_timestamp == None) | (func.timestampdiff(text("SECOND"), schema.metric.c.last_timestamp, func.utc_timestamp()) >= (schema.metric.c.poll_interval + (schema.metric.c.poll_interval + 60))))